├── .coveragerc ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ └── user-story.md └── workflows │ ├── codeql.yml │ ├── release.yml │ └── tests.yml ├── .gitignore ├── .isort.cfg ├── .pre-commit-config.yaml ├── .prettierignore ├── .prettierrc.yaml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── import_tracker ├── __init__.py ├── __main__.py ├── constants.py ├── import_tracker.py ├── lazy_import_errors.py ├── log.py └── setup_tools.py ├── requirements_test.txt ├── scripts ├── build_wheel.sh ├── fmt.sh ├── install_release.sh ├── publish.sh ├── release.sh └── run_tests.sh ├── setup.py └── test ├── __init__.py ├── conftest.py ├── helpers.py ├── sample_libs ├── all_import_types │ ├── __init__.py │ ├── sub_module1.py │ ├── sub_module2 │ │ └── __init__.py │ └── sub_module3.py ├── bad_lib │ └── __init__.py ├── conditional_deps │ ├── __init__.py │ └── mod.py ├── decorator_deps │ ├── __init__.py │ └── opt_decorator.py ├── deep_siblings │ ├── __init__.py │ ├── blocks │ │ ├── __init__.py │ │ ├── bar_type │ │ │ ├── __init__.py │ │ │ └── bar.py │ │ └── foo_type │ │ │ ├── __init__.py │ │ │ └── foo.py │ └── workflows │ │ ├── __init__.py │ │ └── foo_type │ │ ├── __init__.py │ │ └── foo.py ├── direct_dep_ambiguous │ ├── __init__.py │ ├── bar.py │ └── foo.py ├── direct_dep_nested │ ├── __init__.py │ ├── nested.py │ └── nested2.py ├── full_depth_direct_and_transitive │ ├── __init__.py │ ├── bar.py │ └── foo.py ├── inter_mod_deps │ ├── __init__.py │ ├── submod1 │ │ └── __init__.py │ ├── submod2 │ │ ├── __init__.py │ │ ├── bar.py │ │ └── foo.py │ ├── submod3.py │ ├── submod4.py │ └── submod5.py ├── intermediate_extras │ ├── __init__.py │ ├── bar │ │ └── __init__.py │ └── foo │ │ ├── __init__.py │ │ ├── bat.py │ │ └── baz │ │ └── __init__.py ├── lazy_import_errors │ ├── __init__.py │ └── foo.py ├── missing_dep │ ├── __init__.py │ ├── mod.py │ └── other.py ├── optional_deps │ ├── __init__.py │ ├── not_opt.py │ └── opt.py ├── optional_deps_upstream │ └── __init__.py ├── sample_lib │ ├── __init__.py │ ├── nested │ │ ├── __init__.py │ │ └── submod3.py │ ├── submod1 │ │ └── __init__.py │ └── submod2 │ │ └── __init__.py ├── side_effects │ ├── __init__.py │ ├── global_thing.py │ └── mod.py ├── single_extra │ ├── __init__.py │ ├── extra.py │ └── not_extra.py └── type_check_deps │ ├── __init__.py │ ├── type_check_dict.py │ └── type_check_union.py ├── test_import_tracker.py ├── test_lazy_import_errors.py ├── test_main.py └── test_setup_tools.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | test/** 4 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | gabe.l.hart@gmail.com 2 | ghart@us.ibm.com 3 | abrooks9944@email.arizona.edu 4 | alex.brooks@ibm.com 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | ## Describe the bug 10 | 11 | A clear and concise description of what the bug is. 
12 | 13 | ## Platform 14 | 15 | Please provide details about the environment you are using, including the following: 16 | 17 | - Interpreter version: 18 | - Library version: 19 | 20 | ## Sample Code 21 | 22 | Please include a minimal sample of the code that will (if possible) reproduce the bug in isolation. 23 | 24 | ## Expected behavior 25 | 26 | A clear and concise description of what you expected to happen. 27 | 28 | ## Observed behavior 29 | 30 | What you see happening (error messages, stack traces, etc...) 31 | 32 | ## Additional context 33 | 34 | Add any other context about the problem here. 35 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | ## Is your feature request related to a problem? Please describe. 10 | 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | ## Describe the solution you'd like 14 | 15 | A clear and concise description of what you want to happen. 16 | 17 | ## Describe alternatives you've considered 18 | 19 | A clear and concise description of any alternative solutions or features you've considered. 20 | 21 | ## Additional context 22 | 23 | Add any other context about the feature request here. 24 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/user-story.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: User story 3 | about: A user-oriented story describing a piece of work to do 4 | title: "" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | ## Description 10 | 11 | As a <persona>, I want to <do something>, so that I can <accomplish a goal> 12 | 13 | ## Discussion 14 | 15 | Provide detailed discussion here 16 | 17 | ## Acceptance Criteria 18 | 19 | 20 | 21 | - [ ] Unit tests cover new/changed code 22 | - [ ] Examples build against new/changed code 23 | - [ ] READMEs are updated 24 | - [ ] Type of [semantic version](https://semver.org/) change is identified 25 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 
11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: ["main"] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: ["main"] 20 | schedule: 21 | - cron: "17 2 * * 1" 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: ["python"] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 37 | # Use only 'java' to analyze code written in Java, Kotlin or both 38 | # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both 39 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 40 | 41 | steps: 42 | - name: Checkout repository 43 | uses: actions/checkout@v3 44 | 45 | # Initializes the CodeQL tools for scanning. 46 | - name: Initialize CodeQL 47 | uses: github/codeql-action/init@v2 48 | with: 49 | languages: ${{ matrix.language }} 50 | # If you wish to specify custom queries, you can do so here or in a config file. 51 | # By default, queries listed here will override any specified in a config file. 52 | # Prefix the list here with "+" to use these queries and those in the config file. 53 | 54 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 55 | # queries: security-extended,security-and-quality 56 | 57 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). 58 | # If this step fails, then you should remove it and run the build manually (see below) 59 | - name: Autobuild 60 | uses: github/codeql-action/autobuild@v2 61 | 62 | # ℹ️ Command-line programs to run using the OS shell. 63 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 64 | 65 | # If the Autobuild fails above, remove it and uncomment the following three lines. 66 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
67 | 68 | # - run: | 69 | # echo "Run, Build Application using script" 70 | # ./location_of_script_within_repo/buildscript.sh 71 | 72 | - name: Perform CodeQL Analysis 73 | uses: github/codeql-action/analyze@v2 74 | with: 75 | category: "/language:${{matrix.language}}" 76 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This workflow publishes releases of the library to PyPI 2 | name: release 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: {} 7 | jobs: 8 | build-37: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - name: Run release 13 | env: 14 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 15 | PYTHON_VERSION: "3.7" 16 | run: REF="${{ github.ref }}" ./scripts/release.sh 17 | build-38: 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@v2 21 | - name: Run release 22 | env: 23 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 24 | PYTHON_VERSION: "3.8" 25 | run: REF="${{ github.ref }}" ./scripts/release.sh 26 | build-39: 27 | runs-on: ubuntu-latest 28 | steps: 29 | - uses: actions/checkout@v2 30 | - name: Run release 31 | env: 32 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 33 | PYTHON_VERSION: "3.9" 34 | run: REF="${{ github.ref }}" ./scripts/release.sh 35 | build-310: 36 | runs-on: ubuntu-latest 37 | steps: 38 | - uses: actions/checkout@v2 39 | - name: Run release 40 | env: 41 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 42 | PYTHON_VERSION: "3.10" 43 | run: REF="${{ github.ref }}" ./scripts/release.sh 44 | build-311: 45 | runs-on: ubuntu-latest 46 | steps: 47 | - uses: actions/checkout@v2 48 | - name: Run release 49 | env: 50 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 51 | PYTHON_VERSION: "3.11" 52 | run: REF="${{ github.ref }}" ./scripts/release.sh 53 | build-312: 54 | runs-on: ubuntu-latest 55 | steps: 56 | - uses: actions/checkout@v2 57 | - name: Run release 58 | env: 59 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 60 | PYTHON_VERSION: "3.12" 61 | run: REF="${{ github.ref }}" ./scripts/release.sh 62 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | # This workflow runs the unit tests 2 | name: tests 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | workflow_dispatch: {} 9 | jobs: 10 | build-37: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Run unit tests 15 | run: docker build . --target=test --build-arg PYTHON_VERSION=${PYTHON_VERSION} 16 | env: 17 | PYTHON_VERSION: "3.7" 18 | build-38: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v2 22 | - name: Run unit tests 23 | run: docker build . --target=test --build-arg PYTHON_VERSION=${PYTHON_VERSION} 24 | env: 25 | PYTHON_VERSION: "3.8" 26 | build-39: 27 | runs-on: ubuntu-latest 28 | steps: 29 | - uses: actions/checkout@v2 30 | - name: Run unit tests 31 | run: docker build . --target=test --build-arg PYTHON_VERSION=${PYTHON_VERSION} 32 | env: 33 | PYTHON_VERSION: "3.9" 34 | build-310: 35 | runs-on: ubuntu-latest 36 | steps: 37 | - uses: actions/checkout@v2 38 | - name: Run unit tests 39 | run: docker build . 
--target=test --build-arg PYTHON_VERSION=${PYTHON_VERSION} 40 | env: 41 | PYTHON_VERSION: "3.10" 42 | build-311: 43 | runs-on: ubuntu-latest 44 | steps: 45 | - uses: actions/checkout@v2 46 | - name: Run unit tests 47 | run: docker build . --target=test --build-arg PYTHON_VERSION=${PYTHON_VERSION} 48 | env: 49 | PYTHON_VERSION: "3.11" 50 | build-312: 51 | runs-on: ubuntu-latest 52 | steps: 53 | - uses: actions/checkout@v2 54 | - name: Run unit tests 55 | run: docker build . --target=test --build-arg PYTHON_VERSION=${PYTHON_VERSION} 56 | env: 57 | PYTHON_VERSION: "3.12" 58 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .coverage 2 | htmlcov 3 | __pycache__ 4 | *.egg-info/ 5 | dist/ 6 | .DS_Store 7 | .pytest_cache 8 | .bash_history 9 | .python_history 10 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | profile=black 3 | from_first=true 4 | import_heading_future=Future 5 | import_heading_stdlib=Standard 6 | import_heading_thirdparty=Third Party 7 | import_heading_firstparty=First Party 8 | import_heading_localfolder=Local 9 | known_firstparty=alog 10 | known_localfolder=import_tracker,test,sample_lib,bad_deps,conditional_deps 11 | skip=test/sample_libs/direct_dep_ambiguous/__init__.py 12 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/mirrors-prettier 3 | rev: v2.1.2 4 | hooks: 5 | - id: prettier 6 | - repo: https://github.com/psf/black 7 | rev: 22.3.0 8 | hooks: 9 | - id: black 10 | exclude: imports 11 | - repo: https://github.com/PyCQA/isort 12 | rev: 5.11.5 13 | hooks: 14 | - id: isort 15 | exclude: imports 16 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # Ignore this auto-generated json file 2 | test/sample_libs/sample_lib/__static_import_tracker__.json 3 | -------------------------------------------------------------------------------- /.prettierrc.yaml: -------------------------------------------------------------------------------- 1 | tabWidth: 4 2 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. 
8 | 9 | ## Our Standards 10 | 11 | Examples of behavior that contributes to a positive environment for our 12 | community include: 13 | 14 | - Demonstrating empathy and kindness toward other people 15 | - Being respectful of differing opinions, viewpoints, and experiences 16 | - Giving and gracefully accepting constructive feedback 17 | - Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience 18 | - Focusing on what is best not just for us as individuals, but for the overall community 19 | 20 | Examples of unacceptable behavior include: 21 | 22 | - The use of sexualized language or imagery, and sexual attention or advances of any kind 23 | - Trolling, insulting or derogatory comments, and personal or political attacks 24 | - Public or private harassment 25 | - Publishing others' private information, such as a physical or email address, without their explicit permission 26 | - Other conduct which could reasonably be considered inappropriate in a professional setting 27 | 28 | ## Enforcement Responsibilities 29 | 30 | Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. 31 | 32 | Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. 33 | 34 | ## Scope 35 | 36 | This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. 37 | 38 | ## Enforcement 39 | 40 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement - [CODEOWNERS](.github/CODEOWNERS). All complaints will be reviewed and investigated promptly and fairly. 41 | 42 | All community leaders are obligated to respect the privacy and security of the reporter of any incident. 43 | 44 | ## Enforcement Guidelines 45 | 46 | Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: 47 | 48 | ### 1. Correction 49 | 50 | **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. 51 | 52 | **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. 53 | 54 | ### 2. Warning 55 | 56 | **Community Impact**: A violation through a single incident or series of actions. 57 | 58 | **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. 59 | 60 | ### 3. Temporary Ban 61 | 62 | **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. 
63 | 64 | **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. 65 | 66 | ### 4. Permanent Ban 67 | 68 | **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. 69 | 70 | **Consequence**: A permanent ban from any sort of public interaction within the community. 71 | 72 | ## Attribution 73 | 74 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 75 | 76 | Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). 77 | 78 | [homepage]: https://www.contributor-covenant.org 79 | 80 | For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. 81 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | 👍🎉 First off, thank you for taking the time to contribute! 🎉👍 4 | 5 | The following is a set of guidelines for contributing. These are just guidelines, not rules. Use your best judgment, and feel free to propose changes to this document in a pull request. 6 | 7 | ## What Should I Know Before I Get Started? 8 | 9 | If you're new to GitHub and working with open source repositories, this section will be helpful. Otherwise, you can skip to learning how to [set up your dev environment](#set-up-your-dev-environment). 10 | 11 | ### Code of Conduct 12 | 13 | This project adheres to the [Contributor Covenant](./CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. 14 | 15 | Please report unacceptable behavior to one of the [Code Owners](./.github/CODEOWNERS). 16 | 17 | ### How Do I Start Contributing? 18 | 19 | The below workflow is designed to help you begin your first contribution journey. It will guide you through creating and picking up issues, working through them, having your work reviewed, and then merging. 20 | 21 | Help on open source projects is always welcome and there is always something that can be improved. For example, documentation (like the text you are reading now) can always use improvement, code can always be clarified, variables or functions can always be renamed or commented on, and there is always a need for more test coverage. If you see something that you think should be fixed, take ownership! Here is how you get started: 22 | 23 | ## How Can I Contribute? 24 | 25 | When contributing, it's useful to start by looking at [issues](https://github.com/IBM/import-tracker/issues). After picking up an issue, writing code, or updating a document, make a pull request and your work will be reviewed and merged. If you're adding a new feature, it's best to [write an issue](https://github.com/IBM/import-tracker/issues/new?assignees=&labels=&template=feature_request.md&title=) first to discuss it with maintainers. 
26 | 27 | ### Reporting Bugs 28 | 29 | This section guides you through submitting a bug report. Following these guidelines helps maintainers and the community understand your report ✏️, reproduce the behavior 💻, and find related reports 🔎. 30 | 31 | #### How Do I Submit A (Good) Bug Report? 32 | 33 | Bugs are tracked as [GitHub issues using the Bug Report template](https://github.com/IBM/import-tracker/issues/new?assignees=&labels=&template=bug_report.md&title=). Create an issue and provide the information suggested in the bug report issue template. 34 | 35 | ### Suggesting Enhancements 36 | 37 | This section guides you through submitting an enhancement suggestion, including completely new features, tools, and minor improvements to existing functionality. Following these guidelines helps maintainers and the community understand your suggestion ✏️ and find related suggestions 🔎. 38 | 39 | #### How Do I Submit A (Good) Enhancement Suggestion? 40 | 41 | Enhancement suggestions are tracked as [GitHub issues using the Feature Request template](https://github.com/IBM/import-tracker/issues/new?assignees=&labels=&template=feature_request.md&title=). Create an issue and provide the information suggested in the feature request or user story issue template. 42 | 43 | #### How Do I Submit A (Good) Improvement Item? 44 | 45 | Improvements to existing functionality are tracked as [GitHub issues using the User Story template](https://github.com/IBM/import-tracker/issues/new?assignees=&labels=&template=user-story.md&title=). Create an issue and provide the information suggested in the feature request or user story issue template. 46 | 47 | ## Development 48 | 49 | ### Set up your dev environment 50 | 51 | #### Using Docker 52 | 53 | The easiest way to get up and running is to use the dockerized development environment which you can launch using: 54 | 55 | ```sh 56 | make develop 57 | ``` 58 | 59 | Within the `develop` shell, any of the `make` targets that do not require `docker` can be run directly. The shell has the local files mounted, so changes to the files on your host machine will be reflected when commands are run in the `develop` shell. 60 | 61 | #### Locally 62 | 63 | You can also develop locally using standard python development practices. You'll need to install the dependencies for the unit tests. It is recommended that you do this in a virtual environment such as [`conda`](https://docs.conda.io/en/latest/miniconda.html) or [`pyenv`](https://github.com/pyenv/pyenv) so that you avoid version conflicts in a shared global dependency set. 64 | 65 | ```sh 66 | pip install -r requirements_test.txt 67 | ``` 68 | 69 | ### Run unit tests 70 | 71 | Running the tests is as simple as: 72 | 73 | ```sh 74 | make test 75 | ``` 76 | 77 | If you want to use the full set of [`pytest` CLI arguments](https://docs.pytest.org/en/6.2.x/usage.html), you can run the `scripts/run_tests.sh` script directly with any arguments added to the command. For example, to run only a single test without capturing output, you can do: 78 | 79 | ```sh 80 | ./scripts/run_tests.sh \ 81 | test/test_lazy_import_errors.py \ 82 | -k test_lazy_import_happy_package_with_sad_optionals \ 83 | -s 84 | ``` 85 | 86 | ### Code formatting 87 | 88 | This project uses [pre-commit](https://pre-commit.com/) to enforce coding style using [black](https://github.com/psf/black). 
To set up `pre-commit` locally, you can: 89 | 90 | ```sh 91 | pip install pre-commit 92 | ``` 93 | 94 | Coding style is enforced by the CI tests, so if not installed locally, your PR will fail until formatting has been applied. 95 | 96 | ## Your First Code Contribution 97 | 98 | Unsure where to begin contributing? You can start by looking through these issues: 99 | 100 | - Issues with the [`good first issue` label](https://github.com/IBM/import-tracker/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) - these should only require a few lines of code and are good targets if you're just starting to contribute. 101 | - Issues with the [`help wanted` label](https://github.com/IBM/import-tracker/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) - these range from simple to more complex, but are generally things we want but can't get to in a short time frame. 102 | 103 | ### How to contribute 104 | 105 | To contribute to this repo, you'll use the Fork and Pull model common in many open source repositories. For details on this process, watch [how to contribute](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github). 106 | 107 | When ready, you can create a pull request. Pull requests are often referred to as "PRs". In general, we follow the standard [GitHub pull request](https://help.github.com/en/articles/about-pull-requests) process. Follow the template to provide details about your pull request to the maintainers. 108 | 109 | Before sending pull requests, make sure your changes pass tests. 110 | 111 | #### Code Review 112 | 113 | Once you've [created a pull request](#how-to-contribute), maintainers will review your code and likely make suggestions to fix before merging. It will be easier for your pull request to receive reviews if you consider the criteria the reviewers follow while working. Remember to: 114 | 115 | - Run tests locally and ensure they pass 116 | - Follow the project coding conventions 117 | - Write detailed commit messages 118 | - Break large changes into a logical series of smaller patches, which are easy to understand individually and combine to solve a broader issue 119 | 120 | ## Releasing (Maintainers only) 121 | 122 | The responsibility for releasing new versions of the libraries falls to the maintainers. Releases will follow standard [semantic versioning](https://semver.org/) and be hosted on [pypi](https://pypi.org/project/import-tracker/). 123 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ## Base ######################################################################## 2 | # 3 | # This phase sets up dependencies for the other phases 4 | ## 5 | ARG PYTHON_VERSION=3.7 6 | ARG BASE_IMAGE=python:${PYTHON_VERSION}-slim 7 | FROM ${BASE_IMAGE} as base 8 | 9 | # This image is only for building, so we run as root 10 | WORKDIR /src 11 | 12 | # Install build, test, and publish dependencies 13 | COPY requirements_test.txt /src/ 14 | RUN true && \ 15 | apt-get update -y && \ 16 | apt-get install make git -y && \ 17 | apt-get clean autoclean && \ 18 | apt-get autoremove --yes && \ 19 | pip install pip --upgrade && \ 20 | pip install twine pre-commit && \ 21 | pip install -r /src/requirements_test.txt && \ 22 | true 23 | 24 | ## Test ######################################################################## 25 | # 26 | # This phase runs the unit tests for the library 27 | ## 28 | FROM base as test 29 | COPY . 
/src 30 | ARG RUN_FMT="true" 31 | RUN true && \ 32 | ./scripts/run_tests.sh && \ 33 | RELEASE_DRY_RUN=true RELEASE_VERSION=0.0.0 \ 34 | ./scripts/publish.sh && \ 35 | ./scripts/fmt.sh && \ 36 | true 37 | 38 | ## Release ##################################################################### 39 | # 40 | # This phase builds the release and publishes it to pypi 41 | ## 42 | FROM test as release 43 | ARG PYPI_TOKEN 44 | ARG RELEASE_VERSION 45 | ARG RELEASE_DRY_RUN 46 | RUN ./scripts/publish.sh && touch /released.txt 47 | 48 | ## Release Test ################################################################ 49 | # 50 | # This phase installs the indicated version from PyPi and runs the unit tests 51 | # against the installed version. 52 | ## 53 | FROM base as release_test 54 | ARG RELEASE_VERSION 55 | ARG RELEASE_DRY_RUN 56 | # Force a dependency on the release phase so that buildkit doesn't run these in 57 | # parallel 58 | COPY --from=release /released.txt /released.txt 59 | COPY ./test /src/test 60 | COPY ./scripts/run_tests.sh /src/scripts/run_tests.sh 61 | COPY ./scripts/install_release.sh /src/scripts/install_release.sh 62 | RUN true && \ 63 | ./scripts/install_release.sh && \ 64 | ./scripts/run_tests.sh && \ 65 | true 66 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 International Business Machines 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | ##@ General 2 | 3 | all: help 4 | 5 | # NOTE: Help stolen from operator-sdk auto-generated makefile! 6 | .PHONY: help 7 | help: ## Display this help. 8 | @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-\\.]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) 9 | 10 | .PHONY: test 11 | test: ## Run the unit tests 12 | PARALLEL=1 ./scripts/run_tests.sh 13 | 14 | .PHONY: fmt 15 | fmt: ## Run code formatting 16 | ./scripts/fmt.sh 17 | 18 | .PHONY: wheel 19 | wheel: ## Build release wheels 20 | ./scripts/build_wheel.sh 21 | 22 | ##@ Develop 23 | 24 | .PHONY: develop.build 25 | develop.build: ## Build the development environment container 26 | docker build . 
--target=base -t import-tracker-develop 27 | 28 | .PHONY: develop 29 | develop: develop.build ## Run the develop shell with the local codebase mounted 30 | touch .bash_history 31 | docker run --rm -it \ 32 | --entrypoint bash \ 33 | -w /src \ 34 | -v ${PWD}:/src \ 35 | -v ${PWD}/.bash_history:/root/.bash_history \ 36 | import-tracker-develop 37 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![OpenSSF Best Practices](https://bestpractices.coreinfrastructure.org/projects/6907/badge)](https://bestpractices.coreinfrastructure.org/projects/6907) 2 | 3 | # Import Tracker 4 | 5 | `Import Tracker` is a Python package offering a number of capabilities related to tracking and managing optional dependencies in Python projects. Specifically, this project enables developers to: 6 | 7 | - Track the dependencies of a Python project to map each module within the project to the set of dependencies it relies on. This can be useful for debugging dependencies in large projects. 8 | 9 | - Enable lazy import errors in a Python project to prevent code from crashing when uninstalled imports are imported, but not utilized. This can be helpful in large projects, especially those which incorporate lots of hierarchical wild imports, as importing the top level package of such projects can often bring a lot of heavy dependencies into `sys.modules`. 10 | 11 | - Programmatically determine the [`install_requires`](https://setuptools.pypa.io/en/latest/userguide/dependency_management.html#declaring-required-dependency) and [`extras_require`](https://setuptools.pypa.io/en/latest/userguide/dependency_management.html#optional-dependencies) arguments to `setuptools.setup` where the extras sets are determined by a set of modules that should be optional. 
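As a quick illustration of the first capability, the tracker emits a JSON mapping from each module to the third-party dependencies it relies on. The invocation below uses only flags documented in [Running Import Tracker](#running-import-tracker), but `my_module`, `numpy`, and `pandas` are hypothetical names, so treat the output as a sketch of the shape rather than real results: ``` # Track a (hypothetical) library and all of its submodules, pretty-printed python3 -m import_tracker --name my_module --submodules --indent 4 ``` ``` { "my_module": ["numpy"], "my_module.utils": ["numpy"], "my_module.widgets": ["numpy", "pandas"] } ``` 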
12 | 13 | ## Table of contents 14 | 15 | 16 | 17 | 18 | 19 | - [Table of contents](#table-of-contents) 20 | - [Running Import Tracker](#running-import-tracker) 21 | - [Integrating `import_tracker` into a project](#integrating-import_tracker-into-a-project) 22 | - [Enabling `lazy_import_errors`](#enabling-lazy_import_errors) 23 | - [Using `setup_tools.parse_requirements`](#using-setup_toolsparse_requirements) 24 | - [Gotchas](#gotchas) 25 | - [Minor issue with zsh](#minor-issue-with-zsh) 26 | 27 | 28 | 29 | ## Running Import Tracker 30 | 31 | To run `import_tracker` against a project, simply invoke the module's main: 32 | 33 | ``` 34 | python3 -m import_tracker --name <module_name> 35 | ``` 36 | 37 | The main supports the following additional arguments: 38 | 39 | - `--package`: Allows `--name` to be a relative import (see [`importlib.import_module`](https://docs.python.org/3/library/importlib.html#importlib.import_module)) 40 | - `--indent`: Indent the output json for pretty printing 41 | - `--log_level`: Set the level of logging (up to `debug4`) to debug unexpected behavior 42 | - `--submodules`: List of sub-modules to recurse on (or full recursion when no args given) 43 | - `--track_import_stack`: Store the stack trace of imports belonging to the tracked module 44 | - `--detect_transitive`: Mark each dependency as either "direct" (imported directly) or "transitive" (inherited from a direct import) 45 | - `--full_depth`: Track all dependencies, including transitive dependencies of direct third-party deps 46 | - `--show_optional`: Show whether each dependency is optional or required 47 | 48 | ## Integrating `import_tracker` into a project 49 | 50 | When using `import_tracker` to implement optional dependencies in a project, there are two steps to take: 51 | 52 | 1. Enable `lazy_import_errors` for the set of modules that should be managed as optional 53 | 2. Use `setup_tools.parse_requirements` in `setup.py` to determine the `install_requires` and `extras_require` arguments 54 | 55 | In the following examples, we'll use a fictitious project with the following structure: 56 | 57 | ``` 58 | my_module/ 59 | ├── __init__.py 60 | ├── utils.py 61 | └── widgets 62 | ├── __init__.py 63 | ├── widget1.py 64 | └── widget2.py 65 | ``` 66 | 67 | ### Enabling `lazy_import_errors` 68 | 69 | The `import_tracker.lazy_import_errors` function can be invoked directly to enable lazy import errors globally, or used as a context manager to enable them only for a selected set of modules. 70 | 71 | To globally enable lazy import errors, `my_module/__init__.py` would look like the following: 72 | 73 | ```py 74 | # Globally enable lazy import errors 75 | from import_tracker import lazy_import_errors 76 | lazy_import_errors() 77 | 78 | from . import utils, widgets 79 | ``` 80 | 81 | Alternately, applying lazy import error semantics only to the `widgets` would look like the following: 82 | 83 | ```py 84 | from import_tracker import lazy_import_errors 85 | 86 | # Require all downstream imports from utils to exist 87 | from . import utils 88 | 89 | # Enable lazy import errors for widgets 90 | with lazy_import_errors(): 91 | from . import widgets 92 | ``` 93 | 94 | When using lazy import errors, there are two ways to customize the error message that is raised when a failed import is used: 95 | 96 | 1. The `get_extras_modules` argument takes a function which returns a `Set[str]` of the module names that are tracked as extras. 
If the import error is triggered within a module that is managed as an extras set, the error message is updated to include instructions on which extras set needs to be installed. 97 | 98 | 2. The `make_error_message` argument allows the caller to specify a fully custom error message generation function. 99 | 100 | ### Using `setup_tools.parse_requirements` 101 | 102 | To take advantage of the automatic dependency parsing when building a package, the `setup.py` would look like the following: 103 | 104 | ```py 105 | import import_tracker 106 | import os 107 | import setuptools 108 | 109 | # Determine the path to the requirements.txt for the project 110 | requirements_file = os.path.join(os.path.dirname(__file__), "requirements.txt") 111 | 112 | # Parse the requirement sets 113 | install_requires, extras_require = import_tracker.setup_tools.parse_requirements( 114 | requirements_file=requirements_file, 115 | library_name="my_module", 116 | extras_modules=[ 117 | "my_module.widgets.widget1", 118 | "my_module.widgets.widget2", 119 | ], 120 | ) 121 | 122 | # Perform the standard setup call 123 | setuptools.setup( 124 | name="my_module", 125 | author="me", 126 | version="1.2.3", 127 | license="MIT", 128 | install_requires=install_requires, 129 | extras_require=extras_require, 130 | packages=setuptools.find_packages(), 131 | ) 132 | ``` 133 | 134 | ## Gotchas 135 | 136 | ### Minor issue with zsh 137 | 138 | As mentioned before, when using lazy import errors in `import_tracker`, if the import error is triggered within a module that is managed as an extras set, the error message is updated to include instructions on which extras set needs to be installed. The error message might look something like this: 139 | 140 | ```bash 141 | ModuleNotFoundError: No module named 'example_module'. 142 | 143 | To install the missing dependencies, run `pip install my_module[my_module.example_module]` 144 | 145 | ``` 146 | 147 | There might be an issue when running `pip install my_module[my_module.example_module]` within a `zsh` environment, since square brackets in `zsh` have special meaning. They have to be escaped with a backslash (`\`). So for `zsh`, something like this will work: 148 | 149 | ``` 150 | pip install my_module\[my_module.example_module\] 151 | ``` 152 | -------------------------------------------------------------------------------- /import_tracker/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This top-level module conditionally imports some other sub modules in a way that 3 | tracks their third party deps 4 | """ 5 | 6 | # Local 7 | from . import setup_tools 8 | from .import_tracker import track_module 9 | from .lazy_import_errors import lazy_import_errors 10 | -------------------------------------------------------------------------------- /import_tracker/__main__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This main entrypoint allows import_tracker to run as an independent script to 3 | track the imports for a given module. 
4 | 5 | Example Usage: 6 | 7 | # Track a single module 8 | python -m import_tracker --name my_library 9 | 10 | # Track a module and all of the sub-modules it contains 11 | python -m import_tracker --name my_library --submodules 12 | 13 | # Track a module with relative import syntax 14 | python -m import_tracker --name .my_sub_module --package my_library 15 | """ 16 | 17 | # Standard 18 | import argparse 19 | import json 20 | import logging 21 | import os 22 | 23 | # Local 24 | from .import_tracker import track_module 25 | 26 | ## Main ######################################################################## 27 | 28 | 29 | def main(): 30 | """Main entrypoint as a function""" 31 | 32 | # Set up the args 33 | parser = argparse.ArgumentParser(description=__doc__) 34 | parser.add_argument( 35 | "--name", 36 | "-n", 37 | required=True, 38 | help="Module name to track", 39 | ) 40 | parser.add_argument( 41 | "--package", 42 | "-p", 43 | help="Package for relative imports", 44 | default=None, 45 | ) 46 | parser.add_argument( 47 | "--indent", 48 | "-i", 49 | type=int, 50 | help="Indent for json printing", 51 | default=None, 52 | ) 53 | parser.add_argument( 54 | "--submodules", 55 | "-s", 56 | nargs="*", 57 | default=None, 58 | help="List of submodules to include (all if no value given)", 59 | ) 60 | parser.add_argument( 61 | "--track_import_stack", 62 | "-t", 63 | action="store_true", 64 | default=False, 65 | help="Store the stack trace of imports belonging to the tracked module", 66 | ) 67 | parser.add_argument( 68 | "--full_depth", 69 | "-f", 70 | action="store_true", 71 | default=False, 72 | help="Include transitive third-party deps brought in by direct third-party deps", 73 | ) 74 | parser.add_argument( 75 | "--detect_transitive", 76 | "-d", 77 | action="store_true", 78 | default=False, 79 | help="Detect whether each dependency is 'direct' or 'transitive'", 80 | ) 81 | parser.add_argument( 82 | "--show_optional", 83 | "-o", 84 | action="store_true", 85 | default=False, 86 | help="Show whether each dependency is optional or required", 87 | ) 88 | parser.add_argument( 89 | "--log_level", 90 | "-l", 91 | default=os.environ.get("LOG_LEVEL", "warning"), 92 | help="Default log level", 93 | ) 94 | args = parser.parse_args() 95 | 96 | # Determine the submodules argument value 97 | submodules = ( 98 | False 99 | if args.submodules is None 100 | else (args.submodules if args.submodules else True) 101 | ) 102 | 103 | # Set the level on the shared logger 104 | log_level = getattr(logging, args.log_level.upper(), None) 105 | if log_level is None: 106 | log_level = int(args.log_level) 107 | logging.basicConfig(level=log_level) 108 | 109 | # Perform the tracking and print out the output 110 | print( 111 | json.dumps( 112 | track_module( 113 | module_name=args.name, 114 | package_name=args.package, 115 | submodules=submodules, 116 | track_import_stack=args.track_import_stack, 117 | full_depth=args.full_depth, 118 | detect_transitive=args.detect_transitive, 119 | show_optional=args.show_optional, 120 | ), 121 | indent=args.indent, 122 | ) 123 | ) 124 | 125 | 126 | if __name__ == "__main__": # pragma: no cover 127 | main() 128 | -------------------------------------------------------------------------------- /import_tracker/constants.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shared constants across the various parts of the library 3 | """ 4 | 5 | # Standard 6 | import sys 7 | 8 | # The name of this package (import_tracker) 9 | THIS_PACKAGE = 
sys.modules[__name__].__package__.partition(".")[0] 10 | 11 | # Labels for direct vs transitive dependencies 12 | TYPE_DIRECT = "direct" 13 | TYPE_TRANSITIVE = "transitive" 14 | 15 | # Info section headers 16 | INFO_TYPE = "type" 17 | INFO_STACK = "stack" 18 | INFO_OPTIONAL = "optional" 19 | -------------------------------------------------------------------------------- /import_tracker/import_tracker.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module implements utilities that enable tracking of third party deps 3 | through import statements 4 | """ 5 | # Standard 6 | from types import ModuleType 7 | from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union 8 | import dis 9 | import importlib 10 | import os 11 | import re 12 | import sys 13 | 14 | # Local 15 | from . import constants 16 | from .log import log 17 | 18 | ## Public ###################################################################### 19 | 20 | 21 | def track_module( 22 | module_name: str, 23 | package_name: Optional[str] = None, 24 | submodules: Union[List[str], bool] = False, 25 | track_import_stack: bool = False, 26 | full_depth: bool = False, 27 | detect_transitive: bool = False, 28 | show_optional: bool = False, 29 | ) -> Union[Dict[str, List[str]], Dict[str, Dict[str, Any]]]: 30 | """Track the dependencies of a single python module 31 | 32 | Args: 33 | module_name: str 34 | The name of the module to track (may be relative if package_name 35 | provided) 36 | package_name: Optional[str] 37 | The parent package name of the module if the module name is relative 38 | submodules: Union[List[str], bool] 39 | If True, all submodules of the given module will also be tracked. If 40 | given as a list of strings, only those submodules will be tracked. 41 | If False, only the named module will be tracked. 42 | track_import_stack: bool 43 | Store the stacks of modules causing each dependency of each tracked 44 | module for debugging purposes. 45 | full_depth: bool 46 | Include transitive dependencies of the third party dependencies that 47 | are direct dependencies of modules within the target module's parent 48 | library. 49 | detect_transitive: bool 50 | Detect whether each dependency is 'direct' or 'transitive' 51 | show_optional: bool 52 | Show whether each requirement is optional (behind a try/except) or 53 | not 54 | 55 | Returns: 56 | import_mapping: Union[Dict[str, List[str]], Dict[str, Dict[str, Any]]] 57 | The mapping from fully-qualified module name to the set of imports 58 | needed by the given module. If tracking import stacks or detecting 59 | direct vs transitive dependencies, the output schema is 60 | Dict[str, Dict[str, Any]] where the nested dicts hold "stack" and/or 61 | "type" keys respectively. If neither feature is enabled, the schema 62 | is Dict[str, List[str]]. 
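Example (an illustrative sketch only; "my_module" and the dependency names shown are hypothetical): >>> track_module("my_module", submodules=True) {'my_module': ['numpy'], 'my_module.widgets': ['numpy', 'pandas']} 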
63 | """ 64 | 65 | # Import the target module 66 | log.debug("Importing %s.%s", package_name, module_name) 67 | imported = importlib.import_module(module_name, package=package_name) 68 | full_module_name = imported.__name__ 69 | 70 | # Recursively build the mapping 71 | module_deps_map = dict() 72 | modules_to_check = {imported} 73 | checked_modules = set() 74 | tracked_module_root_pkg = full_module_name.partition(".")[0] 75 | while modules_to_check: 76 | next_modules_to_check = set() 77 | for module_to_check in modules_to_check: 78 | 79 | # Figure out all direct imports from this module 80 | req_imports, opt_imports = _get_imports(module_to_check) 81 | opt_dep_names = {mod.__name__ for mod in opt_imports} 82 | all_imports = req_imports.union(opt_imports) 83 | module_import_names = {mod.__name__ for mod in all_imports} 84 | log.debug3( 85 | "Full import names for [%s]: %s", 86 | module_to_check.__name__, 87 | module_import_names, 88 | ) 89 | 90 | # Trim to just non-standard modules 91 | non_std_module_names = _get_non_std_modules(module_import_names) 92 | log.debug3("Non std module names: %s", non_std_module_names) 93 | non_std_module_imports = [ 94 | mod for mod in all_imports if mod.__name__ in non_std_module_names 95 | ] 96 | 97 | # Set the deps for this module as a mapping from each dep to its 98 | # optional status 99 | module_deps_map[module_to_check.__name__] = { 100 | mod: mod in opt_dep_names for mod in non_std_module_names 101 | } 102 | log.debug2( 103 | "Deps for [%s] -> %s", 104 | module_to_check.__name__, 105 | non_std_module_names, 106 | ) 107 | 108 | # Add each of these modules to the next round of modules to check if 109 | # it has not yet been checked 110 | next_modules_to_check = next_modules_to_check.union( 111 | { 112 | mod 113 | for mod in non_std_module_imports 114 | if ( 115 | mod not in checked_modules 116 | and ( 117 | full_depth 118 | or mod.__name__.partition(".")[0] == tracked_module_root_pkg 119 | ) 120 | ) 121 | } 122 | ) 123 | 124 | # Also check modules with intermediate names 125 | parent_mods = set() 126 | for mod in next_modules_to_check: 127 | mod_name_parts = mod.__name__.split(".") 128 | for parent_mod_name in [ 129 | ".".join(mod_name_parts[: i + 1]) 130 | for i in range(len(mod_name_parts)) 131 | ]: 132 | parent_mod = sys.modules.get(parent_mod_name) 133 | if parent_mod is None: 134 | log.warning( 135 | "Could not find parent module %s of %s", 136 | parent_mod_name, 137 | mod.__name__, 138 | ) 139 | continue 140 | if parent_mod not in checked_modules: 141 | parent_mods.add(parent_mod) 142 | next_modules_to_check = next_modules_to_check.union(parent_mods) 143 | 144 | # Mark this module as checked 145 | checked_modules.add(module_to_check) 146 | 147 | # Set the next iteration 148 | log.debug3("Next modules to check: %s", next_modules_to_check) 149 | modules_to_check = next_modules_to_check 150 | 151 | log.debug3("Full module dep mapping: %s", module_deps_map) 152 | 153 | # Determine all the modules we want the final answer for 154 | output_mods = {full_module_name} 155 | if submodules: 156 | output_mods = output_mods.union( 157 | { 158 | mod 159 | for mod in module_deps_map 160 | if ( 161 | (submodules is True and mod.startswith(full_module_name)) 162 | or (submodules is not True and mod in submodules) 163 | ) 164 | } 165 | ) 166 | log.debug2("Output modules: %s", output_mods) 167 | 168 | # Add parent direct deps to the module deps map 169 | parent_direct_deps = _find_parent_direct_deps(module_deps_map) 170 | 171 | # Flatten each of the output mods' 
dependency lists 172 | flattened_deps = { 173 | mod: _flatten_deps(mod, module_deps_map, parent_direct_deps) 174 | for mod in output_mods 175 | } 176 | log.debug("Raw output deps map: %s", flattened_deps) 177 | 178 | # If not displaying any of the extra info, the values are simple lists of 179 | # dependency names 180 | if not any([detect_transitive, track_import_stack, show_optional]): 181 | deps_out = { 182 | mod: list(sorted(deps.keys())) for mod, (deps, _) in flattened_deps.items() 183 | } 184 | 185 | # Otherwise, the values will be dicts with some combination of "type" and 186 | # "stack" populated 187 | else: 188 | deps_out = {mod: {} for mod in flattened_deps.keys()} 189 | 190 | # If detecting transitive deps, look through the stacks and mark each dep as 191 | # transitive or direct 192 | if detect_transitive: 193 | for mod, (deps, _) in flattened_deps.items(): 194 | for dep_name, dep_stacks in deps.items(): 195 | deps_out.setdefault(mod, {}).setdefault(dep_name, {})[ 196 | constants.INFO_TYPE 197 | ] = ( 198 | constants.TYPE_DIRECT 199 | if any(len(dep_stack) == 1 for dep_stack in dep_stacks) 200 | else constants.TYPE_TRANSITIVE 201 | ) 202 | 203 | # If tracking import stacks, move them to the "stack" key in the output 204 | if track_import_stack: 205 | for mod, (deps, _) in flattened_deps.items(): 206 | for dep_name, dep_stacks in deps.items(): 207 | deps_out.setdefault(mod, {}).setdefault(dep_name, {})[ 208 | constants.INFO_STACK 209 | ] = dep_stacks 210 | 211 | # If showing optional, add the optional status of each dependency 212 | if show_optional: 213 | for mod, (deps, optional_mapping) in flattened_deps.items(): 214 | for dep_name, dep_stacks in deps.items(): 215 | deps_out.setdefault(mod, {}).setdefault(dep_name, {})[ 216 | constants.INFO_OPTIONAL 217 | ] = optional_mapping.get(dep_name, False) 218 | 219 | log.debug("Final output: %s", deps_out) 220 | return deps_out 221 | 222 | 223 | ## Private ##################################################################### 224 | 225 | 226 | def _get_dylib_dir(): 227 | """Different versions/builds of python manage different builtin libraries as 228 | "builtins" versus extensions. As such, we need some heuristics to try to 229 | find the base directory that holds shared objects from the standard library. 230 | """ 231 | is_dylib = lambda x: x is not None and (x.endswith(".so") or x.endswith(".dylib")) 232 | all_mod_paths = list( 233 | filter(is_dylib, (getattr(mod, "__file__", "") for mod in sys.modules.values())) 234 | ) 235 | # If there's any dylib found, return the parent directory 236 | sample_dylib = None 237 | if all_mod_paths: 238 | sample_dylib = all_mod_paths[0] 239 | else: # pragma: no cover 240 | # If not found with the above, look through libraries that are known to 241 | # sometimes be packaged as compiled extensions 242 | # 243 | # NOTE: This code may be unnecessary, but it is intended to catch future 244 | # cases where the above does not yield results 245 | # 246 | # More names can be added here as needed 247 | for lib_name in ["cmath"]: 248 | lib = importlib.import_module(lib_name) 249 | fname = getattr(lib, "__file__", None) 250 | if is_dylib(fname): 251 | sample_dylib = fname 252 | break 253 | 254 | # If all else fails, we'll just return a sentinel string. 
This will fail to 255 | # match in the below check for builtin modules 256 | return ( 257 | os.path.realpath(os.path.dirname(sample_dylib)) 258 | if sample_dylib is not None 259 | else "BADPATH" 260 | ) 261 | 262 | 263 | # The path where global modules are found 264 | _std_lib_dir = os.path.realpath(os.path.dirname(os.__file__)) 265 | _std_dylib_dir = _get_dylib_dir() 266 | _known_std_pkgs = [ 267 | "collections", 268 | ] 269 | 270 | 271 | # Regex for matching lines in the exception table 272 | _exception_table_expr = re.compile(r" ([0-9]+) to ([0-9]+) -> [0-9]+ \[([0-9]+)\].*") 273 | 274 | 275 | def _mod_defined_in_init_file(mod: ModuleType) -> bool: 276 | """Determine if the given module is defined in an __init__.py[c]""" 277 | mod_file = getattr(mod, "__file__", None) 278 | if mod_file is None: 279 | return False 280 | return os.path.splitext(os.path.basename(mod_file))[0] == "__init__" 281 | 282 | 283 | def _get_import_parent_path(mod_name: str) -> str: 284 | """Get the parent directory of the given module""" 285 | mod = sys.modules[mod_name] # NOTE: Intentionally unsafe to raise if not there! 286 | 287 | # Some standard libs have no __file__ attribute 288 | file_path = getattr(mod, "__file__", None) 289 | if file_path is None: 290 | return _std_lib_dir 291 | 292 | # If the module comes from an __init__, we need to pop two levels off 293 | if _mod_defined_in_init_file(mod): 294 | file_path = os.path.dirname(file_path) 295 | parent_path = os.path.dirname(file_path) 296 | return parent_path 297 | 298 | 299 | def _is_third_party(mod_name: str) -> bool: 300 | """Detect whether the given module is a third party (non-standard and not 301 | import_tracker)""" 302 | mod_pkg = mod_name.partition(".")[0] 303 | return ( 304 | not mod_name.startswith("_") 305 | and ( 306 | mod_name not in sys.modules 307 | or _get_import_parent_path(mod_name) not in [_std_lib_dir, _std_dylib_dir] 308 | ) 309 | and mod_pkg != constants.THIS_PACKAGE 310 | and mod_pkg not in _known_std_pkgs 311 | ) 312 | 313 | 314 | def _get_non_std_modules(mod_names: Iterable[str]) -> Set[str]: 315 | """Take a snapshot of the non-standard modules currently imported""" 316 | # Determine the names from the list that are non-standard 317 | return {mod_name for mod_name in mod_names if _is_third_party(mod_name)} 318 | 319 | 320 | def _get_value_col(dis_line: str) -> str: 321 | """Parse the string value from a `dis` output line""" 322 | loc = dis_line.find("(") 323 | if loc >= 0: 324 | return dis_line[loc + 1 : -1] 325 | return "" 326 | 327 | 328 | def _get_op_number(dis_line: str) -> Optional[int]: 329 | """Get the opcode number out of the line of `dis` output""" 330 | line_parts = dis_line.split() 331 | valid_line_part_idxs = [i for i, val in enumerate(line_parts) if val.isupper()] 332 | if not valid_line_part_idxs: 333 | return None 334 | opcode_idx = min(valid_line_part_idxs) 335 | assert opcode_idx > 0, f"Opcode found at the beginning of line! 
[{dis_line}]" 336 | return int(line_parts[opcode_idx - 1]) 337 | 338 | 339 | def _get_try_end_number( 340 | dis_line: str, 341 | op_num: Optional[int], 342 | exception_table: Dict[int, int], 343 | ) -> Optional[int]: 344 | """If the line contains a known indicator for a try block, get the 345 | corresponding end number 346 | 347 | NOTE: This contains compatibility code for changes between 3.10 and 3.11 348 | """ 349 | return exception_table.get(op_num or -1) or ( 350 | int(_get_value_col(dis_line).split()[-1]) 351 | if any(op in dis_line for op in ["SETUP_FINALLY", "SETUP_EXCEPT"]) 352 | else None 353 | ) 354 | 355 | 356 | def _get_exception_table(dis_lines: List[str]) -> Dict[int, int]: 357 | """For 3.11+ exception handling, parse the Exception Table""" 358 | table_start = [i for i, line in enumerate(dis_lines) if line == "ExceptionTable:"] 359 | assert len(table_start) <= 1, "Found multiple exception tables!" 360 | return ( 361 | { 362 | int(m.group(1)): int(m.group(2)) 363 | for m in [ 364 | _exception_table_expr.match(line) 365 | for line in dis_lines[table_start[0] + 1 :] 366 | ] 367 | if m and int(m.group(3)) == 0 and m.group(1) != m.group(2) 368 | } 369 | if table_start 370 | else {} 371 | ) 372 | 373 | 374 | def _figure_out_import( 375 | mod: ModuleType, 376 | dots: Optional[int], 377 | import_name: Optional[str], 378 | import_from: Optional[str], 379 | ) -> ModuleType: 380 | """This function takes the set of information about an individual import 381 | statement parsed out of the `dis` output and attempts to find the in-memory 382 | module object it refers to. 383 | """ 384 | log.debug2("Figuring out import [%s/%s/%s]", dots, import_name, import_from) 385 | 386 | # If there are no dots, look for candidate absolute imports 387 | if not dots: 388 | if import_name in sys.modules: 389 | if import_from is not None: 390 | candidate = f"{import_name}.{import_from}" 391 | if candidate in sys.modules: 392 | log.debug3("Found [%s] in sys.modules", candidate) 393 | return sys.modules[candidate] 394 | log.debug3("Found [%s] in sys.modules", import_name) 395 | return sys.modules[import_name] 396 | 397 | # Try simulating a relative import from a non-relative local 398 | dots = dots or 1 399 | 400 | # If there are dots, figure out the parent 401 | parent_mod_name_parts = mod.__name__.split(".") 402 | defined_in_init = _mod_defined_in_init_file(mod) 403 | if dots > 1: 404 | parent_dots = dots - 1 if defined_in_init else dots 405 | root_mod_name = ".".join(parent_mod_name_parts[:-parent_dots]) 406 | elif defined_in_init: 407 | root_mod_name = mod.__name__ 408 | else: 409 | root_mod_name = ".".join(parent_mod_name_parts[:-1]) 410 | log.debug3("Parent mod name parts: %s", parent_mod_name_parts) 411 | log.debug3("Num Dots: %d", dots) 412 | log.debug3("Root mod name: %s", root_mod_name) 413 | log.debug3("Module file: %s", getattr(mod, "__file__", None)) 414 | if not import_name: 415 | import_name = root_mod_name 416 | elif root_mod_name: 417 | import_name = f"{root_mod_name}.{import_name}" 418 | 419 | # Try with the import_from attached. 
This might be a module name or a 420 | # non-module attribute, so this might not work 421 | full_import_candidate = f"{import_name}.{import_from}" 422 | log.debug3("Looking for [%s] in sys.modules", full_import_candidate) 423 | if full_import_candidate in sys.modules: 424 | return sys.modules[full_import_candidate] 425 | 426 | # If that didn't work, the from is an attribute, so just get the import name 427 | return sys.modules.get(import_name) 428 | 429 | 430 | def _get_imports(mod: ModuleType) -> Tuple[Set[ModuleType], Set[ModuleType]]: 431 | """Get the sets of required and optional imports for the given module by 432 | parsing its bytecode 433 | """ 434 | log.debug2("Getting imports for %s", mod.__name__) 435 | req_imports = set() 436 | opt_imports = set() 437 | 438 | # Attempt to disassemble the byte code for this module. If the module has no 439 | # code, we ignore it since it's most likely a c extension 440 | try: 441 | loader = mod.__loader__ or mod.__spec__.loader 442 | mod_code = loader.get_code(mod.__name__) 443 | except (AttributeError, ImportError): 444 | log.warning("Couldn't find a loader for %s!", mod.__name__) 445 | return req_imports, opt_imports 446 | if mod_code is None: 447 | log.debug2("No code object found for %s", mod.__name__) 448 | return req_imports, opt_imports 449 | bcode = dis.Bytecode(mod_code) 450 | 451 | # Parse all bytecode lines 452 | current_dots = None 453 | current_import_name = None 454 | current_import_from = None 455 | open_import = False 456 | open_tries = set() 457 | log.debug4("Byte Code:") 458 | dis_lines = bcode.dis().split("\n") 459 | 460 | # Look for and parse an Exception Table (3.11+) 461 | exception_table = _get_exception_table(dis_lines) 462 | log.debug4("Exception Table: %s", exception_table) 463 | 464 | for line in dis_lines: 465 | log.debug4(line) 466 | line_val = _get_value_col(line) 467 | 468 | # If this is the beginning of a try block, add the end to the known open 469 | # try set 470 | op_num = _get_op_number(line) 471 | try_end = _get_try_end_number(line, op_num, exception_table) 472 | if try_end: 473 | open_tries.add(try_end) 474 | log.debug3("Open tries: %s", open_tries) 475 | 476 | # Parse the individual ops 477 | if "LOAD_CONST" in line: 478 | if line_val.isnumeric(): 479 | current_dots = int(line_val) 480 | elif "IMPORT_NAME" in line: 481 | open_import = True 482 | current_import_name = line_val 483 | elif "IMPORT_FROM" in line: 484 | open_import = True 485 | current_import_from = line_val 486 | else: 487 | # This closes an import, so figure out what the module is that is 488 | # being imported! 489 | if open_import: 490 | import_mod = _figure_out_import( 491 | mod, current_dots, current_import_name, current_import_from 492 | ) 493 | if import_mod is not None: 494 | log.debug2("Adding import module [%s]", import_mod.__name__) 495 | if open_tries: 496 | log.debug( 497 | "Found optional dependency of [%s]: %s", 498 | mod.__name__, 499 | import_mod.__name__, 500 | ) 501 | opt_imports.add(import_mod) 502 | else: 503 | req_imports.add(import_mod) 504 | 505 | # If this is a STORE_NAME, subsequent "from" statements may use the 506 | # same dots and name 507 | if "STORE_NAME" not in line: 508 | current_dots = None 509 | current_import_name = None 510 | open_import = False 511 | current_import_from = None 512 | 513 | # Close the open try if this ends one 514 | if op_num in open_tries: 515 | open_tries.remove(op_num) 516 | log.debug3("Closed try %d. 
Remaining open tries: %s", op_num, open_tries)
517 | 
518 |     # To the best of my knowledge, all bytecode will end with something other
519 |     # than an import, even if an import is the last line in the file (e.g.
520 |     # STORE_NAME). If this somehow proves to be untrue, please file a bug!
521 |     assert not open_import, "Found an unclosed import in {}! {}/{}/{}".format(
522 |         mod.__name__,
523 |         current_dots,
524 |         current_import_name,
525 |         current_import_from,
526 |     )
527 | 
528 |     return req_imports, opt_imports
529 | 
530 | 
531 | def _find_parent_direct_deps(
532 |     module_deps_map: Dict[str, List[str]]
533 | ) -> Dict[str, Dict[str, List[str]]]:
534 |     """Construct a mapping for each module (e.g. foo.bar.baz) to a mapping of
535 |     parent modules (e.g. [foo, foo.bar]) and the sets of imports that are
536 |     directly imported in those modules. This mapping is used to augment the sets
537 |     of required imports for each target module in the final flattening.
538 |     """
539 | 
540 |     parent_direct_deps = {}
541 |     for mod_name, mod_deps in module_deps_map.items():
542 | 
543 |         # Look through all parent modules of module_name and aggregate all
544 |         # third-party deps that are directly used by those modules
545 |         mod_base_name = mod_name.partition(".")[0]
546 |         mod_name_parts = mod_name.split(".")
547 |         for i in range(1, len(mod_name_parts)):
548 |             parent_mod_name = ".".join(mod_name_parts[:i])
549 |             parent_deps = module_deps_map.get(parent_mod_name, {})
550 |             for dep, parent_dep_opt in parent_deps.items():
551 |                 currently_optional = mod_deps.get(dep, True)
552 |                 if not dep.startswith(mod_base_name) and currently_optional:
553 |                     log.debug3(
554 |                         "Adding direct-dependency of parent mod [%s] to [%s]: %s",
555 |                         parent_mod_name,
556 |                         mod_name,
557 |                         dep,
558 |                     )
559 |                     mod_deps[dep] = currently_optional and parent_dep_opt
560 |                     parent_direct_deps.setdefault(mod_name, {}).setdefault(
561 |                         parent_mod_name, set()
562 |                     ).add(dep)
563 |     log.debug3("Parent direct dep map: %s", parent_direct_deps)
564 |     return parent_direct_deps
565 | 
566 | 
567 | def _flatten_deps(
568 |     module_name: str,
569 |     module_deps_map: Dict[str, List[str]],
570 |     parent_direct_deps: Dict[str, Dict[str, List[str]]],
571 | ) -> Tuple[Dict[str, List[str]], Dict[str, bool]]:
572 |     """Flatten the names of all modules that the target module depends on"""
573 | 
574 |     # Look through all modules that are directly required by this target module.
575 |     # This only looks at the leaves, so if the module depends on foo.bar.baz,
576 |     # only the deps for foo.bar.baz will be included and not foo.bar.buz or
577 |     # foo.biz.
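    # For example (hypothetical names), given
    #   module_deps_map = {"lib": {"lib.sub": False}, "lib.sub": {"yaml": False}}
    # and an empty parent_direct_deps, the walk below discovers "lib.sub" with
    # path ["lib"], then "yaml" with path ["lib", "lib.sub"], so
    #   all_deps == {"lib.sub": [["lib"]], "yaml": [["lib", "lib.sub"]]}
    # and the final flattening keeps just {"yaml": [["lib", "lib.sub"]]}.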
578 | all_deps = {} 579 | mods_to_check = {module_name: []} 580 | while mods_to_check: 581 | next_mods_to_check = {} 582 | for mod_to_check, parent_path in mods_to_check.items(): 583 | log.debug4("Checking mod %s", mod_to_check) 584 | mod_parents_direct_deps = parent_direct_deps.get(mod_to_check, {}) 585 | mod_path = parent_path + [mod_to_check] 586 | mod_deps = set(module_deps_map.get(mod_to_check, [])) 587 | log.debug4( 588 | "Mod deps for %s at path %s: %s", mod_to_check, mod_path, mod_deps 589 | ) 590 | new_mods = mod_deps - set(all_deps.keys()) 591 | next_mods_to_check.update({new_mod: mod_path for new_mod in new_mods}) 592 | for mod_dep in mod_deps: 593 | # If this is a parent direct dep, and the stack for this parent 594 | # is not already present in the dep stacks for this dependency, 595 | # add the parent to the path 596 | mod_dep_direct_parents = {} 597 | for ( 598 | mod_parent, 599 | mod_parent_direct_deps, 600 | ) in mod_parents_direct_deps.items(): 601 | if mod_dep in mod_parent_direct_deps: 602 | log.debug4( 603 | "Found direct parent dep for [%s] from parent [%s] and dep [%s]", 604 | mod_to_check, 605 | mod_parent, 606 | mod_dep, 607 | ) 608 | mod_dep_direct_parents[mod_parent] = [ 609 | mod_parent 610 | ] in all_deps.get(mod_dep, []) 611 | if mod_dep_direct_parents: 612 | for ( 613 | mod_dep_direct_parent, 614 | already_present, 615 | ) in mod_dep_direct_parents.items(): 616 | if not already_present: 617 | all_deps.setdefault(mod_dep, []).append( 618 | [mod_dep_direct_parent] + mod_path 619 | ) 620 | else: 621 | all_deps.setdefault(mod_dep, []).append(mod_path) 622 | log.debug3("Next mods to check: %s", next_mods_to_check) 623 | mods_to_check = next_mods_to_check 624 | log.debug4("All deps: %s", all_deps) 625 | 626 | # Create the flattened dependencies with the source lists for each 627 | mod_base_name = module_name.partition(".")[0] 628 | flat_base_deps = {} 629 | optional_deps_map = {} 630 | for dep, dep_sources in all_deps.items(): 631 | if not dep.startswith(mod_base_name): 632 | # Truncate the dep_sources entries and trim to avoid duplicates 633 | dep_root_mod_name = dep.partition(".")[0] 634 | flat_dep_sources = flat_base_deps.setdefault(dep_root_mod_name, []) 635 | opt_dep_values = optional_deps_map.setdefault(dep_root_mod_name, []) 636 | for dep_source in dep_sources: 637 | log.debug4("Considering dep source list for %s: %s", dep, dep_source) 638 | 639 | # If any link in the dep_source is optional, the whole 640 | # dep_source should be considered optional 641 | is_optional = False 642 | for parent_idx, dep_mod in enumerate(dep_source[1:] + [dep]): 643 | dep_parent = dep_source[parent_idx] 644 | log.debug4( 645 | "Checking whether [%s -> %s] is optional (dep=%s)", 646 | dep_parent, 647 | dep_mod, 648 | dep_root_mod_name, 649 | ) 650 | if module_deps_map.get(dep_parent, {}).get(dep_mod, False): 651 | log.debug4("Found optional link %s -> %s", dep_parent, dep_mod) 652 | is_optional = True 653 | break 654 | opt_dep_values.append( 655 | [ 656 | is_optional, 657 | dep_source, 658 | ] 659 | ) 660 | 661 | flat_dep_source = dep_source 662 | if dep_root_mod_name in dep_source: 663 | flat_dep_source = dep_source[: dep_source.index(dep_root_mod_name)] 664 | if flat_dep_source not in flat_dep_sources: 665 | flat_dep_sources.append(flat_dep_source) 666 | log.debug3("Optional deps map for [%s]: %s", module_name, optional_deps_map) 667 | optional_deps_map = { 668 | mod: all([opt_val[0] for opt_val in opt_vals]) 669 | for mod, opt_vals in optional_deps_map.items() 670 | } 671 | 
return flat_base_deps, optional_deps_map
672 | 
--------------------------------------------------------------------------------
/import_tracker/lazy_import_errors.py:
--------------------------------------------------------------------------------
1 | """
2 | This module implements a context manager which can be used to wrap import
3 | statements such that ModuleNotFound errors will be deferred until the module is
4 | used.
5 | """
6 | 
7 | # Standard
8 | from contextlib import AbstractContextManager
9 | from functools import partial
10 | from types import ModuleType
11 | from typing import Callable, Optional, Set
12 | import importlib.abc
13 | import importlib.util
14 | import sys
15 | 
16 | ## Public ######################################################################
17 | 
18 | 
19 | def lazy_import_errors(
20 |     *,
21 |     get_extras_modules: Optional[Callable[[], Set[str]]] = None,
22 |     make_error_message: Optional[Callable[[str], str]] = None,
23 | ):
24 |     """Enable lazy import errors.
25 | 
26 |     When enabled, lazy import errors will capture imports that would otherwise
27 |     raise ImportErrors and defer those errors until the last possible moment
28 |     when the functionality is needed. This is done by returning a special object
29 |     which can be used in all "non-meaningful" ways without raising, but when
30 |     used in a "meaningful" way will raise.
31 | 
32 |     This function may be used either as a function directly or as a
33 |     contextmanager which will disable lazy errors upon exit.
34 | 
35 |     Args:
36 |         get_extras_modules: Optional[Callable[[], Set[str]]]
37 |             Optional callable that fetches the list of module names in the
38 |             calling library that are managed as extras using
39 |             setup_tools.parse_requirements. (Mutually exclusive
40 |             with make_error_message)
41 |         make_error_message: Optional[Callable[[str], str]]
42 |             Optional callable that takes the name of the module which failed to
43 |             import and returns an error message string to be used for the
44 |             ModuleNotFoundError. (Mutually exclusive with get_extras_modules)
45 |     """
46 |     if get_extras_modules is not None and make_error_message is not None:
47 |         raise TypeError(
48 |             "Cannot specify both 'get_extras_modules' and 'make_error_message'"
49 |         )
50 | 
51 |     if get_extras_modules is not None:
52 |         make_error_message = partial(_make_extras_import_error, get_extras_modules)
53 | 
54 |     return _LazyImportErrorCtx(make_error_message)
55 | 
56 | 
57 | ## Implementation Details ######################################################
58 | 
59 | 
60 | def _make_extras_import_error(
61 |     get_extras_modules: Callable[[], Set[str]],
62 |     missing_module_name: str,
63 | ) -> Optional[str]:
64 |     """This function implements the most common implementation of a custom error
65 |     message where the calling library has some mechanism for determining which
66 |     modules are managed as extras and wants the error messages to include the
67 |     `pip install` command needed to add the missing dependencies.
68 | 
69 |     NOTE: There is an assumption here that the name of the root module is the
70 |         name of the pip package. If this is NOT true (e.g. alchemy-logging vs
71 |         alog), the module will need to implement its own custom
72 |         make_error_message.
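        For example (an illustrative sketch; the pip command shown is whatever
        the library actually needs), such a library could bypass this helper
        and pass its own factory directly:

            lazy_import_errors(
                make_error_message=lambda missing_mod: (
                    f"No module named '{missing_mod}'. Try `pip install alchemy-logging`."
                )
            )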
73 | 
74 |     Args:
75 |         get_extras_modules: Callable[[], Set[str]]
76 |             The function bound in from the caller that yields the set of extras
77 |             modules for the library
78 |         missing_module_name: str
79 |             The name of the module that failed to import
80 | 
81 |     Returns:
82 |         error_msg: Optional[str]
83 |             If the current stack includes an extras module, the formatted string
84 |             will be returned, otherwise None will be returned to allow the base
85 |             error message to be used.
86 |     """
87 |     # Get the set of extras modules from the library
88 |     extras_modules = get_extras_modules()
89 | 
90 |     # Look through frames in the stack to see if there's an extras module
91 |     extras_module = None
92 |     for frame in _FastFrameGenerator():
93 |         frame_module = frame.f_globals.get("__name__", "")
94 |         if frame_module in extras_modules:
95 |             extras_module = frame_module
96 |             break
97 | 
98 |     # If an extras module was found, return the formatted message
99 |     if extras_module is not None:
100 |         base_module = extras_module.partition(".")[0]
101 |         return (
102 |             f"No module named '{missing_module_name}'. To install the "
103 |             + f"missing dependencies, run `pip install {base_module}[{extras_module}]`"
104 |         )
105 | 
106 | 
107 | class _LazyImportErrorCtx(AbstractContextManager):
108 |     """This class implements the Context Manager version of lazy_import_errors"""
109 | 
110 |     def __init__(self, make_error_message: Optional[Callable[[str], str]]):
111 |         """This class is always constructed inside of lazy_import_errors which
112 |         acts as the context manager, so the __enter__ implementation lives in
113 |         the constructor.
114 |         """
115 |         self.finder = None
116 |         if sys.meta_path and not any(
117 |             getattr(finder, "owner_context", None) is self for finder in sys.meta_path
118 |         ):
119 |             self.finder = _LazyErrorMetaFinder(make_error_message, self)
120 |             sys.meta_path.append(self.finder)
121 | 
122 |     @staticmethod
123 |     def __enter__():
124 |         """Nothing to do in __enter__ since it's done in __init__"""
125 |         pass
126 | 
127 |     def __exit__(self, *_, **__):
128 |         """On exit, ensure there are no lazy meta finders left"""
129 |         if self.finder in sys.meta_path:
130 |             sys.meta_path.remove(self.finder)
131 | 
132 | 
133 | class _LazyErrorAttr(type):
134 |     """This object is used to recursively allow attribute access from a
135 |     _LazyErrorModule and only trigger an error when an attribute is used
136 | 
137 |     NOTE: This object _is_ itself a type. This is done to ensure that attributes
138 |         on a missing module which are types in the module itself can still be
139 |         treated as types. This is particularly important when deserializing a
140 |         pickled object whose type is not available at unpickling time. By acting
141 |         as a type, this object ensures that the appropriate ModuleNotFoundError
142 |         is raised rather than an opaque error about NEWOBJ not being a type.
143 |     """
144 | 
145 |     def __new__(
146 |         cls,
147 |         missing_module_name: str,
148 |         bases=None,
149 |         namespace=None,
150 |         **__,
151 |     ):
152 |         # When this is used as a base class, we need to pass __classcell__
153 |         # through to type.__new__ to avoid a runtime warning.
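        # (Python only places __classcell__ in the namespace when the body of a
        # class built on this type references __class__ or uses zero-argument
        # super().)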
154 |         new_namespace = {}
155 |         if isinstance(namespace, dict) and "__classcell__" in namespace:
156 |             new_namespace["__classcell__"] = namespace.get("__classcell__")
157 |         return super().__new__(
158 |             cls, f"_LazyErrorAttr[{missing_module_name}]", (), new_namespace
159 |         )
160 | 
161 |     def __init__(
162 |         self,
163 |         missing_module_name: str,
164 |         *_,
165 |         make_error_message: Optional[Callable[[str], str]] = None,
166 |         **__,
167 |     ):
168 |         """Store the name of the attribute being accessed and the missing module"""
169 | 
170 |         def _raise(*_, **__):
171 |             msg = None
172 |             if make_error_message is not None:
173 |                 msg = make_error_message(missing_module_name)
174 |             if msg is None:
175 |                 msg = f"No module named '{missing_module_name}'"
176 |             raise ModuleNotFoundError(msg)
177 | 
178 |         self._raise = _raise
179 | 
180 |     def __getattr__(self, name: str) -> "_LazyErrorAttr":
181 |         """Return self so that attributes can be extracted recursively"""
182 |         return self
183 | 
184 |     ##
185 |     # Override _everything_ to raise! This list is taken directly from the
186 |     # CPython source code:
187 |     # https://github.com/python/cpython/blob/main/Objects/typeobject.c#L7986
188 |     #
189 |     # The only exclusions from the set defined above are those which are used as
190 |     # part of the actual import mechanism:
191 |     #   __bool__
192 |     #   __del__
193 |     #   __getattr__
194 |     #   __getattribute__
195 |     #   __init__
196 |     #   __len__
197 |     #   __new__
198 |     #   __repr__
199 |     #   __setattr__
200 |     ##
201 |     def __abs__(self, *_, **__):
202 |         self._raise()
203 | 
204 |     def __add__(self, *_, **__):
205 |         self._raise()
206 | 
207 |     async def __aiter__(self, *_, **__):
208 |         self._raise()
209 | 
210 |     def __and__(self, *_, **__):
211 |         self._raise()
212 | 
213 |     async def __anext__(self, *_, **__):
214 |         self._raise()
215 | 
216 |     def __await__(self, *_, **__):
217 |         self._raise()
218 | 
219 |     def __call__(self, *_, **__):
220 | 
221 |         if _is_import_time():
222 |             # Calling _LazyErrorAttr at import time may happen if the attribute
223 |             # is a decorator from a missing dependency; in this case we want
224 |             # the call to succeed but the resulting value to defer the error.
225 |             # Other import time calls besides decorators could occur, such as
226 |             # constants; in those cases, we also want the call to succeed
227 |             # and the result value to be a deferred error.
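            #
            # For example (hypothetical missing package "foo"):
            #
            #     with lazy_import_errors():
            #         from foo import bar
            #
            #     @bar              # import-time call: returns self, no error
            #     def fn():
            #         ...
            #
            #     fn()              # first real use: raises ModuleNotFoundError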
228 |             return self
229 |         self._raise()
230 | 
231 |     def __contains__(self, *_, **__):
232 |         self._raise()
233 | 
234 |     def __delattr__(self, *_, **__):
235 |         self._raise()
236 | 
237 |     def __delete__(self, *_, **__):
238 |         self._raise()
239 | 
240 |     def __delitem__(self, *_, **__):
241 |         self._raise()
242 | 
243 |     def __eq__(self, other, *_, **__):
244 |         if not _is_import_time():
245 |             self._raise()
246 |         return id(self) == id(other)
247 | 
248 |     def __float__(self, *_, **__):
249 |         self._raise()
250 | 
251 |     def __floordiv__(self, *_, **__):
252 |         self._raise()
253 | 
254 |     def __ge__(self, *_, **__):
255 |         self._raise()
256 | 
257 |     def __get__(self, *_, **__):
258 |         self._raise()
259 | 
260 |     def __getitem__(self, *_, **__):
261 |         self._raise()
262 | 
263 |     def __gt__(self, *_, **__):
264 |         self._raise()
265 | 
266 |     def __hash__(self, *_, **__):
267 |         if not _is_import_time():
268 |             self._raise()
269 |         return id(self)
270 | 
271 |     def __iadd__(self, *_, **__):
272 |         self._raise()
273 | 
274 |     def __iand__(self, *_, **__):
275 |         self._raise()
276 | 
277 |     def __ifloordiv__(self, *_, **__):
278 |         self._raise()
279 | 
280 |     def __ilshift__(self, *_, **__):
281 |         self._raise()
282 | 
283 |     def __imatmul__(self, *_, **__):
284 |         self._raise()
285 | 
286 |     def __imod__(self, *_, **__):
287 |         self._raise()
288 | 
289 |     def __imul__(self, *_, **__):
290 |         self._raise()
291 | 
292 |     def __index__(self, *_, **__):
293 |         self._raise()
294 | 
295 |     def __int__(self, *_, **__):
296 |         self._raise()
297 | 
298 |     def __invert__(self, *_, **__):
299 |         self._raise()
300 | 
301 |     def __ior__(self, *_, **__):
302 |         self._raise()
303 | 
304 |     def __ipow__(self, *_, **__):
305 |         self._raise()
306 | 
307 |     def __irshift__(self, *_, **__):
308 |         self._raise()
309 | 
310 |     def __isub__(self, *_, **__):
311 |         self._raise()
312 | 
313 |     def __iter__(self, *_, **__):
314 |         self._raise()
315 | 
316 |     def __itruediv__(self, *_, **__):
317 |         self._raise()
318 | 
319 |     def __ixor__(self, *_, **__):
320 |         self._raise()
321 | 
322 |     def __le__(self, *_, **__):
323 |         self._raise()
324 | 
325 |     def __lshift__(self, *_, **__):
326 |         self._raise()
327 | 
328 |     def __lt__(self, *_, **__):
329 |         self._raise()
330 | 
331 |     def __matmul__(self, *_, **__):
332 |         self._raise()
333 | 
334 |     def __mod__(self, *_, **__):
335 |         self._raise()
336 | 
337 |     def __mul__(self, *_, **__):
338 |         self._raise()
339 | 
340 |     def __ne__(self, *_, **__):
341 |         self._raise()
342 | 
343 |     def __neg__(self, *_, **__):
344 |         self._raise()
345 | 
346 |     def __next__(self, *_, **__):
347 |         self._raise()
348 | 
349 |     def __or__(self, *_, **__):
350 |         self._raise()
351 | 
352 |     def __pos__(self, *_, **__):
353 |         self._raise()
354 | 
355 |     def __pow__(self, *_, **__):
356 |         self._raise()
357 | 
358 |     def __radd__(self, *_, **__):
359 |         self._raise()
360 | 
361 |     def __rand__(self, *_, **__):
362 |         self._raise()
363 | 
364 |     def __rfloordiv__(self, *_, **__):
365 |         self._raise()
366 | 
367 |     def __rlshift__(self, *_, **__):
368 |         self._raise()
369 | 
370 |     def __rmatmul__(self, *_, **__):
371 |         self._raise()
372 | 
373 |     def __rmod__(self, *_, **__):
374 |         self._raise()
375 | 
376 |     def __rmul__(self, *_, **__):
377 |         self._raise()
378 | 
379 |     def __ror__(self, *_, **__):
380 |         self._raise()
381 | 
382 |     def __rpow__(self, *_, **__):
383 |         self._raise()
384 | 
385 |     def __rrshift__(self, *_, **__):
386 |
self._raise()
387 | 
388 |     def __rshift__(self, *_, **__):
389 |         self._raise()
390 | 
391 |     def __rsub__(self, *_, **__):
392 |         self._raise()
393 | 
394 |     def __rtruediv__(self, *_, **__):
395 |         self._raise()
396 | 
397 |     def __rxor__(self, *_, **__):
398 |         self._raise()
399 | 
400 |     def __set__(self, *_, **__):
401 |         self._raise()
402 | 
403 |     def __setitem__(self, *_, **__):
404 |         self._raise()
405 | 
406 |     def __str__(self, *_, **__):
407 |         self._raise()
408 | 
409 |     def __sub__(self, *_, **__):
410 |         self._raise()
411 | 
412 |     def __truediv__(self, *_, **__):
413 |         self._raise()
414 | 
415 |     def __xor__(self, *_, **__):
416 |         self._raise()
417 | 
418 | 
419 | class _LazyErrorModule(ModuleType):
420 |     """This module is a lazy error thrower. It is created when the module cannot
421 |     be found so that import errors are deferred until attribute access.
422 |     """
423 | 
424 |     def __init__(self, name: str, make_error_message: Optional[Callable[[str], str]]):
425 |         super().__init__(name)
426 |         self.__path__ = None
427 |         self._make_error_message = make_error_message
428 | 
429 |     def __getattr__(self, name: str) -> _LazyErrorAttr:
430 |         # For special module attrs, return as if a stub module
431 |         if name in ["__file__", "__module__", "__doc__", "__cached__"]:
432 |             return None
433 |         return _LazyErrorAttr(
434 |             self.__name__, make_error_message=self._make_error_message
435 |         )
436 | 
437 | 
438 | class _LazyErrorLoader(importlib.abc.Loader):
439 |     """This "loader" can be used with a MetaFinder to catch not-found modules
440 |     and raise a ModuleNotFound error lazily when the module is used rather than
441 |     at import time.
442 |     """
443 | 
444 |     def __init__(self, make_error_message: Optional[Callable[[str], str]]):
445 |         self._make_error_message = make_error_message
446 | 
447 |     def create_module(self, spec):
448 |         return _LazyErrorModule(spec.name, self._make_error_message)
449 | 
450 |     def exec_module(self, *_, **__):
451 |         """Nothing to do here because the errors will be thrown by the module
452 |         created in create_module
453 |         """
454 | 
455 | 
456 | class _LazyErrorMetaFinder(importlib.abc.MetaPathFinder):
457 |     """A lazy finder that always claims to be able to find the module, but will
458 |     potentially raise an ImportError when the module is used
459 |     """
460 | 
461 |     def __init__(
462 |         self,
463 |         make_error_message: Optional[Callable[[str], str]],
464 |         owner_context: _LazyImportErrorCtx,
465 |     ):
466 |         self._make_error_message = make_error_message
467 |         self.owner_context = owner_context
468 | 
469 |         self.calling_pkg = None
470 |         self.this_module = sys.modules[__name__].__package__.split(".")[0]
471 |         for pkgname in self._get_non_import_modules():
472 |             # The first hit that does not match this module (or contextlib) is
473 |             # the module that is calling import_module
474 |             if self.calling_pkg is None and pkgname not in [
475 |                 self.this_module,
476 |                 "contextlib",
477 |             ]:
478 |                 self.calling_pkg = pkgname
479 |                 break
480 |         assert self.calling_pkg is not None
481 | 
482 |     def find_spec(self, fullname, path, *args, **kwargs):
483 |         """Since this meta finder is the last priority, it will only be used for
484 |         modules that are not otherwise found. As such, we use it to set up a
485 |         lazy ModuleNotFoundError that will trigger when the module is used
486 |         rather than when it is imported.
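        A spec is only fabricated when the package performing the failed import
        is the same package that enabled lazy errors (the importing_pkg /
        calling_pkg check below); imports triggered by any other package fall
        through to the normal ModuleNotFoundError.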
487 | """ 488 | importing_pkg = None 489 | 490 | for pkgname in self._get_non_import_modules(): 491 | # If this is the first hit beyond this module, it's the module doing 492 | # the import 493 | if importing_pkg is None and pkgname != self.this_module: 494 | importing_pkg = pkgname 495 | break 496 | 497 | assert None not in [ 498 | importing_pkg, 499 | self.calling_pkg, 500 | ], "Could not determine calling and importing pkg" 501 | 502 | # If the two are not the same, don't mask this with lazy errors 503 | if importing_pkg != self.calling_pkg: 504 | return None 505 | 506 | # Set up a lazy loader that wraps the Loader that defers the error to 507 | # exec_module time 508 | loader = _LazyErrorLoader(self._make_error_message) 509 | 510 | # Create a spec from this loader so that it acts at import-time like it 511 | # loaded correctly 512 | return importlib.util.spec_from_loader(fullname, loader) 513 | 514 | ## Implementation Details ###################################################### 515 | 516 | @classmethod 517 | def _get_non_import_modules(cls): 518 | 519 | # Figure out the module that is doing the import and the module that is 520 | # calling import_module 521 | return filter( 522 | lambda x: x != "importlib", 523 | ( 524 | frame.f_globals.get("__name__", "").split(".")[0] 525 | for frame in _FastFrameGenerator() 526 | ), 527 | ) 528 | 529 | 530 | class _FastFrameGenerator: 531 | """Custom iterable that uses the low-level sys._getframe to get frames 532 | one-at-a-time. 533 | Iterating over this is way faster than using `inspect.stack()` 534 | """ 535 | 536 | def __init__(self): 537 | self._depth = -1 538 | 539 | def __iter__(self): 540 | return self 541 | 542 | def __next__(self): 543 | self._depth += 1 544 | try: 545 | return sys._getframe(self._depth) 546 | except ValueError: 547 | self._depth = -1 548 | raise StopIteration 549 | 550 | 551 | def _is_import_time() -> bool: 552 | """Function to detect if the execution is being called at import 553 | time by detecting the presence of `importlib._bootstrap` in stack 554 | 555 | Returns: 556 | bool: 557 | True if the execution is at import time otherwise, False 558 | """ 559 | return "importlib._bootstrap" in [ 560 | frame.f_globals.get("__name__", "") for frame in _FastFrameGenerator() 561 | ] 562 | -------------------------------------------------------------------------------- /import_tracker/log.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module holds a shared logging instance handle to use in the other modules. 3 | This log handle has additional higher-order logging functions defined that align 4 | with the levels for alog (https://github.com/IBM/alchemy-logging). 
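Example usage (names are illustrative):

    from import_tracker.log import log
    log.debug2("Parsed deps for %s: %s", mod_name, deps)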
5 | """
6 | 
7 | # Standard
8 | import logging
9 | 
10 | log = logging.getLogger("IMPRT")
11 | 
12 | # Add higher-order levels as constants to logging
13 | setattr(logging, "DEBUG1", logging.DEBUG - 1)
14 | setattr(logging, "DEBUG2", logging.DEBUG - 2)
15 | setattr(logging, "DEBUG3", logging.DEBUG - 3)
16 | setattr(logging, "DEBUG4", logging.DEBUG - 4)
17 | 
18 | # Add higher-order logging
19 | setattr(log, "debug1", lambda *args, **kwargs: log.log(logging.DEBUG1, *args, **kwargs))
20 | setattr(log, "debug2", lambda *args, **kwargs: log.log(logging.DEBUG2, *args, **kwargs))
21 | setattr(log, "debug3", lambda *args, **kwargs: log.log(logging.DEBUG3, *args, **kwargs))
22 | setattr(log, "debug4", lambda *args, **kwargs: log.log(logging.DEBUG4, *args, **kwargs))
23 | 
--------------------------------------------------------------------------------
/import_tracker/setup_tools.py:
--------------------------------------------------------------------------------
1 | """
2 | This module holds tools for libraries to use when defining requirements and
3 | extras_require sets in a setup.py
4 | """
5 | 
6 | # Standard
7 | from functools import reduce
8 | from typing import Dict, Iterable, List, Optional, Tuple, Union
9 | import os
10 | import re
11 | import sys
12 | 
13 | # Local
14 | from .constants import INFO_OPTIONAL
15 | from .import_tracker import track_module
16 | from .log import log
17 | 
18 | ## Public ######################################################################
19 | 
20 | 
21 | def parse_requirements(
22 |     requirements: Union[List[str], str],
23 |     library_name: str,
24 |     extras_modules: Optional[List[str]] = None,
25 |     full_depth: bool = True,
26 |     keep_optional: Union[bool, Dict[str, List[str]]] = False,
27 |     **kwargs,
28 | ) -> Tuple[List[str], Dict[str, List[str]]]:
29 |     """This helper uses the lists of required modules and parameters for the
30 |     given library to produce requirements and the extras_require dict.
31 | 
32 |     Args:
33 |         requirements: Union[List[str], str]
34 |             The list of requirements entries, or a file path pointing to a
35 |             requirements file
36 |         library_name: str
37 |             The top-level name of the library package
38 |         extras_modules: Optional[List[str]]
39 |             List of module names that should be used to generate extras_require
40 |             sets
41 |         full_depth: bool
42 |             Passthrough to track_module. The default here is switched to True so
43 |             that modules which are both direct and transitive dependencies of
44 |             the library are correctly allocated.
45 |         keep_optional: Union[bool, Dict[str, List[str]]]
46 |             Indicate which optional dependencies should be kept when computing
47 |             the extras sets. If True, all optional dependencies will be kept. If
48 |             False, none will be kept. Otherwise, the argument should be a dict
49 |             mapping known optional dependencies of specific modules that should
50 |             be kept and all optional dependencies not represented in the dict
51 |             will be dropped.
52 |         **kwargs:
53 |             Additional keyword arguments to pass through to track_module
54 | 
55 |     Returns:
56 |         requirements: List[str]
57 |             The list of requirements to pass to setup()
58 |         extras_require: Dict[str, List[str]]
59 |             The extras_require dict to pass to setup()
60 |     """
61 | 
62 |     # Load all requirements from the requirements file
63 |     if isinstance(requirements, str):
64 |         with open(requirements, "r") as handle:
65 |             requirements_lines = list(handle.readlines())
66 |     elif not isinstance(requirements, (list, tuple, set)):
67 |         raise ValueError(
68 |             f"Invalid type for requirements.
Expected str (file) or List. Got {type(requirements)}" 69 | ) 70 | else: 71 | requirements_lines = requirements 72 | requirements = { 73 | _standardize_package_name(_REQ_SPLIT_EXPR.split(line, 1)[0]): line.strip() 74 | for line in requirements_lines 75 | if line.strip() and not line.startswith("#") 76 | } 77 | log.debug("Requirements: %s", requirements) 78 | 79 | # Get the set of required modules for each of the listed extras modules 80 | library_import_mapping = track_module( 81 | library_name, 82 | submodules=True, 83 | detect_transitive=True, 84 | full_depth=full_depth, 85 | show_optional=True, 86 | **kwargs, 87 | ) 88 | log.debug4("Library Import Mapping:\n%s", library_import_mapping) 89 | 90 | # Remove any unwanted optional imports 91 | if keep_optional is not True: 92 | keep_optional = keep_optional or {} 93 | log.debug2("Trimming optional deps (keep: %s)", keep_optional) 94 | library_import_mapping = { 95 | mod_name: { 96 | dep_name: dep_info 97 | for dep_name, dep_info in deps_info.items() 98 | if ( 99 | not dep_info[INFO_OPTIONAL] 100 | or dep_name in keep_optional.get(mod_name, []) 101 | ) 102 | } 103 | for mod_name, deps_info in library_import_mapping.items() 104 | } 105 | log.debug4("Trimmed Import Mapping:\n%s", library_import_mapping) 106 | 107 | # If no extras_modules are given, track them all 108 | if not extras_modules: 109 | extras_modules = list(library_import_mapping.keys()) 110 | log.debug2("Tracking extras modules: %s", extras_modules) 111 | 112 | # Get a mapping from all known imports to their requirement names 113 | requirement_name_map = {} 114 | for imports in library_import_mapping.values(): 115 | for import_name in imports: 116 | if import_name not in requirement_name_map: 117 | requirement_name_map[import_name] = _get_required_packages_for_imports( 118 | [import_name] 119 | )[0] 120 | log.debug3("Requirement Name Map: %s", requirement_name_map) 121 | 122 | # Get the import sets for each requested extras 123 | missing_extras_modules = [ 124 | mod for mod in extras_modules if mod not in library_import_mapping 125 | ] 126 | assert ( 127 | not missing_extras_modules 128 | ), f"No tracked imports found for: {missing_extras_modules}" 129 | import_sets = { 130 | mod_name: {requirement_name_map[import_name] for import_name in imports} 131 | for mod_name, imports in library_import_mapping.items() 132 | } 133 | log.debug("Import sets: %s", import_sets) 134 | 135 | # Determine the common requirements as the intersection of all extras sets 136 | # union'ed with all other import sets 137 | common_intersection = None 138 | non_extra_union = set() 139 | for import_set_name, import_set in import_sets.items(): 140 | if common_intersection is None: 141 | common_intersection = import_set 142 | else: 143 | common_intersection = common_intersection.intersection(import_set) 144 | 145 | # Determine if this import set falls outside of the extras 146 | import_set_parts = import_set_name.split(".") 147 | in_extra = any( 148 | extras_module.startswith(import_set_name) 149 | for extras_module in extras_modules 150 | ) 151 | if not in_extra: 152 | for i in range(len(import_set_parts)): 153 | parent_path = ".".join(import_set_parts[: i + 1]) 154 | if parent_path in extras_modules: 155 | in_extra = True 156 | break 157 | if not in_extra: 158 | log.debug3( 159 | "%s not covered by an extra. 
Adding %s to non extra union", 160 | import_set_name, 161 | import_set, 162 | ) 163 | non_extra_union = non_extra_union.union(import_set) 164 | common_intersection = common_intersection or set() 165 | if len(extras_modules) == 1: 166 | common_intersection = set() 167 | log.debug3("Raw common intersection: %s", common_intersection) 168 | 169 | common_imports = common_intersection.union(non_extra_union) 170 | log.debug3("Common intersection: %s", common_intersection) 171 | log.debug3("Non extra union: %s", non_extra_union) 172 | log.debug("Common Imports: %s", common_imports) 173 | 174 | # Compute the sets of unique requirements for each tracked module 175 | extras_require_sets = { 176 | set_name: import_set - common_imports 177 | for set_name, import_set in import_sets.items() 178 | if set_name in extras_modules 179 | } 180 | log.debug("Extras require sets: %s", extras_require_sets) 181 | 182 | # Add any listed requirements in that don't show up in any tracked module. 183 | # These requirements may be needed by an untracked portion of the library or 184 | # they may be runtime imports. 185 | all_tracked_requirements = reduce( 186 | lambda acc_set, req_set: acc_set.union(req_set), 187 | extras_require_sets.values(), 188 | common_imports, 189 | ) 190 | missing_reqs = ( 191 | set(_get_required_packages_for_imports(requirements.keys())) 192 | - all_tracked_requirements 193 | ) 194 | log.debug( 195 | "Adding missing requirements %s to common_imports", 196 | sorted(list(missing_reqs)), 197 | ) 198 | common_imports = common_imports.union(missing_reqs) 199 | 200 | # Add a special "all" group to the extras_require that will install all deps 201 | # needed for all extras 202 | if _ALL_GROUP not in extras_require_sets: 203 | all_reqs = all_tracked_requirements.union(missing_reqs) 204 | log.debug("Adding [%s] requirement group: %s", _ALL_GROUP, all_reqs) 205 | extras_require_sets[_ALL_GROUP] = all_reqs 206 | 207 | # Map all dependencies through those listed in requirements.txt 208 | standardized_requirements = { 209 | key.replace("-", "_"): val for key, val in requirements.items() 210 | } 211 | return sorted(_map_requirements(standardized_requirements, common_imports)), { 212 | set_name: _map_requirements(standardized_requirements, import_set) 213 | for set_name, import_set in extras_require_sets.items() 214 | } 215 | 216 | 217 | ## Implementation Details ###################################################### 218 | 219 | # Regex for parsing requirements 220 | _REQ_SPLIT_EXPR = re.compile(r"[=> List[str]: 313 | """Get the set of installable packages required by this list of imports""" 314 | # Lazily create the global mapping 315 | global _MODULE_TO_PKG 316 | if _MODULE_TO_PKG is None: 317 | _MODULE_TO_PKG = _map_modules_to_package_names() 318 | 319 | # Merge the required packages for each 320 | required_pkgs = set() 321 | for mod in imports: 322 | # If there is a known mapping, use it 323 | if mod in _MODULE_TO_PKG: 324 | required_pkgs.update(_MODULE_TO_PKG[mod]) 325 | 326 | # Otherwise, assume that the name of the module is itself the name of 327 | # the package 328 | else: 329 | required_pkgs.add(mod) 330 | return sorted(list(required_pkgs)) 331 | -------------------------------------------------------------------------------- /requirements_test.txt: -------------------------------------------------------------------------------- 1 | # submod2 2 | alchemy-logging>=1.0.3 3 | # nested.submod3 4 | PyYaml>=6.0 5 | # Testing 6 | pytest>=6.2.5 7 | pytest-asyncio>=0.16.0 8 | pytest-cov>=3.0.0 9 | 
pytest-xdist>=2.5.0
10 | # This is only used to validate the funky google namespace corner case
11 | protobuf==3.19.5
--------------------------------------------------------------------------------
/scripts/build_wheel.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # Version of Import Tracker that we want to tag our wheel as
4 | release_version=${RELEASE_VERSION:-""}
5 | # Python tags we want to support
6 | python_versions="py37 py38 py39 py310"
7 | GREEN='\033[0;32m'
8 | NC='\033[0m'
9 | 
10 | function show_help
11 | {
12 |     cat <<- EOM
13 | Usage: scripts/build_wheels.sh -v [Import Tracker Version] -p [python versions]
14 | EOM
15 | }
16 | 
17 | while (($# > 0)); do
18 |     case "$1" in
19 |         -h | --h | --he | --hel | --help)
20 |             show_help
21 |             exit 2
22 |             ;;
23 |         -p | --python_versions)
24 |             shift
25 |             python_versions=""
26 |             while [ "$#" -gt "0" ]
27 |             do
28 |                 if [ "$python_versions" != "" ]
29 |                 then
30 |                     python_versions="$python_versions "
31 |                 fi
32 |                 python_versions="$python_versions$1"
33 |                 if [ "$#" -gt "1" ] && [[ "$2" == "-"* ]]
34 |                 then
35 |                     break
36 |                 fi
37 |                 shift
38 |             done
39 |             ;;
40 |         -v | --release_version)
41 |             shift; release_version="$1";;
42 |         *)
43 |             echo "Unknown argument: $1"
44 |             show_help
45 |             exit 2
46 |             ;;
47 |     esac
48 |     shift
49 | done
50 | 
51 | if [ "$release_version" == "" ]; then
52 |     echo "ERROR: a release version for Import Tracker must be specified."
53 |     show_help
54 |     exit 1
55 | else
56 |     echo -e "Building wheels for Import Tracker version: ${GREEN}${release_version}${NC}"
57 |     sleep 2
58 | fi
59 | for python_version in $python_versions; do
60 |     echo -e "${GREEN}Building wheel for Python version [${python_version}]${NC}"
61 |     RELEASE_VERSION=$release_version python3 setup.py bdist_wheel --python-tag ${python_version} clean --all
62 |     echo -e "${GREEN}Done building wheel for Python version [${python_version}]${NC}"
63 |     sleep 1
64 | done
65 | 
--------------------------------------------------------------------------------
/scripts/fmt.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | # If disabled, do nothing (for docker build)
4 | if [ "${RUN_FMT:-"true"}" != "true" ]
5 | then
6 |     echo "fmt disabled"
7 |     exit
8 | fi
9 | 
10 | pre-commit run --all-files
11 | RETURN_CODE=$?
12 | 
13 | function echoWarning() {
14 |     LIGHT_YELLOW='\033[1;33m'
15 |     NC='\033[0m' # No Color
16 |     echo -e "${LIGHT_YELLOW}${1}${NC}"
17 | }
18 | 
19 | if [ "$RETURN_CODE" -ne 0 ]; then
20 |     if [ "${CI}" != "true" ]; then
21 |         echoWarning "☝️ This appears to have failed, but actually your files have been formatted."
22 |         echoWarning "Make a new commit with these changes before making a pull request."
23 |     else
24 |         echoWarning "This test failed because your code isn't formatted correctly."
25 |         echoWarning 'Locally, run `make run fmt`, it will appear to fail, but change files.'
26 |         echoWarning "Add the changed files to your commit and this stage will pass."
27 |     fi
28 | 
29 |     exit $RETURN_CODE
30 | fi
31 | 
--------------------------------------------------------------------------------
/scripts/install_release.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | ################################################################################
4 | # This script is used to execute unit tests against a recent release without the
5 | # code held locally. It's intended to be used inside of the `release_test`
It's intended to be used inside of the `release_test` 6 | # phase of the central Dockerfile. 7 | ################################################################################ 8 | 9 | # Make sure RELEASE_VERSION is defined 10 | if [ -z ${RELEASE_VERSION+x} ] 11 | then 12 | echo "RELEASE_VERSION must be set" 13 | exit 1 14 | fi 15 | 16 | # The name of the library we're testing 17 | LIBRARY_NAME="import_tracker" 18 | 19 | # 10 minutes max for trying to install the new version 20 | MAX_DURATION="${MAX_DURATION:-600}" 21 | 22 | # Time to wait between attempts to install the version 23 | RETRY_SLEEP=5 24 | 25 | # Retry the install until it succeeds 26 | start_time=$(date +%s) 27 | success="0" 28 | while [ "$(expr "$(date +%s)" "-" "${start_time}" )" -lt "${MAX_DURATION}" ] 29 | do 30 | pip cache purge 31 | pip install ${LIBRARY_NAME}==${RELEASE_VERSION} 32 | exit_code=$? 33 | if [ "$exit_code" != "0" ] 34 | then 35 | echo "Trying again in [${RETRY_SLEEP}s]" 36 | sleep ${RETRY_SLEEP} 37 | else 38 | success="1" 39 | break 40 | fi 41 | done 42 | 43 | # If the install didn't succeed, exit with failure 44 | if [ "$success" == "0" ] 45 | then 46 | echo "Unable to install [${LIBRARY_NAME}==${RELEASE_VERSION}]!" 47 | exit 1 48 | fi 49 | -------------------------------------------------------------------------------- /scripts/publish.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Run from the base of the python directory 4 | cd $(dirname ${BASH_SOURCE[0]})/.. 5 | 6 | # Clear out old publication files in case they're still around 7 | rm -rf build dist *.egg-info/ 8 | 9 | # Build 10 | py_tag="py$(echo $PYTHON_VERSION | cut -d'.' -f 1,2 | sed 's,\.,,g')" 11 | ./scripts/build_wheel.sh -v $RELEASE_VERSION -p $py_tag 12 | 13 | # Publish to PyPi 14 | if [ "${RELEASE_DRY_RUN}" != "true" ] 15 | then 16 | un_arg="" 17 | pw_arg="" 18 | if [ "$PYPI_TOKEN" != "" ] 19 | then 20 | un_arg="--username __token__" 21 | pw_arg="--password $PYPI_TOKEN" 22 | fi 23 | twine upload $un_arg $pw_arg dist/* 24 | else 25 | echo "Release DRY RUN" 26 | fi 27 | 28 | # Clean up 29 | rm -rf build dist *.egg-info/ 30 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Run from the project root 4 | cd $(dirname ${BASH_SOURCE[0]})/.. 5 | 6 | # Get the tag for this release 7 | tag=$(echo $REF | cut -d'/' -f3-) 8 | 9 | # Build the docker phase that will release and then test it 10 | docker build . \ 11 | --target=release_test \ 12 | --build-arg RELEASE_VERSION=$tag \ 13 | --build-arg PYPI_TOKEN=${PYPI_TOKEN:-""} \ 14 | --build-arg RELEASE_DRY_RUN=${RELEASE_DRY_RUN:-"false"} \ 15 | --build-arg PYTHON_VERSION=${PYTHON_VERSION:-"3.7"} 16 | -------------------------------------------------------------------------------- /scripts/run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" 5 | cd "$BASE_DIR" 6 | 7 | if [ "$PARALLEL" == "1" ] 8 | then 9 | if [[ "$OSTYPE" =~ "darwin"* ]] 10 | then 11 | num_procs=$(sysctl -n hw.physicalcpu) 12 | else 13 | num_procs=$(nproc) 14 | fi 15 | procs=${NPROCS:-$num_procs} 16 | echo "Running tests in parallel with [$procs] workers" 17 | procs_arg="-n $procs" 18 | else 19 | echo "Running tests in serial" 20 | procs_arg="--log-cli-level DEBUG4" 21 | fi 22 | 23 | FAIL_THRESH=100.0 24 | python3 -m pytest \ 25 | $procs_arg \ 26 | --cov-config=.coveragerc \ 27 | --cov=import_tracker \ 28 | --cov-report=term \ 29 | --cov-report=html \ 30 | --cov-fail-under=$FAIL_THRESH \ 31 | --asyncio-mode=strict \ 32 | -W error "$@" 33 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """A setuptools setup module for import_tracker""" 2 | 3 | # Standard 4 | import os 5 | 6 | # Third Party 7 | from setuptools import setup 8 | 9 | # Read the README to provide the long description 10 | python_base = os.path.abspath(os.path.dirname(__file__)) 11 | with open(os.path.join(python_base, "README.md"), "r") as handle: 12 | long_description = handle.read() 13 | 14 | # Read version from the env 15 | version = os.environ.get("RELEASE_VERSION") 16 | assert version is not None, "Must set RELEASE_VERSION" 17 | 18 | setup( 19 | name="import_tracker", 20 | version=version, 21 | description="A tool for managing dependencies in a modular python " 22 | "project by tracking which dependencies are needed by which sub-modules", 23 | long_description=long_description, 24 | long_description_content_type="text/markdown", 25 | url="https://github.com/IBM/import-tracker", 26 | author="Gabe Goodhart", 27 | author_email="gabe.l.hart@gmail.com", 28 | license="MIT", 29 | classifiers=[ 30 | "Intended Audience :: Developers", 31 | "Programming Language :: Python :: 3", 32 | "Programming Language :: Python :: 3.7", 33 | "Programming Language :: Python :: 3.8", 34 | "Programming Language :: Python :: 3.9", 35 | "Programming Language :: Python :: 3.10", 36 | "Programming Language :: Python :: 3.11", 37 | "Programming Language :: Python :: 3.12", 38 | ], 39 | keywords=["import", "importlib", "dependencies"], 40 | packages=["import_tracker"], 41 | ) 42 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shared test setup 3 | """ 4 | 5 | # Standard 6 | import os 7 | import sys 8 | 9 | # Add the sample_libs directory to the path so that those libs can be imported 10 | # as standalone packages 11 | SAMPLE_LIBS_DIR = os.path.realpath( 12 | os.path.join(os.path.dirname(__file__), "sample_libs") 13 | ) 14 | sys.path.append(SAMPLE_LIBS_DIR) 15 | -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Global autouse fixtures that will be used by all tests 3 | """ 4 | 5 | # Standard 6 | import logging 7 | import os 8 | import sys 9 | 10 | # Third Party 11 | import pytest 12 | 13 | # Local 14 | from import_tracker.log import log 15 | 16 | 17 | @pytest.fixture(autouse=True) 18 | def configure_logging(): 19 | """Fixture that configures logging from the env. It is auto-used, so if 20 | imported, it will automatically configure for each test. 
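    The level is read from the LOG_LEVEL env var (default "warning"), so e.g.
    `LOG_LEVEL=debug3 python -m pytest` enables the higher-order debug levels
    defined in import_tracker.log.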
21 | """ 22 | logging.basicConfig() 23 | log.root.setLevel(getattr(logging, os.environ.get("LOG_LEVEL", "warning").upper())) 24 | 25 | 26 | @pytest.fixture(autouse=True) 27 | def reset_sys_modules(): 28 | """This fixture will reset the sys.modules dict to only the keys held before 29 | the test initialized 30 | """ 31 | before_keys = list(sys.modules.keys()) 32 | yield 33 | added_keys = [ 34 | module_name 35 | for module_name in sys.modules.keys() 36 | if module_name not in before_keys 37 | ] 38 | for added_key in added_keys: 39 | mod = sys.modules.pop(added_key) 40 | del mod 41 | -------------------------------------------------------------------------------- /test/helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shared test helpers 3 | """ 4 | 5 | 6 | def remove_test_deps(deps): 7 | """If running with pytest coverage enabled, these deps will show up. We 8 | don't want run-env-dependent tests, so we just pop them out. 9 | """ 10 | for test_dep in ["pytest_cov", "coverage"]: 11 | try: 12 | deps.remove(test_dep) 13 | except ValueError: 14 | continue 15 | return deps 16 | -------------------------------------------------------------------------------- /test/sample_libs/all_import_types/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sample lib explicitly exercises ALL different flavors of import statements 3 | to ensure that the bytecode parsing is handling them correctly. 4 | 5 | NOTE: This does _not_ handle dynamic importing via importlib! 6 | """ 7 | 8 | ## Third Party ################################################################# 9 | 10 | # Third Party 11 | # Import with a * 12 | from inter_mod_deps import * 13 | 14 | # Import local module with a fully qualified name (ug) 15 | import all_import_types.sub_module3 16 | 17 | # First Party 18 | # Import multiple attributes in the same from statement 19 | # Import non-module attribute 20 | from alog import AlogFormatterBase, alog, configure 21 | 22 | # Local 23 | # Import sibling module defined in dir w/ __init__.py 24 | # NOTE: This module imports with .. to import submod1 25 | # Import sibling module defined in file 26 | # NOTE: This module imports with .. to import submod2 27 | from . import sub_module1, sub_module2 28 | 29 | # Import nested submodule with a "from" clause 30 | from sample_lib import submod2 31 | 32 | # Directly import 33 | import sample_lib 34 | 35 | # Directly import nested submodule 36 | import sample_lib.submod1 37 | 38 | ## Local ####################################################################### 39 | -------------------------------------------------------------------------------- /test/sample_libs/all_import_types/sub_module1.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import sub_module2 3 | -------------------------------------------------------------------------------- /test/sample_libs/all_import_types/sub_module2/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from .. 
import sub_module1 3 | -------------------------------------------------------------------------------- /test/sample_libs/all_import_types/sub_module3.py: -------------------------------------------------------------------------------- 1 | class Foo: 2 | pass 3 | -------------------------------------------------------------------------------- /test/sample_libs/bad_lib/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This library raises an exception at import time! 3 | """ 4 | 5 | raise RuntimeError("Yikes") 6 | -------------------------------------------------------------------------------- /test/sample_libs/conditional_deps/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module has a sub module that uses a standard try/except wrapper around a 3 | conditional dependency 4 | """ 5 | # Local 6 | from . import mod 7 | -------------------------------------------------------------------------------- /test/sample_libs/conditional_deps/mod.py: -------------------------------------------------------------------------------- 1 | """ 2 | Sample of how downstream libs may choose to write conditional dependencies. We 3 | need to make sure that this doesn't break. 4 | """ 5 | 6 | try: 7 | # Third Party 8 | import foobar as fb 9 | 10 | HAS_FB = True 11 | except ModuleNotFoundError: 12 | HAS_FB = False 13 | 14 | 15 | def has_fb(): 16 | if HAS_FB: 17 | print("We've got the foo!") 18 | else: 19 | print("No foo here") 20 | -------------------------------------------------------------------------------- /test/sample_libs/decorator_deps/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | import import_tracker 3 | 4 | with import_tracker.lazy_import_errors(): 5 | # Local 6 | from .opt_decorator import dummy_function 7 | -------------------------------------------------------------------------------- /test/sample_libs/decorator_deps/opt_decorator.py: -------------------------------------------------------------------------------- 1 | # Third Party 2 | from foo import bar 3 | 4 | 5 | @bar 6 | def dummy_function(): 7 | pass 8 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import blocks, workflows 3 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/blocks/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import bar_type, foo_type 3 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/blocks/bar_type/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . 
import bar 3 | 4 | Bar = bar.Bar 5 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/blocks/bar_type/bar.py: -------------------------------------------------------------------------------- 1 | # First Party 2 | import alog 3 | 4 | 5 | class Bar: 6 | pass 7 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/blocks/foo_type/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import foo 3 | 4 | Foo = foo.Foo 5 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/blocks/foo_type/foo.py: -------------------------------------------------------------------------------- 1 | # Third Party 2 | import yaml 3 | 4 | SOME_CONSTANT = 1 5 | 6 | 7 | class Foo: 8 | pass 9 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import foo_type 3 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/workflows/foo_type/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import foo 3 | 4 | Foo = foo.Foo 5 | -------------------------------------------------------------------------------- /test/sample_libs/deep_siblings/workflows/foo_type/foo.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from ...blocks.foo_type.foo import SOME_CONSTANT 3 | from ...blocks.foo_type.foo import Foo as FooBlock 4 | 5 | 6 | class Foo: 7 | pass 8 | -------------------------------------------------------------------------------- /test/sample_libs/direct_dep_ambiguous/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sample lib is carefully crafted such that the order of the imports makes 3 | the allocation of alog ambiguous. As such, we very intentionally have alog after 4 | the local imports and need to ignore this file in isort. 5 | """ 6 | 7 | # Local 8 | # Import the two submodules 9 | from . import bar, foo 10 | 11 | # Import alog here so that it is a direct dependency of the top-level 12 | # First Party 13 | import alog 14 | -------------------------------------------------------------------------------- /test/sample_libs/direct_dep_ambiguous/bar.py: -------------------------------------------------------------------------------- 1 | def bar(): 2 | print("bar") 3 | -------------------------------------------------------------------------------- /test/sample_libs/direct_dep_ambiguous/foo.py: -------------------------------------------------------------------------------- 1 | # Import alog and yaml here so that alog is ambiguous and yaml is not 2 | # Third Party 3 | import yaml 4 | 5 | # First Party 6 | import alog 7 | -------------------------------------------------------------------------------- /test/sample_libs/direct_dep_nested/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Sample sub-module for direct/transitive detection in nested modules 3 | """ 4 | # Local 5 | from .
import nested, nested2 6 | from sample_lib import submod1 7 | -------------------------------------------------------------------------------- /test/sample_libs/direct_dep_nested/nested.py: -------------------------------------------------------------------------------- 1 | # Third Party 2 | import yaml 3 | 4 | # Local 5 | from sample_lib import submod1 6 | -------------------------------------------------------------------------------- /test/sample_libs/direct_dep_nested/nested2.py: -------------------------------------------------------------------------------- 1 | # First Party 2 | import alog 3 | -------------------------------------------------------------------------------- /test/sample_libs/full_depth_direct_and_transitive/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import bar, foo 3 | -------------------------------------------------------------------------------- /test/sample_libs/full_depth_direct_and_transitive/bar.py: -------------------------------------------------------------------------------- 1 | # Import single_extra to get alog transitively 2 | # Third Party 3 | import single_extra 4 | -------------------------------------------------------------------------------- /test/sample_libs/full_depth_direct_and_transitive/foo.py: -------------------------------------------------------------------------------- 1 | # Depend on alog directly 2 | # First Party 3 | import alog 4 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module is a collection of submodules which exercise different corner cases 3 | around inter-dependency between modules: 4 | 5 | * submod1: Single external import 6 | * submod2: Imports submod1 + external 7 | * Needs to have the union of submod1 + external lib 8 | * submod3: Imports submod2 9 | * Needs to transitively import submod1 10 | * submod4: Imported after the others, but does not depend on them 11 | * Should NOT pick up deps from others 12 | * submod5: Import a nested module from an earlier sibling 13 | * Should trigger logic to pop sibling from sys.modules 14 | """ 15 | 16 | # Local 17 | from . import submod1, submod2, submod3, submod4, submod5 18 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/submod1/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This submod1 depends on a single external lib 3 | """ 4 | 5 | # First Party 6 | import alog 7 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/submod2/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This submod2 depends on its sibling submod1 3 | """ 4 | 5 | # Third Party 6 | import yaml 7 | 8 | # Local 9 | from .. import submod1 10 | from . import foo 11 | from .foo import Foo 12 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/submod2/bar.py: -------------------------------------------------------------------------------- 1 | """ 2 | Just another module.
Nothing to see here 3 | """ 4 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/submod2/foo.py: -------------------------------------------------------------------------------- 1 | """ 2 | This nested sub-module declares a class that others will want 3 | """ 4 | 5 | 6 | class Foo: 7 | pass 8 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/submod3.py: -------------------------------------------------------------------------------- 1 | """ 2 | Third submodule which depends on both of the upstreams 3 | """ 4 | 5 | # Local 6 | from . import submod2 7 | 8 | Foo = submod2.Foo 9 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/submod4.py: -------------------------------------------------------------------------------- 1 | """ 2 | This submodule _doesn't_ depend on the others and should not accidentally pick 3 | up any of the deps from the others 4 | """ 5 | 6 | # Third Party 7 | import yaml 8 | -------------------------------------------------------------------------------- /test/sample_libs/inter_mod_deps/submod5.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module imports a nested module from a sibling 3 | """ 4 | 5 | # Local 6 | from .submod2 import bar 7 | -------------------------------------------------------------------------------- /test/sample_libs/intermediate_extras/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import bar, foo 3 | -------------------------------------------------------------------------------- /test/sample_libs/intermediate_extras/bar/__init__.py: -------------------------------------------------------------------------------- 1 | print("In bar") 2 | -------------------------------------------------------------------------------- /test/sample_libs/intermediate_extras/foo/__init__.py: -------------------------------------------------------------------------------- 1 | # Local 2 | from . import bat, baz 3 | -------------------------------------------------------------------------------- /test/sample_libs/intermediate_extras/foo/bat.py: -------------------------------------------------------------------------------- 1 | # Third Party 2 | import yaml 3 | -------------------------------------------------------------------------------- /test/sample_libs/intermediate_extras/foo/baz/__init__.py: -------------------------------------------------------------------------------- 1 | # First Party 2 | import alog 3 | -------------------------------------------------------------------------------- /test/sample_libs/lazy_import_errors/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sample module uses import tracker's lazy_import_errors 3 | """ 4 | 5 | # Local 6 | import import_tracker 7 | 8 | with import_tracker.lazy_import_errors(): 9 | # Local 10 | from . 
import foo 11 | -------------------------------------------------------------------------------- /test/sample_libs/lazy_import_errors/foo.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module will be loaded with lazy errors 3 | """ 4 | 5 | try: 6 | # Third Party 7 | import not_there 8 | except: 9 | not_there = "NOT THERE" 10 | -------------------------------------------------------------------------------- /test/sample_libs/missing_dep/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sample lib intentionally implements a bad import so that it can be used to 3 | test lazy import errors 4 | """ 5 | # Local 6 | import import_tracker 7 | 8 | 9 | def get_extras_modules(): 10 | return {"missing_dep.mod"} 11 | 12 | 13 | with import_tracker.lazy_import_errors(get_extras_modules=get_extras_modules): 14 | # Local 15 | from . import mod, other 16 | -------------------------------------------------------------------------------- /test/sample_libs/missing_dep/mod.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sub-module actually does the bad import 3 | """ 4 | 5 | # Third Party 6 | import foobar 7 | 8 | 9 | def use_foobar(): 10 | foobar.doit() 11 | -------------------------------------------------------------------------------- /test/sample_libs/missing_dep/other.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module also does a bad import, but isn't "tracked as an extra" 3 | """ 4 | # Third Party 5 | import bazbat 6 | 7 | 8 | def use_bazbat(): 9 | bazbat.doit() 10 | -------------------------------------------------------------------------------- /test/sample_libs/optional_deps/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sample library has two dependencies: alog and yaml. The alog dependency is 3 | held as optional in optional_deps.opt and as non-optional in 4 | optional_deps.not_opt. The yaml dependency is held as optional in 5 | optional_deps.opt, but is imported _directly_ in the root of optional_deps. 6 | The resulting tracking should indicate that yaml is not optional everywhere 7 | while alog is optional in opt and nowhere else. 8 | """ 9 | 10 | # Third Party 11 | import yaml 12 | 13 | # Local 14 | from . import not_opt, opt 15 | -------------------------------------------------------------------------------- /test/sample_libs/optional_deps/not_opt.py: -------------------------------------------------------------------------------- 1 | # Import alog not optionally!
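# (Illustrative note: a bare top-level import with no try/except guard, so
# tracking should report alog as non-optional for this module; compare the
# expectations asserted in test_optional_deps in test/test_import_tracker.py.)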
2 | # First Party 3 | import alog 4 | -------------------------------------------------------------------------------- /test/sample_libs/optional_deps/opt.py: -------------------------------------------------------------------------------- 1 | # Standard 2 | import sys 3 | 4 | try: 5 | # First Party 6 | import alog 7 | 8 | print("imported alog!") 9 | except ImportError: 10 | print("Can't import alog") 11 | except: 12 | print("Double except, just to be sure!") 13 | finally: 14 | HAVE_ALOG = "alog" in sys.modules 15 | 16 | 17 | try: 18 | # Third Party 19 | import yaml 20 | finally: 21 | HAVE_YAML = "yaml" in sys.modules 22 | 23 | 24 | # Third Party 25 | # Import a non-optional dependency here to ensure that try blocks are closed 26 | # correctly on all versions of python 27 | import google.protobuf 28 | -------------------------------------------------------------------------------- /test/sample_libs/optional_deps_upstream/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sample library includes a "third party" library as optional which itself 3 | includes a different "third party" library as non-optional. The transitive 4 | third party should also be considered optional since the interim link in the 5 | import chain is optional. 6 | """ 7 | 8 | try: 9 | # Third Party 10 | import single_extra 11 | except ImportError: 12 | print("nothing to see here!") 13 | -------------------------------------------------------------------------------- /test/sample_libs/sample_lib/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Sample library to test out the import_tracker functionality 3 | """ 4 | 5 | # Local 6 | from . import submod1, submod2 7 | 8 | # Import the nested submodule 9 | from .nested import submod3 10 | -------------------------------------------------------------------------------- /test/sample_libs/sample_lib/nested/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Sample of a nested module where the lazy importing happens one level down 3 | """ 4 | 5 | # Local 6 | from . 
import submod3 7 | -------------------------------------------------------------------------------- /test/sample_libs/sample_lib/nested/submod3.py: -------------------------------------------------------------------------------- 1 | # Standard 2 | import json 3 | 4 | # Third Party 5 | import yaml 6 | 7 | # First Party 8 | import alog 9 | 10 | log = alog.use_channel("SUB3") 11 | 12 | 13 | def yaml_to_json(fname, *args, **kwargs): 14 | """Yaml file to json string""" 15 | log.debug("Opening %s", fname) 16 | with open(fname, "r") as handle: 17 | return yaml_to_jsons(handle.read(), *args, **kwargs) 18 | 19 | 20 | def yaml_to_jsons(yaml_str, *args, **kwargs): 21 | """Yaml string to json string""" 22 | return json.dumps(yaml.safe_load(yaml_str, *args, **kwargs)) 23 | 24 | 25 | def json_to_yaml(fname, *args, **kwargs): 26 | """Json file to yaml string""" 27 | log.debug("Opening %s", fname) 28 | with open(fname, "r") as handle: 29 | return yaml.safe_dump(json.load(handle), *args, **kwargs) 30 | 31 | 32 | def json_to_yamls(json_str, *args, **kwargs): 33 | """Json string to yaml string""" 34 | return yaml.safe_dump(json.loads(json_str), *args, **kwargs) 35 | -------------------------------------------------------------------------------- /test/sample_libs/sample_lib/submod1/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Sample sub-module that requires conditional_deps 3 | """ 4 | 5 | # Local 6 | import conditional_deps 7 | 8 | 9 | def check_tf(): 10 | conditional_deps.mod.has_fb() 11 | -------------------------------------------------------------------------------- /test/sample_libs/sample_lib/submod2/__init__.py: -------------------------------------------------------------------------------- 1 | # Standard 2 | import os 3 | 4 | # First Party 5 | import alog 6 | 7 | alog.configure(os.environ.get("LOG_LEVEL", "info")) 8 | alog.use_channel("SUB2").info("Hello there!") 9 | -------------------------------------------------------------------------------- /test/sample_libs/side_effects/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This sample module is an example of a library which uses import-time side 3 | effects. It does so by having `global_thing` modify a central object on the base 4 | module and then requiring that step in the import of `mod`. 5 | """ 6 | 7 | GLOBAL_RESOURCE = [] 8 | 9 | # Local 10 | # Import mod second which requires GLOBAL_RESOURCE to be populated 11 | # Import global_thing first to populate GLOBAL_RESOURCE 12 | from . import global_thing, mod 13 | -------------------------------------------------------------------------------- /test/sample_libs/side_effects/global_thing.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module is responsible for implementing the import-time side effect 3 | """ 4 | 5 | # Local 6 | from . import GLOBAL_RESOURCE 7 | 8 | GLOBAL_RESOURCE.append(1) 9 | -------------------------------------------------------------------------------- /test/sample_libs/side_effects/mod.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module requires that GLOBAL_RESOURCE has at least one valid element 3 | """ 4 | 5 | # Local 6 | from . import GLOBAL_RESOURCE 7 | 8 | assert GLOBAL_RESOURCE, "I need my side effects!"
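# (Illustrative note: this assert is what makes the import order in
# side_effects/__init__.py matter; a hypothetical reordering such as
#     from . import mod, global_thing
# would import this module before GLOBAL_RESOURCE is populated and would
# trip the assertion above.)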
9 | -------------------------------------------------------------------------------- /test/sample_libs/single_extra/__init__.py: -------------------------------------------------------------------------------- 1 | # First Party 2 | import alog 3 | 4 | # Local 5 | from . import extra 6 | from .not_extra import foo 7 | -------------------------------------------------------------------------------- /test/sample_libs/single_extra/extra.py: -------------------------------------------------------------------------------- 1 | # Third Party 2 | import yaml 3 | -------------------------------------------------------------------------------- /test/sample_libs/single_extra/not_extra.py: -------------------------------------------------------------------------------- 1 | def foo(): 2 | print("Hello Foo!") 3 | -------------------------------------------------------------------------------- /test/sample_libs/type_check_deps/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/import-tracker/a95ebc9f0a350adeb84c496cda27b11754529391/test/sample_libs/type_check_deps/__init__.py -------------------------------------------------------------------------------- /test/sample_libs/type_check_deps/type_check_dict.py: -------------------------------------------------------------------------------- 1 | # Standard 2 | from typing import Dict 3 | 4 | # Local 5 | import import_tracker 6 | 7 | with import_tracker.lazy_import_errors(): 8 | # Third Party 9 | from foo.bar import Bar 10 | 11 | def dummy_type_func(var) -> Dict[str, Bar]: 12 | pass 13 | -------------------------------------------------------------------------------- /test/sample_libs/type_check_deps/type_check_union.py: -------------------------------------------------------------------------------- 1 | # Standard 2 | from typing import Union 3 | 4 | # Local 5 | import import_tracker 6 | 7 | with import_tracker.lazy_import_errors(): 8 | # Third Party 9 | from foo import Bar 10 | 11 | def dummy_type_func(var) -> Union[str, Bar]: 12 | pass 13 | -------------------------------------------------------------------------------- /test/test_import_tracker.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for the import_tracker module's public API 3 | """ 4 | 5 | # Standard 6 | from types import ModuleType 7 | import sys 8 | 9 | # Local 10 | from import_tracker import constants 11 | from import_tracker.import_tracker import ( 12 | _get_imports, 13 | _mod_defined_in_init_file, 14 | track_module, 15 | ) 16 | import import_tracker 17 | 18 | ## Package API ################################################################# 19 | 20 | 21 | def test_import_tracker_public_api(): 22 | """Test to catch changes in the set of attributes exposed on the public 23 | package api 24 | """ 25 | expected_attrs = { 26 | "setup_tools", 27 | "track_module", 28 | "lazy_import_errors", 29 | } 30 | module_attrs = set(dir(import_tracker)) 31 | assert module_attrs.intersection(expected_attrs) == expected_attrs 32 | 33 | 34 | ## track_module ################################################################ 35 | 36 | 37 | def test_track_module_programmatic(): 38 | """Test that track_module can be invoked programmatically to do 39 | tracking (vs as a CLI) 40 | """ 41 | sample_lib_mapping = track_module("sample_lib") 42 | assert sample_lib_mapping == { 43 | "sample_lib": sorted(["alog", "yaml", "conditional_deps"]) 44 | } 45 | 46 | 47 | def
test_track_module_with_package(): 48 | """Test that track_module can be invoked with a relative submodule 49 | and parent package 50 | """ 51 | sample_lib_mapping = track_module(".submod1", "sample_lib") 52 | assert sample_lib_mapping == {"sample_lib.submod1": ["conditional_deps"]} 53 | 54 | 55 | def test_track_module_recursive(): 56 | """Test that track_module can recurse through all of a library's submodules 57 | 58 | NOTE: There's no real way to validate the parallelism here, so this simply 59 | exercises the recursive code path 60 | """ 61 | sample_lib_mapping = track_module("sample_lib", submodules=True) 62 | assert sample_lib_mapping == { 63 | "sample_lib": sorted(["conditional_deps", "alog", "yaml"]), 64 | "sample_lib.submod1": ["conditional_deps"], 65 | "sample_lib.submod2": ["alog"], 66 | "sample_lib.nested": sorted(["alog", "yaml"]), 67 | "sample_lib.nested.submod3": sorted(["alog", "yaml"]), 68 | } 69 | 70 | 71 | def test_track_module_with_limited_submodules(): 72 | """Test that the submodules arg can be passed through""" 73 | sample_lib_mapping = track_module( 74 | "sample_lib", 75 | submodules=["sample_lib.submod1"], 76 | ) 77 | assert sample_lib_mapping == { 78 | "sample_lib": sorted(["conditional_deps", "alog", "yaml"]), 79 | "sample_lib.submod1": ["conditional_deps"], 80 | } 81 | 82 | 83 | def test_sibling_import(): 84 | """Make sure that a library with a submodule that imports a sibling 85 | submodule properly tracks dependencies through the sibling 86 | """ 87 | lib_mapping = track_module( 88 | "inter_mod_deps", 89 | submodules=True, 90 | ) 91 | assert (set(lib_mapping["inter_mod_deps.submod1"])) == {"alog"} 92 | assert (set(lib_mapping["inter_mod_deps.submod2"])) == { 93 | "alog", 94 | "yaml", 95 | } 96 | assert (set(lib_mapping["inter_mod_deps.submod2.foo"])) == { 97 | "yaml", 98 | } 99 | assert (set(lib_mapping["inter_mod_deps.submod2.bar"])) == { 100 | "yaml", 101 | } 102 | assert (set(lib_mapping["inter_mod_deps.submod3"])) == { 103 | "alog", 104 | "yaml", 105 | } 106 | assert (set(lib_mapping["inter_mod_deps.submod4"])) == { 107 | "yaml", 108 | } 109 | assert (set(lib_mapping["inter_mod_deps.submod5"])) == { 110 | "yaml", 111 | } 112 | 113 | 114 | def test_import_stack_tracking(): 115 | """Make sure that tracking the import stack works as expected""" 116 | lib_mapping = track_module( 117 | "inter_mod_deps", 118 | submodules=True, 119 | track_import_stack=True, 120 | ) 121 | 122 | assert set(lib_mapping.keys()) == { 123 | "inter_mod_deps", 124 | "inter_mod_deps.submod1", 125 | "inter_mod_deps.submod2", 126 | "inter_mod_deps.submod2.foo", 127 | "inter_mod_deps.submod2.bar", 128 | "inter_mod_deps.submod3", 129 | "inter_mod_deps.submod4", 130 | "inter_mod_deps.submod5", 131 | } 132 | 133 | # Check one of the stacks to make sure it's correct 134 | assert lib_mapping["inter_mod_deps.submod2"] == { 135 | "alog": { 136 | "stack": [ 137 | [ 138 | "inter_mod_deps.submod2", 139 | "inter_mod_deps.submod1", 140 | ] 141 | ] 142 | }, 143 | "yaml": { 144 | "stack": [ 145 | ["inter_mod_deps.submod2"], 146 | ] 147 | }, 148 | } 149 | 150 | 151 | def test_detect_transitive_no_stack_traces(): 152 | """Test that detect_transitive works as expected""" 153 | lib_mapping = track_module( 154 | "direct_dep_ambiguous", 155 | submodules=True, 156 | detect_transitive=True, 157 | ) 158 | assert lib_mapping == { 159 | "direct_dep_ambiguous": { 160 | "alog": { 161 | "type": constants.TYPE_DIRECT, 162 | }, 163 | "yaml": { 164 | "type": constants.TYPE_TRANSITIVE, 165 | }, 166 | },
"direct_dep_ambiguous.foo": { 168 | "alog": { 169 | "type": constants.TYPE_DIRECT, 170 | }, 171 | "yaml": { 172 | "type": constants.TYPE_DIRECT, 173 | }, 174 | }, 175 | "direct_dep_ambiguous.bar": { 176 | "alog": { 177 | "type": constants.TYPE_TRANSITIVE, 178 | }, 179 | }, 180 | } 181 | 182 | 183 | def test_detect_transitive_with_stack_traces(): 184 | """Test that detect_transitive + track_import_stack works as expected""" 185 | lib_mapping = track_module( 186 | "direct_dep_ambiguous", 187 | submodules=True, 188 | detect_transitive=True, 189 | track_import_stack=True, 190 | ) 191 | assert lib_mapping == { 192 | "direct_dep_ambiguous": { 193 | "alog": { 194 | "stack": [ 195 | [ 196 | "direct_dep_ambiguous", 197 | ], 198 | [ 199 | "direct_dep_ambiguous", 200 | "direct_dep_ambiguous.foo", 201 | ], 202 | ], 203 | "type": constants.TYPE_DIRECT, 204 | }, 205 | "yaml": { 206 | "stack": [ 207 | [ 208 | "direct_dep_ambiguous", 209 | "direct_dep_ambiguous.foo", 210 | ], 211 | ], 212 | "type": constants.TYPE_TRANSITIVE, 213 | }, 214 | }, 215 | "direct_dep_ambiguous.bar": { 216 | "alog": { 217 | "stack": [ 218 | [ 219 | "direct_dep_ambiguous", 220 | "direct_dep_ambiguous.bar", 221 | ], 222 | ], 223 | "type": constants.TYPE_TRANSITIVE, 224 | }, 225 | }, 226 | "direct_dep_ambiguous.foo": { 227 | "alog": { 228 | "stack": [ 229 | ["direct_dep_ambiguous.foo"], 230 | ], 231 | "type": constants.TYPE_DIRECT, 232 | }, 233 | "yaml": { 234 | "stack": [ 235 | ["direct_dep_ambiguous.foo"], 236 | ], 237 | "type": constants.TYPE_DIRECT, 238 | }, 239 | }, 240 | } 241 | 242 | 243 | def test_with_limited_submodules(): 244 | """Make sure that when a list of submodules is given, the recursion only 245 | applies to those submodules. 246 | """ 247 | lib_mapping = track_module( 248 | "sample_lib", 249 | submodules=["sample_lib.submod1"], 250 | ) 251 | assert set(lib_mapping.keys()) == {"sample_lib", "sample_lib.submod1"} 252 | 253 | 254 | def test_detect_transitive_with_nested_module(): 255 | """Test that detect_transitive works with nested modules as expected""" 256 | lib_mapping = track_module( 257 | "direct_dep_nested", 258 | submodules=True, 259 | detect_transitive=True, 260 | ) 261 | assert lib_mapping == { 262 | "direct_dep_nested": { 263 | "alog": {"type": constants.TYPE_TRANSITIVE}, 264 | "sample_lib": {"type": constants.TYPE_DIRECT}, 265 | "yaml": {"type": constants.TYPE_TRANSITIVE}, 266 | }, 267 | "direct_dep_nested.nested": { 268 | "sample_lib": {"type": constants.TYPE_DIRECT}, 269 | "yaml": {"type": constants.TYPE_DIRECT}, 270 | }, 271 | "direct_dep_nested.nested2": { 272 | "alog": {"type": constants.TYPE_DIRECT}, 273 | "sample_lib": {"type": constants.TYPE_TRANSITIVE}, 274 | }, 275 | } 276 | 277 | 278 | def test_detect_transitive_with_nested_module_full_depth(): 279 | """Test that with full_depth, nested dependencies are taken into account""" 280 | lib_mapping = track_module( 281 | "direct_dep_nested", 282 | submodules=True, 283 | detect_transitive=True, 284 | full_depth=True, 285 | ) 286 | assert lib_mapping == { 287 | "direct_dep_nested": { 288 | "alog": {"type": constants.TYPE_TRANSITIVE}, 289 | "sample_lib": {"type": constants.TYPE_DIRECT}, 290 | "yaml": {"type": constants.TYPE_TRANSITIVE}, 291 | "conditional_deps": {"type": constants.TYPE_TRANSITIVE}, 292 | }, 293 | "direct_dep_nested.nested": { 294 | "sample_lib": {"type": constants.TYPE_DIRECT}, 295 | "yaml": {"type": constants.TYPE_DIRECT}, 296 | "conditional_deps": {"type": constants.TYPE_TRANSITIVE}, 297 | }, 298 | "direct_dep_nested.nested2": { 299 | 
"alog": {"type": constants.TYPE_DIRECT}, 300 | "sample_lib": {"type": constants.TYPE_TRANSITIVE}, 301 | "conditional_deps": {"type": constants.TYPE_TRANSITIVE}, 302 | }, 303 | } 304 | 305 | 306 | def test_all_import_types(): 307 | """Make sure that all different import statement types are covered""" 308 | assert track_module("all_import_types", submodules=True) == { 309 | "all_import_types": [ 310 | "alog", 311 | "inter_mod_deps", 312 | "sample_lib", 313 | ], 314 | "all_import_types.sub_module1": [ 315 | "alog", 316 | "inter_mod_deps", 317 | "sample_lib", 318 | ], 319 | "all_import_types.sub_module2": [ 320 | "alog", 321 | "inter_mod_deps", 322 | "sample_lib", 323 | ], 324 | "all_import_types.sub_module3": [ 325 | "alog", 326 | "inter_mod_deps", 327 | "sample_lib", 328 | ], 329 | } 330 | 331 | 332 | def test_deep_siblings(): 333 | """This test exercises the sample library that was the main reason for the 334 | full refactor in the first place. The library is constructed such that there 335 | are sub-directories (blocks and workflows) where individual sub-modules 336 | within workflows may depend on a subset of the sub-modules within blocks. In 337 | this case, we do not want the entire dependency set of blocks to be 338 | attributed to a workflows module, but rather we want just the dependencies 339 | of the block modules that it needs. 340 | """ 341 | assert track_module("deep_siblings", submodules=True) == { 342 | "deep_siblings": ["alog", "yaml"], 343 | "deep_siblings.blocks": ["alog", "yaml"], 344 | "deep_siblings.blocks.foo_type": ["yaml"], 345 | "deep_siblings.blocks.foo_type.foo": ["yaml"], 346 | "deep_siblings.blocks.bar_type": ["alog"], 347 | "deep_siblings.blocks.bar_type.bar": ["alog"], 348 | "deep_siblings.workflows": ["yaml"], 349 | "deep_siblings.workflows.foo_type": ["yaml"], 350 | "deep_siblings.workflows.foo_type.foo": ["yaml"], 351 | } 352 | 353 | 354 | def test_optional_deps(): 355 | """Make sure that optional deps are correctly tracked when try/except is 356 | used 357 | """ 358 | assert track_module("optional_deps", submodules=True, show_optional=True) == { 359 | "optional_deps.not_opt": { 360 | "yaml": {"optional": False}, 361 | "alog": {"optional": False}, 362 | }, 363 | "optional_deps": { 364 | "yaml": {"optional": False}, 365 | "alog": {"optional": False}, 366 | "google": {"optional": False}, 367 | }, 368 | "optional_deps.opt": { 369 | "yaml": {"optional": False}, 370 | "alog": {"optional": True}, 371 | "google": {"optional": False}, 372 | }, 373 | } 374 | 375 | 376 | def test_upstream_optional_deps(): 377 | """Make sure that a module which holds a third-party dep as optional where 378 | that third-party dep includes _other_ third-party deps as non-optional 379 | should have the transitive deps held as optional due to the optional dep in 380 | the transitive chain. 381 | """ 382 | assert track_module( 383 | "optional_deps_upstream", full_depth=True, show_optional=True 384 | ) == { 385 | "optional_deps_upstream": { 386 | "yaml": {"optional": True}, 387 | "alog": {"optional": True}, 388 | "single_extra": {"optional": True}, 389 | }, 390 | } 391 | 392 | 393 | ## Details ##################################################################### 394 | 395 | 396 | def test_get_imports_no_bytecode(): 397 | """Excercise _get_imports and _mod_defined_in_init_file on a module with no 398 | bytecode to ensure that they doesn't explode! 
399 | """ 400 | new_mod = ModuleType("new_mod") 401 | assert _get_imports(new_mod) == (set(), set()) 402 | assert not _mod_defined_in_init_file(new_mod) 403 | 404 | 405 | def test_missing_parent_mod(): 406 | """This is a likely unreachable corner case, but this test exercises the 407 | case where the expected parent module doesn't exist in sys.modules 408 | """ 409 | # Local 410 | from sample_lib import nested 411 | 412 | del sys.modules["sample_lib"] 413 | assert track_module("sample_lib.nested") 414 | -------------------------------------------------------------------------------- /test/test_lazy_import_errors.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for the lazy_import_errors functionality 3 | """ 4 | 5 | # Standard 6 | from types import ModuleType 7 | import os 8 | import pickle 9 | import shlex 10 | import subprocess 11 | import sys 12 | import tempfile 13 | 14 | # Third Party 15 | import pytest 16 | 17 | # Local 18 | from import_tracker.lazy_import_errors import _FastFrameGenerator, _LazyErrorMetaFinder 19 | import import_tracker 20 | 21 | 22 | @pytest.fixture 23 | def reset_lazy_import_errors(): 24 | yield 25 | while sys.meta_path and isinstance(sys.meta_path[-1], _LazyErrorMetaFinder): 26 | sys.meta_path.pop() 27 | 28 | 29 | ######################## Tests for Direct Invocation of the Context Manager ####################### 30 | def test_lazy_import_sad_package(): 31 | """This test makes sure that the ModuleNotFoundError is not raised for an 32 | unknown module on import, but that it is raised on attribute access. 33 | 34 | This version tests that this is true when imported directly, but wrapped in 35 | lazy_import_errors. 36 | """ 37 | with import_tracker.lazy_import_errors(): 38 | # Third Party 39 | import foobarbaz 40 | with pytest.raises(ModuleNotFoundError): 41 | foobarbaz.foo() 42 | 43 | 44 | def test_lazy_import_happy_package_with_sad_optionals(): 45 | """This test ensures that a library with traditional try/except conditional 46 | dependencies works as expected. 47 | 48 | This version tests that the import works when imported directly, but wrapped 49 | in lazy_import_errors. 
50 | """ 51 | # Standard 52 | import pickle 53 | 54 | with import_tracker.lazy_import_errors(): 55 | # Local 56 | import conditional_deps 57 | assert not conditional_deps.mod.HAS_FB 58 | 59 | 60 | def test_lazy_import_errors_direct_call(reset_lazy_import_errors): 61 | """Test that directly invoking lazy_import_errors as a function will 62 | globally perform the setup 63 | """ 64 | import_tracker.lazy_import_errors() 65 | # Third Party 66 | import foobarbaz 67 | 68 | with pytest.raises(ModuleNotFoundError): 69 | foobarbaz.foo() 70 | 71 | 72 | def test_lazy_import_error_with_from(): 73 | """Test that the syntax 'from foo.bar import Baz' does raise lazily""" 74 | with import_tracker.lazy_import_errors(): 75 | # Third Party 76 | from foo.bar import Baz 77 | 78 | # Define a class that has no operators so that the __r*__ operators can be 79 | # exercised 80 | class RTestStub: 81 | pass 82 | 83 | # Define some test cases that can't be formulated as lambdas 84 | def test_delete(): 85 | class Foo: 86 | foo = Baz 87 | 88 | f = Foo() 89 | del f.foo 90 | 91 | def test_delitem(): 92 | del Baz["asdf"] 93 | 94 | def test_get(): 95 | class Foo: 96 | foo = Baz 97 | 98 | f = Foo() 99 | f.foo 100 | 101 | def test_set(): 102 | class Foo: 103 | foo = Baz 104 | 105 | f = Foo() 106 | f.foo = 1 107 | 108 | def test_iadd(): 109 | Baz.buz += 1 110 | 111 | def test_iand(): 112 | Baz.buz &= 1 113 | 114 | def test_ifloordiv(): 115 | Baz.buz //= 1 116 | 117 | def test_ilshift(): 118 | Baz.buz <<= 1 119 | 120 | def test_ishift(): 121 | Baz.buz >>= 1 122 | 123 | def test_imod(): 124 | Baz.buz %= 1 125 | 126 | def test_imatmul(): 127 | Baz.buz @= 1 128 | 129 | def test_imul(): 130 | Baz.buz *= 1 131 | 132 | def test_ior(): 133 | Baz.buz |= 1 134 | 135 | def test_ipow(): 136 | Baz.buz **= 2 137 | 138 | def test_isub(): 139 | Baz.buz -= 1 140 | 141 | def test_itruediv(): 142 | Baz.buz /= 1 143 | 144 | def test_ixor(): 145 | Baz.buz ^= 1 146 | 147 | def test_setitem(): 148 | Baz.buz[1] = 1 149 | 150 | # Make sure that doing _anything_ with Baz does trigger the error 151 | for fn in [ 152 | lambda: Baz(), 153 | lambda: Baz + 1, 154 | lambda: Baz * 2, 155 | lambda: Baz**2, 156 | lambda: Baz @ 2, 157 | lambda: Baz - 1, 158 | lambda: 1 - Baz, 159 | lambda: -Baz, 160 | lambda: +Baz, 161 | lambda: abs(Baz), 162 | lambda: Baz & True, 163 | lambda: Baz | True, 164 | lambda: 1 in Baz, 165 | lambda: delattr(Baz, "foo"), 166 | lambda: [x for x in Baz], 167 | lambda: Baz == Baz, 168 | lambda: Baz != Baz, 169 | lambda: int(Baz), 170 | lambda: float(Baz), 171 | lambda: str(Baz), 172 | lambda: Baz > 1, 173 | lambda: Baz >= 1, 174 | lambda: Baz < 1, 175 | lambda: Baz <= 1, 176 | lambda: hash(Baz), 177 | lambda: Baz[0], 178 | lambda: Baz / 2, 179 | lambda: Baz // 1, 180 | lambda: Baz << 1, 181 | lambda: Baz >> 1, 182 | lambda: Baz % 1, 183 | lambda: Baz ^ 1, 184 | lambda: ~Baz, 185 | lambda: [1, 2, 3][Baz], 186 | lambda: next(Baz), 187 | lambda: RTestStub() + Baz, 188 | lambda: RTestStub() & Baz, 189 | lambda: RTestStub() * Baz, 190 | lambda: RTestStub() / Baz, 191 | lambda: RTestStub() // Baz, 192 | lambda: RTestStub() % Baz, 193 | lambda: RTestStub() ^ Baz, 194 | lambda: RTestStub() @ Baz, 195 | lambda: RTestStub() << Baz, 196 | lambda: RTestStub() >> Baz, 197 | lambda: RTestStub() | Baz, 198 | lambda: RTestStub() ** Baz, 199 | test_delete, 200 | test_delitem, 201 | test_get, 202 | test_set, 203 | test_iadd, 204 | test_iand, 205 | test_ifloordiv, 206 | test_ilshift, 207 | test_ishift, 208 | test_imod, 209 | test_imatmul, 210 | test_imul, 
211 | test_ior, 212 | test_ipow, 213 | test_isub, 214 | test_itruediv, 215 | test_ixor, 216 | test_setitem, 217 | ]: 218 | with pytest.raises(ModuleNotFoundError): 219 | fn() 220 | 221 | # Make sure it cannot be pickled 222 | with pytest.raises(pickle.PicklingError): 223 | pickle.dumps(Baz) 224 | 225 | 226 | @pytest.mark.asyncio 227 | async def test_lazy_import_error_with_from_async(): 228 | """Test that the async operators also raise""" 229 | with import_tracker.lazy_import_errors(): 230 | # Third Party 231 | from foo.bar import Baz 232 | 233 | # Make sure that doing _anything_ with Baz does trigger the error 234 | for fn in [ 235 | lambda: Baz, 236 | # These two are really hard to exercise, so we'll just test them 237 | # directly 238 | lambda: Baz.__aiter__(), 239 | lambda: Baz.__anext__(), 240 | ]: 241 | with pytest.raises(ModuleNotFoundError): 242 | await fn() 243 | 244 | 245 | def test_lazy_import_error_attr_pickle(): 246 | """Test that when deserializing a pickled object created using a class that 247 | is not available at unpickling time due to a missing module, an appropriate 248 | ModuleNotFoundError is raised from the _LazyErrorAttr class that fills in 249 | for the missing type. This one is pretty niche since pickling will actually 250 | pickle the contents of the class itself. The error only occurs if the class 251 | relies on _another_ module that is not available at unpickling time. 252 | """ 253 | with tempfile.TemporaryDirectory() as workdir: 254 | mod1 = os.path.join(workdir, "some_module.py") 255 | with open(mod1, "w") as handle: 256 | handle.write( 257 | """ 258 | import pickle 259 | from other_module import Bar 260 | 261 | class Foo: 262 | def __init__(self): 263 | self.val = Bar(1) 264 | """ 265 | ) 266 | mod2 = os.path.join(workdir, "other_module.py") 267 | with open(mod2, "w") as handle: 268 | handle.write( 269 | """ 270 | class Bar: 271 | def __init__(self, val): 272 | self.val = val + 1 273 | """ 274 | ) 275 | out, _ = subprocess.Popen( 276 | shlex.split( 277 | f"{sys.executable} -c 'from some_module import Foo; import pickle; print(pickle.dumps(Foo()).hex())'" 278 | ), 279 | stdout=subprocess.PIPE, 280 | env={"PYTHONPATH": workdir}, 281 | ).communicate() 282 | 283 | # Import the missing module 284 | with import_tracker.lazy_import_errors(): 285 | # Third Party 286 | from some_module import Foo 287 | 288 | # Grab the pickled output 289 | pickled = bytes.fromhex(out.strip().decode("utf-8")) 290 | 291 | # Try to unpickle it 292 | with pytest.raises(ModuleNotFoundError): 293 | pickle.loads(pickled) 294 | 295 | 296 | def test_lazy_import_error_attr_class_inheritance(): 297 | """Test that when a lazily imported error attribute is used as a base class, 298 | the import error occurs when the derived class is instantiated. 
299 | """ 300 | with import_tracker.lazy_import_errors(): 301 | # Third Party 302 | from foo.bar import Baz 303 | 304 | class Bat(Baz): 305 | def __init__(self, val): 306 | super().__init__(val) 307 | 308 | with pytest.raises(ModuleNotFoundError): 309 | Bat(1) 310 | 311 | 312 | def test_lazy_import_error_infinite_attrs(): 313 | """Make sure that a _LazyErrorAttr can recursively deliver infinite 314 | attributes to fill in arbitrary attrs on the parent module 315 | """ 316 | with import_tracker.lazy_import_errors(): 317 | # Third Party 318 | from foo.bar import Baz 319 | 320 | assert Baz.bat is Baz 321 | 322 | 323 | def test_lazy_import_error_custom_error_msg(): 324 | """Make sure that the lazy_import_errors context manager can be configured 325 | with a custom function for creating the error message. 326 | """ 327 | custom_error_message = "This is a custom message!" 328 | 329 | def make_error_msg(*_, **__): 330 | return custom_error_message 331 | 332 | with import_tracker.lazy_import_errors(make_error_message=make_error_msg): 333 | # Third Party 334 | from foo.bar import Baz 335 | 336 | with pytest.raises(ModuleNotFoundError, match=custom_error_message): 337 | Baz() 338 | 339 | 340 | def test_lazy_import_error_get_extras_modules(): 341 | """Make sure that the lazy_import_errors context manager can be configured 342 | with a get_extras_modules function and perform the custom error message 343 | creation internally. 344 | """ 345 | # Third Party 346 | import missing_dep 347 | 348 | # Using foobar inside missing_dep.mod should catch the custom error 349 | with pytest.raises( 350 | ModuleNotFoundError, 351 | match=r".*pip install missing_dep\[missing_dep.mod\].*", 352 | ): 353 | missing_dep.mod.use_foobar() 354 | 355 | # Using bazbat inside missing_dep.other should have the standard error since 356 | # missing_dep.other is not tracked as an extra 357 | with pytest.raises( 358 | ModuleNotFoundError, 359 | match="No module named 'bazbat'", 360 | ): 361 | missing_dep.other.use_bazbat() 362 | 363 | 364 | def test_lazy_import_error_mutually_exclusive_args(): 365 | """Make sure the args to lazy_import_errors are mutually exclusive""" 366 | with pytest.raises(TypeError): 367 | with import_tracker.lazy_import_errors( 368 | make_error_message=1, 369 | get_extras_modules=2, 370 | ): 371 | # Third Party 372 | import foobar 373 | 374 | 375 | def test_frame_generator_stop(): 376 | """For completeness, we need to ensure that the FrameGenerator will stop 377 | correctly if iterated to the end 378 | """ 379 | list(_FastFrameGenerator()) 380 | 381 | 382 | def test_lazy_import_error_nested(): 383 | """Make sure the each lazy import errors only pops itself off of sys.metapath""" 384 | with import_tracker.lazy_import_errors(): 385 | with import_tracker.lazy_import_errors(): 386 | pass 387 | # Third Party 388 | import foobar 389 | 390 | 391 | def test_lazy_import_error_modified_meta_path(): 392 | """Make sure lazy import error works if sys.meta_path gets modified 393 | in between 394 | """ 395 | 396 | class MockModule: 397 | def find_spec(self, *args, **kwargs): 398 | pass 399 | 400 | sys.meta_path.append(MockModule) 401 | with import_tracker.lazy_import_errors(): 402 | with import_tracker.lazy_import_errors(): 403 | pass 404 | # Third Party 405 | import foobar 406 | 407 | sys.meta_path.remove(MockModule) 408 | 409 | 410 | def test_lazy_import_error_subclass(): 411 | """Make sure lazy import error works if a class from another module 412 | is used as base class for a module where both of them are in 413 | lazy 
imports and the parent uses an optional dependency 414 | """ 415 | 416 | with import_tracker.lazy_import_errors(): 417 | # Third Party 418 | from foo.bar import Foo 419 | 420 | class Baz(Foo): 421 | pass 422 | 423 | class Bar(Baz): 424 | pass 425 | 426 | 427 | def test_lazy_import_error_import_time_dep(): 428 | """Test lazy import error for the case where the call to the optional 429 | dependency happens at import time 430 | """ 431 | # The following library implements a scenario 432 | # where the module captured in lazy_import_errors 433 | # calls out to an optional dependency via a decorator (hence at import time) 434 | # Third Party 435 | from decorator_deps import opt_decorator 436 | 437 | 438 | def test_lazy_import_error_type_dict(): 439 | """Test lazy import error for the case where the call to the optional 440 | dependency happens because of a type check using Dict 441 | """ 442 | 443 | # Third Party 444 | from type_check_deps import type_check_dict 445 | 446 | 447 | def test_lazy_import_error_type_union(): 448 | """Test lazy import error for the case where the call to the optional 449 | dependency happens because of a type check using Union 450 | """ 451 | 452 | # Third Party 453 | from type_check_deps import type_check_union 454 | -------------------------------------------------------------------------------- /test/test_main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for the main entrypoint 3 | """ 4 | 5 | # Standard 6 | from contextlib import contextmanager 7 | import json 8 | import logging 9 | import os 10 | import sys 11 | 12 | # Third Party 13 | import pytest 14 | 15 | # Local 16 | from import_tracker import constants 17 | from import_tracker.__main__ import main 18 | 19 | ## Helpers ##################################################################### 20 | 21 | 22 | @contextmanager 23 | def cli_args(*args): 24 | """Wrapper to set the sys.argv for the enclosed context""" 25 | prev_argv = sys.argv 26 | sys.argv = ["dummy_script"] + list(args) 27 | yield 28 | sys.argv = prev_argv 29 | 30 | 31 | ## Tests ####################################################################### 32 | 33 | 34 | def test_without_package(capsys): 35 | """Run the main function against the sample lib and check the output""" 36 | with cli_args("--name", "sample_lib.submod1"): 37 | main() 38 | captured = capsys.readouterr() 39 | assert captured.out 40 | parsed_out = json.loads(captured.out) 41 | assert list(parsed_out.keys()) == ["sample_lib.submod1"] 42 | assert (set(parsed_out["sample_lib.submod1"])) == {"conditional_deps"} 43 | 44 | 45 | def test_with_package(capsys): 46 | """Run the main function with a package argument""" 47 | with cli_args("--name", ".submod1", "--package", "sample_lib"): 48 | main() 49 | captured = capsys.readouterr() 50 | assert captured.out 51 | parsed_out = json.loads(captured.out) 52 | assert list(parsed_out.keys()) == ["sample_lib.submod1"] 53 | assert (set(parsed_out["sample_lib.submod1"])) == {"conditional_deps"} 54 | 55 | 56 | def test_file_without_parent_path(capsys): 57 | """Check that the corner case of __file__ being unset is caught""" 58 | with cli_args("--name", "google.protobuf"): 59 | main() 60 | captured = capsys.readouterr() 61 | assert captured.out 62 | parsed_out = json.loads(captured.out) 63 | 64 | # Just check the keys.
The values are funky because of this being run from 65 | # within a test 66 | assert list(parsed_out.keys()) == ["google.protobuf"] 67 | 68 | 69 | def test_with_logging(capsys): 70 | """Run the main function with logging turned up and make sure the output is 71 | not changed 72 | """ 73 | with cli_args( 74 | "--name", "sample_lib.submod1", "--log_level", str(logging.DEBUG - 3) 75 | ): 76 | main() 77 | captured = capsys.readouterr() 78 | assert captured.err 79 | assert captured.out 80 | parsed_out = json.loads(captured.out) 81 | assert list(parsed_out.keys()) == ["sample_lib.submod1"] 82 | assert (set(parsed_out["sample_lib.submod1"])) == {"conditional_deps"} 83 | 84 | 85 | def test_import_time_error(capsys): 86 | """Check that an exception from the imported module is forwarded""" 87 | with cli_args("--name", "bad_lib"): 88 | with pytest.raises(RuntimeError): 89 | main() 90 | 91 | 92 | def test_submodule_error(capsys): 93 | """Check that an exception from a submodule is forwarded""" 94 | with cli_args("--name", "bad_lib"): 95 | with pytest.raises(RuntimeError): 96 | main() 97 | 98 | 99 | def test_sibling_import(capsys): 100 | """Make sure that a library with a submodule that imports a sibling 101 | submodule properly tracks dependencies through the sibling 102 | """ 103 | with cli_args("--name", "inter_mod_deps", "--submodules"): 104 | main() 105 | captured = capsys.readouterr() 106 | assert captured.out 107 | parsed_out = json.loads(captured.out) 108 | assert (set(parsed_out["inter_mod_deps.submod1"])) == {"alog"} 109 | assert (set(parsed_out["inter_mod_deps.submod2"])) == { 110 | "alog", 111 | "yaml", 112 | } 113 | assert (set(parsed_out["inter_mod_deps.submod2.foo"])) == { 114 | "yaml", 115 | } 116 | assert (set(parsed_out["inter_mod_deps.submod2.bar"])) == { 117 | "yaml", 118 | } 119 | assert (set(parsed_out["inter_mod_deps.submod3"])) == { 120 | "alog", 121 | "yaml", 122 | } 123 | assert (set(parsed_out["inter_mod_deps.submod4"])) == { 124 | "yaml", 125 | } 126 | assert (set(parsed_out["inter_mod_deps.submod5"])) == { 127 | "yaml", 128 | } 129 | 130 | 131 | def test_lib_with_lazy_imports(capsys): 132 | """Make sure that a library which uses import_tracker's lazy import errors 133 | and has "traditional" conditional dependencies does not blow up when tracked 134 | """ 135 | with cli_args("--name", "lazy_import_errors"): 136 | main() 137 | captured = capsys.readouterr() 138 | assert captured.out 139 | parsed_out = json.loads(captured.out) 140 | assert "lazy_import_errors" in parsed_out 141 | -------------------------------------------------------------------------------- /test/test_setup_tools.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for setup tools 3 | """ 4 | 5 | # Standard 6 | import os 7 | import tempfile 8 | 9 | # Third Party 10 | import pytest 11 | 12 | # Local 13 | from import_tracker.setup_tools import parse_requirements 14 | 15 | sample_lib_requirements = [ 16 | "alchemy-logging>=1.0.3", 17 | "PyYaml >= 6.0", 18 | "conditional_deps", 19 | "import-tracker", 20 | ] 21 | 22 | 23 | def test_parse_requirements_happy_file(): 24 | """Make sure that parse_requirements correctly parses requirements for a 25 | library with multiple tracked modules 26 | """ 27 | with tempfile.NamedTemporaryFile("w") as requirements_file: 28 | # Make a requirements file that looks normal 29 | requirements_file.write("\n".join(sample_lib_requirements)) 30 | requirements_file.flush() 31 | 32 | # Parse the reqs for "sample_lib" 33 | requirements, 
extras_require = parse_requirements( 34 | requirements_file.name, 35 | "sample_lib", 36 | ) 37 | 38 | # Make sure the right parsing happened 39 | assert requirements == ["import-tracker"] 40 | assert extras_require == { 41 | "sample_lib.nested.submod3": sorted( 42 | ["PyYaml >= 6.0", "alchemy-logging>=1.0.3"] 43 | ), 44 | "sample_lib.nested": sorted(["PyYaml >= 6.0", "alchemy-logging>=1.0.3"]), 45 | "sample_lib.submod1": sorted(["conditional_deps"]), 46 | "sample_lib.submod2": sorted(["alchemy-logging>=1.0.3"]), 47 | "sample_lib": sorted(set(sample_lib_requirements) - {"import-tracker"}), 48 | "all": sorted(sample_lib_requirements), 49 | } 50 | 51 | 52 | @pytest.mark.parametrize("iterable_type", [list, tuple, set]) 53 | def test_parse_requirements_happy_iterable(iterable_type): 54 | """Make sure that parse_requirements correctly parses requirements for a 55 | library with multiple tracked modules from the supported iterable types 56 | """ 57 | # Parse the reqs for "sample_lib" 58 | requirements, extras_require = parse_requirements( 59 | iterable_type(sample_lib_requirements), 60 | "sample_lib", 61 | ) 62 | 63 | # Make sure the right parsing happened 64 | assert requirements == ["import-tracker"] 65 | assert extras_require == { 66 | "sample_lib.nested.submod3": sorted( 67 | ["PyYaml >= 6.0", "alchemy-logging>=1.0.3"] 68 | ), 69 | "sample_lib.nested": sorted(["PyYaml >= 6.0", "alchemy-logging>=1.0.3"]), 70 | "sample_lib.submod1": sorted(["conditional_deps"]), 71 | "sample_lib.submod2": sorted(["alchemy-logging>=1.0.3"]), 72 | "sample_lib": sorted(set(sample_lib_requirements) - {"import-tracker"}), 73 | "all": sorted(sample_lib_requirements), 74 | } 75 | 76 | 77 | def test_parse_requirements_add_untracked_reqs(): 78 | """Make sure that packages in the requirements.txt which don't show up in 79 | any of the tracked modules are added to the common requirements 80 | """ 81 | # Make requirements with an extra entry 82 | extra_req = "something-ElSe[extras]~=1.2.3" 83 | reqs = sample_lib_requirements + [extra_req] 84 | requirements, extras_require = parse_requirements(reqs, "sample_lib") 85 | 86 | # Make sure the extra requirement was added 87 | assert extra_req in requirements 88 | assert extras_require["all"] == sorted(reqs) 89 | 90 | 91 | def test_parse_requirements_add_subset_of_submodules(): 92 | """Make sure that parse_requirements can parse only a subset of the full set 93 | of submodules within the target library 94 | """ 95 | # Parse the reqs for "sample_lib" 96 | requirements, extras_require = parse_requirements( 97 | sample_lib_requirements, 98 | "sample_lib", 99 | ["sample_lib.submod1", "sample_lib.submod2"], 100 | ) 101 | 102 | # Make sure the right parsing happened 103 | assert sorted(requirements) == sorted( 104 | ["alchemy-logging>=1.0.3", "PyYaml >= 6.0", "import-tracker"] 105 | ) 106 | assert extras_require == { 107 | "sample_lib.submod1": ["conditional_deps"], 108 | "sample_lib.submod2": [], 109 | "all": sorted(sample_lib_requirements), 110 | } 111 | 112 | 113 | def test_parse_requirements_unknown_extras(): 114 | """Make sure that parse_requirements raises an error if extras_modules are 115 | requested that don't exist 116 | """ 117 | # Make sure the assertion is tripped 118 | with pytest.raises(AssertionError): 119 | parse_requirements( 120 | sample_lib_requirements, 121 | "sample_lib", 122 | ["foobar"], 123 | ) 124 | 125 | 126 | def test_parse_requirements_with_side_effects(): 127 | """Make sure that side_effect_modules can be passed through to allow for 128 | successful 
parsing 129 | """ 130 | # Parse the reqs for "side_effects". We only care that this doesn't 131 | # raise, so there's no validation of the results. 132 | parse_requirements( 133 | requirements=sample_lib_requirements, 134 | library_name="side_effects", 135 | ) 136 | 137 | 138 | def test_parse_requirements_bad_requirements_type(): 139 | """Make sure that a ValueError is raised if an invalid type is given for the 140 | requirements argument 141 | """ 142 | # Make sure the error is raised 143 | with pytest.raises(ValueError): 144 | parse_requirements({"foo": "bar"}, "sample_lib", ["foobar"]) 145 | 146 | 147 | def test_single_extras_module(): 148 | """Make sure that for a library with a single extras module and a non-zero 149 | set of non-extra modules, the deps for the extra module are correctly 150 | allocated. 151 | """ 152 | requirements, extras_require = parse_requirements( 153 | ["alchemy-logging", "PyYaml"], 154 | "single_extra", 155 | ["single_extra.extra"], 156 | ) 157 | assert requirements == sorted(["alchemy-logging"]) 158 | assert extras_require == { 159 | "all": sorted(["alchemy-logging", "PyYaml"]), 160 | "single_extra.extra": ["PyYaml"], 161 | } 162 | 163 | 164 | def test_parent_direct_deps(): 165 | """Make sure that direct dependencies of parent modules are correctly 166 | attributed when holding children as extras that also require the same deps 167 | """ 168 | requirements, extras_require = parse_requirements( 169 | ["alchemy-logging", "PyYaml"], 170 | "direct_dep_ambiguous", 171 | ["direct_dep_ambiguous.foo"], 172 | ) 173 | assert requirements == ["alchemy-logging"] 174 | assert extras_require == { 175 | "all": sorted(["PyYaml", "alchemy-logging"]), 176 | "direct_dep_ambiguous.foo": ["PyYaml"], 177 | } 178 | 179 | 180 | def test_nested_deps(): 181 | """Make sure that direct dependencies show up in requirements 182 | for nested modules 183 | """ 184 | requirements, extras_require = parse_requirements( 185 | ["sample_lib", "PyYaml", "alchemy-logging"], 186 | "direct_dep_nested", 187 | ["direct_dep_nested.nested", "direct_dep_nested.nested2"], 188 | ) 189 | assert requirements == sorted(["sample_lib"]) 190 | assert extras_require == { 191 | "all": sorted(["sample_lib", "PyYaml", "alchemy-logging"]), 192 | "direct_dep_nested.nested": sorted(["PyYaml"]), 193 | "direct_dep_nested.nested2": sorted(["alchemy-logging"]), 194 | } 195 | 196 | 197 | def test_full_depth_direct_and_transitive(): 198 | """Make sure that a library which holds a dependency as both a direct import 199 | dependency and also requires it transitively through another third party 200 | library correctly allocates the dependency to places where the intermediate 201 | third party library is required.


def test_full_depth_direct_and_transitive():
    """Make sure that a library which holds a dependency as both a direct import
    dependency and also requires it transitively through another third party
    library correctly allocates the dependency to places where the intermediate
    third party library is required.
    """
    # Run without full_depth and ensure that alog is only allocated to foo and
    # is not in the base requirements
    requirements, extras_require = parse_requirements(
        ["single_extra", "alchemy-logging"],
        "full_depth_direct_and_transitive",
        [
            "full_depth_direct_and_transitive.foo",
            "full_depth_direct_and_transitive.bar",
        ],
        full_depth=False,
    )
    assert requirements == []
    assert extras_require == {
        "all": sorted(["single_extra", "alchemy-logging"]),
        "full_depth_direct_and_transitive.foo": ["alchemy-logging"],
        "full_depth_direct_and_transitive.bar": ["single_extra"],
    }

    # Run without overriding full_depth (defaults to True) and ensure that alog
    # is found transitively via single_extra so it ends up in the base
    # requirements
    requirements, extras_require = parse_requirements(
        ["single_extra", "alchemy-logging"],
        "full_depth_direct_and_transitive",
        [
            "full_depth_direct_and_transitive.foo",
            "full_depth_direct_and_transitive.bar",
        ],
    )
    assert requirements == ["alchemy-logging"]
    assert extras_require == {
        "all": sorted(["single_extra", "alchemy-logging"]),
        "full_depth_direct_and_transitive.foo": [],
        "full_depth_direct_and_transitive.bar": ["single_extra"],
    }


def test_setup_tools_keep_optionals():
    """Make sure that the semantics of keep_optional work as expected for all
    valid inputs to keep_optional
    """
    # Without keep_optional, optional_deps.opt should not depend on alog
    requirements, extras_require = parse_requirements(
        ["alchemy-logging", "PyYaml"],
        "optional_deps",
        ["optional_deps.opt", "optional_deps.not_opt"],
    )
    assert requirements == ["PyYaml"]
    assert extras_require == {
        "all": sorted(["alchemy-logging", "PyYaml"]),
        "optional_deps.opt": [],
        "optional_deps.not_opt": ["alchemy-logging"],
    }

    # With keep_optional=True, optional_deps.opt should depend on alog
    requirements, extras_require = parse_requirements(
        ["alchemy-logging", "PyYaml"],
        "optional_deps",
        ["optional_deps.opt", "optional_deps.not_opt"],
        keep_optional=True,
    )
    assert requirements == sorted(["alchemy-logging", "PyYaml"])
    assert extras_require == {
        "all": sorted(["alchemy-logging", "PyYaml"]),
        "optional_deps.opt": [],
        "optional_deps.not_opt": [],
    }

    # With keep_optional={"optional_deps.opt": ["alog"]}, optional_deps.opt
    # should depend on alog
    requirements, extras_require = parse_requirements(
        ["alchemy-logging", "PyYaml"],
        "optional_deps",
        ["optional_deps.opt", "optional_deps.not_opt"],
        keep_optional={"optional_deps.opt": ["alog"]},
    )
    assert requirements == sorted(["alchemy-logging", "PyYaml"])
    assert extras_require == {
        "all": sorted(["alchemy-logging", "PyYaml"]),
        "optional_deps.opt": [],
        "optional_deps.not_opt": [],
    }

    # With keep_optional={"optional_deps.opt": ["something_else"]},
    # optional_deps.opt should NOT depend on alog, since only the listed
    # optional imports are kept
    requirements, extras_require = parse_requirements(
        ["alchemy-logging", "PyYaml"],
        "optional_deps",
        ["optional_deps.opt", "optional_deps.not_opt"],
        keep_optional={"optional_deps.opt": ["something_else"]},
    )
    assert requirements == sorted(["PyYaml"])
    assert extras_require == {
        "all": sorted(["alchemy-logging", "PyYaml"]),
        "optional_deps.opt": [],
        "optional_deps.not_opt": ["alchemy-logging"],
    }
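

# NOTE: Hedged, illustrative sketch only (not part of the original test
# suite), showing how the knobs exercised above might be combined in a real
# setup.py; "my_lib", "my_lib.widgets", and "yaml" are hypothetical
# placeholders:
#
#     requirements, extras_require = parse_requirements(
#         "requirements.txt",
#         "my_lib",
#         ["my_lib.widgets"],
#         full_depth=False,    # skip deps held transitively through other
#                              # third-party libraries (defaults to True)
#         keep_optional={"my_lib.widgets": ["yaml"]},  # keep this guarded
#                              # optional import; True keeps all of them
#     )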


def test_intermediate_extras():
    """Make sure that intermediate extras correctly own unique dependencies that
    belong to their children
    """
    requirements, extras_require = parse_requirements(
        ["alchemy-logging", "PyYAML"],
        "intermediate_extras",
        ["intermediate_extras.foo", "intermediate_extras.bar"],
    )
    assert not requirements
    assert extras_require == {
        "all": sorted(["alchemy-logging", "PyYAML"]),
        "intermediate_extras.foo": sorted(["alchemy-logging", "PyYAML"]),
        "intermediate_extras.bar": [],
    }


def test_intermediate_extras_with_overlap():
    """Make sure that intermediate extras correctly own unique dependencies that
    belong to their children, even when other children are held as overlapping
    extras.
    """
    requirements, extras_require = parse_requirements(
        ["alchemy-logging", "PyYAML"],
        "intermediate_extras",
        [
            "intermediate_extras.foo",
            "intermediate_extras.foo.bat",
            "intermediate_extras.bar",
        ],
    )
    assert not requirements
    assert extras_require == {
        "all": sorted(["alchemy-logging", "PyYAML"]),
        "intermediate_extras.foo": sorted(["alchemy-logging", "PyYAML"]),
        "intermediate_extras.foo.bat": sorted(["PyYAML"]),
        "intermediate_extras.bar": [],
    }
--------------------------------------------------------------------------------