├── .coveragerc ├── .git-blame-ignore-revs ├── .github └── workflows │ ├── doc.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .pylintrc ├── CONTRIBUTING.rst ├── LICENSE ├── Makefile ├── NEWS.rst ├── README.rst ├── RELEASE.rst ├── data └── share │ ├── bash-completion │ └── completions │ │ └── pkgdev │ └── zsh │ └── site-functions │ └── _pkgdev ├── doc ├── conf.py ├── contributing.rst ├── index.rst ├── man │ ├── config.rst │ └── pkgdev.rst └── news.rst ├── py_build.py ├── pyproject.toml ├── src └── pkgdev │ ├── __init__.py │ ├── __main__.py │ ├── _vendor │ ├── __init__.py │ └── tabulate.py │ ├── cli.py │ ├── const.py │ ├── git.py │ ├── mangle.py │ ├── scripts │ ├── __init__.py │ ├── argparsers.py │ ├── pkgdev.py │ ├── pkgdev_bugs.py │ ├── pkgdev_commit.py │ ├── pkgdev_manifest.py │ ├── pkgdev_mask.py │ ├── pkgdev_push.py │ ├── pkgdev_showkw.py │ └── pkgdev_tatt.py │ └── tatt │ ├── __init__.py │ └── template.sh.jinja └── tests ├── __init__.py ├── conftest.py ├── scripts ├── __init__.py ├── test_cli.py ├── test_pkgdev.py ├── test_pkgdev_bugs.py ├── test_pkgdev_commit.py ├── test_pkgdev_manifest.py ├── test_pkgdev_mask.py ├── test_pkgdev_push.py └── test_pkgdev_showkw.py ├── test_git.py └── test_mangle.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = pkgdev 3 | branch = True 4 | omit = src/*, tests/*, */_vendor/* 5 | 6 | [paths] 7 | source = **/site-packages/pkgdev 8 | 9 | [report] 10 | show_missing = True 11 | skip_covered = True 12 | exclude_lines = 13 | # re-enable the standard pragma 14 | pragma: no cover 15 | 16 | # ignore defensive assertions 17 | raise AssertionError 18 | raise NotImplementedError 19 | 20 | # ignore unexecutable code 21 | if __name__ == .__main__.: 22 | omit = 23 | **/pkgdev/scripts/pkgdev_bugs.py 24 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Reformat the repository with black-23 2 | 895f5ec3c2a760b8b8b129191e948efc1ebeb575 3 | # Reformat the repository with black-24 4 | 97df81b07a9dd8ae9b1b90b260f3c1533ef02ea7 5 | -------------------------------------------------------------------------------- /.github/workflows/doc.yml: -------------------------------------------------------------------------------- 1 | name: doc 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | workflow_dispatch: 7 | 8 | permissions: 9 | contents: read 10 | pages: write 11 | id-token: write 12 | 13 | concurrency: 14 | group: "pages" 15 | cancel-in-progress: true 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout code 22 | uses: actions/checkout@v4 23 | 24 | - name: Set up Python 3.11 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: '3.11' 28 | 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install ".[doc]" 33 | 34 | - name: Build sphinx documentation 35 | run: | 36 | make html 37 | # notify github this isn't a jekyll site 38 | touch build/sphinx/html/.nojekyll 39 | 40 | - name: Upload artifact 41 | uses: actions/upload-pages-artifact@v3 42 | with: 43 | path: build/sphinx/html 44 | 45 | deploy: 46 | environment: 47 | name: github-pages 48 | url: ${{ steps.deployment.outputs.page_url }} 49 | runs-on: ubuntu-latest 50 | needs: build 51 | 52 | steps: 53 | - name: Deploy to GitHub Pages 54 | id: deployment 55 | uses: actions/deploy-pages@v4 56 | 
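A rough local equivalent of the doc workflow above, as a sketch: it assumes the ``doc`` extra pulls in sphinx and reuses the ``make html`` target from the Makefile shown later in this listing, with output landing in build/sphinx/html::

    python -m pip install --upgrade pip
    pip install ".[doc]"
    make html
    # only needed when publishing to GitHub Pages by hand
    touch build/sphinx/html/.nojekyll
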
-------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: release 2 | 3 | on: 4 | push: 5 | branches: [deploy] 6 | tags: [v*] 7 | workflow_dispatch: 8 | 9 | jobs: 10 | build-and-deploy: 11 | runs-on: ubuntu-latest 12 | environment: release 13 | 14 | permissions: 15 | id-token: write # Used to authenticate to PyPI via OIDC 16 | 17 | contents: write # Used to authenticate github release publish 18 | 19 | steps: 20 | - name: Checkout code 21 | uses: actions/checkout@v4 22 | 23 | - name: Set up Python 3.10 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: "3.10" 27 | 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | pip install build ".[test,doc]" 32 | 33 | - name: Test with pytest 34 | env: 35 | PY_COLORS: 1 # forcibly enable pytest colors 36 | run: pytest 37 | 38 | - name: Build sdist 39 | run: | 40 | git clean -fxd 41 | make man 42 | make sdist 43 | 44 | - name: Build wheel 45 | run: make wheel 46 | 47 | - name: Output dist file info 48 | run: | 49 | sha512sum dist/* 50 | tar -ztf dist/*.tar.gz | sort 51 | 52 | - uses: actions/upload-artifact@v4 53 | with: 54 | name: results 55 | path: dist/* 56 | 57 | - name: publish 58 | uses: pypa/gh-action-pypi-publish@release/v1 59 | if: startsWith(github.ref, 'refs/tags/') 60 | 61 | - name: Create GitHub release 62 | uses: softprops/action-gh-release@v1 63 | if: startsWith(github.ref, 'refs/tags/') 64 | with: 65 | files: dist/*.tar.gz 66 | fail_on_unmatched_files: true 67 | draft: true 68 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | branches-ignore: [deploy] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | test: 11 | runs-on: ${{ matrix.os }} 12 | continue-on-error: ${{ matrix.experimental }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest] 16 | python-version: ['3.10', '3.11', '3.12'] 17 | experimental: [false] 18 | include: 19 | - os: ubuntu-latest 20 | python-version: '3.13-dev' 21 | experimental: true 22 | - os: macos-latest 23 | python-version: '3.11' 24 | experimental: true 25 | fail-fast: false 26 | 27 | steps: 28 | - name: Checkout code 29 | uses: actions/checkout@v4 30 | 31 | - name: Set up Python ${{ matrix.python-version }} 32 | uses: actions/setup-python@v5 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | cache: 'pip' 36 | cache-dependency-path: pyproject.toml 37 | 38 | # experimental targets generally lack lxml wheels 39 | - name: Install libxml2 and libxslt development packages 40 | if: ${{ matrix.experimental }} 41 | run: sudo apt install libxml2-dev libxslt-dev python3-dev 42 | 43 | # macos needs newer bash 44 | - name: Install macos deps 45 | if: ${{ matrix.os == 'macos-latest' }} 46 | run: | 47 | brew install bash gnu-sed 48 | # enable gnu-sed usage as "sed" 49 | echo "/usr/local/opt/gnu-sed/libexec/gnubin" >> $GITHUB_PATH 50 | 51 | - name: Install dependencies 52 | run: | 53 | python -m pip install --upgrade pip 54 | pip install ".[test]" pytest-github-actions-annotate-failures 55 | 56 | - name: Test with pytest 57 | env: 58 | PY_COLORS: 1 # forcibly enable pytest colors 59 | run: | 60 | pytest --cov --cov-report=term --cov-report=xml -v 61 | 62 | - name: Submit code coverage to codecov 63 | if: ${{ matrix.os == 
'ubuntu-latest' }} 64 | uses: codecov/codecov-action@v3 65 | with: 66 | files: ./coverage.xml 67 | 68 | lint: 69 | runs-on: ubuntu-latest 70 | steps: 71 | - name: Checkout code 72 | uses: actions/checkout@v4 73 | 74 | - name: Set up Python 3 75 | uses: actions/setup-python@v5 76 | with: 77 | python-version: '3.x' 78 | 79 | - name: Install dependencies 80 | run: | 81 | python -m pip install --upgrade pip 82 | pip install . pylint 83 | 84 | - name: Run linting tools 85 | run: pylint --exit-zero src/pkgdev 86 | 87 | format: 88 | runs-on: ubuntu-latest 89 | steps: 90 | - name: Checkout code 91 | uses: actions/checkout@v4 92 | - uses: psf/black@stable 93 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.pyo 3 | *.egg-info 4 | /.coverage 5 | /.tox 6 | /build 7 | /dist 8 | 9 | # docs 10 | /doc/_build 11 | /doc/api 12 | /doc/generated 13 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MESSAGES CONTROL] 2 | # snake_case variable naming style: C0103 3 | # not enough public methods: R0903 4 | # too many public methods: R0904 5 | # too many return statements: R0911 6 | # too many branches: R0912 7 | # too many local variables: R0914 8 | # too many statements: R0915 9 | disable=C0103,R0903,R0904,R0911,R0912,R0914,R0915 10 | 11 | [DESIGN] 12 | # increase allowed parent classes: R0901 13 | max-parents=15 14 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing to pkgdev 2 | ====================== 3 | 4 | Thank you for considering contributing to ``pkgdev``! We appreciate your time and 5 | effort in helping us improve our project. This document outlines the guidelines 6 | and steps for contributing to our project. 7 | 8 | Code of Conduct 9 | --------------- 10 | 11 | We expect all contributors to follow `Gentoo's Code of Conduct 12 | `_. Please make 13 | sure to read and understand it before contributing. 14 | 15 | How Can I Contribute? 16 | --------------------- 17 | 18 | There are several ways you can contribute to ``pkgdev``: 19 | 20 | - Reporting bugs 21 | - Suggesting enhancements 22 | - Writing code patches 23 | - Improving documentation 24 | - Providing feedback 25 | 26 | Reporting Bugs 27 | -------------- 28 | 29 | If you encounter any bugs or issues while using ``pkgdev``, please report them 30 | by following these steps: 31 | 32 | 1. Check if the bug has already been reported by searching our `issue tracker 33 | `_. 34 | 2. If the bug hasn't been reported, open a new issue and provide a clear and 35 | detailed description of the problem. 36 | 3. Include any relevant information, such as error messages, screenshots, or 37 | steps to reproduce the issue. 38 | 4. Assign appropriate labels to the issue (e.g., bug, tool/tatt) and provide 39 | any additional context that might be helpful. 40 | 41 | Suggesting Enhancements 42 | ----------------------- 43 | 44 | If you have ideas for new features or improvements to ``pkgdev``, we would love 45 | to hear them! To suggest an enhancement, please follow these steps: 46 | 47 | 1. Check if the enhancement has already been suggested by searching our `issue 48 | tracker `_. 49 | 2. 
If the enhancement hasn't been suggested, open a new issue and provide a 50 | clear and detailed description of your idea. 51 | 3. Explain why you think the enhancement would be valuable and how it aligns 52 | with the project's goals. 53 | 4. Assign appropriate labels to the issue (e.g., enhancement, tool/bugs) 54 | and provide any additional context that might be helpful. 55 | 56 | Pull Requests 57 | ------------- 58 | 59 | We welcome pull requests from contributors. To submit a pull request, please 60 | follow these steps: 61 | 62 | 1. Fork the repository and create a new branch for your changes. 63 | 2. Make your changes and ensure that the code passes all tests. 64 | 3. Write clear and concise commit messages that describe your changes. 65 | 4. Sign-off your commits, for example using the command ``git commit -s``. Commits 66 | must conform to `GLEP-76 <https://www.gentoo.org/glep/glep-0076.html>`_. 67 | 5. Submit a pull request, explaining the purpose and benefits of your changes. 68 | 6. Be responsive to any feedback or questions during the review process. 69 | 70 | Styleguides 71 | ----------- 72 | 73 | When contributing to ``pkgdev``, please adhere to the following styleguides: 74 | 75 | - Code formatting is done using `black <https://github.com/psf/black>`_. You 76 | can run ``make format`` to auto-format your files. 77 | - While not a hard requirement in all cases, we do want to have a healthy 78 | coverage of branches and flows. Attempt to write unit tests. 79 | 80 | Vulnerability reports 81 | ----------------------- 82 | 83 | If you have found a vulnerability in ``pkgdev``'s code, feel free to open 84 | an issue with as detailed an explanation as possible. We believe in informing 85 | our user base as fast as possible, so a vulnerability report should start as 86 | public, even if no fix is ready, in which case we would also report it in extra 87 | channels (e.g. the IRC channel and the gentoo-dev mailing list). 88 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2021-2022, pkgdev contributors 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | 2. Redistributions in binary form must reproduce the above copyright 10 | notice, this list of conditions and the following disclaimer in the 11 | documentation and/or other materials provided with the distribution. 12 | 3. Neither the name of pkgdev nor the names of its 13 | contributors may be used to endorse or promote products derived from 14 | this software without specific prior written permission. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 19 | ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 20 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 23 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 24 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 25 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 26 | POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PYTHON ?= python 2 | SPHINX_BUILD ?= $(PYTHON) -m sphinx.cmd.build 3 | 4 | .PHONY: man html 5 | man html: 6 | $(SPHINX_BUILD) -a -b $@ doc build/sphinx/$@ 7 | 8 | .PHONY: sdist wheel 9 | sdist wheel: 10 | $(PYTHON) -m build --$@ 11 | 12 | .PHONY: clean 13 | clean: 14 | $(RM) -r build/sphinx doc/api doc/generated doc/_build dist 15 | 16 | .PHONY: format 17 | format: 18 | $(PYTHON) -m black . 19 | -------------------------------------------------------------------------------- /NEWS.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | Release Notes 3 | ============= 4 | 5 | pkgdev 0.2.11 (2024-09-06) 6 | ------------------------- 7 | 8 | - bash completion: improve path handling (Arthur Zamarin) 9 | 10 | - mask: update removal line to match GLEP-84 (Arthur Zamarin) 11 | 12 | - mask: support auto filing of last-rite bug & PMASKED bugs (Arthur Zamarin, #187) 13 | 14 | - mask: support comma separated bugs for ``-b`` and ``--bug`` (Arthur Zamarin) 15 | 16 | - tatt: fix template generating extra empty file (Arthur Zamarin) 17 | 18 | pkgdev 0.2.10 (2024-03-12) 19 | ------------------------- 20 | 21 | **Fixed bugs:** 22 | 23 | - bugs: don't crash when package isn't found in git history (Arthur Zamarin) 24 | 25 | - tatt: fix ``required_use`` for packages from bug's package list (Arthur 26 | Zamarin) 27 | 28 | - tatt: test run should be after the use combinations (Arthur Zamarin, #174) 29 | 30 | - bash-completion: fix missing args for "bugs" and mistake for "mask" (Arthur 31 | Zamarin) 32 | 33 | pkgdev 0.2.9 (2024-02-08) 34 | ------------------------- 35 | 36 | - ci: add testing on Python 3.12 and Python 3.13 (Sam James, #126) 37 | 38 | **New Features:** 39 | 40 | - tatt: pass specific test configuration for the specific package, while not 41 | affecting the dependencies (Arthur Zamarin, #165) 42 | 43 | - tatt: add support for custom ``env`` files (Arthur Zamarin, #165) 44 | 45 | - bugs: print bug summary where existing bug is found (Arthur Zamarin, #163) 46 | 47 | - bugs: mention age of packages in the bug description (Arthur Zamarin, #140) 48 | 49 | - bugs: support ``~/.bugzrc`` for api-key extraction (Arthur Zamarin, #162) 50 | 51 | - bugs: add ``--find-by-maintainer`` option, for finding all packages 52 | maintained by a given maintainer (Arthur Zamarin, #157, #168) 53 | 54 | - bugs: add support for filtering targets by ``StableRequest`` results from 55 | ``pkgcheck scan`` (Arthur Zamarin, #157, #168) 56 | 57 | - bugs: allow extending maintainer search by project membership (Arthur Zamarin, 58 | #157, #168) 59 | 60 | - bugs: support editing the resulting graph before filing bugs (Arthur Zamarin, 61 | #169) 62 | 63 | - bugs: indicate why dependencies are being added to the graph (Arthur Zamarin, 64 | #170) 65 | 66 | **Fixed 
bugs:** 67 | 68 | - bugs: prefer using user selected targets over latest dependency (Arthur 69 | Zamarin, #161) 70 | 71 | - bugs: merge stable groups as first step (Arthur Zamarin) 72 | 73 | - bugs: handle merging of top level nodes (Arthur Zamarin, #125, #167) 74 | 75 | - docs: Add ``intersphinx_mapping`` (Brian Harring, #171) 76 | 77 | pkgdev 0.2.8 (2023-09-09) 78 | ------------------------- 79 | 80 | **New Features:** 81 | 82 | - pkgdev bugs: add support for passing a root blocker bug, upon which all top 83 | level bugs will block (Arthur Zamarin, #139) 84 | 85 | - pkgdev bugs: fallback to ``~/.bugz_token`` for api-key (Arthur Zamarin, #138) 86 | 87 | - pkgdev bugs: improve ``--api-key`` description and include appropriate 88 | warning (Florian Schmaus, #159) 89 | 90 | - pkgdev bugs: add support for stabilization groups (Arthur Zamarin, #154) 91 | 92 | - pkgdev commit: add support for enabling or disabling gpg signing (Arthur 93 | Zamarin, #147) 94 | 95 | - pkgdev push: ``--ask`` stops for confirmation on warnings too (Arthur Zamarin, 96 | #152) 97 | 98 | **Fixed bugs:** 99 | 100 | - pkgdev bugs: truncate too long bug summaries (Arthur Zamarin, #141) 101 | 102 | - pkgdev bugs: show correct number of bugs which would be opened (Arthur 103 | Zamarin, #142) 104 | 105 | - pkgdev bugs: do not swallow exceptions when reading ``~/.bugz_token`` 106 | (Florian Schmaus, #158) 107 | 108 | pkgdev 0.2.7 (2023-04-22) 109 | ------------------------- 110 | 111 | **New Features:** 112 | 113 | - pkgdev bugs: query for existing open bugs (Arthur Zamarin) 114 | 115 | - pkgdev bugs: support piping package list from stdin (Arthur Zamarin, #136) 116 | 117 | - git: declare ``PKGDEV=1`` environment variable for git commands (Arthur 118 | Zamarin, #133) 119 | 120 | **Fixed bugs:** 121 | 122 | - pkgdev bugs: handle correctly merge on new keywords of starting point (Arthur 123 | Zamarin) 124 | 125 | - pkgdev bugs: fix spelling of agent noun for 'file' (Arsen Arsenović, #135) 126 | 127 | - pkgdev bugs: better error message when package not found (Arthur Zamarin, 128 | #134) 129 | 130 | - pkgdev bugs: fix restriction passing to ``find_best_match`` (Arthur Zamarin, 131 | #131) 132 | 133 | pkgdev 0.2.5 (2023-03-11) 134 | ------------------------- 135 | 136 | **New Features:** 137 | 138 | - pkgdev tatt: new tool for package testing (Arthur Zamarin, #109) 139 | 140 | - pkgdev bugs: new tool for filing stable bugs (Arthur Zamarin, #113) 141 | 142 | This tool is currently *very experimental* and breakage should be expected. 143 | Use very carefully and monitor created bugs! 
144 | 145 | - commit: use same summary when matching across multiple ebuilds (Arthur 146 | Zamarin, #116) 147 | 148 | **Fixed bugs:** 149 | 150 | - commit: enable ``-e`` usage with ``-M`` or ``-m`` (Arthur Zamarin) 151 | 152 | - commit: generate commit title for commit related files only (Arthur Zamarin, 153 | #122) 154 | 155 | pkgdev 0.2.4 (2022-11-26) 156 | ------------------------- 157 | 158 | - commit: don't show disable for python targets that are disabled (Arthur 159 | Zamarin) 160 | 161 | - commit: mention ``-e`` as a nice option (Arthur Zamarin) 162 | https://bugs.gentoo.org/846785 163 | 164 | - Use flit with custom wrapper as build backend (Arthur Zamarin, #104) 165 | 166 | - showkw: use color 90 instead of 30 (Arthur Zamarin) 167 | 168 | - cli: add support to disable colors using environment variable ``NOCOLOR`` 169 | (Arthur Zamarin) 170 | 171 | - push: add ``--pull`` option to auto pull and rebase latest changes from 172 | remote before scanning and pushing (Arthur Zamarin, #105) 173 | 174 | pkgdev 0.2.3 (2022-10-14) 175 | ------------------------- 176 | 177 | - mask: fix unrelated addition of trailing whitespace (Arthur Zamarin, #98) 178 | 179 | - commit: add ``--distdir`` for manifest operations (Arthur Zamarin, #99) 180 | 181 | - manifest: better handling of path target (Arthur Zamarin, #85) 182 | 183 | pkgdev 0.2.2 (2022-09-20) 184 | ------------------------- 185 | 186 | - config: fix loading when ``XDG_CONFIG_HOME`` is defined (Arthur Zamarin, #73) 187 | 188 | - enable Python 3.11 (Sam James, #81) 189 | 190 | - mask: improve parsing of empty header line (Arthur Zamarin, #87) 191 | 192 | 193 | 194 | - config: add support for per repo configuration (Arthur Zamarin, #92) 195 | 196 | - fix issues with tests for masking with VISUAL set (Arthur Zamarin, #93) 197 | 198 | pkgdev 0.2.1 (2022-05-21) 199 | ------------------------- 200 | 201 | - pkgdev commit: **BREAKING-CHANGE** disable sign-off by default (Arthur 202 | Zamarin, #68) 203 | 204 | - pkgdev: add configuration support. For more info look at [#]_. (Arthur 205 | Zamarin, #48, #62) 206 | 207 | - pkgdev commit: new summary for stabilizing ALLARCHES (Arthur Zamarin, #61) 208 | 209 | - pkgdev mask: offer to send last-rite message email to gentoo-dev ML when 210 | last-riting a package (Arthur Zamarin, #63) 211 | 212 | - pkgdev manifest: add ``--if-modified`` - restrict manifest targets to those 213 | having uncommitted modifications (Arthur Zamarin, #66) 214 | 215 | - pkgdev manifest: add ``--ignore-fetch-restricted`` - skip fetch restricted 216 | ebuilds (Arthur Zamarin, #67) 217 | 218 | .. 
[#] https://pkgcore.github.io/pkgdev/man/pkgdev.html#config-file-support 219 | 220 | pkgdev 0.2.0 (2022-04-10) 221 | ------------------------- 222 | 223 | - pkgdev commit: Mangle copyright header from single year into year range when 224 | appropriate (thanks to Thomas Bracht Laumann Jespersen, #49) 225 | 226 | - pkgdev commit: Always sort KEYWORDS via mangler (Arthur Zamarin, #47) 227 | 228 | - pkgdev commit: For new packages, include version in commit message ("new 229 | package, add ${PV}") (Arthur Zamarin, #53) 230 | 231 | - pkgdev mask: Extend mask comment template (thanks to Thomas Bracht Laumann 232 | Jespersen, #56) 233 | 234 | - pkgdev mask: Accept -b/--bug for referencing bugs (thanks to Thomas Bracht 235 | Laumann Jespersen, #56) 236 | 237 | pkgdev 0.1.9 (2021-07-31) 238 | ------------------------- 239 | 240 | - pkgdev commit: Revert copyright mangling to previous behavior. 241 | 242 | pkgdev 0.1.8 (2021-07-28) 243 | ------------------------- 244 | 245 | - pkgdev commit: Replace entire copyright date range for new files. 246 | 247 | - pkgdev commit: Fix summary generation for certain rename conditions. 248 | 249 | pkgdev 0.1.7 (2021-06-29) 250 | ------------------------- 251 | 252 | - pkgdev commit: Add all matching pkg versions to historical repo (#40). 253 | 254 | - pkgdev commit: Use ``git diff-index`` instead of ``git diff`` to avoid config 255 | settings affecting output. 256 | 257 | pkgdev 0.1.6 (2021-06-11) 258 | ------------------------- 259 | 260 | - pkgdev showkw: Add bash completion support (#38). 261 | 262 | - pkgdev commit: Generate summaries for package changes with profile updates, 263 | e.g. renaming a package and updating profiles/updates in the same commit. 264 | 265 | - pkgdev commit: Avoid crash when footer content exists with no summary 266 | template (#39). 267 | 268 | - pkgdev commit: Add initial support for generating summaries from bash diffs. 269 | For example, this allows automatic summaries to be generated for simple 270 | PYTHON_COMPAT changes. 271 | 272 | pkgdev 0.1.5 (2021-06-03) 273 | ------------------------- 274 | 275 | - Fix historical repo creation for eclass sourcing. 276 | 277 | - Add initial bash completion support. 278 | 279 | pkgdev 0.1.4 (2021-05-25) 280 | ------------------------- 281 | 282 | - pkgdev showkw: Analog to eshowkw from gentoolkit, migrated from pkgcore's 283 | pshowkw. 284 | 285 | - pkgdev manifest: Add -d/--distdir option for custom DISTDIR. 286 | 287 | - pkgdev mask: Change removal format to a 'tag: value' style. 288 | 289 | pkgdev 0.1.3 (2021-03-26) 290 | ------------------------- 291 | 292 | - pkgdev mask: Initial implementation of package.mask mangling support. 293 | 294 | - pkgdev commit: Allow -s/--scan to accept an optional boolean arg for 295 | consistency. 296 | 297 | - pkgdev commit: Support partial package manifesting (#33). 298 | 299 | - pkgdev commit: Add -T/--tag option to add generic commit tags. 300 | 301 | pkgdev 0.1.2 (2021-03-19) 302 | ------------------------- 303 | 304 | - pkgdev commit: Support pulling historical data from unconfigured repos. 305 | 306 | - Add initial zsh completion support (#16). 307 | 308 | pkgdev 0.1.1 (2021-03-12) 309 | ------------------------- 310 | 311 | - Replace --ignore-failures option with -A/--ask for ``pkgdev commit`` and 312 | ``pkgdev push``. 313 | 314 | - pkgdev push: Drop explicitly enabled --signed option for gentoo repo (#27). 315 | 316 | - pkgdev commit: Add support for -b/--bug and -c/--closes options. 
317 | 318 | - pkgdev commit: Initial support for summary generation for metadata.xml 319 | changes (#9). 320 | 321 | - pkgdev commit: Enabled signed commits and signoffs based on repo metadata 322 | (#25). 323 | 324 | - pkgdev commit: Initial support for generating modify summaries. 325 | 326 | - pkgdev commit: Support summary generation for single rename changes that 327 | don't involve revbumps. 328 | 329 | - pkgdev commit: Add -M/--message-template support. 330 | 331 | - pkgdev commit: Support multiple -m/--message options similar to ``git 332 | commit``. 333 | 334 | - pkgdev commit: Support generating manifest summaries (#12). 335 | 336 | pkgdev 0.1 (2021-03-05) 337 | ----------------------- 338 | 339 | - Initial release. 340 | 341 | - pkgdev commit: Add subcommand wrapping ``git commit`` supporting commit 342 | message templating, ebuild manifesting, structured file mangling, and commit 343 | scanning via pkgcheck. 344 | 345 | - pkgdev push: Add subcommand wrapping ``git push`` that verifies local commits 346 | with pkgcheck before pushing them upstream. 347 | 348 | - pkgdev manifest: Add subcommand for manifesting ebuilds. 349 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | |pypi| |test| |coverage| 2 | 3 | ====== 4 | pkgdev 5 | ====== 6 | 7 | pkgdev provides a collection of tools for Gentoo development including: 8 | 9 | **pkgdev commit**: commit to an ebuild repository 10 | 11 | **pkgdev manifest**: update package manifests 12 | 13 | **pkgdev mask**: mask packages 14 | 15 | **pkgdev push**: scan commits for QA issues before pushing upstream 16 | 17 | **pkgdev showkw**: show package keywords 18 | 19 | Dependencies 20 | ============ 21 | 22 | pkgdev is developed alongside pkgcheck_, pkgcore_, and snakeoil_. Running 23 | pkgdev from git will often require them from git as well. 24 | 25 | For releases, see the required runtime dependencies_. 26 | 27 | Installing 28 | ========== 29 | 30 | Installing latest pypi release:: 31 | 32 | pip install pkgdev 33 | 34 | Installing from git:: 35 | 36 | pip install https://github.com/pkgcore/pkgdev/archive/main.tar.gz 37 | 38 | Installing from a tarball:: 39 | 40 | pip install . 41 | 42 | 43 | .. _pkgcheck: https://github.com/pkgcore/pkgcheck 44 | .. _pkgcore: https://github.com/pkgcore/pkgcore 45 | .. _snakeoil: https://github.com/pkgcore/snakeoil 46 | .. _dependencies: https://github.com/pkgcore/pkgdev/blob/main/requirements/install.txt 47 | 48 | .. |pypi| image:: https://img.shields.io/pypi/v/pkgdev.svg 49 | :target: https://pypi.python.org/pypi/pkgdev 50 | .. |test| image:: https://github.com/pkgcore/pkgdev/workflows/test/badge.svg 51 | :target: https://github.com/pkgcore/pkgdev/actions?query=workflow%3A%22test%22 52 | .. |coverage| image:: https://codecov.io/gh/pkgcore/pkgdev/branch/main/graph/badge.svg 53 | :target: https://codecov.io/gh/pkgcore/pkgdev 54 | -------------------------------------------------------------------------------- /RELEASE.rst: -------------------------------------------------------------------------------- 1 | Release process 2 | --------------- 3 | 4 | #. Add new entry in NEWS.rst along with changelog updates for the release. 5 | 6 | #. Make sure dependency versions are correct in requirements/install.txt. Also, 7 | if requirements/pyproject.toml exists make sure dependency versions match 8 | those in requirements/install.txt for matching dependencies. 9 | 10 | #. 
Run a test release build by force pushing to a temporary "deploy" branch. 11 | This triggers the release workflow to run on Github, but doesn't actually 12 | upload any of the generated files to PyPI or Github. 13 | 14 | #. Verify the test build looks correct and passes tests then tag the new 15 | release and push the tag. If everything works as expected, both PyPI and 16 | Github should automatically get the release files pushed to them once the 17 | action completes. 18 | 19 | #. At this point, it's good to remove the temporary deploy branch from the 20 | upstream repo. 21 | 22 | #. Make a commit bumping the package version via __version__ in the base module 23 | and push the commit. 24 | -------------------------------------------------------------------------------- /data/share/bash-completion/completions/pkgdev: -------------------------------------------------------------------------------- 1 | # bash completion for pkgdev 2 | 3 | source "/usr/share/bash-completion/helpers/gentoo-common.sh" 4 | 5 | _pkgdev() { 6 | local i=1 cmd cur prev words cword split 7 | _comp_initialize -n : "$@" || return 8 | 9 | local subcommands=" 10 | bugs 11 | commit 12 | manifest 13 | mask 14 | push 15 | showkw 16 | tatt 17 | " 18 | 19 | local base_options=" 20 | -h --help 21 | --version 22 | --debug 23 | -q --quiet 24 | -v --verbose 25 | --color 26 | " 27 | 28 | local boolean_options=" 29 | true 30 | false 31 | " 32 | 33 | _list_repo_atoms() { 34 | builtin cd "$(git rev-parse --show-toplevel)" || return 35 | if [[ $cur == */* ]]; then 36 | compgen -W "$(compgen -G "${cur}*" )" -- "${cur}" 37 | else 38 | compgen -W "$(compgen -G "${cur}*" -S / )" -- "${cur}" 39 | fi 40 | } 41 | 42 | if [[ ${prev} = "--color" ]]; then 43 | COMPREPLY=($(compgen -W "${boolean_options}" -- "${cur}")) 44 | return 45 | fi 46 | COMPREPLY=($(compgen -W "${base_options}" -- "${cur}")) 47 | 48 | # find the subcommand 49 | for (( i=1; i < COMP_CWORD; i++ )); do 50 | if [[ ${COMP_WORDS[i]} != -* ]]; then 51 | cmd=${COMP_WORDS[i]} 52 | break 53 | fi 54 | done 55 | 56 | if (( i == COMP_CWORD )); then 57 | COMPREPLY+=($(compgen -W "${subcommands}" -- "${cur}")) 58 | return 59 | fi 60 | 61 | local subcmd_options 62 | case "${cmd}" in 63 | commit) 64 | subcmd_options=" 65 | -b --bug 66 | -c --closes 67 | -T --tag 68 | -n --dry-run 69 | -s --scan 70 | -A --ask 71 | --mangle 72 | --signoff 73 | --gpg-sign --no-gpg-sign 74 | -m --message 75 | -M --message-template 76 | -e --edit 77 | -u --update 78 | -a --all 79 | " 80 | 81 | case "${prev}" in 82 | -c | --closes) 83 | local resolutions=( 84 | fixed 85 | obsolete 86 | pkgremoved 87 | ) 88 | 89 | local bug="${cur%:*}" 90 | if [[ ${bug} != ${cur} && ${bug} != http?(s) ]]; then 91 | local bugres="${resolutions[*]/#/${bug}:}" 92 | COMPREPLY=($(compgen -W "${bugres}" -- "${cur}")) 93 | _comp_ltrim_colon_completions "$cur" 94 | else 95 | COMPREPLY=() 96 | fi 97 | ;; 98 | -[bTm] | --bug | --tag | --message) 99 | COMPREPLY=() 100 | ;; 101 | -M | --message-template) 102 | _filedir 103 | ;; 104 | -s | --scan | --mangle) 105 | COMPREPLY=($(compgen -W "${boolean_options}" -- "${cur}")) 106 | ;; 107 | *) 108 | COMPREPLY+=($(compgen -W "${subcmd_options}" -- "${cur}")) 109 | ;; 110 | esac 111 | ;; 112 | manifest) 113 | subcmd_options=" 114 | -f --force 115 | -m --mirrors 116 | -d --distdir 117 | --if-modified 118 | " 119 | 120 | case "${prev}" in 121 | -d | --distdir) 122 | _filedir -d 123 | ;; 124 | *) 125 | COMPREPLY+=($(compgen -W "${subcmd_options}" -- "${cur}")) 126 | COMPREPLY+=($(_list_repo_atoms)) 
127 | ;; 128 | esac 129 | ;; 130 | mask) 131 | subcmd_options=" 132 | -r --rites 133 | -b --bug 134 | --email 135 | --api-key 136 | --file-bug 137 | " 138 | 139 | case "${prev}" in 140 | -[rb] | --rites | --bugs | --api-key) 141 | COMPREPLY=() 142 | ;; 143 | *) 144 | COMPREPLY+=($(compgen -W "${subcmd_options}" -- "${cur}")) 145 | COMPREPLY+=($(_list_repo_atoms)) 146 | ;; 147 | esac 148 | ;; 149 | push) 150 | subcmd_options=" 151 | -A --ask 152 | -n --dry-run 153 | --pull 154 | " 155 | 156 | COMPREPLY+=($(compgen -W "${subcmd_options}" -- "${cur}")) 157 | ;; 158 | showkw) 159 | subcmd_options=" 160 | -f --format 161 | -c --collapse 162 | -s --stable 163 | -u --unstable 164 | -o --only-unstable 165 | -p --prefix 166 | -a --arch 167 | -r --repo 168 | " 169 | 170 | case "${prev}" in 171 | -f | --format) 172 | format_options=" 173 | fancy_grid 174 | fancy_outline 175 | github 176 | grid 177 | html 178 | jira 179 | latex 180 | latex_booktabs 181 | latex_longtable 182 | latex_raw 183 | mediawiki 184 | moinmoin 185 | orgtbl 186 | pipe 187 | plain 188 | presto 189 | pretty 190 | psql 191 | rst 192 | showkw 193 | simple 194 | textile 195 | tsv 196 | unsafehtml 197 | youtrack 198 | " 199 | COMPREPLY=($(compgen -W "${format_options}" -- "${cur}")) 200 | ;; 201 | -r | --repo) 202 | COMPREPLY=($(compgen -W "$(_parsereposconf -l)" -- "${cur}")) 203 | ;; 204 | -a | --arch) 205 | COMPREPLY=() 206 | ;; 207 | *) 208 | COMPREPLY+=($(compgen -W "${subcmd_options}" -- "${cur}")) 209 | COMPREPLY+=($(_list_repo_atoms)) 210 | ;; 211 | esac 212 | ;; 213 | tatt) 214 | subcmd_options=" 215 | --api-key 216 | -j --job-name 217 | -b --bug 218 | -t --test 219 | -u --use-combos 220 | --ignore-prefixes 221 | --use-default 222 | --use-random 223 | --use-expand-random 224 | -p --package 225 | -s --stablereq 226 | -k --keywording 227 | --template-file 228 | --logs-dir 229 | --emerge-opts 230 | --extra-env-file 231 | " 232 | 233 | case "${prev}" in 234 | -[jbup] | --api-key | --job-name | --bug | --use-combos | --package | --emerge-opts) 235 | COMPREPLY=() 236 | ;; 237 | --template-file) 238 | _filedir 239 | ;; 240 | --logs-dir) 241 | _filedir -d 242 | ;; 243 | --extra-env-file) 244 | if [[ -d /etc/portage/env/ ]]; then 245 | pushd /etc/portage/env/ >& /dev/null 246 | local SETS=( * ) 247 | COMPREPLY=($(compgen -W "${SETS[*]}" -- "${cur}" )) 248 | popd >& /dev/null 249 | else 250 | COMPREPLY=() 251 | fi 252 | ;; 253 | *) 254 | COMPREPLY+=($(compgen -W "${subcmd_options}" -- "${cur}")) 255 | ;; 256 | esac 257 | ;; 258 | bugs) 259 | subcmd_options=" 260 | --api-key 261 | --edit-graph 262 | --auto-cc-arches 263 | --find-by-maintainer 264 | --projects 265 | --filter-stablereqs 266 | --stabletime 267 | --blocks 268 | --dot 269 | -s --stablereq 270 | -k --keywording 271 | " 272 | 273 | case "${prev}" in 274 | --api-key | --auto-cc-arches | --blocks | --find-by-maintainer | --stabletime) 275 | COMPREPLY=() 276 | ;; 277 | --dot) 278 | _filedir 279 | ;; 280 | *) 281 | COMPREPLY+=($(compgen -W "${subcmd_options}" -- "${cur}")) 282 | ;; 283 | esac 284 | ;; 285 | esac 286 | } 287 | complete -F _pkgdev pkgdev 288 | 289 | # vim: set ft=bash sw=4 et sts=4 : 290 | -------------------------------------------------------------------------------- /data/share/zsh/site-functions/_pkgdev: -------------------------------------------------------------------------------- 1 | #compdef pkgdev 2 | 3 | typeset -a base_options 4 | local curcontext=$curcontext state state_descr line ret=1 5 | 6 | base_options=( 7 | '(- :)'{-h,--help}'[show help information 
and exit]' 8 | '(- :)'--version'[show version information and exit]' 9 | '(--debug --help -h)--debug[enable debugging output]' 10 | '(--quiet -q --verbose -v)'{-q,--quiet}'[suppress non-error output]' 11 | '(--verbose -v --quiet -q)'{-v,--verbose}'[show verbose output]' 12 | "--color[Color output]:yes/no:((y\:'yes' n\:'no'))" 13 | ) 14 | 15 | _arguments -C \ 16 | $base_options \ 17 | '(-): :->command' \ 18 | '(-)*:: :->subcommand' \ 19 | && ret=0 20 | 21 | case $state in 22 | (command) 23 | typeset -a subcommands 24 | 25 | subcommands=( 26 | commit:'create git commit' 27 | manifest:'update package manifests' 28 | mask:'mask packages' 29 | push:'run QA checks on commits and push them' 30 | showkw:'show package keywords' 31 | ) 32 | 33 | _describe -t subcommands subcommand subcommands && ret=0 34 | 35 | ;; 36 | (subcommand) 37 | curcontext=${curcontext%:*}-$line[1]: 38 | 39 | case $line[1] in 40 | (commit) 41 | _arguments -C -A '-*' \ 42 | $base_options \ 43 | {'(--bug)-b','(-b)--bug'}'[add Bug tag for a given Gentoo or upstream bug]:bug ID or URL' \ 44 | {'(--closes)-c','(-c)--closes'}'[add Closes tag for a given Gentoo bug or upstream PR URL]:bug ID or URL' \ 45 | {'(--tag)-T','(-T)--tag'}'[add commit tag]:tag\:value' \ 46 | {'(--dry-run)-n','(-n)--dry-run'}'[pretend to create commit]' \ 47 | {'(--scan)-s','(-s)--scan'}'[run pkgcheck against staged changes]' \ 48 | {'(--ask)-A','(-A)--ask'}'[confirm creating commit with QA errors]' \ 49 | '--mangle[forcibly enable/disable file mangling]' \ 50 | '--signoff[add a Signed-off-by trailer]' \ 51 | '--gpg-sign[enable GPG signing]' \ 52 | '--no-gpg-sign[disable GPG signing]' \ 53 | \*{--message,-m}'[specify commit message]:message' \ 54 | {'(--message-template)-M','(-M)--message-template'}'[use commit message template from specified file]:template:_files' \ 55 | {'(--edit)-e','(-e)--edit'}'[force edit of commit]' \ 56 | {'(--update)-u','(-u)--update'}'[stage all changed files]' \ 57 | {'(--all)-a','(-a)--all'}'[stage all changed/new/removed files]' \ 58 | && ret=0 59 | ;; 60 | (manifest) 61 | _arguments -C -A '-*' \ 62 | $base_options \ 63 | {'(--distdir)-d','(-d)--distdir'}'[target download directory]:distdir:_files -/' \ 64 | {'(--force)-f','(-f)--force'}'[forcibly remanifest packages]' \ 65 | {'(--mirrors)-m','(-m)--mirrors'}'[enable fetching from Gentoo mirrors]' \ 66 | '--if-modified[only check packages that have uncommitted modifications]' \ 67 | && ret=0 68 | ;; 69 | (mask) 70 | _arguments -C -A '-*' \ 71 | $base_options \ 72 | {'(--rites)-r','(-r)--rites'}'[mark for last rites]' \ 73 | {'(--bugs)-b','(-b)--bugs'}'[reference bug in the mask comment]' \ 74 | '--email[spawn email composer with prepared email for sending to mailing lists]' \ 75 | && ret=0 76 | ;; 77 | (push) 78 | _arguments -C -A '-*' \ 79 | $base_options \ 80 | {'(--ask)-A','(-A)--ask'}'[confirm pushing commits with QA errors]' \ 81 | {'(--dry-run)-n','(-n)--dry-run'}'[pretend to push commits]' \ 82 | '--pull[run git pull --rebase before scanning]' \ 83 | && ret=0 84 | ;; 85 | (showkw) 86 | _arguments -C -A '-*' \ 87 | $base_options \ 88 | {'(--format)-f','(-f)--format'}'[keywords table format]' \ 89 | {'(--collapse)-c','(-c)--collapse'}'[show collapsed list of arches]' \ 90 | {'(--stable)-s','(-s)--stable'}'[show stable arches]' \ 91 | {'(--unstable)-u','(-u)--unstable'}'[show unstable arches]' \ 92 | {'(--only-unstable)-o','(-o)--only-unstable'}'[show arches that only have unstable keywords]' \ 93 | {'(--prefix)-p','(-p)--prefix'}'[show prefix and non-native arches]' \ 94 | 
{'(--arch)-a','(-a)--arch'}'[select arches to display]:arch' \ 95 | {'(--repo)-r','(-r)--repo'}'[repo to query]:repo' \ 96 | && ret=0 97 | ;; 98 | (*) 99 | _nothing 100 | ;; 101 | esac 102 | ;; 103 | esac 104 | 105 | return ret 106 | 107 | # vim: set et sw=2 ts=2 ft=zsh: 108 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # pkgdev documentation build configuration file, created by 5 | # sphinx-quickstart on Thu Apr 9 00:50:08 2015. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # -- General configuration ------------------------------------------------ 17 | 18 | # If your documentation needs a minimal Sphinx version, state it here. 19 | # needs_sphinx = '1.0' 20 | 21 | # Add any Sphinx extension module names here, as strings. They can be 22 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 23 | # ones. 24 | extensions = [ 25 | "sphinx.ext.autodoc", 26 | "sphinx.ext.autosummary", 27 | "sphinx.ext.autosectionlabel", 28 | "sphinx.ext.doctest", 29 | "sphinx.ext.extlinks", 30 | "sphinx.ext.intersphinx", 31 | "sphinx.ext.todo", 32 | "sphinx.ext.coverage", 33 | "sphinx.ext.ifconfig", 34 | "sphinx.ext.viewcode", 35 | "snakeoil.dist.sphinxext", 36 | ] 37 | 38 | # Add any paths that contain templates here, relative to this directory. 39 | # templates_path = ['_templates'] 40 | 41 | # The suffix of source filenames. 42 | source_suffix = ".rst" 43 | 44 | # The encoding of source files. 45 | # source_encoding = 'utf-8-sig' 46 | 47 | # The master toctree document. 48 | master_doc = "index" 49 | 50 | # General information about the project. 51 | project = "pkgdev" 52 | authors = "" 53 | copyright = "2021-2022, pkgdev contributors" 54 | 55 | # version is set by snakeoil extension 56 | release = "master" 57 | 58 | # The language for content autogenerated by Sphinx. Refer to documentation 59 | # for a list of supported languages. 60 | # language = None 61 | 62 | # There are two options for replacing |today|: either, you set today to some 63 | # non-false value, then it is used: 64 | # today = '' 65 | # Else, today_fmt is used as the format for a strftime call. 66 | # today_fmt = '%B %d, %Y' 67 | 68 | # List of patterns, relative to source directory, that match files and 69 | # directories to ignore when looking for source files. 70 | exclude_patterns = ["_build", "generated"] 71 | 72 | # The reST default role (used for this markup: `text`) to use for all 73 | # documents. 74 | # default_role = None 75 | 76 | # If true, '()' will be appended to :func: etc. cross-reference text. 77 | # add_function_parentheses = True 78 | 79 | # If true, the current module name will be prepended to all description 80 | # unit titles (such as .. function::). 81 | # add_module_names = True 82 | 83 | # If true, sectionauthor and moduleauthor directives will be shown in the 84 | # output. They are ignored by default. 85 | # show_authors = False 86 | 87 | # The name of the Pygments (syntax highlighting) style to use. 88 | pygments_style = "sphinx" 89 | 90 | # A list of ignored prefixes for module index sorting. 
91 | # modindex_common_prefix = [] 92 | 93 | # If true, keep warnings as "system message" paragraphs in the built documents. 94 | # keep_warnings = False 95 | 96 | # -- Options for HTML output ---------------------------------------------- 97 | 98 | # The theme to use for HTML and HTML Help pages. See the documentation for 99 | # a list of builtin themes. 100 | html_theme = "default" 101 | 102 | # Theme options are theme-specific and customize the look and feel of a theme 103 | # further. For a list of options available for each theme, see the 104 | # documentation. 105 | # html_theme_options = {} 106 | 107 | # Add any paths that contain custom themes here, relative to this directory. 108 | # html_theme_path = [] 109 | 110 | # The name for this set of Sphinx documents. If None, it defaults to 111 | # " v documentation". 112 | # html_title = None 113 | 114 | # A shorter title for the navigation bar. Default is the same as html_title. 115 | # html_short_title = None 116 | 117 | # The name of an image file (relative to this directory) to place at the top 118 | # of the sidebar. 119 | # html_logo = None 120 | 121 | # The name of an image file (within the static path) to use as favicon of the 122 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 123 | # pixels large. 124 | # html_favicon = None 125 | 126 | # Add any paths that contain custom static files (such as style sheets) here, 127 | # relative to this directory. They are copied after the builtin static files, 128 | # so a file named "default.css" will overwrite the builtin "default.css". 129 | # html_static_path = ['_static'] 130 | 131 | # Add any extra paths that contain custom files (such as robots.txt or 132 | # .htaccess) here, relative to this directory. These files are copied 133 | # directly to the root of the documentation. 134 | # html_extra_path = [] 135 | 136 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 137 | # using the given strftime format. 138 | # html_last_updated_fmt = '%b %d, %Y' 139 | 140 | # If true, SmartyPants will be used to convert quotes and dashes to 141 | # typographically correct entities. 142 | # html_use_smartypants = True 143 | 144 | # Custom sidebar templates, maps document names to template names. 145 | # html_sidebars = {} 146 | 147 | # Additional templates that should be rendered to pages, maps page names to 148 | # template names. 149 | # html_additional_pages = {} 150 | 151 | # If false, no module index is generated. 152 | # html_domain_indices = True 153 | 154 | # If false, no index is generated. 155 | # html_use_index = True 156 | 157 | # If true, the index is split into individual pages for each letter. 158 | # html_split_index = False 159 | 160 | # If true, links to the reST sources are added to the pages. 161 | # html_show_sourcelink = True 162 | 163 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 164 | # html_show_sphinx = True 165 | 166 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 167 | # html_show_copyright = True 168 | 169 | # If true, an OpenSearch description file will be output, and all pages will 170 | # contain a tag referring to it. The value of this option must be the 171 | # base URL from which the finished HTML is served. 172 | # html_use_opensearch = '' 173 | 174 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 175 | # html_file_suffix = None 176 | 177 | # Output file base name for HTML help builder. 
178 | htmlhelp_basename = "pkgdevdoc" 179 | 180 | 181 | # -- Options for LaTeX output --------------------------------------------- 182 | 183 | latex_elements = { 184 | # The paper size ('letterpaper' or 'a4paper'). 185 | #'papersize': 'letterpaper', 186 | # The font size ('10pt', '11pt' or '12pt'). 187 | #'pointsize': '10pt', 188 | # Additional stuff for the LaTeX preamble. 189 | #'preamble': '', 190 | } 191 | 192 | # Grouping the document tree into LaTeX files. List of tuples 193 | # (source start file, target name, title, 194 | # author, documentclass [howto, manual, or own class]). 195 | latex_documents = [ 196 | ("index", "pkgdev.tex", "pkgdev Documentation", authors, "manual"), 197 | ] 198 | 199 | # The name of an image file (relative to this directory) to place at the top of 200 | # the title page. 201 | # latex_logo = None 202 | 203 | # For "manual" documents, if this is true, then toplevel headings are parts, 204 | # not chapters. 205 | # latex_use_parts = False 206 | 207 | # If true, show page references after internal links. 208 | # latex_show_pagerefs = False 209 | 210 | # If true, show URL addresses after external links. 211 | # latex_show_urls = False 212 | 213 | # Documents to append as an appendix to all manuals. 214 | # latex_appendices = [] 215 | 216 | # If false, no module index is generated. 217 | # latex_domain_indices = True 218 | 219 | 220 | # -- Options for manual page output --------------------------------------- 221 | 222 | # One entry per manual page. List of tuples 223 | # (source start file, name, description, authors, manual section). 224 | man_pages = [] 225 | 226 | # If true, show URL addresses after external links. 227 | # man_show_urls = False 228 | 229 | # -- Options for Texinfo output ------------------------------------------- 230 | 231 | # Grouping the document tree into Texinfo files. List of tuples 232 | # (source start file, target name, title, author, 233 | # dir menu entry, description, category) 234 | texinfo_documents = [ 235 | ( 236 | "index", 237 | "pkgdev", 238 | "pkgdev Documentation", 239 | authors, 240 | "pkgdev", 241 | "One line description of project.", 242 | "Miscellaneous", 243 | ), 244 | ] 245 | 246 | # Documents to append as an appendix to all manuals. 247 | # texinfo_appendices = [] 248 | 249 | # If false, no module index is generated. 250 | # texinfo_domain_indices = True 251 | 252 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 253 | # texinfo_show_urls = 'footnote' 254 | 255 | # If true, do not generate a @detailmenu in the "Top" node's menu. 256 | # texinfo_no_detailmenu = False 257 | 258 | 259 | # -- Options for Epub output ---------------------------------------------- 260 | 261 | # Bibliographic Dublin Core info. 262 | epub_title = project 263 | epub_author = authors 264 | epub_publisher = authors 265 | epub_copyright = copyright 266 | 267 | # The basename for the epub file. It defaults to the project name. 268 | # epub_basename = 'pkgdev' 269 | 270 | # The HTML theme for the epub output. Since the default themes are not optimized 271 | # for small screen space, using the same theme for HTML and epub output is 272 | # usually not wise. This defaults to 'epub', a theme designed to save visual 273 | # space. 274 | # epub_theme = 'epub' 275 | 276 | # The language of the text. It defaults to the language option 277 | # or en if the language is not set. 278 | # epub_language = '' 279 | 280 | # The scheme of the identifier. Typical schemes are ISBN or URL. 
281 | # epub_scheme = '' 282 | 283 | # The unique identifier of the text. This can be an ISBN number 284 | # or the project homepage. 285 | # epub_identifier = '' 286 | 287 | # A unique identification for the text. 288 | # epub_uid = '' 289 | 290 | # A tuple containing the cover image and cover page html template filenames. 291 | # epub_cover = () 292 | 293 | # A sequence of (type, uri, title) tuples for the guide element of content.opf. 294 | # epub_guide = () 295 | 296 | # HTML files that should be inserted before the pages created by sphinx. 297 | # The format is a list of tuples containing the path and title. 298 | # epub_pre_files = [] 299 | 300 | # HTML files that should be inserted after the pages created by sphinx. 301 | # The format is a list of tuples containing the path and title. 302 | # epub_post_files = [] 303 | 304 | # A list of files that should not be packed into the epub file. 305 | epub_exclude_files = ["search.html"] 306 | 307 | # The depth of the table of contents in toc.ncx. 308 | # epub_tocdepth = 3 309 | 310 | # Allow duplicate toc entries. 311 | # epub_tocdup = True 312 | 313 | # Choose between 'default' and 'includehidden'. 314 | # epub_tocscope = 'default' 315 | 316 | # Fix unsupported image types using the PIL. 317 | # epub_fix_images = False 318 | 319 | # Scale large images. 320 | # epub_max_image_width = 0 321 | 322 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 323 | # epub_show_urls = 'inline' 324 | 325 | # If false, no index is generated. 326 | # epub_use_index = True 327 | 328 | intersphinx_mapping = { 329 | "python": ("https://docs.python.org/3", None), 330 | } 331 | intersphinx_mapping.update( 332 | (x, (f"https://pkgcore.github.io/{x}", None)) for x in "pkgcheck pkgcore snakeoil".split() 333 | ) 334 | -------------------------------------------------------------------------------- /doc/contributing.rst: -------------------------------------------------------------------------------- 1 | ../CONTRIBUTING.rst -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to pkgdev's documentation! 2 | ================================== 3 | 4 | ``pkgdev`` is a collection of tools for Gentoo development, from generating 5 | commit messages, to pushing commits to the repository, to filing stable 6 | request bugs, to testing packages in a special environment. 7 | 8 | Contents: 9 | 10 | .. toctree:: 11 | :titlesonly: 12 | :maxdepth: 4 13 | 14 | man/pkgdev 15 | man/pkgdev/bugs 16 | man/pkgdev/commit 17 | man/pkgdev/manifest 18 | man/pkgdev/mask 19 | man/pkgdev/push 20 | man/pkgdev/showkw 21 | man/pkgdev/tatt 22 | man/config 23 | news 24 | 25 | Indices and tables 26 | ================== 27 | 28 | * :ref:`genindex` 29 | * :ref:`modindex` 30 | * :ref:`search` 31 | 32 | .. include:: contributing.rst 33 | -------------------------------------------------------------------------------- /doc/man/config.rst: -------------------------------------------------------------------------------- 1 | Config file support 2 | =================== 3 | 4 | Config files are supported by most subcommands of ``pkgdev`` from any of three 5 | locations. 
Listed in order of increasing precedence, these include the 6 | following: 7 | 8 | - system config -- ``/etc/pkgdev/pkgdev.conf`` 9 | - user config -- ``${XDG_CONFIG_HOME}/pkgdev/pkgdev.conf`` 10 | - user config -- ``~/.config/pkgdev/pkgdev.conf`` (fallback when ``XDG_CONFIG_HOME`` is unset) 11 | - custom config -- specified via the ``--config`` option 12 | 13 | Any settings from a config file with higher precedence will override matching 14 | settings from a config file with a lower precedence, e.g. user settings 15 | override system settings. Note that command line options override any matching 16 | config file setting. 17 | 18 | In terms of file structure, basic INI formatting is required and allows 19 | creating a default section (DEFAULT) for system-wide settings or repo-specific 20 | sections. The INI key-value pairs directly relate to the available 21 | long options supported by the various subcommands, prefixed by the subcommand 22 | name, together with their values. To find all possible configuration options, run: 23 | ``pkgdev {subcommand} --help``. See the following examples for config settings: 24 | 25 | - Run ``pkgcheck scan`` before committing and ask for confirmation (instead of 26 | aborting) when creating commits with QA errors:: 27 | 28 | [DEFAULT] 29 | commit.scan = true 30 | commit.ask = true 31 | 32 | - Allow pushing commits with QA errors, but only for the 'gentoo' repository:: 33 | 34 | [gentoo] 35 | push.ask = true 36 | 37 | - Add `Signed-off-by` consenting to the `Certificate of Origin 38 | <https://developercertificate.org/>`_ 39 | to all commits:: 40 | 41 | [DEFAULT] 42 | commit.signoff = true 43 | 44 | - When committing, stage all files in the current working directory (note that 45 | this option doesn't take a value, so nothing is written after the equals sign):: 46 | 47 | [DEFAULT] 48 | commit.all = 49 | 50 | - All previous config settings combined:: 51 | 52 | [DEFAULT] 53 | commit.scan = true 54 | commit.ask = true 55 | commit.all = 56 | 57 | [gentoo] 58 | push.ask = 59 | -------------------------------------------------------------------------------- /doc/man/pkgdev.rst: -------------------------------------------------------------------------------- 1 | ====== 2 | pkgdev 3 | ====== 4 | 5 | .. include:: pkgdev/_synopsis.rst 6 | .. include:: pkgdev/_description.rst 7 | .. include:: pkgdev/_options.rst 8 | .. include:: pkgdev/_subcommands.rst 9 | 10 | .. 
include:: config.rst 11 | 12 | Reporting Bugs 13 | ============== 14 | 15 | Please submit an issue via github: 16 | 17 | https://github.com/pkgcore/pkgdev/issues 18 | -------------------------------------------------------------------------------- /doc/news.rst: -------------------------------------------------------------------------------- 1 | ../NEWS.rst -------------------------------------------------------------------------------- /py_build.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from pathlib import Path 3 | 4 | from flit_core import buildapi 5 | 6 | 7 | def write_verinfo(cleanup_files): 8 | from snakeoil.version import get_git_version 9 | 10 | cleanup_files.append(path := Path.cwd() / "src/pkgdev/_verinfo.py") 11 | path.parent.mkdir(parents=True, exist_ok=True) 12 | print(f"generating version info: {path}") 13 | path.write_text(f"version_info={get_git_version(Path.cwd())!r}") 14 | 15 | 16 | def prepare_pkgcore(callback): 17 | cleanup_files = [] 18 | try: 19 | write_verinfo(cleanup_files) 20 | 21 | return callback() 22 | finally: 23 | for path in cleanup_files: 24 | try: 25 | path.unlink() 26 | except OSError: 27 | pass 28 | 29 | 30 | def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): 31 | """Builds a wheel, places it in wheel_directory""" 32 | callback = partial(buildapi.build_wheel, wheel_directory, config_settings, metadata_directory) 33 | return prepare_pkgcore(callback) 34 | 35 | 36 | def build_editable(wheel_directory, config_settings=None, metadata_directory=None): 37 | """Builds an "editable" wheel, places it in wheel_directory""" 38 | callback = partial( 39 | buildapi.build_editable, wheel_directory, config_settings, metadata_directory 40 | ) 41 | return prepare_pkgcore(callback) 42 | 43 | 44 | def build_sdist(sdist_directory, config_settings=None): 45 | """Builds an sdist, places it in sdist_directory""" 46 | callback = partial(buildapi.build_sdist, sdist_directory, config_settings) 47 | return prepare_pkgcore(callback) 48 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "flit_core >=3.8,<4", 4 | "snakeoil ~=0.10.8", 5 | ] 6 | build-backend = "py_build" 7 | backend-path = ["."] 8 | 9 | [project] 10 | name = "pkgdev" 11 | description = "collection of tools for Gentoo development" 12 | readme = "README.rst" 13 | license = {file = "LICENSE"} 14 | requires-python = "~=3.10" 15 | authors = [ 16 | {name = "Tim Harder", email = "radhermit@gmail.com"}, 17 | {name = "Arthur Zamarin", email = "arthurzam@gentoo.org"}, 18 | ] 19 | maintainers = [ 20 | {name = "Arthur Zamarin", email = "arthurzam@gentoo.org"}, 21 | ] 22 | classifiers = [ 23 | "License :: OSI Approved :: BSD License", 24 | "Programming Language :: Python :: 3.10", 25 | "Programming Language :: Python :: 3.11", 26 | ] 27 | dynamic = ["version"] 28 | 29 | dependencies = [ 30 | "snakeoil~=0.10.8", 31 | "pkgcore~=0.12.23", 32 | "pkgcheck~=0.10.30", 33 | ] 34 | 35 | [project.optional-dependencies] 36 | test = [ 37 | "pytest>=6.0", 38 | "pytest-cov", 39 | ] 40 | doc = [ 41 | "sphinx", 42 | "tomli; python_version < '3.11'" 43 | ] 44 | tatt = [ 45 | "nattka", 46 | "Jinja2", 47 | ] 48 | 49 | [project.urls] 50 | Homepage = "https://github.com/pkgcore/pkgdev" 51 | Documentation = "https://pkgcore.github.io/pkgdev/" 52 | Source = 
"https://github.com/pkgcore/pkgdev" 53 | 54 | [project.scripts] 55 | pkgdev = "pkgdev.scripts.__init__:main" 56 | 57 | [tool.flit.external-data] 58 | directory = "data" 59 | 60 | [tool.flit.sdist] 61 | include = [ 62 | "Makefile", "py_build.py", 63 | "NEWS.rst", "doc", "tests", 64 | "build/sphinx/man/*.1", 65 | ] 66 | exclude = [ 67 | ".github/", ".gitignore", 68 | "doc/api/", "doc/generated/", "doc/_build/", 69 | ] 70 | 71 | [tool.black] 72 | line-length = 100 73 | 74 | [tool.pytest.ini_options] 75 | minversion = "6.0" 76 | addopts = "-vv -ra -l" 77 | testpaths = ["tests"] 78 | -------------------------------------------------------------------------------- /src/pkgdev/__init__.py: -------------------------------------------------------------------------------- 1 | __title__ = "pkgdev" 2 | __version__ = "0.2.12" 3 | -------------------------------------------------------------------------------- /src/pkgdev/__main__.py: -------------------------------------------------------------------------------- 1 | from .scripts import run 2 | 3 | if __name__ == "__main__": 4 | run("pkgdev") 5 | -------------------------------------------------------------------------------- /src/pkgdev/_vendor/__init__.py: -------------------------------------------------------------------------------- 1 | """Vendored external modules with modifications.""" 2 | -------------------------------------------------------------------------------- /src/pkgdev/cli.py: -------------------------------------------------------------------------------- 1 | """Various command-line specific support.""" 2 | 3 | import argparse 4 | import configparser 5 | import logging 6 | import os 7 | 8 | from pkgcore.util import commandline 9 | from snakeoil.cli import arghparse 10 | from snakeoil.contexts import patch 11 | from snakeoil.klass import jit_attr_none 12 | from snakeoil.mappings import OrderedSet 13 | from pkgcore.repository import errors as repo_errors 14 | from pkgcore.util.commandline import _mk_domain 15 | 16 | from . 
import const 17 | 18 | 19 | class Tool(commandline.Tool): 20 | def main(self): 21 | # suppress all pkgcore log messages 22 | logging.getLogger("pkgcore").setLevel(100) 23 | return super().main() 24 | 25 | 26 | class ConfigArg(argparse._StoreAction): 27 | """Store config path string or False when explicitly disabled.""" 28 | 29 | def __call__(self, parser, namespace, values, option_string=None): 30 | if values.lower() in ("false", "no", "n"): 31 | values = False 32 | setattr(namespace, self.dest, values) 33 | 34 | 35 | class ConfigParser(configparser.ConfigParser): 36 | """ConfigParser with case-sensitive keys (default forces lowercase).""" 37 | 38 | def optionxform(self, option): 39 | return option 40 | 41 | 42 | class ConfigFileParser: 43 | """Argument parser that supports loading settings from specified config files.""" 44 | 45 | default_configs = (const.SYSTEM_CONF_FILE, const.USER_CONF_FILE) 46 | 47 | def __init__(self, parser: arghparse.ArgumentParser, configs=(), **kwargs): 48 | super().__init__(**kwargs) 49 | self.parser = parser 50 | self.configs = OrderedSet(configs) 51 | 52 | @jit_attr_none 53 | def config(self): 54 | return self.parse_config() 55 | 56 | def parse_config(self, configs=()): 57 | """Parse given config files.""" 58 | configs = configs if configs else self.configs 59 | config = ConfigParser(default_section=None) 60 | try: 61 | for f in configs: 62 | config.read(f) 63 | except configparser.ParsingError as e: 64 | self.parser.error(f"parsing config file failed: {e}") 65 | return config 66 | 67 | def parse_config_sections(self, namespace, sections): 68 | """Parse options from a given iterable of config section names.""" 69 | assert self.parser.prog.startswith("pkgdev ") 70 | module = self.parser.prog.split(" ", 1)[1] + "." 71 | with patch("snakeoil.cli.arghparse.ArgumentParser.error", self._config_error): 72 | for section in (x for x in sections if x in self.config): 73 | config_args = ( 74 | (k.split(".", 1)[1], v) 75 | for k, v in self.config.items(section) 76 | if k.startswith(module) 77 | ) 78 | config_args = (f"--{k}={v}" if v else f"--{k}" for k, v in config_args) 79 | namespace, args = self.parser.parse_known_optionals(config_args, namespace) 80 | if args: 81 | self.parser.error(f"unknown arguments: {' '.join(args)}") 82 | return namespace 83 | 84 | def parse_config_options(self, namespace, configs=()): 85 | """Parse options from config if they exist.""" 86 | configs = list(filter(os.path.isfile, configs)) 87 | if not configs: 88 | return namespace 89 | 90 | self.configs.update(configs) 91 | # reset jit attr to force reparse 92 | self._config = None 93 | 94 | # load default options 95 | namespace = self.parse_config_sections(namespace, ["DEFAULT"]) 96 | 97 | return namespace 98 | 99 | def _config_error(self, message, status=2): 100 | """Stub to replace error method that notes config failure.""" 101 | self.parser.exit(status, f"{self.parser.prog}: failed loading config: {message}\n") 102 | 103 | 104 | class ArgumentParser(arghparse.ArgumentParser): 105 | """Parse all known arguments, from command line and config file.""" 106 | 107 | def __init__(self, parents=(), **kwargs): 108 | self.config_argparser = arghparse.ArgumentParser(suppress=True) 109 | config_options = self.config_argparser.add_argument_group("config options") 110 | config_options.add_argument( 111 | "--config", 112 | action=ConfigArg, 113 | dest="config_file", 114 | help="use custom pkgdev settings file", 115 | docs=""" 116 | Load custom pkgdev scan settings from a given file. 
117 | 118 | Note that custom user settings override all other system and repo-level 119 | settings. 120 | 121 | It's also possible to disable all types of settings loading by 122 | specifying an argument of 'false' or 'no'. 123 | """, 124 | ) 125 | _mk_domain(config_options) 126 | super().__init__(parents=[*parents, self.config_argparser], **kwargs) 127 | 128 | def parse_known_args(self, args=None, namespace=None): 129 | temp_namespace, _ = self.config_argparser.parse_known_args(args, namespace) 130 | # parser supporting config file options 131 | config_parser = ConfigFileParser(self) 132 | # always load settings from bundled config 133 | namespace = config_parser.parse_config_options(namespace, configs=[const.BUNDLED_CONF_FILE]) 134 | 135 | # load default args from system/user configs if config-loading is allowed 136 | if temp_namespace.config_file is None: 137 | namespace = config_parser.parse_config_options( 138 | namespace, configs=ConfigFileParser.default_configs 139 | ) 140 | elif temp_namespace.config_file is not False: 141 | namespace = config_parser.parse_config_options( 142 | namespace, configs=(namespace.config_file,) 143 | ) 144 | 145 | try: 146 | repo = temp_namespace.domain.find_repo( 147 | os.getcwd(), config=temp_namespace.config, configure=False 148 | ) 149 | if repo is not None: 150 | namespace = config_parser.parse_config_sections(namespace, repo.aliases) 151 | except (repo_errors.InitializationError, IOError) as exc: 152 | self.error(str(exc)) 153 | 154 | if os.getenv("NOCOLOR"): 155 | namespace.color = False 156 | 157 | # parse command line args to override config defaults 158 | return super().parse_known_args(args, namespace) 159 | -------------------------------------------------------------------------------- /src/pkgdev/const.py: -------------------------------------------------------------------------------- 1 | """Internal constants.""" 2 | 3 | import os 4 | import sys 5 | 6 | from snakeoil import mappings 7 | 8 | _reporoot = os.path.realpath(__file__).rsplit(os.path.sep, 3)[0] 9 | _module = sys.modules[__name__] 10 | 11 | try: 12 | # This is a file written during installation; 13 | # if it exists, we defer to it. If it doesn't, then we're 14 | # running from a git checkout or a tarball. 15 | from . 
import _const as _defaults 16 | except ImportError: # pragma: no cover 17 | _defaults = object() 18 | 19 | 20 | def _GET_CONST(attr, default_value): 21 | consts = mappings.ProxiedAttrs(_module) 22 | default_value %= consts 23 | return getattr(_defaults, attr, default_value) 24 | 25 | 26 | # determine XDG compatible paths 27 | for xdg_var, var_name, fallback_dir in ( 28 | ("XDG_CONFIG_HOME", "USER_CONFIG_PATH", "~/.config"), 29 | ("XDG_DATA_HOME", "USER_DATA_PATH", "~/.local/share"), 30 | ): 31 | setattr( 32 | _module, 33 | var_name, 34 | os.path.join(os.environ.get(xdg_var, os.path.expanduser(fallback_dir)), "pkgdev"), 35 | ) 36 | 37 | REPO_PATH = _GET_CONST("REPO_PATH", _reporoot) 38 | DATA_PATH = _GET_CONST("DATA_PATH", "%(REPO_PATH)s/data") 39 | 40 | USER_CONF_FILE = os.path.join(getattr(_module, "USER_CONFIG_PATH"), "pkgdev.conf") 41 | SYSTEM_CONF_FILE = "/etc/pkgdev/pkgdev.conf" 42 | BUNDLED_CONF_FILE = os.path.join(DATA_PATH, "pkgdev.conf") 43 | -------------------------------------------------------------------------------- /src/pkgdev/git.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import sys 4 | 5 | from snakeoil.cli.exceptions import UserException 6 | 7 | 8 | class GitError(SystemExit): 9 | """Generic error running a git command.""" 10 | 11 | 12 | def run(*args, **kwargs): 13 | """Wrapper for running git via subprocess.run().""" 14 | kwargs.setdefault("check", True) 15 | kwargs.setdefault("text", True) 16 | kwargs.setdefault("env", os.environ.copy())["PKGDEV"] = "1" 17 | cmd = ["git"] + list(args) 18 | 19 | # output git command that would be run to stderr 20 | if "--dry-run" in args: 21 | git_cmd = " ".join(x for x in cmd if x != "--dry-run") 22 | sys.stderr.write(f"{git_cmd}\n") 23 | 24 | try: 25 | return subprocess.run(cmd, **kwargs) 26 | except FileNotFoundError as exc: 27 | raise UserException(str(exc)) 28 | except subprocess.CalledProcessError as exc: 29 | raise GitError(exc.returncode) 30 | -------------------------------------------------------------------------------- /src/pkgdev/mangle.py: -------------------------------------------------------------------------------- 1 | """Formatting and file mangling support.""" 2 | 3 | import functools 4 | import multiprocessing 5 | import os 6 | import re 7 | import signal 8 | import traceback 9 | from datetime import datetime 10 | 11 | from snakeoil.cli.exceptions import UserException 12 | from snakeoil.mappings import OrderedSet 13 | 14 | copyright_regex = re.compile( 15 | r"^# Copyright (?P(?P\d{4}-)?(?P\d{4})) (?P.+)$" 16 | ) 17 | 18 | keywords_regex = re.compile( 19 | r'^(?P
[^#]*\bKEYWORDS=(?P[\'"]?))(?P.*)(?P(?P=quote).*)$'
 20 | )
 21 | 
 22 | 
 23 | def mangle(name: str):
 24 |     """Decorator to register file mangling methods."""
 25 | 
 26 |     class decorator:
 27 |         """Decorator with access to the class of a decorated function."""
 28 | 
 29 |         def __init__(self, func):
 30 |             self.func = func
 31 | 
 32 |         def __set_name__(self, owner, name):
 33 |             owner._mangle_funcs[name] = self.func
 34 |             setattr(owner, name, self.func)
 35 | 
 36 |     return decorator
 37 | 
 38 | 
 39 | class Mangler:
 40 |     """File-mangling iterator using path-based parallelism."""
 41 | 
 42 |     # mapping of mangling types to functions
 43 |     _mangle_funcs = {}
 44 | 
 45 |     def __init__(self, changes, skip_regex=None):
 46 |         self.jobs = os.cpu_count()
 47 |         if skip_regex is not None:
 48 |             changes = (c for c in changes if not skip_regex.match(c.full_path))
 49 |         self.changes = OrderedSet(changes)
 50 | 
 51 |         # setup for parallelizing the mangling procedure across files
 52 |         self._mp_ctx = multiprocessing.get_context("fork")
 53 |         self._mangled_paths_q = self._mp_ctx.SimpleQueue()
 54 |         self._current_year = str(datetime.today().year)
 55 | 
 56 |         # initialize settings used by iterator support
 57 |         self._runner = self._mp_ctx.Process(target=self._run)
 58 |         signal.signal(signal.SIGINT, self._kill_pipe)
 59 |         self._mangled_paths = iter(self._mangled_paths_q.get, None)
 60 | 
 61 |         # construct composed mangling function
 62 |         self.composed_func = functools.reduce(
 63 |             lambda f, g: lambda x: f(g(self, x)), self._mangle_funcs.values(), lambda x: x
 64 |         )
 65 | 
 66 |     @mangle("EOF")
 67 |     def _eof(self, change):
 68 |         """Drop EOF whitespace and forcibly add EOF newline."""
 69 |         return change.update(change.data.rstrip() + "\n")
 70 | 
 71 |     @mangle("keywords")
 72 |     def _keywords(self, change):
 73 |         """Fix keywords order."""
 74 | 
 75 |         def keywords_sort_key(kw):
 76 |             return tuple(reversed(kw.lstrip("-~").partition("-")))
 77 | 
 78 |         lines = change.data.splitlines()
 79 |         for i, line in enumerate(lines):
 80 |             if mo := keywords_regex.match(line):
 81 |                 kw = sorted(mo.group("keywords").split(), key=keywords_sort_key)
 82 |                 new_kw = " ".join(kw)
 83 |                 if not mo.group("quote"):
 84 |                     new_kw = f'"{new_kw}"'
 85 |                 lines[i] = f'{mo.group("pre")}{new_kw}{mo.group("post")}'
 86 |                 break
 87 |         return change.update("\n".join(lines) + "\n")
 88 | 
 89 |     def _kill_pipe(self, *args, error=None):
 90 |         """Handle terminating the mangling process group."""
 91 |         if self._runner.is_alive():
 92 |             os.killpg(self._runner.pid, signal.SIGKILL)
 93 |         if error is not None:
 94 |             # propagate exception raised during parallelized mangling
 95 |             raise UserException(error)
 96 |         raise KeyboardInterrupt
 97 | 
 98 |     def __iter__(self):
 99 |         # start running the mangling processes
100 |         self._runner.start()
101 |         return self
102 | 
103 |     def __next__(self):
104 |         try:
105 |             path = next(self._mangled_paths)
106 |         except StopIteration:
107 |             self._runner.join()
108 |             raise
109 | 
110 |         # Catch propagated, serialized exceptions, output their
 111 |         # traceback, and signal the mangling process to end.
112 |         if isinstance(path, list):
113 |             self._kill_pipe(error=path[0])
114 | 
115 |         return path
116 | 
117 |     def _mangle(self, change):
118 |         """Run composed mangling function across a given change."""
119 |         if orig_data := change.read():
120 |             change = self.composed_func(change)
121 |             if change.data != orig_data:
122 |                 change.sync()
123 |                 return change
124 | 
125 |     def _run_manglers(self, paths_q):
126 |         """Consumer that runs mangling functions, queuing mangled paths for output."""
127 |         try:
128 |             for change in iter(paths_q.get, None):
129 |                 if mangled_change := self._mangle(change):
130 |                     self._mangled_paths_q.put(mangled_change.path)
131 |         except Exception:  # pragma: no cover
132 |             # traceback can't be pickled so serialize it
133 |             tb = traceback.format_exc()
134 |             self._mangled_paths_q.put([tb])
135 | 
136 |     def _run(self):
137 |         signal.signal(signal.SIGINT, signal.SIG_DFL)
138 |         os.setpgrp()
139 | 
140 |         paths_q = self._mp_ctx.SimpleQueue()
141 |         pool = self._mp_ctx.Pool(self.jobs, self._run_manglers, (paths_q,))
142 |         pool.close()
143 | 
144 |         # queue paths for processing
145 |         for change in self.changes:
146 |             paths_q.put(change)
147 |         # notify consumers that no more work exists
148 |         for i in range(self.jobs):
149 |             paths_q.put(None)
150 | 
151 |         pool.join()
152 |         # notify iterator that no more results exist
153 |         self._mangled_paths_q.put(None)
154 | 
155 | 
156 | class GentooMangler(Mangler):
157 |     """Gentoo repo specific file mangler."""
158 | 
159 |     _mangle_funcs = Mangler._mangle_funcs.copy()
160 | 
161 |     @mangle("copyright")
162 |     def _copyright(self, change):
163 |         """Fix copyright headers and dates."""
164 |         lines = change.data.splitlines()
165 |         if mo := copyright_regex.match(lines[0]):
166 |             groups = mo.groupdict()
167 |             if groups["begin"] is None and groups["date"] != self._current_year:
168 |                 # use old copyright date as the start of date range
169 |                 date_range = f"{groups['date']}-{self._current_year}"
170 |                 lines[0] = re.sub(groups["date"], date_range, lines[0])
171 |             else:
172 |                 lines[0] = re.sub(mo.group("end"), self._current_year, lines[0])
173 |             lines[0] = re.sub("Gentoo Foundation", "Gentoo Authors", lines[0])
174 |         return change.update("\n".join(lines) + "\n")
175 | 
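
The ``mangle`` decorator above registers each decorated function into the owning
class's ``_mangle_funcs`` mapping via ``__set_name__``, which is why
``GentooMangler`` copies the registry before extending it. A minimal sketch of the
same registration pattern, using a hypothetical BOM-stripping mangler that is not
part of pkgdev:

    # Sketch only: extending the registry on a hypothetical Mangler subclass.
    from pkgdev.mangle import Mangler, mangle

    class BomMangler(Mangler):
        # copy the registry so the new entry doesn't leak into the base class
        _mangle_funcs = Mangler._mangle_funcs.copy()

        @mangle("BOM")
        def _bom(self, change):
            """Drop a leading UTF-8 BOM if present (hypothetical mangler)."""
            return change.update(change.data.lstrip("\ufeff"))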


--------------------------------------------------------------------------------
/src/pkgdev/scripts/__init__.py:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env python3
 2 | 
 3 | """Wrapper for running commandline scripts."""
 4 | 
 5 | import os
 6 | import sys
 7 | from importlib import import_module
 8 | 
 9 | 
10 | def run(script_name):
11 |     """Run a given script module."""
12 |     # Remove the current working directory to avoid implicit
13 |     # namespace package (PEP 420) imports due to directories
14 |     # matching module names.
15 |     try:
16 |         sys.path.remove(os.getcwd())
17 |     except ValueError:
18 |         pass
19 | 
20 |     try:
21 |         from pkgdev.cli import Tool
22 | 
23 |         script_module = ".".join(
24 |             os.path.realpath(__file__).split(os.path.sep)[-3:-1] + [script_name.replace("-", "_")]
25 |         )
26 |         script = import_module(script_module)
27 |     except ImportError as e:
28 |         python_version = ".".join(map(str, sys.version_info[:3]))
29 |         sys.stderr.write(f"Failed importing: {e}!\n")
30 |         sys.stderr.write(
31 |             "Verify that pkgdev and its deps are properly installed "
32 |             f"and/or PYTHONPATH is set correctly for python {python_version}.\n"
33 |         )
34 |         if "--debug" in sys.argv[1:]:
35 |             raise
36 |         sys.stderr.write("Add --debug to the commandline for a traceback.\n")
37 |         sys.exit(1)
38 | 
39 |     tool = Tool(script.argparser)
40 |     sys.exit(tool())
41 | 
42 | 
43 | def main():
44 |     # We're in a git repo or tarball so add the src dir to the system path.
45 |     # Note that this assumes a certain module layout.
46 |     src_dir = os.path.realpath(__file__).rsplit(os.path.sep, 3)[0]
47 |     sys.path.insert(0, src_dir)
48 |     run(os.path.basename(sys.argv[0]))
49 | 
50 | 
51 | if __name__ == "__main__":
52 |     main()
53 | 
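
``run()`` derives the module to import from this file's own installed path, so a
console-script name maps directly to a module under ``pkgdev.scripts``. A small
sketch of that mapping, assuming a typical POSIX site-packages layout (the path
below is made up):

    import os

    # hypothetical installed location of pkgdev/scripts/__init__.py
    fake_file = "/usr/lib/python3.11/site-packages/pkgdev/scripts/__init__.py"
    script_name = "pkgdev"
    # take the two path components above this file, then the script module name
    module = ".".join(fake_file.split(os.path.sep)[-3:-1] + [script_name.replace("-", "_")])
    print(module)  # -> pkgdev.scripts.pkgdev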


--------------------------------------------------------------------------------
/src/pkgdev/scripts/argparsers.py:
--------------------------------------------------------------------------------
 1 | import os
 2 | import subprocess
 3 | from configparser import ConfigParser
 4 | from contextlib import suppress
 5 | from pathlib import Path
 6 | 
 7 | from pkgcore.repository import errors as repo_errors
 8 | from snakeoil.cli.arghparse import ArgumentParser
 9 | 
10 | from .. import git
11 | 
12 | cwd_repo_argparser = ArgumentParser(suppress=True)
13 | git_repo_argparser = ArgumentParser(suppress=True)
14 | 
15 | 
16 | @cwd_repo_argparser.bind_final_check
17 | def _determine_cwd_repo(parser, namespace):
18 |     namespace.cwd = os.getcwd()
19 |     try:
20 |         repo = namespace.domain.find_repo(namespace.cwd, config=namespace.config, configure=False)
21 |     except (repo_errors.InitializationError, IOError) as e:
22 |         raise parser.error(str(e))
23 | 
24 |     if repo is None:
25 |         raise parser.error("not in ebuild repo")
26 | 
27 |     namespace.repo = repo
28 | 
29 | 
30 | @git_repo_argparser.bind_final_check
31 | def _determine_git_repo(parser, namespace):
32 |     try:
33 |         p = git.run("rev-parse", "--show-toplevel", stdout=subprocess.PIPE)
34 |         path = p.stdout.strip()
35 |     except git.GitError:
36 |         raise parser.error("not in git repo")
37 | 
38 |     # verify the git and ebuild repo roots match when using both
39 |     try:
40 |         if namespace.repo.location != path:
41 |             raise parser.error("not in ebuild git repo")
42 |     except AttributeError:
43 |         # ebuild repo parser not enabled
44 |         pass
45 | 
46 |     namespace.git_repo = path
47 | 
48 | 
49 | class BugzillaApiKey:
50 |     @classmethod
51 |     def mangle_argparser(cls, parser):
52 |         parser.add_argument(
53 |             "--api-key",
54 |             metavar="TOKEN",
55 |             help="Bugzilla API key",
56 |             docs="""
57 |                 The Bugzilla API key to use for authentication. WARNING: using this
58 |                 option will expose your API key to other users of the same system.
59 |                 Consider instead saving your API key in a file named ``~/.bugzrc``
60 |                 in an INI format like so::
61 | 
62 |                         [default]
 63 |                         key = <api key>
64 | 
65 |                 Another supported option is to save your API key in a file named
66 |                 ``~/.bugz_token``.
67 |             """,
68 |         )
69 | 
70 |         parser.bind_delayed_default(1000, "api_key")(cls._default_api_key)
71 | 
72 |     @staticmethod
73 |     def _default_api_key(namespace, attr):
 74 |         """Load the API key from ~/.bugzrc or ~/.bugz_token if present."""
75 |         if (bugz_rc_file := Path.home() / ".bugzrc").is_file():
76 |             try:
77 |                 config = ConfigParser(default_section="default")
78 |                 config.read(bugz_rc_file)
79 |             except Exception as e:
80 |                 raise ValueError(f"failed parsing {bugz_rc_file}: {e}")
81 | 
82 |             for category in ("default", "gentoo", "Gentoo"):
83 |                 with suppress(Exception):
84 |                     setattr(namespace, attr, config.get(category, "key"))
85 |                     return
86 | 
87 |         if (bugz_token_file := Path.home() / ".bugz_token").is_file():
88 |             setattr(namespace, attr, bugz_token_file.read_text().strip())
89 | 
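
``_default_api_key`` checks the ``~/.bugzrc`` sections in a fixed order before
falling back to ``~/.bugz_token``. A standalone sketch of the same lookup order
(the helper name is invented):

    from configparser import ConfigParser
    from contextlib import suppress
    from pathlib import Path

    def resolve_api_key() -> str | None:
        """Hypothetical helper mirroring the lookup order above."""
        if (bugz_rc := Path.home() / ".bugzrc").is_file():
            config = ConfigParser(default_section="default")
            config.read(bugz_rc)
            # first matching section wins
            for section in ("default", "gentoo", "Gentoo"):
                with suppress(Exception):
                    return config.get(section, "key")
        if (token := Path.home() / ".bugz_token").is_file():
            return token.read_text().strip()
        return None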


--------------------------------------------------------------------------------
/src/pkgdev/scripts/pkgdev.py:
--------------------------------------------------------------------------------
 1 | """collection of tools for Gentoo development
 2 | 
 3 | pkgdev provides a collection of tools for Gentoo development.
 4 | """
 5 | 
 6 | from pkgcore.util import commandline
 7 | 
 8 | argparser = commandline.ArgumentParser(
 9 |     description=__doc__, help=False, subcmds=True, script=(__file__, __name__)
10 | )
11 | 


--------------------------------------------------------------------------------
/src/pkgdev/scripts/pkgdev_bugs.py:
--------------------------------------------------------------------------------
  1 | """Automatic bugs filer"""
  2 | 
  3 | import contextlib
  4 | import json
  5 | import os
  6 | import shlex
  7 | import subprocess
  8 | import sys
  9 | import tempfile
 10 | import urllib.request as urllib
 11 | from collections import defaultdict
 12 | from datetime import datetime
 13 | from functools import partial
 14 | from itertools import chain
 15 | from urllib.parse import urlencode
 16 | 
 17 | from pkgcheck import const as pkgcheck_const
 18 | from pkgcheck.addons import ArchesAddon, init_addon
 19 | from pkgcheck.addons.profiles import ProfileAddon
 20 | from pkgcheck.addons.git import GitAddon, GitAddedRepo, GitModifiedRepo
 21 | from pkgcheck.checks import visibility, stablereq
 22 | from pkgcheck.scripts import argparse_actions
 23 | from pkgcore.ebuild.atom import atom
 24 | from pkgcore.ebuild.ebuild_src import package
 25 | from pkgcore.ebuild.errors import MalformedAtom
 26 | from pkgcore.ebuild.misc import sort_keywords
 27 | from pkgcore.ebuild.repo_objs import LocalMetadataXml, ProjectsXml
 28 | from pkgcore.repository import multiplex
 29 | from pkgcore.restrictions import boolean, packages, values
 30 | from pkgcore.test.misc import FakePkg
 31 | from pkgcore.util import commandline, parserestrict
 32 | from snakeoil.cli import arghparse
 33 | from snakeoil.cli.input import userquery
 34 | from snakeoil.data_source import bytes_data_source
 35 | from snakeoil.formatters import Formatter
 36 | from snakeoil.osutils import pjoin
 37 | 
 38 | from ..cli import ArgumentParser
 39 | from .argparsers import _determine_cwd_repo, cwd_repo_argparser, BugzillaApiKey
 40 | 
 41 | if sys.version_info >= (3, 11):
 42 |     import tomllib
 43 | else:
 44 |     import tomli as tomllib
 45 | 
 46 | bugs = ArgumentParser(
 47 |     prog="pkgdev bugs",
 48 |     description=__doc__,
 49 |     verbose=False,
 50 |     quiet=False,
 51 |     parents=(cwd_repo_argparser,),
 52 | )
 53 | BugzillaApiKey.mangle_argparser(bugs)
 54 | bugs.add_argument(
 55 |     "targets",
 56 |     metavar="target",
 57 |     nargs="*",
 58 |     action=commandline.StoreTarget,
 59 |     use_sets="sets",
 60 |     help="extended atom matching of packages",
 61 | )
 62 | bugs.add_argument(
 63 |     "--dot",
 64 |     help="file path where the graph will be saved in dot format",
 65 | )
 66 | bugs.add_argument(
 67 |     "--edit-graph",
 68 |     action="store_true",
 69 |     help="open editor to modify the graph before filing bugs",
 70 |     docs="""
 71 |         When this argument is passed, pkgdev will open the graph in the editor
 72 |         (either ``$VISUAL`` or ``$EDITOR``) before filing bugs. The graph is
 73 |         represented in TOML format. After saving and exiting the editor, the
 74 |         tool will use the graph from the file to file bugs.
 75 |     """,
 76 | )
 77 | bugs.add_argument(
 78 |     "--auto-cc-arches",
 79 |     action=arghparse.CommaSeparatedNegationsAppend,
 80 |     default=([], []),
 81 |     metavar="EMAIL",
 82 |     help="automatically add CC-ARCHES for the listed email addresses",
 83 |     docs="""
 84 |         Comma-separated list of email addresses; CC-ARCHES is added
 85 |         automatically when one of the maintainers matches an address. If the
 86 |         package is maintainer-needed, CC-ARCHES is always added.
 87 |     """,
 88 | )
 89 | bugs.add_argument(
 90 |     "--find-by-maintainer",
 91 |     action=arghparse.CommaSeparatedNegationsAppend,
 92 |     default=([], []),
 93 |     metavar="EMAIL",
 94 |     help="collect all packages maintained by the listed email addresses",
 95 |     docs="""
 96 |         Comma-separated list of email addresses; pkgdev will collect all
 97 |         packages maintained by any of them.
 98 | 
 99 |         Note that this flag requires going over all packages in the repository
100 |         to find matches, which can be slow (between 1 and 3 seconds).
101 |     """,
102 | )
103 | bugs.add_argument(
104 |     "--projects",
105 |     action="store_true",
106 |     help="include packages maintained by projects",
107 |     docs="""
108 |         Include packages maintained by projects, whose members include the
109 |         emails of maintainers passed to ``--find-by-maintainer``.
110 | 
111 |         Note that this flag requires fetching the ``projects.xml`` file from
112 |         ``https://api.gentoo.org``.
113 |     """,
114 | )
115 | bugs.add_argument(
116 |     "--filter-stablereqs",
117 |     action="store_true",
118 |     help="filter targets for packages with active StableRequest result",
119 |     docs="""
120 |         Filter targets passed to pkgdev (command line, stabilization groups,
121 |         maintainer search, stdin) for packages with an active ``StableRequest``
122 |         result.
123 |     """,
124 | )
125 | bugs.add_argument(
126 |     "--blocks",
127 |     metavar="BUG",
128 |     action=arghparse.CommaSeparatedValuesAppend,
129 |     default=[],
130 |     help="bugs which should be blocked by newly created bugs",
131 |     docs="""
132 |         Collection of bug ids which should be blocked by newly created bugs.
133 |         Only bugs created for the passed targets will be blockers; bugs
134 |         created as dependencies are excluded.
135 |     """,
136 | )
137 | 
138 | bugs.add_argument(
139 |     "--cache",
140 |     action=argparse_actions.CacheNegations,
141 |     help=arghparse.SUPPRESS,
142 | )
143 | bugs.add_argument(
144 |     "--cache-dir",
145 |     type=arghparse.create_dir,
146 |     default=pkgcheck_const.USER_CACHE_DIR,
147 |     help=arghparse.SUPPRESS,
148 | )
149 | bugs_state = bugs.add_mutually_exclusive_group()
150 | bugs_state.add_argument(
151 |     "-s",
152 |     "--stablereq",
153 |     dest="keywording",
154 |     default=None,
155 |     action="store_false",
156 |     help="File stable request bugs",
157 | )
158 | bugs_state.add_argument(
159 |     "-k",
160 |     "--keywording",
161 |     dest="keywording",
162 |     default=None,
163 |     action="store_true",
164 |     help="File rekeywording bugs",
165 | )
166 | 
167 | bugs.plugin = bugs
168 | ArchesAddon.mangle_argparser(bugs)
169 | GitAddon.mangle_argparser(bugs)
170 | ProfileAddon.mangle_argparser(bugs)
171 | stablereq.StableRequestCheck.mangle_argparser(bugs)
172 | 
173 | 
174 | @bugs.bind_delayed_default(1500, "target_repo")
175 | def _validate_args(namespace, attr):
176 |     _determine_cwd_repo(bugs, namespace)
177 |     setattr(namespace, attr, namespace.repo)
178 |     setattr(namespace, "verbosity", 1)
179 |     setattr(namespace, "search_repo", search_repo := multiplex.tree(*namespace.repo.trees))
180 |     setattr(namespace, "gentoo_repo", search_repo)
181 |     setattr(namespace, "query_caching_freq", "package")
182 | 
183 | 
184 | @bugs.bind_final_check
185 | def _validate_args(parser, namespace):
186 |     if namespace.keywording:
187 |         parser.error("keywording is not implemented yet, sorry")
188 | 
189 | 
190 | def _get_suggested_keywords(repo, pkg: package):
191 |     match_keywords = {
192 |         x
193 |         for pkgver in repo.match(pkg.unversioned_atom)
194 |         for x in pkgver.keywords
195 |         if x[0] not in "-~"
196 |     }
197 | 
198 |     # limit stablereq to whatever is ~arch right now
199 |     match_keywords.intersection_update(x.lstrip("~") for x in pkg.keywords if x[0] == "~")
200 | 
201 |     return frozenset({x for x in match_keywords if "-" not in x})
202 | 
203 | 
204 | def parse_atom(pkg: str):
205 |     try:
206 |         return atom(pkg)
207 |     except MalformedAtom as exc:
208 |         try:
209 |             return atom(f"={pkg}")
210 |         except MalformedAtom:
211 |             raise exc
212 | 
213 | 
214 | class GraphNode:
215 |     __slots__ = ("pkgs", "edges", "bugno", "summary", "cc_arches")
216 | 
217 |     def __init__(self, pkgs: tuple[tuple[package, set[str]], ...], bugno=None):
218 |         self.pkgs = pkgs
219 |         self.edges: set[GraphNode] = set()
220 |         self.bugno = bugno
221 |         self.summary = ""
222 |         self.cc_arches = None
223 | 
224 |     def __eq__(self, __o: object):
225 |         return self is __o
226 | 
227 |     def __hash__(self):
228 |         return hash(id(self))
229 | 
230 |     def __str__(self):
231 |         return ", ".join(str(pkg.versioned_atom) for pkg, _ in self.pkgs)
232 | 
233 |     def __repr__(self):
234 |         return str(self)
235 | 
236 |     def lines(self):
237 |         for pkg, keywords in self.pkgs:
238 |             yield f"{pkg.versioned_atom} {' '.join(sort_keywords(keywords))}"
239 | 
240 |     @property
241 |     def dot_edge(self):
242 |         if self.bugno is not None:
243 |             return f"bug_{self.bugno}"
244 |         return f'"{self.pkgs[0][0].versioned_atom}"'
245 | 
246 |     def cleanup_keywords(self, repo):
247 |         previous = frozenset()
248 |         for pkg, keywords in self.pkgs:
249 |             if keywords == previous:
250 |                 keywords.clear()
251 |                 keywords.add("^")
252 |             else:
253 |                 previous = frozenset(keywords)
254 | 
255 |         for pkg, keywords in self.pkgs:
256 |             suggested = _get_suggested_keywords(repo, pkg)
257 |             if keywords == set(suggested):
258 |                 keywords.clear()
259 |                 keywords.add("*")
260 | 
261 |     @property
262 |     def bug_summary(self):
263 |         if self.summary:
264 |             return self.summary
265 |         summary = f"{', '.join(pkg.versioned_atom.cpvstr for pkg, _ in self.pkgs)}: stablereq"
266 |         if len(summary) > 90 and len(self.pkgs) > 1:
267 |             return f"{self.pkgs[0][0].versioned_atom.cpvstr} and friends: stablereq"
268 |         return summary
269 | 
270 |     @property
271 |     def node_maintainers(self):
272 |         return dict.fromkeys(
273 |             maintainer.email for pkg, _ in self.pkgs for maintainer in pkg.maintainers
274 |         )
275 | 
276 |     def should_cc_arches(self, auto_cc_arches: frozenset[str]):
277 |         if self.cc_arches is not None:
278 |             return self.cc_arches
279 |         maintainers = self.node_maintainers
280 |         return bool(
281 |             not maintainers or "*" in auto_cc_arches or auto_cc_arches.intersection(maintainers)
282 |         )
283 | 
284 |     def file_bug(
285 |         self,
286 |         api_key: str,
287 |         auto_cc_arches: frozenset[str],
288 |         block_bugs: list[int],
289 |         modified_repo: multiplex.tree,
290 |         observer=None,
291 |     ) -> int:
292 |         if self.bugno is not None:
293 |             return self.bugno
294 |         for dep in self.edges:
295 |             if dep.bugno is None:
296 |                 dep.file_bug(api_key, auto_cc_arches, (), modified_repo, observer)
297 |         maintainers = self.node_maintainers
298 |         if self.should_cc_arches(auto_cc_arches):
299 |             keywords = ["CC-ARCHES"]
300 |         else:
301 |             keywords = []
302 |         maintainers = tuple(maintainers) or ("maintainer-needed@gentoo.org",)
303 | 
304 |         description = ["Please stabilize", ""]
305 |         if modified_repo is not None:
306 |             for pkg, _ in self.pkgs:
307 |                 with contextlib.suppress(StopIteration):
308 |                     match = next(modified_repo.itermatch(pkg.versioned_atom))
309 |                     modified = datetime.fromtimestamp(match.time)
310 |                     days_old = (datetime.today() - modified).days
311 |                     description.append(
312 |                         f" {pkg.versioned_atom.cpvstr}: no change for {days_old} days, since {modified:%Y-%m-%d}"
313 |                     )
314 | 
315 |         request_data = dict(
316 |             Bugzilla_api_key=api_key,
317 |             product="Gentoo Linux",
318 |             component="Stabilization",
319 |             severity="enhancement",
320 |             version="unspecified",
321 |             summary=self.bug_summary,
322 |             description="\n".join(description).strip(),
323 |             keywords=keywords,
324 |             cf_stabilisation_atoms="\n".join(self.lines()),
325 |             assigned_to=maintainers[0],
326 |             cc=maintainers[1:],
327 |             depends_on=list({dep.bugno for dep in self.edges}),
328 |             blocks=block_bugs,
329 |         )
330 |         request = urllib.Request(
331 |             url="https://bugs.gentoo.org/rest/bug",
332 |             data=json.dumps(request_data).encode("utf-8"),
333 |             method="POST",
334 |             headers={
335 |                 "Content-Type": "application/json",
336 |                 "Accept": "application/json",
337 |             },
338 |         )
339 |         with urllib.urlopen(request, timeout=30) as response:
340 |             reply = json.loads(response.read().decode("utf-8"))
341 |         self.bugno = int(reply["id"])
342 |         if observer is not None:
343 |             observer(self)
344 |         return self.bugno
345 | 
346 | 
347 | class DependencyGraph:
348 |     def __init__(self, out: Formatter, err: Formatter, options):
349 |         self.out = out
350 |         self.err = err
351 |         self.options = options
352 |         disabled, enabled = options.auto_cc_arches
353 |         self.auto_cc_arches = frozenset(enabled).difference(disabled)
354 |         self.profile_addon: ProfileAddon = init_addon(ProfileAddon, options)
355 | 
356 |         self.nodes: set[GraphNode] = set()
357 |         self.starting_nodes: set[GraphNode] = set()
358 |         self.targets: tuple[package] = ()
359 | 
360 |         git_addon = init_addon(GitAddon, options)
361 |         self.added_repo = git_addon.cached_repo(GitAddedRepo)
362 |         self.modified_repo = git_addon.cached_repo(GitModifiedRepo)
363 |         self.stablereq_check = stablereq.StableRequestCheck(self.options, git_addon=git_addon)
364 | 
365 |     def mk_fake_pkg(self, pkg: package, keywords: set[str]):
366 |         return FakePkg(
367 |             cpv=pkg.cpvstr,
368 |             eapi=str(pkg.eapi),
369 |             iuse=pkg.iuse,
370 |             repo=pkg.repo,
371 |             keywords=tuple(keywords),
372 |             data={attr: str(getattr(pkg, attr.lower())) for attr in pkg.eapi.dep_keys},
373 |         )
374 | 
375 |     def find_best_match(self, restrict, pkgset: list[package], prefer_semi_stable=True) -> package:
376 |         restrict = boolean.AndRestriction(
377 |             *restrict,
378 |             packages.PackageRestriction("properties", values.ContainmentMatch("live", negate=True)),
379 |         )
380 |         # prefer using user selected targets
381 |         if intersect := tuple(filter(restrict.match, self.targets)):
382 |             return max(intersect)
383 |         # prefer using already selected packages in graph
384 |         all_pkgs = (pkg for node in self.nodes for pkg, _ in node.pkgs)
385 |         if intersect := tuple(filter(restrict.match, all_pkgs)):
386 |             return max(intersect)
387 |         matches = sorted(filter(restrict.match, pkgset), reverse=True)
388 |         # prefer package with any stable keyword
389 |         if prefer_semi_stable:
390 |             for match in matches:
391 |                 if not all(keyword.startswith("~") for keyword in match.keywords):
392 |                     return match
393 |         # prefer package with any keyword
394 |         for match in matches:
395 |             if match.keywords:
396 |                 return match
397 |         return matches[0]
398 | 
399 |     def extend_targets_stable_groups(self, groups):
400 |         stabilization_groups = self.options.repo.stabilization_groups
401 |         for group in groups:
402 |             for pkg in stabilization_groups[group]:
403 |                 try:
404 |                     yield None, pkg
405 |                 except (ValueError, IndexError):
406 |                     self.err.write(f"Unable to find match for {pkg.unversioned_atom}")
407 | 
408 |     def _extend_projects(self, disabled, enabled):
409 |         members = defaultdict(set)
410 |         self.out.write("Fetching projects.xml")
411 |         self.out.flush()
412 |         with urllib.urlopen("https://api.gentoo.org/metastructure/projects.xml", timeout=30) as f:
413 |             for email, project in ProjectsXml(bytes_data_source(f.read())).projects.items():
414 |                 for member in project.members:
415 |                     members[member.email].add(email)
416 | 
417 |         disabled = frozenset(disabled).union(*(members[email] for email in disabled))
418 |         enabled = frozenset(enabled).union(*(members[email] for email in enabled))
419 |         return disabled, enabled
420 | 
421 |     def extend_maintainers(self):
422 |         disabled, enabled = self.options.find_by_maintainer
423 |         if self.options.projects:
424 |             disabled, enabled = self._extend_projects(disabled, enabled)
425 |         emails = frozenset(enabled).difference(disabled)
426 |         if not emails:
427 |             return
428 |         search_repo = self.options.search_repo
429 |         self.out.write("Searching for packages maintained by: ", ", ".join(emails))
430 |         self.out.flush()
431 |         for cat, pkgs in search_repo.packages.items():
432 |             for pkg in pkgs:
433 |                 xml = LocalMetadataXml(pjoin(search_repo.location[0], cat, pkg, "metadata.xml"))
434 |                 if emails.intersection(m.email for m in xml.maintainers):
435 |                     yield None, parserestrict.parse_match(f"{cat}/{pkg}")
436 | 
437 |     def _find_dependencies(self, pkg: package, keywords: set[str]):
438 |         check = visibility.VisibilityCheck(self.options, profile_addon=self.profile_addon)
439 | 
440 |         issues: dict[str, dict[str, set[atom]]] = defaultdict(partial(defaultdict, set))
441 |         for res in check.feed(self.mk_fake_pkg(pkg, keywords)):
442 |             if isinstance(res, visibility.NonsolvableDeps):
443 |                 for dep in res.deps:
444 |                     dep = atom(dep).no_usedeps
445 |                     issues[dep.key][res.keyword.lstrip("~")].add(dep)
446 | 
447 |         for pkgname, problems in issues.items():
448 |             pkgset: list[package] = self.options.repo.match(atom(pkgname))
449 |             try:
450 |                 match = self.find_best_match(set().union(*problems.values()), pkgset)
451 |                 yield match, set(problems.keys())
452 |             except (ValueError, IndexError):
453 |                 results: dict[package, set[str]] = defaultdict(set)
454 |                 for keyword, deps in problems.items():
455 |                     try:
456 |                         match = self.find_best_match(deps, pkgset)
457 |                     except (ValueError, IndexError):
458 |                         deps_str = ", ".join(map(str, deps))
459 |                         bugs.error(
460 |                             f"unable to find match for restrictions: {deps_str}",
461 |                             status=3,
462 |                         )
463 |                     results[match].add(keyword)
464 |                 yield from results.items()
465 | 
466 |     def load_targets(self, targets: list[tuple[str, str]]):
467 |         result = []
468 |         search_repo = self.options.search_repo
469 |         for _, target in targets:
470 |             try:
471 |                 pkgset = search_repo.match(target)
472 |                 if self.options.filter_stablereqs:
473 |                     for res in self.stablereq_check.feed(sorted(pkgset)):
474 |                         if isinstance(res, stablereq.StableRequest):
475 |                             target = atom(f"={res.category}/{res.package}-{res.version}")
476 |                             break
477 |                     else:  # no stablereq
478 |                         continue
479 |                 result.append(self.find_best_match([target], pkgset, False))
480 |             except (ValueError, IndexError):
481 |                 bugs.error(f"Restriction {target} has no match in repository", status=3)
482 |         self.targets = tuple(result)
483 | 
484 |     def build_full_graph(self):
485 |         check_nodes = [(pkg, set(), "") for pkg in self.targets]
486 | 
487 |         vertices: dict[package, GraphNode] = {}
488 |         edges = []
489 |         while len(check_nodes):
490 |             pkg, keywords, reason = check_nodes.pop(0)
491 |             if pkg in vertices:
492 |                 vertices[pkg].pkgs[0][1].update(keywords)
493 |                 continue
494 | 
495 |             pkg_has_stable = any(x[0] not in "-~" for x in pkg.keywords)
496 |             keywords.update(_get_suggested_keywords(self.options.repo, pkg))
497 |             if pkg_has_stable and not keywords:  # package already done
498 |                 self.out.write(f"Nothing to stable for {pkg.unversioned_atom}")
499 |                 continue
500 |             assert (
501 |                 keywords
502 |             ), f"no keywords for {pkg.versioned_atom}, currently unsupported by tool: https://github.com/pkgcore/pkgdev/issues/123"
503 |             self.nodes.add(new_node := GraphNode(((pkg, keywords),)))
504 |             vertices[pkg] = new_node
505 |             if reason:
506 |                 reason = f" [added for {reason}]"
507 |             self.out.write(
508 |                 f"Checking {pkg.versioned_atom} on {' '.join(sort_keywords(keywords))!r}{reason}"
509 |             )
510 |             self.out.flush()
511 | 
512 |             for dep, keywords in self._find_dependencies(pkg, keywords):
513 |                 edges.append((pkg, dep))
514 |                 check_nodes.append((dep, keywords, str(pkg.versioned_atom)))
515 | 
516 |         for src, dst in edges:
517 |             vertices[src].edges.add(vertices[dst])
518 |         self.starting_nodes = {
519 |             vertices[starting_node] for starting_node in self.targets if starting_node in vertices
520 |         }
521 | 
522 |     def output_dot(self, dot_file: str):
523 |         with open(dot_file, "w") as dot:
524 |             dot.write("digraph {\n")
525 |             dot.write("\trankdir=LR;\n")
526 |             for node in self.nodes:
527 |                 node_text = "\\n".join(node.lines())
528 |                 if node.bugno is not None:
529 |                     node_text += f"\\nbug #{node.bugno}"
530 |                 dot.write(f'\t{node.dot_edge}[label="{node_text}"];\n')
531 |                 for other in node.edges:
532 |                     dot.write(f"\t{node.dot_edge} -> {other.dot_edge};\n")
533 |             dot.write("}\n")
534 |             dot.close()
535 | 
536 |     def output_graph_toml(self):
537 |         self.auto_cc_arches
538 |         bugs = dict(enumerate(self.nodes, start=1))
539 |         reverse_bugs = {node: bugno for bugno, node in bugs.items()}
540 | 
541 |         toml = tempfile.NamedTemporaryFile(mode="w", suffix=".toml")
542 |         for bugno, node in bugs.items():
543 |             if node.bugno is not None:
544 |                 continue  # already filed
545 |             toml.write(f"[bug-{bugno}]\n")
546 |             toml.write(f'summary = "{node.bug_summary}"\n')
547 |             toml.write(f"cc_arches = {str(node.should_cc_arches(self.auto_cc_arches)).lower()}\n")
548 |             if node_depends := ", ".join(
549 |                 (f'"bug-{reverse_bugs[dep]}"' if dep.bugno is None else str(dep.bugno))
550 |                 for dep in node.edges
551 |             ):
552 |                 toml.write(f"depends = [{node_depends}]\n")
553 |             if node_blocks := ", ".join(
554 |                 f'"bug-{i}"' for i, src in bugs.items() if node in src.edges
555 |             ):
556 |                 toml.write(f"blocks = [{node_blocks}]\n")
557 |             for pkg, arches in node.pkgs:
558 |                 try:
559 |                     match = next(self.modified_repo.itermatch(pkg.versioned_atom))
560 |                     modified = datetime.fromtimestamp(match.time)
561 |                     age = (datetime.today() - modified).days
562 |                     modified_text = f"{modified:%Y-%m-%d} (age {age} days)"
563 |                 except StopIteration:
564 |                     modified_text = ""
565 | 
566 |                 try:
567 |                     match = next(self.added_repo.itermatch(pkg.versioned_atom))
568 |                     added = datetime.fromtimestamp(match.time)
569 |                     age = (datetime.today() - added).days
570 |                     added_text = f"{added:%Y-%m-%d} (age {age} days)"
571 |                 except StopIteration:
572 |                     added_text = ""
573 | 
574 |                 toml.write(f"# added on {added_text}, last modified on {modified_text}\n")
575 |                 keywords = ", ".join(f'"{x}"' for x in sort_keywords(arches))
576 |                 toml.write(f'"{pkg.versioned_atom}" = [{keywords}]\n')
577 |             toml.write("\n\n")
578 |         toml.flush()
579 |         return toml
580 | 
581 |     def load_graph_toml(self, toml_file: str):
582 |         repo = self.options.search_repo
583 |         with open(toml_file, "rb") as f:
584 |             data = tomllib.load(f)
585 | 
586 |         new_bugs: dict[int | str, GraphNode] = {}
587 |         for node_name, data_node in data.items():
588 |             pkgs = tuple(
589 |                 (next(repo.itermatch(atom(pkg))), set(keywords))
590 |                 for pkg, keywords in data_node.items()
591 |                 if pkg.startswith("=")
592 |             )
593 |             new_bugs[node_name] = GraphNode(pkgs)
594 |         for node_name, data_node in data.items():
595 |             new_bugs[node_name].summary = data_node.get("summary", "")
596 |             new_bugs[node_name].cc_arches = data_node.get("cc_arches", None)
597 |             for dep in data_node.get("depends", ()):
598 |                 if isinstance(dep, int):
599 |                     new_bugs[node_name].edges.add(new_bugs.setdefault(dep, GraphNode((), dep)))
600 |                 elif new_bugs.get(dep) is not None:
601 |                     new_bugs[node_name].edges.add(new_bugs[dep])
602 |                 else:
603 |                     bugs.error(f"[{node_name}]['depends']: unknown dependency {dep!r}")
604 |         self.nodes = set(new_bugs.values())
605 |         self.starting_nodes = {node for node in self.nodes if not node.edges}
606 | 
607 |     def merge_nodes(self, nodes: tuple[GraphNode, ...]) -> GraphNode:
608 |         self.nodes.difference_update(nodes)
609 |         is_start = bool(self.starting_nodes.intersection(nodes))
610 |         self.starting_nodes.difference_update(nodes)
611 |         new_node = GraphNode(list(chain.from_iterable(n.pkgs for n in nodes)))
612 | 
613 |         for node in nodes:
614 |             new_node.edges.update(node.edges.difference(nodes))
615 | 
616 |         for node in self.nodes:
617 |             if node.edges.intersection(nodes):
618 |                 node.edges.difference_update(nodes)
619 |                 node.edges.add(new_node)
620 | 
621 |         self.nodes.add(new_node)
622 |         if is_start:
623 |             self.starting_nodes.add(new_node)
624 |         return new_node
625 | 
626 |     @staticmethod
627 |     def _find_cycles(nodes: tuple[GraphNode, ...], stack: list[GraphNode]) -> tuple[GraphNode, ...]:
628 |         node = stack[-1]
629 |         for edge in node.edges:
630 |             if edge in stack:
631 |                 return tuple(stack[stack.index(edge) :])
632 |             stack.append(edge)
633 |             if cycle := DependencyGraph._find_cycles(nodes, stack):
634 |                 return cycle
635 |             stack.pop()
636 |         return ()
637 | 
638 |     def merge_cycles(self):
639 |         start_nodes = set(self.starting_nodes)
640 |         while start_nodes:
641 |             starting_node = start_nodes.pop()
642 |             assert starting_node in self.nodes
643 |             while cycle := self._find_cycles(tuple(self.nodes), [starting_node]):
644 |                 self.out.write("Found cycle: ", " -> ".join(str(n) for n in cycle))
645 |                 start_nodes.difference_update(cycle)
646 |                 new_node = self.merge_nodes(cycle)
647 |                 if starting_node not in self.nodes:
648 |                     starting_node = new_node
649 | 
650 |     def merge_new_keywords_children(self):
651 |         repo = self.options.search_repo
652 |         found_someone = True
653 |         while found_someone:
654 |             reverse_edges: dict[GraphNode, set[GraphNode]] = defaultdict(set)
655 |             for node in self.nodes:
656 |                 for dep in node.edges:
657 |                     reverse_edges[dep].add(node)
658 |             found_someone = False
659 |             for node, origs in reverse_edges.items():
660 |                 if len(origs) != 1:
661 |                     continue
662 |                 existing_keywords = frozenset().union(
663 |                     *(
664 |                         pkgver.keywords
665 |                         for pkg, _ in node.pkgs
666 |                         for pkgver in repo.match(pkg.unversioned_atom)
667 |                     )
668 |                 )
669 |                 if existing_keywords & frozenset().union(*(pkg[1] for pkg in node.pkgs)):
670 |                     continue  # not fully new keywords
671 |                 orig = next(iter(origs))
672 |                 self.out.write(f"Merging {node} into {orig}")
673 |                 self.merge_nodes((orig, node))
674 |                 found_someone = True
675 |                 break
676 | 
677 |     def merge_stabilization_groups(self):
678 |         for group, pkgs in self.options.repo.stabilization_groups.items():
679 |             restrict = packages.OrRestriction(*pkgs)
680 |             mergable = tuple(
681 |                 node for node in self.nodes if any(restrict.match(pkg) for pkg, _ in node.pkgs)
682 |             )
683 |             if mergable:
684 |                 self.out.write(f"Merging @{group} group nodes: {mergable}")
685 |                 self.merge_nodes(mergable)
686 | 
687 |     def scan_existing_bugs(self, api_key: str):
688 |         params = urlencode(
689 |             {
690 |                 "Bugzilla_api_key": api_key,
691 |                 "include_fields": "id,cf_stabilisation_atoms,summary",
692 |                 "component": "Stabilization",
693 |                 "resolution": "---",
694 |                 "f1": "cf_stabilisation_atoms",
695 |                 "o1": "anywords",
696 |                 "v1": {pkg[0].unversioned_atom for node in self.nodes for pkg in node.pkgs},
697 |             },
698 |             doseq=True,
699 |         )
700 |         request = urllib.Request(
701 |             url="https://bugs.gentoo.org/rest/bug?" + params,
702 |             method="GET",
703 |             headers={
704 |                 "Content-Type": "application/json",
705 |                 "Accept": "application/json",
706 |             },
707 |         )
708 |         with urllib.urlopen(request, timeout=30) as response:
709 |             reply = json.loads(response.read().decode("utf-8"))
710 |         for bug in reply["bugs"]:
711 |             bug_atoms = (
712 |                 parse_atom(line.split(" ", 1)[0]).unversioned_atom
713 |                 for line in map(str.strip, bug["cf_stabilisation_atoms"].splitlines())
714 |                 if line
715 |             )
716 |             bug_match = boolean.OrRestriction(*bug_atoms)
717 |             for node in self.nodes:
718 |                 if node.bugno is None and all(bug_match.match(pkg[0]) for pkg in node.pkgs):
719 |                     node.bugno = bug["id"]
720 |                     self.out.write(
721 |                         self.out.fg("yellow"),
722 |                         f"Found https://bugs.gentoo.org/{node.bugno} for node {node}",
723 |                         self.out.reset,
724 |                     )
725 |                     self.out.write(" -> bug summary: ", bug["summary"])
726 |                     break
727 | 
728 |     def file_bugs(self, api_key: str, auto_cc_arches: frozenset[str], block_bugs: list[int]):
729 |         def observe(node: GraphNode):
730 |             self.out.write(
731 |                 f"https://bugs.gentoo.org/{node.bugno} ",
732 |                 " | ".join(node.lines()),
733 |                 " depends on bugs ",
734 |                 {dep.bugno for dep in node.edges} or "{}",
735 |             )
736 |             self.out.flush()
737 | 
738 |         for node in self.starting_nodes:
739 |             node.file_bug(api_key, auto_cc_arches, block_bugs, self.modified_repo, observe)
740 | 
741 | 
742 | def _load_from_stdin(out: Formatter):
743 |     if not sys.stdin.isatty():
744 |         out.warn("No packages were specified, reading from stdin...")
745 |         for line in sys.stdin.readlines():
746 |             if line := line.split("#", 1)[0].strip():
747 |                 yield line, parserestrict.parse_match(line)
748 |         # reassign stdin to allow interactivity (currently only works for unix)
749 |         sys.stdin = open("/dev/tty")
750 |     else:
751 |         bugs.error("reading from stdin is only valid when piping data in")
752 | 
753 | 
754 | @bugs.bind_main_func
755 | def main(options, out: Formatter, err: Formatter):
756 |     search_repo = options.search_repo
757 |     options.targets = options.targets or []
758 |     d = DependencyGraph(out, err, options)
759 |     options.targets.extend(d.extend_maintainers())
760 |     options.targets.extend(d.extend_targets_stable_groups(options.sets or ()))
761 |     if not options.targets:
762 |         options.targets = list(_load_from_stdin(out))
763 |     d.load_targets(options.targets)
764 |     d.build_full_graph()
765 |     d.merge_stabilization_groups()
766 |     d.merge_cycles()
767 |     d.merge_new_keywords_children()
768 | 
769 |     if not d.nodes:
770 |         out.write(out.fg("red"), "Nothing to do, exiting", out.reset)
771 |         return 1
772 | 
773 |     if userquery("Check for open bugs matching current graph?", out, err, default_answer=False):
774 |         d.scan_existing_bugs(options.api_key)
775 | 
776 |     if options.edit_graph:
777 |         toml = d.output_graph_toml()
778 | 
779 |     for node in d.nodes:
780 |         node.cleanup_keywords(search_repo)
781 | 
782 |     if options.dot is not None:
783 |         d.output_dot(options.dot)
784 |         out.write(out.fg("green"), f"Dot file written to {options.dot}", out.reset)
785 |         out.flush()
786 | 
787 |     if options.edit_graph:
788 |         editor = shlex.split(os.environ.get("VISUAL", os.environ.get("EDITOR", "nano")))
789 |         try:
790 |             subprocess.run(editor + [toml.name], check=True)
791 |         except subprocess.CalledProcessError:
792 |             bugs.error("failed editing graph file")
793 |         except FileNotFoundError:
794 |             bugs.error(f"nonexistent editor: {editor[0]!r}")
795 |         d.load_graph_toml(toml.name)
796 |         for node in d.nodes:
797 |             node.cleanup_keywords(search_repo)
798 | 
799 |         if options.dot is not None:
800 |             d.output_dot(options.dot)
801 |             out.write(out.fg("green"), f"Dot file written to {options.dot}", out.reset)
802 |             out.flush()
803 | 
804 |     bugs_count = len(tuple(node for node in d.nodes if node.bugno is None))
805 |     if bugs_count == 0:
806 |         out.write(out.fg("red"), "Nothing to do, exiting", out.reset)
807 |         return 1
808 | 
809 |     if not userquery(
810 |         f"Continue and create {bugs_count} stablereq bugs?", out, err, default_answer=False
811 |     ):
812 |         return 1
813 | 
814 |     if options.api_key is None:
815 |         err.write(out.fg("red"), "No API key provided, exiting", out.reset)
816 |         return 1
817 | 
818 |     disabled, enabled = options.auto_cc_arches
819 |     blocks = list(frozenset(map(int, options.blocks)))
820 |     d.file_bugs(options.api_key, frozenset(enabled).difference(disabled), blocks)
821 | 
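
For reference, a minimal standalone sketch of the Bugzilla search request that scan_existing_bugs() issues above, using only the standard library; the atom strings are hypothetical examples and the API key parameter is omitted.

from urllib.parse import urlencode

# hypothetical unversioned atoms collected from the graph nodes
atoms = {"dev-python/foo", "dev-libs/bar"}

params = urlencode(
    {
        "include_fields": "id,cf_stabilisation_atoms,summary",
        "component": "Stabilization",
        "resolution": "---",
        "f1": "cf_stabilisation_atoms",
        "o1": "anywords",
        "v1": atoms,
    },
    doseq=True,
)
# doseq=True expands the set into repeated v1=... query parameters
print("https://bugs.gentoo.org/rest/bug?" + params)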


--------------------------------------------------------------------------------
/src/pkgdev/scripts/pkgdev_manifest.py:
--------------------------------------------------------------------------------
  1 | import os
  2 | import re
  3 | import subprocess
  4 | 
  5 | from pkgcore.operations import observer as observer_mod
  6 | from pkgcore.restrictions import packages, values
  7 | from pkgcore.util.parserestrict import parse_match
  8 | from snakeoil.cli import arghparse
  9 | 
 10 | from .. import cli, git
 11 | from .argparsers import cwd_repo_argparser
 12 | 
 13 | manifest = cli.ArgumentParser(
 14 |     prog="pkgdev manifest", description="update package manifests", parents=(cwd_repo_argparser,)
 15 | )
 16 | manifest.add_argument(
 17 |     "target",
 18 |     nargs="*",
 19 |     help="packages to target",
 20 |     docs="""
 21 |         Packages matching any of these restrictions will have their manifest
 22 |         entries updated. If no target is specified a path restriction is
 23 |         created based on the current working directory. In other words, if
 24 |         ``pkgdev manifest`` is run within an ebuild's directory, all the
 25 |         ebuilds within that directory will be manifested.
 26 |     """,
 27 | )
 28 | manifest_opts = manifest.add_argument_group("manifest options")
 29 | manifest_opts.add_argument(
 30 |     "-d",
 31 |     "--distdir",
 32 |     type=arghparse.create_dir,
 33 |     help="target download directory",
 34 |     docs="""
 35 |         Use a specified target directory for downloads instead of the
 36 |         configured DISTDIR.
 37 |     """,
 38 | )
 39 | manifest_opts.add_argument(
 40 |     "-f",
 41 |     "--force",
 42 |     help="forcibly remanifest packages",
 43 |     action="store_true",
 44 |     docs="""
 45 |         Force package manifest files to be rewritten. Note that this requires
 46 |         downloading all distfiles.
 47 |     """,
 48 | )
 49 | manifest_opts.add_argument(
 50 |     "-m",
 51 |     "--mirrors",
 52 |     help="enable fetching from Gentoo mirrors",
 53 |     action="store_true",
 54 |     docs="""
 55 |         Enable checking Gentoo mirrors first for distfiles. This is disabled by
 56 |         default because manifest generation is often performed when adding new
 57 |         ebuilds with distfiles that aren't on Gentoo mirrors yet.
 58 |     """,
 59 | )
 60 | manifest_opts.add_argument(
 61 |     "--if-modified",
 62 |     dest="if_modified",
 63 |     help="Only check packages that have uncommitted modifications",
 64 |     action="store_true",
 65 |     docs="""
 66 |         In addition to matching the specified restriction, restrict to targets
 67 |         which are marked as modified by git, including untracked files.
 68 |     """,
 69 | )
 70 | manifest_opts.add_argument(
 71 |     "--ignore-fetch-restricted",
 72 |     dest="ignore_fetch_restricted",
 73 |     help="Ignore fetch restricted ebuilds",
 74 |     action="store_true",
 75 |     docs="""
 76 |         Ignore attempting to update manifest entries for ebuilds which are
 77 |         fetch restricted.
 78 |     """,
 79 | )
 80 | 
 81 | 
 82 | def _restrict_targets(repo, targets):
 83 |     restrictions = []
 84 |     for target in targets:
 85 |         if os.path.exists(target):
 86 |             try:
 87 |                 if target in repo:
 88 |                     target = os.path.relpath(target, repo.location)
 89 |                 restrictions.append(repo.path_restrict(target))
 90 |             except ValueError as exc:
 91 |                 manifest.error(exc)
 92 |         else:
 93 |             try:
 94 |                 restrictions.append(parse_match(target))
 95 |             except ValueError:
 96 |                 manifest.error(f"invalid atom: {target!r}")
 97 |     return packages.OrRestriction(*restrictions)
 98 | 
 99 | 
100 | def _restrict_modified_files(repo):
101 |     ebuild_re = re.compile(r"^[ MTARC?]{2} (?P<path>[^/]+/[^/]+/[^/]+\.ebuild)$")
102 |     p = git.run(
103 |         "status", "--porcelain=v1", "-z", "*.ebuild", cwd=repo.location, stdout=subprocess.PIPE
104 |     )
105 | 
106 |     restrictions = []
107 |     for line in p.stdout.strip("\x00").split("\x00"):
108 |         if mo := ebuild_re.match(line):
109 |             restrictions.append(repo.path_restrict(mo.group("path")))
110 |     return packages.OrRestriction(*restrictions)
111 | 
112 | 
113 | @manifest.bind_final_check
114 | def _manifest_validate(parser, namespace):
115 |     targets = namespace.target if namespace.target else [namespace.cwd]
116 | 
117 |     restrictions = [_restrict_targets(namespace.repo, targets)]
118 |     if namespace.if_modified:
119 |         restrictions.append(_restrict_modified_files(namespace.repo))
120 |     if namespace.ignore_fetch_restricted:
121 |         restrictions.append(
122 |             packages.PackageRestriction("restrict", values.ContainmentMatch("fetch", negate=True))
123 |         )
124 |     namespace.restriction = packages.AndRestriction(*restrictions)
125 | 
126 | 
127 | @manifest.bind_main_func
128 | def _manifest(options, out, err):
129 |     failed = options.repo.operations.manifest(
130 |         domain=options.domain,
131 |         restriction=options.restriction,
132 |         observer=observer_mod.formatter_output(out),
133 |         mirrors=options.mirrors,
134 |         force=options.force,
135 |         distdir=options.distdir,
136 |     )
137 | 
138 |     return int(any(failed))
139 | 
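
As a standalone illustration of the porcelain parsing done in _restrict_modified_files() above, the sketch below runs the same regular expression against a canned ``git status --porcelain=v1 -z`` output instead of a live repository; the ebuild paths are hypothetical.

import re

ebuild_re = re.compile(r"^[ MTARC?]{2} (?P<path>[^/]+/[^/]+/[^/]+\.ebuild)$")

# NUL-separated entries, as produced by `git status --porcelain=v1 -z`
stdout = " M dev-libs/foo/foo-1.ebuild\x00?? dev-libs/foo/foo-2.ebuild\x00"

for entry in stdout.strip("\x00").split("\x00"):
    if mo := ebuild_re.match(entry):
        print(mo.group("path"))
# -> dev-libs/foo/foo-1.ebuild
# -> dev-libs/foo/foo-2.ebuild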


--------------------------------------------------------------------------------
/src/pkgdev/scripts/pkgdev_mask.py:
--------------------------------------------------------------------------------
  1 | import json
  2 | import os
  3 | import re
  4 | import shlex
  5 | import subprocess
  6 | import tempfile
  7 | import textwrap
  8 | import urllib.request as urllib
  9 | from collections import deque
 10 | from dataclasses import dataclass
 11 | from datetime import datetime, timedelta, timezone
 12 | from itertools import groupby
 13 | from operator import itemgetter
 14 | from typing import List
 15 | 
 16 | from pkgcore.ebuild.atom import MalformedAtom
 17 | from pkgcore.ebuild.atom import atom as atom_cls
 18 | from pkgcore.ebuild.profiles import ProfileNode
 19 | from snakeoil.bash import iter_read_bash
 20 | from snakeoil.cli import arghparse
 21 | from snakeoil.osutils import pjoin
 22 | from snakeoil.strings import pluralism
 23 | 
 24 | from .. import git
 25 | from .argparsers import cwd_repo_argparser, git_repo_argparser, BugzillaApiKey
 26 | 
 27 | mask = arghparse.ArgumentParser(
 28 |     prog="pkgdev mask",
 29 |     description="mask packages",
 30 |     parents=(cwd_repo_argparser, git_repo_argparser),
 31 | )
 32 | BugzillaApiKey.mangle_argparser(mask)
 33 | mask.add_argument(
 34 |     "targets",
 35 |     metavar="TARGET",
 36 |     nargs="*",
 37 |     help="packages to mask",
 38 |     docs="""
 39 |         Packages matching any of these restrictions will have a mask entry in
 40 |         profiles/package.mask added for them. If no target is specified a path
 41 |         restriction is created based on the current working directory. In other
 42 |         words, if ``pkgdev mask`` is run within an ebuild's directory, all the
 43 |         ebuilds within that directory will be masked.
 44 |     """,
 45 | )
 46 | mask_opts = mask.add_argument_group("mask options")
 47 | mask_opts.add_argument(
 48 |     "-r",
 49 |     "--rites",
 50 |     metavar="DAYS",
 51 |     nargs="?",
 52 |     const=30,
 53 |     type=arghparse.positive_int,
 54 |     help="mark for last rites",
 55 |     docs="""
 56 |         Mark a mask entry for last rites. This defaults to 30 days until
 57 |         package removal but accepts an optional argument for the number of
 58 |         days.
 59 |     """,
 60 | )
 61 | mask_opts.add_argument(
 62 |     "-b",
 63 |     "--bug",
 64 |     "--bugs",
 65 |     dest="bugs",
 66 |     action=arghparse.CommaSeparatedValuesAppend,
 67 |     default=[],
 68 |     help="reference bug in the mask comment",
 69 |     docs="""
 70 |         Add a reference to a bug in the mask comment. May be specified multiple
 71 |         times to reference multiple bugs.
 72 |     """,
 73 | )
 74 | mask_opts.add_argument(
 75 |     "--email",
 76 |     action="store_true",
 77 |     help="spawn email composer with prepared email for sending to mailing lists",
 78 |     docs="""
 79 |         Spawn user's preferred email composer with a prepared email for
 80 |         sending a last rites message to Gentoo's mailing list (``gentoo-dev``
 81 |         and ``gentoo-dev-announce``). The user should manually set the Reply-to
 82 |         field for the message to be accepted by ``gentoo-dev-announce``.
 83 | 
 84 |         For spawning the preferred email composer, the ``xdg-email`` tool
 85 |         from the ``x11-misc/xdg-utils`` package is used.
 86 |     """,
 87 | )
 88 | mask_opts.add_argument(
 89 |     "--file-bug",
 90 |     action="store_true",
 91 |     help="file a last-rite bug",
 92 |     docs="""
 93 |         Files a last-rites bug for the masked packages, which blocks the
 94 |         listed reference bugs. The ``PMASKED`` keyword is added to all referenced bugs.
 95 |     """,
 96 | )
 97 | 
 98 | 
 99 | @mask.bind_final_check
100 | def _mask_validate(parser, namespace):
101 |     atoms = set()
102 |     maintainers = set()
103 | 
104 |     try:
105 |         namespace.bugs = list(map(int, dict.fromkeys(namespace.bugs)))
106 |     except ValueError:
107 |         parser.error("argument -b/--bug: invalid integer value")
108 |     if min(namespace.bugs, default=1) < 1:
109 |         parser.error("argument -b/--bug: must be >= 1")
110 | 
111 |     if not namespace.rites and namespace.file_bug:
112 |         mask.error("bug filing requires last rites")
113 |     if namespace.file_bug and not namespace.api_key:
114 |         mask.error("bug filing requires a Bugzilla API key")
115 | 
116 |     if namespace.email and not namespace.rites:
117 |         mask.error("last rites required for email support")
118 | 
119 |     if namespace.targets:
120 |         for x in namespace.targets:
121 |             if os.path.exists(x) and x.endswith(".ebuild"):
122 |                 restrict = namespace.repo.path_restrict(x)
123 |                 pkg = next(namespace.repo.itermatch(restrict))
124 |                 atom = pkg.versioned_atom
125 |                 maintainers.update(maintainer.email for maintainer in pkg.maintainers)
126 |             else:
127 |                 try:
128 |                     atom = atom_cls(x)
129 |                 except MalformedAtom:
130 |                     mask.error(f"invalid atom: {x!r}")
131 |                 if pkgs := namespace.repo.match(atom):
132 |                     maintainers.update(
133 |                         maintainer.email for pkg in pkgs for maintainer in pkg.maintainers
134 |                     )
135 |                 else:
136 |                     mask.error(f"no repo matches: {x!r}")
137 |             atoms.add(atom)
138 |     else:
139 |         restrict = namespace.repo.path_restrict(os.getcwd())
140 |         # repo, category, and package level restricts
141 |         if len(restrict) != 3:
142 |             mask.error("not in a package directory")
143 |         pkg = next(namespace.repo.itermatch(restrict))
144 |         atoms.add(pkg.unversioned_atom)
145 |         maintainers.update(maintainer.email for maintainer in pkg.maintainers)
146 | 
147 |     namespace.atoms = sorted(atoms)
148 |     namespace.maintainers = sorted(maintainers) or ["maintainer-needed@gentoo.org"]
149 | 
150 | 
151 | @dataclass(frozen=True)
152 | class Mask:
153 |     """Entry in package.mask file."""
154 | 
155 |     author: str
156 |     email: str
157 |     date: str
158 |     comment: List[str]
159 |     atoms: List[atom_cls]
160 | 
161 |     _removal_re = re.compile(r"^Removal: (?P<date>\d{4}-\d{2}-\d{2})")
162 | 
163 |     def __str__(self):
164 |         lines = [f"# {self.author} <{self.email}> ({self.date})"]
165 |         lines.extend(f"# {x}" if x else "#" for x in self.comment)
166 |         lines.extend(map(str, self.atoms))
167 |         return "\n".join(lines)
168 | 
169 |     @property
170 |     def removal(self):
171 |         """Pull removal date from comment."""
172 |         if mo := self._removal_re.match(self.comment[-1]):
173 |             return mo.group("date")
174 |         return None
175 | 
176 | 
177 | def consecutive_groups(iterable, ordering=lambda x: x):
178 |     """Return an iterable split into separate, consecutive groups."""
179 |     for k, g in groupby(enumerate(iterable), key=lambda x: x[0] - ordering(x[1])):
180 |         yield map(itemgetter(1), g)
181 | 
182 | 
183 | class MaskFile:
184 |     """Object representing the contents of a package.mask file."""
185 | 
186 |     attribution_re = re.compile(r"^(?P<author>.+) <(?P<email>.+)> \((?P<date>\d{4}-\d{2}-\d{2})\)$")
187 | 
188 |     def __init__(self, path):
189 |         self.path = path
190 |         self.profile = ProfileNode(os.path.dirname(path))
191 |         self.header = []
192 |         self.masks = deque()
193 | 
194 |         # parse existing mask entries
195 |         try:
196 |             self.parse()
197 |         except FileNotFoundError:
198 |             pass
199 | 
200 |     def parse(self):
201 |         """Parse the given file into Mask objects."""
202 |         with open(self.path) as f:
203 |             lines = f.readlines()
204 | 
205 |         # determine mask groups by line number
206 |         mask_map = dict(iter_read_bash(self.path, enum_line=True))
207 |         for mask_lines in map(list, consecutive_groups(mask_map)):
208 |             # use profile's EAPI setting to coerce supported masks
209 |             atoms = [self.profile.eapi_atom(mask_map[x]) for x in mask_lines]
210 | 
211 |             # pull comment lines above initial mask entry line
212 |             comment = []
213 |             i = mask_lines[0] - 2
214 |             while i >= 0 and (line := lines[i].rstrip()):
215 |                 if not line.startswith("# ") and line != "#":
216 |                     mask.error(f"invalid mask entry header, lineno {i + 1}: {line!r}")
217 |                 comment.append(line[2:])
218 |                 i -= 1
219 |             if not self.header:
220 |                 self.header = lines[: i + 1]
221 |             comment = list(reversed(comment))
222 | 
223 |             # pull attribution data from first comment line
224 |             if mo := self.attribution_re.match(comment[0]):
225 |                 author, email, date = mo.group("author"), mo.group("email"), mo.group("date")
226 |             else:
227 |                 mask.error(f"invalid author, lineno {i + 2}: {comment[0]!r}")
228 | 
229 |             self.masks.append(Mask(author, email, date, comment[1:], atoms))
230 | 
231 |     def add(self, mask):
232 |         """Add a new mask to the file."""
233 |         self.masks.appendleft(mask)
234 | 
235 |     def write(self):
236 |         """Serialize the registered masks back to the related file."""
237 |         with open(self.path, "w") as f:
238 |             f.write(f"{self}\n")
239 | 
240 |     def __str__(self):
241 |         return "".join(self.header) + "\n\n".join(map(str, self.masks))
242 | 
243 | 
244 | def get_comment():
245 |     """Spawn editor to get mask comment."""
246 |     tmp = tempfile.NamedTemporaryFile(mode="w")
247 |     tmp.write(
248 |         textwrap.dedent(
249 |             """
250 | 
251 |                 # Please enter the mask message. Lines starting with '#' will be ignored.
252 |                 #
253 |                 # If last rites were requested, a removal notice will be added automatically.
254 |                 #
255 |                 # For rules on writing mask messages, see GLEP-84:
256 |                 #   https://glep.gentoo.org/glep-0084.html
257 |                 #
258 |                 # Example:
259 |                 #
260 |                 # Doesn't work with new libfoo. Upstream dead, gtk-1, smells
261 |                 # funny.
262 |             """
263 |         )
264 |     )
265 |     tmp.flush()
266 | 
267 |     editor = shlex.split(os.environ.get("VISUAL", os.environ.get("EDITOR", "nano")))
268 |     try:
269 |         subprocess.run(editor + [tmp.name], check=True)
270 |     except subprocess.CalledProcessError:
271 |         mask.error("failed writing mask comment")
272 |     except FileNotFoundError:
273 |         mask.error(f"nonexistent editor: {editor[0]!r}")
274 | 
275 |     with open(tmp.name) as f:
276 |         # strip trailing whitespace from lines
277 |         comment = (x.rstrip() for x in f.readlines())
278 |     # strip comments
279 |     comment = (x for x in comment if not x.startswith("#"))
280 |     # strip leading/trailing newlines
281 |     comment = "\n".join(comment).strip().splitlines()
282 |     if not comment:
283 |         mask.error("empty mask comment")
284 |     return comment
285 | 
286 | 
287 | def message_removal_notice(bugs: list[int], rites: int):
288 |     summary = []
289 |     if rites:
290 |         summary.append(f"Removal on {datetime.now(timezone.utc) + timedelta(days=rites):%Y-%m-%d}.")
291 |     if bugs:
292 |         # Bug(s) #A, #B, #C
293 |         bug_list = ", ".join(f"#{b}" for b in bugs)
294 |         s = pluralism(bugs)
295 |         summary.append(f"Bug{s} {bug_list}.")
296 |     return "  ".join(summary)
297 | 
298 | 
299 | def file_last_rites_bug(options, message: str) -> int:
300 |     summary = f"{', '.join(map(str, options.atoms))}: removal"
301 |     if len(summary) > 90 and len(options.atoms) > 1:
302 |         summary = f"{options.atoms[0]} and friends: removal"
303 |     request_data = dict(
304 |         Bugzilla_api_key=options.api_key,
305 |         product="Gentoo Linux",
306 |         component="Current packages",
307 |         version="unspecified",
308 |         summary=summary,
309 |         description="\n".join([*message, "", "package list:", *map(str, options.atoms)]).strip(),
310 |         keywords=["PMASKED"],
311 |         assigned_to=options.maintainers[0],
312 |         cc=options.maintainers[1:] + ["treecleaner@gentoo.org"],
313 |         deadline=(datetime.now(timezone.utc) + timedelta(days=options.rites)).strftime("%Y-%m-%d"),
314 |         blocks=list(options.bugs),
315 |     )
316 |     request = urllib.Request(
317 |         url="https://bugs.gentoo.org/rest/bug",
318 |         data=json.dumps(request_data).encode("utf-8"),
319 |         method="POST",
320 |         headers={
321 |             "Content-Type": "application/json",
322 |             "Accept": "application/json",
323 |         },
324 |     )
325 |     with urllib.urlopen(request, timeout=30) as response:
326 |         reply = json.loads(response.read().decode("utf-8"))
327 |     return int(reply["id"])
328 | 
329 | 
330 | def update_bugs_pmasked(api_key: str, bugs: list[int]):
331 |     if not bugs:
332 |         return True
333 |     request_data = dict(
334 |         Bugzilla_api_key=api_key,
335 |         ids=bugs,
336 |         keywords=dict(add=["PMASKED"]),
337 |     )
338 |     request = urllib.Request(
339 |         url=f"https://bugs.gentoo.org/rest/bug/{bugs[0]}",
340 |         data=json.dumps(request_data).encode("utf-8"),
341 |         method="PUT",
342 |         headers={
343 |             "Content-Type": "application/json",
344 |             "Accept": "application/json",
345 |         },
346 |     )
347 |     with urllib.urlopen(request, timeout=30) as response:
348 |         return response.status == 200
349 | 
350 | 
351 | def send_last_rites_email(m: Mask, subject_prefix: str):
352 |     try:
353 |         atoms = ", ".join(map(str, m.atoms))
354 |         subprocess.run(
355 |             args=[
356 |                 "xdg-email",
357 |                 "--utf8",
358 |                 "--cc",
359 |                 "gentoo-dev@lists.gentoo.org",
360 |                 "--subject",
361 |                 f"{subject_prefix}: {atoms}",
362 |                 "--body",
363 |                 str(m),
364 |                 "gentoo-dev-announce@lists.gentoo.org",
365 |             ],
366 |             check=True,
367 |         )
368 |     except subprocess.CalledProcessError:
369 |         mask.error("failed opening email composer")
370 | 
371 | 
372 | @mask.bind_main_func
373 | def _mask(options, out, err):
374 |     mask_file = MaskFile(pjoin(options.repo.location, "profiles/package.mask"))
375 |     today = datetime.now(timezone.utc)
376 | 
377 |     # pull name/email from git config
378 |     p = git.run("config", "user.name", stdout=subprocess.PIPE)
379 |     author = p.stdout.strip()
380 |     p = git.run("config", "user.email", stdout=subprocess.PIPE)
381 |     email = p.stdout.strip()
382 | 
383 |     message = get_comment()
384 |     if options.file_bug:
385 |         if bug_no := file_last_rites_bug(options, message):
386 |             out.write(out.fg("green"), f"filed bug https://bugs.gentoo.org/{bug_no}", out.reset)
387 |             out.flush()
388 |             if not update_bugs_pmasked(options.api_key, options.bugs):
389 |                 err.write(err.fg("red"), "failed to update referenced bugs", err.reset)
390 |                 err.flush()
391 |             options.bugs.insert(0, bug_no)
392 |     if removal := message_removal_notice(options.bugs, options.rites):
393 |         message.append(removal)
394 | 
395 |     m = Mask(
396 |         author=author,
397 |         email=email,
398 |         date=today.strftime("%Y-%m-%d"),
399 |         comment=message,
400 |         atoms=options.atoms,
401 |     )
402 |     mask_file.add(m)
403 |     mask_file.write()
404 | 
405 |     if options.email:
406 |         send_last_rites_email(m, "Last rites")
407 | 
408 |     return 0
409 | 
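
A quick, self-contained demonstration of the consecutive_groups() helper defined above, showing how the line numbers returned by iter_read_bash() split into one group per mask entry; the line numbers below are hypothetical.

from itertools import groupby
from operator import itemgetter

def consecutive_groups(iterable, ordering=lambda x: x):
    """Split an iterable of integers into runs of consecutive values."""
    # consecutive values share a constant (index - value) difference
    for _, g in groupby(enumerate(iterable), key=lambda x: x[0] - ordering(x[1])):
        yield map(itemgetter(1), g)

# line numbers of the non-comment lines in a hypothetical package.mask
lines = [10, 11, 12, 20, 21, 30]
print([list(g) for g in consecutive_groups(lines)])
# -> [[10, 11, 12], [20, 21], [30]], i.e. one run per mask entry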


--------------------------------------------------------------------------------
/src/pkgdev/scripts/pkgdev_push.py:
--------------------------------------------------------------------------------
 1 | import argparse
 2 | import shlex
 3 | 
 4 | from pkgcheck import reporters, scan
 5 | from pkgcheck.results import Warning as PkgcheckWarning
 6 | from snakeoil.cli import arghparse
 7 | from snakeoil.cli.input import userquery
 8 | 
 9 | from .. import cli, git
10 | from .argparsers import cwd_repo_argparser, git_repo_argparser
11 | 
12 | 
13 | class ArgumentParser(cli.ArgumentParser):
14 |     """Parse all known arguments, passing unknown arguments to ``git push``."""
15 | 
16 |     def parse_known_args(self, args=None, namespace=None):
17 |         namespace, args = super().parse_known_args(args, namespace)
18 |         if namespace.dry_run:
19 |             args.append("--dry-run")
20 |         namespace.push_args = args
21 |         return namespace, []
22 | 
23 | 
24 | push = ArgumentParser(
25 |     prog="pkgdev push",
26 |     description="run QA checks on commits and push them",
27 |     parents=(cwd_repo_argparser, git_repo_argparser),
28 | )
29 | # custom `pkgcheck scan` args used for tests
30 | push.add_argument("--pkgcheck-scan", help=argparse.SUPPRESS)
31 | push_opts = push.add_argument_group("push options")
32 | push_opts.add_argument(
33 |     "-A",
34 |     "--ask",
35 |     nargs="?",
36 |     const=True,
37 |     action=arghparse.StoreBool,
38 |     help="confirm pushing commits with QA errors",
39 | )
40 | push_opts.add_argument("-n", "--dry-run", action="store_true", help="pretend to push the commits")
41 | push_opts.add_argument(
42 |     "--pull", action="store_true", help="run `git pull --rebase` before scanning"
43 | )
44 | 
45 | 
46 | @push.bind_final_check
47 | def _commit_validate(parser, namespace):
48 |     # determine `pkgcheck scan` args
49 |     namespace.scan_args = ["-v"] * namespace.verbosity
50 |     if namespace.pkgcheck_scan:
51 |         namespace.scan_args.extend(shlex.split(namespace.pkgcheck_scan))
52 |     namespace.scan_args.extend(["--exit", "GentooCI", "--commits"])
53 | 
54 | 
55 | @push.bind_main_func
56 | def _push(options, out, err):
57 |     if options.pull:
58 |         git.run("pull", "--rebase", cwd=options.repo.location)
59 | 
60 |     # scan commits for QA issues
61 |     pipe = scan(options.scan_args)
62 |     has_warnings = False
63 |     with reporters.FancyReporter(out) as reporter:
64 |         for result in pipe:
65 |             reporter.report(result)
66 |             if result.level == PkgcheckWarning.level:
67 |                 has_warnings = True
68 | 
69 |     # fail on errors unless they're ignored
70 |     if pipe.errors:
71 |         with reporters.FancyReporter(out) as reporter:
72 |             out.write(out.bold, out.fg("red"), "\nFAILURES", out.reset)
73 |             for result in sorted(pipe.errors):
74 |                 reporter.report(result)
75 |         if not (options.ask and userquery("Push commits anyway?", out, err, default_answer=False)):
76 |             return 1
77 |     elif has_warnings and options.ask:
78 |         if not userquery("warnings detected, push commits anyway?", out, err, default_answer=False):
79 |             return 1
80 | 
81 |     # push commits upstream
82 |     git.run("push", *options.push_args, cwd=options.repo.location)
83 | 
84 |     return 0
85 | 
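
To see what the parse_known_args() override above does, here is a minimal sketch built on plain argparse; the extra option and remote name passed in are hypothetical.

import argparse

class PushParser(argparse.ArgumentParser):
    """Collect unknown arguments for forwarding to `git push`."""

    def parse_known_args(self, args=None, namespace=None):
        namespace, unknown = super().parse_known_args(args, namespace)
        if namespace.dry_run:
            unknown.append("--dry-run")
        namespace.push_args = unknown
        return namespace, []  # report no leftovers, so parse_args() won't error

parser = PushParser()
parser.add_argument("-n", "--dry-run", action="store_true")
opts = parser.parse_args(["-n", "--force", "origin"])
print(opts.push_args)  # -> ['--force', 'origin', '--dry-run']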


--------------------------------------------------------------------------------
/src/pkgdev/scripts/pkgdev_showkw.py:
--------------------------------------------------------------------------------
  1 | """display package keywords"""
  2 | 
  3 | import os
  4 | from functools import partial
  5 | 
  6 | from pkgcore.ebuild import restricts
  7 | from pkgcore.util import commandline
  8 | from pkgcore.util import packages as pkgutils
  9 | from snakeoil.strings import pluralism
 10 | 
 11 | from .. import cli
 12 | from .._vendor.tabulate import tabulate, tabulate_formats
 13 | 
 14 | 
 15 | showkw = cli.ArgumentParser(prog="pkgdev showkw", description="show package keywords")
 16 | showkw.add_argument(
 17 |     "targets",
 18 |     metavar="target",
 19 |     nargs="*",
 20 |     action=commandline.StoreTarget,
 21 |     help="extended atom matching of packages",
 22 | )
 23 | 
 24 | output_opts = showkw.add_argument_group("output options")
 25 | output_opts.add_argument(
 26 |     "-f",
 27 |     "--format",
 28 |     default="showkw",
 29 |     metavar="FORMAT",
 30 |     choices=tabulate_formats,
 31 |     help="keywords table format",
 32 |     docs=f"""
 33 |         Output table using specified tabular format (defaults to compressed,
 34 |         custom format).
 35 | 
 36 |         Available formats: {', '.join(tabulate_formats)}
 37 |     """,
 38 | )
 39 | output_opts.add_argument(
 40 |     "-c", "--collapse", action="store_true", help="show collapsed list of arches"
 41 | )
 42 | 
 43 | arch_options = showkw.add_argument_group("arch options")
 44 | arch_options.add_argument("-s", "--stable", action="store_true", help="show stable arches")
 45 | arch_options.add_argument("-u", "--unstable", action="store_true", help="show unstable arches")
 46 | arch_options.add_argument(
 47 |     "-o",
 48 |     "--only-unstable",
 49 |     action="store_true",
 50 |     help="show arches that only have unstable keywords",
 51 | )
 52 | arch_options.add_argument(
 53 |     "-p", "--prefix", action="store_true", help="show prefix and non-native arches"
 54 | )
 55 | arch_options.add_argument("-a", "--arch", action="csv_negations", help="select arches to display")
 56 | 
 57 | # TODO: allow multi-repo comma-separated input
 58 | target_opts = showkw.add_argument_group("target options")
 59 | target_opts.add_argument(
 60 |     "-r",
 61 |     "--repo",
 62 |     dest="selected_repo",
 63 |     metavar="REPO",
 64 |     priority=29,
 65 |     action=commandline.StoreRepoObject,
 66 |     repo_type="all-raw",
 67 |     allow_external_repos=True,
 68 |     help="repo to query (defaults to all ebuild repos)",
 69 | )
 70 | 
 71 | 
 72 | @showkw.bind_delayed_default(30, "repos")
 73 | def _setup_repos(namespace, attr):
 74 |     target_repo = namespace.selected_repo
 75 |     all_ebuild_repos = namespace.domain.all_ebuild_repos_raw
 76 |     namespace.cwd = os.getcwd()
 77 | 
 78 |     # TODO: move this to StoreRepoObject
 79 |     if target_repo is None:
 80 |         # determine target repo from the target directory
 81 |         for repo in all_ebuild_repos.trees:
 82 |             if namespace.cwd in repo:
 83 |                 target_repo = repo
 84 |                 break
 85 |         else:
 86 |             # determine if CWD is inside an unconfigured repo
 87 |             target_repo = namespace.domain.find_repo(namespace.cwd, config=namespace.config)
 88 | 
 89 |     # fallback to using all, unfiltered ebuild repos if no target repo can be found
 90 |     namespace.repo = target_repo if target_repo is not None else all_ebuild_repos
 91 | 
 92 | 
 93 | @showkw.bind_delayed_default(40, "arches")
 94 | def _setup_arches(namespace, attr):
 95 |     default_repo = namespace.config.get_default("repo")
 96 | 
 97 |     try:
 98 |         known_arches = {arch for r in namespace.repo.trees for arch in r.config.known_arches}
 99 |     except AttributeError:
100 |         try:
101 |             # binary/vdb repos use known arches from the default repo
102 |             known_arches = default_repo.config.known_arches
103 |         except AttributeError:
104 |             # TODO: remove fallback for tests after fixing default repo pull
105 |             # from faked config
106 |             known_arches = set()
107 | 
108 |     arches = known_arches
109 |     if namespace.arch is not None:
110 |         disabled_arches, enabled_arches = namespace.arch
111 |         disabled_arches = set(disabled_arches)
112 |         enabled_arches = set(enabled_arches)
113 |         unknown_arches = disabled_arches.difference(known_arches) | enabled_arches.difference(
114 |             known_arches
115 |         )
116 |         if unknown_arches:
117 |             unknown = ", ".join(map(repr, sorted(unknown_arches)))
118 |             known = ", ".join(sorted(known_arches))
119 |             es = pluralism(unknown_arches, plural="es")
120 |             showkw.error(f"unknown arch{es}: {unknown} (choices: {known})")
121 |         if enabled_arches:
122 |             arches = arches.intersection(enabled_arches)
123 |         if disabled_arches:
124 |             arches = arches - disabled_arches
125 | 
126 |     prefix_arches = set(x for x in arches if "-" in x)
127 |     native_arches = arches.difference(prefix_arches)
128 |     arches = native_arches
129 |     if namespace.prefix:
130 |         arches = arches.union(prefix_arches)
131 |     if namespace.stable:
132 |         try:
133 |             stable_arches = {
134 |                 arch for r in namespace.repo.trees for arch in r.config.profiles.arches("stable")
135 |             }
136 |         except AttributeError:
137 |             # binary/vdb repos use stable arches from the default repo
138 |             stable_arches = default_repo.config.profiles.arches("stable")
139 |         arches = arches.intersection(stable_arches)
140 | 
141 |     namespace.known_arches = known_arches
142 |     namespace.prefix_arches = prefix_arches
143 |     namespace.native_arches = native_arches
144 |     namespace.arches = arches
145 | 
146 | 
147 | def _colormap(colors, line):
148 |     if colors is None:
149 |         return line
150 |     return colors[line] + line + colors["reset"]
151 | 
152 | 
153 | @showkw.bind_final_check
154 | def _validate_args(parser, namespace):
155 |     namespace.pkg_dir = False
156 | 
157 |     # disable colors when not using the native output format
158 |     if namespace.format != "showkw":
159 |         namespace.color = False
160 | 
161 |     if namespace.color:
162 |         # default colors to use for keyword types
163 |         _COLORS = {
164 |             "+": "\u001b[32m",
165 |             "~": "\u001b[33m",
166 |             "-": "\u001b[31m",
167 |             "*": "\u001b[31m",
168 |             "o": "\u001b[90;1m",
169 |             "reset": "\u001b[0m",
170 |         }
171 |     else:
172 |         _COLORS = None
173 |     namespace.colormap = partial(_colormap, _COLORS)
174 | 
175 |     if not namespace.targets:
176 |         if namespace.selected_repo:
177 |             # use repo restriction since no targets specified
178 |             restriction = restricts.RepositoryDep(namespace.selected_repo.repo_id)
179 |             token = namespace.selected_repo.repo_id
180 |         else:
181 |             # Use a path restriction if we're in a repo, obviously it'll work
182 |             # faster if we're in an individual ebuild dir but we're not that
183 |             # restrictive.
184 |             try:
185 |                 restriction = namespace.repo.path_restrict(namespace.cwd)
186 |                 token = namespace.cwd
187 |             except (AttributeError, ValueError):
188 |                 parser.error("missing target argument and not in a supported repo")
189 | 
190 |             # determine if we're grabbing the keywords for a single pkg in cwd
191 |             namespace.pkg_dir = any(
192 |                 isinstance(x, restricts.PackageDep) for x in reversed(restriction.restrictions)
193 |             )
194 | 
195 |         namespace.targets = [(token, restriction)]
196 | 
197 | 
198 | def _collapse_arches(options, pkgs):
199 |     """Collapse arches into a single set."""
200 |     keywords = set()
201 |     stable_keywords = set()
202 |     unstable_keywords = set()
203 |     for pkg in pkgs:
204 |         for x in pkg.keywords:
205 |             if x[0] == "~":
206 |                 unstable_keywords.add(x[1:])
207 |             elif x in options.arches:
208 |                 stable_keywords.add(x)
209 |     if options.unstable:
210 |         keywords.update(unstable_keywords)
211 |     if options.only_unstable:
212 |         keywords.update(unstable_keywords.difference(stable_keywords))
213 |     if not keywords or options.stable:
214 |         keywords.update(stable_keywords)
215 |     return sorted(keywords.intersection(options.native_arches)) + sorted(
216 |         keywords.intersection(options.prefix_arches)
217 |     )
218 | 
219 | 
220 | def _render_rows(options, pkgs, arches):
221 |     """Build rows for tabular data output."""
222 |     for pkg in sorted(pkgs):
223 |         keywords = set(pkg.keywords)
224 |         row = [pkg.fullver]
225 |         for arch in arches:
226 |             if arch in keywords:
227 |                 line = "+"
228 |             elif f"~{arch}" in keywords:
229 |                 line = "~"
230 |             elif f"-{arch}" in keywords:
231 |                 line = "-"
232 |             elif "-*" in keywords:
233 |                 line = "*"
234 |             else:
235 |                 line = "o"
236 |             row.append(options.colormap(line))
237 |         row.extend([pkg.eapi, pkg.fullslot, pkg.repo.repo_id])
238 |         yield row
239 | 
240 | 
241 | @showkw.bind_main_func
242 | def main(options, out, err):
243 |     continued = False
244 |     for token, restriction in options.targets:
245 |         for pkgs in pkgutils.groupby_pkg(options.repo.itermatch(restriction, sorter=sorted)):
246 |             if options.collapse:
247 |                 out.write(" ".join(_collapse_arches(options, pkgs)))
248 |             else:
249 |                 arches = sorted(options.arches.intersection(options.native_arches))
250 |                 if options.prefix:
251 |                     arches += sorted(options.arches.intersection(options.prefix_arches))
252 |                 headers = [""] + arches + ["eapi", "slot", "repo"]
253 |                 if continued:
254 |                     out.write()
255 |                 if not options.pkg_dir:
256 |                     pkgs = list(pkgs)
257 |                     out.write(f"keywords for {pkgs[0].unversioned_atom}:")
258 |                 data = _render_rows(options, pkgs, arches)
259 |                 table = tabulate(
260 |                     data, headers=headers, tablefmt=options.format, disable_numparse=True
261 |                 )
262 |                 out.write(table)
263 |             continued = True
264 | 
265 |     if not continued:
266 |         err.write(f"{options.prog}: no matches for {token!r}")
267 |         return 1
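
The per-arch markers emitted by _render_rows() above can be sketched in isolation; the keywords set below is a hypothetical stand-in for a pkgcore package's KEYWORDS.

keywords = {"amd64", "~arm64", "-sparc"}

def marker(arch: str) -> str:
    if arch in keywords:
        return "+"  # stable
    if f"~{arch}" in keywords:
        return "~"  # unstable
    if f"-{arch}" in keywords:
        return "-"  # explicitly disabled
    if "-*" in keywords:
        return "*"  # masked by -*
    return "o"      # no keyword at all

print([marker(a) for a in ("amd64", "arm64", "sparc", "x86")])
# -> ['+', '~', '-', 'o']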
268 | 


--------------------------------------------------------------------------------
/src/pkgdev/scripts/pkgdev_tatt.py:
--------------------------------------------------------------------------------
  1 | """package testing tool"""
  2 | 
  3 | import os
  4 | import random
  5 | import stat
  6 | from collections import defaultdict
  7 | from importlib.resources import read_text
  8 | from itertools import islice
  9 | from pathlib import Path
 10 | 
 11 | from pkgcore.restrictions import boolean, packages, values
 12 | from pkgcore.restrictions.required_use import find_constraint_satisfaction
 13 | from pkgcore.util import commandline
 14 | from pkgcore.util import packages as pkgutils
 15 | from snakeoil.cli import arghparse
 16 | 
 17 | from ..cli import ArgumentParser
 18 | from .argparsers import BugzillaApiKey
 19 | 
 20 | tatt = ArgumentParser(prog="pkgdev tatt", description=__doc__, verbose=False, quiet=False)
 21 | BugzillaApiKey.mangle_argparser(tatt)
 22 | tatt.add_argument(
 23 |     "-j",
 24 |     "--job-name",
 25 |     metavar="NAME",
 26 |     default="{PN}-{BUGNO}",
 27 |     help="Name template for created job script",
 28 |     docs="""
 29 |         The job name to use for the job script and report. The name can use
 30 |         the variables ``{PN}`` (package name) and ``{BUGNO}`` (bug number)
 31 |         to created variable names.
 32 |     """,
 33 | )
 34 | tatt.add_argument(
 35 |     "-b",
 36 |     "--bug",
 37 |     type=arghparse.positive_int,
 38 |     metavar="BUG",
 39 |     help="Single bug to take package list from",
 40 | )
 41 | 
 42 | use_opts = tatt.add_argument_group("Use flags options")
 43 | use_opts.add_argument(
 44 |     "-t",
 45 |     "--test",
 46 |     action="store_true",
 47 |     help="Run test phase for the packages",
 48 |     docs="""
 49 |         Include a test run for packages which define ``src_test`` phase
 50 |         (in the ebuild or inherited from eclass).
 51 |     """,
 52 | )
 53 | use_opts.add_argument(
 54 |     "-u",
 55 |     "--use-combos",
 56 |     default=0,
 57 |     type=int,
 58 |     metavar="NUMBER",
 59 |     help="Maximal number of USE combinations to be tested",
 60 | )
 61 | use_opts.add_argument(
 62 |     "--ignore-prefixes",
 63 |     default=[],
 64 |     action=arghparse.CommaSeparatedValuesAppend,
 65 |     help="USE flags prefixes that won't be randomized",
 66 |     docs="""
 67 |         Comma separated USE flags prefixes that won't be randomized. This is
 68 |         useful for USE flags such as ``python_targets_``. Note that this
 69 |         doesn't affect preference, but because of specific REQUIRED_USE will
 70 |         still be changed from defaults.
 71 |     """,
 72 | )
 73 | random_use_opts = use_opts.add_mutually_exclusive_group()
 74 | random_use_opts.add_argument(
 75 |     "--use-default",
 76 |     dest="random_use",
 77 |     const="d",
 78 |     action="store_const",
 79 |     help="Prefer to use default use flags configuration",
 80 | )
 81 | random_use_opts.add_argument(
 82 |     "--use-random",
 83 |     dest="random_use",
 84 |     const="r",
 85 |     action="store_const",
 86 |     help="Turn on random use flags, with default USE_EXPAND",
 87 | )
 88 | random_use_opts.add_argument(
 89 |     "--use-expand-random",
 90 |     dest="random_use",
 91 |     const="R",
 92 |     action="store_const",
 93 |     help="Turn on random use flags, including USE_EXPAND",
 94 | )
 95 | random_use_opts.set_defaults(random_use="r")
 96 | 
 97 | packages_opts = tatt.add_argument_group("manual packages options")
 98 | packages_opts.add_argument(
 99 |     "-p",
100 |     "--packages",
101 |     metavar="TARGET",
102 |     nargs="+",
103 |     help="extended atom matching of packages",
104 | )
105 | bug_state = packages_opts.add_mutually_exclusive_group()
106 | bug_state.add_argument(
107 |     "-s",
108 |     "--stablereq",
109 |     dest="keywording",
110 |     default=None,
111 |     action="store_false",
112 |     help="Test packages for stable keywording requests",
113 | )
114 | bug_state.add_argument(
115 |     "-k",
116 |     "--keywording",
117 |     dest="keywording",
118 |     default=None,
119 |     action="store_true",
120 |     help="Test packages for keywording requests",
121 | )
122 | 
123 | template_opts = tatt.add_argument_group("template options")
124 | template_opts.add_argument(
125 |     "--template-file",
126 |     type=arghparse.existent_path,
127 |     help="Template file to use for the job script",
128 |     docs="""
129 |         Template file to use for the job script. The template file is a
130 |         Jinja template file, which can use the following variables:
131 | 
132 |         .. glossary::
133 | 
134 |             ``jobs``
135 |                 A list of jobs to be run. Each job is a tuple consisting of
136 |                 USE flags values, is a testing job, and the atom to build.
137 | 
138 |             ``report_file``
139 |                 The path to the report file.
140 | 
141 |             ``emerge_opts``
142 |                 Options to be passed to emerge invocations. Taken from
143 |                 ``--emerge-opts``.
144 | 
145 |             ``extra_env_files``
146 |                 A list of extra /etc/portage/env/ file names, to be added to
147 |                 ``package.env`` entry when testing the package. Taken from
148 |                 ``--extra-env-file``.
149 | 
150 |             ``log_dir``
151 |                 Directory to save build logs for failing tasks. Taken from
152 |                 ``--logs-dir``.
153 | 
154 |             ``cleanup_files``
155 |                 A list of files to be removed after the job script is done.
156 |     """,
157 | )
158 | template_opts.add_argument(
159 |     "--logs-dir",
160 |     default="~/logs",
161 |     help="Directory to save build logs for failing tasks",
162 | )
163 | template_opts.add_argument(
164 |     "--emerge-opts",
165 |     default="",
166 |     help="Options to be passed to emerge invocations",
167 |     docs="""
168 |         A single space separated argument, consisting of options to be passed
169 |         to ``emerge`` invocations.
170 |     """,
171 | )
172 | template_opts.add_argument(
173 |     "--extra-env-file",
174 |     default=[],
175 |     metavar="ENV_FILE",
176 |     action=arghparse.CommaSeparatedValuesAppend,
177 |     help="Extra /etc/portage/env/ file names, to be used while testing packages. Can be passed multiple times.",
178 |     docs="""
179 |         Comma separated filenames under /etc/portage/env/, which will all be
180 |         included in the package.env entry when testing the package.
181 |     """,
182 | )
183 | 
184 | portage_config = Path("/etc/portage")
185 | portage_accept_keywords = portage_config / "package.accept_keywords"
186 | portage_package_use = portage_config / "package.use"
187 | portage_package_env = portage_config / "package.env"
188 | portage_env = portage_config / "env"
189 | 
190 | 
191 | @tatt.bind_final_check
192 | def _tatt_validate(parser, namespace):
193 |     for filename in namespace.extra_env_file:
194 |         if not (env_file := portage_env / filename).exists():
195 |             parser.error(f"extra env file '{env_file}' doesn't exist")
196 | 
197 | 
198 | @tatt.bind_final_check
199 | def _validate_args(parser, namespace):
200 |     if namespace.use_combos < 0:
201 |         parser.error("number of use combos must be non-negative")
202 |     if namespace.bug is not None:
203 |         if namespace.keywording is not None:
204 |             parser.error("cannot use --bug with --keywording or --stablereq")
205 |         if namespace.packages:
206 |             parser.error("cannot use --bug with --packages")
207 |     elif not namespace.packages:
208 |         parser.error("no action requested, use --bug or --packages")
209 | 
210 |     if not namespace.test and not namespace.use_combos:
211 |         parser.error("no action requested, use --test or --use-combos")
212 | 
213 |     if namespace.packages:
214 |         arch = namespace.domain.arch
215 |         if namespace.keywording:
216 |             keywords_restrict = packages.PackageRestriction(
217 |                 "keywords",
218 |                 values.ContainmentMatch((f"~{arch}", f"-{arch}", arch), negate=True),
219 |             )
220 |         else:
221 |             keywords_restrict = packages.PackageRestriction(
222 |                 "keywords", values.ContainmentMatch((f"~{arch}", arch))
223 |             )
224 |         namespace.restrict = boolean.AndRestriction(
225 |             boolean.OrRestriction(*commandline.convert_to_restrict(namespace.packages)),
226 |             packages.PackageRestriction("properties", values.ContainmentMatch("live", negate=True)),
227 |             keywords_restrict,
228 |         )
229 | 
230 | 
231 | def _get_bugzilla_packages(namespace):
232 |     from nattka.bugzilla import BugCategory, NattkaBugzilla
233 |     from nattka.package import match_package_list
234 | 
235 |     nattka_bugzilla = NattkaBugzilla(api_key=namespace.api_key)
236 |     bug = next(iter(nattka_bugzilla.find_bugs(bugs=[namespace.bug]).values()))
237 |     namespace.keywording = bug.category == BugCategory.KEYWORDREQ
238 |     repo = namespace.domain.repos["gentoo"].raw_repo
239 |     src_repo = namespace.domain.source_repos_raw
240 |     for pkg, _ in match_package_list(repo, bug, only_new=True, filter_arch=[namespace.domain.arch]):
241 |         yield src_repo.match(pkg.versioned_atom)[0]
242 | 
243 | 
244 | def _get_cmd_packages(namespace):
245 |     repos = namespace.domain.source_repos_raw
246 |     for pkgs in pkgutils.groupby_pkg(repos.itermatch(namespace.restrict, sorter=sorted)):
247 |         pkg = max(pkgs)
248 |         yield pkg.repo.match(pkg.versioned_atom)[0]
249 | 
250 | 
251 | def _groupby_use_expand(
252 |     assignment: dict[str, bool],
253 |     use_expand_prefixes: tuple[str, ...],
254 |     domain_enabled: frozenset[str],
255 |     iuse: frozenset[str],
256 | ):
257 |     use_expand_dict: dict[str, set[str]] = defaultdict(set)
258 |     use_flags: set[str] = set()
259 |     for var, state in assignment.items():
260 |         if var not in iuse:
261 |             continue
262 |         if state == (var in domain_enabled):
263 |             continue
264 |         for use_expand in use_expand_prefixes:
265 |             if var.startswith(use_expand):
266 |                 if state:
267 |                     use_expand_dict[use_expand[:-1]].add(var.removeprefix(use_expand))
268 |                 break
269 |         else:
270 |             use_flags.add(("" if state else "-") + var)
271 |     return use_flags, use_expand_dict
272 | 
273 | 
274 | def _build_job(namespace, pkg, is_test: bool):
275 |     use_expand_prefixes = tuple(s.lower() + "_" for s in namespace.domain.profile.use_expand)
276 |     default_on_iuse = tuple(use[1:] for use in pkg.iuse if use.startswith("+"))
277 |     immutable, enabled, _disabled = namespace.domain.get_package_use_unconfigured(pkg)
278 | 
279 |     iuse = frozenset(pkg.iuse_stripped)
280 |     force_true = immutable.union(("test",) if is_test else ())
281 |     force_false = ("test",) if not is_test else ()
282 | 
283 |     if namespace.random_use == "d":
284 |         prefer_true = enabled.union(default_on_iuse)
285 |     elif namespace.random_use in "rR":
286 |         ignore_prefixes = set(namespace.ignore_prefixes)
287 |         if namespace.random_use == "r":
288 |             ignore_prefixes.update(use_expand_prefixes)
289 |         ignore_prefixes = tuple(ignore_prefixes)
290 | 
291 |         prefer_true = [
292 |             use
293 |             for use in iuse.difference(force_true, force_false)
294 |             if not use.startswith(ignore_prefixes)
295 |         ]
296 |         if prefer_true:
297 |             random.shuffle(prefer_true)
298 |             prefer_true = prefer_true[: random.randint(0, len(prefer_true) - 1)]
299 |         prefer_true.extend(
300 |             use for use in enabled.union(default_on_iuse) if use.startswith(ignore_prefixes)
301 |         )
302 | 
303 |     solutions = find_constraint_satisfaction(
304 |         pkg.required_use,
305 |         iuse.union(immutable),
306 |         force_true,
307 |         force_false,
308 |         frozenset(prefer_true),
309 |     )
310 |     for solution in solutions:
311 |         use_flags, use_expand = _groupby_use_expand(solution, use_expand_prefixes, enabled, iuse)
312 |         yield " ".join(use_flags) + " " + " ".join(
313 |             f'{var.upper()}: {" ".join(vals)}' for var, vals in use_expand.items()
314 |         )
315 | 
316 | 
317 | def _build_jobs(namespace, pkgs):
318 |     for pkg in pkgs:
319 |         for flags in islice(_build_job(namespace, pkg, False), namespace.use_combos):
320 |             yield pkg.versioned_atom, False, flags
321 | 
322 |         if namespace.test and "test" in pkg.defined_phases:
323 |             yield pkg.versioned_atom, True, next(iter(_build_job(namespace, pkg, True)))
324 | 
325 | 
326 | def _create_config_dir(directory: Path):
327 |     if not directory.exists():
328 |         directory.mkdir(parents=True)
329 |     elif not directory.is_dir():
330 |         raise NotADirectoryError(f"{directory} is not a directory")
331 | 
332 | 
333 | def _create_config_files(pkgs, job_name, is_keywording):
334 |     _create_config_dir(portage_accept_keywords)
335 |     with (res := portage_accept_keywords / f"pkgdev_tatt_{job_name}.keywords").open("w") as f:
336 |         f.write(f"# Job created by pkgdev tatt for {job_name!r}\n")
337 |         for pkg in pkgs:
338 |             f.write(f'{pkg.versioned_atom} {"**" if is_keywording else ""}\n')
339 |     yield str(res)
340 | 
341 |     _create_config_dir(portage_env)
342 |     with (res := portage_env / f"pkgdev_tatt_{job_name}_no_test").open("w") as f:
343 |         f.write(f"# Job created by pkgdev tatt for {job_name!r}\n")
344 |         f.write('FEATURES="qa-unresolved-soname-deps multilib-strict"\n')
345 |     yield str(res)
346 |     with (res := portage_env / f"pkgdev_tatt_{job_name}_test").open("w") as f:
347 |         f.write(f"# Job created by pkgdev tatt for {job_name!r}\n")
348 |         f.write('FEATURES="qa-unresolved-soname-deps multilib-strict test"\n')
349 |     yield str(res)
350 | 
351 |     _create_config_dir(portage_package_use)
352 |     (res := portage_package_use / f"pkgdev_tatt_{job_name}").mkdir(exist_ok=True)
353 |     yield str(res)
354 |     _create_config_dir(portage_package_env)
355 |     (res := portage_package_env / f"pkgdev_tatt_{job_name}").mkdir(exist_ok=True)
356 |     yield str(res)
357 | 
358 | 
359 | @tatt.bind_main_func
360 | def main(options, out, err):
361 |     if options.bug is not None:
362 |         pkgs = tuple(_get_bugzilla_packages(options))
363 |     else:
364 |         pkgs = tuple(_get_cmd_packages(options))
365 | 
366 |     if not pkgs:
367 |         return err.error("package query resulted in empty package list")
368 | 
369 |     job_name = options.job_name.format(PN=pkgs[0].package, BUGNO=options.bug or "")
370 |     cleanup_files = []
371 | 
372 |     try:
373 |         for config_file in _create_config_files(pkgs, job_name, options.keywording):
374 |             out.write("created config ", out.fg("green"), config_file, out.reset)
375 |             cleanup_files.append(config_file)
376 |     except Exception as exc:
377 |         err.error(f"failed to create config files: {exc}")
378 | 
379 |     if options.template_file:
380 |         with open(options.template_file) as output:
381 |             template = output.read()
382 |     else:
383 |         template = read_text("pkgdev.tatt", "template.sh.jinja")
384 | 
385 |     from jinja2 import Template
386 | 
387 |     if not any("test" in pkg.defined_phases for pkg in pkgs):
388 |         if not options.use_combos > 0:
389 |             return err.error(
390 |                 "no packages define a src_test, and --use-combos is not a positive integer. Cannot create any jobs, exiting..."
391 |             )
392 | 
393 |     script = Template(template, trim_blocks=True, lstrip_blocks=True).render(
394 |         jobs=list(_build_jobs(options, pkgs)),
395 |         report_file=job_name + ".report",
396 |         job_name=job_name,
397 |         log_dir=options.logs_dir,
398 |         emerge_opts=options.emerge_opts,
399 |         extra_env_files=options.extra_env_file,
400 |         cleanup_files=cleanup_files,
401 |     )
402 |     with open(script_name := job_name + ".sh", "w") as output:
403 |         output.write(script)
404 |     os.chmod(script_name, os.stat(script_name).st_mode | stat.S_IEXEC)
405 |     out.write("created script ", out.fg("green"), script_name, out.reset)
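
As a self-contained run of the _groupby_use_expand() helper above (condensed, without type hints), the sketch below uses hypothetical flag names to show how USE_EXPAND flags are split out of the plain USE flags.

from collections import defaultdict

def groupby_use_expand(assignment, use_expand_prefixes, domain_enabled, iuse):
    # condensed copy of _groupby_use_expand() above
    use_expand_dict = defaultdict(set)
    use_flags = set()
    for var, state in assignment.items():
        if var not in iuse:
            continue
        if state == (var in domain_enabled):
            continue  # matches the domain default, nothing to emit
        for use_expand in use_expand_prefixes:
            if var.startswith(use_expand):
                if state:
                    use_expand_dict[use_expand[:-1]].add(var.removeprefix(use_expand))
                break
        else:
            use_flags.add(("" if state else "-") + var)
    return use_flags, use_expand_dict

flags, expand = groupby_use_expand(
    {"doc": True, "python_targets_python3_11": True, "test": False},
    ("python_targets_",),
    frozenset({"test"}),
    frozenset({"doc", "python_targets_python3_11", "test"}),
)
print(sorted(flags))  # -> ['-test', 'doc']
print(dict(expand))   # -> {'python_targets': {'python3_11'}}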
406 | 


--------------------------------------------------------------------------------
/src/pkgdev/tatt/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pkgcore/pkgdev/ff924c6605fb8f99c07e84adbf446f3a6ff5e47b/src/pkgdev/tatt/__init__.py


--------------------------------------------------------------------------------
/src/pkgdev/tatt/template.sh.jinja:
--------------------------------------------------------------------------------
  1 | {#
  2 | Copyright (C) 2010-2022 Gentoo tatt project
  3 | https://gitweb.gentoo.org/proj/tatt.git/
  4 | 
  5 | This program is free software; you can redistribute it and/or
  6 | modify it under the terms of the GNU General Public License
  7 | as published by the Free Software Foundation; either version 2
  8 | of the License, or (at your option) any later version.
  9 | 
 10 | This program is distributed in the hope that it will be useful,
 11 | but WITHOUT ANY WARRANTY; without even the implied warranty of
 12 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 13 | GNU General Public License for more details.
 14 | #}
 15 | #!/bin/bash
 16 | 
 17 | main() {
 18 |     trap "echo 'signal captured, exiting the entire script...'; exit" SIGHUP SIGINT SIGTERM
 19 |     echo -e "USE tests started on $(date)\n" >> "{{ report_file }}"
 20 | 
 21 |     local test_ret=0
 22 | 
 23 |     {% for atom, is_test, use_flags in jobs %}
 24 |     {% if is_test %}
 25 |     TUSE="{{ use_flags }}" tatt_test_pkg '{{ atom }}' --test || test_ret=1
 26 |     {% else %}
 27 |     TUSE="{{ use_flags }}" tatt_test_pkg '{{ atom }}' || test_ret=1
 28 |     {% endif %}
 29 |     {% endfor %}
 30 | 
 31 |     exit ${test_ret}
 32 | }
 33 | 
 34 | cleanup() {
 35 |     echo "Cleaning up"
 36 |     {% for file in cleanup_files %}
 37 |     rm -v -f -r '{{ file }}'
 38 |     {% endfor %}
 39 |     rm -v -f "$0"
 40 | }
 41 | 
 42 | tatt_pkg_error() {
 43 |     local eout=${2}
 44 | 
 45 |     echo "${eout}"
 46 | 
 47 |     if [[ -n ${TUSE} ]]; then
 48 |         echo -n "USE='${TUSE}'" >> "{{ report_file }}"
 49 |     fi
 50 |     if [[ -n ${FEATURES} ]]; then
 51 |         echo -n " FEATURES='${FEATURES}'" >> "{{ report_file }}"
 52 |     fi
 53 | 
 54 |     if [[ ${eout} =~ REQUIRED_USE ]] ; then
 55 |         echo " : REQUIRED_USE not satisfied (probably) for ${1:?}" >> "{{ report_file }}"
 56 |     elif [[ ${eout} =~ USE\ changes ]] ; then
 57 |         echo " : USE dependencies not satisfied (probably) for ${1:?}" >> "{{ report_file }}"
 58 |     elif [[ ${eout} =~ keyword\ changes ]]; then
 59 |         echo " : unkeyworded dependencies (probably) for ${1:?}" >> "{{ report_file }}"
 60 |     elif [[ ${eout} =~ Error:\ circular\ dependencies: ]]; then
 61 |         echo " : circular dependencies (probably) for ${1:?}" >> "{{ report_file }}"
 62 |     elif [[ ${eout} =~ Blocked\ Packages ]]; then
 63 |         echo " : blocked packages (probably) for ${1:?}" >> "{{ report_file }}"
 64 |     else
 65 |         echo " failed for ${1:?}" >> "{{ report_file }}"
 66 |     fi
 67 | 
 68 |     local CP=${1#=}
 69 |     local BUILDDIR=/var/tmp/portage/${CP}
 70 |     local BUILDLOG=${BUILDDIR}/temp/build.log
 71 |     if [[ -s ${BUILDLOG} ]]; then
 72 |         mkdir -p "{{ log_dir }}"
 73 |         local LOGNAME=$(mktemp -p "{{ log_dir }}" "${CP/\//_}_use_XXXXX")
 74 |         cp "${BUILDLOG}" "${LOGNAME}"
 75 |         echo "    log has been saved as ${LOGNAME}" >> "{{ report_file }}"
 76 |         TESTLOGS=($(find "${BUILDDIR}/work" -iname '*test*log*'))
 77 |         if [[ {{ "${#TESTLOGS[@]}" }} -gt 0 ]]; then
 78 |             tar cf "${LOGNAME}.tar" "${TESTLOGS[@]}"
 79 |             echo "    test-suite logs have been saved as ${LOGNAME}.tar" >> "{{ report_file }}"
 80 |         fi
 81 |     fi
 82 | }
 83 | 
 84 | tatt_test_pkg() {
 85 |     local CP=${1#=}
 86 |     CP=${CP/\//_}
 87 | 
 88 |     if [[ ${2} == "--test" ]]; then
 89 |         # Do a first pass to avoid circular dependencies
 90 |         # --onlydeps should mean we're avoiding (too much) duplicate work
 91 |         USE="minimal -doc" emerge --onlydeps -q1 --with-test-deps {{ emerge_opts }} "${1:?}"
 92 | 
 93 |         if ! emerge --onlydeps -q1 --with-test-deps {{ emerge_opts }} "${1:?}"; then
 94 |             echo "merging test dependencies of ${1} failed" >> "{{ report_file }}"
 95 |             return 1
 96 |         fi
 97 |         printf "%s pkgdev_tatt_{{ job_name }}_test\n" "${1:?}" > "/etc/portage/package.env/pkgdev_tatt_{{ job_name }}/${CP}"
 98 |         local TFEATURES="${FEATURES} test"
 99 |     else
100 |         printf "%s pkgdev_tatt_{{ job_name }}_no_test\n" "${1:?}" > "/etc/portage/package.env/pkgdev_tatt_{{ job_name }}/${CP}"
101 |         local TFEATURES="${FEATURES}"
102 |     fi
103 |     {% for env in extra_env_files %}
104 |     printf "%s {{ env }}\n" "${1}" >> "/etc/portage/package.env/pkgdev_tatt_{{ job_name }}/${CP}"
105 |     {% endfor %}
106 | 
107 |     printf "%s %s\n" "${1:?}" "${TUSE}" > "/etc/portage/package.use/pkgdev_tatt_{{ job_name }}/${CP}"
108 | 
109 |     # --usepkg-exclude needs the package name, so let's extract it
110 |     # from the atom we have
111 |     local name=$(portageq pquery "${1:?}" -n)
112 | 
113 |     eout=$( emerge -1 --getbinpkg=n --usepkg-exclude="${name}" {{ emerge_opts }} "${1:?}" 2>&1 1>/dev/tty )
114 |     local RES=$?
115 | 
116 |     rm -v -f /etc/portage/package.{env,use}/pkgdev_tatt_{{ job_name }}/${CP}
117 | 
118 |     if [[ ${RES} == 0 ]] ; then
119 |         if [[ -n ${TFEATURES} ]]; then
120 |             echo -n "FEATURES='${TFEATURES}' " >> "{{ report_file }}"
121 |         fi
122 |         echo "USE='${TUSE}' succeeded for ${1:?}" >> "{{ report_file }}"
123 |     else
124 |         FEATURES="${TFEATURES}" tatt_pkg_error "${1:?}" "${eout}"
125 |         return 1
126 |     fi
127 | }
128 | 
129 | if [[ ${1} == "--clean" ]]; then
130 |     cleanup
131 | else
132 |     main
133 | fi
134 | 
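
The script derives its per-package config file names from the atom passed as ${1}: ${1#=} strips a leading "=" and ${CP/\//_} swaps the category separator for an underscore. The same mangling in Python terms (a reference sketch, not part of pkgdev):

    atom = "=cat/pkg-1"
    cp = atom.removeprefix("=")   # ${1#=}     -> "cat/pkg-1"
    cp = cp.replace("/", "_", 1)  # ${CP/\//_} -> "cat_pkg-1", the package.{env,use} file name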


--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pkgcore/pkgdev/ff924c6605fb8f99c07e84adbf446f3a6ff5e47b/tests/__init__.py


--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from pkgdev.cli import Tool
 3 | from pkgdev.scripts import pkgdev
 4 | from snakeoil.cli import arghparse
 5 | 
 6 | pytest_plugins = ["pkgcore"]
 7 | 
 8 | 
 9 | @pytest.fixture(scope="session")
10 | def tool():
11 |     """Generate a tool utility for running pkgdev."""
12 |     return Tool(pkgdev.argparser)
13 | 
14 | 
15 | @pytest.fixture
16 | def parser():
17 |     """Return a shallow copy of the main pkgdev argparser."""
18 |     return pkgdev.argparser.copy()
19 | 
20 | 
21 | @pytest.fixture
22 | def namespace():
23 |     """Return an arghparse Namespace object."""
24 |     return arghparse.Namespace()
25 | 


--------------------------------------------------------------------------------
/tests/scripts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pkgcore/pkgdev/ff924c6605fb8f99c07e84adbf446f3a6ff5e47b/tests/scripts/__init__.py


--------------------------------------------------------------------------------
/tests/scripts/test_cli.py:
--------------------------------------------------------------------------------
  1 | import textwrap
  2 | 
  3 | import pytest
  4 | from pkgdev import cli
  5 | from snakeoil.cli import arghparse
  6 | 
  7 | 
  8 | class TestConfigFileParser:
  9 |     @pytest.fixture(autouse=True)
 10 |     def _create_argparser(self, tmp_path):
 11 |         self.config_file = str(tmp_path / "config")
 12 |         self.parser = arghparse.ArgumentParser(prog="pkgdev cli_test")
 13 |         self.namespace = arghparse.Namespace()
 14 |         self.config_parser = cli.ConfigFileParser(self.parser)
 15 | 
 16 |     def test_no_configs(self):
 17 |         config = self.config_parser.parse_config(())
 18 |         assert config.sections() == []
 19 |         namespace = self.config_parser.parse_config_options(self.namespace)
 20 |         assert vars(namespace) == {}
 21 | 
 22 |     def test_ignored_configs(self):
 23 |         # nonexistent config files are ignored
 24 |         config = self.config_parser.parse_config(("foo", "bar"))
 25 |         assert config.sections() == []
 26 | 
 27 |     def test_bad_config_format_no_section(self, capsys):
 28 |         with open(self.config_file, "w") as f:
 29 |             f.write("foobar\n")
 30 |         with pytest.raises(SystemExit) as excinfo:
 31 |             self.config_parser.parse_config((self.config_file,))
 32 |         out, err = capsys.readouterr()
 33 |         assert not out
 34 |         assert "parsing config file failed: File contains no section headers" in err
 35 |         assert self.config_file in err
 36 |         assert excinfo.value.code == 2
 37 | 
 38 |     def test_bad_config_format(self, capsys):
 39 |         with open(self.config_file, "w") as f:
 40 |             f.write(
 41 |                 textwrap.dedent(
 42 |                     """
 43 |                         [DEFAULT]
 44 |                         foobar
 45 |                     """
 46 |                 )
 47 |             )
 48 |         with pytest.raises(SystemExit) as excinfo:
 49 |             self.config_parser.parse_config((self.config_file,))
 50 |         out, err = capsys.readouterr()
 51 |         assert not out
 52 |         assert "parsing config file failed: Source contains parsing errors" in err
 53 |         assert excinfo.value.code == 2
 54 | 
 55 |     def test_nonexistent_config_options(self, capsys):
 56 |         """Unknown parser arguments in config files abort with an error."""
 57 |         with open(self.config_file, "w") as f:
 58 |             f.write(
 59 |                 textwrap.dedent(
 60 |                     """
 61 |                         [DEFAULT]
 62 |                         cli_test.foo=bar
 63 |                     """
 64 |                 )
 65 |             )
 66 |         with pytest.raises(SystemExit) as excinfo:
 67 |             self.config_parser.parse_config_options(None, configs=(self.config_file,))
 68 |         out, err = capsys.readouterr()
 69 |         assert not out
 70 |         assert "failed loading config: unknown arguments: --foo=bar" in err
 71 |         assert excinfo.value.code == 2
 72 | 
 73 |     def test_config_options_other_prog(self):
 74 |         self.parser.add_argument("--foo")
 75 |         with open(self.config_file, "w") as f:
 76 |             f.write(
 77 |                 textwrap.dedent(
 78 |                     """
 79 |                         [DEFAULT]
 80 |                         other.foo=bar
 81 |                     """
 82 |                 )
 83 |             )
 84 |         namespace = self.parser.parse_args(["--foo", "foo"])
 85 |         assert namespace.foo == "foo"
 86 |         # config args don't override not matching namespace attrs
 87 |         namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
 88 |         assert namespace.foo == "foo"
 89 | 
 90 |     def test_config_options(self):
 91 |         self.parser.add_argument("--foo")
 92 |         with open(self.config_file, "w") as f:
 93 |             f.write(
 94 |                 textwrap.dedent(
 95 |                     """
 96 |                         [DEFAULT]
 97 |                         cli_test.foo=bar
 98 |                     """
 99 |                 )
100 |             )
101 |         namespace = self.parser.parse_args(["--foo", "foo"])
102 |         assert namespace.foo == "foo"
103 |         # config args override matching namespace attrs
104 |         namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
105 |         assert namespace.foo == "bar"
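
These tests pin down the config format: options live under [DEFAULT] and are namespaced as "<prog>.<option>", where prog is the subcommand taken from the parser's prog string ("pkgdev cli_test" above). A standard-library sketch of that matching (it illustrates the format only, not ConfigFileParser's internals):

    import configparser

    config = configparser.ConfigParser()
    config.read_string("[DEFAULT]\ncli_test.foo = bar\n")
    prog = "cli_test"  # last word of the parser's prog, "pkgdev cli_test"
    for key, value in config.defaults().items():
        section, _, option = key.partition(".")
        if section == prog:
            print(f"--{option}={value}")  # -> --foo=bar, re-fed through the parser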
106 | 


--------------------------------------------------------------------------------
/tests/scripts/test_pkgdev.py:
--------------------------------------------------------------------------------
 1 | import importlib
 2 | from functools import partial
 3 | from unittest.mock import patch
 4 | 
 5 | import pytest
 6 | 
 7 | from pkgdev import __title__ as project
 8 | from pkgdev.scripts import run
 9 | 
10 | 
11 | def test_script_run(capsys):
12 |     """Test regular code path for running scripts."""
13 |     script = partial(run, project)
14 | 
15 |     with patch(f"{project}.scripts.import_module") as import_module:
16 |         import_module.side_effect = ImportError("baz module doesn't exist")
17 | 
18 |         # default error path when script import fails
19 |         with patch("sys.argv", [project]):
20 |             with pytest.raises(SystemExit) as excinfo:
21 |                 script()
22 |             assert excinfo.value.code == 1
23 |             out, err = capsys.readouterr()
24 |             err = err.strip().split("\n")
25 |             assert len(err) == 3
26 |             assert err[0] == "Failed importing: baz module doesn't exist!"
27 |             assert err[1].startswith(f"Verify that {project} and its deps")
28 |             assert err[2] == "Add --debug to the commandline for a traceback."
29 | 
30 |         # running with --debug should raise an ImportError when there are issues
31 |         with patch("sys.argv", [project, "--debug"]):
32 |             with pytest.raises(ImportError):
33 |                 script()
34 |             out, err = capsys.readouterr()
35 |             err = err.strip().split("\n")
36 |             assert len(err) == 2
37 |             assert err[0] == "Failed importing: baz module doesn't exist!"
38 |             assert err[1].startswith(f"Verify that {project} and its deps")
39 | 
40 |         import_module.reset_mock()
41 | 
42 | 
43 | class TestPkgdev:
44 |     script = staticmethod(partial(run, project))
45 | 
46 |     def test_version(self, capsys):
47 |         with patch("sys.argv", [project, "--version"]):
48 |             with pytest.raises(SystemExit) as excinfo:
49 |                 self.script()
50 |             assert excinfo.value.code == 0
51 |             out, err = capsys.readouterr()
52 |             assert out.startswith(project)
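
The assertions above pin down the lazy-import behavior: a broken install prints a three-line hint and exits 1, while --debug re-raises after the first two lines. A heavily simplified sketch of an entry point with that contract (an assumed shape; the real pkgdev.scripts.run differs):

    import sys
    from importlib import import_module

    def run(project):
        try:
            script = import_module(f"{project}.scripts.{project}")
        except ImportError as exc:
            sys.stderr.write(f"Failed importing: {exc}!\n")
            sys.stderr.write(f"Verify that {project} and its deps are properly installed.\n")
            if "--debug" in sys.argv[1:]:
                raise  # keep the traceback when debugging
            sys.stderr.write("Add --debug to the commandline for a traceback.\n")
            sys.exit(1)
        sys.exit(script.main())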
53 | 


--------------------------------------------------------------------------------
/tests/scripts/test_pkgdev_bugs.py:
--------------------------------------------------------------------------------
  1 | import itertools
  2 | import os
  3 | import sys
  4 | import json
  5 | import textwrap
  6 | from types import SimpleNamespace
  7 | from unittest.mock import patch
  8 | 
  9 | import pytest
 10 | from pkgcore.ebuild.atom import atom
 11 | from pkgcore.test.misc import FakePkg
 12 | from pkgdev.scripts import pkgdev_bugs as bugs
 13 | from snakeoil.formatters import PlainTextFormatter
 14 | from snakeoil.osutils import pjoin
 15 | 
 16 | 
 17 | def mk_pkg(repo, cpvstr, maintainers, **kwargs):
 18 |     kwargs.setdefault("KEYWORDS", ["~amd64"])
 19 |     pkgdir = os.path.dirname(repo.create_ebuild(cpvstr, **kwargs))
 20 |     # stub metadata
 21 |     with open(pjoin(pkgdir, "metadata.xml"), "w") as f:
 22 |         f.write(
 23 |             textwrap.dedent(
 24 |                 f"""\
 25 |                     <?xml version="1.0" encoding="UTF-8"?>
 26 |                     <!DOCTYPE pkgmetadata SYSTEM "https://www.gentoo.org/dtd/metadata.dtd">
 27 |                     <pkgmetadata>
 28 |                         <maintainer type="person">
 29 |                             {' '.join(f'<email>{maintainer}@gentoo.org</email>' for maintainer in maintainers)}
 30 |                         </maintainer>
 31 |                     </pkgmetadata>
 32 |                 """
 33 |             )
 34 |         )
 35 | 
 36 | 
 37 | def mk_repo(repo):
 38 |     mk_pkg(repo, "cat/u-0", ["dev1"])
 39 |     mk_pkg(repo, "cat/z-0", [], RDEPEND=["cat/u", "cat/x"])
 40 |     mk_pkg(repo, "cat/v-0", ["dev2"], RDEPEND="cat/x")
 41 |     mk_pkg(repo, "cat/y-0", ["dev1"], RDEPEND=["cat/z", "cat/v"])
 42 |     mk_pkg(repo, "cat/x-0", ["dev3"], RDEPEND="cat/y")
 43 |     mk_pkg(repo, "cat/w-0", ["dev3"], RDEPEND="cat/x")
 44 | 
 45 | 
 46 | class BugsSession:
 47 |     def __init__(self):
 48 |         self.counter = iter(itertools.count(1))
 49 |         self.calls = []
 50 | 
 51 |     def __enter__(self):
 52 |         return self
 53 | 
 54 |     def __exit__(self, *_args): ...
 55 | 
 56 |     def read(self):
 57 |         return json.dumps({"id": next(self.counter)}).encode("utf-8")
 58 | 
 59 |     def __call__(self, request, *_args, **_kwargs):
 60 |         self.calls.append(json.loads(request.data))
 61 |         return self
 62 | 
 63 | 
 64 | class TestBugFiling:
 65 |     def test_bug_filing(self, repo):
 66 |         mk_repo(repo)
 67 |         session = BugsSession()
 68 |         pkg = max(repo.itermatch(atom("=cat/u-0")))
 69 |         with patch("pkgdev.scripts.pkgdev_bugs.urllib.urlopen", session):
 70 |             bugs.GraphNode(((pkg, {"*"}),)).file_bug("API", frozenset(), (), None)
 71 |         assert len(session.calls) == 1
 72 |         call = session.calls[0]
 73 |         assert call["Bugzilla_api_key"] == "API"
 74 |         assert call["summary"] == "cat/u-0: stablereq"
 75 |         assert call["assigned_to"] == "dev1@gentoo.org"
 76 |         assert not call["cc"]
 77 |         assert call["cf_stabilisation_atoms"] == "=cat/u-0 *"
 78 |         assert not call["depends_on"]
 79 | 
 80 |     def test_bug_filing_maintainer_needed(self, repo):
 81 |         mk_repo(repo)
 82 |         session = BugsSession()
 83 |         pkg = max(repo.itermatch(atom("=cat/z-0")))
 84 |         with patch("pkgdev.scripts.pkgdev_bugs.urllib.urlopen", session):
 85 |             bugs.GraphNode(((pkg, {"*"}),)).file_bug("API", frozenset(), (), None)
 86 |         assert len(session.calls) == 1
 87 |         call = session.calls[0]
 88 |         assert call["assigned_to"] == "maintainer-needed@gentoo.org"
 89 |         assert not call["cc"]
 90 | 
 91 |     def test_bug_filing_multiple_pkgs(self, repo):
 92 |         mk_repo(repo)
 93 |         session = BugsSession()
 94 |         pkgX = max(repo.itermatch(atom("=cat/x-0")))
 95 |         pkgY = max(repo.itermatch(atom("=cat/y-0")))
 96 |         pkgZ = max(repo.itermatch(atom("=cat/z-0")))
 97 |         dep = bugs.GraphNode((), 2)
 98 |         node = bugs.GraphNode(((pkgX, {"*"}), (pkgY, {"*"}), (pkgZ, {"*"})))
 99 |         node.edges.add(dep)
100 |         with patch("pkgdev.scripts.pkgdev_bugs.urllib.urlopen", session):
101 |             node.file_bug("API", frozenset(), (), None)
102 |         assert len(session.calls) == 1
103 |         call = session.calls[0]
104 |         assert call["summary"] == "cat/x-0, cat/y-0, cat/z-0: stablereq"
105 |         assert call["assigned_to"] == "dev3@gentoo.org"
106 |         assert call["cc"] == ["dev1@gentoo.org"]
107 |         assert call["cf_stabilisation_atoms"] == "=cat/x-0 *\n=cat/y-0 *\n=cat/z-0 *"
108 |         assert call["depends_on"] == [2]
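
BugsSession doubles as the patched urlopen: calling it records the request's JSON payload, and the returned context manager's read() hands back a fresh bug id. The call shape it satisfies looks roughly like this (an assumed sketch of the code under test, not pkgdev's exact request code):

    import json
    import urllib.request as urllib

    def post_bug(payload: dict) -> int:
        req = urllib.Request(
            "https://bugs.gentoo.org/rest/bug", data=json.dumps(payload).encode("utf-8")
        )
        with urllib.urlopen(req) as resp:  # replaced by BugsSession() in these tests
            return json.loads(resp.read())["id"]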
109 | 


--------------------------------------------------------------------------------
/tests/scripts/test_pkgdev_manifest.py:
--------------------------------------------------------------------------------
  1 | from functools import partial
  2 | from typing import List, Set
  3 | from unittest.mock import patch
  4 | 
  5 | import pytest
  6 | from pkgdev.scripts import run
  7 | from snakeoil.contexts import chdir
  8 | from snakeoil.osutils import pjoin
  9 | 
 10 | 
 11 | class TestPkgdevManifestParseArgs:
 12 |     def test_non_repo_cwd(self, capsys, tool):
 13 |         with pytest.raises(SystemExit) as excinfo:
 14 |             tool.parse_args(["manifest"])
 15 |         assert excinfo.value.code == 2
 16 |         out, err = capsys.readouterr()
 17 |         assert err.strip() == "pkgdev manifest: error: not in ebuild repo"
 18 | 
 19 |     @pytest.mark.skip
 20 |     def test_repo_cwd(self, repo, capsys, tool):
 21 |         repo.create_ebuild("cat/pkg-0")
 22 |         with chdir(repo.location):
 23 |             options, _ = tool.parse_args(["manifest"])
 24 |         matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
 25 |         assert matches == ["cat/pkg-0"]
 26 | 
 27 |     def test_repo_relative_pkg(self, repo, capsys, tool):
 28 |         repo.create_ebuild("cat/pkg-0")
 29 |         repo.create_ebuild("cat/newpkg-0")
 30 |         with chdir(pjoin(repo.location, "cat/pkg")):
 31 |             options, _ = tool.parse_args(["manifest", "."])
 32 |         matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
 33 |         assert matches == ["cat/pkg-0"]
 34 | 
 35 |     @pytest.mark.skip
 36 |     def test_repo_relative_category(self, repo, capsys, tool):
 37 |         repo.create_ebuild("cat/pkg-0")
 38 |         repo.create_ebuild("cat/newpkg-0")
 39 | 
 40 |         with chdir(pjoin(repo.location, "cat")):
 41 |             options, _ = tool.parse_args(["manifest", "pkg"])
 42 |         matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
 43 |         assert matches == ["cat/pkg-0"]
 44 | 
 45 |         with chdir(pjoin(repo.location, "cat")):
 46 |             options, _ = tool.parse_args(["manifest", "."])
 47 |         matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
 48 |         assert set(matches) == {"cat/pkg-0", "cat/newpkg-0"}
 49 | 
 50 |     def test_repo_relative_outside(self, tmp_path, repo, capsys, tool):
 51 |         repo.create_ebuild("cat/pkg-0")
 52 |         (ebuild := tmp_path / "pkg.ebuild").touch()
 53 |         with pytest.raises(SystemExit) as excinfo:
 54 |             with chdir(repo.location):
 55 |                 tool.parse_args(["manifest", str(ebuild)])
 56 |         assert excinfo.value.code == 2
 57 |         out, err = capsys.readouterr()
 58 |         assert (
 59 |             err.strip()
 60 |             == f"pkgdev manifest: error: {repo.repo_id!r} repo doesn't contain: {str(ebuild)!r}"
 61 |         )
 62 | 
 63 |     @pytest.mark.skip
 64 |     def test_dir_target(self, repo, capsys, tool):
 65 |         repo.create_ebuild("cat/pkg-0")
 66 |         with chdir(repo.location):
 67 |             options, _ = tool.parse_args(["manifest", pjoin(repo.location, "cat")])
 68 |         matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
 69 |         assert matches == ["cat/pkg-0"]
 70 | 
 71 |     def test_ebuild_target(self, repo, capsys, tool):
 72 |         path = repo.create_ebuild("cat/pkg-0")
 73 |         with chdir(repo.location):
 74 |             options, _ = tool.parse_args(["manifest", path])
 75 |         matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
 76 |         assert matches == ["cat/pkg-0"]
 77 | 
 78 |     def test_atom_target(self, repo, capsys, tool):
 79 |         repo.create_ebuild("cat/pkg-0")
 80 |         with chdir(repo.location):
 81 |             options, _ = tool.parse_args(["manifest", "cat/pkg"])
 82 |         matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
 83 |         assert matches == ["cat/pkg-0"]
 84 | 
 85 |     def test_if_modified_target(self, repo, make_git_repo, tool):
 86 |         def manifest_matches() -> Set[str]:
 87 |             repo.sync()
 88 |             with chdir(repo.location):
 89 |                 options, _ = tool.parse_args(["manifest", "--if-modified"])
 90 |             return {x.cpvstr for x in repo.itermatch(options.restriction)}
 91 | 
 92 |         git_repo = make_git_repo(repo.location)
 93 |         repo.create_ebuild("cat/oldpkg-0")
 94 |         git_repo.add_all("cat/oldpkg-0")
 95 | 
 96 |         # New package
 97 |         repo.create_ebuild("cat/newpkg-0")
 98 |         assert manifest_matches() == {"cat/newpkg-0"}
 99 |         git_repo.add_all("cat/newpkg-0")
100 | 
101 |         # Untracked file
102 |         ebuild_path = repo.create_ebuild("cat/newpkg-1")
103 |         assert manifest_matches() == {"cat/newpkg-1"}
104 | 
105 |         # Staged file
106 |         git_repo.add(ebuild_path, commit=False)
107 |         assert manifest_matches() == {"cat/newpkg-1"}
108 | 
109 |         # No modified files
110 |         git_repo.add_all("cat/newpkg-1")
111 |         assert manifest_matches() == set()
112 | 
113 |         # Modified file
114 |         ebuild_path = repo.create_ebuild("cat/newpkg-1", eapi=8)
115 |         assert manifest_matches() == {"cat/newpkg-1"}
116 |         git_repo.add_all("cat/newpkg-1: eapi 8")
117 | 
118 |         # Renamed file
119 |         git_repo.remove(ebuild_path, commit=False)
120 |         ebuild_path = repo.create_ebuild("cat/newpkg-2")
121 |         git_repo.add(ebuild_path, commit=False)
122 |         assert manifest_matches() == {"cat/newpkg-2"}
123 |         git_repo.add_all("cat/newpkg-2: rename")
124 | 
125 |         # Deleted file
126 |         git_repo.remove(ebuild_path, commit=False)
127 |         assert manifest_matches() == set()
128 | 
129 |         # Deleted package
130 |         ebuild_path = repo.create_ebuild("cat/newpkg-0")
131 |         git_repo.remove(ebuild_path, commit=False)
132 |         assert manifest_matches() == set()
133 | 
134 |     @pytest.mark.skip
135 |     def test_ignore_fetch_restricted(self, repo, tool):
136 |         def manifest_matches() -> List[str]:
137 |             with chdir(repo.location):
138 |                 options, _ = tool.parse_args(["manifest", "--ignore-fetch-restricted"])
139 |             return [x.cpvstr for x in repo.itermatch(options.restriction)]
140 | 
141 |         # No RESTRICT
142 |         repo.create_ebuild("cat/pkg-0")
143 |         assert manifest_matches() == ["cat/pkg-0"]
144 | 
145 |         # Not fetch RESTRICT
146 |         repo.create_ebuild("cat/pkg-0", restrict=("mirror",))
147 |         assert manifest_matches() == ["cat/pkg-0"]
148 | 
149 |         # fetch RESTRICT
150 |         repo.create_ebuild("cat/pkg-0", restrict=("fetch",))
151 |         assert manifest_matches() == []
152 | 
153 |         # Multiple RESTRICT
154 |         repo.create_ebuild("cat/pkg-0", restrict=("mirror", "fetch"))
155 |         assert manifest_matches() == []
156 | 
157 |     def test_non_repo_dir_target(self, tmp_path, repo, capsys, tool):
158 |         with pytest.raises(SystemExit) as excinfo, chdir(repo.location):
159 |             tool.parse_args(["manifest", str(tmp_path)])
160 |         assert excinfo.value.code == 2
161 |         out, err = capsys.readouterr()
162 |         assert err.startswith("pkgdev manifest: error: 'fake' repo doesn't contain:")
163 | 
164 |     def test_invalid_atom_target(self, repo, capsys, tool):
165 |         with pytest.raises(SystemExit) as excinfo, chdir(repo.location):
166 |             tool.parse_args(["manifest", "=cat/pkg"])
167 |         assert excinfo.value.code == 2
168 |         out, err = capsys.readouterr()
169 |         assert err.startswith("pkgdev manifest: error: invalid atom: '=cat/pkg'")
170 | 
171 | 
172 | class TestPkgdevManifest:
173 |     script = staticmethod(partial(run, "pkgdev"))
174 | 
175 |     @pytest.fixture(autouse=True)
176 |     def _setup(self):
177 |         self.args = ["pkgdev", "manifest"]
178 | 
179 |     def test_good_manifest(self, capsys, repo):
180 |         repo.create_ebuild("cat/pkg-0")
181 |         with (
182 |             patch("sys.argv", self.args),
183 |             pytest.raises(SystemExit) as excinfo,
184 |             chdir(repo.location),
185 |         ):
186 |             self.script()
187 |         assert excinfo.value.code == 0
188 |         out, err = capsys.readouterr()
189 |         assert out == err == ""
190 | 
191 |     def test_bad_manifest(self, capsys, repo):
192 |         repo.create_ebuild("cat/pkg-0")
193 |         repo.create_ebuild("cat/pkg-1", eapi="-1")
194 |         with (
195 |             patch("sys.argv", self.args),
196 |             pytest.raises(SystemExit) as excinfo,
197 |             chdir(repo.location),
198 |         ):
199 |             self.script()
200 |         assert excinfo.value.code == 1
201 |         out, err = capsys.readouterr()
202 |         assert not err
203 |         assert out == " * cat/pkg-1: invalid EAPI '-1'\n"
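
The walkthrough above covers every git state --if-modified must handle: new, untracked, staged, modified, renamed, and deleted ebuilds. For orientation, a rough sketch of deriving such a package set from git (illustrative only; pkgdev builds a pkgcore restriction instead, and this ignores rename records):

    import subprocess

    def modified_packages(repo_root: str) -> set[str]:
        out = subprocess.run(
            ["git", "-C", repo_root, "status", "--porcelain"],
            capture_output=True, text=True, check=True,
        ).stdout
        pkgs = set()
        for line in out.splitlines():
            path = line[3:]  # drop the two-character status code and space
            parts = path.split("/")
            if len(parts) >= 3 and parts[-1].endswith(".ebuild"):
                pkgs.add("/".join(parts[:2]))  # category/package
        return pkgs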
204 | 


--------------------------------------------------------------------------------
/tests/scripts/test_pkgdev_mask.py:
--------------------------------------------------------------------------------
  1 | import os
  2 | import sys
  3 | import textwrap
  4 | from datetime import datetime, timedelta, timezone
  5 | from functools import partial
  6 | from pathlib import Path
  7 | from unittest.mock import patch
  8 | 
  9 | import pytest
 10 | from pkgcore.ebuild.atom import atom as atom_cls
 11 | from pkgdev.scripts import run
 12 | from snakeoil.contexts import chdir, os_environ
 13 | from snakeoil.osutils import pjoin
 14 | 
 15 | 
 16 | class TestPkgdevMaskParseArgs:
 17 |     def test_non_repo_cwd(self, capsys, tool):
 18 |         with pytest.raises(SystemExit):
 19 |             tool.parse_args(["mask"])
 20 |         out, err = capsys.readouterr()
 21 |         assert err.strip() == "pkgdev mask: error: not in ebuild repo"
 22 | 
 23 |     def test_non_git_repo_cwd(self, repo, capsys, tool):
 24 |         with pytest.raises(SystemExit), chdir(repo.location):
 25 |             tool.parse_args(["mask"])
 26 |         out, err = capsys.readouterr()
 27 |         assert err.strip() == "pkgdev mask: error: not in git repo"
 28 | 
 29 |     def test_non_ebuild_git_repo_cwd(self, make_repo, git_repo, capsys, tool):
 30 |         os.mkdir(pjoin(git_repo.path, "repo"))
 31 |         repo = make_repo(pjoin(git_repo.path, "repo"))
 32 |         with pytest.raises(SystemExit), chdir(repo.location):
 33 |             tool.parse_args(["mask"])
 34 |         out, err = capsys.readouterr()
 35 |         assert err.strip() == "pkgdev mask: error: not in ebuild git repo"
 36 | 
 37 |     def test_cwd_target(self, repo, make_git_repo, capsys, tool):
 38 |         git_repo = make_git_repo(repo.location)
 39 |         # empty repo
 40 |         with pytest.raises(SystemExit), chdir(repo.location):
 41 |             tool.parse_args(["mask"])
 42 |         out, err = capsys.readouterr()
 43 |         assert err.strip() == "pkgdev mask: error: not in a package directory"
 44 | 
 45 |         # not in package dir
 46 |         repo.create_ebuild("cat/pkg-0")
 47 |         git_repo.add_all("cat/pkg-0")
 48 |         with pytest.raises(SystemExit), chdir(repo.location):
 49 |             tool.parse_args(["mask"])
 50 |         out, err = capsys.readouterr()
 51 |         assert err.strip() == "pkgdev mask: error: not in a package directory"
 52 | 
 53 |         # masking CWD package
 54 |         with chdir(pjoin(repo.location, "cat/pkg")):
 55 |             options, _ = tool.parse_args(["mask"])
 56 |         assert options.atoms == [atom_cls("cat/pkg")]
 57 | 
 58 |     def test_targets(self, repo, make_git_repo, capsys, tool):
 59 |         git_repo = make_git_repo(repo.location)
 60 | 
 61 |         # invalid atom
 62 |         with pytest.raises(SystemExit), chdir(repo.location):
 63 |             tool.parse_args(["mask", "pkg"])
 64 |         out, err = capsys.readouterr()
 65 |         assert err.strip() == "pkgdev mask: error: invalid atom: 'pkg'"
 66 | 
 67 |         # nonexistent pkg
 68 |         with pytest.raises(SystemExit), chdir(repo.location):
 69 |             tool.parse_args(["mask", "cat/nonexistent"])
 70 |         out, err = capsys.readouterr()
 71 |         assert err.strip() == "pkgdev mask: error: no repo matches: 'cat/nonexistent'"
 72 | 
 73 |         # masked pkg
 74 |         repo.create_ebuild("cat/pkg-0")
 75 |         git_repo.add_all("cat/pkg-0")
 76 |         with chdir(repo.location):
 77 |             options, _ = tool.parse_args(["mask", "cat/pkg"])
 78 |         assert options.atoms == [atom_cls("cat/pkg")]
 79 | 
 80 |     def test_email_not_rites(self, repo, make_git_repo, capsys, tool):
 81 |         git_repo = make_git_repo(repo.location)
 82 | 
 83 |         # masked pkg
 84 |         repo.create_ebuild("cat/pkg-0")
 85 |         git_repo.add_all("cat/pkg-0")
 86 |         with pytest.raises(SystemExit), chdir(repo.location):
 87 |             tool.parse_args(["mask", "--email", "cat/pkg"])
 88 |         _, err = capsys.readouterr()
 89 |         assert err.strip() == "pkgdev mask: error: last rites required for email support"
 90 | 
 91 | 
 92 | class TestPkgdevMask:
 93 |     script = staticmethod(partial(run, "pkgdev"))
 94 | 
 95 |     @pytest.fixture(autouse=True)
 96 |     def _setup(self, make_repo, make_git_repo):
 97 |         # args for running pkgdev like a script
 98 |         self.args = ["pkgdev", "mask"]
 99 |         self.repo = make_repo(arches=["amd64"])
100 |         self.git_repo = make_git_repo(self.repo.location)
101 |         self.today = datetime.now(timezone.utc)
102 | 
103 |         # add stub pkg
104 |         self.repo.create_ebuild("cat/pkg-0")
105 |         self.git_repo.add_all("cat/pkg-0")
106 | 
107 |         # create profile
108 |         self.profile_path = pjoin(self.repo.location, "profiles/arch/amd64")
109 |         os.makedirs(self.profile_path)
110 |         with open(pjoin(self.repo.location, "profiles/profiles.desc"), "w") as f:
111 |             f.write("amd64 arch/amd64 stable\n")
112 | 
113 |         self.masks_path = Path(pjoin(self.repo.location, "profiles/package.mask"))
114 | 
115 |     @property
116 |     def profile(self):
117 |         profile = list(self.repo.config.profiles)[0]
118 |         return self.repo.config.profiles.create_profile(profile)
119 | 
120 |     def test_empty_repo(self):
121 |         assert self.profile.masks == frozenset()
122 | 
123 |     def test_nonexistent_editor(self, capsys):
124 |         with (
125 |             os_environ("VISUAL", EDITOR="12345"),
126 |             patch("sys.argv", self.args + ["cat/pkg"]),
127 |             pytest.raises(SystemExit),
128 |             chdir(pjoin(self.repo.path)),
129 |         ):
130 |             self.script()
131 |         out, err = capsys.readouterr()
132 |         assert err.strip() == "pkgdev mask: error: nonexistent editor: '12345'"
133 | 
134 |     def test_nonexistent_visual(self, capsys):
135 |         with (
136 |             os_environ("EDITOR", VISUAL="12345"),
137 |             patch("sys.argv", self.args + ["cat/pkg"]),
138 |             pytest.raises(SystemExit),
139 |             chdir(pjoin(self.repo.path)),
140 |         ):
141 |             self.script()
142 |         out, err = capsys.readouterr()
143 |         assert err.strip() == "pkgdev mask: error: nonexistent editor: '12345'"
144 | 
145 |     def test_failed_editor(self, capsys):
146 |         with (
147 |             os_environ("VISUAL", EDITOR="sed -i 's///'"),
148 |             patch("sys.argv", self.args + ["cat/pkg"]),
149 |             pytest.raises(SystemExit),
150 |             chdir(pjoin(self.repo.path)),
151 |         ):
152 |             self.script()
153 |         out, err = capsys.readouterr()
154 |         assert err.strip() == "pkgdev mask: error: failed writing mask comment"
155 | 
156 |     def test_empty_mask_comment(self, capsys):
157 |         with (
158 |             os_environ("VISUAL", EDITOR="sed -i 's/#/#/'"),
159 |             patch("sys.argv", self.args + ["cat/pkg"]),
160 |             pytest.raises(SystemExit),
161 |             chdir(pjoin(self.repo.path)),
162 |         ):
163 |             self.script()
164 |         out, err = capsys.readouterr()
165 |         assert err.strip() == "pkgdev mask: error: empty mask comment"
166 | 
167 |     def test_mask_cwd(self):
168 |         with (
169 |             os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
170 |             patch("sys.argv", self.args),
171 |             pytest.raises(SystemExit),
172 |             chdir(pjoin(self.repo.path, "cat/pkg")),
173 |         ):
174 |             self.script()
175 |         assert self.profile.masks == frozenset([atom_cls("cat/pkg")])
176 | 
177 |     def test_mask_target(self):
178 |         with (
179 |             os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
180 |             patch("sys.argv", self.args + ["cat/pkg"]),
181 |             pytest.raises(SystemExit),
182 |             chdir(pjoin(self.repo.path)),
183 |         ):
184 |             self.script()
185 |         assert self.profile.masks == frozenset([atom_cls("cat/pkg")])
186 | 
187 |     def test_mask_ebuild_path(self):
188 |         with (
189 |             os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
190 |             patch("sys.argv", self.args + ["cat/pkg/pkg-0.ebuild"]),
191 |             pytest.raises(SystemExit),
192 |             chdir(pjoin(self.repo.path)),
193 |         ):
194 |             self.script()
195 |         assert self.profile.masks == frozenset([atom_cls("=cat/pkg-0")])
196 | 
197 |     def test_existing_masks(self):
198 |         self.masks_path.write_text(
199 |             textwrap.dedent(
200 |                 """\
201 |                     # Random Dev <random.dev@gentoo.org> (2021-03-24)
202 |                     # masked
203 |                     cat/masked
204 |                 """
205 |             )
206 |         )
207 | 
208 |         with (
209 |             os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
210 |             patch("sys.argv", self.args + ["=cat/pkg-0"]),
211 |             pytest.raises(SystemExit),
212 |             chdir(pjoin(self.repo.path)),
213 |         ):
214 |             self.script()
215 |         assert self.profile.masks == frozenset([atom_cls("cat/masked"), atom_cls("=cat/pkg-0")])
216 | 
217 |     def test_invalid_header(self, capsys):
218 |         self.masks_path.write_text(
219 |             textwrap.dedent(
220 |                 """\
221 |                     # Random Dev <random.dev@gentoo.org> (2022-09-09)
222 |                     #
223 |                     # Larry the Cow was here
224 |                     #
225 |                     # masked
226 |                     cat/masked
227 | 
228 |                     # Larry the Cow <larry@gentoo.org> (2022-09-09)
229 |                     #test
230 |                     # Larry the Cow wasn't here
231 |                     cat/masked2
232 |                 """
233 |             )
234 |         )
235 | 
236 |         with (
237 |             os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
238 |             patch("sys.argv", self.args + ["=cat/pkg-0"]),
239 |             pytest.raises(SystemExit),
240 |             chdir(pjoin(self.repo.path)),
241 |         ):
242 |             self.script()
243 |         _, err = capsys.readouterr()
244 |         assert "invalid mask entry header, lineno 9" in err
245 | 
246 |     def test_invalid_author(self, capsys):
247 |         for line in (
248 |             "# Random Dev <random.dev@gentoo.org>",
249 |             "# Random Dev <random.dev@gentoo.org> 2021-03-24",
252 |             "# Random Dev <random.dev@gentoo.org> (24-03-2021)",
253 |         ):
254 |             self.masks_path.write_text(
255 |                 textwrap.dedent(
256 |                     f"""\
257 |                         # Random Dev <random.dev@gentoo.org> (2021-03-24)
258 |                         # masked
259 |                         cat/masked
260 | 
261 |                         {line}
262 |                         # masked
263 |                         cat/masked2
264 |                     """
265 |                 )
266 |             )
267 | 
268 |             with (
269 |                 os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
270 |                 patch("sys.argv", self.args + ["=cat/pkg-0"]),
271 |                 pytest.raises(SystemExit),
272 |                 chdir(pjoin(self.repo.path)),
273 |             ):
274 |                 self.script()
275 |             _, err = capsys.readouterr()
276 |             assert "pkgdev mask: error: invalid author, lineno 5" in err
277 | 
278 |     def test_last_rites(self):
279 |         for rflag in ("-r", "--rites"):
280 |             for args in ([rflag], [rflag, "14"]):
281 |                 with (
282 |                     os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
283 |                     patch("sys.argv", self.args + ["cat/pkg"] + args),
284 |                     pytest.raises(SystemExit),
285 |                     chdir(pjoin(self.repo.path)),
286 |                 ):
287 |                     self.script()
288 | 
289 |                 days = 30 if len(args) == 1 else int(args[1])
290 |                 removal_date = self.today + timedelta(days=days)
291 |                 today = self.today.strftime("%Y-%m-%d")
292 |                 removal = removal_date.strftime("%Y-%m-%d")
293 |                 assert self.masks_path.read_text() == textwrap.dedent(
294 |                     f"""\
295 |                         # First Last <first.last@example.com> ({today})
296 |                         # mask comment
297 |                         # Removal on {removal}.
298 |                         cat/pkg
299 |                     """
300 |                 )
301 |                 self.masks_path.write_text("")  # Reset the contents of package.mask
302 | 
303 |     @pytest.mark.skipif(sys.platform == "darwin", reason="no xdg-email on mac os")
304 |     def test_last_rites_with_email(self, tmp_path):
305 |         output_file = tmp_path / "mail.txt"
306 |         for rflag in ("-r", "--rites"):
307 |             with (
308 |                 os_environ(
309 |                     "VISUAL", EDITOR="sed -i '1s/$/mask comment/'", MAILER=f"> {output_file} echo"
310 |                 ),
311 |                 patch("sys.argv", self.args + ["cat/pkg", rflag, "--email"]),
312 |                 pytest.raises(SystemExit),
313 |                 chdir(pjoin(self.repo.path)),
314 |             ):
315 |                 self.script()
316 |             out = output_file.read_text()
317 |             assert "mailto:gentoo-dev-announce@lists.gentoo.org" in out
318 | 
319 |             self.masks_path.write_text("")  # Reset the contents of package.mask
320 | 
321 |     @pytest.mark.skipif(sys.platform == "darwin", reason="no xdg-email on mac os")
322 |     def test_last_email_bad_mailer(self, capsys):
323 |         for rflag in ("-r", "--rites"):
324 |             with (
325 |                 os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'", MAILER="false"),
326 |                 patch("sys.argv", self.args + ["cat/pkg", rflag, "--email"]),
327 |                 pytest.raises(SystemExit),
328 |                 chdir(pjoin(self.repo.path)),
329 |             ):
330 |                 self.script()
331 |             _, err = capsys.readouterr()
332 |             assert err.strip() == "pkgdev mask: error: failed opening email composer"
333 | 
334 |     def test_mask_bugs(self):
335 |         today = self.today.strftime("%Y-%m-%d")
336 |         for bflag in ("-b", "--bug"):
337 |             for bug_nums, expected in [
338 |                 (["42"], "Bug #42."),
339 |                 (["42", "43"], "Bugs #42, #43."),
340 |                 (["42,43", "43"], "Bugs #42, #43."),
341 |             ]:
342 |                 args = []
343 |                 for bug_num in bug_nums:
344 |                     args += [bflag, bug_num]
345 |                 with (
346 |                     os_environ("VISUAL", EDITOR="sed -i '1s/$/mask comment/'"),
347 |                     patch("sys.argv", self.args + ["cat/pkg"] + args),
348 |                     pytest.raises(SystemExit),
349 |                     chdir(pjoin(self.repo.path)),
350 |                 ):
351 |                     self.script()
352 | 
353 |                 assert self.masks_path.read_text() == textwrap.dedent(
354 |                     f"""\
355 |                         # First Last <first.last@example.com> ({today})
356 |                         # mask comment
357 |                         # {expected}
358 |                         cat/pkg
359 |                     """
360 |                 )
361 |                 self.masks_path.write_text("")  # Reset the contents of package.mask
362 | 
363 |     def test_mask_bug_bad(self, capsys, tool):
364 |         for arg, expected in [("-1", "must be >= 1"), ("foo", "invalid integer value")]:
365 |             with pytest.raises(SystemExit), chdir(pjoin(self.repo.path)):
366 |                 tool.parse_args(["mask", "--bug", arg])
367 |             out, err = capsys.readouterr()
368 |             assert err.strip() == f"pkgdev mask: error: argument -b/--bug: {expected}"
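
Every entry header in these fixtures follows "# Author Name <email> (YYYY-MM-DD)", optionally followed by comment lines such as "# Removal on YYYY-MM-DD.". A sketch of the author-line validation the errors imply (an assumed pattern for illustration, not pkgdev's exact regex):

    import re

    AUTHOR_LINE = re.compile(
        r"^# (?P<author>.+) <(?P<email>\S+@\S+)> \((?P<date>\d{4}-\d{2}-\d{2})\)$"
    )

    assert AUTHOR_LINE.match("# First Last <first.last@example.com> (2021-03-24)")
    assert not AUTHOR_LINE.match("# Random Dev <random.dev@gentoo.org> 2021-03-24")  # unparenthesized date
    assert not AUTHOR_LINE.match("# Random Dev <random.dev@gentoo.org> (24-03-2021)")  # day-first date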
369 | 


--------------------------------------------------------------------------------
/tests/scripts/test_pkgdev_push.py:
--------------------------------------------------------------------------------
  1 | import os
  2 | import textwrap
  3 | from functools import partial
  4 | from io import StringIO
  5 | from unittest.mock import patch
  6 | 
  7 | import pytest
  8 | from pkgdev.scripts import run
  9 | from snakeoil.contexts import chdir
 10 | from snakeoil.osutils import pjoin
 11 | 
 12 | 
 13 | class TestPkgdevPushParseArgs:
 14 |     def test_non_repo_cwd(self, capsys, tool):
 15 |         with pytest.raises(SystemExit):
 16 |             tool.parse_args(["push"])
 17 |         out, err = capsys.readouterr()
 18 |         assert err.strip() == "pkgdev push: error: not in ebuild repo"
 19 | 
 20 |     def test_non_git_repo_cwd(self, repo, capsys, tool):
 21 |         with pytest.raises(SystemExit), chdir(repo.location):
 22 |             tool.parse_args(["push"])
 23 |         out, err = capsys.readouterr()
 24 |         assert err.strip() == "pkgdev push: error: not in git repo"
 25 | 
 26 |     def test_non_ebuild_git_repo_cwd(self, make_repo, git_repo, capsys, tool):
 27 |         os.mkdir(pjoin(git_repo.path, "repo"))
 28 |         repo = make_repo(pjoin(git_repo.path, "repo"))
 29 |         with pytest.raises(SystemExit), chdir(repo.location):
 30 |             tool.parse_args(["push"])
 31 |         out, err = capsys.readouterr()
 32 |         assert err.strip() == "pkgdev push: error: not in ebuild git repo"
 33 | 
 34 |     def test_git_push_args_passthrough(self, repo, make_git_repo, tool):
 35 |         """Unknown arguments for ``pkgdev push`` are passed to ``git push``."""
 36 |         git_repo = make_git_repo(repo.location)
 37 |         with chdir(git_repo.path):
 38 |             options, _ = tool.parse_args(["push", "origin", "main"])
 39 |             assert options.push_args == ["origin", "main"]
 40 |             options, _ = tool.parse_args(["push", "-n", "--signed"])
 41 |             assert "--dry-run" in options.push_args
 42 |             assert "--signed" in options.push_args
 43 | 
 44 |     def test_scan_args(self, repo, make_git_repo, tool):
 45 |         git_repo = make_git_repo(repo.location)
 46 |         repo.create_ebuild("cat/pkg-0")
 47 |         git_repo.add_all("cat/pkg-0", commit=False)
 48 |         # pkgcheck isn't run in verbose mode by default
 49 |         with chdir(repo.location):
 50 |             options, _ = tool.parse_args(["commit"])
 51 |         assert "-v" not in options.scan_args
 52 |         # verbosity level is passed down to pkgcheck
 53 |         with chdir(repo.location):
 54 |             options, _ = tool.parse_args(["commit", "-v"])
 55 |         assert "-v" in options.scan_args
 56 | 
 57 | 
 58 | class TestPkgdevPush:
 59 |     script = staticmethod(partial(run, "pkgdev"))
 60 | 
 61 |     @pytest.fixture(autouse=True)
 62 |     def _setup(self, tmp_path, make_repo, make_git_repo):
 63 |         self.cache_dir = str(tmp_path / "cache")
 64 |         self.scan_args = [
 65 |             "--config",
 66 |             "no",
 67 |             "--pkgcheck-scan",
 68 |             f"--config no --cache-dir {self.cache_dir}",
 69 |         ]
 70 |         # args for running pkgdev like a script
 71 |         self.args = ["pkgdev", "push"] + self.scan_args
 72 | 
 73 |         # initialize parent repo
 74 |         self.parent_git_repo = make_git_repo(bare=True)
 75 |         # initialize child repo
 76 |         child_repo_path = tmp_path / "child-repo"
 77 |         child_repo_path.mkdir()
 78 |         self.child_git_repo = make_git_repo(str(child_repo_path))
 79 |         self.child_repo = make_repo(self.child_git_repo.path)
 80 |         self.child_git_repo.add_all("initial commit")
 81 |         # create a stub pkg and commit it
 82 |         self.child_repo.create_ebuild("cat/pkg-0")
 83 |         self.child_git_repo.add_all("cat/pkg-0")
 84 |         # set up parent repo as origin and push to it
 85 |         self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
 86 |         self.child_git_repo.run(["git", "push", "-u", "origin", "main"])
 87 |         self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
 88 | 
 89 |     def test_push(self, capsys):
 90 |         self.child_repo.create_ebuild("cat/pkg-1")
 91 |         self.child_git_repo.add_all("cat/pkg-1")
 92 | 
 93 |         with (
 94 |             patch("sys.argv", self.args),
 95 |             pytest.raises(SystemExit) as excinfo,
 96 |             chdir(self.child_git_repo.path),
 97 |         ):
 98 |             self.script()
 99 |         assert excinfo.value.code == 0
100 | 
101 |     def test_failed_push(self, capsys):
102 |         self.child_repo.create_ebuild("cat/pkg-1", eapi="-1")
103 |         self.child_git_repo.add_all("cat/pkg-1")
104 | 
105 |         # failed scans don't push commits
106 |         with (
107 |             patch("sys.argv", self.args),
108 |             pytest.raises(SystemExit) as excinfo,
109 |             chdir(self.child_git_repo.path),
110 |         ):
111 |             self.script()
112 |         assert excinfo.value.code == 1
113 |         out, err = capsys.readouterr()
114 |         assert out == textwrap.dedent(
115 |             """\
116 |                 cat/pkg
117 |                   InvalidEapi: version 1: invalid EAPI '-1'
118 | 
119 |                 FAILURES
120 |                 cat/pkg
121 |                   InvalidEapi: version 1: invalid EAPI '-1'
122 |             """
123 |         )
124 | 
125 |         # but failures can be ignored to push anyway
126 |         with (
127 |             patch("sys.argv", self.args + ["--ask"]),
128 |             patch("sys.stdin", StringIO("y\n")),
129 |             pytest.raises(SystemExit) as excinfo,
130 |             chdir(self.child_git_repo.path),
131 |         ):
132 |             self.script()
133 |         assert excinfo.value.code == 0
134 | 
135 |     def test_warnings(self, capsys):
136 |         pkgdir = os.path.dirname(self.child_repo.create_ebuild("cat/pkg-1"))
137 |         os.makedirs((filesdir := pjoin(pkgdir, "files")), exist_ok=True)
138 |         with open(pjoin(filesdir, "foo"), "w") as f:
139 |             f.write("")
140 |         self.child_git_repo.add_all("cat/pkg-1")
141 | 
142 |         # scans with warnings ask for confirmation before pushing with "--ask"
143 |         with (
144 |             patch("sys.argv", self.args + ["--ask"]),
145 |             patch("sys.stdin", StringIO("n\n")),
146 |             pytest.raises(SystemExit) as excinfo,
147 |             chdir(self.child_git_repo.path),
148 |         ):
149 |             self.script()
150 |         assert excinfo.value.code == 1
151 |         out, err = capsys.readouterr()
152 |         assert "EmptyFile" in out
153 | 
154 |         # but without "--ask" it still pushes
155 |         with (
156 |             patch("sys.argv", self.args),
157 |             pytest.raises(SystemExit) as excinfo,
158 |             chdir(self.child_git_repo.path),
159 |         ):
160 |             self.script()
161 |         assert excinfo.value.code == 0
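
test_git_push_args_passthrough captures both halves of the contract: known aliases like -n are normalized to --dry-run, and anything unrecognized is forwarded to git push untouched. A minimal argparse sketch of that pattern (an assumption about the mechanism, not pkgdev's parser code):

    import argparse

    parser = argparse.ArgumentParser(prog="pkgdev push")
    parser.add_argument("-n", "--dry-run", dest="push_args",
                        action="append_const", const="--dry-run")
    options, extra = parser.parse_known_args(["-n", "--signed", "origin", "main"])
    push_args = (options.push_args or []) + extra
    print(push_args)  # ['--dry-run', '--signed', 'origin', 'main']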
162 | 


--------------------------------------------------------------------------------
/tests/scripts/test_pkgdev_showkw.py:
--------------------------------------------------------------------------------
  1 | import textwrap
  2 | from functools import partial
  3 | from typing import List, NamedTuple
  4 | from unittest.mock import patch
  5 | 
  6 | import pytest
  7 | from snakeoil.contexts import chdir, os_environ
  8 | 
  9 | from pkgdev.scripts import run
 10 | 
 11 | 
 12 | class Profile(NamedTuple):
 13 |     """Profile record used to create profiles in a repository."""
 14 | 
 15 |     path: str
 16 |     arch: str
 17 |     status: str = "stable"
 18 |     deprecated: bool = False
 19 |     defaults: List[str] = None
 20 |     eapi: str = "5"
 21 | 
 22 | 
 23 | class TestPkgdevShowkwParseArgs:
 24 |     args = ("showkw", "--config", "no")
 25 | 
 26 |     def test_missing_target(self, capsys, tool):
 27 |         with pytest.raises(SystemExit):
 28 |             tool.parse_args(self.args)
 29 |         captured = capsys.readouterr()
 30 |         assert captured.err.strip() == (
 31 |             "pkgdev showkw: error: missing target argument and not in a supported repo"
 32 |         )
 33 | 
 34 |     def test_unknown_arches(self, capsys, tool, make_repo):
 35 |         repo = make_repo(arches=["amd64"])
 36 |         with pytest.raises(SystemExit):
 37 |             tool.parse_args([*self.args, "-a", "unknown", "-r", repo.location])
 38 |         captured = capsys.readouterr()
 39 |         assert captured.err.strip() == (
 40 |             "pkgdev showkw: error: unknown arch: 'unknown' (choices: amd64)"
 41 |         )
 42 | 
 43 |     def test_no_color(self, tool, make_repo, tmp_path):
 44 |         repo = make_repo(arches=["amd64"])
 45 |         repo.create_ebuild("foo/bar-0", keywords=("x86",))
 46 | 
 47 |         (config_file := tmp_path / "pkgcheck.conf").write_text(
 48 |             textwrap.dedent(
 49 |                 """\
 50 |             [DEFAULT]
 51 |             showkw.color = true
 52 |         """
 53 |             )
 54 |         )
 55 | 
 56 |         def parse(*args):
 57 |             options, _ = tool.parse_args(
 58 |                 ["showkw", "-r", repo.location, "foo/bar", "--config", str(config_file), *args]
 59 |             )
 60 |             return options
 61 | 
 62 |         with os_environ("NOCOLOR"):
 63 |             assert parse().color is True
 64 |         with os_environ(NOCOLOR="1"):
 65 |             # NOCOLOR overrides config file
 66 |             assert parse().color is False
 67 |             # cmd line option overrides NOCOLOR
 68 |             assert parse("--color", "n").color is False
 69 |             assert parse("--color", "y").color is True
 70 | 
 71 | 
 72 | class TestPkgdevShowkw:
 73 |     script = staticmethod(partial(run, "pkgdev"))
 74 |     base_args = ("pkgdev", "showkw", "--config", "n", "--color", "n")
 75 | 
 76 |     def _create_repo(self, make_repo):
 77 |         repo = make_repo(arches=["amd64", "ia64", "mips", "x86"])
 78 |         repo.create_profiles(
 79 |             [
 80 |                 Profile("default/linux/amd64", "amd64"),
 81 |                 Profile("default/linux/x86", "x86"),
 82 |                 Profile("default/linux/ia64", "ia64", "dev"),
 83 |                 Profile("default/linux/mips", "mips", "exp"),
 84 |             ]
 85 |         )
 86 |         return repo
 87 | 
 88 |     def _run_and_parse(self, capsys, *args):
 89 |         with (
 90 |             patch("sys.argv", [*self.base_args, "--format", "presto", *args]),
 91 |             pytest.raises(SystemExit) as excinfo,
 92 |         ):
 93 |             self.script()
 94 |         assert excinfo.value.code is None
 95 |         out, err = capsys.readouterr()
 96 |         assert not err
 97 |         lines = out.split("\n")
 98 |         table_columns = [s.strip() for s in lines[1].split("|")][1:]
 99 |         return {
100 |             ver: dict(zip(table_columns, values))
101 |             for ver, *values in map(lambda s: map(str.strip, s.split("|")), lines[3:-1])
102 |         }
103 | 
104 |     def test_match(self, capsys, make_repo):
105 |         repo = self._create_repo(make_repo)
106 |         repo.create_ebuild("foo/bar-0")
107 |         with (
108 |             patch("sys.argv", [*self.base_args, "-r", repo.location, "foo/bar"]),
109 |             pytest.raises(SystemExit) as excinfo,
110 |         ):
111 |             self.script()
112 |         assert excinfo.value.code is None
113 |         out, err = capsys.readouterr()
114 |         assert not err
115 |         assert out.split("\n")[0] == "keywords for foo/bar:"
116 | 
117 |     def test_match_short_name(self, capsys, make_repo):
118 |         repo = self._create_repo(make_repo)
119 |         repo.create_ebuild("foo/bar-0")
120 |         with (
121 |             patch("sys.argv", [*self.base_args, "-r", repo.location, "bar"]),
122 |             pytest.raises(SystemExit) as excinfo,
123 |         ):
124 |             self.script()
125 |         assert excinfo.value.code is None
126 |         out, err = capsys.readouterr()
127 |         assert not err
128 |         assert out.split("\n")[0] == "keywords for foo/bar:"
129 | 
130 |     def test_match_cwd_repo(self, capsys, make_repo):
131 |         repo = self._create_repo(make_repo)
132 |         repo.create_ebuild("foo/bar-0")
133 |         with (
134 |             patch("sys.argv", [*self.base_args, "foo/bar"]),
135 |             pytest.raises(SystemExit) as excinfo,
136 |             chdir(repo.location),
137 |         ):
138 |             self.script()
139 |         assert excinfo.value.code is None
140 |         out, err = capsys.readouterr()
141 |         assert not err
142 |         assert out.split("\n")[0] == "keywords for foo/bar:"
143 | 
144 |     def test_match_cwd_pkg(self, capsys, make_repo):
145 |         repo = self._create_repo(make_repo)
146 |         repo.create_ebuild("foo/bar-0")
147 |         with (
148 |             patch("sys.argv", self.base_args),
149 |             pytest.raises(SystemExit) as excinfo,
150 |             chdir(repo.location + "/foo/bar"),
151 |         ):
152 |             self.script()
153 |         assert excinfo.value.code is None
154 |         _, err = capsys.readouterr()
155 |         assert not err
156 | 
157 |     def test_no_matches(self, capsys, make_repo):
158 |         repo = self._create_repo(make_repo)
159 |         with (
160 |             patch("sys.argv", [*self.base_args, "-r", repo.location, "foo/bar"]),
161 |             pytest.raises(SystemExit) as excinfo,
162 |         ):
163 |             self.script()
164 |         assert excinfo.value.code == 1
165 |         out, err = capsys.readouterr()
166 |         assert not out
167 |         assert err.strip() == "pkgdev showkw: no matches for 'foo/bar'"
168 | 
169 |     def test_match_stable(self, capsys, make_repo):
170 |         repo = self._create_repo(make_repo)
171 |         repo.create_ebuild("foo/bar-0", keywords=("~amd64", "~ia64", "~mips", "x86"))
172 |         res = self._run_and_parse(capsys, "-r", repo.location, "foo/bar", "--stable")
173 |         assert set(res.keys()) == {"0"}
174 |         assert ({"amd64", "ia64", "mips", "x86"} & res["0"].keys()) == {"amd64", "x86"}
175 | 
176 |     def test_match_unstable(self, capsys, make_repo):
177 |         repo = self._create_repo(make_repo)
178 |         repo.create_ebuild("foo/bar-0", keywords=("~amd64", "~ia64", "~mips", "x86"))
179 |         res = self._run_and_parse(capsys, "-r", repo.location, "foo/bar", "--unstable")
180 |         assert set(res.keys()) == {"0"}
181 |         assert {"amd64", "ia64", "mips", "x86"} <= res["0"].keys()
182 | 
183 |     def test_match_specific_arch(self, capsys, make_repo):
184 |         repo = self._create_repo(make_repo)
185 |         repo.create_ebuild("foo/bar-0", keywords=("~amd64", "~ia64", "~mips", "x86"))
186 |         res = self._run_and_parse(capsys, "-r", repo.location, "foo/bar", "--arch", "amd64")
187 |         assert set(res.keys()) == {"0"}
188 |         assert ({"amd64", "ia64", "mips", "x86"} & res["0"].keys()) == {"amd64"}
189 | 
190 |     def test_match_specific_multiple_arch(self, capsys, make_repo):
191 |         repo = self._create_repo(make_repo)
192 |         repo.create_ebuild("foo/bar-0", keywords=("~amd64", "~ia64", "~mips", "x86"))
193 |         res = self._run_and_parse(capsys, "-r", repo.location, "foo/bar", "--arch", "amd64,mips")
194 |         assert set(res.keys()) == {"0"}
195 |         assert ({"amd64", "ia64", "mips", "x86"} & res["0"].keys()) == {"amd64", "mips"}
196 | 
197 |     def test_correct_keywords_status(self, capsys, make_repo):
198 |         repo = self._create_repo(make_repo)
199 |         repo.create_ebuild("foo/bar-0", keywords=("amd64", "~ia64", "~mips", "x86"))
200 |         repo.create_ebuild("foo/bar-1", keywords=("~amd64", "-mips", "~x86"))
201 |         repo.create_ebuild("foo/bar-2", keywords=("-*", "amd64", "-x86"), eapi=8, slot=2)
202 |         res = self._run_and_parse(capsys, "-r", repo.location, "foo/bar")
203 |         assert set(res.keys()) == {"0", "1", "2"}
204 |         assert dict(amd64="+", ia64="~", mips="~", x86="+", slot="0").items() <= res["0"].items()
205 |         assert dict(amd64="~", ia64="o", mips="-", x86="~", slot="0").items() <= res["1"].items()
206 |         assert (
207 |             dict(amd64="+", ia64="*", mips="*", x86="-", slot="2", eapi="8").items()
208 |             <= res["2"].items()
209 |         )
210 | 
211 |     @pytest.mark.parametrize(
212 |         ("arg", "expected"),
213 |         (
214 |             pytest.param("--stable", {"amd64", "x86"}, id="stable"),
215 |             pytest.param("--unstable", {"amd64", "ia64", "mips", "x86"}, id="unstable"),
216 |             pytest.param("--only-unstable", {"ia64", "mips"}, id="only-unstable"),
217 |         ),
218 |     )
219 |     def test_collapse(self, capsys, make_repo, arg, expected):
220 |         repo = self._create_repo(make_repo)
221 |         repo.create_ebuild("foo/bar-0", keywords=("amd64", "~ia64", "~mips", "~x86"))
222 |         repo.create_ebuild("foo/bar-1", keywords=("~amd64", "~ia64", "~mips", "x86"))
223 |         with (
224 |             patch("sys.argv", [*self.base_args, "-r", repo.location, "foo/bar", "--collapse", arg]),
225 |             pytest.raises(SystemExit) as excinfo,
226 |         ):
227 |             self.script()
228 |         out, err = capsys.readouterr()
229 |         assert excinfo.value.code is None
230 |         assert not err
231 |         arches = set(out.split("\n")[0].split())
232 |         assert arches == expected
233 | 


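A note on the status cells asserted in test_correct_keywords_status above: the
legend below is inferred purely from those assertions, not from pkgdev's own
documentation, so treat it as a sketch.

    # Keyword status symbols as exercised by test_correct_keywords_status;
    # mapping inferred from the test data alone.
    KEYWORD_STATUS = {
        "+": "stable (arch)",
        "~": "testing (~arch)",
        "-": "explicitly disabled (-arch)",
        "*": "disabled via a -* keyword",
        "o": "no keyword for the arch",
    }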
--------------------------------------------------------------------------------
/tests/test_git.py:
--------------------------------------------------------------------------------
 1 | import subprocess
 2 | from unittest.mock import patch
 3 | 
 4 | import pytest
 5 | from snakeoil.cli.exceptions import UserException
 6 | from snakeoil.contexts import chdir
 7 | from pkgdev import git
 8 | 
 9 | 
10 | class TestGitRun:
11 |     def test_git_missing(self):
12 |         with patch("subprocess.run") as git_run:
13 |             git_run.side_effect = FileNotFoundError("no such file 'git'")
14 |             with pytest.raises(UserException, match="no such file 'git'"):
15 |                 git.run("commit")
16 | 
17 |     def test_failed_run(self):
18 |         with patch("subprocess.run") as git_run:
19 |             git_run.side_effect = subprocess.CalledProcessError(1, "git commit")
20 |             with pytest.raises(git.GitError):
21 |                 git.run("commit")
22 | 
23 |     def test_successful_run(self, git_repo):
24 |         with chdir(git_repo.path):
25 |             p = git.run("rev-parse", "--abbrev-ref", "HEAD", stdout=subprocess.PIPE)
26 |         assert p.stdout.strip() == "main"
27 | 


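The TestGitRun cases above pin down the contract of pkgdev.git.run: a missing
git binary surfaces as snakeoil's UserException, a failing command raises
git.GitError, and a successful run returns the CompletedProcess with text
output. A minimal sketch consistent with those assertions follows; the real
wrapper lives in src/pkgdev/git.py, and run_sketch, the local GitError
stand-in, and the check=True default are illustrative assumptions.

    import subprocess
    from snakeoil.cli.exceptions import UserException

    class GitError(Exception):
        """Stand-in for pkgdev.git.GitError in this sketch."""

    def run_sketch(*args, **kwargs):
        kwargs.setdefault("check", True)  # assumption: non-zero exits raise
        try:
            # text=True: test_successful_run compares stdout against a str
            return subprocess.run(["git", *args], text=True, **kwargs)
        except FileNotFoundError as exc:
            # missing binary -> user-facing error (test_git_missing)
            raise UserException(str(exc))
        except subprocess.CalledProcessError as exc:
            # failed command -> GitError (test_failed_run)
            raise GitError(str(exc)) from exc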
--------------------------------------------------------------------------------
/tests/test_mangle.py:
--------------------------------------------------------------------------------
  1 | import os
  2 | import multiprocessing
  3 | import re
  4 | import signal
  5 | from functools import partial
  6 | from unittest.mock import patch
  7 | 
  8 | from pkgdev.mangle import Mangler
  9 | from pkgdev.scripts.pkgdev_commit import Change
 10 | import pytest
 11 | from snakeoil.cli.exceptions import UserException
 12 | 
 13 | 
 14 | def fake_change(s):
 15 |     return Change("/repo", "A", str(s))
 16 | 
 17 | 
 18 | class TestMangler:
 19 |     def test_nonexistent_file(self, tmp_path):
 20 |         path = tmp_path / "nonexistent"
 21 |         assert list(Mangler([fake_change(path)])) == []
 22 | 
 23 |     def test_empty_file(self, tmp_path):
 24 |         path = tmp_path / "empty"
 25 |         path.touch()
 26 |         assert list(Mangler([fake_change(path)])) == []
 27 | 
 28 |     def test_skipped_file(self, tmp_path):
 29 |         paths = [(tmp_path / x) for x in ("file", "file.patch")]
 30 | 
 31 |         for p in paths:
 32 |             p.write_text("# comment")
 33 |         # skip patch files
 34 |         skip_regex = re.compile(r".+\.patch$")
 35 |         mangled_paths = set(Mangler(map(fake_change, paths), skip_regex=skip_regex))
 36 |         assert mangled_paths == {str(tmp_path / "file")}
 37 | 
 38 |         for p in paths:
 39 |             p.write_text("# comment")
 40 |         # don't skip any files
 41 |         mangled_paths = set(Mangler(map(fake_change, paths)))
 42 |         assert mangled_paths == set(map(str, paths))
 43 | 
 44 |     def test_nonmangled_file(self, tmp_path):
 45 |         path = tmp_path / "file"
 46 |         path.write_text("# comment\n")
 47 |         assert list(Mangler([fake_change(path)])) == []
 48 | 
 49 |     def test_mangled_file(self, tmp_path):
 50 |         path = tmp_path / "file"
 51 |         path.write_text("# comment")
 52 |         assert list(Mangler([fake_change(path)])) == [str(path)]
 53 |         assert path.read_text() == "# comment\n"
 54 | 
 55 |     def test_iterator_exceptions(self, tmp_path):
 56 |         """Test parallelized iterator against unhandled exceptions."""
 57 |         path = tmp_path / "file"
 58 |         path.write_text("# comment\n")
 59 | 
 60 |         def _mangle_func(self, data):
 61 |             raise Exception("func failed")  # simulate an unhandled error in a worker
 62 | 
 63 |         with patch("pkgdev.mangle.Mangler._mangle", _mangle_func):
 64 |             with pytest.raises(UserException, match="Exception: func failed"):
 65 |                 list(Mangler([fake_change(path)]))
 66 | 
 67 |     def test_sigint_handling(self, tmp_path):
 68 |         """Verify SIGINT is properly handled by the parallelized pipeline."""
 69 |         path = tmp_path / "file"
 70 |         path.write_text("# comment\n")
 71 | 
 72 |         def run(queue):
 73 |             """Mangler run in a separate process that gets interrupted."""
 74 |             import sys
 75 |             import time
 76 |             from unittest.mock import patch
 77 | 
 78 |             from pkgdev.mangle import Mangler
 79 | 
 80 |             def sleep():
 81 |                 """Notify testing process then sleep."""
 82 |                 queue.put("ready")
 83 |                 time.sleep(100)
 84 | 
 85 |             with patch("pkgdev.mangle.Mangler.__iter__") as fake_iter:
 86 |                 fake_iter.side_effect = partial(sleep)  # block inside __iter__ until SIGINT
 87 |                 try:
 88 |                     iter(Mangler([fake_change(path)]))
 89 |                 except KeyboardInterrupt:
 90 |                     queue.put(None)
 91 |                     sys.exit(0)
 92 |                 queue.put(None)
 93 |                 sys.exit(1)
 94 | 
 95 |         mp_ctx = multiprocessing.get_context("fork")
 96 |         queue = mp_ctx.SimpleQueue()
 97 |         p = mp_ctx.Process(target=run, args=(queue,))
 98 |         p.start()
 99 |         # wait for pipeline object to be fully initialized then send SIGINT
100 |         for _ in iter(queue.get, None):
101 |             os.kill(p.pid, signal.SIGINT)
102 |             p.join()
103 |             assert p.exitcode == 0
104 | 


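Taken together, the Mangler tests above document its interface: construct it
with an iterable of Change objects (repo root, git status, path), optionally
pass skip_regex to exclude files, and iterate it to collect the paths it
actually rewrote (the one mangle visible here is appending a missing trailing
newline). A usage sketch restating only what the tests assert; the paths are
hypothetical:

    import re

    from pkgdev.mangle import Mangler
    from pkgdev.scripts.pkgdev_commit import Change

    changes = [Change("/repo", "A", "/repo/cat/pkg/pkg-0.ebuild")]
    skip = re.compile(r".+\.patch$")  # same exclusion as test_skipped_file
    # Iterating yields paths Mangler modified; skipped, missing, empty, and
    # already well-formed files yield nothing.
    mangled = set(Mangler(changes, skip_regex=skip))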
--------------------------------------------------------------------------------