├── .editorconfig
├── .flake8
├── .github
│   ├── FUNDING.yml
│   ├── dependabot.yml
│   ├── linters
│   │   └── .jscpd.json
│   └── workflows
│       ├── awesomebot.yml
│       └── mega-linter.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .trivyignore
├── CODE_OF_CONDUCT.md
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── bin
│   └── jc
├── hooks
│   ├── autohook.sh
│   ├── post-checkout
│   │   └── 001-clean-up-pyc-and-pyo-files
│   ├── pre-commit
│   │   ├── 001-format-python-files-with-black
│   │   └── 002-clean-up-pyc-and-pyo-files
│   └── scripts
│       ├── clean-up-pyc-and-pyo-files
│       └── format-python-files-with-black
├── jira-commands.plugin.zsh
├── jira_commands
│   ├── __init__.py
│   ├── cli
│   │   ├── __init__.py
│   │   ├── common.py
│   │   ├── crudops.py
│   │   ├── jc.py
│   │   ├── jql.py
│   │   ├── labels.py
│   │   ├── list.py
│   │   ├── map_extractor.py
│   │   ├── subtasks.py
│   │   └── vivisect.py
│   ├── jira.py
│   └── utils.py
├── poetry.lock
├── pyproject.toml
└── tests
    ├── __init__.py
    └── test_jira_commands.py

/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig is awesome: https://EditorConfig.org
2 |
3 | # top-most EditorConfig file
4 | root = true
5 |
6 | # Unix-style newlines with a newline ending every file
7 | [*]
8 | end_of_line = lf
9 | insert_final_newline = true
10 |
11 | [*.json]
12 | trim_trailing_whitespace = true
13 |
14 | [*.py]
15 | trim_trailing_whitespace = true
16 |
17 | [*.{yaml,yml}]
18 | trim_trailing_whitespace = true
19 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 160
3 | # E203 = Whitespace before :
4 | extend-ignore = E203
5 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: unixorn # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: unixorn # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: unixorn
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | custom: https://www.redbubble.com/people/unixorn/shop?asc=u
13 | # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Use `allow` to specify which dependencies to maintain
3 |
4 | version: 2
5 | updates:
6 | - package-ecosystem: "github-actions"
7 | directory: "/"
8 | schedule:
9 | interval: "weekly"
10 |
11 | - package-ecosystem: "pip"
12 | directory: "/"
13 | schedule:
14 | interval: "weekly"
15 |
--------------------------------------------------------------------------------
/.github/linters/.jscpd.json:
--------------------------------------------------------------------------------
1 | {
2 | "hasModules": false,
3 | "threshold": 0,
4 | "configurations": {
5 | "reporters": ["html", "verbose"],
6 | "format": ["csharp", "typescript", "javascript", "java", "cpp",
"c"], 7 | "ignore": [ 8 | "**/.github/**", 9 | "**/*.yml", 10 | "**/github/workspace/.github/workflows/**", 11 | "**/github/workspace/.github/linters/**", 12 | "jira_commands/cli/*.py" 13 | ], 14 | "min-tokens": 200, 15 | "absolute": true 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /.github/workflows/awesomebot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Check links in README.md 3 | 4 | on: 5 | push: 6 | branches: ['*'] 7 | pull_request: 8 | branches: ['*'] 9 | 10 | jobs: 11 | build: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v4 17 | - uses: docker://dkhamsing/awesome_bot:latest 18 | with: 19 | args: /github/workspace/README.md --allow-timeout --allow-dupe --request-delay 1 --allow-redirect --white-list https://img.shields.io,https://jira.example.com 20 | -------------------------------------------------------------------------------- /.github/workflows/mega-linter.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ########################### 3 | ########################### 4 | ## Linter GitHub Actions ## 5 | ########################### 6 | ########################### 7 | name: Lint Code Base 8 | 9 | # 10 | # Documentation: 11 | # https://help.github.com/en/articles/workflow-syntax-for-github-actions 12 | # 13 | 14 | ############################# 15 | # Start the job on all push # 16 | ############################# 17 | on: 18 | push: 19 | branches-ignore: [main] 20 | # Remove the line above to run when pushing to main 21 | pull_request: 22 | branches: [main] 23 | 24 | concurrency: 25 | group: ${{ github.ref }}-${{ github.workflow }} 26 | cancel-in-progress: true 27 | 28 | ############### 29 | # Set the Job # 30 | ############### 31 | jobs: 32 | build: 33 | # Name the Job 34 | name: Megalint Code Base 35 | # Set the agent to run on 36 | runs-on: ubuntu-latest 37 | 38 | ################## 39 | # Load all steps # 40 | ################## 41 | steps: 42 | ########################## 43 | # Checkout the code base # 44 | ########################## 45 | - name: Checkout Code 46 | uses: actions/checkout@v4 47 | with: 48 | # Full git history is needed to get a proper list of changed files within `super-linter` 49 | fetch-depth: 0 50 | 51 | ################################ 52 | # Run Linter against code base # 53 | ################################ 54 | - name: Lint Code Base 55 | uses: megalinter/megalinter@v8 56 | env: 57 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 58 | DEFAULT_BRANCH: main 59 | VALIDATE_ALL_CODEBASE: false 60 | DISABLE: COPYPASTE,SPELL 61 | DISABLE_LINTERS: PYTHON_MYPY,PYTHON_PYRIGHT,MARKDOWN_MARKDOWN_LINK_CHECK,REPOSITORY_TRIVY,REPOSITORY_CHECKOV 62 | ACTION_ACTIONLINT_DISABLE_ERRORS: true 63 | DOCKERFILE_HADOLINT_DISABLE_ERRORS: true 64 | PYTHON_BANDIT_DISABLE_ERRORS: true 65 | PYTHON_PYLINT_ARGUMENTS: --disable=F0401 66 | PYTHON_RUFF_DISABLE_ERRORS: true 67 | REPOSITORY_CHECKOV_DISABLE_ERRORS: true 68 | REPOSITORY_KICS_DISABLE_ERRORS: true 69 | REPOSITORY_TRUFFLEHOG_DISABLE_ERRORS: true 70 | VALIDATE_PYTHON_ISORT: false 71 | 72 | # Upload Mega-Linter artifacts. 
They will be available on Github action page "Artifacts" section 73 | - name: Archive production artifacts 74 | if: ${{ success() }} || ${{ failure() }} 75 | uses: actions/upload-artifact@v4 76 | with: 77 | name: Mega-Linter reports 78 | path: | 79 | report 80 | mega-linter.log 81 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Editor scraps 2 | .idea 3 | .vscode 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # Ignore ZS-specific files 11 | zscaler 12 | 13 | # Ignore vim scratch files 14 | .*.un~ 15 | 16 | 17 | # C extensions 18 | *.so 19 | 20 | # Distribution / packaging 21 | .Python 22 | build/ 23 | develop-eggs/ 24 | dist/ 25 | downloads/ 26 | eggs/ 27 | .eggs/ 28 | lib/ 29 | lib64/ 30 | parts/ 31 | sdist/ 32 | var/ 33 | wheels/ 34 | pip-wheel-metadata/ 35 | share/python-wheels/ 36 | *.egg-info/ 37 | .installed.cfg 38 | *.egg 39 | MANIFEST 40 | 41 | # PyInstaller 42 | # Usually these files are written by a python script from a template 43 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 44 | *.manifest 45 | *.spec 46 | 47 | # Installer logs 48 | pip-log.txt 49 | pip-delete-this-directory.txt 50 | 51 | # Unit test / coverage reports 52 | htmlcov/ 53 | .tox/ 54 | .nox/ 55 | .coverage 56 | .coverage.* 57 | .cache 58 | nosetests.xml 59 | coverage.xml 60 | *.cover 61 | *.py,cover 62 | .hypothesis/ 63 | .pytest_cache/ 64 | 65 | # Translations 66 | *.mo 67 | *.pot 68 | 69 | # Django stuff: 70 | *.log 71 | local_settings.py 72 | db.sqlite3 73 | db.sqlite3-journal 74 | 75 | # Flask stuff: 76 | instance/ 77 | .webassets-cache 78 | 79 | # Scrapy stuff: 80 | .scrapy 81 | 82 | # Sphinx documentation 83 | docs/_build/ 84 | 85 | # PyBuilder 86 | target/ 87 | 88 | # Jupyter Notebook 89 | .ipynb_checkpoints 90 | 91 | # IPython 92 | profile_default/ 93 | ipython_config.py 94 | 95 | # pyenv 96 | .python-version 97 | 98 | # pipenv 99 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 100 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 101 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 102 | # install all needed dependencies. 103 | #Pipfile.lock 104 | 105 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 106 | __pypackages__/ 107 | 108 | # Celery stuff 109 | celerybeat-schedule 110 | celerybeat.pid 111 | 112 | # SageMath parsed files 113 | *.sage.py 114 | 115 | # Environments 116 | .env 117 | .venv 118 | env/ 119 | venv/ 120 | ENV/ 121 | env.bak/ 122 | venv.bak/ 123 | 124 | # Spyder project settings 125 | .spyderproject 126 | .spyproject 127 | 128 | # Rope project settings 129 | .ropeproject 130 | 131 | # mkdocs documentation 132 | /site 133 | 134 | # mypy 135 | .mypy_cache/ 136 | .dmypy.json 137 | dmypy.json 138 | 139 | # Pyre type checker 140 | .pyre/ 141 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: local 5 | hooks: 6 | - id: clean-up-pyc-and-pyo-files 7 | name: Scrub all .pyc and .pyo files before committing 8 | language: script 9 | entry: ./hooks/scripts/clean-up-pyc-and-pyo-files 10 | 11 | - repo: https://github.com/astral-sh/ruff-pre-commit 12 | # Ruff version. 13 | rev: 'v0.12.2' 14 | hooks: 15 | - id: ruff 16 | 17 | - repo: https://github.com/psf/black 18 | rev: "25.1.0" 19 | hooks: 20 | - id: black 21 | 22 | - repo: https://github.com/pre-commit/pre-commit-hooks 23 | rev: v5.0.0 24 | hooks: 25 | - id: check-added-large-files 26 | - id: check-docstring-first 27 | - id: check-executables-have-shebangs 28 | - id: check-merge-conflict 29 | - id: check-shebang-scripts-are-executable 30 | - id: check-symlinks 31 | - id: debug-statements 32 | - id: end-of-file-fixer 33 | - id: trailing-whitespace 34 | -------------------------------------------------------------------------------- /.trivyignore: -------------------------------------------------------------------------------- 1 | # We don't run as a daemon so we don't need a healthcheck in our Dockerfile 2 | DS026 3 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## TL;DR 4 | 5 | Don't be an asshole. I'm fine with losing contributions from smart assholes. 6 | 7 | ## Our Pledge 8 | 9 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
10 | 11 | ## Our Standards 12 | 13 | Examples of behavior that contributes to creating a positive environment include: 14 | 15 | * Using welcoming and inclusive language 16 | * Being respectful of differing viewpoints and experiences 17 | * Gracefully accepting constructive criticism 18 | * Focusing on what is best for the community 19 | * Showing empathy towards other community members 20 | 21 | Examples of unacceptable behavior by participants include: 22 | 23 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 24 | * Trolling, insulting/derogatory comments, and personal or political attacks 25 | * Public or private harassment 26 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 27 | * Other conduct which could reasonably be considered inappropriate in a professional setting 28 | 29 | ## Our Responsibilities 30 | 31 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 32 | 33 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 34 | 35 | ## Scope 36 | 37 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 38 | 39 | ## Enforcement 40 | 41 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at jpb@unixorn.net. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 42 | 43 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
44 | 45 | ## Attribution 46 | 47 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 48 | 49 | [homepage]: http://contributor-covenant.org 50 | [version]: http://contributor-covenant.org/version/1/4/ 51 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10-slim 2 | ARG application_version=0.0 3 | LABEL maintainer="Joe Block " 4 | LABEL description="jira-commands tooling on a debian bullseye base" 5 | LABEL version=${application_version} 6 | 7 | RUN mkdir -p /data && mkdir -p /config 8 | RUN apt-get update && \ 9 | apt-get install -y apt-utils ca-certificates --no-install-recommends && \ 10 | apt-get upgrade -y --no-install-recommends && \ 11 | update-ca-certificates && \ 12 | apt-get install -y --no-install-recommends python3-pip python3-dev && \ 13 | rm -fr /tmp/* /var/lib/apt/lists/* 14 | 15 | COPY dist/*.whl /data 16 | RUN pip install --no-cache-dir --disable-pip-version-check /data/*.whl 17 | 18 | # Use bash -l so that we pick up the REQUESTS_CA_BUNDLE value from 19 | # /etc/profile.d/python-enable-all-ssl-certs.sh 20 | CMD ["bash", "-l"] 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | h: help 2 | 3 | .PHONY: all clean format help image test tests f h i t 4 | 5 | help: 6 | @echo "Options:" 7 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' 8 | 9 | f: format 10 | t: test 11 | i: image 12 | image: local ## Make a docker image that only supports the architecture we're running on for quick testing 13 | 14 | format: format_code ## Reformat our .py files with black 15 | 16 | format_code: 17 | black . 18 | 19 | tests: test ## Run nose tests 20 | test: 21 | nosetests 22 | 23 | verbose_tests: verbose_test ## Run nose tests with verbose enabled 24 | verbose_test: 25 | nosetests -v 26 | 27 | wheel: clean format requirements.txt ## Make a wheel file 28 | poetry build 29 | 30 | local: wheel requirements.txt 31 | docker build --load -t ${USER}/jira-commands --build-arg -f Dockerfile.testing --progress plain . 32 | 33 | multiimage: wheel ## Make a multi-architecture docker image 34 | docker buildx build --platform linux/arm64,linux/amd64 --push -t unixorn/jira-commands:${MODULE_VERSION} --build-arg application_version=${MODULE_VERSION} . 35 | make local 36 | 37 | clean: format ## Clean up our checkout 38 | rm -fv dist/* 39 | hooks/scripts/clean-up-pyc-and-pyo-files 40 | 41 | multi: 42 | make multiimage 43 | 44 | requirements.txt: poetry.lock Makefile 45 | poetry export -o requirements.txt --without-hashes 46 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # jira-commands 2 | 3 | [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/license/apache2-0-php/) 4 | [![Build Status](https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Funixorn%2Fjira-commands%2Fbadge%3Fref%3Dmain&style=plastic)](https://actions-badge.atrox.dev/unixorn/jira-commands/goto?ref=main) 5 | ![Megalinter](https://github.com/unixorn/jira-commands/actions/workflows/mega-linter.yml/badge.svg) 6 | ![PyPI - Format](https://img.shields.io/pypi/format/jira-commands?style=plastic) 7 | 8 | 9 | 10 | ## Table of Contents 11 | 12 | - [Scripts](#scripts) 13 | - [Configuration](#configuration) 14 | - [Basic Authentication](#basic-authentication) 15 | - [OAuth Authentication](#oauth-authentication) 16 | - [PAT authentication](#pat-authentication) 17 | - [Installation](#installation) 18 | - [Run via docker / nerdctl](#run-via-docker--nerdctl) 19 | - [Direct pip install](#direct-pip-install) 20 | - [ZSH plugin](#zsh-plugin) 21 | - [zgenom](#zgenom) 22 | - [Antigen](#antigen) 23 | - [oh-my-zsh](#oh-my-zsh) 24 | 25 | 26 | 27 | Some command-line tools for interacting with JIRA. 28 | 29 | ## Scripts 30 | 31 | All of these scripts support `--help` to get a detailed list of command line options. 32 | 33 | | Name | Description | 34 | | -----------------------------| ----------------------------------------------------- | 35 | | `jc` | Main driver. Will run all the other commands inside a docker container for you. | 36 | | `jc add label` / `jc label add` | Add a label to a ticket | 37 | | `jc assign subtasks` / `jc assign ticket subtasks` | Assign a ticket to someone. | 38 | | `jc assign ticket` / `jc ticket assign` | Assign a ticket to someone. 
| 39 | | `jc close subtasks` / `jc close ticket subtasks` | Close all of a ticket's subtasks |
40 | | `jc close ticket` / `jc ticket close` | Close a ticket |
41 | | `jc comment on subtasks` / `jc comment on ticket subtasks` | Add identical comment to all of a ticket's subtasks |
42 | | `jc comment on ticket` / `jc ticket comment` | Comment on a ticket |
43 | | `jc create ticket` / `jc ticket create` | Create a ticket. |
44 | | `jc custom field allowed values` | List a custom field's allowed values since JIRA isn't forthcoming about them. |
45 | | `jc examine ticket` / `jc ticket examine` | Detailed dump of a ticket and all its custom field names |
46 | | `jc extract customfield mappings` | Extract the custom field mappings from an issue into a file |
47 | | `jc get label` / `jc list labels` | List labels on a ticket |
48 | | `jc get link types` | Prints the names of all link types defined on your JIRA instance. |
49 | | `jc get priority ids` | Prints the names of all ticket priorities defined on your JIRA instance. |
50 | | `jc label remove` / `jc remove label` | Remove a label from a ticket |
51 | | `jc link tickets` / `jc ticket link` | Link two tickets. Use `jc get link types` to see what link names are defined on your JIRA server. Case matters. |
52 | | `jc list project tickets` | List open tickets in a given JIRA project |
53 | | `jc list subtasks` | List all of a ticket's subtasks |
54 | | `jc list ticket transitions` / `jc ticket transition list` | See the available transitions for a given ticket. |
55 | | `jc transition ticket to` / `jc ticket transition set` | Transition a ticket to another state. Use `jc list ticket transitions` to see which are available |
56 | | `jc vivisect ticket` / `jc ticket vivisect` | Detailed dump of a ticket to find out all the custom field names and other innards. |
57 | | `jc transition subtasks` / `jc transition ticket subtasks` | Transition all of a ticket's subtasks to a specific state |
58 |
59 | The `jc` program is the main driver script and will find the subcommands, so you can do `jc ticket comment --ticket ABC-123 --comment 'foo bar baz'` and it will find the `jc-ticket-comment` script and run it with the `--ticket` and `--comment` arguments.
60 |
61 | If you're using the docker method, `jc` will automatically run the subcommands inside a container for you. If you've installed via pip, it'll find the commands where they were installed in your `$PATH`.
62 |
63 | ## Configuration
64 |
65 | The `jc` commands all read settings from `~/.jira-commands/jira.yaml`. Settings in the file can be overridden by specifying command-line options.
66 |
67 | ### Basic Authentication
68 |
69 | I'm setting my username and JIRA server in the example configuration file below. The tools will ask for my password when I run them.
70 |
71 | ```yaml
72 | jira_server: https://jira.example.com
73 | username: YOUR_JIRA_USER
74 | ```
75 |
76 | You can specify a `password` key but it's a terrible idea.
77 |
78 | ### OAuth Authentication
79 |
80 | Here's an example settings file for OAuth authentication. Add `--auth=OAUTH` to use OAuth instead of basic authentication.
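Any of the subcommands will accept the authentication flag; a hypothetical invocation might look like `jc ticket examine --ticket ABC-123 --auth=OAUTH`.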
81 |
82 | ```yaml
83 | jira_server: https://jira.example.com/
84 | oauth_access_token: ABCDabcdABCDabcdABCDabcdABCDabcd
85 | oauth_access_token_secret: ABCDabcdABCDabcdABCDabcdABCDabcd
86 | oauth_consumer_key: OAUTH_CONSUMER_KEY_ID
87 | oauth_private_key_pem_path: /path/to/pem/file
88 | username: YOUR_JIRA_USER
89 | ```
90 |
91 | ### PAT authentication
92 |
93 | Here's an example settings file for PAT authentication.
94 |
95 | ```yaml
96 | username: YOUR_JIRA_USER
97 | pat_token: xyzzyAbc123
98 | jira_server: https://jira.example.com/
99 | ```
100 |
101 | ## Installation
102 |
103 | ### Run via docker / nerdctl
104 |
105 | This is the recommended way to use the `jc` commands, and how it will be run if you use one of the ZSH frameworks detailed below.
106 |
107 | If you're not using a ZSH framework, clone this repository and add its `bin` directory to your `$PATH`. It contains a `jc` script that will detect whether you have `nerdctl` or `docker` and if it finds them, map `~/jira-commands` (and the configuration file there) into a volume in the `jira-commands` container and run the tools inside the container.
108 |
109 | ### Direct pip install
110 |
111 | `sudo pip install jira-commands` will install the command-line tools via `pip`. This may cause compatibility annoyances with other python tools on your system, so there's a `docker`/`nerdctl` option as well.
112 |
113 | ### ZSH plugin
114 |
115 | The tooling has been packaged as a ZSH plugin to make using it as easy as possible for ZSH users.
116 |
117 | #### zgenom
118 |
119 | If you're using [Zgenom](https://github.com/jandamm/zgenom):
120 |
121 | 1. Add `zgenom load unixorn/jira-commands` to your `.zshrc` with your other plugins
122 | 2. `zgenom reset && zgenom save`
123 |
124 | #### Antigen
125 |
126 | If you're using [Antigen](https://github.com/zsh-users/antigen):
127 |
128 | 1. Add `antigen bundle unixorn/jira-commands` to your .zshrc where you've listed your other plugins.
129 | 2. Close and reopen your Terminal/iTerm window to refresh context and use the plugin. Alternatively, you can run `antigen bundle unixorn/jira-commands` in a running shell to have `antigen` load the new plugin.
130 |
131 | #### oh-my-zsh
132 |
133 | If you're using [oh-my-zsh](https://ohmyz.sh):
134 |
135 | 1. Clone the repository into a new `jira-commands` directory in oh-my-zsh's plugin folder:
136 |
137 | `git clone https://github.com/unixorn/jira-commands.git $ZSH_CUSTOM/plugins/jira-commands`
138 |
139 | 2. Edit your `~/.zshrc` and add `jira-commands` – same as clone directory – to the list of plugins to enable:
140 |
141 | `plugins=( ... jira-commands )`
142 |
143 | 3. Then, restart your terminal application to refresh context and use the plugin. Alternatively, you can source your current shell configuration:
144 |
145 | `source ~/.zshrc`
146 |
--------------------------------------------------------------------------------
/bin/jc:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Shim script to run jc inside a container
4 | #
5 | # Author: Joe Block
6 | # Copyright 2022, ZScaler
7 | # License: Apache 2.0
8 | # shellcheck disable=SC2003
9 |
10 | set -o pipefail
11 | if [[ "$VERBOSE" -gt 0 ]]; then
12 | set -x
13 | fi
14 |
15 | function debug() {
16 | if [[ -n "$DEBUG" ]]; then
17 | echo "$@"
18 | fi
19 | }
20 |
21 | function fail() {
22 | printf '%s\n' "$1" >&2 ## Send message to stderr. Exclude >&2 if you don't want it that way.
23 | exit "${2-1}" ## Return a code specified by $2 or 1 by default. 24 | } 25 | 26 | function has() { 27 | # Check if a command is in $PATH 28 | which "$@" > /dev/null 2>&1 29 | } 30 | 31 | load-lastupdate-age-from-file() { 32 | local interval 33 | local last_update 34 | local now 35 | now=$(date +%s) 36 | if [[ -r "${1}" ]]; then 37 | last_update=$(cat "${1}") 38 | else 39 | # no cookie file, default to dawn of time 40 | last_update=0 41 | fi 42 | # shellcheck disable=SC2086 43 | interval="$(expr ${now} - ${last_update})" 44 | echo "${interval}" 45 | } 46 | 47 | check-for-image-update() { 48 | local day_seconds 49 | local refresh_seconds 50 | local last_image_pull 51 | mkdir -p "$SETTINGS_D" 52 | day_seconds=$(expr 24 \* 60 \* 60) 53 | refresh_seconds=$(expr "${day_seconds}" \* "${PULL_INTERVAL_IN_DAYS}") 54 | last_image_pull=$(load-lastupdate-age-from-file "$PULL_COOKIE_F") 55 | 56 | if [ "${last_image_pull}" -gt "${refresh_seconds}" ]; then 57 | debug "Checking for container image updates..." 58 | if "$CONTAINER_TOOL" pull "$JIRA_COMMAND_IMAGE"; then 59 | debug "Writing timestamp to $PULL_COOKIE_F" 60 | date '+%s' > "$PULL_COOKIE_F" 61 | else 62 | debug "Could not pull $JIRA_COMMAND_IMAGE with $CONTAINER_TOOL" 63 | fi 64 | fi 65 | } 66 | 67 | 68 | SETTINGS_D=${SETTINGS_D:-"$HOME/.jira-commands"} 69 | CONTAINER_TOOL=${CONTAINER_TOOL:-'NONE'} 70 | JIRA_COMMAND_IMAGE=${JIRA_COMMAND_IMAGE:-'unixorn/jira-commands'} 71 | PULL_COOKIE_F=${PULL_COOKIE_F:-"$SETTINGS_D/last-image-pull"} 72 | PULL_INTERVAL_IN_DAYS=${PULL_INTERVAL_IN_DAYS:-"${PULL_INTERVAL_IN_DAYS:-14}"} 73 | 74 | debug "CONTAINER_TOOL: $CONTAINER_TOOL" 75 | debug "JIRA_COMMAND_IMAGE: $JIRA_COMMAND_IMAGE" 76 | debug "PULL_COOKIE_F: $PULL_COOKIE_F" 77 | debug "PULL_INTERVAL_IN_DAYS: $PULL_INTERVAL_IN_DAYS" 78 | debug "SETTINGS_D: $SETTINGS_D" 79 | 80 | # List in descending preference order so we use `nerdctl` instead of `docker` 81 | # when both are present - we prefer to use `nerdctl` because on my 82 | # M1 MacBook Pro, it runs 30% faster than `docker` does. 
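# Each match in the loop below overwrites CONTAINER_TOOL, so the last tool found
# in the list wins; when both are installed, nerdctl ends up selected.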
83 | for tool in docker nerdctl 84 | do 85 | if has $tool; then 86 | debug "Setting CONTAINER_TOOL to $tool" 87 | CONTAINER_TOOL="$tool" 88 | fi 89 | done 90 | 91 | if [[ -r "$SETTINGS_D/container-tool" ]]; then 92 | # Force a specific container engine 93 | CONTAINER_TOOL=$(cat "$HOME/.jira-commands/container-tool") 94 | debug "FORCING $CONTAINER_TOOL" 95 | fi 96 | 97 | if [[ $CONTAINER_TOOL == 'NONE' ]]; then 98 | fail "Can't find a tool to run jc jira commands in a container" 99 | fi 100 | 101 | check-for-image-update 102 | 103 | exec "$CONTAINER_TOOL" run \ 104 | -v "${SETTINGS_D}":/config \ 105 | -v "$(pwd)":/pwd \ 106 | --rm -it "$JIRA_COMMAND_IMAGE" jc "$@" 107 | -------------------------------------------------------------------------------- /hooks/autohook.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Autohook 4 | # A very, very small Git hook manager with focus on automation 5 | # Contributors: https://github.com/Autohook/Autohook/graphs/contributors 6 | # Version: 2.3.0 7 | # Website: https://github.com/Autohook/Autohook 8 | 9 | 10 | echo() { 11 | builtin echo "[Autohook] $@"; 12 | } 13 | 14 | 15 | install() { 16 | hook_types=( 17 | "applypatch-msg" 18 | "commit-msg" 19 | "post-applypatch" 20 | "post-checkout" 21 | "post-commit" 22 | "post-merge" 23 | "post-receive" 24 | "post-rewrite" 25 | "post-update" 26 | "pre-applypatch" 27 | "pre-auto-gc" 28 | "pre-commit" 29 | "pre-push" 30 | "pre-rebase" 31 | "pre-receive" 32 | "prepare-commit-msg" 33 | "update" 34 | ) 35 | 36 | repo_root=$(git rev-parse --show-toplevel) 37 | hooks_dir="$repo_root/.git/hooks" 38 | autohook_linktarget="../../hooks/autohook.sh" 39 | for hook_type in "${hook_types[@]}" 40 | do 41 | hook_symlink="$hooks_dir/$hook_type" 42 | ln -sf $autohook_linktarget $hook_symlink 43 | done 44 | } 45 | 46 | 47 | main() { 48 | calling_file=$(basename $0) 49 | 50 | if [[ $calling_file == "autohook.sh" ]] 51 | then 52 | command=$1 53 | if [[ $command == "install" ]] 54 | then 55 | install 56 | fi 57 | else 58 | repo_root=$(git rev-parse --show-toplevel) 59 | hook_type=$calling_file 60 | symlinks_dir="$repo_root/hooks/$hook_type" 61 | files=("$symlinks_dir"/*) 62 | number_of_symlinks="${#files[@]}" 63 | if [[ $number_of_symlinks == 1 ]] 64 | then 65 | if [[ "$(basename ${files[0]})" == "*" ]] 66 | then 67 | number_of_symlinks=0 68 | fi 69 | fi 70 | echo "Looking for $hook_type scripts to run...found $number_of_symlinks!" 71 | if [[ $number_of_symlinks -gt 0 ]] 72 | then 73 | hook_exit_code=0 74 | for file in "${files[@]}" 75 | do 76 | scriptname=$(basename $file) 77 | echo "BEGIN $scriptname" 78 | if [[ "${AUTOHOOK_DEBUG-}" == '' ]]; then 79 | eval "\"$file\"" &>/dev/null 80 | else 81 | eval "\"$file\"" 82 | fi 83 | script_exit_code="$?" 
84 | if [[ "$script_exit_code" != 0 ]] 85 | then 86 | hook_exit_code=$script_exit_code 87 | fi 88 | echo "FINISH $scriptname" 89 | done 90 | if [[ $hook_exit_code != 0 ]] 91 | then 92 | echo "A $hook_type script yielded negative exit code $hook_exit_code" 93 | exit $hook_exit_code 94 | fi 95 | fi 96 | fi 97 | } 98 | 99 | 100 | main "$@" 101 | -------------------------------------------------------------------------------- /hooks/post-checkout/001-clean-up-pyc-and-pyo-files: -------------------------------------------------------------------------------- 1 | ../scripts/clean-up-pyc-and-pyo-files -------------------------------------------------------------------------------- /hooks/pre-commit/001-format-python-files-with-black: -------------------------------------------------------------------------------- 1 | ../scripts/format-python-files-with-black -------------------------------------------------------------------------------- /hooks/pre-commit/002-clean-up-pyc-and-pyo-files: -------------------------------------------------------------------------------- 1 | ../scripts/clean-up-pyc-and-pyo-files -------------------------------------------------------------------------------- /hooks/scripts/clean-up-pyc-and-pyo-files: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | find . -iname '*.pyc' -delete 4 | find . -iname '*.pyo' -delete 5 | -------------------------------------------------------------------------------- /hooks/scripts/format-python-files-with-black: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Run black on all files in the repo 4 | 5 | set -o pipefail 6 | 7 | exec black . 8 | -------------------------------------------------------------------------------- /jira-commands.plugin.zsh: -------------------------------------------------------------------------------- 1 | # Copyright 2022, Zscaler 2 | # 3 | # Author: Joe Block 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | # Add our plugin's bin diretory to user's path 18 | PLUGIN_BIN="$(dirname $0)/bin" 19 | path+=($PLUGIN_BIN) 20 | export PATH 21 | -------------------------------------------------------------------------------- /jira_commands/__init__.py: -------------------------------------------------------------------------------- 1 | from importlib import metadata 2 | 3 | # Read version from the package metadata 4 | __version__ = metadata.version(__package__) 5 | -------------------------------------------------------------------------------- /jira_commands/cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/unixorn/jira-commands/3e0035f61b7f358cf6bf282af9bde7678e9786e5/jira_commands/cli/__init__.py -------------------------------------------------------------------------------- /jira_commands/cli/common.py: -------------------------------------------------------------------------------- 1 | # 2 | # Author: Joe Block 3 | # License: Apache 2.0 4 | # Copyright 2022, ZScaler Inc. 5 | 6 | import argparse 7 | import logging 8 | import os 9 | import sys 10 | 11 | from thelogrus.fileops import readableFile 12 | 13 | 14 | def baseCLIParser(description: str = None): 15 | return base_cli_parser(description=description) 16 | 17 | 18 | def base_cli_parser(description: str = None): 19 | """ 20 | Create the base argument parser that we build on for individual scripts 21 | 22 | Args: 23 | description: What we want displayed by --help 24 | """ 25 | parser = argparse.ArgumentParser(description=description) 26 | parser.add_argument("-d", "--debug", help="Enable debug mode", action="store_true") 27 | parser.add_argument( 28 | "--log-level", 29 | "--logging", 30 | "-l", 31 | type=str.upper, 32 | help="set log level", 33 | choices=["DEBUG", "INFO", "ERROR", "WARNING", "CRITICAL"], 34 | default="INFO", 35 | ) 36 | parser.add_argument( 37 | "--auth", 38 | "--auth-type", 39 | type=str.upper, 40 | help="Set authentication method to use.", 41 | choices=["BASIC", "OAUTH", "PAT"], 42 | default="PAT", 43 | ) 44 | parser.add_argument( 45 | "--oauth-access-token", 46 | "--oauth-access_token", 47 | help="Oauth access_token", 48 | type=str, 49 | ) 50 | parser.add_argument( 51 | "--oauth-access-token-secret", 52 | "--oauth-access_token_secret", 53 | help="Oauth access_token_secret", 54 | type=str, 55 | ) 56 | parser.add_argument( 57 | "--oauth-consumer-key", 58 | "--oauth-consumer_key", 59 | help="Oauth consumer_key", 60 | type=str, 61 | ) 62 | parser.add_argument( 63 | "--oauth-private-key-pem-path", 64 | "--oauth-pem-path", 65 | "--oauth-pem_path", 66 | help="Path to file containing a PEM format Oauth private key", 67 | type=str, 68 | ) 69 | parser.add_argument( 70 | "--pat-token", 71 | "--pat-authentication-token", 72 | help="PAT authentication token", 73 | type=str, 74 | ) 75 | 76 | # We have different default settings file paths based on whether we're 77 | # running in a container, on a server, or on a laptop. 
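# The candidate list below is scanned in order and the last readable file wins,
# so a readable path supplied via JIRA_CREDENTIALS_FILE overrides the
# home-directory and system-wide defaults.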
78 | settingsFileDefault = "/config/jira.yaml"
79 | settingsFileCandidates = [settingsFileDefault, "/etc/zscaler/jira/jira.yaml"]
80 | if "HOME" in os.environ:
81 | settingsFileCandidates.append(f"{os.environ.get('HOME')}/.zscaler/jira.yaml")
82 | if "JIRA_CREDENTIALS_FILE" in os.environ:
83 | settingsFileCandidates.append(os.environ.get("JIRA_CREDENTIALS_FILE"))
84 | for candidate in settingsFileCandidates:
85 | if readableFile(candidate):
86 | settingsFileDefault = candidate
87 | parser.add_argument(
88 | "--settings-file", "--settings", type=str, default=settingsFileDefault
89 | )
90 | parser.add_argument("--server", type=str)
91 |
92 | parser.add_argument(
93 | "--username",
94 | type=str,
95 | help="What username to use with JIRA. This overrides any setting in the settings file.",
96 | )
97 | parser.add_argument(
98 | "--password",
99 | type=str,
100 | help="What password to use with JIRA. This overrides any setting in the settings file.",
101 | )
102 |
103 | return parser
104 |
105 |
106 | def parseTicketCLI(description: str):
107 | return parse_ticket_cli(description=description)
108 |
109 |
110 | def parse_ticket_cli(description: str = None):
111 | """
112 | Parse the command line options and return the ticket id
113 | """
114 | parser = base_cli_parser(description=description)
115 |
116 | parser.add_argument(
117 | "--ticket", "-t", type=str, required=True, help="Which JIRA ticket to act on"
118 | )
119 | return parser
120 |
121 |
122 | def ticketCreationParser(description: str):
123 | return ticket_creation_parser(description=description)
124 |
125 |
126 | def ticket_creation_parser(description: str = None):
127 | """
128 | Create the base ticket creation parser
129 | """
130 | parser = base_cli_parser(description="Create a JIRA ticket")
131 |
132 | # Collect issue attributes
133 | parser.add_argument("--description", help="Ticket description", type=str)
134 |
135 | parser.add_argument(
136 | "--json", "--json-data", help="Custom ticket data as a JSON string", type=str
137 | )
138 | parser.add_argument("--label", help="Ticket label", type=str)
139 | # JIRA is case sensitive about issue IDs. If we ever have a JIRA project that
140 | # is not all-caps, this will break.
141 | parser.add_argument(
142 | "--parent",
143 | help="Ticket parent - required when creating subtasks",
144 | type=str.upper,
145 | )
146 | parser.add_argument(
147 | "--priority",
148 | help="Set priority for the new ticket.
Use 'jc get priorities' to find a list of available ticket priorities.", 149 | type=str, 150 | ) 151 | parser.add_argument( 152 | "--project", 153 | help="What JIRA project to create the new ticket in", 154 | type=str, 155 | default="SYSENG", 156 | ) 157 | parser.add_argument("--summary", help="Ticket summary", type=str) 158 | 159 | # JIRA is picky about capitalization, so enforce valid spellings 160 | baseTicketTypes = (["Bug", "Epic", "Improvement", "Sub-task", "Task"],) 161 | parser.add_argument( 162 | "--issue-type", 163 | type=str, 164 | help=f"set issue type (try {baseTicketTypes})", 165 | default="Task", 166 | ) 167 | return parser 168 | 169 | 170 | def stdin_to_string() -> str: 171 | stdin_comment = "" 172 | for comment_line in sys.stdin: 173 | stdin_comment = stdin_comment + comment_line 174 | logging.debug(f"stdin_comment: {stdin_comment}") 175 | return stdin_comment 176 | 177 | 178 | if __name__ == "__main__": 179 | print("Don't run this directly, import functions from it") 180 | sys.exit(13) 181 | -------------------------------------------------------------------------------- /jira_commands/cli/crudops.py: -------------------------------------------------------------------------------- 1 | # 2 | # interact with jira 3 | # 4 | # Author: Joe Block 5 | # License: Apache 2.0 6 | # Copyright 2022-2023, ZScaler Inc. 7 | 8 | import json 9 | import logging 10 | import os 11 | import sys 12 | 13 | from jira_commands import __version__ as cli_version 14 | from jira_commands.cli.common import ( 15 | base_cli_parser, 16 | parse_ticket_cli, 17 | stdin_to_string, 18 | ticket_creation_parser, 19 | ) 20 | from jira_commands.jira import JiraTool, load_jira_settings, make_issue_data 21 | 22 | 23 | def default_comment() -> str: 24 | username = os.environ.get("USER", "docker container") 25 | comment = "Updated with jc v" + cli_version + " by " + username 26 | logging.debug(f"Default comment: {comment}") 27 | return comment 28 | 29 | 30 | # CLI parsers 31 | 32 | 33 | def parse_ticket_assign_cli(description: str = "Assign a JIRA ticket to someone"): 34 | """ 35 | Parses the command line options for assigning a ticket and 36 | initializes logging. 37 | 38 | Returns: 39 | An argparse CLI object 40 | """ 41 | parser = parse_ticket_cli(description=description) 42 | parser.add_argument( 43 | "--assignee", 44 | type=str, 45 | required=True, 46 | help="Username to assign ticket to. Specify None if you want to unassign the ticket", 47 | ) 48 | cli = parser.parse_args() 49 | loglevel = getattr(logging, cli.log_level.upper(), None) 50 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 51 | logging.basicConfig(level=loglevel, format=logFormat) 52 | logging.info("Set log level to %s", cli.log_level.upper()) 53 | return cli 54 | 55 | 56 | def parse_ticket_comment_cli(description: str = "Comment on a JIRA ticket"): 57 | """ 58 | Parse command line options for commenting on a ticket and initializes 59 | logging. 60 | 61 | Returns: 62 | An argparse CLI object 63 | """ 64 | parser = parse_ticket_cli(description=description) 65 | parser.add_argument( 66 | "--comment", 67 | type=str, 68 | default=default_comment(), 69 | help="Comment to add to the specified ticket. It only supports very " 70 | "limited formatting - _italic_ and *bold* work, but `code` doesn't." 
71 | " Default: " + default_comment(), 72 | ) 73 | parser.add_argument( 74 | "--stdin-comment", 75 | "--stdin", 76 | help="Read a comment from STDIN", 77 | action="store_true", 78 | ) 79 | cli = parser.parse_args() 80 | loglevel = getattr(logging, cli.log_level.upper(), None) 81 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 82 | logging.basicConfig(level=loglevel, format=logFormat) 83 | logging.info("Set log level to %s", cli.log_level.upper()) 84 | return cli 85 | 86 | 87 | def parse_ticket_close_cli(description="Close a JIRA ticket"): 88 | """ 89 | Parses command line options for closing a ticket and initializes logging. 90 | 91 | Returns: 92 | An argparse CLI object 93 | """ 94 | parser = parse_ticket_cli(description=description) 95 | parser.add_argument( 96 | "--comment", 97 | type=str, 98 | default=default_comment(), 99 | help="Comment to add to the specified ticket. It only supports very " 100 | "limited formatting - _italic_ and *bold* work, but `code` doesn't." 101 | " Defaults: " + default_comment(), 102 | ) 103 | cli = parser.parse_args() 104 | loglevel = getattr(logging, cli.log_level.upper(), None) 105 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 106 | logging.basicConfig(level=loglevel, format=logFormat) 107 | logging.info("Set log level to %s", cli.log_level.upper()) 108 | return cli 109 | 110 | 111 | def parseTicketInspectionCLI(): 112 | logging.warning( 113 | "parseTicketInspectionCLI() is deprecated and will be removed. Use parse_ticket_inspection_cli() instead" 114 | ) 115 | parse_ticket_inspection_cli() 116 | 117 | 118 | def parse_ticket_inspection_cli( 119 | description: str = "Vivisect a JIRA ticket so we can determine which " 120 | "custom fields map to which data keys", 121 | ): 122 | """ 123 | Parses command line options for ticket inspectors and initializes logging. 
124 |
125 | Returns:
126 | An argparse CLI object
127 | """
128 | parser = parse_ticket_cli(description=description)
129 | cli = parser.parse_args()
130 | loglevel = getattr(logging, cli.log_level.upper(), None)
131 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
132 | logging.basicConfig(level=loglevel, format=logFormat)
133 | logging.info("Set log level to %s", cli.log_level.upper())
134 | return cli
135 |
136 |
137 | def parse_create_ticket_cli(description: str = "Create a JIRA ticket"):
138 | """
139 | Parse the command line options
140 | """
141 | parser = ticket_creation_parser(description=description)
142 | cli = parser.parse_args()
143 |
144 | loglevel = getattr(logging, cli.log_level.upper(), None)
145 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
146 | logging.basicConfig(level=loglevel, format=logFormat)
147 | logging.info("Set log level to %s", cli.log_level.upper())
148 |
149 | # Validity checks
150 | if cli.issue_type == "Sub-task":
151 | if not cli.parent:
152 | logging.error(
153 | "You must specify a parent with --parent when you are creating a subtask"
154 | )
155 | sys.exit(13)
156 | return cli
157 |
158 |
159 | def parse_get_transitions_cli(
160 | description: str = "See all transitions available on a JIRA ticket",
161 | ):
162 | """
163 | Parse the command line options for transition list tool
164 | """
165 | parser = parse_ticket_cli(description=description)
166 |
167 | cli = parser.parse_args()
168 | loglevel = getattr(logging, cli.log_level.upper(), None)
169 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
170 | logging.basicConfig(level=loglevel, format=logFormat)
171 | logging.debug("Set log level to %s", cli.log_level.upper())
172 | return cli
173 |
174 |
175 | def parse_ticket_link_cli(description: str = "Link two JIRA tickets"):
176 | """
177 | Command line options for linking two tickets
178 | """
179 | parser = parse_ticket_cli(description=description)
180 | parser.add_argument(
181 | "--target",
182 | type=str,
183 | required=True,
184 | help="Target ticket",
185 | )
186 |
187 | link_types = [
188 | "Blocks",
189 | "Depends",
190 | "Bugs", "Clones",
191 | ]
192 | parser.add_argument(
193 | "--link-type",
194 | type=str,
195 | required=True,
196 | help=f"Link type. Case matters. Consider {link_types} as options, "
197 | "though your server may have other types too. 'jc get link types' "
198 | "will show all the link types on your JIRA server",
199 | )
200 | cli = parser.parse_args()
201 | loglevel = getattr(logging, cli.log_level.upper(), None)
202 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
203 | logging.basicConfig(level=loglevel, format=logFormat)
204 | logging.debug("Set log level to %s", cli.log_level.upper())
205 | return cli
206 |
207 |
208 | def parse_transition_to_cli(
209 | description: str = "Transition a JIRA ticket to another state",
210 | ):
211 | """
212 | Parse the command line options for transition set tool
213 | """
214 | parser = parse_ticket_cli(description=description)
215 | parser.add_argument(
216 | "--comment",
217 | type=str,
218 | default=default_comment(),
219 | help="Comment to add to the specified ticket. It only supports very "
220 | "limited formatting - _italic_ and *bold* work, but `code` doesn't."
221 | " Default: " + default_comment(), 222 | ) 223 | parser.add_argument( 224 | "--transition-to", 225 | help="Transition a ticket to a named state", 226 | type=str, 227 | default="Done", 228 | ) 229 | cli = parser.parse_args() 230 | loglevel = getattr(logging, cli.log_level.upper(), None) 231 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 232 | logging.basicConfig(level=loglevel, format=logFormat) 233 | logging.debug("Set log level to %s", cli.log_level.upper()) 234 | return cli 235 | 236 | 237 | # Entrypoints 238 | 239 | 240 | def assign_ticket(): 241 | """ 242 | Assign a ticket to someone 243 | """ 244 | cli = parse_ticket_assign_cli() 245 | logging.debug(f"cli: {cli}") 246 | 247 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 248 | 249 | jira = JiraTool(settings=settings) 250 | if cli.assignee.lower() == "none": 251 | jira.unassign_ticket(ticket=cli.ticket) 252 | else: 253 | jira.assign_ticket(ticket=cli.ticket, assignee=cli.assignee) 254 | 255 | 256 | def close_ticket(): 257 | """ 258 | Close a ticket 259 | """ 260 | cli = parse_ticket_comment_cli() 261 | logging.debug(f"cli: {cli}") 262 | 263 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 264 | 265 | jira = JiraTool(settings=settings) 266 | jira.transition_ticket(ticket=cli.ticket, state="Done", comment=cli.comment) 267 | 268 | 269 | def commentOnTicket(): 270 | logging.warning( 271 | "commentOnTicket is deprecated and will be removed, use comment_on_ticket instead" 272 | ) 273 | comment_on_ticket() 274 | 275 | 276 | def comment_on_ticket(): 277 | """ 278 | Comment on a ticket 279 | """ 280 | cli = parse_ticket_comment_cli() 281 | logging.debug(f"cli: {cli}") 282 | 283 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 284 | 285 | jira = JiraTool(settings=settings) 286 | jira.add_comment(ticket=cli.ticket, comment=cli.comment) 287 | if cli.stdin_comment: 288 | stdin_comment = stdin_to_string() 289 | if stdin_comment: 290 | jira.add_comment(ticket=cli.ticket, comment=stdin_comment) 291 | 292 | 293 | def createTicket(): 294 | logging.warning( 295 | "createTicket is deprecated and will be removed, use create_ticket instead" 296 | ) 297 | create_ticket() 298 | 299 | 300 | def create_ticket(): 301 | """ 302 | Create a JIRA ticket 303 | """ 304 | cli = parse_create_ticket_cli() 305 | logging.debug(f"cli: {cli}") 306 | 307 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 308 | issue_data = make_issue_data(cli=cli) 309 | 310 | jira = JiraTool(settings=settings) 311 | if cli.issue_type == "Sub-task": 312 | results = jira.create_subtask(issue_data=issue_data, parent=cli.parent) 313 | else: 314 | results = jira.create_ticket( 315 | issue_data=issue_data, strict=False, priority=cli.priority 316 | ) 317 | print(results) 318 | # return results 319 | 320 | 321 | def getLinkTypes(): 322 | logging.warning( 323 | "getLinkTypes is deprecated and will be removed, use get_link_types instead" 324 | ) 325 | get_link_types() 326 | 327 | 328 | def get_link_types(): 329 | """ 330 | Get all the link types on a server 331 | """ 332 | parser = base_cli_parser() 333 | parser.add_argument("--json", help="Output in JSON format", action="store_true") 334 | cli = parser.parse_args() 335 | 336 | loglevel = getattr(logging, cli.log_level.upper(), None) 337 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 338 | logging.basicConfig(level=loglevel, format=logFormat) 339 | logging.debug("Set log 
level to %s", cli.log_level.upper())
340 |
341 |     settings = load_jira_settings(path=cli.settings_file, cli=cli)
342 |
343 |     jira = JiraTool(settings=settings)
344 |
345 |     link_type_names = []
346 |     for link_type in jira.connection.issue_link_types():
347 |         logging.debug(link_type.name)
348 |         link_type_names.append(link_type.name)
349 |     if cli.json:
350 |         print(json.dumps({"link_types": link_type_names}, indent=2))
351 |     else:
352 |         print(f"Link type names: {link_type_names}")
353 |
354 |
355 | def linkTickets():
356 |     logging.warning(
357 |         "linkTickets is deprecated and will be removed, use link_tickets instead"
358 |     )
359 |     link_tickets()
360 |
361 | def link_tickets():
362 |     """
363 |     Link two tickets
364 |     """
365 |     cli = parse_ticket_link_cli()
366 |     logging.debug(f"cli: {cli}")
367 |
368 |     settings = load_jira_settings(path=cli.settings_file, cli=cli)
369 |
370 |     jira = JiraTool(settings=settings)
371 |     results = jira.link_issues(
372 |         source=cli.ticket, target=cli.target, link_type=cli.link_type
373 |     )
374 |     logging.debug(results)
375 |     if results:
376 |         print(f"({cli.link_type}) link created between {cli.ticket} and {cli.target}")
377 |     else:
378 |         print(
379 |             f"Could not create ({cli.link_type}) link between {cli.ticket} and {cli.target}"
380 |         )
381 |         print(results)
382 |
383 |
384 | def getPriorities():
385 |     logging.warning(
386 |         "getPriorities is deprecated and will be removed, use get_priorities instead"
387 |     )
388 |     get_priorities()
389 |
390 | def get_priorities():
391 |     """
392 |     Get all the priorities on a server
393 |     """
394 |     parser = base_cli_parser(
395 |         description="Get list of priorities on a server and their IDs"
396 |     )
397 |     parser.add_argument("--json", help="Output in JSON format", action="store_true")
398 |     cli = parser.parse_args()
399 |
400 |     loglevel = getattr(logging, cli.log_level.upper(), None)
401 |     logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
402 |     logging.basicConfig(level=loglevel, format=logFormat)
403 |     logging.debug("Set log level to %s", cli.log_level.upper())
404 |
405 |     settings = load_jira_settings(path=cli.settings_file, cli=cli)
406 |
407 |     jira = JiraTool(settings=settings)
408 |     priority_data = jira.get_priority_dict()
409 |
410 |     if cli.json:
411 |         print(json.dumps({"priorities": priority_data}, indent=2))
412 |     else:
413 |         print(f"Issue Priorities: {priority_data}")
414 |
415 |
416 | def getTransitions():
417 |     logging.warning(
418 |         "getTransitions is deprecated and will be removed, use get_transitions instead"
419 |     )
420 |     get_transitions()
421 |
422 | def get_transitions():
423 |     """
424 |     Print all the available transitions on a given ticket
425 |     """
426 |     cli = parse_get_transitions_cli()
427 |     logging.debug(f"cli: {cli}")
428 |
429 |     settings = load_jira_settings(path=cli.settings_file, cli=cli)
430 |
431 |     jira = JiraTool(settings=settings)
432 |     print(jira.ticket_transitions(ticket=cli.ticket))
433 |
434 |
435 | def transitionTo():
436 |     logging.warning(
437 |         "transitionTo is deprecated and will be removed, use transition_to instead"
438 |     )
439 |     transition_to()
440 |
441 | def transition_to():
442 |     """
443 |     Transition a given ticket to a specified state
444 |     """
445 |     cli = parse_transition_to_cli()
446 |     logging.debug(f"cli: {cli}")
447 |
448 |     settings = load_jira_settings(path=cli.settings_file, cli=cli)
449 |
450 |     jira = JiraTool(settings=settings)
451 |     print(jira.transition_ticket(ticket=cli.ticket, state=cli.transition_to, comment=cli.comment))
452 |
453 |
454 | if __name__ == "__main__":
455 |     raise RuntimeError("This file should not be
run directly, import functions from it") 456 | -------------------------------------------------------------------------------- /jira_commands/cli/jc.py: -------------------------------------------------------------------------------- 1 | # 2 | # Gives us a git-style main command that calls subcommands. 3 | # 4 | # Author: Joe Block 5 | # License: Apache 2.0 6 | # Copyright 2022, ZScaler Inc. 7 | 8 | 9 | import os 10 | import subprocess 11 | import sys 12 | 13 | from thelogrus.cli import find_subcommand 14 | 15 | 16 | def jc_usage(): 17 | """ 18 | They called jc with no subcommands, or we couldn't find a subcommand 19 | """ 20 | myName = os.path.basename(sys.argv[0]) 21 | print( 22 | f"{myName} calls subcommands - try '{myName} ticket examine --ticket TICKET' for example." 23 | ) 24 | 25 | 26 | def jc_driver(): 27 | """ 28 | Process the command line arguments, find and run the appropriate 29 | subcommand. 30 | 31 | We want to be able to do git-style handoffs to subcommands where if we 32 | do `jc blah foo bar` and the executable `jc-blah-foo` exists, we'll call 33 | it with the argument bar. 34 | 35 | We deliberately don't do anything with the arguments other than hand 36 | them off to the jc subcommand found. 37 | 38 | Subcommands are responsible for their own argument parsing. 39 | """ 40 | try: 41 | (command, args) = find_subcommand(sys.argv) 42 | 43 | # If we can't construct a subcommand from sys.argv, it'll still be able 44 | # to find this driver script, and re-running ourself isn't useful. 45 | if os.path.basename(command) == sys.argv[0]: 46 | print("Could not find a subcommand for %s" % " ".join(sys.argv)) 47 | sys.exit(13) 48 | except RuntimeError as e: 49 | print(str(e)) 50 | jc_usage() 51 | sys.exit(13) 52 | subprocess.check_call([command] + args) 53 | 54 | 55 | if __name__ == "__main__": 56 | jc_driver() 57 | -------------------------------------------------------------------------------- /jira_commands/cli/jql.py: -------------------------------------------------------------------------------- 1 | # JQL query support 2 | # 3 | # Author: Joe Block 4 | # License: Apache 2.0 5 | # Copyright 2024, ZScaler Inc. 
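The git-style dispatch in jc_driver() above delegates the actual lookup to thelogrus.cli.find_subcommand. As a rough sketch of the idea only (not thelogrus's real implementation), resolving `jc ticket comment --ticket ABC-123` to the executable jc-ticket-comment could look like this, assuming the jc-* scripts are on the PATH:

import shutil


def resolve_subcommand(argv):
    # Only the words before the first option flag can become part of the
    # subcommand name; try the longest candidate first, so
    # ['jc', 'ticket', 'comment', '--ticket', 'ABC-123'] checks
    # jc-ticket-comment before falling back to jc-ticket.
    words = []
    for token in argv[1:]:
        if token.startswith("-"):
            break
        words.append(token)
    for end in range(len(words), 0, -1):
        candidate = "-".join(["jc"] + words[:end])
        path = shutil.which(candidate)
        if path:
            return path, argv[1 + end:]
    raise RuntimeError(f"No subcommand found for {' '.join(argv)}")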
6 | 7 | import logging 8 | 9 | from jira_commands import __version__ as cli_version 10 | from jira_commands.cli.common import ( 11 | base_cli_parser, 12 | ) 13 | from jira_commands.jira import JiraTool, load_jira_settings 14 | 15 | 16 | def parse_jql_cli(description: str = f"Run a JQL query {cli_version}"): 17 | """ 18 | Run a JQL query 19 | """ 20 | parser = base_cli_parser(description=description) 21 | 22 | parser.add_argument( 23 | "--jql", "--jql-query", type=str, required=True, help="JQL query to run" 24 | ) 25 | cli = parser.parse_args() 26 | loglevel = getattr(logging, cli.log_level.upper(), None) 27 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 28 | logging.basicConfig(level=loglevel, format=logFormat) 29 | logging.info("Set log level to %s", cli.log_level.upper()) 30 | return cli 31 | 32 | 33 | def run_jql(): 34 | """ 35 | Run a JQL query 36 | """ 37 | cli = parse_jql_cli(description="Run a JQL query") 38 | logging.debug(f"cli: {cli}") 39 | 40 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 41 | 42 | jira = JiraTool(settings=settings) 43 | print(jira.jql(jql=cli.jql)) 44 | -------------------------------------------------------------------------------- /jira_commands/cli/labels.py: -------------------------------------------------------------------------------- 1 | # 2 | # Label operations 3 | # 4 | # Author: Joe Block 5 | # License: Apache 2.0 6 | # Copyright 2024, ZScaler Inc. 7 | import json 8 | import logging 9 | 10 | from jira_commands.cli.common import parse_ticket_cli 11 | from jira_commands.jira import JiraTool, load_jira_settings 12 | 13 | 14 | def add_label_parser(description="Add labels to an issue"): 15 | """ 16 | Add a label to a ticket 17 | """ 18 | parser = parse_ticket_cli(description=description) 19 | parser.add_argument( 20 | "--label", 21 | type=str, 22 | required=True, 23 | help="label to add to the target issue", 24 | ) 25 | parser.add_argument( 26 | "--include-subtasks", help="Include subtasks", action="store_true" 27 | ) 28 | return parser 29 | 30 | 31 | def get_labels_parser(description="Get the labels on an issue"): 32 | """ 33 | Add a label to a ticket 34 | """ 35 | parser = parse_ticket_cli(description=description) 36 | parser.add_argument( 37 | "--include-subtasks", 38 | help="Also alter the ticket's subtasks", 39 | action="store_true", 40 | ) 41 | parser.add_argument("--json", help="use json for output", action="store_true") 42 | return parser 43 | 44 | 45 | def remove_label_parser(description="Delete labels from an issue"): 46 | """ 47 | Delete a label from a ticket 48 | """ 49 | parser = parse_ticket_cli(description=description) 50 | parser.add_argument( 51 | "--delete-label", 52 | type=str, 53 | required=True, 54 | help="label to remove from the target issue", 55 | ) 56 | parser.add_argument( 57 | "--include-subtasks", 58 | help="Also alter the ticket's subtasks", 59 | action="store_true", 60 | ) 61 | return parser 62 | 63 | 64 | def cli_setup(parser=None): 65 | cli = parser.parse_args() 66 | 67 | loglevel = getattr(logging, cli.log_level.upper(), None) 68 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 69 | logging.basicConfig(level=loglevel, format=logFormat) 70 | logging.info("Set log level to %s", cli.log_level.upper()) 71 | return cli 72 | 73 | 74 | # Entrypoints 75 | 76 | 77 | def add_label(): 78 | """ 79 | Add a label to a ticket and optionally its subtasks 80 | """ 81 | parser = add_label_parser( 82 | 
description="Add a label or labels to an issue and optionally its subtasks" 83 | ) 84 | cli = cli_setup(parser=parser) 85 | logging.debug(f"cli: {cli}") 86 | 87 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 88 | 89 | jira = JiraTool(settings=settings) 90 | 91 | jira.add_issue_label(ticket=cli.ticket, labels=cli.label) 92 | if cli.include_subtasks: 93 | subtasks_l = jira.list_issue_subtasks(ticket=cli.ticket) 94 | for subtask in subtasks_l: 95 | jira.add_issue_label(ticket=subtask, labels=cli.label) 96 | 97 | 98 | def get_labels(): 99 | """ 100 | Get all the labels on a ticket 101 | """ 102 | parser = get_labels_parser(description="Show the labels on an issue") 103 | cli = cli_setup(parser=parser) 104 | logging.debug(f"cli: {cli}") 105 | 106 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 107 | 108 | jira = JiraTool(settings=settings) 109 | 110 | labels = jira.get_issue_labels( 111 | ticket=cli.ticket, 112 | ) 113 | json_results = {cli.ticket: labels} 114 | 115 | if cli.include_subtasks: 116 | subtasks_l = jira.list_issue_subtasks(ticket=cli.ticket) 117 | for subtask in subtasks_l: 118 | subtask_labels = jira.get_issue_labels(ticket=subtask) 119 | json_results[subtask] = subtask_labels 120 | if not cli.json: 121 | print(f"{subtask}:{subtask_labels}") 122 | if cli.json: 123 | print(json.dumps(json_results)) 124 | else: 125 | print(f"{cli.ticket}:{labels}") 126 | 127 | 128 | def remove_label(): 129 | """ 130 | Add a label to a ticket and optionally its subtasks 131 | """ 132 | parser = remove_label_parser( 133 | description="Remove a label or labels from an issue and optionally its subtasks", 134 | ) 135 | cli = cli_setup(parser=parser) 136 | logging.debug(f"cli: {cli}") 137 | 138 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 139 | 140 | jira = JiraTool(settings=settings) 141 | 142 | jira.remove_issue_label(ticket=cli.ticket, label=cli.delete_label) 143 | if cli.include_subtasks: 144 | subtasks_l = jira.list_issue_subtasks(ticket=cli.ticket) 145 | for subtask in subtasks_l: 146 | jira.remove_issue_label(ticket=subtask, label=cli.delete_label) 147 | -------------------------------------------------------------------------------- /jira_commands/cli/list.py: -------------------------------------------------------------------------------- 1 | # 2 | # interact with jira 3 | # 4 | # Author: Joe Block 5 | # License: Apache 2.0 6 | # Copyright 2022-2023, ZScaler Inc. 7 | 8 | import logging 9 | 10 | from jira_commands.cli.common import base_cli_parser 11 | from jira_commands.jira import JiraTool, load_jira_settings 12 | 13 | 14 | def parseListCLI(description="List JIRA tickets in a project"): 15 | """ 16 | Parse the command line options for the ticket list script and 17 | initialize logging. 
18 |     """
19 |     parser = base_cli_parser(description=description)
20 |     parser.add_argument("--project", "-p", type=str, default="SYSENG")
21 |
22 |     cli = parser.parse_args()
23 |     loglevel = getattr(logging, cli.log_level.upper(), None)
24 |     logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
25 |     logging.basicConfig(level=loglevel, format=logFormat)
26 |     logging.info("Set log level to %s", cli.log_level.upper())
27 |     return cli
28 |
29 |
30 | def listTickets():
31 |     """
32 |     List tickets in a project
33 |     """
34 |     cli = parseListCLI()
35 |     logging.debug(f"cli: {cli}")
36 |
37 |     settings = load_jira_settings(path=cli.settings_file, cli=cli)
38 |
39 |     jira = JiraTool(settings=settings)
40 |     jira.list_tickets(project=cli.project)
41 |
42 |
43 | if __name__ == "__main__":
44 |     listTickets()
45 |
-------------------------------------------------------------------------------- /jira_commands/cli/map_extractor.py: --------------------------------------------------------------------------------
1 | #
2 | # Extract the mappings from an issue to use in ticket creation
3 | #
4 | # We have to do this ugly hackery because JIRA will sometimes renumber
5 | # the mappings in a given issue type if you change any of the dropdown
6 | # menu options.
7 | #
8 | # For extra fun, it will even renumber options in the dropdowns you
9 | # _didn't_ edit sometimes.
10 | #
11 | # Copyright 2022-2023 Zscaler
12 | # License: Apache 2.0
13 |
14 | import logging
15 | import re
16 |
17 | from thelogrus.yaml import writeYamlFile
18 |
19 | from jira_commands.cli.common import base_cli_parser
20 | from jira_commands.jira import JiraTool, load_jira_settings
21 |
22 |
23 | def mappings_extractor_parser(
24 |     description: str = "Extract field maps for a JIRA issue type from a golden issue",
25 | ):
26 |     """
27 |     Create a command line parser for our field map extractor
28 |
29 |     Args:
30 |         description: What description we want printed by --help
31 |     """
32 |     parser = base_cli_parser(description=description)
33 |     parser.add_argument(
34 |         "--mapping-output-file",
35 |         help="Where to write the extracted JIRA field mappings",
36 |         type=str,
37 |         required=True,
38 |     )
39 |     parser.add_argument(
40 |         "--template-ticket",
41 |         "--get-field-choices-from",
42 |         type=str,
43 |         help="Read valid dropdowns from a ticket. JIRA occasionally renumbers "
44 |         "the dropdowns if _any_ dropdown for an issue type is modified.",
45 |     )
46 |     return parser
47 |
48 |
49 | def mappings_extractor_cli(
50 |     description: str = "Extract field maps for a JIRA issue type from a golden issue",
51 | ):
52 |     """
53 |     Parse the command line options for the custom mapping file creator and
54 |     set up logging.
55 | 56 | Args: 57 | description: What description we want printed by --help 58 | 59 | Returns: 60 | argparse cli object 61 | """ 62 | parser = mappings_extractor_parser(description=description) 63 | 64 | cli = parser.parse_args() 65 | loglevel = getattr(logging, cli.log_level.upper(), None) 66 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 67 | logging.basicConfig(level=loglevel, format=logFormat) 68 | logging.info("Set log level to %s", cli.log_level.upper()) 69 | return cli 70 | 71 | 72 | def cleanup_mappings(data: dict = None): 73 | """ 74 | We only need the customfield_* keys, not the extra garbage jira returned 75 | 76 | Args: 77 | data: dictionary to purge irrelevant entries from 78 | 79 | Returns: 80 | dict with the irrelevant entries removed 81 | """ 82 | logging.info("Cleaning up field mappings data") 83 | badkeys = [] 84 | for k in data.keys(): 85 | valid = r"customfield_.*" 86 | check = re.search(valid, k) 87 | if not check: 88 | badkeys.append(k) 89 | for b in badkeys: 90 | logging.warning(f"Removing invalid key {b}") 91 | data.pop(b) 92 | return data 93 | 94 | 95 | def create_mapping_file(): 96 | """ 97 | Create an mapping file for the custom fields in an issue type 98 | """ 99 | cli = mappings_extractor_cli() 100 | 101 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 102 | logging.debug(f"settings: {settings}") 103 | 104 | jira = JiraTool(settings=settings) 105 | allowed_dict = cleanup_mappings( 106 | jira.load_customfield_allowed_values(ticket=cli.template_ticket) 107 | ) 108 | logging.info(f"Writing to {cli.mapping_output_file}") 109 | writeYamlFile(path=cli.mapping_output_file, data=allowed_dict) 110 | -------------------------------------------------------------------------------- /jira_commands/cli/subtasks.py: -------------------------------------------------------------------------------- 1 | # 2 | # Get the subtasks of a ticket 3 | # 4 | # Author: Joe Block 5 | # License: Apache 2.0 6 | # Copyright 2023, ZScaler Inc. 
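A quick illustration of what cleanup_mappings() above does; the custom field id and option values here are made up for the example, not read from a real server:

from jira_commands.cli.map_extractor import cleanup_mappings

sample = {
    "customfield_10010": {"Red": "10200", "Green": "10207"},  # hypothetical field
    "summary": "golden ticket",
    "issuetype": "Task",
}
# Everything that does not match customfield_* is logged and dropped.
print(cleanup_mappings(data=sample))
# -> {'customfield_10010': {'Red': '10200', 'Green': '10207'}}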
7 | 8 | import logging 9 | 10 | from jira_commands.cli.common import parse_ticket_cli, stdin_to_string 11 | from jira_commands.jira import JiraTool, load_jira_settings 12 | from jira_commands.cli.crudops import ( 13 | parse_ticket_assign_cli, 14 | parse_ticket_comment_cli, 15 | parse_transition_to_cli, 16 | ) 17 | 18 | 19 | def parse_subtasks_cli(description="List subtasks for an issue"): 20 | """ 21 | Parse the command line options for jc-ticket-subtasks 22 | """ 23 | parser = parse_ticket_cli(description=description) 24 | cli = parser.parse_args() 25 | 26 | loglevel = getattr(logging, cli.log_level.upper(), None) 27 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 28 | logging.basicConfig(level=loglevel, format=logFormat) 29 | logging.info("Set log level to %s", cli.log_level.upper()) 30 | 31 | return cli 32 | 33 | 34 | # Entrypoints 35 | 36 | 37 | def assign_subtasks(): 38 | """ 39 | Assign all of an issue's subtasks to the same user 40 | """ 41 | cli = parse_ticket_assign_cli( 42 | description="Assign all of an issue's subtasks to the same user" 43 | ) 44 | logging.debug(f"cli: {cli}") 45 | 46 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 47 | 48 | jira = JiraTool(settings=settings) 49 | subtasks_l = jira.list_issue_subtasks(ticket=cli.ticket) 50 | for subtask in subtasks_l: 51 | jira.assign_ticket(ticket=subtask, assignee=cli.assignee) 52 | 53 | 54 | def close_subtasks(): 55 | """ 56 | Close a ticket 57 | """ 58 | cli = parse_ticket_comment_cli(description="Close all of an issue's subtasks") 59 | logging.debug(f"cli: {cli}") 60 | 61 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 62 | 63 | jira = JiraTool(settings=settings) 64 | subtasks_l = jira.list_issue_subtasks(ticket=cli.ticket) 65 | for subtask in subtasks_l: 66 | jira.transition_ticket(ticket=subtask, state="Done", comment=cli.comment) 67 | 68 | 69 | def comment_on_subtasks(): 70 | """ 71 | Add an identical comment to all of an issue's subtasks 72 | """ 73 | cli = parse_ticket_comment_cli( 74 | description="Add an identical comment to all of an issue's subtasks" 75 | ) 76 | logging.debug(f"cli: {cli}") 77 | 78 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 79 | 80 | jira = JiraTool(settings=settings) 81 | subtasks_l = jira.list_issue_subtasks(ticket=cli.ticket) 82 | if cli.stdin_comment: 83 | stdin_comment = stdin_to_string() 84 | for subtask in subtasks_l: 85 | jira.add_comment(ticket=subtask, comment=cli.comment) 86 | if stdin_comment: 87 | jira.add_comment(ticket=cli.ticket, comment=stdin_comment) 88 | 89 | 90 | def list_subtasks(): 91 | """ 92 | List a ticket's subtasks 93 | """ 94 | cli = parse_subtasks_cli() 95 | logging.debug(f"cli: {cli}") 96 | 97 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 98 | 99 | jira = JiraTool(settings=settings) 100 | subtasks = jira.list_issue_subtasks(ticket=cli.ticket) 101 | # print(f" {pprint.pformat(subtasks, indent=2)}") 102 | for s in subtasks: 103 | print(f"{s} ", end="") 104 | 105 | 106 | def transition_subtasks( 107 | description="Transition all subtasks of an issue to a specific state", 108 | ): 109 | """ 110 | Transition all subtasks of an issue to a given state 111 | """ 112 | cli = parse_transition_to_cli(description=description) 113 | logging.debug(f"cli: {cli}") 114 | 115 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 116 | 117 | jira = JiraTool(settings=settings) 118 | subtasks_l = jira.list_issue_subtasks(ticket=cli.ticket) 119 | for 
subtask in subtasks_l: 120 | jira.transition_ticket( 121 | ticket=subtask, state=cli.transition_to, comment=cli.comment 122 | ) 123 | -------------------------------------------------------------------------------- /jira_commands/cli/vivisect.py: -------------------------------------------------------------------------------- 1 | # 2 | # Vivisect a jira ticket so we can figure out the data structure and custom 3 | # fields for a specific ticket type. 4 | # 5 | # Author: Joe Block 6 | # License: Apache 2.0 7 | # Copyright 2022-2023, ZScaler Inc. 8 | 9 | import json 10 | import logging 11 | import pprint 12 | 13 | from jira_commands.cli.common import base_cli_parser, parse_ticket_cli 14 | from jira_commands.jira import JiraTool, load_jira_settings 15 | 16 | 17 | def dump_all_customfield_allowed_values(): 18 | """ 19 | Dump all the customfield allowed options for a given ticket 20 | """ 21 | cli = parse_dump_all_customfields_cli() 22 | logging.debug(f"cli: {cli}") 23 | 24 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 25 | 26 | jira = JiraTool(settings=settings) 27 | allowed_dict = jira.load_customfield_allowed_values(ticket=cli.ticket) 28 | print(json.dumps(allowed_dict)) 29 | 30 | 31 | def parse_dump_all_customfields_cli(): 32 | """ 33 | Parse the command line options for jc-ticket-dump-all-customfields 34 | """ 35 | parser = parse_ticket_cli(description="Dump a ticket's metadata") 36 | 37 | cliArgs = parser.parse_args() 38 | loglevel = getattr(logging, cliArgs.log_level.upper(), None) 39 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 40 | logging.basicConfig(level=loglevel, format=logFormat) 41 | logging.info("Set log level to %s", cliArgs.log_level.upper()) 42 | return cliArgs 43 | 44 | 45 | def dump_metadata(): 46 | """ 47 | Dump a ticket's metadata 48 | """ 49 | cli = parse_metadata_cli() 50 | logging.debug(f"cli: {cli}") 51 | 52 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 53 | 54 | jira = JiraTool(settings=settings) 55 | metadata = jira.get_issue_metadata(ticket=cli.ticket) 56 | print(f" {pprint.pformat(metadata, indent=2)}") 57 | 58 | 59 | def extract_allowed_values(): 60 | cli = parse_ticket_field_cli( 61 | description="Get the allowed values for custom field on a ticket. " 62 | " Jira's API requires it be read from a ticket, not an issue type." 
63 | ).parse_args() 64 | 65 | loglevel = getattr(logging, cli.log_level.upper(), None) 66 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 67 | logging.basicConfig(level=loglevel, format=logFormat) 68 | logging.info("Set log level to %s", cli.log_level.upper()) 69 | 70 | logging.debug(f"cli: {cli}") 71 | logging.debug(f"ticket: {cli.ticket}") 72 | logging.debug(f"custom_field: {cli.custom_field}") 73 | 74 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 75 | jira = JiraTool(settings=settings) 76 | human_names = jira.customfield_id_map(ticket=cli.ticket) 77 | custom_field_name = human_names[cli.custom_field] 78 | 79 | print(f"Values for {cli.custom_field} of {cli.ticket} aka '{custom_field_name}':") 80 | field_allowed_values = jira.allowed_values_for_field( 81 | ticket=cli.ticket, custom_field=cli.custom_field 82 | ) 83 | print(f" {pprint.pformat(field_allowed_values,indent=2)}") 84 | 85 | 86 | def parse_metadata_cli(): 87 | """ 88 | Parse the command line options for jc-ticket-metadata 89 | """ 90 | parser = parse_ticket_cli(description="Dump a ticket's metadata") 91 | 92 | cliArgs = parser.parse_args() 93 | loglevel = getattr(logging, cliArgs.log_level.upper(), None) 94 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 95 | logging.basicConfig(level=loglevel, format=logFormat) 96 | logging.info("Set log level to %s", cliArgs.log_level.upper()) 97 | return cliArgs 98 | 99 | 100 | def parseVivisectCLI(): 101 | logging.warning( 102 | "parseVivisectCLI is deprecated and will be removed soon. Use parse_vivisect_cli instead" 103 | ) 104 | return parse_vivisect_cli() 105 | 106 | 107 | def parse_vivisect_cli(): 108 | """ 109 | Parse the command line options 110 | """ 111 | parser = parse_ticket_cli( 112 | description="Vivisect a JIRA ticket so we can determine which custom fields map to which data keys" 113 | ) 114 | 115 | cliArgs = parser.parse_args() 116 | loglevel = getattr(logging, cliArgs.log_level.upper(), None) 117 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 118 | logging.basicConfig(level=loglevel, format=logFormat) 119 | logging.info("Set log level to %s", cliArgs.log_level.upper()) 120 | return cliArgs 121 | 122 | 123 | def vivisect(): 124 | """ 125 | Vivisect a ticket so we can figure out what key names the various custom 126 | fields have, what transitions are available, etc. 
127 | """ 128 | cli = parse_vivisect_cli() 129 | logging.debug(f"cli: {cli}") 130 | 131 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 132 | 133 | jira = JiraTool(settings=settings) 134 | jira.vivisect(ticket_id=cli.ticket) 135 | 136 | 137 | def parseTicketFieldCLI(description: str): 138 | logging.warning( 139 | "parseTicketFieldCLI is deprecated and will be removed, use parse_ticket_field_cli instead" 140 | ) 141 | return parse_ticket_field_cli(description=description) 142 | 143 | 144 | def parse_ticket_field_cli(description: str): 145 | """ 146 | Parse the command line options and return the ticket id 147 | """ 148 | parser = base_cli_parser(description=description) 149 | 150 | parser.add_argument("--ticket", "-t", type=str, required=True) 151 | parser.add_argument("--custom-field", "-c", type=str, required=True) 152 | return parser 153 | 154 | 155 | def listAllowedFieldValues(): 156 | logging.warning( 157 | "listAllowedFieldValues is deprecated and will be removed, use parse_ticket_field_cli instead" 158 | ) 159 | 160 | 161 | def list_allowed_field_values(): 162 | """ 163 | Get the allowed values for a ticket's custom fields. 164 | 165 | JIRA won't let us do this by issue type because that would be too logical, 166 | we have to examine a ticket instead. 167 | """ 168 | cli = parse_ticket_field_cli( 169 | description="Get the allowed values for a ticket's custom fields" 170 | ).parse_args() 171 | 172 | loglevel = getattr(logging, cli.log_level.upper(), None) 173 | logFormat = "[%(asctime)s][%(levelname)8s][%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s" 174 | logging.basicConfig(level=loglevel, format=logFormat) 175 | logging.info("Set log level to %s", cli.log_level.upper()) 176 | 177 | logging.debug(f"cli: {cli}") 178 | logging.debug(f"ticket: {cli.ticket}") 179 | logging.debug(f"custom_field: {cli.custom_field}") 180 | 181 | settings = load_jira_settings(path=cli.settings_file, cli=cli) 182 | jira = JiraTool(settings=settings) 183 | print(f"Values for {cli.ticket}'s {cli.custom_field}:") 184 | for allowed in jira.allowed_values_for_field( 185 | ticket=cli.ticket, custom_field=cli.custom_field 186 | ): 187 | print(f" {pprint.pformat(allowed, indent=2)}") 188 | 189 | 190 | if __name__ == "__main__": 191 | raise RuntimeError("This is a library, not meant to run on its own.") 192 | -------------------------------------------------------------------------------- /jira_commands/jira.py: -------------------------------------------------------------------------------- 1 | # 2 | # Author: Joe Block 3 | # License: Apache 2.0 4 | # Copyright 2022-2023, ZScaler Inc. 
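The vivisection entry points above are thin wrappers around JiraTool, defined in jira.py below. Stripped of argument parsing, the flow is roughly the following sketch; the settings path, ticket id, custom field id and token are placeholders, and the settings file is assumed to contain at least a jira_server entry:

from types import SimpleNamespace

from jira_commands.jira import JiraTool, load_jira_settings

# Stand-in for the argparse namespace the real entry points build.
cli = SimpleNamespace(
    server=None,
    username="example-user",
    password=None,
    auth="PAT",
    pat_token="example-token",  # placeholder, not a real credential
)
settings = load_jira_settings(path="jira.yaml", cli=cli)  # placeholder path

jira = JiraTool(settings=settings)
print(jira.allowed_values_for_field(ticket="PROJ-123", custom_field="customfield_10010"))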
5 | # 6 | # Interact with JIRA 7 | 8 | from functools import lru_cache 9 | import getpass 10 | import json 11 | import logging 12 | import requests 13 | 14 | from jira import JIRA 15 | from jira_commands.utils import dump_object 16 | from thelogrus.yaml import readYamlFile 17 | 18 | 19 | def loadJiraSettings(path: str, cli): 20 | logging.warning( 21 | "loadJiraSettings() is deprecated and will be removed soon, use load_jira_settings()" 22 | ) 23 | return load_jira_settings(path=path, cli=cli) 24 | 25 | 26 | def load_jira_settings(path: str, cli): 27 | """ 28 | Load JIRA settings from a yaml file, allowing overrides from the CLI 29 | 30 | Args: 31 | path: Path to configuration file 32 | cli (argparse cli object): Command line options 33 | 34 | Returns: 35 | dict: A dictionary containing all of our settings 36 | """ 37 | settings = readYamlFile(path=path) 38 | 39 | # Command line arguments should override the settings file 40 | if cli.server: 41 | settings["jira_server"] = cli.server 42 | 43 | if cli.username: 44 | settings["username"] = cli.username 45 | 46 | if cli.password: 47 | settings["password"] = cli.password 48 | 49 | settings["auth"] = cli.auth 50 | 51 | # Make sure we have all the settings we need 52 | if "jira_server" not in settings: 53 | raise RuntimeError("You must specify the jira server") 54 | 55 | if cli.auth == "BASIC": 56 | # We can fall back to asking the user if we're doing basic auth 57 | if "username" not in settings: 58 | settings["username"] = input("Username: ") 59 | 60 | if "password" not in settings: 61 | settings["password"] = getpass.getpass("Password: ") 62 | 63 | logging.debug("Using basic auth") 64 | if not settings["username"]: 65 | raise RuntimeError("You must specify the jira server username") 66 | if not settings["password"]: 67 | raise RuntimeError("You must specify the jira server password") 68 | 69 | credentials = { 70 | "username": settings["username"], 71 | "password": settings["password"], 72 | } 73 | if "credentials" not in settings: 74 | logging.debug("Setting credentials key in settings") 75 | settings["credentials"] = credentials 76 | else: 77 | logging.warning(f"There is already a credentials key in {path}") 78 | 79 | if cli.auth == "OAUTH": 80 | logging.debug("Auth set to OAUTH") 81 | settings["oauth_access_token"] = cli.oauth_access_token 82 | settings["oauth_access_token_secret"] = cli.oauth_access_token_secret 83 | settings["oauth_consumer_key"] = cli.oauth_consumer_key 84 | settings["oauth_private_key_pem_path"] = cli.oauth_private_key_pem_path 85 | # We need all of these when auth is set to OAUTH 86 | logging.info(f"settings: {settings}") 87 | if "oauth_access_token" not in settings: 88 | raise RuntimeError( 89 | "You must specify an Oauth access_token when auth is set to OAUTH" 90 | ) 91 | if "oauth_access_token_secret" not in settings: 92 | raise RuntimeError( 93 | "You must specify an Oauth access_token_secret when auth is set to OAUTH" 94 | ) 95 | if "oauth_consumer_key" not in settings: 96 | raise RuntimeError( 97 | "You must specify an Oauth consumer_key when auth is set to OAUTH" 98 | ) 99 | if "oauth_private_key_pem_path" not in settings: 100 | raise RuntimeError( 101 | "You must specify the path to a pem file containing the Oauth private key when auth is set to OAUTH" 102 | ) 103 | 104 | if cli.auth == "PAT": 105 | logging.debug("Auth set to PAT") 106 | if hasattr(cli, "pat_token"): 107 | if cli.pat_token: 108 | settings["pat_token"] = cli.pat_token 109 | else: 110 | logging.debug("cli pat token is None") 111 | if "pat_token" in 
settings: 112 | logging.debug("Found pat token in settings...") 113 | if "pat_token" not in settings: 114 | settings["pat_token"] = input("pat_token: ") 115 | 116 | logging.debug(f"Using JIRA server: {settings['jira_server']}") 117 | logging.debug(f"username: {settings['username']}") 118 | 119 | return settings 120 | 121 | 122 | def makeIssueData(cli): 123 | logging.warning( 124 | "makeIssueData() is deprecated and will be removed soon, use make_issue_data()" 125 | ) 126 | return make_issue_data(cli=cli) 127 | 128 | 129 | def make_issue_data(cli): 130 | """ 131 | Create issue_data from command line arguments 132 | 133 | This sets up the standard issue data fields - if a JIRA project has 134 | custom fields, it should get it's own issueData function that starts 135 | by calling this. 136 | 137 | Args: 138 | cli (argparse cli): Command line arguments 139 | 140 | Returns: 141 | dict: A dictionary containing data fields to be used to create a JIRA issue. 142 | """ 143 | try: 144 | if hasattr(cli, "json"): 145 | if cli.json: 146 | issue_data = json.loads(cli.json) 147 | logging.debug(f"issue_data (from --json): {issue_data}") 148 | else: 149 | logging.debug("json cli argument is None, leaving it unset") 150 | issue_data = {} 151 | else: 152 | logging.debug("Starting with blank issue data") 153 | issue_data = {} 154 | except AttributeError as missing_json: 155 | logging.warning("No json command line argument found") 156 | raise missing_json 157 | 158 | if hasattr(cli, "description"): 159 | if cli.description: 160 | logging.debug(f"description: {cli.description}") 161 | issue_data["description"] = cli.description 162 | else: 163 | issue_data["description"] = "No description set" 164 | 165 | if hasattr(cli, "issue_type"): 166 | logging.debug(f"issue_type: {cli.issue_type}") 167 | issue_data["issuetype"] = cli.issue_type 168 | 169 | if hasattr(cli, "label"): 170 | if cli.label: 171 | logging.debug(f"label: {cli.label}") 172 | issue_data["label"] = cli.label 173 | 174 | if hasattr(cli, "project"): 175 | logging.debug(f"project: {cli.project}") 176 | issue_data["project"] = cli.project 177 | 178 | if hasattr(cli, "summary"): 179 | if cli.summary: 180 | logging.debug(f"summary: {cli.summary}") 181 | issue_data["summary"] = cli.summary 182 | else: 183 | issue_data["summary"] = "No ticket summary set" 184 | 185 | return issue_data 186 | 187 | 188 | class JiraTool: 189 | # Jira housekeeping 190 | def __init__(self, settings: dict): 191 | """ 192 | Create a JIRA helper object. 193 | 194 | This wraps an upstream JIRA object with helper methods and breakfixes 195 | to make it less painful to use. 196 | 197 | It's still painful, just less so that using the upstream module. 198 | 199 | Args: 200 | settings: All settings required to connect to JIRA. 
201 | """ 202 | 203 | self.jira_server = settings["jira_server"] 204 | self.auth = settings["auth"] 205 | self.supported_authentications = ["basic", "oauth", "pat"] 206 | 207 | # Basic AUTH 208 | if "username" in settings: 209 | self.username = settings["username"] 210 | if "password" in settings: 211 | self.password = settings["password"] 212 | 213 | # Load OAUTH credentials 214 | if "oauth_access_token" in settings: 215 | self.oauth_access_token = settings["oauth_access_token"] 216 | if "oauth_access_token_secret" in settings: 217 | self.oauth_access_token_secret = settings["oauth_access_token_secret"] 218 | if "oauth_consumer_key" in settings: 219 | self.oauth_consumer_key = settings["oauth_consumer_key"] 220 | if "oauth_private_key_pem_path" in settings: 221 | self.oauth_private_key_pem_path = settings["oauth_private_key_pem_path"] 222 | 223 | # PAT token 224 | if "pat_token" in settings: 225 | self.pat_token = settings["pat_token"] 226 | 227 | self.connect(auth=settings["auth"]) 228 | 229 | def __str__(self): 230 | """ 231 | Print a representation of the object 232 | """ 233 | raw = {"username": self.username, "jira_server": self.jira_server} 234 | return raw.__str__() 235 | 236 | def connect(self, auth: str = "basic"): 237 | """ 238 | Connects to JIRA and stores the connection object as a property. 239 | Reads required data from the JiraTool object's properties. 240 | 241 | Args: 242 | auth: What type of authentication to use to connect to JIRA. Allowed options are ["basic", "oauth", "pat"] 243 | """ 244 | jiraOptions = {"server": self.jira_server} 245 | logging.debug(f"Connecting to {self.jira_server} using {auth} authentication.") 246 | 247 | if auth.lower() not in self.supported_authentications: 248 | raise NotImplementedError( 249 | f"'{auth}' is not a valid authentication type. The only valid types are {', '.join(self.supported_authentications)}" 250 | ) 251 | 252 | if auth.lower() == "basic": 253 | jiraBasicAuth = (self.username, self.password) 254 | logging.debug( 255 | f"Creating connection to {self.jira_server} with user {self.username}" 256 | ) 257 | self.connection = JIRA(options=jiraOptions, basic_auth=jiraBasicAuth) # type: ignore 258 | 259 | if auth.lower() == "oauth": 260 | with open(self.oauth_private_key_pem_path, "r") as key_cert_file: 261 | key_cert_data = key_cert_file.read() 262 | 263 | oauth_dict = { 264 | "access_token": self.oauth_access_token, 265 | "access_token_secret": self.oauth_access_token_secret, 266 | "consumer_key": self.oauth_consumer_key, 267 | "key_cert": key_cert_data, 268 | } 269 | logging.debug( 270 | f"Creating connection to {self.jira_server} with Oauth athentication, consumer key {self.oauth_consumer_key}" 271 | ) 272 | self.connection = JIRA(options=jiraOptions, oauth=oauth_dict) 273 | 274 | if auth.lower() == "pat": 275 | logging.debug( 276 | f"Creating connection to {self.jira_server} with PAT authentication" 277 | ) 278 | self.connection = JIRA(options=jiraOptions, token_auth=self.pat_token) 279 | 280 | # Field manipulations 281 | 282 | @lru_cache(maxsize=128) 283 | def allowed_values_for_field(self, ticket: str, custom_field: str): 284 | """ 285 | Get the allowed values for a custom field on an issue 286 | 287 | JIRA isn't very forgiving about ticket values, so provide a way to 288 | extract what it's expecting to find in a given custom field. 289 | 290 | Args: 291 | ticket: The ticket to load values from. 292 | custom_field: Which custom field to determine valid values for. 
293 | 294 | Returns: 295 | dict: A dictionary containing all the allowed values for field custom_field. 296 | """ 297 | logging.debug(f"connection: {self.connection}") 298 | 299 | issue = self.get_issue_data(ticket) 300 | logging.debug(f"issue: {issue}") 301 | 302 | meta = self.get_issue_metadata(ticket=ticket) 303 | raw_fields = meta["fields"][custom_field]["allowedValues"] 304 | allowed = {} 305 | for r in raw_fields: 306 | allowed[r["value"]] = r["id"] 307 | return allowed 308 | 309 | @lru_cache(maxsize=128) 310 | def customfield_id_map(self, ticket: str): 311 | """ 312 | Create a dict keyed by customfield id with the the human names for 313 | a ticket's custom fields. 314 | 315 | JIRA's API won't let you get the custom field data from an issue 316 | type because that would be too logical. Instead, you have to read 317 | them from an existing ticket of the type, which encourages people 318 | to keep golden tickets lying around. 319 | 320 | Instead of winning a trip to Wonka's factory, all you get for a 321 | golden ticket is more aggravation from JIRA when someone inevitably 322 | deletes them. 323 | 324 | Args: 325 | ticket: which ticket to load custom field data from 326 | 327 | Returns: 328 | dict containing customfield id -> human name mappings 329 | """ 330 | issue = self.get_issue_data(ticket) 331 | logging.debug(f"issue: {issue}") 332 | meta = self.get_issue_metadata(ticket=ticket) 333 | fields = meta["fields"] 334 | logging.debug(f"fields: {fields.keys()}") 335 | 336 | allfields = self.connection.fields() 337 | name_map = { 338 | self.connection.field["id"]: self.connection.field["name"] 339 | for self.connection.field in allfields 340 | } 341 | logging.debug(f"name_map: {name_map}") 342 | return name_map 343 | 344 | @lru_cache(maxsize=128) 345 | def customfield_title(self, ticket: str, custom_field: str) -> str: 346 | """ 347 | Return the human name of a custom field 348 | 349 | Args: 350 | ticket: ticket to read field data from 351 | custom_field: which field 352 | 353 | Returns: 354 | str human readable name of the custom field 355 | """ 356 | human_names = self.customfield_id_map(ticket=ticket) 357 | return human_names[custom_field] 358 | 359 | def get_issue_subtasks(self, ticket: str): 360 | """ 361 | Return the list of subtask objects in the specified jira issue 362 | """ 363 | issue = self.get_issue_data(ticket) 364 | return issue.fields.subtasks 365 | 366 | def list_issue_subtasks(self, ticket: str): 367 | """ 368 | Return a list of all subtasks for the specified jira issue 369 | """ 370 | subtasks = self.get_issue_subtasks(ticket) 371 | logging.debug(f"subtasks: {subtasks}") 372 | subtask_list = [] 373 | logging.debug(f"subtasks: {subtasks}") 374 | for k in subtasks: 375 | logging.debug(f"subtask: {k.key}") 376 | subtask_list.append(k.key) 377 | subtask_list.sort() 378 | return subtask_list 379 | 380 | def updateField(self, ticket: str, custom_field: str, value, field_type: str): 381 | logging.warning( 382 | "JiraTool.updateField() is deprecated and will be removed soon, use JiraTool.update_field" 383 | ) 384 | return self.update_field( 385 | ticket=ticket, custom_field=custom_field, value=value, field_type=field_type 386 | ) 387 | 388 | def update_field(self, ticket: str, custom_field: str, value, field_type: str): 389 | """ 390 | Update a field on an issue. 391 | 392 | Args: 393 | ticket: Which ticket to update. 
394 | custom_field: Which field to alter 395 | field_type: JIRA's API is too janky to figure this out 396 | for itself, even though it knows what the field type is, 397 | so we have to specify it. 398 | value (varies): Varies based on field_type. 399 | 400 | Returns: 401 | Update results 402 | 403 | Raises: 404 | Re-raises any exceptions from underlying JIRA object during update 405 | """ 406 | try: 407 | issue = self.get_ticket(ticket=ticket) 408 | logging.debug("Updating issue: %s", issue) 409 | fields = {} 410 | fields = self.update_field_dict( 411 | custom_field=custom_field, 412 | value=value, 413 | field_type=field_type, 414 | fields=fields, 415 | ) 416 | return issue.update(fields=fields) 417 | except Exception as jiraConniption: 418 | logging.exception(jiraConniption) 419 | raise jiraConniption 420 | 421 | def updateMultipleFields(self, ticket: str, fields: dict): 422 | logging.warning( 423 | "JiraTool.updateField() is deprecated and will be removed soon, use JiraTool.update_field" 424 | ) 425 | return self.update_multiple_fields(ticket=ticket, fields=fields) 426 | 427 | def update_multiple_fields(self, ticket: str, fields: dict): 428 | """ 429 | Update multiple fields from a fields dictionary 430 | 431 | Args: 432 | ticket: Which ticket to update 433 | fields: A dictionary with keys for each field we need to update 434 | 435 | Raises: 436 | Re-raises any exceptions from underlying JIRA object during update 437 | """ 438 | try: 439 | issue = self.get_ticket(ticket=ticket) 440 | logging.debug("Updating %s using %s", issue, fields) 441 | return issue.update(fields=fields) 442 | except Exception as jiraConniption: 443 | logging.exception(jiraConniption) 444 | raise jiraConniption 445 | 446 | # Utility functions 447 | def assignTicket(self, ticket: str, assignee: str): 448 | logging.warning( 449 | "JiraTool.assignTicket() is deprecated and will be removed soon, use JiraTool.assign_ticket" 450 | ) 451 | return self.assign_ticket(ticket=ticket, assignee=assignee) 452 | 453 | def assign_ticket(self, ticket: str, assignee: str): 454 | """ 455 | Assign a ticket 456 | 457 | Args: 458 | ticket: What ticket to assign 459 | assignee: Who to assign it to 460 | 461 | Returns: 462 | Update results 463 | """ 464 | logging.debug(f"Assigning {ticket} to {assignee}") 465 | return self.connection.assign_issue(ticket, assignee) 466 | 467 | def unassignTicket(self, ticket: str): 468 | logging.warning( 469 | "JiraTool.unassignTicket is deprecated and will be removed soon, use JiraTool.unassign_ticket" 470 | ) 471 | return self.unassign_ticket(ticket=ticket) 472 | 473 | def unassign_ticket(self, ticket: str): 474 | """ 475 | Assign a ticket to no one 476 | 477 | Args: 478 | ticket: Which ticket to remove the assignee from 479 | 480 | Returns: 481 | Ticket update results 482 | """ 483 | logging.debug(f"Assigning {ticket} to No one") 484 | return self.connection.assign_issue(ticket, None) 485 | 486 | def addComment(self, ticket: str, comment: str): 487 | logging.warning( 488 | "JiraTool.unassignTicket is deprecated and will be removed soon, use JiraTool.unassign_ticket" 489 | ) 490 | return self.add_comment(ticket=ticket, comment=comment) 491 | 492 | def add_comment(self, ticket: str, comment: str): 493 | """ 494 | Comment on a ticket. 
495 | 496 | Args: 497 | ticket: Ticket to comment on 498 | comment: Comment to add 499 | 500 | Returns: 501 | Ticket update results 502 | """ 503 | if comment: 504 | logging.debug(f"Adding comment {comment} to ticket {ticket}") 505 | return self.connection.add_comment(ticket, comment) 506 | else: 507 | raise RuntimeError("You must specify a comment to add to the ticket") 508 | 509 | def createTicket( 510 | self, 511 | issue_data: dict, 512 | priority: str = None, 513 | strict: bool = True, 514 | required_fields: list = None, 515 | ): 516 | logging.warning( 517 | "JiraTool.createTicket() is deprecated and will be removed soon, use JiraTool.create_ticket" 518 | ) 519 | return self.create_ticket( 520 | issue_data=issue_data, 521 | priority=priority, 522 | strict=strict, 523 | required_fields=required_fields, 524 | ) 525 | 526 | def create_ticket( 527 | self, 528 | issue_data: dict, 529 | priority: str = None, 530 | strict: bool = True, 531 | required_fields: list = None, 532 | ): 533 | """ 534 | Creates a JIRA ticket from a data dictionary. 535 | 536 | Args: 537 | issue_data: dictionary with keys for every field we want 538 | to set during ticket creation. 539 | priority: What priority to assign the new ticket 540 | required_fields: What fields to ensure are set during creation 541 | strict: Enforce the required_fields 542 | 543 | Returns: 544 | Newly created issue 545 | """ 546 | logging.debug(f"Creating ticket using {issue_data}") 547 | # Make sure we have a minimum set of fields 548 | logging.debug(f"required_fields: {required_fields}") 549 | if not required_fields: 550 | required_fields = [] 551 | if strict: 552 | valid = True 553 | for r in required_fields: 554 | if r not in issue_data: 555 | valid = False 556 | logging.error(f"{r} not specified in issue_data") 557 | if not valid: 558 | logging.critical( 559 | f"You must specify all the mandatory issue fields: {required_fields}" 560 | ) 561 | raise ValueError( 562 | f"You must specify all the mandatory issue fields: {required_fields}" 563 | ) 564 | if priority: 565 | logging.debug(f"Setting ticket priority to {priority}") 566 | priority_info = self.get_priority_dict() 567 | priority_data = {"id": priority_info[priority]} 568 | issue_data["priority"] = priority_data 569 | logging.debug(f"issue_data: {issue_data}") 570 | new_issue = self.connection.create_issue(fields=issue_data) 571 | logging.debug(f"new_issue: {new_issue}") 572 | return new_issue 573 | 574 | def createSubtask( 575 | self, 576 | issue_data: dict, 577 | parent: str, 578 | required_fields: list = None, 579 | strict: bool = True, 580 | ): 581 | logging.warning( 582 | "JiraTool.createSubtask() is deprecated and will be removed soon, use JiraTool.create_subtask" 583 | ) 584 | return self.create_subtask( 585 | issue_data=issue_data, 586 | parent=parent, 587 | required_fields=required_fields, 588 | strict=strict, 589 | ) 590 | 591 | def create_subtask( 592 | self, 593 | issue_data: dict, 594 | parent: str, 595 | required_fields: list = None, 596 | strict: bool = True, 597 | ): 598 | """ 599 | Create a subtask. 600 | 601 | Creates a subtask on an existing ticket. 602 | 603 | Args: 604 | issue_data: Field data for the new subtask. 605 | parent: Ticket to add a subtask to. 606 | required_fields: List of fields to enforce in the subtask if strict is set. 607 | strict: Whether or not to enforce the field list. Defaults to True. 
608 | 609 | Returns: 610 | Ticket ID of new subtask 611 | """ 612 | logging.debug("Creating a subtask") 613 | if not parent: 614 | logging.error("You must specify a parent ticket when creating a Sub-Task") 615 | raise ValueError( 616 | "You must specify a parent ticket when creating a Sub-Task" 617 | ) 618 | issue_data["parent"] = {"id": parent} 619 | logging.debug(f"required_fields: {required_fields}") 620 | return self.create_ticket( 621 | issue_data=issue_data, required_fields=required_fields, strict=strict 622 | ) 623 | 624 | def getIssueData(self, ticket: str): 625 | logging.warning( 626 | "JiraTool.getIssueData() is deprecated and will be removed soon, use JiraTool.get_issue_data()" 627 | ) 628 | return self.get_issue_data(ticket=ticket) 629 | 630 | def get_issue_data(self, ticket: str): 631 | """ 632 | Returns the JIRA issue data for a ticket 633 | 634 | This is a shim to keep JiraTool users from having to rummage through 635 | its internals to use the jira object it's connecting to your jira 636 | server with. 637 | 638 | Args: 639 | ticket: JIRA ticket number 640 | """ 641 | return self.connection.issue(ticket) 642 | 643 | def get_issue_type(self, ticket: str) -> str: 644 | """ 645 | Convenience function to get the issue type for an issue 646 | 647 | Args: 648 | ticket: JIRA ticket number 649 | 650 | Returns: 651 | str issue type 652 | """ 653 | issue = self.get_issue_data(ticket) 654 | return issue.fields.issuetype.name 655 | 656 | def getIssueMetaData(self, ticket: str): 657 | logging.warning( 658 | "JiraTool.getIssueMetaData() is deprecated and will be removed soon, use JiraTool.get_issue_metadata()" 659 | ) 660 | return self.get_issue_metadata(ticket=ticket) 661 | 662 | def get_issue_metadata(self, ticket: str): 663 | """ 664 | Get an issue's metadata. 665 | 666 | This is a shim to keep JiraTool users from having to rummage through 667 | its internals to use the jira object it's connecting to your jira 668 | server with. 669 | 670 | Args: 671 | ticket: JIRA ticket number 672 | """ 673 | issue = self.get_issue_data(ticket=ticket) 674 | meta = self.connection.editmeta(issue) 675 | return meta 676 | 677 | def linkIssues(self, source: str, target: str, link_type: str): 678 | logging.warning( 679 | "JiraTool.linkIssues() is deprecated and will be removed soon, use JiraTool.link_issues()" 680 | ) 681 | return self.link_issues(source=source, target=target, link_type=link_type) 682 | 683 | def link_issues(self, source: str, target: str, link_type: str): 684 | """ 685 | Link two issues 686 | 687 | This is a horrible hack because the jira module fails with a permission 688 | error when I use its create_issue_link method, but I can use the same 689 | username and password with curl against the JIRA API directly and that 690 | works, so I created an issue upstream. 691 | 692 | I'm using this requests.put hack until https://github.com/pycontribs/jira/issues/1296 693 | is fixed upstream. 
694 | 695 | Based on https://confluence.atlassian.com/jirakb/how-to-use-rest-api-to-add-issue-links-in-jira-issues-939932271.html 696 | 697 | Args: 698 | source: ticket id of source ticket 699 | target: ticket id of target ticket 700 | link_type: What kind of linkage (Blocks, Related, etc) 701 | 702 | Returns: 703 | bool : Whether or not the link was successfully created 704 | """ 705 | # Jira is inconsistent about when you can use string ticket ids and 706 | # when you have to use issue objects 707 | source_issue = self.connection.issue(source) 708 | target_issue = self.connection.issue(target) 709 | logging.debug(f"source_issue: {source_issue}") 710 | logging.debug(f"target_issue: {target_issue}") 711 | 712 | # This is documented to work, but returns an error that we don't have 713 | # link issue permission. 714 | 715 | # logging.info(f"Creating '{link_type}' link from {source} to {target}") 716 | # result = self.connection.create_issue_link( 717 | # type=link_type, inwardIssue=source, outwardIssue=target 718 | # ) 719 | 720 | # Instead, we're going to hit the REST api ourselves :-( 721 | 722 | data = { 723 | "update": { 724 | "issuelinks": [ 725 | { 726 | "add": { 727 | "type": { 728 | "name": link_type, 729 | }, 730 | "outwardIssue": {"key": target}, 731 | } 732 | } 733 | ] 734 | } 735 | } 736 | url = f"{self.jira_server}/rest/api/2/issue/{source}" 737 | 738 | logging.debug(f"url: {url}") 739 | logging.debug(f"data: {data}") 740 | 741 | # Instead of maessing with creating our own oauth or PAT credential, 742 | # extract the auth method & data out of the JIRA object created in our 743 | # connect() method. 744 | # Ugly, but better than trying to do it ourselves. 745 | jira_auth = self.connection._session.auth 746 | 747 | logging.debug(f"Auth: {jira_auth}") 748 | results = requests.put(url, auth=jira_auth, json=data, timeout=30) 749 | 750 | logging.debug(f"status code: {results.status_code}") 751 | 752 | if results.status_code >= 200 and results.status_code < 300: 753 | logging.debug("Successful") 754 | logging.debug(f"results: {results}") 755 | status = True 756 | else: 757 | logging.error(f"Call failed: {results.status_code}") 758 | logging.error(f"results: {results}") 759 | status = False 760 | return status 761 | 762 | def listTickets(self, project: str): 763 | logging.warning( 764 | "JiraTool.listTickets() is deprecated and will be removed soon, use JiraTool.list_tickets()" 765 | ) 766 | return self.list_tickets(project=project) 767 | 768 | def list_tickets(self, project: str): 769 | """ 770 | Prints all the tickets in a given project. 771 | 772 | Args: 773 | project: Which JIRA project to list 774 | """ 775 | for singleIssue in self.connection.search_issues( 776 | jql_str=f"project = {project}" 777 | ): 778 | print( 779 | f"{singleIssue.key} {singleIssue.fields.summary} {singleIssue.fields.reporter.displayName}" 780 | ) 781 | 782 | def getPriorityDict(self): 783 | logging.warning( 784 | "JiraTool.getPriorityDict() is deprecated and will be removed soon, use JiraTool.get_priority_dict()" 785 | ) 786 | return self.get_priority_dict() 787 | 788 | def get_priority_dict(self): 789 | """ 790 | Priorities can be altered by the local JIRA administrator. 
791 | 792 | Returns: 793 | dictionary of all the priorities on a server and their IDs 794 | """ 795 | raw_priorities = self.connection.priorities() 796 | priority_data = {} 797 | 798 | for priority in raw_priorities: 799 | logging.debug(f"{priority.name} : {priority.id}") 800 | priority_data[priority.name] = priority.id 801 | return priority_data 802 | 803 | def getTicket(self, ticket: str): 804 | logging.warning( 805 | "JiraTool.getTicket() is deprecated and will be removed soon, use JiraTool.get_ticket()" 806 | ) 807 | return self.get_ticket(ticket=ticket) 808 | 809 | def get_ticket(self, ticket: str): 810 | """ 811 | Peel a ticket out of JIRA 812 | 813 | Args: 814 | ticket: Which ticket to load 815 | 816 | Returns: 817 | JIRA issue object 818 | """ 819 | issue = self.connection.issue(ticket) 820 | return issue 821 | 822 | def getTicketDict(self, project: str): 823 | logging.warning( 824 | "JiraTool.getTicketDict() is deprecated and will be removed soon, use JiraTool.get_ticket_dict()" 825 | ) 826 | return self.get_ticket_dict(project=project) 827 | 828 | def get_ticket_dict(self, project: str): 829 | """ 830 | Get all the JIRA tickets in a project. This is slow. 831 | 832 | Args: 833 | project: Which project to read from 834 | 835 | Returns: 836 | dict containing dictionaries for every ticket in the project, keyed by their ID. 837 | """ 838 | tickets = {} 839 | for singleIssue in self.connection.search_issues( 840 | jql_str=f"project = {project}" 841 | ): 842 | tickets[singleIssue.key] = singleIssue 843 | logging.debug(f"{singleIssue.key} : {singleIssue}") 844 | logging.debug(f"{singleIssue.key} : fields {singleIssue.fields}") 845 | logging.debug(f"dump_object(singleIssue): {dump_object(singleIssue)}") 846 | logging.debug(" ") 847 | return tickets 848 | 849 | def transitionTicket(self, ticket: str, state: str, comment: str = None): 850 | logging.warning( 851 | "JiraTool.transitionTicket() is deprecated and will be removed soon, use JiraTool.transition_ticket()" 852 | ) 853 | return self.transition_ticket(ticket=ticket, state=state, comment=comment) 854 | 855 | def transition_ticket(self, ticket: str, state: str, comment: str = None): 856 | """ 857 | Transition a ticket to a new state. 858 | 859 | This is dangerous because the API doesn't enforce any transition 860 | constraints in the project's workflows. 861 | 862 | Sometimes workflows can end up with states that can't be transitioned out 863 | of, so it's nice to have this available to pry tickets out of the 864 | dead-end states. 865 | 866 | Args: 867 | ticket: Which ticket to transition. 868 | state: What state to transition ticket to. 869 | comment: What comment to add to the ticket during transition. 870 | 871 | Raises: 872 | ValueError if state is not an available transition for ticket 873 | 874 | Returns: 875 | Result of the attempted ticket transition 876 | """ 877 | issue = self.connection.issue(ticket) 878 | available_transitions = self.ticket_transitions(ticket=ticket) 879 | 880 | if state in available_transitions: 881 | logging.info(f"Transitioning issue {ticket} to state {state}") 882 | if comment: 883 | self.addComment(ticket=ticket, comment=comment) 884 | return self.connection.transition_issue(issue, available_transitions[state]) 885 | else: 886 | raise ValueError( 887 | f"{ticket} does not have {state} as an available transition. Perhaps your user doesn't have privilege for that?"
888 | ) 889 | 890 | # debug tools 891 | 892 | @lru_cache(maxsize=128) 893 | def customfield_human_names(self, ticket: str): 894 | """ 895 | Get the human names for a ticket's custom fields. 896 | 897 | JIRA's API won't let you get the custom field data from an issue 898 | type because that would be too logical. Instead, you have to read 899 | them from an existing ticket of the type, which encourages people 900 | to keep golden tickets lying around. 901 | 902 | Instead of winning a trip to Wonka's factory, all you get for a 903 | golden ticket is more aggravation from JIRA when someone inevitably 904 | deletes them. 905 | 906 | Args: 907 | ticket: which ticket to load custom field data from 908 | 909 | Returns: 910 | dict containing customfield -> human name mappings 911 | """ 912 | issue = self.get_issue_data(ticket) 913 | logging.debug(f"issue: {issue}") 914 | meta = self.get_issue_metadata(ticket=ticket) 915 | fields = meta["fields"] 916 | logging.debug(f"fields: {fields.keys()}") 917 | 918 | allfields = self.connection.fields() 919 | name_map = { 920 | field["name"]: field["id"] 921 | for field in allfields 922 | } 923 | logging.debug(f"name_map: {name_map}") 924 | return name_map 925 | 926 | def vivisect(self, ticket_id: str): 927 | """ 928 | Vivisect a ticket so we can figure out what attributes are visible 929 | via the module's API. 930 | 931 | Args: 932 | ticket_id: Which ticket to vivisect 933 | """ 934 | ticket = self.get_ticket(ticket=ticket_id) 935 | print(f"ticket: {ticket}") 936 | print(f"Issue type: {ticket.fields.issuetype.name}") 937 | print("ticket transitions available:") 938 | for transition in self.connection.transitions(ticket): 939 | print(f" {transition}") 940 | print() 941 | print(f"ticket.fields.issuetype: {ticket.fields.issuetype}") 942 | print(f"ticket.fields.issuelinks: {ticket.fields.issuelinks}") 943 | print(f"ticket.fields.issuelinks dump: {dump_object(ticket.fields.issuelinks)}") 944 | print() 945 | print(f"ticket.fields: {ticket.fields}") 946 | print() 947 | print(f"dir(ticket): {dir(ticket)}") 948 | print() 949 | print(f"ticket.fields (dump): {dump_object(ticket.fields)}") 950 | 951 | # Internal helpers 952 | def set_template_ticket(self, ticket: str = ""): 953 | """ 954 | If a custom field only allows specific values, JIRA won't let us read 955 | those allowed values for a custom field from an issue type, only from 956 | an actual issue. 957 | 958 | If we want to assign values then, we need to know what issue to read 959 | the allowed list from, and it's less painful to assign that to the 960 | JIRA object than constantly pass a ticket argument around. 961 | 962 | With a well-engineered API, you wouldn't have to do this. You'd 963 | assign a value to a field, and if it wasn't an allowed value, the 964 | server would return an error. 965 | 966 | JIRA's API, on the other hand, is a dumpster fire and forces the user 967 | to care about internal implementation details. 968 | 969 | If a custom field is constrained to a list of values - let's use issue 970 | severity as an example - you first have to load JIRA's value mappings 971 | to integer ids. And those integers aren't even necessarily sequential. 972 | 973 | For example, here's a custom field where we might store dumpster color.
974 | 975 | "customfield_867": { 976 | "1 - grey": "5309", 977 | "2 - green": "16243", 978 | "3 - blue": "337", 979 | "4 - rust": "10967", 980 | } 981 | 982 | It's more stupid than it appears at first glance - those values can 983 | _change_ if you add or edit those values, which leads me to believe 984 | they're row numbers in a table somewhere - not even unique ids, just 985 | the row number. 986 | 987 | But wait, it's even more stupid than that - they can change if you edit 988 | _other_ custom fields in that issue type. No, really. 989 | 990 | Args: 991 | ticket: Which ticket to read template values from. 992 | """ 993 | self.template_ticket = ticket 994 | 995 | def ticketTransitions(self, ticket: str): 996 | logging.warning( 997 | "JiraTool.ticketTransitions() is deprecated and will be removed soon, use JiraTool.ticket_transitions()" 998 | ) 999 | return self.ticket_transitions(ticket=ticket) 1000 | 1001 | def ticket_transitions(self, ticket: str): 1002 | """ 1003 | Find the available transitions for a given ticket. 1004 | 1005 | JIRA won't let you read these from an issue type, only an existing 1006 | ticket. 1007 | 1008 | Args: 1009 | ticket: Which ticket to scrape the transitions from 1010 | 1011 | Returns: 1012 | dictionary keyed by transition name where the values are 1013 | transition ids. 1014 | """ 1015 | # Map the names to ids so the caller can use a human-understandable 1016 | # name instead of having to track down the id. 1017 | transitions = {} 1018 | for t in self.connection.transitions(ticket): 1019 | logging.debug(f"Found transition '{t['name']}', id {t['id']}") 1020 | transitions[t["name"]] = t["id"] 1021 | logging.debug(f"Transition lookup table: {transitions}") 1022 | return transitions 1023 | 1024 | @lru_cache(maxsize=128) 1025 | def load_customfield_allowed_values(self, ticket: str): 1026 | """ 1027 | Get the allowed values for all custom fields on a ticket 1028 | 1029 | JIRA isn't very forgiving about ticket values, so provide a way to 1030 | extract what it's expecting to find in a given custom field. 1031 | 1032 | We need this when setting menu type custom fields. 1033 | 1034 | Args: 1035 | ticket: which ticket to scrape for values 1036 | 1037 | Returns: 1038 | dictionary of allowed values for each custom field on a ticket, 1039 | keyed by customfield_XXXX 1040 | """ 1041 | logging.debug(f"connection: {self.connection}") 1042 | 1043 | issue = self.get_issue_data(ticket) 1044 | logging.debug(f"issue: {issue}") 1045 | 1046 | meta = self.get_issue_metadata(ticket=ticket) 1047 | 1048 | allowed = {} 1049 | fields = meta["fields"] 1050 | logging.debug(f"fields: {fields.keys()}") 1051 | for field in fields: 1052 | logging.debug(f"Scanning {field}") 1053 | if "allowedValues" in fields[field]: 1054 | logging.info( 1055 | f"Field {field} has an allowedValues list, converting to dict" 1056 | ) 1057 | logging.debug(f"Found {fields[field]['allowedValues']}") 1058 | data = {} 1059 | for opt in fields[field]["allowedValues"]: 1060 | if ("value" in opt) and ("id" in opt): 1061 | data[opt["value"]] = opt["id"] 1062 | logging.debug(f"Setting data['{opt['value']}'] to {opt['id']}") 1063 | allowed[field] = data 1064 | return allowed 1065 | 1066 | def _create_choice_field_entry( 1067 | self, custom_field: str, value: str, ticket: str = "" 1068 | ) -> dict: 1069 | """ 1070 | Create a field entry for a choice field.
We break this out so we 1071 | can use it in both single field update calls and when we're updating 1072 | multiple fields at once to minimize JIRA notifications 1073 | 1074 | Args: 1075 | ticket: ticket to update 1076 | custom_field: field to update 1077 | value: value to assign 1078 | 1079 | Returns: 1080 | dict with field data 1081 | """ 1082 | if not ticket: 1083 | ticket = self.template_ticket 1084 | logging.debug(f"loading id map for {custom_field}...") 1085 | value_mapping = self.allowed_values_for_field( 1086 | ticket=ticket, custom_field=custom_field 1087 | ) 1088 | entry = {"id": value_mapping[value], "value": value} 1089 | logging.debug(f"entry: {entry}") 1090 | return entry 1091 | 1092 | def _update_choice_field(self, custom_field: str, value: str, ticket: str) -> None: 1093 | """ 1094 | Update a choice-style field 1095 | 1096 | Args: 1097 | ticket: ticket to update 1098 | custom_field: field to update 1099 | value: value to assign 1100 | 1101 | Returns: 1102 | update results 1103 | """ 1104 | try: 1105 | issue = self.get_ticket(ticket=ticket) 1106 | logging.debug("Updating issue: %s", issue) 1107 | logging.debug( 1108 | f"Updating choice data, setting '{custom_field}' to '{value}'" 1109 | ) 1110 | entry = self._create_choice_field_entry( 1111 | ticket=ticket, custom_field=custom_field, value=value 1112 | ) 1113 | fields = {custom_field: entry} 1114 | logging.critical("Updating using %s", fields) 1115 | return issue.update(fields=fields) 1116 | except Exception as jiraConniption: 1117 | logging.exception(jiraConniption) 1118 | 1119 | def updateFieldDict( 1120 | self, 1121 | custom_field: str, 1122 | field_type: str, 1123 | fields: dict = None, 1124 | value=None, 1125 | child_data=None, 1126 | ): 1127 | logging.warning( 1128 | "JiraTool.updateFieldDict() is deprecated and will be removed soon, use JiraTool.update_field_dict()" 1129 | ) 1130 | return self.update_field_dict( 1131 | custom_field=custom_field, 1132 | field_type=field_type, 1133 | fields=fields, 1134 | value=value, 1135 | child_data=child_data, 1136 | ) 1137 | 1138 | def update_field_dict( 1139 | self, 1140 | custom_field: str, 1141 | field_type: str, 1142 | fields: dict = None, 1143 | value=None, 1144 | child_data=None, 1145 | ): 1146 | """ 1147 | Update the optional fields dictionary argument with an entry for the 1148 | custom field & value specified. We create a blank fields dictionary if 1149 | one is not provided. 1150 | 1151 | Args: 1152 | custom_field: Which custom field to set 1153 | field_type: What type is the field? JIRA makes us update them 1154 | differently 1155 | fields: An optional dictionary containing fields we've already set 1156 | value: What value to assign to custom_field 1157 | child_data: Some JIRA custom field types require two values, not 1158 | just one. 
1159 | 1160 | Returns: 1161 | dictionary of field data 1162 | """ 1163 | if not fields: 1164 | fields = {} 1165 | 1166 | if field_type.lower() == "array" or field_type.lower() == "list": 1167 | if custom_field not in fields: 1168 | fields[custom_field] = [] 1169 | logging.debug( 1170 | "%s not found in fields, creating empty list", custom_field 1171 | ) 1172 | 1173 | if isinstance(value, list): 1174 | for v in value: 1175 | logging.debug("Appending %s to %s", v, fields[custom_field]) 1176 | fields[custom_field].append(v) 1177 | logging.debug("%s is now %s", custom_field, fields[custom_field]) 1178 | else: 1179 | logging.debug("Appending %s to %s", value, fields[custom_field]) 1180 | fields[custom_field].append(value) 1181 | logging.debug("%s is now %s", custom_field, fields[custom_field]) 1182 | 1183 | if field_type.lower() == "choice": 1184 | fields[custom_field] = {"value": value} 1185 | 1186 | if field_type.lower() == "multi-select": 1187 | if custom_field not in fields: 1188 | logging.debug( 1189 | "%s not found in fields, creating empty list", custom_field 1190 | ) 1191 | fields[custom_field] = [] 1192 | if isinstance(value, list): 1193 | for v in value: 1194 | logging.debug("Appending %s to %s", v, fields[custom_field]) 1195 | fields[custom_field].append({"value": v}) 1196 | else: 1197 | fields[custom_field].append({"value": value}) 1198 | 1199 | if field_type.lower() == "menu" or field_type.lower() == "dropdown": 1200 | # Suck abounds. 1201 | # 1202 | # JIRA dropdown field value menus are an aggravating sharp edge. 1203 | # If you have a predefined list of menu items, you can't just 1204 | # shovel in a string that corresponds to one of those defined 1205 | # menu items. JIRA isn't smart enough to compare that string to 1206 | # its list of allowed values and use it if it's a valid option. 1207 | # 1208 | # Instead, you have to figure out what id that corresponds to, and 1209 | # set _that_. Along with the damn original value, of course. 1210 | entry = self._create_choice_field_entry( 1211 | ticket=self.template_ticket, custom_field=custom_field, value=value 1212 | ) 1213 | fields[custom_field] = entry 1214 | 1215 | if field_type.lower() == "parent": 1216 | fields[custom_field] = { 1217 | "value": value, 1218 | "child": {"value": child_data}, 1219 | } 1220 | 1221 | if field_type.lower() == "priority": 1222 | fields[custom_field] = {"name": value} 1223 | 1224 | if field_type.lower() == "string" or field_type.lower() == "str": 1225 | fields[custom_field] = value 1226 | 1227 | logging.debug("Set fields[%s] to %s", custom_field, fields[custom_field]) 1228 | return fields 1229 | 1230 | def add_issue_label(self, ticket: str = None, labels=None): 1231 | """ 1232 | Add a label or labels to a ticket.
1233 | 1234 | Args: 1235 | ticket: what ticket to add the label(s) to 1236 | labels: either a str or a list of str 1237 | """ 1238 | 1239 | if not (isinstance(labels, list) or isinstance(labels, str)): 1240 | raise ValueError("labels must be a str or a list of strings") 1241 | issue = self.get_ticket(ticket=ticket) 1242 | if isinstance(labels, str): 1243 | labels = [labels] 1244 | if isinstance(labels, list): 1245 | logging.debug(f"labels = {labels}") 1246 | for lbl in labels: 1247 | if isinstance(lbl, str): 1248 | # JIRA is slow, so eliminate unnecessary calls to the API 1249 | if lbl not in issue.fields.labels: 1250 | issue.fields.labels.append(lbl) 1251 | else: 1252 | raise ValueError( 1253 | f"Attempted to add labels {labels} to {ticket}, but {lbl} is not type str" 1254 | ) 1255 | return issue.update(fields={"labels": issue.fields.labels}) 1256 | 1257 | def get_issue_labels(self, ticket: str = None): 1258 | """ 1259 | Get labels for an issue 1260 | 1261 | Args: 1262 | ticket: str of ticket to get labels for 1263 | """ 1264 | issue = self.get_ticket(ticket=ticket) 1265 | return issue.fields.labels 1266 | 1267 | def remove_issue_label(self, ticket: str = None, label=None): 1268 | """ 1269 | Remove a label or list of labels from an issue 1270 | 1271 | Args: 1272 | ticket: what ticket to remove the label(s) from 1273 | label: either a str or a list of str 1274 | """ 1275 | if not (isinstance(label, list) or isinstance(label, str)): 1276 | raise ValueError("label must be a str or a list of strings") 1277 | issue = self.get_ticket(ticket=ticket) 1278 | if isinstance(label, str): 1279 | label = [label] 1280 | 1281 | if isinstance(label, list): 1282 | for lbl in label: 1283 | if isinstance(lbl, str): 1284 | # if the label isn't present, we don't want to error 1285 | if lbl in issue.fields.labels: 1286 | issue.fields.labels.remove(lbl) 1287 | else: 1288 | logging.warning( 1289 | f"Attempted to remove label {lbl} but it is not in {ticket}'s labels: {issue.fields.labels}" 1290 | ) 1291 | else: 1292 | logging.warning(f"label: {label}") 1293 | logging.warning(f"type: {type(label)}") 1294 | raise ValueError( 1295 | f"Attempted to remove labels {label} from {ticket}, but {lbl} is not type str" 1296 | ) 1297 | return issue.update(fields={"labels": issue.fields.labels}) 1298 | 1299 | def jql(self, jql: str = None): 1300 | """ 1301 | Return issues matching a JQL query 1302 | 1303 | Args: 1304 | jql: A string containing a JQL query 1305 | """ 1306 | logging.debug(f"JQL: {jql}") 1307 | results = self.connection.search_issues(jql) 1308 | logging.debug(f"QUERY RESULTS: {results}") 1309 | return results 1310 | -------------------------------------------------------------------------------- /jira_commands/utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Utility functions 3 | # 4 | # Author: Joe Block 5 | # License: Apache 2.0 6 | # Copyright 2022, ZScaler Inc. 7 | 8 | 9 | def dump_object(obj): 10 | """ 11 | Dump an object for debugging 12 | 13 | Args: 14 | obj: a python object to dump 15 | """ 16 | for attr in dir(obj): 17 | print("obj.%s = %r" % (attr, getattr(obj, attr))) 18 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
2 | 3 | [[package]] 4 | name = "black" 5 | version = "24.4.2" 6 | description = "The uncompromising code formatter." 7 | optional = false 8 | python-versions = ">=3.8" 9 | files = [ 10 | {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, 11 | {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, 12 | {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, 13 | {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, 14 | {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, 15 | {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, 16 | {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, 17 | {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, 18 | {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, 19 | {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, 20 | {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, 21 | {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, 22 | {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, 23 | {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, 24 | {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, 25 | {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, 26 | {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, 27 | {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, 28 | {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, 29 | {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, 30 | {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, 31 | {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, 32 | ] 33 | 34 | [package.dependencies] 35 | click = ">=8.0.0" 36 | mypy-extensions = ">=0.4.3" 37 | packaging = ">=22.0" 38 | 
pathspec = ">=0.9.0" 39 | platformdirs = ">=2" 40 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 41 | typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} 42 | 43 | [package.extras] 44 | colorama = ["colorama (>=0.4.3)"] 45 | d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] 46 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 47 | uvloop = ["uvloop (>=0.15.2)"] 48 | 49 | [[package]] 50 | name = "certifi" 51 | version = "2024.6.2" 52 | description = "Python package for providing Mozilla's CA Bundle." 53 | optional = false 54 | python-versions = ">=3.6" 55 | files = [ 56 | {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, 57 | {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, 58 | ] 59 | 60 | [[package]] 61 | name = "cffi" 62 | version = "1.16.0" 63 | description = "Foreign Function Interface for Python calling C code." 64 | optional = false 65 | python-versions = ">=3.8" 66 | files = [ 67 | {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, 68 | {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, 69 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, 70 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, 71 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, 72 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, 73 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, 74 | {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, 75 | {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, 76 | {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, 77 | {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, 78 | {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, 79 | {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, 80 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, 81 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, 82 | {file = 
"cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, 83 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, 84 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, 85 | {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, 86 | {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, 87 | {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, 88 | {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, 89 | {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, 90 | {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, 91 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, 92 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, 93 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, 94 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, 95 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, 96 | {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, 97 | {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, 98 | {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, 99 | {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, 100 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, 101 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, 102 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, 103 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, 104 | {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, 105 | {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, 106 | {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, 107 | {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, 108 | {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, 109 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, 110 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, 111 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, 112 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, 113 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, 114 | {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, 115 | {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, 116 | {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, 117 | {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, 118 | {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, 119 | ] 120 | 121 | [package.dependencies] 122 | pycparser = "*" 123 | 124 | [[package]] 125 | name = "cfgv" 126 | version = "3.4.0" 127 | description = "Validate configuration and produce human readable error messages." 128 | optional = false 129 | python-versions = ">=3.8" 130 | files = [ 131 | {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, 132 | {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, 133 | ] 134 | 135 | [[package]] 136 | name = "charset-normalizer" 137 | version = "3.3.2" 138 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
139 | optional = false 140 | python-versions = ">=3.7.0" 141 | files = [ 142 | {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, 143 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, 144 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, 145 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, 146 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, 147 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, 148 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, 149 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, 150 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, 151 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, 152 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, 153 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, 154 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, 155 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, 156 | {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, 157 | {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, 158 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, 159 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, 160 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, 161 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, 162 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, 163 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, 164 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, 165 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, 166 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, 167 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, 168 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, 169 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, 170 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, 171 | {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, 172 | {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, 173 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, 174 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, 175 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, 176 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, 177 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, 178 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, 179 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, 180 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, 181 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, 182 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, 183 | {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, 184 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, 185 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, 186 | {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, 187 | {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, 188 | {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, 189 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, 190 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, 191 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, 192 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, 193 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, 194 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, 195 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, 196 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, 197 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, 198 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, 199 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, 200 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, 201 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, 202 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, 203 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, 204 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, 205 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, 206 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, 207 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, 208 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, 209 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, 210 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, 211 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, 212 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, 213 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, 214 | {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, 215 | {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, 216 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, 217 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, 218 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, 219 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, 220 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, 221 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, 222 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, 223 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, 224 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, 225 | {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, 226 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, 227 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, 228 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, 229 | {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, 230 | {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, 231 | {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, 232 | ] 233 | 234 | [[package]] 235 | name = "click" 236 | version = "8.1.7" 237 | description = "Composable command line interface toolkit" 238 | optional = false 239 | python-versions = ">=3.7" 240 | files = [ 241 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, 242 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, 243 | ] 244 | 245 | [package.dependencies] 246 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 247 | 248 | [[package]] 249 | name = "colorama" 250 | version = "0.4.6" 251 | description = "Cross-platform colored terminal text." 252 | optional = false 253 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 254 | files = [ 255 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 256 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 257 | ] 258 | 259 | [[package]] 260 | name = "cryptography" 261 | version = "42.0.8" 262 | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
263 | optional = false 264 | python-versions = ">=3.7" 265 | files = [ 266 | {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, 267 | {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, 268 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, 269 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, 270 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, 271 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, 272 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, 273 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, 274 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, 275 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, 276 | {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, 277 | {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, 278 | {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, 279 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, 280 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, 281 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, 282 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, 283 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, 284 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, 285 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, 286 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, 287 | {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, 288 | 
{file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, 289 | {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, 290 | {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, 291 | {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, 292 | {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, 293 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, 294 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, 295 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, 296 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, 297 | {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, 298 | ] 299 | 300 | [package.dependencies] 301 | cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} 302 | 303 | [package.extras] 304 | docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] 305 | docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] 306 | nox = ["nox"] 307 | pep8test = ["check-sdist", "click", "mypy", "ruff"] 308 | sdist = ["build"] 309 | ssh = ["bcrypt (>=3.1.5)"] 310 | test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] 311 | test-randomorder = ["pytest-randomly"] 312 | 313 | [[package]] 314 | name = "dateutils" 315 | version = "0.6.12" 316 | description = "Various utilities for working with date and datetime objects" 317 | optional = false 318 | python-versions = "*" 319 | files = [ 320 | {file = "dateutils-0.6.12-py2.py3-none-any.whl", hash = "sha256:f33b6ab430fa4166e7e9cb8b21ee9f6c9843c48df1a964466f52c79b2a8d53b3"}, 321 | {file = "dateutils-0.6.12.tar.gz", hash = "sha256:03dd90bcb21541bd4eb4b013637e4f1b5f944881c46cc6e4b67a6059e370e3f1"}, 322 | ] 323 | 324 | [package.dependencies] 325 | python-dateutil = "*" 326 | pytz = "*" 327 | 328 | [[package]] 329 | name = "defusedxml" 330 | version = "0.7.1" 331 | description = "XML bomb protection for Python stdlib modules" 332 | optional = false 333 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 334 | files = [ 335 | {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, 336 | {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, 337 | ] 338 | 339 | [[package]] 340 | name = "distlib" 341 | version = "0.3.8" 342 | description = "Distribution utilities" 343 | optional = false 344 | python-versions = "*" 345 | files = [ 346 | {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = 
"sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, 347 | {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, 348 | ] 349 | 350 | [[package]] 351 | name = "exceptiongroup" 352 | version = "1.2.1" 353 | description = "Backport of PEP 654 (exception groups)" 354 | optional = false 355 | python-versions = ">=3.7" 356 | files = [ 357 | {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, 358 | {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, 359 | ] 360 | 361 | [package.extras] 362 | test = ["pytest (>=6)"] 363 | 364 | [[package]] 365 | name = "filelock" 366 | version = "3.15.3" 367 | description = "A platform independent file lock." 368 | optional = false 369 | python-versions = ">=3.8" 370 | files = [ 371 | {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, 372 | {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, 373 | ] 374 | 375 | [package.extras] 376 | docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] 377 | testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] 378 | typing = ["typing-extensions (>=4.8)"] 379 | 380 | [[package]] 381 | name = "flake8" 382 | version = "7.1.0" 383 | description = "the modular source code checker: pep8 pyflakes and co" 384 | optional = false 385 | python-versions = ">=3.8.1" 386 | files = [ 387 | {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, 388 | {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, 389 | ] 390 | 391 | [package.dependencies] 392 | mccabe = ">=0.7.0,<0.8.0" 393 | pycodestyle = ">=2.12.0,<2.13.0" 394 | pyflakes = ">=3.2.0,<3.3.0" 395 | 396 | [[package]] 397 | name = "identify" 398 | version = "2.5.36" 399 | description = "File identification library for Python" 400 | optional = false 401 | python-versions = ">=3.8" 402 | files = [ 403 | {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, 404 | {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, 405 | ] 406 | 407 | [package.extras] 408 | license = ["ukkonen"] 409 | 410 | [[package]] 411 | name = "idna" 412 | version = "3.7" 413 | description = "Internationalized Domain Names in Applications (IDNA)" 414 | optional = false 415 | python-versions = ">=3.5" 416 | files = [ 417 | {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, 418 | {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, 419 | ] 420 | 421 | [[package]] 422 | name = "iniconfig" 423 | version = "2.0.0" 424 | description = "brain-dead simple config-ini parsing" 425 | optional = false 426 | python-versions = ">=3.7" 427 | files = [ 428 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 429 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 430 | ] 431 | 432 | [[package]] 433 | name = "isort" 434 | version = "5.13.2" 435 | description = "A Python utility / library to sort Python imports." 436 | optional = false 437 | python-versions = ">=3.8.0" 438 | files = [ 439 | {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, 440 | {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, 441 | ] 442 | 443 | [package.extras] 444 | colors = ["colorama (>=0.4.6)"] 445 | 446 | [[package]] 447 | name = "jira" 448 | version = "3.8.0" 449 | description = "Python library for interacting with JIRA via REST APIs." 450 | optional = false 451 | python-versions = ">=3.8" 452 | files = [ 453 | {file = "jira-3.8.0-py3-none-any.whl", hash = "sha256:12190dc84dad00b8a6c0341f7e8a254b0f38785afdec022bd5941e1184a5a3fb"}, 454 | {file = "jira-3.8.0.tar.gz", hash = "sha256:63719c529a570aaa01c3373dbb5a104dab70381c5be447f6c27f997302fa335a"}, 455 | ] 456 | 457 | [package.dependencies] 458 | defusedxml = "*" 459 | packaging = "*" 460 | Pillow = ">=2.1.0" 461 | requests = ">=2.10.0" 462 | requests-oauthlib = ">=1.1.0" 463 | requests-toolbelt = "*" 464 | typing-extensions = ">=3.7.4.2" 465 | 466 | [package.extras] 467 | async = ["requests-futures (>=0.9.7)"] 468 | cli = ["ipython (>=4.0.0)", "keyring"] 469 | docs = ["furo", "sphinx (>=5.0.0)", "sphinx-copybutton"] 470 | opt = ["PyJWT", "filemagic (>=1.6)", "requests-jwt", "requests-kerberos"] 471 | test = ["MarkupSafe (>=0.23)", "PyYAML (>=5.1)", "docutils (>=0.12)", "flaky", "oauthlib", "parameterized (>=0.8.1)", "pytest (>=6.0.0)", "pytest-cache", "pytest-cov", "pytest-instafail", "pytest-sugar", "pytest-timeout (>=1.3.1)", "pytest-xdist (>=2.2)", "requests-mock", "requires.io", "tenacity", "wheel (>=0.24.0)", "yanc (>=0.3.3)"] 472 | 473 | [[package]] 474 | name = "mccabe" 475 | version = "0.7.0" 476 | description = "McCabe checker, plugin for flake8" 477 | optional = false 478 | python-versions = ">=3.6" 479 | files = [ 480 | {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, 481 | {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, 482 | ] 483 | 484 | [[package]] 485 | name = "mypy-extensions" 486 | version = "1.0.0" 487 | description = "Type system extensions for programs checked with the mypy type checker." 
488 | optional = false 489 | python-versions = ">=3.5" 490 | files = [ 491 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, 492 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, 493 | ] 494 | 495 | [[package]] 496 | name = "nodeenv" 497 | version = "1.9.1" 498 | description = "Node.js virtual environment builder" 499 | optional = false 500 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 501 | files = [ 502 | {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, 503 | {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, 504 | ] 505 | 506 | [[package]] 507 | name = "nose" 508 | version = "1.3.7" 509 | description = "nose extends unittest to make testing easier" 510 | optional = false 511 | python-versions = "*" 512 | files = [ 513 | {file = "nose-1.3.7-py2-none-any.whl", hash = "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a"}, 514 | {file = "nose-1.3.7-py3-none-any.whl", hash = "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac"}, 515 | {file = "nose-1.3.7.tar.gz", hash = "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"}, 516 | ] 517 | 518 | [[package]] 519 | name = "oauthlib" 520 | version = "3.2.2" 521 | description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" 522 | optional = false 523 | python-versions = ">=3.6" 524 | files = [ 525 | {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, 526 | {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, 527 | ] 528 | 529 | [package.extras] 530 | rsa = ["cryptography (>=3.0.0)"] 531 | signals = ["blinker (>=1.4.0)"] 532 | signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] 533 | 534 | [[package]] 535 | name = "packaging" 536 | version = "24.1" 537 | description = "Core utilities for Python packages" 538 | optional = false 539 | python-versions = ">=3.8" 540 | files = [ 541 | {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, 542 | {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, 543 | ] 544 | 545 | [[package]] 546 | name = "pathspec" 547 | version = "0.12.1" 548 | description = "Utility library for gitignore style pattern matching of file paths." 
549 | optional = false 550 | python-versions = ">=3.8" 551 | files = [ 552 | {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, 553 | {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, 554 | ] 555 | 556 | [[package]] 557 | name = "pillow" 558 | version = "10.3.0" 559 | description = "Python Imaging Library (Fork)" 560 | optional = false 561 | python-versions = ">=3.8" 562 | files = [ 563 | {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, 564 | {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, 565 | {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, 566 | {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, 567 | {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, 568 | {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, 569 | {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, 570 | {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, 571 | {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, 572 | {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, 573 | {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, 574 | {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, 575 | {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, 576 | {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, 577 | {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, 578 | {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, 579 | {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, 580 | {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, 581 | {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, 582 | {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = 
"sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, 583 | {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, 584 | {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, 585 | {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, 586 | {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, 587 | {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, 588 | {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, 589 | {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, 590 | {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, 591 | {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, 592 | {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, 593 | {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, 594 | {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, 595 | {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, 596 | {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, 597 | {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, 598 | {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, 599 | {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, 600 | {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, 601 | {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, 602 | {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, 603 | {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, 604 | {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, 605 | {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, 606 | {file = 
"pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, 607 | {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, 608 | {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, 609 | {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, 610 | {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, 611 | {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, 612 | {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, 613 | {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, 614 | {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, 615 | {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, 616 | {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, 617 | {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, 618 | {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, 619 | {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, 620 | {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, 621 | {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, 622 | {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, 623 | {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, 624 | {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, 625 | {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, 626 | {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, 627 | {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, 628 | {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, 629 | {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, 630 | {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, 631 | {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, 632 | ] 633 | 634 | [package.extras] 635 | docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] 636 | fpx = ["olefile"] 637 | mic = ["olefile"] 638 | tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] 639 | typing = ["typing-extensions"] 640 | xmp = ["defusedxml"] 641 | 642 | [[package]] 643 | name = "platformdirs" 644 | version = "4.2.2" 645 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 646 | optional = false 647 | python-versions = ">=3.8" 648 | files = [ 649 | {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, 650 | {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, 651 | ] 652 | 653 | [package.extras] 654 | docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] 655 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] 656 | type = ["mypy (>=1.8)"] 657 | 658 | [[package]] 659 | name = "pluggy" 660 | version = "1.5.0" 661 | description = "plugin and hook calling mechanisms for python" 662 | optional = false 663 | python-versions = ">=3.8" 664 | files = [ 665 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, 666 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, 667 | ] 668 | 669 | [package.extras] 670 | dev = ["pre-commit", "tox"] 671 | testing = ["pytest", "pytest-benchmark"] 672 | 673 | [[package]] 674 | name = "pre-commit" 675 | version = "3.7.1" 676 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 
677 | optional = false 678 | python-versions = ">=3.9" 679 | files = [ 680 | {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, 681 | {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, 682 | ] 683 | 684 | [package.dependencies] 685 | cfgv = ">=2.0.0" 686 | identify = ">=1.0.0" 687 | nodeenv = ">=0.11.1" 688 | pyyaml = ">=5.1" 689 | virtualenv = ">=20.10.0" 690 | 691 | [[package]] 692 | name = "pyaml" 693 | version = "21.10.1" 694 | description = "PyYAML-based module to produce pretty and readable YAML-serialized data" 695 | optional = false 696 | python-versions = "*" 697 | files = [ 698 | {file = "pyaml-21.10.1-py2.py3-none-any.whl", hash = "sha256:19985ed303c3a985de4cf8fd329b6d0a5a5b5c9035ea240eccc709ebacbaf4a0"}, 699 | {file = "pyaml-21.10.1.tar.gz", hash = "sha256:c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383"}, 700 | ] 701 | 702 | [package.dependencies] 703 | PyYAML = "*" 704 | 705 | [[package]] 706 | name = "pycodestyle" 707 | version = "2.12.0" 708 | description = "Python style guide checker" 709 | optional = false 710 | python-versions = ">=3.8" 711 | files = [ 712 | {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, 713 | {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, 714 | ] 715 | 716 | [[package]] 717 | name = "pycparser" 718 | version = "2.22" 719 | description = "C parser in Python" 720 | optional = false 721 | python-versions = ">=3.8" 722 | files = [ 723 | {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, 724 | {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, 725 | ] 726 | 727 | [[package]] 728 | name = "pyflakes" 729 | version = "3.2.0" 730 | description = "passive checker of Python programs" 731 | optional = false 732 | python-versions = ">=3.8" 733 | files = [ 734 | {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, 735 | {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, 736 | ] 737 | 738 | [[package]] 739 | name = "pytest" 740 | version = "8.2.2" 741 | description = "pytest: simple powerful testing with Python" 742 | optional = false 743 | python-versions = ">=3.8" 744 | files = [ 745 | {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, 746 | {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, 747 | ] 748 | 749 | [package.dependencies] 750 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 751 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 752 | iniconfig = "*" 753 | packaging = "*" 754 | pluggy = ">=1.5,<2.0" 755 | tomli = {version = ">=1", markers = "python_version < \"3.11\""} 756 | 757 | [package.extras] 758 | dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 759 | 760 | [[package]] 761 | name = "python-dateutil" 762 | version = "2.9.0.post0" 763 | description = "Extensions to the standard Python 
datetime module" 764 | optional = false 765 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 766 | files = [ 767 | {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, 768 | {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, 769 | ] 770 | 771 | [package.dependencies] 772 | six = ">=1.5" 773 | 774 | [[package]] 775 | name = "pytz" 776 | version = "2024.1" 777 | description = "World timezone definitions, modern and historical" 778 | optional = false 779 | python-versions = "*" 780 | files = [ 781 | {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, 782 | {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, 783 | ] 784 | 785 | [[package]] 786 | name = "pyyaml" 787 | version = "6.0.1" 788 | description = "YAML parser and emitter for Python" 789 | optional = false 790 | python-versions = ">=3.6" 791 | files = [ 792 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, 793 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, 794 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, 795 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, 796 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, 797 | {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, 798 | {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, 799 | {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, 800 | {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, 801 | {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, 802 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, 803 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, 804 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, 805 | {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, 806 | {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, 807 | {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, 808 | {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, 809 | {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, 810 | {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, 811 | {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, 812 | {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, 813 | {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, 814 | {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, 815 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, 816 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, 817 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, 818 | {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, 819 | {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, 820 | {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, 821 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, 822 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, 823 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, 824 | {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, 825 | {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, 826 | {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, 827 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, 828 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, 829 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, 830 | {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, 831 | {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 832 | {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, 833 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, 834 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, 835 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, 836 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, 837 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, 838 | {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, 839 | {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, 840 | {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, 841 | {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, 842 | ] 843 | 844 | [[package]] 845 | name = "requests" 846 | version = "2.32.3" 847 | description = "Python HTTP for Humans." 848 | optional = false 849 | python-versions = ">=3.8" 850 | files = [ 851 | {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, 852 | {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, 853 | ] 854 | 855 | [package.dependencies] 856 | certifi = ">=2017.4.17" 857 | charset-normalizer = ">=2,<4" 858 | idna = ">=2.5,<4" 859 | urllib3 = ">=1.21.1,<3" 860 | 861 | [package.extras] 862 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 863 | use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] 864 | 865 | [[package]] 866 | name = "requests-oauthlib" 867 | version = "2.0.0" 868 | description = "OAuthlib authentication support for Requests." 
869 | optional = false 870 | python-versions = ">=3.4" 871 | files = [ 872 | {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, 873 | {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, 874 | ] 875 | 876 | [package.dependencies] 877 | oauthlib = ">=3.0.0" 878 | requests = ">=2.0.0" 879 | 880 | [package.extras] 881 | rsa = ["oauthlib[signedtoken] (>=3.0.0)"] 882 | 883 | [[package]] 884 | name = "requests-toolbelt" 885 | version = "1.0.0" 886 | description = "A utility belt for advanced users of python-requests" 887 | optional = false 888 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 889 | files = [ 890 | {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, 891 | {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, 892 | ] 893 | 894 | [package.dependencies] 895 | requests = ">=2.0.1,<3.0.0" 896 | 897 | [[package]] 898 | name = "ruff" 899 | version = "0.4.8" 900 | description = "An extremely fast Python linter and code formatter, written in Rust." 901 | optional = false 902 | python-versions = ">=3.7" 903 | files = [ 904 | {file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"}, 905 | {file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"}, 906 | {file = "ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"}, 907 | {file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"}, 908 | {file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"}, 909 | {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"}, 910 | {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"}, 911 | {file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"}, 912 | {file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"}, 913 | {file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"}, 914 | {file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"}, 915 | {file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"}, 916 | {file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"}, 917 | {file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"}, 918 | {file = 
"ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"}, 919 | {file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"}, 920 | {file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"}, 921 | ] 922 | 923 | [[package]] 924 | name = "six" 925 | version = "1.16.0" 926 | description = "Python 2 and 3 compatibility utilities" 927 | optional = false 928 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 929 | files = [ 930 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 931 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 932 | ] 933 | 934 | [[package]] 935 | name = "thelogrus" 936 | version = "0.7.0" 937 | description = "The Logrus is a collection of random utility functions" 938 | optional = false 939 | python-versions = ">=3.6,<4.0" 940 | files = [ 941 | {file = "thelogrus-0.7.0-py3-none-any.whl", hash = "sha256:d6b3e16e9a6c7ef8d9b9e5823434aab2720d5d7bb83450f69aaa589b9c68eda3"}, 942 | {file = "thelogrus-0.7.0.tar.gz", hash = "sha256:fd7171e065bb739ab54ac3c5098e601037ca7ca91366b8e96140e078b1fe4e04"}, 943 | ] 944 | 945 | [package.dependencies] 946 | dateutils = ">=0.6.12,<0.7.0" 947 | pyaml = ">=21.10.1,<22.0.0" 948 | 949 | [[package]] 950 | name = "tomli" 951 | version = "2.0.1" 952 | description = "A lil' TOML parser" 953 | optional = false 954 | python-versions = ">=3.7" 955 | files = [ 956 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 957 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 958 | ] 959 | 960 | [[package]] 961 | name = "typing-extensions" 962 | version = "4.12.2" 963 | description = "Backported and Experimental Type Hints for Python 3.8+" 964 | optional = false 965 | python-versions = ">=3.8" 966 | files = [ 967 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, 968 | {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, 969 | ] 970 | 971 | [[package]] 972 | name = "urllib3" 973 | version = "2.2.2" 974 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
975 | optional = false
976 | python-versions = ">=3.8"
977 | files = [
978 | {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
979 | {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
980 | ]
981 | 
982 | [package.extras]
983 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
984 | h2 = ["h2 (>=4,<5)"]
985 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
986 | zstd = ["zstandard (>=0.18.0)"]
987 | 
988 | [[package]]
989 | name = "virtualenv"
990 | version = "20.26.2"
991 | description = "Virtual Python Environment builder"
992 | optional = false
993 | python-versions = ">=3.7"
994 | files = [
995 | {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"},
996 | {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"},
997 | ]
998 | 
999 | [package.dependencies]
1000 | distlib = ">=0.3.7,<1"
1001 | filelock = ">=3.12.2,<4"
1002 | platformdirs = ">=3.9.1,<5"
1003 | 
1004 | [package.extras]
1005 | docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
1006 | test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
1007 | 
1008 | [metadata]
1009 | lock-version = "2.0"
1010 | python-versions = ">=3.10,<4.0.0"
1011 | content-hash = "a42856da8f3618450f50768415b80f57464dfb36fd96c6c51fd71386261b17f2"
1012 | 
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "jira-commands"
3 | version = "0.22.2"
4 | description = "Command line utilities for interacting with JIRA"
5 | authors = ["Joe Block "]
6 | homepage = "https://github.com/unixorn/jira-commands"
7 | repository = "https://github.com/unixorn/jira-commands"
8 | readme = 'README.md'
9 | 
10 | [tool.poetry.dependencies]
11 | python = ">=3.10,<4.0.0"
12 | jira = "^3.1.1"
13 | PyYAML = "^6.0"
14 | thelogrus = "^0.7.0"
15 | cryptography = ">=38.0.3,<43.0.0"
16 | 
17 | [tool.poetry.dev-dependencies]
18 | nose = "^1.3.7"
19 | black = "^24.4.2"
20 | isort = "^5.10.1"
21 | 
22 | [tool.poetry.scripts]
23 | # There are multiple names for some of these for ease of use
24 | jc = 'jira_commands.cli.jc:jc_driver'
25 | 
26 | jc-assign-ticket = 'jira_commands.cli.crudops:assign_ticket'
27 | jc-ticket-assign = 'jira_commands.cli.crudops:assign_ticket'
28 | 
29 | jc-close-ticket = 'jira_commands.cli.crudops:close_ticket'
30 | jc-ticket-close = 'jira_commands.cli.crudops:close_ticket'
31 | 
32 | jc-comment-on-ticket = 'jira_commands.cli.crudops:comment_on_ticket'
33 | jc-ticket-comment = 'jira_commands.cli.crudops:comment_on_ticket'
34 | 
35 | jc-create-ticket = 'jira_commands.cli.crudops:create_ticket'
36 | jc-ticket-create = 'jira_commands.cli.crudops:create_ticket'
37 | 
38 | jc-dump-all-customfield-allowed-values = 'jira_commands.cli.vivisect:dump_all_customfield_allowed_values'
39 | jc-dump-all-custom-field-allowed-values = 'jira_commands.cli.vivisect:dump_all_customfield_allowed_values'
40 | 
41 | jc-values-for-custom-field = 'jira_commands.cli.vivisect:extract_allowed_values'
42 | jc-extract-values-for-custom-field = 'jira_commands.cli.vivisect:extract_allowed_values'
43 | jc-extract-customfield-mappings = 'jira_commands.cli.map_extractor:create_mapping_file'
44 | jc-extract-custom-field-mappings = 'jira_commands.cli.map_extractor:create_mapping_file'
45 | jc-ticket-dump-metadata = 'jira_commands.cli.vivisect:dump_metadata'
46 | jc-ticket-metadata = 'jira_commands.cli.vivisect:dump_metadata'
47 | 
48 | jc-examine-ticket = 'jira_commands.cli.vivisect:vivisect'
49 | jc-vivisect-ticket = 'jira_commands.cli.vivisect:vivisect'
50 | jc-ticket-examine = 'jira_commands.cli.vivisect:vivisect'
51 | jc-ticket-vivisect = 'jira_commands.cli.vivisect:vivisect'
52 | 
53 | jc-custom-field-allowed-values = 'jira_commands.cli.vivisect:list_allowed_field_values'
54 | 
55 | jc-get-link-types = 'jira_commands.cli.crudops:get_link_types'
56 | jc-get-priorities = 'jira_commands.cli.crudops:get_priorities'
57 | jc-get-priority-ids = 'jira_commands.cli.crudops:get_priorities'
58 | 
59 | jc-jql-query = 'jira_commands.cli.jql:run_jql'
60 | jc-run-jql-query = 'jira_commands.cli.jql:run_jql'
61 | 
62 | jc-link-tickets = 'jira_commands.cli.crudops:link_tickets'
63 | jc-ticket-link = 'jira_commands.cli.crudops:link_tickets'
64 | 
65 | jc-list-ticket-transitions = 'jira_commands.cli.crudops:get_transitions'
66 | jc-ticket-transition-list = 'jira_commands.cli.crudops:get_transitions'
67 | 
68 | jc-assign-subtasks = 'jira_commands.cli.subtasks:assign_subtasks'
69 | jc-assign-ticket-subtasks = 'jira_commands.cli.subtasks:assign_subtasks'
70 | jc-close-subtasks = 'jira_commands.cli.subtasks:close_subtasks'
71 | jc-close-ticket-subtasks = 'jira_commands.cli.subtasks:close_subtasks'
72 | jc-comment-on-subtasks = 'jira_commands.cli.subtasks:comment_on_subtasks'
73 | jc-comment-on-ticket-subtasks = 'jira_commands.cli.subtasks:comment_on_subtasks'
74 | jc-list-ticket-subtasks = 'jira_commands.cli.subtasks:list_subtasks'
75 | jc-ticket-list-subtasks = 'jira_commands.cli.subtasks:list_subtasks'
76 | jc-ticket-subtasks = 'jira_commands.cli.subtasks:list_subtasks'
77 | jc-transition-subtasks = 'jira_commands.cli.subtasks:transition_subtasks'
78 | jc-transition-ticket-subtasks = 'jira_commands.cli.subtasks:transition_subtasks'
79 | 
80 | jc-add-label = 'jira_commands.cli.labels:add_label'
81 | jc-get-labels = 'jira_commands.cli.labels:get_labels'
82 | jc-label-add = 'jira_commands.cli.labels:add_label'
83 | jc-label-get = 'jira_commands.cli.labels:get_labels'
84 | jc-label-remove = 'jira_commands.cli.labels:remove_label'
85 | jc-list-labels = 'jira_commands.cli.labels:get_labels'
86 | jc-remove-label = 'jira_commands.cli.labels:remove_label'
87 | 
88 | jc-list-project-tickets = 'jira_commands.cli.list:listTickets'
89 | 
90 | jc-ticket-transition-set = 'jira_commands.cli.crudops:transition_to'
91 | jc-transition-ticket-to = 'jira_commands.cli.crudops:transition_to'
92 | 
93 | [tool.poetry.urls]
94 | "Bug Tracker" = "https://github.com/unixorn/jira-commands/issues"
95 | 
96 | [tool.poetry.group.dev.dependencies]
97 | ruff = ">=0.0.254,<0.4.9"
98 | pytest = ">=7.2.2,<9.0.0"
99 | flake8 = ">=6,<8"
100 | pre-commit = "^3.2.0"
101 | 
102 | [tool.pyright]
103 | reportMissingImports = false
104 | 
105 | [build-system]
106 | requires = ["poetry-core>=1.0.0"]
107 | build-backend = "poetry.core.masonry.api"
108 | 
109 | [tool.ruff]
110 | line-length = 132
111 | 
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/unixorn/jira-commands/3e0035f61b7f358cf6bf282af9bde7678e9786e5/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_jira_commands.py:
--------------------------------------------------------------------------------
1 | from jira_commands import __version__
2 | 
3 | 
4 | def test_version():
5 |     assert __version__ == "0.14.0"
6 | 
--------------------------------------------------------------------------------