├── src
│   ├── __init__.py
│   ├── run_git.py
│   ├── config.py
│   ├── utils.py
│   └── main.py
├── .dockerignore
├── requirements.in
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── changelog-ci.yaml
│       ├── docker-build.yaml
│       └── update-actions.yaml
├── Dockerfile
├── LICENSE
├── .pre-commit-config.yaml
├── .gitignore
├── action.yaml
├── CHANGELOG.md
├── README.md
└── requirements.txt
/src/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .github/
2 | .gitignore
3 | README.md
4 | CHANGELOG.md
5 |
--------------------------------------------------------------------------------
/requirements.in:
--------------------------------------------------------------------------------
1 | packaging==21.3
2 |
3 | PyYAML
4 | requests
5 | github-action-utils
6 | pydantic
7 | pydantic-settings
8 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "pip"
4 | directory: "/"
5 | groups:
6 | dependencies:
7 | patterns:
8 | - "*"
9 | schedule:
10 | interval: "monthly"
11 |
--------------------------------------------------------------------------------
/.github/workflows/changelog-ci.yaml:
--------------------------------------------------------------------------------
1 | name: Changelog CI
2 |
3 | on:
4 | pull_request:
5 | types: [opened]
6 |
7 | jobs:
8 | build:
9 | runs-on: ubuntu-latest
10 |
11 | steps:
12 | - uses: actions/checkout@v3
13 |
14 | - name: Run Changelog CI
15 | uses: saadmk11/changelog-ci@v1.2.0
16 |
--------------------------------------------------------------------------------
/.github/workflows/docker-build.yaml:
--------------------------------------------------------------------------------
1 | name: Test Docker Build
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | pull_request:
7 | branches: [ "main" ]
8 |
9 | permissions:
10 | contents: read
11 |
12 | jobs:
13 | test:
14 |
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - uses: actions/checkout@v3
19 | - name: Build Docker Image
20 | run: docker build -t github-actions-version-updater:${{ github.sha }} .
21 |
--------------------------------------------------------------------------------
/.github/workflows/update-actions.yaml:
--------------------------------------------------------------------------------
1 | name: GitHub Actions Version Updater
2 |
3 | # Controls when the action will run.
4 | on:
5 | # can be used to run workflow manually
6 | workflow_dispatch:
7 | schedule:
8 | # Automatically run once every month
9 | - cron: '0 0 1 * *'
10 |
11 | jobs:
12 | build:
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - uses: actions/checkout@v3
17 | with:
18 | # [Required] Access token with `workflow` scope.
19 | token: ${{ secrets.WORKFLOW_SECRET }}
20 |
21 | - name: Run GitHub Actions Version Updater
22 | uses: saadmk11/github-actions-version-updater@main
23 | with:
24 | # [Required] Access token with `workflow` scope.
25 | token: ${{ secrets.WORKFLOW_SECRET }}
26 | update_version_with: "release-tag"
27 | release_types: "major"
28 | ignore: 'saadmk11/github-actions-version-updater@main'
29 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim-bullseye
2 |
3 | LABEL "com.github.actions.name"="GitHub Actions Version Updater"
4 | LABEL "com.github.actions.description"="GitHub Actions Version Updater updates GitHub Action versions in a repository and creates a pull request with the changes."
5 | LABEL "com.github.actions.icon"="upload-cloud"
6 | LABEL "com.github.actions.color"="green"
7 |
8 | LABEL "repository"="https://github.com/saadmk11/github-actions-version-updater"
9 | LABEL "homepage"="https://github.com/saadmk11/github-actions-version-updater"
10 | LABEL "maintainer"="saadmk11"
11 |
12 | RUN apt-get update \
13 | && apt-get install \
14 | -y \
15 | --no-install-recommends \
16 | --no-install-suggests \
17 | git \
18 | && apt-get clean \
19 | && rm -rf /var/lib/apt/lists/*
20 |
21 | COPY ./requirements.txt .
22 |
23 | RUN pip install --no-cache-dir -r requirements.txt
24 |
25 | COPY . ./app
26 |
27 | ENV PYTHONPATH "${PYTHONPATH}:/app"
28 |
29 | CMD ["python", "-m", "src.main"]
30 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Maksudul Haque
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | default_language_version:
2 | python: python3.10
3 |
4 | repos:
5 | - repo: https://github.com/pre-commit/pre-commit-hooks
6 | rev: v4.6.0
7 | hooks:
8 | - id: end-of-file-fixer
9 | - id: trailing-whitespace
10 | - id: check-case-conflict
11 | - id: check-merge-conflict
12 |
13 | - repo: https://github.com/asottile/pyupgrade
14 | rev: v3.17.0
15 | hooks:
16 | - id: pyupgrade
17 | args: [--py310-plus]
18 |
19 | - repo: https://github.com/psf/black
20 | rev: 24.8.0
21 | hooks:
22 | - id: black
23 |
24 | - repo: https://github.com/asottile/blacken-docs
25 | rev: 1.18.0
26 | hooks:
27 | - id: blacken-docs
28 | additional_dependencies:
29 | - black==22.6.0
30 |
31 | - repo: https://github.com/pycqa/isort
32 | rev: 5.13.2
33 | hooks:
34 | - id: isort
35 | args: ["--profile", "black"]
36 |
37 | - repo: https://github.com/PyCQA/flake8
38 | rev: 7.1.1
39 | hooks:
40 | - id: flake8
41 | args:
42 | - "--max-line-length=88"
43 | - "--min-python-version=3.10"
44 | additional_dependencies:
45 | - flake8-bugbear
46 | - flake8-comprehensions
47 | - flake8-tidy-imports
48 | - flake8-typing-imports
49 |
50 | - repo: https://github.com/pre-commit/mirrors-mypy
51 | rev: v1.11.2
52 | hooks:
53 | - id: mypy
54 | files: ^src/
55 | additional_dependencies:
56 | - types-requests
57 | - types-PyYAML
58 | - types-setuptools
59 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 | # PyCharm file
141 | .idea/
142 |
--------------------------------------------------------------------------------
/src/run_git.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 | import github_action_utils as gha_utils # type: ignore
4 |
5 |
6 | def configure_git_author(username: str, email: str) -> None:
7 | """
8 | Configure the git author.
9 | """
10 | with gha_utils.group("Configure Git Author"):
11 | gha_utils.notice(f"Setting Git Commit User to '{username}'.")
12 | gha_utils.notice(f"Setting Git Commit email to '{email}'.")
13 |
14 | run_subprocess_command(["git", "config", "user.name", username])
15 | run_subprocess_command(["git", "config", "user.email", email])
16 |
17 |
18 | def configure_safe_directory(directory: str) -> None:
19 | """
20 | Configure git safe.directory.
21 | """
22 | with gha_utils.group("Configure Git Safe Directory"):
23 | run_subprocess_command(
24 | ["git", "config", "--global", "--add", "safe.directory", directory]
25 | )
26 |
27 |
28 | def create_new_git_branch(base_branch_name: str, new_branch_name: str) -> None:
29 | """
30 | Create a new git branch from base branch.
31 | """
32 | with gha_utils.group(
33 | f"Create New Branch ({base_branch_name} -> {new_branch_name})"
34 | ):
35 | run_subprocess_command(["git", "checkout", base_branch_name])
36 | run_subprocess_command(["git", "checkout", "-b", new_branch_name])
37 |
38 |
39 | def git_commit_changes(
40 | commit_message: str,
41 | commit_author: str,
42 | commit_branch_name: str,
43 | force_push: bool = False,
44 | ) -> None:
45 | """
46 | Commit the changed files.
47 | """
48 | with gha_utils.group("Commit Changes"):
49 | run_subprocess_command(["git", "add", "."])
50 | run_subprocess_command(
51 | ["git", "commit", f"--author={commit_author}", "-m", commit_message]
52 | )
53 | push_command = ["git", "push", "-u"]
54 |
55 | if force_push:
56 | push_command.append("-f")
57 |
58 | push_command.extend(["origin", commit_branch_name])
59 | run_subprocess_command(push_command)
60 |
61 |
62 | def git_has_changes() -> bool:
63 | """
64 | Check if there are changes to commit.
65 | """
66 | try:
67 | subprocess.check_output(["git", "diff", "--exit-code"])
68 | return False
69 | except subprocess.CalledProcessError:
70 | return True
71 |
72 |
73 | def git_diff() -> str:
74 | """Return the git diff"""
75 | return subprocess.run(["git", "diff"], capture_output=True, text=True).stdout
76 |
77 |
78 | def run_subprocess_command(command: list[str]) -> None:
79 | result = subprocess.run(command, capture_output=True, text=True)
80 |
81 | if result.returncode != 0:
82 | gha_utils.error(result.stderr)
83 | raise SystemExit(result.returncode)
84 |
85 | gha_utils.echo(result.stdout)
86 |
--------------------------------------------------------------------------------
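
Note: the following is a minimal usage sketch (not part of the repository) showing how the git helpers above are typically chained together, mirroring the flow in `src/main.py`. The workspace path, branch name, and author below are illustrative placeholders, and the snippet assumes it is run from the repository root inside a checked-out git repository with the dependencies from `requirements.in` installed.

```python
from src.run_git import (
    configure_git_author,
    configure_safe_directory,
    create_new_git_branch,
    git_commit_changes,
    git_has_changes,
)

# Illustrative values; inside the action these come from the inputs/environment.
configure_safe_directory("/github/workspace")
configure_git_author(
    "github-actions[bot]", "github-actions[bot]@users.noreply.github.com"
)

if git_has_changes():
    # Branch off the base branch, then commit and push all modified files.
    create_new_git_branch("main", "gh-actions-update-example")
    git_commit_changes(
        "Update GitHub Action Versions",
        "github-actions[bot] <github-actions[bot]@users.noreply.github.com>",
        "gh-actions-update-example",
    )
```
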
/action.yaml:
--------------------------------------------------------------------------------
1 | name: GitHub Actions Version Updater
2 | description: GitHub Actions Version Updater updates GitHub Action versions in a repository and creates a pull request with the changes.
3 | author: Maksudul Haque
4 | branding:
5 | icon: 'upload-cloud'
6 | color: 'green'
7 | inputs:
8 | committer_username:
9 | description: 'Name of the user who will commit the changes to GitHub'
10 | required: false
11 | default: 'github-actions[bot]'
12 | committer_email:
13 | description: 'Email Address of the user who will commit the changes to GitHub'
14 | required: false
15 | default: 'github-actions[bot]@users.noreply.github.com'
16 | commit_message:
17 | description: 'Commit message for the commits created by the action'
18 | required: false
19 | default: 'Update GitHub Action Versions'
20 | pull_request_title:
21 | description: 'Title of the pull requests generated by the action'
22 | required: false
23 | default: 'Update GitHub Action Versions'
24 | pull_request_branch:
25 | description: 'The pull request branch name (If provided, the action will force push to the branch)'
26 | required: false
27 | default: ''
28 | ignore:
29 | description: 'A comma separated string of GitHub Actions to ignore updates for'
30 | required: false
31 | default: ''
32 | token:
33 | description: 'GitHub Personal Access Token with `workflow` scope'
34 | required: true
35 | skip_pull_request:
36 | description: 'Skip Pull Request creation'
37 | required: false
38 | default: 'false'
39 | update_version_with:
40 | description: 'Use The Latest Release Tag/Commit SHA or Default Branch Commit SHA to update the actions. options: "release-tag" (default), "release-commit-sha", "default-branch-sha"'
41 | required: false
42 | default: 'release-tag'
43 | release_types:
44 | description: 'A comma separated string of release types (major, minor, patch) to use when updating the actions. By default, all release types are used to update the actions. Only Applicable for "release-tag", "release-commit-sha"'
45 | required: false
46 | default: 'all'
47 | pull_request_user_reviewers:
48 | description: 'A comma separated string (usernames) which denotes the users that should be added as reviewers to the pull request'
49 | required: false
50 | default: ''
51 | pull_request_team_reviewers:
52 | description: 'A comma separated string (team slugs) which denotes the teams that should be added as reviewers to the pull request'
53 | required: false
54 | default: ''
55 | pull_request_labels:
56 | description: 'A comma separated string (label names) which denotes the labels which will be added to the pull request'
57 | required: false
58 | default: ''
59 | extra_workflow_locations:
60 |     description: 'A comma separated string of file or directory paths to look for workflows. By default, only the workflow files in the .github/workflows directory are checked for updates'
61 | required: false
62 | default: ''
63 |
64 | runs:
65 | using: 'docker'
66 | image: 'Dockerfile'
67 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Version: v0.9.0
2 |
3 | * [#92](https://github.com/saadmk11/github-actions-version-updater/pull/92): [pre-commit.ci] pre-commit autoupdate
4 | * [#100](https://github.com/saadmk11/github-actions-version-updater/pull/100): Update README.md - update checkout versions
5 | * [#106](https://github.com/saadmk11/github-actions-version-updater/pull/106): Fix the actions enclosed in quotes are not updated
6 | * [#115](https://github.com/saadmk11/github-actions-version-updater/pull/115): Base on 3.12-slim-bullseye
7 | * [#116](https://github.com/saadmk11/github-actions-version-updater/pull/116): Update changelog-ci version
8 |
9 |
10 | # Version: v0.8.1
11 |
12 | * [#89](https://github.com/saadmk11/github-actions-version-updater/pull/89): Use `regex.sub` to replace old versions with new ones
13 |
14 |
15 | # Version: v0.8.0
16 |
17 | * [#73](https://github.com/saadmk11/github-actions-version-updater/pull/73): [pre-commit.ci] pre-commit autoupdate
18 | * [#80](https://github.com/saadmk11/github-actions-version-updater/pull/80): [pre-commit.ci] pre-commit autoupdate
19 | * [#82](https://github.com/saadmk11/github-actions-version-updater/pull/82): Update changelog-ci.yaml
20 | * [#83](https://github.com/saadmk11/github-actions-version-updater/pull/83): Set Created Pull Request Number as Action Output
21 | * [#84](https://github.com/saadmk11/github-actions-version-updater/pull/84): Warn users when GitHub Release do not use Semantic Versioning specification
22 | * [#85](https://github.com/saadmk11/github-actions-version-updater/pull/85): Manage Dependencies with pip-tools and Migrate to Pydantic V2
23 |
24 |
25 | # Version: v0.7.4
26 |
27 | * [#60](https://github.com/saadmk11/github-actions-version-updater/pull/60): [pre-commit.ci] pre-commit autoupdate
28 | * [#65](https://github.com/saadmk11/github-actions-version-updater/pull/65): Configuration Management with Pydantic
29 | * [#66](https://github.com/saadmk11/github-actions-version-updater/pull/66): [pre-commit.ci] pre-commit autoupdate
30 | * [#67](https://github.com/saadmk11/github-actions-version-updater/pull/67): use notice, not warning, for unsupported formats
31 | * [#68](https://github.com/saadmk11/github-actions-version-updater/pull/68): Add Fine-grained Personal Access Token Documentation
32 | * [#69](https://github.com/saadmk11/github-actions-version-updater/pull/69): Add Alternatives
33 |
34 |
35 | # Version: v0.7.3
36 |
37 | * [#53](https://github.com/saadmk11/github-actions-version-updater/pull/53): [pre-commit.ci] pre-commit autoupdate
38 | * [#54](https://github.com/saadmk11/github-actions-version-updater/pull/54): Fix badge and update `actions/checkout` in README
39 | * [#55](https://github.com/saadmk11/github-actions-version-updater/pull/55): [Experimental Feature] Pull Request Branch Input Option
40 | * [#56](https://github.com/saadmk11/github-actions-version-updater/pull/56): [Feature] Add Option to Add Labels to Pull Requests
41 | * [#57](https://github.com/saadmk11/github-actions-version-updater/pull/57): [Enhancement] Handle Updates for GitHub Actions that are Located Inside Sub-Directories
42 | * [#58](https://github.com/saadmk11/github-actions-version-updater/pull/58): Update changelog-ci.yaml
43 |
44 |
45 | # Version: v0.7.2
46 |
47 | * [#38](https://github.com/saadmk11/github-actions-version-updater/pull/38): [pre-commit.ci] pre-commit autoupdate
48 | * [#42](https://github.com/saadmk11/github-actions-version-updater/pull/42): [pre-commit.ci] pre-commit autoupdate
49 | * [#44](https://github.com/saadmk11/github-actions-version-updater/pull/44): Document required workaround for LFS-enabled repositories
50 | * [#45](https://github.com/saadmk11/github-actions-version-updater/pull/45): [pre-commit.ci] pre-commit autoupdate
51 | * [#46](https://github.com/saadmk11/github-actions-version-updater/pull/46): [pre-commit.ci] pre-commit autoupdate
52 | * [#50](https://github.com/saadmk11/github-actions-version-updater/pull/50): Try Git Safe Directory to Resolve `fatal: not in a git directory`
53 | * [#51](https://github.com/saadmk11/github-actions-version-updater/pull/51): [pre-commit.ci] pre-commit autoupdate
54 |
55 |
56 | # Version: v0.7.1
57 |
58 | * [#32](https://github.com/saadmk11/github-actions-version-updater/pull/32): ci: update checkout version
59 | * [#33](https://github.com/saadmk11/github-actions-version-updater/pull/33): Handle Workflow File Not Found Error
60 | * [#34](https://github.com/saadmk11/github-actions-version-updater/pull/34): Add Option to Specify Custom Workflow File/Directory Paths
61 | * [#35](https://github.com/saadmk11/github-actions-version-updater/pull/35): Add GitHub Actions Version Updater
62 | * [#36](https://github.com/saadmk11/github-actions-version-updater/pull/36): Update GitHub Action Versions
63 |
64 |
65 | # Version: v0.7.0
66 |
67 | * [#12](https://github.com/saadmk11/github-actions-version-updater/pull/12): Allow custom commit message and pull request title
68 | * [#19](https://github.com/saadmk11/github-actions-version-updater/pull/19): Add Option to Use Commit SHA as a Version and FIx Latest Release Version Resolver
69 | * [#17](https://github.com/saadmk11/github-actions-version-updater/pull/17): Refactor Code and Use `github-action-utils` for logging
70 | * [#24](https://github.com/saadmk11/github-actions-version-updater/pull/24): Add Option to Request Reviews for Generated Pull Request
71 | * [#18](https://github.com/saadmk11/github-actions-version-updater/pull/18): Add Option to Skip Pull Request
72 | * [#26](https://github.com/saadmk11/github-actions-version-updater/pull/26): Add Option to use Release Types (major, minor, patch) for Updates
73 | * [#27](https://github.com/saadmk11/github-actions-version-updater/pull/27): Improve Documentation
74 |
75 |
76 | # Version: v0.5.6
77 |
78 | * [#9](https://github.com/saadmk11/github-actions-version-updater/pull/9): Update marketplace badge URL
79 | * [#10](https://github.com/saadmk11/github-actions-version-updater/pull/10): Remove duplicate changes from pull request body and improve code
80 |
81 |
82 | # Version: v0.5.5
83 |
84 | * [#6](https://github.com/saadmk11/github-actions-version-updater/pull/6): Fix badge URL
85 | * [#7](https://github.com/saadmk11/github-actions-version-updater/pull/7): Fix inconsistent naming
86 |
87 |
88 | # Version: v0.5.0
89 |
90 | * [#1](https://github.com/saadmk11/github-actions-version-updater/pull/1): Create LICENSE
91 | * [#2](https://github.com/saadmk11/github-actions-version-updater/pull/2): Add ignore option to ignore particular action updates
92 | * [#3](https://github.com/saadmk11/github-actions-version-updater/pull/3): Add documentation
93 | * [#4](https://github.com/saadmk11/github-actions-version-updater/pull/4): Add Changelog CI
94 |
--------------------------------------------------------------------------------
/src/config.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import time
4 | from enum import Enum
5 | from pathlib import Path
6 | from typing import Any
7 |
8 | import github_action_utils as gha_utils # type: ignore
9 | from pydantic import Field, field_validator, model_validator
10 | from pydantic.fields import FieldInfo
11 | from pydantic_settings import (
12 | BaseSettings,
13 | EnvSettingsSource,
14 | PydanticBaseSettingsSource,
15 | SettingsConfigDict,
16 | )
17 |
18 |
19 | class UpdateVersionWith(str, Enum):
20 | LATEST_RELEASE_TAG = "release-tag"
21 | LATEST_RELEASE_COMMIT_SHA = "release-commit-sha"
22 | DEFAULT_BRANCH_COMMIT_SHA = "default-branch-sha"
23 |
24 | def __repr__(self):
25 | return self.value
26 |
27 |
28 | class ReleaseType(str, Enum):
29 | MAJOR = "major"
30 | MINOR = "minor"
31 | PATCH = "patch"
32 |
33 | def __repr__(self):
34 | return self.value
35 |
36 |
37 | class CustomEnvSettingsSource(EnvSettingsSource):
38 | def prepare_field_value(
39 | self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool
40 | ) -> Any:
41 | if field_name in [
42 | "ignore_actions",
43 | "pull_request_user_reviewers",
44 | "pull_request_team_reviewers",
45 | "pull_request_labels",
46 | "release_types",
47 | "extra_workflow_locations",
48 | ]:
49 | if not value:
50 | return None
51 | if value.startswith("[") and value.endswith("]"):
52 | return frozenset(json.loads(value))
53 | return frozenset(s.strip() for s in value.strip().split(",") if s)
54 |
55 | return value
56 |
57 |
58 | class ActionEnvironment(BaseSettings):
59 | repository: str
60 | base_branch: str = Field(alias="GITHUB_REF")
61 | event_name: str
62 | workspace: str
63 |
64 | model_config = SettingsConfigDict(
65 | case_sensitive=False, frozen=True, env_prefix="GITHUB_"
66 | )
67 |
68 |
69 | class Configuration(BaseSettings):
70 | """Configuration class for GitHub Actions Version Updater"""
71 |
72 | token: str = Field(min_length=10)
73 | pull_request_branch: str = Field(min_length=1)
74 | skip_pull_request: bool = False
75 | force_push: bool = False
76 | committer_username: str = Field(min_length=1, default="github-actions[bot]")
77 | committer_email: str = Field(
78 | min_length=5, default="github-actions[bot]@users.noreply.github.com"
79 | )
80 | pull_request_title: str = Field(
81 | min_length=1, default="Update GitHub Action Versions"
82 | )
83 | commit_message: str = Field(min_length=1, default="Update GitHub Action Versions")
84 | update_version_with: UpdateVersionWith = UpdateVersionWith.LATEST_RELEASE_TAG
85 | release_types: frozenset[ReleaseType] = frozenset(
86 | [
87 | ReleaseType.MAJOR,
88 | ReleaseType.MINOR,
89 | ReleaseType.PATCH,
90 | ]
91 | )
92 | ignore_actions: frozenset[str] = Field(
93 | default_factory=frozenset, alias="INPUT_IGNORE"
94 | )
95 | pull_request_user_reviewers: frozenset[str] = Field(default_factory=frozenset)
96 | pull_request_team_reviewers: frozenset[str] = Field(default_factory=frozenset)
97 | pull_request_labels: frozenset[str] = Field(default_factory=frozenset)
98 | extra_workflow_locations: frozenset[str] = Field(default_factory=frozenset)
99 | model_config = SettingsConfigDict(
100 | case_sensitive=False, frozen=True, env_prefix="INPUT_"
101 | )
102 |
103 | @classmethod
104 | def settings_customise_sources(
105 | cls,
106 | settings_cls: type[BaseSettings],
107 | init_settings: PydanticBaseSettingsSource,
108 | env_settings: PydanticBaseSettingsSource,
109 | dotenv_settings: PydanticBaseSettingsSource,
110 | file_secret_settings: PydanticBaseSettingsSource,
111 | ) -> tuple[PydanticBaseSettingsSource, ...]:
112 | return (
113 | init_settings,
114 | CustomEnvSettingsSource(settings_cls),
115 | dotenv_settings,
116 | file_secret_settings,
117 | )
118 |
119 | @property
120 | def git_commit_author(self) -> str:
121 | """git_commit_author option"""
122 | return f"{self.committer_username} <{self.committer_email}>"
123 |
124 | @model_validator(mode="before")
125 | @classmethod
126 | def validate_pull_request_branch(cls, values: Any) -> Any:
127 | if not values.get("pull_request_branch"):
128 | values["pull_request_branch"] = f"gh-actions-update-{int(time.time())}"
129 | values["force_push"] = False
130 | else:
131 | values["force_push"] = True
132 | return values
133 |
134 | @field_validator("release_types", mode="before")
135 | @classmethod
136 | def check_release_types(cls, value: frozenset[str]) -> frozenset[str]:
137 | if value == {"all"}:
138 | return frozenset(
139 | [
140 | ReleaseType.MAJOR,
141 | ReleaseType.MINOR,
142 | ReleaseType.PATCH,
143 | ]
144 | )
145 |
146 | return value
147 |
148 | @field_validator("extra_workflow_locations")
149 | @classmethod
150 | def check_extra_workflow_locations(cls, value: frozenset[str]) -> frozenset[str]:
151 | workflow_file_paths = []
152 |
153 | for workflow_location in value:
154 | if os.path.isdir(workflow_location):
155 | workflow_file_paths.extend(
156 | [str(path) for path in Path(workflow_location).rglob("*.y*ml")]
157 | )
158 | elif os.path.isfile(workflow_location):
159 | if workflow_location.endswith(".yml") or workflow_location.endswith(
160 | ".yaml"
161 | ):
162 | workflow_file_paths.append(workflow_location)
163 | else:
164 | gha_utils.warning(
165 | f"Skipping '{workflow_location}' "
166 | "as it is not a valid file or directory"
167 | )
168 |
169 | return frozenset(workflow_file_paths)
170 |
171 | @field_validator("pull_request_branch")
172 | @classmethod
173 | def check_pull_request_branch(cls, value: str) -> str:
174 | if value.lower() in ["main", "master"]:
175 | raise ValueError(
176 | "Invalid input for `pull_request_branch` field, "
177 | f"branch `{value}` can not be used as the pull request branch."
178 | )
179 | return value
180 |
--------------------------------------------------------------------------------
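
Note: a minimal sketch (not part of the repository) of how the configuration above is populated. GitHub Actions exposes each `with:` input as an `INPUT_<NAME>` environment variable, which `Configuration` reads via `env_prefix="INPUT_"` (plus the `INPUT_IGNORE` alias) and the comma-splitting logic in `CustomEnvSettingsSource`. The values below are made-up examples; running the snippet requires the dependencies from `requirements.in` and the repository root on `PYTHONPATH`.

```python
import os

from src.config import Configuration

# GitHub Actions turns `with:` inputs into INPUT_<NAME> environment variables.
os.environ["INPUT_TOKEN"] = "example-token-0123456789"
os.environ["INPUT_IGNORE"] = "actions/checkout@v2, actions/cache@v2"
os.environ["INPUT_RELEASE_TYPES"] = "minor, patch"

config = Configuration()

# Comma separated inputs are parsed into frozensets by CustomEnvSettingsSource.
print(config.ignore_actions)  # e.g. frozenset({'actions/checkout@v2', 'actions/cache@v2'})
print(config.release_types)   # frozenset of ReleaseType.MINOR and ReleaseType.PATCH

# No INPUT_PULL_REQUEST_BRANCH was set, so validate_pull_request_branch generates
# a timestamped branch name and leaves force_push disabled.
print(config.pull_request_branch)  # e.g. gh-actions-update-1700000000
print(config.force_push)           # False
```
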
/src/utils.py:
--------------------------------------------------------------------------------
1 | from functools import cache
2 |
3 | import github_action_utils as gha_utils # type: ignore
4 | import requests
5 |
6 | from .run_git import git_diff
7 |
8 |
9 | @cache
10 | def get_request_headers(github_token: str | None = None) -> dict[str, str]:
11 | """Get headers for GitHub API request"""
12 | headers = {"Accept": "application/vnd.github.v3+json"}
13 |
14 | if github_token:
15 | headers.update({"authorization": f"Bearer {github_token}"})
16 |
17 | return headers
18 |
19 |
20 | def create_pull_request(
21 | pull_request_title: str,
22 | repository_name: str,
23 | base_branch_name: str,
24 | head_branch_name: str,
25 | body: str,
26 | github_token: str | None = None,
27 | ) -> int | None:
28 | """Create pull request on GitHub"""
29 | with gha_utils.group("Create Pull Request"):
30 | url = f"https://api.github.com/repos/{repository_name}/pulls"
31 | payload = {
32 | "title": pull_request_title,
33 | "head": head_branch_name,
34 | "base": base_branch_name,
35 | "body": body,
36 | }
37 |
38 | response = requests.post(
39 | url, json=payload, headers=get_request_headers(github_token)
40 | )
41 |
42 | if response.status_code == 201:
43 | response_data = response.json()
44 | gha_utils.notice(
45 | f"Pull request opened at {response_data['html_url']} \U0001F389"
46 | )
47 | return response_data["number"]
48 |
49 | elif (
50 | response.status_code == 422
51 | and "A pull request already exists for" in response.text
52 | ):
53 | gha_utils.notice("A pull request already exists")
54 | return None
55 |
56 | gha_utils.error(
57 | f"Could not create a pull request on "
58 | f"{repository_name}, GitHub API Response: {response.json()}"
59 | )
60 | raise SystemExit(1)
61 |
62 |
63 | def add_pull_request_reviewers(
64 | repository_name: str,
65 | pull_request_number: int,
66 | pull_request_user_reviewers: frozenset[str],
67 | pull_request_team_reviewers: frozenset[str],
68 | github_token: str | None = None,
69 | ) -> None:
70 | """Request reviewers for a pull request on GitHub"""
71 | with gha_utils.group(f"Request Reviewers for Pull Request #{pull_request_number}"):
72 | payload = {}
73 |
74 | if pull_request_user_reviewers:
75 | payload["reviewers"] = list(pull_request_user_reviewers)
76 |
77 | if pull_request_team_reviewers:
78 | payload["team_reviewers"] = list(pull_request_team_reviewers)
79 |
80 | if not payload:
81 | gha_utils.echo("No reviewers were requested.")
82 | return
83 |
84 | url = (
85 | f"https://api.github.com/repos/{repository_name}/pulls"
86 | f"/{pull_request_number}/requested_reviewers"
87 | )
88 |
89 | response = requests.post(
90 | url, json=payload, headers=get_request_headers(github_token)
91 | )
92 |
93 | if response.status_code == 201:
94 | gha_utils.notice(
95 | "Requested review from "
96 | f"{pull_request_user_reviewers.union(pull_request_team_reviewers)} "
97 | "\U0001F389"
98 | )
99 | return
100 |
101 | gha_utils.error(
102 | f"Could not request reviews on pull request #{pull_request_number} "
103 | f"on {repository_name}, GitHub API Response: {response.json()}"
104 | )
105 |
106 |
107 | def add_pull_request_labels(
108 | repository_name: str,
109 | pull_request_number: int,
110 | labels: frozenset[str],
111 | github_token: str | None = None,
112 | ) -> None:
113 |     """Add labels to a pull request on GitHub"""
114 | with gha_utils.group(f"Add Labels to Pull Request #{pull_request_number}"):
115 | if not labels:
116 | gha_utils.echo("No labels to add.")
117 | return
118 |
119 | payload = {"labels": list(labels)}
120 |
121 | url = (
122 | f"https://api.github.com/repos/{repository_name}/issues"
123 | f"/{pull_request_number}/labels"
124 | )
125 |
126 | response = requests.post(
127 | url, json=payload, headers=get_request_headers(github_token)
128 | )
129 |
130 | if response.status_code == 200:
131 | gha_utils.notice(
132 | f"Added '{labels}' labels to "
133 | f"pull request #{pull_request_number} \U0001F389"
134 | )
135 | return
136 |
137 | gha_utils.error(
138 | f"Could not add labels to pull request #{pull_request_number} "
139 | f"on {repository_name}, GitHub API Response: {response.json()}"
140 | )
141 |
142 |
143 | def add_git_diff_to_job_summary() -> None:
144 | """Add git diff to job summary"""
145 | markdown_diff = (
146 |         "<details>"
147 |         "<summary>Git Diff</summary>"
148 |         f"\n\n```diff\n{git_diff()}```\n\n"
149 |         "</details>"
150 | )
151 | gha_utils.append_job_summary(markdown_diff)
152 |
153 |
154 | def display_whats_new() -> None:
155 | """Print what's new in GitHub Actions Version Updater Latest Version"""
156 | url = (
157 | "https://api.github.com/repos"
158 | "/saadmk11/github-actions-version-updater"
159 | "/releases/latest"
160 | )
161 | response = requests.get(url)
162 |
163 | if response.status_code == 200:
164 | response_data = response.json()
165 | latest_release_tag = response_data["tag_name"]
166 | latest_release_html_url = response_data["html_url"]
167 | latest_release_body = response_data["body"]
168 |
169 | group_title = (
170 | "\U0001F389 What's New In "
171 | f"GitHub Actions Version Updater {latest_release_tag} \U0001F389"
172 | )
173 |
174 | with gha_utils.group(group_title):
175 | gha_utils.echo(latest_release_body)
176 | gha_utils.echo(
177 | f"\nGet More Information about '{latest_release_tag}' "
178 | f"Here: {latest_release_html_url}"
179 | )
180 | gha_utils.echo(
181 | "\nTo use these features please upgrade to "
182 | f"version '{latest_release_tag}' if you haven't already."
183 | )
184 | gha_utils.echo(
185 | "\nReport Bugs or Add Feature Requests Here: "
186 | "https://github.com/saadmk11/github-actions-version-updater/issues"
187 | )
188 |
--------------------------------------------------------------------------------
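
Note: a small usage sketch (not part of the repository) for the request-header helper above; the token string is a placeholder. Because of `functools.cache`, repeated calls with the same token return the same cached dictionary.

```python
from src.utils import get_request_headers

# Without a token, only the Accept header is sent.
print(get_request_headers())
# {'Accept': 'application/vnd.github.v3+json'}

# With a token, a lowercase "authorization" Bearer header is added.
headers = get_request_headers("ghp_placeholder_token")
print(headers["authorization"])  # Bearer ghp_placeholder_token
```
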
/README.md:
--------------------------------------------------------------------------------
1 | ## GitHub Actions Version Updater
2 |
3 | [](https://github.com/saadmk11/github-actions-version-updater/releases/latest)
4 | [](https://github.com/saadmk11/github-actions-version-updater/blob/main/LICENSE)
5 | [](https://github.com/marketplace/actions/github-actions-version-updater)
6 | [](https://github.com/saadmk11/github-actions-version-updater/stargazers)
7 | 
8 |
9 | **GitHub Actions Version Updater** is a GitHub Action that is used to **Update All GitHub Actions** in a Repository
10 | and create a **pull request** with the updates (if enabled).
11 | It is an automated dependency updater similar to GitHub's **Dependabot** but for GitHub Actions.
12 |
13 | ### How Does It Work?
14 |
15 | * GitHub Actions Version Updater first goes through all the **workflows**
16 | in a repository and **checks for updates** for each of the actions used in those workflows.
17 |
18 | * If an update is found and that action is **not ignored**, then the workflows are updated
19 | with the **new version** of the action being used.
20 |
21 | * If at least one workflow file is updated then a new branch is created with the changes and pushed to GitHub. (If enabled)
22 |
23 | * Finally, a pull request is created with the newly created branch. (If enabled)
24 |
25 | ### Supported Version Fetch Sources
26 |
27 | - **`release-tag` (default):** Uses the **release tag** of **the latest release** to update a GitHub Action. (e.g. `actions/checkout@v1.2.3`)
28 |
29 | - **`release-commit-sha`:** Uses the **latest release tag commit SHA** to update a GitHub Action. (e.g. `actions/checkout@c18e2a1b1a95d0c5c63af210857e8718a479f56f`)
30 |
31 | - **`default-branch-sha`:** Uses the **latest commit SHA** of the **default branch** (e.g. `main`, `master`) to update a GitHub Action. (e.g. `actions/checkout@c18e2a1b1a95d0c5c63af210857e8718a479f56f`)
32 |
33 | You can use the `update_version_with` input option to select one of them. (e.g. `update_version_with: 'default-branch-sha'`)
34 |
35 | ### Release Types
36 |
37 | - **`all` (default):** Actions with **any** new release will be updated.
38 | - **`major`:** Actions will only be updated to new **major** releases.
39 | - **`minor`:** Actions will only be updated to new **minor** releases.
40 | - **`patch`:** Actions will only be updated to new **patch** releases.
41 |
42 | You can use the `release_types` input option to select one or more of them. (e.g. `"major, minor"`)
43 |
44 | ### Usage
45 |
46 | We recommend running this action on a [`schedule`](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#schedule)
47 | event or a [`workflow_dispatch`](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#workflow_dispatch) event.
48 |
49 | To integrate `GitHub Actions Version Updater` with your repository, create a `YAML` file
50 | inside the `.github/workflows/` directory (e.g. `.github/workflows/updater.yaml`) and add the following lines to the file:
51 |
52 | ```yaml
53 | name: GitHub Actions Version Updater
54 |
55 | # Controls when the action will run.
56 | on:
57 | schedule:
58 | # Automatically run on every Sunday
59 | - cron: '0 0 * * 0'
60 |
61 | jobs:
62 | build:
63 | runs-on: ubuntu-latest
64 |
65 | steps:
66 | - uses: actions/checkout@v4
67 | with:
68 | # [Required] Access token with `workflow` scope.
69 | token: ${{ secrets.WORKFLOW_SECRET }}
70 |
71 | - name: Run GitHub Actions Version Updater
72 | uses: saadmk11/github-actions-version-updater@v0.9.0
73 | with:
74 | # [Required] Access token with `workflow` scope.
75 | token: ${{ secrets.WORKFLOW_SECRET }}
76 | ```
77 |
78 | ### Workflow input options
79 |
80 | These are the inputs that can be provided to the action in your workflow.
81 |
82 | | Name | Required | Description | Default | Example |
83 | |--------------------------------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------|--------------------------------------------|
84 | | `token` | Yes | GitHub Access Token with `workflow` scope (The Token needs to be added to the actions secrets) | `null` | `${{ secrets.WORKFLOW_SECRET }}` |
85 | | `committer_username` | No | Name of the user who will commit the changes to GitHub | "github-actions[bot]" | "Test User" |
86 | | `committer_email` | No | Email Address of the user who will commit the changes to GitHub | "github-actions[bot]@users.noreply.github.com" | "test@test.com" |
87 | | `commit_message` | No | Commit message for the commits created by the action | "Update GitHub Action Versions" | "Custom Commit Message" |
88 | | `pull_request_title` | No | Title of the pull requests generated by the action | "Update GitHub Action Versions" | "Custom PR Title" |
89 | | `pull_request_branch` (Experimental) | No | The pull request branch name. (If provided, the action will force push to the branch) | "gh-actions-update-<timestamp>" | "github/actions-update" |
90 | | `ignore` | No | A comma separated string of GitHub Actions to ignore updates for | `null` | "actions/checkout@v2, actions/cache@v2" |
91 | | `skip_pull_request` | No | If **"true"**, the action will only check for updates and if any update is found the job will fail and update the build summary with the diff (**Options:** "true", "false") | "false" | "true" |
92 | | `update_version_with` | No | Use The Latest Release Tag/Commit SHA or Default Branch Commit SHA to update the actions (**options:** "release-tag", "release-commit-sha", "default-branch-sha") | "release-tag" | "release-commit-sha" |
93 | | `release_types` | No | A comma separated string of release types to use when updating the actions. By default, all release types are used to update the actions. Only Applicable for **"release-tag", "release-commit-sha"** (**Options:** "major", "minor", "patch" **[one or many separated by comma]**) | "all" | "minor, patch" |
94 | | `pull_request_user_reviewers` | No | A comma separated string (usernames) which denotes the users that should be added as reviewers to the pull request | `null` | "octocat, hubot, other_user" |
95 | | `pull_request_team_reviewers` | No | A comma separated string (team slugs) which denotes the teams that should be added as reviewers to the pull request | `null` | "justice-league, other_team" |
96 | | `pull_request_labels` | No | A comma separated string (label names) which denotes the labels which will be added to the pull request | `null` | "dependencies, automated" |
97 | | `extra_workflow_locations` | No | A comma separated string of file or directory paths to look for workflows. By default, only the workflow files in the `.github/workflows` directory are checked for updates | `null` | "path/to/directory, path/to/workflow.yaml" |
98 |
99 | #### Workflow with all options
100 |
101 | ```yaml
102 | name: GitHub Actions Version Updater
103 |
104 | # Controls when the action will run.
105 | on:
106 | # can be used to run workflow manually
107 | workflow_dispatch:
108 | schedule:
109 | # Automatically run on every Sunday
110 | - cron: '0 0 * * 0'
111 |
112 | jobs:
113 | build:
114 | runs-on: ubuntu-latest
115 |
116 | steps:
117 | - uses: actions/checkout@v4
118 | with:
119 | # [Required] Access token with `workflow` scope.
120 | token: ${{ secrets.WORKFLOW_SECRET }}
121 |
122 | - name: Run GitHub Actions Version Updater
123 | uses: saadmk11/github-actions-version-updater@v0.9.0
124 | with:
125 | # [Required] Access token with `workflow` scope.
126 | token: ${{ secrets.WORKFLOW_SECRET }}
127 | committer_username: 'Test'
128 | committer_email: 'test@test.com'
129 | commit_message: 'Commit Message'
130 | pull_request_title: 'Pull Request Title'
131 | ignore: 'actions/checkout@v2, actions/cache@v2'
132 | skip_pull_request: 'false'
133 | update_version_with: 'release-tag'
134 | release_types: "minor, patch"
135 | pull_request_user_reviewers: "octocat, hubot, other_user"
136 | pull_request_team_reviewers: "justice-league, other_team"
137 | pull_request_labels: "dependencies, automated"
138 | extra_workflow_locations: "path/to/directory, path/to/workflow.yaml"
139 | # [Experimental]
140 | pull_request_branch: "actions-update"
141 | ```
142 |
143 | ### Important Note
144 |
145 | GitHub does not allow updating workflow files inside a workflow run.
146 | The token generated by GitHub in every workflow (`${{secrets.GITHUB_TOKEN}}`) does not have
147 | permission to update a workflow. That's why you need to create a [Personal Access Token](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token).
148 |
149 | **For Personal Access Token (Classic):**
150 |
151 | You need to create a classic Personal Access Token with these scopes:
152 |
153 | - `repo` (To Push Changes to the Repository and Create Pull Requests)
154 | - `workflow` (To Update GitHub Action workflow files)
155 |
156 | **For Fine-grained Personal Access Token:**
157 |
158 | You need to create a Fine-grained Personal Access Token with these Repository permissions:
159 |
160 | - `Contents: Read and write` (To Push Changes to the Repository)
161 | - `Workflows: Read and write` (To Update GitHub Action workflow files)
162 | - `Pull requests: Read and write` (To Create Pull Requests)
163 | - `Metadata: Read-only` (Required by Above Permissions)
164 |
165 | After creating the token, you need to add it to your repository actions secrets and use it in the workflow.
166 | To know more about how to pass a secret to GitHub actions you can [Read GitHub Docs](https://docs.github.com/en/actions/reference/encrypted-secrets)
167 |
168 | ### A note about Git Large File Storage (LFS)
169 |
170 | If your repository uses [Git LFS](https://git-lfs.github.com/), you will need to remove the LFS-related Git hooks manually; otherwise the action
171 | will fail to create a branch, because the `lfs` executable is not installed inside the
172 | container used by this action.
173 |
174 | To work around this, just remove the hook files manually as an extra step **before** this action executes:
175 |
176 | ```yaml
177 | # ...
178 | jobs:
179 | build:
180 | runs-on: ubuntu-latest
181 |
182 | steps:
183 | - uses: actions/checkout@v4
184 | with:
185 | token: ${{ secrets.WORKFLOW_SECRET }}
186 | lfs: false
187 |
188 | - name: Remove LFS hooks
189 |         # This repository uses Git LFS, but the lfs executable is not available
190 |         # in the container, which causes the action to fail to create a new branch.
191 | # Removing the hooks manually is harmless and works around this issue.
192 | run: |
193 | rm .git/hooks/post-checkout
194 | rm .git/hooks/pre-push
195 |
196 | - name: Run GitHub Actions Version Updater
197 | uses: saadmk11/github-actions-version-updater@v0.9.0
198 | with:
199 | # ...
200 | ```
201 |
202 | ### Outputs
203 |
204 | | Output Name | Description |
205 | | ----------- |-----------------------------------------|
206 | | `GHA_UPDATE_PR_NUMBER` | The number of the created pull request. |
207 |
208 | #### Example Workflow
209 |
210 | ```yaml
211 | name: GitHub Actions Version Updater
212 |
213 | # Controls when the action will run.
214 | on:
215 | # can be used to run workflow manually
216 | workflow_dispatch:
217 | schedule:
218 | # Automatically run on every Sunday
219 | - cron: '0 0 * * 0'
220 |
221 | jobs:
222 | build:
223 | runs-on: ubuntu-latest
224 |
225 | steps:
226 | - uses: actions/checkout@v4
227 | with:
228 | # [Required] Access token with `workflow` scope.
229 | token: ${{ secrets.WORKFLOW_SECRET }}
230 |
231 | - name: Run GitHub Actions Version Updater
232 | uses: saadmk11/github-actions-version-updater@v0.9.0
233 | # Required to get the PR number
234 | id: gha-update
235 | with:
236 | # [Required] Access token with `workflow` scope.
237 | token: ${{ secrets.WORKFLOW_SECRET }}
238 | skip_pull_request: 'false'
239 | - name: Get PR Number
240 | run: echo "The PR Number is ${{ steps.gha-update.outputs.GHA_UPDATE_PR_NUMBER }}"
241 | ```
242 |
243 | ### Alternative
244 |
245 | You can also use [Dependabot](https://docs.github.com/en/github/administering-a-repository/keeping-your-actions-up-to-date-with-dependabot) to update your GitHub Actions.
246 |
247 |
248 | ### GitHub Actions Version Updater in Action
249 |
250 | 
251 |
252 |
253 | ### License
254 |
255 | The code in this project is released under the [MIT License](LICENSE).
256 |
--------------------------------------------------------------------------------
/src/main.py:
--------------------------------------------------------------------------------
1 | import re
2 | from collections.abc import Generator
3 | from functools import cache, cached_property
4 | from typing import Any
5 |
6 | import github_action_utils as gha_utils # type: ignore
7 | import requests
8 | import yaml
9 | from packaging.version import LegacyVersion, Version, parse
10 |
11 | from .config import ActionEnvironment, Configuration, ReleaseType, UpdateVersionWith
12 | from .run_git import (
13 | configure_git_author,
14 | configure_safe_directory,
15 | create_new_git_branch,
16 | git_commit_changes,
17 | git_has_changes,
18 | )
19 | from .utils import (
20 | add_git_diff_to_job_summary,
21 | add_pull_request_labels,
22 | add_pull_request_reviewers,
23 | create_pull_request,
24 | display_whats_new,
25 | get_request_headers,
26 | )
27 |
28 |
29 | class GitHubActionsVersionUpdater:
30 | """Check for GitHub Action updates"""
31 |
32 | github_api_url = "https://api.github.com"
33 | github_url = "https://github.com/"
34 | workflow_action_key = "uses"
35 |
36 | def __init__(self, env: ActionEnvironment, user_config: Configuration):
37 | self.env = env
38 | self.user_config = user_config
39 |
40 | def run(self) -> None:
41 | """Entrypoint to the GitHub Action"""
42 | workflow_paths = self._get_workflow_paths()
43 | updated_item_markdown_set: set[str] = set()
44 |
45 | if not workflow_paths:
46 | gha_utils.warning(
47 | f'No Workflow found in "{self.env.repository}". '
48 | "Skipping GitHub Actions Version Update"
49 | )
50 | raise SystemExit(0)
51 |
52 | if self.user_config.ignore_actions:
53 | gha_utils.echo(
54 | f'Actions "{self.user_config.ignore_actions}" will be skipped'
55 | )
56 |
57 | for workflow_path in workflow_paths:
58 | updated_item_markdown_set = updated_item_markdown_set.union(
59 | self._update_workflow(workflow_path)
60 | )
61 |
62 | if git_has_changes():
63 | # Use timestamp to ensure uniqueness of the new branch
64 | pull_request_body = "### GitHub Actions Version Updates\n" + "".join(
65 | updated_item_markdown_set
66 | )
67 | gha_utils.append_job_summary(pull_request_body)
68 |
69 | if not self.user_config.skip_pull_request:
70 | create_new_git_branch(
71 | self.env.base_branch, self.user_config.pull_request_branch
72 | )
73 | git_commit_changes(
74 | self.user_config.commit_message,
75 | self.user_config.git_commit_author,
76 | self.user_config.pull_request_branch,
77 | self.user_config.force_push,
78 | )
79 | pull_request_number = create_pull_request(
80 | self.user_config.pull_request_title,
81 | self.env.repository,
82 | self.env.base_branch,
83 | self.user_config.pull_request_branch,
84 | pull_request_body,
85 | self.user_config.token,
86 | )
87 | if pull_request_number is not None:
88 | gha_utils.set_output("GHA_UPDATE_PR_NUMBER", pull_request_number)
89 | add_pull_request_reviewers(
90 | self.env.repository,
91 | pull_request_number,
92 | self.user_config.pull_request_user_reviewers,
93 | self.user_config.pull_request_team_reviewers,
94 | self.user_config.token,
95 | )
96 | add_pull_request_labels(
97 | self.env.repository,
98 | pull_request_number,
99 | self.user_config.pull_request_labels,
100 | self.user_config.token,
101 | )
102 | else:
103 | add_git_diff_to_job_summary()
104 | gha_utils.error(
105 | "Updates found but skipping pull request. "
106 | "Checkout build summary for update details."
107 | )
108 | raise SystemExit(1)
109 | else:
110 | gha_utils.notice("Everything is up-to-date! \U0001F389 \U0001F389")
111 |
112 | def _update_workflow(self, workflow_path: str) -> set[str]:
113 | """Update the workflow file with the updated data"""
114 | updated_item_markdown_set: set[str] = set()
115 |
116 | try:
117 | with open(workflow_path, "r+") as file, gha_utils.group(
118 | f'Checking "{workflow_path}" for updates'
119 | ):
120 | file_data = file.read()
121 | updated_workflow_data = file_data
122 |
123 | try:
124 | workflow_data = yaml.load(file_data, Loader=yaml.FullLoader)
125 | except yaml.YAMLError as exc:
126 | gha_utils.error(
127 | f"Error while parsing YAML from '{workflow_path}' file. "
128 | f"Reason: {exc}"
129 | )
130 | return updated_item_markdown_set
131 |
132 | all_actions = set(self._get_all_actions(workflow_data))
133 | # Remove ignored actions
134 | all_actions.difference_update(self.user_config.ignore_actions)
135 |
136 | for action in all_actions:
137 | try:
138 | action_location, current_version = action.split("@")
139 | # A GitHub Action can be in a subdirectory of a repository
140 | # e.g. `flatpak/flatpak-github-actions/flatpak-builder@v4`.
141 | # we only need `user/repo` part from action_repository
142 | action_repository = "/".join(action_location.split("/")[:2])
143 | except ValueError:
144 | gha_utils.notice(
145 | f'Action "{action}" is in an unsupported format. '
146 | "We only support community actions currently."
147 | )
148 | continue
149 |
150 | new_version, new_version_data = self._get_new_version(
151 | action_repository,
152 | current_version,
153 | )
154 |
155 | if not new_version:
156 | gha_utils.warning(
157 | f"Could not find any new version for {action}. Skipping..."
158 | )
159 | continue
160 |
161 | updated_action = f"{action_location}@{new_version}"
162 |
163 | if action != updated_action:
164 | gha_utils.echo(f'Found new version for "{action_repository}"')
165 | updated_item_markdown_set.add(
166 | self._generate_updated_item_markdown(
167 | action_repository, new_version_data
168 | )
169 | )
170 | gha_utils.echo(
171 | f'Updating "{action}" with "{updated_action}"...'
172 | )
173 | updated_workflow_data = re.sub(
174 | rf"({action})(\s+['\"]?|['\"]?$)",
175 | rf"{updated_action}\2",
176 | updated_workflow_data,
177 | 0,
178 | re.MULTILINE,
179 | )
180 | else:
181 | gha_utils.echo(f'No updates found for "{action_repository}"')
182 |
183 | if updated_item_markdown_set:
184 | file.seek(0)
185 | file.write(updated_workflow_data)
186 | file.truncate()
187 | except FileNotFoundError:
188 | gha_utils.warning(f"Workflow file '{workflow_path}' not found")
189 | return updated_item_markdown_set
190 |
191 | def _generate_updated_item_markdown(
192 | self, action_repository: str, version_data: dict[str, str]
193 | ) -> str:
194 | """Generate pull request body line for pull request body"""
195 | start = f"* **[{action_repository}]({self.github_url}{action_repository})**"
196 |
197 | if self.user_config.update_version_with == UpdateVersionWith.LATEST_RELEASE_TAG:
198 | return (
199 | f"{start} published a new release "
200 | f"**[{version_data['tag_name']}]({version_data['html_url']})** "
201 | f"on {version_data['published_at']}\n"
202 | )
203 | elif (
204 | self.user_config.update_version_with
205 | == UpdateVersionWith.LATEST_RELEASE_COMMIT_SHA
206 | ):
207 | return (
208 | f"{start} added a new "
209 | f"**[commit]({version_data['commit_url']})** to "
210 | f"**[{version_data['tag_name']}]({version_data['html_url']})** Tag "
211 | f"on {version_data['commit_date']}\n"
212 | )
213 | else:
214 | return (
215 | f"{start} added a new "
216 | f"**[commit]({version_data['commit_url']})** to "
217 | f"**[{version_data['branch_name']}]({version_data['branch_url']})** "
218 | f"branch on {version_data['commit_date']}\n"
219 | )
220 |
221 | def _get_github_releases(
222 | self, action_repository: str
223 | ) -> list[dict[str, str | Version | LegacyVersion]]:
224 | """Get the GitHub releases using GitHub API"""
225 | url = f"{self.github_api_url}/repos/{action_repository}/releases?per_page=50"
226 |
227 | response = requests.get(
228 | url, headers=get_request_headers(self.user_config.token)
229 | )
230 |
231 | if response.status_code == 200:
232 | response_data = response.json()
233 |
234 | if response_data:
235 | releases = [
236 | {
237 | "published_at": release["published_at"],
238 | "html_url": release["html_url"],
239 | "tag_name": release["tag_name"],
240 | "tag_name_parsed": parse(release["tag_name"]),
241 | }
242 | for release in response_data
243 | if not release["prerelease"]
244 | ]
245 | # Sort through the releases returned by GitHub API using tag_name
246 | return sorted(
247 | releases,
248 | key=lambda r: r["tag_name_parsed"],
249 | reverse=True,
250 | )
251 |
252 | gha_utils.warning(
253 | f"Could not find any release for "
254 | f'"{action_repository}", GitHub API Response: {response.json()}'
255 | )
256 | return []
257 |
258 | @cached_property
259 | def _release_filter_function(self):
260 | """Get the release filter function"""
261 | if self.user_config.release_types == [
262 | ReleaseType.MAJOR,
263 | ReleaseType.MINOR,
264 | ReleaseType.PATCH,
265 | ]:
266 | return lambda r, c: True
267 |
268 | checks = []
269 |
270 | if ReleaseType.MAJOR in self.user_config.release_types:
271 | checks.append(lambda r, c: r.major > c.major)
272 |
273 | if ReleaseType.MINOR in self.user_config.release_types:
274 | checks.append(
275 | lambda r, c: r.major == c.major and r.minor > c.minor,
276 | )
277 |
278 | if ReleaseType.PATCH in self.user_config.release_types:
279 | checks.append(
280 | lambda r, c: r.major == c.major
281 | and r.minor == c.minor
282 | and r.micro > c.micro
283 | )
284 |
285 | def filter_func(
286 | release_tag: LegacyVersion | Version, current_version: Version
287 | ) -> bool:
288 | return any(check(release_tag, current_version) for check in checks)
289 |
290 | return filter_func
291 |
292 | def _get_latest_version_release(
293 | self, action_repository: str, current_version: str
294 | ) -> dict[str, str]:
295 | """Get the latest release"""
296 | github_releases = self._get_github_releases(action_repository)
297 | latest_release: dict[str, Any] = {}
298 |
299 | if not github_releases:
300 | return latest_release
301 |
302 | parsed_current_version: LegacyVersion | Version = parse(current_version)
303 |
304 | if isinstance(parsed_current_version, LegacyVersion):
305 | gha_utils.warning(
306 | f"Current version (`{current_version}`) of `{action_repository}` does not follow "
307 |                 "the Semantic Versioning specification. This can yield unexpected results; "
308 |                 "please review the updates suggested by this action carefully."
309 | )
310 | latest_release = github_releases[0]
311 | else:
312 | try:
313 | latest_release = next(
314 | filter(
315 | lambda r: self._release_filter_function(
316 | r["tag_name_parsed"], parsed_current_version
317 | ),
318 | github_releases,
319 | ),
320 | {},
321 | )
322 | except AttributeError:
323 | latest_release = github_releases[0]
324 | gha_utils.warning(
325 |                     f"GitHub releases of `{action_repository}` do not follow "
326 |                     "the Semantic Versioning specification. This can yield unexpected results; "
327 |                     "please review the updates suggested by this action carefully."
328 | )
329 |
330 | return latest_release
331 |
332 | def _get_commit_data(
333 | self, action_repository: str, tag_or_branch_name: str
334 | ) -> dict[str, str]:
335 |         """Get the commit data for a tag or branch using the GitHub API"""
336 | url = (
337 | f"{self.github_api_url}/repos"
338 | f"/{action_repository}/commits?sha={tag_or_branch_name}"
339 | )
340 |
341 | response = requests.get(
342 | url, headers=get_request_headers(self.user_config.token)
343 | )
344 |
345 | if response.status_code == 200:
346 | response_data = response.json()[0]
347 |
348 | return {
349 | "commit_sha": response_data["sha"],
350 | "commit_url": response_data["html_url"],
351 | "commit_date": response_data["commit"]["author"]["date"],
352 | }
353 |
354 | gha_utils.warning(
355 | f"Could not find commit data for tag/branch {tag_or_branch_name} on "
356 | f'"{action_repository}", GitHub API Response: {response.json()}'
357 | )
358 | return {}
359 |
360 | def _get_default_branch_name(self, action_repository: str) -> str | None:
361 |         """Get the action repository's default branch name using the GitHub API"""
362 | url = f"{self.github_api_url}/repos/{action_repository}"
363 |
364 | response = requests.get(
365 | url, headers=get_request_headers(self.user_config.token)
366 | )
367 |
368 | if response.status_code == 200:
369 | return response.json()["default_branch"]
370 |
371 | gha_utils.warning(
372 | f"Could not find default branch for "
373 | f'"{action_repository}", GitHub API Response: {response.json()}'
374 | )
375 | return None
376 |
377 | # flake8: noqa: B019
378 | @cache
379 | def _get_new_version(
380 | self, action_repository: str, current_version: str
381 | ) -> tuple[str | None, dict[str, str]]:
382 | """Get the new version for the action"""
383 | gha_utils.echo(f'Checking "{action_repository}" for updates...')
384 |
385 | if self.user_config.update_version_with == UpdateVersionWith.LATEST_RELEASE_TAG:
386 | latest_release_data = self._get_latest_version_release(
387 | action_repository, current_version
388 | )
389 | return latest_release_data.get("tag_name"), latest_release_data
390 |
391 | elif (
392 | self.user_config.update_version_with
393 | == UpdateVersionWith.LATEST_RELEASE_COMMIT_SHA
394 | ):
395 | latest_release_data = self._get_latest_version_release(
396 | action_repository, current_version
397 | )
398 |
399 | if not latest_release_data:
400 | return None, {}
401 |
402 | tag_commit_data = self._get_commit_data(
403 | action_repository, latest_release_data["tag_name"]
404 | )
405 |
406 | if not tag_commit_data:
407 | return None, {}
408 |
409 | return tag_commit_data["commit_sha"], {
410 | **latest_release_data,
411 | **tag_commit_data,
412 | }
413 |
414 | else:
415 | default_branch_name = self._get_default_branch_name(action_repository)
416 |
417 | if not default_branch_name:
418 | return None, {}
419 |
420 | branch_commit_data = self._get_commit_data(
421 | action_repository, default_branch_name
422 | )
423 |
424 | if not branch_commit_data:
425 | return None, {}
426 |
427 | return branch_commit_data["commit_sha"], {
428 | "branch_name": default_branch_name,
429 | "branch_url": (
430 | f"{self.github_url}{action_repository}"
431 | f"/tree/{default_branch_name}"
432 | ),
433 | **branch_commit_data,
434 | }
435 |
436 | def _get_workflow_paths_from_api(self) -> set[str]:
437 | """Get all workflows of the repository using GitHub API"""
438 | url = f"{self.github_api_url}/repos/{self.env.repository}/actions/workflows"
439 |
440 | response = requests.get(
441 | url, headers=get_request_headers(self.user_config.token)
442 | )
443 |
444 | if response.status_code == 200:
445 | return {workflow["path"] for workflow in response.json()["workflows"]}
446 |
447 | gha_utils.error(
448 |             f"An error occurred while getting workflows for "
449 | f"{self.env.repository}, GitHub API Response: {response.json()}"
450 | )
451 | return set()
452 |
453 | def _get_workflow_paths(self) -> set[str]:
454 | """Get all workflows of the repository"""
455 | workflow_paths = self._get_workflow_paths_from_api()
456 | workflow_paths.update(self.user_config.extra_workflow_locations)
457 |
458 | if not workflow_paths:
459 | raise SystemExit(1)
460 |
461 | return workflow_paths
462 |
463 | def _get_all_actions(self, data: Any) -> Generator[str, None, None]:
464 | """Recursively get all action names from workflow data"""
465 | if isinstance(data, dict):
466 | for key, value in data.items():
467 | if key == self.workflow_action_key:
468 | yield value
469 |                 elif isinstance(value, (dict, list)):
470 | yield from self._get_all_actions(value)
471 |
472 | elif isinstance(data, list):
473 | for element in data:
474 | yield from self._get_all_actions(element)
475 |
476 |
477 | if __name__ == "__main__":
478 | with gha_utils.group("Parse Configuration"):
479 | user_configuration = Configuration()
480 | action_environment = ActionEnvironment()
481 |
482 | gha_utils.echo("Using Configuration:")
483 | gha_utils.echo(user_configuration.model_dump_json(exclude={"token"}, indent=4))
484 |
485 | # Configure Git Safe Directory
486 | configure_safe_directory(action_environment.workspace)
487 |
488 | # Configure Git Author
489 | configure_git_author(
490 | user_configuration.committer_username,
491 | user_configuration.committer_email,
492 | )
493 |
494 | with gha_utils.group("Run GitHub Actions Version Updater"):
495 | actions_version_updater = GitHubActionsVersionUpdater(
496 | action_environment,
497 | user_configuration,
498 | )
499 | actions_version_updater.run()
500 |
501 | display_whats_new()
502 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with Python 3.10
3 | # by the following command:
4 | #
5 | # pip-compile --generate-hashes --output-file=requirements.txt requirements.in
6 | #
7 | annotated-types==0.5.0 \
8 | --hash=sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802 \
9 | --hash=sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd
10 | # via pydantic
11 | certifi==2023.7.22 \
12 | --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
13 | --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
14 | # via requests
15 | charset-normalizer==3.2.0 \
16 | --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \
17 | --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \
18 | --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \
19 | --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \
20 | --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \
21 | --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \
22 | --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \
23 | --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \
24 | --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \
25 | --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \
26 | --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \
27 | --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \
28 | --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \
29 | --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \
30 | --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \
31 | --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \
32 | --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \
33 | --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \
34 | --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \
35 | --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \
36 | --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \
37 | --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \
38 | --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \
39 | --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \
40 | --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \
41 | --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \
42 | --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \
43 | --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \
44 | --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \
45 | --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \
46 | --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \
47 | --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \
48 | --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \
49 | --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \
50 | --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \
51 | --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \
52 | --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \
53 | --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \
54 | --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \
55 | --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \
56 | --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \
57 | --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \
58 | --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \
59 | --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \
60 | --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \
61 | --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \
62 | --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \
63 | --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \
64 | --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \
65 | --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \
66 | --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \
67 | --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \
68 | --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \
69 | --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \
70 | --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \
71 | --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \
72 | --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \
73 | --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \
74 | --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \
75 | --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \
76 | --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \
77 | --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \
78 | --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \
79 | --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \
80 | --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \
81 | --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \
82 | --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \
83 | --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \
84 | --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \
85 | --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \
86 | --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \
87 | --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \
88 | --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \
89 | --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \
90 | --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa
91 | # via requests
92 | github-action-utils==1.1.0 \
93 | --hash=sha256:8aa40d90b89d814004160bb7e90b42cc07b55f41f66e4a4a32766d26c9ca3d61 \
94 | --hash=sha256:bc84bac22e8a25ebe86370b08ff2c174960e468e899ffd313cb09d19629acefb
95 | # via -r requirements.in
96 | idna==3.4 \
97 | --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
98 | --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
99 | # via requests
100 | packaging==21.3 \
101 | --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
102 | --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
103 | # via -r requirements.in
104 | pydantic==2.1.1 \
105 | --hash=sha256:22d63db5ce4831afd16e7c58b3192d3faf8f79154980d9397d9867254310ba4b \
106 | --hash=sha256:43bdbf359d6304c57afda15c2b95797295b702948082d4c23851ce752f21da70
107 | # via
108 | # -r requirements.in
109 | # pydantic-settings
110 | pydantic-core==2.4.0 \
111 | --hash=sha256:01947ad728f426fa07fcb26457ebf90ce29320259938414bc0edd1476e75addb \
112 | --hash=sha256:0455876d575a35defc4da7e0a199596d6c773e20d3d42fa1fc29f6aa640369ed \
113 | --hash=sha256:047580388644c473b934d27849f8ed8dbe45df0adb72104e78b543e13bf69762 \
114 | --hash=sha256:04922fea7b13cd480586fa106345fe06e43220b8327358873c22d8dfa7a711c7 \
115 | --hash=sha256:08f89697625e453421401c7f661b9d1eb4c9e4c0a12fd256eeb55b06994ac6af \
116 | --hash=sha256:0a507d7fa44688bbac76af6521e488b3da93de155b9cba6f2c9b7833ce243d59 \
117 | --hash=sha256:0d726108c1c0380b88b6dd4db559f0280e0ceda9e077f46ff90bc85cd4d03e77 \
118 | --hash=sha256:12ef6838245569fd60a179fade81ca4b90ae2fa0ef355d616f519f7bb27582db \
119 | --hash=sha256:153a61ac4030fa019b70b31fb7986461119230d3ba0ab661c757cfea652f4332 \
120 | --hash=sha256:16468bd074fa4567592d3255bf25528ed41e6b616d69bf07096bdb5b66f947d1 \
121 | --hash=sha256:17156abac20a9feed10feec867fddd91a80819a485b0107fe61f09f2117fe5f3 \
122 | --hash=sha256:1927f0e15d190f11f0b8344373731e28fd774c6d676d8a6cfadc95c77214a48b \
123 | --hash=sha256:1e8a7c62d15a5c4b307271e4252d76ebb981d6251c6ecea4daf203ef0179ea4f \
124 | --hash=sha256:2ad538b7e07343001934417cdc8584623b4d8823c5b8b258e75ec8d327cec969 \
125 | --hash=sha256:2ca4687dd996bde7f3c420def450797feeb20dcee2b9687023e3323c73fc14a2 \
126 | --hash=sha256:2edef05b63d82568b877002dc4cb5cc18f8929b59077120192df1e03e0c633f8 \
127 | --hash=sha256:2f9ea0355f90db2a76af530245fa42f04d98f752a1236ed7c6809ec484560d5b \
128 | --hash=sha256:30527d173e826f2f7651f91c821e337073df1555e3b5a0b7b1e2c39e26e50678 \
129 | --hash=sha256:32a1e0352558cd7ccc014ffe818c7d87b15ec6145875e2cc5fa4bb7351a1033d \
130 | --hash=sha256:3534118289e33130ed3f1cc487002e8d09b9f359be48b02e9cd3de58ce58fba9 \
131 | --hash=sha256:36ba9e728588588f0196deaf6751b9222492331b5552f865a8ff120869d372e0 \
132 | --hash=sha256:382f0baa044d674ad59455a5eff83d7965572b745cc72df35c52c2ce8c731d37 \
133 | --hash=sha256:394f12a2671ff8c4dfa2e85be6c08be0651ad85bc1e6aa9c77c21671baaf28cd \
134 | --hash=sha256:3ba2c9c94a9176f6321a879c8b864d7c5b12d34f549a4c216c72ce213d7d953c \
135 | --hash=sha256:3ded19dcaefe2f6706d81e0db787b59095f4ad0fbadce1edffdf092294c8a23f \
136 | --hash=sha256:3fcf529382b282a30b466bd7af05be28e22aa620e016135ac414f14e1ee6b9e1 \
137 | --hash=sha256:43a405ce520b45941df9ff55d0cd09762017756a7b413bbad3a6e8178e64a2c2 \
138 | --hash=sha256:453862ab268f6326b01f067ed89cb3a527d34dc46f6f4eeec46a15bbc706d0da \
139 | --hash=sha256:4665f7ed345012a8d2eddf4203ef145f5f56a291d010382d235b94e91813f88a \
140 | --hash=sha256:478f5f6d7e32bd4a04d102160efb2d389432ecf095fe87c555c0a6fc4adfc1a4 \
141 | --hash=sha256:49db206eb8fdc4b4f30e6e3e410584146d813c151928f94ec0db06c4f2595538 \
142 | --hash=sha256:4b262bbc13022f2097c48a21adcc360a81d83dc1d854c11b94953cd46d7d3c07 \
143 | --hash=sha256:4cbe929efa77a806e8f1a97793f2dc3ea3475ae21a9ed0f37c21320fe93f6f50 \
144 | --hash=sha256:4e562cc63b04636cde361fd47569162f1daa94c759220ff202a8129902229114 \
145 | --hash=sha256:546064c55264156b973b5e65e5fafbe5e62390902ce3cf6b4005765505e8ff56 \
146 | --hash=sha256:54df7df399b777c1fd144f541c95d351b3aa110535a6810a6a569905d106b6f3 \
147 | --hash=sha256:56a85fa0dab1567bd0cac10f0c3837b03e8a0d939e6a8061a3a420acd97e9421 \
148 | --hash=sha256:57a53a75010c635b3ad6499e7721eaa3b450e03f6862afe2dbef9c8f66e46ec8 \
149 | --hash=sha256:584a7a818c84767af16ce8bda5d4f7fedb37d3d231fc89928a192f567e4ef685 \
150 | --hash=sha256:5fd905a69ac74eaba5041e21a1e8b1a479dab2b41c93bdcc4c1cede3c12a8d86 \
151 | --hash=sha256:61d4e713f467abcdd59b47665d488bb898ad3dd47ce7446522a50e0cbd8e8279 \
152 | --hash=sha256:6213b471b68146af97b8551294e59e7392c2117e28ffad9c557c65087f4baee3 \
153 | --hash=sha256:63797499a219d8e81eb4e0c42222d0a4c8ec896f5c76751d4258af95de41fdf1 \
154 | --hash=sha256:64e8012ad60a5f0da09ed48725e6e923d1be25f2f091a640af6079f874663813 \
155 | --hash=sha256:664402ef0c238a7f8a46efb101789d5f2275600fb18114446efec83cfadb5b66 \
156 | --hash=sha256:68199ada7c310ddb8c76efbb606a0de656b40899388a7498954f423e03fc38be \
157 | --hash=sha256:69159afc2f2dc43285725f16143bc5df3c853bc1cb7df6021fce7ef1c69e8171 \
158 | --hash=sha256:6f855bcc96ed3dd56da7373cfcc9dcbabbc2073cac7f65c185772d08884790ce \
159 | --hash=sha256:6feb4b64d11d5420e517910d60a907d08d846cacaf4e029668725cd21d16743c \
160 | --hash=sha256:72f1216ca8cef7b8adacd4c4c6b89c3b0c4f97503197f5284c80f36d6e4edd30 \
161 | --hash=sha256:77dadc764cf7c5405e04866181c5bd94a447372a9763e473abb63d1dfe9b7387 \
162 | --hash=sha256:782fced7d61469fd1231b184a80e4f2fa7ad54cd7173834651a453f96f29d673 \
163 | --hash=sha256:79262be5a292d1df060f29b9a7cdd66934801f987a817632d7552534a172709a \
164 | --hash=sha256:7aa82d483d5fb867d4fb10a138ffd57b0f1644e99f2f4f336e48790ada9ada5e \
165 | --hash=sha256:853f103e2b9a58832fdd08a587a51de8b552ae90e1a5d167f316b7eabf8d7dde \
166 | --hash=sha256:867d3eea954bea807cabba83cfc939c889a18576d66d197c60025b15269d7cc0 \
167 | --hash=sha256:878a5017d93e776c379af4e7b20f173c82594d94fa073059bcc546789ad50bf8 \
168 | --hash=sha256:884235507549a6b2d3c4113fb1877ae263109e787d9e0eb25c35982ab28d0399 \
169 | --hash=sha256:8c938c96294d983dcf419b54dba2d21056959c22911d41788efbf949a29ae30d \
170 | --hash=sha256:8efc1be43b036c2b6bcfb1451df24ee0ddcf69c31351003daf2699ed93f5687b \
171 | --hash=sha256:8fba0aff4c407d0274e43697e785bcac155ad962be57518d1c711f45e72da70f \
172 | --hash=sha256:90f3785146f701e053bb6b9e8f53acce2c919aca91df88bd4975be0cb926eb41 \
173 | --hash=sha256:9137289de8fe845c246a8c3482dd0cb40338846ba683756d8f489a4bd8fddcae \
174 | --hash=sha256:9206c14a67c38de7b916e486ae280017cf394fa4b1aa95cfe88621a4e1d79725 \
175 | --hash=sha256:94d2b36a74623caab262bf95f0e365c2c058396082bd9d6a9e825657d0c1e7fa \
176 | --hash=sha256:97c6349c81cee2e69ef59eba6e6c08c5936e6b01c2d50b9e4ac152217845ae09 \
177 | --hash=sha256:a027f41c5008571314861744d83aff75a34cf3a07022e0be32b214a5bc93f7f1 \
178 | --hash=sha256:a08fd490ba36d1fbb2cd5dcdcfb9f3892deb93bd53456724389135712b5fc735 \
179 | --hash=sha256:a297c0d6c61963c5c3726840677b798ca5b7dfc71bc9c02b9a4af11d23236008 \
180 | --hash=sha256:a4ea23b07f29487a7bef2a869f68c7ee0e05424d81375ce3d3de829314c6b5ec \
181 | --hash=sha256:a8b7acd04896e8f161e1500dc5f218017db05c1d322f054e89cbd089ce5d0071 \
182 | --hash=sha256:ac2b680de398f293b68183317432b3d67ab3faeba216aec18de0c395cb5e3060 \
183 | --hash=sha256:af24ad4fbaa5e4a2000beae0c3b7fd1c78d7819ab90f9370a1cfd8998e3f8a3c \
184 | --hash=sha256:af788b64e13d52fc3600a68b16d31fa8d8573e3ff2fc9a38f8a60b8d94d1f012 \
185 | --hash=sha256:b013c7861a7c7bfcec48fd709513fea6f9f31727e7a0a93ca0dd12e056740717 \
186 | --hash=sha256:b2799c2eaf182769889761d4fb4d78b82bc47dae833799fedbf69fc7de306faa \
187 | --hash=sha256:b27f3e67f6e031f6620655741b7d0d6bebea8b25d415924b3e8bfef2dd7bd841 \
188 | --hash=sha256:b7206e41e04b443016e930e01685bab7a308113c0b251b3f906942c8d4b48fcb \
189 | --hash=sha256:b85778308bf945e9b33ac604e6793df9b07933108d20bdf53811bc7c2798a4af \
190 | --hash=sha256:bd7d1dde70ff3e09e4bc7a1cbb91a7a538add291bfd5b3e70ef1e7b45192440f \
191 | --hash=sha256:be86c2eb12fb0f846262ace9d8f032dc6978b8cb26a058920ecb723dbcb87d05 \
192 | --hash=sha256:bf10963d8aed8bbe0165b41797c9463d4c5c8788ae6a77c68427569be6bead41 \
193 | --hash=sha256:c1375025f0bfc9155286ebae8eecc65e33e494c90025cda69e247c3ccd2bab00 \
194 | --hash=sha256:c5d8e764b5646623e57575f624f8ebb8f7a9f7fd1fae682ef87869ca5fec8dcf \
195 | --hash=sha256:cba5ad5eef02c86a1f3da00544cbc59a510d596b27566479a7cd4d91c6187a11 \
196 | --hash=sha256:cc086ddb6dc654a15deeed1d1f2bcb1cb924ebd70df9dca738af19f64229b06c \
197 | --hash=sha256:d0c2b713464a8e263a243ae7980d81ce2de5ac59a9f798a282e44350b42dc516 \
198 | --hash=sha256:d93aedbc4614cc21b9ab0d0c4ccd7143354c1f7cffbbe96ae5216ad21d1b21b5 \
199 | --hash=sha256:d9610b47b5fe4aacbbba6a9cb5f12cbe864eec99dbfed5710bd32ef5dd8a5d5b \
200 | --hash=sha256:da055a1b0bfa8041bb2ff586b2cb0353ed03944a3472186a02cc44a557a0e661 \
201 | --hash=sha256:dd2429f7635ad4857b5881503f9c310be7761dc681c467a9d27787b674d1250a \
202 | --hash=sha256:de39eb3bab93a99ddda1ac1b9aa331b944d8bcc4aa9141148f7fd8ee0299dafc \
203 | --hash=sha256:e40b1e97edd3dc127aa53d8a5e539a3d0c227d71574d3f9ac1af02d58218a122 \
204 | --hash=sha256:e412607ca89a0ced10758dfb8f9adcc365ce4c1c377e637c01989a75e9a9ec8a \
205 | --hash=sha256:e953353180bec330c3b830891d260b6f8e576e2d18db3c78d314e56bb2276066 \
206 | --hash=sha256:ec3473c9789cc00c7260d840c3db2c16dbfc816ca70ec87a00cddfa3e1a1cdd5 \
207 | --hash=sha256:efff8b6761a1f6e45cebd1b7a6406eb2723d2d5710ff0d1b624fe11313693989 \
208 | --hash=sha256:f773b39780323a0499b53ebd91a28ad11cde6705605d98d999dfa08624caf064 \
209 | --hash=sha256:fa8e48001b39d54d97d7b380a0669fa99fc0feeb972e35a2d677ba59164a9a22 \
210 | --hash=sha256:ff246c0111076c8022f9ba325c294f2cb5983403506989253e04dbae565e019b \
211 | --hash=sha256:ffe18407a4d000c568182ce5388bbbedeb099896904e43fc14eee76cfae6dec5
212 | # via pydantic
213 | pydantic-settings==2.0.2 \
214 | --hash=sha256:342337fff50b23585e807a86dec85037900972364435c55c2fc00d16ff080539 \
215 | --hash=sha256:6183a2abeab465d5a3ab69758e9a22d38b0cc2ba193f0b85f6971a252ea630f6
216 | # via -r requirements.in
217 | pyparsing==3.1.1 \
218 | --hash=sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb \
219 | --hash=sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db
220 | # via packaging
221 | python-dotenv==1.0.0 \
222 | --hash=sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba \
223 | --hash=sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a
224 | # via pydantic-settings
225 | pyyaml==6.0.1 \
226 | --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \
227 | --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \
228 | --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \
229 | --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \
230 | --hash=sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595 \
231 | --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \
232 | --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \
233 | --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \
234 | --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \
235 | --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \
236 | --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \
237 | --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \
238 | --hash=sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6 \
239 | --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \
240 | --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \
241 | --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \
242 | --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \
243 | --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \
244 | --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \
245 | --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
246 | --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
247 | --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
248 | --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
249 | --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
250 | --hash=sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0 \
251 | --hash=sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515 \
252 | --hash=sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c \
253 | --hash=sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c \
254 | --hash=sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924 \
255 | --hash=sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34 \
256 | --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \
257 | --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \
258 | --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \
259 | --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \
260 | --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \
261 | --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \
262 | --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \
263 | --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \
264 | --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \
265 | --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f
266 | # via -r requirements.in
267 | requests==2.31.0 \
268 | --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
269 | --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
270 | # via -r requirements.in
271 | typing-extensions==4.7.1 \
272 | --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \
273 | --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2
274 | # via
275 | # pydantic
276 | # pydantic-core
277 | urllib3==2.0.4 \
278 | --hash=sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11 \
279 | --hash=sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4
280 | # via requests
281 |
--------------------------------------------------------------------------------