├── .all-contributorsrc ├── .dockerignore ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── dependabot.yml ├── scripts │ ├── README.md │ ├── generate_inputs.py │ ├── replace.sed │ └── replace_inputs.sh ├── settings.yml └── workflows │ ├── action-integration.yml │ ├── codeql.yml │ ├── dependabot-auto-merge.yml │ ├── generate-docs.yml │ ├── issues.yml │ ├── python-ci.yml │ ├── release-docker-images.yml │ ├── release-major-version-tag.yml │ ├── release-please.yml │ ├── release.yml │ └── test-docker-image.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .prettierignore ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── action.yml ├── examples └── settings.yml ├── noxfile.py ├── package-requirements.txt ├── poetry.lock ├── pyproject.toml ├── repo_manager ├── __init__.py ├── gh │ ├── __init__.py │ ├── branch_protections.py │ ├── files.py │ ├── labels.py │ ├── repos.py │ ├── secrets.py │ └── settings.py ├── main.py ├── schemas │ ├── __init__.py │ ├── branch_protection.py │ ├── file.py │ ├── label.py │ ├── secret.py │ └── settings.py └── utils │ ├── __init__.py │ └── _inputs.py └── tests └── unit ├── github └── test_files.py └── schemas ├── test_file.py ├── test_label.py └── test_secret.py /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "files": ["README.md"], 3 | "imageSize": 100, 4 | "commit": false, 5 | "commitConvention": "angular", 6 | "contributors": [ 7 | { 8 | "login": "andrewthetechie", 9 | "name": "Andrew", 10 | "avatar_url": "https://avatars.githubusercontent.com/u/1377314?v=4", 11 | "profile": "https://github.com/andrewthetechie", 12 | "contributions": ["ideas", "test", "code"] 13 | }, 14 | { 15 | "login": "actuarysailor", 16 | "name": "shiro", 17 | "avatar_url": "https://avatars.githubusercontent.com/u/24359398?v=4", 18 | "profile": "https://github.com/actuarysailor", 19 | "contributions": ["bug", 
"code"] 20 | } 21 | ], 22 | "contributorsPerLine": 7, 23 | "skipCi": true, 24 | "repoType": "github", 25 | "repoHost": "https://github.com", 26 | "projectName": "gha-repo-manager", 27 | "projectOwner": "andrewthetechie", 28 | "commitType": "docs" 29 | } 30 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | 2 | 3 | # C extensions 4 | *.so 5 | 6 | # Distribution / packaging 7 | .Python 8 | build/ 9 | develop-eggs/ 10 | dist/ 11 | downloads/ 12 | eggs/ 13 | .eggs/ 14 | lib64/ 15 | parts/ 16 | sdist/ 17 | var/ 18 | wheels/ 19 | pip-wheel-metadata/ 20 | share/python-wheels/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | MANIFEST 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .nox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *.cover 46 | *.py,cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | .ruff_cache/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Sphinx documentation 56 | docs/ 57 | # PyBuilder 58 | target/ 59 | 60 | # Jupyter Notebook 61 | .ipynb_checkpoints 62 | 63 | # IPython 64 | profile_default/ 65 | ipython_config.py 66 | 67 | # pyenv 68 | .python-version 69 | 70 | # pipenv 71 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 72 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 73 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 74 | # install all needed dependencies. 75 | #Pipfile.lock 76 | 77 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 78 | __pypackages__/ 79 | 80 | # Pycache 81 | __pycache__/ 82 | 83 | # Environments 84 | .env 85 | .venv 86 | env/ 87 | venv/ 88 | ENV/ 89 | env.bak/ 90 | venv.bak/ 91 | 92 | # mypy 93 | .mypy_cache/ 94 | .dmypy.json 95 | dmypy.json 96 | 97 | # Pyre type checker 98 | .pyre/ 99 | 100 | Docker/Dockerfile 101 | Docker/ActionDockerfile.j2 102 | noxfile.py 103 | action.yml 104 | .prettierignore 105 | .pre-commit-config.yaml 106 | .git 107 | .github 108 | .gitignore 109 | tests/ 110 | 111 | Makefile 112 | DEVELOPER.md 113 | LICENSE 114 | .pre-commit-config.yaml 115 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 
39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/Docker/" 5 | schedule: 6 | interval: "daily" 7 | - package-ecosystem: "pip" 8 | directory: "/" 9 | schedule: 10 | interval: "daily" 11 | -------------------------------------------------------------------------------- /.github/scripts/README.md: -------------------------------------------------------------------------------- 1 | # .github/scripts 2 | 3 | Contains scripts used for CI and local dev 4 | -------------------------------------------------------------------------------- /.github/scripts/generate_inputs.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | 3 | with open("action.yml") as fh: 4 | data = yaml.safe_load(fh) 5 | 6 | print(f"INPUTS = {data['inputs']}") 7 | -------------------------------------------------------------------------------- 
/.github/scripts/replace.sed: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env -S sed -nEf 2 | 3 | 4 | /###START_INPUT_AUTOMATION###/ { 5 | # [p]rint the start-marker line. 6 | p 7 | 8 | # Next, we'll read lines (using `n`) in a loop, so mark this point in 9 | # the script as the beginning of the loop using a label called `loop`. 10 | :loop 11 | 12 | # Read the next line. 13 | n 14 | 15 | # If the last read line doesn't match the pattern for the end marker, 16 | # just continue looping by [b]ranching to the `:loop` label. 17 | /^###END_INPUT_AUTOMATION###/! { 18 | b loop 19 | } 20 | 21 | # If the last read line matches the end marker pattern, then just insert 22 | # the text we want and print the last read line. The net effect is that 23 | # all the previous read lines will be replaced by the inserted text. 24 | /^###END_INPUT_AUTOMATION###/ { 25 | # Insert the replacement text 26 | i\ 27 | $REPLACEMENT 28 | 29 | # [print] the end-marker line 30 | p 31 | } 32 | 33 | # Exit the script, so that we don't hit the [p]rint command below. 34 | b 35 | } 36 | 37 | # Print all other lines. 
38 | p 39 | -------------------------------------------------------------------------------- /.github/scripts/replace_inputs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mytmpdir=$(mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir') 4 | export INPUT_FILE="repo_manager/utils/_inputs.py" 5 | export REPLACEMENT=$(python .github/scripts/generate_inputs.py) 6 | 7 | NEW_INIT=$(.github/scripts/replace.sed $INPUT_FILE | envsubst) 8 | echo -n "$NEW_INIT" > "$mytmpdir/__init__.py" 9 | black --line-length=119 --target-version=py39 "$mytmpdir/__init__.py" 10 | 11 | if diff $INPUT_FILE "$mytmpdir/__init__.py"; then 12 | echo "Inputs up to date" 13 | else 14 | echo "Inputs out of date...updating" 15 | cp "$mytmpdir/__init__.py" $INPUT_FILE 16 | fi 17 | 18 | rm -rf $mytmpdir 19 | -------------------------------------------------------------------------------- /.github/settings.yml: -------------------------------------------------------------------------------- 1 | settings: 2 | # See https://docs.github.com/en/rest/reference/repos#update-a-repository for all available settings. 3 | # any of these settings can be omitted to just leave the repo's current setting 4 | # If a setting has a value in settings.yml, it will always overwrite what exists in the repo. 5 | 6 | # A short description of the repository that will show up on GitHub. Set to an empty string to clear. 7 | description: Manage your Github repo(s) settings and secrets using Github Actions and a yaml file 8 | # A URL with more information about the repository. Set to an empty string to clear. 9 | homepage: https://github.com/andrewthetechie/gha-repo-manager 10 | 11 | # A list of strings to apply as topics on the repo. Set to an empty string to clear topics. Omit or set to null to leave what repo already has 12 | topics: 13 | - github-actions 14 | - python 15 | 16 | # Either `true` to make the repository private, or `false` to make it public.
17 | private: false 18 | 19 | # Either `true` to enable issues for this repository, `false` to disable them. 20 | has_issues: true 21 | 22 | # Either `true` to enable projects for this repository, or `false` to disable them. 23 | # If projects are disabled for the organization, passing `true` will cause an API error. 24 | has_projects: false 25 | 26 | # Either `true` to enable the wiki for this repository, `false` to disable it. 27 | has_wiki: false 28 | 29 | # Either `true` to enable downloads for this repository, `false` to disable them. 30 | has_downloads: false 31 | 32 | # Set the default branch for this repository. 33 | default_branch: main 34 | 35 | # Either `true` to allow squash-merging pull requests, or `false` to prevent 36 | # squash-merging. 37 | allow_squash_merge: true 38 | 39 | # Either `true` to allow merging pull requests with a merge commit, or `false` 40 | # to prevent merging pull requests with merge commits. 41 | allow_merge_commit: true 42 | 43 | # Either `true` to allow rebase-merging pull requests, or `false` to prevent 44 | # rebase-merging. 45 | allow_rebase_merge: true 46 | 47 | # Either `true` to enable automatic deletion of branches on merge, or `false` to disable 48 | delete_branch_on_merge: true 49 | 50 | # Either `true` to enable automated security fixes, or `false` to disable 51 | # automated security fixes. 52 | enable_automated_security_fixes: true 53 | 54 | # Either `true` to enable vulnerability alerts, or `false` to disable 55 | # vulnerability alerts. 56 | enable_vulnerability_alerts: true 57 | 58 | # Labels: define labels for Issues and Pull Requests 59 | labels: 60 | - name: bug 61 | color: CC0000 62 | description: An issue with the system. 63 | 64 | - name: feature 65 | # If including a `#`, make sure to wrap it with quotes! 66 | color: '#336699' 67 | description: New functionality. 68 | 69 | branch_protections: 70 | # branch protection can only be created for branches that exist. 
71 | - name: main 72 | # https://docs.github.com/en/rest/reference/repos#update-branch-protection 73 | # Branch Protection settings. Leave a value out to leave set at current repo settings 74 | protection: 75 | # Require at least one approving review on a pull request, before merging. Set to null to disable. 76 | pr_options: 77 | # Dismiss approved reviews automatically when a new commit is pushed. 78 | dismiss_stale_reviews: true 79 | # Specify which users and teams can dismiss pull request reviews. Pass an empty dismissal_restrictions object to disable. User and team dismissal_restrictions are only available for organization-owned repositories. Omit this parameter for personal repositories. 80 | # dismissal_restrictions: 81 | # users: [] 82 | # teams: [] 83 | # Require status checks to pass before merging. Set to null to disable 84 | required_status_checks: 85 | # Require branches to be up to date before merging. 86 | strict: true 87 | # The list of status checks to require in order to merge into this branch 88 | checks: 89 | - Lint 90 | - Integration Testing 91 | - Generate Docs 92 | # Enforce all configured restrictions for administrators. Set to true to enforce required status checks for repository administrators. Set to null to disable. 93 | enforce_admins: false 94 | # Permit force pushes for all users with push access. 
95 | allow_force_pushes: true 96 | -------------------------------------------------------------------------------- /.github/workflows/action-integration.yml: -------------------------------------------------------------------------------- 1 | # Tests the github action on each push 2 | name: Action Integration Test 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | jobs: 9 | action-integration-testing: 10 | name: Action Integration Testing 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | name: Checkout 15 | - name: Set up Docker Buildx 16 | uses: docker/setup-buildx-action@v2 17 | - name: Update action.yml to use dockerfile 18 | uses: rmeneely/update-yaml@v1 19 | with: 20 | infile: action.yml 21 | varlist: "runs.image=Dockerfile" 22 | - name: cat action.yml 23 | run: cat action.yml 24 | - name: Test action 25 | id: test-action 26 | # test with the local checkout of the action 27 | uses: ./ 28 | with: 29 | token: ${{ secrets.THIS_PAT || github.token }} 30 | action: "check" 31 | - name: Check outputs 32 | run: | 33 | test "${{ steps.test-action.outputs.result }}" == "Check passed" 34 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | on: 3 | push: 4 | branches: 5 | - main 6 | schedule: 7 | - cron: "0 0 * * 1" 8 | workflow_dispatch: 9 | jobs: 10 | analyze: 11 | name: Analyze 12 | runs-on: ubuntu-latest 13 | permissions: 14 | actions: read 15 | contents: read 16 | security-events: write 17 | steps: 18 | - name: Checkout repository 19 | uses: actions/checkout@v3 20 | with: 21 | token: ${{ secrets.GITHUB_TOKEN }} 22 | # Initializes the CodeQL tools for scanning.
23 | - name: Initialize CodeQL 24 | uses: github/codeql-action/init@v2 25 | with: 26 | languages: Python 27 | - name: Perform CodeQL Analysis 28 | uses: github/codeql-action/analyze@v2 29 | -------------------------------------------------------------------------------- /.github/workflows/dependabot-auto-merge.yml: -------------------------------------------------------------------------------- 1 | name: Auto Merge Dependabot 2 | 3 | on: 4 | pull_request: 5 | 6 | jobs: 7 | auto-merge: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v3 11 | - uses: ahmadnassri/action-dependabot-auto-merge@v2 12 | with: 13 | target: minor 14 | github-token: ${{ secrets.THIS_PAT }} 15 | -------------------------------------------------------------------------------- /.github/workflows/generate-docs.yml: -------------------------------------------------------------------------------- 1 | name: Generate Docs 2 | on: 3 | push: 4 | pull_request: 5 | jobs: 6 | docs: 7 | name: Generate Docs 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Set up Node 11 | uses: actions/setup-node@v3 12 | with: 13 | node-version: 16 14 | - uses: actions/checkout@v3 15 | # https://github.com/stefanzweifel/git-auto-commit-action#commits-made-by-this-action-do-not-trigger-new-workflow-runs 16 | with: 17 | token: ${{ secrets.GITHUB_TOKEN }} 18 | - name: Run doc generation 19 | uses: npalm/action-docs-action@v1.2.0 20 | - name: Commit doc changes 21 | uses: stefanzweifel/git-auto-commit-action@v4 22 | with: 23 | commit_message: "docs: automated doc update" 24 | file_pattern: README.md 25 | -------------------------------------------------------------------------------- /.github/workflows/issues.yml: -------------------------------------------------------------------------------- 1 | name: Issues to Discord 2 | on: 3 | issues: 4 | types: 5 | - opened 6 | - reopened 7 | - deleted 8 | - closed 9 | jobs: 10 | issue-to-discord: 11 | name: issue-to-discord 12 | runs-on: ubuntu-latest 13 | steps: 
14 | - name: Post to discord 15 | uses: Ilshidur/action-discord@0.3.2 16 | env: 17 | DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_ISSUES }} 18 | ACTION: ${{ github.event.action }} 19 | REPO: ${{ github.repository }} 20 | ISSUE_URL: ${{ github.event.issue.html_url }} 21 | ISSUE_USER: ${{ github.event.issue.user.login }} 22 | with: 23 | args: "{{ REPO }} had an issue {{ ACTION }} by {{ ISSUE_USER }} at {{ ISSUE_URL }}." 24 | -------------------------------------------------------------------------------- /.github/workflows/python-ci.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | pull_request: 6 | 7 | jobs: 8 | python-ci: 9 | name: ${{ matrix.session }} ${{ matrix.python }} / ${{ matrix.os }} 10 | runs-on: ${{ matrix.os }} 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | include: 15 | - { python: "3.11", os: "ubuntu-latest", session: "pre-commit" } 16 | - { python: "3.11", os: "ubuntu-latest", session: "safety" } 17 | # - { python: "3.11", os: "ubuntu-latest", session: "mypy" } 18 | - { python: "3.11", os: "ubuntu-latest", session: "tests" } 19 | 20 | env: 21 | NOXSESSION: ${{ matrix.session }} 22 | FORCE_COLOR: "1" 23 | PRE_COMMIT_COLOR: "always" 24 | 25 | steps: 26 | - name: Check out the repository 27 | uses: actions/checkout@v3.3.0 28 | 29 | - name: Set up Python ${{ matrix.python }} 30 | uses: actions/setup-python@v4.5.0 31 | with: 32 | python-version: ${{ matrix.python }} 33 | 34 | - name: Upgrade pip 35 | run: | 36 | pip install --constraint=package-requirements.txt pip 37 | pip --version 38 | 39 | - name: Upgrade pip in virtual environments 40 | shell: python 41 | run: | 42 | import os 43 | import pip 44 | 45 | with open(os.environ["GITHUB_ENV"], mode="a") as io: 46 | print(f"VIRTUALENV_PIP={pip.__version__}", file=io) 47 | 48 | - name: Install package-requirements 49 | run: | 50 | pip install --upgrade -r package-requirements.txt 51 | poetry --version 52 | nox --version 
53 | 54 | - name: Compute pre-commit cache key 55 | if: matrix.session == 'pre-commit' 56 | id: pre-commit-cache 57 | shell: python 58 | run: | 59 | import hashlib 60 | import sys 61 | import os 62 | 63 | python = "py{}.{}".format(*sys.version_info[:2]) 64 | payload = sys.version.encode() + sys.executable.encode() 65 | digest = hashlib.sha256(payload).hexdigest() 66 | result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8]) 67 | 68 | with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: 69 | fh.write(f"result={result}\n") 70 | 71 | - name: Restore pre-commit cache 72 | uses: actions/cache@v3.3.1 73 | if: matrix.session == 'pre-commit' 74 | with: 75 | path: ~/.cache/pre-commit 76 | key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }} 77 | restore-keys: | 78 | ${{ steps.pre-commit-cache.outputs.result }}- 79 | 80 | - name: Run Nox 81 | run: | 82 | nox --force-color --python=${{ matrix.python }} 83 | 84 | - name: Upload coverage data 85 | if: always() && matrix.session == 'tests' 86 | uses: "actions/upload-artifact@v3.1.2" 87 | with: 88 | name: coverage-data 89 | path: ".coverage.*" 90 | 91 | - name: Upload documentation 92 | if: matrix.session == 'docs-build' 93 | uses: actions/upload-artifact@v3.1.2 94 | with: 95 | name: docs 96 | path: docs/_build 97 | 98 | coverage: 99 | runs-on: ubuntu-latest 100 | needs: python-ci 101 | steps: 102 | - name: Download coverage data 103 | uses: actions/download-artifact@v3.0.2 104 | with: 105 | name: coverage-data 106 | 107 | - name: Upload coverage report 108 | uses: codecov/codecov-action@v3.1.0 109 | with: 110 | files: .coverage.xml 111 | verbose: true 112 | -------------------------------------------------------------------------------- /.github/workflows/release-docker-images.yml: -------------------------------------------------------------------------------- 1 | # Runs after release-please creates a new release 2 | # Builds and pushes the docker images for the release 3 | 
name: Release Docker Images 4 | on: 5 | release: 6 | types: [released] 7 | 8 | jobs: 9 | build-and-push-dockerimage: 10 | name: Build and push dockerimage 11 | if: github.repository_owner == 'andrewthetechie' 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v3 15 | - name: Set up QEMU 16 | uses: docker/setup-qemu-action@v2 17 | - name: Set up Docker Buildx 18 | uses: docker/setup-buildx-action@v2 19 | - name: Login to DockerHub 20 | uses: docker/login-action@v2 21 | with: 22 | username: ${{ secrets.DOCKERHUB_USERNAME }} 23 | password: ${{ secrets.DOCKERHUB_TOKEN }} 24 | - name: Log in to the Container registry 25 | uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 26 | with: 27 | registry: ghcr.io 28 | username: ${{ github.actor }} 29 | password: ${{ secrets.THIS_PAT }} 30 | - name: Docker metadata 31 | uses: docker/metadata-action@v4 32 | id: meta 33 | with: 34 | images: | 35 | ${{ github.repository }} 36 | ghcr.io/${{ github.repository }} 37 | tags: | 38 | type=raw,value=${{ github.ref_name }} 39 | # minimal (short sha) 40 | type=sha,prefix= 41 | # full length sha 42 | type=sha,format=long,prefix= 43 | - name: Build and push 44 | id: docker_build 45 | uses: docker/build-push-action@v3 46 | with: 47 | context: .
48 | file: Dockerfile 49 | push: true 50 | tags: ${{ steps.meta.outputs.tags }} 51 | labels: ${{ steps.meta.outputs.labels }} 52 | platforms: linux/amd64,linux/arm64 53 | # https://github.com/docker/build-push-action/blob/master/docs/advanced/cache.md#registry-cache 54 | cache-from: type=gha 55 | cache-to: type=gha,mode=max 56 | -------------------------------------------------------------------------------- /.github/workflows/release-major-version-tag.yml: -------------------------------------------------------------------------------- 1 | # Updates major version tag for GHA ease of use 2 | name: Update Major Version Tag 3 | 4 | on: 5 | push: 6 | tags: 7 | - "v*" 8 | 9 | jobs: 10 | update-majorver: 11 | name: Update Major Version Tag 12 | if: github.repository_owner == 'andrewthetechie' 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: nowactions/update-majorver@v1 16 | -------------------------------------------------------------------------------- /.github/workflows/release-please.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | name: release-please 6 | jobs: 7 | release-please: 8 | if: github.repository_owner == 'andrewthetechie' 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: google-github-actions/release-please-action@v3 12 | with: 13 | token: ${{ secrets.THIS_PAT }} 14 | release-type: python 15 | extra-files: | 16 | action.yml 17 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version: 6 | description: "Version to release, like v1.0.0. 
Must increment current versions" 7 | required: true 8 | releaseNotes: 9 | description: "Any notes for this release" 10 | required: false 11 | default: "" 12 | prerelease: 13 | description: "Is this a prerelease" 14 | required: false 15 | default: false 16 | type: boolean 17 | jobs: 18 | update-dockerfile: 19 | if: github.repository_owner == 'andrewthetechie' 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/checkout@v3 23 | with: 24 | fetch-depth: 0 25 | token: ${{ secrets.THIS_PAT }} 26 | - name: Set up QEMU 27 | uses: docker/setup-qemu-action@v2 28 | - name: Set up Docker Buildx 29 | uses: docker/setup-buildx-action@v2 30 | - name: Login to DockerHub 31 | uses: docker/login-action@v2 32 | with: 33 | username: ${{ secrets.DOCKERHUB_USERNAME }} 34 | password: ${{ secrets.DOCKERHUB_TOKEN }} 35 | - name: Docker metadata 36 | uses: docker/metadata-action@v4 37 | id: meta 38 | with: 39 | images: | 40 | ${{ github.repository }} 41 | tags: | 42 | type=raw,value=${{ inputs.version }} 43 | # minimal (short sha) 44 | type=sha,prefix= 45 | # full length sha 46 | type=sha,format=long,prefix= 47 | - name: Build and push 48 | id: docker_build 49 | uses: docker/build-push-action@v3 50 | with: 51 | context: . 
52 | file: Docker/Dockerfile 53 | push: true 54 | tags: ${{ steps.meta.outputs.tags }} 55 | labels: ${{ steps.meta.outputs.labels }} 56 | platforms: linux/amd64,linux/arm64 57 | # https://github.com/docker/build-push-action/blob/master/docs/advanced/cache.md#registry-cache 58 | cache-from: type=gha 59 | cache-to: type=gha,mode=max 60 | - name: Update Action Dockerfile with Jinja 61 | id: update_action-dockerfile 62 | uses: cuchi/jinja2-action@v1.2.0 63 | with: 64 | template: Docker/ActionDockerfile.j2 65 | output_file: Dockerfile 66 | strict: true 67 | variables: | 68 | image_version=${{ inputs.version }} 69 | - name: Commit Dockerfile changes 70 | uses: stefanzweifel/git-auto-commit-action@v4 71 | with: 72 | commit_message: "ci: update actions dockerfile for release" 73 | file_pattern: Dockerfile 74 | tagging_message: ${{ inputs.version }} 75 | release: 76 | runs-on: ubuntu-latest 77 | needs: update-dockerfile 78 | steps: 79 | - name: Create Release 80 | id: create_release 81 | uses: actions/create-release@v1 82 | env: 83 | GITHUB_TOKEN: ${{ secrets.THIS_PAT }} 84 | with: 85 | tag_name: ${{ inputs.version }} 86 | release_name: ${{ inputs.version }} 87 | body: ${{ inputs.releaseNotes }} 88 | draft: false 89 | prerelease: ${{ inputs.prerelease }} 90 | -------------------------------------------------------------------------------- /.github/workflows/test-docker-image.yml: -------------------------------------------------------------------------------- 1 | name: Test Docker Image Build 2 | 3 | on: 4 | push: 5 | pull_request: 6 | 7 | jobs: 8 | test-docker-image: 9 | name: Test Docker Image 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check out the repository 13 | uses: actions/checkout@v3.3.0 14 | - # Add support for more platforms with QEMU (optional) 15 | # https://github.com/docker/setup-qemu-action 16 | name: Set up QEMU 17 | uses: docker/setup-qemu-action@v2 18 | - name: Set up Docker Buildx 19 | uses: docker/setup-buildx-action@v2 20 | # The build runs 
some tests on the built binary to make sure it works as part of its build 21 | - name: Test Build 22 | id: docker_build 23 | uses: docker/build-push-action@v3 24 | with: 25 | context: . 26 | file: Dockerfile 27 | push: false 28 | platforms: linux/amd64,linux/arm64 29 | # https://github.com/docker/build-push-action/blob/master/docs/advanced/cache.md#registry-cache 30 | cache-from: type=gha 31 | cache-to: type=gha,mode=max 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .mypy_cache/ 2 | /.coverage 3 | /.coverage.* 4 | coverage.xml 5 | /.nox/ 6 | /.python-version 7 | /.pytype/ 8 | /dist/ 9 | /docs/_build/ 10 | /src/*.egg-info/ 11 | __pycache__/ 12 | .idea 13 | .vscode 14 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: ".*tests\/fixtures.*" 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.6.0 5 | hooks: 6 | - id: check-yaml 7 | - id: debug-statements 8 | - id: end-of-file-fixer 9 | - id: trailing-whitespace 10 | - repo: https://github.com/astral-sh/ruff-pre-commit 11 | # Ruff version. 12 | rev: v0.4.4 13 | hooks: 14 | # Run the linter. 15 | - id: ruff 16 | args: [--fix] 17 | # Run the formatter. 
18 | - id: ruff-format 19 | - repo: https://github.com/pre-commit/mirrors-prettier 20 | rev: v4.0.0-alpha.8 21 | hooks: 22 | - id: prettier 23 | - repo: https://github.com/rhysd/actionlint 24 | rev: v1.7.0 25 | hooks: 26 | - id: actionlint-docker 27 | name: Actionlint 28 | - repo: local 29 | hooks: 30 | - id: bandit 31 | name: bandit 32 | entry: bandit 33 | language: system 34 | types: [python] 35 | require_serial: true 36 | args: ["-c", "pyproject.toml"] 37 | - id: check-added-large-files 38 | name: Check for added large files 39 | entry: check-added-large-files 40 | language: system 41 | - id: check-toml 42 | name: Check Toml 43 | entry: check-toml 44 | language: system 45 | types: [toml] 46 | - id: check-yaml 47 | name: Check Yaml 48 | entry: check-yaml 49 | language: system 50 | types: [yaml] 51 | - id: end-of-file-fixer 52 | name: Fix End of Files 53 | entry: end-of-file-fixer 54 | language: system 55 | types: [text] 56 | stages: [commit, push, manual] 57 | - id: trailing-whitespace 58 | name: Trim Trailing Whitespace 59 | entry: trailing-whitespace-fixer 60 | language: system 61 | types: [text] 62 | stages: [commit, push, manual] 63 | - repo: local 64 | hooks: 65 | - id: update-inputs 66 | name: Update inputs 67 | entry: make generate-inputs 68 | language: system 69 | pass_filenames: false 70 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | .github/* 2 | CHANGELOG.md 3 | tests/fixtures/* 4 | .release-please-manifest.json 5 | README.md 6 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [1.8.0](https://github.com/andrewthetechie/gha-repo-manager/compare/v1.7.2...v1.8.0) (2024-06-02) 4 | 5 | 6 | ### Features 7 | 8 | * Ability to configure repo Collaborators (teams + users) 
([#232](https://github.com/andrewthetechie/gha-repo-manager/issues/232)) ([1bd6d38](https://github.com/andrewthetechie/gha-repo-manager/commit/1bd6d382c795e30990b71a202981e40c4cde323a)) 9 | * Pydantic 2.7.1 PR ([#225](https://github.com/andrewthetechie/gha-repo-manager/issues/225)) ([c1e014a](https://github.com/andrewthetechie/gha-repo-manager/commit/c1e014adcf31bafbcd7b29087ebd4e4a4b052ee0)) 10 | 11 | 12 | ### Bug Fixes 13 | 14 | * pydantic 2 fixes ([#237](https://github.com/andrewthetechie/gha-repo-manager/issues/237)) ([252c43a](https://github.com/andrewthetechie/gha-repo-manager/commit/252c43af4de68f15ebfb70ef7292bd10b4cc0b6c)) 15 | 16 | ## [1.7.2](https://github.com/andrewthetechie/gha-repo-manager/compare/v1.7.1...v1.7.2) (2023-10-06) 17 | 18 | 19 | ### Bug Fixes 20 | 21 | * pin to bullseye docker image ([#69](https://github.com/andrewthetechie/gha-repo-manager/issues/69)) ([863bf6b](https://github.com/andrewthetechie/gha-repo-manager/commit/863bf6b257c6b32cb1284f19f604102d45abc499)) 22 | 23 | ## [1.7.1](https://github.com/andrewthetechie/gha-repo-manager/compare/v1.7.0...v1.7.1) (2023-05-29) 24 | 25 | 26 | ### Bug Fixes 27 | 28 | * **nulls:** Minor fixes to address null values ([#52](https://github.com/andrewthetechie/gha-repo-manager/issues/52)) ([ec5c9be](https://github.com/andrewthetechie/gha-repo-manager/commit/ec5c9be75600f37953800dc8a4d2ad25d1099521)) 29 | 30 | ## [1.7.0](https://github.com/andrewthetechie/gha-repo-manager/compare/v1.6.0...v1.7.0) (2023-05-25) 31 | 32 | 33 | ### Features 34 | 35 | * **new_bp_comparisons:** New Branch Protection Comparisons previously omitted ([#43](https://github.com/andrewthetechie/gha-repo-manager/issues/43)) ([ef6dad4](https://github.com/andrewthetechie/gha-repo-manager/commit/ef6dad4f17703353eab5cda8dc3a2c59fa4602e9)) 36 | * **settings.py:** Now compares all settings ([#41](https://github.com/andrewthetechie/gha-repo-manager/issues/41)) 
([441b8e4](https://github.com/andrewthetechie/gha-repo-manager/commit/441b8e49c8ce09a74dc525e2808a5a74db0dd459)) 37 | * update poetry ([#25](https://github.com/andrewthetechie/gha-repo-manager/issues/25)) ([7983a04](https://github.com/andrewthetechie/gha-repo-manager/commit/7983a049789d053d343ee4c6465a5227e5995b6c)) 38 | 39 | 40 | ### Bug Fixes 41 | 42 | * add debug logging of diff ([#46](https://github.com/andrewthetechie/gha-repo-manager/issues/46)) ([ad86b78](https://github.com/andrewthetechie/gha-repo-manager/commit/ad86b7813217db76d997ab704607bc9d930599fb)) 43 | * **branch_protection:** fix false to False ([c4a164d](https://github.com/andrewthetechie/gha-repo-manager/commit/c4a164d99755b865d3b58f1fbff322fdb2b9947a)) 44 | * **branch_protections.py:** Sort Required Status Checks ([#38](https://github.com/andrewthetechie/gha-repo-manager/issues/38)) ([4d4c44f](https://github.com/andrewthetechie/gha-repo-manager/commit/4d4c44fd10847c7ecc8539e6b84701d7ddd2e439)) 45 | * **branch_protections.py:** Working Status Check Reqs ([#42](https://github.com/andrewthetechie/gha-repo-manager/issues/42)) ([33090f5](https://github.com/andrewthetechie/gha-repo-manager/commit/33090f570282feb6866c73f41ab2a90ac6556d43)) 46 | * fix bugs from pr 43 ([#45](https://github.com/andrewthetechie/gha-repo-manager/issues/45)) ([b21e432](https://github.com/andrewthetechie/gha-repo-manager/commit/b21e4323ff9409093c0f559dc508232903f538fa)) 47 | * **github_nulls:** Better handling of Nulls in GitHub API ([#39](https://github.com/andrewthetechie/gha-repo-manager/issues/39)) ([6744d11](https://github.com/andrewthetechie/gha-repo-manager/commit/6744d11f832826994b3eafb4bb59a0d546a3ac74)) 48 | 49 | ## [1.6.0](https://github.com/andrewthetechie/gha-repo-manager/compare/v1.5.0...v1.6.0) (2023-05-21) 50 | 51 | 52 | ### Features 53 | 54 | * ghe support ([#23](https://github.com/andrewthetechie/gha-repo-manager/issues/23)) 
([60e128a](https://github.com/andrewthetechie/gha-repo-manager/commit/60e128a42d6a1da90ee5defc9a2b71d1024b4189)) 55 | 56 | 57 | ### Documentation 58 | 59 | * automated doc update ([7567b6a](https://github.com/andrewthetechie/gha-repo-manager/commit/7567b6a00c07c2976582af89923deec4b4bf8db1)) 60 | * cleanup badges ([7fdc028](https://github.com/andrewthetechie/gha-repo-manager/commit/7fdc028972a6d686392fce64491029f099483ab6)) 61 | 62 | ## [1.5.0](https://github.com/andrewthetechie/gha-repo-manager/compare/v1.4.0...v1.5.0) (2023-05-21) 63 | 64 | 65 | ### Features 66 | 67 | * Poetry rework ([#19](https://github.com/andrewthetechie/gha-repo-manager/issues/19)) ([2f41b7b](https://github.com/andrewthetechie/gha-repo-manager/commit/2f41b7be4186ae1ffb7865838191234a1df11748)) 68 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in 6 | our community a harassment-free experience for everyone, regardless of 7 | age, body size, visible or invisible disability, ethnicity, sex 8 | characteristics, gender identity and expression, level of experience, 9 | education, socio-economic status, nationality, personal appearance, 10 | race, religion, or sexual identity and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, 13 | welcoming, diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | - Demonstrating empathy and kindness toward other people 21 | - Being respectful of differing opinions, viewpoints, and experiences 22 | - Giving and gracefully accepting constructive feedback 23 | - Accepting responsibility and apologizing to those affected by our 24 | mistakes, and learning from the experience 25 | - Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | - The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | - Trolling, insulting or derogatory comments, and personal or 33 | political attacks 34 | - Public or private harassment 35 | - Publishing others\' private information, such as a physical or email 36 | address, without their explicit permission 37 | - Other conduct which could reasonably be considered inappropriate in 38 | a professional setting 39 | 40 | ## Enforcement Responsibilities 41 | 42 | Community leaders are responsible for clarifying and enforcing our 43 | standards of acceptable behavior and will take appropriate and fair 44 | corrective action in response to any behavior that they deem 45 | inappropriate, threatening, offensive, or harmful. 46 | 47 | Community leaders have the right and responsibility to remove, edit, or 48 | reject comments, commits, code, wiki edits, issues, and other 49 | contributions that are not aligned to this Code of Conduct, and will 50 | communicate reasons for moderation decisions when appropriate. 51 | 52 | ## Scope 53 | 54 | This Code of Conduct applies within all community spaces, and also 55 | applies when an individual is officially representing the community in 56 | public spaces. 
Examples of representing our community include using an 57 | official e-mail address, posting via an official social media account, 58 | or acting as an appointed representative at an online or offline event. 59 | 60 | ## Enforcement 61 | 62 | Instances of abusive, harassing, or otherwise unacceptable behavior may 63 | be reported to the community leaders responsible for enforcement at 64 | andrew@💻.kz. All complaints will be reviewed and investigated promptly 65 | and fairly. 66 | 67 | All community leaders are obligated to respect the privacy and security 68 | of the reporter of any incident. 69 | 70 | ## Enforcement Guidelines 71 | 72 | Community leaders will follow these Community Impact Guidelines in 73 | determining the consequences for any action they deem in violation of 74 | this Code of Conduct: 75 | 76 | ### 1. Correction 77 | 78 | **Community Impact**: Use of inappropriate language or other behavior 79 | deemed unprofessional or unwelcome in the community. 80 | 81 | **Consequence**: A private, written warning from community leaders, 82 | providing clarity around the nature of the violation and an explanation 83 | of why the behavior was inappropriate. A public apology may be 84 | requested. 85 | 86 | ### 2. Warning 87 | 88 | **Community Impact**: A violation through a single incident or series of 89 | actions. 90 | 91 | **Consequence**: A warning with consequences for continued behavior. No 92 | interaction with the people involved, including unsolicited interaction 93 | with those enforcing the Code of Conduct, for a specified period of 94 | time. This includes avoiding interactions in community spaces as well as 95 | external channels like social media. Violating these terms may lead to a 96 | temporary or permanent ban. 97 | 98 | ### 3. Temporary Ban 99 | 100 | **Community Impact**: A serious violation of community standards, 101 | including sustained inappropriate behavior. 
102 | 103 | **Consequence**: A temporary ban from any sort of interaction or public 104 | communication with the community for a specified period of time. No 105 | public or private interaction with the people involved, including 106 | unsolicited interaction with those enforcing the Code of Conduct, is 107 | allowed during this period. Violating these terms may lead to a 108 | permanent ban. 109 | 110 | ### 4. Permanent Ban 111 | 112 | **Community Impact**: Demonstrating a pattern of violation of community 113 | standards, including sustained inappropriate behavior, harassment of an 114 | individual, or aggression toward or disparagement of classes of 115 | individuals. 116 | 117 | **Consequence**: A permanent ban from any sort of public interaction 118 | within the community. 119 | 120 | ## Attribution 121 | 122 | This Code of Conduct is adapted from the [Contributor 123 | Covenant](https://www.contributor-covenant.org), version 2.0, available 124 | at 125 | . 126 | 127 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 128 | enforcement ladder](https://github.com/mozilla/diversity). 129 | 130 | For answers to common questions about this code of conduct, see the FAQ 131 | at . Translations are 132 | available at . 133 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributor Guide 2 | 3 | ** THIS ACTION IS NOT ACTIVELY UNDER DEVELOPMENT. ISSUES AND PRS MAY NOT BE RESPONDED TO ** 4 | 5 | Thank you for your interest in improving this project. This project is 6 | open-source under the [MIT license](https://opensource.org/licenses/MIT) 7 | and welcomes contributions in the form of bug reports, feature requests, 8 | and pull requests. 
9 | 10 | Here is a list of important resources for contributors: 11 | 12 | - [Source Code](https://github.com/andrewthetechie/cookiecutter-autodocs) 13 | - [Documentation](https://cookiecutter-autodocs.readthedocs.io/) 14 | - [Issue Tracker](https://github.com/andrewthetechie/cookiecutter-autodocs/issues) 15 | - [Code of Conduct](CODE_OF_CONDUCT.md) 16 | 17 | ## How to report a bug 18 | 19 | Report bugs on the [Issue 20 | Tracker](https://github.com/andrewthetechie/cookiecutter-autodocs/issues). 21 | 22 | When filing an issue, make sure to answer these questions: 23 | 24 | - Which operating system and Python version are you using? 25 | - Which version of this project are you using? 26 | - What did you do? 27 | - What did you expect to see? 28 | - What did you see instead? 29 | 30 | The best way to get your bug fixed is to provide a test case, and/or 31 | steps to reproduce the issue. 32 | 33 | ## How to request a feature 34 | 35 | Request features on the [Issue 36 | Tracker](https://github.com/andrewthetechie/cookiecutter-autodocs/issues). 37 | 38 | ## How to set up your development environment 39 | 40 | You need Python 3.8+ and the following tools: 41 | 42 | - [Poetry](https://python-poetry.org/) 43 | - [Nox](https://nox.thea.codes/) 44 | - [nox-poetry](https://nox-poetry.readthedocs.io/) 45 | 46 | Install the package with development requirements: 47 | 48 | ```shell 49 | poetry install 50 | ``` 51 | 52 | You can now run an interactive Python session, or the command-line 53 | interface: 54 | 55 | ```shell 56 | poetry run python 57 | ``` 58 | 59 | ## How to test the project 60 | 61 | Run the full test suite: 62 | 63 | ```shell 64 | nox 65 | ``` 66 | 67 | List the available Nox sessions: 68 | 69 | ```shell 70 | nox --list-sessions 71 | ``` 72 | 73 | You can also run a specific Nox session. 
For example, invoke the unit 74 | test suite like this: 75 | 76 | ```shell 77 | nox --session=tests 78 | ``` 79 | 80 | Unit tests are located in the `tests` directory, and are written using 81 | the [pytest](https://pytest.readthedocs.io/) testing framework. 82 | 83 | ## How to submit changes 84 | 85 | Open a [pull 86 | request](https://github.com/andrewthetechie/cookiecutter-autodocs/pulls) to 87 | submit changes to this project. 88 | 89 | Your pull request needs to meet the following guidelines for acceptance: 90 | 91 | - The Nox test suite must pass without errors and warnings. 92 | - Include unit tests. This project maintains 100% code coverage. 93 | - If your changes add functionality, update the documentation accordingly. 94 | 95 | Feel free to submit early, though---we can always iterate on this. 96 | 97 | To run linting and code formatting checks before committing your change, 98 | you can install pre-commit as a Git hook by running the following 99 | command: 100 | 101 | ``shell 102 | $ nox --session=pre-commit -- install 103 | 104 | ``` 105 | 106 | It is recommended to open an issue before starting work on anything. 107 | This will allow a chance to talk it over with the owners and validate 108 | your approach. 
109 | ``` 110 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-slim-bullseye AS builder 2 | WORKDIR /app 3 | 4 | # install build requirements 5 | RUN apt-get update && apt-get install -y binutils patchelf build-essential scons upx 6 | 7 | # copy the app 8 | COPY ./ /app 9 | 10 | # install python build requirements 11 | RUN pip install --no-warn-script-location --upgrade virtualenv pip poetry pyinstaller staticx --constraint=package-requirements.txt 12 | 13 | # build the app 14 | RUN poetry build 15 | # Install the app 16 | RUN pip install dist/gha_repo_manager*.whl 17 | 18 | # pyinstaller package the app 19 | RUN python -OO -m PyInstaller -F repo_manager/main.py --name repo-manager --hidden-import _cffi_backend 20 | # static link the repo-manager binary 21 | RUN cd ./dist && \ 22 | staticx -l $(ldconfig -p| grep libgcc_s.so.1 | awk -F "=>" '{print $2}' | tr -d " ") --strip repo-manager repo-manager-static && \ 23 | strip -s -R .comment -R .gnu.version --strip-unneeded repo-manager-static 24 | # will be copied over to the scratch container, pyinstaller needs a /tmp to exist 25 | RUN mkdir /app/tmp 26 | 27 | 28 | FROM scratch 29 | 30 | ENTRYPOINT ["/repo-manager"] 31 | 32 | COPY --from=builder /app/dist/repo-manager-static /repo-manager 33 | COPY --from=builder /app/tmp /tmp 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | The MIT License (MIT) 3 | 4 | Copyright (c) 2018 GitHub, Inc. 
and contributors 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .DEFAULT_GOAL := help 2 | 3 | # This help function will automatically generate help/usage text for any make target that is commented with "##". 4 | # Targets with a singe "#" description do not show up in the help text 5 | help: 6 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-40s\033[0m %s\n", $$1, $$2}' 7 | 8 | 9 | build: ## build a docker image locally 10 | docker build -t gha-repo-manager -f Dockerfile . 
11 | 12 | generate-inputs: ## Generate a dict of inputs from actions.yml into repo_manager/utils/__init__.py 13 | ./.github/scripts/replace_inputs.sh 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Repo Manager via Github Actions 2 | 3 | [![All Contributors](https://img.shields.io/badge/all_contributors-2-orange.svg?style=flat-square)](#contributors-) 4 | 5 | 6 | 7 | ## Description 8 | 9 | Manage your Github repo(s) settings and secrets using Github Actions and a yaml file 10 | 11 | 12 | 13 | 14 | ## Usage 15 | 16 | This action manages your repo from a yaml file. You can manage: 17 | 18 | * branch protection 19 | * labels 20 | * repos 21 | * secrets 22 | * repo settings 23 | * Files 24 | 25 | See [examples/settings.yml](./examples/settings.yml) for an example config file. The schemas for this file are in [repo_manager.schemas](./repo_magager/schemas). 26 | 27 | ### File Management -- Experimental 28 | 29 | File management can copy files from your local environment to a target repo, copy files from one location to another in the target repo, move files in the target repo, and delete files in the target repo. 30 | 31 | File operations are performed using the Github BLOB API and your PAT. Each file operation is a separate commit. 32 | 33 | This feature is helpful to keep workflows or settings file in sync from a central repo to many repos. 
34 | 35 | ### Example workflow 36 | 37 | ```yaml 38 | name: Run Repo Manager 39 | on: [workflow_dispatch] 40 | jobs: 41 | repo-manager: 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@v3 45 | name: Checkout 46 | - name: Run RepoManager 47 | uses: andrewthetechie/gha-repo-manager@main 48 | with: 49 | # Apply your settings to the repo, can also be check to just check repo settings vs your file or validate, to validate your 50 | # file is valid 51 | action: apply 52 | settings_file: .github/settings.yml 53 | # need a PAT that can edit repo settings 54 | token: ${{ secrets.GITHUB_PAT }} 55 | 56 | ``` 57 | 58 | 59 | ## Inputs 60 | 61 | | parameter | description | required | default | 62 | | - | - | - | - | 63 | | action | What action to take with this action. One of validate, check, or apply. Validate will validate your settings file, but not touch your repo. Check will check your repo with your settings file and output a report of any drift. Apply will apply the settings in your settings file to your repo | `false` | check | 64 | | settings_file | What yaml file to use as your settings. This is local to runner running this action. | `false` | .github/settings.yml | 65 | | repo | What repo to perform this action on. Default is self, as in the repo this action is running in | `false` | self | 66 | | github_server_url | Set a custom github server url for github api operations. Useful if you're running on GHE. Will try to autodiscover from env.GITHUB_SERVER_URL if left at default | `false` | none | 67 | | token | What github token to use with this action. | `true` | | 68 | 69 | 70 | 71 | 72 | 73 | 74 | ## Outputs 75 | 76 | | parameter | description | 77 | | - | - | 78 | | result | Result of the action | 79 | | diff | Diff of this action, dumped to a json string | 80 | 81 | 82 | 83 | 84 | 85 | 86 | ## Runs 87 | 88 | This action is a `docker` action. 
89 | 90 | 91 | 92 | 93 | ## Contributors 94 | 95 | Please see our [Contribution Guide](./CONTRIBUTING.md) for more info on how you can contribute. All contributors and participants in this repo must follow our [Code of Conduct](./CODE_OF_CONDUCT.md). 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 |
Andrew
Andrew

🤔 ⚠️ 💻
shiro
shiro

🐛 💻
107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: "Yaml Repo Manager" 2 | description: "Manage your Github repo(s) settings and secrets using Github Actions and a yaml file" 3 | author: "Andrew Herrington" 4 | inputs: 5 | action: 6 | description: What action to take with this action. One of validate, check, or apply. Validate will validate your settings file, but not touch your repo. Check will check your repo with your settings file and output a report of any drift. Apply will apply the settings in your settings file to your repo 7 | default: "check" 8 | settings_file: 9 | description: What yaml file to use as your settings. This is local to runner running this action. 10 | default: ".github/settings.yml" 11 | repo: 12 | description: What repo to perform this action on. Default is self, as in the repo this action is running in 13 | default: "self" 14 | github_server_url: 15 | description: Set a custom github server url for github api operations. Useful if you're running on GHE. Will try to autodiscover from env.GITHUB_SERVER_URL if left at default 16 | default: "none" 17 | token: 18 | description: What github token to use with this action. 
19 | required: true 20 | outputs: 21 | result: 22 | description: "Result of the action" 23 | diff: 24 | description: "Diff of this action, dumped to a json string" 25 | runs: 26 | using: "docker" 27 | image: "docker://ghcr.io/andrewthetechie/gha-repo-manager:v1.8.0" # x-release-please-version 28 | branding: 29 | icon: "settings" 30 | color: "blue" 31 | -------------------------------------------------------------------------------- /examples/settings.yml: -------------------------------------------------------------------------------- 1 | # settings.yml can live in two places: 2 | # 1. in the repo itself 3 | # 2. in a centralized repo 4 | 5 | # The Action is able to apply settings to any repo that its token can manage 6 | # You can run Action from each repo, acting on that repo's settings.yml, or 7 | # from a central repo, using a single settings.yml to control many repos. 8 | 9 | # Which method you choose is up to you. See README.md for more info and example 10 | # Workflows to implement these strategies. 11 | settings: 12 | # See https://docs.github.com/en/rest/reference/repos#update-a-repository for all available settings. 13 | # any of these settings can be ommitted to just leave the repo's current setting 14 | # If a setting has a value in settings.yml, it will always overwrite what exists in the repo. 15 | 16 | # A short description of the repository that will show up on GitHub. Set to an empty string to clear. 17 | description: description of repo 18 | 19 | # A URL with more information about the repository. Set to an empty string to clear. 20 | homepage: https://example.github.io/ 21 | 22 | # A list of strings to apply as topics on the repo. Set to an empty string to clear topics. Omit or set to null to leave what repo already has 23 | topics: 24 | - gha 25 | - foo 26 | - bar 27 | 28 | # Either `true` to make the repository private, or `false` to make it public. 
29 | private: false 30 | 31 | # Either `true` to enable issues for this repository, `false` to disable them. 32 | has_issues: true 33 | 34 | # Either `true` to enable projects for this repository, or `false` to disable them. 35 | # If projects are disabled for the organization, passing `true` will cause an API error. 36 | has_projects: true 37 | 38 | # Either `true` to enable the wiki for this repository, `false` to disable it. 39 | has_wiki: true 40 | 41 | # Either `true` to enable downloads for this repository, `false` to disable them. 42 | has_downloads: true 43 | 44 | # Set the default branch for this repository. 45 | default_branch: main 46 | 47 | # Either `true` to allow squash-merging pull requests, or `false` to prevent 48 | # squash-merging. 49 | allow_squash_merge: true 50 | 51 | # Either `true` to allow merging pull requests with a merge commit, or `false` 52 | # to prevent merging pull requests with merge commits. 53 | allow_merge_commit: true 54 | 55 | # Either `true` to allow rebase-merging pull requests, or `false` to prevent 56 | # rebase-merging. 57 | allow_rebase_merge: true 58 | 59 | # Either `true` to enable automatic deletion of branches on merge, or `false` to disable 60 | delete_branch_on_merge: true 61 | 62 | # Either `true` to enable automated security fixes, or `false` to disable 63 | # automated security fixes. 64 | enable_automated_security_fixes: true 65 | 66 | # Either `true` to enable vulnerability alerts, or `false` to disable 67 | # vulnerability alerts. 68 | enable_vulnerability_alerts: true 69 | 70 | # Labels: define labels for Issues and Pull Requests 71 | labels: 72 | - name: bug 73 | color: CC0000 74 | description: An issue with the system. 75 | 76 | - name: feature 77 | # If including a `#`, make sure to wrap it with quotes! 78 | color: "#336699" 79 | description: New functionality. 80 | 81 | - name: Help Wanted 82 | # Provide a new name to rename an existing label. 
A rename that results in a 'not found' will not fail a run 83 | new_name: first-timers-only 84 | 85 | - name: Old Label 86 | # set exists: false to delete a label. A delete that results in a "not found" will not fail a run 87 | exists: false 88 | 89 | branch_protections: 90 | # branch protection can only be created for branches that exist. 91 | - name: main 92 | # https://docs.github.com/en/rest/reference/repos#update-branch-protection 93 | # Branch Protection settings. Leave a value out to leave set at current repo settings 94 | protection: 95 | # Require at least one approving review on a pull request, before merging. Set to null to disable. 96 | pr_options: 97 | # The number of approvals required. (1-6) 98 | required_approving_review_count: 1 99 | # Dismiss approved reviews automatically when a new commit is pushed. 100 | dismiss_stale_reviews: true 101 | # Blocks merge until code owners have reviewed. 102 | require_code_owner_reviews: true 103 | # Specify which users and teams can dismiss pull request reviews. Pass an empty dismissal_restrictions object to disable. User and team dismissal_restrictions are only available for organization-owned repositories. Omit this parameter for personal repositories. 104 | # dismissal_restrictions: 105 | # users: [] 106 | # teams: [] 107 | # Require status checks to pass before merging. Set to null to disable 108 | # required_status_checks: 109 | # # Require branches to be up to date before merging. 110 | # strict: true 111 | # # The list of status checks to require in order to merge into this branch 112 | # checks: 113 | # - lint 114 | # - test 115 | # - docker 116 | # Blocks merge until all conversations on a pull request have been resolved 117 | # require_conversation_resolution: true 118 | # Enforce all configured restrictions for administrators. Set to true to enforce required status checks for repository administrators. Set to null to disable. 
119 | enforce_admins: true 120 | # Prevent merge commits from being pushed to matching branches 121 | require_linear_history: true 122 | # Permit force pushes for all users with push access. 123 | allow_force_pushes: true 124 | # Allow users with push access to delete matching branches. 125 | allow_deletions: true 126 | # If set to true, the restrictions branch protection settings which limits who can push will also block pushes which create new branches, unless the push is initiated by a user, team, or app which has the ability to push. Set to true to restrict new branch creation. 127 | block_creations: true 128 | # Restrict who can push to this branch. Team and user restrictions are only available for organization-owned repositories. Set to null to disable. 129 | restrictions: 130 | users: [] 131 | teams: [] 132 | - name: dev 133 | # will clear any branch protection on the dev branch, IF the dev branch exists. If you setup protection for a non-existant branch, this action cannot delete it 134 | exists: False 135 | # if the repo has a third branch named test with branch protections setup, by not adding a protection with name: test, this config will not change 136 | # those existing protections. 137 | - name: test 138 | exists: True 139 | 140 | secrets: 141 | # Manage secrets in your repo. Useful to manage secrets from a central repo for non organizations or to manage secrets org wide 142 | - key: SECRET_KEY 143 | # pull the value from an environment variable. 
If this variable is not found in the env, throw an error and fail the run 144 | # Set env vars on the github action job from secrets in your repo to sync screts across repos 145 | env: SECRET_VALUE 146 | # Set a dependabot secret on the repo 147 | - key: SECRET_KEY 148 | env: SECRET_VALUE 149 | type: dependabot 150 | - key: ANOTHER_SECRET 151 | # set a value directly in your yaml, probably not a good idea for things that are actually a secret 152 | value: bar 153 | - key: THIRD_SECRET 154 | # pull the value from an environment variable 155 | env: THIRD_VALUE 156 | # setting a value as not required allows you to not pass in an env var. if THIRD_VALUE is not set in the env, this secret won't be set but no error will be thrown 157 | required: false 158 | - key: DELETED_SECRET 159 | # setting exists to false will delete a secret. A delete that results in "not found" won't fail a run, so you can use this to make sure a secret is always deleted 160 | exists: false 161 | 162 | # Can copy files from your local context to the repo. 163 | # Manipulate files in the target repo 164 | # * move files around 165 | # * delete files 166 | # Changes are automatically commited and pushed to a target branch (default is default branch) 167 | # File operations are applied sequentially 168 | files: 169 | # copy templates/actions/my_workflow.yml to .github/workflows/my_workflow.yml in your target repo 170 | # and commit it with the default commit message and to your repo's default branch. 171 | # default commit message is "repo_manager file commit" 172 | - src_file: templates/actions/my_workflow.yml 173 | dest_file: .github/workflows/my_workflow.yml 174 | - src_file: templates/issues/issue_template.md 175 | dest_file: .github/ISSUE_TEMPLATE/issue.md 176 | commit_msg: update issue template 177 | # Update this file in the dev branch. 
If the dev branch doesn't exist, this will fail the workflow 178 | - src_file: templates/dev/dev.md 179 | dest_file: dev.md 180 | target_branch: dev 181 | # This moves README.md to README.rst in the remote. If README.md doesn't exist, the workflow will not fail and will emit a warning. 182 | - src_file: remote://README.md 183 | dest_file: README.rst 184 | move: true 185 | commit_msg: "move readme" 186 | # This removes OLDDOC.md in the dev branch. If OLDDOC.md doesn't exist, the workflow will emit a warning 187 | - dest_file: OLDDOC.md 188 | exists: false 189 | branch: dev 190 | commit_msg: "remove OLDDOC.md from dev" 191 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | """Nox sessions.""" 2 | 3 | import os 4 | import shlex 5 | import sys 6 | from pathlib import Path 7 | from textwrap import dedent 8 | 9 | import nox 10 | import toml 11 | 12 | try: 13 | from nox_poetry import Session 14 | from nox_poetry import session 15 | except ImportError: 16 | message = f"""\ 17 | Nox failed to import the 'nox-poetry' package. 18 | 19 | Please install it using the following command: 20 | 21 | {sys.executable} -m pip install nox-poetry""" 22 | raise SystemExit(dedent(message)) from None 23 | 24 | 25 | package = "repo_manager" 26 | python_versions = [ 27 | "3.11", 28 | ] 29 | nox.needs_version = ">= 2021.6.6" 30 | nox.options.sessions = ( 31 | "pre-commit", 32 | "safety", 33 | # "mypy", 34 | "tests", 35 | ) 36 | pyproject = toml.load("pyproject.toml") 37 | test_requirements = pyproject["tool"]["poetry"]["group"]["dev"]["dependencies"].keys() 38 | mypy_type_packages = [requirement for requirement in test_requirements if requirement.startswith("types-")] 39 | 40 | 41 | def activate_virtualenv_in_precommit_hooks(session: Session) -> None: 42 | """Activate virtualenv in hooks installed by pre-commit. 
43 | 44 | This function patches git hooks installed by pre-commit to activate the 45 | session's virtual environment. This allows pre-commit to locate hooks in 46 | that environment when invoked from git. 47 | 48 | Args: 49 | session: The Session object. 50 | """ 51 | assert session.bin is not None # noqa: S101 52 | 53 | # Only patch hooks containing a reference to this session's bindir. Support 54 | # quoting rules for Python and bash, but strip the outermost quotes so we 55 | # can detect paths within the bindir, like /python. 56 | bindirs = [ 57 | bindir[1:-1] if bindir[0] in "'\"" else bindir for bindir in (repr(session.bin), shlex.quote(session.bin)) 58 | ] 59 | 60 | virtualenv = session.env.get("VIRTUAL_ENV") 61 | if virtualenv is None: 62 | return 63 | 64 | headers = { 65 | # pre-commit < 2.16.0 66 | "python": f"""\ 67 | import os 68 | os.environ["VIRTUAL_ENV"] = {virtualenv!r} 69 | os.environ["PATH"] = os.pathsep.join(( 70 | {session.bin!r}, 71 | os.environ.get("PATH", ""), 72 | )) 73 | """, 74 | # pre-commit >= 2.16.0 75 | "bash": f"""\ 76 | VIRTUAL_ENV={shlex.quote(virtualenv)} 77 | PATH={shlex.quote(session.bin)}"{os.pathsep}$PATH" 78 | """, 79 | } 80 | 81 | hookdir = Path(".git") / "hooks" 82 | if not hookdir.is_dir(): 83 | return 84 | 85 | for hook in hookdir.iterdir(): 86 | if hook.name.endswith(".sample") or not hook.is_file(): 87 | continue 88 | 89 | if not hook.read_bytes().startswith(b"#!"): 90 | continue 91 | 92 | text = hook.read_text() 93 | 94 | if not any(Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text for bindir in bindirs): 95 | continue 96 | 97 | lines = text.splitlines() 98 | 99 | for executable, header in headers.items(): 100 | if executable in lines[0].lower(): 101 | lines.insert(1, dedent(header)) 102 | hook.write_text("\n".join(lines)) 103 | break 104 | 105 | 106 | @session(name="pre-commit", python=python_versions[0]) 107 | def precommit(session: Session) -> None: 108 | """Lint using pre-commit.""" 109 | 
@session(name="pre-commit", python=python_versions[0])
def precommit(session: Session) -> None:
    """Lint using pre-commit."""
    # Default to linting everything; passing "install" as the first posarg
    # instead installs the git hooks (and patches them, below).
    args = session.posargs or ["run", "--all-files", "--show-diff-on-failure"]
    session.install(*test_requirements)
    session.install(".")
    session.run("pre-commit", *args)
    if args and args[0] == "install":
        activate_virtualenv_in_precommit_hooks(session)


@session(python=python_versions[0])
def safety(session: Session) -> None:
    """Scan dependencies for insecure packages."""
    requirements = session.poetry.export_requirements()
    session.install("safety")
    # Ignore 51457: an irresponsibly filed CVE against pytest's `py` package
    # that only generates noise -- see https://github.com/pytest-dev/py/issues/287
    session.run("safety", "check", "--full-report", f"--file={requirements}", "--ignore=51457")


@session(python=python_versions)
def mypy(session: Session) -> None:
    """Type-check using mypy."""
    args = session.posargs or ["repo_manager", "--follow-imports=silent", "--ignore-missing-imports"]
    session.install(".")
    session.install("mypy", "pytest")
    # Also install any `types-*` stub packages declared in the dev dependencies.
    if len(mypy_type_packages) > 0:
        session.install(*mypy_type_packages)
    session.run("mypy", *args)


@session(python=python_versions)
def tests(session: Session) -> None:
    """Run the test suite."""
    session.install(".")
    session.install(*test_requirements)
    session.run("poetry", "run", "pytest", *session.posargs)
"gha-repo-manager" 3 | version = "1.8.0" 4 | description = "Manage your Github repo(s) settings and secrets using Github Actions and a yaml file" 5 | authors = ["Andrew Herrington "] 6 | license = "MIT" 7 | readme = "README.md" 8 | packages = [{include = "repo_manager"}] 9 | 10 | [tool.poetry.scripts] 11 | repo-manager = "repo_manager.main:main" 12 | 13 | 14 | [tool.poetry.dependencies] 15 | python = "^3.11" 16 | pydantic = "^2.7.4" 17 | typing-extensions = "^4.12.2" 18 | actions-toolkit = "^0.1.15" 19 | pygithub = "^2.3.0" 20 | pyyaml = "^6.0" 21 | 22 | 23 | [tool.poetry.group.dev.dependencies] 24 | pre-commit = "^3.7.1" 25 | ruff = "^0.4.9" 26 | pytest = "^8.2.2" 27 | pytest-mock = "^3.12.0" 28 | pytest-cov = "^5.0.0" 29 | pytest-asyncio = "^0.23.7" 30 | bandit = "^1.7.9" 31 | mypy = "^1.6.1" 32 | pre-commit-hooks = "^4.5.0" 33 | reorder-python-imports = "^3.13.0" 34 | pytest-xdist = "^3.2.1" 35 | pyupgrade = "^3.16.0" 36 | pyflakes = "^3.1.0" 37 | black = "^24.4.1" 38 | 39 | 40 | [build-system] 41 | requires = ["poetry-core"] 42 | build-backend = "poetry.core.masonry.api" 43 | 44 | [tool.bandit] 45 | exclude_dirs = ["tests", "noxfile.py", ".github/scripts"] 46 | 47 | [tool.bandit.assert_used] 48 | skips = ["repo_manager/github/branch_protections.py"] 49 | 50 | [tool.ruff] 51 | line-length = 120 52 | target-version = "py311" 53 | 54 | [tool.mypy] 55 | warn_unreachable = true 56 | pretty = true 57 | show_column_numbers = true 58 | show_error_codes = true 59 | show_error_context = true 60 | 61 | [tool.pytest.ini_options] 62 | norecursedirs = ".github ci .git .idea" 63 | addopts = "--cov=repo_manager --cov-report xml:.coverage.xml --cov-report=term-missing" 64 | -------------------------------------------------------------------------------- /repo_manager/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/andrewthetechie/gha-repo-manager/4f67f851fced12931aaef54dafafbe2d54b2c315/repo_manager/__init__.py -------------------------------------------------------------------------------- /repo_manager/gh/__init__.py: -------------------------------------------------------------------------------- 1 | from functools import lru_cache 2 | 3 | from github import Github 4 | from github.GithubException import GithubException 5 | from github.GithubException import UnknownObjectException 6 | 7 | 8 | @lru_cache 9 | def get_github_client(token: str, api_url: str) -> Github: 10 | """ """ 11 | return Github(token, base_url=api_url) 12 | 13 | 14 | __all__ = ["get_github_client", "GithubException", "UnknownObjectException"] 15 | -------------------------------------------------------------------------------- /repo_manager/gh/branch_protections.py: -------------------------------------------------------------------------------- 1 | from copy import deepcopy 2 | from typing import Any 3 | 4 | from github.Consts import mediaTypeRequireMultipleApprovingReviews 5 | from github.GithubException import GithubException 6 | from github.GithubObject import NotSet 7 | from github.Repository import Repository 8 | 9 | from repo_manager.schemas.branch_protection import BranchProtection 10 | from repo_manager.schemas.branch_protection import ProtectionOptions 11 | from repo_manager.utils import attr_to_kwarg 12 | from repo_manager.utils import objary_to_list 13 | 14 | 15 | def diff_option(key: str, expected: Any, repo_value: Any) -> str | None: 16 | if expected is not None: 17 | if expected != repo_value: 18 | return f"{key} -- Expected: {expected} Found: {repo_value}" 19 | return None 20 | 21 | 22 | def update_branch_protection(repo: Repository, branch: str, protection_config: ProtectionOptions): # noqa: C901 23 | # Copied from https://github.com/PyGithub/PyGithub/blob/001970d4a828017f704f6744a5775b4207a6523c/github/Branch.py#L112 24 | # Until pygithub supports this, 
we need to do it manually 25 | def edit_protection( # nosec 26 | branch, 27 | required_status_checks=NotSet, 28 | enforce_admins=NotSet, 29 | dismissal_users=NotSet, 30 | dismissal_teams=NotSet, 31 | dismiss_stale_reviews=NotSet, 32 | require_code_owner_reviews=NotSet, 33 | required_approving_review_count=NotSet, 34 | user_push_restrictions=NotSet, 35 | team_push_restrictions=NotSet, 36 | user_bypass_pull_request_allowances=NotSet, 37 | team_bypass_pull_request_allowances=NotSet, 38 | required_linear_history=NotSet, 39 | allow_force_pushes=NotSet, 40 | allow_deletions=NotSet, 41 | block_creations=NotSet, 42 | required_conversation_resolution=NotSet, 43 | ): # nosec 44 | """ 45 | :calls: `PUT /repos/{owner}/{repo}/branches/{branch}/protection `_ 46 | :required_status_checks: dict 47 | :enforce_admins: bool 48 | :dismissal_users: list of strings 49 | :dismissal_teams: list of strings 50 | :dismiss_stale_reviews: bool 51 | :require_code_owner_reviews: bool 52 | :required_approving_review_count: int 53 | :user_push_restrictions: list of strings 54 | :team_push_restrictions: list of strings 55 | :user_bypass_pull_request_allowances: list of strings 56 | :team_bypass_pull_request_allowances: list of strings 57 | NOTE: The GitHub API groups strict and contexts together, both must 58 | be submitted. Take care to pass both as arguments even if only one is 59 | changing. Use edit_required_status_checks() to avoid this. 
60 | """ 61 | assert required_status_checks is NotSet or isinstance(required_status_checks, dict), required_status_checks # nosec assert_used 62 | assert enforce_admins is NotSet or isinstance(enforce_admins, bool), enforce_admins # nosec assert_used 63 | assert dismissal_users is NotSet or all( # nosec assert_used 64 | isinstance(element, str) for element in dismissal_users 65 | ), dismissal_users 66 | assert dismissal_teams is NotSet or all( # nosec assert_used 67 | isinstance(element, str) for element in dismissal_teams 68 | ), dismissal_teams 69 | assert dismiss_stale_reviews is NotSet or isinstance(dismiss_stale_reviews, bool), dismiss_stale_reviews # nosec assert_used 70 | assert require_code_owner_reviews is NotSet or isinstance( # nosec assert_used 71 | require_code_owner_reviews, bool 72 | ), require_code_owner_reviews 73 | assert required_approving_review_count is NotSet or isinstance( # nosec assert_used 74 | required_approving_review_count, int 75 | ), required_approving_review_count 76 | 77 | post_parameters = {} 78 | if required_status_checks is not NotSet: 79 | post_parameters["required_status_checks"] = required_status_checks 80 | else: 81 | post_parameters["required_status_checks"] = { 82 | "strict": False, 83 | "contexts": [], 84 | } 85 | 86 | if enforce_admins is not NotSet: 87 | post_parameters["enforce_admins"] = enforce_admins 88 | else: 89 | post_parameters["enforce_admins"] = None 90 | 91 | if ( 92 | dismissal_users is not NotSet 93 | or dismissal_teams is not NotSet 94 | or dismiss_stale_reviews is not NotSet 95 | or require_code_owner_reviews is not NotSet 96 | or required_approving_review_count is not NotSet 97 | or user_bypass_pull_request_allowances is not NotSet 98 | or team_bypass_pull_request_allowances is not NotSet 99 | ): 100 | post_parameters["required_pull_request_reviews"] = {} 101 | if dismiss_stale_reviews is not NotSet: 102 | post_parameters["required_pull_request_reviews"]["dismiss_stale_reviews"] = dismiss_stale_reviews 
103 | if require_code_owner_reviews is not NotSet: 104 | post_parameters["required_pull_request_reviews"]["require_code_owner_reviews"] = ( 105 | require_code_owner_reviews 106 | ) 107 | if required_approving_review_count is not NotSet: 108 | post_parameters["required_pull_request_reviews"]["required_approving_review_count"] = ( 109 | required_approving_review_count 110 | ) 111 | if dismissal_users is not NotSet or dismissal_teams is not NotSet: 112 | if dismissal_users is NotSet: 113 | dismissal_teams = [] 114 | if dismissal_teams is NotSet: 115 | dismissal_teams = [] 116 | post_parameters["required_pull_request_reviews"]["dismissal_restrictions"] = { 117 | "users": dismissal_users, 118 | "teams": dismissal_teams, 119 | } 120 | if user_bypass_pull_request_allowances is not NotSet or team_bypass_pull_request_allowances is not NotSet: 121 | if user_bypass_pull_request_allowances is NotSet: 122 | user_bypass_pull_request_allowances = [] 123 | if team_bypass_pull_request_allowances is NotSet: 124 | team_bypass_pull_request_allowances = [] 125 | post_parameters["required_pull_request_reviews"]["bypass_pull_request_allowances"] = { 126 | "users": user_bypass_pull_request_allowances, 127 | "teams": team_bypass_pull_request_allowances, 128 | } 129 | else: 130 | post_parameters["required_pull_request_reviews"] = None 131 | 132 | if user_push_restrictions is not NotSet or team_push_restrictions is not NotSet: 133 | if user_push_restrictions is NotSet: 134 | user_push_restrictions = [] 135 | if team_push_restrictions is NotSet: 136 | team_push_restrictions = [] 137 | post_parameters["restrictions"] = { 138 | "users": user_push_restrictions, 139 | "teams": team_push_restrictions, 140 | } 141 | else: 142 | post_parameters["restrictions"] = None 143 | 144 | if required_linear_history is not NotSet: 145 | post_parameters["required_linear_history"] = required_linear_history 146 | else: 147 | post_parameters["required_linear_history"] = None 148 | 149 | if allow_force_pushes is 
not NotSet: 150 | post_parameters["allow_force_pushes"] = allow_force_pushes 151 | else: 152 | post_parameters["allow_force_pushes"] = None 153 | 154 | if allow_deletions is not NotSet: 155 | post_parameters["allow_deletions"] = allow_deletions 156 | else: 157 | post_parameters["allow_deletions"] = None 158 | 159 | if block_creations is not NotSet: 160 | post_parameters["block_creations"] = block_creations 161 | else: 162 | post_parameters["block_creations"] = None 163 | 164 | if required_conversation_resolution is not NotSet: 165 | post_parameters["required_conversation_resolution"] = required_conversation_resolution 166 | else: 167 | post_parameters["required_conversation_resolution"] = None 168 | 169 | headers, data = branch._requester.requestJsonAndCheck( 170 | "PUT", 171 | branch.protection_url, 172 | headers={"Accept": mediaTypeRequireMultipleApprovingReviews}, 173 | input=post_parameters, 174 | ) 175 | 176 | this_branch = repo.get_branch(branch) 177 | kwargs = {} 178 | status_check_kwargs = {} 179 | extra_kwargs = {} 180 | 181 | if protection_config.pr_options is not None: 182 | attr_to_kwarg("required_approving_review_count", protection_config.pr_options, kwargs) 183 | attr_to_kwarg("dismiss_stale_reviews", protection_config.pr_options, kwargs) 184 | attr_to_kwarg("require_code_owner_reviews", protection_config.pr_options, kwargs) 185 | 186 | if repo.organization is not None: 187 | attr_to_kwarg( 188 | "users", 189 | protection_config.pr_options.dismissal_restrictions, 190 | kwargs, 191 | transform_key="dismissal_users", 192 | ) 193 | attr_to_kwarg( 194 | "teams", 195 | protection_config.pr_options.dismissal_restrictions, 196 | kwargs, 197 | transform_key="dismissal_teams", 198 | ) 199 | attr_to_kwarg( 200 | "users", 201 | protection_config.pr_options.dismissal_restrictions, 202 | kwargs, 203 | transform_key="user_bypass_pull_request_allowances", 204 | ) 205 | attr_to_kwarg( 206 | "teams", 207 | protection_config.pr_options.dismissal_restrictions, 208 | 
kwargs, 209 | transform_key="team_bypass_pull_request_allowances", 210 | ) 211 | 212 | if repo.organization is not None: 213 | attr_to_kwarg( 214 | "users", 215 | protection_config.restrictions, 216 | kwargs, 217 | transform_key="user_push_restrictions", 218 | ) 219 | attr_to_kwarg( 220 | "teams", 221 | protection_config.restrictions, 222 | kwargs, 223 | transform_key="team_push_restrictions", 224 | ) 225 | 226 | attr_to_kwarg("enforce_admins", protection_config, kwargs) 227 | 228 | # these are going to be used by edit_required_status_checks 229 | attr_to_kwarg("strict", protection_config.required_status_checks, status_check_kwargs) 230 | if protection_config.required_status_checks.checks is None: 231 | status_check_kwargs["contexts"] = [] 232 | else: 233 | attr_to_kwarg( 234 | "checks", 235 | protection_config.required_status_checks, 236 | status_check_kwargs, 237 | transform_key="contexts", 238 | ) 239 | extra_kwargs["required_status_checks"] = status_check_kwargs 240 | 241 | # these are not handled by edit_protection, so we have to use the custom api 242 | attr_to_kwarg( 243 | "require_linear_history", 244 | protection_config, 245 | extra_kwargs, 246 | transform_key="required_linear_history", 247 | ) 248 | attr_to_kwarg("allow_force_pushes", protection_config, extra_kwargs) 249 | attr_to_kwarg("allow_deletions", protection_config, extra_kwargs) 250 | attr_to_kwarg("block_creations", protection_config, extra_kwargs) 251 | attr_to_kwarg( 252 | "require_conversation_resolution", 253 | protection_config, 254 | extra_kwargs, 255 | transform_key="required_conversation_resolution", 256 | ) 257 | 258 | try: 259 | edit_protection(branch=this_branch, **kwargs, **extra_kwargs) 260 | except GithubException as exc: 261 | raise ValueError(f"{exc.data['message']} {exc.data['documentation_url']}") 262 | # This errors out because the underlying method does a UPDATE instead of a POST as stated by GitHub documentation 263 | # was able to fix this issue by adding the additional key 
def check_repo_branch_protections(
    repo: Repository, config_branch_protections: list[BranchProtection]
) -> tuple[bool, dict[str, list[str] | dict[str, Any]]]:
    """Checks a repo's branch protections vs our expected settings

    Args:
        repo (Repository): repository to inspect
        config_branch_protections (list[BranchProtection]): protections we expect

    Returns:
        tuple[bool, dict]: (True when no drift, diff dict with keys
        "missing", "extra" and "diffs")
    """
    repo_branches = {branch.name: branch for branch in repo.get_branches()}

    missing_protections = []
    extra_protections = []
    diff_protections = {}

    for config_bp in config_branch_protections:
        repo_bp = repo_branches.get(config_bp.name, None)
        if repo_bp is None and config_bp.exists:
            missing_protections.append(config_bp.name)
            continue
        if not config_bp.exists and repo_bp is not None:
            extra_protections.append(config_bp.name)
            continue

        diffs = []
        if config_bp.protection is None:
            continue

        # if our repo isn't protected and we've made it this far, it should be
        if not repo_bp.protected:
            diff_protections[config_bp.name] = ["Branch is not protected"]
            continue

        this_protection = repo_bp.get_protection()
        if config_bp.protection.pr_options is not None:
            # The repo may have no PR-review rule at all (None in the API
            # response), which previously had to be special-cased per option.
            pr_reviews = this_protection.required_pull_request_reviews
            for pr_key in ("required_approving_review_count", "dismiss_stale_reviews", "require_code_owner_reviews"):
                diffs.append(
                    diff_option(
                        pr_key,
                        getattr(config_bp.protection.pr_options, pr_key),
                        None if pr_reviews is None else getattr(pr_reviews, pr_key),
                    )
                )
            # for now, not checking dismissal options. Will note that in the docs

        if (
            config_bp.protection.required_status_checks is not None
            and this_protection.required_status_checks is not None
        ):
            diffs.append(
                diff_option(
                    "required_status_checks::strict",
                    config_bp.protection.required_status_checks.strict,
                    this_protection.required_status_checks.strict,
                )
            )
            # Without sorting, they sometimes get flagged as different just due to ordering
            if config_bp.protection.required_status_checks.checks is not None:
                config_bp.protection.required_status_checks.checks.sort()
            if this_protection.required_status_checks.contexts is not None:
                this_protection.required_status_checks.contexts.sort()
            diffs.append(
                diff_option(
                    "required_status_checks::checks",
                    config_bp.protection.required_status_checks.checks,
                    this_protection.required_status_checks.contexts,
                )
            )

        diffs.append(
            diff_option(
                "enforce_admins",
                config_bp.protection.enforce_admins,
                this_protection.enforce_admins,
            )
        )
        # These flags are only exposed via raw_data; config name -> raw_data key.
        for config_key, raw_key in (
            ("require_linear_history", "required_linear_history"),
            ("allow_force_pushes", "allow_force_pushes"),
            ("allow_deletions", "allow_deletions"),
            ("block_creations", "block_creations"),
            ("require_conversation_resolution", "required_conversation_resolution"),
            ("require_signed_commits", "required_signatures"),
        ):
            diffs.append(
                diff_option(
                    config_key,
                    getattr(config_bp.protection, config_key),
                    this_protection.raw_data[raw_key]["enabled"],
                )
            )

        # TODO: Figure out how to diff push Restriction options.
        # BUGFIX: dismissal diffs previously dereferenced pr_options unguarded
        # (AttributeError when pr_options is None).
        if config_bp.protection.pr_options is not None:
            dismissal_restrictions = config_bp.protection.pr_options.dismissal_restrictions
            pr_reviews = this_protection.required_pull_request_reviews
            # BUGFIX: `except TypeError` alone missed the AttributeError raised
            # when the repo has no required_pull_request_reviews at all.
            try:
                dismissal_teams = objary_to_list("slug", pr_reviews.dismissal_teams)
            except (TypeError, AttributeError):
                dismissal_teams = []
            dismissal_teams.sort()
            try:
                dismissal_users = objary_to_list("name", pr_reviews.dismissal_users)
            except (TypeError, AttributeError):
                dismissal_users = []
            dismissal_users.sort()

            if dismissal_restrictions is not None:
                if dismissal_restrictions.teams is not None:
                    dismissal_restrictions.teams.sort()
                diffs.append(diff_option("dismissal_teams", dismissal_restrictions.teams, dismissal_teams))
                # BUGFIX: previously re-sorted .teams here (copy-paste); the
                # users list is what gets compared.
                if dismissal_restrictions.users is not None:
                    dismissal_restrictions.users.sort()
                diffs.append(diff_option("dismissal_users", dismissal_restrictions.users, dismissal_users))

        diffs = [i for i in diffs if i is not None]
        if len(diffs) > 0:
            diff_protections[config_bp.name] = deepcopy(diffs)

    # BUGFIX: the old `len(a) == 0 & len(b) == 0 & len(c) == 0` chained
    # comparison parsed as `a == (0 & b) == (0 & c) == 0`, which only checked
    # for missing protections. Use boolean `and`.
    checked = len(missing_protections) == 0 and len(extra_protections) == 0 and len(diff_protections) == 0
    return checked, {
        "missing": missing_protections,
        "extra": extra_protections,
        "diffs": diff_protections,
    }
def copy_file(repo: Repository, file_config: FileConfig) -> str:
    """Create or update a file in a repository via the contents API.

    The new contents come either from the local config
    (``src_file_contents``) or, when ``remote_src`` is set, from a file
    fetched out of the remote repository itself.

    Returns:
        str: sha of the commit that created or updated the file.

    Raises:
        RemoteSrcNotFoundError: the remote source file does not exist.
    """
    branch = file_config.target_branch
    if branch is None:
        branch = repo.default_branch

    try:
        if file_config.remote_src:
            new_contents = get_remote_file_contents(repo, file_config.src_file, branch)
        else:
            new_contents = file_config.src_file_contents
    except UnknownObjectException:
        raise RemoteSrcNotFoundError(f"Remote file {file_config.src_file} not found in {branch}")

    dest_path = str(file_config.dest_file.relative_to("."))
    try:
        # An existing destination means this is an update, which needs the blob sha.
        existing = repo.get_contents(str(file_config.dest_file), ref=branch)
        result = repo.update_file(
            dest_path,
            file_config.commit_msg,
            new_contents,
            sha=existing.sha,
            branch=branch,
        )
    except UnknownObjectException:
        # No destination contents: this is a brand-new file.
        result = repo.create_file(dest_path, file_config.commit_msg, new_contents, branch=branch)

    return result["commit"].sha
def move_file(repo: Repository, file_config: FileConfig) -> tuple[str, str]:
    """Move a file within a repository.

    Implemented as a copy to ``dest_file`` followed by a delete of
    ``src_file`` (delete_file honors the ``move`` flag); returns the
    (copy commit sha, delete commit sha) pair.
    """
    return copy_file(repo, file_config), delete_file(repo, file_config)


def delete_file(
    repo: Repository,
    file_config: FileConfig,
) -> str:
    """Delete a file from a repository.

    Returns:
        str: sha of the commit that deleted the file.
    """
    # if we're doing a delete for a move, delete the src_file rather than the dest_file
    to_delete = file_config.src_file if file_config.move else file_config.dest_file
    target_branch = file_config.target_branch if file_config.target_branch is not None else repo.default_branch
    contents = repo.get_contents(str(to_delete.relative_to(".")), ref=target_branch)
    result = repo.delete_file(contents.path, file_config.commit_msg, contents.sha, branch=target_branch)
    return result["commit"].sha
def check_repo_labels(
    repo: Repository, config_labels: list[Label]
) -> tuple[bool, dict[str, list[str] | dict[str, Any]]]:
    """Checks a repo's labels vs our expected settings

    Args:
        repo (Repository): repository to inspect
        config_labels (list[Label]): labels we expect (or expect absent)

    Returns:
        tuple[bool, dict]: (True when no drift, diff dict with keys
        "missing", "extra" and "diffs")
    """
    repo_labels = {label.name: label for label in repo.get_labels()}

    missing_labels = []
    extra_labels = []
    diff_labels = {}

    for config_label in config_labels:
        repo_label = repo_labels.get(config_label.expected_name, None)
        if repo_label is None and config_label.exists:
            missing_labels.append(config_label.expected_name)
            continue
        if not config_label.exists and repo_label is not None:
            extra_labels.append(config_label.expected_name)
            continue
        # BUGFIX: an absent label with exists=False previously fell through to
        # the color/description comparison and raised AttributeError on None.
        if repo_label is None:
            continue

        diffs = []
        if config_label.color is not None:
            if config_label.color_no_hash != repo_label.color:
                diffs.append(f"Expected color '{config_label.color_no_hash}'. Repo has color '{repo_label.color}'")

        if config_label.description is not None:
            if config_label.description != repo_label.description:
                # BUGFIX: the message was missing its closing quote.
                diffs.append(
                    f"Expected description '{config_label.description}'. Repo description '{repo_label.description}'"
                )
        if len(diffs) > 0:
            diff_labels[config_label.expected_name] = deepcopy(diffs)

    # BUGFIX: the old `len(a) == 0 & len(b) == 0 & len(c) == 0` chained
    # comparison parsed as `a == (0 & b) == (0 & c) == 0`, which only checked
    # for missing labels. Use boolean `and`.
    checked = len(missing_labels) == 0 and len(extra_labels) == 0 and len(diff_labels) == 0
    return checked, {
        "missing": missing_labels,
        "extra": extra_labels,
        "diffs": diff_labels,
    }
# BUGFIX: this previously imported from the local `gh` package
# (repo_manager.gh), which does not export `Github`; the PyGithub client
# class lives in the third-party `github` module.
from github import Github
from github.Repository import Repository


class BadTokenError(Exception):
    """Raised when the repository cannot be fetched (usually bad credentials)."""

    ...


def get_repo(client: Github, repo: str) -> tuple[bool, Repository | None]:
    """Gets a repo

    Args:
        client (Github): an authenticated client
        repo (str): "owner/name" of the repository to fetch

    Returns:
        tuple[bool, Repository | None]: (True, repo) on success

    Raises:
        BadTokenError: the API call failed
    """
    try:
        this_repo = client.get_repo(repo)
    except Exception as exc:  # this exception should be tighter
        raise BadTokenError(exc)

    return True, this_repo
def check_repo_secrets(repo: Repository, secrets: list[Secret]) -> tuple[bool, dict[str, list[str] | dict[str, Any]]]:
    """Compare a repo's actions/dependabot secrets against the expected config.

    Args:
        repo (Repository): repository to inspect
        secrets (list[Secret]): secrets we expect present (or expect absent)

    Returns:
        tuple[bool, dict]: (True when no drift, {"missing": [...], "extra": [...]})
    """
    config_by_key = {secret.key: secret for secret in secrets}
    checked = True

    live_actions = _get_repo_secret_names(repo)
    live_dependabot = _get_repo_secret_names(repo, "dependabot")
    expected_actions = {s.key for s in secrets if s.exists and s.type == "actions"}
    expected_dependabot = {s.key for s in secrets if s.exists and s.type == "dependabot"}

    diff = {
        "missing": list(expected_actions - live_actions) + list(expected_dependabot - live_dependabot),
        "extra": [],
    }

    # A live secret only counts as "extra" when the config explicitly marks it
    # as exists=False; unmanaged secrets are left alone.
    unexpected = list(live_actions - expected_actions) + list(live_dependabot - expected_dependabot)
    for name in unexpected:
        config_entry = config_by_key.get(name, None)
        if config_entry is None:
            continue
        if not config_entry.exists:
            diff["extra"].append(name)
            checked = False

    if len(diff["missing"]) > 0:
        checked = False

    return checked, diff
def _get_repo_secret_names(repo: Repository, type: str = "actions") -> set[str]:
    """Return the names of a repo's secrets of the given backend type.

    Args:
        repo (Repository): repository to query
        type (str): secret backend, "actions" (default) or "dependabot"

    Raises:
        Exception: the API call failed or returned invalid JSON
    """
    status, headers, raw_data = repo._requester.requestJson("GET", f"{repo.url}/{type}/secrets")
    if status != 200:
        raise Exception(f"Unable to get repo's secrets {status}")
    try:
        secret_data = json.loads(raw_data)
    except json.JSONDecodeError as exc:
        # BUGFIX: message previously read "Github apu returned invalid json"
        raise Exception(f"Github api returned invalid json {exc}")

    return {secret["name"] for secret in secret_data["secrets"]}
def update_settings(repo: Repository, settings: Settings):
    """Push the configured settings to the repo via repo.edit plus the dedicated toggle endpoints."""
    kwargs = {"name": None}

    attr_to_kwarg("description", settings, kwargs)
    attr_to_kwarg("homepage", settings, kwargs)
    attr_to_kwarg("private", settings, kwargs)
    attr_to_kwarg("has_issues", settings, kwargs)
    attr_to_kwarg("has_projects", settings, kwargs)
    attr_to_kwarg("has_wiki", settings, kwargs)
    attr_to_kwarg("has_downloads", settings, kwargs)
    attr_to_kwarg("default_branch", settings, kwargs)
    attr_to_kwarg("allow_squash_merge", settings, kwargs)
    attr_to_kwarg("allow_merge_commit", settings, kwargs)
    attr_to_kwarg("allow_rebase_merge", settings, kwargs)
    attr_to_kwarg("delete_branch_on_merge", settings, kwargs)
    repo.edit(**kwargs)

    # These two settings have dedicated enable/disable endpoints rather than repo.edit kwargs
    if settings.enable_automated_security_fixes is not None:
        if settings.enable_automated_security_fixes:
            repo.enable_automated_security_fixes()
        else:
            repo.disable_automated_security_fixes()

    if settings.enable_vulnerability_alerts is not None:
        if settings.enable_vulnerability_alerts:
            repo.enable_vulnerability_alert()
        else:
            repo.disable_vulnerability_alert()

    if settings.topics is not None:
        repo.replace_topics(settings.topics)


def check_repo_settings(repo: Repository, settings: Settings) -> tuple[bool, list[str | None]]:
    """Checks a repo's settings vs our expected settings

    Args:
        repo (Repository): repo to inspect
        settings (Settings): expected settings from the config file

    Returns:
        Tuple[bool, Optional[List[str]]]: (True when no blocking drift found, list of drift descriptions)
    """

    def get_repo_value(setting_name: str, repo: Repository) -> Any | None:
        """Get a setting's current value from the repo, honoring SETTINGS getter overrides."""
        getter_val = SETTINGS[setting_name].get("get", setting_name)
        if getter_val is None:
            # no API endpoint exists to read this setting
            return None
        getter = getattr(repo, getter_val)
        # some settings are plain attributes, others are getter methods
        if not callable(getter):
            return getter
        return getter()

    drift = []
    checked = True
    # model_dump() is the pydantic v2 replacement for the deprecated .dict()
    for setting_name in settings.model_dump().keys():
        repo_value = get_repo_value(setting_name, repo)
        settings_value = getattr(settings, setting_name)
        # These don't seem to update if changed; may need to explore a different API call
        if setting_name == "enable_automated_security_fixes" or setting_name == "enable_vulnerability_alerts":
            continue
        # We don't want to flag description being different if the YAML is None
        if setting_name == "description" and not settings_value:
            continue
        elif setting_name == "topics" and settings_value is None:
            settings_value = []
        if repo_value != settings_value:
            drift.append(f"{setting_name} -- Expected: '{settings_value}' Found: '{repo_value}'")
            # drift on a setting the config leaves unset (None) is reported but not blocking
            if settings_value is not None:
                checked = False
    return checked, drift


def update(repo: Repository, setting_name: str, new_value: Any):
    """Default setter for a repo setting. Not yet implemented.

    Args:
        repo (Repository): repo to update
        setting_name (str): name of the setting
        new_value (Any): value to set
    """
    ...


def set_topics(repo: Repository, setting_name: str, new_value: Any):
    """Setter for the repo's topics. Not yet implemented.

    Args:
        repo (Repository): repo to update
        setting_name (str): name of the setting
        new_value (Any): value to set
    """
    ...


def set_security_fixes(repo: Repository, setting_name: str, new_value: Any):
    """Setter for automated security fixes. Not yet implemented.

    Args:
        repo (Repository): repo to update
        setting_name (str): name of the setting
        new_value (Any): value to set
    """
    ...


def set_vuln_alerts(repo: Repository, setting_name: str, new_value: Any):
    """Setter for vulnerability alerts. Not yet implemented.

    Args:
        repo (Repository): repo to update
        setting_name (str): name of the setting
        new_value (Any): value to set
    """
    ...
# "name_of_setting_from_repo_manager.schemas.settings.Settings": {
#     Optional entry, a method to call on the repo object to get a setting.
#     Default is the name of the setting.
#     If the value of repo.getatter(get) is a callable, we'll call it to get the result
#     "get": "get_setting"
#     Optional entry, a method to call to update this setting on the repo.
#     Is passed the repo, setting_name, and new_value.
#     Default is repo_manager.github.settings.update
# }
# an empty dict means to just use the default methods
SETTINGS = {
    "description": {},
    "homepage": {},
    "topics": {"get": "get_topics", "set": set_topics},
    "private": {},
    "has_issues": {},
    "has_projects": {},
    "has_wiki": {},
    "has_downloads": {},
    "default_branch": {},
    "allow_squash_merge": {},
    "allow_merge_commit": {},
    "allow_rebase_merge": {},
    # was {"set": ""} -- an empty string is not a callable updater and no other
    # entry uses one; use the default like the other repo.edit-backed settings
    "delete_branch_on_merge": {},
    # Checks set to none are for values that there are no api endpoints to get
    # Like the security and vulnerability alerts
    "enable_automated_security_fixes": {"get": None, "set": set_security_fixes},
    "enable_vulnerability_alerts": {"get": None, "set": set_vuln_alerts},
}


# ---------------------------------------------------------------------------
# repo_manager/main.py
# ---------------------------------------------------------------------------
import json
import sys

from actions_toolkit import core as actions_toolkit

from repo_manager.gh.branch_protections import check_repo_branch_protections
from repo_manager.gh.branch_protections import update_branch_protection
from repo_manager.gh.files import copy_file
from repo_manager.gh.files import delete_file
from repo_manager.gh.files import move_file
from repo_manager.gh.files import RemoteSrcNotFoundError
from repo_manager.gh.labels import check_repo_labels
from repo_manager.gh.labels import update_label
from repo_manager.gh.secrets import check_repo_secrets
from repo_manager.gh.secrets import create_secret
from repo_manager.gh.secrets import delete_secret
from repo_manager.gh.settings import check_repo_settings
from repo_manager.gh.settings import update_settings
from repo_manager.schemas import load_config
from repo_manager.utils import get_inputs
from yaml import YAMLError
from pydantic import ValidationError
from repo_manager.gh import GithubException, UnknownObjectException


def main():  # noqa: C901
    """Entry point for the repo_manager action.

    Collects inputs, loads and validates the settings file, then performs the
    requested action: validate (config only), check (report drift as a diff),
    or apply (sync secrets, labels, branch protections, settings, and files).
    """
    try:
        inputs = get_inputs()
    # actions toolkit has very broad exceptions :(
    except Exception as exc:
        actions_toolkit.set_failed(f"Unable to collect inputs {exc}")
    actions_toolkit.debug(f"Loading config from {inputs['settings_file']}")
    try:
        config = load_config(inputs["settings_file"])
    except FileNotFoundError:
        actions_toolkit.set_failed(f"{inputs['settings_file']} does not exist or is not readable")
    except YAMLError as exc:
        actions_toolkit.set_failed(f"Unable to read {inputs['settings_file']} - {exc}")
    except ValidationError as exc:
        actions_toolkit.set_failed(f"{inputs['settings_file']} is invalid - {exc}")

    actions_toolkit.debug(f"Inputs: {inputs}")
    if inputs["action"] == "validate":
        actions_toolkit.set_output("result", f"Validated {inputs['settings_file']}")
        # validate never computes a diff, so an empty one is correct here
        actions_toolkit.debug(json_diff := json.dumps({}))
        actions_toolkit.set_output("diff", json_diff)
        sys.exit(0)
    actions_toolkit.info(f"Config from {inputs['settings_file']} validated.")

    check_result = True
    diffs = {}
    for check, to_check in {
        check_repo_settings: ("settings", config.settings),
        check_repo_secrets: ("secrets", config.secrets),
        check_repo_labels: ("labels", config.labels),
        check_repo_branch_protections: (
            "branch_protections",
            config.branch_protections,
        ),
    }.items():
        check_name, to_check = to_check
        if to_check is not None:
            this_check, this_diffs = check(inputs["repo_object"], to_check)
            check_result &= this_check
            if this_diffs is not None:
                diffs[check_name] = this_diffs

    # BUGFIX: this previously dumped json.dumps({}), so the "diff" output was
    # always empty even when drift was detected; emit the computed diffs
    actions_toolkit.debug(json_diff := json.dumps(diffs))
    actions_toolkit.set_output("diff", json_diff)

    if inputs["action"] == "check":
        if not check_result:
            actions_toolkit.set_output("result", "Check failed, diff detected")
            actions_toolkit.set_failed("Diff detected")
        actions_toolkit.set_output("result", "Check passed")
        sys.exit(0)

    if inputs["action"] == "apply":
        errors = []

        # Because we cannot diff secrets, just apply it every time
        if config.secrets is not None:
            for secret in config.secrets:
                if secret.exists:
                    try:
                        create_secret(
                            inputs["repo_object"], secret.key, secret.expected_value, secret.type == "dependabot"
                        )
                        actions_toolkit.info(f"Set {secret.key} to expected value")
                    except Exception as exc:  # this should be tighter
                        errors.append(
                            {
                                "type": "secret-update",
                                "key": secret.key,
                                "error": f"{exc}",
                            }
                        )
                else:
                    try:
                        delete_secret(inputs["repo_object"], secret.key, secret.type == "dependabot")
                        actions_toolkit.info(f"Deleted {secret.key}")
                    except Exception as exc:  # this should be tighter
                        errors.append(
                            {
                                "type": "secret-delete",
                                "key": secret.key,
                                "error": f"{exc}",
                            }
                        )

        labels_diff = diffs.get("labels", None)
        if labels_diff is not None:
            for label_name in labels_diff["extra"]:
                try:
                    this_label = inputs["repo_object"].get_label(label_name)
                    this_label.delete()
                    actions_toolkit.info(f"Deleted {label_name}")
                except Exception as exc:  # this should be tighter
                    errors.append({"type": "label-delete", "name": label_name, "error": f"{exc}"})
            for label_name in labels_diff["missing"]:
                label_object = config.labels_dict[label_name]
                if label_object.name != label_object.expected_name:
                    # a rename: the old label exists, so update it in place
                    update_label(inputs["repo_object"], label_object)
                    actions_toolkit.info(f"Renamed {label_object.name} to {label_object.expected_name}")
                else:
                    try:
                        inputs["repo_object"].create_label(
                            label_object.expected_name,
                            label_object.color_no_hash,
                            label_object.description,
                        )
                        actions_toolkit.info(f"Created label {label_name}")
                    except Exception as exc:  # this should be tighter
                        errors.append(
                            {
                                "type": "label-create",
                                "name": label_name,
                                "error": f"{exc}",
                            }
                        )
            for label_name in labels_diff["diffs"].keys():
                update_label(inputs["repo_object"], config.labels_dict[label_name])
                actions_toolkit.info(f"Updated label {label_name}")

        bp_diff = diffs.get("branch_protections", None)
        if bp_diff is not None:
            # delete branch protection
            for branch_name in bp_diff["extra"]:
                try:
                    this_branch = inputs["repo_object"].get_branch(branch_name)
                    this_branch.remove_protection()
                except GithubException as ghexc:
                    if ghexc.status != 404:
                        # a 404 on a delete is fine, means it isn't protected
                        errors.append(
                            {
                                "type": "bp-delete",
                                "name": branch_name,
                                "error": f"{ghexc}",
                            }
                        )
                except Exception as exc:  # this should be tighter
                    errors.append({"type": "bp-delete", "name": branch_name, "error": f"{exc}"})

            # update or create branch protection
            for branch_name in bp_diff["missing"] + list(bp_diff["diffs"].keys()):
                try:
                    bp_config = config.branch_protections_dict[branch_name]
                    if bp_config.protection is not None:
                        update_branch_protection(inputs["repo_object"], branch_name, bp_config.protection)
                        # typo fixed: "proection" -> "protection"
                        actions_toolkit.info(f"Updated branch protection for {branch_name}")
                    else:
                        actions_toolkit.warning(f"Branch protection config for {branch_name} is empty")
                except GithubException as ghexc:
                    if ghexc.status == 404:
                        actions_toolkit.info(
                            f"Can't Update branch protection for {branch_name} because the branch does not exist"
                        )
                    else:
                        errors.append(
                            {
                                "type": "bp-update",
                                "name": branch_name,
                                "error": f"{ghexc}",
                            }
                        )
                except Exception as exc:  # this should be tighter
                    errors.append({"type": "bp-update", "name": branch_name, "error": f"{exc}"})

        if config.settings is not None:
            try:
                update_settings(inputs["repo_object"], config.settings)
                actions_toolkit.info("Synced Settings")
            except Exception as exc:
                errors.append({"type": "settings-update", "error": f"{exc}"})

        commits = []
        if config.files is not None:
            for file_config in config.files:
                # delete files
                if not file_config.exists:
                    try:
                        commits.append(delete_file(inputs["repo_object"], file_config))
                        actions_toolkit.info(f"Deleted {str(file_config.dest_file)}")
                    except UnknownObjectException:
                        target_branch = (
                            file_config.target_branch
                            if file_config.target_branch is not None
                            else inputs["repo_object"].default_branch
                        )
                        actions_toolkit.warning(
                            f"{str(file_config.dest_file)} does not exist in "
                            + f"{target_branch}"
                            + " branch. Because this is a delete, not failing run"
                        )
                    except Exception as exc:
                        errors.append({"type": "file-delete", "file": str(file_config.dest_file), "error": f"{exc}"})
                elif file_config.move:
                    try:
                        copy_commit, delete_commit = move_file(inputs["repo_object"], file_config)
                        commits.append(copy_commit)
                        commits.append(delete_commit)
                        actions_toolkit.info(f"Moved {str(file_config.src_file)} to {str(file_config.dest_file)}")
                    except RemoteSrcNotFoundError:
                        target_branch = (
                            file_config.target_branch
                            if file_config.target_branch is not None
                            else inputs["repo_object"].default_branch
                        )
                        actions_toolkit.warning(
                            f"{str(file_config.src_file)} does not exist in "
                            + f"{target_branch}"
                            + " branch. Because this is a move, not failing run"
                        )
                    except Exception as exc:
                        errors.append(
                            {
                                "type": "file-move",
                                "src_file": str(file_config.src_file),
                                "dest_file": str(file_config.dest_file),
                                "error": f"{exc}",
                            }
                        )
                else:
                    try:
                        commits.append(copy_file(inputs["repo_object"], file_config))
                        actions_toolkit.info(
                            f"Copied{' remote ' if file_config.remote_src else ' '}{str(file_config.src_file)}"
                            + f" to {str(file_config.dest_file)}"
                        )
                    except Exception as exc:
                        errors.append(
                            {
                                "type": "file-copy",
                                "src_file": str(file_config.src_file),
                                "dest_file": str(file_config.dest_file),
                                "error": f"{exc}",
                            }
                        )
        actions_toolkit.info("Commit SHAs: " + ",".join(commits))

        if len(errors) > 0:
            actions_toolkit.error(json.dumps(errors))
            actions_toolkit.set_failed("Errors during apply")
        actions_toolkit.set_output("result", "Apply successful")


if __name__ == "__main__":
    main()


# ---------------------------------------------------------------------------
# repo_manager/schemas/__init__.py
# ---------------------------------------------------------------------------
import yaml
from pydantic import BaseModel  # pylint: disable=E0611

from .branch_protection import BranchProtection
from .file import FileConfig
from .label import Label
from .secret import Secret
from .settings import Settings
from pydantic import Field
from copy import copy


def empty_list():
    """Factory returning a fresh empty list, used as a pydantic default_factory."""
    this_list = list()
    return copy(this_list)
class RepoManagerConfig(BaseModel):
    """Top-level schema for a repo_manager settings file."""

    # default None so a config file may omit the settings section entirely;
    # without a default, pydantic v2 treats this as a required field
    settings: Settings | None = None
    branch_protections: list[BranchProtection] = Field(default_factory=empty_list)
    secrets: list[Secret] = Field(default_factory=empty_list)
    labels: list[Label] = Field(default_factory=empty_list)
    files: list[FileConfig] = Field(default_factory=empty_list)

    @property
    def secrets_dict(self):
        """Secrets keyed by their key name."""
        return {secret.key: secret for secret in self.secrets} if self.secrets is not None else {}

    @property
    def labels_dict(self):
        """Labels keyed by their expected (post-rename) name."""
        return {label.expected_name: label for label in self.labels} if self.labels is not None else {}

    @property
    def branch_protections_dict(self):
        """Branch protections keyed by branch name."""
        return (
            {branch_protection.name: branch_protection for branch_protection in self.branch_protections}
            if self.branch_protections is not None
            else {}
        )


def load_config(filename: str) -> RepoManagerConfig:
    """Loads a yaml file into a RepoManagerConfig"""
    with open(filename) as fh:
        this_dict = yaml.safe_load(fh)

    return RepoManagerConfig.model_validate(this_dict)


# ---------------------------------------------------------------------------
# repo_manager/schemas/branch_protection.py
# ---------------------------------------------------------------------------
from pydantic import BaseModel  # pylint: disable=E0611
from typing import Annotated
from pydantic import Field

OptBool = bool | None
OptStr = str | None


class RestrictionOptions(BaseModel):
    """Push restrictions for a protected branch (orgs only)."""

    users: list[str] | None = Field(
        None, description="List of users who cannot push to this branch, only available to orgs"
    )
    teams: list[str] | None = Field(
        None, description="List of teams who cannot push to this branch, only available to orgs"
    )


class StatusChecksOptions(BaseModel):
    """Required status check options for a protected branch."""

    strict: OptBool = Field(False, description="Require branches to be up to date before merging.")
    checks: list[str] | None = Field(
        [], description="The list of status checks to require in order to merge into this branch"
    )


class DismissalOptions(BaseModel):
    """Who may dismiss pull request reviews (orgs only)."""

    users: list[str] | None = Field(
        None, description="List of users who can dismiss pull request reviews, only available to orgs"
    )
    teams: list[str] | None = Field(
        None, description="List of teams who can dismiss pull request reviews, only available to orgs"
    )
class PROptions(BaseModel):
    """Pull-request review requirements for a protected branch."""

    # ge=1 (was gt=1, which excluded 1) to match the documented 1-6 range
    required_approving_review_count: Annotated[int, Field(strict=True, ge=1, le=6)] | None = Field(
        None, description="The number of approvals required. (1-6)"
    )
    dismiss_stale_reviews: OptBool = Field(
        None, description="Dismiss approved reviews automatically when a new commit is pushed."
    )
    require_code_owner_reviews: OptBool = Field(None, description="Blocks merge until code owners have reviewed.")
    dismissal_restrictions: DismissalOptions | None = Field(
        None, description="Options related to PR dismissal. Only available to Orgs. Not available in the Check command"
    )


class ProtectionOptions(BaseModel):
    """Branch protection rule options, mirroring the GitHub branch protection API."""

    pr_options: PROptions | None = Field(None, description="Options related to PR reviews")
    required_status_checks: StatusChecksOptions | None = Field(
        StatusChecksOptions(), description="Options related to required status checks"
    )
    enforce_admins: OptBool = Field(
        None,
        description="Enforce all configured restrictions for administrators. Set to true to enforce required status "
        + "checks for repository administrators. Set to null to disable.",
    )
    require_linear_history: OptBool = Field(
        None, description="Prevent merge commits from being pushed to matching branches"
    )
    restrictions: RestrictionOptions | None = Field(
        None, description="Options related to restricting who can push to this branch"
    )
    allow_force_pushes: OptBool = Field(None, description="Permit force pushes for all users with push access.")
    allow_deletions: OptBool = Field(None, description="Allow users with push access to delete matching branches.")
    block_creations: OptBool = Field(
        None,
        description="If set to true, the restrictions branch protection settings which limits who can push "
        + "will also block pushes which create new branches, unless the push is initiated by a user, team, or "
        + "app which has the ability to push. Set to true to restrict new branch creation.",
    )

    require_conversation_resolution: OptBool = Field(
        None,
        description="When enabled, all conversations on code must be resolved before a pull request can be merged.",
    )
    require_signed_commits: OptBool = Field(
        None, description="Commits pushed to matching branches must have verified signatures."
    )


class BranchProtection(BaseModel):
    """A single branch protection rule to create, update, or delete."""

    name: OptStr = Field(None, description="Name of the branch")
    protection: ProtectionOptions | None = Field(None, description="Protection options for the branch")
    exists: OptBool = Field(True, description="Set to false to delete a branch protection rule")


# ---------------------------------------------------------------------------
# repo_manager/schemas/file.py
# ---------------------------------------------------------------------------
import os
from pathlib import Path
from typing import Optional

from pydantic import (
    BaseModel,  # pylint: disable=E0611
    Field,
    ValidationInfo,
    field_validator,
)

OptBool = Optional[bool]
OptStr = Optional[str]
OptPath = Optional[Path]


class FileConfig(BaseModel):
    """Describes a file to copy, move, or delete in the target repo."""

    exists: OptBool = Field(True, description="Set to false to delete dest_file")
    remote_src: OptBool = Field(False, description="If true, src_file is a remote file")
    src_file: Path | None = Field(
        None,
        description="Source file to copy from. Can be a local file path, or if you prefix with remote://, "
        + "a path inside the target_repo. Can be relative to the GHA workspace",
    )
    dest_file: Path = Field(
        None, description="Dest file path in the dest_repo for src_file. Relative to root of the target repo"
    )
    move: OptBool = Field(
        False,
        description="If true and dealing with a remote src_file, repo_manager will move the file instead of "
        + "copying it, by removing src_file after copy. If src_file is a local file, this option is ignored.",
    )
    commit_msg: str = Field(
        "repo_manager file commit",
        description="Commit message to commit the file with. Files with the same commit message "
        + "and target_branch will be committed in one commit.",
    )
    target_branch: OptStr = Field(
        None,
        description="Target branch to commit this file to. Default(None) "
        + "means to lookup the default branch of the repo",
    )

    @field_validator("src_file", mode="before")
    def validate_src_file(cls, v, info: ValidationInfo) -> Path | None:
        """Require src_file when the file should exist; normalize remote:// prefixes."""
        if v is None:
            if info.data["exists"]:
                raise ValueError("Missing src_file")
            # a delete has no source; keep None instead of coercing to Path("None")
            return None
        v = str(v)
        if v.startswith("remote:"):
            info.data["remote_src"] = True
            v = v.replace("remote://", "")
        return Path(v)

    @field_validator("dest_file")
    def validate_dest_file(cls, v) -> Path:
        """dest_file is always required."""
        if v is None:
            raise ValueError("Missing dest_file")
        return v

    @property
    def src_file_exists(self) -> bool:
        """Checks if local file exists"""
        return os.path.exists(self.src_file) if self.src_file is not None else None

    @property
    def src_file_contents(self) -> str:
        """Returns the contents of the local file"""
        if not self.src_file_exists:
            raise ValueError("Local file does not exist")
        with open(self.src_file) as fh:
            return fh.read()

    @property
    def commit_key(self) -> str:
        """Returns the commit key for this file_config, a combination of commit msg and target_branch"""
        target_branch = self.target_branch if self.target_branch is not None else ""

        return f"{self.commit_msg}_{target_branch}"

    class Config:
        use_enum_values = True


# ---------------------------------------------------------------------------
# repo_manager/schemas/label.py
# ---------------------------------------------------------------------------
from typing import Optional

from pydantic import BaseModel  # pylint: disable=E0611
from pydantic import Field
from pydantic.color import Color

OptBool = Optional[bool]
OptStr = Optional[str]
class Label(BaseModel):
    """Schema for a repo label managed by repo_manager."""

    name: OptStr = Field(None, description="Label's name.")
    color: Color | None = Field(None, description="Color of this label")
    description: OptStr = Field(None, description="Description of the label")
    new_name: OptStr = Field(None, description="If set, rename a label from name to new_name.")
    exists: OptBool = Field(True, description="Set to false to delete a label")

    @property
    def expected_name(self) -> str:
        """The label name we expect on the repo: new_name when renaming, otherwise name."""
        if self.new_name is not None:
            return self.new_name
        return self.name

    @property
    def color_no_hash(self) -> str:
        """The hex color with any leading '#' stripped; None when color is unset."""
        if self.color is None:
            return self.color
        return str(self.color.as_hex()).replace("#", "")


# ---------------------------------------------------------------------------
# repo_manager/schemas/secret.py
# ---------------------------------------------------------------------------
import os
from typing import Optional

from pydantic import (
    BaseModel,  # pylint: disable=E0611
    Field,
    ValidationInfo,
    field_validator,
)

OptBool = Optional[bool]
OptStr = Optional[str]


class SecretEnvError(Exception):
    """Raised when a secret's configured env var is not set in the environment."""

    ...
class Secret(BaseModel):
    """Schema for an actions/dependabot secret managed by repo_manager."""

    type: str = Field(
        "actions",
        description="Type of secret, can be `dependabot` or `actions` or an `environment` path",
    )
    key: str = Field(None, description="Secret's name.")
    env: OptStr = Field(None, description="Environment variable to pull the secret from")
    value: OptStr = Field(None, description="Value to set this secret to", validate_default=True)
    required: OptBool = Field(
        True,
        description="Setting a value as not required allows you to not pass in an env var without causing an error",
    )
    exists: OptBool = Field(True, description="Set to false to delete a secret")

    @field_validator("value")
    def validate_value(cls, v, info: ValidationInfo) -> OptStr:
        """Disallow setting both env and value on the same secret."""
        if v is None:
            return None
        if info.data["env"] is not None:
            raise ValueError("Cannot set an env and a value in the same secret, remove one.")
        return v

    @property
    def expected_value(self):
        """The secret's value: the literal value when set, otherwise the env var lookup.

        Raises SecretEnvError when env is used but unset.
        NOTE(review): this raises even when required is False -- confirm that's intended.
        """
        if self.value is not None:
            return self.value
        env_var = os.environ.get(self.env)
        if env_var is None:
            raise SecretEnvError(f"{self.env} is not set")
        return env_var


# ---------------------------------------------------------------------------
# repo_manager/schemas/settings.py
# ---------------------------------------------------------------------------
from pydantic import BaseModel  # pylint: disable=E0611
from pydantic import Field
from pydantic import HttpUrl  # pylint: disable=E0611


OptBool = bool | None
OptStr = str | None


class Settings(BaseModel):
    """Repo-level settings that map onto the GitHub repository API."""

    description: OptStr = Field(None, description="A short description of the repository that will show up on GitHub.")
    homepage: str | HttpUrl | None = Field(None, description="A URL with more information about the repository.")
    topics: str | list[str] | None = Field(None, description="A list of strings to apply as topics on the repo")
    private: OptBool = Field(
        None, description="Either `true` to make the repository private, or `false` to make it public."
    )
    has_issues: OptBool = Field(
        None, description="Either `true` to enable issues for this repository, `false` to disable them."
    )
    has_projects: OptBool = Field(
        None,
        description="Either `true` to enable projects for this repository, or `false` to disable them. "
        + "If projects are disabled for the organization, passing `true` will cause an API error.",
    )
    has_wiki: OptBool = Field(
        None, description="Either `true` to enable the wiki for this repository, `false` to disable it."
    )
    has_downloads: OptBool = Field(
        None, description="Either `true` to enable downloads for this repository, `false` to disable them."
    )
    default_branch: OptStr = Field(None, description="Set the default branch for this repository. ")
    allow_squash_merge: OptBool = Field(
        None, description="Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging."
    )
    allow_merge_commit: OptBool = Field(
        None,
        description="Either `true` to allow merging pull requests with a merge commit, or `false` to prevent "
        + "merging pull requests with merge commits.",
    )
    allow_rebase_merge: OptBool = Field(
        None,
        description=" # Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging.",
    )
    delete_branch_on_merge: OptBool = Field(
        None, description="Either `true` to enable automatic deletion of branches on merge, or `false` to disable"
    )
    enable_automated_security_fixes: OptBool = Field(
        None,
        description="Either `true` to enable automated security fixes, or `false` to disable automated security fixes.",
    )
    enable_vulnerability_alerts: OptBool = Field(
        None, description="Either `true` to enable vulnerability alerts, or `false` to disable vulnerability alerts."
    )


# ---------------------------------------------------------------------------
# repo_manager/utils/__init__.py
# ---------------------------------------------------------------------------
import os
from typing import Any

from actions_toolkit import core as actions_toolkit

# Needed to handle extracting certain attributes/fields from nested objects and lists
from itertools import repeat

from repo_manager.gh import get_github_client

from ._inputs import INPUTS

# Valid values for the "action" input; the values are unused placeholders
VALID_ACTIONS = {"validate": None, "check": None, "apply": None}
# stdlib imports repeated from the module header so this section stands on its own
import os
from itertools import repeat
from typing import Any


def get_inputs() -> dict[str, Any]:
    """Get inputs from our workflow, validate them, and return as a dict

    Reads inputs from the dict INPUTS. This dict is generated from the actions.yml file.
    Non required inputs that are not set are returned as None

    Returns:
        Dict[str, Any]: parsed and validated inputs, including the resolved repo_object
    """
    parsed_inputs = dict()
    for input_name, input_config in INPUTS.items():
        this_input_value = actions_toolkit.get_input(
            input_name,
            required=input_config.get("required", input_config.get("default", None) is None),
        )
        parsed_inputs[input_name] = this_input_value if this_input_value != "" else None
        # set defaults if not running in github, this is to ease local testing
        # https://docs.github.com/en/actions/learn-github-actions/environment-variables
        if (
            os.environ.get("CI", "false").lower() == "false"
            and os.environ.get("GITHUB_ACTIONS", "false").lower() == "false"
        ):
            if parsed_inputs[input_name] is None:
                parsed_inputs[input_name] = input_config.get("default", None)
            if parsed_inputs[input_name] is None:
                actions_toolkit.set_failed(f"Error getting inputs. {input_name} is missing a default")
    return validate_inputs(parsed_inputs)


def validate_inputs(parsed_inputs: dict[str, Any]) -> dict[str, Any]:
    """Validate inputs and resolve the target repo object.

    Args:
        parsed_inputs (Dict[str, Any]): inputs collected by get_inputs

    Returns:
        Dict[str, Any]: the inputs, with repo resolved and 'repo_object' added
    """
    if parsed_inputs["action"] not in VALID_ACTIONS:
        actions_toolkit.set_failed(f"Invalid action: {parsed_inputs['action']}")
    # validate our inputs
    parsed_inputs["action"] = parsed_inputs["action"].lower()
    if parsed_inputs["action"] not in VALID_ACTIONS.keys():
        actions_toolkit.set_failed(
            f"Error while loading RepoManager Config. {parsed_inputs['action']} "
            # BUGFIX: the f prefix was missing, so the valid-actions list printed literally
            + f"is not a valid action in {VALID_ACTIONS.keys()}"
        )

    if not os.path.exists(parsed_inputs["settings_file"]):
        actions_toolkit.set_failed(
            f"Error while loading RepoManager Config. {parsed_inputs['settings_file']} does not exist"
        )

    if parsed_inputs["repo"] != "self":
        if len(parsed_inputs["repo"].split("/")) != 2:
            actions_toolkit.set_failed(
                f"Error while loading RepoManager Config. {parsed_inputs['repo']} is not a valid github "
                + "repo. Please be sure to enter in the style of 'owner/repo-name'."
            )
    else:
        parsed_inputs["repo"] = os.environ.get("GITHUB_REPOSITORY", None)
        if parsed_inputs["repo"] is None:
            actions_toolkit.set_failed(
                "Error getting inputs. repo is 'self' and "
                + "GITHUB_REPOSITORY env var is not set. Please set INPUT_REPO or GITHUB_REPOSITORY in the env"
            )

    if parsed_inputs["github_server_url"].lower() == "none":
        parsed_inputs["github_server_url"] = os.environ.get("GITHUB_SERVER_URL", None)
        if parsed_inputs["github_server_url"] is None:
            actions_toolkit.set_failed(
                "Error getting inputs. github_server_url is 'none' and "
                + "GITHUB_SERVER_URL env var is not set. Please set "
                + "INPUT_GITHUB_SERVER_URL or GITHUB_SERVER_URL in the env"
            )
    actions_toolkit.debug(f"github_server_url: {parsed_inputs['github_server_url']}")
    if parsed_inputs["github_server_url"] == "https://github.com":
        api_url = "https://api.github.com"
    else:
        # GHE exposes the REST API under /api/v3
        api_url = parsed_inputs["github_server_url"] + "/api/v3"

    actions_toolkit.debug(f"api_url: {api_url}")

    try:
        repo = get_github_client(parsed_inputs["token"], api_url=api_url).get_repo(parsed_inputs["repo"])
    except Exception as exc:  # this should be tighter
        actions_toolkit.set_failed(f"Error while retrieving {parsed_inputs['repo']} from Github. {exc}")

    parsed_inputs["repo_object"] = repo

    return parsed_inputs


def attr_to_kwarg(attr_name: str, obj: Any, kwargs: dict, transform_key: str = None):
    """Copy obj.attr_name into kwargs (under transform_key when given), skipping None values."""
    value = getattr(obj, attr_name, None)
    if value is not None:
        if transform_key is None:
            kwargs[attr_name] = value
        else:
            kwargs[transform_key] = value


# Allows us to extract a certain field on a list of objects into a list of strings etc.
def objary_to_list(attr_name: str, obj: Any):
    """Extract attr_name from each object in obj, returning a list of the values."""
    return list(map(getattr, obj, repeat(attr_name)))
# repo_manager/utils/_inputs.py
# ruff: noqa: E501
# This code is automatically generated by actions.yml and make generate-inputs
###START_INPUT_AUTOMATION###
INPUTS = {
    "action": {
        "description": "What action to take with this action. One of validate, check, or apply. Validate will validate your settings file, but not touch your repo. Check will check your repo with your settings file and output a report of any drift. Apply will apply the settings in your settings file to your repo",
        "default": "check",
    },
    "settings_file": {
        "description": "What yaml file to use as your settings. This is local to runner running this action.",
        "default": ".github/settings.yml",
    },
    "repo": {
        "description": "What repo to perform this action on. Default is self, as in the repo this action is running in",
        "default": "self",
    },
    "github_server_url": {
        "description": "Set a custom github server url for github api operations. Useful if you're running on GHE. Will try to autodiscover from env.GITHUB_SERVER_URL if left at default",
        "default": "none",
    },
    "token": {"description": "What github token to use with this action.", "required": True},
}
###END_INPUT_AUTOMATION###


# tests/unit/github/test_files.py
"""Unit tests for repo_manager.gh.files."""
import pytest
from github.GithubException import UnknownObjectException

from repo_manager.gh import files
from repo_manager.gh.files import copy_file
from repo_manager.gh.files import RemoteSrcNotFoundError
from repo_manager.schemas import FileConfig

VALID_CONFIG = {
    "src_file": "tests/unit/schemas/test_file.py",
    "dest_file": "test",
}


class MockBlob:
    """Stands in for a git blob; copy_file only reads its sha."""

    @property
    def sha(self):
        return "1234"


def test_copy_file_new_local_to_dest(mocker):
    """A local src copied to a dest that 404s is created via create_file."""
    mock_repo = mocker.MagicMock()
    mock_repo.get_contents.side_effect = UnknownObjectException(status=404, data={}, headers={})
    mock_repo.create_file = mocker.MagicMock(return_value={"commit": mocker.MagicMock(sha="1234")})
    mock_repo.create_git_blob = mocker.MagicMock(return_value=MockBlob())
    this_config = FileConfig(**VALID_CONFIG, target_branch="test")
    result = copy_file(mock_repo, this_config)
    assert result == "1234"
    assert mock_repo.get_contents.call_count == 1
    assert mock_repo.create_file.call_count == 1


def test_copy_file_remote_src_not_found(mocker):
    """A remote src that 404s raises RemoteSrcNotFoundError."""
    mock_repo = mocker.MagicMock()
    mock_repo.get_contents.side_effect = UnknownObjectException(status=404, data={}, headers={})
    this_config = FileConfig(**VALID_CONFIG, remote_src=True)
    with pytest.raises(RemoteSrcNotFoundError):
        copy_file(mock_repo, this_config)


def test_copy_file_update_remote_file(mocker):
    """An existing remote dest is updated via update_file."""
    mock_repo = mocker.MagicMock()
    # Use mocker.patch.object (not direct attribute assignment) so the patch
    # is automatically undone after the test instead of leaking into every
    # later test in the session.
    mocker.patch.object(files, "get_remote_file_contents", return_value="test")
    mock_repo.get_contents = mocker.MagicMock(return_value=mocker.MagicMock(sha="1234"))
    mock_repo.update_file = mocker.MagicMock(return_value={"commit": mocker.MagicMock(sha="1234")})
    mock_repo.create_git_blob = mocker.MagicMock(return_value=MockBlob())
    this_config = FileConfig(**VALID_CONFIG, remote_src=True)
    result = copy_file(mock_repo, this_config)
    assert result == "1234"
    assert mock_repo.get_contents.call_count == 1
    assert mock_repo.update_file.call_count == 1


def test_move_file(mocker):
    """move_file returns the shas from its copy and delete steps."""
    # patch.object keeps the real copy_file/delete_file intact for other tests;
    # bare assignment here previously replaced them for the whole session.
    mocker.patch.object(files, "copy_file", return_value="1234")
    mocker.patch.object(files, "delete_file", return_value="1234")
    this_config = FileConfig(**VALID_CONFIG, target_branch="test")
    copy, delete = files.move_file(mocker.MagicMock(), this_config)
    assert copy == "1234"
    assert delete == "1234"


def test_delete_file(mocker):
    """delete_file returns the commit sha of the deletion."""
    mock_repo = mocker.MagicMock()
    mock_repo.get_contents = mocker.MagicMock(return_value=mocker.MagicMock(sha="1234"))
    mock_repo.delete_file = mocker.MagicMock(return_value={"commit": mocker.MagicMock(sha="1234")})
    this_config = FileConfig(**VALID_CONFIG, target_branch="test")
    result = files.delete_file(mock_repo, this_config)
    assert result == "1234"


# tests/unit/schemas/test_file.py
"""Unit tests for the FileConfig schema."""
import random
import string
from copy import deepcopy
from pathlib import Path

import yaml
from pydantic import ValidationError

from repo_manager.schemas import FileConfig  # noqa: F811


def test_file_valid_config():
    """Defaults of a minimal valid config."""
    this_file_config = FileConfig(**VALID_CONFIG)
    assert this_file_config.src_file == Path(VALID_CONFIG["src_file"])
    assert this_file_config.dest_file == Path(VALID_CONFIG["dest_file"])
    assert this_file_config.move is False
    assert this_file_config.exists
    assert this_file_config.commit_msg == "repo_manager file commit"
    assert this_file_config.target_branch is None
    assert this_file_config.src_file_exists
    assert this_file_config.remote_src is False


def test_file_src_file_exists():
    """src_file_exists reflects the filesystem; reading a missing src raises."""
    this_file_config = FileConfig(**VALID_CONFIG)
    assert this_file_config.src_file_exists

    missing_file = deepcopy(VALID_CONFIG)
    missing_file["src_file"] = "".join(random.choices(string.ascii_lowercase, k=16))
    missing_file_config = FileConfig(**missing_file)
    assert missing_file_config.src_file_exists is False
    with pytest.raises(ValueError):
        missing_file_config.src_file_contents

    executable_file = deepcopy(VALID_CONFIG)
    executable_file["src_file"] = "./.github/scripts/replace_inputs.sh"
    executable_file["mode"] = "100755"
    executable_config = FileConfig(**executable_file)
    assert executable_config.src_file_exists


def test_file_args_validation():
    """src_file and dest_file are required (None is rejected)."""
    invalid_config = deepcopy(VALID_CONFIG)
    invalid_config["src_file"] = None
    with pytest.raises(ValidationError):
        FileConfig(**invalid_config)
    invalid_config = deepcopy(VALID_CONFIG)
    invalid_config["dest_file"] = None
    with pytest.raises(ValidationError):
        FileConfig(**invalid_config)


def test_example_works():
    """Every files entry in examples/settings.yml parses."""
    with open("examples/settings.yml") as fh:
        example_data = yaml.safe_load(fh)

    assert len(example_data["files"]) > 0
    for file_config_dict in example_data["files"]:
        FileConfig(**file_config_dict)


def test_file_commit_key():
    """commit_key is the commit message suffixed with the target branch."""
    this_file_config = FileConfig(**VALID_CONFIG)
    assert this_file_config.commit_key == "repo_manager file commit_"

    this_file_config.target_branch = "master"
    assert this_file_config.commit_key == "repo_manager file commit_master"

    this_file_config.target_branch = "develop"
    assert this_file_config.commit_key == "repo_manager file commit_develop"


# tests/unit/schemas/test_label.py
"""Unit tests for the Label schema."""
from repo_manager.schemas import Label

LABEL_VALID_CONFIG = {
    "name": "test",
    "color": "ff00ff",
    "description": "test",
}


def test_valid_config():
    """A full label config round-trips its fields."""
    this_label = Label(**LABEL_VALID_CONFIG)
    assert this_label.name == "test"
    assert this_label.color.as_hex() == "#f0f"
    assert this_label.description == "test"
    assert this_label.exists


def test_color_no_hash():
    """color_no_hash strips the leading # and is None when color is unset."""
    this_label = Label(**LABEL_VALID_CONFIG)
    assert this_label.color_no_hash == "f0f"

    no_color_config = deepcopy(LABEL_VALID_CONFIG)
    no_color_config["color"] = None
    no_color_label = Label(**no_color_config)
    assert no_color_label.color_no_hash is None


def test_expected_name():
    """expected_name prefers new_name over name when set."""
    this_config = deepcopy(LABEL_VALID_CONFIG)
    this_config["new_name"] = "".join(random.choices(string.ascii_lowercase, k=16))
    this_label = Label(**this_config)

    assert this_label.expected_name == this_config["new_name"]


def test_label_example_works():
    """Every labels entry in examples/settings.yml parses."""
    with open("examples/settings.yml") as fh:
        example_data = yaml.safe_load(fh)

    assert len(example_data["labels"]) > 0
    for label in example_data["labels"]:
        Label(**label)
# tests/unit/schemas/test_secret.py
"""Unit tests for the Secret schema."""
import os
import random
import string
from copy import deepcopy

import pytest
import yaml
from pydantic import ValidationError

from repo_manager.schemas import Secret
from repo_manager.schemas.secret import SecretEnvError


VALID_CONFIG = {
    "key": "test",
    "env": "test",
}


def test_valid_secret():
    """Defaults of a minimal env-sourced secret."""
    this_secret = Secret(**VALID_CONFIG)
    assert this_secret.key == "test"
    assert this_secret.env == "test"
    assert this_secret.value is None
    assert this_secret.required


def test_secret_validate_value():
    """env and value are mutually exclusive."""
    value_config = deepcopy(VALID_CONFIG)
    value_config["value"] = "test"
    with pytest.raises(ValidationError):
        Secret(**value_config)

    value_config["env"] = None
    this_secret = Secret(**value_config)
    assert this_secret.value == "test"


def test_secret_expected_value(monkeypatch):
    """expected_value reads the env var (raising when unset) or the literal value."""
    this_config = deepcopy(VALID_CONFIG)
    this_config["env"] = "".join(random.choices(string.ascii_lowercase, k=16))
    this_secret = Secret(**this_config)

    with pytest.raises(SecretEnvError):
        this_secret.expected_value

    # monkeypatch.setenv is undone after the test; the previous direct
    # os.environ assignment leaked the variable into later tests.
    monkeypatch.setenv(this_config["env"], "test")
    assert this_secret.expected_value == "test"

    value_config = deepcopy(VALID_CONFIG)
    value_config["env"] = None
    value_config["value"] = "bar"
    value_secret = Secret(**value_config)

    assert value_secret.expected_value == "bar"


def test_example_works():
    """Every secrets entry in examples/settings.yml parses."""
    with open("examples/settings.yml") as fh:
        example_data = yaml.safe_load(fh)

    assert len(example_data["secrets"]) > 0
    for secret in example_data["secrets"]:
        Secret(**secret)