├── .codecov.yml ├── .coderabbit.yaml ├── .coveragerc ├── .dockerignore ├── .editorconfig ├── .git-blame-ignore-revs ├── .git_archival.txt ├── .gitattributes ├── .github ├── .container-structure-test-config.yaml ├── .dive-ci.yaml ├── CODEOWNERS ├── CONTRIBUTING.md ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report_docker.md │ ├── bug_report_local_install.md │ └── feature_request.md ├── PULL_REQUEST_TEMPLATE.md ├── SECURITY.md ├── renovate.json5 └── workflows │ ├── build-image-test.yaml │ ├── build-image.yaml │ ├── ci-cd.yml │ ├── codeql.yml │ ├── dependency-review.yml │ ├── pr-title.yml │ ├── pre-commit.yaml │ ├── release.yml │ ├── reusable-tox.yml │ ├── scheduled-runs.yml │ ├── scorecards.yml │ └── stale-actions.yaml ├── .gitignore ├── .mypy.ini ├── .pre-commit-config.yaml ├── .pre-commit-hooks.yaml ├── .releaserc.json ├── .vscode ├── extensions.json └── settings.json ├── .yamllint ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── README.md ├── assets ├── contributing │ └── enable_actions_in_fork.png └── pre-commit-terraform-banner.png ├── hatch.toml ├── hooks ├── _common.sh ├── infracost_breakdown.sh ├── terraform_checkov.sh ├── terraform_docs.sh ├── terraform_fmt.sh ├── terraform_providers_lock.sh ├── terraform_tflint.sh ├── terraform_tfsec.sh ├── terraform_trivy.sh ├── terraform_validate.sh ├── terraform_wrapper_module_for_each.sh ├── terragrunt_fmt.sh ├── terragrunt_providers_lock.sh ├── terragrunt_validate.sh ├── terragrunt_validate_inputs.sh ├── terrascan.sh └── tfupdate.sh ├── lib_getopt ├── pyproject.toml ├── pytest.ini ├── ruff.toml ├── src └── pre_commit_terraform │ ├── README.md │ ├── __main__.py │ ├── _cli.py │ ├── _cli_parsing.py │ ├── _cli_subcommands.py │ ├── _errors.py │ ├── _structs.py │ ├── _types.py │ └── terraform_docs_replace.py ├── tests ├── Dockerfile ├── hooks_performance_test.sh └── pytest │ ├── _cli_test.py │ └── terraform_docs_replace_test.py ├── tools ├── entrypoint.sh └── install │ ├── _common.sh │ ├── checkov.sh │ ├── hcledit.sh │ ├── 
infracost.sh │ ├── opentofu.sh │ ├── pre-commit.sh │ ├── terraform-docs.sh │ ├── terraform.sh │ ├── terragrunt.sh │ ├── terrascan.sh │ ├── tflint.sh │ ├── tfsec.sh │ ├── tfupdate.sh │ └── trivy.sh └── tox.ini /.codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | notify: 3 | after_n_builds: 21 # Number of test matrix+lint jobs uploading coverage 4 | wait_for_ci: false 5 | 6 | require_ci_to_pass: false 7 | # notsecret # repo-scoped, upload-only, stability in fork PRs 8 | token: >- 9 | 7316089b-55fe-4646-b640-78d84b79d109 10 | 11 | comment: 12 | require_changes: true 13 | 14 | coverage: 15 | range: 100..100 16 | status: 17 | patch: 18 | default: 19 | target: 100% 20 | pytest: 21 | target: 100% 22 | flags: 23 | - pytest 24 | typing: 25 | flags: 26 | - MyPy 27 | project: 28 | default: 29 | target: 95% 30 | lib: 31 | flags: 32 | - pytest 33 | paths: 34 | - src/ 35 | target: 100% 36 | tests: 37 | flags: 38 | - pytest 39 | paths: 40 | - tests/ 41 | target: 100% 42 | typing: 43 | flags: 44 | - MyPy 45 | target: 90% 46 | 47 | github_checks: 48 | # Annotations are deprecated in Codecov because they are misleading. 49 | # Ref: https://github.com/codecov/codecov-action/issues/1710 50 | annotations: false 51 | -------------------------------------------------------------------------------- /.coderabbit.yaml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json 2 | reviews: 3 | # Collapse main AI comment by default, as it takes too much space when 4 | # expanded. It also is needless for subsequent rounds of PR review, mostly 5 | # only for the first one 6 | collapse_walkthrough: true 7 | # Move AI-generated summary from PR description to main AI comment. 
It 8 | # hallucinates sometimes, especially with PRs that change code linting rules 9 | high_level_summary_in_walkthrough: true 10 | # Disable false-positive cross links to issues 11 | related_issues: false 12 | # Disable false-positive cross links to PRs 13 | related_prs: false 14 | # Disable useless Poem generation 15 | poem: false 16 | 17 | auto_review: 18 | # Enable AI review for Draft PRs 19 | drafts: true 20 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [html] 2 | show_contexts = true 3 | skip_covered = false 4 | 5 | [paths] 6 | _site-packages-to-src-mapping = 7 | src 8 | */src 9 | *\src 10 | */lib/pypy*/site-packages 11 | */lib/python*/site-packages 12 | *\Lib\site-packages 13 | 14 | [report] 15 | skip_covered = true 16 | skip_empty = true 17 | show_missing = true 18 | exclude_also = 19 | ^\s*@pytest\.mark\.xfail 20 | ^\s*\.\.\.\s*(#.*)?$ 21 | 22 | [run] 23 | branch = true 24 | cover_pylib = false 25 | # https://coverage.rtfd.io/en/latest/contexts.html#dynamic-contexts 26 | # dynamic_context = test_function # conflicts with `pytest-cov` if set here 27 | parallel = true 28 | plugins = 29 | covdefaults 30 | relative_files = true 31 | source = 32 | . 
33 | source_pkgs = 34 | pre_commit_terraform 35 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !.dockerignore 3 | !Dockerfile 4 | !tools/entrypoint.sh 5 | !tools/install/*.sh 6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 2 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | 10 | [{*.{py,md},Dockerfile}] 11 | indent_size = 4 12 | 13 | [*.md] 14 | trim_trailing_whitespace = false 15 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # `git blame` master ignore list. 2 | # 3 | # This file contains a list of git hashes of revisions to be ignored 4 | # by `git blame`. These revisions are considered "unimportant" in 5 | # that they are unlikely to be what you are interested in when blaming. 6 | # They are typically expected to be formatting-only changes. 7 | # 8 | # It can be used for `git blame` using `--ignore-revs-file` or by 9 | # setting `blame.ignoreRevsFile` in the `git config`[1]. 10 | # 11 | # Ignore these commits when reporting with blame. Calling 12 | # 13 | # git blame --ignore-revs-file .git-blame-ignore-revs 14 | # 15 | # will tell `git blame` to ignore changes made by these revisions when 16 | # assigning blame, as if the change never happened. 
17 | # 18 | # You can enable this as a default for your local repository by 19 | # running 20 | # 21 | # git config blame.ignoreRevsFile .git-blame-ignore-revs 22 | # 23 | # This will probably be automatically picked by your IDE 24 | # (VSCode+GitLens and JetBrains products are confirmed to do this). 25 | # 26 | # Important: if you are switching to a branch without this file, 27 | # `git blame` will fail with an error. 28 | # 29 | # GitHub also excludes the commits listed below from its "Blame" 30 | # views[2][3]. 31 | # 32 | # [1]: https://git-scm.com/docs/git-blame#Documentation/git-blame.txt-blameignoreRevsFile 33 | # [2]: https://github.blog/changelog/2022-03-24-ignore-commits-in-the-blame-view-beta/ 34 | # [3]: https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view 35 | # 36 | # Guidelines: 37 | # - Only large (generally automated) reformatting or renaming PRs 38 | # should be added to this list. Do not put things here just because 39 | # you feel they are trivial or unimportant. If in doubt, do not put 40 | # it on this list. 41 | # - When adding a single revision, use inline comment to link relevant 42 | # issue/PR. Alternatively, paste the commit title instead. 43 | # Example: 44 | # d4a8b7307acc2dc8a8833ccfa65426ad28b3ffc9 # https://github.com/sanitizers/octomachinery/issues/1 45 | # - When adding multiple revisions (like a bulk of work over many 46 | # commits), organize them in blocks. Precede each such block with a 47 | # comment starting with the word "START", followed by a link to the 48 | # relevant issue or PR. Add a similar comment after the last block 49 | # line but use the word "END", followed by the same link. 50 | # Alternatively, add or augment the link with a text motivation and 51 | # description of work performed in each commit. 52 | # After each individual commit in the block, add an inline comment 53 | # with the commit title line. 
54 | # Example: 55 | # # START https://github.com/sanitizers/octomachinery/issues/1 56 | # 6f0bd2d8a1e6cd2e794cd39976e9756e0c85ac66 # Bulk-replace smile emojis with unicorns 57 | # d53974df11dbc22cbea9dc7dcbc9896c25979a27 # Replace double with single quotes 58 | # ... 59 | # # END https://github.com/sanitizers/octomachinery/issues/1 60 | # - Only put full 40-character hashes on this list (not short hashes 61 | # or any other revision reference). 62 | # - Append to the bottom of the file, regardless of the chronological 63 | # order of the revisions. Revisions within blocks should be in 64 | # chronological order from oldest to newest. 65 | # - Because you must use a hash, you need to append to this list in a 66 | # follow-up PR to the actual reformatting PR that you are trying to 67 | # ignore. This approach helps avoid issues with arbitrary rebases 68 | # and squashes while the pull request is in progress. 69 | 70 | 23928fbf8511697c915c3231977ee254bd3fa0c2 # chore(linters): Apply ruff-format 71 | -------------------------------------------------------------------------------- /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: 79085a61bf51c92032101ce5a29525f0ec76fc86 2 | node-date: 2025-05-31T00:04:22+03:00 3 | describe-name: v1.99.1-1-g79085a61b 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | .git_archival.txt export-subst 2 | -------------------------------------------------------------------------------- /.github/.container-structure-test-config.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: 2.0.0 2 | 3 | commandTests: 4 | - name: git 5 | command: git 6 | args: 7 | - --version 8 | expectedOutput: 9 | - ^git version 2\.[0-9]+\.[0-9]+\n$ 10 | 11 | - name: pre-commit 12 | command: pre-commit 13 | args: 14 | - -V 
15 | expectedOutput: 16 | - ^pre-commit ([0-9]+\.){2}[0-9]+\n$ 17 | 18 | - name: gcc 19 | command: gcc 20 | args: 21 | - --version 22 | expectedOutput: 23 | - ^gcc \(Alpine 12\. 24 | 25 | - name: checkov 26 | command: checkov 27 | args: 28 | - --version 29 | expectedOutput: 30 | - ^([0-9]+\.){2}[0-9]+\n$ 31 | 32 | - name: infracost 33 | command: infracost 34 | args: 35 | - --version 36 | expectedOutput: 37 | - ^Infracost v([0-9]+\.){2}[0-9]+ 38 | 39 | - name: opentofu 40 | command: tofu 41 | args: 42 | - --version 43 | expectedOutput: 44 | - ^OpenTofu v([0-9]+\.){2}[0-9]+\n 45 | 46 | - name: terraform 47 | command: terraform 48 | args: 49 | - --version 50 | expectedOutput: 51 | - ^Terraform v([0-9]+\.){2}[0-9]+\n 52 | 53 | - name: terraform-docs 54 | command: terraform-docs 55 | args: 56 | - --version 57 | expectedOutput: 58 | - ^terraform-docs version v([0-9]+\.){2}[0-9]+ [a-z0-9]+ 59 | 60 | - name: terragrunt 61 | command: terragrunt 62 | args: 63 | - --version 64 | expectedOutput: 65 | - ^terragrunt version v([0-9]+\.){2}[0-9]+\n$ 66 | 67 | - name: terrascan 68 | command: terrascan 69 | args: 70 | - version 71 | expectedOutput: 72 | - >- 73 | ^version: v([0-9]+\.){2}[0-9]+\n$ 74 | 75 | - name: tflint 76 | command: tflint 77 | args: 78 | - --version 79 | expectedOutput: 80 | - TFLint version ([0-9]+\.){2}[0-9]+\n 81 | 82 | - name: tfsec 83 | command: tfsec 84 | args: 85 | - --version 86 | expectedOutput: 87 | - ([0-9]+\.){2}[0-9]+\n$ 88 | 89 | - name: trivy 90 | command: trivy 91 | args: 92 | - --version 93 | expectedOutput: 94 | - >- 95 | Version: ([0-9]+\.){2}[0-9]+\n 96 | 97 | - name: tfupdate 98 | command: tfupdate 99 | args: 100 | - --version 101 | expectedOutput: 102 | - ([0-9]+\.){2}[0-9]+\n$ 103 | 104 | - name: hcledit 105 | command: hcledit 106 | args: 107 | - version 108 | expectedOutput: 109 | - ([0-9]+\.){2}[0-9]+\n$ 110 | 111 | - name: entrypoint.sh 112 | envVars: 113 | - key: USERID 114 | value: 1000:1000 115 | command: /entrypoint.sh 116 | args: 
117 | - -V 118 | expectedError: 119 | - >- 120 | ^ERROR: uid:gid 1000:1000 lacks permissions to //\n$ 121 | exitCode: 1 122 | 123 | - name: su-exec 124 | command: su-exec 125 | expectedOutput: 126 | - >- 127 | ^Usage: su-exec user-spec command \[args\]\n$ 128 | 129 | - name: ssh 130 | command: ssh 131 | args: 132 | - -V 133 | expectedError: 134 | - ^OpenSSH_9\.[0-9]+ 135 | 136 | fileExistenceTests: 137 | - name: terrascan init 138 | path: >- 139 | /root/.terrascan/pkg/policies/opa/rego/github/github_repository/privateRepoEnabled.rego 140 | shouldExist: true 141 | uid: 0 142 | gid: 0 143 | -------------------------------------------------------------------------------- /.github/.dive-ci.yaml: -------------------------------------------------------------------------------- 1 | rules: 2 | # If the efficiency is measured below X%, mark as failed. 3 | # Expressed as a ratio between 0-1. 4 | lowestEfficiency: 0.981 5 | 6 | # If the amount of wasted space is at least X or larger than X, mark 7 | # as failed. 8 | # Expressed in B, KB, MB, and GB. 9 | highestWastedBytes: 32MB 10 | 11 | # If the amount of wasted space makes up for X% or more of the image, 12 | # mark as failed. 13 | # Note: the base image layer is NOT included in the total image size. 14 | # Expressed as a ratio between 0-1; fails if the threshold is met or crossed. 
15 | highestUserWastedPercent: 0.036 16 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @maxymvlasov @yermulnik @antonbabenko 2 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Notes for contributors 2 | 3 | * [Configure `git blame` to ignore formatting commits](#configure-git-blame-to-ignore-formatting-commits) 4 | * [Run and debug hooks locally](#run-and-debug-hooks-locally) 5 | * [Run hook performance test](#run-hook-performance-test) 6 | * [Run via BASH](#run-via-bash) 7 | * [Run via Docker](#run-via-docker) 8 | * [Check results](#check-results) 9 | * [Cleanup](#cleanup) 10 | * [Required tools and plugins to simplify review process](#required-tools-and-plugins-to-simplify-review-process) 11 | * [Add new hook](#add-new-hook) 12 | * [Before write code](#before-write-code) 13 | * [Prepare basic documentation](#prepare-basic-documentation) 14 | * [Add code](#add-code) 15 | * [Finish with the documentation](#finish-with-the-documentation) 16 | * [Contributing to Python code](#contributing-to-python-code) 17 | * [Run tests in your fork](#run-tests-in-your-fork) 18 | 19 | ## Configure `git blame` to ignore formatting commits 20 | 21 | This project uses `.git-blame-ignore-revs` to exclude formatting-related commits from `git blame` history. To configure your local `git blame` to ignore these commits, refer to the [.git-blame-ignore-revs](/.git-blame-ignore-revs) file for details. 22 | 23 | ## Run and debug hooks locally 24 | 25 | ```bash 26 | pre-commit try-repo {-a} /path/to/local/pre-commit-terraform/repo {hook_name} 27 | ``` 28 | 29 | I.e. 
30 | 31 | ```bash 32 | pre-commit try-repo /mnt/c/Users/tf/pre-commit-terraform terraform_fmt # Run only `terraform_fmt` check 33 | pre-commit try-repo -a ~/pre-commit-terraform # run all existing checks from repo 34 | ``` 35 | 36 | Running `pre-commit` with `try-repo` ignores all arguments specified in `.pre-commit-config.yaml`. 37 | 38 | If you need to test hook with arguments, follow [pre-commit doc](https://pre-commit.com/#arguments-pattern-in-hooks) to test hooks. 39 | 40 | For example, to test that the [`terraform_fmt`](../README.md#terraform_fmt) hook works fine with arguments: 41 | 42 | ```bash 43 | /tmp/pre-commit-terraform/terraform_fmt.sh --args=-diff --args=-write=false test-dir/main.tf test-dir/vars.tf 44 | ``` 45 | 46 | ## Run hook performance test 47 | 48 | To check is your improvement not violate performance, we have dummy execution time tests. 49 | 50 | Script accept next options: 51 | 52 | | # | Name | Example value | Description | 53 | | --- | ---------------------------------- | ------------------------------------------------------------------------ | ---------------------------------------------------- | 54 | | 1 | `TEST_NUM` | `200` | How many times need repeat test | 55 | | 2 | `TEST_COMMAND` | `'pre-commit try-repo -a /tmp/159/pre-commit-terraform terraform_tfsec'` | Valid pre-commit command | 56 | | 3 | `TEST_DIR` | `'/tmp/infrastructure'` | Dir on what you run tests. | 57 | | 4 | `TEST_DESCRIPTION` | ```'`terraform_tfsec` PR #123:'``` | Text that you'd like to see in result | 58 | | 5 | `RAW_TEST_`
`RESULTS_FILE_NAME` | `terraform_tfsec_pr123` | (Temporary) File where all test data will be stored. | 59 | 60 | 61 | > **Note:** To make test results repeatable and comparable, be sure that on the test machine nothing generates an unstable workload. During tests good to stop any other apps and do not interact with the test machine. 62 | > 63 | > Otherwise, for eg, when you watch Youtube videos during one test and not during other, test results can differ up to 30% for the same test. 64 | 65 | ### Run via BASH 66 | 67 | ```bash 68 | # Install deps 69 | sudo apt install -y datamash 70 | # Run tests 71 | ./hooks_performance_test.sh 200 'pre-commit try-repo -a /tmp/159/pre-commit-terraform terraform_tfsec' '/tmp/infrastructure' '`terraform_tfsec` v1.51.0:' 'terraform_tfsec_pr159' 72 | ``` 73 | 74 | ### Run via Docker 75 | 76 | ```bash 77 | # Build `pre-commit-terraform` image 78 | docker build -t pre-commit-terraform --build-arg INSTALL_ALL=true . 79 | # Build test image 80 | docker build -t pre-commit-tests tests/ 81 | # Run 82 | TEST_NUM=1 83 | TEST_DIR='/tmp/infrastructure' 84 | PRE_COMMIT_DIR="$(pwd)" 85 | TEST_COMMAND='pre-commit try-repo -a /pct terraform_tfsec' 86 | TEST_DESCRIPTION='`terraform_tfsec` v1.51.0:' 87 | RAW_TEST_RESULTS_FILE_NAME='terraform_tfsec_pr159' 88 | 89 | docker run -v "$PRE_COMMIT_DIR:/pct:rw" -v "$TEST_DIR:/lint:ro" pre-commit-tests \ 90 | $TEST_NUM "$TEST_COMMAND" '/lint' "$RAW_TEST_RESULTS_FILE_NAME" "$RAW_TEST_RESULTS_FILE_NAME" 91 | ``` 92 | 93 | ### Check results 94 | 95 | Results will be located at `./test/results` dir. 96 | 97 | ### Cleanup 98 | 99 | ```bash 100 | sudo rm -rf tests/results 101 | ``` 102 | 103 | ## Required tools and plugins to simplify review process 104 | 105 | 1. [editorconfig.org](https://editorconfig.org/) (preinstalled in some IDE) 106 | 2. [pre-commit](https://pre-commit.com/#install) 107 | 3. 
(Optional) If you use VS Code - feel free to install all recommended extensions 108 | 109 | 110 | ## Add new hook 111 | 112 | You can use [this PR](https://github.com/antonbabenko/pre-commit-terraform/pull/252) as an example. 113 | 114 | ### Before write code 115 | 116 | 1. Try to figure out future hook usage. 117 | 2. Confirm the concept with [Anton Babenko](https://github.com/antonbabenko). 118 | 3. Install [required tools and plugins](#required-tools-and-plugins-to-simplify-review-process) 119 | 120 | 121 | ### Prepare basic documentation 122 | 123 | 1. Identify and describe dependencies in [Install dependencies](../README.md#1-install-dependencies) and [Available Hooks](../README.md#available-hooks) sections 124 | 125 | ### Add code 126 | 127 | > [!TIP] 128 | > Here is a screencast of [how to add new dependency in `tools/install/`](https://github.com/antonbabenko/pre-commit-terraform/assets/11096782/8fc461e9-f163-4592-9497-4a18fa89c0e8) - used in Dockerfile 129 | 130 | 1. Based on prev. block, add hook dependencies installation to [Dockerfile](../Dockerfile). 131 | Check that works: 132 | * `docker build -t pre-commit --build-arg INSTALL_ALL=true .` 133 | * `docker build -t pre-commit --build-arg _VERSION=latest .` 134 | * `docker build -t pre-commit --build-arg _VERSION=<1.2.3> .` 135 | 2. Add Docker structure tests to [`.github/.container-structure-test-config.yaml`](.container-structure-test-config.yaml) 136 | 3. Add new hook to [`.pre-commit-hooks.yaml`](../.pre-commit-hooks.yaml) 137 | 4. Create hook file. Don't forget to make it executable via `chmod +x /path/to/hook/file`. 138 | 5. Test hook. How to do it is described in [Run and debug hooks locally](#run-and-debug-hooks-locally) section. 139 | 6. Test hook one more time. 140 | 1. Push commit with hook file to GitHub 141 | 2. Grab SHA hash of the commit 142 | 3. 
Test hook using `.pre-commit-config.yaml`: 143 | 144 | ```yaml 145 | repos: 146 | - repo: https://github.com/antonbabenko/pre-commit-terraform # Your repo 147 | rev: 3d76da3885e6a33d59527eff3a57d246dfb66620 # Your commit SHA 148 | hooks: 149 | - id: terraform_docs # New hook name 150 | args: 151 | - --args=--config=.terraform-docs.yml # Some args that you'd like to test 152 | ``` 153 | 154 | ### Finish with the documentation 155 | 156 | 1. Add the hook description to [Available Hooks](../README.md#available-hooks). 157 | 2. Create and populate a new hook section in [Hooks usage notes and examples](../README.md#hooks-usage-notes-and-examples). 158 | 159 | ## Contributing to Python code 160 | 161 | 1. [Install `tox`](https://tox.wiki/en/stable/installation.html) 162 | 2. To run tests, run: 163 | 164 | ```bash 165 | tox -qq 166 | ``` 167 | 168 | The easiest way to find out what parts of the code base are left uncovered, is to copy-paste and run the `python3 ...` command that will open the HTML report, so you can inspect it visually. 169 | 170 | 3. Before committing any changes (if you do not have `pre-commit` installed locally), run: 171 | 172 | ```bash 173 | tox r -qq -e pre-commit 174 | ``` 175 | 176 | Make sure that all checks pass. 177 | 178 | 4. (Optional): If you want to limit the checks to MyPy only, you can run: 179 | 180 | ```bash 181 | tox r -qq -e pre-commit -- mypy --all-files 182 | ``` 183 | 184 | Then copy-paste and run the `python3 ...` commands to inspect the strictest MyPy coverage reports visually. 185 | 186 | 5. (Optional): You can find all available `tox` environments by running: 187 | 188 | ```bash 189 | tox list 190 | ``` 191 | 192 | ## Run tests in your fork 193 | 194 | Go to your fork's `Actions` tab and click the big green button. 195 | 196 | ![Enable workflows](/assets/contributing/enable_actions_in_fork.png) 197 | 198 | Now you can verify that the tests pass before submitting your PR. 
199 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [antonbabenko] 2 | custom: https://www.paypal.me/antonbabenko 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report_docker.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Docker bug report 3 | about: Create a bug report 4 | labels: 5 | - bug 6 | - area/docker 7 | --- 8 | 9 | 16 | 17 | ### Describe the bug 18 | 19 | 23 | 24 | 25 | ### How can we reproduce it? 26 | 27 | 41 | 42 | 43 | ### Environment information 44 | 45 | * OS: 46 | 47 | 53 | 54 | * `docker info`: 55 | 56 |
command output 57 | 58 | ```bash 59 | INSERT_OUTPUT_HERE 60 | ``` 61 | 62 |
63 | 64 | * Docker image tag/git commit: 65 | 66 | * Tools versions. Don't forget to specify the right tag in the command - 67 | `TAG=latest && docker run --entrypoint cat pre-commit:$TAG /usr/bin/tools_versions_info` 68 | 69 | ```bash 70 | INSERT_OUTPUT_HERE 71 | ``` 72 | 73 | * `.pre-commit-config.yaml`: 74 | 75 |
file content 76 | 77 | ```yaml 78 | INSERT_FILE_CONTENT_HERE 79 | ``` 80 | 81 |
82 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report_local_install.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Local installation bug report 3 | about: Create a bug report 4 | labels: 5 | - bug 6 | - area/local_installation 7 | --- 8 | 9 | 16 | 17 | ### Describe the bug 18 | 19 | 23 | 24 | 25 | ### How can we reproduce it? 26 | 27 | 41 | 42 | 43 | ### Environment information 44 | 45 | * OS: 46 | 52 | 53 | * `uname -a` and/or `systeminfo | Select-String "^OS"` output: 54 | 55 | ```bash 56 | INSERT_OUTPUT_HERE 57 | ``` 58 | 59 | 73 | 74 | * Tools availability and versions: 75 | 76 | 98 | 99 | ```bash 100 | INSERT_TOOLS_VERSIONS_HERE 101 | ``` 102 | 103 | 104 | * `.pre-commit-config.yaml`: 105 | 106 |
file content 107 | 108 | ```yaml 109 | INSERT_FILE_CONTENT_HERE 110 | ``` 111 | 112 |
113 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | labels: 5 | - feature 6 | --- 7 | 8 | 15 | 16 | ### What problem are you facing? 17 | 18 | 23 | 24 | 25 | ### How could pre-commit-terraform help solve your problem? 26 | 27 | 30 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 4 | 5 | Put an `x` into the box if that apply: 6 | 7 | - [ ] This PR introduces breaking change. 8 | - [ ] This PR fixes a bug. 9 | - [ ] This PR adds new functionality. 10 | - [ ] This PR enhances existing functionality. 11 | 12 | ### Description of your changes 13 | 14 | 22 | 23 | 24 | 25 | ### How can we test changes 26 | 27 | 32 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Reporting a Vulnerability 2 | 3 | If you believe you have discovered a potential security vulnerability in this project, please report it securely. **Do not create a public GitHub issue for any security concerns.** 4 | 5 | ## How to Report 6 | 7 | Send an email with a detailed description of the vulnerability, including any evidence of the disclosure, the impact, and any timelines related to the issue to: [anton@antonbabenko.com](mailto:anton@antonbabenko.com) 8 | 9 | ## Vulnerability Disclosure Process 10 | 11 | - **Confidential Disclosure:** All vulnerability reports will be kept confidential until a fix is developed and verified. 12 | - **Assessment and Response:** We aim to acknowledge any valid report within 15 business days. 
13 | - **Timelines:** After verification, we plan to have a coordinated disclosure within 60 days, though this may vary depending on the complexity of the fix. 14 | - **Communication:** We will work directly with the vulnerability reporter to clarify details, answer questions, and discuss potential mitigations. 15 | - **Updates:** We may provide periodic updates on the progress of the remediation of the reported vulnerability. 16 | 17 | ## Guidelines 18 | 19 | - **Vulnerability Definition:** A vulnerability is any flaw or weakness in this project that can be exploited to compromise system security. 20 | - **Disclosure Expectations:** When you report a vulnerability, please include as much detail as possible to allow us to assess its validity and scope without exposing sensitive information publicly. 21 | 22 | By following this process, you help us improve the security of our project while protecting users and maintainers. We appreciate your efforts to responsibly disclose vulnerabilities. 23 | -------------------------------------------------------------------------------- /.github/renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | $schema: "https://docs.renovatebot.com/renovate-schema.json", 3 | extends: [ 4 | "local>SpotOnInc/renovate-config", 5 | // Automerge patch and minor upgrades if they pass tests. | https://docs.renovatebot.com/presets-default/#automergeminor 6 | ":automergeMinor", 7 | // Require all status checks to pass before any automerging. | https://docs.renovatebot.com/presets-default/#automergerequireallstatuschecks 8 | ":automergeRequireAllStatusChecks", 9 | // Automerge digest upgrades if they pass tests. | https://docs.renovatebot.com/presets-default/#automergedigest 10 | ":automergeDigest", 11 | // Raise a PR first before any automerging. 
| https://docs.renovatebot.com/presets-default/#automergepr 12 | ":automergePr", 13 | ], 14 | // To make happy 'Validate PR title' GHA 15 | commitMessageLowerCase: "never", 16 | // Disable auto-rebase on every commit to avoid reaching Github limits on macos runners 17 | rebaseWhen: "conflicted", 18 | } 19 | -------------------------------------------------------------------------------- /.github/workflows/build-image-test.yaml: -------------------------------------------------------------------------------- 1 | name: Build Dockerfile if changed and run smoke tests 2 | 3 | on: 4 | merge_group: 5 | pull_request: 6 | 7 | permissions: 8 | contents: read 9 | 10 | env: 11 | IMAGE_TAG: pr-test 12 | 13 | jobs: 14 | build: 15 | permissions: 16 | # for MaxymVlasov/dive-action to write comments to PRs 17 | pull-requests: write 18 | 19 | strategy: 20 | matrix: 21 | arch: 22 | - amd64 23 | - arm64 24 | include: 25 | - os-name: Ubuntu x64 26 | os: ubuntu-latest 27 | arch: amd64 28 | 29 | - os-name: Ubuntu ARM 30 | os: ubuntu-24.04-arm 31 | arch: arm64 32 | 33 | name: ${{ matrix.os-name }} 34 | runs-on: ${{ matrix.os }} 35 | steps: 36 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 37 | with: 38 | fetch-depth: 0 39 | 40 | - name: Get changed Docker related files 41 | id: changed-files-specific 42 | uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5 43 | with: 44 | files: | 45 | .dockerignore 46 | .github/workflows/build-image-test.yaml 47 | Dockerfile 48 | tools/entrypoint.sh 49 | tools/install/*.sh 50 | 51 | - name: Set IMAGE environment variable 52 | if: steps.changed-files-specific.outputs.any_changed == 'true' 53 | # Lowercase the org/repo name to allow for workflow to run in forks, 54 | # which owners have uppercase letters in username 55 | run: >- 56 | echo "IMAGE=ghcr.io/${GITHUB_REPOSITORY@L}:${{ env.IMAGE_TAG }}" 57 | >> $GITHUB_ENV 58 | 59 | - name: Set up Docker Buildx 60 | uses: 
docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 61 | if: steps.changed-files-specific.outputs.any_changed == 'true' 62 | 63 | - name: Build if Dockerfile changed 64 | if: steps.changed-files-specific.outputs.any_changed == 'true' 65 | uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 66 | with: 67 | context: . 68 | build-args: | 69 | INSTALL_ALL=true 70 | push: false 71 | load: true 72 | tags: ${{ env.IMAGE }} 73 | # Fix multi-platform: https://github.com/docker/buildx/issues/1533 74 | provenance: false 75 | secrets: | 76 | "github_token=${{ secrets.GITHUB_TOKEN }}" 77 | 78 | - name: Setup Container Structure Tests 79 | if: steps.changed-files-specific.outputs.any_changed == 'true' 80 | env: 81 | # yamllint disable-line rule:line-length 82 | # renovate: datasource=github-releases depName=container-structure-test lookupName=GoogleContainerTools/container-structure-test 83 | CST_VERSION: 1.19.3 84 | CST_REPO: github.com/GoogleContainerTools/container-structure-test 85 | run: >- 86 | curl -L "https://${{ env.CST_REPO }}/releases/download/v${{ 87 | env.CST_VERSION }}/container-structure-test-linux-${{ matrix.arch }}" 88 | > container-structure-test 89 | && chmod +x container-structure-test 90 | && mkdir -p $HOME/bin/ 91 | && mv container-structure-test $HOME/bin/ 92 | && echo $HOME/bin/ >> $GITHUB_PATH 93 | 94 | - name: Run structure tests 95 | if: steps.changed-files-specific.outputs.any_changed == 'true' 96 | run: >- 97 | container-structure-test test 98 | --config ${{ github.workspace 99 | }}/.github/.container-structure-test-config.yaml 100 | --image ${{ env.IMAGE }} 101 | 102 | - name: Dive - check image for waste files 103 | if: steps.changed-files-specific.outputs.any_changed == 'true' 104 | uses: MaxymVlasov/dive-action@43dafd0015826beaca5110157c9262c5dc10672a # v1.4.0 105 | with: 106 | image: ${{ env.IMAGE }} 107 | config-file: ${{ github.workspace }}/.github/.dive-ci.yaml 108 | github-token: ${{ 
secrets.GITHUB_TOKEN }} 109 | 110 | # Can't build both platforms and use --load at the same time 111 | # https://github.com/docker/buildx/issues/59#issuecomment-1433097926 112 | - name: Build Multi-arch docker-image 113 | if: >- 114 | steps.changed-files-specific.outputs.any_changed == 'true' 115 | && matrix.os == 'ubuntu-latest' 116 | uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 117 | with: 118 | context: . 119 | build-args: | 120 | INSTALL_ALL=true 121 | platforms: linux/amd64,linux/arm64 122 | push: false 123 | tags: ${{ env.IMAGE }} 124 | # Fix multi-platform: https://github.com/docker/buildx/issues/1533 125 | provenance: false 126 | secrets: | 127 | "github_token=${{ secrets.GITHUB_TOKEN }}" 128 | -------------------------------------------------------------------------------- /.github/workflows/build-image.yaml: -------------------------------------------------------------------------------- 1 | name: Publish container image 2 | 3 | on: 4 | workflow_dispatch: 5 | release: 6 | types: 7 | - created 8 | schedule: 9 | - cron: 00 00 * * * 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | docker: 16 | permissions: 17 | # for docker/build-push-action to publish docker image 18 | packages: write 19 | 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: Checkout code 23 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 24 | - name: Set up Docker Buildx 25 | uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 26 | - name: Login to GitHub Container Registry 27 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 28 | with: 29 | registry: ghcr.io 30 | username: ${{ github.repository_owner }} 31 | password: ${{ secrets.GITHUB_TOKEN }} 32 | - name: Set tag for image 33 | run: >- 34 | echo IMAGE_TAG=$( 35 | [ ${{ github.ref_type }} == 'tag' ] 36 | && echo ${{ github.ref_name }} 37 | || echo 'latest' 38 | ) >> $GITHUB_ENV 39 | 40 | - name: Set 
IMAGE_REPO environment variable 41 | # Lowercase the org/repo name to allow for workflow to run in forks, 42 | # which owners have uppercase letters in username 43 | run: >- 44 | echo "IMAGE_REPO=ghcr.io/${GITHUB_REPOSITORY@L}" >> $GITHUB_ENV 45 | - name: Set up Docker Buildx 46 | uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 47 | 48 | - name: Build and Push release 49 | if: github.event_name != 'schedule' 50 | uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 51 | with: 52 | context: . 53 | build-args: | 54 | INSTALL_ALL=true 55 | platforms: linux/amd64,linux/arm64 56 | push: true 57 | tags: | 58 | ${{ env.IMAGE_REPO }}:${{ env.IMAGE_TAG }} 59 | ${{ env.IMAGE_REPO }}:latest 60 | # Fix multi-platform: https://github.com/docker/buildx/issues/1533 61 | provenance: false 62 | secrets: | 63 | "github_token=${{ secrets.GITHUB_TOKEN }}" 64 | 65 | - name: Build and Push nightly 66 | if: github.event_name == 'schedule' 67 | uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 68 | with: 69 | context: . 70 | build-args: | 71 | INSTALL_ALL=true 72 | platforms: linux/amd64,linux/arm64 73 | push: true 74 | tags: | 75 | ${{ env.IMAGE_REPO }}:nightly 76 | # Fix multi-platform: https://github.com/docker/buildx/issues/1533 77 | provenance: false 78 | secrets: | 79 | "github_token=${{ secrets.GITHUB_TOKEN }}" 80 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. 
Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: CodeQL 13 | 14 | on: 15 | push: 16 | branches: 17 | - master 18 | merge_group: 19 | pull_request: 20 | schedule: 21 | - cron: 0 0 * * 1 22 | 23 | permissions: 24 | contents: read 25 | 26 | jobs: 27 | analyze: 28 | name: Analyze 29 | runs-on: ubuntu-latest 30 | permissions: 31 | actions: read 32 | contents: read 33 | security-events: write 34 | 35 | strategy: 36 | fail-fast: false 37 | matrix: 38 | language: 39 | - python 40 | # CodeQL supports [ $supported-codeql-languages ] 41 | # Learn more about CodeQL language support at 42 | # https://aka.ms/codeql-docs/language-support 43 | 44 | steps: 45 | - name: Checkout repository 46 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 47 | 48 | # Initializes the CodeQL tools for scanning. 49 | - name: Initialize CodeQL 50 | uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 51 | with: 52 | languages: ${{ matrix.language }} 53 | # If you wish to specify custom queries, you can do so here or in 54 | # a config file. # By default, queries listed here will override any 55 | # specified in a config file. Prefix the list here with "+" to use 56 | # these queries and those in the config file. 57 | 58 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java) 59 | # If this step fails, then you should remove it and run the build 60 | # manually (see below) 61 | - name: Autobuild 62 | uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 63 | 64 | # ℹ️ Command-line programs to run using the OS shell. 65 | # yamllint disable-line rule:line-length 66 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 67 | 68 | # If the Autobuild fails above, remove it and uncomment the following 69 | # three lines. 
Modify them (or add more) to build your code if your 70 | # project, please refer to the EXAMPLE below for guidance. 71 | 72 | # - run: | 73 | # echo "Run, Build Application using script" 74 | # ./location_of_script_within_repo/buildscript.sh 75 | 76 | - name: Perform CodeQL Analysis 77 | uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 78 | with: 79 | category: /language:${{matrix.language}} 80 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | # Dependency Review Action 2 | # 3 | # This Action will scan dependency manifest files that change as part of a 4 | # Pull Request, surfacing known-vulnerable versions of the packages declared 5 | # or updated in the PR. 6 | # Once installed, if the workflow run is marked as required, 7 | # PRs introducing known-vulnerable packages will be blocked from merging. 
8 | # 9 | # Source repository: https://github.com/actions/dependency-review-action 10 | name: Dependency Review 11 | 12 | on: 13 | merge_group: 14 | pull_request: 15 | 16 | permissions: 17 | contents: read 18 | 19 | jobs: 20 | dependency-review: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - name: Checkout Repository 24 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 25 | - name: Dependency Review 26 | uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1 27 | -------------------------------------------------------------------------------- /.github/workflows/pr-title.yml: -------------------------------------------------------------------------------- 1 | name: Validate PR title 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | main: 15 | permissions: 16 | # for amannn/action-semantic-pull-request to analyze PRs 17 | pull-requests: read 18 | # for amannn/action-semantic-pull-request to mark status of analyzed PR 19 | statuses: write 20 | 21 | name: Validate PR title 22 | runs-on: ubuntu-latest 23 | steps: 24 | # Please look up the latest version from 25 | # https://github.com/amannn/action-semantic-pull-request/releases 26 | - uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017 # v5.5.3 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | with: 30 | # Configure which types are allowed. 31 | # Default: https://github.com/commitizen/conventional-commit-types 32 | types: | 33 | fix 34 | feat 35 | docs 36 | ci 37 | chore 38 | # Configure that a scope must always be provided. 39 | requireScope: false 40 | # Configure additional validation for the subject based on a regex. 41 | # This example ensures the subject starts with an uppercase character. 
42 | subjectPattern: ^[A-Z].+$ 43 | # If `subjectPattern` is configured, you can use this property to 44 | # override the default error message that is shown when the pattern 45 | # doesn't match. The variables `subject` and `title` can be used within 46 | # the message. 47 | subjectPatternError: | 48 | The subject "{subject}" found in the pull request title "{title}" 49 | didn't match the configured pattern. Please ensure that the subject 50 | starts with an uppercase character. 51 | # For work-in-progress PRs you can typically use draft pull requests 52 | # from Github. However, private repositories on the free plan don't 53 | # have this option and therefore this action allows you to opt-in to 54 | # using the special "[WIP]" prefix to indicate this state. This will 55 | # avoid the validation of the PR title and the pull request checks 56 | # remain pending. Note that a second check will be reported if this 57 | # is enabled. 58 | wip: true 59 | # When using "Squash and merge" on a PR with only one commit, GitHub 60 | # will suggest using that commit message instead of the PR title for 61 | # the merge commit, and it's easy to commit this by mistake. Enable 62 | # this option to also validate the commit message for one commit PRs. 
63 | validateSingleCommit: false 64 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yaml: -------------------------------------------------------------------------------- 1 | name: Common issues check 2 | 3 | on: 4 | merge_group: 5 | pull_request: 6 | 7 | permissions: 8 | contents: read 9 | 10 | jobs: 11 | pre-commit: 12 | permissions: 13 | contents: write # for pre-commit/action to push back fixes to PR branch 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 17 | - run: >- 18 | git fetch --no-tags --prune --depth=1 origin 19 | +refs/heads/*:refs/remotes/origin/* 20 | 21 | - name: Get changed files 22 | id: file_changes 23 | run: | 24 | export DIFF=$(git diff --name-only origin/${{ github.base_ref }} ${{ 25 | github.sha 26 | }}) 27 | echo "Diff between ${{ github.base_ref }} and ${{ github.sha }}" 28 | echo "files=$( echo "$DIFF" | xargs echo )" >> $GITHUB_OUTPUT 29 | 30 | - name: Install shfmt 31 | run: >- 32 | curl -L "$( 33 | curl -s https://api.github.com/repos/mvdan/sh/releases/latest 34 | | grep -o -E -m 1 "https://.+?linux_amd64" 35 | )" 36 | > shfmt 37 | && chmod +x shfmt && sudo mv shfmt /usr/bin/ 38 | 39 | - name: Install shellcheck 40 | run: | 41 | sudo apt update && sudo apt install shellcheck 42 | 43 | - name: Install hadolint 44 | run: >- 45 | curl -L "$( 46 | curl -s https://api.github.com/repos/hadolint/hadolint/releases/latest 47 | | grep -o -E -m 1 "https://.+?/hadolint-Linux-x86_64" 48 | )" 49 | > hadolint 50 | && chmod +x hadolint && sudo mv hadolint /usr/bin/ 51 | # Needed for pre-commit fix push to succeed 52 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 53 | with: 54 | fetch-depth: 0 55 | ref: ${{ github.event.pull_request.head.sha }} 56 | # Needed to trigger pre-commit workflow on autofix commit. 
Guide: 57 | # https://web.archive.org/web/20210731173012/https://github.community/t/required-check-is-expected-after-automated-push/187545/ 58 | ssh-key: ${{ secrets.GHA_AUTOFIX_COMMIT_KEY }} 59 | # Skip terraform_tflint, which interferes with committing pre-commit auto-fixes 60 | - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 61 | with: 62 | python-version: '3.13' 63 | 64 | - name: Execute pre-commit 65 | uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 66 | env: 67 | SKIP: no-commit-to-branch 68 | with: 69 | extra_args: >- 70 | --color=always 71 | --show-diff-on-failure 72 | --files ${{ steps.file_changes.outputs.files}} 73 | 74 | # Needed to trigger pre-commit workflow on autofix commit 75 | - name: Push fixes 76 | if: failure() 77 | uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4 78 | with: 79 | # Determines the way the action fills missing author name and email. 80 | # Three options are available: 81 | # - github_actor -> UserName 82 | # - user_info -> Your Display Name 83 | # - github_actions -> github-actions 84 | # Default: github_actor 85 | default_author: github_actor 86 | # The message for the commit. 
87 | # Default: 'Commit from GitHub Actions (name of the workflow)' 88 | message: '[pre-commit] Autofix violations' 89 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - master 8 | paths: 9 | - '**/*.py' 10 | - '**/*.sh' 11 | - Dockerfile 12 | - .pre-commit-hooks.yaml 13 | # Ignore paths 14 | - '!tests/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | jobs: 20 | release: 21 | permissions: 22 | # for cycjimmy/semantic-release-action to create a release 23 | contents: write 24 | # for cycjimmy/semantic-release-action to write comments to issues 25 | issues: write 26 | # for cycjimmy/semantic-release-action to write comments to PRs 27 | pull-requests: write 28 | 29 | name: Release 30 | runs-on: ubuntu-latest 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 34 | with: 35 | persist-credentials: false 36 | fetch-depth: 0 37 | 38 | - name: Release 39 | uses: cycjimmy/semantic-release-action@0a51e81a6baff2acad3ee88f4121c589c73d0f0e # v4.2.0 40 | with: 41 | semantic_version: 18.0.0 42 | extra_plugins: | 43 | @semantic-release/changelog@6.0.0 44 | @semantic-release/git@10.0.0 45 | env: 46 | # Custom token for triggering Docker image build GH Workflow on release 47 | # created by cycjimmy/semantic-release-action. Events created by 48 | # workflows with the default GITHUB_TOKEN do not trigger other GH Workflows. 
49 | GITHUB_TOKEN: ${{ secrets.SEMANTIC_RELEASE_TOKEN }} 50 | -------------------------------------------------------------------------------- /.github/workflows/reusable-tox.yml: -------------------------------------------------------------------------------- 1 | name: >- 2 | ❌ 3 | [DO NOT CLICK] 4 | Reusable Tox 5 | 6 | on: 7 | workflow_call: 8 | inputs: 9 | built-wheel-names: 10 | description: >- 11 | A glob for the built distributions in the artifact 12 | to test (is installed into tox env if passed) 13 | required: false 14 | type: string 15 | cache-key-files: 16 | description: Dependency files cache 17 | required: true 18 | type: string 19 | check-name: 20 | description: A custom name for the Checks API-reported status 21 | required: false 22 | type: string 23 | dists-artifact-name: 24 | description: Workflow artifact name containing dists 25 | required: true 26 | type: string 27 | environment-variables: 28 | description: >- 29 | A newline-delimited blob of text with environment variables 30 | to be set using `${GITHUB_ENV}` 31 | required: false 32 | type: string 33 | python-version: 34 | description: Python version to provision in the VM 35 | required: true 36 | type: string 37 | release-requested: 38 | description: Flag whether this is CI run is a release request 39 | default: 'false' 40 | required: false 41 | type: string 42 | runner-vm-os: 43 | description: VM OS to use 44 | default: ubuntu 45 | required: false 46 | type: string 47 | source-tarball-name: 48 | description: Sdist filename wildcard 49 | required: true 50 | type: string 51 | timeout-minutes: 52 | description: Deadline for the job to complete 53 | required: true 54 | type: string 55 | toxenv: 56 | description: Name of the tox environment to use 57 | required: true 58 | type: string 59 | tox-run-posargs: 60 | description: Positional arguments to pass to the regular tox run 61 | required: false 62 | type: string 63 | tox-rerun-posargs: 64 | description: Positional arguments to pass to the 
re-attempted tox run 65 | required: false 66 | type: string 67 | upstream-repository-id: 68 | description: ID of the upstream GitHub Repository 69 | required: true 70 | type: string 71 | xfail: 72 | description: >- 73 | Whether this job is expected to fail. Controls if the run outcomes 74 | contribute to the failing CI status or not. The job status will be 75 | treated as successful if this is set to `true`. Setting `false` 76 | should be preferred typically. 77 | required: true 78 | type: string 79 | secrets: 80 | codecov-token: 81 | description: Mandatory token for uploading to Codecov 82 | required: true 83 | 84 | permissions: 85 | contents: read 86 | 87 | env: 88 | # Supposedly, pytest or coveragepy use this 89 | COLOR: >- 90 | yes 91 | FORCE_COLOR: 1 # Request colored output from CLI tools supporting it 92 | MYPY_FORCE_COLOR: 1 # MyPy's color enforcement 93 | PIP_DISABLE_PIP_VERSION_CHECK: 1 94 | PIP_NO_PYTHON_VERSION_WARNING: 1 95 | PIP_NO_WARN_SCRIPT_LOCATION: 1 96 | PRE_COMMIT_COLOR: always 97 | PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` 98 | PYTHONIOENCODING: utf-8 99 | PYTHONUTF8: 1 100 | TOX_PARALLEL_NO_SPINNER: 1 101 | # Make tox-wrapped tools see color requests 102 | TOX_TESTENV_PASSENV: >- 103 | COLOR 104 | FORCE_COLOR 105 | MYPY_FORCE_COLOR 106 | NO_COLOR 107 | PIP_DISABLE_PIP_VERSION_CHECK 108 | PIP_NO_PYTHON_VERSION_WARNING 109 | PIP_NO_WARN_SCRIPT_LOCATION 110 | PRE_COMMIT_COLOR 111 | PY_COLORS 112 | PYTEST_THEME 113 | PYTEST_THEME_MODE 114 | PYTHONIOENCODING 115 | PYTHONLEGACYWINDOWSSTDIO 116 | PYTHONUTF8 117 | 118 | jobs: 119 | tox: 120 | name: >- 121 | ${{ 122 | inputs.check-name 123 | && inputs.check-name 124 | || format( 125 | '{0}@🐍{1}@{2}', 126 | inputs.toxenv, 127 | inputs.python-version, 128 | inputs.runner-vm-os 129 | ) 130 | }} 131 | 132 | runs-on: ${{ inputs.runner-vm-os }} 133 | 134 | timeout-minutes: ${{ fromJSON(inputs.timeout-minutes) }} 135 | 136 | continue-on-error: >- 137 | ${{ 138 | ( 139 | 
fromJSON(inputs.xfail) || 140 | ( 141 | startsWith(inputs.python-version, '~') 142 | ) || 143 | contains(inputs.python-version, 'alpha') 144 | ) && true || false 145 | }} 146 | 147 | env: 148 | TOXENV: ${{ inputs.toxenv }} 149 | 150 | steps: 151 | - name: Export requested job-global environment variables 152 | if: inputs.environment-variables != '' 153 | run: >- 154 | echo '${{ inputs.environment-variables }}' 155 | >> "${GITHUB_ENV}" 156 | 157 | - name: >- 158 | Switch to using Python v${{ inputs.python-version }} 159 | by default 160 | id: python-install 161 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 162 | with: 163 | python-version: ${{ inputs.python-version }} 164 | 165 | # NOTE: `pre-commit --show-diff-on-failure` and `sphinxcontrib-spellcheck` 166 | # NOTE: with Git authors allowlist enabled both depend on the presence of a 167 | # NOTE: Git repository. 168 | - name: Grab the source from Git 169 | if: >- 170 | contains(fromJSON('["pre-commit", "spellcheck-docs"]'), inputs.toxenv) 171 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 172 | with: 173 | ref: ${{ github.event.inputs.release-committish }} 174 | - name: Retrieve the project source from an sdist inside the GHA artifact 175 | if: >- 176 | !contains(fromJSON('["pre-commit", "spellcheck-docs"]'), inputs.toxenv) 177 | uses: re-actors/checkout-python-sdist@187f55296b0f54d88259aaaf99af32ad3647d3bc # v2.0.0 178 | with: 179 | source-tarball-name: ${{ inputs.source-tarball-name }} 180 | workflow-artifact-name: ${{ inputs.dists-artifact-name }} 181 | 182 | - name: Cache pre-commit.com virtualenvs 183 | if: inputs.toxenv == 'pre-commit' 184 | uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 185 | with: 186 | path: ~/.cache/pre-commit 187 | key: >- 188 | ${{ 189 | runner.os 190 | }}-pre-commit-${{ 191 | hashFiles('.pre-commit-config.yaml') 192 | }} 193 | 194 | - name: Figure out if the interpreter ABI is stable 195 | id: py-abi 
196 | run: | 197 | from os import environ 198 | from pathlib import Path 199 | from sys import version_info 200 | 201 | FILE_APPEND_MODE = 'a' 202 | 203 | is_stable_abi = version_info.releaselevel == 'final' 204 | 205 | with Path(environ['GITHUB_OUTPUT']).open( 206 | mode=FILE_APPEND_MODE, 207 | ) as outputs_file: 208 | print( 209 | 'is-stable-abi={is_stable_abi}'. 210 | format(is_stable_abi=str(is_stable_abi).lower()), 211 | file=outputs_file, 212 | ) 213 | shell: python 214 | - name: >- 215 | Calculate Python interpreter version hash value 216 | for use in the cache key 217 | if: fromJSON(steps.py-abi.outputs.is-stable-abi) 218 | id: calc-cache-key-py 219 | run: | 220 | from hashlib import sha512 221 | from os import environ 222 | from pathlib import Path 223 | from sys import version 224 | 225 | FILE_APPEND_MODE = 'a' 226 | 227 | hash = sha512(version.encode()).hexdigest() 228 | 229 | with Path(environ['GITHUB_OUTPUT']).open( 230 | mode=FILE_APPEND_MODE, 231 | ) as outputs_file: 232 | print(f'py-hash-key={hash}', file=outputs_file) 233 | shell: python 234 | - name: Get pip cache dir 235 | if: fromJSON(steps.py-abi.outputs.is-stable-abi) 236 | id: pip-cache-dir 237 | run: >- 238 | echo "dir=$(python -Im pip cache dir)" >> "${GITHUB_OUTPUT}" 239 | shell: bash 240 | - name: Set up pip cache 241 | if: fromJSON(steps.py-abi.outputs.is-stable-abi) 242 | uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 243 | with: 244 | path: ${{ steps.pip-cache-dir.outputs.dir }} 245 | key: >- 246 | ${{ runner.os }}-pip-${{ 247 | steps.calc-cache-key-py.outputs.py-hash-key }}-${{ 248 | inputs.cache-key-files }} 249 | restore-keys: | 250 | ${{ runner.os }}-pip-${{ 251 | steps.calc-cache-key-py.outputs.py-hash-key 252 | }}- 253 | ${{ runner.os }}-pip- 254 | 255 | - name: Install tox 256 | run: >- 257 | python -Im pip install tox 258 | shell: bash # windows compat 259 | 260 | - name: Make the env clean of non-test files 261 | if: inputs.toxenv == 
'metadata-validation' 262 | run: | 263 | shopt -s extglob 264 | rm -rf !tox.ini 265 | shell: bash 266 | - name: Download all the dists 267 | if: >- 268 | contains(fromJSON('["metadata-validation", "pytest"]'), inputs.toxenv) 269 | uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 270 | with: 271 | name: ${{ inputs.dists-artifact-name }} 272 | path: dist/ 273 | 274 | - name: >- 275 | Pre-populate tox envs: `${{ env.TOXENV }}` 276 | run: >- 277 | python -Im 278 | tox 279 | --parallel auto 280 | --parallel-live 281 | --skip-missing-interpreters false 282 | ${{ 283 | inputs.built-wheel-names != '' 284 | && format('--installpkg dist/{0}', inputs.built-wheel-names) 285 | || '' 286 | }} 287 | --notest 288 | - name: Initialize pre-commit envs if needed 289 | if: inputs.toxenv == 'pre-commit' 290 | run: >- 291 | python -Im 292 | tox 293 | exec 294 | --skip-pkg-install 295 | --quiet 296 | -- 297 | python -Im pre_commit install-hooks 298 | # Create GHA Job Summary markdown table of the coverage report 299 | # But only for 'pytest' env in 'tox'. 
300 | # For details: ../../tox.ini '[testenv:pytest]' 'commands_post' 301 | - name: >- 302 | Run tox envs: `${{ env.TOXENV }}` 303 | id: tox-run 304 | run: >- 305 | python -Im 306 | tox 307 | --parallel auto 308 | --parallel-live 309 | --skip-missing-interpreters false 310 | --skip-pkg-install 311 | --quiet 312 | ${{ 313 | inputs.tox-run-posargs != '' 314 | && format('-- {0}', inputs.tox-run-posargs) 315 | || '' 316 | }} 317 | # Generate nice SVG image of passed/failed tests in GHA Job Summary 318 | - name: Produce markdown test summary from JUnit 319 | if: >- 320 | !cancelled() 321 | && steps.tox-run.outputs.test-result-files != '' 322 | uses: test-summary/action@31493c76ec9e7aa675f1585d3ed6f1da69269a86 # v2.4 323 | with: 324 | paths: >- 325 | ${{ steps.tox-run.outputs.test-result-files }} 326 | - name: Produce markdown test summary from Cobertura XML 327 | # NOTE: MyPy is temporarily excluded because it produces incomplete XML 328 | # NOTE: files that `irongut/CodeCoverageSummary` can't stomach. 329 | # Refs: 330 | # * https://github.com/irongut/CodeCoverageSummary/issues/324 331 | # * https://github.com/python/mypy/issues/17689 332 | # FIXME: Revert the exclusion once upstream fixes the bug. 
333 | if: >- 334 | !cancelled() 335 | && runner.os == 'Linux' 336 | && steps.tox-run.outputs.cov-report-files != '' 337 | && steps.tox-run.outputs.test-result-files == '' 338 | && steps.tox-run.outputs.codecov-flags != 'MyPy' 339 | uses: irongut/CodeCoverageSummary@51cc3a756ddcd398d447c044c02cb6aa83fdae95 # v1.3.0 340 | with: 341 | badge: true 342 | filename: >- 343 | ${{ steps.tox-run.outputs.cov-report-files }} 344 | format: markdown 345 | output: both 346 | # Ref: https://github.com/irongut/CodeCoverageSummary/issues/66 347 | - name: Append coverage results to Job Summary 348 | if: >- 349 | !cancelled() 350 | && runner.os == 'Linux' 351 | && steps.tox-run.outputs.cov-report-files != '' 352 | && steps.tox-run.outputs.test-result-files == '' 353 | && steps.tox-run.outputs.codecov-flags != 'MyPy' 354 | run: >- 355 | cat code-coverage-results.md >> "${GITHUB_STEP_SUMMARY}" 356 | - name: Re-run the failing tests with maximum verbosity 357 | if: >- 358 | !cancelled() 359 | && failure() 360 | && inputs.tox-rerun-posargs != '' 361 | # `exit 1` makes sure that the job remains red with flaky runs 362 | run: >- 363 | python -Im 364 | tox 365 | --parallel auto 366 | --parallel-live 367 | --skip-missing-interpreters false 368 | -vvvvv 369 | --skip-pkg-install 370 | -- 371 | ${{ inputs.tox-rerun-posargs }} 372 | && exit 1 373 | shell: bash 374 | - name: Send coverage data to Codecov 375 | if: >- 376 | !cancelled() 377 | && steps.tox-run.outputs.cov-report-files != '' 378 | uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 379 | with: 380 | disable_search: true 381 | fail_ci_if_error: >- 382 | ${{ toJSON(inputs.upstream-repository-id == github.repository_id) }} 383 | files: >- 384 | ${{ steps.tox-run.outputs.cov-report-files }} 385 | flags: >- 386 | CI-GHA, 387 | ${{ steps.tox-run.outputs.codecov-flags }}, 388 | OS-${{ 389 | runner.os 390 | }}, 391 | VM-${{ 392 | inputs.runner-vm-os 393 | }}, 394 | Py-${{ 395 | 
steps.python-install.outputs.python-version 396 | }} 397 | token: ${{ secrets.codecov-token }} 398 | - name: Upload test results to Codecov 399 | if: >- 400 | !cancelled() 401 | && steps.tox-run.outputs.test-result-files != '' 402 | uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1 403 | # FIXME There is a bug in action which provokes it to fail during upload 404 | # Related issue: https://github.com/codecov/codecov-action/issues/1794 405 | continue-on-error: true 406 | with: 407 | disable_search: true 408 | fail_ci_if_error: >- 409 | ${{ toJSON(inputs.upstream-repository-id == github.repository_id) }} 410 | files: >- 411 | ${{ steps.tox-run.outputs.test-result-files }} 412 | flags: >- 413 | CI-GHA, 414 | ${{ steps.tox-run.outputs.codecov-flags }}, 415 | OS-${{ 416 | runner.os 417 | }}, 418 | VM-${{ 419 | inputs.runner-vm-os 420 | }}, 421 | Py-${{ 422 | steps.python-install.outputs.python-version 423 | }} 424 | token: ${{ secrets.codecov-token }} 425 | -------------------------------------------------------------------------------- /.github/workflows/scheduled-runs.yml: -------------------------------------------------------------------------------- 1 | name: 🕔 2 | 3 | on: 4 | pull_request: 5 | paths: # only changes to this workflow itself trigger PR testing 6 | - .github/workflows/scheduled-runs.yml 7 | schedule: 8 | - cron: 3 5 * * * # run daily at 5:03 UTC 9 | workflow_dispatch: # manual trigger 10 | 11 | permissions: 12 | contents: read 13 | 14 | run-name: >- 15 | 🌃 16 | Nightly run of 17 | ${{ 18 | github.event.pull_request.number && 'PR' || '' 19 | }}${{ 20 | !github.event.pull_request.number && 'Commit' || '' 21 | }} 22 | ${{ github.event.pull_request.number || github.sha }} 23 | triggered by: ${{ github.event_name }} of ${{ 24 | github.ref 25 | }} ${{ 26 | github.ref_type 27 | }} 28 | (workflow run ID: ${{ 29 | github.run_id 30 | }}; number: ${{ 31 | github.run_number 32 | }}; attempt: ${{ 33 | github.run_attempt 34 | }}) 
35 | 36 | jobs: 37 | main-ci-cd-pipeline: 38 | name: ∞ Main CI/CD pipeline 39 | uses: ./.github/workflows/ci-cd.yml 40 | secrets: inherit 41 | -------------------------------------------------------------------------------- /.github/workflows/scorecards.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. They are 2 | # provided by a third-party and are governed by separate terms of service, 3 | # privacy policy, and support documentation. 4 | 5 | name: Scorecard supply-chain security 6 | on: 7 | # For Branch-Protection check. Only the default branch is supported. See 8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 9 | branch_protection_rule: 10 | # To guarantee Maintained check is occasionally updated. See 11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 12 | schedule: 13 | - cron: 20 7 * * 2 14 | push: 15 | branches: 16 | - master 17 | 18 | # Declare default permissions as read only. 19 | permissions: read-all 20 | 21 | jobs: 22 | analysis: 23 | name: Scorecard analysis 24 | runs-on: ubuntu-latest 25 | permissions: 26 | # Needed to upload the results to code-scanning dashboard. 27 | security-events: write 28 | # Needed to publish results and get a badge (see publish_results below). 29 | id-token: write 30 | contents: read 31 | actions: read 32 | # To allow GraphQL ListCommits to work 33 | issues: read 34 | pull-requests: read 35 | # To detect SAST tools 36 | checks: read 37 | 38 | steps: 39 | - name: Checkout code 40 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 41 | with: 42 | persist-credentials: false 43 | 44 | - name: Run analysis 45 | uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 46 | with: 47 | results_file: results.sarif 48 | results_format: sarif 49 | # (Optional) "write" PAT token. 
Uncomment the `repo_token` line if: 50 | # - you want to enable the Branch-Protection check on a *public* 51 | # repository, or 52 | # - you are installing Scorecards on a *private* repository 53 | # To create the PAT, follow the steps in 54 | # https://github.com/ossf/scorecard-action#authentication-with-pat. 55 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 56 | 57 | # Public repositories: 58 | # - Publish results to OpenSSF REST API for easy access by consumers 59 | # - Allows the repository to include the Scorecard badge. 60 | # - See https://github.com/ossf/scorecard-action#publishing-results. 61 | # For private repositories: 62 | # - `publish_results` will always be set to `false`, regardless 63 | # of the value entered here. 64 | publish_results: true 65 | 66 | # Upload the results as artifacts (optional). Commenting out will disable 67 | # uploads of run results in SARIF format to the repository Actions tab. 68 | - name: Upload artifact 69 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 70 | with: 71 | name: SARIF file 72 | path: results.sarif 73 | retention-days: 5 74 | 75 | # Upload the results to GitHub's code scanning dashboard. 
76 | - name: Upload to code-scanning 77 | uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 78 | with: 79 | sarif_file: results.sarif 80 | -------------------------------------------------------------------------------- /.github/workflows/stale-actions.yaml: -------------------------------------------------------------------------------- 1 | name: Mark or close stale issues and PRs 2 | on: 3 | schedule: 4 | - cron: 0 0 * * * 5 | 6 | permissions: 7 | contents: read 8 | 9 | jobs: 10 | stale: 11 | permissions: 12 | issues: write # for actions/stale to close stale issues 13 | pull-requests: write # for actions/stale to close stale PRs 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 17 | with: 18 | repo-token: ${{ secrets.GITHUB_TOKEN }} 19 | # Staling issues and PR's 20 | days-before-stale: 30 21 | stale-issue-label: stale 22 | stale-pr-label: stale 23 | stale-issue-message: > 24 | This issue has been automatically marked as stale because it has been 25 | open 30 days with no activity. Remove stale label or comment or this 26 | issue will be closed in 10 days 27 | stale-pr-message: > 28 | This PR has been automatically marked as stale because it has been 29 | open 30 days 30 | 31 | 32 | with no activity. Remove stale label or comment or this PR will be 33 | closed in 10 days 34 | # Not stale if have this labels or part of milestone 35 | exempt-issue-labels: bug,wip,on-hold,auto-update 36 | exempt-pr-labels: bug,wip,on-hold 37 | exempt-all-milestones: true 38 | # Close issue operations 39 | # Label will be automatically removed if the issues are no longer 40 | # closed nor locked. 
41 | days-before-close: 10 42 | delete-branch: true 43 | close-issue-message: >- 44 | This issue was automatically closed because of stale in 10 days 45 | close-pr-message: >- 46 | This PR was automatically closed because of stale in 10 days 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by https://gitignore.io 2 | # Created by https://www.toptal.com/developers/gitignore/api/python 3 | # Edit at https://www.toptal.com/developers/gitignore?templates=python 4 | 5 | ### Python ### 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | cover/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | db.sqlite3 67 | db.sqlite3-journal 68 | 69 | # Flask stuff: 70 | instance/ 71 | .webassets-cache 72 | 73 | # Scrapy stuff: 74 | .scrapy 75 | 76 | # Sphinx documentation 77 | docs/_build/ 78 | 79 | # PyBuilder 80 | .pybuilder/ 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | # For a library or package, you might want to ignore these files since the code is 92 | # intended to run in multiple environments; otherwise, check them in: 93 | # .python-version 94 | 95 | # pipenv 96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 99 | # install all needed dependencies. 100 | #Pipfile.lock 101 | 102 | # poetry 103 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 104 | # This is especially recommended for binary packages to ensure reproducibility, and is more 105 | # commonly ignored for libraries. 106 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 107 | #poetry.lock 108 | 109 | # pdm 110 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
111 | #pdm.lock 112 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 113 | # in version control. 114 | # https://pdm.fming.dev/#use-with-ide 115 | .pdm.toml 116 | 117 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 118 | __pypackages__/ 119 | 120 | # Celery stuff 121 | celerybeat-schedule 122 | celerybeat.pid 123 | 124 | # SageMath parsed files 125 | *.sage.py 126 | 127 | # Environments 128 | .env 129 | .venv 130 | env/ 131 | venv/ 132 | ENV/ 133 | env.bak/ 134 | venv.bak/ 135 | 136 | # Spyder project settings 137 | .spyderproject 138 | .spyproject 139 | 140 | # Rope project settings 141 | .ropeproject 142 | 143 | # mkdocs documentation 144 | /site 145 | 146 | # mypy 147 | .mypy_cache/ 148 | .dmypy.json 149 | dmypy.json 150 | 151 | # Pyre type checker 152 | .pyre/ 153 | 154 | # pytype static type analyzer 155 | .pytype/ 156 | 157 | # Cython debug symbols 158 | cython_debug/ 159 | 160 | # PyCharm 161 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 162 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 163 | # and can be added to the global gitignore or merged into this file. For a more nuclear 164 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
165 | #.idea/ 166 | 167 | ### Python Patch ### 168 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration 169 | poetry.toml 170 | 171 | # ruff 172 | .ruff_cache/ 173 | 174 | # LSP config files 175 | pyrightconfig.json 176 | 177 | # End of https://www.toptal.com/developers/gitignore/api/python 178 | 179 | tests/results/* 180 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.9 3 | color_output = true 4 | error_summary = true 5 | # IMPORTANT: The file list MUST NOT have a trailing comma after the last entry. 6 | # Ref: https://github.com/python/mypy/issues/11171#issuecomment-2567150548 7 | files = 8 | src/, 9 | tests/pytest/ 10 | 11 | check_untyped_defs = true 12 | 13 | disallow_any_explicit = true 14 | disallow_any_expr = true 15 | disallow_any_decorated = true 16 | disallow_any_generics = true 17 | disallow_any_unimported = true 18 | disallow_incomplete_defs = true 19 | disallow_subclassing_any = true 20 | disallow_untyped_calls = true 21 | disallow_untyped_decorators = true 22 | disallow_untyped_defs = true 23 | 24 | enable_error_code = 25 | ignore-without-code 26 | 27 | explicit_package_bases = true 28 | 29 | extra_checks = true 30 | 31 | follow_imports = normal 32 | 33 | ignore_missing_imports = false 34 | 35 | local_partial_types = true 36 | 37 | mypy_path = ${MYPY_CONFIG_FILE_DIR}/src:${MYPY_CONFIG_FILE_DIR}/_type_stubs 38 | 39 | namespace_packages = true 40 | 41 | no_implicit_reexport = true 42 | 43 | pretty = true 44 | 45 | show_column_numbers = true 46 | show_error_code_links = true 47 | show_error_codes = true 48 | show_error_context = true 49 | show_error_end = true 50 | 51 | # `strict` will pick up any future strictness-related settings: 52 | strict = true 53 | strict_equality = true 54 | strict_optional = true 55 | 56 | warn_no_return = true 57 | 
warn_redundant_casts = true 58 | warn_return_any = true 59 | warn_unused_configs = true 60 | warn_unused_ignores = true 61 | 62 | [mypy-tests.*] 63 | # crashes with some decorators like `@pytest.mark.parametrize`: 64 | disallow_any_expr = false 65 | # fails on `@hypothesis.given()`: 66 | disallow_any_decorated = false 67 | 68 | [mypy-tests.pytest.*] 69 | disable_error_code = attr-defined 70 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autoupdate_schedule: quarterly 3 | skip: 4 | - shfmt 5 | - shellcheck 6 | - hadolint 7 | 8 | repos: 9 | - repo: https://github.com/pre-commit/pre-commit-hooks 10 | rev: v5.0.0 11 | hooks: 12 | # Git style 13 | - id: check-added-large-files 14 | - id: check-merge-conflict 15 | - id: check-vcs-permalinks 16 | - id: forbid-new-submodules 17 | - id: no-commit-to-branch 18 | 19 | # Common errors 20 | - id: end-of-file-fixer 21 | - id: trailing-whitespace 22 | args: [--markdown-linebreak-ext=md] 23 | exclude: CHANGELOG.md 24 | - id: check-yaml 25 | - id: check-merge-conflict 26 | - id: check-executables-have-shebangs 27 | 28 | # Cross platform 29 | - id: check-case-conflict 30 | - id: mixed-line-ending 31 | args: [--fix=lf] 32 | 33 | # Non-modifying checks: 34 | - id: name-tests-test 35 | files: >- 36 | ^tests/[^_].*\.py$ 37 | 38 | # Security 39 | - id: detect-aws-credentials 40 | args: 41 | - --allow-missing-credentials 42 | - id: detect-private-key 43 | 44 | # Detect hardcoded secrets 45 | - repo: https://github.com/gitleaks/gitleaks 46 | rev: v8.26.0 47 | hooks: 48 | - id: gitleaks 49 | 50 | # Dockerfile 51 | - repo: https://github.com/hadolint/hadolint 52 | rev: v2.13.1-beta 53 | hooks: 54 | - id: hadolint 55 | 56 | # YAML 57 | - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt 58 | rev: 0.2.3 59 | hooks: 60 | - id: yamlfmt 61 | args: 62 | - --mapping=2 63 | - 
--sequence=2 64 | - --offset=0 65 | - --width=75 66 | - --implicit_start 67 | 68 | - repo: https://github.com/adrienverge/yamllint.git 69 | rev: v1.37.1 70 | hooks: 71 | - id: yamllint 72 | types: 73 | - file 74 | - yaml 75 | args: 76 | - --strict 77 | 78 | # JSON5 79 | - repo: https://github.com/pre-commit/mirrors-prettier 80 | rev: v4.0.0-alpha.8 81 | hooks: 82 | - id: prettier 83 | # https://prettier.io/docs/en/options.html#parser 84 | files: .json5$ 85 | 86 | # Bash 87 | - repo: https://github.com/jumanjihouse/pre-commit-hooks 88 | rev: 3.0.0 89 | hooks: 90 | - id: shfmt 91 | args: 92 | - -l 93 | - -i 94 | - '2' 95 | - -ci 96 | - -sr 97 | - -w 98 | - id: shellcheck 99 | 100 | # Python 101 | - repo: https://github.com/astral-sh/ruff-pre-commit 102 | rev: v0.11.11 103 | hooks: 104 | - id: ruff 105 | args: 106 | - --fix 107 | - id: ruff-format 108 | 109 | - repo: https://github.com/pre-commit/mirrors-mypy.git 110 | rev: v1.15.0 111 | hooks: 112 | - id: mypy 113 | alias: mypy-py313 114 | name: MyPy, for Python 3.13 115 | additional_dependencies: 116 | - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` 117 | - pytest 118 | - pytest-mock 119 | args: 120 | - --python-version=3.13 121 | - --any-exprs-report=.tox/.tmp/.test-results/mypy--py-3.13 122 | - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.13 123 | - --html-report=.tox/.tmp/.test-results/mypy--py-3.13 124 | - --linecount-report=.tox/.tmp/.test-results/mypy--py-3.13 125 | - --linecoverage-report=.tox/.tmp/.test-results/mypy--py-3.13 126 | - --lineprecision-report=.tox/.tmp/.test-results/mypy--py-3.13 127 | - --txt-report=.tox/.tmp/.test-results/mypy--py-3.13 128 | pass_filenames: false 129 | - id: mypy 130 | alias: mypy-py311 131 | name: MyPy, for Python 3.11 132 | additional_dependencies: 133 | - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` 134 | - pytest 135 | - pytest-mock 136 | args: 137 | - --python-version=3.11 138 | - 
--any-exprs-report=.tox/.tmp/.test-results/mypy--py-3.11 139 | - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.11 140 | - --html-report=.tox/.tmp/.test-results/mypy--py-3.11 141 | - --linecount-report=.tox/.tmp/.test-results/mypy--py-3.11 142 | - --linecoverage-report=.tox/.tmp/.test-results/mypy--py-3.11 143 | - --lineprecision-report=.tox/.tmp/.test-results/mypy--py-3.11 144 | - --txt-report=.tox/.tmp/.test-results/mypy--py-3.11 145 | pass_filenames: false 146 | - id: mypy 147 | alias: mypy-py39 148 | name: MyPy, for Python 3.9 149 | additional_dependencies: 150 | - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` 151 | - pytest 152 | - pytest-mock 153 | args: 154 | - --python-version=3.9 155 | - --any-exprs-report=.tox/.tmp/.test-results/mypy--py-3.9 156 | - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.9 157 | - --html-report=.tox/.tmp/.test-results/mypy--py-3.9 158 | - --linecount-report=.tox/.tmp/.test-results/mypy--py-3.9 159 | - --linecoverage-report=.tox/.tmp/.test-results/mypy--py-3.9 160 | - --lineprecision-report=.tox/.tmp/.test-results/mypy--py-3.9 161 | - --txt-report=.tox/.tmp/.test-results/mypy--py-3.9 162 | pass_filenames: false 163 | -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | - id: infracost_breakdown 2 | name: Infracost breakdown 3 | description: Check terraform infrastructure cost 4 | entry: hooks/infracost_breakdown.sh 5 | language: script 6 | require_serial: true 7 | files: \.(tf|tofu|tfvars|hcl)$ 8 | exclude: \.terraform/.*$ 9 | 10 | - id: terraform_fmt 11 | name: Terraform fmt 12 | description: >- 13 | Rewrites all Terraform configuration files to a canonical format. 
14 | entry: hooks/terraform_fmt.sh 15 | language: script 16 | files: \.(tf|tofu|tfvars)$ 17 | exclude: \.terraform/.*$ 18 | 19 | - id: terraform_docs 20 | name: Terraform docs 21 | description: >- 22 | Inserts input and output documentation into README.md 23 | (using terraform-docs). 24 | require_serial: true 25 | entry: hooks/terraform_docs.sh 26 | language: script 27 | files: \.(tf|tofu|terraform\.lock\.hcl)$ 28 | exclude: \.terraform/.*$ 29 | 30 | - id: terraform_docs_without_aggregate_type_defaults 31 | name: Terraform docs (without aggregate type defaults) 32 | description: >- 33 | Inserts input and output documentation into README.md 34 | (using terraform-docs). Identical to terraform_docs. 35 | require_serial: true 36 | entry: hooks/terraform_docs.sh 37 | language: script 38 | files: \.(tf|tofu)$ 39 | exclude: \.terraform/.*$ 40 | 41 | - id: terraform_docs_replace 42 | name: Terraform docs (overwrite README.md) 43 | description: Overwrite content of README.md with terraform-docs. 44 | require_serial: true 45 | entry: python -Im pre_commit_terraform replace-docs 46 | language: python 47 | files: \.(tf|tofu)$ 48 | exclude: \.terraform/.*$ 49 | 50 | - id: terraform_validate 51 | name: Terraform validate 52 | description: Validates all Terraform configuration files. 53 | require_serial: true 54 | entry: hooks/terraform_validate.sh 55 | language: script 56 | files: \.(tf|tofu|tfvars|terraform\.lock\.hcl)$ 57 | exclude: \.terraform/.*$ 58 | 59 | - id: terraform_providers_lock 60 | name: Lock terraform provider versions 61 | description: Updates provider signatures in dependency lock files. 62 | require_serial: true 63 | entry: hooks/terraform_providers_lock.sh 64 | language: script 65 | files: (\.terraform\.lock\.hcl)$ 66 | exclude: \.terraform/.*$ 67 | 68 | - id: terraform_tflint 69 | name: Terraform validate with tflint 70 | description: Validates all Terraform configuration files with TFLint. 
71 | require_serial: true 72 | entry: hooks/terraform_tflint.sh 73 | language: script 74 | files: \.(tf|tofu|tfvars)$ 75 | exclude: \.terraform/.*$ 76 | 77 | - id: terragrunt_fmt 78 | name: Terragrunt fmt 79 | description: >- 80 | Rewrites all Terragrunt configuration files to a canonical format. 81 | entry: hooks/terragrunt_fmt.sh 82 | language: script 83 | files: (\.hcl)$ 84 | exclude: \.terraform/.*$ 85 | 86 | - id: terragrunt_validate 87 | name: Terragrunt validate 88 | description: Validates all Terragrunt configuration files. 89 | entry: hooks/terragrunt_validate.sh 90 | language: script 91 | files: (\.hcl)$ 92 | exclude: \.terraform/.*$ 93 | 94 | - id: terragrunt_validate_inputs 95 | name: Terragrunt validate inputs 96 | description: Validates Terragrunt unused and undefined inputs. 97 | entry: hooks/terragrunt_validate_inputs.sh 98 | language: script 99 | files: (\.hcl)$ 100 | exclude: \.terraform/.*$ 101 | 102 | - id: terragrunt_providers_lock 103 | name: Terragrunt providers lock 104 | description: >- 105 | Updates provider signatures in dependency lock files using terragrunt. 106 | entry: hooks/terragrunt_providers_lock.sh 107 | language: script 108 | files: (terragrunt|\.terraform\.lock)\.hcl$ 109 | exclude: \.(terraform/.*|terragrunt-cache)$ 110 | 111 | - id: terraform_tfsec 112 | name: Terraform validate with tfsec (deprecated, use "terraform_trivy") 113 | description: >- 114 | Static analysis of Terraform templates to spot potential security issues. 115 | require_serial: true 116 | entry: hooks/terraform_tfsec.sh 117 | files: \.(tf|tofu|tfvars)$ 118 | language: script 119 | 120 | - id: terraform_trivy 121 | name: Terraform validate with trivy 122 | description: >- 123 | Static analysis of Terraform templates to spot potential security issues. 
124 | require_serial: true 125 | entry: hooks/terraform_trivy.sh 126 | files: \.(tf|tofu|tfvars)$ 127 | language: script 128 | 129 | - id: checkov 130 | name: checkov (deprecated, use "terraform_checkov") 131 | description: Runs checkov on Terraform templates. 132 | entry: checkov -d . 133 | language: python 134 | pass_filenames: false 135 | always_run: false 136 | files: \.(tf|tofu)$ 137 | exclude: \.terraform/.*$ 138 | require_serial: true 139 | 140 | - id: terraform_checkov 141 | name: Checkov 142 | description: Runs checkov on Terraform templates. 143 | entry: hooks/terraform_checkov.sh 144 | language: script 145 | always_run: false 146 | files: \.(tf|tofu)$ 147 | exclude: \.terraform/.*$ 148 | require_serial: true 149 | 150 | - id: terraform_wrapper_module_for_each 151 | name: Terraform wrapper with for_each in module 152 | description: Generate Terraform wrappers with for_each in module. 153 | entry: hooks/terraform_wrapper_module_for_each.sh 154 | language: script 155 | pass_filenames: false 156 | always_run: false 157 | require_serial: true 158 | files: \.(tf|tofu)$ 159 | exclude: \.terraform/.*$ 160 | 161 | - id: terrascan 162 | name: terrascan 163 | description: Runs terrascan on Terraform templates. 164 | language: script 165 | entry: hooks/terrascan.sh 166 | files: \.(tf|tofu)$ 167 | exclude: \.terraform/.*$ 168 | require_serial: true 169 | 170 | - id: tfupdate 171 | name: tfupdate 172 | description: Runs tfupdate on Terraform templates. 
173 | language: script 174 | entry: hooks/tfupdate.sh 175 | args: 176 | - --args=terraform 177 | files: \.(tf|tofu)$ 178 | require_serial: true 179 | -------------------------------------------------------------------------------- /.releaserc.json: -------------------------------------------------------------------------------- 1 | { 2 | "branches": [ 3 | "main", 4 | "master" 5 | ], 6 | "ci": false, 7 | "plugins": [ 8 | "@semantic-release/commit-analyzer", 9 | "@semantic-release/release-notes-generator", 10 | [ 11 | "@semantic-release/github", 12 | { 13 | "successComment": 14 | "This ${issue.pull_request ? 'PR is included' : 'issue has been resolved'} in version ${nextRelease.version} :tada:", 15 | "labels": false, 16 | "releasedLabels": false 17 | } 18 | ], 19 | [ 20 | "@semantic-release/changelog", 21 | { 22 | "changelogFile": "CHANGELOG.md", 23 | "changelogTitle": "# Changelog\n\nAll notable changes to this project will be documented in this file." 24 | } 25 | ], 26 | [ 27 | "@semantic-release/git", 28 | { 29 | "assets": [ 30 | "CHANGELOG.md" 31 | ], 32 | "message": "chore(release): version ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}" 33 | } 34 | ] 35 | ] 36 | } 37 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations. 3 | // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp 4 | 5 | // List of extensions which should be recommended for users of this workspace. 
6 | "recommendations": [ 7 | // Global 8 | "aaron-bond.better-comments", 9 | "gruntfuggly.todo-tree", 10 | "shardulm94.trailing-spaces", 11 | "glenbuktenica.unicode-substitutions", 12 | "editorconfig.editorconfig", 13 | 14 | // Grammar 15 | "streetsidesoftware.code-spell-checker", 16 | "znck.grammarly", 17 | 18 | // Documentation 19 | "bierner.markdown-preview-github-styles", 20 | "yzhang.markdown-all-in-one", 21 | "DavidAnson.vscode-markdownlint", 22 | 23 | ], 24 | // List of extensions recommended by VS Code that should not be recommended for users of this workspace. 25 | "unwantedRecommendations": [] 26 | } 27 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.guides.bracketPairs": "active", 3 | "markdown.extension.toc.unorderedList.marker": "*", 4 | "markdown.extension.toc.levels": "2..6", 5 | "cSpell.language": "en", 6 | "markdownlint.config": { 7 | "code-block-style": false 8 | }, 9 | "markdown.validate.enabled": true, 10 | "python.analysis.extraPaths": [ 11 | "./src", 12 | "./tests/pytest" 13 | ], 14 | } 15 | -------------------------------------------------------------------------------- /.yamllint: -------------------------------------------------------------------------------- 1 | extends: default 2 | 3 | rules: 4 | indentation: 5 | level: error 6 | indent-sequences: false 7 | document-start: 8 | present: false 9 | quoted-strings: 10 | required: only-when-needed 11 | line-length: 12 | max: 100 13 | truthy: 14 | allowed-values: 15 | - >- 16 | false 17 | - >- 18 | true 19 | # Allow "on" key name in GHA CI/CD workflow definitions 20 | - >- 21 | on 22 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM 
python:3.12.0-alpine3.17@sha256:fc34b07ec97a4f288bc17083d288374a803dd59800399c76b977016c9fe5b8f2 AS python_base 2 | 3 | FROM python_base AS builder 4 | ARG TARGETOS 5 | ARG TARGETARCH 6 | 7 | WORKDIR /bin_dir 8 | 9 | RUN apk add --no-cache \ 10 | # Builder deps 11 | bash=~5 \ 12 | curl=~8 && \ 13 | # Upgrade packages for be able get latest Checkov 14 | python3 -m pip install --no-cache-dir --upgrade \ 15 | pip~=25.0 \ 16 | setuptools~=75.8 17 | 18 | COPY tools/install/ /install/ 19 | 20 | # 21 | # Install required tools 22 | # 23 | ARG PRE_COMMIT_VERSION=${PRE_COMMIT_VERSION:-latest} 24 | RUN touch /.env && \ 25 | if [ "$PRE_COMMIT_VERSION" = "false" ]; then \ 26 | echo "Vital software can't be skipped" && exit 1; \ 27 | fi 28 | RUN /install/pre-commit.sh 29 | 30 | # 31 | # Install tools 32 | # 33 | ARG OPENTOFU_VERSION=${OPENTOFU_VERSION:-false} 34 | ARG TERRAFORM_VERSION=${TERRAFORM_VERSION:-false} 35 | 36 | ARG CHECKOV_VERSION=${CHECKOV_VERSION:-false} 37 | ARG HCLEDIT_VERSION=${HCLEDIT_VERSION:-false} 38 | ARG INFRACOST_VERSION=${INFRACOST_VERSION:-false} 39 | ARG TERRAFORM_DOCS_VERSION=${TERRAFORM_DOCS_VERSION:-false} 40 | ARG TERRAGRUNT_VERSION=${TERRAGRUNT_VERSION:-false} 41 | ARG TERRASCAN_VERSION=${TERRASCAN_VERSION:-false} 42 | ARG TFLINT_VERSION=${TFLINT_VERSION:-false} 43 | ARG TFSEC_VERSION=${TFSEC_VERSION:-false} 44 | ARG TFUPDATE_VERSION=${TFUPDATE_VERSION:-false} 45 | ARG TRIVY_VERSION=${TRIVY_VERSION:-false} 46 | 47 | 48 | # Tricky thing to install all tools by set only one arg. 49 | # In RUN command below used `. 
/.env` <- this is sourcing vars that 50 | # specified in step below 51 | ARG INSTALL_ALL=${INSTALL_ALL:-false} 52 | RUN if [ "$INSTALL_ALL" != "false" ]; then \ 53 | echo "OPENTOFU_VERSION=latest" >> /.env && \ 54 | echo "TERRAFORM_VERSION=latest" >> /.env && \ 55 | \ 56 | echo "CHECKOV_VERSION=latest" >> /.env && \ 57 | echo "HCLEDIT_VERSION=latest" >> /.env && \ 58 | echo "INFRACOST_VERSION=latest" >> /.env && \ 59 | echo "TERRAFORM_DOCS_VERSION=latest" >> /.env && \ 60 | echo "TERRAGRUNT_VERSION=latest" >> /.env && \ 61 | echo "TERRASCAN_VERSION=latest" >> /.env && \ 62 | echo "TFLINT_VERSION=latest" >> /.env && \ 63 | echo "TFSEC_VERSION=latest" >> /.env && \ 64 | echo "TFUPDATE_VERSION=latest" >> /.env && \ 65 | echo "TRIVY_VERSION=latest" >> /.env \ 66 | ; fi 67 | 68 | # Docker `RUN`s shouldn't be consolidated here 69 | # hadolint global ignore=DL3059 70 | RUN /install/opentofu.sh 71 | RUN /install/terraform.sh 72 | 73 | RUN /install/checkov.sh 74 | RUN /install/hcledit.sh 75 | RUN /install/infracost.sh 76 | RUN /install/terraform-docs.sh 77 | RUN /install/terragrunt.sh 78 | RUN /install/terrascan.sh 79 | RUN /install/tflint.sh 80 | RUN /install/tfsec.sh 81 | RUN /install/tfupdate.sh 82 | RUN /install/trivy.sh 83 | 84 | 85 | # Checking binaries versions and write it to debug file 86 | 87 | # SC2086 - We do not need to quote "$F" variable, because it's not contain spaces 88 | # DL4006 - Not Applicable for /bin/sh in alpine images. Disable, as recommended by check itself 89 | # hadolint ignore=SC2086,DL4006 90 | RUN . 
/.env && \ 91 | F=tools_versions_info && \ 92 | pre-commit --version >> $F && \ 93 | (if [ "$OPENTOFU_VERSION" != "false" ]; then ./tofu --version | head -n 1 >> $F; else echo "opentofu SKIPPED" >> $F ; fi) && \ 94 | (if [ "$TERRAFORM_VERSION" != "false" ]; then ./terraform --version | head -n 1 >> $F; else echo "terraform SKIPPED" >> $F ; fi) && \ 95 | \ 96 | (if [ "$CHECKOV_VERSION" != "false" ]; then echo "checkov $(checkov --version)" >> $F; else echo "checkov SKIPPED" >> $F ; fi) && \ 97 | (if [ "$HCLEDIT_VERSION" != "false" ]; then echo "hcledit $(./hcledit version)" >> $F; else echo "hcledit SKIPPED" >> $F ; fi) && \ 98 | (if [ "$INFRACOST_VERSION" != "false" ]; then echo "$(./infracost --version)" >> $F; else echo "infracost SKIPPED" >> $F ; fi) && \ 99 | (if [ "$TERRAFORM_DOCS_VERSION" != "false" ]; then ./terraform-docs --version >> $F; else echo "terraform-docs SKIPPED" >> $F ; fi) && \ 100 | (if [ "$TERRAGRUNT_VERSION" != "false" ]; then ./terragrunt --version >> $F; else echo "terragrunt SKIPPED" >> $F ; fi) && \ 101 | (if [ "$TERRASCAN_VERSION" != "false" ]; then echo "terrascan $(./terrascan version)" >> $F; else echo "terrascan SKIPPED" >> $F ; fi) && \ 102 | (if [ "$TFLINT_VERSION" != "false" ]; then ./tflint --version >> $F; else echo "tflint SKIPPED" >> $F ; fi) && \ 103 | (if [ "$TFSEC_VERSION" != "false" ]; then echo "tfsec $(./tfsec --version)" >> $F; else echo "tfsec SKIPPED" >> $F ; fi) && \ 104 | (if [ "$TFUPDATE_VERSION" != "false" ]; then echo "tfupdate $(./tfupdate --version)" >> $F; else echo "tfupdate SKIPPED" >> $F ; fi) && \ 105 | (if [ "$TRIVY_VERSION" != "false" ]; then echo "trivy $(./trivy --version)" >> $F; else echo "trivy SKIPPED" >> $F ; fi) && \ 106 | printf "\n\n\n" && cat $F && printf "\n\n\n" 107 | 108 | 109 | 110 | FROM python_base 111 | 112 | RUN apk add --no-cache \ 113 | # pre-commit deps 114 | git=~2 \ 115 | # All hooks deps 116 | bash=~5 \ 117 | # pre-commit-hooks deps: https://github.com/pre-commit/pre-commit-hooks 
118 | musl-dev=~1 \ 119 | gcc=~12 \ 120 | # entrypoint wrapper deps 121 | su-exec=~0.2 \ 122 | # ssh-client for external private module in ssh 123 | openssh-client=~9 124 | 125 | # Copy tools 126 | COPY --from=builder \ 127 | # Needed for all hooks 128 | /usr/local/bin/pre-commit \ 129 | # Hooks and terraform binaries 130 | /bin_dir/ \ 131 | /usr/local/bin/checkov* \ 132 | /usr/bin/ 133 | # Copy pre-commit packages 134 | COPY --from=builder /usr/local/lib/python3.12/site-packages/ /usr/local/lib/python3.12/site-packages/ 135 | # Copy terrascan policies 136 | COPY --from=builder /root/ /root/ 137 | 138 | # Install hooks extra deps 139 | RUN if [ "$(grep -o '^terraform-docs SKIPPED$' /usr/bin/tools_versions_info)" = "" ]; then \ 140 | apk add --no-cache perl=~5 \ 141 | ; fi && \ 142 | if [ "$(grep -o '^infracost SKIPPED$' /usr/bin/tools_versions_info)" = "" ]; then \ 143 | apk add --no-cache jq=~1 \ 144 | ; fi && \ 145 | # Fix git runtime fatal: 146 | # unsafe repository ('/lint' is owned by someone else) 147 | git config --global --add safe.directory /lint 148 | 149 | COPY tools/entrypoint.sh /entrypoint.sh 150 | 151 | ENV PRE_COMMIT_COLOR=${PRE_COMMIT_COLOR:-always} 152 | 153 | ENV INFRACOST_API_KEY=${INFRACOST_API_KEY:-} 154 | ENV INFRACOST_SKIP_UPDATE_CHECK=${INFRACOST_SKIP_UPDATE_CHECK:-false} 155 | 156 | ENTRYPOINT [ "/entrypoint.sh" ] 157 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017 Anton Babenko 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do 
so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /assets/contributing/enable_actions_in_fork.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/antonbabenko/pre-commit-terraform/79085a61bf51c92032101ce5a29525f0ec76fc86/assets/contributing/enable_actions_in_fork.png -------------------------------------------------------------------------------- /assets/pre-commit-terraform-banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/antonbabenko/pre-commit-terraform/79085a61bf51c92032101ce5a29525f0ec76fc86/assets/pre-commit-terraform-banner.png -------------------------------------------------------------------------------- /hatch.toml: -------------------------------------------------------------------------------- 1 | [build.targets.sdist] 2 | include = [ 3 | '.codecov.yml', 4 | '.coveragerc', 5 | 'src/', 6 | 'tests/', 7 | 'pytest.ini', 8 | 'tox.ini', 9 | ] 10 | 11 | [build.targets.wheel] 12 | packages = [ 13 | 'src/pre_commit_terraform/', 14 | ] 15 | 16 | [metadata.hooks.vcs.urls] 17 | # FIXME: Uncomment 'Source Archive' as soon as 18 | # FIXME: https://github.com/ofek/hatch-vcs/issues/80 is fixed. 
19 | # 'Source Archive' = 'https://github.com/antonbabenko/pre-commit-terraform/archive/{commit_hash}.tar.gz' 20 | 'GitHub: repo' = 'https://github.com/antonbabenko/pre-commit-terraform' 21 | 22 | [version] 23 | source = 'vcs' 24 | -------------------------------------------------------------------------------- /hooks/infracost_breakdown.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # shellcheck disable=SC2153 # False positive 16 | infracost_breakdown_ "${HOOK_CONFIG[*]}" "${ARGS[*]}" 17 | } 18 | 19 | ####################################################################### 20 | # Wrapper around `infracost breakdown` tool which checks and compares 21 | # infra cost based on provided hook_config 22 | # Environment variables: 23 | # PRE_COMMIT_COLOR (string) If set to `never` - do not colorize output 24 | # Arguments: 25 | # hook_config (string with array) arguments that configure hook behavior 26 | # args (string with array) arguments that configure wrapped tool behavior 27 | # Outputs: 28 | # Print out hook checks status (Passed/Failed), total monthly cost and 29 | # diff, summary about infracost check (non-supported resources etc.) 
30 | ####################################################################### 31 | function infracost_breakdown_ { 32 | local -r hook_config="$1" 33 | local args 34 | read -r -a args <<< "$2" 35 | 36 | # Get hook settings 37 | IFS=";" read -r -a checks <<< "$hook_config" 38 | # Suppress infracost color 39 | if [ "$PRE_COMMIT_COLOR" = "never" ]; then 40 | args+=("--no-color") 41 | fi 42 | 43 | local RESULTS 44 | RESULTS="$(infracost breakdown "${args[@]}" --format json)" 45 | local API_VERSION 46 | API_VERSION="$(jq -r .version <<< "$RESULTS")" 47 | 48 | if [ "$API_VERSION" != "0.2" ]; then 49 | common::colorify "yellow" "WARNING: Hook supports Infracost API version \"0.2\", got \"$API_VERSION\"" 50 | common::colorify "yellow" " Some things may not work as expected" 51 | fi 52 | 53 | local dir 54 | dir="$(jq '.projects[].metadata.vcsSubPath' <<< "$RESULTS")" 55 | echo -e "\nRunning in $dir" 56 | 57 | local have_failed_checks=false 58 | 59 | for check in "${checks[@]}"; do 60 | # $hook_config receives string like '1 > 2; 3 == 4;' etc. 61 | # It gets split by `;` into array, which we're parsing here ('1 > 2' ' 3 == 4') 62 | # Next line removes leading spaces, just for fancy output reason. 63 | # shellcheck disable=SC2001 # Rule exception 64 | check=$(echo "$check" | sed 's/^[[:space:]]*//') 65 | 66 | # Drop quotes in hook args section. From: 67 | # -h ".totalHourlyCost > 0.1" 68 | # --hook-config='.currency == "USD"' 69 | # To: 70 | # -h .totalHourlyCost > 0.1 71 | # --hook-config=.currency == "USD" 72 | first_char=${check:0:1} 73 | last_char=${check: -1} 74 | if [ "$first_char" == "$last_char" ] && { 75 | [ "$first_char" == '"' ] || [ "$first_char" == "'" ] 76 | }; then 77 | check="${check:1:-1}" 78 | fi 79 | 80 | mapfile -t operations < <(echo "$check" | grep -oE '[!<>=]{1,2}') 81 | # Get the very last operator, that is used in comparison inside `jq` query. 
82 | # From the example below we need to pick the `>` which is in between `add` and `1000`, 83 | # but not the `!=`, which goes earlier in the `jq` expression 84 | # [.projects[].diff.totalMonthlyCost | select (.!=null) | tonumber] | add > 1000 85 | operation=${operations[-1]} 86 | 87 | IFS="$operation" read -r -a jq_check <<< "$check" 88 | real_value="$(jq "${jq_check[0]}" <<< "$RESULTS")" 89 | compare_value="${jq_check[1]}${jq_check[2]}" 90 | # Check types 91 | jq_check_type="$(jq -r "${jq_check[0]} | type" <<< "$RESULTS")" 92 | compare_value_type="$(jq -r "$compare_value | type" <<< "$RESULTS")" 93 | # Fail if comparing different types 94 | if [ "$jq_check_type" != "$compare_value_type" ]; then 95 | common::colorify "yellow" "Warning: Comparing values with different types may give incorrect result" 96 | common::colorify "yellow" " Expression: $check" 97 | common::colorify "yellow" " Types in the expression: [$jq_check_type] $operation [$compare_value_type]" 98 | common::colorify "yellow" " Use 'tonumber' filter when comparing costs (e.g. '.totalMonthlyCost|tonumber')" 99 | have_failed_checks=true 100 | continue 101 | fi 102 | # Fail if string is compared not with `==` or `!=` 103 | if [ "$jq_check_type" == "string" ] && { 104 | [ "$operation" != '==' ] && [ "$operation" != '!=' ] 105 | }; then 106 | common::colorify "yellow" "Warning: Wrong comparison operator is used in expression: $check" 107 | common::colorify "yellow" " Use 'tonumber' filter when comparing costs (e.g. '.totalMonthlyCost|tonumber')" 108 | common::colorify "yellow" " Use '==' or '!=' when comparing strings (e.g. '.currency == \"USD\"')." 109 | have_failed_checks=true 110 | continue 111 | fi 112 | 113 | # Compare values 114 | check_passed="$(echo "$RESULTS" | jq "$check")" 115 | 116 | status="Passed" 117 | color="green" 118 | if ! 
$check_passed; then 119 | status="Failed" 120 | color="red" 121 | have_failed_checks=true 122 | fi 123 | 124 | # Print check result 125 | common::colorify $color "$status: $check\t\t$real_value $operation $compare_value" 126 | done 127 | 128 | # Fancy informational output 129 | currency="$(jq -r '.currency' <<< "$RESULTS")" 130 | 131 | echo -e "\nSummary: $(jq -r '.summary' <<< "$RESULTS")" 132 | 133 | echo -e "\nTotal Monthly Cost: $(jq -r .totalMonthlyCost <<< "$RESULTS") $currency" 134 | echo "Total Monthly Cost (diff): $(jq -r .projects[].diff.totalMonthlyCost <<< "$RESULTS") $currency" 135 | 136 | if $have_failed_checks; then 137 | exit 1 138 | fi 139 | } 140 | 141 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 142 | -------------------------------------------------------------------------------- /hooks/terraform_checkov.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # Support for setting PATH to repo root. 16 | for i in "${!ARGS[@]}"; do 17 | ARGS[i]=${ARGS[i]/__GIT_WORKING_DIR__/$(pwd)\/} 18 | done 19 | 20 | # Suppress checkov color 21 | if [ "$PRE_COMMIT_COLOR" = "never" ]; then 22 | export ANSI_COLORS_DISABLED=true 23 | fi 24 | 25 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 26 | } 27 | 28 | ####################################################################### 29 | # Unique part of `common::per_dir_hook`. The function is executed in loop 30 | # on each provided dir path. 
Run wrapped tool with specified arguments 31 | # Arguments: 32 | # dir_path (string) PATH to dir relative to git repo root. 33 | # Can be used in error logging 34 | # change_dir_in_unique_part (string/false) Modifier which creates 35 | # possibilities to use non-common chdir strategies. 36 | # Availability depends on hook. 37 | # parallelism_disabled (bool) if true - skip lock mechanism 38 | # args (array) arguments that configure wrapped tool behavior 39 | # tf_path (string) PATH to Terraform/OpenTofu binary 40 | # Outputs: 41 | # If failed - print out hook checks status 42 | ####################################################################### 43 | function per_dir_hook_unique_part { 44 | # shellcheck disable=SC2034 # Unused var. 45 | local -r dir_path="$1" 46 | # shellcheck disable=SC2034 # Unused var. 47 | local -r change_dir_in_unique_part="$2" 48 | # shellcheck disable=SC2034 # Unused var. 49 | local -r parallelism_disabled="$3" 50 | # shellcheck disable=SC2034 # Unused var. 51 | local -r tf_path="$4" 52 | shift 4 53 | local -a -r args=("$@") 54 | 55 | checkov -d . "${args[@]}" 56 | 57 | # return exit code to common::per_dir_hook 58 | local exit_code=$? 59 | return $exit_code 60 | } 61 | 62 | ####################################################################### 63 | # Unique part of `common::per_dir_hook`. The function is executed one time 64 | # in the root git repo 65 | # Arguments: 66 | # args (array) arguments that configure wrapped tool behavior 67 | ####################################################################### 68 | function run_hook_on_whole_repo { 69 | local -a -r args=("$@") 70 | 71 | # pass the arguments to hook 72 | checkov -d "$(pwd)" "${args[@]}" 73 | 74 | # return exit code to common::per_dir_hook 75 | local exit_code=$? 
76 | return $exit_code 77 | } 78 | 79 | [[ ${BASH_SOURCE[0]} != "$0" ]] || main "$@" 80 | -------------------------------------------------------------------------------- /hooks/terraform_docs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | insertion_marker_begin="" 11 | insertion_marker_end="" 12 | doc_header="# " 13 | 14 | # Old markers used by the hook before the introduction of the terraform-docs markers 15 | readonly old_insertion_marker_begin="" 16 | readonly old_insertion_marker_end="" 17 | 18 | function main { 19 | common::initialize "$SCRIPT_DIR" 20 | common::parse_cmdline "$@" 21 | common::export_provided_env_vars "${ENV_VARS[@]}" 22 | common::parse_and_export_env_vars 23 | # Support for setting relative PATH to .terraform-docs.yml config. 
24 | for i in "${!ARGS[@]}"; do 25 | ARGS[i]=${ARGS[i]/--config=/--config=$(pwd)\/} 26 | done 27 | # shellcheck disable=SC2153 # False positive 28 | terraform_docs "${HOOK_CONFIG[*]}" "${ARGS[*]}" "${FILES[@]}" 29 | } 30 | 31 | ####################################################################### 32 | # Function to replace old markers with new markers affected files 33 | # Globals: 34 | # insertion_marker_begin - Standard insertion marker at beginning 35 | # insertion_marker_end - Standard insertion marker at the end 36 | # old_insertion_marker_begin - Old insertion marker at beginning 37 | # old_insertion_marker_end - Old insertion marker at the end 38 | # Arguments: 39 | # file (string) filename to check 40 | ####################################################################### 41 | function replace_old_markers { 42 | local -r file=$1 43 | 44 | # Determine the appropriate sed command based on the operating system (GNU sed or BSD sed) 45 | sed --version &> /dev/null && SED_CMD=(sed -i) || SED_CMD=(sed -i '') 46 | "${SED_CMD[@]}" -e "s/^${old_insertion_marker_begin}$/${insertion_marker_begin//\//\\/}/" "$file" 47 | "${SED_CMD[@]}" -e "s/^${old_insertion_marker_end}$/${insertion_marker_end//\//\\/}/" "$file" 48 | } 49 | 50 | ####################################################################### 51 | # Wrapper around `terraform-docs` tool that checks and changes/creates 52 | # (depending on provided hook_config) terraform documentation in 53 | # Markdown 54 | # Arguments: 55 | # hook_config (string with array) arguments that configure hook behavior 56 | # args (string with array) arguments that configure wrapped tool behavior 57 | # files (array) filenames to check 58 | ####################################################################### 59 | function terraform_docs { 60 | local -r hook_config="$1" 61 | local args="$2" 62 | shift 2 63 | local -a -r files=("$@") 64 | 65 | if [[ ! 
$(command -v terraform-docs) ]]; then 66 | echo "ERROR: terraform-docs is required by terraform_docs pre-commit hook but is not installed or in the system's PATH." 67 | exit 1 68 | fi 69 | 70 | local -a paths 71 | 72 | local index=0 73 | local file_with_path 74 | for file_with_path in "${files[@]}"; do 75 | file_with_path="${file_with_path// /__REPLACED__SPACE__}" 76 | 77 | paths[index]=$(dirname "$file_with_path") 78 | 79 | ((index += 1)) 80 | done 81 | 82 | # 83 | # Get hook settings 84 | # 85 | local output_file="README.md" 86 | local output_mode="inject" 87 | local use_path_to_file=false 88 | local add_to_existing=false 89 | local create_if_not_exist=false 90 | local use_standard_markers=true 91 | local have_config_flag=false 92 | 93 | IFS=";" read -r -a configs <<< "$hook_config" 94 | 95 | for c in "${configs[@]}"; do 96 | 97 | IFS="=" read -r -a config <<< "$c" 98 | # $hook_config receives string like '--foo=bar; --baz=4;' etc. 99 | # It gets split by `;` into array, which we're parsing here ('--foo=bar' ' --baz=4') 100 | # Next line removes leading spaces, to support >1 `--hook-config` args 101 | key="${config[0]## }" 102 | value=${config[1]} 103 | 104 | case $key in 105 | --path-to-file) 106 | output_file=$value 107 | use_path_to_file=true 108 | ;; 109 | --add-to-existing-file) 110 | add_to_existing=$value 111 | ;; 112 | --create-file-if-not-exist) 113 | create_if_not_exist=$value 114 | ;; 115 | --use-standard-markers) 116 | use_standard_markers=$value 117 | common::colorify "yellow" "WARNING: --use-standard-markers is deprecated and will be removed in the future." 118 | common::colorify "yellow" " All needed changes already done by the hook, feel free to remove --use-standard-markers setting from your pre-commit config" 119 | ;; 120 | --custom-marker-begin) 121 | insertion_marker_begin=$value 122 | common::colorify "green" "INFO: --custom-marker-begin is used and the marker is set to \"$value\"." 
123 | ;; 124 | --custom-marker-end) 125 | insertion_marker_end=$value 126 | common::colorify "green" "INFO: --custom-marker-end is used and the marker is set to \"$value\"." 127 | ;; 128 | --custom-doc-header) 129 | doc_header=$value 130 | common::colorify "green" "INFO: --custom-doc-header is used and the doc header is set to \"$value\"." 131 | ;; 132 | esac 133 | done 134 | 135 | if [[ $use_standard_markers == false ]]; then 136 | # update the insertion markers to those used by pre-commit-terraform before v1.93 137 | insertion_marker_begin="$old_insertion_marker_begin" 138 | insertion_marker_end="$old_insertion_marker_end" 139 | fi 140 | 141 | # Override formatter if no config file set 142 | if [[ "$args" != *"--config"* ]]; then 143 | local tf_docs_formatter="md" 144 | 145 | else 146 | have_config_flag=true 147 | # Enable extended pattern matching operators 148 | shopt -qp extglob || EXTGLOB_IS_NOT_SET=true && shopt -s extglob 149 | # Trim any args before the `--config` arg value 150 | local config_file=${args##*--config@(+([[:space:]])|=)} 151 | # Trim any trailing spaces and args (if any) 152 | config_file="${config_file%%+([[:space:]])?(--*)}" 153 | # Trim `--config` arg and its value from original args as we will 154 | # pass `--config` separately to allow whitespaces in its value 155 | args=${args/--config@(+([[:space:]])|=)$config_file*([[:space:]])/} 156 | # Restore state of `extglob` if we changed it 157 | [[ $EXTGLOB_IS_NOT_SET ]] && shopt -u extglob 158 | 159 | # Prioritize `.terraform-docs.yml` `output.file` over 160 | # `--hook-config=--path-to-file=` if it set 161 | local config_output_file 162 | # Get latest non-commented `output.file` from `.terraform-docs.yml` 163 | config_output_file=$(grep -A1000 -e '^output:$' "$config_file" 2> /dev/null | grep -E '^[[:space:]]+file:' | tail -n 1) || true 164 | 165 | if [[ $config_output_file ]]; then 166 | # Extract filename from `output.file` line 167 | config_output_file=$(echo "$config_output_file" | awk 
-F':' '{print $2}' | tr -d '[:space:]"' | tr -d "'") 168 | 169 | if [[ $use_path_to_file == true && "$config_output_file" != "$output_file" ]]; then 170 | common::colorify "yellow" "NOTE: You set both '--hook-config=--path-to-file=$output_file' and 'output.file: $config_output_file' in '$config_file'" 171 | common::colorify "yellow" " 'output.file' from '$config_file' will be used." 172 | fi 173 | 174 | output_file=$config_output_file 175 | fi 176 | 177 | # Use `.terraform-docs.yml` `output.mode` if it set 178 | local config_output_mode 179 | config_output_mode=$(grep -A1000 -e '^output:$' "$config_file" 2> /dev/null | grep -E '^[[:space:]]+mode:' | tail -n 1) || true 180 | if [[ $config_output_mode ]]; then 181 | # Extract mode from `output.mode` line 182 | output_mode=$(echo "$config_output_mode" | awk -F':' '{print $2}' | tr -d '[:space:]"' | tr -d "'") 183 | fi 184 | 185 | # Suppress terraform_docs color 186 | local config_file_no_color 187 | config_file_no_color="$config_file$(date +%s).yml" 188 | 189 | if [ "$PRE_COMMIT_COLOR" = "never" ] && 190 | [[ $(grep -e '^formatter:' "$config_file") == *"pretty"* ]] && 191 | [[ $(grep ' color: ' "$config_file") != *"false"* ]]; then 192 | 193 | cp "$config_file" "$config_file_no_color" 194 | echo -e "settings:\n color: false" >> "$config_file_no_color" 195 | args=${args/$config_file/$config_file_no_color} 196 | fi 197 | fi 198 | 199 | local dir_path 200 | for dir_path in $(echo "${paths[*]}" | tr ' ' '\n' | sort -u); do 201 | dir_path="${dir_path//__REPLACED__SPACE__/ }" 202 | 203 | pushd "$dir_path" > /dev/null || continue 204 | 205 | # 206 | # Create file if it not exist and `--create-if-not-exist=true` provided 207 | # 208 | if $create_if_not_exist && [[ ! -f "$output_file" ]]; then 209 | dir_have_tf_files="$( 210 | find . -maxdepth 1 -type f | sed 's|.*\.||' | sort -u | grep -oE '^tf$|^tfvars$' || 211 | exit 0 212 | )" 213 | 214 | # if no TF files - skip dir 215 | [ ! 
"$dir_have_tf_files" ] && popd > /dev/null && continue 216 | 217 | dir="$(dirname "$output_file")" 218 | 219 | mkdir -p "$dir" 220 | 221 | # Use of insertion markers, where there is no existing README file 222 | { 223 | echo -e "${doc_header}${PWD##*/}\n" 224 | echo "$insertion_marker_begin" 225 | echo "$insertion_marker_end" 226 | } >> "$output_file" 227 | fi 228 | 229 | # If file still not exist - skip dir 230 | [[ ! -f "$output_file" ]] && popd > /dev/null && continue 231 | 232 | replace_old_markers "$output_file" 233 | 234 | # 235 | # If `--add-to-existing-file=false` (default behavior), check if "hook markers" exist in file, 236 | # and, if not, skip execution to avoid addition of terraform-docs section, as 237 | # terraform-docs in 'inject' mode adds markers by default if they are not present 238 | # 239 | if [[ $add_to_existing == false ]]; then 240 | have_marker=$(grep -o "$insertion_marker_begin" "$output_file") || unset have_marker 241 | [[ ! $have_marker ]] && popd > /dev/null && continue 242 | fi 243 | 244 | # shellcheck disable=SC2206 245 | # Need to pass $tf_docs_formatter and $args as separate arguments, not as single string 246 | local tfdocs_cmd=( 247 | terraform-docs 248 | --output-mode="$output_mode" 249 | --output-file="$output_file" 250 | $tf_docs_formatter 251 | $args 252 | ) 253 | if [[ $have_config_flag == true ]]; then 254 | "${tfdocs_cmd[@]}" "--config=$config_file" ./ > /dev/null 255 | else 256 | "${tfdocs_cmd[@]}" ./ > /dev/null 257 | fi 258 | 259 | popd > /dev/null 260 | done 261 | 262 | # Cleanup 263 | rm -f "$config_file_no_color" 264 | } 265 | 266 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 267 | -------------------------------------------------------------------------------- /hooks/terraform_fmt.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly 
SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | 16 | # Suppress terraform fmt color 17 | if [ "$PRE_COMMIT_COLOR" = "never" ]; then 18 | ARGS+=("-no-color") 19 | fi 20 | 21 | # shellcheck disable=SC2153 # False positive 22 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 23 | } 24 | 25 | ####################################################################### 26 | # Unique part of `common::per_dir_hook`. The function is executed in loop 27 | # on each provided dir path. Run wrapped tool with specified arguments 28 | # Arguments: 29 | # dir_path (string) PATH to dir relative to git repo root. 30 | # Can be used in error logging 31 | # change_dir_in_unique_part (string/false) Modifier which creates 32 | # possibilities to use non-common chdir strategies. 33 | # Availability depends on hook. 34 | # parallelism_disabled (bool) if true - skip lock mechanism 35 | # args (array) arguments that configure wrapped tool behavior 36 | # tf_path (string) PATH to Terraform/OpenTofu binary 37 | # Outputs: 38 | # If failed - print out hook checks status 39 | ####################################################################### 40 | function per_dir_hook_unique_part { 41 | # shellcheck disable=SC2034 # Unused var. 42 | local -r dir_path="$1" 43 | # shellcheck disable=SC2034 # Unused var. 44 | local -r change_dir_in_unique_part="$2" 45 | # shellcheck disable=SC2034 # Unused var. 46 | local -r parallelism_disabled="$3" 47 | local -r tf_path="$4" 48 | shift 4 49 | local -a -r args=("$@") 50 | 51 | # pass the arguments to hook 52 | "$tf_path" fmt "${args[@]}" 53 | 54 | # return exit code to common::per_dir_hook 55 | local exit_code=$? 
56 | return $exit_code 57 | } 58 | 59 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 60 | -------------------------------------------------------------------------------- /hooks/terraform_providers_lock.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | # globals variables 6 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 7 | readonly SCRIPT_DIR 8 | # shellcheck source=_common.sh 9 | . "$SCRIPT_DIR/_common.sh" 10 | 11 | function main { 12 | common::initialize "$SCRIPT_DIR" 13 | common::parse_cmdline "$@" 14 | common::export_provided_env_vars "${ENV_VARS[@]}" 15 | common::parse_and_export_env_vars 16 | # JFYI: suppress color for `terraform providers lock` is N/A` 17 | 18 | # shellcheck disable=SC2153 # False positive 19 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 20 | } 21 | 22 | ####################################################################### 23 | # Check that all needed `h1` and `zh` SHAs are included in lockfile for 24 | # each provider. 
25 | # Arguments: 26 | # platforms_count (number) How many `-platform` flags provided 27 | # Outputs: 28 | # Return 0 when lockfile has all needed SHAs 29 | # Return 1-99 when lockfile is invalid 30 | # Return 100+ when not all SHAs found 31 | ####################################################################### 32 | function lockfile_contains_all_needed_sha { 33 | local -r platforms_count="$1" 34 | 35 | local h1_counter="$platforms_count" 36 | local zh_counter=0 37 | 38 | # Reading each line 39 | while read -r line; do 40 | 41 | if grep -Eq '^"h1:' <<< "$line"; then 42 | h1_counter=$((h1_counter - 1)) 43 | continue 44 | fi 45 | 46 | if grep -Eq '^"zh:' <<< "$line"; then 47 | zh_counter=0 48 | continue 49 | fi 50 | 51 | if grep -Eq '^provider' <<< "$line"; then 52 | h1_counter="$platforms_count" 53 | zh_counter=$((zh_counter + 1)) 54 | continue 55 | fi 56 | # Not all SHA inside provider lock definition block found 57 | if grep -Eq '^}' <<< "$line"; then 58 | if [ "$h1_counter" -ge 1 ] || [ "$zh_counter" -ge 1 ]; then 59 | # h1_counter can be less than 0, in the case when lockfile 60 | # contains more platforms than you currently specify 61 | # That's why here extra +50 - for safety reasons, to be sure 62 | # that error goes exactly from this part of the function 63 | return $((150 + h1_counter + zh_counter)) 64 | fi 65 | fi 66 | 67 | # lockfile always exists, because the hook triggered only on 68 | # `files: (\.terraform\.lock\.hcl)$` 69 | done < ".terraform.lock.hcl" 70 | 71 | # When you specify `-platform``, but don't specify current platform - 72 | # platforms_count will be less than `h1:` headers` 73 | [ "$h1_counter" -lt 0 ] && h1_counter=0 74 | 75 | # 0 if all OK, 2+ when invalid lockfile 76 | return $((h1_counter + zh_counter)) 77 | } 78 | 79 | ####################################################################### 80 | # Unique part of `common::per_dir_hook`. The function is executed in loop 81 | # on each provided dir path. 
Run wrapped tool with specified arguments 82 | # Arguments: 83 | # dir_path (string) PATH to dir relative to git repo root. 84 | # Can be used in error logging 85 | # change_dir_in_unique_part (string/false) Modifier which creates 86 | # possibilities to use non-common chdir strategies. 87 | # Availability depends on hook. 88 | # parallelism_disabled (bool) if true - skip lock mechanism 89 | # args (array) arguments that configure wrapped tool behavior 90 | # tf_path (string) PATH to Terraform/OpenTofu binary 91 | # Outputs: 92 | # If failed - print out hook checks status 93 | ####################################################################### 94 | function per_dir_hook_unique_part { 95 | local -r dir_path="$1" 96 | # shellcheck disable=SC2034 # Unused var. 97 | local -r change_dir_in_unique_part="$2" 98 | local -r parallelism_disabled="$3" 99 | local -r tf_path="$4" 100 | shift 4 101 | local -a -r args=("$@") 102 | 103 | local platforms_count=0 104 | for arg in "${args[@]}"; do 105 | if grep -Eq '^-platform=' <<< "$arg"; then 106 | platforms_count=$((platforms_count + 1)) 107 | fi 108 | done 109 | 110 | local exit_code 111 | # 112 | # Get hook settings 113 | # 114 | local mode 115 | 116 | IFS=";" read -r -a configs <<< "${HOOK_CONFIG[*]}" 117 | 118 | for c in "${configs[@]}"; do 119 | 120 | IFS="=" read -r -a config <<< "$c" 121 | key=${config[0]} 122 | value=${config[1]} 123 | 124 | case $key in 125 | --mode) 126 | if [ "$mode" ]; then 127 | common::colorify "yellow" 'Invalid hook config. Make sure that you specify not more than one "--mode" flag' 128 | exit 1 129 | fi 130 | mode=$value 131 | ;; 132 | esac 133 | done 134 | 135 | # Available options: 136 | # only-check-is-current-lockfile-cross-platform (will be default) 137 | # always-regenerate-lockfile 138 | # TODO: Remove in 2.0 139 | if [ ! "$mode" ]; then 140 | common::colorify "yellow" "DEPRECATION NOTICE: We introduced '--mode' flag for this hook. 
141 | Check migration instructions at https://github.com/antonbabenko/pre-commit-terraform#terraform_providers_lock 142 | " 143 | common::terraform_init "$tf_path providers lock" "$dir_path" "$parallelism_disabled" "$tf_path" || { 144 | exit_code=$? 145 | return $exit_code 146 | } 147 | fi 148 | 149 | if [ "$mode" == "only-check-is-current-lockfile-cross-platform" ] && 150 | lockfile_contains_all_needed_sha "$platforms_count"; then 151 | 152 | exit 0 153 | fi 154 | 155 | #? Don't require `tf init` for providers, but required `tf init` for modules 156 | #? Mitigated by `function match_validate_errors` from terraform_validate hook 157 | # pass the arguments to hook 158 | "$tf_path" providers lock "${args[@]}" 159 | 160 | # return exit code to common::per_dir_hook 161 | exit_code=$? 162 | return $exit_code 163 | } 164 | 165 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 166 | -------------------------------------------------------------------------------- /hooks/terraform_tflint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | # globals variables 6 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 7 | readonly SCRIPT_DIR 8 | # shellcheck source=_common.sh 9 | . "$SCRIPT_DIR/_common.sh" 10 | 11 | function main { 12 | common::initialize "$SCRIPT_DIR" 13 | common::parse_cmdline "$@" 14 | common::export_provided_env_vars "${ENV_VARS[@]}" 15 | common::parse_and_export_env_vars 16 | # Support for setting PATH to repo root. 17 | for i in "${!ARGS[@]}"; do 18 | ARGS[i]=${ARGS[i]/__GIT_WORKING_DIR__/$(pwd)\/} 19 | done 20 | # JFYI: tflint color already suppressed via PRE_COMMIT_COLOR=never 21 | 22 | # Run `tflint --init` for check that plugins installed. 23 | # It should run once on whole repo. 
24 | { 25 | TFLINT_INIT=$(tflint --init "${ARGS[@]}" 2>&1) 2> /dev/null && 26 | common::colorify "green" "Command 'tflint --init' successfully done:" && 27 | echo -e "${TFLINT_INIT}\n\n\n" 28 | } || { 29 | local exit_code=$? 30 | common::colorify "red" "Command 'tflint --init' failed:" 31 | echo -e "${TFLINT_INIT}" 32 | return ${exit_code} 33 | } 34 | 35 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 36 | } 37 | 38 | ####################################################################### 39 | # Unique part of `common::per_dir_hook`. The function is executed in loop 40 | # on each provided dir path. Run wrapped tool with specified arguments 41 | # Arguments: 42 | # dir_path (string) PATH to dir relative to git repo root. 43 | # Can be used in error logging 44 | # change_dir_in_unique_part (string/false) Modifier which creates 45 | # possibilities to use non-common chdir strategies. 46 | # Availability depends on hook. 47 | # parallelism_disabled (bool) if true - skip lock mechanism 48 | # args (array) arguments that configure wrapped tool behavior 49 | # tf_path (string) PATH to Terraform/OpenTofu binary 50 | # Outputs: 51 | # If failed - print out hook checks status 52 | ####################################################################### 53 | function per_dir_hook_unique_part { 54 | local -r dir_path="$1" 55 | local -r change_dir_in_unique_part="$2" 56 | # shellcheck disable=SC2034 # Unused var. 57 | local -r parallelism_disabled="$3" 58 | # shellcheck disable=SC2034 # Unused var. 59 | local -r tf_path="$4" 60 | shift 4 61 | local -a -r args=("$@") 62 | 63 | if [ "$change_dir_in_unique_part" == "delegate_chdir" ]; then 64 | local dir_args="--chdir=$dir_path" 65 | fi 66 | 67 | # shellcheck disable=SC2086 # we need to remove the arg if its unset 68 | TFLINT_OUTPUT=$(tflint ${dir_args:-} "${args[@]}" 2>&1) 69 | local exit_code=$? 
70 | 71 | if [ $exit_code -ne 0 ]; then 72 | common::colorify "yellow" "TFLint in $dir_path/:" 73 | echo -e "$TFLINT_OUTPUT" 74 | fi 75 | 76 | # return exit code to common::per_dir_hook 77 | return $exit_code 78 | } 79 | 80 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 81 | -------------------------------------------------------------------------------- /hooks/terraform_tfsec.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # Support for setting PATH to repo root. 16 | for i in "${!ARGS[@]}"; do 17 | ARGS[i]=${ARGS[i]/__GIT_WORKING_DIR__/$(pwd)\/} 18 | done 19 | 20 | # Suppress tfsec color 21 | if [ "$PRE_COMMIT_COLOR" = "never" ]; then 22 | ARGS+=("--no-color") 23 | fi 24 | 25 | common::colorify "yellow" "tfsec tool was deprecated, and replaced by trivy. You can check trivy hook here:" 26 | common::colorify "yellow" "https://github.com/antonbabenko/pre-commit-terraform/tree/master#terraform_trivy" 27 | 28 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 29 | } 30 | 31 | ####################################################################### 32 | # Unique part of `common::per_dir_hook`. The function is executed in loop 33 | # on each provided dir path. Run wrapped tool with specified arguments 34 | # Arguments: 35 | # dir_path (string) PATH to dir relative to git repo root. 36 | # Can be used in error logging 37 | # change_dir_in_unique_part (string/false) Modifier which creates 38 | # possibilities to use non-common chdir strategies. 39 | # Availability depends on hook. 
40 | # parallelism_disabled (bool) if true - skip lock mechanism 41 | # args (array) arguments that configure wrapped tool behavior 42 | # tf_path (string) PATH to Terraform/OpenTofu binary 43 | # Outputs: 44 | # If failed - print out hook checks status 45 | ####################################################################### 46 | function per_dir_hook_unique_part { 47 | # shellcheck disable=SC2034 # Unused var. 48 | local -r dir_path="$1" 49 | # shellcheck disable=SC2034 # Unused var. 50 | local -r change_dir_in_unique_part="$2" 51 | # shellcheck disable=SC2034 # Unused var. 52 | local -r parallelism_disabled="$3" 53 | # shellcheck disable=SC2034 # Unused var. 54 | local -r tf_path="$4" 55 | shift 4 56 | local -a -r args=("$@") 57 | 58 | # pass the arguments to hook 59 | tfsec "${args[@]}" 60 | 61 | # return exit code to common::per_dir_hook 62 | local exit_code=$? 63 | return $exit_code 64 | } 65 | 66 | ####################################################################### 67 | # Unique part of `common::per_dir_hook`. The function is executed one time 68 | # in the root git repo 69 | # Arguments: 70 | # args (array) arguments that configure wrapped tool behavior 71 | ####################################################################### 72 | function run_hook_on_whole_repo { 73 | local -a -r args=("$@") 74 | 75 | # pass the arguments to hook 76 | tfsec "$(pwd)" "${args[@]}" 77 | 78 | # return exit code to common::per_dir_hook 79 | local exit_code=$? 80 | return $exit_code 81 | } 82 | 83 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 84 | -------------------------------------------------------------------------------- /hooks/terraform_trivy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . 
"$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # Support for setting PATH to repo root. 16 | for i in "${!ARGS[@]}"; do 17 | ARGS[i]=${ARGS[i]/__GIT_WORKING_DIR__/$(pwd)\/} 18 | done 19 | 20 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 21 | } 22 | 23 | ####################################################################### 24 | # Unique part of `common::per_dir_hook`. The function is executed in loop 25 | # on each provided dir path. Run wrapped tool with specified arguments 26 | # Arguments: 27 | # dir_path (string) PATH to dir relative to git repo root. 28 | # Can be used in error logging 29 | # change_dir_in_unique_part (string/false) Modifier which creates 30 | # possibilities to use non-common chdir strategies. 31 | # Availability depends on hook. 32 | # parallelism_disabled (bool) if true - skip lock mechanism 33 | # args (array) arguments that configure wrapped tool behavior 34 | # tf_path (string) PATH to Terraform/OpenTofu binary 35 | # Outputs: 36 | # If failed - print out hook checks status 37 | ####################################################################### 38 | function per_dir_hook_unique_part { 39 | # shellcheck disable=SC2034 # Unused var. 40 | local -r dir_path="$1" 41 | # shellcheck disable=SC2034 # Unused var. 42 | local -r change_dir_in_unique_part="$2" 43 | # shellcheck disable=SC2034 # Unused var. 44 | local -r parallelism_disabled="$3" 45 | # shellcheck disable=SC2034 # Unused var. 46 | local -r tf_path="$4" 47 | shift 4 48 | local -a -r args=("$@") 49 | 50 | # pass the arguments to hook 51 | trivy conf "$(pwd)" --exit-code=1 "${args[@]}" 52 | 53 | # return exit code to common::per_dir_hook 54 | local exit_code=$? 
55 | return $exit_code 56 | } 57 | 58 | ####################################################################### 59 | # Unique part of `common::per_dir_hook`. The function is executed one time 60 | # in the root git repo 61 | # Arguments: 62 | #   args (array) arguments that configure wrapped tool behavior 63 | ####################################################################### 64 | function run_hook_on_whole_repo { 65 |   local -a -r args=("$@") 66 | 67 |   # pass the arguments to hook. 68 |   # `--exit-code=1` makes trivy exit non-zero on findings (it exits 0 by 69 |   # default), matching the per-dir invocation so the hook can actually fail 70 |   trivy conf "$(pwd)" --exit-code=1 "${args[@]}" 71 | 72 |   # return exit code to common::per_dir_hook 73 |   local exit_code=$? 74 |   return $exit_code 75 | } 76 | 77 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 78 | -------------------------------------------------------------------------------- /hooks/terraform_validate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | # `terraform validate` requires this env variable to be set 11 | export AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION:-us-east-1} 12 | 13 | function main { 14 |   common::initialize "$SCRIPT_DIR" 15 |   common::parse_cmdline "$@" 16 |   common::export_provided_env_vars "${ENV_VARS[@]}" 17 |   common::parse_and_export_env_vars 18 | 19 |   # Suppress terraform validate color 20 |   if [ "$PRE_COMMIT_COLOR" = "never" ]; then 21 |     ARGS+=("-no-color") 22 |   fi 23 |   # shellcheck disable=SC2153 # False positive 24 |   common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 25 | } 26 | 27 | ####################################################################### 28 | # Run `terraform validate` and match errors.
Requires `jq` 29 | # Arguments: 30 | # validate_output (string with json) output of `terraform validate` command 31 | # Outputs: 32 | # Returns integer: 33 | # - 0 (no errors) 34 | # - 1 (matched errors; retry) 35 | # - 2 (no matched errors; do not retry) 36 | ####################################################################### 37 | function match_validate_errors { 38 | local validate_output=$1 39 | 40 | local valid 41 | local summary 42 | 43 | valid=$(jq -rc '.valid' <<< "$validate_output") 44 | 45 | if [ "$valid" == "true" ]; then 46 | return 0 47 | fi 48 | 49 | # Parse error message for retry-able errors. 50 | while IFS= read -r error_message; do 51 | summary=$(jq -rc '.summary' <<< "$error_message") 52 | case $summary in 53 | "missing or corrupted provider plugins") return 1 ;; 54 | "Module source has changed") return 1 ;; 55 | "Module version requirements have changed") return 1 ;; 56 | "Module not installed") return 1 ;; 57 | "Could not load plugin") return 1 ;; 58 | "Missing required provider") return 1 ;; 59 | *"there is no package for"*"cached in .terraform/providers") return 1 ;; 60 | esac 61 | done < <(jq -rc '.diagnostics[]' <<< "$validate_output") 62 | 63 | return 2 # Some other error; don't retry 64 | } 65 | 66 | ####################################################################### 67 | # Unique part of `common::per_dir_hook`. The function is executed in loop 68 | # on each provided dir path. Run wrapped tool with specified arguments 69 | # 1. Check if `.terraform` dir exists and if not - run `terraform init` 70 | # 2. Run `terraform validate` 71 | # 3. If at least 1 check failed - change the exit code to non-zero 72 | # Arguments: 73 | # dir_path (string) PATH to dir relative to git repo root. 74 | # Can be used in error logging 75 | # change_dir_in_unique_part (string/false) Modifier which creates 76 | # possibilities to use non-common chdir strategies. 77 | # Availability depends on hook. 
78 | # parallelism_disabled (bool) if true - skip lock mechanism 79 | # args (array) arguments that configure wrapped tool behavior 80 | # tf_path (string) PATH to Terraform/OpenTofu binary 81 | # Outputs: 82 | # If failed - print out hook checks status 83 | ####################################################################### 84 | function per_dir_hook_unique_part { 85 | local -r dir_path="$1" 86 | # shellcheck disable=SC2034 # Unused var. 87 | local -r change_dir_in_unique_part="$2" 88 | local -r parallelism_disabled="$3" 89 | local -r tf_path="$4" 90 | shift 4 91 | local -a -r args=("$@") 92 | 93 | local exit_code 94 | # 95 | # Get hook settings 96 | # 97 | local retry_once_with_cleanup 98 | 99 | IFS=";" read -r -a configs <<< "${HOOK_CONFIG[*]}" 100 | 101 | for c in "${configs[@]}"; do 102 | 103 | IFS="=" read -r -a config <<< "$c" 104 | key=${config[0]} 105 | value=${config[1]} 106 | 107 | case $key in 108 | --retry-once-with-cleanup) 109 | if [ "$retry_once_with_cleanup" ]; then 110 | common::colorify "yellow" 'Invalid hook config. Make sure that you specify not more than one "--retry-once-with-cleanup" flag' 111 | exit 1 112 | fi 113 | retry_once_with_cleanup=$value 114 | ;; 115 | esac 116 | done 117 | 118 | # First try `terraform validate` with the hope that all deps are 119 | # pre-installed. That is needed for cases when `.terraform/modules` 120 | # or `.terraform/providers` missed AND that is expected. 121 | "$tf_path" validate "${args[@]}" &> /dev/null && { 122 | exit_code=$? 123 | return $exit_code 124 | } 125 | 126 | # In case `terraform validate` failed to execute 127 | # - check is simple `terraform init` will help 128 | common::terraform_init "$tf_path validate" "$dir_path" "$parallelism_disabled" "$tf_path" || { 129 | exit_code=$? 130 | return $exit_code 131 | } 132 | 133 | if [ "$retry_once_with_cleanup" != "true" ]; then 134 | # terraform validate only 135 | validate_output=$("$tf_path" validate "${args[@]}" 2>&1) 136 | exit_code=$? 
137 | else 138 | # terraform validate, plus capture possible errors 139 | validate_output=$("$tf_path" validate -json "${args[@]}" 2>&1) 140 | exit_code=$? 141 | 142 | # Match specific validation errors 143 | local -i validate_errors_matched 144 | match_validate_errors "$validate_output" 145 | validate_errors_matched=$? 146 | 147 | # Errors matched; Retry validation 148 | if [ "$validate_errors_matched" -eq 1 ]; then 149 | common::colorify "yellow" "Validation failed. Removing cached providers and modules from \"$dir_path/.terraform\" directory" 150 | # `.terraform` dir may comprise some extra files, like `environment` 151 | # which stores info about current TF workspace, so we can't just remove 152 | # `.terraform` dir completely. 153 | rm -rf .terraform/{modules,providers}/ 154 | 155 | common::colorify "yellow" "Re-validating: $dir_path" 156 | 157 | common::terraform_init "$tf_path validate" "$dir_path" "$parallelism_disabled" "$tf_path" || { 158 | exit_code=$? 159 | return $exit_code 160 | } 161 | 162 | validate_output=$("$tf_path" validate "${args[@]}" 2>&1) 163 | exit_code=$? 164 | fi 165 | fi 166 | 167 | if [ $exit_code -ne 0 ]; then 168 | common::colorify "red" "Validation failed: $dir_path" 169 | echo -e "$validate_output\n\n" 170 | fi 171 | 172 | # return exit code to common::per_dir_hook 173 | return $exit_code 174 | } 175 | 176 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 177 | -------------------------------------------------------------------------------- /hooks/terragrunt_fmt.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . 
"$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # JFYI: `terragrunt hcl format` color already suppressed via PRE_COMMIT_COLOR=never 16 | 17 | if common::terragrunt_version_ge_0.78; then 18 | local -ra SUBCOMMAND=(hcl format) 19 | else 20 | local -ra SUBCOMMAND=(hclfmt) 21 | fi 22 | 23 | # shellcheck disable=SC2153 # False positive 24 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 25 | } 26 | 27 | ####################################################################### 28 | # Unique part of `common::per_dir_hook`. The function is executed in loop 29 | # on each provided dir path. Run wrapped tool with specified arguments 30 | # Arguments: 31 | # dir_path (string) PATH to dir relative to git repo root. 32 | # Can be used in error logging 33 | # change_dir_in_unique_part (string/false) Modifier which creates 34 | # possibilities to use non-common chdir strategies. 35 | # Availability depends on hook. 36 | # parallelism_disabled (bool) if true - skip lock mechanism 37 | # args (array) arguments that configure wrapped tool behavior 38 | # tf_path (string) PATH to Terraform/OpenTofu binary 39 | # Outputs: 40 | # If failed - print out hook checks status 41 | ####################################################################### 42 | function per_dir_hook_unique_part { 43 | # shellcheck disable=SC2034 # Unused var. 44 | local -r dir_path="$1" 45 | # shellcheck disable=SC2034 # Unused var. 46 | local -r change_dir_in_unique_part="$2" 47 | # shellcheck disable=SC2034 # Unused var. 48 | local -r parallelism_disabled="$3" 49 | # shellcheck disable=SC2034 # Unused var. 
50 | local -r tf_path="$4" 51 | shift 4 52 | local -a -r args=("$@") 53 | 54 | # pass the arguments to hook 55 | terragrunt "${SUBCOMMAND[@]}" "${args[@]}" 56 | 57 | # return exit code to common::per_dir_hook 58 | local exit_code=$? 59 | return $exit_code 60 | } 61 | 62 | ####################################################################### 63 | # Unique part of `common::per_dir_hook`. The function is executed one time 64 | # in the root git repo 65 | # Arguments: 66 | # args (array) arguments that configure wrapped tool behavior 67 | ####################################################################### 68 | function run_hook_on_whole_repo { 69 | local -a -r args=("$@") 70 | 71 | # pass the arguments to hook 72 | terragrunt "${SUBCOMMAND[@]}" "$(pwd)" "${args[@]}" 73 | 74 | # return exit code to common::per_dir_hook 75 | local exit_code=$? 76 | return $exit_code 77 | } 78 | 79 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 80 | -------------------------------------------------------------------------------- /hooks/terragrunt_providers_lock.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . 
"$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # JFYI: terragrunt providers lock color already suppressed via PRE_COMMIT_COLOR=never 16 | 17 | if common::terragrunt_version_ge_0.78; then 18 | local -ra RUN_ALL_SUBCOMMAND=(run --all providers lock) 19 | else 20 | local -ra RUN_ALL_SUBCOMMAND=(run-all providers lock) 21 | fi 22 | 23 | # shellcheck disable=SC2153 # False positive 24 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 25 | } 26 | 27 | ####################################################################### 28 | # Unique part of `common::per_dir_hook`. The function is executed in loop 29 | # on each provided dir path. Run wrapped tool with specified arguments 30 | # Arguments: 31 | # dir_path (string) PATH to dir relative to git repo root. 32 | # Can be used in error logging 33 | # change_dir_in_unique_part (string/false) Modifier which creates 34 | # possibilities to use non-common chdir strategies. 35 | # Availability depends on hook. 36 | # parallelism_disabled (bool) if true - skip lock mechanism 37 | # args (array) arguments that configure wrapped tool behavior 38 | # tf_path (string) PATH to Terraform/OpenTofu binary 39 | # Outputs: 40 | # If failed - print out hook checks status 41 | ####################################################################### 42 | function per_dir_hook_unique_part { 43 | # shellcheck disable=SC2034 # Unused var. 44 | local -r dir_path="$1" 45 | # shellcheck disable=SC2034 # Unused var. 46 | local -r change_dir_in_unique_part="$2" 47 | # shellcheck disable=SC2034 # Unused var. 48 | local -r parallelism_disabled="$3" 49 | # shellcheck disable=SC2034 # Unused var. 
50 | local -r tf_path="$4" 51 | shift 4 52 | local -a -r args=("$@") 53 | 54 | # pass the arguments to hook 55 | terragrunt providers lock "${args[@]}" 56 | 57 | # return exit code to common::per_dir_hook 58 | local exit_code=$? 59 | return $exit_code 60 | } 61 | 62 | ####################################################################### 63 | # Unique part of `common::per_dir_hook`. The function is executed one time 64 | # in the root git repo 65 | # Arguments: 66 | # args (array) arguments that configure wrapped tool behavior 67 | ####################################################################### 68 | function run_hook_on_whole_repo { 69 | local -a -r args=("$@") 70 | 71 | # pass the arguments to hook 72 | terragrunt "${RUN_ALL_SUBCOMMAND[@]}" "${args[@]}" 73 | 74 | # return exit code to common::per_dir_hook 75 | local exit_code=$? 76 | return $exit_code 77 | } 78 | 79 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 80 | -------------------------------------------------------------------------------- /hooks/terragrunt_validate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . 
"$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # JFYI: terragrunt validate color already suppressed via PRE_COMMIT_COLOR=never 16 | 17 | if common::terragrunt_version_ge_0.78; then 18 | local -ra RUN_ALL_SUBCOMMAND=(run --all validate) 19 | else 20 | local -ra RUN_ALL_SUBCOMMAND=(run-all validate) 21 | fi 22 | 23 | # shellcheck disable=SC2153 # False positive 24 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 25 | } 26 | 27 | ####################################################################### 28 | # Unique part of `common::per_dir_hook`. The function is executed in loop 29 | # on each provided dir path. Run wrapped tool with specified arguments 30 | # Arguments: 31 | # dir_path (string) PATH to dir relative to git repo root. 32 | # Can be used in error logging 33 | # change_dir_in_unique_part (string/false) Modifier which creates 34 | # possibilities to use non-common chdir strategies. 35 | # Availability depends on hook. 36 | # parallelism_disabled (bool) if true - skip lock mechanism 37 | # args (array) arguments that configure wrapped tool behavior 38 | # tf_path (string) PATH to Terraform/OpenTofu binary 39 | # Outputs: 40 | # If failed - print out hook checks status 41 | ####################################################################### 42 | function per_dir_hook_unique_part { 43 | # shellcheck disable=SC2034 # Unused var. 44 | local -r dir_path="$1" 45 | # shellcheck disable=SC2034 # Unused var. 46 | local -r change_dir_in_unique_part="$2" 47 | # shellcheck disable=SC2034 # Unused var. 48 | local -r parallelism_disabled="$3" 49 | # shellcheck disable=SC2034 # Unused var. 
50 | local -r tf_path="$4" 51 | shift 4 52 | local -a -r args=("$@") 53 | 54 | # pass the arguments to hook 55 | terragrunt validate "${args[@]}" 56 | 57 | # return exit code to common::per_dir_hook 58 | local exit_code=$? 59 | return $exit_code 60 | } 61 | 62 | ####################################################################### 63 | # Unique part of `common::per_dir_hook`. The function is executed one time 64 | # in the root git repo 65 | # Arguments: 66 | # args (array) arguments that configure wrapped tool behavior 67 | ####################################################################### 68 | function run_hook_on_whole_repo { 69 | local -a -r args=("$@") 70 | 71 | # pass the arguments to hook 72 | terragrunt "${RUN_ALL_SUBCOMMAND[@]}" "${args[@]}" 73 | 74 | # return exit code to common::per_dir_hook 75 | local exit_code=$? 76 | return $exit_code 77 | } 78 | 79 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 80 | -------------------------------------------------------------------------------- /hooks/terragrunt_validate_inputs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . 
"$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # JFYI: terragrunt validate color already suppressed via PRE_COMMIT_COLOR=never 16 | 17 | if common::terragrunt_version_ge_0.78; then 18 | local -ra SUBCOMMAND=(hcl validate --inputs) 19 | local -ra RUN_ALL_SUBCOMMAND=(run --all hcl validate --inputs) 20 | else 21 | local -ra SUBCOMMAND=(validate-inputs) 22 | local -ra RUN_ALL_SUBCOMMAND=(run-all validate-inputs) 23 | fi 24 | 25 | # shellcheck disable=SC2153 # False positive 26 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 27 | } 28 | 29 | ####################################################################### 30 | # Unique part of `common::per_dir_hook`. The function is executed in loop 31 | # on each provided dir path. Run wrapped tool with specified arguments 32 | # Arguments: 33 | # dir_path (string) PATH to dir relative to git repo root. 34 | # Can be used in error logging 35 | # change_dir_in_unique_part (string/false) Modifier which creates 36 | # possibilities to use non-common chdir strategies. 37 | # Availability depends on hook. 38 | # parallelism_disabled (bool) if true - skip lock mechanism 39 | # args (array) arguments that configure wrapped tool behavior 40 | # tf_path (string) PATH to Terraform/OpenTofu binary 41 | # Outputs: 42 | # If failed - print out hook checks status 43 | ####################################################################### 44 | function per_dir_hook_unique_part { 45 | # shellcheck disable=SC2034 # Unused var. 46 | local -r dir_path="$1" 47 | # shellcheck disable=SC2034 # Unused var. 48 | local -r change_dir_in_unique_part="$2" 49 | # shellcheck disable=SC2034 # Unused var. 50 | local -r parallelism_disabled="$3" 51 | # shellcheck disable=SC2034 # Unused var. 
52 | local -r tf_path="$4" 53 | shift 4 54 | local -a -r args=("$@") 55 | 56 | # pass the arguments to hook 57 | terragrunt "${SUBCOMMAND[@]}" "${args[@]}" 58 | 59 | # return exit code to common::per_dir_hook 60 | local exit_code=$? 61 | return $exit_code 62 | } 63 | 64 | ####################################################################### 65 | # Unique part of `common::per_dir_hook`. The function is executed one time 66 | # in the root git repo 67 | # Arguments: 68 | # args (array) arguments that configure wrapped tool behavior 69 | ####################################################################### 70 | function run_hook_on_whole_repo { 71 | local -a -r args=("$@") 72 | 73 | # pass the arguments to hook 74 | terragrunt "${RUN_ALL_SUBCOMMAND[@]}" "${args[@]}" 75 | 76 | # return exit code to common::per_dir_hook 77 | local exit_code=$? 78 | return $exit_code 79 | } 80 | 81 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 82 | -------------------------------------------------------------------------------- /hooks/terrascan.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # JFYI: terrascan color already suppressed via PRE_COMMIT_COLOR=never 16 | 17 | # shellcheck disable=SC2153 # False positive 18 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 19 | } 20 | 21 | ####################################################################### 22 | # Unique part of `common::per_dir_hook`. The function is executed in loop 23 | # on each provided dir path. 
Run wrapped tool with specified arguments 24 | # Arguments: 25 | # dir_path (string) PATH to dir relative to git repo root. 26 | # Can be used in error logging 27 | # change_dir_in_unique_part (string/false) Modifier which creates 28 | # possibilities to use non-common chdir strategies. 29 | # Availability depends on hook. 30 | # parallelism_disabled (bool) if true - skip lock mechanism 31 | # args (array) arguments that configure wrapped tool behavior 32 | # tf_path (string) PATH to Terraform/OpenTofu binary 33 | # Outputs: 34 | # If failed - print out hook checks status 35 | ####################################################################### 36 | function per_dir_hook_unique_part { 37 | # shellcheck disable=SC2034 # Unused var. 38 | local -r dir_path="$1" 39 | # shellcheck disable=SC2034 # Unused var. 40 | local -r change_dir_in_unique_part="$2" 41 | # shellcheck disable=SC2034 # Unused var. 42 | local -r parallelism_disabled="$3" 43 | # shellcheck disable=SC2034 # Unused var. 44 | local -r tf_path="$4" 45 | shift 4 46 | local -a -r args=("$@") 47 | 48 | # pass the arguments to hook 49 | terrascan scan -i terraform "${args[@]}" 50 | 51 | # return exit code to common::per_dir_hook 52 | local exit_code=$? 53 | return $exit_code 54 | } 55 | 56 | ####################################################################### 57 | # Unique part of `common::per_dir_hook`. The function is executed one time 58 | # in the root git repo 59 | # Arguments: 60 | # args (array) arguments that configure wrapped tool behavior 61 | ####################################################################### 62 | function run_hook_on_whole_repo { 63 | local -a -r args=("$@") 64 | 65 | # pass the arguments to hook 66 | terrascan scan -i terraform "${args[@]}" 67 | 68 | # return exit code to common::per_dir_hook 69 | local exit_code=$? 
70 | return $exit_code 71 | } 72 | 73 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 74 | -------------------------------------------------------------------------------- /hooks/tfupdate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | # globals variables 5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 6 | readonly SCRIPT_DIR 7 | # shellcheck source=_common.sh 8 | . "$SCRIPT_DIR/_common.sh" 9 | 10 | function main { 11 | common::initialize "$SCRIPT_DIR" 12 | common::parse_cmdline "$@" 13 | common::export_provided_env_vars "${ENV_VARS[@]}" 14 | common::parse_and_export_env_vars 15 | # JFYI: suppress color for `tfupdate` is N/A` 16 | 17 | # Prevent PASSED scenarios for things like: 18 | # - --args=--version '~> 4.2.0' 19 | # - --args=provider aws 20 | # shellcheck disable=SC2153 # False positive 21 | if ! [[ ${ARGS[0]} =~ ^[a-z] ]]; then 22 | common::colorify 'red' "Check the hook args order in .pre-commit.config.yaml." 23 | common::colorify 'red' "Current command looks like:" 24 | common::colorify 'red' "tfupdate ${ARGS[*]}" 25 | exit 1 26 | fi 27 | 28 | # shellcheck disable=SC2153 # False positive 29 | common::per_dir_hook "$HOOK_ID" "${#ARGS[@]}" "${ARGS[@]}" "${FILES[@]}" 30 | } 31 | ####################################################################### 32 | # Unique part of `common::per_dir_hook`. The function is executed in loop 33 | # on each provided dir path. Run wrapped tool with specified arguments 34 | # Arguments: 35 | # dir_path (string) PATH to dir relative to git repo root. 36 | # Can be used in error logging 37 | # change_dir_in_unique_part (string/false) Modifier which creates 38 | # possibilities to use non-common chdir strategies. 39 | # Availability depends on hook. 
40 | # parallelism_disabled (bool) if true - skip lock mechanism 41 | # args (array) arguments that configure wrapped tool behavior 42 | # tf_path (string) PATH to Terraform/OpenTofu binary 43 | # Outputs: 44 | # If failed - print out hook checks status 45 | ####################################################################### 46 | function per_dir_hook_unique_part { 47 | # shellcheck disable=SC2034 # Unused var. 48 | local -r dir_path="$1" 49 | # shellcheck disable=SC2034 # Unused var. 50 | local -r change_dir_in_unique_part="$2" 51 | # shellcheck disable=SC2034 # Unused var. 52 | local -r parallelism_disabled="$3" 53 | # shellcheck disable=SC2034 # Unused var. 54 | local -r tf_path="$4" 55 | shift 4 56 | local -a -r args=("$@") 57 | 58 | # pass the arguments to hook 59 | tfupdate "${args[@]}" . 60 | 61 | # return exit code to common::per_dir_hook 62 | local exit_code=$? 63 | return $exit_code 64 | } 65 | 66 | ####################################################################### 67 | # Unique part of `common::per_dir_hook`. The function is executed one time 68 | # in the root git repo 69 | # Arguments: 70 | # args (array) arguments that configure wrapped tool behavior 71 | ####################################################################### 72 | function run_hook_on_whole_repo { 73 | local -a -r args=("$@") 74 | 75 | # pass the arguments to hook 76 | tfupdate "${args[@]}" --recursive . 77 | 78 | # return exit code to common::per_dir_hook 79 | local exit_code=$? 
80 | return $exit_code 81 | } 82 | 83 | [ "${BASH_SOURCE[0]}" != "$0" ] || main "$@" 84 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | 'hatchling', 4 | 'hatch-vcs', # setuptools-scm 5 | ] 6 | build-backend = 'hatchling.build' 7 | 8 | [dependency-groups] 9 | building = [ 10 | 'build', 11 | ] 12 | linting = [ 13 | 'pre-commit', 14 | ] 15 | testing = [ 16 | 'covdefaults', # sets up `coveragepy` config boilerplate 17 | 'pytest >= 8', 18 | 'pytest-cov', # integrates `coveragepy` into pytest runs 19 | 'pytest-mock', # provides a `mocker` fixture 20 | 'pytest-xdist', # paralellizes tests through subprocesses 21 | ] 22 | upstreaming = [ 23 | 'twine', 24 | ] 25 | 26 | [project] 27 | name = 'pre-commit-terraform' 28 | classifiers = [ 29 | 'License :: OSI Approved :: MIT License', 30 | 'Programming Language :: Python :: 3', 31 | 'Programming Language :: Python :: 3 :: Only', 32 | 'Programming Language :: Python :: 3.9', 33 | 'Programming Language :: Python :: 3.10', 34 | 'Programming Language :: Python :: 3.11', 35 | 'Programming Language :: Python :: 3.12', 36 | 'Programming Language :: Python :: 3.13', 37 | 'Programming Language :: Python :: Implementation :: CPython', 38 | 'Programming Language :: Python :: Implementation :: PyPy', 39 | ] 40 | description = 'Pre-commit hooks for Terraform, OpenTofu, Terragrunt and related tools' 41 | dependencies = [] 42 | dynamic = [ 43 | 'urls', 44 | 'version', 45 | ] 46 | requires-python = ">= 3.9" 47 | 48 | [[project.authors]] 49 | name = 'Anton Babenko' 50 | email = 'anton@antonbabenko.com' 51 | 52 | [[project.authors]] 53 | name = 'Contributors' 54 | 55 | [[project.maintainers]] 56 | name = 'Maksym Vlasov' 57 | 58 | [[project.maintainers]] 59 | name = 'George L. 
Yermulnik' 60 | email = 'yz@yz.kiev.ua' 61 | 62 | [project.readme] 63 | file = 'README.md' 64 | content-type = 'text/markdown' 65 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = 3 |   # `pytest-xdist`: 4 |   --numprocesses=auto 5 |   # NOTE: the plugin is disabled because it's slower with so few tests 6 |   --numprocesses=0 7 | 8 |   # Show 10 slowest invocations: 9 |   --durations=10 10 | 11 |   # Report all the things == -rxXs: 12 |   -ra 13 | 14 |   # Show values of the local vars in errors/tracebacks: 15 |   --showlocals 16 | 17 |   # Autocollect and invoke the doctests from all modules: 18 |   # https://docs.pytest.org/en/stable/doctest.html 19 |   --doctest-modules 20 | 21 |   # Pre-load the `pytest-cov` plugin early: 22 |   -p pytest_cov 23 | 24 |   # `pytest-cov`: 25 |   --cov 26 |   --cov-config=.coveragerc 27 |   --cov-context=test 28 |   --no-cov-on-fail 29 | 30 |   # Fail on config parsing warnings: 31 |   # --strict-config 32 | 33 |   # Fail on non-existing markers: 34 |   # * Deprecated since v6.2.0 but may be reintroduced later covering a 35 |   #   broader scope: 36 |   # --strict 37 |   # * Exists since v4.5.0 (advised to be used instead of `--strict`): 38 |   --strict-markers 39 | 40 | doctest_optionflags = ALLOW_UNICODE ELLIPSIS 41 | 42 | # Marks tests with an empty parameterset as xfail(run=False) 43 | empty_parameter_set_mark = xfail 44 | 45 | faulthandler_timeout = 30 46 | # Turn all warnings into errors 47 | filterwarnings = 48 |   error 49 | 50 | # https://docs.pytest.org/en/stable/usage.html#creating-junitxml-format-files 51 | junit_duration_report = call 52 | # xunit1 contains more metadata than xunit2 so it's better for CI UIs: 53 | junit_family = xunit1 54 | junit_logging = all 55 | junit_log_passing_tests = true 56 | junit_suite_name = pre_commit_terraform_test_suite 57 | 58 | # A mapping of markers to their descriptions allowed in strict mode:
markers = 60 | 61 | minversion = 6.1.0 62 | 63 | # Optimize pytest's lookup by restricting potentially deep dir tree scan: 64 | norecursedirs = 65 | build 66 | dependencies 67 | dist 68 | docs 69 | .cache 70 | .eggs 71 | .git 72 | .github 73 | .tox 74 | *.egg 75 | *.egg-info 76 | */*.egg-info 77 | */**/*.egg-info 78 | *.dist-info 79 | */*.dist-info 80 | */**/*.dist-info 81 | 82 | testpaths = tests/pytest/ 83 | 84 | xfail_strict = true 85 | -------------------------------------------------------------------------------- /ruff.toml: -------------------------------------------------------------------------------- 1 | # Assume Python 3.9 2 | target-version = "py39" 3 | 4 | line-length = 79 # To decrease PR diff size 5 | 6 | namespace-packages = ["src/pre_commit_terraform/", "tests/pytest/"] 7 | 8 | [format] 9 | quote-style = "single" 10 | 11 | [lint.flake8-quotes] 12 | inline-quotes = "single" 13 | 14 | [lint.pydocstyle] 15 | convention = "pep257" 16 | 17 | [lint] 18 | select = ["ALL"] 19 | preview = true 20 | ignore = [ 21 | "CPY001", # Skip copyright notice requirement at top of files 22 | ] 23 | 24 | [lint.isort] 25 | # force-single-line = true # To decrease PR diff size 26 | lines-after-imports = 2 27 | 28 | [lint.flake8-pytest-style] 29 | parametrize-values-type = "tuple" 30 | 31 | [lint.per-file-ignores] 32 | # Exceptions for test files 33 | "tests/**.py" = [ 34 | "S101", # Allow use of `assert` in test files 35 | "PLC2701", # Allow importing internal files needed for testing 36 | "PLR6301", # Allow 'self' parameter in method definitions (required for test stubs) 37 | "ARG002", # Allow unused arguments in instance methods (required for test stubs) 38 | "S404", # Allow importing 'subprocess' module to testing call external tools needed by these hooks 39 | 40 | ] 41 | -------------------------------------------------------------------------------- /src/pre_commit_terraform/README.md: -------------------------------------------------------------------------------- 1 
When the Git repository is `pip install`ed, this [import package] becomes
available for use within the respective Python interpreter instance. It can be
imported and sub-modules can be imported through the dot-syntax.
41 | 42 | ## Manual testing 43 | 44 | Usually, having a development virtualenv where you `pip install -e .` is enough 45 | to make it possible to invoke the CLI app. Do so first. Most source code 46 | updates do not require running it again. But sometimes, it's needed. 47 | 48 | Once done, you can run `python -m pre_commit_terraform` and/or 49 | `python -m pre_commit_terraform subcommand-x` to see how it behaves. There's 50 | `--help` and all other typical conventions one would usually expect from a 51 | POSIX-inspired CLI app. 52 | 53 | ## DX/UX considerations 54 | 55 | Since it's an app that can be executed outside the [`pre-commit` framework], 56 | it is useful to check out and follow these [CLI guidelines][clig]. 57 | 58 | ## Subcommand development 59 | 60 | `populate_argument_parser()` accepts a regular instance of 61 | [`argparse.ArgumentParser`]. Call its methods to extend the CLI arguments that 62 | would be specific for the subcommand you are creating. Those arguments will be 63 | available later, as an argument to the `invoke_cli_app()` function — through an 64 | instance of [`argparse.Namespace`]. For the `CLI_SUBCOMMAND_NAME` constant, 65 | choose `kebab-space-sub-command-style`, it does not need to be `snake_case`. 66 | 67 | Make sure to return a `ReturnCode` instance or an integer from 68 | `invoke_cli_app()`. Returning a non-zero value will result in the CLI app 69 | exiting with a return code typically interpreted as an error while zero means 70 | success. You can `import errno` to use typical POSIX error codes through their 71 | human-readable identifiers. 72 | 73 | Another way to interrupt the CLI app control flow is by raising an instance of 74 | one of the in-app errors. `raise PreCommitTerraformExit` for a successful exit, 75 | but it can be turned into an error outcome via 76 | `raise PreCommitTerraformExit(1)`. 77 | `raise PreCommitTerraformRuntimeError('The world is broken')` to indicate 78 | problems within the runtime. 
def invoke_cli_app(cli_args: list[str]) -> ReturnCodeType:
    """Run the entry-point of the CLI app.

    Includes initializing parsers of all the sub-apps and
    choosing what to execute.

    Args:
        cli_args: Command-line arguments to parse, excluding the
            program name (i.e. ``sys.argv[1:]``).

    Returns:
        ReturnCodeType: The return code of the app.

    Raises:
        PreCommitTerraformExit: If the app is exiting with error.
    """
    root_cli_parser = initialize_argument_parser()
    parsed_cli_args = root_cli_parser.parse_args(cli_args)
    # The subcommand's entry point was stashed on the namespace by
    # `set_defaults()` during parser initialization. The local is named
    # `chosen_subcommand_app` (not `invoke_cli_app`) to avoid shadowing
    # this very function's name.
    chosen_subcommand_app = cast_to(
        # FIXME: attempt typing per https://stackoverflow.com/a/75666611/595220 # noqa: TD001, TD002, FIX001, E501 All these suppressions caused by "FIXME" comment
        'CLIAppEntryPointCallableType',
        parsed_cli_args.invoke_cli_app,
    )

    try:
        return chosen_subcommand_app(parsed_cli_args)
    except PreCommitTerraformExit as exit_err:
        # A deliberate exit from deep app layers; report and re-raise so
        # the interpreter terminates with the embedded return code.
        # T201 - FIXME here and below - we will replace 'print' with
        # logging later
        print(f'App exiting: {exit_err!s}', file=sys.stderr)  # noqa: T201
        raise
    except PreCommitTerraformRuntimeError as unhandled_exc:
        print(  # noqa: T201
            f'App execution took an unexpected turn: {unhandled_exc!s}. '
            'Exiting...',
            file=sys.stderr,
        )
        return ReturnCode.ERROR
    except PreCommitTerraformBaseError as unhandled_exc:
        # Catch-all for any other in-app error so end-users never see a
        # raw traceback.
        print(  # noqa: T201
            f'A surprising exception happened: {unhandled_exc!s}. Exiting...',
            file=sys.stderr,
        )
        return ReturnCode.ERROR
    except KeyboardInterrupt as ctrl_c_exc:
        # Ctrl-C is an expected way to stop the app; exit cleanly with an
        # error status instead of dumping a traceback.
        print(  # noqa: T201
            f'User-initiated interrupt: {ctrl_c_exc!s}. Exiting...',
            file=sys.stderr,
        )
        return ReturnCode.ERROR
5 | """ 6 | 7 | from argparse import ArgumentParser 8 | 9 | from ._cli_subcommands import SUBCOMMAND_MODULES 10 | 11 | 12 | def attach_subcommand_parsers_to(root_cli_parser: ArgumentParser, /) -> None: 13 | """Connect all sub-command parsers to the given one. 14 | 15 | This functions iterates over a mapping of subcommands to their 16 | respective population functions, executing them to augment the 17 | main parser. 18 | """ 19 | subcommand_parsers = root_cli_parser.add_subparsers( 20 | dest='check_name', 21 | help='A check to be performed.', 22 | required=True, 23 | ) 24 | for subcommand_module in SUBCOMMAND_MODULES: 25 | subcommand_parser = subcommand_parsers.add_parser( 26 | subcommand_module.CLI_SUBCOMMAND_NAME, 27 | ) 28 | subcommand_parser.set_defaults( 29 | invoke_cli_app=subcommand_module.invoke_cli_app, 30 | ) 31 | subcommand_module.populate_argument_parser(subcommand_parser) 32 | 33 | 34 | def initialize_argument_parser() -> ArgumentParser: 35 | """Return the root argument parser with sub-commands. 36 | 37 | Returns: 38 | ArgumentParser: The root parser with sub-commands attached. 39 | """ 40 | root_cli_parser = ArgumentParser(prog=f'python -m {__package__!s}') 41 | attach_subcommand_parsers_to(root_cli_parser) 42 | return root_cli_parser 43 | 44 | 45 | __all__ = ('initialize_argument_parser',) 46 | -------------------------------------------------------------------------------- /src/pre_commit_terraform/_cli_subcommands.py: -------------------------------------------------------------------------------- 1 | """A CLI sub-commands organization module.""" 2 | 3 | from . 
class PreCommitTerraformBaseError(Exception):
    """Base exception for all the in-app errors.

    The CLI layer intercepts this type so end-users are not shown raw
    tracebacks for anticipated failure modes.
    """


class PreCommitTerraformRuntimeError(
    PreCommitTerraformBaseError,
    RuntimeError,
):
    """An exception representing a runtime error condition."""


# N818 - The name mimics the built-in SystemExit and is meant to have exactly
# the same semantics. For this reason, it shouldn't have Error in the name to
# maintain resemblance.
class PreCommitTerraformExit(PreCommitTerraformBaseError, SystemExit):  # noqa: N818
    """An exception for terminating execution from deep app layers."""
10 | """ 11 | 12 | OK = 0 13 | ERROR = 1 14 | 15 | 16 | __all__ = ('ReturnCode',) 17 | -------------------------------------------------------------------------------- /src/pre_commit_terraform/_types.py: -------------------------------------------------------------------------------- 1 | """Composite types for annotating in-project code.""" 2 | 3 | from argparse import ArgumentParser, Namespace 4 | from collections.abc import Callable 5 | from typing import Protocol, Union 6 | 7 | from ._structs import ReturnCode 8 | 9 | 10 | ReturnCodeType = Union[ReturnCode, int] # Union instead of pipe for Python 3.9 11 | CLIAppEntryPointCallableType = Callable[[Namespace], ReturnCodeType] 12 | 13 | 14 | class CLISubcommandModuleProtocol(Protocol): 15 | """A protocol for the subcommand-implementing module shape.""" 16 | 17 | CLI_SUBCOMMAND_NAME: str 18 | """This constant contains a CLI.""" 19 | 20 | def populate_argument_parser( 21 | self, 22 | subcommand_parser: ArgumentParser, 23 | ) -> None: 24 | """Run a module hook for populating the subcommand parser.""" 25 | 26 | def invoke_cli_app(self, parsed_cli_args: Namespace) -> ReturnCodeType: 27 | """Run a module hook implementing the subcommand logic.""" 28 | ... # pylint: disable=unnecessary-ellipsis 29 | 30 | 31 | __all__ = ('CLISubcommandModuleProtocol', 'ReturnCodeType') 32 | -------------------------------------------------------------------------------- /src/pre_commit_terraform/terraform_docs_replace.py: -------------------------------------------------------------------------------- 1 | """Terraform Docs Replace Hook. 2 | 3 | This hook is deprecated and will be removed in the future. 4 | Please, use 'terraform_docs' hook instead. 5 | """ 6 | 7 | import os 8 | 9 | # S404 - Allow importing 'subprocess' module to call external tools 10 | # needed by these hooks. 
def populate_argument_parser(subcommand_parser: ArgumentParser) -> None:
    """Populate the parser for the subcommand.

    Registers the `replace-docs` description and all of its CLI
    arguments on the given parser.
    """
    subcommand_parser.description = (
        'Run terraform-docs on a set of files. Follows the standard '
        'convention of pulling the documentation from main.tf in order to '
        'replace the entire README.md file each time.'
    )
    # Declarative table of (name, kwargs) keeps the registrations uniform;
    # order is preserved so `--help` output stays the same.
    argument_specs = (
        ('--dest', {'dest': 'dest', 'default': 'README.md'}),
        (
            '--sort-inputs-by-required',
            {
                'dest': 'sort',
                'action': 'store_true',
                'help': '[deprecated] use --sort-by-required instead',
            },
        ),
        (
            '--sort-by-required',
            {'dest': 'sort', 'action': 'store_true'},
        ),
        (
            '--with-aggregate-type-defaults',
            {
                'dest': 'aggregate',
                'action': 'store_true',
                'help': '[deprecated]',
            },
        ),
        ('filenames', {'nargs': '*', 'help': 'Filenames to check.'}),
    )
    for argument_name, argument_kwargs in argument_specs:
        subcommand_parser.add_argument(argument_name, **argument_kwargs)
def invoke_cli_app(parsed_cli_args: Namespace) -> ReturnCodeType:
    """Run the entry-point of the CLI app.

    Emits a deprecation warning, collects the unique directories of the
    passed `.tf`/`.tfvars` files and runs `terraform-docs` once per
    directory, redirecting the output into the `--dest` file.

    Returns:
        ReturnCodeType: The return code of the app.
    """
    warnings.warn(
        '`terraform_docs_replace` hook is DEPRECATED.'
        'For migration instructions see '
        'https://github.com/antonbabenko/pre-commit-terraform/issues/248'
        '#issuecomment-1290829226',
        category=UserWarning,
        stacklevel=1,  # It should be 2, but tests are failing w/ values >1.
        # As it's deprecated hook, it's safe to leave it as is w/o fixing it.
    )

    dirs: list[str] = []
    for filename in cast_to('list[str]', parsed_cli_args.filenames):
        # FIX: compare the same value that gets stored (the dirname).
        # The original compared `os.path.realpath(filename)` (an absolute
        # file path) against stored dirnames, so the deduplication never
        # matched and terraform-docs could run repeatedly on one dir.
        # PTH120 - It should use 'pathlib', but this hook is deprecated and
        # we don't want to spent time on testing fixes for it
        directory = os.path.dirname(filename)  # noqa: PTH120
        if directory not in dirs and filename.endswith(('.tf', '.tfvars')):
            dirs.append(directory)

    retval = ReturnCode.OK

    for directory in dirs:
        try:
            proc_args = ['terraform-docs']
            if cast_to('bool', parsed_cli_args.sort):
                proc_args.append('--sort-by-required')
            proc_args.extend(
                (
                    'md',
                    f'./{directory}',
                    '>',
                    './{dir}/{dest}'.format(
                        dir=directory,
                        dest=cast_to('str', parsed_cli_args.dest),
                    ),
                ),
            )
            # S602 - 'shell=True' is insecure, but this hook is deprecated and
            # we don't want to spent time on testing fixes for it
            subprocess.check_call(' '.join(proc_args), shell=True)  # noqa: S602
        # PERF203 - try-except shouldn't be in a loop, but it's deprecated
        # hook, so leave as is
        except subprocess.CalledProcessError as e:  # noqa: PERF203
            # T201 - Leave print statement as is, as this is deprecated hook
            print(e)  # noqa: T201
            retval = ReturnCode.ERROR
    return retval
# Run TEST_COMMAND TEST_NUM times inside TEST_DIR, appending timing output
# of each run to tests/results/<file>.
# Arguments:
#   $1 - number of runs
#   $2 - directory to run the command in
#   $3 - command to benchmark (whitespace-separated string)
#   $4 - raw results file name under tests/results/
function run_tests {
  local TEST_NUM=$1
  local TEST_DIR=$2
  local TEST_COMMAND
  IFS=" " read -r -a TEST_COMMAND <<< "$3"
  local FILE_NAME_TO_SAVE_TEST_RESULTS=$4

  local RESULTS_DIR
  RESULTS_DIR="$(pwd)/tests/results"

  cd "$TEST_DIR" || { echo "Specified TEST_DIR does not exist" && exit 1; }
  # Cleanup previous raw results.
  # FIX: `-f` prevents a spurious error on the very first run, when the
  # results file does not exist yet.
  rm -f "$RESULTS_DIR/$FILE_NAME_TO_SAVE_TEST_RESULTS"

  for ((i = 1; i <= TEST_NUM; i++)); do
    {
      echo -e "\n\nTest run $i times\n\n"
      # Capture user/system/CPU/total timings in a fixed format that
      # `generate_table` later parses field-by-field.
      /usr/bin/time --quiet -f '%U user %S system %P cpu %e total' \
        "${TEST_COMMAND[@]}"
    } 2>> "$RESULTS_DIR/$FILE_NAME_TO_SAVE_TEST_RESULTS"
  done
  # shellcheck disable=2164 # Always exist
  cd - > /dev/null
}
56 | printf %"s\n" "$system_seconds" | datamash min 1 57 | ) | $( 58 | printf %"s\n" "$system_seconds" | datamash mean 1 59 | ) | $(printf %"s\n" "$system_seconds" | datamash median 1) | 60 | | CPU % | $( 61 | printf %"s\n" "$cpu" | datamash max 1 62 | ) | $( 63 | printf %"s\n" "$cpu" | datamash min 1 64 | ) | $( 65 | printf %"s\n" "$cpu" | datamash mean 1 66 | ) | $(printf %"s\n" "$cpu" | datamash median 1) | 67 | | Total time | $( 68 | printf %"s\n" "$total_time" | datamash max 1 69 | ) | $( 70 | printf %"s\n" "$total_time" | datamash min 1 71 | ) | $( 72 | printf %"s\n" "$total_time" | datamash mean 1 73 | ) | $(printf %"s\n" "$total_time" | datamash median 1) | 74 | " 75 | } 76 | 77 | function save_result { 78 | local DESCRIPTION=$1 79 | local TABLE=$2 80 | local TEST_RUN_START_TIME=$3 81 | local TEST_RUN_END_TIME=$4 82 | 83 | local FILE_NAME=${5:-"tests_result.md"} 84 | 85 | echo -e "\n$DESCRIPTION\n$TABLE" >> "tests/results/$FILE_NAME" 86 | # shellcheck disable=SC2016,SC2128 # Irrelevant 87 | echo -e ' 88 |
Run details 89 | 90 | * Test Start: '"$TEST_RUN_START_TIME"' 91 | * Test End: '"$TEST_RUN_END_TIME"' 92 | 93 | | Variable name | Value | 94 | | ---------------------------- | --- | 95 | | `TEST_NUM` | '"$TEST_NUM"' | 96 | | `TEST_COMMAND` | '"$TEST_COMMAND"' | 97 | | `TEST_DIR` | '"$TEST_DIR"' | 98 | | `TEST_DESCRIPTION` | '"$TEST_DESCRIPTION"' | 99 | | `RAW_TEST_RESULTS_FILE_NAME` | '"$RAW_TEST_RESULTS_FILE_NAME"' | 100 | 101 | Memory info (`head -n 6 /proc/meminfo`): 102 | 103 | ```bash 104 | '"$(head -n 6 /proc/meminfo)"' 105 | ``` 106 | 107 | CPU info: 108 | 109 | ```bash 110 | Real procs: '"$(grep ^cpu\\scores /proc/cpuinfo | uniq | awk '{print $4}')"' 111 | Virtual (hyper-threading) procs: '"$(grep -c ^processor /proc/cpuinfo)"' 112 | '"$(tail -n 28 /proc/cpuinfo)"' 113 | ``` 114 | 115 |
116 | ' >> "tests/results/$FILE_NAME" 117 | 118 | } 119 | 120 | mkdir -p tests/results 121 | TEST_RUN_START_TIME=$(date -u) 122 | # shellcheck disable=SC2128 # Irrelevant 123 | run_tests "$TEST_NUM" "$TEST_DIR" "$TEST_COMMAND" "$RAW_TEST_RESULTS_FILE_NAME" 124 | TEST_RUN_END_TIME=$(date -u) 125 | 126 | TABLE=$(generate_table "$RAW_TEST_RESULTS_FILE_NAME") 127 | save_result "$TEST_DESCRIPTION" "$TABLE" "$TEST_RUN_START_TIME" "$TEST_RUN_END_TIME" 128 | -------------------------------------------------------------------------------- /tests/pytest/_cli_test.py: -------------------------------------------------------------------------------- 1 | """Tests for the high-level CLI entry point.""" 2 | 3 | from argparse import ArgumentParser, Namespace 4 | 5 | import pytest 6 | from pre_commit_terraform import _cli_parsing as _cli_parsing_mod 7 | from pre_commit_terraform._cli import invoke_cli_app 8 | from pre_commit_terraform._errors import ( 9 | PreCommitTerraformBaseError, 10 | PreCommitTerraformExit, 11 | PreCommitTerraformRuntimeError, 12 | ) 13 | from pre_commit_terraform._structs import ReturnCode 14 | from pre_commit_terraform._types import ReturnCodeType 15 | 16 | 17 | pytestmark = pytest.mark.filterwarnings( 18 | 'ignore:`terraform_docs_replace` hook is DEPRECATED.:UserWarning:' 19 | 'pre_commit_terraform.terraform_docs_replace', 20 | ) 21 | 22 | 23 | @pytest.mark.parametrize( 24 | ('raised_error', 'expected_stderr'), 25 | ( 26 | pytest.param( 27 | PreCommitTerraformRuntimeError('sentinel'), 28 | 'App execution took an unexpected turn: sentinel. Exiting...', 29 | id='app-runtime-exc', 30 | ), 31 | pytest.param( 32 | PreCommitTerraformBaseError('sentinel'), 33 | 'A surprising exception happened: sentinel. Exiting...', 34 | id='app-base-exc', 35 | ), 36 | pytest.param( 37 | KeyboardInterrupt('sentinel'), 38 | 'User-initiated interrupt: sentinel. 
def test_app_exit(
    capsys: pytest.CaptureFixture[str],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Check that an exit exception is re-raised."""

    class CustomCmdStub:
        # Matches the CLI argument passed below so the root parser
        # dispatches to this stub.
        CLI_SUBCOMMAND_NAME = 'sentinel'

        def populate_argument_parser(
            self,
            subcommand_parser: ArgumentParser,
        ) -> None:
            return None

        def invoke_cli_app(self, parsed_cli_args: Namespace) -> ReturnCodeType:
            # `PreCommitTerraformExit` mimics `SystemExit` and must
            # propagate out of the CLI layer unchanged.
            raise PreCommitTerraformExit(self.CLI_SUBCOMMAND_NAME)

    monkeypatch.setattr(
        _cli_parsing_mod,
        'SUBCOMMAND_MODULES',
        [CustomCmdStub()],
    )

    with pytest.raises(PreCommitTerraformExit, match=r'^sentinel$'):
        invoke_cli_app(['sentinel'])

    # The CLI layer reports the exit on stderr before re-raising.
    captured_outputs = capsys.readouterr()
    assert captured_outputs.err == 'App exiting: sentinel\n'
def test_arg_parser_populated() -> None:
    """Check that `replace-docs` populates its parser."""
    test_arg_parser = ArgumentParser()
    populate_argument_parser(test_arg_parser)
    # The `--dest` default is a cheap, reliable signal that the hook's
    # arguments were actually registered.
    assert test_arg_parser.get_default('dest') == 'README.md'


def test_check_is_deprecated() -> None:
    """Verify that `replace-docs` shows a deprecation warning."""
    # NOTE: the missing space after the period reproduces the exact
    # message emitted by the hook.
    deprecation_msg_regex = (
        r'^`terraform_docs_replace` hook is DEPRECATED\.For migration.*$'
    )
    with pytest.warns(UserWarning, match=deprecation_msg_regex):
        # not `pytest.deprecated_call()` due to this being a user warning
        invoke_cli_app(Namespace(filenames=[]))
def test_control_flow_negative(
    mocker: pytest_mock.MockerFixture,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Check that the subcommand's error processing works."""
    parsed_cli_args = Namespace(
        dest='SENTINEL.md',
        filenames=['some.tf'],
        sort=True,
    )
    expected_cmd = 'terraform-docs --sort-by-required md ./ > .//SENTINEL.md'

    # Simulate `terraform-docs` failing so the hook must report an error.
    check_call_mock = mocker.Mock(
        side_effect=CalledProcessError(ReturnCode.ERROR, expected_cmd),
    )
    monkeypatch.setattr(
        replace_docs_subprocess_mod,
        'check_call',
        check_call_mock,
    )

    assert invoke_cli_app(parsed_cli_args) == ReturnCode.ERROR
    # S604 - 'shell=True' is insecure, but this hook is deprecated and we don't
    # want to spent time on testing fixes for it
    check_call_mock.assert_called_once_with(expected_cmd, shell=True)  # noqa: S604
#!/usr/bin/env bash
# Container entrypoint: optionally re-executes pre-commit as the UID:GID
# given via the USERID environment variable, creating the user/group on
# the fly if needed.
# Exit on error.
set -e

readonly USERBASE="run"
readonly BASHPATH="/bin/bash"
readonly HOMEPATH="/home"

# Print an error message to stderr and terminate the container.
function echo_error_and_exit {
  echo -e "ERROR: " "$@" >&2
  exit 1
}

# make sure entrypoint is running as root
if [[ $(id -u) -ne 0 ]]; then
  echo_error_and_exit "Container must run as root. Use environment variable USERID to set user.\n" \
    "Example: \"TAG=latest && " \
    "docker run -e USERID=$(id -u):$(id -g) -v $(pwd):/lint -w /lint ghcr.io/antonbabenko/pre-commit-terraform:$TAG run -a\""
fi

# make sure USERID makes sense as UID:GID
# it looks like the alpine distro limits UID and GID to 256000, but
# could be more, so we accept any valid integers
USERID=${USERID:-"0:0"}
if [[ ! $USERID =~ ^[0-9]+:[0-9]+$ ]]; then
  echo_error_and_exit "USERID environment variable invalid, format is userid:groupid. Received: \"$USERID\""
fi

# separate uid and gid
uid=${USERID%%:*}
gid=${USERID##*:}

# if requested UID:GID is root, go ahead and run without other processing
[[ $USERID == "0:0" ]] && exec su-exec "$USERID" pre-commit "$@"

# make sure workdir and some files are readable/writable by the provided UID/GID
# combo, otherwise will have errors when processing hooks
wdir="$(pwd)"
if ! su-exec "$USERID" "$BASHPATH" -c "test -w $wdir && test -r $wdir"; then
  echo_error_and_exit "uid:gid $USERID lacks permissions to $wdir/"
fi
# pre-commit needs write access to the git index; resolve it via git so
# worktrees and custom GIT_DIR layouts are handled too.
wdirgitindex="$(git rev-parse --git-dir 2>&1)/index" || echo_error_and_exit "${wdirgitindex%/index}"
if ! su-exec "$USERID" "$BASHPATH" -c "test -w $wdirgitindex && test -r $wdirgitindex"; then
  echo_error_and_exit "uid:gid $USERID cannot write to $wdirgitindex"
fi

# check if group by this GID already exists, if so get the name since adduser
# only accepts names
if groupinfo="$(getent group "$gid")"; then
  groupname="${groupinfo%%:*}"
else
  # create group in advance in case GID is different than UID
  groupname="$USERBASE$gid"
  if ! err="$(addgroup -g "$gid" "$groupname" 2>&1)"; then
    echo_error_and_exit "failed to create gid \"$gid\" with name \"$groupname\"\ncommand output: \"$err\""
  fi
fi

# check if user by this UID already exists, if so get the name since id
# only accepts names
if userinfo="$(getent passwd "$uid")"; then
  username="${userinfo%%:*}"
else
  username="$USERBASE$uid"
  if ! err="$(adduser -h "$HOMEPATH$username" -s "$BASHPATH" -G "$groupname" -D -u "$uid" -k "$HOME" "$username" 2>&1)"; then
    echo_error_and_exit "failed to create uid \"$uid\" with name \"$username\" and group \"$groupname\"\ncommand output: \"$err\""
  fi
fi

# it's possible it was not in the group specified, add it
if ! idgroupinfo="$(id -G "$username" 2>&1)"; then
  echo_error_and_exit "failed to get group list for username \"$username\"\ncommand output: \"$idgroupinfo\""
fi
if [[ ! " $idgroupinfo " =~ [[:blank:]]${gid}[[:blank:]] ]]; then
  if ! err="$(addgroup "$username" "$groupname" 2>&1)"; then
    echo_error_and_exit "failed to add user \"$username\" to group \"$groupname\"\ncommand output: \"$err\""
  fi
fi

# user and group of specified UID/GID should exist now, and user should be
# a member of group, so execute pre-commit
exec su-exec "$USERID" pre-commit "$@"
32 | # Can be: 'tar.gz', 'zip' or 'binary' 33 | # GH_RELEASE_REGEX_LATEST - Regular expression to match the latest 34 | # release URL 35 | # GH_RELEASE_REGEX_SPECIFIC_VERSION - Regular expression to match the 36 | # specific version release URL 37 | # UNUSUAL_TOOL_NAME_IN_PKG - If the tool in the tar.gz package is 38 | # not in the root or named differently than the tool name itself, 39 | # For example, includes the version number or is in a subdirectory 40 | ####################################################################### 41 | function common::install_from_gh_release { 42 | local -r GH_ORG=$1 43 | local -r DISTRIBUTED_AS=$2 44 | local -r GH_RELEASE_REGEX_LATEST=$3 45 | local -r GH_RELEASE_REGEX_SPECIFIC_VERSION=$4 46 | local -r UNUSUAL_TOOL_NAME_IN_PKG=$5 47 | 48 | case $DISTRIBUTED_AS in 49 | tar.gz | zip) 50 | local -r PKG="${TOOL}.${DISTRIBUTED_AS}" 51 | ;; 52 | binary) 53 | local -r PKG="$TOOL" 54 | ;; 55 | *) 56 | echo "Unknown DISTRIBUTED_AS: '$DISTRIBUTED_AS'. Should be one of: 'tar.gz', 'zip' or 'binary'." 
>&2 57 | exit 1 58 | ;; 59 | esac 60 | 61 | # Download tool 62 | local -r RELEASES="https://api.github.com/repos/${GH_ORG}/${TOOL}/releases" 63 | 64 | if [[ $VERSION == latest ]]; then 65 | curl -L "$(curl -s "${RELEASES}/latest" | grep -o -E -i -m 1 "$GH_RELEASE_REGEX_LATEST")" > "$PKG" 66 | else 67 | curl -L "$(curl -s "$RELEASES" | grep -o -E -i -m 1 "$GH_RELEASE_REGEX_SPECIFIC_VERSION")" > "$PKG" 68 | fi 69 | 70 | # Make tool ready to use 71 | if [[ $DISTRIBUTED_AS == tar.gz ]]; then 72 | if [[ -z $UNUSUAL_TOOL_NAME_IN_PKG ]]; then 73 | tar -xzf "$PKG" "$TOOL" 74 | else 75 | tar -xzf "$PKG" "$UNUSUAL_TOOL_NAME_IN_PKG" 76 | mv "$UNUSUAL_TOOL_NAME_IN_PKG" "$TOOL" 77 | fi 78 | rm "$PKG" 79 | 80 | elif [[ $DISTRIBUTED_AS == zip ]]; then 81 | unzip "$PKG" 82 | rm "$PKG" 83 | else 84 | chmod +x "$PKG" 85 | fi 86 | } 87 | -------------------------------------------------------------------------------- /tools/install/checkov.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . 
"$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | apk add --no-cache \ 12 | gcc=~12 \ 13 | libffi-dev=~3 \ 14 | musl-dev=~1 15 | 16 | # cargo, gcc, git, musl-dev, rust and CARGO envvar required for compilation of rustworkx@0.13.2 17 | # no longer required once checkov version depends on rustworkx >0.14.0 18 | # https://github.com/bridgecrewio/checkov/pull/6045 19 | # gcc libffi-dev musl-dev required for compilation of cffi, until it contains musl aarch64 20 | export CARGO_NET_GIT_FETCH_WITH_CLI=true 21 | apk add --no-cache \ 22 | cargo=~1 \ 23 | git=~2 \ 24 | libgcc=~12 \ 25 | rust=~1 26 | 27 | if [[ $VERSION == latest ]]; then 28 | pip3 install --no-cache-dir "${TOOL}" 29 | else 30 | pip3 install --no-cache-dir "${TOOL}==${VERSION}" 31 | fi 32 | 33 | apk del gcc libffi-dev musl-dev 34 | # no longer required once checkov version depends on rustworkx >0.14.0 35 | # https://github.com/bridgecrewio/checkov/pull/6045 36 | apk del cargo git rust 37 | -------------------------------------------------------------------------------- /tools/install/hcledit.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . 
"$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | GH_ORG="minamijoyo" 12 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?${VERSION}_${TARGETOS}_${TARGETARCH}.tar.gz" 13 | GH_RELEASE_REGEX_LATEST="https://.+?_${TARGETOS}_${TARGETARCH}.tar.gz" 14 | DISTRIBUTED_AS="tar.gz" 15 | 16 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 17 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 18 | -------------------------------------------------------------------------------- /tools/install/infracost.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . "$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | GH_ORG="infracost" 12 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?v${VERSION}/${TOOL}-${TARGETOS}-${TARGETARCH}.tar.gz" 13 | GH_RELEASE_REGEX_LATEST="https://.+?-${TARGETOS}-${TARGETARCH}.tar.gz" 14 | DISTRIBUTED_AS="tar.gz" 15 | UNUSUAL_TOOL_NAME_IN_PKG="${TOOL}-${TARGETOS}-${TARGETARCH}" 16 | 17 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 18 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" \ 19 | "$UNUSUAL_TOOL_NAME_IN_PKG" 20 | -------------------------------------------------------------------------------- /tools/install/opentofu.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . 
"$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | GH_ORG="opentofu" 11 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?v${VERSION}_${TARGETOS}_${TARGETARCH}.tar.gz" 12 | GH_RELEASE_REGEX_LATEST="https://.+?_${TARGETOS}_${TARGETARCH}.tar.gz" 13 | DISTRIBUTED_AS="tar.gz" 14 | UNUSUAL_TOOL_NAME_IN_PKG="tofu" 15 | 16 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 17 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" \ 18 | "$UNUSUAL_TOOL_NAME_IN_PKG" 19 | 20 | # restore original binary name 21 | mv "$TOOL" "$UNUSUAL_TOOL_NAME_IN_PKG" 22 | -------------------------------------------------------------------------------- /tools/install/pre-commit.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | 5 | # shellcheck source=_common.sh 6 | . "$SCRIPT_DIR/_common.sh" 7 | 8 | # 9 | # Unique part 10 | # 11 | 12 | if [[ $VERSION == latest ]]; then 13 | pip3 install --no-cache-dir "$TOOL" 14 | else 15 | pip3 install --no-cache-dir "${TOOL}==${VERSION}" 16 | fi 17 | -------------------------------------------------------------------------------- /tools/install/terraform-docs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . 
"$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | GH_ORG="terraform-docs" 12 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?v${VERSION}-${TARGETOS}-${TARGETARCH}.tar.gz" 13 | GH_RELEASE_REGEX_LATEST="https://.+?-${TARGETOS}-${TARGETARCH}.tar.gz" 14 | DISTRIBUTED_AS="tar.gz" 15 | 16 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 17 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 18 | -------------------------------------------------------------------------------- /tools/install/terraform.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . "$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | # shellcheck disable=SC2153 # We are using the variable from _common.sh 11 | if [[ $VERSION == latest ]]; then 12 | version="$(curl -s https://api.github.com/repos/hashicorp/terraform/releases/latest | grep tag_name | grep -o -E -m 1 "[0-9.]+")" 13 | else 14 | version=$VERSION 15 | fi 16 | readonly version 17 | 18 | curl -L "https://releases.hashicorp.com/terraform/${version}/${TOOL}_${version}_${TARGETOS}_${TARGETARCH}.zip" > "${TOOL}.zip" 19 | unzip "${TOOL}.zip" "$TOOL" 20 | rm "${TOOL}.zip" 21 | -------------------------------------------------------------------------------- /tools/install/terragrunt.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . 
"$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | GH_ORG="gruntwork-io" 12 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?v${VERSION}/${TOOL}_${TARGETOS}_${TARGETARCH}" 13 | GH_RELEASE_REGEX_LATEST="https://.+?/${TOOL}_${TARGETOS}_${TARGETARCH}" 14 | DISTRIBUTED_AS="binary" 15 | 16 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 17 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 18 | -------------------------------------------------------------------------------- /tools/install/terrascan.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . "$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | [[ $TARGETARCH == amd64 ]] && ARCH="x86_64" || ARCH="$TARGETARCH" 12 | readonly ARCH 13 | # Convert the first letter to Uppercase 14 | OS="${TARGETOS^}" 15 | 16 | GH_ORG="tenable" 17 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?${VERSION}_${OS}_${ARCH}.tar.gz" 18 | GH_RELEASE_REGEX_LATEST="https://.+?_${OS}_${ARCH}.tar.gz" 19 | DISTRIBUTED_AS="tar.gz" 20 | 21 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 22 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 23 | 24 | # Download (caching) terrascan rego policies to save time during terrascan run 25 | # https://runterrascan.io/docs/usage/_print/#pg-2cba380a2ef14e4ae3c674e02c5f9f53 26 | ./"$TOOL" init 27 | -------------------------------------------------------------------------------- /tools/install/tflint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . 
"$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | GH_ORG="terraform-linters" 12 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?/v${VERSION}/${TOOL}_${TARGETOS}_${TARGETARCH}.zip" 13 | GH_RELEASE_REGEX_LATEST="https://.+?_${TARGETOS}_${TARGETARCH}.zip" 14 | DISTRIBUTED_AS="zip" 15 | 16 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 17 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 18 | -------------------------------------------------------------------------------- /tools/install/tfsec.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . "$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | GH_ORG="aquasecurity" 12 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?v${VERSION}/${TOOL}-${TARGETOS}-${TARGETARCH}" 13 | GH_RELEASE_REGEX_LATEST="https://.+?/${TOOL}-${TARGETOS}-${TARGETARCH}" 14 | DISTRIBUTED_AS="binary" 15 | 16 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 17 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 18 | -------------------------------------------------------------------------------- /tools/install/tfupdate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . 
"$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | GH_ORG="minamijoyo" 12 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?${VERSION}_${TARGETOS}_${TARGETARCH}.tar.gz" 13 | GH_RELEASE_REGEX_LATEST="https://.+?_${TARGETOS}_${TARGETARCH}.tar.gz" 14 | DISTRIBUTED_AS="tar.gz" 15 | 16 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 17 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 18 | -------------------------------------------------------------------------------- /tools/install/trivy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)" 3 | readonly SCRIPT_DIR 4 | # shellcheck source=_common.sh 5 | . "$SCRIPT_DIR/_common.sh" 6 | 7 | # 8 | # Unique part 9 | # 10 | 11 | [[ $TARGETARCH == amd64 ]] && ARCH="64bit" || ARCH="$TARGETARCH" 12 | readonly ARCH 13 | 14 | GH_ORG="aquasecurity" 15 | GH_RELEASE_REGEX_SPECIFIC_VERSION="https://.+?/v${VERSION}/${TOOL}_.+?_${TARGETOS}-${ARCH}.tar.gz" 16 | GH_RELEASE_REGEX_LATEST="https://.+?/${TOOL}_.+?_${TARGETOS}-${ARCH}.tar.gz" 17 | DISTRIBUTED_AS="tar.gz" 18 | 19 | common::install_from_gh_release "$GH_ORG" "$DISTRIBUTED_AS" \ 20 | "$GH_RELEASE_REGEX_LATEST" "$GH_RELEASE_REGEX_SPECIFIC_VERSION" 21 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | isolated_build = true 3 | 4 | 5 | [python-cli-options] 6 | byte-warnings = -b 7 | byte-errors = -bb 8 | max-isolation = -E -s -I 9 | some-isolation = -E -s 10 | warnings-to-errors = -Werror 11 | 12 | 13 | [testenv] 14 | description = Run pytest under {envpython} 15 | dependency_groups = 16 | testing 17 | 18 | # In: 19 | # 'tox run -e py -- --lf', 'tox run -- --lf', 'tox run -e py313,py312 -- --lf' 20 | # '{posargs}' (positional arguments) == '--lf' 21 | commands = 22 | {envpython} \ 
23 | {[python-cli-options]byte-errors} \ 24 | {[python-cli-options]max-isolation} \ 25 | {[python-cli-options]warnings-to-errors} \ 26 | -W 'ignore:Coverage failure::pytest_cov.plugin' \ 27 | -m pytest \ 28 | {tty:--color=yes} \ 29 | {posargs:--cov-report=html:{envtmpdir}{/}htmlcov{/}} 30 | commands_post = 31 | # Create GHA Job Summary markdown table of the coverage report 32 | # https://github.blog/news-insights/product-news/supercharging-github-actions-with-job-summaries/ 33 | # a leading '-' suppresses non-zero return codes 34 | -{envpython} \ 35 | {[python-cli-options]byte-errors} \ 36 | {[python-cli-options]max-isolation} \ 37 | {[python-cli-options]warnings-to-errors} \ 38 | -c \ 39 | 'import atexit, os, sys; \ 40 | os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ 41 | import coverage; \ 42 | gh_summary_fd = open(\ 43 | os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a",\ 44 | ); \ 45 | atexit.register(gh_summary_fd.close); \ 46 | cov = coverage.Coverage(); \ 47 | cov.load(); \ 48 | cov.report(file=gh_summary_fd, output_format="markdown")' 49 | # Expose the coverage & test run XML report paths into GHA 50 | {envpython} \ 51 | {[python-cli-options]byte-errors} \ 52 | {[python-cli-options]max-isolation} \ 53 | {[python-cli-options]warnings-to-errors} \ 54 | -c \ 55 | 'import os, pathlib, sys; \ 56 | os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ 57 | cov_report_arg_prefix = "--cov-report=xml:"; \ 58 | test_report_arg_prefix = "--junitxml="; \ 59 | cov_reports = [\ 60 | arg[len(cov_report_arg_prefix):] for arg in sys.argv \ 61 | if arg.startswith(cov_report_arg_prefix)\ 62 | ]; \ 63 | test_reports = [\ 64 | arg[len(test_report_arg_prefix):] for arg in sys.argv \ 65 | if arg.startswith(test_report_arg_prefix)\ 66 | ]; \ 67 | cov_report_file = cov_reports[-1] if cov_reports else None; \ 68 | test_report_file = test_reports[-1] if test_reports else None; \ 69 | gh_output_fd = open(\ 70 | os.environ["GITHUB_OUTPUT"], encoding="utf-8", 
mode="a",\ 71 | ); \ 72 | cov_report_file and \ 73 | print(f"cov-report-files={cov_report_file !s}", file=gh_output_fd); \ 74 | test_report_file and \ 75 | print(f"test-result-files={test_report_file !s}", file=gh_output_fd); \ 76 | print("codecov-flags=pytest", file=gh_output_fd); \ 77 | gh_output_fd.close()' \ 78 | {posargs} 79 | # Print out the output coverage dir and a way to serve html: 80 | {envpython} \ 81 | {[python-cli-options]byte-errors} \ 82 | {[python-cli-options]max-isolation} \ 83 | {[python-cli-options]warnings-to-errors} \ 84 | -c\ 85 | 'import pathlib, shlex, sys; \ 86 | cov_html_report_arg_prefix = "--cov-report=html:"; \ 87 | cov_html_reports = [\ 88 | arg[len(cov_html_report_arg_prefix):] for arg in sys.argv \ 89 | if arg.startswith(cov_html_report_arg_prefix)\ 90 | ]; \ 91 | cov_html_reports or sys.exit(); \ 92 | cov_html_report_dir = pathlib.Path(cov_html_reports[-1]); \ 93 | index_file = cov_html_report_dir / "index.html";\ 94 | index_file.exists() or sys.exit(); \ 95 | html_url = f"file://\{index_file\}";\ 96 | browse_cmd = shlex.join(("python3", "-Im", "webbrowser", html_url)); \ 97 | serve_cmd = shlex.join((\ 98 | "python3", "-Im", "http.server", \ 99 | "--directory", str(cov_html_report_dir), "0", \ 100 | )); \ 101 | print(f"\nTo open the HTML coverage report, run\n\n\ 102 | \t\{browse_cmd !s\}\n");\ 103 | print(f"To serve \ 104 | the HTML coverage report with a local web server, use\n\n\ 105 | \t\{serve_cmd !s\}\n")' \ 106 | {posargs:--cov-report=html:{envtmpdir}{/}htmlcov{/}} 107 | package = editable 108 | pass_env = 109 | CI 110 | GITHUB_* 111 | SSH_AUTH_SOCK 112 | TERM 113 | set_env = 114 | COVERAGE_PROCESS_START = {toxinidir}{/}.coveragerc 115 | wheel_build_env = .pkg 116 | 117 | # Duplicate default 'py' env to 'pytest' to be able run pytest with 'tox run -e pytest' 118 | [testenv:pytest] 119 | 120 | 121 | [testenv:cleanup-dists] 122 | description = 123 | Wipe the the dist{/} folder 124 | dependency_groups = 125 | commands_pre = 126 
| commands = 127 | {envpython} \ 128 | {[python-cli-options]byte-errors} \ 129 | {[python-cli-options]max-isolation} \ 130 | {[python-cli-options]warnings-to-errors} \ 131 | -c \ 132 | 'import os, shutil, sys; \ 133 | dists_dir = "{toxinidir}{/}dist{/}"; \ 134 | shutil.rmtree(dists_dir, ignore_errors=True); \ 135 | sys.exit(os.path.exists(dists_dir))' 136 | commands_post = 137 | package = skip 138 | 139 | 140 | [testenv:build-dists] 141 | description = 142 | Build dists with {basepython} and put them into the dist{/} folder 143 | dependency_groups = 144 | building 145 | depends = 146 | cleanup-dists 147 | commands = 148 | {envpython} \ 149 | {[python-cli-options]byte-errors} \ 150 | {[python-cli-options]max-isolation} \ 151 | {[python-cli-options]warnings-to-errors} \ 152 | -m build \ 153 | {posargs:} 154 | commands_post = 155 | package = skip 156 | 157 | 158 | [testenv:metadata-validation] 159 | description = 160 | Verify that dists under the `dist{/}` dir 161 | have valid metadata 162 | dependency_groups = 163 | upstreaming 164 | depends = 165 | build-dists 166 | commands = 167 | {envpython} \ 168 | {[python-cli-options]byte-errors} \ 169 | {[python-cli-options]max-isolation} \ 170 | {[python-cli-options]warnings-to-errors} \ 171 | -m twine \ 172 | check \ 173 | --strict \ 174 | dist{/}* 175 | commands_post = 176 | package = skip 177 | 178 | # In: 179 | # 'tox run -e pre-commit -- mypy-py313 --all' 180 | # '{posargs}' == 'mypy-py313 --all' 181 | [testenv:pre-commit] 182 | description = 183 | Run the quality checks under {basepython}; run as 184 | `SKIP=check-id1,check-id2 tox r -e pre-commit` to instruct the underlying 185 | `pre-commit` invocation avoid running said checks; Use 186 | `tox r -e pre-commit -- check-id1 --all-files` to select checks matching IDs 187 | aliases{:} `tox r -e pre-commit -- mypy --all-files` will run 3 MyPy 188 | invocations, but `tox r -e pre-commit -- mypy-py313 --all-files` runs one. 
189 | commands = 190 | {envpython} \ 191 | {[python-cli-options]byte-errors} \ 192 | {[python-cli-options]max-isolation} \ 193 | {[python-cli-options]warnings-to-errors} \ 194 | -m pre_commit \ 195 | run \ 196 | --color=always \ 197 | --show-diff-on-failure \ 198 | {posargs:--all-files} 199 | 200 | # Print out the advice on how to install pre-commit from this env into Git: 201 | # a leading '-' suppresses non-zero return codes 202 | -{envpython} \ 203 | {[python-cli-options]byte-errors} \ 204 | {[python-cli-options]max-isolation} \ 205 | {[python-cli-options]warnings-to-errors} \ 206 | -c \ 207 | 'cmd = "{envpython} -m pre_commit install"; \ 208 | scr_width = len(cmd) + 10; \ 209 | sep = "=" * scr_width; \ 210 | cmd_str = " $ \{cmd\}";' \ 211 | 'print(f"\n\{sep\}\nTo install pre-commit hooks into the Git repo, run:\ 212 | \n\n\{cmd_str\}\n\n\{sep\}\n")' 213 | commands_post = 214 | {envpython} \ 215 | {[python-cli-options]byte-errors} \ 216 | {[python-cli-options]max-isolation} \ 217 | {[python-cli-options]warnings-to-errors} \ 218 | -c \ 219 | 'import os, pathlib, sys; \ 220 | os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ 221 | project_root_path = pathlib.Path(r"{toxinidir}"); \ 222 | test_results_dir = pathlib.Path(r"{temp_dir}") / ".test-results"; \ 223 | coverage_result_files = ",".join(\ 224 | str(xml_path.relative_to(project_root_path)) \ 225 | for xml_path in test_results_dir.glob("mypy--py-*{/}cobertura.xml")\ 226 | ); \ 227 | gh_output_fd = open(\ 228 | os.environ["GITHUB_OUTPUT"], encoding="utf-8", mode="a",\ 229 | ); \ 230 | print(\ 231 | f"cov-report-files={coverage_result_files !s}", file=gh_output_fd\ 232 | ); \ 233 | print("codecov-flags=MyPy", file=gh_output_fd); \ 234 | gh_output_fd.close()' 235 | # Publish available MyPy-produced text and JSON reports wrapped as Markdown code blocks, to a GHA job summary 236 | {envpython} \ 237 | {[python-cli-options]byte-errors} \ 238 | {[python-cli-options]max-isolation} \ 239 | 
{[python-cli-options]warnings-to-errors} \ 240 | -c \ 241 | 'import itertools, os, pathlib, shlex, sys; \ 242 | os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ 243 | test_results_dir = pathlib.Path(r"{temp_dir}") / ".test-results"; \ 244 | text_and_json_reports = itertools.chain( \ 245 | test_results_dir.glob("mypy--py-*{/}*.json"), \ 246 | test_results_dir.glob("mypy--py-*{/}*.txt"), \ 247 | ); \ 248 | report_contents = { \ 249 | report{:} report.read_text() \ 250 | for report in text_and_json_reports \ 251 | }; \ 252 | reports_summary_text_blob = "\n\n".join( \ 253 | f"\N\{NUMBER SIGN\}\N\{NUMBER SIGN\} {report_path.parent.name}{:} " \ 254 | f"`{report_path.name}`\n\n" \ 255 | f"```{report_path.suffix[1:]}\n{report_text}\n```\n" \ 256 | for report_path, report_text in report_contents.items() \ 257 | ); \ 258 | gh_summary_fd = open( \ 259 | os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a", \ 260 | ); \ 261 | print(reports_summary_text_blob, file=gh_summary_fd); \ 262 | gh_summary_fd.close()' 263 | # Print out the output coverage dir and a way to serve html: 264 | {envpython} \ 265 | {[python-cli-options]byte-errors} \ 266 | {[python-cli-options]max-isolation} \ 267 | {[python-cli-options]warnings-to-errors} \ 268 | -c\ 269 | 'import os, pathlib, sys; \ 270 | os.getenv("GITHUB_ACTIONS") == "true" and sys.exit(); \ 271 | len(sys.argv) >= 3 and all(\ 272 | arg != "mypy" and not arg.startswith("mypy-py3") \ 273 | for arg in sys.argv \ 274 | ) and sys.exit(); \ 275 | project_root_path = pathlib.Path(r"{toxinidir}"); \ 276 | test_results_dir = pathlib.Path(r"{temp_dir}") / ".test-results"; \ 277 | coverage_html_report_urls = [\ 278 | f"file://\{xml_path !s\}" \ 279 | for xml_path in test_results_dir.glob("mypy--py-*{/}index.html")\ 280 | ]; \ 281 | coverage_html_report_open_cmds = [\ 282 | f"python3 -Im webbrowser \N\{QUOTATION MARK\}\{html_url !s\}\N\{QUOTATION MARK\}" \ 283 | for html_url in coverage_html_report_urls\ 284 | ]; \ 285 | 
coverage_html_report_open_cmds_blob = "\n\n\t".join(\ 286 | coverage_html_report_open_cmds,\ 287 | ); \ 288 | print(\ 289 | f"\nTo open the HTML coverage reports, run\n\n\ 290 | \t\{coverage_html_report_open_cmds_blob !s\}\n"\ 291 | ); \ 292 | print(\ 293 | f"[*] Find rest of JSON and text reports, are in the same directories."\ 294 | )\ 295 | ' \ 296 | {posargs:--all-files} 297 | dependency_groups = 298 | linting 299 | isolated_build = true 300 | package = skip 301 | pass_env = 302 | {[testenv]pass_env} 303 | SKIP # set this variable 304 | --------------------------------------------------------------------------------