├── .github ├── dependabot.yml └── workflows │ ├── auto-merge.yaml │ ├── codeql.yml │ ├── docs.yaml │ ├── pre-merge-ci.yaml │ ├── push-bundles.yaml │ └── scorecards.yml ├── .gitignore ├── .regal └── config.yaml ├── .tool-versions ├── .vscode └── settings.json ├── LICENSE ├── Makefile ├── README.md ├── acceptance ├── acceptance_test.go ├── features │ ├── release.feature │ └── task.feature ├── go.mod ├── go.sum └── samples │ ├── README.md │ ├── clamav-task.json │ └── policy-input-golden-container.json ├── antora └── docs │ ├── antora.yml │ └── modules │ └── ROOT │ ├── attachments │ └── rule_data.yml │ ├── nav.adoc │ ├── pages │ ├── authoring.adoc │ ├── build_task_policy.adoc │ ├── index.adoc │ ├── pipeline_policy.adoc │ ├── policy_bundles.adoc │ ├── release_policy.adoc │ ├── stepaction_policy.adoc │ ├── task_policy.adoc │ ├── tasks.adoc │ ├── trusted_tasks.adoc │ └── trusting_tasks.adoc │ └── partials │ ├── build_task_policy_nav.adoc │ ├── pipeline_policy_nav.adoc │ ├── release_policy_nav.adoc │ ├── stepaction_policy_nav.adoc │ └── task_policy_nav.adoc ├── checks ├── annotations.rego └── annotations_test.rego ├── docs ├── asciidoc │ ├── asciidoc.go │ ├── nav.template │ └── policy.template ├── go.mod ├── go.sum └── main.go ├── example └── data │ ├── README.md │ ├── known_rpm_repositories.yml │ ├── required_tasks.yml │ ├── rule_data.yml │ └── trusted_tekton_tasks.yml ├── go.mod ├── go.sum ├── hack ├── ec-opa.sh ├── refresh-examples.sh ├── regal.sh ├── simplecov.rego ├── update-bundles.sh ├── update-infra-deployments.sh └── validate-acceptable-bundles.sh ├── policy ├── artifacthub-repo.yml ├── build_task │ └── build_labels │ │ ├── build_labels.rego │ │ └── build_labels_test.rego ├── lib │ ├── arrays │ │ ├── array_helpers.rego │ │ └── array_helpers_test.rego │ ├── assertions.rego │ ├── assertions_test.rego │ ├── image │ │ ├── image.rego │ │ └── image_test.rego │ ├── json │ │ ├── schema.rego │ │ └── schema_test.rego │ ├── k8s │ │ ├── k8s.rego │ │ └── k8s_test.rego │ ├── 
konflux │ │ ├── konflux.rego │ │ └── konflux_test.rego │ ├── result_helper.rego │ ├── result_helper_test.rego │ ├── rule_data.rego │ ├── rule_data_test.rego │ ├── sbom │ │ ├── rpm.rego │ │ ├── rpm_test.rego │ │ ├── sbom.rego │ │ └── sbom_test.rego │ ├── set_helpers.rego │ ├── set_helpers_test.rego │ ├── sigstore.rego │ ├── sigstore_test.rego │ ├── string_utils.rego │ ├── string_utils_test.rego │ ├── tekton │ │ ├── bundles.rego │ │ ├── bundles_test.rego │ │ ├── pipeline.rego │ │ ├── pipeline_test.rego │ │ ├── recorded_att_data_test.rego │ │ ├── recorded_att_test.rego │ │ ├── refs.rego │ │ ├── refs_test.rego │ │ ├── task.rego │ │ ├── task_results.rego │ │ ├── task_results_test.rego │ │ ├── task_test.rego │ │ ├── trusted.rego │ │ └── trusted_test.rego │ └── time │ │ ├── time.rego │ │ └── time_test.rego ├── pipeline │ ├── artifacthub-pkg.yml │ ├── basic │ │ ├── basic.rego │ │ └── basic_test.rego │ ├── required_tasks │ │ ├── required_tasks.rego │ │ └── required_tasks_test.rego │ └── task_bundle │ │ ├── task_bundle.rego │ │ └── task_bundle_test.rego ├── release │ ├── artifacthub-pkg.yml │ ├── attestation_task_bundle │ │ ├── attestation_task_bundle.rego │ │ └── attestation_task_bundle_test.rego │ ├── attestation_type │ │ ├── attestation_type.rego │ │ └── attestation_type_test.rego │ ├── base_image_registries │ │ ├── base_image_registries.rego │ │ └── base_image_registries_test.rego │ ├── buildah_build_task │ │ ├── buildah_build_task.rego │ │ └── buildah_build_task_test.rego │ ├── collection │ │ ├── github │ │ │ └── github.rego │ │ ├── minimal │ │ │ └── minimal.rego │ │ ├── policy_data │ │ │ └── policy_data.rego │ │ ├── redhat │ │ │ └── redhat.rego │ │ ├── redhat_rpms │ │ │ └── redhat_rpms.rego │ │ ├── rhtap_multi_ci │ │ │ └── rhtap_multi_ci.rego │ │ └── slsa3 │ │ │ └── slsa3.rego │ ├── cve │ │ ├── cve.rego │ │ └── cve_test.rego │ ├── external_parameters │ │ ├── external_parameters.rego │ │ └── external_parameters_test.rego │ ├── git_branch │ │ ├── git_branch.rego │ │ └── 
git_branch_test.rego │ ├── github_certificate │ │ ├── github_certificate.rego │ │ └── github_certificate_test.rego │ ├── hermetic_build_task │ │ ├── hermetic_build_task.rego │ │ └── hermetic_build_task_test.rego │ ├── labels │ │ ├── labels.rego │ │ └── labels_test.rego │ ├── lib │ │ ├── attestations.rego │ │ └── attestations_test.rego │ ├── olm │ │ ├── olm.rego │ │ └── olm_test.rego │ ├── provenance_materials │ │ ├── provenance_materials.rego │ │ └── provenance_materials_test.rego │ ├── quay_expiration │ │ ├── quay_expiration.rego │ │ └── quay_expiration_test.rego │ ├── rhtap_multi_ci │ │ ├── rhtap_multi_ci.rego │ │ └── rhtap_multi_ci_test.rego │ ├── rpm_ostree_task │ │ ├── rpm_ostree_task.rego │ │ └── rpm_ostree_task_test.rego │ ├── rpm_packages │ │ ├── rpm_packages.rego │ │ └── rpm_packages_test.rego │ ├── rpm_pipeline │ │ ├── rpm_pipeline.rego │ │ └── rpm_pipeline_test.rego │ ├── rpm_repos │ │ ├── rpm_repos.rego │ │ └── rpm_repos_test.rego │ ├── rpm_signature │ │ ├── rpm_signature.rego │ │ └── rpm_signature_test.rego │ ├── sbom │ │ ├── sbom.rego │ │ └── sbom_test.rego │ ├── sbom_cyclonedx │ │ ├── sbom_cyclonedx.rego │ │ ├── sbom_cyclonedx_schema.rego │ │ └── sbom_cyclonedx_test.rego │ ├── sbom_spdx │ │ ├── sbom_spdx.rego │ │ ├── sbom_spdx_schema.rego │ │ └── sbom_spdx_test.rego │ ├── schedule │ │ ├── schedule.rego │ │ └── schedule_test.rego │ ├── slsa_build_build_service │ │ ├── slsa_build_build_service.rego │ │ └── slsa_build_build_service_test.rego │ ├── slsa_build_scripted_build │ │ ├── slsa_build_scripted_build.rego │ │ └── slsa_build_scripted_build_test.rego │ ├── slsa_provenance_available │ │ ├── slsa_provenance_available.rego │ │ └── slsa_provenance_available_test.rego │ ├── slsa_source_correlated │ │ ├── slsa_source_correlated.rego │ │ └── slsa_source_correlated_test.rego │ ├── slsa_source_version_controlled │ │ ├── slsa_source_version_controlled.rego │ │ └── slsa_source_version_controlled_test.rego │ ├── source_image │ │ ├── source_image.rego │ │ └── 
source_image_test.rego │ ├── tasks │ │ ├── tasks.rego │ │ └── tasks_test.rego │ ├── test │ │ ├── test.rego │ │ └── test_test.rego │ └── trusted_task │ │ ├── trusted_task.rego │ │ └── trusted_task_test.rego ├── stepaction │ ├── image │ │ ├── image.rego │ │ └── image_test.rego │ └── kind │ │ ├── kind.rego │ │ └── kind_test.rego └── task │ ├── annotations │ ├── annotations.rego │ └── annotations_test.rego │ ├── kind │ ├── kind.rego │ └── kind_test.rego │ ├── results │ ├── results.rego │ └── results_test.rego │ ├── step_image_registries │ ├── step_image_registries.rego │ └── step_image_registries_test.rego │ ├── step_images │ ├── step_images.rego │ └── step_images_test.rego │ └── trusted_artifacts │ ├── trusted_artifacts.rego │ └── trusted_artifacts_test.rego ├── regal.go ├── renovate.json └── tools.go /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | version: 2 19 | updates: 20 | - package-ecosystem: "github-actions" 21 | directory: "/" 22 | schedule: 23 | interval: weekly 24 | time: '15:00' 25 | timezone: Etc/UTC 26 | - package-ecosystem: "gomod" 27 | directory: "/" 28 | schedule: 29 | interval: weekly 30 | time: '15:00' 31 | timezone: Etc/UTC 32 | - package-ecosystem: "npm" 33 | directory: "/antora/policy-antora-extension" 34 | schedule: 35 | interval: weekly 36 | time: '15:00' 37 | timezone: Etc/UTC 38 | -------------------------------------------------------------------------------- /.github/workflows/auto-merge.yaml: -------------------------------------------------------------------------------- 1 | # Copyright The Conforma Contributors 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # 15 | # SPDX-License-Identifier: Apache-2.0 16 | 17 | --- 18 | name: 'Auto merge' 19 | 'on': 20 | pull_request_review: 21 | types: [submitted] 22 | 23 | jobs: 24 | auto-merge: 25 | uses: enterprise-contract/github-workflows/.github/workflows/auto-merge.yaml@main 26 | secrets: inherit 27 | permissions: 28 | pull-requests: write 29 | contents: write 30 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # For most projects, this workflow file will not need changing; you simply need 19 | # to commit it to your repository. 20 | # 21 | # You may wish to alter this file to override the set of languages analyzed, 22 | # or to provide custom queries or build logic. 23 | # 24 | # ******** NOTE ******** 25 | # We have attempted to detect the languages in your repository. Please check 26 | # the `language` matrix defined below to confirm you have the correct set of 27 | # supported CodeQL languages. 
28 | # 29 | name: "CodeQL" 30 | 31 | on: 32 | push: 33 | branches: ["main"] 34 | pull_request: 35 | # The branches below must be a subset of the branches above 36 | branches: ["main"] 37 | schedule: 38 | - cron: "0 0 * * 1" 39 | 40 | permissions: 41 | contents: read 42 | 43 | jobs: 44 | analyze: 45 | name: Analyze 46 | runs-on: ubuntu-latest 47 | permissions: 48 | actions: read 49 | contents: read 50 | security-events: write 51 | 52 | strategy: 53 | fail-fast: false 54 | matrix: 55 | language: ["javascript"] 56 | # CodeQL supports [ $supported-codeql-languages ] 57 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 58 | 59 | steps: 60 | - name: Harden Runner 61 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 62 | with: 63 | egress-policy: audit 64 | disable-telemetry: true 65 | 66 | - name: Checkout repository 67 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 68 | 69 | # Initializes the CodeQL tools for scanning. 70 | - name: Initialize CodeQL 71 | uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 72 | with: 73 | languages: ${{ matrix.language }} 74 | # If you wish to specify custom queries, you can do so here or in a config file. 75 | # By default, queries listed here will override any specified in a config file. 76 | # Prefix the list here with "+" to use these queries and those in the config file. 77 | 78 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 79 | # If this step fails, then you should remove it and run the build manually (see below) 80 | - name: Autobuild 81 | uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 82 | 83 | # ℹ️ Command-line programs to run using the OS shell. 
84 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 85 | 86 | # If the Autobuild fails above, remove it and uncomment the following three lines. 87 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 88 | 89 | # - run: | 90 | # echo "Run, Build Application using script" 91 | # ./location_of_script_within_repo/buildscript.sh 92 | 93 | - name: Perform CodeQL Analysis 94 | uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 95 | with: 96 | category: "/language:${{matrix.language}}" 97 | -------------------------------------------------------------------------------- /.github/workflows/docs.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | name: docs 19 | 20 | 'on': 21 | push: 22 | branches: 23 | - main 24 | 25 | permissions: 26 | contents: read 27 | 28 | jobs: 29 | website_update: 30 | runs-on: ubuntu-latest 31 | strategy: 32 | matrix: 33 | include: 34 | - APP_INSTALL_ID: 59973090 35 | REPOSITORY: conforma/conforma.github.io 36 | steps: 37 | - name: Trigger website update 38 | env: 39 | EC_AUTOMATION_KEY: ${{ secrets.EC_AUTOMATION_KEY }} 40 | run: | 41 | function createJWT() { 42 | local header=$(echo -n '{"alg":"RS256","typ":"JWT"}' | base64 | sed s/\+/-/ | sed -E s/=+$//) 43 | local now_utc=$(date --utc +%s) 44 | local payload=$(echo -n '{"iat":'$((now_utc - 60))',"exp":'$((now_utc + 120))',"iss":245286}' | base64 | sed s/\+/-/ | sed -E s/=+$//) 45 | local signature=$(echo -n "${header}.${payload}" | openssl dgst -sha256 -binary -sign <(echo "${EC_AUTOMATION_KEY}")| base64 | tr -d '\n=' | tr -- '+/' '-_') 46 | echo "${header}.${payload}.${signature}" 47 | } 48 | 49 | GITHUB_TOKEN=$(curl -s -X POST -H "Authorization: Bearer $(createJWT)" -H "Accept: application/vnd.github+json" https://api.github.com/app/installations/${{ matrix.APP_INSTALL_ID }}/access_tokens | jq -r .token) \ 50 | gh api repos/${{ matrix.REPOSITORY }}/dispatches -X POST --input <(echo '{"event_type":"update","client_payload":{}}') 51 | -------------------------------------------------------------------------------- /.github/workflows/pre-merge-ci.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | name: pre-merge-ci 19 | 20 | on: 21 | pull_request: 22 | branches: 23 | - main 24 | 25 | push: 26 | branches: 27 | - main 28 | 29 | workflow_dispatch: 30 | 31 | permissions: 32 | contents: read 33 | 34 | jobs: 35 | all-tests-and-checks: 36 | runs-on: ubuntu-latest 37 | steps: 38 | - name: Harden Runner 39 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 40 | with: 41 | egress-policy: audit 42 | disable-telemetry: true 43 | 44 | - name: Checkout code 45 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 46 | 47 | - name: Setup Go environment 48 | uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 49 | with: 50 | go-version-file: go.mod 51 | cache: true 52 | 53 | - name: Run checks 54 | run: | 55 | # allows us to use unshare to restrict network access 56 | sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 57 | make ci 58 | 59 | - name: Check for uncommitted changes 60 | run: | 61 | if ! 
git diff --exit-code -s; then 62 | for f in $(git diff --exit-code --name-only); do 63 | echo "::error file=$f,line=1,col=1,endColumn=1::File was modified in build" 64 | echo -e "\033[1;33mHint:\033[0m Maybe you need to run \033[1;32mmake generate-docs\033[0m" 65 | done 66 | exit 1 67 | fi 68 | 69 | # See https://issues.redhat.com/browse/HACBS-2391 to re-enable these checks 70 | # - name: Validate added bundles 71 | # run: ./hack/validate-acceptable-bundles.sh 72 | 73 | - name: Upload test coverage report 74 | uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 75 | if: always() 76 | env: 77 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 78 | -------------------------------------------------------------------------------- /.github/workflows/push-bundles.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | name: push-bundles 19 | 20 | on: 21 | push: 22 | branches: 23 | - main 24 | paths: 25 | - policy/** 26 | - data/** 27 | 28 | workflow_dispatch: 29 | 30 | permissions: 31 | contents: read 32 | 33 | jobs: 34 | push-policy-bundles: 35 | runs-on: ubuntu-latest 36 | 37 | steps: 38 | - name: Harden Runner 39 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 40 | with: 41 | egress-policy: audit 42 | disable-telemetry: true 43 | 44 | - name: Checkout code 45 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 46 | with: 47 | # So we can see in which commit a bundle's content was 48 | # most recently updated 49 | fetch-depth: 0 50 | 51 | - name: Setup Go environment 52 | uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 53 | with: 54 | go-version-file: go.mod 55 | cache: true 56 | 57 | - name: Docker login (quay.io/enterprise-contract) 58 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 59 | with: 60 | # See also BUNDLE_REPO defined in Makefile 61 | registry: quay.io 62 | username: ${{ secrets.BUNDLE_PUSH_USER_EC }} 63 | password: ${{ secrets.BUNDLE_PUSH_PASS_EC }} 64 | 65 | - name: Push bundles (quay.io/enterprise-contract) 66 | env: 67 | EC_AUTOMATION_KEY: ${{ secrets.EC_AUTOMATION_KEY }} 68 | APP_INSTALL_ID: 32872589 69 | run: hack/update-bundles.sh 70 | -------------------------------------------------------------------------------- /.github/workflows/scorecards.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # This workflow uses actions that are not certified by GitHub. They are provided 19 | # by a third-party and are governed by separate terms of service, privacy 20 | # policy, and support documentation. 21 | 22 | name: Scorecard supply-chain security 23 | on: 24 | # For Branch-Protection check. Only the default branch is supported. See 25 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 26 | branch_protection_rule: 27 | # To guarantee Maintained check is occasionally updated. See 28 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 29 | schedule: 30 | - cron: '20 7 * * 2' 31 | push: 32 | branches: ["main"] 33 | 34 | # Declare default permissions as read only. 35 | permissions: read-all 36 | 37 | jobs: 38 | analysis: 39 | name: Scorecard analysis 40 | runs-on: ubuntu-latest 41 | permissions: 42 | # Needed to upload the results to code-scanning dashboard. 43 | security-events: write 44 | # Needed to publish results and get a badge (see publish_results below). 
45 | id-token: write 46 | contents: read 47 | actions: read 48 | 49 | steps: 50 | - name: Harden Runner 51 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 52 | with: 53 | egress-policy: audit 54 | disable-telemetry: true 55 | 56 | - name: "Checkout code" 57 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 58 | with: 59 | persist-credentials: false 60 | 61 | - name: "Run analysis" 62 | uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 63 | with: 64 | results_file: results.sarif 65 | results_format: sarif 66 | # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: 67 | # - you want to enable the Branch-Protection check on a *public* repository, or 68 | # - you are installing Scorecards on a *private* repository 69 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. 70 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 71 | 72 | # Public repositories: 73 | # - Publish results to OpenSSF REST API for easy access by consumers 74 | # - Allows the repository to include the Scorecard badge. 75 | # - See https://github.com/ossf/scorecard-action#publishing-results. 76 | # For private repositories: 77 | # - `publish_results` will always be set to `false`, regardless 78 | # of the value entered here. 79 | publish_results: true 80 | 81 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 82 | # format to the repository Actions tab. 83 | - name: "Upload artifact" 84 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 85 | with: 86 | name: SARIF file 87 | path: results.sarif 88 | retention-days: 5 89 | 90 | # Upload the results to GitHub's code scanning dashboard. 
91 | - name: "Upload to code-scanning" 92 | uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 93 | with: 94 | sarif_file: results.sarif 95 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | data/config.json 2 | input/ 3 | public/ 4 | node_modules/ 5 | .cache/ 6 | *.swp 7 | coverage.json 8 | bin/ 9 | .idea/ 10 | -------------------------------------------------------------------------------- /.regal/config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # Configures Regal rules, see https://docs.styra.com/regal/category/rules 19 | 20 | # In most cases, consider addressing violations even if it requires restructuring the code. Chances 21 | # are that this will improve the code quality. If that is not feasible, then consider using an 22 | # inline comment to ignore the rule at a particular place. As a last resort, add an entry here. If 23 | # so, be sure to include a comment explaining why. Avoid turning violations into warnings as these 24 | # make it difficult to detect when a new issue is detected and they end up being ignored and 25 | # cluttering the output. 
26 | rules: 27 | style: 28 | todo-comment: 29 | # We use TODO comments throughout the code base to indicate work that 30 | # needs further refinement. 31 | level: ignore 32 | external-reference: 33 | # Although not using external-references in some places is a good idea, it can 34 | # quickly turn into a hot mess of parameters being passed around. 35 | level: ignore 36 | rule-length: 37 | # The test rules can be quite verbose as most of them use hard-coded data 38 | # which is very useful in tests. 39 | ignore: 40 | files: 41 | - "*_test.rego" 42 | file-length: 43 | ignore: 44 | files: 45 | # schemas can be quite large. 46 | - "*_schema.rego" 47 | line-length: 48 | ignore: 49 | files: 50 | # schemas can have very long attribute values, e.g. description. 51 | - "*_schema.rego" 52 | # This is generated by `make sync-test-data` and has many long lines 53 | - policy/lib/tekton/recorded_att_data_test.rego 54 | pointless-reassignment: 55 | ignore: 56 | files: 57 | # Tests usually use variables to improve code-readability. Although this is functionally 58 | # not needed, it does make it easier to understand the tests. 59 | - "*_test.rego" 60 | idiomatic: 61 | no-defined-entrypoint: 62 | level: ignore 63 | bugs: 64 | leaked-internal-reference: 65 | ignore: 66 | files: 67 | - "*_test.rego" 68 | argument-always-wildcard: 69 | ignore: 70 | files: 71 | # Using fn(_) is a common pattern to mock functions in tests.
72 | - "*_test.rego" 73 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | golang 1.23.1 2 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "opa.dependency_paths.opa": "${workspaceFolder}/hack/ec-opa.sh", 3 | "opa.dependency_paths.regal": "${workspaceFolder}/hack/regal.sh", 4 | "opa.formatter": "opa-fmt", 5 | "opa.languageServers": [ 6 | "regal" 7 | ], 8 | "[rego]": { 9 | "editor.defaultFormatter": "tsandall.opa" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /acceptance/features/release.feature: -------------------------------------------------------------------------------- 1 | Feature: Golden Container Image 2 | 3 | Scenario: Red Hat collection 4 | Given a sample policy input "golden-container" 5 | And a policy config: 6 | """ 7 | { 8 | "sources": [ 9 | { 10 | "policy": [ 11 | "$GITROOT/policy/lib", 12 | "$GITROOT/policy/release" 13 | ], 14 | "data": [ 15 | "$GITROOT/example/data" 16 | ], 17 | "config": { 18 | "include": [ 19 | "@redhat" 20 | ], 21 | "exclude": [ 22 | "cve.deprecated_cve_result_name", 23 | "source_image" 24 | ] 25 | } 26 | } 27 | ] 28 | } 29 | """ 30 | When input is validated 31 | Then there should be no violations in the result 32 | Then there should be no warnings in the result 33 | 34 | Scenario: Various excludes 35 | Given a sample policy input "golden-container" 36 | And a policy config: 37 | """ 38 | { 39 | "sources": [ 40 | { 41 | "policy": [ 42 | "$GITROOT/policy/lib", 43 | "$GITROOT/policy/release" 44 | ], 45 | "data": [ 46 | "$GITROOT/example/data" 47 | ], 48 | "config": { 49 | "include": ["*"], 50 | "exclude": [ 51 | "@rhtap-jenkins", 52 | "source_image", 53 | "github_certificate", 54 | 
"rpm_repos.ids_known:pkg:rpm/rhel/basesystem@11-13.el9?arch=noarch&upstream=basesystem-11-13.el9.src.rpm&distro=rhel-9.4" 55 | ] 56 | } 57 | } 58 | ] 59 | } 60 | """ 61 | When input is validated 62 | Then there should be no violations with "rhtap-jenkins" collection in the result 63 | And there should be no violations with "source_image" package in the result 64 | And there should be no violations with "rpm_repos.ids_known" code and "pkg:rpm/rhel/basesystem@11-13.el9?arch=noarch&upstream=basesystem-11-13.el9.src.rpm&distro=rhel-9.4" term in the result 65 | And there should be no warnings with "github_certificate" package in the result 66 | -------------------------------------------------------------------------------- /acceptance/features/task.feature: -------------------------------------------------------------------------------- 1 | Feature: Task Definition 2 | 3 | Scenario: Successful Red Hat collection 4 | Given a sample policy input "clamav-task" 5 | And a policy config: 6 | """ 7 | { 8 | "sources": [ 9 | { 10 | "policy": [ 11 | "$GITROOT/policy/lib", 12 | "$GITROOT/policy/task" 13 | ], 14 | "data": [ 15 | "$GITROOT/example/data" 16 | ], 17 | "config": { 18 | "include": [ 19 | "@redhat" 20 | ] 21 | } 22 | } 23 | ] 24 | } 25 | """ 26 | When input is validated 27 | Then there should be no violations in the result 28 | Then there should be no warnings in the result 29 | -------------------------------------------------------------------------------- /acceptance/samples/README.md: -------------------------------------------------------------------------------- 1 | # Samples 2 | 3 | This directory contains sample files meant to be used in the acceptance test scenarios. 4 | 5 | [policy-input-golden-container.json](./policy-input-golden-container.json) holds the 6 | [policy input](https://conforma.dev/docs/ec-cli/policy_input.html) as used by the 7 | EC CLI. 8 | 9 | The script `hack/refresh-examples.sh` will regenerate these files. 
10 | 11 | The files were initially generated by capturing the output of executing `ec validate input ... --output policy-input`. 12 | With the source information provided alongside with the image reference, for example: 13 | 14 | ```text 15 | $ cat images.json 16 | { 17 | "components": [ 18 | { 19 | "containerImage": "quay.io/konflux-ci/ec-golden-image:latest", 20 | "source": { 21 | "git": { 22 | "revision": "68b69547cad3c4ba856fe6b06154012f33dd8b5a", 23 | "url": "https://github.com/conforma/golden-container.git" 24 | } 25 | } 26 | } 27 | ] 28 | } 29 | 30 | $ ec validate image --images images.json --output policy-input \ 31 | --public-key cosign.pub --ignore-rekor --policy policy.json 32 | ``` 33 | 34 | [clamav-task.json](./clamav-task.json) contains a Task definition. It is a direct copy of the 35 | [ClamAV Task](https://github.com/konflux-ci/build-definitions/tree/main/task/clamav-scan) 36 | found in the 37 | [build-definitions](https://github.com/konflux-ci/build-definitions) repository. To fetch the 38 | latest: 39 | 40 | ```text 41 | curl -L https://raw.githubusercontent.com/konflux-ci/build-definitions/main/task/clamav-scan/0.1/clamav-scan.yaml | \ 42 | yq '.' -o json > acceptance/samples/clamav-task.json 43 | ``` 44 | -------------------------------------------------------------------------------- /antora/docs/antora.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | name: policy 19 | title: Conforma Policies 20 | version: ~ 21 | nav: 22 | - modules/ROOT/nav.adoc 23 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/attachments/rule_data.yml: -------------------------------------------------------------------------------- 1 | ../../../../../example/data/rule_data.yml -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/nav.adoc: -------------------------------------------------------------------------------- 1 | * xref:index.adoc[About Conforma] 2 | include::partial$release_policy_nav.adoc[] 3 | include::partial$pipeline_policy_nav.adoc[] 4 | include::partial$build_task_policy_nav.adoc[] 5 | include::partial$task_policy_nav.adoc[] 6 | include::partial$stepaction_policy_nav.adoc[] 7 | * xref:trusted_tasks.adoc[Trusted Tasks and Trusted Artifacts] 8 | * xref:trusting_tasks.adoc[Trusting Tasks] 9 | * xref:policy_bundles.adoc[Policy Bundles] 10 | * xref:authoring.adoc[Policy Authoring] 11 | * xref:tasks.adoc[Task Authoring] 12 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/pages/build_task_policy.adoc: -------------------------------------------------------------------------------- 1 | = Build Task Policy 2 | 3 | :numbered: 4 | 5 | These rules are applied to Tekton build task definitions. 6 | 7 | [#build_labels_package] 8 | == link:#build_labels_package[Tekton task build type label checks] 9 | 10 | Policies to verify that a Tekton build task definition has the required build type label. 
11 | 12 | * Package name: `build_labels` 13 | 14 | [#build_labels__build_type_label_set] 15 | === link:#build_labels__build_type_label_set[Build task has build type label] 16 | 17 | Confirm the build task definition has the required build type label. 18 | 19 | * Rule type: [rule-type-indicator failure]#FAILURE# 20 | * FAILURE message: `The required build label '%s' is missing` 21 | * Code: `build_labels.build_type_label_set` 22 | * https://github.com/conforma/policy/blob/{page-origin-refhash}/policy/build_task/build_labels/build_labels.rego#L17[Source, window="_blank"] 23 | 24 | [#build_labels__build_task_has_label] 25 | === link:#build_labels__build_task_has_label[Build task has label] 26 | 27 | Confirm that the build task definition includes at least one label. 28 | 29 | * Rule type: [rule-type-indicator failure]#FAILURE# 30 | * FAILURE message: `The task definition does not include any labels` 31 | * Code: `build_labels.build_task_has_label` 32 | * https://github.com/conforma/policy/blob/{page-origin-refhash}/policy/build_task/build_labels/build_labels.rego#L30[Source, window="_blank"] 33 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/pages/index.adoc: -------------------------------------------------------------------------------- 1 | = Conforma Policies 2 | 3 | :numbered: 4 | 5 | Conforma (formerly known as Enterprise Contract) is a tool for verifying the 6 | provenance of container images built in a CI system such as Konflux, and 7 | validating them against a clearly defined policy. 8 | 9 | Conforma policies are defined using the 10 | https://www.openpolicyagent.org/docs/latest/policy-language/[rego policy 11 | language] and are described here in xref:release_policy.adoc[Release Policy] and 12 | xref:pipeline_policy.adoc[Pipeline Policy]. 
13 | 14 | == Additional Documentation 15 | 16 | * https://konflux-ci.dev/docs/[Konflux Documentation] 17 | * xref:ec-cli::index.adoc[Conforma CLI Documentation] 18 | 19 | == Code 20 | 21 | * https://github.com/conforma/policy[github.com/conforma/policy] 22 | * https://github.com/enterprise-contract/ec-cli[github.com/enterprise-contract/ec-cli] 23 | * https://github.com/enterprise-contract[github.com/enterprise-contract] 24 | * https://github.com/konflux-ci[github.com/konflux-ci] 25 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/pages/policy_bundles.adoc: -------------------------------------------------------------------------------- 1 | = Policy Bundles 2 | 3 | :numbered: 4 | 5 | The policies and the data used by the policies are available as OCI artifacts 6 | compatible with the `conftest pull` command. 7 | 8 | == Location 9 | 10 | The latest versions of the bundles can be found in the following repos: 11 | 12 | https://quay.io/repository/enterprise-contract/ec-release-policy?tab=tags[`quay.io/enterprise-contract/ec-release-policy`]:: 13 | 14 | Used for validating attestations created by Tekton Chains. Contains the 15 | contents of https://github.com/conforma/policy/tree/main/policy/release[`policy/release`] 16 | and https://github.com/conforma/policy/tree/main/policy/lib[`policy/lib`] 17 | in this repo. 18 | 19 | https://quay.io/repository/enterprise-contract/ec-pipeline-policy?tab=tags[`quay.io/enterprise-contract/ec-pipeline-policy`]:: 20 | 21 | Used for validating Tekton Pipeline definitions. Contains the contents of 22 | https://github.com/conforma/policy/tree/main/policy/pipeline[`policy/pipeline`] 23 | and https://github.com/conforma/policy/tree/main/policy/lib[`policy/lib`]. 24 | 25 | == Artifact Hub entries 26 | 27 | The bundles mentioned above are also listed in https://artifacthub.io/packages/search?org=enterprise-contract[Artifact Hub]. 
28 | 29 | == Example usage 30 | 31 | The bundles are designed to be used with the xref:ec-cli::index.adoc[ec-cli], but you 32 | can also use them with conftest directly. The input should include a top level key called `attestations` which contains a list of 33 | attestations for the image being validated. For example: 34 | 35 | ``` 36 | cosign download attestation quay.io/konflux-ci/ec-golden-image:latest | jq --slurp '{"attestations":[.[].payload|@base64d|fromjson]}' > input.json 37 | conftest pull -p . quay.io/enterprise-contract/ec-release-policy quay.io/enterprise-contract/ec-policy-data 38 | conftest test input.json -d data -p policy --all-namespaces -o json | yq -P 39 | ``` 40 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/pages/stepaction_policy.adoc: -------------------------------------------------------------------------------- 1 | = StepAction Policy 2 | 3 | :numbered: 4 | 5 | These rules are applied to Tekton StepAction definitions. 6 | 7 | [#image_package] 8 | == link:#image_package[Tekton StepAction images policies] 9 | 10 | This package ensures that a StepAction definition contains a valid and allowed value for the image reference. 11 | 12 | * Package name: `image` 13 | 14 | [#image__permitted] 15 | === link:#image__permitted[Image comes from permitted registry] 16 | 17 | Confirm the StepAction uses a container image with a URL that matches one of the prefixes in the provided list of allowed step image registry prefixes. The list is customizeable via the `allowed_step_image_registry_prefixes` rule data key. 18 | 19 | *Solution*: Make sure the container image used comes from an approved registry. 
20 | 21 | * Rule type: [rule-type-indicator failure]#FAILURE# 22 | * FAILURE message: `Image ref %q is disallowed` 23 | * Code: `image.permitted` 24 | * https://github.com/conforma/policy/blob/{page-origin-refhash}/policy/stepaction/image/image.rego#L38[Source, window="_blank"] 25 | 26 | [#image__accessible] 27 | === link:#image__accessible[Image is accessible] 28 | 29 | Confirm the container image used in the StepTemplate is accessible. 30 | 31 | *Solution*: Make sure the container image used in the StepTemplate is pushed to the registry and that it can be fetched. 32 | 33 | * Rule type: [rule-type-indicator failure]#FAILURE# 34 | * FAILURE message: `Image ref %q is inaccessible` 35 | * Code: `image.accessible` 36 | * https://github.com/conforma/policy/blob/{page-origin-refhash}/policy/stepaction/image/image.rego#L16[Source, window="_blank"] 37 | 38 | [#image__rule_data] 39 | === link:#image__rule_data[Rule data provided] 40 | 41 | Confirm the `allowed_step_image_registry_prefixes` rule data is provided. 42 | 43 | *Solution*: Make sure the xref:ec-cli:ROOT:configuration.adoc#_data_sources[data sources] contains a key 'allowed_step_image_registry_prefixes' that contains a list of approved registries. 44 | 45 | * Rule type: [rule-type-indicator failure]#FAILURE# 46 | * FAILURE message: `%s` 47 | * Code: `image.rule_data` 48 | * https://github.com/conforma/policy/blob/{page-origin-refhash}/policy/stepaction/image/image.rego#L62[Source, window="_blank"] 49 | 50 | [#kind_package] 51 | == link:#kind_package[Tekton StepAction kind checks] 52 | 53 | Policies to verify that a Tekton StepAction definition has the expected value for kind. 54 | 55 | * Package name: `kind` 56 | 57 | [#kind__valid] 58 | === link:#kind__valid[StepAction definition has expected kind] 59 | 60 | Confirm the StepAction definition has the kind "StepAction". 
61 | 62 | * Rule type: [rule-type-indicator failure]#FAILURE# 63 | * FAILURE message: `Unexpected kind %q for StepAction definition` 64 | * Code: `kind.valid` 65 | * https://github.com/conforma/policy/blob/{page-origin-refhash}/policy/stepaction/kind/kind.rego#L14[Source, window="_blank"] 66 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/pages/tasks.adoc: -------------------------------------------------------------------------------- 1 | = Task Authoring 2 | 3 | :numbered: 4 | 5 | This document is meant to assist Tekton Task authors in authoring and conforming 6 | with the xref:task_policy.adoc[Task policies] and 7 | xref:release_policy.adoc[Release Policies]. 8 | 9 | == Task annotations 10 | 11 | Tasks can be annotated to influence the xref:release_policy.adoc[Release Policy] 12 | rules. These annotations are placed in the `annotations` section under the 13 | `metadata` key of the Task definition. 14 | 15 | === Setting Task expiry 16 | 17 | A Task can be set to expire by setting the 18 | `build.appstudio.redhat.com/expires-on` annotation. The presence of the 19 | annotation means that the task is or will be unsupported by a certain date/time 20 | provided in the value of the annotation in the RFC3339 format. 21 | 22 | By default, the rule will prompt the user to `Update to a newer version of the Task.`. 23 | The message can be customized by setting the `build.appstudio.redhat.com/expiry-message` 24 | annotation. 
25 | 26 | For example, this will set the Task to be unsupported after 2025-01-01 at 27 | midnight UTC; prior to that a warning will be emitted by the 28 | xref:release_policy.adoc#tasks__unsupported[Task version unsupported] rule and 29 | following that date/time a violation will be reported instead: 30 | 31 | ```yaml 32 | apiVersion: tekton.dev/v1 33 | kind: Task 34 | metadata: 35 | name: my-task 36 | annotations: 37 | build.appstudio.redhat.com/expires-on: 2025-01-01T00:00:00Z 38 | spec: 39 | ... 40 | ``` 41 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/partials/build_task_policy_nav.adoc: -------------------------------------------------------------------------------- 1 | * xref:build_task_policy.adoc[Build Task Policy] 2 | ** xref:build_task_policy.adoc#build_labels_package[Tekton task build type label checks] 3 | *** xref:build_task_policy.adoc#build_labels__build_type_label_set[Build task has build type label] 4 | *** xref:build_task_policy.adoc#build_labels__build_task_has_label[Build task has label] 5 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/partials/pipeline_policy_nav.adoc: -------------------------------------------------------------------------------- 1 | * xref:pipeline_policy.adoc[Pipeline Policy] 2 | ** xref:pipeline_policy.adoc#task_bundle_package[Pipeline definition Task bundle policies] 3 | *** xref:pipeline_policy.adoc#task_bundle__missing_required_data[Missing required data] 4 | *** xref:pipeline_policy.adoc#task_bundle__untrusted_task_bundle[Task bundle is not trusted] 5 | *** xref:pipeline_policy.adoc#task_bundle__out_of_date_task_bundle[Task bundle is out of date] 6 | *** xref:pipeline_policy.adoc#task_bundle__empty_task_bundle_reference[Task bundle reference is empty] 7 | *** xref:pipeline_policy.adoc#task_bundle__disallowed_task_reference[Task bundle was not used or is not defined] 8 | *** 
xref:pipeline_policy.adoc#task_bundle__unpinned_task_bundle[Unpinned task bundle reference] 9 | ** xref:pipeline_policy.adoc#basic_package[Pipeline definition sanity checks] 10 | *** xref:pipeline_policy.adoc#basic__expected_kind[Pipeline definition has expected kind] 11 | ** xref:pipeline_policy.adoc#required_tasks_package[Required tasks] 12 | *** xref:pipeline_policy.adoc#required_tasks__missing_future_required_task[Missing future required task] 13 | *** xref:pipeline_policy.adoc#required_tasks__missing_required_task[Missing required task] 14 | *** xref:pipeline_policy.adoc#required_tasks__tasks_found[Pipeline contains tasks] 15 | *** xref:pipeline_policy.adoc#required_tasks__required_tasks_list_present[Required task list is present in rule data] 16 | *** xref:pipeline_policy.adoc#required_tasks__required_tasks_found[Required tasks found in pipeline definition] 17 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/partials/stepaction_policy_nav.adoc: -------------------------------------------------------------------------------- 1 | * xref:stepaction_policy.adoc[StepAction Policy] 2 | ** xref:stepaction_policy.adoc#image_package[Tekton StepAction images policies] 3 | *** xref:stepaction_policy.adoc#image__permitted[Image comes from permitted registry] 4 | *** xref:stepaction_policy.adoc#image__accessible[Image is accessible] 5 | *** xref:stepaction_policy.adoc#image__rule_data[Rule data provided] 6 | ** xref:stepaction_policy.adoc#kind_package[Tekton StepAction kind checks] 7 | *** xref:stepaction_policy.adoc#kind__valid[StepAction definition has expected kind] 8 | -------------------------------------------------------------------------------- /antora/docs/modules/ROOT/partials/task_policy_nav.adoc: -------------------------------------------------------------------------------- 1 | * xref:task_policy.adoc[Task Policy] 2 | ** xref:task_policy.adoc#step_images_package[Tekton Task Step image policies] 3 | 
*** xref:task_policy.adoc#step_images__step_images_accessible[Step images are valid] 4 | ** xref:task_policy.adoc#step_image_registries_package[Tekton Task Step image registry policies] 5 | *** xref:task_policy.adoc#step_image_registries__step_image_registry_prefix_list_provided[Permitted step image registry prefix list provided] 6 | *** xref:task_policy.adoc#step_image_registries__step_images_permitted[Step images come from permitted registry] 7 | ** xref:task_policy.adoc#annotations_package[Tekton Task annotations] 8 | *** xref:task_policy.adoc#annotations__expires_on_format[Task definition uses expires-on annotation in RFC3339 format] 9 | ** xref:task_policy.adoc#results_package[Tekton Task result] 10 | *** xref:task_policy.adoc#results__required[Required result defined] 11 | *** xref:task_policy.adoc#results__rule_data_provided[Rule data provided] 12 | ** xref:task_policy.adoc#kind_package[Tekton task kind checks] 13 | *** xref:task_policy.adoc#kind__kind_present[Kind field is present in task definition] 14 | *** xref:task_policy.adoc#kind__expected_kind[Task definition has expected kind] 15 | ** xref:task_policy.adoc#trusted_artifacts_package[Trusted Artifacts Conventions] 16 | *** xref:task_policy.adoc#trusted_artifacts__parameter[Parameter] 17 | *** xref:task_policy.adoc#trusted_artifacts__result[Result] 18 | *** xref:task_policy.adoc#trusted_artifacts__workspace[Workspace] 19 | -------------------------------------------------------------------------------- /docs/asciidoc/nav.template: -------------------------------------------------------------------------------- 1 | {{- $doc := . }} 2 | {{- $lvl := "" -}} 3 | * xref:{{ .Qualifier }}_policy.adoc[{{ .Name }} Policy] 4 | {{- with .Collections }} 5 | {{- $lvl = "*" }} 6 | ** xref:_available_rule_collections[Rule Collections] 7 | {{- range . 
}} 8 | *** xref:release_policy.adoc#{{ .Title }}[{{ .Title }}] 9 | {{- end }} 10 | ** {{ $doc.Name }} Rules 11 | {{- end }}{{/* .Collections */}} 12 | {{- range .Packages }} 13 | {{ $lvl }}** xref:{{ $doc.Qualifier }}_policy.adoc#{{ anchor .Annotations }}[{{ .Annotations.Title }}] 14 | {{- range .Rules }} 15 | {{ $lvl }}*** xref:{{ $doc.Qualifier }}_policy.adoc#{{ anchor . }}[{{ .Title }}] 16 | {{- end}} 17 | {{- end}} 18 | -------------------------------------------------------------------------------- /docs/asciidoc/policy.template: -------------------------------------------------------------------------------- 1 | {{- $doc := . -}} 2 | = {{ .Name }} Policy 3 | 4 | :numbered: 5 | 6 | {{ .Description }} 7 | {{- with .Collections }} 8 | 9 | == Available rule collections 10 | 11 | [cols="2,6"] 12 | |=== 13 | |*Name* 14 | |*Description* 15 | {{- range . }} 16 | 17 | | [#{{ .Title }}]`{{ .Title }}` 18 | a| {{ .Description }} 19 | 20 | Rules included:{{ "\n" }} 21 | 22 | {{- range .Rules }} 23 | * xref:{{ $doc.Qualifier }}_policy.adoc#{{ anchor . }}[{{ index .Custom "package_title" }}: {{ .Title }}] 24 | {{- end}} 25 | {{- end }}{{/* range . */}} 26 | |=== 27 | {{- end }}{{/* .Collections */}} 28 | {{- range .Packages }} 29 | {{- $pkg := . }} 30 | 31 | [#{{ anchor .Annotations }}] 32 | == link:#{{ anchor .Annotations }}[{{ .Title }}] 33 | 34 | {{ .Description }} 35 | 36 | * Package name: `{{ packageName . }}` 37 | 38 | {{- range .Rules }} 39 | 40 | [#{{ anchor . }}] 41 | === link:#{{ anchor . }}[{{ .Title }}] 42 | 43 | {{ .Description }} 44 | 45 | {{- with index .Custom "solution" }} 46 | 47 | *Solution*: {{ . }} 48 | {{- end }} 49 | 50 | * Rule type: [rule-type-indicator {{ warningOrFailure . }}]#{{ toUpper (warningOrFailure .) }}# 51 | * {{ toTitle (warningOrFailure .) }} message: `{{ index .Custom "failure_msg" }}` 52 | * Code: `{{ packageName $pkg }}.{{ index .Custom "short_name" }}` 53 | {{- with index .Custom "effective_on" }} 54 | * Effective from: `{{ . 
}}` 55 | {{- end }}{{/* index .Custom "effective_on" */}} 56 | {{- if not (isBuiltIn .) }} 57 | * https://github.com/conforma/policy/blob/{page-origin-refhash}/{{ .Location.File }}#L{{ .Location.Row }}[Source, window="_blank"] 58 | {{- end }}{{/* isBuiltIn */}} 59 | {{- end }}{{/* range .Rules */}} 60 | 61 | {{- end}} 62 | -------------------------------------------------------------------------------- /docs/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/conforma/policy/docs 2 | 3 | go 1.24.2 4 | 5 | require github.com/open-policy-agent/opa v0.68.0 6 | 7 | require ( 8 | github.com/OneOfOne/xxhash v1.2.8 // indirect 9 | github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 // indirect 10 | github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect 11 | github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect 12 | gopkg.in/yaml.v2 v2.4.0 // indirect 13 | sigs.k8s.io/yaml v1.4.0 // indirect 14 | ) 15 | -------------------------------------------------------------------------------- /docs/go.sum: -------------------------------------------------------------------------------- 1 | github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8= 2 | github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= 3 | github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= 4 | github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= 5 | github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= 6 | github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= 7 | github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= 8 | github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= 9 | github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= 10 | 
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= 11 | github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= 12 | github.com/open-policy-agent/opa v0.68.0 h1:Jl3U2vXRjwk7JrHmS19U3HZO5qxQRinQbJ2eCJYSqJQ= 13 | github.com/open-policy-agent/opa v0.68.0/go.mod h1:5E5SvaPwTpwt2WM177I9Z3eT7qUpmOGjk1ZdHs+TZ4w= 14 | github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 h1:MkV+77GLUNo5oJ0jf870itWm3D0Sjh7+Za9gazKc5LQ= 15 | github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= 16 | github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= 17 | github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= 18 | github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= 19 | github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= 20 | github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= 21 | github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= 22 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 23 | gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= 24 | gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= 25 | gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= 26 | gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= 27 | sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= 28 | sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= 29 | 
-------------------------------------------------------------------------------- /docs/main.go: -------------------------------------------------------------------------------- 1 | // Copyright The Conforma Contributors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | // 15 | // SPDX-License-Identifier: Apache-2.0 16 | 17 | package main 18 | 19 | import ( 20 | "flag" 21 | "fmt" 22 | "os" 23 | "strings" 24 | 25 | "github.com/conforma/policy/docs/asciidoc" 26 | ) 27 | 28 | var adoc = flag.String("adoc", "", "Location of the generated Asciidoc files") 29 | 30 | var rego stringAry 31 | 32 | type stringAry []string 33 | 34 | func (s *stringAry) String() string { 35 | return strings.Join(*s, ",") 36 | } 37 | 38 | func (s *stringAry) Set(v string) error { 39 | *s = append(*s, v) 40 | return nil 41 | } 42 | 43 | func main() { 44 | flag.Var(®o, "rego", "Location of the Rego files") 45 | flag.Parse() 46 | 47 | if *adoc == "" || len(rego) == 0 { 48 | fmt.Fprintf(os.Stderr, "-adoc and -rego flags are required\n") 49 | os.Exit(1) 50 | } 51 | 52 | var err error 53 | defer func() { 54 | if err != nil { 55 | fmt.Fprintf(os.Stderr, "%v\n", err) 56 | os.Exit(1) 57 | } 58 | }() 59 | 60 | if err = os.MkdirAll(*adoc, 0755); err != nil { 61 | return 62 | } 63 | 64 | if err = asciidoc.GenerateAsciidoc(*adoc, rego...); err != nil { 65 | return 66 | } 67 | } 68 | 
-------------------------------------------------------------------------------- /example/data/README.md: -------------------------------------------------------------------------------- 1 | # Example Policy Data 2 | 3 | This repository provides a set of files that include example policy data. These are meant to help 4 | users that want to create their own policy data. 5 | 6 | Currently, the data is split up into three different files. This is done purely to facilitate 7 | maintenance of the data. They could, instead, be split across many other files, or even combined 8 | into a single one. In any case, the data is merged into a single data source. 9 | -------------------------------------------------------------------------------- /example/data/known_rpm_repositories.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # See also https://github.com/release-engineering/rhtap-ec-policy/blob/main/data/known_rpm_repositories.yml 19 | rule_data: 20 | known_rpm_repositories: 21 | - "rhel-9-for-x86_64-appstream-rpms" 22 | - "rhel-9-for-x86_64-appstream-source-rpms" 23 | - "rhel-9-for-x86_64-baseos-rpms" 24 | - "rhel-9-for-x86_64-baseos-source-rpms" 25 | -------------------------------------------------------------------------------- /example/data/required_tasks.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # Usage: https://conforma.dev/docs/policy/release_policy.html#tasks_package 19 | pipeline-required-tasks: 20 | fbc: 21 | - effective_on: "2023-08-31T00:00:00Z" 22 | tasks: 23 | - buildah 24 | - clair-scan 25 | - clamav-scan 26 | - deprecated-image-check 27 | - fbc-related-image-check 28 | - fbc-validation 29 | - git-clone 30 | - init 31 | - prefetch-dependencies 32 | - sast-snyk-check 33 | - show-sbom 34 | - summary 35 | docker: 36 | - effective_on: "2023-11-11T00:00:00Z" 37 | tasks: 38 | - buildah 39 | - clair-scan 40 | - clamav-scan 41 | - deprecated-image-check 42 | - git-clone 43 | - init 44 | - prefetch-dependencies 45 | - sast-snyk-check 46 | - show-sbom 47 | - summary 48 | generic: 49 | - effective_on: "2023-08-31T00:00:00Z" 50 | tasks: 51 | - buildah 52 | - clair-scan 53 | - clamav-scan 54 | - deprecated-image-check 55 | - git-clone 56 | - init 57 | - prefetch-dependencies 58 | - sast-snyk-check 59 | - show-sbom 60 | - summary 61 | java: 62 | - effective_on: "2023-08-31T00:00:00Z" 63 | tasks: 64 | - clair-scan 65 | - clamav-scan 66 | - deprecated-image-check 67 | - git-clone 68 | - init 69 | - prefetch-dependencies 70 | - s2i-java 71 | - sast-snyk-check 72 | - show-sbom 73 | - summary 74 | nodejs: 75 | - effective_on: "2023-08-31T00:00:00Z" 76 | tasks: 77 | - clair-scan 78 | - clamav-scan 79 | - deprecated-image-check 80 | - git-clone 81 | - init 82 | - prefetch-dependencies 83 | - s2i-nodejs 84 | - sast-snyk-check 85 | - show-sbom 86 | - summary 87 | 88 | # Usage: https://conforma.dev/docs/policy/release_policy.html#tasks_package 89 | required-tasks: 90 | - effective_on: "2023-08-31T00:00:00Z" 91 | tasks: 92 | - clair-scan 93 | - clamav-scan 94 | - git-clone 95 | - init 96 | - prefetch-dependencies 97 | - sast-snyk-check 98 | - summary 99 | -------------------------------------------------------------------------------- /hack/ec-opa.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # This script is a wrapper around the EC version of OPA. Due to the custom rego functions provided 19 | # by EC, e.g. ec.oci.image_files, a custom version of OPA is required. 20 | set -euo pipefail 21 | cd "$(dirname "$0")/.." 22 | make --silent bin/ec 23 | exec bin/ec opa "$@" 24 | -------------------------------------------------------------------------------- /hack/refresh-examples.sh: -------------------------------------------------------------------------------- 1 | #!/bin/env bash 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # Updates the JSON sample files used by acceptance tests in /acceptance/samples 19 | 20 | set -o errexit 21 | set -o pipefail 22 | set -o nounset 23 | 24 | ROOT_DIR=$(git rev-parse --show-toplevel) 25 | IMAGE=quay.io/konflux-ci/ec-golden-image:latest 26 | # If $IMAGE is released to a different repo, some of the attachments may not get copied over, e.g. 27 | # CVE scan report. Set $ORIGINAL_IMAGE_REPO to the repo in which the image was originally built into 28 | # which should contain all the attachments. 29 | ORIGINAL_IMAGE_REPO='quay.io/redhat-user-workloads/rhtap-contract-tenant/golden-container' 30 | REPOSITORY=https://github.com/conforma/golden-container.git 31 | PUBLIC_KEY='-----BEGIN PUBLIC KEY----- 32 | MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEZP/0htjhVt2y0ohjgtIIgICOtQtA 33 | naYJRuLprwIv6FDhZ5yFjYUEtsmoNcW7rx2KM6FOXGsCX3BNc7qhHELT+g== 34 | -----END PUBLIC KEY-----' 35 | INPUT_FILE="${ROOT_DIR}/acceptance/samples/policy-input-golden-container.json" 36 | TRUSTED_TASKS_FILE="${ROOT_DIR}/example/data/trusted_tekton_tasks.yml" 37 | 38 | trap 'rm -f "${TRUSTED_TASKS_FILE}-update"' EXIT 39 | 40 | DIGEST="$(skopeo inspect --no-tags "docker://${IMAGE}" --format '{{index .Digest}}')" 41 | ORIGINAL_IMAGE_REF="${ORIGINAL_IMAGE_REPO}@${DIGEST}" 42 | REVISION="$(skopeo inspect --no-tags "docker://${ORIGINAL_IMAGE_REF}" --format '{{index .Labels "vcs-ref"}}')" 43 | 44 | IMAGES="{ 45 | "components": [ 46 | { 47 | "containerImage": "${ORIGINAL_IMAGE_REF}", 48 | "source": { 49 | "git": { 50 | "revision": "${REVISION}", 51 | "url": "${REPOSITORY}" 52 | } 53 | } 54 | } 55 | ] 56 | }" 57 | 58 | POLICY='{ 59 | "sources": [ 60 | { 61 | "policy": [ 62 | "'${ROOT_DIR}'/policy/lib", 63 | "'${ROOT_DIR}'/policy/release" 64 | ], 65 | "data": [ 66 | "github.com/release-engineering/rhtap-ec-policy//data", 67 | "oci::quay.io/konflux-ci/tekton-catalog/data-acceptable-bundles:latest" 68 | ], 69 | "config": { 70 | "include": [ 71 | 
"@redhat" 72 | ], 73 | "exclude": [ 74 | "source_image" 75 | ] 76 | } 77 | } 78 | ] 79 | }' 80 | 81 | go run -modfile "${ROOT_DIR}/go.mod" github.com/enterprise-contract/ec-cli validate image \ 82 | --images "${IMAGES}" \ 83 | --public-key "${PUBLIC_KEY}" \ 84 | --ignore-rekor \ 85 | --policy "${POLICY}" \ 86 | --output policy-input="${INPUT_FILE}" \ 87 | --output data="${TRUSTED_TASKS_FILE}-update" \ 88 | --output text 89 | 90 | # shellcheck disable=SC2094 91 | # we have one attestation per CPU architecture, so we pick the first one 92 | cat <<< "$(jq --slurp --sort-keys '.[0]' "${INPUT_FILE}")" > "${INPUT_FILE}" 93 | 94 | # shellcheck disable=SC2094 95 | cat <<< "$(yq eval-all 'select(fileIndex==0).trusted_tasks = (select(fileIndex==1) | .[0].[0].trusted_tasks) | select(fileIndex==0)' \ 96 | "${TRUSTED_TASKS_FILE}" \ 97 | "${TRUSTED_TASKS_FILE}-update")" > "${TRUSTED_TASKS_FILE}" 98 | -------------------------------------------------------------------------------- /hack/regal.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # This script is a wrapper around the Regal version specified in go.mod. It is intended to ensure 19 | # the expected regal version is used. 20 | set -euo pipefail 21 | cd "$(dirname "$0")/.." 
22 | make --silent bin/regal 23 | exec bin/regal "$@" 24 | -------------------------------------------------------------------------------- /hack/simplecov.rego: -------------------------------------------------------------------------------- 1 | # Copied from https://github.com/open-policy-agent/contrib/tree/main/simplecov/simplecov.rego 2 | package hack 3 | 4 | import rego.v1 5 | 6 | from_opa := {"coverage": coverage} 7 | 8 | coverage[file] := obj if { 9 | some file, report in input.files 10 | obj := {"lines": lines(report)} 11 | } 12 | 13 | covered_map(report) := cm if { 14 | covered := object.get(report, "covered", []) 15 | cm := {line: 1 | 16 | some item in covered 17 | some line in numbers.range(item.start.row, item.end.row) 18 | } 19 | } 20 | 21 | not_covered_map(report) := ncm if { 22 | not_covered := object.get(report, "not_covered", []) 23 | ncm := {line: 0 | 24 | some item in not_covered 25 | some line in numbers.range(item.start.row, item.end.row) 26 | } 27 | } 28 | 29 | lines(report) := lines if { 30 | cm := covered_map(report) 31 | ncm := not_covered_map(report) 32 | keys := sort([line | some line, _ in object.union(cm, ncm)]) 33 | last := keys[count(keys) - 1] 34 | 35 | lines := [value | 36 | some i in numbers.range(1, last) 37 | value := to_value(cm, ncm, i) 38 | ] 39 | } 40 | 41 | to_value(cm, _, line) := 1 if { 42 | cm[line] 43 | } 44 | 45 | to_value(_, ncm, line) := 0 if { 46 | ncm[line] 47 | } 48 | 49 | to_value(cm, ncm, line) := null if { 50 | not cm[line] 51 | not ncm[line] 52 | } 53 | -------------------------------------------------------------------------------- /hack/update-bundles.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # Pushes policy bundles to quay.io, but only if anything changed since 19 | # the last bundle was pushed. 20 | # 21 | set -o errexit 22 | set -o pipefail 23 | set -o nounset 24 | 25 | REPO_PREFIX="${REPO_PREFIX-quay.io/enterprise-contract/}" 26 | ROOT_DIR=$( git rev-parse --show-toplevel ) 27 | BUNDLES="release pipeline task build_task" 28 | OPA="go run github.com/enterprise-contract/ec-cli opa" 29 | ORAS="go run oras.land/oras/cmd/oras" 30 | 31 | DRY_RUN=${DRY_RUN:-""} 32 | DRY_RUN_ECHO="" 33 | [ "$DRY_RUN" == "1" ] && DRY_RUN_ECHO="echo #" 34 | 35 | function bundle_src_dirs() { 36 | echo policy/lib 37 | echo "policy/$1" 38 | } 39 | 40 | function bundle_subdir() { 41 | echo "policy" 42 | } 43 | 44 | function exclusions() { 45 | echo "artifacthub-pkg.yml" 46 | } 47 | 48 | function repo_name() { 49 | echo "ec-$1-policy" 50 | } 51 | 52 | tmp_oci_dirs=() 53 | function cleanup() { 54 | rm -rf "${tmp_oci_dirs[@]}" 55 | } 56 | trap cleanup EXIT 57 | 58 | 59 | for b in $BUNDLES; do 60 | # Find the git sha where the source files were last updated 61 | mapfile -t src_dirs < <(bundle_src_dirs "$b") 62 | last_update_sha=$(git log -n 1 --pretty=format:%h -- "${src_dirs[@]}") 63 | 64 | # Check if the bundle for that git sha exists already 65 | repo=$(repo_name "$b") 66 | tag=git-$last_update_sha 67 | push_repo="${REPO_PREFIX}$repo" 68 | 69 | skopeo_args=() 70 | skopeo_cp_args=() 71 | if [[ $push_repo == *'localhost:'* ]]; then 72 | skopeo_args+=(--tls-verify=false) 73 | 
skopeo_cp_args+=(--dest-tls-verify=false --src-tls-verify=false) 74 | fi 75 | 76 | tag_found="$( 77 | { 78 | skopeo list-tags "${skopeo_args[@]}" "docker://${push_repo}" | 79 | jq --arg tag "${tag}" -r 'any(.Tags[]; . == $tag)'; 80 | } || echo false 81 | )" 82 | if [[ "$tag_found" == 'true' ]]; then 83 | # No push needed 84 | echo "Policy bundle $push_repo:$tag exists already, no push needed" 85 | else 86 | # Push needed 87 | echo "Pushing policy bundle $push_repo:$tag now" 88 | 89 | # Prepare a temp dir with the bundle's content 90 | tmp_dir=$(mktemp -d -t "ec-bundle-$b.XXXXXXXXXX") 91 | tmp_oci_dirs+=("${tmp_dir}") 92 | content_dir=$tmp_dir/$(bundle_subdir "$b") 93 | mkdir "${content_dir}" 94 | for d in "${src_dirs[@]}"; do 95 | cp -r "$d" "${content_dir}" 96 | done 97 | 98 | # Remove some files 99 | exclude_files=$(exclusions "$b") 100 | for f in $exclude_files; do 101 | find "${content_dir}" -name "$f" -delete 102 | done 103 | 104 | # Show the content 105 | cd "${tmp_dir}" || exit 1 106 | find . -type f 107 | 108 | # go.mod/go.sum files needs to be copied for go run to function 109 | cp "${ROOT_DIR}/go.mod" "${ROOT_DIR}/go.sum" "$tmp_dir" 110 | 111 | # Verify the selected sources can be compiled as one unit, e.g. 
"policy/lib" is included 112 | ${OPA} build "${src_dirs[@]}" --output /dev/null 113 | 114 | # Now push (honor DRY_RUN, same as the 'latest' tag step below) 115 | $DRY_RUN_ECHO ${ORAS} push "$push_repo:$tag" "${src_dirs[@]}" \ 116 | --annotation "org.opencontainers.image.revision=${last_update_sha}" 117 | 118 | # Set the 'latest' tag 119 | $DRY_RUN_ECHO skopeo copy --quiet "docker://$push_repo:$tag" "docker://$push_repo:latest" "${skopeo_cp_args[@]}" 120 | 121 | cd "${ROOT_DIR}" 122 | fi 123 | 124 | done 125 | -------------------------------------------------------------------------------- /hack/update-infra-deployments.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # Updates a local clone of redhat-appstudio/infra-deployments to use the latest 19 | # packages produced by this repository.
20 | # Usage: 21 | # update-infra-deployments.sh 22 | 23 | set -o errexit 24 | set -o pipefail 25 | set -o nounset 26 | 27 | TARGET_DIR="${1}" 28 | cd "${TARGET_DIR}" || exit 1 29 | 30 | TASK_POLICY_REF='quay.io/enterprise-contract/ec-task-policy:latest' 31 | 32 | function oci_source() { 33 | img="${1}" 34 | manifest="$(mktemp --tmpdir)" 35 | function cleanup() { 36 | # shellcheck disable=SC2317 37 | rm "${manifest}" 38 | } 39 | trap cleanup RETURN 40 | # Must use --raw because skopeo cannot handle an OPA bundle image format. 41 | skopeo inspect --raw "docker://${img}" > "${manifest}" 42 | revision="$(jq -r '.annotations["org.opencontainers.image.revision"]' "${manifest}")" 43 | if [[ -n "${revision}" && "${revision}" != "null" ]]; then 44 | img="${img/:latest/:git-${revision}}" 45 | fi 46 | digest="$(sha256sum "${manifest}" | awk '{print $1}')" 47 | img_ref_tag="${img}" 48 | img_ref_digest="${img/:*/}@sha256:${digest}" 49 | # sanity check 50 | diff <(skopeo inspect --raw "docker://${img_ref_tag}") <(skopeo inspect --raw "docker://${img_ref_digest}") >&2 51 | img_ref="${img}@sha256:${digest}" 52 | echo "oci::${img_ref}" 53 | } 54 | 55 | function update_ecp_resources() { 56 | local source_matcher=$1 57 | local source_url=$2 58 | 59 | for yaml_file in $(find . -type f \( -name "*.yaml" -o -name "*.yml" \)); do 60 | # First, filter out irrelevant files. stderr is discarded because if the YAML file is not a 61 | # match, then yq prints the error "no matches found" which is quite noisy given the amount of 62 | # YAML files being ignored. 63 | yq e -e \ 64 | '(select(has("kind")) | select(.kind == "EnterpriseContractPolicy"))' \ 65 | $yaml_file 2> /dev/null || continue 66 | # Finally, update the source references. A previous iteration used yq to perform a more 67 | # precise update. However, making a conditional update is non-trivial. sed is simpler here. 
68 | sed -i 's%'${source_matcher}'%'${source_url}'%' $yaml_file 69 | done 70 | } 71 | 72 | echo 'Resolving task bundle image references...' 73 | TASK_POLICY_REF_OCI="$(oci_source ${TASK_POLICY_REF})" 74 | echo "Resolved task policy is ${TASK_POLICY_REF_OCI}" 75 | 76 | echo 'Updating infra-deployments...' 77 | # The "oci::" is not required by EC CLI. The expression below handles both cases. It's important to 78 | # note that this script will normalize the source references to always include the oci:: prefix. 79 | update_ecp_resources '\b\(oci::\)\{0,1\}.*/ec-task-policy:.*$' "${TASK_POLICY_REF_OCI}" 80 | echo 'infra-deployments updated successfully' 81 | -------------------------------------------------------------------------------- /hack/validate-acceptable-bundles.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # Use Conforma to validate the Tekton pipeline bundles being added to 19 | # data/acceptable_tekton_bundles.yml. The list of bundles to verify is a computed by 20 | # diff'ing against the list of bundles in origin/main. If there's no difference, this 21 | # script succeeds. 
22 | # Usage: 23 | # hack/validate-acceptable-bundles.sh 24 | 25 | set -o errexit 26 | set -o pipefail 27 | set -o nounset 28 | 29 | if [ "${RUNNER_DEBUG:-}" == "1" ]; then 30 | set -x 31 | fi 32 | 33 | TKN="go run github.com/tektoncd/cli/cmd/tkn" 34 | 35 | if ! command -v ec > /dev/null 2>&1; then 36 | # this is most likely on GitHub Actions, which runs on 64bit Linux 37 | curl -o ec -sSL https://github.com/enterprise-contract/ec-cli/releases/download/snapshot/ec_linux_amd64 38 | chmod +x ec 39 | PATH=$PATH:$PWD 40 | trap "rm ec" EXIT 41 | fi 42 | 43 | bundles_file='data/acceptable_tekton_bundles.yml' 44 | 45 | function list_pipeline_bundles() { 46 | < "${1}" yq \ 47 | '."pipeline-bundles" | to_entries | .[] | [.key + "@" + .value[].digest] | .[]' | sort -u 48 | } 49 | 50 | origin_bundles="$(list_pipeline_bundles <(curl -s "https://raw.githubusercontent.com/conforma/policy/main/${bundles_file}"))" 51 | pr_bundles="$(list_pipeline_bundles "${bundles_file}")" 52 | new_bundles="$(comm -13 <(echo "${origin_bundles}") <(echo "${pr_bundles}"))" 53 | 54 | all_success=true 55 | for ref in ${new_bundles}; do 56 | # Verify the image is accessible 57 | set +e 58 | skopeo_error="$(skopeo inspect --raw "docker://${ref}" 2>&1 >/dev/null)" 59 | accessible="$?"
60 | set -e 61 | if [[ $accessible -ne 0 ]]; then 62 | all_success=false 63 | echo "❌ ${ref}" 64 | echo "${skopeo_error}" 65 | echo 66 | continue 67 | fi 68 | 69 | # Evaluate the pipeline definition 70 | report="$(ec validate definition \ 71 | --policy git::https://github.com/conforma/policy//policy/lib?ref=main \ 72 | --policy git::https://github.com/conforma/policy//policy/pipeline?ref=main \ 73 | --data git::https://github.com/release-engineering/rhtap-ec-policy//data \ 74 | --file <(${TKN} bundle list -o json "${ref}" 2> /dev/null) \ 75 | || true)" 76 | 77 | # Process evaluation result 78 | ref_success="$(echo -n "${report}" | jq -r '.success')" 79 | if [[ "$ref_success" == "true" ]]; then 80 | echo "✅ ${ref}" 81 | else 82 | all_success=false 83 | echo "❌ ${ref}" 84 | echo "${report}" | jq '.definitions[].violations[]' 85 | echo 86 | fi 87 | done 88 | 89 | if [[ "$all_success" == false ]]; then 90 | echo "😭 Validation failed!" 91 | exit 1 92 | fi 93 | echo "🎉 Great success!" 94 | -------------------------------------------------------------------------------- /policy/artifacthub-repo.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License.
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # Artifact Hub repository metadata file 19 | # 20 | repositoryID: f96ee0c3-136b-4459-afc3-42f452f21bfc 21 | owners: # (optional, used to claim repository ownership) 22 | - name: sbaird 23 | email: sbaird@redhat.com 24 | - name: lcarva 25 | email: lcarva@redhat.com 26 | - name: zregvart 27 | email: zregvart@redhat.com 28 | - name: robnester-rh 29 | email: rnester@redhat.com 30 | - name: cuipinghuo 31 | email: chuo@redhat.com 32 | - name: jstuart 33 | email: jstuart@redhat.com 34 | -------------------------------------------------------------------------------- /policy/build_task/build_labels/build_labels.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Tekton task build type label checks 4 | # description: >- 5 | # Policies to verify that a Tekton build task definition has the 6 | # required build type label. 7 | # 8 | package build_labels 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.tekton 14 | 15 | build_label := "build.appstudio.redhat.com/build_type" 16 | 17 | # METADATA 18 | # title: Build task has build type label 19 | # description: >- 20 | # Confirm the build task definition has the required build type label. 21 | # custom: 22 | # short_name: build_type_label_set 23 | # failure_msg: The required build label '%s' is missing 24 | # 25 | deny contains result if { 26 | not build_label in object.keys(tekton.task_labels(input)) 27 | result := lib.result_helper(rego.metadata.chain(), [build_label]) 28 | } 29 | 30 | # METADATA 31 | # title: Build task has label 32 | # description: >- 33 | # Confirm that the build task definition includes at least one label. 
34 | # custom: 35 | # short_name: build_task_has_label 36 | # failure_msg: The task definition does not include any labels 37 | # 38 | deny contains result if { 39 | not tekton.task_labels(input) 40 | result := lib.result_helper(rego.metadata.chain(), []) 41 | } 42 | -------------------------------------------------------------------------------- /policy/build_task/build_labels/build_labels_test.rego: -------------------------------------------------------------------------------- 1 | package build_labels_test 2 | 3 | import rego.v1 4 | 5 | import data.build_labels 6 | import data.lib 7 | 8 | test_build_label_found if { 9 | # regal ignore:line-length 10 | lib.assert_empty(build_labels.deny) with input as {"metadata": {"labels": {"build.appstudio.redhat.com/build_type": "docker"}}} 11 | } 12 | 13 | test_build_label_not_found if { 14 | lib.assert_equal_results(build_labels.deny, {{ 15 | "code": "build_labels.build_type_label_set", 16 | "msg": "The required build label 'build.appstudio.redhat.com/build_type' is missing", 17 | }}) with input as {"metadata": {"labels": {"bad": "docker"}}} 18 | } 19 | 20 | test_no_labels if { 21 | lib.assert_equal_results(build_labels.deny, {{ 22 | "code": "build_labels.build_task_has_label", 23 | "msg": "The task definition does not include any labels", 24 | }}) with input as {"metadata": {"name": "no_labels"}} 25 | } 26 | -------------------------------------------------------------------------------- /policy/lib/arrays/array_helpers.rego: -------------------------------------------------------------------------------- 1 | package lib.arrays 2 | 3 | import rego.v1 4 | 5 | _max_int := 9223372036854775807 6 | 7 | # Returns true if left is less or equal to right. Comparison is done by using 8 | # native comparison in Rego if both left and right are of the same type, or by 9 | # comparing their numerical values if they're not. Undefined values are always 10 | # less or equal to any other value. 
11 | le(left, right) := is_le if { 12 | type_name(left) == type_name(right) 13 | is_le := left <= right 14 | } else := is_le if { 15 | is_le := to_number(left) <= to_number(right) 16 | } 17 | 18 | # Calculates the rank of an object by given key within an array ary. That is, 19 | # returns number of elements `o` of ary that have `o[key]` less than `obj[key]` 20 | # for a given object `obj`. 21 | rank(obj, key, ary) := count(less_or_eq) if { 22 | less_or_eq := [o | 23 | some o in ary 24 | left := object.get(o, key, _max_int) 25 | right := object.get(obj, key, _max_int) 26 | le(left, right) 27 | ] 28 | } 29 | 30 | # Sorts elements of the array of objects by the the specified key in ascending 31 | # order. Performs a # N x (N-1) search of an element of `ary` that has the rank 32 | # corresponding to the indexing variable 1..N. 33 | sort_by(key, ary) := [sorted | 34 | some i in numbers.range(1, count(ary)) 35 | 36 | ranked := [o | 37 | some o in ary 38 | 39 | i == rank(o, key, ary) 40 | ] 41 | 42 | count(ranked) > 0 # skip gaps in ranking that happen when two or more objects have the same rank 43 | some sorted in ranked # flatten any objects with the same rank 44 | ] 45 | -------------------------------------------------------------------------------- /policy/lib/arrays/array_helpers_test.rego: -------------------------------------------------------------------------------- 1 | package lib.arrays_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.lib.arrays 7 | 8 | ary := [{"x": 1, "z": "X"}, {"x": 2}, {"x": 6, "y": "B"}, {"x": 1, "z": "X"}, {"x": -1}] 9 | 10 | test_rank if { 11 | lib.assert_equal(4, arrays.rank({"x": 4, "y": "A"}, "x", ary)) 12 | lib.assert_equal(1, arrays.rank({"x": -1}, "x", ary)) 13 | lib.assert_equal(0, arrays.rank({"x": -2}, "x", ary)) 14 | lib.assert_equal(5, arrays.rank({"x": 7}, "x", ary)) 15 | lib.assert_equal(count(ary), arrays.rank({}, "x", ary)) 16 | lib.assert_equal(count(ary), arrays.rank({}, "w", ary)) 17 | } 18 | 19 | 
test_sort_by if { 20 | lib.assert_equal( 21 | [ 22 | {"x": -1}, 23 | {"x": 1, "z": "X"}, {"x": 1, "z": "X"}, {"x": 2}, {"x": 6, "y": "B"}, 24 | ], 25 | arrays.sort_by("x", ary), 26 | ) 27 | lib.assert_equal( 28 | [ 29 | {"x": 6, "y": "B"}, 30 | {"x": 1, "z": "X"}, {"x": 2}, {"x": 1, "z": "X"}, {"x": -1}, 31 | ], 32 | arrays.sort_by("y", ary), 33 | ) 34 | } 35 | 36 | test_sort_by_mixed_types if { 37 | lib.assert_equal([{"x": 0}, {"x": "1"}, {"x": 2.0}], arrays.sort_by("x", [{"x": "1"}, {"x": 0}, {"x": 2.0}])) 38 | } 39 | 40 | test_le if { 41 | arrays.le(0, 0) 42 | arrays.le("A", "A") 43 | arrays.le(1, "1") 44 | arrays.le("2", 2) 45 | arrays.le(3.0, 3.0) 46 | arrays.le("4.0", 4.0) 47 | arrays.le(5.0, "5.0") 48 | 49 | arrays.le(0, 1) 50 | arrays.le("A", "B") 51 | arrays.le("0", 1) 52 | arrays.le(0, "1") 53 | 54 | not arrays.le(1, 0) 55 | not arrays.le("B", "A") 56 | not arrays.le(1, "0") 57 | not arrays.le("1", 0) 58 | } 59 | -------------------------------------------------------------------------------- /policy/lib/assertions.rego: -------------------------------------------------------------------------------- 1 | package lib 2 | 3 | import rego.v1 4 | 5 | # Produce more useful output when a test fails 6 | 7 | # Beware: `lib.assert_equal(, ...)` does not work like 8 | # you would expect, so it's better not to use this for booleans 9 | # 10 | assert_equal(left_value, right_value) if { 11 | not _assert_equal_fails(left_value, right_value) 12 | } 13 | 14 | _assert_equal_fails(left_value, right_value) if { 15 | left_value != right_value 16 | _assert_output_two_values("equal", left_value, right_value) 17 | } 18 | 19 | assert_not_equal(left_value, right_value) if { 20 | not _assert_not_equal_fails(left_value, right_value) 21 | } 22 | 23 | _assert_not_equal_fails(left_value, right_value) if { 24 | left_value == right_value 25 | _assert_output_two_values("not equal", left_value, right_value) 26 | } 27 | 28 | assert_empty(value) if { 29 | not _assert_empty_fails(value) 30 | 
} 31 | 32 | _assert_empty_fails(value) if { 33 | count(value) > 0 34 | _assert_output_one_value("empty", value) 35 | } 36 | 37 | assert_not_empty(value) if { 38 | not _assert_not_empty_fails(value) 39 | } 40 | 41 | _assert_not_empty_fails(value) if { 42 | count(value) == 0 43 | _assert_output_one_value("not empty", value) 44 | } 45 | 46 | _assert_output_two_values(assert_type, left_value, right_value) if { 47 | debug_output := sprintf("Assert %s failure:\n Left value: %s\n Right value: %s", [ 48 | assert_type, 49 | left_value, right_value, 50 | ]) 51 | 52 | # Use trace to show debug output in query explanations and print for stdout 53 | # regal ignore:print-or-trace-call 54 | trace(debug_output) 55 | 56 | # regal ignore:print-or-trace-call 57 | print(debug_output) 58 | } 59 | 60 | _assert_output_one_value(assert_type, value) if { 61 | debug_output := sprintf("Assert %s failure:\n Value: %s", [assert_type, value]) 62 | 63 | # regal ignore:print-or-trace-call 64 | trace(debug_output) 65 | 66 | # regal ignore:print-or-trace-call 67 | print(debug_output) 68 | } 69 | 70 | # assert_equal_results is successful if both results match. 71 | # The values of "collections" and "effective_on" attributes are ignored. 72 | assert_equal_results(left_result, right_result) if { 73 | ignore_paths := ["/collections", "/effective_on"] 74 | assert_equal( 75 | _ignore_attributes(left_result, ignore_paths), 76 | _ignore_attributes(right_result, ignore_paths), 77 | ) 78 | } 79 | 80 | # assert_equal_results_no_collections is successful if both results match. 81 | # The values of "collections" are ignored. 
82 | assert_equal_results_no_collections(left_result, right_result) if { 83 | ignore_paths := ["/collections"] 84 | assert_equal( 85 | _ignore_attributes(left_result, ignore_paths), 86 | _ignore_attributes(right_result, ignore_paths), 87 | ) 88 | } 89 | 90 | _ignore_attributes(values, ignore_paths) := new_values if { 91 | new_values := {new_value | 92 | some value in values 93 | new_value := json.remove(value, ignore_paths) 94 | } 95 | count(values) == count(new_values) 96 | } else := values 97 | -------------------------------------------------------------------------------- /policy/lib/image/image.rego: -------------------------------------------------------------------------------- 1 | package lib.image 2 | 3 | import rego.v1 4 | 5 | # parse returns a data structure representing the different portions 6 | # of the OCI image reference. 7 | parse(ref) := d if { 8 | trimmed_ref := trim_space(ref) 9 | 10 | # Note: This regex is simplified and does not cover all valid hostname cases. 11 | # It only matches hostnames in the form of registry.local' or 'Registry1.io'. 12 | # It does not include all subdomains and does not support Unicode. 
13 | regex.match(`^(?:[a-zA-Z0-9-])+\.[a-zA-Z]+`, trimmed_ref) 14 | 15 | # a valid repo will contain a / 16 | contains(trimmed_ref, "/") 17 | 18 | digest_parts := split(trimmed_ref, "@") 19 | 20 | repo_parts := split(digest_parts[0], "/") 21 | 22 | tag_parts := split(repo_parts[count(repo_parts) - 1], ":") 23 | count(tag_parts) <= 2 24 | tag := _get(tag_parts, 1, "") 25 | 26 | repo := concat( 27 | "/", 28 | array.concat( 29 | array.slice(repo_parts, 0, count(repo_parts) - 1), 30 | [tag_parts[0]], 31 | ), 32 | ) 33 | 34 | digest := _get(digest_parts, 1, "") 35 | 36 | d := { 37 | "digest": digest, 38 | "repo": repo, 39 | "tag": tag, 40 | } 41 | } 42 | 43 | # Formats the parsed reference as string 44 | str(d) := s1 if { 45 | d.repo != "" 46 | d.digest != "" 47 | d.tag != "" 48 | s1 := sprintf("%s:%s@%s", [d.repo, d.tag, d.digest]) 49 | } else := s2 if { 50 | d.repo != "" 51 | d.digest != "" 52 | s2 := sprintf("%s@%s", [d.repo, d.digest]) 53 | } else := s3 if { 54 | d.repo != "" 55 | d.tag != "" 56 | s3 := sprintf("%s:%s", [d.repo, d.tag]) 57 | } 58 | 59 | # equal_ref returns true if two image references point to the same image. The 60 | # algorithm first checks if the constituent parts repository, tag and digest are 61 | # all equal 62 | equal_ref(ref1, ref2) if { 63 | img1 := parse(ref1) 64 | img2 := parse(ref2) 65 | 66 | img1 == img2 67 | } 68 | 69 | # equal_ref returns true if two image references point to the same image, 70 | # ignoring the tag. This complements the case where all parts of the reference 71 | # need to be equal. 72 | equal_ref(ref1, ref2) if { 73 | img1 := parse(ref1) 74 | 75 | # need to make sure that the digest of one reference is present, otherwise we 76 | # might end up comparing image references without tags and digests. 
equal_ref is 77 | # commutative, so we can check that the digest exists for one of the references, 78 | # in this case img1 79 | img1.digest != "" 80 | 81 | img2 := parse(ref2) 82 | object.remove(img1, ["tag"]) == object.remove(img2, ["tag"]) 83 | } 84 | 85 | _get(ary, index, default_value) := value if { 86 | value := ary[index] 87 | } else := default_value 88 | 89 | # Returns a value if the reference is for an Image Index. 90 | is_image_index(ref) if { 91 | ec.oci.descriptor(ref).mediaType in { 92 | "application/vnd.oci.image.index.v1+json", 93 | "application/vnd.docker.distribution.manifest.list.v2+json", 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /policy/lib/json/schema.rego: -------------------------------------------------------------------------------- 1 | package lib.json 2 | 3 | import rego.v1 4 | 5 | # Validates schema reporting the error message as well as the severity 6 | validate_schema(doc, schema) := issues if { 7 | count(_arg_issues(doc, schema)) == 0 8 | issues := _validation_issues(doc, schema) 9 | } else := _arg_issues(doc, schema) 10 | 11 | _validation_issues(doc, schema) := issues if { 12 | not is_null(doc) 13 | not is_null(schema) 14 | d := _prepare_document(doc) 15 | ok_error := json.match_schema(d, schema) 16 | ok := ok_error[0] 17 | not ok 18 | errors := ok_error[1] 19 | issues := [i | 20 | some e in errors 21 | i := { 22 | "message": e.error, # e.desc is ignored, seems to repeat what is in e.error 23 | "severity": _severity(e), 24 | } 25 | ] 26 | } 27 | 28 | _arg_issues(doc, schema) := [i | 29 | some check in [ 30 | {is_null(doc) == false: "Provided empty document for schema validation"}, 31 | {is_null(schema) == false: "Provided empty schema for schema validation"}, 32 | _check_schema(schema), 33 | ] 34 | some ok, msg in check 35 | not ok 36 | i := { 37 | "message": msg, 38 | "severity": "failure", 39 | } 40 | ] 41 | 42 | _check_schema(schema) := ok_msg if { 43 | not is_null(schema) 
44 | ok_error := json.verify_schema(schema) 45 | ok := ok_error[0] 46 | not ok 47 | error := ok_error[1] 48 | ok_msg := {false: sprintf("Provided schema is not a valid JSON Schema: %s", [error])} 49 | } else := {true, ""} 50 | 51 | _prepare_document(doc) := d if { 52 | is_array(doc) 53 | 54 | # match_schema expects either a marshaled JSON resource (String) or an 55 | # Object. It doesn't handle an Array directly. 56 | d := json.marshal(doc) 57 | } else := doc 58 | 59 | _severity(e) := "warning" if { 60 | startswith(e.desc, "Additional property") 61 | } else := "failure" 62 | -------------------------------------------------------------------------------- /policy/lib/json/schema_test.rego: -------------------------------------------------------------------------------- 1 | package lib.json_test 2 | 3 | import data.lib 4 | import data.lib.json as j 5 | import rego.v1 6 | 7 | test_validate_args if { 8 | lib.assert_equal( 9 | [ 10 | { 11 | "message": "Provided empty document for schema validation", 12 | "severity": "failure", 13 | }, 14 | { 15 | "message": "Provided empty schema for schema validation", 16 | "severity": "failure", 17 | }, 18 | ], 19 | j.validate_schema(null, null), 20 | ) 21 | lib.assert_equal( 22 | [{ 23 | "message": "Provided empty schema for schema validation", 24 | "severity": "failure", 25 | }], 26 | j.validate_schema({}, null), 27 | ) 28 | lib.assert_equal( 29 | [{ 30 | "message": "Provided empty document for schema validation", 31 | "severity": "failure", 32 | }], 33 | j.validate_schema(null, {}), 34 | ) 35 | lib.assert_equal( 36 | [{ 37 | "message": "Provided schema is not a valid JSON Schema: jsonschema: wrong type, expected string or object", 38 | "severity": "failure", 39 | }], 40 | j.validate_schema({}, ["something"]), 41 | ) 42 | } 43 | 44 | test_validate_schema_ok if { 45 | lib.assert_equal( 46 | [], 47 | j.validate_schema({"a": 3}, { 48 | "$schema": "https://json-schema.org/draft/2020-12/schema", 49 | "properties": {"a": {"type": 
"number"}}, 50 | }), 51 | ) 52 | lib.assert_equal( 53 | [], 54 | j.validate_schema([{"a": 3}], { 55 | "$schema": "https://json-schema.org/draft/2020-12/schema", 56 | "type": "array", 57 | "items": {"properties": {"a": {"type": "number"}}}, 58 | }), 59 | ) 60 | } 61 | 62 | test_validate_schema_not_ok if { 63 | lib.assert_equal( 64 | [{ 65 | "message": "a: Invalid type. Expected: number, given: string", 66 | "severity": "failure", 67 | }], 68 | j.validate_schema({"a": "b"}, { 69 | "$schema": "https://json-schema.org/draft/2020-12/schema", 70 | "properties": {"a": {"type": "number"}}, 71 | }), 72 | ) 73 | lib.assert_equal( 74 | [{ 75 | "message": "0.a: Invalid type. Expected: number, given: string", 76 | "severity": "failure", 77 | }], 78 | j.validate_schema([{"a": "b"}], { 79 | "$schema": "https://json-schema.org/draft/2020-12/schema", 80 | "type": "array", 81 | "items": {"properties": {"a": {"type": "number"}}}, 82 | }), 83 | ) 84 | } 85 | 86 | test_validate_schema_unknown_property_warning if { 87 | lib.assert_equal( 88 | [{ 89 | "message": "(Root): Additional property b is not allowed", 90 | "severity": "warning", 91 | }], 92 | j.validate_schema({"a": 3, "b": "here"}, { 93 | "$schema": "https://json-schema.org/draft/2020-12/schema", 94 | "properties": {"a": {"type": "number"}}, 95 | "additionalProperties": false, 96 | }), 97 | ) 98 | } 99 | -------------------------------------------------------------------------------- /policy/lib/k8s/k8s.rego: -------------------------------------------------------------------------------- 1 | package lib.k8s 2 | 3 | import rego.v1 4 | 5 | # name returns the name of the resource. If a name is not defined, "noname" is returned. This 6 | # function always returns a value. 7 | name(resource) := name if { 8 | name := resource.metadata.name 9 | } else := "noname" 10 | 11 | # version returns the version of the resource as defined via the "app.kubernetes.io/version" label. 12 | # This is NOT the API Version of the resource. 
More info about this label in 13 | # https://kubernetes.io/docs/concepts/overview/working-with-objects/common-labels/#labels 14 | # If a version is not defined, "noversion" is returned. This function always returns a value. 15 | version(resource) := version if { 16 | version := resource.metadata.labels["app.kubernetes.io/version"] 17 | } else := "noversion" 18 | 19 | # name_version is a convenience function that returns the resource's name and version. This 20 | # function always returns a value. 21 | name_version(resource) := sprintf("%s/%s", [name(resource), version(resource)]) 22 | -------------------------------------------------------------------------------- /policy/lib/k8s/k8s_test.rego: -------------------------------------------------------------------------------- 1 | package lib.k8s_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.lib.k8s 7 | 8 | test_name if { 9 | lib.assert_equal(k8s.name({}), "noname") 10 | lib.assert_equal(k8s.name(""), "noname") 11 | lib.assert_equal(k8s.name(123), "noname") 12 | 13 | lib.assert_equal(k8s.name({"metadata": {"name": "spam"}}), "spam") 14 | } 15 | 16 | test_version if { 17 | lib.assert_equal(k8s.version({}), "noversion") 18 | lib.assert_equal(k8s.version(""), "noversion") 19 | lib.assert_equal(k8s.version(123), "noversion") 20 | 21 | lib.assert_equal( 22 | k8s.version({"metadata": {"labels": {"app.kubernetes.io/version": "1.0"}}}), 23 | "1.0", 24 | ) 25 | } 26 | 27 | test_name_version if { 28 | lib.assert_equal(k8s.name_version({}), "noname/noversion") 29 | lib.assert_equal(k8s.name_version(""), "noname/noversion") 30 | lib.assert_equal(k8s.name_version(123), "noname/noversion") 31 | 32 | lib.assert_equal(k8s.name_version({"metadata": {"name": "spam"}}), "spam/noversion") 33 | 34 | lib.assert_equal( 35 | k8s.name_version({"metadata": {"labels": {"app.kubernetes.io/version": "1.0"}}}), 36 | "noname/1.0", 37 | ) 38 | 39 | lib.assert_equal( 40 | k8s.name_version({"metadata": {"name": "spam", "labels": 
{"app.kubernetes.io/version": "1.0"}}}), 41 | "spam/1.0", 42 | ) 43 | } 44 | -------------------------------------------------------------------------------- /policy/lib/konflux/konflux.rego: -------------------------------------------------------------------------------- 1 | package lib.konflux 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.lib.image 7 | import data.lib.tekton 8 | 9 | # Currently, it's not possible to determine if the image being validated is an Image Index or an 10 | # Image Manifest, see https://github.com/enterprise-contract/ec-cli/issues/2121. This function is 11 | # implemented as a workaround. It uses Konflux-specific heuristics to determine if the provided 12 | # image is an Image Index. 13 | is_validating_image_index if { 14 | image_index_digests := {digest | 15 | some attestation in lib.pipelinerun_attestations 16 | some task in tekton.build_tasks(attestation) 17 | 18 | # In Konflux, the Task that creates an Image Index emits the IMAGES result which contains 19 | # all of the related Image Manifests. 
		# Only consider build Tasks that actually emitted a non-empty IMAGES result;
		# in Konflux that result is only produced by the Task building an Image Index.
		count(trim_space(tekton.task_result(task, "IMAGES"))) > 0
		digest := trim_space(tekton.task_result(task, "IMAGE_DIGEST"))
		count(digest) > 0
	}

	# The image under validation is treated as an Image Index when its digest is
	# among the digests collected from those Tasks.
	image.parse(input.image.ref).digest in image_index_digests
}
-------------------------------------------------------------------------------- /policy/lib/konflux/konflux_test.rego: --------------------------------------------------------------------------------
package lib.konflux_test

import rego.v1

import data.lib
import data.lib.konflux

# Digest sha256:fff matches the IMAGE_DIGEST result in _attestation, so the
# image is recognized as an Image Index.
test_is_image_index if {
	konflux.is_validating_image_index with input.attestations as [_attestation]
		with input.image.ref as "registry.local/ham@sha256:bbb"
} if false # placeholder guard; see real assertion below

test_is_image_index if {
	konflux.is_validating_image_index with input.attestations as [_attestation]
		with input.image.ref as "registry.local/ham@sha256:fff"
}

# A digest not emitted by any build Task must not be classified as an Image Index.
test_is_image_index_unknown_digest if {
	not konflux.is_validating_image_index with input.attestations as [_attestation]
		with input.image.ref as "registry.local/ham@sha256:bbb"
}

# An empty IMAGES result disqualifies the Task, even when IMAGE_DIGEST matches.
test_is_image_index_empty_images if {
	att := json.patch(
		_attestation,
		[{"op": "add", "path": "/statement/predicate/buildConfig/tasks/0/results/0/value", "value": ""}],
	)
	not konflux.is_validating_image_index with input.attestations as [att]
		with input.image.ref as "registry.local/ham@sha256:fff"
}

# Minimal SLSA v0.2 attestation fixture: one build Task emitting IMAGES,
# IMAGE_URL and IMAGE_DIGEST results.
_attestation := {"statement": {"predicate": {
	"buildType": lib.tekton_pipeline_run,
	"buildConfig": {"tasks": [{"results": [
		{
			"name": "IMAGES",
			"type": "string",
			"value": "registry.local/spam@sha256:abc, registry.local/bacon@sha256:bcd",
		},
		{
			"name": "IMAGE_URL",
			"type": "string",
			"value": "registry.local/eggs:latest",
		},
		{
			"name": "IMAGE_DIGEST",
			"type": "string",
			"value": "sha256:fff",
		},
	]}]},
}}}
-------------------------------------------------------------------------------- /policy/lib/result_helper.rego: --------------------------------------------------------------------------------
package lib
import rego.v1

import data.lib.time as time_lib

# result_helper builds the standard result object (code, msg, effective_on) for
# a deny/warn rule from its annotation metadata chain. When the rule's custom
# annotations declare collections, they are merged into the result; otherwise
# the basic result is returned as-is.
result_helper(chain, failure_sprintf_params) := result if {
	with_collections := {"collections": _rule_annotations(chain).custom.collections}
	result := object.union(_basic_result(chain, failure_sprintf_params), with_collections)
} else := result if {
	result := _basic_result(chain, failure_sprintf_params)
}

# Same as result_helper, with an additional "term" attribute merged in.
result_helper_with_term(chain, failure_sprintf_params, term) := object.union(
	result_helper(chain, failure_sprintf_params),
	{"term": term},
)

# Same as result_helper, with an additional "severity" attribute merged in.
result_helper_with_severity(chain, failure_sprintf_params, severity) := object.union(
	result_helper(chain, failure_sprintf_params),
	{"severity": severity},
)

# The minimal result: rule code, formatted failure message, and the time the
# rule becomes effective (derived from the annotation chain).
_basic_result(chain, failure_sprintf_params) := {
	"code": _code(chain),
	"msg": sprintf(_rule_annotations(chain).custom.failure_msg, failure_sprintf_params),
	"effective_on": time_lib.when(chain),
}

# _code derives the "<package>.<short_name>" identifier for the active rule.
_code(chain) := code if {
	rule_path := chain[0].path
	pkg_name := _pkg_name(rule_path)

	# Todo someday maybe: Conftest supports denies named deny_some_name,
	# so we could use that format and ditch the short name annotation.
	rule_name := _rule_annotations(chain).custom.short_name

	# Put them together
	code := sprintf("%s.%s", [pkg_name, rule_name])
}

# The first entry in the chain always points to the active rule, even if it has
# no declared annotations (in which case the annotations member is not present).
# Thus, result_helper assumes every rule defines annotations. At the very least
# custom.short_name must be present.
_rule_annotations(chain) := chain[0].annotations

# _pkg_name converts a rule path (e.g. ["data", "foo", "deny"]) into a dotted
# package name ("foo") by stripping the leading "data" and the trailing
# deny/warn rule name.
_pkg_name(rule_path) := name if {
	# "data" is automatically added by rego.
	p1 := _left_strip_elements(["data"], rule_path)

	# Remove the actual rule name as that is not part of the package.
	# "deny" and "warn" are each stripped at most once, from the right.
	p2 := _right_strip_elements(["deny"], p1)
	p3 := _right_strip_elements(["warn"], p2)

	name := concat(".", p3)
}

# _left_strip_elements removes items_to_strip from the front of list when the
# list starts with exactly that prefix; otherwise the list is returned unchanged.
_left_strip_elements(items_to_strip, list) := new_list if {
	items_to_strip_count := count(items_to_strip)
	array.slice(list, 0, items_to_strip_count) == items_to_strip
	new_list := array.slice(list, items_to_strip_count, count(list))
} else := list

# _right_strip_elements strips a suffix by reversing, left-stripping, and
# reversing back.
_right_strip_elements(items_to_strip, list) := array.reverse(_left_strip_elements(items_to_strip, array.reverse(list)))
-------------------------------------------------------------------------------- /policy/lib/result_helper_test.rego: --------------------------------------------------------------------------------
package lib_test

import rego.v1

import data.lib

test_result_helper if {
	expected_result := {
		"code": "oh.Hey",
		"effective_on": "2022-01-01T00:00:00Z",
		"msg": "Bad thing foo",
	}

	rule_annotations := {"custom": {
		"short_name": "Hey",
		"failure_msg": "Bad thing %s",
	}}

	chain := [
		{"annotations": rule_annotations, "path": ["data", "oh", "deny"]},
		{"annotations": {}, "path": ["ignored", "ignored"]}, # Actually not needed any more
	]

	lib.assert_equal(expected_result, lib.result_helper(chain, ["foo"]))
}

# Without the leading "data" element the package name is used verbatim.
test_result_helper_without_package_annotation if {
	expected_result := {
		"code": "package_name.Hey", # Fixme
		"effective_on": "2022-01-01T00:00:00Z",
		"msg": "Bad thing foo",
	}

	rule_annotations := {"custom": {
		"short_name": "Hey",
		"failure_msg": "Bad thing %s",
	}}

	chain := [{"annotations": rule_annotations, "path": ["package_name", "deny"]}]

	lib.assert_equal(expected_result, lib.result_helper(chain, ["foo"]))
}

# Collections declared in the rule annotations are surfaced in the result.
test_result_helper_with_collections if {
	expected := {
		"code": "some.path.oh.Hey",
		"collections": ["spam"],
"effective_on": "2022-01-01T00:00:00Z", 49 | "msg": "Bad thing foo", 50 | } 51 | 52 | rule_annotations := {"custom": { 53 | "collections": ["spam"], 54 | "short_name": "Hey", 55 | "failure_msg": "Bad thing %s", 56 | }} 57 | 58 | chain := [ 59 | {"annotations": rule_annotations, "path": ["some", "path", "oh", "deny"]}, 60 | {"annotations": {}, "path": ["ignored", "ignored"]}, # Actually not needed any more 61 | ] 62 | 63 | lib.assert_equal(expected, lib.result_helper(chain, ["foo"])) 64 | } 65 | 66 | test_result_helper_with_term if { 67 | expected := { 68 | "code": "path.oh.Hey", 69 | "term": "ola", 70 | "effective_on": "2022-01-01T00:00:00Z", 71 | "msg": "Bad thing foo", 72 | } 73 | 74 | rule_annotations := {"custom": { 75 | "short_name": "Hey", 76 | "failure_msg": "Bad thing %s", 77 | }} 78 | 79 | chain := [ 80 | {"annotations": rule_annotations, "path": ["data", "path", "oh", "deny"]}, 81 | {"annotations": {}, "path": ["ignored", "also_ignored"]}, 82 | ] 83 | 84 | lib.assert_equal(expected, lib.result_helper_with_term(chain, ["foo"], "ola")) 85 | } 86 | 87 | test_result_helper_pkg_name if { 88 | # "Normal" for policy repo 89 | lib.assert_equal("foo", lib._pkg_name(["data", "foo", "deny"])) 90 | lib.assert_equal("foo", lib._pkg_name(["data", "foo", "warn"])) 91 | 92 | # Long package paths are retained 93 | lib.assert_equal("another.foo.bar", lib._pkg_name(["data", "another", "foo", "bar", "deny"])) 94 | lib.assert_equal("another.foo.bar", lib._pkg_name(["data", "another", "foo", "bar", "warn"])) 95 | 96 | # Unlikely edge case: No deny or warn 97 | lib.assert_equal("foo", lib._pkg_name(["data", "foo"])) 98 | lib.assert_equal("foo.bar", lib._pkg_name(["data", "foo", "bar"])) 99 | 100 | # Unlikely edge case: No data 101 | lib.assert_equal("foo", lib._pkg_name(["foo", "deny"])) 102 | lib.assert_equal("foo.bar", lib._pkg_name(["foo", "bar", "warn"])) 103 | 104 | # Very unlikely edge case: Just to illustrate how deny/warn/data are stripped once 105 | 
lib.assert_equal("foo", lib._pkg_name(["data", "foo", "warn", "deny"])) 106 | lib.assert_equal("foo.deny", lib._pkg_name(["data", "foo", "deny", "warn"])) 107 | lib.assert_equal("foo.warn", lib._pkg_name(["data", "foo", "warn", "warn"])) 108 | lib.assert_equal("data.foo.warn.deny", lib._pkg_name(["data", "data", "foo", "warn", "deny", "warn"])) 109 | } 110 | -------------------------------------------------------------------------------- /policy/lib/rule_data_test.rego: -------------------------------------------------------------------------------- 1 | package lib_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | 7 | test_rule_data if { 8 | lib.assert_equal( 9 | [ 10 | 40, # key0 value comes from data.rule_data__configuration__ 11 | 30, # key1 value comes from data.rule_data_custom 12 | 20, # key2 value comes from data.rule_data 13 | 10, # key3 value comes from lib.rule_data_defaults 14 | [], # key4 value is not defined 15 | ], 16 | [ 17 | lib.rule_data("key0"), 18 | lib.rule_data("key1"), 19 | lib.rule_data("key2"), 20 | lib.rule_data("key3"), 21 | lib.rule_data("key4"), 22 | ], 23 | ) with data.rule_data__configuration__ as {"key0": 40} 24 | with data.rule_data_custom as {"key0": 30, "key1": 30} 25 | with data.rule_data as {"key0": 20, "key1": 20, "key2": 20} 26 | with lib.rule_data_defaults as {"key3": 10} 27 | } 28 | 29 | # Need this for 100% coverage 30 | test_rule_data_defaults if { 31 | lib.assert_not_empty(lib.rule_data_defaults) 32 | } 33 | -------------------------------------------------------------------------------- /policy/lib/sbom/rpm.rego: -------------------------------------------------------------------------------- 1 | package lib.sbom 2 | 3 | import rego.v1 4 | 5 | all_rpm_entities contains entity if { 6 | some sbom in all_sboms 7 | some entity in rpms_from_sbom(sbom) 8 | } 9 | 10 | rpms_from_sbom(s) := entities if { 11 | # CycloneDX 12 | entities := {entity | 13 | some component in s.components 14 | purl := component.purl 15 | 
		_is_rpmish(purl)
		entity := {
			"purl": purl,
			"found_by_cachi2": _component_found_by_cachi2(component),
		}
	}
	count(entities) > 0
} else := entities if {
	# SPDX
	entities := {entity |
		some pkg in s.packages
		some ref in pkg.externalRefs
		ref.referenceType == "purl"
		ref.referenceCategory == "PACKAGE-MANAGER"
		purl := ref.referenceLocator
		_is_rpmish(purl)
		entity := {
			"purl": purl,
			"found_by_cachi2": _package_found_by_cachi2(pkg),
		}
	}
	count(entities) > 0
}

# Match rpms and modules
# (Use a string match instead of parsing it and checking the type)
_is_rpmish(purl) if {
	startswith(purl, "pkg:rpm/")
} else if {
	startswith(purl, "pkg:rpmmod/")
}

# CycloneDX style
# True when any component property matches the "<name>:found_by" marker that
# cachi2/hermeto writes; defaults to false otherwise.
_component_found_by_cachi2(component) if {
	some property in component.properties
	some cachi2_name in _cachi2_names
	property == _cachi2_found_by_property(cachi2_name)
} else := false

# Expecting this to be called with one of _cachi2_names
_cachi2_found_by_property(cachi2_name) := {
	"name": sprintf("%s:found_by", [cachi2_name]),
	"value": cachi2_name,
}

# SPDX style
# True when any package annotation of type OTHER names cachi2/hermeto as the
# annotator; defaults to false otherwise.
_package_found_by_cachi2(pkg) if {
	some annotation in pkg.annotations
	some cachi2_name in _cachi2_names
	regex.match(sprintf(`.*%s.*`, [cachi2_name]), annotation.annotator)
	annotation.annotationType == "OTHER"
	# `comment` contains additional information, but that is not needed for the purpose of
	# simply filtering what was found by cachi2.
} else := false

# The new name for cachi2 is hermeto. We want to treat them
# as synonymous when looking in the SBOM data.
72 | _cachi2_names := ["cachi2", "hermeto"] 73 | -------------------------------------------------------------------------------- /policy/lib/sbom/rpm_test.rego: -------------------------------------------------------------------------------- 1 | package lib.sbom_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.lib.sbom 7 | 8 | test_all_rpm_entities if { 9 | s_cyclonedx := _cyclonedx_sbom([_cyclonedx_component(_rpm_spam_1, [_cachi2_found_by_property])]) 10 | s_spdx := _spdx_sbom([_spdx_package(_rpm_spam_2, [_cachi2_spdx_annotation])]) 11 | 12 | expected := { 13 | { 14 | "found_by_cachi2": true, 15 | "purl": _rpm_spam_1, 16 | }, 17 | { 18 | "found_by_cachi2": true, 19 | "purl": _rpm_spam_2, 20 | }, 21 | } 22 | 23 | all_sboms := [s_cyclonedx, s_spdx] 24 | lib.assert_equal_results(expected, sbom.all_rpm_entities) with lib.sbom.all_sboms as all_sboms 25 | } 26 | 27 | test_all_rpm_entities_no_dupes if { 28 | s_cyclonedx := _cyclonedx_sbom([ 29 | _cyclonedx_component(_rpm_spam_1, [_cachi2_found_by_property]), 30 | _cyclonedx_component(_rpm_spam_2, [_hermeto_found_by_property]), 31 | ]) 32 | s_spdx := _spdx_sbom([ 33 | _spdx_package(_rpm_spam_1, [_cachi2_spdx_annotation]), 34 | _spdx_package(_rpm_spam_2, [_hermeto_spdx_annotation]), 35 | ]) 36 | 37 | # Duplicated entries across SBOMs are ignored. 
38 | expected := { 39 | { 40 | "found_by_cachi2": true, 41 | "purl": _rpm_spam_1, 42 | }, 43 | { 44 | "found_by_cachi2": true, 45 | "purl": _rpm_spam_2, 46 | }, 47 | } 48 | 49 | all_sboms := [s_cyclonedx, s_spdx] 50 | lib.assert_equal_results(expected, sbom.all_rpm_entities) with lib.sbom.all_sboms as all_sboms 51 | } 52 | 53 | test_rpms_from_sbom_cyclonedx if { 54 | s := _cyclonedx_sbom([ 55 | _cyclonedx_component(_rpm_spam_1, []), 56 | _cyclonedx_component(_rpm_spam_2, [_cachi2_found_by_property]), 57 | _cyclonedx_component(_not_rpm, []), 58 | ]) 59 | expected := { 60 | { 61 | "found_by_cachi2": false, 62 | "purl": _rpm_spam_1, 63 | }, 64 | { 65 | "found_by_cachi2": true, 66 | "purl": _rpm_spam_2, 67 | }, 68 | } 69 | 70 | lib.assert_equal_results(expected, sbom.rpms_from_sbom(s)) 71 | } 72 | 73 | test_rpms_from_sbom_spdx if { 74 | s := _spdx_sbom([ 75 | _spdx_package(_rpm_spam_1, []), 76 | _spdx_package(_rpm_spam_2, [_cachi2_spdx_annotation]), 77 | _spdx_package(_not_rpm, []), 78 | ]) 79 | expected := { 80 | { 81 | "found_by_cachi2": false, 82 | "purl": _rpm_spam_1, 83 | }, 84 | { 85 | "found_by_cachi2": true, 86 | "purl": _rpm_spam_2, 87 | }, 88 | } 89 | 90 | lib.assert_equal_results(expected, sbom.rpms_from_sbom(s)) 91 | } 92 | 93 | _cyclonedx_sbom(components) := {"components": components} 94 | 95 | _cyclonedx_component(purl, properties) := { 96 | "purl": purl, 97 | "properties": properties, 98 | } 99 | 100 | _spdx_sbom(packages) := {"packages": packages} 101 | 102 | _spdx_package(purl, annotations) := { 103 | "annotations": annotations, 104 | "externalRefs": [{ 105 | "referenceType": "purl", 106 | "referenceCategory": "PACKAGE-MANAGER", 107 | "referenceLocator": purl, 108 | }], 109 | } 110 | 111 | _cachi2_found_by_property := sbom._cachi2_found_by_property("cachi2") 112 | 113 | _hermeto_found_by_property := sbom._cachi2_found_by_property("hermeto") 114 | 115 | _cachi2_spdx_annotation := {"annotator": "Tool: cachi2:jsonencoded", "annotationType": "OTHER"} 116 | 
_hermeto_spdx_annotation := {"annotator": "Tool: hermeto:jsonencoded", "annotationType": "OTHER"}

_rpm_spam_1 := "pkg:rpm/redhat/spam@1.0.0-1"

_rpm_spam_2 := "pkg:rpm/redhat/spam@1.0.0-2"

_not_rpm := "pkg:golang/gitplanet.com/bacon@1.2.3?arch=amd64"
-------------------------------------------------------------------------------- /policy/lib/set_helpers.rego: --------------------------------------------------------------------------------
package lib

import rego.v1

# It's fairly idiomatic rego to do this inline but these
# can make your code a little more readable in some cases
#
to_set(arr) := {member | some member in arr}

to_array(s) := [member | some member in s]

# Without the in keyword it could be done like this:
# needle == haystack[_]
#
included_in(needle, haystack) if {
	needle in haystack
}

# Return true if any of the needles are found in the haystack
any_included_in(needles, haystack) if {
	# (Set intersection)
	count(to_set(needles) & to_set(haystack)) > 0
}

# Return true if all of the needles are found in the haystack
all_included_in(needles, haystack) if {
	# (Set difference)
	count(to_set(needles) - to_set(haystack)) == 0
}

# Return true if none of the needles are found in the haystack
none_included_in(needles, haystack) if {
	not any_included_in(needles, haystack)
}

# Return true if any of the needles are missing from the haystack
any_not_included_in(needles, haystack) if {
	not all_included_in(needles, haystack)
}
-------------------------------------------------------------------------------- /policy/lib/set_helpers_test.rego: --------------------------------------------------------------------------------
package lib_test

import rego.v1

import data.lib

my_list := ["a", "b", "c"]

my_set
:= {"a", "b", "c"} 10 | 11 | test_to_set if { 12 | lib.assert_equal(my_set, lib.to_set(my_list)) 13 | lib.assert_equal(my_set, lib.to_set(my_set)) 14 | } 15 | 16 | test_to_array if { 17 | lib.assert_equal(my_list, lib.to_array(my_set)) 18 | lib.assert_equal(my_list, lib.to_array(my_list)) 19 | } 20 | 21 | test_included_in if { 22 | lib.included_in("a", my_list) 23 | lib.included_in("a", my_set) 24 | not lib.included_in("z", my_list) 25 | not lib.included_in("z", my_set) 26 | } 27 | 28 | test_any_included_in if { 29 | lib.any_included_in(["a", "z"], my_list) 30 | lib.any_included_in(["a", "z"], my_set) 31 | lib.any_included_in({"a", "z"}, my_list) 32 | lib.any_included_in({"a", "z"}, my_set) 33 | 34 | not lib.any_included_in({"x", "z"}, my_set) 35 | } 36 | 37 | test_all_included_in if { 38 | lib.all_included_in({"a", "b"}, my_set) 39 | not lib.all_included_in({"a", "z"}, my_set) 40 | } 41 | 42 | test_none_included_in if { 43 | lib.none_included_in({"x", "z"}, my_set) 44 | not lib.none_included_in({"a", "z"}, my_set) 45 | } 46 | 47 | test_any_not_included_in if { 48 | lib.any_not_included_in({"a", "z"}, my_set) 49 | not lib.any_not_included_in({"a", "b"}, my_set) 50 | } 51 | -------------------------------------------------------------------------------- /policy/lib/sigstore.rego: -------------------------------------------------------------------------------- 1 | package lib 2 | 3 | import rego.v1 4 | 5 | # sigstore_opts provides a safe way to access the default sigstore opts. It ensures policy rules 6 | # don't accidentally evaluate to passing if the default values are not in the config. 
# Empty-string/false defaults mirror the shape of data.config.default_sigstore_opts
# so rules never unify against an absent value.
default sigstore_opts := {
	"certificate_identity": "",
	"certificate_identity_regexp": "",
	"certificate_oidc_issuer": "",
	"certificate_oidc_issuer_regexp": "",
	"ignore_rekor": false,
	"public_key": "",
	"rekor_url": "",
}

# When the config supplies sigstore options, they take precedence over the defaults.
sigstore_opts := data.config.default_sigstore_opts
-------------------------------------------------------------------------------- /policy/lib/sigstore_test.rego: --------------------------------------------------------------------------------
package lib_test

import rego.v1

import data.lib

# Verifies both the built-in defaults and the config-supplied override path.
test_sigstore_opts if {
	lib.assert_equal(lib.sigstore_opts, {
		"certificate_identity": "",
		"certificate_identity_regexp": "",
		"certificate_oidc_issuer": "",
		"certificate_oidc_issuer_regexp": "",
		"ignore_rekor": false,
		"public_key": "",
		"rekor_url": "",
	})

	opts := {
		"certificate_identity": "subject",
		"certificate_identity_regexp": "subject-regexp",
		"certificate_oidc_issuer": "issuer",
		"certificate_oidc_issuer_regexp": "issuer-regexp",
		"ignore_rekor": true,
		"public_key": "public-key",
		"rekor_url": "https://rekor.local",
	}
	lib.assert_equal(lib.sigstore_opts, opts) with data.config.default_sigstore_opts as opts
}
-------------------------------------------------------------------------------- /policy/lib/string_utils.rego: --------------------------------------------------------------------------------
package lib

import rego.v1

# quoted_values_string renders a collection as a comma-separated list of
# single-quoted items, e.g. ["a", "b"] -> "'a', 'b'".
quoted_values_string(value_list) := result if {
	quoted_list := [quoted_item |
		some item in value_list
		quoted_item := sprintf("'%s'", [item])
	]

	result := concat(", ", quoted_list)
}
-------------------------------------------------------------------------------- /policy/lib/string_utils_test.rego: --------------------------------------------------------------------------------
package lib_test
| 3 | import rego.v1 4 | 5 | import data.lib 6 | 7 | test_quoted_values_string if { 8 | lib.assert_equal("'a', 'b', 'c'", lib.quoted_values_string(["a", "b", "c"])) 9 | lib.assert_equal("'a', 'b', 'c'", lib.quoted_values_string({"a", "b", "c"})) 10 | } 11 | -------------------------------------------------------------------------------- /policy/lib/tekton/bundles.rego: -------------------------------------------------------------------------------- 1 | package lib.tekton 2 | 3 | import rego.v1 4 | 5 | import data.lib.image 6 | 7 | # Return the bundle reference as is 8 | bundle(task) := task_ref(task).bundle 9 | 10 | # Returns a subset of tasks that do not use a bundle reference. 11 | disallowed_task_reference(tasks) := {task | 12 | some task in tasks 13 | not bundle(task) 14 | } 15 | 16 | # Returns a subset of tasks that use an empty bundle reference. 17 | empty_task_bundle_reference(tasks) := {task | 18 | some task in tasks 19 | bundle(task) == "" 20 | } 21 | 22 | # Returns a subset of tasks that use bundle references not pinned to a digest. 
unpinned_task_bundle(tasks) := {task |
	some task in tasks
	ref := image.parse(bundle(task))

	# An empty digest means the bundle reference is not pinned.
	ref.digest == ""
}
-------------------------------------------------------------------------------- /policy/lib/tekton/bundles_test.rego: --------------------------------------------------------------------------------
package lib.tekton_test

import rego.v1

import data.lib
import data.lib.tekton

# Tasks with neither taskRef.bundle nor ref.bundle are reported.
test_disallowed_task_reference if {
	tasks := [
		{"name": "my-task-1", "taskRef": {}},
		{"name": "my-task-2", "ref": {}},
	]

	expected := lib.to_set(tasks)
	lib.assert_equal(tekton.disallowed_task_reference(tasks), expected)
}

# Tasks with an empty-string bundle reference are reported.
test_empty_task_bundle_reference if {
	tasks := [
		{"name": "my-task-1", "taskRef": {"bundle": ""}},
		{"name": "my-task-2", "ref": {"bundle": ""}},
	]

	expected := lib.to_set(tasks)
	lib.assert_equal(tekton.empty_task_bundle_reference(tasks), expected)
}

# Bundle references pinned only by tag (no digest) are reported as unpinned.
test_unpinned_task_bundle if {
	tasks := [
		{
			"name": "my-task-1",
			"taskRef": {"bundle": "reg.com/repo:903d49a833d22f359bce3d67b15b006e1197bae5"},
		},
		{
			"name": "my-task-2",
			"ref": {"bundle": "reg.com/repo:903d49a833d22f359bce3d67b15b006e1197bae5"},
		},
	]

	expected := lib.to_set(tasks)
	lib.assert_equal(tekton.unpinned_task_bundle(tasks), expected) with data["task-bundles"] as []
}
-------------------------------------------------------------------------------- /policy/lib/tekton/pipeline.rego: --------------------------------------------------------------------------------
package lib.tekton

import rego.v1

import data.lib.time as ectime

pipeline_label := "pipelines.openshift.io/runtime"

task_label := "build.appstudio.redhat.com/build_type"

# The newest entry of the required-task list configured for this pipeline type.
latest_required_pipeline_tasks(pipeline) := pipeline_tasks if {
	pipeline_data := required_task_list(pipeline)
13 | pipeline_tasks := ectime.newest(pipeline_data) 14 | } 15 | 16 | current_required_pipeline_tasks(pipeline) := pipeline_tasks if { 17 | pipeline_data := required_task_list(pipeline) 18 | pipeline_tasks := ectime.most_current(pipeline_data) 19 | } 20 | 21 | # get the label from the pipelineRun attestation and return the 22 | # required task list FOR that pipeline 23 | required_task_list(pipeline) := pipeline_data if { 24 | pipeline_selector := pipeline_label_selector(pipeline) 25 | pipeline_data := data["pipeline-required-tasks"][pipeline_selector] 26 | } 27 | 28 | # pipeline_label_selector is a specialized function that returns the name of the 29 | # required tasks list that should be used. 30 | # Note: If we import data.lib in this file, Regal reports a circular import error. 31 | # So that's why we need `data.lib.to_set` here. Todo: Figure out a nicer way to do it. 32 | pipeline_label_selector(pipeline) := value if { 33 | not is_fbc # given that the build task is shared between fbc and docker builds we can't rely on the task's label 34 | 35 | # Labels of the build Task from the SLSA Provenance v1.0 of a PipelineRun 36 | values := [l | some build_task in build_tasks(pipeline); l := build_task.metadata.labels[task_label]] 37 | count(data.lib.to_set(values)) == 1 38 | value := values[0] 39 | } else := value if { 40 | not is_fbc # given that the build task is shared between fbc and docker builds we can't rely on the task's label 41 | 42 | # Labels of the build Task from the SLSA Provenance v0.2 of a PipelineRun 43 | values := [l | some build_task in build_tasks(pipeline); l := build_task.invocation.environment.labels[task_label]] 44 | count(data.lib.to_set(values)) == 1 45 | value := values[0] 46 | } else := value if { 47 | # PipelineRun labels found in the SLSA Provenance v1.0 48 | value := pipeline.statement.predicate.buildDefinition.internalParameters.labels[pipeline_label] 49 | } else := value if { 50 | # PipelineRun labels found in the SLSA Provenance v0.2 51 
| value := pipeline.statement.predicate.invocation.environment.labels[pipeline_label] 52 | } else := value if { 53 | # Labels from a Tekton Pipeline definition 54 | value := pipeline.metadata.labels[pipeline_label] 55 | } else := value if { 56 | # special handling for fbc pipelines, they're detected via image label 57 | is_fbc 58 | 59 | value := "fbc" 60 | } 61 | 62 | pipeline_name := input.metadata.name 63 | 64 | # evaluates to true for FBC image builds, for which we cannot rely on the build 65 | # task labels 66 | is_fbc if { 67 | input.image.config.Labels["operators.operatorframework.io.index.configs.v1"] 68 | } 69 | -------------------------------------------------------------------------------- /policy/lib/tekton/pipeline_test.rego: -------------------------------------------------------------------------------- 1 | package lib.tekton_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.lib.tekton 7 | 8 | test_pipeline_label_selector_build_task_slsa_v1_0 if { 9 | task := json.patch( 10 | slsav1_task_result_ref( 11 | "build-container", 12 | [ 13 | {"name": "IMAGE_URL", "type": "string", "value": "localhost:5000/repo:latest"}, 14 | {"name": "IMAGE_DIGEST", "type": "string", "value": "sha256:abc"}, 15 | ], 16 | ), 17 | [{"op": "add", "path": "/metadata/labels", "value": {tekton.task_label: "generic"}}], 18 | ) 19 | 20 | attestation := {"statement": { 21 | "predicateType": "https://slsa.dev/provenance/v1", 22 | "predicate": {"buildDefinition": { 23 | "buildType": "https://tekton.dev/chains/v2/slsa-tekton", 24 | "resolvedDependencies": resolved_dependencies([task]), 25 | "internalParameters": {"labels": {tekton.pipeline_label: "ignored"}}, 26 | }}, 27 | }} 28 | 29 | lib.assert_equal(tekton.pipeline_label_selector(attestation), "generic") 30 | } 31 | 32 | test_pipeline_label_selector_build_task_slsa_v0_2 if { 33 | task := { 34 | "ref": {"name": "build-container", "kind": "Task"}, 35 | "results": [ 36 | {"name": "IMAGE_URL", "type": "string", "value": 
"localhost:5000/repo:latest"}, 37 | {"name": "IMAGE_DIGEST", "type": "string", "value": "sha256:abc"}, 38 | ], 39 | "invocation": {"environment": {"labels": {tekton.task_label: "generic"}}}, 40 | } 41 | 42 | attestation := {"statement": { 43 | "predicateType": "https://slsa.dev/provenance/v0.2", 44 | "predicate": { 45 | "buildConfig": {"tasks": [task]}, 46 | "invocation": {"environment": {"labels": {tekton.pipeline_label: "ignored"}}}, 47 | }, 48 | }} 49 | 50 | lib.assert_equal(tekton.pipeline_label_selector(attestation), "generic") 51 | } 52 | 53 | test_pipeline_label_selector_pipeline_run_slsa_v1_0 if { 54 | task := slsav1_task_result_ref("build-container", [ 55 | {"name": "IMAGE_URL", "type": "string", "value": "localhost:5000/repo:latest"}, 56 | {"name": "IMAGE_DIGEST", "type": "string", "value": "sha256:abc"}, 57 | ]) 58 | 59 | attestation := {"statement": { 60 | "predicateType": "https://slsa.dev/provenance/v1", 61 | "predicate": {"buildDefinition": { 62 | "buildType": "https://tekton.dev/chains/v2/slsa-tekton", 63 | "resolvedDependencies": resolved_dependencies([task]), 64 | "internalParameters": {"labels": {tekton.pipeline_label: "generic"}}, 65 | }}, 66 | }} 67 | 68 | lib.assert_equal(tekton.pipeline_label_selector(attestation), "generic") 69 | } 70 | 71 | test_pipeline_label_selector_pipeline_run_slsa_v0_2 if { 72 | task := { 73 | "ref": {"name": "build-container", "kind": "Task"}, 74 | "results": [ 75 | {"name": "IMAGE_URL", "type": "string", "value": "localhost:5000/repo:latest"}, 76 | {"name": "IMAGE_DIGEST", "type": "string", "value": "sha256:abc"}, 77 | ], 78 | } 79 | 80 | attestation := {"statement": { 81 | "predicateType": "https://slsa.dev/provenance/v0.2", 82 | "predicate": { 83 | "buildConfig": {"tasks": [task]}, 84 | "invocation": {"environment": {"labels": {tekton.pipeline_label: "generic"}}}, 85 | }, 86 | }} 87 | 88 | lib.assert_equal(tekton.pipeline_label_selector(attestation), "generic") 89 | } 90 | 91 | 
test_pipeline_label_selector_pipeline_definition if { 92 | pipeline := {"metadata": {"labels": {tekton.pipeline_label: "generic"}}} 93 | lib.assert_equal(tekton.pipeline_label_selector(pipeline), "generic") 94 | } 95 | 96 | test_fbc_pipeline_label_selector if { 97 | image := {"config": {"Labels": {"operators.operatorframework.io.index.configs.v1": "/configs"}}} 98 | lib.assert_equal(tekton.pipeline_label_selector({}), "fbc") with input.image as image 99 | } 100 | -------------------------------------------------------------------------------- /policy/lib/tekton/recorded_att_test.rego: -------------------------------------------------------------------------------- 1 | package lib.tekton_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.lib.tekton 7 | 8 | test_slsa_v02_task_extraction if { 9 | lib.assert_equal( 10 | [t | 11 | some task in tekton.tasks({"statement": input}) 12 | t := tekton.task_data(task) 13 | ], 14 | [ 15 | {"name": "mock-av-scanner"}, 16 | {"name": ""}, 17 | { 18 | # regal ignore:line-length 19 | "bundle": "quay.io/lucarval/test-policies-chains@sha256:ae5952d5aac1664fbeae9191d9445244051792af903d28d3e0084e9d9b7cce61", 20 | "name": "mock-build", 21 | }, 22 | {"name": "mock-git-clone"}, 23 | ], 24 | ) with input as att_01_slsa_v0_2_pipeline_in_cluster 25 | } 26 | 27 | test_slsa_v1_task_extraction if { 28 | lib.assert_equal( 29 | [t | 30 | some task in tekton.tasks({"statement": input}) 31 | t := tekton.task_data(task) 32 | ], 33 | [ 34 | {"name": "mock-git-clone"}, 35 | {"name": "mock-av-scanner"}, 36 | {"name": ""}, 37 | { 38 | # regal ignore:line-length 39 | "bundle": "quay.io/lucarval/test-policies-chains@sha256:b766741b8b3e135e4e31281aa4b25899e951798b5f213cc4a5360d01eb9b6880", 40 | "name": "mock-build", 41 | }, 42 | ], 43 | ) with input as att_05_slsa_v1_0_tekton_build_type_pipeline_in_cluster 44 | } 45 | -------------------------------------------------------------------------------- /policy/lib/tekton/task_results.rego: 
-------------------------------------------------------------------------------- 1 | package lib.tekton 2 | 3 | import rego.v1 4 | 5 | # handle the output artifacts from Tekton Chains 6 | # https://tekton.dev/docs/chains/slsa-provenance/#output-artifacts 7 | 8 | task_result_artifact_url(task) := array.concat( 9 | _task_result_image_urls(task), 10 | array.concat( 11 | _task_result_artifact_uris(task), 12 | array.concat( 13 | _task_result_images_urls(task), 14 | _task_result_artifact_outputs_urls(task), 15 | ), 16 | ), 17 | ) 18 | 19 | # returns the value of a task result with name IMAGE_URL 20 | _task_result_image_urls(task) := _non_empty_strings(task_result_endswith(task, "IMAGE_URL")) 21 | 22 | # returns the value of a task result with name ARTIFACT_URI 23 | _task_result_artifact_uris(task) := _non_empty_strings(task_result_endswith(task, "ARTIFACT_URI")) 24 | 25 | # returns the image url from the task result IMAGES 26 | _task_result_images_urls(task) := _non_empty_strings([v | 27 | some result in task_result_endswith(task, "IMAGES") 28 | some image in split(result, ",") 29 | split_item := split(image, "@") 30 | v := split_item[0] 31 | ]) 32 | 33 | # returns the image url from the task result ARTIFACT_OUTPUTS 34 | _task_result_artifact_outputs_urls(task) := _non_empty_strings([result.uri | 35 | some result in task_result_endswith(task, "ARTIFACT_OUTPUTS") 36 | ]) 37 | 38 | task_result_artifact_digest(task) := array.concat( 39 | _task_result_image_digests(task), 40 | array.concat( 41 | _task_result_artifact_digests(task), 42 | array.concat( 43 | _task_result_images_digests(task), 44 | _task_result_artifact_outputs_digests(task), 45 | ), 46 | ), 47 | ) 48 | 49 | # returns the value of a task result with name IMAGE_DIGEST 50 | _task_result_image_digests(task) := _non_empty_strings(task_result_endswith(task, "IMAGE_DIGEST")) 51 | 52 | # returns the value of a task result with name ARTIFACT_DIGEST 53 | _task_result_artifact_digests(task) := 
_non_empty_strings(task_result_endswith(task, "ARTIFACT_DIGEST")) 54 | 55 | # returns the image digest from the task result IMAGES 56 | _task_result_images_digests(task) := _non_empty_strings([v | 57 | some result in task_result_endswith(task, "IMAGES") 58 | some image in split(result, ",") 59 | split_item := split(image, "@") 60 | v := split_item[1] 61 | ]) 62 | 63 | # returns the image digest from the task result ARTIFACT_OUTPUTS 64 | _task_result_artifact_outputs_digests(task) := _non_empty_strings([result.digest | 65 | some result in task_result_endswith(task, "ARTIFACT_OUTPUTS") 66 | ]) 67 | 68 | _non_empty_strings(values) := [trimmed_value | 69 | some value in values 70 | trimmed_value := trim_space(value) 71 | count(trimmed_value) > 0 72 | ] 73 | 74 | images_with_digests(tasks) := [sprintf("%v@%v", [image, digest]) | 75 | some task in tasks 76 | some image_index, image in task_result_artifact_url(task) 77 | some digest_index, digest in task_result_artifact_digest(task) 78 | image_index == digest_index 79 | ] 80 | -------------------------------------------------------------------------------- /policy/lib/time/time.rego: -------------------------------------------------------------------------------- 1 | package lib.time 2 | 3 | import rego.v1 4 | 5 | import data.lib.arrays 6 | 7 | # A default value in the past. Could be whatever but beware you'll have to 8 | # update a bunch of tests if you change it. 9 | # 10 | default_effective_on := "2022-01-01T00:00:00Z" 11 | 12 | # This supports finding an effective_on date in multiple scopes, giving 13 | # precedence to the narrowest scope. Let's keep it that way even though 14 | # currently we're not using any scopes except for the rule scope. 
15 | # 16 | when(metadata_chain) := effective_on if { 17 | scope_precedence := ["rule", "document", "package"] 18 | all_effective_on := [metadata.annotations.custom.effective_on | 19 | some metadata in metadata_chain 20 | metadata.annotations.scope in scope_precedence 21 | ] 22 | 23 | # Use the first one found in scope_precedence or fall back to the default 24 | # value if effective_on was not found in annotations 25 | effective_on := array.concat(all_effective_on, [default_effective_on])[0] 26 | } 27 | 28 | # Use the nanosecond epoch defined in the policy config if it is 29 | # present, otherwise use the real current time 30 | effective_current_time_ns := now_ns if { 31 | data.config 32 | now_ns := object.get(data.config, ["policy", "when_ns"], time.now_ns()) 33 | } 34 | 35 | # Handle edge case where data.config is not present 36 | # (We can't do `object.get(data, ...)` for some reason) 37 | effective_current_time_ns := now_ns if { 38 | not data.config 39 | now_ns := time.now_ns() 40 | } 41 | 42 | # most_current returns the first item in the given list of objects where 43 | # effective_on is NOT in the future (less than or equal to now). Items that do 44 | # not define the effective_on attribute are ignored. If the given list of 45 | # items is empty, or no items are current, most_current does not produce a 46 | # value. 47 | most_current(items) := item if { 48 | current := [i | 49 | some i in items 50 | i.effective_on 51 | not time.parse_rfc3339_ns(i.effective_on) > effective_current_time_ns 52 | ] 53 | 54 | item := newest(current) 55 | } 56 | 57 | # newest returns the newest item by `effective_on`. Assumes same date format and 58 | # time-zone for `effective_on` field. 
59 | newest(items) := item if { 60 | ordered := arrays.sort_by("effective_on", items) 61 | 62 | item := ordered[count(ordered) - 1] 63 | } 64 | -------------------------------------------------------------------------------- /policy/lib/time/time_test.rego: -------------------------------------------------------------------------------- 1 | # METADATA 2 | # custom: 3 | # effective_on: 2001-02-03T00:00:00Z 4 | # scope: package 5 | package lib.time_test 6 | 7 | import rego.v1 8 | 9 | import data.lib 10 | import data.lib.time as lib_time 11 | 12 | future_timestamp := time.add_date(time.now_ns(), 0, 0, 1) 13 | 14 | # METADATA 15 | # custom: 16 | # effective_on: 2004-05-06T00:00:00Z 17 | test_when_rule_precedence if { 18 | lib_time.when(rego.metadata.chain()) == "2004-05-06T00:00:00Z" 19 | } 20 | 21 | test_when_package_precedence if { 22 | lib_time.when(rego.metadata.chain()) == "2001-02-03T00:00:00Z" 23 | } 24 | 25 | test_effective_current_time_ns if { 26 | # with no config at all 27 | lib.assert_equal(lib_time.effective_current_time_ns, time.now_ns()) 28 | 29 | # no config.policy 30 | lib.assert_equal(lib_time.effective_current_time_ns, time.now_ns()) with data.config as {} 31 | 32 | # no config.policy.when_ns 33 | lib.assert_equal(lib_time.effective_current_time_ns, time.now_ns()) with data.config.policy as {} 34 | lib.assert_equal( 35 | lib_time.effective_current_time_ns, 36 | future_timestamp, 37 | ) with data.config.policy.when_ns as future_timestamp 38 | } 39 | 40 | # regal ignore:rule-length 41 | test_most_current if { 42 | # Ignore future item 43 | lib.assert_equal( 44 | lib_time.most_current([ 45 | {"name": "future", "effective_on": "2099-01-01T00:00:00Z"}, 46 | {"name": "past", "effective_on": "2022-01-01T00:00:00Z"}, 47 | {"name": "ancient", "effective_on": "1985-01-01T00:00:00Z"}, 48 | ]), 49 | {"name": "past", "effective_on": "2022-01-01T00:00:00Z"}, 50 | ) 51 | 52 | # Produce no value when input is an empty list 53 | not lib_time.most_current([]) 54 | 55 
| # Produce no value if there are no current items 56 | not lib_time.most_current([ 57 | {"name": "supernova", "effective_on": "2262-04-11T00:00:00Z"}, 58 | {"name": "visionary", "effective_on": "2199-01-01T00:00:00Z"}, 59 | {"name": "future", "effective_on": "2099-01-01T00:00:00Z"}, 60 | ]) 61 | 62 | # Ignore items without effective_on 63 | lib.assert_equal( 64 | lib_time.most_current([ 65 | {"name": "incomplete"}, 66 | {"name": "past", "effective_on": "2022-01-01T00:00:00Z"}, 67 | {"name": "lacking"}, 68 | ]), 69 | {"name": "past", "effective_on": "2022-01-01T00:00:00Z"}, 70 | ) 71 | } 72 | 73 | test_newest if { 74 | lib.assert_equal({"effective_on": "2262-04-11T00:00:00Z"}, lib_time.newest([ 75 | {"effective_on": "2199-01-01T00:00:00Z"}, 76 | {"effective_on": "2262-04-11T00:00:00Z"}, 77 | {"effective_on": "2099-01-01T00:00:00Z"}, 78 | ])) 79 | } 80 | -------------------------------------------------------------------------------- /policy/pipeline/artifacthub-pkg.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # from https://github.com/artifacthub/hub/blob/master/docs/metadata/artifacthub-pkg.yml 19 | version: 0.0.1 20 | name: OPA Pipeline Policies 21 | displayName: OPA Pipeline Policies 22 | createdAt: 2023-01-05T21:27:55.549418+00:00 23 | description: OPA policies enforcing standards on a tekton pipeline definition 24 | readme: | 25 | OPA policies enforcing standards on a tekton pipeline definition. These policies 26 | are a work in progress and not yet production ready. 27 | install: | 28 | `conftest pull oci::quay.io/enterprise-contract/ec-pipeline-policy:latest` 29 | homeURL: https://conforma.dev/docs/policy/ 30 | keywords: 31 | - opa 32 | - conftest 33 | license: MIT 34 | provider: 35 | name: Red Hat 36 | -------------------------------------------------------------------------------- /policy/pipeline/basic/basic.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Pipeline definition sanity checks 4 | # description: >- 5 | # Policies to confirm the Tekton Pipeline definition has the expected kind. 6 | # 7 | package basic 8 | 9 | import rego.v1 10 | 11 | import data.lib 12 | 13 | expected_kind := "Pipeline" 14 | 15 | # (Not sure if we need this, but I'm using it to test the docs build.) 16 | 17 | # Fixme: It doesn't fail if the kind key is entirely missing.. 18 | 19 | # METADATA 20 | # title: Pipeline definition has expected kind 21 | # description: >- 22 | # Confirm that the pipeline definition has the kind "Pipeline".
23 | # custom: 24 | # short_name: expected_kind 25 | # failure_msg: Unexpected kind '%s' for pipeline definition 26 | # 27 | deny contains result if { 28 | expected_kind != input.kind 29 | result := lib.result_helper(rego.metadata.chain(), [input.kind]) 30 | } 31 | -------------------------------------------------------------------------------- /policy/pipeline/basic/basic_test.rego: -------------------------------------------------------------------------------- 1 | package basic_test 2 | 3 | import rego.v1 4 | 5 | import data.basic 6 | import data.lib 7 | 8 | test_unexpected_kind if { 9 | lib.assert_equal_results(basic.deny, {{ 10 | "code": "basic.expected_kind", 11 | "msg": "Unexpected kind 'Foo' for pipeline definition", 12 | }}) with input.kind as "Foo" 13 | } 14 | -------------------------------------------------------------------------------- /policy/pipeline/task_bundle/task_bundle.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Pipeline definition Task bundle policies 4 | # description: >- 5 | # To be able to reproduce and audit builds accurately it's important 6 | # to know exactly what happens during the build. To do this 7 | # Conforma requires that all tasks are defined in a set of 8 | # known and trusted task bundles. This package includes rules to 9 | # confirm that the tasks in a Pipeline definition are defined in task 10 | # bundles, and that the task bundles are from the list of known 11 | # and trusted bundles. 12 | # 13 | package task_bundle 14 | 15 | import rego.v1 16 | 17 | import data.lib 18 | import data.lib.tekton 19 | 20 | # METADATA 21 | # title: Unpinned task bundle reference 22 | # description: >- 23 | # Check if the Tekton Bundle used for the Tasks in the Pipeline definition 24 | # is pinned to a digest. 
25 | # custom: 26 | # short_name: unpinned_task_bundle 27 | # failure_msg: Pipeline task '%s' uses an unpinned task bundle reference '%s' 28 | # 29 | warn contains result if { 30 | some task in tekton.unpinned_task_bundle(input.spec.tasks) 31 | result := lib.result_helper(rego.metadata.chain(), [task.name, tekton.bundle(task)]) 32 | } 33 | 34 | # METADATA 35 | # title: Task bundle is out of date 36 | # description: >- 37 | # For each Task in the Pipeline definition, check if the Tekton Bundle used is 38 | # the most recent. 39 | # custom: 40 | # short_name: out_of_date_task_bundle 41 | # failure_msg: >- 42 | # Pipeline task '%s' uses an out of date task bundle '%s', new version of the 43 | # Task must be used before %s 44 | # 45 | warn contains result if { 46 | some task in input.spec.tasks 47 | expiry := tekton.expiry_of(task) 48 | bundle := tekton.bundle(task) 49 | result := lib.result_helper(rego.metadata.chain(), [task.name, bundle, time.format(expiry)]) 50 | } 51 | 52 | # METADATA 53 | # title: Task bundle was not used or is not defined 54 | # description: >- 55 | # Check for the existence of a task bundle. This rule will 56 | # fail if the task is not called from a bundle. 57 | # custom: 58 | # short_name: disallowed_task_reference 59 | # failure_msg: Pipeline task '%s' does not contain a bundle reference 60 | # 61 | deny contains result if { 62 | some task in tekton.disallowed_task_reference(input.spec.tasks) 63 | result := lib.result_helper(rego.metadata.chain(), [task.name]) 64 | } 65 | 66 | # METADATA 67 | # title: Task bundle reference is empty 68 | # description: >- 69 | # Check that a valid task bundle reference is being used. 
70 | # custom: 71 | # short_name: empty_task_bundle_reference 72 | # failure_msg: Pipeline task '%s' uses an empty bundle image reference 73 | # 74 | deny contains result if { 75 | some task in tekton.empty_task_bundle_reference(input.spec.tasks) 76 | result := lib.result_helper(rego.metadata.chain(), [task.name]) 77 | } 78 | 79 | # METADATA 80 | # title: Task bundle is not trusted 81 | # description: >- 82 | # For each Task in the Pipeline definition, check if the Tekton Bundle used is a trusted task. 83 | # custom: 84 | # short_name: untrusted_task_bundle 85 | # failure_msg: Pipeline task '%s' uses an untrusted task bundle '%s' 86 | # 87 | deny contains result if { 88 | some task in tekton.untrusted_task_refs(input.spec.tasks) 89 | bundle := tekton.bundle(task) 90 | bundle != "" 91 | result := lib.result_helper(rego.metadata.chain(), [task.name, bundle]) 92 | } 93 | 94 | # METADATA 95 | # title: Missing required data 96 | # description: >- 97 | # Confirm the `trusted_tasks` rule data was provided, since it's 98 | # required by the policy rules in this package. 99 | # custom: 100 | # short_name: missing_required_data 101 | # failure_msg: Missing required trusted_tasks data 102 | deny contains result if { 103 | tekton.missing_trusted_tasks_data 104 | result := lib.result_helper(rego.metadata.chain(), []) 105 | } 106 | -------------------------------------------------------------------------------- /policy/release/artifacthub-pkg.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Copyright The Conforma Contributors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # SPDX-License-Identifier: Apache-2.0 17 | 18 | # from https://github.com/artifacthub/hub/blob/master/docs/metadata/artifacthub-pkg.yml 19 | version: 0.0.1 20 | name: OPA Release Policies 21 | displayName: OPA Release Policies 22 | createdAt: 2023-01-05T21:27:55.549418+00:00 23 | description: OPA policies enforcing releasable build standards 24 | readme: | 25 | OPA policies enforcing releasable build standards. These policies 26 | are a work in progress and not yet production ready. 27 | install: | 28 | `conftest pull oci::quay.io/enterprise-contract/ec-release-policy:latest` 29 | 30 | Configure your org-specific `--data` as necessary from [data/](https://github.com/conforma/policy/tree/main/data) 31 | 32 | Run your conftest command: `conftest verify --data data/` 33 | homeURL: https://conforma.dev/docs/policy/ 34 | keywords: 35 | - opa 36 | - conftest 37 | license: MIT 38 | provider: 39 | name: Red Hat 40 | -------------------------------------------------------------------------------- /policy/release/attestation_type/attestation_type.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Attestation type 4 | # description: >- 5 | # Sanity checks related to the format of the image build's attestation.
6 | # 7 | package attestation_type 8 | 9 | import rego.v1 10 | 11 | import data.lib 12 | import data.lib.json as j 13 | 14 | # METADATA 15 | # title: Known attestation type found 16 | # description: >- 17 | # Confirm the attestation found for the image has a known 18 | # attestation type. 19 | # custom: 20 | # short_name: known_attestation_type 21 | # failure_msg: Unknown attestation type '%s' 22 | # solution: >- 23 | # Make sure the "_type" field in the attestation is supported. Supported types are configured 24 | # in xref:ec-cli:ROOT:configuration.adoc#_data_sources[data sources]. 25 | # collections: 26 | # - minimal 27 | # - redhat 28 | # - redhat_rpms 29 | # depends_on: 30 | # - attestation_type.pipelinerun_attestation_found 31 | # 32 | deny contains result if { 33 | some att in lib.pipelinerun_attestations 34 | 35 | # regal ignore:leaked-internal-reference 36 | att_type := att.statement._type 37 | not att_type in lib.rule_data(_rule_data_key) 38 | result := lib.result_helper(rego.metadata.chain(), [att_type]) 39 | } 40 | 41 | # METADATA 42 | # title: Known attestation types provided 43 | # description: Confirm the `known_attestation_types` rule data was provided. 44 | # custom: 45 | # short_name: known_attestation_types_provided 46 | # failure_msg: '%s' 47 | # solution: Provide a list of known attestation types. 48 | # collections: 49 | # - minimal 50 | # - redhat 51 | # - redhat_rpms 52 | # - policy_data 53 | # 54 | deny contains result if { 55 | some error in _rule_data_errors 56 | result := lib.result_helper_with_severity(rego.metadata.chain(), [error.message], error.severity) 57 | } 58 | 59 | # METADATA 60 | # title: PipelineRun attestation found 61 | # description: >- 62 | # Confirm at least one PipelineRun attestation is present. 63 | # custom: 64 | # short_name: pipelinerun_attestation_found 65 | # failure_msg: Missing pipelinerun attestation 66 | # solution: >- 67 | # Make sure the attestation being verified was generated from a Tekton pipelineRun. 
68 | # collections: 69 | # - minimal 70 | # - redhat 71 | # - redhat_rpms 72 | # 73 | deny contains result if { 74 | count(lib.pipelinerun_attestations) == 0 75 | result := lib.result_helper(rego.metadata.chain(), []) 76 | } 77 | 78 | # METADATA 79 | # title: Deprecated policy attestation format 80 | # description: >- 81 | # The Conforma CLI now places the attestation data in a different location. 82 | # This check fails if the expected new format is not found. 83 | # custom: 84 | # short_name: deprecated_policy_attestation_format 85 | # failure_msg: Deprecated policy attestation format found 86 | # solution: Use a newer version of the Conforma CLI. 87 | # collections: 88 | # - minimal 89 | # - redhat 90 | # - redhat_rpms 91 | # effective_on: 2023-08-31T00:00:00Z 92 | deny contains result if { 93 | # Use input.attestations directly so we can detect the actual format in use. 94 | some att in input.attestations 95 | not att.statement 96 | result := lib.result_helper(rego.metadata.chain(), []) 97 | } 98 | 99 | # Verify known_attestation_types is a non-empty list of strings 100 | _rule_data_errors contains error if { 101 | some e in j.validate_schema( 102 | lib.rule_data(_rule_data_key), 103 | { 104 | "$schema": "http://json-schema.org/draft-07/schema#", 105 | "type": "array", 106 | "items": {"type": "string"}, 107 | "uniqueItems": true, 108 | "minItems": 1, 109 | }, 110 | ) 111 | error := { 112 | "message": sprintf("Rule data %s has unexpected format: %s", [_rule_data_key, e.message]), 113 | "severity": e.severity, 114 | } 115 | } 116 | 117 | _rule_data_key := "known_attestation_types" 118 | -------------------------------------------------------------------------------- /policy/release/attestation_type/attestation_type_test.rego: -------------------------------------------------------------------------------- 1 | package attestation_type_test 2 | 3 | import rego.v1 4 | 5 | import data.attestation_type 6 | import data.lib 7 | 8 | good_type := 
"https://in-toto.io/Statement/v0.1" 9 | 10 | bad_type := "https://in-toto.io/Statement/v0.0.9999999" 11 | 12 | mock_data(att_type) := [{"statement": { 13 | "_type": att_type, 14 | "predicate": {"buildType": lib.tekton_pipeline_run}, 15 | }}] 16 | 17 | test_allow_when_permitted if { 18 | lib.assert_empty(attestation_type.deny) with input.attestations as mock_data(good_type) 19 | } 20 | 21 | test_deny_when_not_permitted if { 22 | expected_msg := sprintf("Unknown attestation type '%s'", [bad_type]) 23 | lib.assert_equal_results(attestation_type.deny, {{ 24 | "code": "attestation_type.known_attestation_type", 25 | "msg": expected_msg, 26 | }}) with input.attestations as mock_data(bad_type) 27 | } 28 | 29 | test_deny_when_pipelinerun_attestation_founds if { 30 | expected := {{ 31 | "code": "attestation_type.pipelinerun_attestation_found", 32 | "msg": "Missing pipelinerun attestation", 33 | }} 34 | attestations := [ 35 | {"statement": { 36 | "_type": good_type, 37 | "predicate": {"buildType": "tekton.dev/v1beta1/TaskRun"}, 38 | }}, 39 | {"statement": { 40 | "_type": good_type, 41 | "predicate": {"buildType": "spam/spam/eggs/spam"}, 42 | }}, 43 | ] 44 | lib.assert_equal_results(attestation_type.deny, expected) with input.attestations as attestations 45 | } 46 | 47 | test_deny_deprecated_policy_attestation_format if { 48 | expected := { 49 | { 50 | "code": "attestation_type.deprecated_policy_attestation_format", 51 | "msg": "Deprecated policy attestation format found", 52 | }, 53 | { 54 | "code": "attestation_type.pipelinerun_attestation_found", 55 | "msg": "Missing pipelinerun attestation", 56 | }, 57 | } 58 | attestations := [{ 59 | "_type": good_type, 60 | "predicate": {"buildType": lib.tekton_pipeline_run}, 61 | }] 62 | lib.assert_equal_results(attestation_type.deny, expected) with input.attestations as attestations 63 | } 64 | 65 | test_rule_data_validation if { 66 | d := {"known_attestation_types": [ 67 | # Wrong type 68 | 1, 69 | # Duplicated items 70 | "foo", 71 | 
"foo", 72 | ]} 73 | 74 | expected := { 75 | { 76 | "code": "attestation_type.known_attestation_types_provided", 77 | "msg": "Rule data known_attestation_types has unexpected format: (Root): array items[1,2] must be unique", 78 | "severity": "failure", 79 | }, 80 | { 81 | "code": "attestation_type.known_attestation_types_provided", 82 | "msg": "Rule data known_attestation_types has unexpected format: 0: Invalid type. Expected: string, given: integer", 83 | "severity": "failure", 84 | }, 85 | } 86 | 87 | lib.assert_equal_results(attestation_type.deny, expected) with data.rule_data as d 88 | with input.attestations as mock_data("foo") 89 | } 90 | -------------------------------------------------------------------------------- /policy/release/collection/github/github.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: github 4 | # description: >- 5 | # A set of policy rules to validate artifacts built on GitHub. 6 | package collection.github 7 | 8 | import rego.v1 9 | -------------------------------------------------------------------------------- /policy/release/collection/minimal/minimal.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: minimal 4 | # description: >- 5 | # Includes a minimal set of policy rules to ensure the build pipeline is 6 | # functioning as expected, and able to produce signed attestations of the 7 | # expected type. 8 | package collection.minimal 9 | 10 | import rego.v1 11 | -------------------------------------------------------------------------------- /policy/release/collection/policy_data/policy_data.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: policy_data 4 | # description: >- 5 | # Include policy rules responsible for validating rule data. 
6 | package collection.policy_data 7 | 8 | import rego.v1 9 | -------------------------------------------------------------------------------- /policy/release/collection/redhat/redhat.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: redhat 4 | # description: >- 5 | # Include the set of policy rules required for Red Hat products. 6 | package collection.redhat 7 | 8 | import rego.v1 9 | -------------------------------------------------------------------------------- /policy/release/collection/redhat_rpms/redhat_rpms.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: redhat_rpms 4 | # description: >- 5 | # Include the set of policy rules required for building Red Hat RPMs. 6 | package collection.redhat_rpms 7 | 8 | import rego.v1 9 | -------------------------------------------------------------------------------- /policy/release/collection/rhtap_multi_ci/rhtap_multi_ci.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: rhtap-multi-ci 4 | # description: >- 5 | # A set of policy rules to validate artifacts built using RHTAP Multi-CI pipelines. 6 | package collection.rhtap_multi_ci 7 | 8 | import rego.v1 9 | -------------------------------------------------------------------------------- /policy/release/collection/slsa3/slsa3.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: slsa3 4 | # description: >- 5 | # Includes policy rules required to meet SLSA Level 3. 
6 | package collection.slsa3 7 | 8 | import rego.v1 9 | -------------------------------------------------------------------------------- /policy/release/external_parameters/external_parameters.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: External parameters 4 | # description: >- 5 | # Verify the attribute .predicate.buildDefinition.externalParameters of a 6 | # SLSA Provenance v1.0 matches the expectation. 7 | # 8 | package external_parameters 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.json as j 14 | 15 | # METADATA 16 | # title: Pipeline run params 17 | # description: >- 18 | # Verify the PipelineRun was initialized with a set of expected parameters. 19 | # By default it asserts git-repo, git-revision, and output-image are provided 20 | # with non-empty values. This is configurable by the rule data key 21 | # `pipeline_run_params`. Any additional parameters are NOT allowed. 22 | # custom: 23 | # short_name: pipeline_run_params 24 | # failure_msg: PipelineRun params, %v, do not match expectation, %v. 25 | # 26 | deny contains result if { 27 | some provenance in lib.pipelinerun_attestations 28 | 29 | param_names := {p.name | 30 | some p in provenance.statement.predicate.buildDefinition.externalParameters.runSpec.params 31 | p.value != "" 32 | } 33 | expected_names := {n | some n in lib.rule_data(_rule_data_key)} 34 | 35 | expected_names != param_names 36 | result := lib.result_helper(rego.metadata.chain(), [param_names, expected_names]) 37 | } 38 | 39 | # METADATA 40 | # title: PipelineRun params provided 41 | # description: Confirm the `pipeline_run_params` rule data was provided. 42 | # custom: 43 | # short_name: pipeline_run_params_provided 44 | # failure_msg: '%s' 45 | # solution: Provide a non-empty list of expected PipelineRun parameters. 
46 | # collections: 47 | # - policy_data 48 | # 49 | deny contains result if { 50 | some e in _rule_data_errors 51 | result := lib.result_helper_with_severity(rego.metadata.chain(), [e.message], e.severity) 52 | } 53 | 54 | # METADATA 55 | # title: Restrict shared volumes 56 | # description: >- 57 | # Verify the PipelineRun did not use any pre-existing PersistentVolumeClaim 58 | # workspaces. 59 | # custom: 60 | # short_name: restrict_shared_volumes 61 | # failure_msg: PipelineRun uses shared volumes, %v. 62 | # 63 | deny contains result if { 64 | some provenance in lib.pipelinerun_attestations 65 | shared_workspaces := {w | 66 | some w in provenance.statement.predicate.buildDefinition.externalParameters.runSpec.workspaces 67 | w.persistentVolumeClaim 68 | } 69 | count(shared_workspaces) > 0 70 | result := lib.result_helper(rego.metadata.chain(), [shared_workspaces]) 71 | } 72 | 73 | # Verify pipeline_run_params is a non-empty list of strings 74 | _rule_data_errors contains error if { 75 | some e in j.validate_schema( 76 | lib.rule_data(_rule_data_key), 77 | { 78 | "$schema": "http://json-schema.org/draft-07/schema#", 79 | "type": "array", 80 | "items": {"type": "string"}, 81 | "uniqueItems": true, 82 | "minItems": 1, 83 | }, 84 | ) 85 | error := { 86 | "message": sprintf("Rule data %s has unexpected format: %s", [_rule_data_key, e.message]), 87 | "severity": e.severity, 88 | } 89 | } 90 | 91 | _rule_data_key := "pipeline_run_params" 92 | -------------------------------------------------------------------------------- /policy/release/external_parameters/external_parameters_test.rego: -------------------------------------------------------------------------------- 1 | package external_parameters_test 2 | 3 | import rego.v1 4 | 5 | import data.external_parameters 6 | import data.lib 7 | 8 | test_success if { 9 | lib.assert_empty(external_parameters.deny) with input.attestations as [good_provenance] 10 | } 11 | 12 | test_pipeline_run_params_missing_params if { 13 | # 
regal ignore:line-length 14 | provenance := json.remove(good_provenance, ["/statement/predicate/buildDefinition/externalParameters/runSpec/params/0"]) 15 | expected := {{ 16 | "code": "external_parameters.pipeline_run_params", 17 | # regal ignore:line-length 18 | "msg": `PipelineRun params, {"git-revision", "output-image"}, do not match expectation, {"git-repo", "git-revision", "output-image"}.`, 19 | }} 20 | lib.assert_equal_results(external_parameters.deny, expected) with input.attestations as [provenance] 21 | } 22 | 23 | test_pipeline_run_params_empty_values if { 24 | provenance := json.patch(good_provenance, [{ 25 | "op": "add", 26 | "path": "/statement/predicate/buildDefinition/externalParameters/runSpec/params/0/value", 27 | "value": "", 28 | }]) 29 | expected := {{ 30 | "code": "external_parameters.pipeline_run_params", 31 | # regal ignore:line-length 32 | "msg": `PipelineRun params, {"git-revision", "output-image"}, do not match expectation, {"git-repo", "git-revision", "output-image"}.`, 33 | }} 34 | lib.assert_equal_results(external_parameters.deny, expected) with input.attestations as [provenance] 35 | } 36 | 37 | test_restrict_shared_volumes_existing_pvc if { 38 | provenance := json.patch(good_provenance, [{ 39 | "op": "add", 40 | "path": "/statement/predicate/buildDefinition/externalParameters/runSpec/workspaces/0", 41 | "value": {"persistentVolumeClaim": {"claimName": "my-pvc"}}, 42 | }]) 43 | expected := {{ 44 | "code": "external_parameters.restrict_shared_volumes", 45 | "msg": "PipelineRun uses shared volumes, {{\"persistentVolumeClaim\": {\"claimName\": \"my-pvc\"}}}.", 46 | }} 47 | lib.assert_equal_results(external_parameters.deny, expected) with input.attestations as [provenance] 48 | } 49 | 50 | test_rule_data_validation if { 51 | d := {"pipeline_run_params": [ 52 | # Wrong type 53 | 1, 54 | # Duplicated items 55 | "foo", 56 | "foo", 57 | ]} 58 | 59 | expected := { 60 | { 61 | "code": "external_parameters.pipeline_run_params_provided", 62 | 
"msg": "Rule data pipeline_run_params has unexpected format: (Root): array items[1,2] must be unique", 63 | "severity": "failure", 64 | }, 65 | { 66 | "code": "external_parameters.pipeline_run_params_provided", 67 | "msg": "Rule data pipeline_run_params has unexpected format: 0: Invalid type. Expected: string, given: integer", 68 | "severity": "failure", 69 | }, 70 | } 71 | 72 | provenance := json.patch(good_provenance, [{ 73 | "op": "add", 74 | "path": "/statement/predicate/buildDefinition/externalParameters/runSpec/params", 75 | "value": [{"name": 1, "value": "one"}, {"name": "foo", "value": "oof"}], 76 | }]) 77 | lib.assert_equal_results(external_parameters.deny, expected) with data.rule_data as d 78 | with input.attestations as [provenance] 79 | } 80 | 81 | good_provenance := {"statement": { 82 | "predicateType": "https://slsa.dev/provenance/v1", 83 | "predicate": {"buildDefinition": { 84 | "buildType": "https://tekton.dev/chains/v2/slsa", 85 | "externalParameters": {"runSpec": { 86 | "pipelineSpec": {}, 87 | "params": [ 88 | {"name": "git-repo", "value": "some-git-repo"}, 89 | {"name": "git-revision", "value": "some-git-revision"}, 90 | {"name": "output-image", "value": "some-output-image"}, 91 | ], 92 | "workspaces": [{"volumeClaimTemplate": {"spec": {}}}], 93 | }}, 94 | }}, 95 | }} 96 | -------------------------------------------------------------------------------- /policy/release/git_branch/git_branch.rego: -------------------------------------------------------------------------------- 1 | package git_branch 2 | 3 | import data.lib 4 | import rego.v1 5 | 6 | # METADATA 7 | # title: Only allow builds from a trusted branch 8 | # description: Build must originate from a configured branch pattern (e.g., 'refs/heads/main') 9 | # custom: 10 | # short_name: git_branch 11 | # failure_msg: Build is from a branch %s which is not a trusted branch 12 | # collections: 13 | # - redhat_rpms 14 | # effective_on: 2025-07-01 15 | deny contains result if { 16 | some task in 
lib.tasks_from_pipelinerun 17 | 18 | # Note that we're assuming that the annotation exists. 19 | # This will not produce a violation if the annotation is missing 20 | branch := task.invocation.environment.annotations["pipelinesascode.tekton.dev/source-branch"] 21 | not matches_any(branch) 22 | result := lib.result_helper(rego.metadata.chain(), [branch]) 23 | } 24 | 25 | matches_any(branch) if { 26 | some pattern in lib.rule_data("allowed_branch_patterns") 27 | regex.match(pattern, branch) 28 | } 29 | -------------------------------------------------------------------------------- /policy/release/git_branch/git_branch_test.rego: -------------------------------------------------------------------------------- 1 | package git_branch_test 2 | 3 | import data.git_branch 4 | import data.lib 5 | import rego.v1 6 | 7 | single_test_case(branch, expected_results) if { 8 | # regal ignore:line-length 9 | mock_input := {"attestations": [{"statement": {"predicate": {"buildConfig": {"tasks": [{"invocation": {"environment": {"annotations": {"pipelinesascode.tekton.dev/source-branch": branch}}}}]}}}}]} 10 | 11 | mock_rule_data := ["^refs/heads/main$", "^refs/heads/release-[23]$"] 12 | 13 | mock_tasks := mock_input.attestations[0].statement.predicate.buildConfig.tasks 14 | 15 | # regal ignore:with-outside-test-context 16 | lib.assert_equal_results(expected_results, git_branch.deny) with input as mock_input 17 | with lib.rule_data as mock_rule_data 18 | with lib.tasks_from_pipelinerun as mock_tasks 19 | } 20 | 21 | test_allow_with_main_branch if { 22 | single_test_case("refs/heads/main", []) 23 | } 24 | 25 | test_allow_with_release_branch if { 26 | single_test_case("refs/heads/release-2", []) 27 | } 28 | 29 | test_deny_with_disallowed_branch if { 30 | expected := {{ 31 | "code": "git_branch.git_branch", 32 | "msg": "Build is from a branch refs/heads/feature-branch which is not a trusted branch", 33 | }} 34 | single_test_case("refs/heads/feature-branch", expected) 35 | } 36 | 37 | 
test_deny_with_unmatched_branch if { 38 | expected := {{ 39 | "code": "git_branch.git_branch", 40 | "msg": "Build is from a branch refs/heads/release-1 which is not a trusted branch", 41 | }} 42 | single_test_case("refs/heads/release-1", expected) 43 | } 44 | -------------------------------------------------------------------------------- /policy/release/hermetic_build_task/hermetic_build_task.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Hermetic build task 4 | # description: >- 5 | # This package verifies the build task in the attestation was invoked 6 | # with the expected parameters to perform a hermetic build. 7 | # 8 | package hermetic_build_task 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.tekton 14 | 15 | # METADATA 16 | # title: Build task called with hermetic param set 17 | # description: >- 18 | # Verify the build task in the PipelineRun attestation 19 | # was invoked with the proper parameters to make the build process 20 | # hermetic. 21 | # custom: 22 | # short_name: build_task_hermetic 23 | # failure_msg: Build task was not invoked with the hermetic parameter set 24 | # solution: >- 25 | # Make sure the task that builds the image has a parameter named 'HERMETIC' and 26 | # it's set to 'true'. 
27 | # collections: 28 | # - redhat 29 | # depends_on: 30 | # - attestation_type.known_attestation_type 31 | # 32 | deny contains result if { 33 | _hermetic_build != {"true"} 34 | result := lib.result_helper(rego.metadata.chain(), []) 35 | } 36 | 37 | _hermetic_build contains value if { 38 | some attestation in lib.pipelinerun_attestations 39 | some task in tekton.build_tasks(attestation) 40 | value := tekton.task_param(task, "HERMETIC") 41 | } 42 | -------------------------------------------------------------------------------- /policy/release/hermetic_build_task/hermetic_build_task_test.rego: -------------------------------------------------------------------------------- 1 | package hermetic_build_task_test 2 | 3 | import rego.v1 4 | 5 | import data.hermetic_build_task 6 | import data.lib 7 | 8 | test_hermetic_build if { 9 | lib.assert_empty(hermetic_build_task.deny) with input.attestations as [_good_attestation] 10 | } 11 | 12 | test_not_hermetic_build if { 13 | expected := {{ 14 | "code": "hermetic_build_task.build_task_hermetic", 15 | "msg": "Build task was not invoked with the hermetic parameter set", 16 | }} 17 | 18 | hermetic_not_true := json.patch(_good_attestation, [{ 19 | "op": "add", 20 | "path": "/statement/predicate/buildConfig/tasks/0/invocation/parameters/HERMETIC", 21 | "value": "false", 22 | }]) 23 | lib.assert_equal_results(expected, hermetic_build_task.deny) with input.attestations as [hermetic_not_true] 24 | 25 | # regal ignore:line-length 26 | hermetic_missing := json.remove(_good_attestation, ["/statement/predicate/buildConfig/tasks/0/invocation/parameters/HERMETIC"]) 27 | lib.assert_equal_results(expected, hermetic_build_task.deny) with input.attestations as [hermetic_missing] 28 | } 29 | 30 | test_hermetic_build_many_build_tasks if { 31 | task1 := { 32 | "results": [ 33 | {"name": "IMAGE_URL", "value": "registry/repo"}, 34 | {"name": "IMAGE_DIGEST", "value": "digest"}, 35 | ], 36 | "ref": {"kind": "Task", "name": "build-1", "bundle": 
"reg.img/spam@sha256:abc"}, 37 | "invocation": {"parameters": {"HERMETIC": "true"}}, 38 | } 39 | 40 | task2 := { 41 | "results": [ 42 | {"name": "IMAGE_URL", "value": "registry/repo"}, 43 | {"name": "IMAGE_DIGEST", "value": "digest"}, 44 | ], 45 | "ref": {"kind": "Task", "name": "build-2", "bundle": "reg.img/spam@sha256:abc"}, 46 | "invocation": {"parameters": {"HERMETIC": "true"}}, 47 | } 48 | 49 | attestation := {"statement": {"predicate": { 50 | "buildType": lib.tekton_pipeline_run, 51 | "buildConfig": {"tasks": [task1, task2]}, 52 | }}} 53 | lib.assert_empty(hermetic_build_task.deny) with input.attestations as [attestation] 54 | 55 | attestation_mixed_hermetic := json.patch( 56 | {"statement": {"predicate": { 57 | "buildType": lib.tekton_pipeline_run, 58 | "buildConfig": {"tasks": [task1, task2]}, 59 | }}}, 60 | [{ 61 | "op": "replace", 62 | "path": "/statement/predicate/buildConfig/tasks/0/invocation/parameters/HERMETIC", 63 | "value": "false", 64 | }], 65 | ) 66 | expected := {{ 67 | "code": "hermetic_build_task.build_task_hermetic", 68 | "msg": "Build task was not invoked with the hermetic parameter set", 69 | }} 70 | lib.assert_equal_results(expected, hermetic_build_task.deny) with input.attestations as [attestation_mixed_hermetic] 71 | 72 | attestation_non_hermetic := json.patch( 73 | {"statement": {"predicate": { 74 | "buildType": lib.tekton_pipeline_run, 75 | "buildConfig": {"tasks": [task1, task2]}, 76 | }}}, 77 | [ 78 | { 79 | "op": "replace", 80 | "path": "/statement/predicate/buildConfig/tasks/0/invocation/parameters/HERMETIC", 81 | "value": "false", 82 | }, 83 | { 84 | "op": "replace", 85 | "path": "/statement/predicate/buildConfig/tasks/1/invocation/parameters/HERMETIC", 86 | "value": "false", 87 | }, 88 | ], 89 | ) 90 | lib.assert_equal_results(expected, hermetic_build_task.deny) with input.attestations as [attestation_non_hermetic] 91 | } 92 | 93 | _good_attestation := {"statement": {"predicate": { 94 | "buildType": lib.tekton_pipeline_run, 95 | 
"buildConfig": {"tasks": [{ 96 | "results": [ 97 | {"name": "IMAGE_URL", "value": "registry/repo"}, 98 | {"name": "IMAGE_DIGEST", "value": "digest"}, 99 | ], 100 | "ref": {"kind": "Task", "name": "any-task", "bundle": "reg.img/spam@sha256:abc"}, 101 | "invocation": {"parameters": {"HERMETIC": "true"}}, 102 | }]}, 103 | }}} 104 | -------------------------------------------------------------------------------- /policy/release/provenance_materials/provenance_materials.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Provenance Materials 4 | # description: >- 5 | # This package provides rules for verifying the contents of the materials section 6 | # of the SLSA Provenance attestation. 7 | # 8 | package provenance_materials 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.tekton 14 | 15 | # METADATA 16 | # title: Git clone task found 17 | # description: >- 18 | # Confirm that the attestation contains a git-clone task with `commit` and `url` task results. 19 | # custom: 20 | # short_name: git_clone_task_found 21 | # failure_msg: Task git-clone not found 22 | # solution: >- 23 | # Make sure the build pipeline contains a task named 'git-clone'. 24 | # collections: 25 | # - minimal 26 | # - redhat 27 | # - redhat_rpms 28 | # depends_on: 29 | # - attestation_type.known_attestation_type 30 | # 31 | deny contains result if { 32 | some attestation in lib.pipelinerun_attestations 33 | count(tekton.git_clone_tasks(attestation)) == 0 34 | result := lib.result_helper(rego.metadata.chain(), []) 35 | } 36 | 37 | # METADATA 38 | # title: Git clone source matches materials provenance 39 | # description: >- 40 | # Confirm that the result of the git-clone task is included in the materials section of the SLSA 41 | # provenance attestation. 
42 | # custom: 43 | # short_name: git_clone_source_matches_provenance 44 | # failure_msg: Entry in materials for the git repo %q and commit %q not found 45 | # solution: >- 46 | # The build pipeline must contain a task named 'git-clone' and that task must emit 47 | # results named 'url' and 'commit' and contain the clone git repository and commit, 48 | # respectively. 49 | # collections: 50 | # - minimal 51 | # - redhat 52 | # - redhat_rpms 53 | # depends_on: 54 | # - provenance_materials.git_clone_task_found 55 | # 56 | deny contains result if { 57 | some attestation in lib.pipelinerun_attestations 58 | 59 | some task in tekton.git_clone_tasks(attestation) 60 | url := _normalize_git_url(tekton.task_result(task, "url")) 61 | commit := tekton.task_result(task, "commit") 62 | 63 | materials := [m | 64 | some m in attestation.statement.predicate.materials 65 | m.uri == url 66 | m.digest.sha1 == commit 67 | ] 68 | count(materials) == 0 69 | 70 | result := lib.result_helper(rego.metadata.chain(), [url, commit]) 71 | } 72 | 73 | _normalize_git_url(url) := _suffix_git_url(_prefix_git_url(url)) 74 | 75 | _prefix_git_url(url) := normalized if { 76 | prefix := "git+" 77 | not strings.any_prefix_match(url, prefix) 78 | normalized := concat("", [prefix, url]) 79 | } else := normalized if { 80 | normalized := url 81 | } 82 | 83 | _suffix_git_url(url) := normalized if { 84 | suffix := ".git" 85 | not strings.any_suffix_match(url, suffix) 86 | normalized := concat("", [url, suffix]) 87 | } else := normalized if { 88 | normalized := url 89 | } 90 | -------------------------------------------------------------------------------- /policy/release/quay_expiration/quay_expiration.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Quay expiration 4 | # description: >- 5 | # Policies to prevent releasing an image to quay that has a quay 6 | # expiration date. 
In Konflux images with an expiration date are 7 | # produced by "on-pr" build pipelines, i.e. pre-merge CI builds, 8 | # so this is intended to prevent accidentally releasing a CI build. 9 | # 10 | package quay_expiration 11 | 12 | import rego.v1 13 | 14 | import data.lib 15 | 16 | # METADATA 17 | # title: Expires label 18 | # description: >- 19 | # Check the image metadata for the presence of a "quay.expires-after" 20 | # label. If it's present then produce a violation. This check is enforced 21 | # only for a "release", "production", or "staging" pipeline, as determined by 22 | # the value of the `pipeline_intention` rule data. 23 | # custom: 24 | # short_name: expires_label 25 | # failure_msg: The image has a 'quay.expires-after' label set to '%s' 26 | # solution: >- 27 | # Make sure the image is built without setting the "quay.expires-after" label. This 28 | # label is usually set if the container image was built by an "on-pr" pipeline 29 | # during pre-merge CI. 30 | # collections: 31 | # - redhat 32 | # 33 | deny contains result if { 34 | _expires_label_check_applies 35 | 36 | # This is where we can access the image labels 37 | some label_name, label_value in input.image.config.Labels 38 | 39 | # The quay.expires-after label is present 40 | label_name == "quay.expires-after" 41 | 42 | # This is an edge case that may never happen, but let's assume that if 43 | # the value is an empty string then it is not an expiration and therefore 44 | # can be permitted 45 | count(label_value) > 0 46 | 47 | # Send up the violation the details 48 | result := lib.result_helper(rego.metadata.chain(), [label_value]) 49 | } 50 | 51 | # The check only applies if we're intending to release the image 52 | default _expires_label_check_applies := false 53 | 54 | _expires_label_check_applies if { 55 | lib.rule_data("pipeline_intention") in {"release", "production", "staging"} 56 | } 57 | -------------------------------------------------------------------------------- 
/policy/release/quay_expiration/quay_expiration_test.rego: -------------------------------------------------------------------------------- 1 | package quay_expiration_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.quay_expiration 7 | 8 | test_ci_pipeline if { 9 | # Should not produce violations when we're in a non-release pipeline 10 | lib.assert_equal(false, quay_expiration._expires_label_check_applies) with data.rule_data as _rule_data_for_ci 11 | 12 | lib.assert_empty(quay_expiration.deny) with input.image as _image_expires_none 13 | with data.rule_data as _rule_data_for_ci 14 | 15 | lib.assert_empty(quay_expiration.deny) with input.image as _image_expires_blank 16 | with data.rule_data as _rule_data_for_ci 17 | 18 | lib.assert_empty(quay_expiration.deny) with input.image as _image_expires_5d 19 | with data.rule_data as _rule_data_for_ci 20 | } 21 | 22 | test_release_pipeline if { 23 | # Should produce violations when we're in a release pipeline 24 | lib.assert_equal(true, quay_expiration._expires_label_check_applies) with data.rule_data as _rule_data_for_release 25 | 26 | lib.assert_empty(quay_expiration.deny) with input.image as _image_expires_none 27 | with data.rule_data as _rule_data_for_release 28 | 29 | lib.assert_empty(quay_expiration.deny) with input.image as _image_expires_blank 30 | with data.rule_data as _rule_data_for_release 31 | 32 | expected := {{ 33 | "code": "quay_expiration.expires_label", 34 | "msg": "The image has a 'quay.expires-after' label set to '5d'", 35 | }} 36 | lib.assert_equal_results(expected, quay_expiration.deny) with input.image as _image_expires_5d 37 | with data.rule_data as _rule_data_for_release 38 | } 39 | 40 | _image_expires_5d := {"config": {"Labels": { 41 | "foo": "bar", 42 | "quay.expires-after": "5d", 43 | }}} 44 | 45 | _image_expires_blank := {"config": {"Labels": { 46 | "foo": "bar", 47 | "quay.expires-after": "", 48 | }}} 49 | 50 | _image_expires_none := {"config": {"Labels": {"foo": "bar"}}} 
51 | 52 | _rule_data_for_ci := {} 53 | 54 | _rule_data_for_release := {"pipeline_intention": "release"} 55 | -------------------------------------------------------------------------------- /policy/release/rhtap_multi_ci/rhtap_multi_ci_test.rego: -------------------------------------------------------------------------------- 1 | package rhtap_multi_ci_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.rhtap_multi_ci 7 | 8 | test_atts_happy_path if { 9 | lib.assert_empty(rhtap_multi_ci.deny) with input.attestations as [good_att] 10 | lib.assert_empty(rhtap_multi_ci.deny) with input.attestations as [ignored_att, good_att] 11 | } 12 | 13 | test_atts_missing if { 14 | expected := { 15 | "code": "rhtap_multi_ci.attestation_found", 16 | "msg": sprintf("%s%s%s%s%s", [ 17 | "A SLSA v1.0 provenance with one of the following RHTAP Multi-CI build types was not found:", 18 | " 'https://redhat.com/rhtap/slsa-build-types/jenkins-build/v1',", 19 | " 'https://redhat.com/rhtap/slsa-build-types/github-build/v1',", 20 | " 'https://redhat.com/rhtap/slsa-build-types/gitlab-build/v1',", 21 | " 'https://redhat.com/rhtap/slsa-build-types/azure-build/v1'.", 22 | ]), 23 | } 24 | 25 | lib.assert_equal_results({expected}, rhtap_multi_ci.deny) with input.attestations as [] 26 | lib.assert_equal_results({expected}, rhtap_multi_ci.deny) with input.attestations as [ignored_att] 27 | } 28 | 29 | test_fields_missing if { 30 | expected := { 31 | { 32 | "code": "rhtap_multi_ci.attestation_format", 33 | "msg": "RHTAP jenkins attestation problem: runDetails.metadata: invocationID is required", 34 | }, 35 | { 36 | "code": "rhtap_multi_ci.attestation_format", 37 | "msg": "RHTAP jenkins attestation problem: runDetails.builder: id is required", 38 | }, 39 | } 40 | lib.assert_equal_results(expected, rhtap_multi_ci.deny) with input.attestations as [missing_fields_att] 41 | } 42 | 43 | # Not very useful except to get 100% coverage 44 | # (I don't feel like repeating the above tests with the
other two build types) 45 | test_schema_sanity if { 46 | lib.assert_not_equal(rhtap_multi_ci._predicate_schema_base, rhtap_multi_ci._predicate_schema("jenkins")) 47 | lib.assert_equal(rhtap_multi_ci._predicate_schema_base, rhtap_multi_ci._predicate_schema("github")) 48 | lib.assert_equal(rhtap_multi_ci._predicate_schema_base, rhtap_multi_ci._predicate_schema("gitlab")) 49 | } 50 | 51 | good_build_type := "https://redhat.com/rhtap/slsa-build-types/jenkins-build/v1" 52 | 53 | good_att := mock_att(good_build_type, { 54 | "metadata": {"invocationID": "foo"}, 55 | "builder": {"id": "42", "version": {}}, 56 | }) 57 | 58 | missing_fields_att := mock_att(good_build_type, { 59 | "metadata": {"vacationID": "foo"}, 60 | "builder": {"name": "Bob", "version": {}}, 61 | }) 62 | 63 | ignored_att := mock_att("https://other/build/type/v1", {}) 64 | 65 | mock_att(build_type, run_details) := {"statement": { 66 | "predicateType": "https://slsa.dev/provenance/v1", 67 | "predicate": { 68 | "buildDefinition": {"buildType": build_type}, 69 | "runDetails": run_details, 70 | }, 71 | }} 72 | 73 | test_rhtap_build_type if { 74 | lib.assert_equal( 75 | "https://redhat.com/rhtap/slsa-build-types/bacon-build/v1", 76 | rhtap_multi_ci._build_type("bacon"), 77 | ) 78 | } 79 | -------------------------------------------------------------------------------- /policy/release/rpm_packages/rpm_packages.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: RPM Packages 4 | # description: >- 5 | # Rules used to verify different properties of specific RPM packages found in the SBOM of the 6 | # image being validated. 
7 | # 8 | package rpm_packages 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.image 14 | import data.lib.sbom 15 | import data.lib.tekton 16 | 17 | # METADATA 18 | # title: Unique Version 19 | # description: >- 20 | # Check if there is more than one version of the same RPM installed across different 21 | # architectures. This check only applies for Image Indexes, aka multi-platform images. 22 | # Use the `non_unique_rpm_names` rule data key to ignore certain RPMs. 23 | # custom: 24 | # short_name: unique_version 25 | # failure_msg: 'Multiple versions of the %q RPM were found: %s' 26 | # collections: 27 | # - redhat 28 | # # Pushed back due to https://issues.redhat.com/browse/EC-1232 29 | # effective_on: 2025-06-28T00:00:00Z 30 | # 31 | deny contains result if { 32 | image.is_image_index(input.image.ref) 33 | 34 | some name, versions in grouped_rpm_purls 35 | count(versions) > 1 36 | not name in lib.rule_data("non_unique_rpm_names") 37 | result := lib.result_helper_with_term( 38 | rego.metadata.chain(), 39 | [name, concat(", ", versions)], 40 | name, 41 | ) 42 | } 43 | 44 | # grouped_rpm_purls groups the found RPMs by name to facilitate detecting different versions. It 45 | # has the following structure: 46 | # { 47 | # "spam-maps": {"1.2.3-0", "1.2.3-9"}, 48 | # "bacon": {"7.8.8-8"}, 49 | # } 50 | grouped_rpm_purls[name] contains version if { 51 | some rpm_purl in all_rpm_purls 52 | rpm := ec.purl.parse(rpm_purl) 53 | name := rpm.name 54 | 55 | # NOTE: This includes both version and release. 
56 | version := rpm.version 57 | } 58 | 59 | all_rpm_purls contains rpm.purl if { 60 | some attestation in lib.pipelinerun_attestations 61 | some build_task in tekton.build_tasks(attestation) 62 | some result in tekton.task_results(build_task) 63 | result.name == "SBOM_BLOB_URL" 64 | url := result.value 65 | blob := ec.oci.blob(url) 66 | s := json.unmarshal(blob) 67 | some rpm in sbom.rpms_from_sbom(s) 68 | } 69 | -------------------------------------------------------------------------------- /policy/release/rpm_pipeline/rpm_pipeline.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: RPM Pipeline 4 | # description: >- 5 | # This package provides rules for verifying the RPMs are built in an approved pipeline 6 | # 7 | package rpm_pipeline 8 | 9 | import rego.v1 10 | 11 | import data.lib 12 | import data.lib.tekton 13 | 14 | _pipeline_key := "build.appstudio.redhat.com/pipeline" 15 | 16 | _rule_data_key := "allowed_rpm_build_pipelines" 17 | 18 | # METADATA 19 | # title: Task version invalid_pipeline 20 | # description: >- 21 | # The Tekton Task used specifies an invalid pipeline. The Task is annotated with 22 | # `build.appstudio.redhat.com/pipeline` annotation, which must be in the set of 23 | # `allowed_rpm_build_pipelines` in the rule data. 
24 | # custom: 25 | # short_name: invalid_pipeline 26 | # failure_msg: >- 27 | # Task %q uses invalid pipeline %s, which is not in the list of valid pipelines: %s 28 | # collections: 29 | # - redhat_rpms 30 | # depends_on: 31 | # - tasks.pipeline_has_tasks 32 | # 33 | deny contains result if { 34 | some att in lib.pipelinerun_attestations 35 | some task in tekton.tasks(att) 36 | 37 | labels := tekton.task_labels(task) 38 | pipeline := labels[_pipeline_key] 39 | allowed_pipelines := lib.rule_data(_rule_data_key) 40 | 41 | not pipeline in allowed_pipelines 42 | 43 | result := lib.result_helper( 44 | rego.metadata.chain(), 45 | [tekton.pipeline_task_name(task), pipeline, concat(",", allowed_pipelines)], 46 | ) 47 | } 48 | -------------------------------------------------------------------------------- /policy/release/rpm_pipeline/rpm_pipeline_test.rego: -------------------------------------------------------------------------------- 1 | package rpm_pipeline_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.rpm_pipeline 7 | 8 | test_invalid_pipeline if { 9 | attestations := [{"statement": {"predicate": { 10 | "buildType": lib.tekton_pipeline_run, 11 | "buildConfig": {"tasks": [_valid_pipeline_task, _invalid_pipeline_task]}, 12 | }}}] 13 | 14 | expected := {{ 15 | "code": "rpm_pipeline.invalid_pipeline", 16 | "msg": "Task \"build\" uses invalid pipeline not_allowed, which is not in the list of valid pipelines: foobar", 17 | }} 18 | lib.assert_equal_results(expected, rpm_pipeline.deny) with data.rule_data.allowed_rpm_build_pipelines as ["foobar"] 19 | with input.attestations as attestations 20 | } 21 | 22 | test_valid_pipelines_met if { 23 | attestations := [{"statement": {"predicate": { 24 | "buildType": lib.tekton_pipeline_run, 25 | "buildConfig": {"tasks": [_valid_pipeline_task, _valid_pipeline_task_2]}, 26 | }}}] 27 | 28 | lib.assert_empty(rpm_pipeline.deny) with data.rule_data.allowed_rpm_build_pipelines as ["foobar", "baz"] 29 | with
input.attestations as attestations 30 | } 31 | 32 | _invalid_pipeline_task := { 33 | "name": "build", 34 | "status": "Succeeded", 35 | "ref": {"name": "init", "kind": "Task", "bundle": "quay.io/konflux-ci/tekton-catalog/task-init"}, 36 | "invocation": {"environment": {"labels": {"build.appstudio.redhat.com/pipeline": "not_allowed"}}}, 37 | } 38 | 39 | _valid_pipeline_task := { 40 | "name": "init", 41 | "status": "Succeeded", 42 | "ref": {"name": "init", "kind": "Task", "bundle": "quay.io/konflux-ci/tekton-catalog/task-init"}, 43 | "invocation": {"environment": {"labels": {"build.appstudio.redhat.com/pipeline": "foobar"}}}, 44 | } 45 | 46 | _valid_pipeline_task_2 := { 47 | "name": "get-rpm-sources", 48 | "status": "Succeeded", 49 | "ref": {"name": "init", "kind": "Task", "bundle": "quay.io/konflux-ci/tekton-catalog/task-init"}, 50 | "invocation": {"environment": {"labels": {"build.appstudio.redhat.com/pipeline": "baz"}}}, 51 | } 52 | -------------------------------------------------------------------------------- /policy/release/sbom/sbom.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: SBOM 4 | # description: >- 5 | # Checks general properties of the SBOMs associated with the image being validated. More specific 6 | # rules for SPDX and CycloneDX SBOMs are in separate packages. 7 | # 8 | package sbom 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.konflux 14 | 15 | # METADATA 16 | # title: Found 17 | # description: Confirm an SBOM attestation exists. 18 | # custom: 19 | # short_name: found 20 | # failure_msg: No SBOM attestations found 21 | # solution: >- 22 | # Make sure the build process produces an SBOM attestation. 
23 | # collections: 24 | # - minimal 25 | # - redhat 26 | deny contains result if { 27 | # TODO: Workaround until Konflux produces SBOMs for Image Indexes: 28 | # https://issues.redhat.com/browse/KONFLUX-4330 29 | not konflux.is_validating_image_index 30 | 31 | count(_sboms) == 0 32 | result := lib.result_helper(rego.metadata.chain(), []) 33 | } 34 | 35 | # METADATA 36 | # title: Disallowed packages list is provided 37 | # description: >- 38 | # Confirm the `disallowed_packages` and `disallowed_attributes` rule data were 39 | # provided, since they are required by the policy rules in this package. 40 | # custom: 41 | # short_name: disallowed_packages_provided 42 | # failure_msg: "%s" 43 | # solution: >- 44 | # Provide a list of disallowed packages or package attributes in the 45 | # expected format. 46 | # collections: 47 | # - redhat 48 | # - policy_data 49 | # - redhat_rpms 50 | deny contains result if { 51 | some error in lib.sbom.rule_data_errors 52 | result := lib.result_helper_with_severity(rego.metadata.chain(), [error.message], error.severity) 53 | } 54 | 55 | _sboms := array.concat(lib.sbom.spdx_sboms, lib.sbom.cyclonedx_sboms) 56 | -------------------------------------------------------------------------------- /policy/release/slsa_build_build_service/slsa_build_build_service.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: SLSA - Build - Build Service 4 | # description: >- 5 | # The SLSA requirement states the following: 6 | # 7 | # "All build steps ran using some build service, not on a 8 | # developer’s workstation." 9 | # 10 | # This package verifies the requirement by asserting the image was 11 | # built by Tekton Pipelines. 
12 | # 13 | package slsa_build_build_service 14 | 15 | import rego.v1 16 | 17 | import data.lib 18 | import data.lib.json as j 19 | 20 | # METADATA 21 | # title: SLSA Builder ID found 22 | # description: >- 23 | # Verify that the attestation attribute predicate.builder.id is set. 24 | # custom: 25 | # short_name: slsa_builder_id_found 26 | # failure_msg: Builder ID not set in attestation 27 | # solution: >- 28 | # The builder id in the attestation is missing. Make sure the build system 29 | # is setting the build id when generating an attestation. 30 | # collections: 31 | # - slsa3 32 | # - redhat 33 | # depends_on: 34 | # - attestation_type.known_attestation_type 35 | # 36 | deny contains result if { 37 | some att in lib.pipelinerun_attestations 38 | not att.statement.predicate.builder.id 39 | result := lib.result_helper(rego.metadata.chain(), []) 40 | } 41 | 42 | # METADATA 43 | # title: SLSA Builder ID is known and accepted 44 | # description: >- 45 | # Verify that the attestation attribute predicate.builder.id is set to one 46 | # of the values in the `allowed_builder_ids` rule data, e.g. 47 | # "https://tekton.dev/chains/v2". 48 | # custom: 49 | # short_name: slsa_builder_id_accepted 50 | # failure_msg: Builder ID %q is unexpected 51 | # solution: >- 52 | # Make sure the build id is set to an expected value. The expected values 53 | # are set in the xref:ec-cli:ROOT:configuration.adoc#_data_sources[data sources]. 
54 | # collections: 55 | # - slsa3 56 | # - redhat 57 | # - redhat_rpms 58 | # depends_on: 59 | # - attestation_type.known_attestation_type 60 | # 61 | deny contains result if { 62 | allowed_builder_ids := lib.rule_data(_rule_data_key) 63 | some att in lib.pipelinerun_attestations 64 | builder_id := att.statement.predicate.builder.id 65 | not builder_id in allowed_builder_ids 66 | result := lib.result_helper(rego.metadata.chain(), [builder_id]) 67 | } 68 | 69 | # METADATA 70 | # title: Allowed builder IDs provided 71 | # description: >- 72 | # Confirm the `allowed_builder_ids` rule data was provided, since it is required by the policy 73 | # rules in this package. 74 | # custom: 75 | # short_name: allowed_builder_ids_provided 76 | # failure_msg: "%s" 77 | # collections: 78 | # - slsa3 79 | # - redhat 80 | # - redhat_rpms 81 | # - policy_data 82 | # 83 | deny contains result if { 84 | some e in _rule_data_errors 85 | result := lib.result_helper_with_severity(rego.metadata.chain(), [e.message], e.severity) 86 | } 87 | 88 | # Verify allowed_builder_ids is a non-empty list of strings 89 | _rule_data_errors contains error if { 90 | some e in j.validate_schema( 91 | lib.rule_data(_rule_data_key), 92 | { 93 | "$schema": "http://json-schema.org/draft-07/schema#", 94 | "type": "array", 95 | "items": {"type": "string"}, 96 | "uniqueItems": true, 97 | "minItems": 1, 98 | }, 99 | ) 100 | error := { 101 | "message": sprintf("Rule data %s has unexpected format: %s", [_rule_data_key, e.message]), 102 | "severity": e.severity, 103 | } 104 | } 105 | 106 | _rule_data_key := "allowed_builder_ids" 107 | -------------------------------------------------------------------------------- /policy/release/slsa_build_build_service/slsa_build_build_service_test.rego: -------------------------------------------------------------------------------- 1 | package slsa_build_build_service_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.slsa_build_build_service 7 | 8 | 
test_all_good if { 9 | builder_id := lib.rule_data("allowed_builder_ids")[0] 10 | lib.assert_empty(slsa_build_build_service.deny) with input.attestations as [_mock_attestation(builder_id)] 11 | } 12 | 13 | test_slsa_builder_id_found if { 14 | attestations := [ 15 | # Missing predicate.builder.id 16 | {"statement": {"predicate": { 17 | "builder": {}, 18 | "buildType": lib.tekton_pipeline_run, 19 | }}}, 20 | # Missing predicate.builder 21 | {"statement": {"predicate": {"buildType": lib.tekton_pipeline_run}}}, 22 | ] 23 | 24 | expected := {{ 25 | "code": "slsa_build_build_service.slsa_builder_id_found", 26 | "msg": "Builder ID not set in attestation", 27 | }} 28 | 29 | lib.assert_equal_results(expected, slsa_build_build_service.deny) with input.attestations as attestations 30 | } 31 | 32 | test_accepted_slsa_builder_id if { 33 | builder_id := "https://notket.ved/sniahc/2v" 34 | expected := {{ 35 | "code": "slsa_build_build_service.slsa_builder_id_accepted", 36 | "msg": "Builder ID \"https://notket.ved/sniahc/2v\" is unexpected", 37 | }} 38 | lib.assert_equal_results( 39 | expected, 40 | slsa_build_build_service.deny, 41 | ) with input.attestations as [_mock_attestation(builder_id)] 42 | } 43 | 44 | test_rule_data_format if { 45 | d := {"allowed_builder_ids": [ 46 | # Wrong type 47 | 1, 48 | # Duplicated items 49 | "foo", 50 | "foo", 51 | ]} 52 | 53 | expected := { 54 | { 55 | "code": "slsa_build_build_service.allowed_builder_ids_provided", 56 | "msg": "Rule data allowed_builder_ids has unexpected format: 0: Invalid type. 
Expected: string, given: integer", 57 | "severity": "failure", 58 | }, 59 | { 60 | "code": "slsa_build_build_service.allowed_builder_ids_provided", 61 | "msg": "Rule data allowed_builder_ids has unexpected format: (Root): array items[1,2] must be unique", 62 | "severity": "failure", 63 | }, 64 | } 65 | 66 | lib.assert_equal_results(slsa_build_build_service.deny, expected) with data.rule_data as d 67 | with input.attestations as [_mock_attestation("foo")] 68 | } 69 | 70 | _mock_attestation(builder_id) := {"statement": {"predicate": { 71 | "builder": {"id": builder_id}, 72 | "buildType": lib.tekton_pipeline_run, 73 | }}} 74 | -------------------------------------------------------------------------------- /policy/release/slsa_provenance_available/slsa_provenance_available.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: SLSA - Provenance - Available 4 | # description: >- 5 | # The SLSA Provenance Available requirement states the following: 6 | # 7 | # "The provenance is available to the consumer in a format that the consumer accepts. The 8 | # format SHOULD be in-toto SLSA Provenance, but another format MAY be used if both producer 9 | # and consumer agree and it meets all the other requirements." 10 | # 11 | # This package only accepts the in-toto SLSA Provenance format. 12 | # 13 | package slsa_provenance_available 14 | 15 | import rego.v1 16 | 17 | import data.lib 18 | import data.lib.json as j 19 | 20 | # METADATA 21 | # title: Expected attestation predicate type found 22 | # description: >- 23 | # Verify that the predicateType field of the attestation indicates the in-toto SLSA Provenance 24 | # format was used to attest the PipelineRun. 
25 | # custom: 26 | # short_name: attestation_predicate_type_accepted 27 | # failure_msg: Attestation predicate type %q is not an expected type (%s) 28 | # solution: >- 29 | # The predicate type field in the attestation does not match the 'allowed_predicate_types' field. 30 | # This field is set in the xref:ec-cli:ROOT:configuration.adoc#_data_sources[data sources]. 31 | # collections: 32 | # - minimal 33 | # - slsa3 34 | # - redhat 35 | # - redhat_rpms 36 | # depends_on: 37 | # - attestation_type.known_attestation_type 38 | # 39 | deny contains result if { 40 | some att in lib.pipelinerun_attestations 41 | allowed_predicate_types := lib.rule_data(_rule_data_key) 42 | not att.statement.predicateType in allowed_predicate_types 43 | result := lib.result_helper( 44 | rego.metadata.chain(), 45 | [att.statement.predicateType, concat(", ", allowed_predicate_types)], 46 | ) 47 | } 48 | 49 | # METADATA 50 | # title: Allowed predicate types provided 51 | # description: >- 52 | # Confirm the `allowed_predicate_types` rule data was provided, since it is required by the policy 53 | # rules in this package. 
54 | # custom: 55 | # short_name: allowed_predicate_types_provided 56 | # failure_msg: "%s" 57 | # collections: 58 | # - minimal 59 | # - slsa3 60 | # - redhat 61 | # - redhat_rpms 62 | # - policy_data 63 | # 64 | deny contains result if { 65 | some e in _rule_data_errors 66 | result := lib.result_helper_with_severity(rego.metadata.chain(), [e.message], e.severity) 67 | } 68 | 69 | # Verify allowed_predicate_types is a non-empty list of strings 70 | _rule_data_errors contains error if { 71 | some e in j.validate_schema( 72 | lib.rule_data(_rule_data_key), 73 | { 74 | "$schema": "http://json-schema.org/draft-07/schema#", 75 | "type": "array", 76 | "items": {"type": "string"}, 77 | "uniqueItems": true, 78 | "minItems": 1, 79 | }, 80 | ) 81 | error := { 82 | "message": sprintf("Rule data %s has unexpected format: %s", [_rule_data_key, e.message]), 83 | "severity": e.severity, 84 | } 85 | } 86 | 87 | _rule_data_key := "allowed_predicate_types" 88 | -------------------------------------------------------------------------------- /policy/release/slsa_provenance_available/slsa_provenance_available_test.rego: -------------------------------------------------------------------------------- 1 | package slsa_provenance_available_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.slsa_provenance_available 7 | 8 | test_expected_predicate_type if { 9 | attestations := _mock_attestations(["https://slsa.dev/provenance/v0.2"]) 10 | lib.assert_empty(slsa_provenance_available.deny) with input.attestations as attestations 11 | } 12 | 13 | test_att_predicate_type if { 14 | attestations := _mock_attestations(["spam"]) 15 | expected_deny := {{ 16 | "code": "slsa_provenance_available.attestation_predicate_type_accepted", 17 | "msg": "Attestation predicate type \"spam\" is not an expected type (https://slsa.dev/provenance/v0.2)", 18 | }} 19 | lib.assert_equal_results(slsa_provenance_available.deny, expected_deny) with input.attestations as attestations 20 | } 21 | 22 | 
test_rule_data_format if { 23 | d := {"allowed_predicate_types": [ 24 | # Wrong type 25 | 1, 26 | # Duplicated items 27 | "foo", 28 | "foo", 29 | ]} 30 | 31 | expected := { 32 | { 33 | "code": "slsa_provenance_available.allowed_predicate_types_provided", 34 | "msg": "Rule data allowed_predicate_types has unexpected format: 0: Invalid type. Expected: string, given: integer", 35 | "severity": "failure", 36 | }, 37 | { 38 | "code": "slsa_provenance_available.allowed_predicate_types_provided", 39 | "msg": "Rule data allowed_predicate_types has unexpected format: (Root): array items[1,2] must be unique", 40 | "severity": "failure", 41 | }, 42 | } 43 | 44 | lib.assert_equal_results(slsa_provenance_available.deny, expected) with data.rule_data as d 45 | with input.attestations as _mock_attestations("foo") 46 | } 47 | 48 | _mock_attestations(types) := [attestation | 49 | some type in types 50 | attestation := {"statement": { 51 | "predicateType": type, 52 | "predicate": {"buildType": lib.tekton_pipeline_run}, 53 | }} 54 | ] 55 | -------------------------------------------------------------------------------- /policy/release/slsa_source_version_controlled/slsa_source_version_controlled_test.rego: -------------------------------------------------------------------------------- 1 | package slsa_source_version_controlled_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.slsa_source_version_controlled 7 | 8 | test_all_good if { 9 | materials := [ 10 | { 11 | "uri": "git+https://example/repo", 12 | "digest": {"sha1": "49ef4c1f9273718b2421b2c076f09786ede5982c"}, 13 | }, 14 | { 15 | "uri": "git+https://exmaple/other-repo.git", 16 | "digest": {"sha1": "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"}, 17 | }, 18 | ] 19 | 20 | lib.assert_empty(slsa_source_version_controlled.deny) with input.attestations as [_mock_attestation(materials)] 21 | } 22 | 23 | test_non_git_uri if { 24 | materials := [ 25 | { 26 | "uri": "ggit+https://example/repo", 27 | "digest": {"sha1": 
"49ef4c1f9273718b2421b2c076f09786ede5982c"}, 28 | }, 29 | { 30 | "uri": "svn+https://exmaple/other-repo.git", 31 | "digest": {"sha1": "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"}, 32 | }, 33 | ] 34 | 35 | expected := { 36 | { 37 | "code": "slsa_source_version_controlled.materials_uri_is_git_repo", 38 | "msg": "Material URI \"ggit+https://example/repo\" is not a git URI", 39 | }, 40 | { 41 | "code": "slsa_source_version_controlled.materials_uri_is_git_repo", 42 | "msg": "Material URI \"svn+https://exmaple/other-repo.git\" is not a git URI", 43 | }, 44 | } 45 | 46 | lib.assert_equal_results( 47 | expected, 48 | slsa_source_version_controlled.deny, 49 | ) with input.attestations as [_mock_attestation(materials)] 50 | } 51 | 52 | # regal ignore:rule-length 53 | test_non_git_commit if { 54 | materials := [ 55 | { 56 | "uri": "git+https://example/repo", 57 | # Invalid hexadecimal character "g" 58 | "digest": {"sha1": "g9ef4c1f9273718b2421b2c076f09786ede5982c"}, 59 | }, 60 | { 61 | "uri": "git+https://exmaple/other-repo.git", 62 | # Too short, 39 characters 63 | "digest": {"sha1": "1d2d2f924e986ac86fdf7b36c94bcdf32beec15"}, 64 | }, 65 | { 66 | "uri": "git+https://exmaple/yet-another-repo.git", 67 | # Too long, 41 characters 68 | "digest": {"sha1": "36d89a3cadcdf269110757df1074b4ef45fe641ee"}, 69 | }, 70 | ] 71 | 72 | expected := { 73 | { 74 | "code": "slsa_source_version_controlled.materials_include_git_sha", 75 | "msg": "Material digest \"g9ef4c1f9273718b2421b2c076f09786ede5982c\" is not a git commit sha", 76 | }, 77 | { 78 | "code": "slsa_source_version_controlled.materials_include_git_sha", 79 | "msg": "Material digest \"1d2d2f924e986ac86fdf7b36c94bcdf32beec15\" is not a git commit sha", 80 | }, 81 | { 82 | "code": "slsa_source_version_controlled.materials_include_git_sha", 83 | "msg": "Material digest \"36d89a3cadcdf269110757df1074b4ef45fe641ee\" is not a git commit sha", 84 | }, 85 | } 86 | 87 | lib.assert_equal_results( 88 | expected, 89 | 
slsa_source_version_controlled.deny, 90 | ) with input.attestations as [_mock_attestation(materials)] 91 | } 92 | 93 | test_invalid_materials if { 94 | materials := [ 95 | # Missing uri 96 | {"digest": {"sha1": "49ef4c1f9273718b2421b2c076f09786ede5982c"}}, 97 | # Missing digest 98 | {"uri": "git+https://example/repo"}, 99 | # Missing digest.sha1 100 | {"url": "git+https://example/repo", "digest": {}}, 101 | ] 102 | 103 | expected := {{ 104 | "code": "slsa_source_version_controlled.materials_format_okay", 105 | "msg": "No materials match expected format", 106 | }} 107 | 108 | lib.assert_equal_results( 109 | expected, 110 | slsa_source_version_controlled.deny, 111 | ) with input.attestations as [_mock_attestation(materials)] 112 | } 113 | 114 | _mock_attestation(materials) := {"statement": {"predicate": { 115 | "buildType": lib.tekton_pipeline_run, 116 | "materials": materials, 117 | }}} 118 | -------------------------------------------------------------------------------- /policy/release/source_image/source_image.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Source image 4 | # description: >- 5 | # This package is responsible for verifying the source container image associated with the image 6 | # being validated. 7 | # 8 | package source_image 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.tekton 14 | 15 | # METADATA 16 | # title: Exists 17 | # description: Verify the source container image exists. 18 | # custom: 19 | # short_name: exists 20 | # failure_msg: "%s" 21 | # collections: 22 | # - redhat 23 | # effective_on: 2024-06-05T00:00:00Z 24 | # 25 | deny contains result if { 26 | some error in _source_image_errors 27 | result := lib.result_helper(rego.metadata.chain(), [error]) 28 | } 29 | 30 | # METADATA 31 | # title: Signed 32 | # description: Verify the source container image is signed. 
33 | # custom: 34 | # short_name: signed 35 | # failure_msg: "%s" 36 | # depends_on: 37 | # - source_image.exists 38 | # collections: 39 | # - redhat 40 | # effective_on: 2024-05-04T00:00:00Z 41 | # 42 | deny contains result if { 43 | some error in _source_image_sig_errors 44 | result := lib.result_helper(rego.metadata.chain(), [error]) 45 | } 46 | 47 | _source_image_errors contains error if { 48 | count(_source_images) == 0 49 | error := "No source image references found" 50 | } 51 | 52 | _source_image_errors contains error if { 53 | some img in _source_images 54 | not ec.oci.image_manifest(img) 55 | error := sprintf("Unable to access source image %q", [img]) 56 | } 57 | 58 | _source_image_errors contains error if { 59 | some img in _source_images 60 | manifest := ec.oci.image_manifest(img) 61 | layers := object.get(manifest, "layers", []) 62 | count(layers) == 0 63 | error := sprintf("Source image has no layers %q", [img]) 64 | } 65 | 66 | _source_image_sig_errors contains error if { 67 | some img in _source_images 68 | info := ec.sigstore.verify_image(img, lib.sigstore_opts) 69 | some raw_error in info.errors 70 | error := sprintf("Image signature verification failed for %s: %s", [img, raw_error]) 71 | } 72 | 73 | # _source_images is a set of image references. Each corresponding to the 74 | # SOURCE_IMAGE_URL@SOURCE_IMAGE_DIGEST parameter of a source-build Task. 
75 | _source_images contains img if { 76 | some att in lib.pipelinerun_attestations 77 | some task in tekton.source_build_tasks(att) 78 | 79 | url := trim_space(tekton.task_result(task, "SOURCE_IMAGE_URL")) 80 | digest := trim_space(tekton.task_result(task, "SOURCE_IMAGE_DIGEST")) 81 | img := sprintf("%s@%s", [url, digest]) 82 | } 83 | -------------------------------------------------------------------------------- /policy/stepaction/image/image.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Tekton StepAction image policies 4 | # description: >- 5 | # This package ensures that a StepAction definition contains a valid and allowed value for the 6 | # image reference. 7 | # 8 | package stepaction.image 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.json as j 14 | import data.lib.k8s 15 | 16 | # METADATA 17 | # title: Image is accessible 18 | # description: >- 19 | # Confirm the container image used in the StepTemplate is accessible. 20 | # custom: 21 | # short_name: accessible 22 | # failure_msg: Image ref %q is inaccessible 23 | # solution: >- 24 | # Make sure the container image used in the StepTemplate is pushed to the registry and that it 25 | # can be fetched. 26 | # 27 | deny contains result if { 28 | image_ref := input.spec.image 29 | not ec.oci.image_manifest(image_ref) 30 | 31 | result := lib.result_helper_with_term( 32 | rego.metadata.chain(), 33 | [image_ref], 34 | image_ref, 35 | ) 36 | } 37 | 38 | # METADATA 39 | # title: Image comes from permitted registry 40 | # description: >- 41 | # Confirm the StepAction uses a container image with a URL that matches one of the prefixes in the 42 | # provided list of allowed step image registry prefixes. The list is customizable via the 43 | # `allowed_step_image_registry_prefixes` rule data key. 
44 | # custom: 45 | # short_name: permitted 46 | # failure_msg: Image ref %q is disallowed 47 | # solution: >- 48 | # Make sure the container image used comes from an approved registry. 49 | # 50 | deny contains result if { 51 | image_ref := input.spec.image 52 | allowed_registry_prefixes := lib.rule_data(_rule_data_key) 53 | not ref_permitted(image_ref, allowed_registry_prefixes) 54 | 55 | result := lib.result_helper_with_term( 56 | rego.metadata.chain(), 57 | [image_ref], 58 | k8s.name_version(input), 59 | ) 60 | } 61 | 62 | # METADATA 63 | # title: Rule data provided 64 | # description: >- 65 | # Confirm the `allowed_step_image_registry_prefixes` rule data is provided. 66 | # custom: 67 | # short_name: rule_data 68 | # failure_msg: "%s" 69 | # solution: >- 70 | # Make sure the xref:ec-cli:ROOT:configuration.adoc#_data_sources[data sources] contains a key 71 | # 'allowed_step_image_registry_prefixes' that contains a list of approved registries. 72 | # 73 | deny contains result if { 74 | some e in _rule_data_errors 75 | result := lib.result_helper_with_severity(rego.metadata.chain(), [e.message], e.severity) 76 | } 77 | 78 | ref_permitted(image_ref, allowed_prefixes) if { 79 | some allowed_prefix in allowed_prefixes 80 | startswith(image_ref, allowed_prefix) 81 | } 82 | 83 | _rule_data_errors contains error if { 84 | some e in j.validate_schema( 85 | lib.rule_data(_rule_data_key), 86 | { 87 | "$schema": "http://json-schema.org/draft-07/schema#", 88 | "type": "array", 89 | "items": {"type": "string"}, 90 | "uniqueItems": true, 91 | "minItems": 1, 92 | }, 93 | ) 94 | error := { 95 | "message": sprintf("Rule data %s has unexpected format: %s", [_rule_data_key, e.message]), 96 | "severity": e.severity, 97 | } 98 | } 99 | 100 | _rule_data_key := "allowed_step_image_registry_prefixes" 101 | -------------------------------------------------------------------------------- /policy/stepaction/image/image_test.rego: 
-------------------------------------------------------------------------------- 1 | package policy.stepaction.image_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.stepaction.image 7 | 8 | test_image_accessible if { 9 | stepaction := { 10 | "kind": "StepAction", 11 | "spec": {"image": "registry.io/repository/ok:1"}, 12 | } 13 | 14 | lib.assert_empty(image.deny) with input as stepaction 15 | with ec.oci.image_manifest as mock_image_manifest 16 | with data.rule_data as default_rule_data 17 | } 18 | 19 | test_image_not_accessible if { 20 | stepaction := { 21 | "kind": "StepAction", 22 | "spec": {"image": "registry.io/repository/not_ok:1"}, 23 | } 24 | 25 | expected := {{ 26 | "code": "stepaction.image.accessible", 27 | "msg": `Image ref "registry.io/repository/not_ok:1" is inaccessible`, 28 | "term": "registry.io/repository/not_ok:1", 29 | }} 30 | 31 | lib.assert_equal_results(expected, image.deny) with input as stepaction 32 | with ec.oci.image_manifest as mock_image_manifest 33 | with data.rule_data as default_rule_data 34 | } 35 | 36 | test_image_not_permitted if { 37 | stepaction := { 38 | "kind": "StepAction", 39 | "spec": {"image": "registry.io/repository/ok:1"}, 40 | } 41 | 42 | rule_data := {"allowed_step_image_registry_prefixes": ["dope.registry.io/"]} 43 | 44 | expected := {{ 45 | "code": "stepaction.image.permitted", 46 | "msg": `Image ref "registry.io/repository/ok:1" is disallowed`, 47 | "term": "noname/noversion", 48 | }} 49 | 50 | lib.assert_equal_results(expected, image.deny) with input as stepaction 51 | with ec.oci.image_manifest as mock_image_manifest 52 | with data.rule_data as rule_data 53 | } 54 | 55 | test_rule_data_list_empty if { 56 | expected := {{ 57 | "code": "stepaction.image.rule_data", 58 | # regal ignore:line-length 59 | "msg": "Rule data allowed_step_image_registry_prefixes has unexpected format: (Root): Array must have at least 1 items", 60 | "severity": "failure", 61 | }} 62 | 63 | 
lib.assert_equal_results(expected, image.deny) with data.rule_data as {} 64 | } 65 | 66 | test_rule_data_list_format if { 67 | d := {"allowed_step_image_registry_prefixes": [ 68 | # Wrong type 69 | 1, 70 | # Duplicated items 71 | "registry.local/", 72 | "registry.local/", 73 | ]} 74 | 75 | expected := { 76 | { 77 | "code": "stepaction.image.rule_data", 78 | # regal ignore:line-length 79 | "msg": "Rule data allowed_step_image_registry_prefixes has unexpected format: 0: Invalid type. Expected: string, given: integer", 80 | "severity": "failure", 81 | }, 82 | { 83 | "code": "stepaction.image.rule_data", 84 | # regal ignore:line-length 85 | "msg": "Rule data allowed_step_image_registry_prefixes has unexpected format: (Root): array items[1,2] must be unique", 86 | "severity": "failure", 87 | }, 88 | } 89 | 90 | lib.assert_equal_results(expected, image.deny) with data.rule_data as d 91 | } 92 | 93 | mock_image_manifest(ref) := {} if { 94 | startswith(ref, "registry.io/repository/ok") 95 | } 96 | 97 | default_rule_data := {"allowed_step_image_registry_prefixes": ["registry.io/"]} 98 | -------------------------------------------------------------------------------- /policy/stepaction/kind/kind.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Tekton StepAction kind checks 4 | # description: >- 5 | # Policies to verify that a Tekton StepAction definition has the expected 6 | # value for kind. 7 | # 8 | package stepaction.kind 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | 14 | # METADATA 15 | # title: StepAction definition has expected kind 16 | # description: >- 17 | # Confirm the StepAction definition has the kind "StepAction". 
18 | # custom: 19 | # short_name: valid 20 | # failure_msg: Unexpected kind %q for StepAction definition 21 | # 22 | deny contains result if { 23 | k := object.get(input, "kind", "") 24 | k != "StepAction" 25 | result := lib.result_helper(rego.metadata.chain(), [k]) 26 | } 27 | -------------------------------------------------------------------------------- /policy/stepaction/kind/kind_test.rego: -------------------------------------------------------------------------------- 1 | package policy.stepaction.kind_test 2 | 3 | import rego.v1 4 | 5 | import data.lib 6 | import data.stepaction.kind 7 | 8 | test_invalid_kind if { 9 | lib.assert_equal_results(kind.deny, {{ 10 | "code": "stepaction.kind.valid", 11 | "msg": `Unexpected kind "Foo" for StepAction definition`, 12 | }}) with input.kind as "Foo" 13 | } 14 | 15 | test_valid_kind if { 16 | lib.assert_empty(kind.deny) with input as {"kind": "StepAction"} 17 | } 18 | -------------------------------------------------------------------------------- /policy/task/annotations/annotations.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Tekton Task annotations 4 | # description: >- 5 | # Policies to verify that a Tekton Task definition uses well-formed expected 6 | # annotations. 7 | # 8 | package annotations 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | 14 | # METADATA 15 | # title: Task definition uses expires-on annotation in RFC3339 format 16 | # description: >- 17 | # Make sure to use the date format in RFC3339 format in the 18 | # "build.appstudio.redhat.com/expires-on" annotation. 
19 | # custom: 20 | # short_name: expires_on_format 21 | # failure_msg: >- 22 | # Expires on time is not in RFC3339 format: %q 23 | # 24 | deny contains result if { 25 | expires_on := input.metadata.annotations[_expires_on_annotation] 26 | 27 | not time.parse_rfc3339_ns(expires_on) 28 | 29 | result := lib.result_helper(rego.metadata.chain(), [expires_on]) 30 | } 31 | 32 | _expires_on_annotation := "build.appstudio.redhat.com/expires-on" 33 | -------------------------------------------------------------------------------- /policy/task/annotations/annotations_test.rego: -------------------------------------------------------------------------------- 1 | package annotations_test 2 | 3 | import rego.v1 4 | 5 | import data.annotations 6 | import data.lib 7 | 8 | test_valid_expiry_dates if { 9 | # regal ignore:line-length 10 | lib.assert_empty(annotations.deny) with input.metadata.annotations as {annotations._expires_on_annotation: "2000-01-02T03:04:05Z"} 11 | } 12 | 13 | test_invalid_expiry_dates if { 14 | lib.assert_equal_results(annotations.deny, {{ 15 | "code": "annotations.expires_on_format", 16 | "msg": `Expires on time is not in RFC3339 format: "meh"`, 17 | }}) with input.metadata.annotations as {annotations._expires_on_annotation: "meh"} 18 | } 19 | -------------------------------------------------------------------------------- /policy/task/kind/kind.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Tekton task kind checks 4 | # description: >- 5 | # Policies to verify that a Tekton task definition has the expected 6 | # value for kind. 7 | # 8 | package kind 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | 14 | expected_kind := "Task" 15 | 16 | # METADATA 17 | # title: Task definition has expected kind 18 | # description: >- 19 | # Confirm the task definition has the kind "Task". 
20 | # custom: 21 | # short_name: expected_kind 22 | # failure_msg: Unexpected kind '%s' for task definition 23 | # 24 | deny contains result if { 25 | expected_kind != input.kind 26 | result := lib.result_helper(rego.metadata.chain(), [input.kind]) 27 | } 28 | 29 | # METADATA 30 | # title: Kind field is present in task definition 31 | # description: >- 32 | # Confirm the task definition includes the kind field. 33 | # custom: 34 | # short_name: kind_present 35 | # failure_msg: Required field 'kind' not found 36 | # 37 | deny contains result if { 38 | not input.kind 39 | result := lib.result_helper(rego.metadata.chain(), []) 40 | } 41 | -------------------------------------------------------------------------------- /policy/task/kind/kind_test.rego: -------------------------------------------------------------------------------- 1 | package kind_test 2 | 3 | import rego.v1 4 | 5 | import data.kind 6 | import data.lib 7 | 8 | test_unexpected_kind if { 9 | lib.assert_equal_results(kind.deny, {{ 10 | "code": "kind.expected_kind", 11 | "msg": "Unexpected kind 'Foo' for task definition", 12 | }}) with input.kind as "Foo" 13 | } 14 | 15 | test_expected_kind if { 16 | lib.assert_empty(kind.deny) with input as {"kind": "Task"} 17 | } 18 | 19 | test_kind_not_found if { 20 | lib.assert_equal_results(kind.deny, {{ 21 | "code": "kind.kind_present", 22 | "msg": "Required field 'kind' not found", 23 | }}) with input as {"bad": "Foo"} 24 | } 25 | -------------------------------------------------------------------------------- /policy/task/results/results.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Tekton Task result 4 | # description: Verify Tekton Task definitions provide expected results. 
5 | # 6 | package results 7 | 8 | import rego.v1 9 | 10 | import data.lib 11 | import data.lib.json as j 12 | 13 | # METADATA 14 | # title: Required result defined 15 | # description: >- 16 | # Verify if Task defines the required result. This is controlled by the `required_task_results` 17 | # rule data key. By default this is empty making this rule a no-op. 18 | # custom: 19 | # short_name: required 20 | # failure_msg: '%s' 21 | # 22 | deny contains result if { 23 | some err in errors 24 | result := lib.result_helper(rego.metadata.chain(), [err]) 25 | } 26 | 27 | # METADATA 28 | # title: Rule data provided 29 | # description: >- 30 | # Confirm the expected `required_task_results` rule data key has been provided in the expected 31 | # format. 32 | # custom: 33 | # short_name: rule_data_provided 34 | # failure_msg: '%s' 35 | # solution: If provided, ensure the rule data is in the expected format. 36 | # collections: 37 | # - redhat 38 | # - policy_data 39 | # 40 | deny contains result if { 41 | some e in _rule_data_errors 42 | result := lib.result_helper_with_severity(rego.metadata.chain(), [e.message], e.severity) 43 | } 44 | 45 | errors contains err if { 46 | version := object.get(input.metadata, ["labels", "app.kubernetes.io/version"], "") 47 | version_constraints := {r.version | some r in lib.rule_data(_rule_data_key)} 48 | not version in version_constraints 49 | 50 | some required in {r | 51 | some r in lib.rule_data(_rule_data_key) 52 | input.metadata.name == r.task 53 | not r.version 54 | } 55 | found := [result | 56 | some result in input.spec.results 57 | result.name == required.result 58 | ] 59 | count(found) == 0 60 | err := sprintf("%q result not found in %q Task%s (all versions)", [required.result, required.task, _vstr(version)]) 61 | } 62 | 63 | errors contains err if { 64 | version := object.get(input.metadata, ["labels", "app.kubernetes.io/version"], "") 65 | some required in {r | 66 | some r in lib.rule_data(_rule_data_key) 67 | input.metadata.name 
== r.task 68 | r.version == version 69 | } 70 | found := [result | 71 | some result in input.spec.results 72 | result.name == required.result 73 | ] 74 | count(found) == 0 75 | err := sprintf("%q result not found in %q Task/v%s", [required.result, required.task, version]) 76 | } 77 | 78 | _rule_data_errors contains error if { 79 | schema := { 80 | "$schema": "http://json-schema.org/draft-07/schema#", 81 | "type": "array", 82 | "items": { 83 | "type": "object", 84 | "properties": { 85 | "task": {"type": "string"}, 86 | "version": {"type": "string"}, 87 | "result": {"type": "string"}, 88 | }, 89 | "additionalProperties": false, 90 | "required": ["task", "result"], 91 | }, 92 | "uniqueItems": true, 93 | } 94 | 95 | some e in j.validate_schema(lib.rule_data(_rule_data_key), schema) 96 | error := { 97 | "message": sprintf("Rule data %s has unexpected format: %s", [_rule_data_key, e.message]), 98 | "severity": e.severity, 99 | } 100 | } 101 | 102 | _rule_data_key := "required_task_results" 103 | 104 | _vstr(v) := s if { 105 | v != "" 106 | s := sprintf("/v%s", [v]) 107 | } else := "" 108 | -------------------------------------------------------------------------------- /policy/task/step_image_registries/step_image_registries.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Tekton Task Step image registry policies 4 | # description: >- 5 | # This package ensures that a Task definition contains expected values for the image references 6 | # used by the Task's steps. 7 | # 8 | package step_image_registries 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.json as j 14 | import data.lib.k8s 15 | 16 | # METADATA 17 | # title: Step images come from permitted registry 18 | # description: >- 19 | # Confirm that each step in the Task uses a container image with a URL that matches one of the 20 | # prefixes in the provided list of allowed step image registry prefixes. 
#   The list is customizable via the `allowed_step_image_registry_prefixes` rule data key.
# custom:
#   short_name: step_images_permitted
#   failure_msg: Step %d uses disallowed image ref '%s'
#   solution: >-
#     Make sure the container image used in each step of the Task comes from an approved registry.
#
deny contains result if {
	input.kind == "Task"

	some step_index, step in input.spec.steps
	image_ref := step.image
	allowed_registry_prefixes := lib.rule_data(_rule_data_key)
	not image_ref_permitted(image_ref, allowed_registry_prefixes)

	result := lib.result_helper_with_term(
		rego.metadata.chain(),
		[step_index, image_ref],
		k8s.name_version(input),
	)
}

# METADATA
# title: Permitted step image registry prefix list provided
# description: >-
#   Confirm the `allowed_step_image_registry_prefixes` rule data was provided, since it's
#   required by the policy rules in this package.
# custom:
#   short_name: step_image_registry_prefix_list_provided
#   failure_msg: "%s"
#   solution: >-
#     Make sure the xref:ec-cli:ROOT:configuration.adoc#_data_sources[data sources] contains a key
#     'allowed_step_image_registry_prefixes' that contains a list of approved registries
#     that can be used to run tasks in the build pipeline.
#
deny contains result if {
	some e in _rule_data_errors
	result := lib.result_helper_with_severity(rego.metadata.chain(), [e.message], e.severity)
}

# An image ref is permitted when it starts with any one of the allowed prefixes.
image_ref_permitted(image_ref, allowed_prefixes) if {
	some allowed_prefix in allowed_prefixes
	startswith(image_ref, allowed_prefix)
}

# Validate the rule data against the expected JSON schema.
_rule_data_errors contains error if {
	some e in j.validate_schema(
		lib.rule_data(_rule_data_key),
		{
			"$schema": "http://json-schema.org/draft-07/schema#",
			"type": "array",
			"items": {"type": "string"},
			"uniqueItems": true,
			"minItems": 1,
		},
	)
	error := {
		"message": sprintf("Rule data %s has unexpected format: %s", [_rule_data_key, e.message]),
		"severity": e.severity,
	}
}

_rule_data_key := "allowed_step_image_registry_prefixes"
-------------------------------------------------------------------------------- /policy/task/step_images/step_images.rego: --------------------------------------------------------------------------------
#
# METADATA
# title: Tekton Task Step image policies
# description: >-
#   This package ensures that a Task definition contains valid values for the image references
#   used by the Task's steps.
#
package step_images

import rego.v1

import data.lib

# METADATA
# title: Step images are valid
# description: >-
#   Confirm that each step in the Task uses a container image that is accessible.
# custom:
#   short_name: step_images_accessible
#   failure_msg: Step %d uses inaccessible image ref '%s'
#   solution: >-
#     Make sure the container image used in each step of the Task is pushed to the
#     registry and that it can be fetched.
#   effective_on: 2025-02-10T00:00:00Z
#
deny contains result if {
	input.kind == "Task"

	some step_index, step in input.spec.steps
	image_ref := step.image

	# ec.oci.image_manifest is undefined when the manifest cannot be fetched.
	not ec.oci.image_manifest(image_ref)

	result := lib.result_helper_with_term(
		rego.metadata.chain(),
		[step_index, image_ref],
		image_ref,
	)
}
-------------------------------------------------------------------------------- /policy/task/step_images/step_images_test.rego: --------------------------------------------------------------------------------
package step_images_test

import rego.v1

import data.lib
import data.step_images

test_looks_at_tasks_only if {
	pipeline := {
		"kind": "Pipeline",
		"spec": {"steps": [{"image": "registry.io/repository/not_ok"}]},
	}

	lib.assert_empty(step_images.deny) with input as pipeline
}

test_task_with_no_steps if {
	task := {"kind": "Task"}

	lib.assert_empty(step_images.deny) with input as task
}

test_task_with_valid_steps if {
	task := {
		"kind": "Task",
		"spec": {"steps": [
			{"image": "registry.io/repository/ok:1"},
			{"image": "registry.io/repository/ok:2"},
			{"image": "registry.io/repository/ok:3"},
		]},
	}

	lib.assert_empty(step_images.deny) with input as task with ec.oci.image_manifest as mock_image_manifest
}

test_task_with_invalid_steps if {
	task := {
		"kind": "Task",
		"spec": {"steps": [
			{"image": "registry.io/repository/ok:1"},
			{"image": "registry.io/repository/not_ok:2"},
			{"image": "registry.io/repository/ok:3"},
			{"image": "registry.io/repository/not_ok:4"},
			{"image": "registry.io/repository/ok:5"},
		]},
	}

	expected := {
		{
			"code": "step_images.step_images_accessible",
			"msg": "Step 1 uses inaccessible image ref 'registry.io/repository/not_ok:2'",
			"term":
"registry.io/repository/not_ok:2", 53 | }, 54 | { 55 | "code": "step_images.step_images_accessible", 56 | "msg": "Step 3 uses inaccessible image ref 'registry.io/repository/not_ok:4'", 57 | "term": "registry.io/repository/not_ok:4", 58 | }, 59 | } 60 | 61 | lib.assert_equal_results(expected, step_images.deny) with input as task 62 | with ec.oci.image_manifest as mock_image_manifest 63 | } 64 | 65 | mock_image_manifest(ref) := m if { 66 | startswith(ref, "registry.io/repository/ok") 67 | m := {} 68 | } 69 | -------------------------------------------------------------------------------- /policy/task/trusted_artifacts/trusted_artifacts.rego: -------------------------------------------------------------------------------- 1 | # 2 | # METADATA 3 | # title: Trusted Artifacts Conventions 4 | # description: >- 5 | # Policies to verify that a Tekton task definition conforms to the expected conventions required 6 | # for using Trusted Artifacts. 7 | # 8 | package trusted_artifacts 9 | 10 | import rego.v1 11 | 12 | import data.lib 13 | import data.lib.k8s 14 | 15 | # METADATA 16 | # title: Parameter 17 | # description: Trusted Artifact parameters follow the expected naming convention. 18 | # custom: 19 | # short_name: parameter 20 | # failure_msg: The parameter %q of the Task %q does not use the _ARTIFACT suffix 21 | # 22 | deny contains result if { 23 | some param_name in _ta_parameters 24 | not _has_ta_suffix(param_name) 25 | result := lib.result_helper(rego.metadata.chain(), [param_name, k8s.name_version(input)]) 26 | } 27 | 28 | # METADATA 29 | # title: Result 30 | # description: Trusted Artifact results follow the expected naming convention. 
# custom:
#   short_name: result
#   failure_msg: The result %q of the Task %q does not use the _ARTIFACT suffix
#
deny contains result if {
	some result_name in _ta_results
	not _has_ta_suffix(result_name)
	result := lib.result_helper(rego.metadata.chain(), [result_name, k8s.name_version(input)])
}

# METADATA
# title: Workspace
# description: >-
#   Tasks that implement the Trusted Artifacts pattern should not allow general purpose workspaces
#   to share data. Instead, data should be passed around via Trusted Artifacts. Workspaces used for
#   other purposes, e.g. provide auth credentials, are allowed. Use the rule data key
#   `allowed_trusted_artifacts_workspaces` to specify which workspace names are allowed. By default
#   this value is empty which effectively disallows any workspace.
# custom:
#   short_name: workspace
#   failure_msg: General purpose workspace %q is not allowed
#   effective_on: 2024-07-07T00:00:00Z
#
deny contains result if {
	_uses_trusted_artifacts(input)
	some workspace in input.spec.workspaces
	not workspace.name in lib.rule_data("allowed_trusted_artifacts_workspaces")
	result := lib.result_helper(rego.metadata.chain(), [workspace.name])
}

# Parameter names referenced as $(params.<name>) in the args of trusted-artifacts
# steps that perform the "use" operation.
_ta_parameters contains param_name if {
	some step in input.spec.steps
	_is_ta_step(step)
	"use" in step.args
	some arg in step.args

	# [^)]* (rather than a greedy .*) keeps each match confined to a single
	# $(params...) reference even when one arg contains several references.
	some arg_param in regex.find_n(`\$\(params\.[^)]*\)`, arg, -1)
	param_name := trim_prefix(trim_suffix(arg_param, ")"), "$(params.")
}

# Result names referenced as $(results.<name>.path) in the args of trusted-artifacts
# steps that perform the "create" operation.
_ta_results contains result_name if {
	some step in input.spec.steps
	_is_ta_step(step)
	"create" in step.args
	some arg in step.args

	# [^)]* (rather than a greedy .*) keeps each match confined to a single
	# $(results...) reference even when one arg contains several references.
	some arg_result in regex.find_n(`\$\(results\.[^)]*\.path\)`, arg, -1)
	result_name := trim_prefix(trim_suffix(arg_result, ".path)"), "$(results.")
}

_has_ta_suffix(name) if endswith(name, "_ARTIFACT")
_is_ta_step(step) if contains(step.image, "trusted-artifacts")

# _uses_trusted_artifacts relies on heuristics to determine if the given Task definition uses the
# Trusted Artifacts pattern. It does so by looking for any parameters or results which have the
# _ARTIFACT suffix in its name.
_uses_trusted_artifacts(task) if {
	params := {param.name | some param in task.spec.params}
	results := {result.name | some result in task.spec.results}
	all_names := params | results
	ta_names := {name |
		some name in all_names
		_has_ta_suffix(name)
	}
	count(ta_names) > 0
}
-------------------------------------------------------------------------------- /regal.go: --------------------------------------------------------------------------------
// Copyright The Conforma Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
14 | // 15 | // SPDX-License-Identifier: Apache-2.0 16 | 17 | package main 18 | 19 | import ( 20 | "errors" 21 | "log" 22 | "os" 23 | 24 | // Register custom rego functions 25 | _ "github.com/enterprise-contract/ec-cli/cmd/validate" 26 | "github.com/styrainc/regal/cmd" 27 | ) 28 | 29 | func main() { 30 | // Remove date and time from any `log.*` calls, as that doesn't add much of value here 31 | // Evaluate options for logging later 32 | log.SetFlags(0) 33 | 34 | if err := cmd.RootCommand.Execute(); err != nil { 35 | code := 1 36 | if e := (cmd.ExitError{}); errors.As(err, &e) { 37 | code = e.Code() 38 | } 39 | 40 | os.Exit(code) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "github>conforma/.github//config/renovate/renovate.json" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /tools.go: -------------------------------------------------------------------------------- 1 | // Copyright The Conforma Contributors 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | // 15 | // SPDX-License-Identifier: Apache-2.0 16 | 17 | package main 18 | 19 | import ( 20 | _ "github.com/enterprise-contract/ec-cli" 21 | _ "github.com/google/addlicense" 22 | _ "github.com/open-policy-agent/conftest" 23 | _ "github.com/styrainc/regal" 24 | _ "github.com/tektoncd/cli/cmd/tkn" 25 | _ "oras.land/oras/cmd/oras" 26 | ) 27 | --------------------------------------------------------------------------------