├── .gitignore ├── LICENSE ├── README.md ├── cosign_examples ├── .gitignore ├── 01_cosign_generate_keypair.sh ├── 02_cosign_sign_image.sh ├── 03_cosign_attach_attestation.sh ├── 04_cosign_verify_signature.sh ├── 05_cosign_verify_attestation.sh ├── 10_cosign_sign_image2.sh ├── 11_syft_attest_image.sh ├── 12_cosign_attach_syft_attestation.sh ├── 13_cosign_verify_syft_attestation.sh ├── 20_cosign_attach_attestation_for_subsequent_policy_verification.sh ├── 21_cosign_verify_attestation.sh ├── 22_cosign_verify_attestation_rego_policy.sh ├── 23_cosign_verify_attestation_cue_policy.sh ├── 30_cosign_verify_keyless_signing.sh ├── 40_cosign_verify_google_distroless.sh ├── 41_cosign_verify_google_distroless_python.sh ├── 50_cosign_verify_python_download.sh ├── 60_cosign_blob_operations.sh ├── artifact ├── cosign_policy.cue ├── cosign_policy.rego ├── cosign_policy_after.cue ├── cosign_policy_before.cue ├── digest.json ├── enable_gitsign.sh └── git_verify_commit.sh ├── deps_dev_examples └── deps_dev_examples.sh ├── distroless_examples └── run_distroless_images.sh ├── docker_attach_container_example └── attach_debug_container.sh ├── generic_scripts ├── remove_all_images.sh └── remove_images_with_no_tag.sh ├── image_history_inspect_examples ├── .gitignore ├── 1_1_nginx_example_docker_crane.sh ├── 1_2_nginx_example_cosign.sh ├── 2_nginx_example_podman_skopeo.sh └── 3_nerdctl_image_inspect.sh ├── install_scripts ├── install_bomber.sh ├── install_containerd.sh ├── install_cosign.sh ├── install_crane.sh ├── install_cyclonedx.sh ├── install_dive.sh ├── install_docker.sh ├── install_go.sh ├── install_grype_syft.sh ├── install_hadolint.sh ├── install_jib.sh ├── install_k8s_tools.sh ├── install_ms_sbom_tool.sh ├── install_nerdctl_buildkit.sh ├── install_oras.sh ├── install_pack.sh ├── install_podman_tools_fedora.sh ├── install_podman_tools_ubuntu.sh ├── install_sbom_scorecard.sh ├── install_sbom_tools.sh ├── install_semgrep.sh ├── install_task.sh ├── install_trivy.sh └── install_trivy_fedora.sh ├── kind_examples ├── 1_install_kind.sh ├── 2_1_create_kind_cluster_with_registry.sh ├── 2_2_test_kind_local_registry.sh ├── 2_3_crane_operations.sh ├── 3_0_show_node_labels.sh ├── 3_1_get_nginx_ingress_controller_manifest.sh ├── 3_2_install_nginx_ingress_controller.sh ├── 3_3_apply_test_ingress.sh ├── ingress-nginx-deploy.yaml ├── ingress_usage_example.yaml └── test_ingress.yaml ├── ko_sbom_examples └── get_digest_and_sbom.sh ├── microk8s_examples ├── 1_install_microk8s.sh ├── 2_microk8s_operations.sh ├── 3_test_local_registry.sh ├── 4_crane_operations.sh ├── 5_crane_push_jib_image_tar_to_ghcr.sh ├── 5_crane_push_jib_image_tar_to_local_registry.sh └── hello-server-deployment.yaml ├── oci_examples ├── 1_inspect_image_dive.sh ├── 2_crane_operations.sh ├── 3_oras_operations.sh ├── 4_regcli_operations.sh └── 5_skopeo_operations.sh ├── podman_examples ├── podman_operations.sh └── skopeo_operations.sh ├── runc_example ├── .gitignore └── create_alpine_container.sh ├── scan_result_parsing_and_sbom_examples ├── .gitignore ├── bomber_inspect_sbom.sh ├── grype_generate_json_from_syft_sbom.sh ├── grype_generate_sarif_from_image.sh ├── grype_generate_sarif_from_syft_sbom.sh ├── inspect_hadolint_sarif.sh ├── inspect_sarif.sh ├── run_sbom_scorecard.sh ├── sbom_manager_operations.sh ├── sbom_utility_operations.sh ├── syft_generate_sbom_json.sh ├── syft_generate_sbom_spdx.sh ├── trivy_generate_json.sh ├── trivy_generate_sarif.sh ├── trivy_generate_sbom_spdx.sh └── trivy_scan_sbom_spdx.sh ├── slsa-verifier-examples ├── .gitignore ├── 
extract_payload_from_intoto_attestation.sh ├── run_slsa_verifier.sh ├── sbom-scorecard-attestation.json └── sbom-scorecard-linux-amd64.intoto.jsonl └── taskfiles ├── .env ├── build-curated-docker-images ├── .env ├── alpine3.16-build │ ├── Dockerfile │ └── Taskfile.yaml ├── alpine3.16-openjdk11 │ ├── Dockerfile │ └── Taskfile.yaml ├── alpine3.16-openjdk17 │ ├── Dockerfile │ └── Taskfile.yaml ├── alpine3.16-openjre11 │ ├── Dockerfile │ └── Taskfile.yaml ├── alpine3.16-openjre17 │ ├── Dockerfile │ └── Taskfile.yaml ├── alpine3.16-run │ ├── Dockerfile │ ├── Taskfile.yaml │ └── scan_results │ │ └── trivy-dockerfile-Dockerfile.json ├── ubuntu22.04-base │ ├── Dockerfile │ └── Taskfile.yaml ├── ubuntu22.04-cicd │ ├── Dockerfile │ └── Taskfile.yaml ├── ubuntu22.04-node16 │ ├── Dockerfile │ └── Taskfile.yaml ├── ubuntu22.04-openjdk11 │ ├── Dockerfile │ └── Taskfile.yaml └── ubuntu22.04-openjdk17 │ ├── Dockerfile │ └── Taskfile.yaml ├── build-java-gradle-spring-music └── Taskfile.yaml ├── build-java-maven-sample-springboot ├── Taskfile-containerd.yaml ├── Taskfile-docker.yaml └── Taskfile.yaml ├── build-jib-springboot-helloworld ├── Taskfile-containerd.yaml └── Taskfile.yaml ├── build-python-flask ├── Taskfile-docker.yaml ├── Taskfile-podman.yaml └── Taskfile.yaml ├── common └── Taskfile.yaml └── testing └── Taskfile.yaml /.gitignore: -------------------------------------------------------------------------------- 1 | sourcecode 2 | artifacts 3 | .secrets 4 | workspace* 5 | tmp 6 | scan_results 7 | *bak 8 | *.pub 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Deep Dive into Docker Containers and DevSecOps pipelines 2 | 3 | ## Part 1: Anatomy of a Docker image 4 | 5 | **Inspecting and manipulating Docker images** 6 | 7 | * Dive: https://github.com/wagoodman/dive 8 | * Crane: https://github.com/google/go-containerregistry/tree/main/cmd/crane 9 | * ORAS: https://github.com/oras-project/oras 10 | * Skopeo: https://github.com/containers/skopeo 11 | 12 | Articles: 13 | 14 | * https://github.com/iximiuz/awesome-container-tinkering 15 | * https://iximiuz.com/en/series/debunking-container-myths/ 16 | * https://lwn.net/Articles/902049/ 17 | * https://www.tutorialworks.com/difference-docker-containerd-runc-crio-oci/ 18 | * https://dlorenc.medium.com/oci-artifacts-explained-8f4a77945c13 19 | * https://itnext.io/reverse-engineer-docker-images-into-dockerfiles-453d3d21d896 20 | * https://containers.gitbook.io/build-containers-the-hard-way/ 21 | * http://jpetazzo.github.io/2017/02/24/from-dotcloud-to-docker/ 22 | * https://iximiuz.com/en/posts/docker-debug-slim-containers/ 23 | * https://www.nginx.com/blog/what-are-namespaces-cgroups-how-do-they-work/ 24 | * https://martinheinz.dev/blog/44 25 | 26 | * DockerHub OCI Artifact support: https://www.docker.com/blog/announcing-docker-hub-oci-artifacts-support/ 27 | 28 | ## Part 2: Various methods for building Docker images 29 | 30 | **Using Taskfiles for local pipelines** 31 | 32 | * Task: https://taskfile.dev/#/ 33 | * https://dev.to/stack-labs/introduction-to-taskfile-a-makefile-alternative-h92 34 | 35 | **Building curated Docker images** 36 | 37 | * https://github.com/codepraxis-io/curated-docker-images 38 | * https://github.com/orgs/codepraxis-io/packages/container/package/ubuntu22.04 39 | * https://github.com/orgs/codepraxis-io/packages/container/package/alpine3.16 40 | 41 | **Building a Java SpringBoot app and creating Docker images based on various base images** 42 | 43 | * https://github.com/codepraxis-io/docker-deep-dive/blob/main/taskfiles/build-java-gradle-spring-music/Taskfile.yaml 44 | 45 | **Dockerfiles** 46 | 47 | Java SpringBoot / Gradle app: 48 | * https://github.com/codepraxis-io/spring-music 49 | 50 | Java SpringBoot / Maven app: 51 | * https://github.com/codepraxis-io/dddive-springboot 52 | 53 | Python Flask app: 54 | * https://github.com/codepraxis-io/flask-bootstrap 55 | 56 | 57 | **Docker images available to use as base images** 58 | 59 | * Distroless: https://github.com/GoogleContainerTools/distroless 60 | * https://iximiuz.com/en/posts/containers-distroless-images/ 61 | * Distroless vs alpine: https://itnext.io/which-container-images-to-use-distroless-or-alpine-96e3dab43a22 62 | 63 | **Docker image build mechanisms** 64 | 65 | * Docker Buildx: https://github.com/docker/buildx 66 | * Kaniko: https://github.com/GoogleContainerTools/kaniko 67 | * Cloud Native Buildpacks: https://buildpacks.io/ 68 | * https://paketo.io/ 69 | * https://paketo.io/docs/howto/java/ 70 | * Containerd/nerdctl: https://github.com/containerd/nerdctl 71 | * https://medium.com/nttlabs/nerdctl-359311b32d0e 72 | * https://earthly.dev/blog/containerd-vs-docker/ 73 | * Podman https://github.com/containers/podman 74 | * Buildah: https://github.com/containers/buildah 75 | * https://www.redhat.com/en/topics/containers/what-is-podman 76 | * https://developers.redhat.com/blog/2020/09/25/rootless-containers-with-podman-the-basics 77 | * 
https://podman.io/blogs/2018/10/31/podman-buildah-relationship.html 78 | * Jib: https://github.com/GoogleContainerTools/jib 79 | * https://github.com/GoogleContainerTools/jib/blob/master/docs/faq.md 80 | * https://snyk.io/blog/building-java-container-images-using-jib/ 81 | * Sample app built with Jib: https://github.com/codepraxis-io/springboot-helloworld 82 | 83 | **Other resources** 84 | 85 | * Earthly https://earthly.dev 86 | * https://earthly.dev/blog/chroot/ 87 | * https://jpetazzo.github.io/2020/02/01/quest-minimal-docker-images-part-1/ 88 | * https://blog.kubesimplify.com/the-secret-gems-behind-building-container-images-enter-buildkit-and-docker-buildx 89 | * https://ashishtechmill.com/comparing-modern-day-container-image-builders-jib-buildpacks-and-docker 90 | * https://medium.com/geekculture/create-docker-images-without-docker-daemon-kaniko-847a688155a6 91 | 92 | 93 | Creating a new SpringBoot application: https://start.spring.io/ 94 | 95 | Go ecosystem: 96 | 97 | * Ko: https://github.com/ko-build/ko 98 | * https://snyk.io/blog/container-images-simplified-with-google-ko/ 99 | * goreleaser: https://github.com/goreleaser/goreleaser 100 | * goreleaser supply chain example: https://github.com/goreleaser/supply-chain-example 101 | * podinfo: https://github.com/stefanprodan/podinfo 102 | 103 | Kubernetes-in-a-box 104 | 105 | * KIND: https://kind.sigs.k8s.io/ 106 | * Running a local Docker registry in kind: https://kind.sigs.k8s.io/docs/user/private-registries/ 107 | * microk8s: https://microk8s.io/ 108 | 109 | ## Part 3 - Docker Supply Chain Security 110 | 111 | **Vulnerability databases** 112 | * "Common Vulnerabilities and Exposures" aka CVE: https://cve.mitre.org/cve/search_cve_list.html 113 | * "National Vulnerability Database" aka NVD: https://nvd.nist.gov/ 114 | 115 | **Google Security ecosystem** 116 | * https://deps.dev/ 117 | * https://security.googleblog.com/2023/04/announcing-depsdev-api-critical.html 118 | * https://osv.dev/ 119 | * https://github.com/google/osv-scanner 120 | 121 | **Scanning Dockerfiles for security vulnerabilities** 122 | * Hadolint: https://github.com/hadolint/hadolint 123 | * Semgrep: https://semgrep.dev/docs/ 124 | 125 | **Scanning Docker images for security vulnerabilities** 126 | * Trivy: https://github.com/aquasecurity/trivy 127 | * Grype: https://github.com/anchore/grype 128 | * SARIF format: https://sarifweb.azurewebsites.net/ 129 | 130 | **Generating SBOMs from Docker images** 131 | * Syft: https://github.com/anchore/syft 132 | * Syft and Grype: https://medium.com/rahasak/container-vulnerability-scan-with-syft-and-grype-f4ec9cd4d7f1 133 | 134 | **Generating CycloneDX and SPDX SBOMs from code** 135 | * CycloneDX 136 | https://github.com/CycloneDX/cyclonedx-python 137 | * SPDX 138 | * https://github.com/microsoft/sbom-tool 139 | * https://security.googleblog.com/2022/06/sbom-in-action-finding-vulnerabilities.html 140 | * https://github.com/spdx/spdx-to-osv/ 141 | 142 | **More SBOM resources** 143 | * https://owasp.org/www-project-dependency-track/ 144 | * https://cyclonedx.org/tool-center/ 145 | * https://spdx.dev/tools-community/ 146 | * https://sysdig.com/blog/sbom-101-software-bill-of-materials 147 | * https://mergebase.com/blog/best-tools-for-generating-sbom/ 148 | * https://github.com/opensbom-generator/spdx-sbom-generator 149 | * https://pedrodelgallego.github.io/blog/engineering/capabilities/security/software-bill-of-materials-devsecops/ 150 | * https://github.com/IBM/sbom-utility 151 | * 
https://paketo.io/docs/howto/sbom/#access-the-software-bill-of-materials-on-a-sample-application 152 | 153 | ### Signing Docker images and verifying signatures 154 | 155 | **Sigstore tools** 156 | 157 | * Cosign: https://github.com/sigstore/cosign 158 | * Keyless signing with GitHub Actions: 159 | * https://www.chainguard.dev/unchained/zero-friction-keyless-signing-with-github-actions 160 | * https://github.com/codepraxis-io/keyless-cosign 161 | * Python sigstore module: 162 | * https://github.com/sigstore/sigstore-python 163 | * https://www.python.org/download/sigstore/ 164 | * https://blog.trailofbits.com/2022/11/08/sigstore-code-signing-verification-software-supply-chain/ 165 | 166 | 167 | **Attesting artifacts** 168 | 169 | * in-toto: https://in-toto.io/ 170 | * https://github.com/in-toto/attestation 171 | * https://anchore.com/sbom/creating-sbom-attestations-using-syft-and-sigstore/ 172 | * OCI as attestations storage for your packages: https://nextjs.marcofranssen.nl/oci-as-attestations-storage-for-your-packages 173 | 174 | **Chainguard ecosystem** 175 | 176 | * Wolfi: https://github.com/wolfi-dev/os 177 | * Apko: https://github.com/chainguard-dev/apko 178 | * Melange: https://github.com/chainguard-dev/melange 179 | 180 | ## Part 4 - GitHub Actions Workflows and SLSA 181 | 182 | **GitHub Actions Reusable Workflows** 183 | 184 | * Reusable workflows and reusable actions 185 | * Types of triggers (branch, tag, release) 186 | * https://docs.github.com/en/actions/using-workflows/reusing-workflows 187 | * https://betterprogramming.pub/how-to-use-github-actions-reusable-workflow-8604e8cbf258 188 | 189 | **Safeguarding artifact integrity (SLSA)** 190 | 191 | * SLSA: https://slsa.dev/ 192 | * SLSA levels: https://slsa.dev/spec/v1.0/levels 193 | * SLSA guide: https://fossa.com/blog/practical-guide-slsa-framework/ 194 | * SLSA GitHub generator: https://github.com/slsa-framework/slsa-github-generator 195 | * SLSA verifier: https://github.com/slsa-framework/slsa-verifier 196 | * SLSA and in-toto: http://slsa.dev/blog/2023/05/in-toto-and-slsa 197 | * SLSA and node.js: http://slsa.dev/blog/2023/05/bringing-improved-supply-chain-security-to-the-nodejs-ecosystem 198 | * Google Distroless achieves SLSA level 2: https://www.infoq.com/news/2022/10/distroless-slsa-level-two/ 199 | * VEX: https://cloud.google.com/blog/products/identity-security/how-vex-helps-sbomslsa-improve-supply-chain-visibility 200 | * SLSA and GUAC: https://github.com/guacsec/guac 201 | * https://slsa.dev/blog/2022/08/slsa-github-workflows-generic-ga 202 | * https://slsa.dev/blog/2022/05/slsa-sbom 203 | 204 | **GitHub Actions and SLSA** 205 | 206 | * Generation of SLSA3+ provenance for native GitHub projects: https://github.com/slsa-framework/slsa-github-generator 207 | * https://security.googleblog.com/2022/04/improving-software-supply-chain.html 208 | * https://github.com/slsa-framework/github-actions-demo 209 | * https://marcofranssen.nl/secure-your-software-supply-chain-using-sigstore-and-github-actions 210 | * https://github.com/philips-labs/slsa-provenance-action 211 | * Example of using SLSA GitHub Action Workflows: https://github.com/codepraxis-io/katana 212 | 213 | ## Extra material 214 | 215 | * Microsoft's Open Source Software (OSS) Secure Supply Chain (SSC) Framework: https://github.com/microsoft/oss-ssc-framework 216 | * Running Dependency Track for inspecting SBOMs: https://dependencytrack.org/ 217 | * Kyverno policies for allowing only signed images to run in kind: 
https://kyverno.io/docs/writing-policies/verify-images/
218 | * https://nirmata.com/2022/09/29/how-to-migrate-from-kubernetes-pod-security-policies-to-kyverno/
219 | * Carvel suite of tools: https://carvel.dev/
220 | * Build reproducibility: https://buildpacks.io/docs/features/reproducibility/
221 | * https://medium.com/@michael.vittrup.larsen/why-we-should-use-latest-tag-on-container-images-fc0266877ab5
222 | * CNI: https://medium.com/geekculture/k8s-network-cni-introduction-b035d42ad68f
223 | * DevSecOps with GitHub security: https://learn.microsoft.com/en-us/azure/architecture/solution-ideas/articles/devsecops-in-github
224 | 
--------------------------------------------------------------------------------
/cosign_examples/.gitignore:
--------------------------------------------------------------------------------
1 | artifact
2 | artifact-fetched
3 | digest.json
4 | Python*
5 | 
--------------------------------------------------------------------------------
/cosign_examples/01_cosign_generate_keypair.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | PRIVATE_KEY_PASS=$(cat ~/.k)
4 | 
5 | echo -n $PRIVATE_KEY_PASS | cosign generate-key-pair
6 | mkdir -p ~/.cosign
7 | mv cosign.* ~/.cosign
8 | 
--------------------------------------------------------------------------------
/cosign_examples/02_cosign_sign_image.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | COSIGN_PRIVATE_KEY=$HOME/.cosign/cosign.key
4 | PRIVATE_KEY_PASS=$(cat ~/.k)
5 | #IMAGE=ghcr.io/codepraxis-io/spring-music:4.0.0-distroless-java17-debian11
6 | IMAGE=ghcr.io/codepraxis-io/spring-music:6.0.0-temurin-17-jre-jammy
7 | 
8 | echo -n $PRIVATE_KEY_PASS | cosign sign --key $COSIGN_PRIVATE_KEY "$IMAGE"
9 | 
--------------------------------------------------------------------------------
/cosign_examples/03_cosign_attach_attestation.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | COSIGN_PRIVATE_KEY=$HOME/.cosign/cosign.key
4 | #IMAGE=ghcr.io/codepraxis-io/spring-music:4.0.0-distroless-java17-debian11
5 | #SCAN_RESULT_FILE=reference-workspace/scan_results/spring-music-4.0.0-distroless-java17-debian11/grype-scan.json
6 | IMAGE=ghcr.io/codepraxis-io/spring-music:6.0.0-temurin-17-jre-jammy
7 | SCAN_RESULT_FILE=reference-workspace/scan_results/spring-music-6.0.0-temurin-17-jre-jammy/trivy-scan.json
8 | 
9 | PRIVATE_KEY_PASS=$(cat ~/.k)
10 | 
11 | echo -n $PRIVATE_KEY_PASS | cosign attest --predicate $SCAN_RESULT_FILE --key $COSIGN_PRIVATE_KEY "$IMAGE"
12 | 
13 | crane manifest ghcr.io/codepraxis-io/spring-music:sha256-4cbf5098d309334d17ef9fb23236376dfac25c0e85bf19540cd8fd82b1e0460f.att | jq
14 | 
--------------------------------------------------------------------------------
/cosign_examples/04_cosign_verify_signature.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | COSIGN_PUBLIC_KEY=$HOME/.cosign/cosign.pub
4 | #IMAGE=ghcr.io/codepraxis-io/spring-music:4.0.0-distroless-java17-debian11
5 | IMAGE=ghcr.io/codepraxis-io/spring-music:6.0.0-temurin-17-jre-jammy
6 | 
7 | cosign verify --key $COSIGN_PUBLIC_KEY "$IMAGE"
8 | 
9 | ### Triangulation
10 | 
11 | # show signature artifact
12 | cosign triangulate $IMAGE
13 | 
14 | # show signature manifest
15 | crane manifest $(cosign triangulate $IMAGE) | jq .
16 | 
--------------------------------------------------------------------------------
/cosign_examples/05_cosign_verify_attestation.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | COSIGN_PUBLIC_KEY=$HOME/.cosign/cosign.pub
4 | #IMAGE=ghcr.io/codepraxis-io/spring-music:4.0.0-distroless-java17-debian11
5 | IMAGE=ghcr.io/codepraxis-io/spring-music:6.0.0-temurin-17-jre-jammy
6 | 
7 | #cosign verify-attestation --key $COSIGN_PUBLIC_KEY "$IMAGE" | jq -r .payload | base64 --decode | jq .
8 | TMPDIR=./tmp
9 | mkdir -p $TMPDIR
10 | ATTESTATION_PAYLOAD=$TMPDIR/attestation_payload.json
11 | SCANNER_PAYLOAD=$TMPDIR/scanner_payload.json
12 | 
13 | cosign verify-attestation --key $COSIGN_PUBLIC_KEY "$IMAGE" | jq --slurp 'map(.payload | @base64d | fromjson | .predicate.Data | fromjson)' > $ATTESTATION_PAYLOAD
14 | 
15 | cat $ATTESTATION_PAYLOAD | jq -r '.[0]' > $SCANNER_PAYLOAD
16 | 
17 | echo Vulnerabilities
18 | cat $SCANNER_PAYLOAD |jq -r '.'
19 | #cat $SCANNER_PAYLOAD |jq -r '.Vulnerabilities[] | .VulnerabilityID,.Severity'
20 | 
--------------------------------------------------------------------------------
/cosign_examples/10_cosign_sign_image2.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | COSIGN_PRIVATE_KEY=$HOME/.cosign/cosign.key
4 | PRIVATE_KEY_PASS=$(cat ~/.k)
5 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-eclipse-temurin-17-jre-jammy
6 | 
7 | echo -n $PRIVATE_KEY_PASS | cosign sign --key $COSIGN_PRIVATE_KEY "$IMAGE"
8 | 
--------------------------------------------------------------------------------
/cosign_examples/11_syft_attest_image.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | COSIGN_PRIVATE_KEY=$HOME/.cosign/cosign.key
4 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-eclipse-temurin-17-jre-jammy
5 | PRIVATE_KEY_PASS=$(cat ~/.k)
6 | 
7 | TMPDIR=./tmp
8 | rm -rf $TMPDIR
9 | mkdir -p $TMPDIR
10 | SYFT_ATTESTATION_FILE=$TMPDIR/syft.att.json
11 | echo -n $PRIVATE_KEY_PASS | syft attest --key $COSIGN_PRIVATE_KEY "$IMAGE" -o cyclonedx-json > $SYFT_ATTESTATION_FILE
12 | 
13 | echo Inspecting the attestation file:
14 | 
15 | PAYLOAD=$TMPDIR/payload.json
16 | cat $SYFT_ATTESTATION_FILE | jq --slurp 'map(.payload | @base64d | fromjson)' > $PAYLOAD
17 | 
18 | echo "value of key '_type':"
19 | cat $PAYLOAD | jq '.[0]._type'
20 | 
21 | echo "value of key 'predicateType':"
22 | cat $PAYLOAD | jq '.[0].predicateType'
23 | 
24 | echo "value of key 'subject':"
25 | cat $PAYLOAD | jq '.[0].subject'
26 | 
--------------------------------------------------------------------------------
/cosign_examples/12_cosign_attach_syft_attestation.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-eclipse-temurin-17-jre-jammy
4 | 
5 | TMPDIR=./tmp
6 | SYFT_ATTESTATION_FILE=$TMPDIR/syft.att.json
7 | 
8 | cosign attach attestation "$IMAGE" --attestation $SYFT_ATTESTATION_FILE
9 | 
10 | 
--------------------------------------------------------------------------------
/cosign_examples/13_cosign_verify_syft_attestation.sh:
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | COSIGN_PUBLIC_KEY=$HOME/.cosign/cosign.pub 4 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-eclipse-temurin-17-jre-jammy 5 | 6 | #cosign verify-attestation --key $COSIGN_PUBLIC_KEY "$IMAGE" | jq -r .payload | base64 --decode | jq . 7 | TMPDIR=./tmp 8 | rm -rf $TMPDIR 9 | mkdir -p $TMPDIR 10 | SYFT_ATTESTATION_FILE=$TMPDIR/syft_attestation.json 11 | SYFT_ATTESTATION_PAYLOAD=$TMPDIR/syft_attestation_payload.json 12 | 13 | PREDICATE_TYPE="https://cyclonedx.org/bom" 14 | cosign verify-attestation --key $COSIGN_PUBLIC_KEY --type ${PREDICATE_TYPE} "$IMAGE" > $SYFT_ATTESTATION_FILE 15 | 16 | cat $SYFT_ATTESTATION_FILE | jq --slurp 'map(.payload | @base64d | fromjson)' > $SYFT_ATTESTATION_PAYLOAD 17 | 18 | echo SBOM Components: 19 | cat $SYFT_ATTESTATION_PAYLOAD | jq -r '.[0].predicate.components[]."bom-ref"' 20 | -------------------------------------------------------------------------------- /cosign_examples/20_cosign_attach_attestation_for_subsequent_policy_verification.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #IMAGE=ghcr.io/codepraxis-io/library/busybox:latest 4 | IMAGE=ghcr.io/codepraxis-io/library/alpinelinux/docker-cli:latest 5 | COSIGN_PRIVATE_KEY=$HOME/.cosign/cosign.key 6 | PREDICATE_FILE=/tmp/predicate 7 | PRIVATE_KEY_PASS=$(cat ~/.k) 8 | 9 | crane digest $IMAGE 10 | 11 | echo "test20230927" > $PREDICATE_FILE 12 | 13 | echo -n $PRIVATE_KEY_PASS | cosign attest --predicate $PREDICATE_FILE --key $COSIGN_PRIVATE_KEY "$IMAGE" 14 | -------------------------------------------------------------------------------- /cosign_examples/21_cosign_verify_attestation.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #IMAGE=ghcr.io/codepraxis-io/library/busybox:latest 4 | IMAGE=ghcr.io/codepraxis-io/library/alpinelinux/docker-cli:latest 5 | COSIGN_PUBLIC_KEY=$HOME/.cosign/cosign.pub 6 | 7 | cosign verify-attestation --key $COSIGN_PUBLIC_KEY $IMAGE | jq -r .payload | base64 --decode | jq . 
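# Optional follow-up (a sketch): the statement decoded above should carry the plain-text predicate
# attached by 20_cosign_attach_attestation_for_subsequent_policy_verification.sh under .predicate.Data,
# next to .predicate.Timestamp (the same layout the CUE policies in this directory match on),
# so the raw predicate text can be pulled out directly:
#cosign verify-attestation --key $COSIGN_PUBLIC_KEY $IMAGE | jq -r .payload | base64 --decode | jq -r '.predicate.Data'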
8 | 9 | -------------------------------------------------------------------------------- /cosign_examples/22_cosign_verify_attestation_rego_policy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=ghcr.io/codepraxis-io/library/busybox:latest 4 | COSIGN_PUBLIC_KEY=$HOME/.cosign/cosign.pub 5 | 6 | cosign verify-attestation --policy cosign_policy.rego --key $COSIGN_PUBLIC_KEY $IMAGE 7 | -------------------------------------------------------------------------------- /cosign_examples/23_cosign_verify_attestation_cue_policy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #IMAGE=ghcr.io/codepraxis-io/library/busybox:latest 4 | IMAGE=ghcr.io/codepraxis-io/library/alpinelinux/docker-cli:latest 5 | COSIGN_PUBLIC_KEY=$HOME/.cosign/cosign.pub 6 | 7 | # this should pass 8 | cosign verify-attestation --policy cosign_policy_before.cue --key $COSIGN_PUBLIC_KEY $IMAGE 9 | 10 | # this should fail 11 | cosign verify-attestation --policy cosign_policy_after.cue --key $COSIGN_PUBLIC_KEY $IMAGE 12 | -------------------------------------------------------------------------------- /cosign_examples/30_cosign_verify_keyless_signing.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #DIGEST=ghcr.io/codepraxis-io/keyless-cosign@sha256:928d9b771f083c0722ca58ae2a551c48e346aa9df74b84f3b5829b79b74fc79b 4 | DIGEST=ghcr.io/codepraxis-io/keyless-cosign@sha256:6b7cdd9860d9319be7e3bbf807e219baa919413947862c0abed8fbc0fe9f975f 5 | COSIGN_EXPERIMENTAL=true cosign verify $DIGEST | jq . | tee digest.json 6 | cat digest.json | jq -r '.[0].optional.Bundle.Payload.body' | base64 -d | jq 7 | -------------------------------------------------------------------------------- /cosign_examples/40_cosign_verify_google_distroless.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | GOOGLE_COSIGN_PUB_KEY=google_cosign.pub 4 | rm -rf cosign.pub 5 | wget https://raw.githubusercontent.com/GoogleContainerTools/distroless/main/cosign.pub 6 | mv cosign.pub $GOOGLE_COSIGN_PUB_KEY 7 | 8 | GOOGLE_DISTROLESS_IMAGES=' 9 | gcr.io/distroless/static-debian11 10 | gcr.io/distroless/base-debian11 11 | gcr.io/distroless/cc-debian11 12 | gcr.io/distroless/python3-debian11 13 | gcr.io/distroless/java-base-debian11 14 | gcr.io/distroless/java11-debian11 15 | gcr.io/distroless/java17-debian11 16 | gcr.io/distroless/nodejs14-debian11 17 | gcr.io/distroless/nodejs16-debian11 18 | gcr.io/distroless/nodejs18-debian11' 19 | 20 | TAGS=' 21 | latest 22 | nonroot 23 | debug 24 | debug-nonroot' 25 | 26 | for IMAGE in $GOOGLE_DISTROLESS_IMAGES; do 27 | for TAG in $TAGS; do 28 | docker pull $IMAGE:$TAG 29 | echo Verifying $IMAGE:$TAG 30 | cosign verify --key $GOOGLE_COSIGN_PUB_KEY $IMAGE:$TAG 31 | echo 32 | done 33 | done 34 | 35 | # found an issue! 
36 | #Verifying gcr.io/distroless/python3-debian11:debug
37 | #Error: no matching signatures:
38 | #
39 | #main.go:62: error during command execution: no matching signatures:
40 | 
--------------------------------------------------------------------------------
/cosign_examples/41_cosign_verify_google_distroless_python.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | GOOGLE_COSIGN_PUB_KEY=google_cosign.pub
4 | rm -rf cosign.pub
5 | wget https://raw.githubusercontent.com/GoogleContainerTools/distroless/main/cosign.pub
6 | mv cosign.pub $GOOGLE_COSIGN_PUB_KEY
7 | 
8 | GOOGLE_DISTROLESS_IMAGES='
9 | gcr.io/distroless/python3-debian11'
10 | 
11 | TAGS='
12 | latest
13 | nonroot
14 | debug
15 | debug-nonroot'
16 | 
17 | for IMAGE in $GOOGLE_DISTROLESS_IMAGES; do
18 | for TAG in $TAGS; do
19 | docker pull $IMAGE:$TAG
20 | echo Verifying $IMAGE:$TAG
21 | cosign verify --key $GOOGLE_COSIGN_PUB_KEY $IMAGE:$TAG
22 | #/usr/local/bin/cosign-1.13.1 verify --key $GOOGLE_COSIGN_PUB_KEY $IMAGE:$TAG
23 | echo
24 | done
25 | done
26 | 
27 | # found an issue!
28 | #Verifying gcr.io/distroless/python3-debian11:debug
29 | #Error: no matching signatures:
30 | #
31 | #main.go:62: error during command execution: no matching signatures:
32 | 
--------------------------------------------------------------------------------
/cosign_examples/50_cosign_verify_python_download.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | VERSION=3.11.5
4 | 
5 | rm -rf Python-${VERSION}.*
6 | 
7 | wget https://www.python.org/ftp/python/${VERSION}/Python-${VERSION}.tgz
8 | wget https://www.python.org/ftp/python/${VERSION}/Python-${VERSION}.tgz.sig
9 | wget https://www.python.org/ftp/python/${VERSION}/Python-${VERSION}.tgz.crt
10 | 
11 | python3 -m pip install sigstore
12 | 
13 | # this signature is known
14 | EXPECTED_SIGNATURE_EMAIL=pablogsal@python.org
15 | 
16 | # this signature is not known
17 | #EXPECTED_SIGNATURE_EMAIL=nad@python.org
18 | 
19 | python3 -m sigstore verify \
20 | --certificate Python-${VERSION}.tgz.crt \
21 | --signature Python-${VERSION}.tgz.sig \
22 | --cert-email ${EXPECTED_SIGNATURE_EMAIL} \
23 | Python-${VERSION}.tgz
24 | 
--------------------------------------------------------------------------------
/cosign_examples/60_cosign_blob_operations.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | echo "my first artifact" > artifact
4 | BLOB_SUM=$(shasum -a 256 artifact | cut -d' ' -f 1)
5 | echo BLOB_SUM=$BLOB_SUM
6 | RND=$(uuidgen | head -c 8 | tr 'A-Z' 'a-z')
7 | BLOB_NAME=my-artifact-$RND
8 | echo BLOB_NAME=$BLOB_NAME
9 | BLOB_URI=ttl.sh/$BLOB_NAME:1h
10 | BLOB_URI_DIGEST=$(cosign upload blob -f artifact $BLOB_URI)
11 | echo $BLOB_URI_DIGEST
12 | 
13 | crane ls ttl.sh/$BLOB_NAME
14 | #Uploading file from [artifact] to [ttl.sh/my-artifact-41edcd17:1h] with media type [text/plain]
15 | #File [artifact] is available directly at [ttl.sh/v2/my-artifact-41edcd17/blobs/sha256:c69d72c98b55258f9026f984e4656f0e9fd3ef024ea3fac1d7e5c7e6249f1626]
16 | #Uploaded image to:
17 | #ttl.sh/my-artifact-41edcd17@sha256:d3d5dac02d390a83b22db0ef6147d20dfee065aa07f5e64a3432e5f8bfb30524
18 | 
19 | # Then
20 | # curl -L ttl.sh/v2/my-artifact-41edcd17/blobs/sha256:c69d72c98b55258f9026f984e4656f0e9fd3ef024ea3fac1d7e5c7e6249f1626 > artifact-fetched
21 | 
22 | # The digest is baked right into the URL, so we can check that as well:
23 | 
24 | # cat artifact-fetched | shasum -a 256
25 | # c69d72c98b55258f9026f984e4656f0e9fd3ef024ea3fac1d7e5c7e6249f1626 - 26 | -------------------------------------------------------------------------------- /cosign_examples/artifact: -------------------------------------------------------------------------------- 1 | my first artifact 2 | -------------------------------------------------------------------------------- /cosign_examples/cosign_policy.cue: -------------------------------------------------------------------------------- 1 | import "time" 2 | 3 | //before: time.Parse(time.RFC3339, "2022-10-23T01:36:02Z") 4 | after: time.Parse(time.RFC3339, "2022-11-01T00:00:00Z") 5 | 6 | // The predicateType field must match this string 7 | predicateType: "cosign.sigstore.dev/attestation/v1" 8 | 9 | // The predicate must match the following constraints. 10 | predicate: { 11 | Timestamp: <=before 12 | Data: "test1\n" 13 | } 14 | 15 | // This will fail because of the Timestamp condition 16 | //predicate: { 17 | // Timestamp: =after 12 | Data: "test20230927\n" 13 | } 14 | 15 | // This will fail because of the Timestamp condition 16 | //predicate: { 17 | // Timestamp: docker-save-$IMAGE-$TAG.tar 49 | 50 | echo 51 | echo tar tvf docker-save-$IMAGE-$TAG.tar 52 | tar tvf docker-save-$IMAGE-$TAG.tar 53 | 54 | echo mkdir -p docker-save-$IMAGE-$TAG 55 | rm -rf docker-save-$IMAGE-$TAG 56 | mkdir -p docker-save-$IMAGE-$TAG 57 | mv docker-save-$IMAGE-$TAG.tar docker-save-$IMAGE-$TAG 58 | cd docker-save-$IMAGE-$TAG 59 | tar xvf docker-save-$IMAGE-$TAG.tar 60 | cd .. 61 | 62 | echo 63 | echo display manifest from tar 64 | cat docker-save-$IMAGE-$TAG/manifest.json | jq -r '.' | tee docker-manifest-from-tar-$IMAGE-$TAG.json 65 | 66 | echo 67 | echo display config from tar 68 | CONFIG=$(cat docker-save-$IMAGE-$TAG/manifest.json | jq -r '.[0].Config') 69 | cat docker-save-$IMAGE-$TAG/$CONFIG | jq -r '.' | tee docker-config-from-tar-$IMAGE-$TAG.json 70 | 71 | echo 72 | echo crane pull $IMAGE:$TAG 73 | crane pull $IMAGE:$TAG crane-pull-$IMAGE-$TAG.tar 74 | 75 | echo 76 | echo crane export $IMAGE:$TAG 77 | crane export $IMAGE:$TAG > crane-export-$IMAGE-$TAG.tar 78 | 79 | echo 80 | echo tar tvf crane-export-$IMAGE-$TAG.tar 81 | tar tvf crane-export-$IMAGE-$TAG.tar > crane-export-$IMAGE-$TAG-tar-contents.txt 82 | 83 | echo 84 | echo crane copy $IMAGE:$TAG ghcr.io/codepraxis-io/$IMAGE:$TAG 85 | crane copy $IMAGE:$TAG ghcr.io/codepraxis-io/$IMAGE:$TAG 86 | 87 | echo 88 | echo crane digest $IMAGE:$TAG 89 | crane digest $IMAGE:$TAG 90 | 91 | echo 92 | echo crane digest ghcr.io/codepraxis-io/$IMAGE:$TAG 93 | crane digest ghcr.io/codepraxis-io/$IMAGE:$TAG 94 | 95 | #echo 96 | #echo dive $IMAGE:$TAG 97 | #dive $IMAGE:$TAG 98 | 99 | -------------------------------------------------------------------------------- /image_history_inspect_examples/1_2_nginx_example_cosign.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | COSIGN_PRIVATE_KEY=$HOME/.cosign/cosign.key 4 | COSIGN_PUBLIC_KEY=$HOME/.cosign/cosign.pub 5 | PRIVATE_KEY_PASS=$(cat ~/.k) 6 | IMAGE=ghcr.io/codepraxis-io/nginx 7 | TAG=stable-alpine 8 | 9 | echo cosign sign $IMAGE:$TAG 10 | echo -n $PRIVATE_KEY_PASS | cosign sign --key $COSIGN_PRIVATE_KEY $IMAGE:$TAG 11 | 12 | echo 13 | echo crane ls $IMAGE 14 | crane ls $IMAGE 15 | 16 | echo 17 | echo crane digest $IMAGE:$TAG 18 | crane digest $IMAGE:$TAG 19 | 20 | SHA=$(crane ls $IMAGE|grep sha256) 21 | echo 22 | echo Running: crane config $IMAGE:$SHA 23 | crane config $IMAGE:$SHA | jq -r '.' 
| tee crane-config-$IMAGE-$SHA.json 24 | 25 | echo 26 | echo Running: crane manifest $IMAGE:$SHA 27 | crane manifest $IMAGE:$SHA | jq -r '.' | tee crane-manifest-$IMAGE-$SHA.json 28 | 29 | echo 30 | echo crane export $IMAGE:$SHA 31 | crane export $IMAGE:$SHA > crane-export-nginx-$SHA.json 32 | 33 | echo 34 | echo display crane-export-nginx-$SHA.json 35 | cat crane-export-nginx-$SHA.json | jq -r '.' 36 | 37 | echo 38 | echo cosign verify --key $COSIGN_PUBLIC_KEY $IMAGE:$TAG 39 | cosign verify --key $COSIGN_PUBLIC_KEY $IMAGE:$TAG 40 | 41 | #echo 42 | #echo dive $IMAGE:$TAG 43 | #dive $IMAGE:$TAG 44 | 45 | -------------------------------------------------------------------------------- /image_history_inspect_examples/2_nginx_example_podman_skopeo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=nginx 4 | 5 | # show image tags in DockerHub 6 | echo 7 | echo skopeo list-tags docker://$IMAGE 8 | skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' 9 | 10 | TAG=stable-alpine 11 | FULL_IMAGE=docker.io/library/$IMAGE:$TAG 12 | 13 | echo 14 | echo skopeo inspect docker://$FULL_IMAGE 15 | skopeo inspect docker://$FULL_IMAGE | jq -r '.' | tee skopeo-inspect-$IMAGE-$TAG.json 16 | 17 | echo 18 | echo podman pull $FULL_IMAGE 19 | podman pull $FULL_IMAGE 20 | 21 | echo 22 | echo podman images 23 | podman images | grep $IMAGE | grep $TAG 24 | 25 | echo 26 | echo podman image history $FULL_IMAGE 27 | podman image history $FULL_IMAGE 28 | 29 | echo 30 | echo podman inspect $FULL_IMAGE 31 | podman inspect $FULL_IMAGE | jq -r '.' | tee podman-inspect-$IMAGE-$TAG.json 32 | 33 | echo 34 | echo podman save $IMAGE:$TAG 35 | podman save $IMAGE:$TAG > podman-save-$IMAGE-$TAG.tar 36 | 37 | echo 38 | echo tar tvf podman-save-$IMAGE-$TAG.tar 39 | tar tvf podman-save-$IMAGE-$TAG.tar 40 | 41 | echo mkdir -p podman-save-$IMAGE-$TAG 42 | rm -rf podman-save-$IMAGE-$TAG 43 | mkdir -p podman-save-$IMAGE-$TAG 44 | mv podman-save-$IMAGE-$TAG.tar podman-save-$IMAGE-$TAG 45 | cd podman-save-$IMAGE-$TAG 46 | tar xvf podman-save-$IMAGE-$TAG.tar 47 | cd .. 48 | 49 | echo 50 | echo display manifest from tar 51 | cat podman-save-$IMAGE-$TAG/manifest.json | jq -r '.' | tee podman-manifest-from-tar-$IMAGE-$TAG.json 52 | 53 | echo 54 | echo display config from tar 55 | CONFIG=$(cat podman-save-$IMAGE-$TAG/manifest.json | jq -r '.[0].Config') 56 | cat podman-save-$IMAGE-$TAG/$CONFIG | jq -r '.' 
| tee podman-config-from-tar-$IMAGE-$TAG.json 57 | 58 | exit 0 59 | echo 60 | echo crane export $IMAGE:$TAG > crane-export-$IMAGE-$TAG.tar 61 | crane export $IMAGE:$TAG > crane-export-$IMAGE-$TAG.tar 62 | 63 | echo 64 | echo tar tvf crane-export-$IMAGE-$TAG.tar 65 | tar tvf crane-export-$IMAGE-$TAG.tar > crane-export-$IMAGE-$TAG-tar-contents.txt 66 | 67 | #echo 68 | #echo dive $IMAGE:$TAG 69 | #dive $IMAGE:$TAG 70 | 71 | -------------------------------------------------------------------------------- /image_history_inspect_examples/3_nerdctl_image_inspect.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGES=$(nerdctl images | grep -v REPO | awk '{printf "%s:%s\n",$1, $2 }') 4 | 5 | for IMAGE in $IMAGES; do 6 | echo Running: nerdctl inspect $IMAGE 7 | nerdctl inspect $IMAGE 8 | done 9 | 10 | for IMAGE in $IMAGES; do 11 | echo Running: nerdctl history $IMAGE 12 | #nerdctl history $IMAGE 13 | nerdctl history --no-trunc $IMAGE 14 | done 15 | -------------------------------------------------------------------------------- /install_scripts/install_bomber.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | wget https://github.com/devops-kung-fu/bomber/releases/download/v0.3.4/bomber_0.3.4_linux_amd64.deb 4 | sudo apt install ./bomber_0.3.4_linux_amd64.deb 5 | rm bomber_0.3.4_linux_amd64.deb 6 | -------------------------------------------------------------------------------- /install_scripts/install_containerd.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CONTAINERD_VERSION=1.6.8 4 | RUNC_VERSION=1.1.3 5 | CNI_VERSION=1.1.1 6 | 7 | # install containerd 8 | wget https://github.com/containerd/containerd/releases/download/v${CONTAINERD_VERSION}/containerd-${CONTAINERD_VERSION}-linux-amd64.tar.gz 9 | sudo tar Cxzvf /usr/local containerd-${CONTAINERD_VERSION}-linux-amd64.tar.gz 10 | rm containerd-${CONTAINERD_VERSION}-linux-amd64.tar.gz 11 | 12 | # install runc 13 | wget https://github.com/opencontainers/runc/releases/download/v${RUNC_VERSION}/runc.amd64 14 | sudo install -m 755 runc.amd64 /usr/local/sbin/runc 15 | rm runc.amd64 16 | 17 | # install cni 18 | wget https://github.com/containernetworking/plugins/releases/download/v${CNI_VERSION}/cni-plugins-linux-amd64-v${CNI_VERSION}.tgz 19 | sudo mkdir -p /opt/cni/bin 20 | sudo tar Cxzvf /opt/cni/bin cni-plugins-linux-amd64-v${CNI_VERSION}.tgz 21 | rm cni-plugins-linux-amd64-v${CNI_VERSION}.tgz 22 | 23 | # configure containerd 24 | sudo mkdir /etc/containerd 25 | containerd config default | sudo tee /etc/containerd/config.toml 26 | sudo sed -i 's/SystemdCgroup \= false/SystemdCgroup \= true/g' /etc/containerd/config.toml 27 | sudo curl -L https://raw.githubusercontent.com/containerd/containerd/main/containerd.service -o /etc/systemd/system/containerd.service 28 | sudo systemctl daemon-reload 29 | sudo systemctl enable --now containerd 30 | sudo systemctl status containerd 31 | -------------------------------------------------------------------------------- /install_scripts/install_cosign.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | COSIGN_VERSION=2.0.0 4 | 5 | wget "https://github.com/sigstore/cosign/releases/download/v${COSIGN_VERSION}/cosign-linux-amd64" 6 | sudo mv cosign-linux-amd64 /usr/local/bin/cosign 7 | chmod +x /usr/local/bin/cosign 8 | 
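# Optional hardening (a sketch; assumes the cosign release also publishes a cosign_checksums.txt file):
# verify the downloaded binary against the published checksums before the mv above, e.g.
#wget -q "https://github.com/sigstore/cosign/releases/download/v${COSIGN_VERSION}/cosign_checksums.txt"
#grep ' cosign-linux-amd64$' cosign_checksums.txt | sha256sum --check -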
-------------------------------------------------------------------------------- /install_scripts/install_crane.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CRANE_VERSION=0.14.0 4 | 5 | mkdir -p /tmp/crane 6 | cd /tmp/crane 7 | curl -sL "https://github.com/google/go-containerregistry/releases/download/v${CRANE_VERSION}/go-containerregistry_Linux_x86_64.tar.gz" > go-containerregistry.tar.gz 8 | 9 | # verify SLSA attestation 10 | #curl -sL https://github.com/google/go-containerregistry/releases/download/v${CRANE_VERSION}/attestation.intoto.jsonl > provenance.intoto.jsonl 11 | #slsa-verifier -artifact-path go-containerregistry.tar.gz -provenance provenance.intoto.jsonl -source github.com/google/go-containerregistry -tag "v${CRANE_VERSION}" 12 | 13 | tar xvfz go-containerregistry.tar.gz 14 | sudo mv *crane /usr/local/bin 15 | crane version 16 | gcrane version 17 | cd /tmp 18 | rm -rf crane 19 | 20 | -------------------------------------------------------------------------------- /install_scripts/install_cyclonedx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Python 4 | pip install cyclonedx-bom 5 | -------------------------------------------------------------------------------- /install_scripts/install_dive.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIVE_VERSION=0.10.0 4 | 5 | wget https://github.com/wagoodman/dive/releases/download/v${DIVE_VERSION}/dive_${DIVE_VERSION}_linux_amd64.deb 6 | sudo apt install ./dive_${DIVE_VERSION}_linux_amd64.deb 7 | #rm ./dive_${DIVE_VERSION}_linux_amd64.deb 8 | -------------------------------------------------------------------------------- /install_scripts/install_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #!/bin/bash 4 | 5 | sudo apt-get update 6 | sudo apt-get install -y apt-transport-https ca-certificates curl gnupg lsb-release 7 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg 8 | echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ 9 | $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null 10 | sudo apt-get update 11 | sudo apt-get install -y docker-ce docker-ce-cli containerd.io 12 | sudo usermod -aG docker ubuntu 13 | sudo curl -L "https://github.com/docker/compose/releases/download/1.29.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose 14 | sudo chmod +x /usr/local/bin/docker-compose; sudo chown root:root /usr/local/bin/docker-compose 15 | -------------------------------------------------------------------------------- /install_scripts/install_go.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | VERSION=1.20.5 4 | 5 | wget https://go.dev/dl/go${VERSION}.linux-amd64.tar.gz 6 | tar xvfz go${VERSION}.linux-amd64.tar.gz 7 | sudo rm -rf /usr/local/go 8 | sudo mv go /usr/local 9 | rm -rf go${VERSION}.linux-amd64.tar.gz 10 | echo Make sure you set PATH=/usr/local/go/bin:$PATH 11 | -------------------------------------------------------------------------------- /install_scripts/install_grype_syft.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mkdir -p $HOME/.local/bin/ 4 | 5 
| curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b $HOME/.local/bin 6 | chmod +x $HOME/.local/bin/grype 7 | 8 | curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b $HOME/.local/bin 9 | chmod +x $HOME/.local/bin/syft 10 | 11 | # Add $HOME/.local/bin/ to $PATH 12 | -------------------------------------------------------------------------------- /install_scripts/install_hadolint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -Lo hadolint https://github.com/hadolint/hadolint/releases/download/v2.12.0/hadolint-Linux-x86_64 4 | chmod +x hadolint 5 | sudo mv hadolint /usr/local/bin/ 6 | 7 | -------------------------------------------------------------------------------- /install_scripts/install_jib.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # install slsa-verifier 4 | wget https://github.com/slsa-framework/slsa-verifier/releases/download/v1.3.1/slsa-verifier-linux-amd64 5 | wget https://github.com/slsa-framework/slsa-verifier/releases/download/v1.3.1/slsa-verifier-linux-amd64.intoto.jsonl 6 | mv slsa-verifier-linux-amd64 slsa-verifier 7 | chmod +x slsa-verifier 8 | # verify attestation 9 | ./slsa-verifier -artifact-path slsa-verifier \ 10 | -provenance slsa-verifier-linux-amd64.intoto.jsonl \ 11 | -source github.com/slsa-framework/slsa-verifier \ 12 | -tag v1.3.1 13 | 14 | sudo mv slsa-verifier /usr/local/bin 15 | 16 | sudo apt install -y default-jre 17 | wget https://github.com/GoogleContainerTools/jib/releases/download/v0.11.0-cli/jib-jre-0.11.0.zip 18 | wget https://github.com/GoogleContainerTools/jib/releases/download/v0.11.0-cli/attestation.intoto.jsonl 19 | 20 | # verify attestation 21 | slsa-verifier -artifact-path jib-jre-0.11.0.zip -provenance attestation.intoto.jsonl -source github.com/GoogleContainerTools/jib -branch master -workflow-input release_version=0.11.0 22 | 23 | unzip jib-jre-0.11.0.zip 24 | mv jib-0.11.0 $HOME 25 | 26 | rm -rf slsa-verifier* 27 | rm -rf jib*zip 28 | rm -rf attestation.intoto* 29 | 30 | echo 'export PATH=$PATH:$HOME/jib-0.11.0/bin' >> $HOME/.bashrc 31 | -------------------------------------------------------------------------------- /install_scripts/install_k8s_tools.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | KUBECTL_VERSION=1.25.0 4 | HELM_VERSION=3.10.1 5 | 6 | # install kubectl 7 | curl -L https://storage.googleapis.com/kubernetes-release/release/v${KUBECTL_VERSION}/bin/linux/amd64/kubectl -o /tmp/kubectl 8 | sudo mv /tmp/kubectl /usr/local/bin/ 9 | sudo chmod +x /usr/local/bin/kubectl 10 | 11 | # install helm 12 | curl -L https://get.helm.sh/helm-v${HELM_VERSION}-linux-amd64.tar.gz -o /tmp/helm-v${HELM_VERSION}-linux-amd64.tar.gz 13 | cd /tmp 14 | tar xfz helm-v${HELM_VERSION}-linux-amd64.tar.gz 15 | sudo mv linux-amd64/helm /usr/local/bin/ 16 | sudo chmod +x /usr/local/bin/helm 17 | rm -rf helm* linux-amd64 18 | 19 | # install jq 20 | sudo apt -y install jq 21 | -------------------------------------------------------------------------------- /install_scripts/install_ms_sbom_tool.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -Lo sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 4 | chmod +x sbom-tool 5 | sudo mv sbom-tool /usr/local/bin/ 6 | 7 | # running the sbom-tool# 8 | # 
sbom-tool generate -b ./ms-sbom -bc path/to/sourcecode -pn flask-bootstrap -pv 0.0.1 -ps codepraxis -nsb https://codepraxis.io 9 | -------------------------------------------------------------------------------- /install_scripts/install_nerdctl_buildkit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NERDCTL_VERSION=0.22.2 4 | BUILDKIT_VERSION=0.10.4 5 | 6 | sudo apt-get install uidmap -y 7 | sudo apt-get install rootlesskit -y 8 | wget https://github.com/containerd/nerdctl/releases/download/v${NERDCTL_VERSION}/nerdctl-${NERDCTL_VERSION}-linux-amd64.tar.gz 9 | sudo tar Cxzvf /usr/local/bin nerdctl-${NERDCTL_VERSION}-linux-amd64.tar.gz 10 | rm nerdctl-${NERDCTL_VERSION}-linux-amd64.tar.gz 11 | 12 | # set up rootless mode 13 | echo "kernel.unprivileged_userns_clone=1" | sudo tee -a /etc/sysctl.d/99-rootless.conf 14 | containerd-rootless-setuptool.sh install 15 | 16 | sudo sh -c "echo 1 > /proc/sys/kernel/unprivileged_userns_clone" 17 | sudo sysctl --system 18 | 19 | # install buildkitd 20 | wget https://github.com/moby/buildkit/releases/download/v${BUILDKIT_VERSION}/buildkit-v${BUILDKIT_VERSION}.linux-amd64.tar.gz 21 | sudo tar Cxzvf /usr/local buildkit-v${BUILDKIT_VERSION}.linux-amd64.tar.gz 22 | rm buildkit-v${BUILDKIT_VERSION}.linux-amd64.tar.gz 23 | 24 | # configure buildkitd with containerd in rootless mode 25 | CONTAINERD_NAMESPACE=default containerd-rootless-setuptool.sh install-buildkit-containerd 26 | 27 | systemctl --user status default-buildkit.service 28 | 29 | # then 30 | # nerdctl login YOUR_REGISTRY 31 | -------------------------------------------------------------------------------- /install_scripts/install_oras.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -LO https://github.com/oras-project/oras/releases/download/v0.15.0/oras_0.15.0_linux_amd64.tar.gz 4 | mkdir -p oras-install/ 5 | tar -zxf oras_0.15.0_*.tar.gz -C oras-install/ 6 | sudo mv oras-install/oras /usr/local/bin/ 7 | rm -rf oras_0.15.0_*.tar.gz oras-install/ 8 | -------------------------------------------------------------------------------- /install_scripts/install_pack.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo add-apt-repository ppa:cncf-buildpacks/pack-cli 4 | sudo apt-get update 5 | sudo apt-get install pack-cli 6 | -------------------------------------------------------------------------------- /install_scripts/install_podman_tools_fedora.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo dnf install -y podman 4 | sudo dnf install -y buildah 5 | sudo dnf install -y skopeo 6 | -------------------------------------------------------------------------------- /install_scripts/install_podman_tools_ubuntu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo apt install -y podman 4 | sudo apt install -y buildah 5 | sudo apt install -y skopeo 6 | -------------------------------------------------------------------------------- /install_scripts/install_sbom_scorecard.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -Lo sbom-scorecard https://github.com/eBay/sbom-scorecard/releases/download/0.0.6/sbom-scorecard-linux-amd64 4 | chmod +x sbom-scorecard 5 | sudo mv sbom-scorecard /usr/local/bin/ 6 | 7 | 
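# Usage sketch: sbom-scorecard scores the quality of an SPDX or CycloneDX SBOM file,
# e.g. one generated by syft or trivy (see scan_result_parsing_and_sbom_examples/run_sbom_scorecard.sh):
# sbom-scorecard score syft_sbom_spdx.json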
-------------------------------------------------------------------------------- /install_scripts/install_sbom_tools.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # sbom-manager 4 | pip3 install sbom-manager 5 | 6 | # sbom-utility 7 | VERSION=0.11.0 8 | curl -LO https://github.com/CycloneDX/sbom-utility/releases/download/v$VERSION/sbom-utility-v$VERSION-linux-amd64.tar.gz 9 | mkdir -p ~/code/sbom-utility 10 | tar -zxf sbom-utility-v$VERSION-linux-amd64.tar.gz -C ~/code/sbom-utility 11 | rm -rf sbom-utility-v$VERSION-linux-amd64.tar.gz 12 | -------------------------------------------------------------------------------- /install_scripts/install_semgrep.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | python3 -m pip install semgrep 4 | 5 | # add to ~/.bashrc 6 | # export PATH="$PATH:/home/ubuntu/.local/bin" 7 | -------------------------------------------------------------------------------- /install_scripts/install_task.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d 4 | sudo mv bin/task /usr/local/bin 5 | sudo chmod +x /usr/local/bin/task 6 | rm -rf bin 7 | -------------------------------------------------------------------------------- /install_scripts/install_trivy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo apt-get install -y wget apt-transport-https gnupg lsb-release 4 | wget -qO - https://aquasecurity.github.io/trivy-repo/deb/public.key | sudo apt-key add - 5 | echo deb https://aquasecurity.github.io/trivy-repo/deb $(lsb_release -sc) main | sudo tee -a /etc/apt/sources.list.d/trivy.list 6 | sudo apt-get update 7 | sudo apt-get install -y trivy 8 | -------------------------------------------------------------------------------- /install_scripts/install_trivy_fedora.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | TRIVY_VERSION=0.32.1 4 | sudo rpm -ivh https://github.com/aquasecurity/trivy/releases/download/v${TRIVY_VERSION}/trivy_${TRIVY_VERSION}_Linux-64bit.rpm 5 | 6 | -------------------------------------------------------------------------------- /kind_examples/1_install_kind.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | VERSION=0.19.0 4 | 5 | curl -Lo ./kind https://kind.sigs.k8s.io/dl/v${VERSION}/kind-linux-amd64 6 | chmod +x ./kind 7 | sudo mv ./kind /usr/local/bin/kind 8 | -------------------------------------------------------------------------------- /kind_examples/2_1_create_kind_cluster_with_registry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | # create registry container unless it already exists 6 | reg_name='kind-registry' 7 | reg_port='5001' 8 | if [ "$(docker inspect -f '{{.State.Running}}' "${reg_name}" 2>/dev/null || true)" != 'true' ]; then 9 | docker run \ 10 | -d --restart=always -p "127.0.0.1:${reg_port}:5000" --name "${reg_name}" \ 11 | registry:2 12 | fi 13 | 14 | # create a cluster with the local registry enabled in containerd 15 | # also allow ingress controllers and expose ports 80 and 443 16 | cat < blob_config.json 18 | cat blob_config.json | jq -r '.' 
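# The blob saved to blob_config.json above is the image's config JSON; a jq sketch
# (field names assume the standard OCI image config layout) for pulling out individual fields:
# cat blob_config.json | jq -r '.architecture, .os'
# cat blob_config.json | jq -r '.rootfs.diff_ids[]'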
19 | 20 | echo Running: crane blob localhost:5001/hello-app@sha256:59bf1c3509f33515622619af21ed55bbe26d24913cedbca106468a5fb37a50c3 21 | crane blob localhost:5001/hello-app@sha256:59bf1c3509f33515622619af21ed55bbe26d24913cedbca106468a5fb37a50c3 > blob_layer1.tar.gz 22 | 23 | echo Running: crane validate --remote localhost:5001/hello-app:$tag 24 | crane validate --remote localhost:5001/hello-app:$tag 25 | 26 | newtag=1.2 27 | echo Running: crane tag localhost:5001/hello-app:$tag $newtag 28 | crane tag localhost:5001/hello-app:$tag $newtag 29 | 30 | echo Running: crane ls localhost:5001/hello-app 31 | crane ls localhost:5001/hello-app 32 | 33 | echo Running: crane digest localhost:5001/hello-app:$newtag 34 | crane digest localhost:5001/hello-app:$newtag 35 | 36 | echo Running: crane export localhost:5001/hello-app:$newtag hello-app.tar 37 | crane export localhost:5001/hello-app:$newtag hello-app.tar 38 | 39 | echo Running: crane ls ghcr.io/codepraxis-io/spring-music 40 | crane ls ghcr.io/codepraxis-io/spring-music 41 | 42 | echo Running: crane copy localhost:5001/hello-app:1.0 ghcr.io/codepraxis-io/hello-app:1.0 43 | crane copy localhost:5001/hello-app:1.0 ghcr.io/codepraxis-io/hello-app:1.0 44 | 45 | echo Running: crane ls gcr.io/google_containers/busybox 46 | crane ls gcr.io/google_containers/busybox 47 | 48 | echo Running: crane manifest gcr.io/google_containers/busybox:latest 49 | crane manifest gcr.io/google_containers/busybox:latest | jq -r '.' 50 | 51 | echo Running: crane export gcr.io/google_containers/busybox busybox.tar 52 | crane export gcr.io/google_containers/busybox:latest busybox.tar 53 | 54 | echo Running: crane ls nginx:latest 55 | crane ls docker.io/nginx 56 | 57 | echo Running: crane export docker.io/nginx:1.23.1-alpine nginx_1.23.1-alpine.tar 58 | crane export docker.io/nginx:1.23.1-alpine nginx_1.23.1-alpine.tar 59 | 60 | echo Running: crane copy docker.io/nginx:1.23.1-alpine localhost:5001/nginx:1.23.1-alpine 61 | crane copy docker.io/nginx:1.23.1-alpine localhost:5001/nginx:1.23.1-alpine 62 | 63 | echo Running: crane digest localhost:5001/nginx:1.23.1-alpine 64 | crane digest localhost:5001/nginx:1.23.1-alpine 65 | 66 | echo Running: crane delete localhost:5001/nginx@sha256:b87c350e6c69e0dc7069093dcda226c4430f3836682af4f649f2af9e9b5f1c74 67 | crane delete localhost:5001/nginx@sha256:b87c350e6c69e0dc7069093dcda226c4430f3836682af4f649f2af9e9b5f1c74 68 | -------------------------------------------------------------------------------- /kind_examples/3_0_show_node_labels.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | kubectl get nodes --show-labels 4 | -------------------------------------------------------------------------------- /kind_examples/3_1_get_nginx_ingress_controller_manifest.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | wget https://raw.githubusercontent.com/kubernetes/ingress-nginx/main/deploy/static/provider/kind/deploy.yaml 4 | mv deploy.yaml ingress-nginx-deploy.yaml 5 | -------------------------------------------------------------------------------- /kind_examples/3_2_install_nginx_ingress_controller.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # First make sure these lines are commented out in ingress-nginx-deploy.yaml: 4 | # tolerations: 5 | # - effect: NoSchedule 6 | # key: node-role.kubernetes.io/master 7 | # operator: Equal 8 | # - effect: NoSchedule 9 | # key: 
node-role.kubernetes.io/control-plane 10 | # operator: Equal 11 | 12 | # After line: 13 | #nodeSelector: 14 | #ingress-ready: "true" 15 | kubectl apply -f ingress-nginx-deploy.yaml 16 | -------------------------------------------------------------------------------- /kind_examples/3_3_apply_test_ingress.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | kubectl apply -f test_ingress.yaml 4 | -------------------------------------------------------------------------------- /kind_examples/ingress-nginx-deploy.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | labels: 5 | app.kubernetes.io/instance: ingress-nginx 6 | app.kubernetes.io/name: ingress-nginx 7 | name: ingress-nginx 8 | --- 9 | apiVersion: v1 10 | automountServiceAccountToken: true 11 | kind: ServiceAccount 12 | metadata: 13 | labels: 14 | app.kubernetes.io/component: controller 15 | app.kubernetes.io/instance: ingress-nginx 16 | app.kubernetes.io/name: ingress-nginx 17 | app.kubernetes.io/part-of: ingress-nginx 18 | app.kubernetes.io/version: 1.8.0 19 | name: ingress-nginx 20 | namespace: ingress-nginx 21 | --- 22 | apiVersion: v1 23 | kind: ServiceAccount 24 | metadata: 25 | labels: 26 | app.kubernetes.io/component: admission-webhook 27 | app.kubernetes.io/instance: ingress-nginx 28 | app.kubernetes.io/name: ingress-nginx 29 | app.kubernetes.io/part-of: ingress-nginx 30 | app.kubernetes.io/version: 1.8.0 31 | name: ingress-nginx-admission 32 | namespace: ingress-nginx 33 | --- 34 | apiVersion: rbac.authorization.k8s.io/v1 35 | kind: Role 36 | metadata: 37 | labels: 38 | app.kubernetes.io/component: controller 39 | app.kubernetes.io/instance: ingress-nginx 40 | app.kubernetes.io/name: ingress-nginx 41 | app.kubernetes.io/part-of: ingress-nginx 42 | app.kubernetes.io/version: 1.8.0 43 | name: ingress-nginx 44 | namespace: ingress-nginx 45 | rules: 46 | - apiGroups: 47 | - "" 48 | resources: 49 | - namespaces 50 | verbs: 51 | - get 52 | - apiGroups: 53 | - "" 54 | resources: 55 | - configmaps 56 | - pods 57 | - secrets 58 | - endpoints 59 | verbs: 60 | - get 61 | - list 62 | - watch 63 | - apiGroups: 64 | - "" 65 | resources: 66 | - services 67 | verbs: 68 | - get 69 | - list 70 | - watch 71 | - apiGroups: 72 | - networking.k8s.io 73 | resources: 74 | - ingresses 75 | verbs: 76 | - get 77 | - list 78 | - watch 79 | - apiGroups: 80 | - networking.k8s.io 81 | resources: 82 | - ingresses/status 83 | verbs: 84 | - update 85 | - apiGroups: 86 | - networking.k8s.io 87 | resources: 88 | - ingressclasses 89 | verbs: 90 | - get 91 | - list 92 | - watch 93 | - apiGroups: 94 | - coordination.k8s.io 95 | resourceNames: 96 | - ingress-nginx-leader 97 | resources: 98 | - leases 99 | verbs: 100 | - get 101 | - update 102 | - apiGroups: 103 | - coordination.k8s.io 104 | resources: 105 | - leases 106 | verbs: 107 | - create 108 | - apiGroups: 109 | - "" 110 | resources: 111 | - events 112 | verbs: 113 | - create 114 | - patch 115 | - apiGroups: 116 | - discovery.k8s.io 117 | resources: 118 | - endpointslices 119 | verbs: 120 | - list 121 | - watch 122 | - get 123 | --- 124 | apiVersion: rbac.authorization.k8s.io/v1 125 | kind: Role 126 | metadata: 127 | labels: 128 | app.kubernetes.io/component: admission-webhook 129 | app.kubernetes.io/instance: ingress-nginx 130 | app.kubernetes.io/name: ingress-nginx 131 | app.kubernetes.io/part-of: ingress-nginx 132 | app.kubernetes.io/version: 1.8.0 133 | name: 
ingress-nginx-admission 134 | namespace: ingress-nginx 135 | rules: 136 | - apiGroups: 137 | - "" 138 | resources: 139 | - secrets 140 | verbs: 141 | - get 142 | - create 143 | --- 144 | apiVersion: rbac.authorization.k8s.io/v1 145 | kind: ClusterRole 146 | metadata: 147 | labels: 148 | app.kubernetes.io/instance: ingress-nginx 149 | app.kubernetes.io/name: ingress-nginx 150 | app.kubernetes.io/part-of: ingress-nginx 151 | app.kubernetes.io/version: 1.8.0 152 | name: ingress-nginx 153 | rules: 154 | - apiGroups: 155 | - "" 156 | resources: 157 | - configmaps 158 | - endpoints 159 | - nodes 160 | - pods 161 | - secrets 162 | - namespaces 163 | verbs: 164 | - list 165 | - watch 166 | - apiGroups: 167 | - coordination.k8s.io 168 | resources: 169 | - leases 170 | verbs: 171 | - list 172 | - watch 173 | - apiGroups: 174 | - "" 175 | resources: 176 | - nodes 177 | verbs: 178 | - get 179 | - apiGroups: 180 | - "" 181 | resources: 182 | - services 183 | verbs: 184 | - get 185 | - list 186 | - watch 187 | - apiGroups: 188 | - networking.k8s.io 189 | resources: 190 | - ingresses 191 | verbs: 192 | - get 193 | - list 194 | - watch 195 | - apiGroups: 196 | - "" 197 | resources: 198 | - events 199 | verbs: 200 | - create 201 | - patch 202 | - apiGroups: 203 | - networking.k8s.io 204 | resources: 205 | - ingresses/status 206 | verbs: 207 | - update 208 | - apiGroups: 209 | - networking.k8s.io 210 | resources: 211 | - ingressclasses 212 | verbs: 213 | - get 214 | - list 215 | - watch 216 | - apiGroups: 217 | - discovery.k8s.io 218 | resources: 219 | - endpointslices 220 | verbs: 221 | - list 222 | - watch 223 | - get 224 | --- 225 | apiVersion: rbac.authorization.k8s.io/v1 226 | kind: ClusterRole 227 | metadata: 228 | labels: 229 | app.kubernetes.io/component: admission-webhook 230 | app.kubernetes.io/instance: ingress-nginx 231 | app.kubernetes.io/name: ingress-nginx 232 | app.kubernetes.io/part-of: ingress-nginx 233 | app.kubernetes.io/version: 1.8.0 234 | name: ingress-nginx-admission 235 | rules: 236 | - apiGroups: 237 | - admissionregistration.k8s.io 238 | resources: 239 | - validatingwebhookconfigurations 240 | verbs: 241 | - get 242 | - update 243 | --- 244 | apiVersion: rbac.authorization.k8s.io/v1 245 | kind: RoleBinding 246 | metadata: 247 | labels: 248 | app.kubernetes.io/component: controller 249 | app.kubernetes.io/instance: ingress-nginx 250 | app.kubernetes.io/name: ingress-nginx 251 | app.kubernetes.io/part-of: ingress-nginx 252 | app.kubernetes.io/version: 1.8.0 253 | name: ingress-nginx 254 | namespace: ingress-nginx 255 | roleRef: 256 | apiGroup: rbac.authorization.k8s.io 257 | kind: Role 258 | name: ingress-nginx 259 | subjects: 260 | - kind: ServiceAccount 261 | name: ingress-nginx 262 | namespace: ingress-nginx 263 | --- 264 | apiVersion: rbac.authorization.k8s.io/v1 265 | kind: RoleBinding 266 | metadata: 267 | labels: 268 | app.kubernetes.io/component: admission-webhook 269 | app.kubernetes.io/instance: ingress-nginx 270 | app.kubernetes.io/name: ingress-nginx 271 | app.kubernetes.io/part-of: ingress-nginx 272 | app.kubernetes.io/version: 1.8.0 273 | name: ingress-nginx-admission 274 | namespace: ingress-nginx 275 | roleRef: 276 | apiGroup: rbac.authorization.k8s.io 277 | kind: Role 278 | name: ingress-nginx-admission 279 | subjects: 280 | - kind: ServiceAccount 281 | name: ingress-nginx-admission 282 | namespace: ingress-nginx 283 | --- 284 | apiVersion: rbac.authorization.k8s.io/v1 285 | kind: ClusterRoleBinding 286 | metadata: 287 | labels: 288 | app.kubernetes.io/instance: 
ingress-nginx 289 | app.kubernetes.io/name: ingress-nginx 290 | app.kubernetes.io/part-of: ingress-nginx 291 | app.kubernetes.io/version: 1.8.0 292 | name: ingress-nginx 293 | roleRef: 294 | apiGroup: rbac.authorization.k8s.io 295 | kind: ClusterRole 296 | name: ingress-nginx 297 | subjects: 298 | - kind: ServiceAccount 299 | name: ingress-nginx 300 | namespace: ingress-nginx 301 | --- 302 | apiVersion: rbac.authorization.k8s.io/v1 303 | kind: ClusterRoleBinding 304 | metadata: 305 | labels: 306 | app.kubernetes.io/component: admission-webhook 307 | app.kubernetes.io/instance: ingress-nginx 308 | app.kubernetes.io/name: ingress-nginx 309 | app.kubernetes.io/part-of: ingress-nginx 310 | app.kubernetes.io/version: 1.8.0 311 | name: ingress-nginx-admission 312 | roleRef: 313 | apiGroup: rbac.authorization.k8s.io 314 | kind: ClusterRole 315 | name: ingress-nginx-admission 316 | subjects: 317 | - kind: ServiceAccount 318 | name: ingress-nginx-admission 319 | namespace: ingress-nginx 320 | --- 321 | apiVersion: v1 322 | data: 323 | allow-snippet-annotations: "true" 324 | kind: ConfigMap 325 | metadata: 326 | labels: 327 | app.kubernetes.io/component: controller 328 | app.kubernetes.io/instance: ingress-nginx 329 | app.kubernetes.io/name: ingress-nginx 330 | app.kubernetes.io/part-of: ingress-nginx 331 | app.kubernetes.io/version: 1.8.0 332 | name: ingress-nginx-controller 333 | namespace: ingress-nginx 334 | --- 335 | apiVersion: v1 336 | kind: Service 337 | metadata: 338 | labels: 339 | app.kubernetes.io/component: controller 340 | app.kubernetes.io/instance: ingress-nginx 341 | app.kubernetes.io/name: ingress-nginx 342 | app.kubernetes.io/part-of: ingress-nginx 343 | app.kubernetes.io/version: 1.8.0 344 | name: ingress-nginx-controller 345 | namespace: ingress-nginx 346 | spec: 347 | ipFamilies: 348 | - IPv4 349 | ipFamilyPolicy: SingleStack 350 | ports: 351 | - appProtocol: http 352 | name: http 353 | port: 80 354 | protocol: TCP 355 | targetPort: http 356 | - appProtocol: https 357 | name: https 358 | port: 443 359 | protocol: TCP 360 | targetPort: https 361 | selector: 362 | app.kubernetes.io/component: controller 363 | app.kubernetes.io/instance: ingress-nginx 364 | app.kubernetes.io/name: ingress-nginx 365 | type: NodePort 366 | --- 367 | apiVersion: v1 368 | kind: Service 369 | metadata: 370 | labels: 371 | app.kubernetes.io/component: controller 372 | app.kubernetes.io/instance: ingress-nginx 373 | app.kubernetes.io/name: ingress-nginx 374 | app.kubernetes.io/part-of: ingress-nginx 375 | app.kubernetes.io/version: 1.8.0 376 | name: ingress-nginx-controller-admission 377 | namespace: ingress-nginx 378 | spec: 379 | ports: 380 | - appProtocol: https 381 | name: https-webhook 382 | port: 443 383 | targetPort: webhook 384 | selector: 385 | app.kubernetes.io/component: controller 386 | app.kubernetes.io/instance: ingress-nginx 387 | app.kubernetes.io/name: ingress-nginx 388 | type: ClusterIP 389 | --- 390 | apiVersion: apps/v1 391 | kind: Deployment 392 | metadata: 393 | labels: 394 | app.kubernetes.io/component: controller 395 | app.kubernetes.io/instance: ingress-nginx 396 | app.kubernetes.io/name: ingress-nginx 397 | app.kubernetes.io/part-of: ingress-nginx 398 | app.kubernetes.io/version: 1.8.0 399 | name: ingress-nginx-controller 400 | namespace: ingress-nginx 401 | spec: 402 | minReadySeconds: 0 403 | revisionHistoryLimit: 10 404 | selector: 405 | matchLabels: 406 | app.kubernetes.io/component: controller 407 | app.kubernetes.io/instance: ingress-nginx 408 | app.kubernetes.io/name: 
ingress-nginx 409 | strategy: 410 | rollingUpdate: 411 | maxUnavailable: 1 412 | type: RollingUpdate 413 | template: 414 | metadata: 415 | labels: 416 | app.kubernetes.io/component: controller 417 | app.kubernetes.io/instance: ingress-nginx 418 | app.kubernetes.io/name: ingress-nginx 419 | app.kubernetes.io/part-of: ingress-nginx 420 | app.kubernetes.io/version: 1.8.0 421 | spec: 422 | containers: 423 | - args: 424 | - /nginx-ingress-controller 425 | - --election-id=ingress-nginx-leader 426 | - --controller-class=k8s.io/ingress-nginx 427 | - --ingress-class=nginx 428 | - --configmap=$(POD_NAMESPACE)/ingress-nginx-controller 429 | - --validating-webhook=:8443 430 | - --validating-webhook-certificate=/usr/local/certificates/cert 431 | - --validating-webhook-key=/usr/local/certificates/key 432 | - --watch-ingress-without-class=true 433 | - --publish-status-address=localhost 434 | env: 435 | - name: POD_NAME 436 | valueFrom: 437 | fieldRef: 438 | fieldPath: metadata.name 439 | - name: POD_NAMESPACE 440 | valueFrom: 441 | fieldRef: 442 | fieldPath: metadata.namespace 443 | - name: LD_PRELOAD 444 | value: /usr/local/lib/libmimalloc.so 445 | image: registry.k8s.io/ingress-nginx/controller:v1.8.0@sha256:744ae2afd433a395eeb13dc03d3313facba92e96ad71d9feaafc85925493fee3 446 | imagePullPolicy: IfNotPresent 447 | lifecycle: 448 | preStop: 449 | exec: 450 | command: 451 | - /wait-shutdown 452 | livenessProbe: 453 | failureThreshold: 5 454 | httpGet: 455 | path: /healthz 456 | port: 10254 457 | scheme: HTTP 458 | initialDelaySeconds: 10 459 | periodSeconds: 10 460 | successThreshold: 1 461 | timeoutSeconds: 1 462 | name: controller 463 | ports: 464 | - containerPort: 80 465 | hostPort: 80 466 | name: http 467 | protocol: TCP 468 | - containerPort: 443 469 | hostPort: 443 470 | name: https 471 | protocol: TCP 472 | - containerPort: 8443 473 | name: webhook 474 | protocol: TCP 475 | readinessProbe: 476 | failureThreshold: 3 477 | httpGet: 478 | path: /healthz 479 | port: 10254 480 | scheme: HTTP 481 | initialDelaySeconds: 10 482 | periodSeconds: 10 483 | successThreshold: 1 484 | timeoutSeconds: 1 485 | resources: 486 | requests: 487 | cpu: 100m 488 | memory: 90Mi 489 | securityContext: 490 | allowPrivilegeEscalation: true 491 | capabilities: 492 | add: 493 | - NET_BIND_SERVICE 494 | drop: 495 | - ALL 496 | runAsUser: 101 497 | volumeMounts: 498 | - mountPath: /usr/local/certificates/ 499 | name: webhook-cert 500 | readOnly: true 501 | dnsPolicy: ClusterFirst 502 | # nodeSelector: 503 | # ingress-ready: "true" 504 | # kubernetes.io/os: linux 505 | serviceAccountName: ingress-nginx 506 | terminationGracePeriodSeconds: 0 507 | # tolerations: 508 | # - effect: NoSchedule 509 | # key: node-role.kubernetes.io/master 510 | # operator: Equal 511 | # - effect: NoSchedule 512 | # key: node-role.kubernetes.io/control-plane 513 | # operator: Equal 514 | volumes: 515 | - name: webhook-cert 516 | secret: 517 | secretName: ingress-nginx-admission 518 | --- 519 | apiVersion: batch/v1 520 | kind: Job 521 | metadata: 522 | labels: 523 | app.kubernetes.io/component: admission-webhook 524 | app.kubernetes.io/instance: ingress-nginx 525 | app.kubernetes.io/name: ingress-nginx 526 | app.kubernetes.io/part-of: ingress-nginx 527 | app.kubernetes.io/version: 1.8.0 528 | name: ingress-nginx-admission-create 529 | namespace: ingress-nginx 530 | spec: 531 | template: 532 | metadata: 533 | labels: 534 | app.kubernetes.io/component: admission-webhook 535 | app.kubernetes.io/instance: ingress-nginx 536 | app.kubernetes.io/name: 
ingress-nginx 537 | app.kubernetes.io/part-of: ingress-nginx 538 | app.kubernetes.io/version: 1.8.0 539 | name: ingress-nginx-admission-create 540 | spec: 541 | containers: 542 | - args: 543 | - create 544 | - --host=ingress-nginx-controller-admission,ingress-nginx-controller-admission.$(POD_NAMESPACE).svc 545 | - --namespace=$(POD_NAMESPACE) 546 | - --secret-name=ingress-nginx-admission 547 | env: 548 | - name: POD_NAMESPACE 549 | valueFrom: 550 | fieldRef: 551 | fieldPath: metadata.namespace 552 | image: registry.k8s.io/ingress-nginx/kube-webhook-certgen:v20230407@sha256:543c40fd093964bc9ab509d3e791f9989963021f1e9e4c9c7b6700b02bfb227b 553 | imagePullPolicy: IfNotPresent 554 | name: create 555 | securityContext: 556 | allowPrivilegeEscalation: false 557 | nodeSelector: 558 | kubernetes.io/os: linux 559 | restartPolicy: OnFailure 560 | securityContext: 561 | fsGroup: 2000 562 | runAsNonRoot: true 563 | runAsUser: 2000 564 | serviceAccountName: ingress-nginx-admission 565 | --- 566 | apiVersion: batch/v1 567 | kind: Job 568 | metadata: 569 | labels: 570 | app.kubernetes.io/component: admission-webhook 571 | app.kubernetes.io/instance: ingress-nginx 572 | app.kubernetes.io/name: ingress-nginx 573 | app.kubernetes.io/part-of: ingress-nginx 574 | app.kubernetes.io/version: 1.8.0 575 | name: ingress-nginx-admission-patch 576 | namespace: ingress-nginx 577 | spec: 578 | template: 579 | metadata: 580 | labels: 581 | app.kubernetes.io/component: admission-webhook 582 | app.kubernetes.io/instance: ingress-nginx 583 | app.kubernetes.io/name: ingress-nginx 584 | app.kubernetes.io/part-of: ingress-nginx 585 | app.kubernetes.io/version: 1.8.0 586 | name: ingress-nginx-admission-patch 587 | spec: 588 | containers: 589 | - args: 590 | - patch 591 | - --webhook-name=ingress-nginx-admission 592 | - --namespace=$(POD_NAMESPACE) 593 | - --patch-mutating=false 594 | - --secret-name=ingress-nginx-admission 595 | - --patch-failure-policy=Fail 596 | env: 597 | - name: POD_NAMESPACE 598 | valueFrom: 599 | fieldRef: 600 | fieldPath: metadata.namespace 601 | image: registry.k8s.io/ingress-nginx/kube-webhook-certgen:v20230407@sha256:543c40fd093964bc9ab509d3e791f9989963021f1e9e4c9c7b6700b02bfb227b 602 | imagePullPolicy: IfNotPresent 603 | name: patch 604 | securityContext: 605 | allowPrivilegeEscalation: false 606 | nodeSelector: 607 | kubernetes.io/os: linux 608 | restartPolicy: OnFailure 609 | securityContext: 610 | fsGroup: 2000 611 | runAsNonRoot: true 612 | runAsUser: 2000 613 | serviceAccountName: ingress-nginx-admission 614 | --- 615 | apiVersion: networking.k8s.io/v1 616 | kind: IngressClass 617 | metadata: 618 | labels: 619 | app.kubernetes.io/component: controller 620 | app.kubernetes.io/instance: ingress-nginx 621 | app.kubernetes.io/name: ingress-nginx 622 | app.kubernetes.io/part-of: ingress-nginx 623 | app.kubernetes.io/version: 1.8.0 624 | name: nginx 625 | spec: 626 | controller: k8s.io/ingress-nginx 627 | --- 628 | apiVersion: admissionregistration.k8s.io/v1 629 | kind: ValidatingWebhookConfiguration 630 | metadata: 631 | labels: 632 | app.kubernetes.io/component: admission-webhook 633 | app.kubernetes.io/instance: ingress-nginx 634 | app.kubernetes.io/name: ingress-nginx 635 | app.kubernetes.io/part-of: ingress-nginx 636 | app.kubernetes.io/version: 1.8.0 637 | name: ingress-nginx-admission 638 | webhooks: 639 | - admissionReviewVersions: 640 | - v1 641 | clientConfig: 642 | service: 643 | name: ingress-nginx-controller-admission 644 | namespace: ingress-nginx 645 | path: /networking/v1/ingresses 
646 | failurePolicy: Fail 647 | matchPolicy: Equivalent 648 | name: validate.nginx.ingress.kubernetes.io 649 | rules: 650 | - apiGroups: 651 | - networking.k8s.io 652 | apiVersions: 653 | - v1 654 | operations: 655 | - CREATE 656 | - UPDATE 657 | resources: 658 | - ingresses 659 | sideEffects: None 660 | -------------------------------------------------------------------------------- /kind_examples/ingress_usage_example.yaml: -------------------------------------------------------------------------------- 1 | kind: Pod 2 | apiVersion: v1 3 | metadata: 4 | name: foo-app 5 | labels: 6 | app: foo 7 | spec: 8 | containers: 9 | - command: 10 | - /agnhost 11 | - netexec 12 | - --http-port 13 | - "8080" 14 | image: registry.k8s.io/e2e-test-images/agnhost:2.39 15 | name: foo-app 16 | --- 17 | kind: Service 18 | apiVersion: v1 19 | metadata: 20 | name: foo-service 21 | spec: 22 | selector: 23 | app: foo 24 | ports: 25 | # Default port used by the image 26 | - port: 8080 27 | --- 28 | kind: Pod 29 | apiVersion: v1 30 | metadata: 31 | name: bar-app 32 | labels: 33 | app: bar 34 | spec: 35 | containers: 36 | - command: 37 | - /agnhost 38 | - netexec 39 | - --http-port 40 | - "8080" 41 | image: registry.k8s.io/e2e-test-images/agnhost:2.39 42 | name: bar-app 43 | --- 44 | kind: Service 45 | apiVersion: v1 46 | metadata: 47 | name: bar-service 48 | spec: 49 | selector: 50 | app: bar 51 | ports: 52 | # Default port used by the image 53 | - port: 8080 54 | --- 55 | apiVersion: networking.k8s.io/v1 56 | kind: Ingress 57 | metadata: 58 | name: example-ingress 59 | annotations: 60 | nginx.ingress.kubernetes.io/rewrite-target: /$2 61 | spec: 62 | rules: 63 | - http: 64 | paths: 65 | - pathType: Prefix 66 | path: /foo(/|$)(.*) 67 | backend: 68 | service: 69 | name: foo-service 70 | port: 71 | number: 8080 72 | - pathType: Prefix 73 | path: /bar(/|$)(.*) 74 | backend: 75 | service: 76 | name: bar-service 77 | port: 78 | number: 8080 79 | --- 80 | -------------------------------------------------------------------------------- /kind_examples/test_ingress.yaml: -------------------------------------------------------------------------------- 1 | kind: Pod 2 | apiVersion: v1 3 | metadata: 4 | name: foo-app 5 | labels: 6 | app: foo 7 | spec: 8 | containers: 9 | - name: foo-app 10 | image: hashicorp/http-echo:0.2.3 11 | args: 12 | - "-text=foo" 13 | --- 14 | kind: Service 15 | apiVersion: v1 16 | metadata: 17 | name: foo-service 18 | spec: 19 | selector: 20 | app: foo 21 | ports: 22 | # Default port used by the image 23 | - port: 5678 24 | --- 25 | kind: Pod 26 | apiVersion: v1 27 | metadata: 28 | name: bar-app 29 | labels: 30 | app: bar 31 | spec: 32 | containers: 33 | - name: bar-app 34 | image: hashicorp/http-echo:0.2.3 35 | args: 36 | - "-text=bar" 37 | --- 38 | kind: Service 39 | apiVersion: v1 40 | metadata: 41 | name: bar-service 42 | spec: 43 | selector: 44 | app: bar 45 | ports: 46 | # Default port used by the image 47 | - port: 5678 48 | --- 49 | apiVersion: networking.k8s.io/v1 50 | kind: Ingress 51 | metadata: 52 | name: example-ingress 53 | spec: 54 | rules: 55 | - host: cicd.codepraxis.dev 56 | http: 57 | paths: 58 | - pathType: Prefix 59 | path: "/foo" 60 | backend: 61 | service: 62 | name: foo-service 63 | port: 64 | number: 5678 65 | - pathType: Prefix 66 | path: "/bar" 67 | backend: 68 | service: 69 | name: bar-service 70 | port: 71 | number: 5678 72 | --- 73 | -------------------------------------------------------------------------------- /ko_sbom_examples/get_digest_and_sbom.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=ghcr.io/timoniersystems/gocobra 4 | TAG=0.1.6 5 | IMAGE_DIGEST=$(crane digest $IMAGE:$TAG) 6 | 7 | cosign download sbom $IMAGE@${IMAGE_DIGEST} 8 | 9 | -------------------------------------------------------------------------------- /microk8s_examples/1_install_microk8s.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo snap install microk8s --classic 4 | 5 | # firewall rules 6 | sudo ufw allow in on cni0 && sudo ufw allow out on cni0 7 | sudo ufw default allow routed 8 | 9 | # enable addons 10 | sudo microk8s enable dns dashboard storage ingress registry 11 | 12 | # grant access to ubuntu so we can run without sudo 13 | sudo chown -R root:ubuntu /var/snap/microk8s/ 14 | 15 | #If RBAC is not enabled access the dashboard using the token retrieved with: 16 | #microk8s kubectl describe secret -n kube-system microk8s-dashboard-token 17 | #Use this token in the https login UI of the kubernetes-dashboard service. 18 | #In an RBAC enabled setup (microk8s enable RBAC) you need to create a user with restricted 19 | #permissions as shown in: 20 | #https://github.com/kubernetes/dashboard/blob/master/docs/user/access-control/creating-sample-user.md 21 | 22 | # calico pod crashing 23 | microk8s kubectl get ippools.crd.projectcalico.org default-ipv4-ippool 24 | # then 25 | # microk8s kubectl edit ippools.crd.projectcalico.org default-ipv4-ippool 26 | # and set 27 | # vxlanMode: Never 28 | -------------------------------------------------------------------------------- /microk8s_examples/2_microk8s_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo microk8s kubectl get all --all-namespaces 4 | -------------------------------------------------------------------------------- /microk8s_examples/3_test_local_registry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | nerdctl pull --platform x86_64 gcr.io/google-samples/hello-app:2.0 4 | nerdctl tag gcr.io/google-samples/hello-app:2.0 127.0.0.1:32000/hello-app:2.0 5 | nerdctl image save 127.0.0.1:32000/hello-app:2.0 -o hello-app-2.0.tar 6 | 7 | crane push hello-app-2.0.tar 127.0.0.1:32000/hello-app:2.0 8 | crane ls 127.0.0.1:32000/hello-app 9 | crane digest 127.0.0.1:32000/hello-app:2.0 10 | crane manifest 127.0.0.1:32000/hello-app:2.0 | jq 11 | 12 | rm -rf hello-app-2.0.tar 13 | 14 | #sudo nerdctl push --insecure-registry 127.0.0.1:32000/hello-app:2.0 15 | sudo microk8s kubectl apply -f hello-server-deployment.yaml 16 | -------------------------------------------------------------------------------- /microk8s_examples/4_crane_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo Running: crane catalog localhost:5001 4 | crane catalog localhost:5001 5 | 6 | echo Running: crane ls localhost:5001/hello-app 7 | crane ls localhost:5001/hello-app 8 | 9 | tag=1.0 10 | echo Running: crane digest localhost:5001/hello-app:$tag 11 | crane digest localhost:5001/hello-app:$tag 12 | 13 | echo Running: crane manifest localhost:5001/hello-app:$tag 14 | crane manifest localhost:5001/hello-app:$tag 15 | 16 | echo Running: crane blob localhost:5001/hello-app@sha256:f4d42e081a02b89d8bab78682f5879e6b593efe76690872c71a7a04910ee474a 17 | crane blob 
localhost:5001/hello-app@sha256:f4d42e081a02b89d8bab78682f5879e6b593efe76690872c71a7a04910ee474a > blob_config.json 18 | cat blob_config.json | jq -r '.' 19 | 20 | echo Running: crane blob localhost:5001/hello-app@sha256:59bf1c3509f33515622619af21ed55bbe26d24913cedbca106468a5fb37a50c3 21 | crane blob localhost:5001/hello-app@sha256:59bf1c3509f33515622619af21ed55bbe26d24913cedbca106468a5fb37a50c3 > blob_layer1.tar.gz 22 | 23 | echo Running: crane validate --remote localhost:5001/hello-app:$tag 24 | crane validate --remote localhost:5001/hello-app:$tag 25 | 26 | newtag=1.1 27 | echo Running: crane tag localhost:5001/hello-app:$tag $newtag 28 | crane tag localhost:5001/hello-app:$tag $newtag 29 | 30 | echo Running: crane ls localhost:5001/hello-app 31 | crane ls localhost:5001/hello-app 32 | 33 | echo Running: crane digest localhost:5001/hello-app:$newtag 34 | crane digest localhost:5001/hello-app:$newtag 35 | 36 | echo Running: crane export localhost:5001/hello-app:$newtag hello-app.tar 37 | crane export localhost:5001/hello-app:$newtag hello-app.tar 38 | 39 | echo Running: crane ls ghcr.io/codepraxis-io/spring-music 40 | crane ls ghcr.io/codepraxis-io/spring-music 41 | 42 | echo Running: crane copy localhost:5001/hello-app:1.0 ghcr.io/codepraxis-io/hello-app:1.0 43 | crane copy localhost:5001/hello-app:1.0 ghcr.io/codepraxis-io/hello-app:1.0 44 | 45 | echo Running: crane ls gcr.io/google_containers/busybox 46 | crane ls gcr.io/google_containers/busybox 47 | 48 | echo Running: crane manifest gcr.io/google_containers/busybox:latest 49 | crane manifest gcr.io/google_containers/busybox:latest | jq -r '.' 50 | 51 | echo Running: crane export gcr.io/google_containers/busybox busybox.tar 52 | crane export gcr.io/google_containers/busybox:latest busybox.tar 53 | -------------------------------------------------------------------------------- /microk8s_examples/5_crane_push_jib_image_tar_to_ghcr.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | JIB_IMAGE_TAR=../taskfiles/build-jib-springboot-helloworld/workspace-QatQ6HRcFZ/sourcecode/springboot-helloworld/target/jib-image.tar 4 | REGISTRY=ghcr.io/codepraxis-io 5 | IMAGE=springboot-helloworld 6 | IMAGE_TAG=0.0.2-jib-nerdctl 7 | 8 | echo Running: crane push $JIB_IMAGE_TAR ${REGISTRY}/$IMAGE:$IMAGE_TAG 9 | crane push $JIB_IMAGE_TAR ${REGISTRY}/$IMAGE:$IMAGE_TAG 10 | echo 11 | 12 | echo Running: crane manifest ${REGISTRY}/$IMAGE:$IMAGE_TAG 13 | crane manifest ${REGISTRY}/$IMAGE:$IMAGE_TAG | jq -r '.' 14 | echo 15 | 16 | echo Running: crane config ${REGISTRY}/$IMAGE:$IMAGE_TAG 17 | crane config ${REGISTRY}/$IMAGE:$IMAGE_TAG | jq -r '.' 
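# Optional follow-up sketch: capture the digest of the image that was just pushed so it
# can be referenced immutably later (e.g. for cosign signing); PUSHED_DIGEST is a name
# used only in this sketch:
# PUSHED_DIGEST=$(crane digest ${REGISTRY}/$IMAGE:$IMAGE_TAG)
# echo ${REGISTRY}/$IMAGE@${PUSHED_DIGEST}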
18 | echo 19 | 20 | # to run the image as a container: 21 | # nerdctl run --cni-path=/opt/cni/bin --rm ${REGISTRY}/$IMAGE:$IMAGE_TAG 22 | # or 23 | # nerdctl run -d --cni-path=/opt/cni/bin -p 8080:8080 ghcr.io/codepraxis-io/springboot-helloworld:0.0.1-jib-nerdctl 24 | # nerdctl ps 25 | # curl localhost:8080 26 | -------------------------------------------------------------------------------- /microk8s_examples/5_crane_push_jib_image_tar_to_local_registry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | JIB_IMAGE_TAR=../taskfiles/build-jib-springboot-helloworld/workspace-QatQ6HRcFZ/sourcecode/springboot-helloworld/target/jib-image.tar 4 | LOCAL_REGISTRY=127.0.0.1:32000 5 | IMAGE=springboot-helloworld 6 | IMAGE_TAG=0.0.2-jib-nerdctl 7 | 8 | echo Running: crane push $JIB_IMAGE_TAR ${LOCAL_REGISTRY}/$IMAGE:$IMAGE_TAG 9 | crane push $JIB_IMAGE_TAR ${LOCAL_REGISTRY}/$IMAGE:$IMAGE_TAG 10 | echo 11 | 12 | echo Runing: crane manifest ${LOCAL_REGISTRY}/$IMAGE:$IMAGE_TAG 13 | crane manifest ${LOCAL_REGISTRY}/$IMAGE:$IMAGE_TAG | jq -r '.' 14 | echo 15 | 16 | echo Running: crane config ${LOCAL_REGISTRY}/$IMAGE:$IMAGE_TAG 17 | crane config ${LOCAL_REGISTRY}/$IMAGE:$IMAGE_TAG | jq -r '.' 18 | echo 19 | -------------------------------------------------------------------------------- /microk8s_examples/hello-server-deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | labels: 5 | app: hello-server 6 | name: hello-server 7 | namespace: default 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app: hello-server 13 | strategy: 14 | rollingUpdate: 15 | maxSurge: 25% 16 | maxUnavailable: 25% 17 | type: RollingUpdate 18 | template: 19 | metadata: 20 | labels: 21 | app: hello-server 22 | spec: 23 | containers: 24 | - image: 127.0.0.1:32000/hello-app:2.0 25 | imagePullPolicy: IfNotPresent 26 | name: hello-app 27 | resources: {} 28 | -------------------------------------------------------------------------------- /oci_examples/1_inspect_image_dive.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-amazoncorretto-17-alpine3-15 4 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-curated-alpine3.16-openjdk17 5 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-distroless-java11-debian11-multistage 6 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-distroless-java17-debian11 7 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-distroless-java17-debian11-multistage 8 | IMAGE=ghcr.io/codepraxis-io/spring-music:2.0.0-eclipse-temurin-17-jre-jammy 9 | IMAGE=ghcr.io/codepraxis-io/alpine3.16:openjre17 10 | IMAGE=ghcr.io/codepraxis-io/alpine3.16:openjdk17 11 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04:openjdk17 12 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04:openjdk11 13 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04:node16 14 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04:base 15 | IMAGE=ghcr.io/codepraxis-io/alpine3.16:openjre11 16 | IMAGE=ghcr.io/codepraxis-io/alpine3.16:run 17 | IMAGE=ghcr.io/codepraxis-io/alpine3.16:base 18 | IMAGE=ghcr.io/codepraxis-io/alpine3.16:build 19 | IMAGE=ghcr.io/codepraxis-io/alpine3.16:openjdk11 20 | 21 | dive $IMAGE 22 | -------------------------------------------------------------------------------- /oci_examples/2_crane_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 
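# Walks several ghcr.io/codepraxis-io repositories, filters their tags with crane ls,
# and prints each tag's manifest and config as JSON. Assumes crane and jq are installed
# (see install_scripts/) and, for private repositories, that you are logged in to ghcr.io
# (e.g. crane auth login ghcr.io).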
2 | 3 | IMAGE=ghcr.io/codepraxis-io/dddive-springboot 4 | TAGS=$(crane ls $IMAGE | grep 0.0.1) 5 | for TAG in $TAGS; do 6 | echo Running: crane manifest $IMAGE:$TAG 7 | crane manifest $IMAGE:$TAG | jq -r '.' 8 | 9 | echo Running: crane config $IMAGE:$TAG 10 | crane config $IMAGE:$TAG | jq -r '.' 11 | done 12 | 13 | IMAGE=ghcr.io/codepraxis-io/flask-bootstrap 14 | TAGS=$(crane ls $IMAGE | grep 0.0.1) 15 | for TAG in $TAGS; do 16 | echo Running: crane manifest $IMAGE:$TAG 17 | crane manifest $IMAGE:$TAG | jq -r '.' 18 | 19 | echo Running: crane config $IMAGE:$TAG 20 | crane config $IMAGE:$TAG | jq -r '.' 21 | done 22 | 23 | IMAGE=ghcr.io/codepraxis-io/spring-music 24 | TAGS=$(crane ls $IMAGE | grep 2.0.0) 25 | for TAG in $TAGS; do 26 | echo Running: crane manifest $IMAGE:$TAG 27 | crane manifest $IMAGE:$TAG | jq -r '.' 28 | 29 | echo Running: crane config $IMAGE:$TAG 30 | crane config $IMAGE:$TAG | jq -r '.' 31 | done 32 | 33 | IMAGE=ghcr.io/codepraxis-io/alpine3.16 34 | TAGS=$(crane ls $IMAGE | grep -v sig) 35 | for TAG in $TAGS; do 36 | echo Running: crane manifest $IMAGE:$TAG 37 | crane manifest $IMAGE:$TAG | jq -r '.' 38 | 39 | echo Running: crane config $IMAGE:$TAG 40 | crane config $IMAGE:$TAG | jq -r '.' 41 | done 42 | 43 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04 44 | TAGS=$(crane ls $IMAGE | grep -v sig) 45 | for TAG in $TAGS; do 46 | echo Running: crane manifest $IMAGE:$TAG 47 | crane manifest $IMAGE:$TAG | jq -r '.' 48 | 49 | echo Running: crane config $IMAGE:$TAG 50 | crane config $IMAGE:$TAG | jq -r '.' 51 | done 52 | 53 | #echo Running: crane export gcr.io/google_containers/busybox busybox.tar 54 | #crane export gcr.io/google_containers/busybox:latest busybox.tar 55 | -------------------------------------------------------------------------------- /oci_examples/3_oras_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | oras login ghcr.io/codepraxis-io 4 | 5 | IMAGE=ghcr.io/codepraxis-io/dddive-springboot 6 | TAGS=$(crane ls $IMAGE | grep 0.0.1) 7 | for TAG in $TAGS; do 8 | echo Running: oras manifest fetch-config $IMAGE:$TAG 9 | oras manifest fetch-config $IMAGE:$TAG | jq -r '.' 10 | done 11 | 12 | exit 0 13 | 14 | IMAGE=ghcr.io/codepraxis-io/flask-bootstrap 15 | TAGS=$(crane ls $IMAGE | grep 0.0.1) 16 | for TAG in $TAGS; do 17 | echo Running: crane manifest $IMAGE:$TAG 18 | crane manifest $IMAGE:$TAG | jq -r '.' 19 | 20 | echo Running: crane config $IMAGE:$TAG 21 | crane config $IMAGE:$TAG | jq -r '.' 22 | done 23 | 24 | IMAGE=ghcr.io/codepraxis-io/spring-music 25 | TAGS=$(crane ls $IMAGE | grep 2.0.0) 26 | for TAG in $TAGS; do 27 | echo Running: crane manifest $IMAGE:$TAG 28 | crane manifest $IMAGE:$TAG | jq -r '.' 29 | 30 | echo Running: crane config $IMAGE:$TAG 31 | crane config $IMAGE:$TAG | jq -r '.' 32 | done 33 | 34 | IMAGE=ghcr.io/codepraxis-io/alpine3.16 35 | TAGS=$(crane ls $IMAGE | grep -v sig) 36 | for TAG in $TAGS; do 37 | echo Running: crane manifest $IMAGE:$TAG 38 | crane manifest $IMAGE:$TAG | jq -r '.' 39 | 40 | echo Running: crane config $IMAGE:$TAG 41 | crane config $IMAGE:$TAG | jq -r '.' 42 | done 43 | 44 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04 45 | TAGS=$(crane ls $IMAGE | grep -v sig) 46 | for TAG in $TAGS; do 47 | echo Running: crane manifest $IMAGE:$TAG 48 | crane manifest $IMAGE:$TAG | jq -r '.' 49 | 50 | echo Running: crane config $IMAGE:$TAG 51 | crane config $IMAGE:$TAG | jq -r '.' 
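# oras equivalent of the two crane calls above (a sketch; assumes a recent oras CLI that
# provides the manifest fetch/fetch-config subcommands):
# oras manifest fetch $IMAGE:$TAG | jq -r '.'
# oras manifest fetch-config $IMAGE:$TAG | jq -r '.'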
52 | done 53 | 54 | #echo Running: crane export gcr.io/google_containers/busybox busybox.tar 55 | #crane export gcr.io/google_containers/busybox:latest busybox.tar 56 | -------------------------------------------------------------------------------- /oci_examples/4_regcli_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGES=" 4 | ghcr.io/codepraxis-io/spring-music:2.0.0-buildx-eclipse-temurin-17-jre-jammy 5 | ghcr.io/codepraxis-io/spring-music:2.0.0-jib-eclipse-temurin-17-jre-jammy 6 | ghcr.io/codepraxis-io/spring-music:2.0.0-podman-eclipse-temurin-17-jre-jammy 7 | ghcr.io/codepraxis-io/spring-music:2.0.0-kaniko-amazoncorretto-17-alpine3-15 8 | ghcr.io/codepraxis-io/spring-music:2.0.0-kaniko-curated-alpine3.16-openjdk17 9 | ghcr.io/codepraxis-io/spring-music:2.0.0-buildx-distroless-java11-debian11-multistage 10 | ghcr.io/codepraxis-io/spring-music:2.0.0-buildx-distroless-java17-debian11 11 | ghcr.io/codepraxis-io/spring-music:2.0.0-cnb-java11" 12 | 13 | 14 | echo " 15 | ghcr.io/codepraxis-io/alpine3.16:base 16 | ghcr.io/codepraxis-io/alpine3.16:build 17 | ghcr.io/codepraxis-io/alpine3.16:run 18 | ghcr.io/codepraxis-io/alpine3.16:openjre11 19 | ghcr.io/codepraxis-io/alpine3.16:openjdk11 20 | ghcr.io/codepraxis-io/alpine3.16:openjre17 21 | ghcr.io/codepraxis-io/alpine3.16:openjdk17 22 | ghcr.io/codepraxis-io/ubuntu22.04:base 23 | ghcr.io/codepraxis-io/ubuntu22.04:openjdk11 24 | ghcr.io/codepraxis-io/ubuntu22.04:openjdk17 25 | ghcr.io/codepraxis-io/ubuntu22.04:node16" 26 | 27 | for IMAGE in $IMAGES; do 28 | echo Running: regctl image manifest $IMAGE 29 | regctl image manifest $IMAGE 30 | done 31 | -------------------------------------------------------------------------------- /oci_examples/5_skopeo_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=ghcr.io/codepraxis-io/dddive-springboot 4 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep 0.0.1) 5 | for TAG in $TAGS; do 6 | echo Running: skopeo inspect docker://$IMAGE:$TAG 7 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 8 | done 9 | 10 | IMAGE=ghcr.io/codepraxis-io/flask-bootstrap 11 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep 0.0.1) 12 | for TAG in $TAGS; do 13 | echo Running: skopeo inspect docker://$IMAGE:$TAG 14 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 15 | done 16 | 17 | IMAGE=ghcr.io/codepraxis-io/spring-music 18 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep 2.0.0) 19 | for TAG in $TAGS; do 20 | echo Running: skopeo inspect docker://$IMAGE:$TAG 21 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 22 | done 23 | 24 | IMAGE=ghcr.io/codepraxis-io/alpine3.16 25 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep -v sig) 26 | for TAG in $TAGS; do 27 | echo Running: skopeo inspect docker://$IMAGE:$TAG 28 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 29 | done 30 | 31 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04 32 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep -v sig) 33 | for TAG in $TAGS; do 34 | echo Running: skopeo inspect docker://$IMAGE:$TAG 35 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 
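# Related sketch: fetch the raw manifest instead of skopeo's summarized inspect view:
# skopeo inspect --raw docker://$IMAGE:$TAG | jq -r '.'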
36 | done 37 | -------------------------------------------------------------------------------- /podman_examples/podman_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | podman pull docker.io/library/alpine 4 | podman run -it --rm docker.io/library/alpine /bin/sh 5 | podman images 6 | 7 | podman run -it --rm docker.io/library/busybox /bin/sh 8 | podman images 9 | 10 | podman login ghcr.io/codepraxis-io 11 | podman tag docker.io/library/busybox:latest ghcr.io/codepraxis-io/library/busybox:latest 12 | podman push ghcr.io/codepraxis-io/library/busybox:latest 13 | -------------------------------------------------------------------------------- /podman_examples/skopeo_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # first run 4 | # skopeo login ghcr.io/codepraxis-io 5 | # 6 | IMAGE=ghcr.io/codepraxis-io/flask-bootstrap 7 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep 0.5.9) 8 | for TAG in $TAGS; do 9 | echo Running: skopeo inspect docker://$IMAGE:$TAG 10 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 11 | # podman pull docker://$IMAGE:$TAG 12 | done 13 | 14 | exit 0 15 | 16 | IMAGE=ghcr.io/codepraxis-io/dddive-springboot 17 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep 0.0.1) 18 | for TAG in $TAGS; do 19 | echo Running: skopeo inspect docker://$IMAGE:$TAG 20 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 21 | done 22 | 23 | 24 | IMAGE=ghcr.io/codepraxis-io/spring-music 25 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep 2.0.0) 26 | for TAG in $TAGS; do 27 | echo Running: skopeo inspect docker://$IMAGE:$TAG 28 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 29 | done 30 | 31 | IMAGE=ghcr.io/codepraxis-io/alpine3.16 32 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep -v sig) 33 | for TAG in $TAGS; do 34 | echo Running: skopeo inspect docker://$IMAGE:$TAG 35 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 36 | done 37 | 38 | IMAGE=ghcr.io/codepraxis-io/ubuntu22.04 39 | TAGS=$(skopeo list-tags docker://$IMAGE | jq -r '.Tags[]' | grep -v sig) 40 | for TAG in $TAGS; do 41 | echo Running: skopeo inspect docker://$IMAGE:$TAG 42 | skopeo inspect docker://$IMAGE:$TAG | jq -r '.' 
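# Sketch: skopeo can also copy an inspected tag into local podman storage via the
# containers-storage transport:
# skopeo copy docker://$IMAGE:$TAG containers-storage:$IMAGE:$TAG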
43 | done 44 | -------------------------------------------------------------------------------- /runc_example/.gitignore: -------------------------------------------------------------------------------- 1 | myalpine 2 | -------------------------------------------------------------------------------- /runc_example/create_alpine_container.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mkdir -p myalpine/rootfs 4 | 5 | echo Download and unarchive Alpine mini rootfs 6 | MINIROOT=alpine-minirootfs-3.17.2-x86_64.tar.gz 7 | wget https://dl-cdn.alpinelinux.org/alpine/v3.17/releases/x86_64/$MINIROOT 8 | tar -xzf $MINIROOT -C myalpine/rootfs 9 | rm $MINIROOT 10 | 11 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/.gitignore: -------------------------------------------------------------------------------- 1 | *.json 2 | *.sarif 3 | *.xml 4 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/bomber_inspect_sbom.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SBOM_FILE=$1 4 | 5 | bomber scan $SBOM_FILE 6 | 7 | # for HTML output 8 | #bomber scan $SBOM_FILE --output=html 9 | 10 | # for ossindex provider; needs auth 11 | # bomber scan --provider=ossindex $SBOM_FILE 12 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/grype_generate_json_from_syft_sbom.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SBOM=syft_sbom.json 4 | REPORT=grype_report.json 5 | grype -o json --file $REPORT sbom:$SBOM 6 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/grype_generate_sarif_from_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=golang:1.16-alpine 4 | #IMAGE=golang:1.17-alpine 5 | #IMAGE=golang:1.18-alpine 6 | #IMAGE=gcr.io/distroless/python3-debian11:latest 7 | REPORT=grype_report.sarif 8 | 9 | grype -o sarif $IMAGE > $REPORT 10 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/grype_generate_sarif_from_syft_sbom.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SBOM=syft_sbom.json 3 | REPORT=grype_report.sarif 4 | grype -o sarif --file $REPORT sbom:$SBOM 5 | 6 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/inspect_hadolint_sarif.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | REPORT=$1 4 | 5 | echo Scan tool info 6 | cat $REPORT |jq '.runs[0].tool.driver.fullName, .runs[0].tool.driver.informationUri,.runs[0].tool.driver.name, .runs[0].tool.driver.version' 7 | echo 8 | 9 | echo Result list 10 | cat $REPORT |jq '.runs[0].results' 11 | 12 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/inspect_sarif.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | REPORT=$1 4 | 5 | echo Scan tool info 6 | cat $REPORT |jq '.runs[0].tool.driver.fullName, .runs[0].tool.driver.informationUri,.runs[0].tool.driver.name, 
.runs[0].tool.driver.version' 7 | echo 8 | 9 | echo Result list 10 | cat $REPORT |jq '.runs[0].tool.driver.rules[]' 11 | echo 12 | 13 | echo CVE list 14 | cat $REPORT |jq '.runs[0].tool.driver.rules[].id' 15 | echo 16 | 17 | echo CVE + URL 18 | cat $REPORT |jq '.runs[0].tool.driver.rules[]|.id,.helpUri' 19 | echo 20 | 21 | echo CVE + URL + help text 22 | cat $REPORT |jq '.runs[0].tool.driver.rules[]|.id,.helpUri,.help.text' 23 | echo 24 | 25 | echo CVE + severity 26 | cat $REPORT |jq '.runs[0].tool.driver.rules[]|.id,.properties."security-severity"' 27 | echo 28 | 29 | echo Select records with severity score greater than 7 30 | cat $REPORT |jq '.runs[0].tool.driver.rules[] | select(.properties."security-severity">"7")' | jq ".id,.helpUri,.fullDescription.text" 31 | echo 32 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/run_sbom_scorecard.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo sbom-scorecard score syft_sbom_spdx.json 4 | sbom-scorecard score syft_sbom_spdx.json 5 | 6 | echo sbom-scorecard score trivy_sbom_spdx.json 7 | sbom-scorecard score trivy_sbom_spdx.json 8 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/sbom_manager_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # initialize DB 4 | sbom-manager -I 5 | 6 | SBOM_FILE=~/code/sbom-utility-github/examples/cyclonedx/BOM/juice-shop-11.1.2/bom.json 7 | PROJECT=juice-shop 8 | sbom-manager -a $SBOM_FILE -t cyclonedx -p $PROJECT 9 | sbom-manager -l module -p $PROJECT 10 | sbom-manager -m uri-j -p $PROJECT 11 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/sbom_utility_operations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SBOM_UTILITY=~/code/sbom-utility/sbom-utility 4 | 5 | $SBOM_UTILITY schema 6 | 7 | SBOM_FILE=cyclonedx-sbom.json 8 | $SBOM_UTILITY resource list -i $SBOM_FILE 9 | 10 | $SBOM_UTILITY query -i $SBOM_FILE --from metadata.tools 11 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/syft_generate_sbom_json.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=golang:1.16-alpine 4 | #IMAGE=golang:1.17-alpine 5 | #IMAGE=golang:1.18-alpine 6 | #IMAGE=gcr.io/distroless/python3-debian11:latest 7 | REPORT=syft_sbom.json 8 | 9 | syft -o json --file $REPORT packages $IMAGE 10 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/syft_generate_sbom_spdx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=golang:1.16-alpine 4 | #IMAGE=golang:1.17-alpine 5 | #IMAGE=golang:1.18-alpine 6 | #IMAGE=gcr.io/distroless/python3-debian11:latest 7 | REPORT=syft_sbom_spdx.json 8 | 9 | syft -o spdx-json --file $REPORT packages $IMAGE 10 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/trivy_generate_json.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=$1 4 | if [ "$IMAGE" == "" ]; then 5 | IMAGE=golang:1.16-alpine 6 | fi 7 | # crane 
ls golang | grep alpine | grep 19 8 | #IMAGE=golang:1.17-alpine 9 | #IMAGE=golang:1.18-alpine 10 | #IMAGE=golang:1.19-alpine 11 | #IMAGE=gcr.io/distroless/python3-debian11:latest 12 | #crane ls ghcr.io/codepraxis-io/flask-bootstrap | grep 0.0.1|grep podman 13 | # 14 | REPORT=trivy_report.json 15 | 16 | trivy image --format json -o $REPORT $IMAGE 17 | 18 | echo Vulnerabilities 19 | cat $REPORT | jq '.Results[0].Vulnerabilities[]' 20 | echo 21 | 22 | echo CVE list 23 | cat $REPORT | jq '.Results[0].Vulnerabilities[].VulnerabilityID' 24 | echo 25 | 26 | echo CVE + URL 27 | cat $REPORT | jq '.Results[0].Vulnerabilities[]|.VulnerabilityID,.PrimaryURL' 28 | echo 29 | 30 | echo CVE + URL + Title 31 | cat $REPORT | jq '.Results[0].Vulnerabilities[]|.VulnerabilityID,.PrimaryURL,.Title' 32 | echo 33 | 34 | echo CVE + severity 35 | cat $REPORT | jq '.Results[0].Vulnerabilities[]|.VulnerabilityID,.Severity' 36 | echo 37 | 38 | echo CVE + severity + CVSS NVD v3 score 39 | # See https://nvd.nist.gov/vuln-metrics/cvss 40 | cat $REPORT | jq '.Results[0].Vulnerabilities[]|.VulnerabilityID,.Severity,.CVSS.nvd.V3Score' 41 | echo 42 | 43 | echo Select records with severity score greater than 7 44 | cat $REPORT | jq '.Results[0].Vulnerabilities[]|select(.CVSS.nvd.V3Score>7)' | jq '.VulnerabilityID,.PrimaryURL,.Title,.Severity,.CVSS.nvd.V3Score' 45 | echo 46 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/trivy_generate_sarif.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=golang:1.16-alpine 4 | #IMAGE=golang:1.17-alpine 5 | #IMAGE=golang:1.18-alpine 6 | #IMAGE=gcr.io/distroless/python3-debian11:latest 7 | REPORT=trivy_report.sarif 8 | 9 | trivy image --format sarif -o $REPORT $IMAGE 10 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/trivy_generate_sbom_spdx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IMAGE=$1 4 | if [ "$IMAGE" == "" ]; then 5 | IMAGE=golang:1.16-alpine 6 | fi 7 | # crane ls golang | grep alpine | grep 19 8 | #IMAGE=golang:1.17-alpine 9 | #IMAGE=golang:1.18-alpine 10 | #IMAGE=golang:1.19-alpine 11 | #IMAGE=gcr.io/distroless/python3-debian11:latest 12 | #crane ls ghcr.io/codepraxis-io/flask-bootstrap | grep 0.0.1|grep podman 13 | # 14 | REPORT=trivy_sbom_spdx.json 15 | 16 | trivy image --format spdx-json --output $REPORT $IMAGE 17 | 18 | -------------------------------------------------------------------------------- /scan_result_parsing_and_sbom_examples/trivy_scan_sbom_spdx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SBOM=trivy_sbom_spdx.json 4 | REPORT=trivy_sbom_scan_report.json 5 | 6 | trivy sbom $SBOM -f json -o $REPORT 7 | 8 | echo Vulnerabilities 9 | cat $REPORT | jq '.Results[0].Vulnerabilities[]' 10 | echo 11 | 12 | echo CVE list 13 | cat $REPORT | jq '.Results[0].Vulnerabilities[].VulnerabilityID' 14 | echo 15 | 16 | echo CVE + URL 17 | cat $REPORT | jq '.Results[0].Vulnerabilities[]' | jq -r '[.VulnerabilityID,.PrimaryURL]|@tsv' 18 | echo 19 | 20 | echo CVE + URL + Title 21 | cat $REPORT | jq '.Results[0].Vulnerabilities[]' | jq -r '[.VulnerabilityID,.PrimaryURL,.Title]|@tsv' 22 | echo 23 | 24 | echo CVE + severity 25 | cat $REPORT | jq '.Results[0].Vulnerabilities[]' | jq -r '[.VulnerabilityID,.Severity]|@tsv' 26 | echo 27 | 28 | echo CVE + 
severity + CVSS NVD v3 score 29 | # See https://nvd.nist.gov/vuln-metrics/cvss 30 | cat $REPORT | jq '.Results[0].Vulnerabilities[]' | jq -r '[.VulnerabilityID,.Severity,.CVSS.nvd.V3Score]|@tsv' 31 | echo 32 | 33 | echo Select records with severity score greater than 7 34 | cat $REPORT | jq '.Results[0].Vulnerabilities[]|select(.CVSS.nvd.V3Score>7)' | jq -r '[.VulnerabilityID,.PrimaryURL,.Title,.Severity,.CVSS.nvd.V3Score]|@tsv' 35 | echo 36 | -------------------------------------------------------------------------------- /slsa-verifier-examples/.gitignore: -------------------------------------------------------------------------------- 1 | katana* 2 | sbom-scorecard-attestation.json 3 | -------------------------------------------------------------------------------- /slsa-verifier-examples/extract_payload_from_intoto_attestation.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | INTOTO_ATTESTATION_FILE=$1 4 | 5 | cat $INTOTO_ATTESTATION_FILE | jq -r '.payload' | base64 --decode | jq -r '.' 6 | -------------------------------------------------------------------------------- /slsa-verifier-examples/run_slsa_verifier.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SOURCE_URI=github.com/codepraxis-io/katana 4 | #SOURCE_TAG=v1.0.1 5 | SOURCE_TAG=v1.0.6 6 | ARTIFACT_FILE=katana-linux-amd64 7 | INTOTO_ATTESTATION=katana-linux-amd64.intoto.jsonl 8 | 9 | rm -rf $ARTIFACT_FILE $INTOTO_ATTESTATION 10 | wget https://${SOURCE_URI}/releases/download/${SOURCE_TAG}/${ARTIFACT_FILE} 11 | wget https://${SOURCE_URI}/releases/download/${SOURCE_TAG}/${INTOTO_ATTESTATION} 12 | 13 | ~/go/bin/slsa-verifier verify-artifact $ARTIFACT_FILE \ 14 | --provenance-path $INTOTO_ATTESTATION \ 15 | --source-uri $SOURCE_URI \ 16 | --source-tag $SOURCE_TAG \ 17 | --print-provenance 18 | -------------------------------------------------------------------------------- /slsa-verifier-examples/sbom-scorecard-attestation.json: -------------------------------------------------------------------------------- 1 | { 2 | "_type": "https://in-toto.io/Statement/v0.1", 3 | "predicateType": "https://slsa.dev/provenance/v0.2", 4 | "subject": [ 5 | { 6 | "name": "sbom-scorecard-linux-amd64", 7 | "digest": { 8 | "sha256": "648c4de39e9eb7a113ac0443f1f52e914c2b77546aae20ff9d95ffd180c22b7f" 9 | } 10 | } 11 | ], 12 | "predicate": { 13 | "builder": { 14 | "id": "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.2" 15 | }, 16 | "buildType": "https://github.com/slsa-framework/slsa-github-generator/go@v1", 17 | "invocation": { 18 | "configSource": { 19 | "uri": "git+https://github.com/eBay/sbom-scorecard@refs/tags/0.0.5", 20 | "digest": { 21 | "sha1": "b324861a7e2c6d538143bd4f07bd05d2b70059df" 22 | }, 23 | "entryPoint": ".github/workflows/go-ossf-slsa3-publish.yml" 24 | }, 25 | "parameters": {}, 26 | "environment": { 27 | "arch": "X64", 28 | "github_actor": "justinabrahms", 29 | "github_actor_id": "3853", 30 | "github_base_ref": "", 31 | "github_event_name": "release", 32 | "github_event_payload": { 33 | "action": "created", 34 | "enterprise": { 35 | "avatar_url": "https://avatars.githubusercontent.com/b/4208?v=4", 36 | "created_at": "2020-09-25T23:27:27Z", 37 | "description": "", 38 | "html_url": "https://github.com/enterprises/ebay", 39 | "id": 4208, 40 | "name": "eBay", 41 | "node_id": "MDEwOkVudGVycHJpc2U0MjA4", 42 | "slug": "ebay", 43 | "updated_at": 
"2022-05-13T19:04:05Z", 44 | "website_url": "" 45 | }, 46 | "organization": { 47 | "avatar_url": "https://avatars.githubusercontent.com/u/3639281?v=4", 48 | "description": "", 49 | "events_url": "https://api.github.com/orgs/eBay/events", 50 | "hooks_url": "https://api.github.com/orgs/eBay/hooks", 51 | "id": 3639281, 52 | "issues_url": "https://api.github.com/orgs/eBay/issues", 53 | "login": "eBay", 54 | "members_url": "https://api.github.com/orgs/eBay/members{/member}", 55 | "node_id": "MDEyOk9yZ2FuaXphdGlvbjM2MzkyODE=", 56 | "public_members_url": "https://api.github.com/orgs/eBay/public_members{/member}", 57 | "repos_url": "https://api.github.com/orgs/eBay/repos", 58 | "url": "https://api.github.com/orgs/eBay" 59 | }, 60 | "release": { 61 | "assets": [], 62 | "assets_url": "https://api.github.com/repos/eBay/sbom-scorecard/releases/89324699/assets", 63 | "author": { 64 | "avatar_url": "https://avatars.githubusercontent.com/u/3853?v=4", 65 | "events_url": "https://api.github.com/users/justinabrahms/events{/privacy}", 66 | "followers_url": "https://api.github.com/users/justinabrahms/followers", 67 | "following_url": "https://api.github.com/users/justinabrahms/following{/other_user}", 68 | "gists_url": "https://api.github.com/users/justinabrahms/gists{/gist_id}", 69 | "gravatar_id": "", 70 | "html_url": "https://github.com/justinabrahms", 71 | "id": 3853, 72 | "login": "justinabrahms", 73 | "node_id": "MDQ6VXNlcjM4NTM=", 74 | "organizations_url": "https://api.github.com/users/justinabrahms/orgs", 75 | "received_events_url": "https://api.github.com/users/justinabrahms/received_events", 76 | "repos_url": "https://api.github.com/users/justinabrahms/repos", 77 | "site_admin": false, 78 | "starred_url": "https://api.github.com/users/justinabrahms/starred{/owner}{/repo}", 79 | "subscriptions_url": "https://api.github.com/users/justinabrahms/subscriptions", 80 | "type": "User", 81 | "url": "https://api.github.com/users/justinabrahms" 82 | }, 83 | "body": "Bugfix & documentation release\r\n\r\n## What's Changed\r\n* (feat) Add SPDX 2.3 support by @puerco in https://github.com/eBay/sbom-scorecard/pull/13\r\n* (bug) Fix Package Version Logic for CDX Parsing by @jspeed-meyers in https://github.com/eBay/sbom-scorecard/pull/24\r\n* (bug) Fix integer division by integer bug by @jspeed-meyers in https://github.com/eBay/sbom-scorecard/pull/29\r\n* (chore) Minor tutorial updates by @jspeed-meyers in https://github.com/eBay/sbom-scorecard/pull/20\r\n* (chore) Inline tutorial into the README by @justinabrahms in https://github.com/eBay/sbom-scorecard/pull/23\r\n* (chore) Add usage image by @justinabrahms in https://github.com/eBay/sbom-scorecard/pull/30\r\n\r\n**Full Changelog**: https://github.com/eBay/sbom-scorecard/compare/0.0.4...0.0.5", 84 | "created_at": "2023-01-17T23:34:38Z", 85 | "draft": false, 86 | "html_url": "https://github.com/eBay/sbom-scorecard/releases/tag/0.0.5", 87 | "id": 89324699, 88 | "mentions_count": 3, 89 | "name": "0.0.5", 90 | "node_id": "RE_kwDOIXhkFM4FUvyb", 91 | "prerelease": false, 92 | "published_at": "2023-01-17T23:36:26Z", 93 | "tag_name": "0.0.5", 94 | "tarball_url": "https://api.github.com/repos/eBay/sbom-scorecard/tarball/0.0.5", 95 | "target_commitish": "main", 96 | "upload_url": "https://uploads.github.com/repos/eBay/sbom-scorecard/releases/89324699/assets{?name,label}", 97 | "url": "https://api.github.com/repos/eBay/sbom-scorecard/releases/89324699", 98 | "zipball_url": "https://api.github.com/repos/eBay/sbom-scorecard/zipball/0.0.5" 99 | }, 100 | "repository": { 101 | 
"allow_forking": true, 102 | "archive_url": "https://api.github.com/repos/eBay/sbom-scorecard/{archive_format}{/ref}", 103 | "archived": false, 104 | "assignees_url": "https://api.github.com/repos/eBay/sbom-scorecard/assignees{/user}", 105 | "blobs_url": "https://api.github.com/repos/eBay/sbom-scorecard/git/blobs{/sha}", 106 | "branches_url": "https://api.github.com/repos/eBay/sbom-scorecard/branches{/branch}", 107 | "clone_url": "https://github.com/eBay/sbom-scorecard.git", 108 | "collaborators_url": "https://api.github.com/repos/eBay/sbom-scorecard/collaborators{/collaborator}", 109 | "comments_url": "https://api.github.com/repos/eBay/sbom-scorecard/comments{/number}", 110 | "commits_url": "https://api.github.com/repos/eBay/sbom-scorecard/commits{/sha}", 111 | "compare_url": "https://api.github.com/repos/eBay/sbom-scorecard/compare/{base}...{head}", 112 | "contents_url": "https://api.github.com/repos/eBay/sbom-scorecard/contents/{+path}", 113 | "contributors_url": "https://api.github.com/repos/eBay/sbom-scorecard/contributors", 114 | "created_at": "2022-11-03T22:58:57Z", 115 | "default_branch": "main", 116 | "deployments_url": "https://api.github.com/repos/eBay/sbom-scorecard/deployments", 117 | "description": "Generate a score for your sbom to understand if it will actually be useful.", 118 | "disabled": false, 119 | "downloads_url": "https://api.github.com/repos/eBay/sbom-scorecard/downloads", 120 | "events_url": "https://api.github.com/repos/eBay/sbom-scorecard/events", 121 | "fork": false, 122 | "forks": 3, 123 | "forks_count": 3, 124 | "forks_url": "https://api.github.com/repos/eBay/sbom-scorecard/forks", 125 | "full_name": "eBay/sbom-scorecard", 126 | "git_commits_url": "https://api.github.com/repos/eBay/sbom-scorecard/git/commits{/sha}", 127 | "git_refs_url": "https://api.github.com/repos/eBay/sbom-scorecard/git/refs{/sha}", 128 | "git_tags_url": "https://api.github.com/repos/eBay/sbom-scorecard/git/tags{/sha}", 129 | "git_url": "git://github.com/eBay/sbom-scorecard.git", 130 | "has_discussions": false, 131 | "has_downloads": true, 132 | "has_issues": true, 133 | "has_pages": false, 134 | "has_projects": true, 135 | "has_wiki": true, 136 | "homepage": "", 137 | "hooks_url": "https://api.github.com/repos/eBay/sbom-scorecard/hooks", 138 | "html_url": "https://github.com/eBay/sbom-scorecard", 139 | "id": 561538068, 140 | "is_template": false, 141 | "issue_comment_url": "https://api.github.com/repos/eBay/sbom-scorecard/issues/comments{/number}", 142 | "issue_events_url": "https://api.github.com/repos/eBay/sbom-scorecard/issues/events{/number}", 143 | "issues_url": "https://api.github.com/repos/eBay/sbom-scorecard/issues{/number}", 144 | "keys_url": "https://api.github.com/repos/eBay/sbom-scorecard/keys{/key_id}", 145 | "labels_url": "https://api.github.com/repos/eBay/sbom-scorecard/labels{/name}", 146 | "language": "Go", 147 | "languages_url": "https://api.github.com/repos/eBay/sbom-scorecard/languages", 148 | "license": { 149 | "key": "apache-2.0", 150 | "name": "Apache License 2.0", 151 | "node_id": "MDc6TGljZW5zZTI=", 152 | "spdx_id": "Apache-2.0", 153 | "url": "https://api.github.com/licenses/apache-2.0" 154 | }, 155 | "merges_url": "https://api.github.com/repos/eBay/sbom-scorecard/merges", 156 | "milestones_url": "https://api.github.com/repos/eBay/sbom-scorecard/milestones{/number}", 157 | "mirror_url": null, 158 | "name": "sbom-scorecard", 159 | "node_id": "R_kgDOIXhkFA", 160 | "notifications_url": 
"https://api.github.com/repos/eBay/sbom-scorecard/notifications{?since,all,participating}", 161 | "open_issues": 5, 162 | "open_issues_count": 5, 163 | "owner": { 164 | "avatar_url": "https://avatars.githubusercontent.com/u/3639281?v=4", 165 | "events_url": "https://api.github.com/users/eBay/events{/privacy}", 166 | "followers_url": "https://api.github.com/users/eBay/followers", 167 | "following_url": "https://api.github.com/users/eBay/following{/other_user}", 168 | "gists_url": "https://api.github.com/users/eBay/gists{/gist_id}", 169 | "gravatar_id": "", 170 | "html_url": "https://github.com/eBay", 171 | "id": 3639281, 172 | "login": "eBay", 173 | "node_id": "MDEyOk9yZ2FuaXphdGlvbjM2MzkyODE=", 174 | "organizations_url": "https://api.github.com/users/eBay/orgs", 175 | "received_events_url": "https://api.github.com/users/eBay/received_events", 176 | "repos_url": "https://api.github.com/users/eBay/repos", 177 | "site_admin": false, 178 | "starred_url": "https://api.github.com/users/eBay/starred{/owner}{/repo}", 179 | "subscriptions_url": "https://api.github.com/users/eBay/subscriptions", 180 | "type": "Organization", 181 | "url": "https://api.github.com/users/eBay" 182 | }, 183 | "private": false, 184 | "pulls_url": "https://api.github.com/repos/eBay/sbom-scorecard/pulls{/number}", 185 | "pushed_at": "2023-01-17T23:36:26Z", 186 | "releases_url": "https://api.github.com/repos/eBay/sbom-scorecard/releases{/id}", 187 | "size": 237, 188 | "ssh_url": "git@github.com:eBay/sbom-scorecard.git", 189 | "stargazers_count": 44, 190 | "stargazers_url": "https://api.github.com/repos/eBay/sbom-scorecard/stargazers", 191 | "statuses_url": "https://api.github.com/repos/eBay/sbom-scorecard/statuses/{sha}", 192 | "subscribers_url": "https://api.github.com/repos/eBay/sbom-scorecard/subscribers", 193 | "subscription_url": "https://api.github.com/repos/eBay/sbom-scorecard/subscription", 194 | "svn_url": "https://github.com/eBay/sbom-scorecard", 195 | "tags_url": "https://api.github.com/repos/eBay/sbom-scorecard/tags", 196 | "teams_url": "https://api.github.com/repos/eBay/sbom-scorecard/teams", 197 | "topics": [], 198 | "trees_url": "https://api.github.com/repos/eBay/sbom-scorecard/git/trees{/sha}", 199 | "updated_at": "2023-01-13T00:14:22Z", 200 | "url": "https://api.github.com/repos/eBay/sbom-scorecard", 201 | "visibility": "public", 202 | "watchers": 44, 203 | "watchers_count": 44, 204 | "web_commit_signoff_required": false 205 | }, 206 | "sender": { 207 | "avatar_url": "https://avatars.githubusercontent.com/u/3853?v=4", 208 | "events_url": "https://api.github.com/users/justinabrahms/events{/privacy}", 209 | "followers_url": "https://api.github.com/users/justinabrahms/followers", 210 | "following_url": "https://api.github.com/users/justinabrahms/following{/other_user}", 211 | "gists_url": "https://api.github.com/users/justinabrahms/gists{/gist_id}", 212 | "gravatar_id": "", 213 | "html_url": "https://github.com/justinabrahms", 214 | "id": 3853, 215 | "login": "justinabrahms", 216 | "node_id": "MDQ6VXNlcjM4NTM=", 217 | "organizations_url": "https://api.github.com/users/justinabrahms/orgs", 218 | "received_events_url": "https://api.github.com/users/justinabrahms/received_events", 219 | "repos_url": "https://api.github.com/users/justinabrahms/repos", 220 | "site_admin": false, 221 | "starred_url": "https://api.github.com/users/justinabrahms/starred{/owner}{/repo}", 222 | "subscriptions_url": "https://api.github.com/users/justinabrahms/subscriptions", 223 | "type": "User", 224 | "url": 
"https://api.github.com/users/justinabrahms" 225 | } 226 | }, 227 | "github_head_ref": "", 228 | "github_ref": "refs/tags/0.0.5", 229 | "github_ref_type": "tag", 230 | "github_repository_id": "561538068", 231 | "github_repository_owner": "eBay", 232 | "github_repository_owner_id": "3639281", 233 | "github_run_attempt": "1", 234 | "github_run_id": "3944244311", 235 | "github_run_number": "15", 236 | "github_sha1": "b324861a7e2c6d538143bd4f07bd05d2b70059df", 237 | "os": "ubuntu22" 238 | } 239 | }, 240 | "buildConfig": { 241 | "version": 1, 242 | "steps": [ 243 | { 244 | "command": [ 245 | "/opt/hostedtoolcache/go/1.17.13/x64/bin/go", 246 | "mod", 247 | "vendor" 248 | ], 249 | "env": null, 250 | "workingDir": "/home/runner/work/sbom-scorecard/sbom-scorecard/__PROJECT_CHECKOUT_DIR__" 251 | }, 252 | { 253 | "command": [ 254 | "/opt/hostedtoolcache/go/1.17.13/x64/bin/go", 255 | "build", 256 | "-mod=vendor", 257 | "-trimpath", 258 | "-tags=netgo", 259 | "-ldflags=-X main.Version=.0.5 -X main.Commit=b324861a7e2c6d538143bd4f07bd05d2b70059df -X main.CommitDate=1673998478 -X main.TreeState=clean", 260 | "-o", 261 | "sbom-scorecard-linux-amd64", 262 | "./cmd/sbom-scorecard/main.go" 263 | ], 264 | "env": [ 265 | "GOOS=linux", 266 | "GOARCH=amd64", 267 | "GO111MODULE=on", 268 | "CGO_ENABLED=0" 269 | ], 270 | "workingDir": "/home/runner/work/sbom-scorecard/sbom-scorecard/__PROJECT_CHECKOUT_DIR__" 271 | } 272 | ] 273 | }, 274 | "metadata": { 275 | "buildInvocationID": "3944244311-1", 276 | "completeness": { 277 | "parameters": true, 278 | "environment": false, 279 | "materials": false 280 | }, 281 | "reproducible": false 282 | }, 283 | "materials": [ 284 | { 285 | "uri": "git+https://github.com/eBay/sbom-scorecard@refs/tags/0.0.5", 286 | "digest": { 287 | "sha1": "b324861a7e2c6d538143bd4f07bd05d2b70059df" 288 | } 289 | }, 290 | { 291 | "uri": "https://github.com/actions/virtual-environments/releases/tag/ubuntu22/20230109.1" 292 | } 293 | ] 294 | } 295 | } 296 | -------------------------------------------------------------------------------- /slsa-verifier-examples/sbom-scorecard-linux-amd64.intoto.jsonl: -------------------------------------------------------------------------------- 1 | 
{"payloadType":"application/vnd.in-toto+json","payload":"{"_type":"https://in-toto.io/Statement/v0.1","predicateType":"https://slsa.dev/provenance/v0.2","subject":[{"name":"sbom-scorecard-linux-amd64","digest":{"sha256":"648c4de39e9eb7a113ac0443f1f52e914c2b77546aae20ff9d95ffd180c22b7f"}}],"predicate":{"builder":{"id":"https://github.com/slsa-framework/slsa-github-generator/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.2.2"},"buildType":"https://github.com/slsa-framework/slsa-github-generator/go@v1","invocation":{"configSource":{"uri":"git+https://github.com/eBay/sbom-scorecard@refs/tags/0.0.5","digest":{"sha1":"b324861a7e2c6d538143bd4f07bd05d2b70059df"},"entryPoint":".github/workflows/go-ossf-slsa3-publish.yml"},"parameters":{},"environment":{"arch":"X64","github_actor":"justinabrahms","github_actor_id":"3853","github_base_ref":"","github_event_name":"release","github_event_payload":{"action":"created","enterprise":{"avatar_url":"https://avatars.githubusercontent.com/b/4208?v=4","created_at":"2020-09-25T23:27:27Z","description":"","html_url":"https://github.com/enterprises/ebay","id":4208,"name":"eBay","node_id":"MDEwOkVudGVycHJpc2U0MjA4","slug":"ebay","updated_at":"2022-05-13T19:04:05Z","website_url":""},"organization":{"avatar_url":"https://avatars.githubusercontent.com/u/3639281?v=4","description":"","events_url":"https://api.github.com/orgs/eBay/events","hooks_url":"https://api.github.com/orgs/eBay/hooks","id":3639281,"issues_url":"https://api.github.com/orgs/eBay/issues","login":"eBay","members_url":"https://api.github.com/orgs/eBay/members{/member}","node_id":"MDEyOk9yZ2FuaXphdGlvbjM2MzkyODE=","public_members_url":"https://api.github.com/orgs/eBay/public_members{/member}","repos_url":"https://api.github.com/orgs/eBay/repos","url":"https://api.github.com/orgs/eBay"},"release":{"assets":[],"assets_url":"https://api.github.com/repos/eBay/sbom-scorecard/releases/89324699/assets","author":{"avatar_url":"https://avatars.githubusercontent.com/u/3853?v=4","events_url":"https://api.github.com/users/justinabrahms/events{/privacy}","followers_url":"https://api.github.com/users/justinabrahms/followers","following_url":"https://api.github.com/users/justinabrahms/following{/other_user}","gists_url":"https://api.github.com/users/justinabrahms/gists{/gist_id}","gravatar_id":"","html_url":"https://github.com/justinabrahms","id":3853,"login":"justinabrahms","node_id":"MDQ6VXNlcjM4NTM=","organizations_url":"https://api.github.com/users/justinabrahms/orgs","received_events_url":"https://api.github.com/users/justinabrahms/received_events","repos_url":"https://api.github.com/users/justinabrahms/repos","site_admin":false,"starred_url":"https://api.github.com/users/justinabrahms/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/justinabrahms/subscriptions","type":"User","url":"https://api.github.com/users/justinabrahms"},"body":"Bugfix \u0026 documentation release\r\n\r\n## What's Changed\r\n* (feat) Add SPDX 2.3 support by @puerco in https://github.com/eBay/sbom-scorecard/pull/13\r\n* (bug) Fix Package Version Logic for CDX Parsing by @jspeed-meyers in https://github.com/eBay/sbom-scorecard/pull/24\r\n* (bug) Fix integer division by integer bug by @jspeed-meyers in https://github.com/eBay/sbom-scorecard/pull/29\r\n* (chore) Minor tutorial updates by @jspeed-meyers in https://github.com/eBay/sbom-scorecard/pull/20\r\n* (chore) Inline tutorial into the README by @justinabrahms in https://github.com/eBay/sbom-scorecard/pull/23\r\n* (chore) Add usage image by @justinabrahms in 
https://github.com/eBay/sbom-scorecard/pull/30\r\n\r\n**Full Changelog**: https://github.com/eBay/sbom-scorecard/compare/0.0.4...0.0.5","created_at":"2023-01-17T23:34:38Z","draft":false,"html_url":"https://github.com/eBay/sbom-scorecard/releases/tag/0.0.5","id":89324699,"mentions_count":3,"name":"0.0.5","node_id":"RE_kwDOIXhkFM4FUvyb","prerelease":false,"published_at":"2023-01-17T23:36:26Z","tag_name":"0.0.5","tarball_url":"https://api.github.com/repos/eBay/sbom-scorecard/tarball/0.0.5","target_commitish":"main","upload_url":"https://uploads.github.com/repos/eBay/sbom-scorecard/releases/89324699/assets{?name,label}","url":"https://api.github.com/repos/eBay/sbom-scorecard/releases/89324699","zipball_url":"https://api.github.com/repos/eBay/sbom-scorecard/zipball/0.0.5"},"repository":{"allow_forking":true,"archive_url":"https://api.github.com/repos/eBay/sbom-scorecard/{archive_format}{/ref}","archived":false,"assignees_url":"https://api.github.com/repos/eBay/sbom-scorecard/assignees{/user}","blobs_url":"https://api.github.com/repos/eBay/sbom-scorecard/git/blobs{/sha}","branches_url":"https://api.github.com/repos/eBay/sbom-scorecard/branches{/branch}","clone_url":"https://github.com/eBay/sbom-scorecard.git","collaborators_url":"https://api.github.com/repos/eBay/sbom-scorecard/collaborators{/collaborator}","comments_url":"https://api.github.com/repos/eBay/sbom-scorecard/comments{/number}","commits_url":"https://api.github.com/repos/eBay/sbom-scorecard/commits{/sha}","compare_url":"https://api.github.com/repos/eBay/sbom-scorecard/compare/{base}...{head}","contents_url":"https://api.github.com/repos/eBay/sbom-scorecard/contents/{+path}","contributors_url":"https://api.github.com/repos/eBay/sbom-scorecard/contributors","created_at":"2022-11-03T22:58:57Z","default_branch":"main","deployments_url":"https://api.github.com/repos/eBay/sbom-scorecard/deployments","description":"Generate a score for your sbom to understand if it will actually be useful.","disabled":false,"downloads_url":"https://api.github.com/repos/eBay/sbom-scorecard/downloads","events_url":"https://api.github.com/repos/eBay/sbom-scorecard/events","fork":false,"forks":3,"forks_count":3,"forks_url":"https://api.github.com/repos/eBay/sbom-scorecard/forks","full_name":"eBay/sbom-scorecard","git_commits_url":"https://api.github.com/repos/eBay/sbom-scorecard/git/commits{/sha}","git_refs_url":"https://api.github.com/repos/eBay/sbom-scorecard/git/refs{/sha}","git_tags_url":"https://api.github.com/repos/eBay/sbom-scorecard/git/tags{/sha}","git_url":"git://github.com/eBay/sbom-scorecard.git","has_discussions":false,"has_downloads":true,"has_issues":true,"has_pages":false,"has_projects":true,"has_wiki":true,"homepage":"","hooks_url":"https://api.github.com/repos/eBay/sbom-scorecard/hooks","html_url":"https://github.com/eBay/sbom-scorecard","id":561538068,"is_template":false,"issue_comment_url":"https://api.github.com/repos/eBay/sbom-scorecard/issues/comments{/number}","issue_events_url":"https://api.github.com/repos/eBay/sbom-scorecard/issues/events{/number}","issues_url":"https://api.github.com/repos/eBay/sbom-scorecard/issues{/number}","keys_url":"https://api.github.com/repos/eBay/sbom-scorecard/keys{/key_id}","labels_url":"https://api.github.com/repos/eBay/sbom-scorecard/labels{/name}","language":"Go","languages_url":"https://api.github.com/repos/eBay/sbom-scorecard/languages","license":{"key":"apache-2.0","name":"Apache License 
2.0","node_id":"MDc6TGljZW5zZTI=","spdx_id":"Apache-2.0","url":"https://api.github.com/licenses/apache-2.0"},"merges_url":"https://api.github.com/repos/eBay/sbom-scorecard/merges","milestones_url":"https://api.github.com/repos/eBay/sbom-scorecard/milestones{/number}","mirror_url":null,"name":"sbom-scorecard","node_id":"R_kgDOIXhkFA","notifications_url":"https://api.github.com/repos/eBay/sbom-scorecard/notifications{?since,all,participating}","open_issues":5,"open_issues_count":5,"owner":{"avatar_url":"https://avatars.githubusercontent.com/u/3639281?v=4","events_url":"https://api.github.com/users/eBay/events{/privacy}","followers_url":"https://api.github.com/users/eBay/followers","following_url":"https://api.github.com/users/eBay/following{/other_user}","gists_url":"https://api.github.com/users/eBay/gists{/gist_id}","gravatar_id":"","html_url":"https://github.com/eBay","id":3639281,"login":"eBay","node_id":"MDEyOk9yZ2FuaXphdGlvbjM2MzkyODE=","organizations_url":"https://api.github.com/users/eBay/orgs","received_events_url":"https://api.github.com/users/eBay/received_events","repos_url":"https://api.github.com/users/eBay/repos","site_admin":false,"starred_url":"https://api.github.com/users/eBay/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/eBay/subscriptions","type":"Organization","url":"https://api.github.com/users/eBay"},"private":false,"pulls_url":"https://api.github.com/repos/eBay/sbom-scorecard/pulls{/number}","pushed_at":"2023-01-17T23:36:26Z","releases_url":"https://api.github.com/repos/eBay/sbom-scorecard/releases{/id}","size":237,"ssh_url":"git@github.com:eBay/sbom-scorecard.git","stargazers_count":44,"stargazers_url":"https://api.github.com/repos/eBay/sbom-scorecard/stargazers","statuses_url":"https://api.github.com/repos/eBay/sbom-scorecard/statuses/{sha}","subscribers_url":"https://api.github.com/repos/eBay/sbom-scorecard/subscribers","subscription_url":"https://api.github.com/repos/eBay/sbom-scorecard/subscription","svn_url":"https://github.com/eBay/sbom-scorecard","tags_url":"https://api.github.com/repos/eBay/sbom-scorecard/tags","teams_url":"https://api.github.com/repos/eBay/sbom-scorecard/teams","topics":[],"trees_url":"https://api.github.com/repos/eBay/sbom-scorecard/git/trees{/sha}","updated_at":"2023-01-13T00:14:22Z","url":"https://api.github.com/repos/eBay/sbom-scorecard","visibility":"public","watchers":44,"watchers_count":44,"web_commit_signoff_required":false},"sender":{"avatar_url":"https://avatars.githubusercontent.com/u/3853?v=4","events_url":"https://api.github.com/users/justinabrahms/events{/privacy}","followers_url":"https://api.github.com/users/justinabrahms/followers","following_url":"https://api.github.com/users/justinabrahms/following{/other_user}","gists_url":"https://api.github.com/users/justinabrahms/gists{/gist_id}","gravatar_id":"","html_url":"https://github.com/justinabrahms","id":3853,"login":"justinabrahms","node_id":"MDQ6VXNlcjM4NTM=","organizations_url":"https://api.github.com/users/justinabrahms/orgs","received_events_url":"https://api.github.com/users/justinabrahms/received_events","repos_url":"https://api.github.com/users/justinabrahms/repos","site_admin":false,"starred_url":"https://api.github.com/users/justinabrahms/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/justinabrahms/subscriptions","type":"User","url":"https://api.github.com/users/justinabrahms"}},"github_head_ref":"","github_ref":"refs/tags/0.0.5","github_ref_type":"tag","github_repository_id":"561538068","github_repository_owner":"
eBay","github_repository_owner_id":"3639281","github_run_attempt":"1","github_run_id":"3944244311","github_run_number":"15","github_sha1":"b324861a7e2c6d538143bd4f07bd05d2b70059df","os":"ubuntu22"}},"buildConfig":{"version":1,"steps":[{"command":["/opt/hostedtoolcache/go/1.17.13/x64/bin/go","mod","vendor"],"env":null,"workingDir":"/home/runner/work/sbom-scorecard/sbom-scorecard/__PROJECT_CHECKOUT_DIR__"},{"command":["/opt/hostedtoolcache/go/1.17.13/x64/bin/go","build","-mod=vendor","-trimpath","-tags=netgo","-ldflags=-X main.Version=.0.5 -X main.Commit=b324861a7e2c6d538143bd4f07bd05d2b70059df -X main.CommitDate=1673998478 -X main.TreeState=clean","-o","sbom-scorecard-linux-amd64","./cmd/sbom-scorecard/main.go"],"env":["GOOS=linux","GOARCH=amd64","GO111MODULE=on","CGO_ENABLED=0"],"workingDir":"/home/runner/work/sbom-scorecard/sbom-scorecard/__PROJECT_CHECKOUT_DIR__"}]},"metadata":{"buildInvocationID":"3944244311-1","completeness":{"parameters":true,"environment":false,"materials":false},"reproducible":false},"materials":[{"uri":"git+https://github.com/eBay/sbom-scorecard@refs/tags/0.0.5","digest":{"sha1":"b324861a7e2c6d538143bd4f07bd05d2b70059df"}},{"uri":"https://github.com/actions/virtual-environments/releases/tag/ubuntu22/20230109.1"}]}}","signatures":[{"keyid":"","sig":"MEYCIQC+lb9LTIWOMBpcna2DUTDhxShTvgCa1HXDKj/RaWJqCgIhAMMZOJ7V/FCrM14gSAlnQ2O3UE3SvrH/pWjK63KFBf76","cert":"-----BEGIN CERTIFICATE-----\nMIIDuzCCA0GgAwIBAgIUIplDWRwKo6tMFvV7zbhr3AwzBJAwCgYIKoZIzj0EAwMw\nNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRl\ncm1lZGlhdGUwHhcNMjMwMTE3MjMzOTIyWhcNMjMwMTE3MjM0OTIyWjAAMFkwEwYH\nKoZIzj0CAQYIKoZIzj0DAQcDQgAE6UuEGGEkpCeFYPSlnnYdS83fK8Z3UsiBRp1m\nafG8oj4i/ZsmOy9pyS4BukbMyCLJoC8WlCLeBYWMI3GiKnRq1qOCAmAwggJcMA4G\nA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUj9kP\nEjk9PVZYXx0FGgC8zW1CH1swHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4Y\nZD8wfQYDVR0RAQH/BHMwcYZvaHR0cHM6Ly9naXRodWIuY29tL3Nsc2EtZnJhbWV3\nb3JrL3Nsc2EtZ2l0aHViLWdlbmVyYXRvci8uZ2l0aHViL3dvcmtmbG93cy9idWls\nZGVyX2dvX3Nsc2EzLnltbEByZWZzL3RhZ3MvdjEuMi4yMDkGCisGAQQBg78wAQEE\nK2h0dHBzOi8vdG9rZW4uYWN0aW9ucy5naXRodWJ1c2VyY29udGVudC5jb20wFQYK\nKwYBBAGDvzABAgQHcmVsZWFzZTA2BgorBgEEAYO/MAEDBChiMzI0ODYxYTdlMmM2\nZDUzODE0M2JkNGYwN2JkMDVkMmI3MDA1OWRmMB4GCisGAQQBg78wAQQEEFNMU0Eg\nR28gcmVsZWFzZXIwIQYKKwYBBAGDvzABBQQTZUJheS9zYm9tLXNjb3JlY2FyZDAd\nBgorBgEEAYO/MAEGBA9yZWZzL3RhZ3MvMC4wLjUwgYkGCisGAQQB1nkCBAIEewR5\nAHcAdQDdPTBqxscRMmMZHhyZZzcCokpeuN48rf+HinKALynujgAAAYXCGgMGAAAE\nAwBGMEQCIAjE2VgLclVhSkSFV1IzsNHghq2AInZ0px/7wCR/R8yOAiBXUb/NQnTC\nLFpzSl6bsD3cv1mVrBm/gmvXC+H/tpBD+DAKBggqhkjOPQQDAwNoADBlAjEAvlGS\nwnjU6dJbI9xfOGeyKvMHNOuwAOKJQcWQXrwxNn9lqq/M1KWovkQ2GZIgRDBbAjAk\nBn38+Y02XERc2S+VLScSidimHhhXfiWHd5Rw2glvY9FT0yXs879/jTpd0v8646U=\n-----END CERTIFICATE-----\n"}]} -------------------------------------------------------------------------------- /taskfiles/.env: -------------------------------------------------------------------------------- 1 | GH_ORG=codepraxis-io 2 | DOCKER_REGISTRY=ghcr.io 3 | DOCKER_USERNAME=griggheo 4 | DOCKER_BUILD_TYPE=buildx 5 | DOCKER_IMAGE_SCANNER=trivy 6 | DOCKERFILE_SCANNER=trivy 7 | VERIFY_BASE_IMAGE=true 8 | STOP_ON_CRITICAL_VULNS=false 9 | DOCKER_SBOM_GENERATOR=syft 10 | COSIGN_PRIVATE_KEY={{.HOME}}/.cosign/cosign.key 11 | COSIGN_PUBLIC_KEY={{.HOME}}/.cosign/cosign.pub 12 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/.env: -------------------------------------------------------------------------------- 1 | 
DOCKER_DIR='.' 2 | DOCKERFILE_NAME='Dockerfile' 3 | SCAN_RESULT_DIR='scan_results' 4 | DOCKER_IMAGE_SCANNER='grype' 5 | DOCKERFILE_SCANNER='trivy' 6 | STOP_ON_CRITICAL_VULNS='false' 7 | DOCKER_SBOM_GENERATOR='syft' -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-build/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.16 2 | 3 | # Install basic tools 4 | RUN set -eux \ 5 | #&& apk --no-cache add alpine-sdk \ 6 | && apk --no-cache add \ 7 | bash \ 8 | ca-certificates \ 9 | curl \ 10 | gettext \ 11 | git \ 12 | gnupg \ 13 | gpg-agent \ 14 | jq \ 15 | lsof \ 16 | unzip \ 17 | vim \ 18 | wget \ 19 | zip 20 | 21 | # add user with UID and GID 1000 to be used instead of root 22 | ENV USER_ID=1000 23 | ENV GROUP_ID=1000 24 | ENV USER_NAME=devsecops 25 | ENV GROUP_NAME=devsecops 26 | ENV USER_HOMEDIR=/home/devsecops 27 | 28 | RUN addgroup -g ${GROUP_ID} ${GROUP_NAME} \ 29 | && adduser -u ${USER_ID} -G ${GROUP_NAME} -D -h ${USER_HOMEDIR} ${USER_NAME} 30 | 31 | # Cleanup 32 | RUN rm -rf /var/cache/apk/* 33 | 34 | # Switch to non-root user 35 | USER devsecops 36 | 37 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-build/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:build-push-docker-image 14 | vars: 15 | DOCKER_IMAGE_NAME: alpine3.16 16 | DOCKER_IMAGE_TAG: build 17 | - defer: 18 | task: common:cleanup-workdir 19 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjdk11/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/alpine3.16:build 2 | 3 | USER root 4 | RUN apk add openjdk11 5 | USER devsecops 6 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjdk11/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:build-push-docker-image 14 | vars: 15 | DOCKER_IMAGE_NAME: alpine3.16 16 | DOCKER_IMAGE_TAG: openjdk11 17 | - defer: 18 | task: common:cleanup-workdir 19 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjdk17/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/alpine3.16:build 2 | 3 | USER root 4 | RUN apk add openjdk17 5 | #USER devsecops 6 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjdk17/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', 
'../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:docker-pipeline 14 | vars: 15 | DOCKER_BUILD_TYPE: nerdctl 16 | DOCKER_IMAGE_NAME: alpine3.16 17 | DOCKER_IMAGE_TAG: openjdk17 18 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjre11/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/alpine3.16:run 2 | 3 | USER root 4 | RUN apk add openjdk11-jre-headless 5 | USER devsecops 6 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjre11/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | includes: 4 | common: ../../common 5 | 6 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 7 | 8 | tasks: 9 | default: 10 | cmds: 11 | - task: common:build-push-docker-image 12 | vars: 13 | DOCKER_IMAGE_NAME: alpine3.16 14 | DOCKER_IMAGE_TAG: openjre11 15 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjre17/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/alpine3.16:run 2 | 3 | USER root 4 | RUN apk add openjdk17-jre-headless 5 | USER devsecops 6 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-openjre17/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | includes: 4 | common: ../../common 5 | 6 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 7 | 8 | tasks: 9 | default: 10 | cmds: 11 | - task: common:build-push-docker-image 12 | vars: 13 | DOCKER_IMAGE_NAME: alpine3.16 14 | DOCKER_IMAGE_TAG: openjre17 15 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-run/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.16 2 | 3 | # Install basic tools 4 | RUN set -eux \ 5 | && apk --no-cache add \ 6 | bash \ 7 | ca-certificates \ 8 | curl \ 9 | gettext \ 10 | jq \ 11 | vim \ 12 | wget \ 13 | zip 14 | 15 | # add user with UID and GID 1000 to be used instead of root 16 | ENV USER_ID=1000 17 | ENV GROUP_ID=1000 18 | ENV USER_NAME=devsecops 19 | ENV GROUP_NAME=devsecops 20 | ENV USER_HOMEDIR=/home/devsecops 21 | 22 | RUN addgroup -g ${GROUP_ID} ${GROUP_NAME} \ 23 | && adduser -u ${USER_ID} -G ${GROUP_NAME} -D -h ${USER_HOMEDIR} ${USER_NAME} 24 | 25 | # Cleanup 26 | RUN rm -rf /var/cache/apk/* 27 | 28 | # Switch to non-root user 29 | USER devsecops 30 | 31 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-run/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:build-push-docker-image 14 | vars: 15 | DOCKER_IMAGE_NAME: alpine3.16 16 | DOCKER_IMAGE_TAG: 
run 17 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/alpine3.16-run/scan_results/trivy-dockerfile-Dockerfile.json: -------------------------------------------------------------------------------- 1 | { 2 | "SchemaVersion": 2, 3 | "ArtifactName": "Dockerfile", 4 | "ArtifactType": "filesystem", 5 | "Metadata": { 6 | "ImageConfig": { 7 | "architecture": "", 8 | "created": "0001-01-01T00:00:00Z", 9 | "os": "", 10 | "rootfs": { 11 | "type": "", 12 | "diff_ids": null 13 | }, 14 | "config": {} 15 | } 16 | }, 17 | "Results": [ 18 | { 19 | "Target": "Dockerfile", 20 | "Class": "config", 21 | "Type": "dockerfile", 22 | "MisconfSummary": { 23 | "Successes": 22, 24 | "Failures": 0, 25 | "Exceptions": 0 26 | } 27 | } 28 | ] 29 | } 30 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-base/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/library/ubuntu:22.04 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | # Install basic tools 6 | RUN set -eux \ 7 | && apt-get update -y \ 8 | && mkdir -p /usr/share/man/man1 \ 9 | && apt-get install -y --no-install-recommends \ 10 | apt-transport-https \ 11 | bash \ 12 | build-essential \ 13 | ca-certificates \ 14 | cmake \ 15 | curl \ 16 | gettext-base \ 17 | git-all \ 18 | gnupg-agent \ 19 | gnupg2 \ 20 | jq \ 21 | lsb-release \ 22 | lsof \ 23 | software-properties-common \ 24 | unzip \ 25 | vim \ 26 | wget \ 27 | zip 28 | 29 | 30 | # Install Docker 31 | RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg \ 32 | && echo \ 33 | "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ 34 | $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null \ 35 | && apt-get update \ 36 | && apt-get install docker-ce docker-ce-cli containerd.io -y 37 | 38 | # add user with UID and GID 1000 to be used instead of root in Dockerfiles derived from this one 39 | ENV USER_ID=1000 40 | ENV USER_NAME=devsecops 41 | ENV USER_HOMEDIR=/home/devsecops 42 | 43 | RUN useradd -u ${USER_ID} -U -m -d ${USER_HOMEDIR} ${USER_NAME} 44 | 45 | # Cleanup 46 | RUN rm -rf /var/lib/apt/lists/* /var/cache/apt/* 47 | 48 | # Become non-root user 49 | USER devsecops 50 | 51 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-base/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:build-push-docker-image 14 | vars: 15 | DOCKER_IMAGE_NAME: ubuntu22.04 16 | DOCKER_IMAGE_TAG: base 17 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-cicd/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/ubuntu22.04:base 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | ENV go_version=1.18.6 5 | ENV cosign_version=1.12.1 6 | ENV crane_version=0.11.0 7 | ENV ko_version=0.12.0 8 | 9 | USER root 10 | 11 | # install go 12 | #RUN \ 
13 | #curl -L https://golang.org/dl/go${go_version}.linux-amd64.tar.gz -o /tmp/go${go_version}.linux-amd64.tar.gz \ 14 | #&& tar -C /usr/local -xzf /tmp/go${go_version}.linux-amd64.tar.gz \ 15 | #&& mv /usr/local/go /usr/local/go${go_version} \ 16 | #&& ln -snf /usr/local/go${go_version}/bin/go /usr/local/bin/go 17 | 18 | # install cosign 19 | RUN \ 20 | cd /tmp \ 21 | && wget "https://github.com/sigstore/cosign/releases/download/v${cosign_version}/cosign-linux-amd64" \ 22 | && mv cosign-linux-amd64 /usr/local/bin/cosign \ 23 | && chmod +x /usr/local/bin/cosign 24 | 25 | # install crane and gcrane 26 | RUN \ 27 | mkdir -p /tmp/crane \ 28 | && cd /tmp/crane \ 29 | && curl -sL "https://github.com/google/go-containerregistry/releases/download/v${crane_version}/go-containerregistry_Linux_x86_64.tar.gz" > go-containerregistry.tar.gz \ 30 | && tar xvfz go-containerregistry.tar.gz \ 31 | && mv *crane /usr/local/bin \ 32 | && crane version \ 33 | && gcrane version \ 34 | && cd /tmp \ 35 | && rm -rf crane 36 | 37 | # install ko 38 | RUN \ 39 | mkdir -p /tmp/ko \ 40 | && cd /tmp/ko \ 41 | && curl -sL "https://github.com/ko-build/ko/releases/download/v${ko_version}/ko_${ko_version}_Linux_x86_64.tar.gz" > ko.tar.gz \ 42 | && tar xvfz ko.tar.gz \ 43 | && mv ko /usr/local/bin \ 44 | && ko version \ 45 | && cd /tmp \ 46 | && rm -rf ko 47 | 48 | # install trivy 49 | RUN \ 50 | wget -qO - https://aquasecurity.github.io/trivy-repo/deb/public.key | apt-key add - \ 51 | && echo deb https://aquasecurity.github.io/trivy-repo/deb $(lsb_release -sc) main | tee -a /etc/apt/sources.list.d/trivy.list \ 52 | && apt-get update \ 53 | && apt-get install trivy 54 | 55 | # install semgrep 56 | RUN \ 57 | apt-get install -y python3-pip \ 58 | && python3 -m pip install semgrep 59 | 60 | # install grype 61 | RUN \ 62 | curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin \ 63 | && chmod +x /usr/local/bin/grype 64 | 65 | # install syft 66 | RUN \ 67 | curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin \ 68 | && chmod +x /usr/local/bin/syft 69 | 70 | # Add $HOME/go/bin to $PATH 71 | RUN \ 72 | echo 'PATH=$PATH:$HOME/go/bin' >> /root/.bashrc 73 | 74 | # Cleanup 75 | RUN rm -rf /var/lib/apt/lists/* /var/cache/apt/* 76 | 77 | # Switch to non-root user 78 | #USER devsecops 79 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-cicd/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:build-push-docker-image 14 | vars: 15 | DOCKER_IMAGE_NAME: ubuntu22.04 16 | DOCKER_IMAGE_TAG: cicd 17 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-node16/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/ubuntu22.04:base 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | USER root 6 | 7 | # Install node 16 8 | RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - 9 | RUN apt-get install -y nodejs 10 | 11 | # Cleanup 12 | RUN rm -rf /var/lib/apt/lists/* /var/cache/apt/* 13 | 14 | # Switch to non-root user 15 | USER 
devsecops 16 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-node16/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | 11 | tasks: 12 | default: 13 | cmds: 14 | - task: common:build-push-docker-image 15 | vars: 16 | DOCKER_IMAGE_NAME: ubuntu22.04 17 | DOCKER_IMAGE_TAG: node16 18 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-openjdk11/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/ubuntu22.04:base 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | USER root 6 | 7 | # Install default-jdk 8 | RUN apt update -y \ 9 | && apt install default-jdk -y \ 10 | && apt install maven -y 11 | 12 | # Cleanup 13 | RUN rm -rf /var/lib/apt/lists/* /var/cache/apt/* 14 | 15 | # Switch to non-root user 16 | USER devsecops 17 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-openjdk11/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:build-push-docker-image 14 | vars: 15 | DOCKER_IMAGE_NAME: ubuntu22.04 16 | DOCKER_IMAGE_TAG: openjdk11 17 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-openjdk17/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/codepraxis-io/ubuntu22.04:base 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | USER root 6 | 7 | # Install openjdk-17 8 | RUN apt update -y \ 9 | && apt install openjdk-17-jdk -y \ 10 | && apt install maven -y 11 | 12 | # Cleanup 13 | RUN rm -rf /var/lib/apt/lists/* /var/cache/apt/* 14 | 15 | # Switch to non-root user 16 | USER devsecops 17 | -------------------------------------------------------------------------------- /taskfiles/build-curated-docker-images/ubuntu22.04-openjdk17/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets', '../../.env', '../../.secrets'] 9 | 10 | tasks: 11 | default: 12 | cmds: 13 | - task: common:build-push-docker-image 14 | vars: 15 | DOCKER_IMAGE_NAME: ubuntu22.04 16 | DOCKER_IMAGE_TAG: openjdk17 17 | -------------------------------------------------------------------------------- /taskfiles/build-java-gradle-spring-music/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets'] 9 | env: 10 | APP_VERSION: 6.0.0 # needs to be the same as in gradle.properties 11 | BUILD_DIR: build/libs 12 | BUILD_SCRIPT: .github/scripts/build-app.sh 13 | GH_REPO: spring-music 14 | 
WORKDIR: 15 | sh: mktemp -d -p . -t workspace-XXXXXXXXXX 16 | ARTIFACT_DIR_NAME: "artifacts" 17 | SCAN_RESULT_DIR_NAME: "scan_results" 18 | SRC_DIR_NAME: "sourcecode" 19 | ARTIFACT_DIR: "{{.WORKDIR}}/{{.ARTIFACT_DIR_NAME}}" 20 | SRC_DIR: "{{.WORKDIR}}/{{.SRC_DIR_NAME}}" 21 | SCAN_RESULT_DIR: "./{{.SCAN_RESULT_DIR_NAME}}" 22 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 23 | DOCKER_COMMAND: "docker" 24 | DOCKER_BUILD_TYPE: "docker" 25 | DOCKER_IMAGE_NAME: "{{.GH_REPO}}" 26 | DOCKER_IMAGE_SCANNER: "trivy" 27 | DOCKERFILE_SCANNER: "trivy" 28 | DOCKER_SBOM_GENERATOR: "syft" 29 | STOP_ON_CRITICAL_VULNS: "false" 30 | 31 | tasks: 32 | amazoncorretto: 33 | cmds: 34 | - task: common:java-pipeline 35 | vars: 36 | DOCKER_BUILD_TYPE: "kaniko" 37 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 38 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-kaniko-amazoncorretto-17-alpine3-15" 39 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/amazoncorretto:17.0.4-alpine3.15" 40 | DOCKERFILE_NAME: "Dockerfile.amazoncorretto-17-alpine3-15" 41 | - defer: 42 | task: common:cleanup-workdir 43 | 44 | curated-alpine: 45 | cmds: 46 | - task: common:java-pipeline 47 | vars: 48 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 49 | DOCKER_IMAGE_SCANNER: "grype" 50 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-curated-alpine3.16-openjre17" 51 | DOCKERFILE_NAME: "Dockerfile.curated-alpine3.16-openjre17" 52 | - defer: 53 | task: common:cleanup-workdir 54 | 55 | temurin: 56 | cmds: 57 | - task: common:java-pipeline 58 | vars: 59 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 60 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-temurin-17-jre-jammy" 61 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 62 | DOCKERFILE_NAME: "Dockerfile.eclipse-temurin-17-jre-jammy" 63 | #STOP_ON_CRITICAL_VULNS: "true" 64 | - defer: 65 | task: common:cleanup-workdir 66 | 67 | distroless: 68 | cmds: 69 | - task: common:java-pipeline 70 | vars: 71 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 72 | DOCKER_IMAGE_SCANNER: "grype" 73 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-java17-debian11" 74 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/distroless/java17-debian11" 75 | DOCKERFILE_NAME: "Dockerfile.distroless-java17-debian11" 76 | DOCKERFILE_SCANNER: "semgrep" 77 | - defer: 78 | task: common:cleanup-workdir 79 | 80 | distroless-multistage: 81 | cmds: 82 | - task: common:git-clone 83 | - task: common:docker-pipeline 84 | vars: 85 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-java11-debian11-multistage" 86 | DOCKERFILE_NAME: "Dockerfile.distroless-java11-debian11-multistage" 87 | VERIFY_BASE_IMAGE: "false" 88 | - defer: 89 | task: common:cleanup-workdir 90 | 91 | cloud-native-buildpacks: 92 | cmds: 93 | - task: common:git-clone 94 | - task: common:docker-pipeline 95 | vars: 96 | DOCKER_BUILD_TYPE: "cnb" 97 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-cnb-java11" 98 | VERIFY_BASE_IMAGE: "false" 99 | - defer: 100 | task: common:cleanup-workdir 101 | 102 | jib: 103 | cmds: 104 | - task: common:java-pipeline 105 | vars: 106 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 107 | DOCKER_BUILD_TYPE: "jib" 108 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-jib-eclipse-temurin-17-jre-jammy" 109 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 110 | - defer: 111 | task: common:cleanup-workdir 112 | -------------------------------------------------------------------------------- /taskfiles/build-java-maven-sample-springboot/Taskfile-containerd.yaml: -------------------------------------------------------------------------------- 1 | # 
https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets'] 9 | env: 10 | APP_VERSION: 0.0.1 11 | BUILD_DIR: target 12 | BUILD_SCRIPT: .github/scripts/build-app.sh 13 | GH_REPO: dddive-springboot 14 | WORKDIR: 15 | sh: mktemp -d -p . -t workspace-XXXXXXXXXX 16 | ARTIFACT_DIR_NAME: "artifacts" 17 | SCAN_RESULT_DIR_NAME: "scan_results" 18 | SRC_DIR_NAME: "sourcecode" 19 | ARTIFACT_DIR: "{{.WORKDIR}}/{{.ARTIFACT_DIR_NAME}}" 20 | SRC_DIR: "{{.WORKDIR}}/{{.SRC_DIR_NAME}}" 21 | SCAN_RESULT_DIR: "./{{.SCAN_RESULT_DIR_NAME}}" 22 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 23 | DOCKER_COMMAND: "nerdctl" 24 | DOCKER_BUILD_TYPE: "nerdctl" 25 | DOCKER_IMAGE_NAME: "{{.GH_REPO}}" 26 | DOCKER_IMAGE_SCANNER: "trivy" 27 | DOCKERFILE_SCANNER: "trivy" 28 | DOCKER_SBOM_GENERATOR: "syft" 29 | STOP_ON_CRITICAL_VULNS: "false" 30 | 31 | tasks: 32 | amazoncorretto: 33 | cmds: 34 | - task: common:java-pipeline 35 | vars: 36 | DOCKER_BUILD_TYPE: "kaniko" 37 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 38 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-kaniko-amazoncorretto-17-alpine3-15-nerdctl" 39 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/amazoncorretto:17.0.4-alpine3.15" 40 | DOCKERFILE_NAME: "Dockerfile.amazoncorretto-17-alpine3-15" 41 | - defer: 42 | task: common:cleanup-workdir 43 | 44 | curated-alpine: 45 | cmds: 46 | - task: common:java-pipeline 47 | vars: 48 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 49 | DOCKER_IMAGE_SCANNER: "grype" 50 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-curated-alpine3.16-openjdk17-nerdctl" 51 | DOCKERFILE_NAME: "Dockerfile.curated-alpine3.16-openjdk17" 52 | - defer: 53 | task: common:cleanup-workdir 54 | 55 | temurin: 56 | cmds: 57 | - task: common:java-pipeline 58 | vars: 59 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 60 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-eclipse-temurin-17-jre-jammy-nerdctl" 61 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 62 | DOCKERFILE_NAME: "Dockerfile.eclipse-temurin-17-jre-jammy" 63 | #STOP_ON_CRITICAL_VULNS: "true" 64 | - defer: 65 | task: common:cleanup-workdir 66 | 67 | distroless: 68 | cmds: 69 | - task: common:java-pipeline 70 | vars: 71 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 72 | DOCKER_IMAGE_SCANNER: "grype" 73 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-java17-debian11-nerdctl" 74 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/distroless/java17-debian11" 75 | DOCKERFILE_NAME: "Dockerfile.distroless-java17-debian11" 76 | DOCKERFILE_SCANNER: "semgrep" 77 | - defer: 78 | task: common:cleanup-workdir 79 | 80 | distroless-multistage: 81 | cmds: 82 | - task: common:git-clone 83 | - task: common:docker-pipeline 84 | vars: 85 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-buildx-distroless-java11-debian11-multistage-nerdctl" 86 | DOCKERFILE_NAME: "Dockerfile.distroless-java11-debian11-multistage" 87 | VERIFY_BASE_IMAGE: "false" 88 | - defer: 89 | task: common:cleanup-workdir 90 | 91 | cloud-native-buildpacks: 92 | cmds: 93 | - task: common:git-clone 94 | - task: common:docker-pipeline 95 | vars: 96 | DOCKER_BUILD_TYPE: "cnb" 97 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-cnb-java11-nerdctl" 98 | VERIFY_BASE_IMAGE: "false" 99 | - defer: 100 | task: common:cleanup-workdir 101 | 102 | jib: 103 | cmds: 104 | - task: common:java-pipeline 105 | vars: 106 | BUILD_SCRIPT: .github/scripts/build-app-jib.sh 107 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 108 | DOCKER_BUILD_TYPE: "jib" 109 | DOCKER_IMAGE_TAG: 
"{{.APP_VERSION}}-jib-eclipse-temurin-17-jre-jammy-nerdctl" 110 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 111 | - defer: 112 | task: common:cleanup-workdir 113 | -------------------------------------------------------------------------------- /taskfiles/build-java-maven-sample-springboot/Taskfile-docker.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets'] 9 | env: 10 | APP_VERSION: 0.0.2 11 | BUILD_DIR: target 12 | BUILD_SCRIPT: .github/scripts/build-app.sh 13 | GH_REPO: dddive-springboot 14 | WORKDIR: 15 | sh: mktemp -d -p . -t workspace-XXXXXXXXXX 16 | ARTIFACT_DIR_NAME: "artifacts" 17 | SCAN_RESULT_DIR_NAME: "scan_results" 18 | SRC_DIR_NAME: "sourcecode" 19 | ARTIFACT_DIR: "{{.WORKDIR}}/{{.ARTIFACT_DIR_NAME}}" 20 | SRC_DIR: "{{.WORKDIR}}/{{.SRC_DIR_NAME}}" 21 | SCAN_RESULT_DIR: "./{{.SCAN_RESULT_DIR_NAME}}" 22 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 23 | DOCKER_COMMAND: "docker" 24 | DOCKER_BUILD_TYPE: "docker" 25 | DOCKER_IMAGE_NAME: "{{.GH_REPO}}" 26 | DOCKER_IMAGE_SCANNER: "trivy" 27 | DOCKERFILE_SCANNER: "trivy" 28 | DOCKER_SBOM_GENERATOR: "syft" 29 | STOP_ON_CRITICAL_VULNS: "false" 30 | 31 | tasks: 32 | amazoncorretto: 33 | cmds: 34 | - task: common:java-pipeline 35 | vars: 36 | DOCKER_BUILD_TYPE: "kaniko" 37 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 38 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-kaniko-amazoncorretto-17-alpine3-15" 39 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/amazoncorretto:17.0.4-alpine3.15" 40 | DOCKERFILE_NAME: "Dockerfile.amazoncorretto-17-alpine3-15" 41 | - defer: 42 | task: common:cleanup-workdir 43 | 44 | curated-alpine: 45 | cmds: 46 | - task: common:java-pipeline 47 | vars: 48 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 49 | DOCKER_IMAGE_SCANNER: "grype" 50 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-curated-alpine3.16-openjdk17" 51 | DOCKERFILE_NAME: "Dockerfile.curated-alpine3.16-openjdk17" 52 | - defer: 53 | task: common:cleanup-workdir 54 | 55 | temurin: 56 | cmds: 57 | - task: common:java-pipeline 58 | vars: 59 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 60 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-eclipse-temurin-17-jre-jammy" 61 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 62 | DOCKERFILE_NAME: "Dockerfile.eclipse-temurin-17-jre-jammy" 63 | DOCKERFILE_SCANNER: "hadolint" 64 | #STOP_ON_CRITICAL_VULNS: "true" 65 | - defer: 66 | task: common:cleanup-workdir 67 | 68 | distroless: 69 | cmds: 70 | - task: common:java-pipeline 71 | vars: 72 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 73 | DOCKER_IMAGE_SCANNER: "grype" 74 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-java17-debian11" 75 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/distroless/java17-debian11" 76 | DOCKERFILE_NAME: "Dockerfile.distroless-java17-debian11" 77 | DOCKERFILE_SCANNER: "semgrep" 78 | - defer: 79 | task: common:cleanup-workdir 80 | 81 | distroless-multistage: 82 | cmds: 83 | - task: common:git-clone 84 | - task: common:docker-pipeline 85 | vars: 86 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-java11-debian11-multistage" 87 | DOCKERFILE_NAME: "Dockerfile.distroless-java11-debian11-multistage" 88 | VERIFY_BASE_IMAGE: "false" 89 | - defer: 90 | task: common:cleanup-workdir 91 | 92 | cloud-native-buildpacks: 93 | cmds: 94 | - task: common:git-clone 95 | - task: common:docker-pipeline 96 | vars: 97 | DOCKER_BUILD_TYPE: "cnb" 
98 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-cnb-java11" 99 | VERIFY_BASE_IMAGE: "false" 100 | - defer: 101 | task: common:cleanup-workdir 102 | 103 | jib: 104 | cmds: 105 | - task: common:java-pipeline 106 | vars: 107 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 108 | DOCKER_BUILD_TYPE: "jib" 109 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-jib-eclipse-temurin-17-jre-jammy" 110 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 111 | - defer: 112 | task: common:cleanup-workdir 113 | -------------------------------------------------------------------------------- /taskfiles/build-java-maven-sample-springboot/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | Taskfile-docker.yaml -------------------------------------------------------------------------------- /taskfiles/build-jib-springboot-helloworld/Taskfile-containerd.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets'] 9 | env: 10 | APP_VERSION: 0.0.1 11 | BUILD_DIR: target 12 | BUILD_SCRIPT: .github/scripts/build-app.sh 13 | GH_REPO: springboot-helloworld 14 | WORKDIR: 15 | sh: mktemp -d -p . -t workspace-XXXXXXXXXX 16 | ARTIFACT_DIR_NAME: "artifacts" 17 | SCAN_RESULT_DIR_NAME: "scan_results" 18 | SRC_DIR_NAME: "sourcecode" 19 | ARTIFACT_DIR: "{{.WORKDIR}}/{{.ARTIFACT_DIR_NAME}}" 20 | SRC_DIR: "{{.WORKDIR}}/{{.SRC_DIR_NAME}}" 21 | SCAN_RESULT_DIR: "./{{.SCAN_RESULT_DIR_NAME}}" 22 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 23 | DOCKER_COMMAND: "nerdctl" 24 | DOCKER_BUILD_TYPE: "nerdctl" 25 | DOCKER_IMAGE_NAME: "{{.GH_REPO}}" 26 | DOCKER_IMAGE_SCANNER: "trivy" 27 | DOCKERFILE_SCANNER: "trivy" 28 | DOCKER_SBOM_GENERATOR: "syft" 29 | STOP_ON_CRITICAL_VULNS: "false" 30 | 31 | tasks: 32 | amazoncorretto: 33 | cmds: 34 | - task: common:java-pipeline 35 | vars: 36 | DOCKER_BUILD_TYPE: "kaniko" 37 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 38 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-kaniko-amazoncorretto-17-alpine3-15-nerdctl" 39 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/amazoncorretto:17.0.4-alpine3.15" 40 | DOCKERFILE_NAME: "Dockerfile.amazoncorretto-17-alpine3-15" 41 | - defer: 42 | task: common:cleanup-workdir 43 | 44 | curated-alpine: 45 | cmds: 46 | - task: common:java-pipeline 47 | vars: 48 | DOCKER_BUILD_IMAGE: "alpine3.16:openjdk17" 49 | DOCKER_IMAGE_SCANNER: "grype" 50 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-curated-alpine3.16-openjdk17-nerdctl" 51 | DOCKERFILE_NAME: "Dockerfile.curated-alpine3.16-openjdk17" 52 | - defer: 53 | task: common:cleanup-workdir 54 | 55 | temurin: 56 | cmds: 57 | - task: common:java-pipeline 58 | vars: 59 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 60 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-eclipse-temurin-17-jre-jammy-nerdctl" 61 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 62 | DOCKERFILE_NAME: "Dockerfile.eclipse-temurin-17-jre-jammy" 63 | #STOP_ON_CRITICAL_VULNS: "true" 64 | - defer: 65 | task: common:cleanup-workdir 66 | 67 | distroless: 68 | cmds: 69 | - task: common:java-pipeline 70 | vars: 71 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 72 | DOCKER_IMAGE_SCANNER: "grype" 73 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-java17-debian11-nerdctl" 74 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/distroless/java17-debian11" 75 | DOCKERFILE_NAME: "Dockerfile.distroless-java17-debian11" 76 | DOCKERFILE_SCANNER: 
"semgrep" 77 | - defer: 78 | task: common:cleanup-workdir 79 | 80 | distroless-multistage: 81 | cmds: 82 | - task: common:git-clone 83 | - task: common:docker-pipeline 84 | vars: 85 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-buildx-distroless-java11-debian11-multistage-nerdctl" 86 | DOCKERFILE_NAME: "Dockerfile.distroless-java11-debian11-multistage" 87 | VERIFY_BASE_IMAGE: "false" 88 | - defer: 89 | task: common:cleanup-workdir 90 | 91 | cloud-native-buildpacks: 92 | cmds: 93 | - task: common:git-clone 94 | - task: common:docker-pipeline 95 | vars: 96 | DOCKER_BUILD_TYPE: "cnb" 97 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-cnb-java11-nerdctl" 98 | VERIFY_BASE_IMAGE: "false" 99 | - defer: 100 | task: common:cleanup-workdir 101 | 102 | jib: 103 | cmds: 104 | - task: common:java-pipeline 105 | vars: 106 | BUILD_SCRIPT: .github/scripts/build-app-jib.sh 107 | DOCKER_BUILD_IMAGE: "ubuntu22.04:openjdk17" 108 | DOCKER_BUILD_TYPE: "jib" 109 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-jib-eclipse-temurin-17-jre-jammy-nerdctl" 110 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/library/eclipse-temurin:17-jre-jammy" 111 | - defer: 112 | task: common:cleanup-workdir 113 | -------------------------------------------------------------------------------- /taskfiles/build-jib-springboot-helloworld/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | Taskfile-containerd.yaml -------------------------------------------------------------------------------- /taskfiles/build-python-flask/Taskfile-docker.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets'] 9 | env: 10 | APP_VERSION: 0.0.1 11 | BUILD_SCRIPT: .github/scripts/build-app.sh 12 | GH_REPO: flask-bootstrap 13 | WORKDIR: 14 | sh: mktemp -d -p . 
-t workspace-XXXXXXXXXX 15 | ARTIFACT_DIR_NAME: "artifacts" 16 | SCAN_RESULT_DIR_NAME: "scan_results" 17 | SRC_DIR_NAME: "sourcecode" 18 | ARTIFACT_DIR: "{{.WORKDIR}}/{{.ARTIFACT_DIR_NAME}}" 19 | SRC_DIR: "{{.WORKDIR}}/{{.SRC_DIR_NAME}}" 20 | SCAN_RESULT_DIR: "./{{.SCAN_RESULT_DIR_NAME}}" 21 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 22 | DOCKER_BUILD_TYPE: "docker" 23 | DOCKER_COMMAND: "docker" 24 | DOCKER_IMAGE_NAME: "{{.GH_REPO}}" 25 | DOCKER_IMAGE_SCANNER: "trivy" 26 | DOCKERFILE_SCANNER: "trivy" 27 | DOCKER_SBOM_GENERATOR: "syft" 28 | STOP_ON_CRITICAL_VULNS: "false" 29 | 30 | tasks: 31 | ubuntu: 32 | cmds: 33 | - task: common:python-pipeline 34 | vars: 35 | DOCKER_BUILD_IMAGE: "ubuntu22.04:python3.10-build" 36 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-ubuntu22.04-python3.10" 37 | DOCKERFILE_NAME: "Dockerfile" 38 | - defer: 39 | task: common:cleanup-workdir 40 | 41 | ubuntu-kaniko: 42 | cmds: 43 | - task: common:python-pipeline 44 | vars: 45 | DOCKER_BUILD_IMAGE: "ubuntu22.04:python3.10-build" 46 | DOCKER_BUILD_TYPE: "kaniko" 47 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-kaniko-python3.10" 48 | DOCKERFILE_NAME: "Dockerfile" 49 | - defer: 50 | task: common:cleanup-workdir 51 | 52 | alpine: 53 | cmds: 54 | - task: common:python-pipeline 55 | vars: 56 | DOCKER_BUILD_IMAGE: "alpine3.16:python3.10-build" 57 | DOCKER_IMAGE_SCANNER: "grype" 58 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-alpine3.16-python3.10" 59 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/alpine3.16:python3.10-run" 60 | DOCKERFILE_NAME: "Dockerfile" 61 | - defer: 62 | task: common:cleanup-workdir 63 | 64 | distroless: 65 | cmds: 66 | - task: common:python-pipeline 67 | vars: 68 | DOCKER_BUILD_IMAGE: "ubuntu22.04:python3.10-build" 69 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-python3-debian11" 70 | #DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/distroless/python3-debian11" 71 | DOCKERFILE_NAME: "Dockerfile.distroless" 72 | DOCKERFILE_SCANNER: "semgrep" 73 | VERIFY_BASE_IMAGE: "false" 74 | - defer: 75 | task: common:cleanup-workdir 76 | 77 | distroless-multistage: 78 | cmds: 79 | - task: common:git-clone 80 | - task: common:docker-pipeline 81 | vars: 82 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-python3-debian11-multistage" 83 | DOCKERFILE_NAME: "Dockerfile.distroless-multistage" 84 | VERIFY_BASE_IMAGE: "false" 85 | - defer: 86 | task: common:cleanup-workdir 87 | 88 | cloud-native-buildpacks: 89 | cmds: 90 | - task: common:git-clone 91 | - task: common:docker-pipeline 92 | vars: 93 | DOCKER_BUILD_TYPE: "cnb" 94 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-cnb" 95 | VERIFY_BASE_IMAGE: "false" 96 | - defer: 97 | task: common:cleanup-workdir 98 | -------------------------------------------------------------------------------- /taskfiles/build-python-flask/Taskfile-podman.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | includes: 6 | common: ../common 7 | 8 | dotenv: ['.env', '.secrets', '../.env', '../.secrets'] 9 | env: 10 | APP_VERSION: 0.0.1 11 | BUILD_SCRIPT: .github/scripts/build-app.sh 12 | GH_REPO: flask-bootstrap 13 | WORKDIR: 14 | sh: mktemp -d -p . 
-t workspace-XXXXXXXXXX 15 | ARTIFACT_DIR_NAME: "artifacts" 16 | SCAN_RESULT_DIR_NAME: "scan_results" 17 | SRC_DIR_NAME: "sourcecode" 18 | ARTIFACT_DIR: "{{.WORKDIR}}/{{.ARTIFACT_DIR_NAME}}" 19 | SRC_DIR: "{{.WORKDIR}}/{{.SRC_DIR_NAME}}" 20 | SCAN_RESULT_DIR: "./{{.SCAN_RESULT_DIR_NAME}}" 21 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 22 | DOCKER_COMMAND: "podman" 23 | DOCKER_BUILD_TYPE: "podman" 24 | DOCKER_IMAGE_NAME: "{{.GH_REPO}}" 25 | DOCKER_IMAGE_SCANNER: "trivy" 26 | DOCKERFILE_SCANNER: "trivy" 27 | DOCKER_SBOM_GENERATOR: "syft" 28 | STOP_ON_CRITICAL_VULNS: "false" 29 | 30 | tasks: 31 | ubuntu: 32 | cmds: 33 | - task: common:python-pipeline 34 | vars: 35 | DOCKER_BUILD_IMAGE: "ubuntu22.04:python3.10-build" 36 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-ubuntu22.04-python3.10-podman" 37 | DOCKERFILE_NAME: "Dockerfile" 38 | - defer: 39 | task: common:cleanup-workdir 40 | 41 | ubuntu-kaniko: 42 | cmds: 43 | - task: common:python-pipeline 44 | vars: 45 | DOCKER_BUILD_IMAGE: "ubuntu22.04:python3.10-build" 46 | DOCKER_BUILD_TYPE: "kaniko" 47 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-kaniko-python3.10-podman" 48 | DOCKERFILE_NAME: "Dockerfile" 49 | - defer: 50 | task: common:cleanup-workdir 51 | 52 | alpine: 53 | cmds: 54 | - task: common:python-pipeline 55 | vars: 56 | DOCKER_BUILD_IMAGE: "alpine3.16:python3.10-build" 57 | DOCKER_IMAGE_SCANNER: "grype" 58 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-alpine3.16-python3.10-podman" 59 | DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/alpine3.16:python3.10-run" 60 | DOCKERFILE_NAME: "Dockerfile" 61 | - defer: 62 | task: common:cleanup-workdir 63 | 64 | distroless: 65 | cmds: 66 | - task: common:python-pipeline 67 | vars: 68 | DOCKER_BUILD_IMAGE: "ubuntu22.04:python3.10-build" 69 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-python3-debian11-podman" 70 | #DOCKER_RUN_IMAGE: "ghcr.io/codepraxis-io/distroless/python3-debian11" 71 | DOCKERFILE_NAME: "Dockerfile.distroless" 72 | DOCKERFILE_SCANNER: "semgrep" 73 | VERIFY_BASE_IMAGE: "false" 74 | - defer: 75 | task: common:cleanup-workdir 76 | 77 | distroless-multistage: 78 | cmds: 79 | - task: common:git-clone 80 | - task: common:docker-pipeline 81 | vars: 82 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-distroless-python3-debian11-multistage-podman" 83 | DOCKERFILE_NAME: "Dockerfile.distroless-multistage" 84 | VERIFY_BASE_IMAGE: "false" 85 | - defer: 86 | task: common:cleanup-workdir 87 | 88 | cloud-native-buildpacks: 89 | cmds: 90 | - task: common:git-clone 91 | - task: common:docker-pipeline 92 | vars: 93 | DOCKER_BUILD_TYPE: "cnb" 94 | DOCKER_IMAGE_TAG: "{{.APP_VERSION}}-cnb" 95 | VERIFY_BASE_IMAGE: "false" 96 | - defer: 97 | task: common:cleanup-workdir 98 | -------------------------------------------------------------------------------- /taskfiles/build-python-flask/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | Taskfile-docker.yaml -------------------------------------------------------------------------------- /taskfiles/common/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | # https://taskfile.dev 2 | 3 | version: '3' 4 | 5 | tasks: 6 | cleanup-workdir: 7 | cmds: 8 | - sudo rm -rf {{.WORKDIR}} 9 | 10 | git-clone: 11 | cmds: 12 | - chmod 755 {{.WORKDIR}} 13 | - rm -rf {{.SRC_DIR}}; mkdir -p {{.SRC_DIR}} 14 | - cd {{.SRC_DIR}}; git clone https://github.com/{{.GH_ORG}}/{{.GH_REPO}}.git 15 | 16 | login-docker-registry: 17 | cmds: 18 | - | 19 | echo $GH_PAT | {{.DOCKER_COMMAND}} login {{.DOCKER_REGISTRY}} -u 
{{.DOCKER_USERNAME}} --password-stdin 20 | 21 | create-scan-result-directory: 22 | cmds: 23 | - | 24 | CUSTOM_SCAN_RESULT_DIR={{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}} 25 | mkdir -p $CUSTOM_SCAN_RESULT_DIR 26 | 27 | build-docker-image: 28 | cmds: 29 | - | 30 | FROM_IMAGE="{{.DOCKER_RUN_IMAGE}}" 31 | if [ "{{.VERIFY_BASE_IMAGE}}" == "true" ]; then 32 | if [ "$FROM_IMAGE" == "" ]; then 33 | FROM_IMAGE=$(grep FROM {{.DOCKER_DIR}}/{{.DOCKERFILE_NAME}} | sed -r 's/FROM\s+//g') 34 | if [ "$FROM_IMAGE" == '${BASE_IMAGE}' ]; then 35 | FROM_IMAGE=$(grep 'ARG BASE_IMAGE' {{.DOCKER_DIR}}/{{.DOCKERFILE_NAME}} | sed -r 's/ARG BASE_IMAGE=//g') 36 | fi 37 | fi 38 | cosign verify --key {{.COSIGN_PUBLIC_KEY}} $FROM_IMAGE 39 | fi 40 | if [ "{{.DOCKER_BUILD_TYPE}}" == "kaniko" ]; then 41 | echo Building {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} with kaniko 42 | #docker run -v `pwd`/{{.DOCKER_DIR}}:/workspace -v {{.HOME}}/.docker/config.json:/kaniko/.docker/config.json:ro gcr.io/kaniko-project/executor:latest --dockerfile /workspace/{{.DOCKERFILE_NAME}} --destination {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} --context dir:///workspace/ 43 | if [ "{{.DOCKER_COMMAND}}" == "podman" ]; then 44 | REGISTRY_AUTH_FILE={{.XDG_RUNTIME_DIR}}/containers/auth.json 45 | {{.DOCKER_COMMAND}} run --privileged -v `pwd`/{{.DOCKER_DIR}}:/workspace -v $REGISTRY_AUTH_FILE:/kaniko/.docker/config.json:ro gcr.io/kaniko-project/executor:latest --dockerfile /workspace/{{.DOCKERFILE_NAME}} --destination {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} --context dir:///workspace/ --tarPath /workspace/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar --no-push 46 | elif [ "{{.DOCKER_COMMAND}}" == "nerdctl" ]; then 47 | REGISTRY_AUTH_FILE={{.HOME}}/.docker/config.json 48 | {{.DOCKER_COMMAND}} run --cni-path=/opt/cni/bin -v `pwd`/{{.DOCKER_DIR}}:/workspace -v $REGISTRY_AUTH_FILE:/kaniko/.docker/config.json:ro gcr.io/kaniko-project/executor:latest --dockerfile /workspace/{{.DOCKERFILE_NAME}} --destination {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} --context dir:///workspace/ --tarPath /workspace/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar --no-push 49 | else 50 | REGISTRY_AUTH_FILE={{.HOME}}/.docker/config.json 51 | {{.DOCKER_COMMAND}} run -v `pwd`/{{.DOCKER_DIR}}:/workspace -v $REGISTRY_AUTH_FILE:/kaniko/.docker/config.json:ro gcr.io/kaniko-project/executor:latest --dockerfile /workspace/{{.DOCKERFILE_NAME}} --destination {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} --context dir:///workspace/ --tarPath /workspace/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar --no-push 52 | fi 53 | fi 54 | if [ "{{.DOCKER_BUILD_TYPE}}" == "cnb" ]; then 55 | echo Building {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} with Cloud Native Buildpacks 56 | pack config default-builder paketobuildpacks/builder:base 57 | pushd {{.DOCKER_DIR}} 58 | echo Running: pack build {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 59 | pack build {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 60 | popd 61 | fi 62 | if [ "{{.DOCKER_BUILD_TYPE}}" == "jib" ] && [ "{{.DOCKER_COMMAND}}" != "nerdctl" ]; then 63 | echo Building {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} with jib 64 | pushd {{.DOCKER_DIR}} 65 | echo Running: jib jar --from {{.DOCKER_RUN_IMAGE}} --target docker://{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} {{.BUILD_DIR}}/app.jar 66 | jib jar --from {{.DOCKER_RUN_IMAGE}} --target 
docker://{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} {{.BUILD_DIR}}/app.jar 67 | popd 68 | fi 69 | if [ "{{.DOCKER_BUILD_TYPE}}" == "docker" ]; then 70 | echo Building {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} with docker 71 | pushd {{.DOCKER_DIR}} 72 | ARGS="-t {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} . -f {{.DOCKERFILE_NAME}}" 73 | if [ "{{.DOCKER_RUN_IMAGE}}" != "" ]; then 74 | ARGS="$ARGS --build-arg BASE_IMAGE={{.DOCKER_RUN_IMAGE}}" 75 | fi 76 | echo Running: docker buildx build $ARGS 77 | docker buildx build $ARGS 78 | popd 79 | fi 80 | if [ "{{.DOCKER_BUILD_TYPE}}" == "nerdctl" ]; then 81 | echo Building {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} with nerdctl/buildkitd/containerd 82 | pushd {{.DOCKER_DIR}} 83 | ARGS="-t {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} . -f {{.DOCKERFILE_NAME}}" 84 | if [ "{{.DOCKER_RUN_IMAGE}}" ]; then 85 | ARGS="$ARGS --build-arg BASE_IMAGE={{.DOCKER_RUN_IMAGE}}" 86 | fi 87 | echo Running: nerdctl build $ARGS 88 | nerdctl build $ARGS 89 | echo Saving nerdctl image as tar file for scanning and SBOM 90 | echo Running: nerdctl save -o {{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 91 | nerdctl save -o {{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 92 | popd 93 | fi 94 | if [ "{{.DOCKER_BUILD_TYPE}}" == "podman" ]; then 95 | echo Building {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} with podman 96 | pushd {{.DOCKER_DIR}} 97 | ARGS="-t {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} . -f {{.DOCKERFILE_NAME}}" 98 | if [ "{{.DOCKER_RUN_IMAGE}}" ]; then 99 | ARGS="$ARGS --build-arg BASE_IMAGE={{.DOCKER_RUN_IMAGE}}" 100 | fi 101 | echo Running: podman build $ARGS 102 | podman build $ARGS 103 | echo Saving podman image as tar file for scanning and SBOM 104 | echo Running: podman save -o {{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 105 | podman save -o {{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 106 | popd 107 | fi 108 | 109 | scan-docker-image: 110 | cmds: 111 | - | 112 | CUSTOM_SCAN_RESULT_DIR={{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}} 113 | if [ "{{.DOCKER_IMAGE_SCANNER}}" == "trivy" ]; then 114 | ARGS="-f json -o $CUSTOM_SCAN_RESULT_DIR/trivy-scan.json" 115 | if [ "{{.STOP_ON_CRITICAL_VULNS}}" == "true" ]; then 116 | ARGS="--exit-code 1 --severity CRITICAL $ARGS" 117 | fi 118 | if [ "{{.DOCKER_BUILD_TYPE}}" == "docker" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "cnb" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "jib" ]; then 119 | ARGS="$ARGS {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}}" 120 | fi 121 | if [ "{{.DOCKER_BUILD_TYPE}}" == "kaniko" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "podman" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "nerdctl" ]; then 122 | ARGS="$ARGS --input {{.DOCKER_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar" 123 | fi 124 | echo Running: trivy image $ARGS 125 | trivy image $ARGS 126 | # Generate sarif format as well 127 | ARGS="-f sarif -o $CUSTOM_SCAN_RESULT_DIR/trivy-scan-sarif.json" 128 | if [ "{{.STOP_ON_CRITICAL_VULNS}}" == "true" ]; then 129 | ARGS="--exit-code 1 --severity CRITICAL $ARGS" 130 | fi 131 | if [ "{{.DOCKER_BUILD_TYPE}}" == "docker" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "cnb" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "jib" ]; then 132 | ARGS="$ARGS {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}}" 133 | fi 134 | if [ "{{.DOCKER_BUILD_TYPE}}" == "kaniko" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "podman" ] || [ "{{.DOCKER_BUILD_TYPE}}" == 
"nerdctl" ]; then 135 | ARGS="$ARGS --input {{.DOCKER_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar" 136 | fi 137 | echo Running: trivy image $ARGS 138 | trivy image $ARGS 139 | fi 140 | if [ "{{.DOCKER_IMAGE_SCANNER}}" == "grype" ]; then 141 | ARGS="-o json --file $CUSTOM_SCAN_RESULT_DIR/grype-scan.json" 142 | if [ "{{.STOP_ON_CRITICAL_VULNS}}" == "true" ]; then 143 | ARGS="-f Critical $ARGS" 144 | fi 145 | if [ "{{.DOCKER_BUILD_TYPE}}" == "docker" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "cnb" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "jib" ]; then 146 | ARGS="$ARGS {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}}" 147 | fi 148 | if [ "{{.DOCKER_BUILD_TYPE}}" == "kaniko" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "podman" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "nerdctl" ]; then 149 | ARGS="$ARGS {{.DOCKER_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar" 150 | fi 151 | echo Running: grype $ARGS 152 | grype $ARGS 153 | # Generate sarif format as well 154 | ARGS="-o sarif --file $CUSTOM_SCAN_RESULT_DIR/grype-scan-sarif.json" 155 | if [ "{{.STOP_ON_CRITICAL_VULNS}}" == "true" ]; then 156 | ARGS="-f Critical $ARGS" 157 | fi 158 | if [ "{{.DOCKER_BUILD_TYPE}}" == "docker" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "cnb" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "jib" ]; then 159 | ARGS="$ARGS {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}}" 160 | fi 161 | if [ "{{.DOCKER_BUILD_TYPE}}" == "kaniko" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "podman" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "nerdctl" ]; then 162 | ARGS="$ARGS {{.DOCKER_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar" 163 | fi 164 | echo Running: grype $ARGS 165 | grype $ARGS 166 | fi 167 | 168 | generate-sbom-docker-image: 169 | cmds: 170 | - | 171 | CUSTOM_SCAN_RESULT_DIR={{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}} 172 | if [ "{{.DOCKER_SBOM_GENERATOR}}" == "syft" ]; then 173 | ARGS="-o json --file $CUSTOM_SCAN_RESULT_DIR/syft-sbom.json packages {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}}" 174 | echo Running: syft $ARGS 175 | syft $ARGS 176 | # if [ "{{.DOCKER_BUILD_TYPE}}" == "docker" ] || [ "{{.DOCKER_BUILD_TYPE}}" == "nerdctl" ]; then 177 | # # Also create cosign attestation in one go 178 | # ARGS="-o json {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}}" 179 | # echo Running: syft attest $ARGS 180 | # COSIGN_PASSWORD=$(cat {{.HOME}}/.k) syft attest --key {{.COSIGN_PRIVATE_KEY}} $ARGS > {{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}/syft-sbom-att.json 181 | #fi 182 | fi 183 | 184 | scan-dockerfile: 185 | cmds: 186 | - | 187 | CUSTOM_SCAN_RESULT_DIR={{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}} 188 | if [ "{{.DOCKERFILE_SCANNER}}" == "semgrep" ]; then 189 | echo Running: semgrep --config=auto {{.DOCKER_DIR}}/{{.DOCKERFILE_NAME}} --json 190 | semgrep --config=auto {{.DOCKER_DIR}}/{{.DOCKERFILE_NAME}} --json | tee -a $CUSTOM_SCAN_RESULT_DIR/semgrep-dockerfile.json 191 | fi 192 | if [ "{{.DOCKERFILE_SCANNER}}" == "hadolint" ]; then 193 | echo Running: hadolint -f sarif {{.DOCKER_DIR}}/{{.DOCKERFILE_NAME}} 194 | hadolint -f sarif {{.DOCKER_DIR}}/{{.DOCKERFILE_NAME}} | tee -a $CUSTOM_SCAN_RESULT_DIR/hadolint-dockerfile.sarif 195 | fi 196 | if [ "{{.DOCKERFILE_SCANNER}}" == "trivy" ]; then 197 | ARGS="-f json -o $CUSTOM_SCAN_RESULT_DIR/trivy-dockerfile.json {{.DOCKER_DIR}}/{{.DOCKERFILE_NAME}}" 198 | echo Running: trivy config $ARGS 199 | trivy config $ARGS 200 | fi 201 | 202 | push-docker-image: 203 | cmds: 204 | - | 205 | 
REGISTRY_IMAGE_TAG={{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 206 | if [ "{{.DOCKER_BUILD_TYPE}}" == "kaniko" ]; then 207 | echo Running crane push {{.DOCKER_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar $REGISTRY_IMAGE_TAG 208 | crane push {{.DOCKER_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}}.tar $REGISTRY_IMAGE_TAG 209 | else 210 | {{.DOCKER_COMMAND}} tag {{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} $REGISTRY_IMAGE_TAG 211 | {{.DOCKER_COMMAND}} push $REGISTRY_IMAGE_TAG 212 | fi 213 | 214 | sign-docker-image: 215 | cmds: 216 | - COSIGN_PASSWORD=$(cat {{.HOME}}/.k) cosign sign --key {{.COSIGN_PRIVATE_KEY}} {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 217 | 218 | verify-signature-docker-image: 219 | cmds: 220 | - cosign verify --key {{.COSIGN_PUBLIC_KEY}} {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 221 | 222 | attach-scan-result-attestations-to-docker-image: 223 | cmds: 224 | - | 225 | CUSTOM_SCAN_RESULT_DIR={{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}} 226 | for scan_result_file in $(ls $CUSTOM_SCAN_RESULT_DIR | grep -v attestation); do 227 | echo Attaching attestation $scan_result_file to {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 228 | COSIGN_PASSWORD=$(cat {{.HOME}}/.k) cosign attest --predicate $CUSTOM_SCAN_RESULT_DIR/$scan_result_file --key {{.COSIGN_PRIVATE_KEY}} {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 229 | done 230 | 231 | verify-scan-result-attestations: 232 | cmds: 233 | - | 234 | CUSTOM_SCAN_RESULT_DIR={{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}} 235 | ATTESTATION_PAYLOAD=$CUSTOM_SCAN_RESULT_DIR/attestation_payload.json 236 | echo Verifying attestations attached to {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} 237 | cosign verify-attestation --key $COSIGN_PUBLIC_KEY {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_IMAGE_NAME}}:{{.DOCKER_IMAGE_TAG}} | jq --slurp 'map(.payload | @base64d | fromjson | .predicate.Data)' > $ATTESTATION_PAYLOAD 238 | keys=$(cat $ATTESTATION_PAYLOAD | jq -r 'keys[]') 239 | for key in $keys; do 240 | echo Saving payload \#$key to file $CUSTOM_SCAN_RESULT_DIR/attestation_payload_$key.json 241 | cat $ATTESTATION_PAYLOAD | jq -r ".[$key]" > $CUSTOM_SCAN_RESULT_DIR/attestation_payload_$key.json 242 | done 243 | 244 | build-app: 245 | cmds: 246 | - task: git-clone 247 | - task: login-docker-registry 248 | vars: 249 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 250 | DOCKER_COMMAND: '{{.DOCKER_COMMAND}}' 251 | - | 252 | if [ "{{.DOCKER_COMMAND}}" == "podman" ]; then 253 | echo Running: {{.DOCKER_COMMAND}} run --privileged -u root --rm -v `pwd`/{{.WORKDIR}}:/tmp/workspace {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_BUILD_IMAGE}} bash -c "cd /tmp/workspace/{{.SRC_DIR_NAME}}/{{.GH_REPO}}; bash {{.BUILD_SCRIPT}}" 254 | {{.DOCKER_COMMAND}} run --privileged -u root --rm -v `pwd`/{{.WORKDIR}}:/tmp/workspace {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_BUILD_IMAGE}} bash -c "cd /tmp/workspace/{{.SRC_DIR_NAME}}/{{.GH_REPO}}; bash {{.BUILD_SCRIPT}}" 255 | elif [ "{{.DOCKER_COMMAND}}" == "nerdctl" ]; then 256 | echo Running: {{.DOCKER_COMMAND}} run --cni-path=/opt/cni/bin --rm -v `pwd`/{{.WORKDIR}}:/tmp/workspace {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_BUILD_IMAGE}} bash -c "cd /tmp/workspace/{{.SRC_DIR_NAME}}/{{.GH_REPO}}; bash {{.BUILD_SCRIPT}}" 257 | {{.DOCKER_COMMAND}} run --cni-path=/opt/cni/bin --rm -v 
`pwd`/{{.WORKDIR}}:/tmp/workspace {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_BUILD_IMAGE}} bash -c "cd /tmp/workspace/{{.SRC_DIR_NAME}}/{{.GH_REPO}}; bash {{.BUILD_SCRIPT}}" 258 | else 259 | echo Running: {{.DOCKER_COMMAND}} run --rm -v `pwd`/{{.WORKDIR}}:/tmp/workspace {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_BUILD_IMAGE}} bash -c "cd /tmp/workspace/{{.SRC_DIR_NAME}}/{{.GH_REPO}}; bash {{.BUILD_SCRIPT}}" 260 | {{.DOCKER_COMMAND}} run --rm -v `pwd`/{{.WORKDIR}}:/tmp/workspace {{.DOCKER_REGISTRY}}/{{.GH_ORG}}/{{.DOCKER_BUILD_IMAGE}} bash -c "cd /tmp/workspace/{{.SRC_DIR_NAME}}/{{.GH_REPO}}; bash {{.BUILD_SCRIPT}}" 261 | fi 262 | # copy cycloneDX SBOM if it was generated 263 | CYCLONEDX_SBOM_FILE={{.SRC_DIR}}/{{.GH_REPO}}/cyclonedx-sbom.json 264 | CUSTOM_SCAN_RESULT_DIR={{.SCAN_RESULT_DIR}}/{{.DOCKER_IMAGE_NAME}}-{{.DOCKER_IMAGE_TAG}} 265 | if [ -f $CYCLONEDX_SBOM_FILE ]; then 266 | cp $CYCLONEDX_SBOM_FILE $CUSTOM_SCAN_RESULT_DIR 267 | fi 268 | 269 | publish-jar: 270 | cmds: 271 | - mkdir -p {{.ARTIFACT_DIR}} 272 | #- find {{.SRC_DIR}}/ -regextype posix-basic -regex ".*{{.BUILD_DIR}}\/{{.GH_REPO}}-{{.APP_VERSION}}\.jar" -exec cp {} {{.ARTIFACT_DIR}} \; 273 | - sudo mv {{.SRC_DIR}}/{{.GH_REPO}}/{{.BUILD_DIR}}/{{.GH_REPO}}-{{.APP_VERSION}}\.jar {{.ARTIFACT_DIR}} 274 | 275 | retrieve-jar: 276 | cmds: 277 | - mkdir -p {{.SRC_DIR}}/{{.GH_REPO}}/{{.BUILD_DIR}} 278 | - sudo mv {{.ARTIFACT_DIR}}/{{.GH_REPO}}-{{.APP_VERSION}}.jar {{.SRC_DIR}}/{{.GH_REPO}}/{{.BUILD_DIR}}/app.jar 279 | 280 | publish-venv: 281 | cmds: 282 | - mkdir -p {{.ARTIFACT_DIR}} 283 | - mv {{.SRC_DIR}}/{{.GH_REPO}}/venv {{.ARTIFACT_DIR}} 284 | 285 | retrieve-venv: 286 | cmds: 287 | - cp -r {{.ARTIFACT_DIR}}/venv {{.SRC_DIR}}/{{.GH_REPO}}/ 288 | 289 | docker-pipeline: 290 | cmds: 291 | - task: create-scan-result-directory 292 | vars: 293 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 294 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 295 | - task: login-docker-registry 296 | vars: 297 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 298 | DOCKER_COMMAND: '{{.DOCKER_COMMAND}}' 299 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 300 | - task: scan-dockerfile 301 | vars: 302 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 303 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 304 | DOCKER_DIR: "{{.DOCKER_DIR}}" 305 | DOCKERFILE_NAME: '{{.DOCKERFILE_NAME}}' 306 | DOCKERFILE_SCANNER: "{{.DOCKERFILE_SCANNER}}" 307 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 308 | - task: build-docker-image 309 | vars: 310 | DOCKER_DIR: '{{.DOCKER_DIR}}' 311 | DOCKER_BUILD_IMAGE: '{{.DOCKER_BUILD_IMAGE}}' 312 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 313 | DOCKER_COMMAND: '{{.DOCKER_COMMAND}}' 314 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 315 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 316 | DOCKER_RUN_IMAGE: '{{.DOCKER_RUN_IMAGE}}' 317 | DOCKERFILE_NAME: '{{.DOCKERFILE_NAME}}' 318 | VERIFY_BASE_IMAGE: '{{.VERIFY_BASE_IMAGE}}' 319 | - task: scan-docker-image 320 | vars: 321 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 322 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 323 | DOCKER_IMAGE_SCANNER: "{{.DOCKER_IMAGE_SCANNER}}" 324 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 325 | DOCKERFILE_SCANNER: "{{.DOCKERFILE_SCANNER}}" 326 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 327 | STOP_ON_CRITICAL_VULNS: "{{.STOP_ON_CRITICAL_VULNS}}" 328 | - task: push-docker-image 329 | vars: 330 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 331 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 332 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 333 | - task: generate-sbom-docker-image 334 | vars: 
335 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 336 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 337 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 338 | DOCKER_SBOM_GENERATOR: "{{.DOCKER_SBOM_GENERATOR}}" 339 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 340 | - task: sign-docker-image 341 | vars: 342 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 343 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 344 | - task: attach-scan-result-attestations-to-docker-image 345 | vars: 346 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 347 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 348 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 349 | - task: verify-signature-docker-image 350 | vars: 351 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 352 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 353 | - task: verify-scan-result-attestations 354 | vars: 355 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 356 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 357 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 358 | 359 | java-pipeline: 360 | cmds: 361 | - task: create-scan-result-directory 362 | vars: 363 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 364 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 365 | - task: build-app 366 | vars: 367 | BUILD_SCRIPT: "{{.BUILD_SCRIPT}}" 368 | DOCKER_BUILD_IMAGE: "{{.DOCKER_BUILD_IMAGE}}" 369 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 370 | DOCKER_COMMAND: '{{.DOCKER_COMMAND}}' 371 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 372 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 373 | - task: publish-jar 374 | - task: retrieve-jar 375 | - task: docker-pipeline 376 | vars: 377 | DOCKER_BUILD_IMAGE: '{{.DOCKER_BUILD_IMAGE}}' 378 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 379 | DOCKER_COMMAND: '{{.DOCKER_COMMAND}}' 380 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 381 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 382 | DOCKER_IMAGE_SCANNER: "{{.DOCKER_IMAGE_SCANNER}}" 383 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 384 | DOCKER_RUN_IMAGE: '{{.DOCKER_RUN_IMAGE}}' 385 | DOCKER_SBOM_GENERATOR: "{{.DOCKER_SBOM_GENERATOR}}" 386 | DOCKERFILE_NAME: '{{.DOCKERFILE_NAME}}' 387 | DOCKERFILE_SCANNER: "{{.DOCKERFILE_SCANNER}}" 388 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 389 | STOP_ON_CRITICAL_VULNS: "{{.STOP_ON_CRITICAL_VULNS}}" 390 | VERIFY_BASE_IMAGE: '{{.VERIFY_BASE_IMAGE}}' 391 | 392 | python-pipeline: 393 | cmds: 394 | - task: create-scan-result-directory 395 | vars: 396 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 397 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 398 | - task: build-app 399 | vars: 400 | BUILD_SCRIPT: "{{.BUILD_SCRIPT}}" 401 | DOCKER_BUILD_IMAGE: "{{.DOCKER_BUILD_IMAGE}}" 402 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 403 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 404 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 405 | - task: publish-venv 406 | - task: retrieve-venv 407 | - task: docker-pipeline 408 | vars: 409 | DOCKER_BUILD_IMAGE: '{{.DOCKER_BUILD_IMAGE}}' 410 | DOCKER_BUILD_TYPE: '{{.DOCKER_BUILD_TYPE}}' 411 | DOCKER_COMMAND: '{{.DOCKER_COMMAND}}' 412 | DOCKER_DIR: "{{.SRC_DIR}}/{{.GH_REPO}}" 413 | DOCKER_IMAGE_NAME: '{{.DOCKER_IMAGE_NAME}}' 414 | DOCKER_IMAGE_SCANNER: "{{.DOCKER_IMAGE_SCANNER}}" 415 | DOCKER_IMAGE_TAG: '{{.DOCKER_IMAGE_TAG}}' 416 | DOCKER_RUN_IMAGE: '{{.DOCKER_RUN_IMAGE}}' 417 | DOCKER_SBOM_GENERATOR: "{{.DOCKER_SBOM_GENERATOR}}" 418 | DOCKERFILE_NAME: '{{.DOCKERFILE_NAME}}' 419 | DOCKERFILE_SCANNER: "{{.DOCKERFILE_SCANNER}}" 420 | SCAN_RESULT_DIR: "{{.SCAN_RESULT_DIR}}" 421 | STOP_ON_CRITICAL_VULNS: "{{.STOP_ON_CRITICAL_VULNS}}" 422 | VERIFY_BASE_IMAGE: '{{.VERIFY_BASE_IMAGE}}' 
423 | -------------------------------------------------------------------------------- /taskfiles/testing/Taskfile.yaml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | tasks: 4 | greet: 5 | vars: 6 | RECIPIENT: '{{default "World" .RECIPIENT}}' 7 | cmds: 8 | - echo "Hello, {{.RECIPIENT}}!" 9 | 10 | greet-pessimistically: 11 | cmds: 12 | - task: greet 13 | vars: {RECIPIENT: "Cruel World"} 14 | --------------------------------------------------------------------------------
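A minimal usage sketch for the Taskfiles above, assuming the go-task CLI (task) is installed and that the .env/.secrets files read via dotenv supply GH_ORG, GH_PAT, DOCKER_REGISTRY, DOCKER_USERNAME and the COSIGN_PRIVATE_KEY/COSIGN_PUBLIC_KEY paths; the task names and file paths come straight from the files shown above:

# Maven Spring Boot sample: build, scan, sign and attest the image through docker
cd taskfiles/build-java-maven-sample-springboot
task --taskfile Taskfile-docker.yaml temurin

# The same pipeline driven through containerd/nerdctl instead of docker
task --taskfile Taskfile-containerd.yaml distroless

# Python Flask sample with podman as the container engine
cd ../build-python-flask
task --taskfile Taskfile-podman.yaml ubuntu

Each variant task defers common:cleanup-workdir, so the temporary workspace-* directory created by mktemp is removed when the run finishes, including after a failed step.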