├── CONTRIBUTING.md
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
├── dependabot.yml
└── workflows
│ ├── codeql-analysis.yml
│ ├── keyless-salsa-integration.yaml
│ ├── main.yml
│ ├── ratchet.yml
│ └── service-account-salsa-integration.yml
├── .gitignore
├── .goreleaser.yml
├── .tool-versions
├── CODEOWNERS
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── STATUS.md
├── action.yml
├── cmd
└── main.go
├── cosign.pub
├── entrypoint.sh
├── go.mod
├── go.sum
├── integration-test
└── Dockerfile
├── pkg
├── build
│ ├── dependencies.go
│ ├── golang
│ │ ├── golang.go
│ │ ├── golang_test.go
│ │ └── testdata
│ │ │ └── golang
│ │ │ └── go.sum
│ ├── jvm
│ │ ├── gradle.go
│ │ ├── gradle_test.go
│ │ ├── mvn.go
│ │ ├── mvn_test.go
│ │ └── testdata
│ │ │ ├── gradle_output.txt
│ │ │ ├── jvm
│ │ │ ├── gradle-kts
│ │ │ │ ├── build.gradle.kts
│ │ │ │ ├── gradle.properties
│ │ │ │ ├── gradle
│ │ │ │ │ └── verification-metadata.xml
│ │ │ │ ├── gradlew
│ │ │ │ └── gradlew.bat
│ │ │ ├── gradle
│ │ │ │ ├── build.gradle
│ │ │ │ ├── gradle.properties
│ │ │ │ ├── gradle
│ │ │ │ │ ├── verification-metadata.xml
│ │ │ │ │ └── wrapper
│ │ │ │ │ │ ├── gradle-wrapper.jar
│ │ │ │ │ │ └── gradle-wrapper.properties
│ │ │ │ ├── gradlew
│ │ │ │ └── gradlew.bat
│ │ │ └── maven
│ │ │ │ ├── .m2
│ │ │ │ └── maven-settings.xml
│ │ │ │ ├── pom.xml
│ │ │ │ └── target
│ │ │ │ └── dependency
│ │ │ │ └── com
│ │ │ │ └── google
│ │ │ │ └── code
│ │ │ │ └── gson
│ │ │ │ └── gson
│ │ │ │ ├── 2.8.6
│ │ │ │ └── gson-2.8.6.jar
│ │ │ │ └── maven-metadata-local.xml
│ │ │ ├── target
│ │ │ └── dependency
│ │ │ │ └── org
│ │ │ │ ├── springframework
│ │ │ │ └── spring-core
│ │ │ │ │ └── 5.3.16
│ │ │ │ │ └── spring-core-5.3.16.jar
│ │ │ │ └── yaml
│ │ │ │ └── snakeyaml
│ │ │ │ └── 1.26
│ │ │ │ └── snakeyaml-1.26.jar
│ │ │ └── verification-metadata.xml
│ ├── nodejs
│ │ ├── npm.go
│ │ ├── npm_test.go
│ │ ├── testdata
│ │ │ └── nodejs
│ │ │ │ ├── npm
│ │ │ │ └── package-lock.json
│ │ │ │ └── yarn
│ │ │ │ └── yarn.lock
│ │ ├── yarn.go
│ │ └── yarn_test.go
│ ├── php
│ │ ├── composer.go
│ │ ├── composer_test.go
│ │ └── testdata
│ │ │ └── composer.lock
│ ├── test
│ │ └── buildtool_test_util.go
│ └── tool.go
├── clone
│ └── clone.go
├── commands
│ ├── attest.go
│ ├── attest_test.go
│ ├── clone.go
│ ├── find.go
│ ├── root.go
│ ├── scan.go
│ ├── testdata
│ │ └── cosign-verify-output.txt
│ └── version.go
├── config
│ └── scan.go
├── dsse
│ ├── dsse.go
│ ├── parse.go
│ ├── parse_test.go
│ └── testdata
│ │ ├── cosign-attestation.json
│ │ ├── cosign-dsse-attestation.json
│ │ ├── salsa.provenance
│ │ ├── slsa-attestation.json
│ │ └── test-statement.json
├── intoto
│ ├── buildconfig.go
│ ├── find.go
│ ├── find_test.go
│ ├── provenance.go
│ ├── provenance_options.go
│ ├── provenance_options_test.go
│ ├── provenance_test.go
│ └── testdata
│ │ └── cosign-dsse-attestation.json
├── utils
│ ├── exec.go
│ ├── exec_test.go
│ └── utils.go
└── vcs
│ ├── environment.go
│ ├── event.go
│ ├── event_test.go
│ ├── github.go
│ ├── github_build.go
│ ├── github_build_test.go
│ ├── github_ci.go
│ ├── github_ci_test.go
│ ├── github_runner.go
│ ├── github_runner_test.go
│ ├── github_test.go
│ ├── nonereproduceible.go
│ ├── resolve_context.go
│ ├── resolve_context_test.go
│ └── testdata
│ ├── github-context.json
│ ├── pull-request-event.json
│ ├── unknown-event.json
│ └── workflowrun-head-commit.json
└── salsa-sample.yaml
/ CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Salsa
2 |
3 | ## About
4 |
5 | Salsa CLI is a command-line tool to generate, sign and upload a [provenance](https://slsa.dev/provenance/v0.2).
6 |
7 | ## Developer installation
8 |
9 | If you have Go 1.19+ (see `go.mod`), you can set up a development environment:
10 |
11 | ```text
12 | $ git clone https://github.com/nais/salsa
13 | $ cd salsa
14 | $ make salsa
15 | $ $(go env GOPATH)/bin/salsa
16 | ```
17 |
18 | ## Prerequisites
19 |
20 | * Google Setup
21 |   * KMS is enabled in the project
22 |   * create a keyring
23 |   * create keys: `Elliptic Curve P-256 key SHA256 Digest`
24 |   * Service user in the project has the roles:
25 |     * Cloud KMS CryptoKey signer/verifier
26 |     * Cloud KMS viewer
27 | * Logged in to Google
28 |   * Set `GOOGLE_APPLICATION_CREDENTIALS` to the path of the .json file containing the service user credentials.
29 |
30 | ```text
31 | export GOOGLE_APPLICATION_CREDENTIALS=~/path/to/file/cosign-private-key.json
32 | ```
33 |
34 | * Install
35 | * Cosign: https://github.com/sigstore/cosign
36 |
37 | ## Commands
38 |
39 | clone: `clones the given project into a user-defined path`
40 |
41 | ```
42 | salsa clone --repo salsa --owner nais
43 | ```
44 |
45 | scan: `Scan files and dependencies for a given project and generate provenance`
46 |
47 | ```
48 | salsa scan --repo salsa
49 | ```
50 |
51 | attest: `sign and upload in-toto attestation`
52 |
53 | ```
54 | salsa attest --repo salsa --key gcpkms://projects/$PROJECT/locations/$LOCATION/keyRings/$KEYRING/cryptoKeys/$KEY/versions/$KEY_VERSION ttl.sh/salsax:1h
55 | ```
56 |
57 | attest: `verify and download in-toto attestation`
58 |
59 | ```
60 | salsa attest --verify --repo salsa --key gcpkms://projects/$PROJECT/locations/$LOCATION/keyRings/$KEYRING/cryptoKeys/$KEY/versions/$KEY_VERSION ttl.sh/salsax:1h
61 | ```
62 |
63 | find: `find artifact from attestations`
64 |
65 | ```
66 | salsa find go-crypto
67 | ```
68 |
69 | ## Info
70 |
71 | When testing locally, the image can be pushed to the ttl.sh registry, which offers free, short-lived (i.e. hours), anonymous
72 | container image hosting if you just want to try these commands out. Check
73 | out [Cosign](https://github.com/sigstore/cosign#quick-start) and [ttl.sh info](https://ttl.sh/).
74 |
75 | Instead of setting a bunch of flags, create a config file named ".salsa" (without extension) in your home directory:
76 |
77 | ```yml
78 | attest:
79 | key: gcpkms://projects/$PROJECT/locations/$LOCATION/keyRings/$KEYRING/cryptoKeys/$KEY/versions/$KEY_VERSION
80 | ...
81 | ```
82 |
83 | Another possibility is to set environment variables with the prefix `SALSA` (see "Configuration resolution" below for how the mapping works):
84 |
85 | ```
86 | SALSA_ATTEST_KEY
87 | ```
88 |
89 | ## Ratchet
90 |
91 | Ratchet is a tool for improving the security of CI/CD workflows by automating the process of pinning and unpinning
92 | upstream versions.
93 |
94 | More info in [ratchet](https://github.com/sethvargo/ratchet) repo.
95 |
96 | ### Commands
97 |
98 | The workflow [ratchet.yml](.github/workflows/ratchet.yml) checks that all actions and containers are pinned
99 | (except those marked `ratchet:exclude`) and only runs if there are changes in the `.github/workflows` folder.
100 |
101 | #### pin
102 |
103 | To pin a workflow, add the new workflow file, or add a new action to an existing workflow, and run:
104 |
105 | ```
106 | make pin workflow=my_workflow.yml
107 | ```
108 |
109 | #### update
110 |
111 | This only applies when Dependabot comes with a major update for an action, e.g. v2 to v3. You manually have to edit the
112 | corresponding ratchet constraint
113 |
114 | from:
115 |
116 | ```
117 | ratchet:actions/some-action@v2
118 | ```
119 |
120 | to:
121 |
122 | ```
123 | ratchet:actions/some-action@v3
124 | ```
125 |
126 | and then run update and commit your changes:
127 |
128 | ```
129 | make update workflow=my_workflow.yml
130 | ```
131 |
132 | Otherwise, just approve the Dependabot PR.
133 |
134 | #### check
135 |
136 | Check if a workflow is pinned:
137 |
138 | ```
139 | make check workflow=my_workflow.yml
140 | ```
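141 |
142 | ## Configuration resolution
143 |
144 | The `.salsa` config file, the `SALSA_*` environment variables and the command flags described in the Info
145 | section are most likely wired together with [viper](https://github.com/spf13/viper), which is a direct
146 | dependency in `go.mod`. A minimal sketch of how that mapping typically works (illustrative only, not the
147 | CLI's actual bootstrap code):
148 |
149 | ```go
150 | package main
151 |
152 | import (
153 | 	"fmt"
154 | 	"os"
155 | 	"strings"
156 |
157 | 	"github.com/spf13/viper"
158 | )
159 |
160 | func main() {
161 | 	home, _ := os.UserHomeDir()
162 |
163 | 	v := viper.New()
164 | 	v.SetConfigName(".salsa") // the file is called ".salsa" ...
165 | 	v.SetConfigType("yaml")   // ... and has no extension, so the type must be given explicitly
166 | 	v.AddConfigPath(home)
167 |
168 | 	// SALSA_ATTEST_KEY overrides the nested key "attest.key" from the config file.
169 | 	v.SetEnvPrefix("salsa")
170 | 	v.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
171 | 	v.AutomaticEnv()
172 |
173 | 	_ = v.ReadInConfig() // the config file is optional, the environment variable alone also works
174 | 	fmt.Println(v.GetString("attest.key"))
175 | }
176 | ```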
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "gomod"
4 | directory: "/"
5 | schedule:
6 | interval: "weekly"
7 | - package-ecosystem: github-actions
8 | directory: "/"
9 | schedule:
10 | interval: "weekly"
11 | - package-ecosystem: "docker"
12 | directory: "/"
13 | schedule:
14 | interval: "daily"
15 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | name: CodeQL
2 | on:
3 | push:
4 | branches: [main]
5 | pull_request:
6 | branches: [main]
7 | paths-ignore:
8 | - '**.md'
9 | - 'CODEOWNERS'
10 | - 'LICENSE'
11 | - '.gitignore'
12 | - 'doc/**'
13 | - 'Makefile'
14 | jobs:
15 | analyze:
16 | name: Analyze
17 | runs-on: ubuntu-latest
18 | permissions:
19 | actions: read
20 | contents: read
21 | security-events: write
22 | strategy:
23 | fail-fast: false
24 | matrix:
25 | language: ['go']
26 | steps:
27 | - name: Checkout repository
28 | uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # ratchet:actions/checkout@v3
29 | - name: Initialize CodeQL
30 | uses: github/codeql-action/init@v2 # ratchet:exclude
31 | with:
32 | languages: ${{ matrix.language }}
33 | - name: Autobuild
34 | uses: github/codeql-action/autobuild@v2 # ratchet:exclude
35 | - name: Perform CodeQL Analysis
36 | uses: github/codeql-action/analyze@v2 # ratchet:exclude
37 |
--------------------------------------------------------------------------------
/.github/workflows/keyless-salsa-integration.yaml:
--------------------------------------------------------------------------------
1 | name: slsa keyless
2 | on:
3 | workflow_run:
4 | workflows: [Salsa build & release]
5 | types: [completed]
6 | branches: [main]
7 | env:
8 | IMAGE: ttl.sh/nais/salsa-keyless-test:1h
9 | jobs:
10 | on-failure:
11 | runs-on: ubuntu-20.04
12 | if: ${{ github.event.workflow_run.conclusion == 'failure' }}
13 | steps:
14 | - run: echo 'The triggering workflow failed' && exit 1
15 | keyless:
16 | permissions:
17 | contents: 'read'
18 | id-token: 'write'
19 | runs-on: ubuntu-20.04
20 | steps:
21 | - name: Checkout Code
22 | uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # ratchet:actions/checkout@v3
23 | - name: Authenticate to Google Cloud
24 | uses: google-github-actions/auth@e8df18b60c5dd38ba618c121b779307266153fbf # ratchet:google-github-actions/auth@v1.1.0
25 | id: google
26 | with:
27 | workload_identity_provider: ${{ secrets.SLSA_WORKLOAD_IDENTITY_PROVIDER }}
28 | service_account: cosign-keyless@plattformsikkerhet-dev-496e.iam.gserviceaccount.com
29 | token_format: "id_token"
30 | id_token_audience: sigstore
31 | id_token_include_email: true
32 | - name: Build and push
33 | id: docker
34 | uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # ratchet:docker/build-push-action@v4
35 | with:
36 | context: integration-test
37 | push: true
38 | tags: ${{ env.IMAGE }}
39 | - name: Generate provenance, sign and upload image
40 | uses: ./
41 | with:
42 | image_digest: ${{ steps.docker.outputs.digest }}
43 | registry: ttl.sh
44 | identity_token: ${{ steps.google.outputs.id_token }}
45 | env:
46 | COSIGN_EXPERIMENTAL: "true"
47 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Salsa build & release
2 | on:
3 | push:
4 | paths-ignore:
5 | - '**.md'
6 | - 'CODEOWNERS'
7 | - 'LICENSE'
8 | - '.gitignore'
9 | - 'doc/**'
10 | - 'Makefile'
11 | env:
12 | VERSION: v0.12
13 | IMAGE_NAME: ghcr.io/${{ github.repository }}
14 | COSIGN_VERSION: v2.2.2
15 | SYFT_VERSION: v0.44.1
16 | GO_RELEASER_VERSION: v1.11.2
17 | GRADLE_VERSION: 7.5.1
18 | PUSH: false
19 | jobs:
20 | set-version:
21 | runs-on: ubuntu-20.04
22 | outputs:
23 | version: ${{ steps.set-version.outputs.version }}
24 | steps:
25 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # ratchet:actions/checkout@v3
26 | - name: set version
27 | id: set-version
28 | run: |
29 | echo Faking a Semantic Version
30 | echo "version=${{ env.VERSION }}.$(date "+%Y%m%d%H%M%S")" >> $GITHUB_OUTPUT
31 | test:
32 | runs-on: ubuntu-20.04
33 | steps:
34 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # ratchet:actions/checkout@v3
35 | - uses: actions/setup-go@6edd4406fa81c3da01a34fa6f6343087c207a568 # ratchet:actions/setup-go@v3
36 | with:
37 | go-version-file: ./go.mod
38 | check-latest: true
39 | cache: true
40 | - name: Test Salsa
41 | run: make test
42 | build:
43 | outputs:
44 | cli-tag: ${{ steps.container-tags.outputs.cli-tag }}
45 | action-tag: ${{ steps.container-tags.outputs.action-tag }}
46 | digest: ${{ steps.docker_build.outputs.digest }}
47 | needs:
48 | - set-version
49 | - test
50 | runs-on: ubuntu-20.04
51 | steps:
52 | - name: Checkout latest code
53 | uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # ratchet:actions/checkout@v3
54 | with:
55 | fetch-depth: 0
56 | - name: Set up Go
57 | uses: actions/setup-go@6edd4406fa81c3da01a34fa6f6343087c207a568 # ratchet:actions/setup-go@v3
58 | with:
59 | go-version-file: ./go.mod
60 | check-latest: true
61 | cache: true
62 | - name: Create tag
63 | run: |
64 | git tag ${{ needs.set-version.outputs.version }}
65 | # - uses: navikt/github-app-token-generator@v1
66 | # id: get-homebrew-token
67 | # with:
68 | # private-key: ${{ secrets.NAIS_APP_PRIVATE_KEY }}
69 | # app-id: ${{ secrets.NAIS_APP_ID }}
70 | # repo: nais/homebrew-tap
71 | - name: Install cosign
72 | uses: sigstore/cosign-installer@9614fae9e5c5eddabb09f90a270fcb487c9f7149 # ratchet:sigstore/cosign-installer@v3.3.0
73 | with:
74 | cosign-release: ${{ env.COSIGN_VERSION }}
75 | - name: Install Syft
76 | uses: anchore/sbom-action/download-syft@422cb34a0f8b599678c41b21163ea6088edb2624 # ratchet:anchore/sbom-action/download-syft@v0.14.1
77 | with:
78 | syft-version: ${{ env.SYFT_VERSION }}
79 | - name: Put key on file
80 | run: |
81 | echo '${{ secrets.COSIGN_PRIVATE_KEY }}' > cosign.key
82 | - name: Run GoReleaser
83 | if: ${{ github.ref == 'refs/heads/main' }}
84 | uses: goreleaser/goreleaser-action@5fdedb94abba051217030cc86d4523cf3f02243d # ratchet:goreleaser/goreleaser-action@v4
85 | with:
86 | distribution: goreleaser
87 | version: ${{ env.GO_RELEASER_VERSION }}
88 | args: release -f .goreleaser.yml --rm-dist --debug
89 | env:
90 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
91 | PUSH_TOKEN: ${{ steps.get-homebrew-token.outputs.token }}
92 | COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
93 | - name: Set up Docker Buildx
94 | uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # ratchet:docker/setup-buildx-action@v2
95 | with:
96 | provenance: false
97 | - name: Login to Docker
98 | uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # ratchet:docker/login-action@v2
99 | with:
100 | registry: ghcr.io
101 | username: ${{ github.actor }}
102 | password: ${{ secrets.GITHUB_TOKEN }}
103 | - name: Create tags
104 | id: container-tags
105 | run: |
106 | echo "cli-tag=${{ env.IMAGE_NAME }}:${{ needs.set-version.outputs.version }}" >> $GITHUB_OUTPUT
107 | echo "action-tag=${{ env.IMAGE_NAME }}:${{ env.VERSION }}" >> $GITHUB_OUTPUT
108 | - name: Only push from main
109 | if: ${{ github.ref == 'refs/heads/main' }}
110 | run: |
111 | echo "PUSH=true" >> $GITHUB_ENV
112 | - name: Build and push
113 | uses: docker/build-push-action@0a97817b6ade9f46837855d676c4cca3a2471fc9 # ratchet:docker/build-push-action@v4
114 | id: docker_build
115 | with:
116 | push: ${{ env.PUSH }}
117 | tags: ${{ steps.container-tags.outputs.cli-tag }},${{ steps.container-tags.outputs.action-tag }}
118 | labels: version=${{ needs.set-version.outputs.version }},revision=${{ github.sha }}
119 | build-args: |
120 | COSIGN_VERSION=${{ env.COSIGN_VERSION }}
121 | GRADLE_VERSION=${{ env.GRADLE_VERSION }}
122 | - name: Update major/minor version tag
123 | if: ${{ github.ref == 'refs/heads/main' }}
124 | run: "git tag -f ${{ env.VERSION }}\ngit push -f origin ${{ env.VERSION }} \n"
125 | - name: Clean up
126 | if: ${{ always() }}
127 | run: "rm -f cosign.key \n"
128 | sign-attest:
129 | needs:
130 | - build
131 | runs-on: ubuntu-20.04
132 | permissions:
133 | packages: write
134 | contents: read
135 | id-token: write
136 | if: ${{ github.ref == 'refs/heads/main' }}
137 | env:
138 | DIGEST: "${{ needs.build.outputs.digest }}"
139 | steps:
140 | - name: Install cosign
141 | uses: sigstore/cosign-installer@ce50ea946c19e4bdba9127f76ba2fb00d8e95a96 # ratchet:sigstore/cosign-installer@v2.5.1
142 | with:
143 | cosign-release: ${{ env.COSIGN_VERSION }}
144 | - name: Login to Docker
145 | uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # ratchet:docker/login-action@v2
146 | with:
147 | registry: ghcr.io
148 | username: ${{ github.actor }}
149 | password: ${{ secrets.GITHUB_TOKEN }}
150 | - name: Generate SBOM
151 | id: sbom
152 | uses: aquasecurity/trivy-action@d43c1f16c00cfd3978dde6c07f4bbcf9eb6993ca # ratchet:aquasecurity/trivy-action@master
153 | with:
154 | scan-type: 'image'
155 | format: 'cyclonedx'
156 | output: 'sbom.json'
157 | image-ref: ${{ env.IMAGE_NAME }}@${{ env.DIGEST }}
158 |       - name: Sign Docker image and add signed attestation
159 | run: |
160 | echo '${{ secrets.COSIGN_PRIVATE_KEY }}' > cosign.key
161 | cosign sign --yes --key cosign.key ${{ env.IMAGE_NAME }}@${{ env.DIGEST }}
162 | cosign sign --yes --key cosign.key ${{ env.IMAGE_NAME }}@${{ env.DIGEST }}
163 | cosign attest --yes --tlog-upload=false --key cosign.key --predicate sbom.json --type cyclonedx ${{ env.IMAGE_NAME }}@${{ env.DIGEST }}
164 | env:
165 | COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
166 | - name: Clean up
167 | if: ${{ always() }}
168 | run: |
169 | rm -f cosign.key
170 |
--------------------------------------------------------------------------------
/.github/workflows/ratchet.yml:
--------------------------------------------------------------------------------
1 | name: Check pinned workflows
2 | on:
3 | push:
4 | paths:
5 | - '.github/workflows/**'
6 | jobs:
7 | ratchet:
8 | runs-on: ubuntu-20.04
9 | strategy:
10 | fail-fast: false
11 | matrix:
12 | asset: [
13 | main.yml,
14 | keyless-salsa-integration.yaml,
15 | service-account-salsa-integration.yml,
16 | golangci-lint.yml,
17 | ratchet.yml,
18 | codeql-analysis.yml
19 | ]
20 | steps:
21 | - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
22 | - name: Check if ${{matrix.asset}} is pinned
23 | uses: 'docker://ghcr.io/sethvargo/ratchet@sha256:e5b2409be0d1c82c71a6e60c49027e539f4c90636529e4f8b5c25a68b68a36d4' # ratchet:docker://ghcr.io/sethvargo/ratchet:0.3.0
24 | with:
25 | args: 'check .github/workflows/${{matrix.asset}}'
26 | entrypoint: /ratchet
27 | - name: Checks failed
28 | if: ${{ failure() }}
29 | run: |-
30 | echo see "'https://github.com/sethvargo/ratchet'" for more information.
31 | echo "'make check workflow=my_workflow.yml'"
32 | echo "'make update workflow=my_workflow.yml'"
33 | echo "'make pin workflow=my_workflow.yml'" for new workflow file && exit 1
34 |
--------------------------------------------------------------------------------
/.github/workflows/service-account-salsa-integration.yml:
--------------------------------------------------------------------------------
1 | name: nais Salsa integration
2 | on:
3 | workflow_run:
4 | workflows: [Salsa build & release]
5 | types: [completed]
6 | branches: [main]
7 | env:
8 | IMAGE: ttl.sh/nais/salsa-integration-test:1h
9 | jobs:
10 | on-failure:
11 | runs-on: ubuntu-20.04
12 | if: ${{ github.event.workflow_run.conclusion == 'failure' }}
13 | steps:
14 | - run: echo 'The triggering workflow failed' && exit 1
15 | on-success-generate-provenance:
16 | runs-on: ubuntu-20.04
17 | if: ${{ github.event.workflow_run.conclusion == 'success' }}
18 | steps:
19 | - run: echo 'The triggering workflow passed'
20 | - name: Checkout Code
21 | uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # ratchet:actions/checkout@v3
22 | # For demonstration purpose
23 | # Credentials needed to authenticate to google kms and sign image.
24 | - name: Authenticate to Google Cloud
25 | id: google
26 | uses: google-github-actions/auth@e8df18b60c5dd38ba618c121b779307266153fbf # ratchet:google-github-actions/auth@v0
27 | with:
28 | credentials_json: ${{ secrets.SALSA_CREDENTIALS }}
29 | # For demonstration purpose
30 | - name: Build and push
31 | id: docker
32 | uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # ratchet:docker/build-push-action@v4
33 | with:
34 | context: integration-test
35 | push: true
36 | tags: ${{ env.IMAGE }}
37 | # Added to a workflow
38 | - name: Generate provenance, sign and upload image
39 | id: salsa
40 | # nais/salsa@v...
41 | uses: ./
42 | with:
43 | registry: ttl.sh
44 | image_digest: ${{ steps.docker.outputs.digest }}
45 | key: ${{ secrets.SALSA_KMS_KEY }}
46 | # For demonstration purpose
47 | - name: Upload provenance
48 | uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # ratchet:actions/upload-artifact@v3
49 | with:
50 | path: |-
51 | ./${{ steps.salsa.outputs.provenance_file_path }}
52 | ./${{ steps.salsa.outputs.raw_file_path }}
53 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /bin/
2 | /.idea/
3 | /tmp/
4 | /cosign.key
5 | /cover.out
6 | pkg/build/jvm/testdata/jvm/target/
7 | .gradle/
8 | .vscode/
--------------------------------------------------------------------------------
/.goreleaser.yml:
--------------------------------------------------------------------------------
1 | project_name: nais-salsa
2 |
3 | before:
4 | hooks:
5 | - go mod tidy
6 |
7 | builds:
8 | - env: [ CGO_ENABLED=0 ]
9 | targets:
10 | - linux_amd64
11 | - windows_amd64
12 | - darwin_amd64
13 | - darwin_arm64
14 | id: nais
15 | dir: .
16 | main: ./cmd
17 | binary: salsa
18 | ldflags:
19 | - -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{ .CommitDate }} -X main.builtBy=goreleaser
20 |
21 | checksum:
22 | name_template: 'checksums.txt'
23 |
24 | archives:
25 | - format: tar.gz
26 | replacements:
27 | darwin: macOS
28 | files:
29 | - cosign.pub
30 | - dist/*.sig
31 | format_overrides:
32 | - goos: windows
33 | format: zip
34 |
35 | sboms:
36 | - artifacts: archive
37 |
38 | signs:
39 | - id: sign archives
40 | cmd: cosign
41 | stdin: '{{ .Env.COSIGN_PASSWORD }}'
42 | args: ["sign-blob", "--key=cosign.key", "--output=${signature}", "--yes", "${artifact}"]
43 | signature: "${artifact}.sig"
44 | artifacts: archive
45 | - id: sign checksum.txt
46 | cmd: cosign
47 | stdin: '{{ .Env.COSIGN_PASSWORD }}'
48 | args: ["sign-blob", "--key=cosign.key", "--output=${signature}", "--yes", "${artifact}"]
49 | signature: "${artifact}.sig"
50 | artifacts: checksum
51 | changelog:
52 | sort: asc
53 | # use: github
54 | filters:
55 | exclude:
56 | - '^docs:'
57 | - '^test:'
58 | - '^chore'
59 | - Merge pull request
60 | - Merge remote-tracking branch
61 | - Merge branch
62 | groups:
63 | - title: 'New Features'
64 | regexp: "^.*feat[(\\w)]*:+.*$"
65 | order: 0
66 | - title: 'Bug fixes'
67 | regexp: "^.*fix[(\\w)]*:+.*$"
68 | order: 10
69 | - title: 'Dependency updates'
70 | regexp: "^.*build(deps)[(\\w)]*:+.*$"
71 | order: 30
72 | - title: Other work
73 | order: 999
74 |
75 | release:
76 | prerelease: auto
77 | draft: true
78 | github:
79 | owner: nais
80 | name: salsa
81 | header: |
82 | ## {{.ProjectName}} ({{ .Version }})
83 | footer: |
84 | ## Enjoy some {{.ProjectName}} verde
85 | _Changelog_: https://github.com/nais/salsa/compare/{{ .PreviousTag }}...{{ .Tag }}
86 | Those were the changes on {{ .Tag }}!
87 | extra_files:
88 | - glob: "cosign.pub"
89 |
90 | #brews:
91 | # - tap:
92 | # owner: nais
93 | # name: homebrew-tap
94 | # token: "{{ .Env.PUSH_TOKEN }}"
95 | # name: salsa
96 | # homepage: "https://github.com/nais/salsa"
97 | # description: "Command-line interface for SLSA provenance"
98 | # license: MIT
99 | # folder: Formula
100 | # install: |
101 | # bin.install "salsa"
102 | # test: |
103 | # assert_match version, shell_output("#{bin}/salsa version")
104 | #
105 | #nfpms:
106 | # - package_name: salsa
107 | # file_name_template: "{{ .PackageName }}_{{ .Version }}"
108 | # vendor: NAV / nais team
109 | # homepage: "https://github.com/nais/salsa"
110 | # maintainer: NAV / nais team
111 | # description: "Command-line interface for SLSA provenance"
112 | # license: MIT
113 | # formats:
114 | # - deb
115 | # version_metadata: git
--------------------------------------------------------------------------------
/.tool-versions:
--------------------------------------------------------------------------------
1 | golang 1.19.1
2 | gradle 7.5.1
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @nais/pig-sikkerhet
2 |
3 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM golang:1.20.3 AS builder
2 |
3 | ENV GOOS=linux
4 | ENV CGO_ENABLED=0
5 |
6 | # Make Salsa
7 | WORKDIR /src
8 | COPY go.* /src/
9 | RUN go mod download
10 |
11 | COPY . /src
12 | RUN make salsa
13 |
14 | FROM maven:3.9.1-eclipse-temurin-17-alpine
15 |
16 | RUN apk add --no-cache ca-certificates docker jq httpie
17 |
18 | # Define a constant with the version of gradle you want to install
19 | ARG GRADLE_VERSION=7.5.1
20 | # Define the URL where gradle can be downloaded from
21 | ARG GRADLE_BASE_URL=https://services.gradle.org/distributions
22 | # Define the SHA key to validate the gradle download
23 | # obtained from here https://gradle.org/release-checksums/
24 | ARG GRADLE_SHA=f6b8596b10cce501591e92f229816aa4046424f3b24d771751b06779d58c8ec4
25 |
26 | # Create the directories, download gradle, validate the download, install it, remove downloaded file and set links
27 | RUN mkdir -p /usr/share/gradle /usr/share/gradle/ref \
28 | && echo "Downloading gradle hash" \
29 | && curl -fsSL -o /tmp/gradle.zip ${GRADLE_BASE_URL}/gradle-${GRADLE_VERSION}-bin.zip \
30 | \
31 | && echo "Checking download hash" \
32 | && echo "${GRADLE_SHA} /tmp/gradle.zip" | sha256sum -c - \
33 | \
34 |   && echo "Unzipping gradle" \
35 | && unzip -d /usr/share/gradle /tmp/gradle.zip \
36 | \
37 | && echo "Cleaning and setting links" \
38 | && rm -f /tmp/gradle.zip \
39 | && ln -s /usr/share/gradle/gradle-${GRADLE_VERSION} /usr/bin/gradle
40 |
41 | # Define environmental variables required by gradle
42 | ENV GRADLE_VERSION ${GRADLE_VERSION}
43 | ENV GRADLE_HOME /usr/bin/gradle
44 | ENV GRADLE_USER_HOME /cache
45 | ENV PATH $PATH:$GRADLE_HOME/bin
46 |
47 | # Import Salsa
48 | COPY --from=builder /src/bin/salsa /usr/local/bin/
49 | COPY --from=builder /src/salsa-sample.yaml .salsa.yaml
50 | RUN chmod +x /usr/local/bin/salsa
51 |
52 | # Verify and install Cosign
53 | ARG COSIGN_VERSION=v2.0.2
54 | ENV COSIGN_BINARY=cosign-linux-amd64
55 | ENV COSIGN_CHECKSUM=cosign_checksums.txt
56 | ENV COSIGN_PUBLIC_KEY=release-cosign.pub
57 | ENV COSIGN_SIG=cosign-linux-amd64.sig
58 |
59 | # Cosign urls
60 | ARG COSIGN_BASE_URL=https://github.com/sigstore/cosign/releases/download/$COSIGN_VERSION
61 | ARG COSIGN_CHECKSUM_URL=${COSIGN_BASE_URL}/${COSIGN_CHECKSUM}
62 | ARG COSIGN_BINARY_URL=${COSIGN_BASE_URL}/${COSIGN_BINARY}
63 | ARG COSIGN_PUBLIC_KEY_URL=${COSIGN_BASE_URL}/${COSIGN_PUBLIC_KEY}
64 | ARG COSIGN_SIG_URL=${COSIGN_BASE_URL}/${COSIGN_SIG}
65 |
66 | RUN echo "Download cosign checksum" \
67 | && curl -fsSL -o /tmp/${COSIGN_CHECKSUM} ${COSIGN_CHECKSUM_URL} \
68 | \
69 | && echo "Extract current checksum from: ${COSIGN_CHECKSUM}" \
70 | && export COSIGN_SHA256=$(grep -w ${COSIGN_BINARY} tmp/${COSIGN_CHECKSUM} | cut -d ' ' -f1) \
71 | \
72 | && echo "Download cosign ${COSIGN_BINARY} version: ${COSIGN_VERSION}" \
73 | && curl -fsSL -o /tmp/${COSIGN_BINARY} ${COSIGN_BINARY_URL} \
74 | \
75 | && echo "Verify checksum ${COSIGN_SHA256} with ${COSIGN_BINARY}" \
76 | && sha256sum /tmp/${COSIGN_BINARY} \
77 | && echo "${COSIGN_SHA256} /tmp/${COSIGN_BINARY}" | sha256sum -c - \
78 | \
79 | && echo "Move cosign to folder and make cosign executable" \
80 | && chmod +x /tmp/${COSIGN_BINARY} \
81 | && mkdir "tmp2" \
82 | && cp /tmp/${COSIGN_BINARY} tmp2/${COSIGN_BINARY} \
83 | && mv /tmp/${COSIGN_BINARY} /usr/local/bin/cosign \
84 | && chmod +x /usr/local/bin/cosign \
85 | \
86 | && echo "Verify ${COSIGN_BINARY} with public key and signature" \
87 | && curl -fsSL -o /tmp/${COSIGN_PUBLIC_KEY} ${COSIGN_PUBLIC_KEY_URL} \
88 | && curl -fsSL -o /tmp/${COSIGN_SIG} ${COSIGN_SIG_URL} \
89 | && cosign \
90 | && cosign verify-blob --key /tmp/${COSIGN_PUBLIC_KEY} --signature /tmp/${COSIGN_SIG} /tmp2/${COSIGN_BINARY}
91 |
92 | COPY entrypoint.sh /entrypoint.sh
93 | RUN chmod +x /entrypoint.sh
94 |
95 | ENTRYPOINT ["/entrypoint.sh"]
96 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 NAV
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | RATCHET_VERSION=0.3.1
2 | WORKFLOWS_PATH=.github/workflows
3 |
4 | .PHONY: ratchet
5 | ratchet:
6 | go install github.com/sethvargo/ratchet@v${RATCHET_VERSION} ; \
7 | chmod -R +x ${WORKFLOWS_PATH}/*
8 |
9 | .PHONY: pin
10 | pin: ratchet ## create a pinned workflow IF NOT already pinned
11 | ratchet pin "${WORKFLOWS_PATH}/${workflow}"
12 |
13 | .PHONY: update
14 | update: ratchet ## update pinned workflows
15 | ratchet update "${WORKFLOWS_PATH}/${workflow}"
16 |
17 | .PHONY: check
18 | check: ratchet ## check pinned workflows
19 | ratchet check "${WORKFLOWS_PATH}/${workflow}"
20 |
21 | salsa:
22 | go build -o bin/salsa cmd/main.go
23 | test: fmt vet
24 | go test ./... -coverprofile cover.out -short
25 | fmt:
26 | go fmt ./...
27 | vet:
28 | go vet ./...
29 |
30 | coverage.out:
31 | go test -race -v -count=1 -covermode=atomic -coverprofile=coverage.out ./... || true
32 |
33 | cover-html: coverage.out
34 | go tool cover -html=$<
35 |
--------------------------------------------------------------------------------
/STATUS.md:
--------------------------------------------------------------------------------
1 | # Status
2 |
3 | Proof of concept for a SLSA GitHub action / CLI.
4 |
5 | ## Relevant concepts to test
6 |
7 | * upload attestation somewhere
8 | * how to make attestations searchable
9 | * Fulcio and Rekor from sigstore to see where they can fit in
10 | * https://github.com/sigstore/fulcio:
11 | * Fulcio is a work in progress. There's working code and a running instance and a plan, but you should not
12 | attempt to try to actually use it for anything
13 | * Handle the ability to resolve packages that are private (or don't)
14 |
15 | ## Concepts tested so far
16 |
17 | Created simple CLI to test concepts:
18 |
19 | * sign attestation using DSSE (leverage some of sigstore functionality)
20 | * create an SBOM / in-toto attestation
21 | * clone github project
22 | * Should contain a Predicate for SLSA Provenance
23 | * list all dependencies in a gradle project
24 | * get all dependencies (including transitive) for a given repo and language
25 | * One language at a time
26 | * create attestation with materials based on dependencies
27 | * sign attestation with DSSE
28 | * sign docker image and put into attestation, using cosign
29 | * digest over dependencies etc in attestation
30 | * include build steps from workflow
31 | * create a pipeline where a "provenance" action can be used
32 | * how to get/add the digest for dependency artifacts for all build tools
33 | * explore cosign
34 |
35 | # Relevant links
36 |
37 | * https://github.com/in-toto/attestation/blob/main/spec/README.md
38 | * https://github.com/slsa-framework/slsa/blob/main/controls/attestations.md
39 | * https://github.com/secure-systems-lab/dsse
40 | * https://slsa.dev/provenance/v0.2
41 | * Mostly cosign, rekor and fulcio: https://docs.sigstore.dev/
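42 |
43 | ## Example: dependencies as provenance materials
44 |
45 | The "create attestation with materials based on dependencies" item above boils down to turning every
46 | resolved dependency into a URI plus a digest set. A small sketch using the exported helpers from
47 | `pkg/build` (illustrative; the example values are taken from the golang testdata):
48 |
49 | ```go
50 | package main
51 |
52 | import (
53 | 	"fmt"
54 |
55 | 	"github.com/nais/salsa/pkg/build"
56 | )
57 |
58 | func main() {
59 | 	// A resolved dependency, as produced by e.g. pkg/build/golang.
60 | 	checksum := build.Verification("sha256", "6f81a4fbb59d3ff7771d91fc109b19a6f57b12d0ce81a64bb6768d188bb569d0")
61 | 	dep := build.Dependence("github.com/google/uuid", "1.0.0", checksum)
62 |
63 | 	// Each dependency contributes one material: a URI and a digest set.
64 | 	fmt.Println(dep.ToUri())       // pkg:github.com/google/uuid:1.0.0
65 | 	fmt.Println(dep.ToDigestSet()) // map[sha256:6f81a4fbb59d...]
66 | }
67 | ```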
--------------------------------------------------------------------------------
/action.yml:
--------------------------------------------------------------------------------
1 | name: "nais SLSA Provenance Action"
2 | description: "Action to generate signed SLSA provenance"
3 | branding:
4 | icon: lock
5 | color: red
6 | inputs:
7 | registry:
8 | description: |-
9 | Registry to push to
10 | required: true
11 |
12 | image_digest:
13 | description: |-
14 | The image digest of the container to be attested
15 | required: true
16 |
17 | image:
18 | description: |-
19 | The tag of the docker container to sign.
20 | required: false
21 | default: ""
22 |
23 | repo_name:
24 | description: |-
25 |       Name of the generated provenance. Used as a relative path under "github.workspace".
26 | Defaults to "github.repository".
27 | required: false
28 | default: ${{ github.repository }}
29 |
30 | build_started_on:
31 | description: |-
32 |       Timestamp of when the build started. Defaults to the timestamp of the workflow's head commit;
33 |       if provided as input, the timestamp is passed to the salsa cli flag --build-started-on.
34 | Time format: YYYY-MM-DDTHH:MM:SSZ (RFC3339)
35 | required: false
36 | default: ""
37 |
38 | key:
39 | description: |-
40 | The key used to sign the attestation. Cloud Provider KMS key path.
41 | required: false
42 |
43 | identity_token:
44 | description: |-
45 |       Use the cosign keyless flow with short-lived secrets.
46 | required: false
47 |
48 | docker_user:
49 | description: |-
50 | Docker login user.
51 | Defaults to "github.actor".
52 | required: false
53 | default: "${{ github.actor }}"
54 |
55 | mvn_opts:
56 | description: |-
57 | A comma-delimited string with additional maven options.
58 | required: false
59 | default: ""
60 |
61 | verify_attestation:
62 | description: |-
63 | A boolean for enabling or disabling the verify stage of the attestation.
64 | required: false
65 | default: "true"
66 |
67 | github_token:
68 | description: |-
69 |       Normal use is "GITHUB_TOKEN". To fetch from a private repository, use a
70 |       PAT with at least the packages:read scope to install packages associated with other private repositories
71 |       (which GITHUB_TOKEN can't access).
72 | required: false
73 | default: ${{ github.token }}
74 |
75 | token_key_pattern:
76 | description: |-
77 |       Set when the github_token input is provided but the build tool configuration uses a key pattern
78 |       different from the default "GITHUB_TOKEN".
79 | required: false
80 | default: ""
81 |
82 | registry_access_token:
83 | description: |-
84 |       Access token for the registry. Used to fetch packages from GAR.
85 | required: false
86 | default: ""
87 |
88 | repo_dir:
89 | description: |-
90 | Internal value (do not set): root of directory to search for build files.
91 | Defaults to "github.workspace".
92 | required: false
93 | default: ${{ github.workspace }}
94 |
95 | repo_sub_dir:
96 | description: |-
97 |       Specify a subdirectory if the build file is not found in the root working directory.
98 | required: false
99 | default: ""
100 |
101 | github_context:
102 | description: |-
103 | Internal value (do not set): the "github" context object in json.
104 | The context is used when generating provenance.
105 | required: false
106 | default: ${{ toJSON(github) }}
107 |
108 | runner_context:
109 | description: |-
110 | Internal value (do not set): the "runner" context object in json.
111 | The context is used when generating provenance.
112 | required: false
113 | default: ${{ toJSON(runner) }}
114 |
115 | runs:
116 | using: "docker"
117 | image: "docker://ghcr.io/nais/salsa:v0.12"
118 | args:
119 | - ${{ inputs.repo_dir }}
120 | - ${{ inputs.repo_name }}
121 | - ${{ inputs.github_context }}
122 | - ${{ inputs.runner_context }}
123 | - ${{ inputs.image }}
124 | - ${{ inputs.env_context }}
125 | - ${{ inputs.repo_sub_dir }}
126 | - ${{ inputs.key }}
127 | - ${{ inputs.identity_token }}
128 | - ${{ inputs.mvn_opts }}
129 | - ${{ inputs.github_token }}
130 | - ${{ inputs.docker_user }}
131 | - ${{ inputs.token_key_pattern }}
132 | - ${{ inputs.build_started_on }}
133 | - ${{ inputs.registry_access_token }}
134 | - ${{ inputs.registry }}
135 | - ${{ inputs.image_digest }}
136 | - ${{ inputs.verify_attestation }}
137 |
--------------------------------------------------------------------------------
/cmd/main.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/commands"
5 | )
6 |
7 | var (
8 | 	// Set at build time by goreleaser via -ldflags (see .goreleaser.yml)
9 | version = "dev"
10 | commit = "none"
11 | date = "unknown"
12 | builtBy = "unknown"
13 | )
14 |
15 | func main() {
16 | commands.Execute(version, commit, date, builtBy)
17 | }
18 |
--------------------------------------------------------------------------------
/cosign.pub:
--------------------------------------------------------------------------------
1 | -----BEGIN PUBLIC KEY-----
2 | MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEV2DHtYN0SLatmBcI8Gz9qjPkR5OQ
3 | D+z9xeHOivLI2DMTsE6MKbE85kv4DBpJ0dKbsd5rQZEf3UWG0KtCNXTB4A==
4 | -----END PUBLIC KEY-----
5 |
--------------------------------------------------------------------------------
/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh -l
2 |
3 | setup() {
4 | echo "---------- Preparing pico de gallo SLSA ----------"
5 |
6 | REPO_NAME="${INPUT_REPO_NAME##*/}"
7 | if [ -z "$REPO_NAME" ]; then
8 | echo "REPO_NAME is empty"
9 | exit 1
10 | fi
11 |
12 | if [ -z "$INPUT_REGISTRY" ]; then
13 | echo "INPUT_REGISTRY is empty"
14 | exit 1
15 | fi
16 |
17 | if [ -n "$INPUT_DOCKER_USER" ]; then
18 | export GITHUB_ACTOR="$INPUT_DOCKER_USER"
19 | fi
20 |
21 | if [ -z "$GITHUB_ACTOR" ]; then
22 | echo "GITHUB_ACTOR is not set. Please set it to your GitHub username."
23 | exit 1
24 | fi
25 |
26 | if [ -n "$INPUT_IMAGE" ]; then
27 | export IMAGE="$INPUT_IMAGE"
28 | fi
29 |
30 | if [ -z "$INPUT_IMAGE_DIGEST" ] || [ -z "$IMAGE" ]; then
31 | echo "IMAGE and IMAGE_DIGEST is not set. Please set it."
32 | exit 1
33 | fi
34 |
35 | export IMAGE="$IMAGE@$INPUT_IMAGE_DIGEST"
36 |
37 | if [ -z "$INPUT_GITHUB_CONTEXT" ] || [ -z "$INPUT_RUNNER_CONTEXT" ]; then
38 | echo "GITHUB_CONTEXT and RUNNER_CONTEXT are required"
39 | exit 1
40 | fi
41 |
42 | if [ "$INPUT_VERIFY_ATTESTATION" = "false" ] && [ -z "$INPUT_KEY" ]; then
43 | echo "When running keyless salsa you must verify the attestation. Please set the verify_attestation flag to 'true'.
44 |
45 | (This is also the default value, and may instead be omitted)."
46 | exit 1
47 | fi
48 |
49 | GITHUB=$(echo "${INPUT_GITHUB_CONTEXT}" | base64 -w 0) &&
50 | RUNNER=$(echo "${INPUT_RUNNER_CONTEXT}" | base64 -w 0) &&
51 | ENVS=$(jq -n env | base64 -w 0)
52 |
53 | exportCosignEnvironment
54 | exportGithubToken
55 |
56 | export JAVA_HOME=/opt/java/openjdk
57 | }
58 |
59 | exportGithubToken() {
60 | if [ -n "$INPUT_GITHUB_TOKEN" ]; then
61 | if [ -n "$INPUT_TOKEN_KEY_PATTERN" ]; then
62 | export "$INPUT_TOKEN_KEY_PATTERN"="$INPUT_GITHUB_TOKEN"
63 | else
64 | export GITHUB_TOKEN="$INPUT_GITHUB_TOKEN"
65 | fi
66 | else
67 | export GITHUB_TOKEN
68 | fi
69 | }
70 |
71 | exportCosignEnvironment() {
72 | if [ -n "$COSIGN_EXPERIMENTAL" ]; then
73 | export COSIGN_EXPERIMENTAL
74 | fi
75 |
76 | if [ -n "$COSIGN_REPOSITORY" ]; then
77 | export COSIGN_REPOSITORY
78 | fi
79 | }
80 |
81 | loginDocker() {
82 | echo "---------- Logging in to Docker registry: $INPUT_REGISTRY ----------"
83 | if [ -n "$INPUT_REGISTRY_ACCESS_TOKEN" ]; then
84 | echo "$INPUT_REGISTRY_ACCESS_TOKEN" | docker login "$INPUT_REGISTRY" -u "$GITHUB_ACTOR" --password-stdin
85 | else
86 | echo "$GITHUB_TOKEN" | docker login "$INPUT_REGISTRY" -u "$GITHUB_ACTOR" --password-stdin
87 | fi
88 | }
89 |
90 | logoutDocker() {
91 | echo "---------- Logging out from Docker registry: $INPUT_REGISTRY ----------"
92 | docker logout "$INPUT_REGISTRY"
93 | }
94 |
95 | scan() {
96 | echo "---------- Running Salsa scan for deps ----------" &&
97 | salsa scan \
98 | --repo "$REPO_NAME" \
99 | --build-context "$GITHUB" \
100 | --runner-context "$RUNNER" \
101 | --env-context "$ENVS" \
102 | --subDir "$INPUT_REPO_SUB_DIR" \
103 | --mvn-opts "$INPUT_MVN_OPTS" \
104 | --build-started-on "$INPUT_BUILD_STARTED_ON" \
105 | --remote-run
106 | }
107 |
108 | attest() {
109 | echo "---------- Creating and Uploading Salsa attestation ----------" &&
110 | salsa attest \
111 | --repo "$REPO_NAME" \
112 | --subDir "$INPUT_REPO_SUB_DIR" \
113 | --remote-run \
114 | --identity-token "$INPUT_IDENTITY_TOKEN" \
115 | --key "$INPUT_KEY" \
116 | "$IMAGE"
117 | }
118 |
119 | attestVerify() {
120 | echo "---------- Verifying Salsa attestation ----------" &&
121 | salsa attest \
122 | --verify \
123 | --repo "$REPO_NAME" \
124 | --subDir "$INPUT_REPO_SUB_DIR" \
125 | --remote-run \
126 | --key "$INPUT_KEY" \
127 | "$IMAGE"
128 | }
129 |
130 | runSalsa() {
131 | echo "---------- Running Salsa for repository: $REPO_NAME ----------"
132 | if [ "$INPUT_VERIFY_ATTESTATION" = "false" ]; then
133 | scan && attest
134 | elif [ "$INPUT_VERIFY_ATTESTATION" = "true" ]; then
135 | scan && attest && attestVerify
136 | fi
137 |
138 | }
139 |
140 | cleanUpGoogle() {
141 | echo "---------- Clean up Google Cloud stuff ----------"
142 | if
143 | [ -n "$GOOGLE_APPLICATION_CREDENTIALS" ] ||
144 | [ -n "$CLOUDSDK_AUTH_CREDENTIAL_FILE_OVERRIDE" ] ||
145 | [ -n "$GOOGLE_GHA_CREDS_PATH" ]
146 | then
147 | rm -rvf "$GOOGLE_APPLICATION_CREDENTIALS" "$CLOUDSDK_AUTH_CREDENTIAL_FILE_OVERRIDE" "$GOOGLE_GHA_CREDS_PATH"
148 | fi
149 | }
150 |
151 | setOutput() {
152 | echo "---------- Setting output ----------"
153 | {
154 | echo "provenance_file_path=$REPO_NAME.provenance"
155 | echo "raw_file_path=$REPO_NAME.raw.txt"
156 | } >>"$GITHUB_OUTPUT"
157 | }
158 |
159 | setup && loginDocker && runSalsa && logoutDocker && setOutput
160 | cleanUpGoogle
161 |
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/nais/salsa
2 |
3 | go 1.19
4 |
5 | require (
6 | github.com/briandowns/spinner v1.23.0
7 | github.com/go-git/go-git/v5 v5.11.0
8 | github.com/go-jose/go-jose/v3 v3.0.3
9 | github.com/in-toto/in-toto-golang v0.7.1
10 | github.com/sirupsen/logrus v1.9.0
11 | github.com/spf13/cobra v1.7.0
12 | github.com/spf13/pflag v1.0.5
13 | github.com/spf13/viper v1.15.0
14 | github.com/stretchr/testify v1.8.4
15 | )
16 |
17 | require (
18 | dario.cat/mergo v1.0.0 // indirect
19 | github.com/Microsoft/go-winio v0.6.1 // indirect
20 | github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 // indirect
21 | github.com/cloudflare/circl v1.3.7 // indirect
22 | github.com/cyphar/filepath-securejoin v0.2.4 // indirect
23 | github.com/davecgh/go-spew v1.1.1 // indirect
24 | github.com/emirpasic/gods v1.18.1 // indirect
25 | github.com/fatih/color v1.14.1 // indirect
26 | github.com/fsnotify/fsnotify v1.6.0 // indirect
27 | github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
28 | github.com/go-git/go-billy/v5 v5.5.0 // indirect
29 | github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
30 | github.com/hashicorp/hcl v1.0.0 // indirect
31 | github.com/inconshreveable/mousetrap v1.1.0 // indirect
32 | github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
33 | github.com/kevinburke/ssh_config v1.2.0 // indirect
34 | github.com/magiconair/properties v1.8.7 // indirect
35 | github.com/mattn/go-colorable v0.1.13 // indirect
36 | github.com/mattn/go-isatty v0.0.17 // indirect
37 | github.com/mitchellh/mapstructure v1.5.0 // indirect
38 | github.com/pelletier/go-toml/v2 v2.0.6 // indirect
39 | github.com/pjbgf/sha1cd v0.3.0 // indirect
40 | github.com/pmezard/go-difflib v1.0.0 // indirect
41 | github.com/secure-systems-lab/go-securesystemslib v0.5.0 // indirect
42 | github.com/sergi/go-diff v1.3.1 // indirect
43 | github.com/shibumi/go-pathspec v1.3.0 // indirect
44 | github.com/skeema/knownhosts v1.2.1 // indirect
45 | github.com/spf13/afero v1.9.3 // indirect
46 | github.com/spf13/cast v1.5.0 // indirect
47 | github.com/spf13/jwalterweatherman v1.1.0 // indirect
48 | github.com/subosito/gotenv v1.4.2 // indirect
49 | github.com/xanzy/ssh-agent v0.3.3 // indirect
50 | golang.org/x/crypto v0.21.0 // indirect
51 | golang.org/x/mod v0.12.0 // indirect
52 | golang.org/x/net v0.23.0 // indirect
53 | golang.org/x/sys v0.18.0 // indirect
54 | golang.org/x/term v0.18.0 // indirect
55 | golang.org/x/text v0.14.0 // indirect
56 | golang.org/x/tools v0.13.0 // indirect
57 | gopkg.in/ini.v1 v1.67.0 // indirect
58 | gopkg.in/warnings.v0 v0.1.2 // indirect
59 | gopkg.in/yaml.v3 v3.0.1 // indirect
60 | )
61 |
--------------------------------------------------------------------------------
/integration-test/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:22.10
2 | RUN apt-get update
--------------------------------------------------------------------------------
/pkg/build/dependencies.go:
--------------------------------------------------------------------------------
1 | package build
2 |
3 | import (
4 | "fmt"
5 | slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
6 | )
7 |
8 | type artifactType string
9 |
10 | const (
11 | pkgArtifactType artifactType = "pkg"
12 | )
13 |
14 | type ArtifactDependencies struct {
15 | Cmd Cmd
16 | RuntimeDeps map[string]Dependency
17 | }
18 |
19 | func ArtifactDependency(deps map[string]Dependency, path, cmdFlags string) *ArtifactDependencies {
20 | return &ArtifactDependencies{
21 | Cmd: Cmd{
22 | Path: path,
23 | CmdFlags: cmdFlags,
24 | },
25 | RuntimeDeps: deps,
26 | }
27 | }
28 |
29 | func (in ArtifactDependencies) CmdPath() string {
30 | return in.Cmd.Path
31 | }
32 |
33 | func (in ArtifactDependencies) CmdFlags() string {
34 | return in.Cmd.CmdFlags
35 | }
36 |
37 | type Cmd struct {
38 | Path string
39 | CmdFlags string
40 | }
41 |
42 | type Dependency struct {
43 | Coordinates string
44 | Version string
45 | CheckSum CheckSum
46 | Type string
47 | }
48 |
49 | func (d Dependency) ToUri() string {
50 | return fmt.Sprintf("%s:%s:%s", d.Type, d.Coordinates, d.Version)
51 | }
52 |
53 | func (d Dependency) ToDigestSet() slsa.DigestSet {
54 | return slsa.DigestSet{d.CheckSum.Algorithm: d.CheckSum.Digest}
55 | }
56 |
57 | func Dependence(coordinates, version string, checksum CheckSum) Dependency {
58 | return Dependency{
59 | Coordinates: coordinates,
60 | Version: version,
61 | CheckSum: checksum,
62 | Type: string(pkgArtifactType),
63 | }
64 | }
65 |
66 | type CheckSum struct {
67 | Algorithm string
68 | Digest string
69 | }
70 |
71 | func Verification(algo, digest string) CheckSum {
72 | return CheckSum{
73 | Algorithm: algo,
74 | Digest: digest,
75 | }
76 | }
77 |
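78 | // Note: Verification and Dependence are the constructors used by the build tool
79 | // parsers (see e.g. pkg/build/golang and pkg/build/jvm). Every resolved dependency
80 | // ends up as one entry in ArtifactDependencies.RuntimeDeps, and ToUri/ToDigestSet
81 | // expose it in the URI-plus-digest-set shape that a provenance material expects.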
--------------------------------------------------------------------------------
/pkg/build/golang/golang.go:
--------------------------------------------------------------------------------
1 | package golang
2 |
3 | import (
4 | "fmt"
5 | "github.com/nais/salsa/pkg/build"
6 | "github.com/nais/salsa/pkg/utils"
7 | "os"
8 | "strings"
9 | )
10 |
11 | const golangBuildFileName = "go.sum"
12 |
13 | type Golang struct {
14 | BuildFilePatterns []string
15 | }
16 |
17 | func BuildGo() build.Tool {
18 | return &Golang{
19 | BuildFilePatterns: []string{golangBuildFileName},
20 | }
21 | }
22 |
23 | func (g Golang) BuildFiles() []string {
24 | return g.BuildFilePatterns
25 | }
26 |
27 | func (g Golang) ResolveDeps(workDir string) (*build.ArtifactDependencies, error) {
28 | path := fmt.Sprintf("%s/%s", workDir, golangBuildFileName)
29 |
30 | fileContent, err := os.ReadFile(path)
31 | if err != nil {
32 | return nil, fmt.Errorf("readfile %v", err)
33 | }
34 |
35 | deps, err := GoDeps(string(fileContent))
36 | if err != nil {
37 | return nil, fmt.Errorf("error parsing %s, %v", golangBuildFileName, err)
38 | }
39 |
40 | return build.ArtifactDependency(deps, path, golangBuildFileName), nil
41 | }
42 |
43 | func GoDeps(goSumContents string) (map[string]build.Dependency, error) {
44 | deps := make(map[string]build.Dependency, 0)
45 | lines := strings.Split(goSumContents, "\n")
46 | for _, line := range lines {
47 | if isNotInteresting(line) {
48 | continue
49 | }
50 | parts := strings.Split(line, " ")
51 | version := parts[1][1:]
52 | coordinates := parts[0]
53 | base64EncodedDigest := strings.Split(parts[2], ":")[1]
54 | digest, err := utils.DecodeDigest(base64EncodedDigest)
55 | if err != nil {
56 | return nil, err
57 | }
58 | checksum := build.Verification(build.AlgorithmSHA256, digest)
59 | deps[coordinates] = build.Dependence(coordinates, version, checksum)
60 | }
61 | return deps, nil
62 | }
63 |
64 | func isNotInteresting(line string) bool {
65 | return isEmpty(line) || isMod(line)
66 | }
67 |
68 | func isEmpty(line string) bool {
69 | return strings.TrimSpace(line) == ""
70 | }
71 |
72 | func isMod(line string) bool {
73 | idx := strings.Index(line, "go.mod")
74 | return idx > -1
75 | }
76 |
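77 | // Note on the input format: GoDeps parses standard go.sum lines of the form
78 | //
79 | //	<module> <version>[/go.mod] h1:<base64 digest>=
80 | //
81 | // e.g. "github.com/google/uuid v1.0.0 h1:b4Gk+7WdP/d3HZH8EJsZpvV7EtDOgaZLtnaNGIu1adA=".
82 | // Lines for go.mod files are skipped, the leading "v" is stripped from the version,
83 | // and the base64 h1 digest is decoded by utils.DecodeDigest into the hex value that
84 | // is stored as a sha256 checksum.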
--------------------------------------------------------------------------------
/pkg/build/golang/golang_test.go:
--------------------------------------------------------------------------------
1 | package golang
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/build"
5 | "github.com/nais/salsa/pkg/build/test"
6 | "github.com/stretchr/testify/assert"
7 | "testing"
8 | )
9 |
10 | func TestGoDeps(t *testing.T) {
11 | got, err := GoDeps(goSumContents)
12 | assert.NoError(t, err)
13 | want := map[string]build.Dependency{}
14 | want["github.com/google/uuid"] = test.Dependency("github.com/google/uuid", "1.0.0", "sha256", "6f81a4fbb59d3ff7771d91fc109b19a6f57b12d0ce81a64bb6768d188bb569d0")
15 | want["github.com/pborman/uuid"] = test.Dependency("github.com/pborman/uuid", "1.2.1", "sha256", "f99648c39f2dfe8cdd8d169787fddac077e65916f363126801d349c5eff7a6fc")
16 |
17 | test.AssertEqual(t, got, want)
18 | }
19 |
20 | const goSumContents = `
21 | github.com/google/uuid v1.0.0 h1:b4Gk+7WdP/d3HZH8EJsZpvV7EtDOgaZLtnaNGIu1adA=
22 | github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
23 | github.com/pborman/uuid v1.2.1 h1:+ZZIw58t/ozdjRaXh/3awHfmWRbzYxJoAdNJxe/3pvw=
24 | github.com/pborman/uuid v1.2.1/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
25 | `
26 |
27 | func TestBuildGo(t *testing.T) {
28 | tests := []test.IntegrationTest{
29 | {
30 | Name: "find GOLANG build file and parse output",
31 | BuildType: BuildGo(),
32 | WorkDir: "testdata/golang",
33 | BuildPath: "testdata/golang/go.sum",
34 | Cmd: "go.sum",
35 | Want: test.Want{
36 | Key: "github.com/Microsoft/go-winio",
37 | Version: "0.5.1",
38 | Algo: "sha256",
39 | Digest: "68f269d900fb38eae13b9b505ea42819225cf838c3b564c62ce98dc809ba1606",
40 | },
41 | },
42 | {
43 | Name: "cant find GOLANG build file",
44 | BuildType: BuildGo(),
45 | WorkDir: "testdata/whatever",
46 | Error: true,
47 | ErrorMessage: "could not find match, reading dir open testdata/whatever: no such file or directory",
48 | },
49 | }
50 |
51 | test.Run(t, tests)
52 | }
53 |
--------------------------------------------------------------------------------
/pkg/build/jvm/gradle.go:
--------------------------------------------------------------------------------
1 | package jvm
2 |
3 | import (
4 | "encoding/xml"
5 | "errors"
6 | "fmt"
7 | "os"
8 | "regexp"
9 | "strings"
10 |
11 | "github.com/nais/salsa/pkg/build"
12 |
13 | "github.com/nais/salsa/pkg/utils"
14 | )
15 |
16 | type Gradle struct {
17 | BuildFilePatterns []string
18 | }
19 |
20 | func BuildGradle() build.Tool {
21 | return &Gradle{
22 | BuildFilePatterns: []string{
23 | "build.gradle.kts",
24 | "build.gradle",
25 | },
26 | }
27 | }
28 |
29 | func (g Gradle) BuildFiles() []string {
30 | return g.BuildFilePatterns
31 | }
32 |
33 | func (g Gradle) ResolveDeps(workDir string) (*build.ArtifactDependencies, error) {
34 | cmd := utils.NewCmd(
35 | "gradle",
36 | "",
37 | []string{"-q", "dependencies", "--configuration", "runtimeClasspath", "-M", "sha256"},
38 | nil,
39 | workDir,
40 | )
41 | depsOutput, err := cmd.Run()
42 | if err != nil {
43 | return nil, fmt.Errorf("exec: %v\n", err)
44 | }
45 |
46 | xmlData, err := os.ReadFile(workDir + "/gradle/verification-metadata.xml")
47 | if err != nil {
48 | return nil, fmt.Errorf("readfile: %v\n", err)
49 | }
50 |
51 | deps, err := GradleDeps(depsOutput, xmlData)
52 | if err != nil {
53 | return nil, fmt.Errorf("could not get gradle deps: %w", err)
54 | }
55 | args := make([]string, 0)
56 | args = append(args, cmd.Name)
57 | args = append(args, cmd.Flags...)
58 | return build.ArtifactDependency(deps, cmd.Name, strings.Join(args, " ")), nil
59 | }
60 |
61 | func GradleDeps(depsOutput string, checksumXml []byte) (map[string]build.Dependency, error) {
62 | pattern := regexp.MustCompile(`(?m)---\s[a-zA-Z0-9.]+:.*$`)
63 | matches := pattern.FindAllString(depsOutput, -1)
64 | if matches == nil {
65 | return nil, errors.New("unable to find any dependencies")
66 | }
67 |
68 | deps := make(map[string]build.Dependency, 0)
69 |
70 | sum := GradleChecksum{}
71 | err := xml.Unmarshal(checksumXml, &sum)
72 | if err != nil {
73 | return nil, fmt.Errorf("xml parsing: %v", err)
74 | }
75 |
76 | for _, match := range matches {
77 | elements := filter(match)
78 | groupId := elements[0]
79 | artifactId := elements[1]
80 | version := Version(elements)
81 | coordinates := fmt.Sprintf("%s:%s", groupId, artifactId)
82 | checksum := sum.buildChecksum(groupId, artifactId, version)
83 | deps[coordinates] = build.Dependence(coordinates, version, checksum)
84 | }
85 | return deps, nil
86 | }
87 |
88 | func Version(elements []string) string {
89 | if len(elements) == 3 {
90 | return elements[2]
91 | }
92 | return elements[3]
93 | }
94 |
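// filter normalizes a single tree line: " -> " upgrades become an extra ":" separator,
// the "(*)" and "(c)" markers and the "--- " prefix are removed, and the result is split on ":".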
95 | func filter(match string) []string {
96 | replaceUpgrade := strings.Replace(match, " -> ", ":", -1)
97 | replaceAndTrimStar := strings.TrimSpace(strings.Replace(replaceUpgrade, "(*)", "", -1))
98 | replaceAndTrimC := strings.TrimSpace(strings.Replace(replaceAndTrimStar, "(c)", "", -1))
99 | return strings.Split(strings.Replace(replaceAndTrimC, "--- ", "", -1), ":")
100 | }
101 |
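// buildChecksum returns the sha256 of the first artifact recorded for the matching
// group/name/version component, or an empty CheckSum when the component is absent
// from the verification metadata.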
102 | func (g GradleChecksum) buildChecksum(groupId, artifactId, version string) build.CheckSum {
103 | for _, c := range g.Components.Components {
104 | if c.Group == groupId && c.Name == artifactId && c.Version == version {
105 | for _, a := range c.Artifacts {
106 | return build.Verification(build.AlgorithmSHA256, a.Sha256.Value)
107 | }
108 | }
109 | }
110 | return build.CheckSum{}
111 | }
112 |
113 | type GradleChecksum struct {
114 | XMLName xml.Name `xml:"verification-metadata"`
115 | Configuration Configuration `xml:"configuration"`
116 | Components Components `xml:"components"`
117 | }
118 |
119 | type Configuration struct {
120 | XMLName xml.Name `xml:"configuration"`
121 | VerifyMetadata bool `xml:"verify-metadata"`
122 | VerifySignatures bool `xml:"verify-signatures"`
123 | }
124 |
125 | type Components struct {
126 | XMLName xml.Name `xml:"components"`
127 | Components []Component `xml:"component"`
128 | }
129 |
130 | type Component struct {
131 | XMLName xml.Name `xml:"component"`
132 | Group string `xml:"group,attr"`
133 | Name string `xml:"name,attr"`
134 | Version string `xml:"version,attr"`
135 | Artifacts []Artifact `xml:"artifact"`
136 | }
137 |
138 | type Artifact struct {
139 | XMLName xml.Name `xml:"artifact"`
140 | Name string `xml:"name,attr"`
141 | Sha256 Sha256 `xml:"sha256"`
142 | }
143 |
144 | type Sha256 struct {
145 | XMLName xml.Name `xml:"sha256"`
146 | Value string `xml:"value,attr"`
147 | }
148 |
--------------------------------------------------------------------------------
/pkg/build/jvm/gradle_test.go:
--------------------------------------------------------------------------------
1 | package jvm
2 |
3 | import (
4 | "os"
5 | "testing"
6 |
7 | "github.com/nais/salsa/pkg/build"
8 | "github.com/nais/salsa/pkg/build/test"
9 |
10 | "github.com/stretchr/testify/assert"
11 | )
12 |
13 | func TestGradleDeps(t *testing.T) {
14 | gradleOutput, _ := os.ReadFile("testdata/gradle_output.txt")
15 | checksumXml, _ := os.ReadFile("testdata/verification-metadata.xml")
16 | got, err := GradleDeps(string(gradleOutput), checksumXml)
17 | assert.NoError(t, err)
18 | want := map[string]build.Dependency{}
19 | want["ch.qos.logback:logback-classic"] = test.Dependency(
20 | "ch.qos.logback:logback-classic",
21 | "1.2.10",
22 | "sha256",
23 | "3160ae988af82c8bf3024ddbe034a82da98bb186fd09e76c50543c5b9da5cc5e",
24 | )
25 | want["org.jetbrains.kotlinx:kotlinx-coroutines-core"] = test.Dependency(
26 | "org.jetbrains.kotlinx:kotlinx-coroutines-core",
27 | "1.5.2-native-mt",
28 | "sha256",
29 | "78492527a0d09e0c53c81aacc2e073a83ee0fc3105e701496819ec67c98df16f",
30 | )
31 | want["com.fasterxml.jackson.core:jackson-annotations"] = test.Dependency(
32 | "com.fasterxml.jackson.core:jackson-annotations",
33 | "2.13.0",
34 | "sha256",
35 | "81f9724d8843e8b08f8f6c0609e7a2b030d00c34861c4ac7e2099a7235047d6f",
36 | )
37 | want["com.fasterxml.jackson.core:jackson-databind"] = test.Dependency(
38 | "com.fasterxml.jackson.core:jackson-databind",
39 | "2.13.0",
40 | "sha256",
41 | "9c826d27176268777adcf97e1c6e2051c7e33a7aaa2c370c2e8c6077fd9da3f4",
42 | )
43 |
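// count how many of the expected dependencies are present (by value) in the parsed result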
44 | count := 0
45 | for _, wantDep := range want {
46 | for _, gotDep := range got {
47 | if wantDep == gotDep {
48 | count++
49 | }
50 | }
51 | }
52 |
53 | assert.Equal(t, len(want), count)
54 | }
55 |
56 | func TestBuildGradle(t *testing.T) {
57 | tests := []test.IntegrationTest{
58 | {
59 | Name: "find build file and parse output",
60 | BuildType: BuildGradle(),
61 | WorkDir: "testdata/jvm/gradle-kts",
62 | BuildPath: "/usr/local/bin/gradle",
63 | Cmd: "gradle -q dependencies --configuration runtimeClasspath -M sha256",
64 | Want: test.Want{
65 | Key: "org.jetbrains.kotlin:kotlin-reflect",
66 | Version: "1.6.10",
67 | Algo: "sha256",
68 | Digest: "3277ac102ae17aad10a55abec75ff5696c8d109790396434b496e75087854203",
69 | },
70 | },
71 | {
72 | Name: "can't find Gradle build file",
73 | BuildType: BuildGradle(),
74 | WorkDir: "testdata/whatever",
75 | Error: true,
76 | ErrorMessage: "could not find match, reading dir open testdata/whatever: no such file or directory",
77 | },
78 | {
79 | Name: "support build.gradle file",
80 | BuildType: BuildGradle(),
81 | WorkDir: "testdata/jvm/gradle",
82 | BuildPath: "/usr/local/bin/gradle",
83 | Cmd: "gradle -q dependencies --configuration runtimeClasspath -M sha256",
84 | Want: test.Want{
85 | Key: "org.jetbrains.kotlin:kotlin-reflect",
86 | Version: "1.6.10",
87 | Algo: "sha256",
88 | Digest: "3277ac102ae17aad10a55abec75ff5696c8d109790396434b496e75087854203",
89 | },
90 | },
91 | }
92 |
93 | test.Run(t, tests)
94 | }
95 |
--------------------------------------------------------------------------------
/pkg/build/jvm/mvn.go:
--------------------------------------------------------------------------------
1 | package jvm
2 |
3 | import (
4 | "crypto/sha256"
5 | "fmt"
6 | "os"
7 | "path/filepath"
8 | "strings"
9 |
10 | "github.com/nais/salsa/pkg/build"
12 |
13 | "github.com/nais/salsa/pkg/utils"
14 | )
15 |
16 | const mavenBuildFileName = "pom.xml"
17 |
18 | type Maven struct {
19 | BuildFilePatterns []string
20 | CmdOptions string
21 | }
22 |
23 | func BuildMaven(cmdOpts string) build.Tool {
24 | m := &Maven{
25 | BuildFilePatterns: []string{mavenBuildFileName},
26 | CmdOptions: cmdOpts,
27 | }
28 |
29 | return m
30 | }
31 |
32 | func (m Maven) BuildFiles() []string {
33 | return m.BuildFilePatterns
34 | }
35 |
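// ResolveDeps copies the runtime-scoped dependencies into target/dependency using a Maven
// repository layout (plus any extra CmdOptions) and derives coordinates and sha256
// checksums from the copied jars.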
36 | func (m Maven) ResolveDeps(workDir string) (*build.ArtifactDependencies, error) {
37 | cmd := utils.NewCmd(
38 | "mvn",
39 | "dependency:copy-dependencies",
40 | defaultMavenOpts(),
41 | m.parsedCmdOpts(),
42 | workDir,
43 | )
44 |
45 | _, err := cmd.Run()
46 | if err != nil {
47 | return nil, fmt.Errorf("exec: %w", err)
48 | }
49 |
50 | rootPath := workDir + "/target/dependency"
51 | deps, err := MavenCompileAndRuntimeTimeDeps(rootPath)
52 | if err != nil {
53 | return nil, fmt.Errorf("scan: %w", err)
54 | }
55 |
56 | args := make([]string, 0)
57 | args = append(args, cmd.Name)
58 | args = append(args, cmd.SubCmd)
59 | args = append(args, cmd.Flags...)
60 | args = append(args, cmd.Args...)
61 | return build.ArtifactDependency(deps, cmd.Name, strings.Join(args, " ")), nil
62 | }
63 |
64 | func defaultMavenOpts() []string {
65 | return []string{
66 | "-DincludeScope=runtime",
67 | "-Dmdep.useRepositoryLayout=true",
68 | }
69 | }
70 |
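// parsedCmdOpts splits the comma-separated CmdOptions string into separate mvn arguments
// and strips all spaces from each of them.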
71 | func (m Maven) parsedCmdOpts() []string {
72 | if m.CmdOptions == "" {
73 | return nil
74 | }
75 |
76 | parsed := strings.Split(m.CmdOptions, ",")
77 | for i, s := range parsed {
78 | parsed[i] = strings.ReplaceAll(s, " ", "")
79 | }
80 |
81 | return parsed
82 | }
83 |
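// MavenCompileAndRuntimeTimeDeps walks rootPath for jar files copied in repository layout,
// derives groupId, artifactId and version from each jar's path and computes its sha256 checksum.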
84 | func MavenCompileAndRuntimeTimeDeps(rootPath string) (map[string]build.Dependency, error) {
85 | files, err := findJarFiles(rootPath)
86 | if err != nil {
87 | return nil, err
88 | }
89 |
90 | deps := make(map[string]build.Dependency, 0)
91 |
92 | for _, file := range files {
93 | f := strings.Split(file, rootPath)[1]
94 |
95 | path := strings.Split(f, "/")
96 | version := path[len(path)-2]
97 | artifactId := path[len(path)-3]
98 | groupId := strings.Join(path[1:(len(path)-3)], ".")
99 |
100 | checksum, err := buildChecksum(file)
101 | if err != nil {
102 | return nil, err
103 | }
104 | coordinates := fmt.Sprintf("%s:%s", groupId, artifactId)
105 | deps[coordinates] = build.Dependence(coordinates, version, checksum)
106 | }
107 | return deps, nil
108 | }
109 |
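// buildChecksum returns the sha256 digest of the file's contents as a verification checksum.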
110 | func buildChecksum(file string) (build.CheckSum, error) {
111 | content, err := os.ReadFile(file)
112 | if err != nil {
113 | return build.CheckSum{}, err
114 | }
115 | checksum := fmt.Sprintf("%x", sha256.Sum256(content))
116 | return build.Verification(build.AlgorithmSHA256, checksum), nil
117 | }
118 |
119 | func findJarFiles(rootPath string) ([]string, error) {
120 | files := make([]string, 0)
121 | err := filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
122 | if err != nil {
123 | return err
124 | }
125 | if !info.IsDir() && filepath.Ext(info.Name()) == ".jar" {
126 | files = append(files, path)
127 | }
128 | return nil
129 | })
130 | if err != nil {
131 | return nil, err
132 | }
133 | return files, nil
134 | }
135 |
--------------------------------------------------------------------------------
/pkg/build/jvm/mvn_test.go:
--------------------------------------------------------------------------------
1 | package jvm
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/build"
5 | "github.com/nais/salsa/pkg/build/test"
6 | "github.com/stretchr/testify/assert"
7 | "testing"
8 | )
9 |
10 | func TestMavenDeps(t *testing.T) {
11 | got, err := MavenCompileAndRuntimeTimeDeps("testdata/target/dependency")
12 | assert.NoError(t, err)
13 | want := map[string]build.Dependency{}
14 | want["org.springframework:spring-core"] = test.Dependency(
15 | "org.springframework:spring-core",
16 | "5.3.16", "sha256",
17 | "0903d17e58654a2c79f4e46df79dc73ccaa49b6edbc7c3278359db403b687f6e",
18 | )
19 | want["org.yaml:snakeyaml"] = test.Dependency(
20 | "org.yaml:snakeyaml",
21 | "1.26",
22 | "sha256",
23 | "d87d607e500885356c03c1cae61e8c2e05d697df8787d5aba13484c2eb76a844",
24 | )
25 |
26 | test.AssertEqual(t, got, want)
27 | }
28 |
29 | func TestBuildMaven(t *testing.T) {
30 | tests := []test.IntegrationTest{
31 | {
32 | Name: "find build file and parse output",
33 | BuildType: BuildMaven(""),
34 | WorkDir: "testdata/jvm/maven",
35 | BuildPath: "/usr/local/bin/mvn",
36 | Cmd: "mvn dependency:copy-dependencies -DincludeScope=runtime -Dmdep.useRepositoryLayout=true",
37 | Want: test.Want{
38 | Key: "com.google.code.gson:gson",
39 | Version: "2.8.6",
40 | Algo: "sha256",
41 | Digest: "c8fb4839054d280b3033f800d1f5a97de2f028eb8ba2eb458ad287e536f3f25f",
42 | },
43 | },
44 | {
45 | Name: "can't find build file",
46 | BuildType: BuildMaven(""),
47 | WorkDir: "testdata/whatever",
48 | Error: true,
49 | ErrorMessage: "could not find match, reading dir open testdata/whatever: no such file or directory",
50 | },
51 | {
52 | Name: "Add an additional command line argument as part of the mvn command",
53 | BuildType: BuildMaven("-s .m2/maven-settings.xml"),
54 | Cmd: "mvn dependency:copy-dependencies -DincludeScope=runtime -Dmdep.useRepositoryLayout=true -s.m2/maven-settings.xml",
55 | WorkDir: "testdata/jvm/maven",
56 | BuildPath: "/usr/local/bin/mvn",
57 | Want: test.Want{
58 | Key: "com.google.code.gson:gson",
59 | Version: "2.8.6",
60 | Algo: "sha256",
61 | Digest: "c8fb4839054d280b3033f800d1f5a97de2f028eb8ba2eb458ad287e536f3f25f",
62 | },
63 | },
64 | {
65 | Name: "Add multiple additional command line arguments as part of the mvn command",
66 | BuildType: BuildMaven("--also-make, --threads=2, --batch-mode, --settings=.m2/maven-settings.xml"),
67 | Cmd: "mvn dependency:copy-dependencies -DincludeScope=runtime -Dmdep.useRepositoryLayout=true --also-make --threads=2 --batch-mode --settings=.m2/maven-settings.xml",
68 | WorkDir: "testdata/jvm/maven",
69 | BuildPath: "/usr/local/bin/mvn",
70 | Want: test.Want{
71 | Key: "com.google.code.gson:gson",
72 | Version: "2.8.6",
73 | Algo: "sha256",
74 | Digest: "c8fb4839054d280b3033f800d1f5a97de2f028eb8ba2eb458ad287e536f3f25f",
75 | },
76 | },
77 | }
78 |
79 | test.Run(t, tests)
80 | }
81 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle-kts/build.gradle.kts:
--------------------------------------------------------------------------------
1 | import org.gradle.api.tasks.testing.logging.TestLogEvent
2 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
3 |
4 | val kotlinVersion = "1.6.10"
5 |
6 | plugins {
7 | application
8 | kotlin("jvm") version "1.6.10"
9 | id("org.jmailen.kotlinter") version "3.9.0"
10 | }
11 |
12 | repositories {
13 | mavenCentral()
14 | }
15 |
16 | dependencies {
17 | implementation(kotlin("stdlib"))
18 | implementation("org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion")
19 | implementation("org.jetbrains.kotlin:kotlin-script-runtime:$kotlinVersion")
20 | }
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle-kts/gradle.properties:
--------------------------------------------------------------------------------
1 | kotlin.code.style=official
2 | group=io.nais.test
3 | version=0.1-SNAPSHOT
4 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle-kts/gradlew:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #
4 | # Copyright © 2015-2021 the original authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | #
21 | # Gradle start up script for POSIX generated by Gradle.
22 | #
23 | # Important for running:
24 | #
25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
26 | # noncompliant, but you have some other compliant shell such as ksh or
27 | # bash, then to run this script, type that shell name before the whole
28 | # command line, like:
29 | #
30 | # ksh Gradle
31 | #
32 | # Busybox and similar reduced shells will NOT work, because this script
33 | # requires all of these POSIX shell features:
34 | # * functions;
35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»;
37 | # * compound commands having a testable exit status, especially «case»;
38 | # * various built-in commands including «command», «set», and «ulimit».
39 | #
40 | # Important for patching:
41 | #
42 | # (2) This script targets any POSIX shell, so it avoids extensions provided
43 | # by Bash, Ksh, etc; in particular arrays are avoided.
44 | #
45 | # The "traditional" practice of packing multiple parameters into a
46 | # space-separated string is a well documented source of bugs and security
47 | # problems, so this is (mostly) avoided, by progressively accumulating
48 | # options in "$@", and eventually passing that to Java.
49 | #
50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
52 | # see the in-line comments for details.
53 | #
54 | # There are tweaks for specific operating systems such as AIX, CygWin,
55 | # Darwin, MinGW, and NonStop.
56 | #
57 | # (3) This script is generated from the Groovy template
58 | # https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
59 | # within the Gradle project.
60 | #
61 | # You can find Gradle at https://github.com/gradle/gradle/.
62 | #
63 | ##############################################################################
64 |
65 | # Attempt to set APP_HOME
66 |
67 | # Resolve links: $0 may be a link
68 | app_path=$0
69 |
70 | # Need this for daisy-chained symlinks.
71 | while
72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
73 | [ -h "$app_path" ]
74 | do
75 | ls=$( ls -ld "$app_path" )
76 | link=${ls#*' -> '}
77 | case $link in #(
78 | /*) app_path=$link ;; #(
79 | *) app_path=$APP_HOME$link ;;
80 | esac
81 | done
82 |
83 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
84 |
85 | APP_NAME="Gradle"
86 | APP_BASE_NAME=${0##*/}
87 |
88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
90 |
91 | # Use the maximum available, or set MAX_FD != -1 to use that value.
92 | MAX_FD=maximum
93 |
94 | warn () {
95 | echo "$*"
96 | } >&2
97 |
98 | die () {
99 | echo
100 | echo "$*"
101 | echo
102 | exit 1
103 | } >&2
104 |
105 | # OS specific support (must be 'true' or 'false').
106 | cygwin=false
107 | msys=false
108 | darwin=false
109 | nonstop=false
110 | case "$( uname )" in #(
111 | CYGWIN* ) cygwin=true ;; #(
112 | Darwin* ) darwin=true ;; #(
113 | MSYS* | MINGW* ) msys=true ;; #(
114 | NONSTOP* ) nonstop=true ;;
115 | esac
116 |
117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
118 |
119 |
120 | # Determine the Java command to use to start the JVM.
121 | if [ -n "$JAVA_HOME" ] ; then
122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
123 | # IBM's JDK on AIX uses strange locations for the executables
124 | JAVACMD=$JAVA_HOME/jre/sh/java
125 | else
126 | JAVACMD=$JAVA_HOME/bin/java
127 | fi
128 | if [ ! -x "$JAVACMD" ] ; then
129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
130 |
131 | Please set the JAVA_HOME variable in your environment to match the
132 | location of your Java installation."
133 | fi
134 | else
135 | JAVACMD=java
136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
137 |
138 | Please set the JAVA_HOME variable in your environment to match the
139 | location of your Java installation."
140 | fi
141 |
142 | # Increase the maximum file descriptors if we can.
143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
144 | case $MAX_FD in #(
145 | max*)
146 | MAX_FD=$( ulimit -H -n ) ||
147 | warn "Could not query maximum file descriptor limit"
148 | esac
149 | case $MAX_FD in #(
150 | '' | soft) :;; #(
151 | *)
152 | ulimit -n "$MAX_FD" ||
153 | warn "Could not set maximum file descriptor limit to $MAX_FD"
154 | esac
155 | fi
156 |
157 | # Collect all arguments for the java command, stacking in reverse order:
158 | # * args from the command line
159 | # * the main class name
160 | # * -classpath
161 | # * -D...appname settings
162 | # * --module-path (only if needed)
163 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
164 |
165 | # For Cygwin or MSYS, switch paths to Windows format before running java
166 | if "$cygwin" || "$msys" ; then
167 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
168 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
169 |
170 | JAVACMD=$( cygpath --unix "$JAVACMD" )
171 |
172 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
173 | for arg do
174 | if
175 | case $arg in #(
176 | -*) false ;; # don't mess with options #(
177 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
178 | [ -e "$t" ] ;; #(
179 | *) false ;;
180 | esac
181 | then
182 | arg=$( cygpath --path --ignore --mixed "$arg" )
183 | fi
184 | # Roll the args list around exactly as many times as the number of
185 | # args, so each arg winds up back in the position where it started, but
186 | # possibly modified.
187 | #
188 | # NB: a `for` loop captures its iteration list before it begins, so
189 | # changing the positional parameters here affects neither the number of
190 | # iterations, nor the values presented in `arg`.
191 | shift # remove old arg
192 | set -- "$@" "$arg" # push replacement arg
193 | done
194 | fi
195 |
196 | # Collect all arguments for the java command;
197 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
198 | # shell script including quotes and variable substitutions, so put them in
199 | # double quotes to make sure that they get re-expanded; and
200 | # * put everything else in single quotes, so that it's not re-expanded.
201 |
202 | set -- \
203 | "-Dorg.gradle.appname=$APP_BASE_NAME" \
204 | -classpath "$CLASSPATH" \
205 | org.gradle.wrapper.GradleWrapperMain \
206 | "$@"
207 |
208 | # Use "xargs" to parse quoted args.
209 | #
210 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed.
211 | #
212 | # In Bash we could simply go:
213 | #
214 | # readarray ARGS < <( xargs -n1 <<<"$var" ) &&
215 | # set -- "${ARGS[@]}" "$@"
216 | #
217 | # but POSIX shell has neither arrays nor command substitution, so instead we
218 | # post-process each arg (as a line of input to sed) to backslash-escape any
219 | # character that might be a shell metacharacter, then use eval to reverse
220 | # that process (while maintaining the separation between arguments), and wrap
221 | # the whole thing up as a single "set" statement.
222 | #
223 | # This will of course break if any of these variables contains a newline or
224 | # an unmatched quote.
225 | #
226 |
227 | eval "set -- $(
228 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
229 | xargs -n1 |
230 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
231 | tr '\n' ' '
232 | )" '"$@"'
233 |
234 | exec "$JAVACMD" "$@"
235 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle-kts/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if "%ERRORLEVEL%"=="0" goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
84 | exit /b 1
85 |
86 | :mainEnd
87 | if "%OS%"=="Windows_NT" endlocal
88 |
89 | :omega
90 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle/build.gradle:
--------------------------------------------------------------------------------
1 | buildscript {
2 | ext.kotlin_version = '1.6.10'
3 |
4 | repositories {
5 | mavenCentral()
6 | }
7 | dependencies {
8 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
9 | }
10 | }
11 |
12 | apply plugin: 'kotlin'
13 |
14 | repositories {
15 | mavenCentral()
16 | }
17 |
18 | dependencies {
19 | implementation "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version"
20 | implementation "org.jetbrains.kotlin:kotlin-script-runtime:$kotlin_version"
21 | }
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle/gradle.properties:
--------------------------------------------------------------------------------
1 | kotlin.code.style=official
2 | group=io.nais.test
3 | version=0.1-SNAPSHOT
4 | org.gradle.dependency.verification=off
5 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nais/salsa/7cce2b37c43c6bc656617aee6dbc08043749a39c/pkg/build/jvm/testdata/jvm/gradle/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle/gradlew:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #
4 | # Copyright © 2015-2021 the original authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | #
21 | # Gradle start up script for POSIX generated by Gradle.
22 | #
23 | # Important for running:
24 | #
25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
26 | # noncompliant, but you have some other compliant shell such as ksh or
27 | # bash, then to run this script, type that shell name before the whole
28 | # command line, like:
29 | #
30 | # ksh Gradle
31 | #
32 | # Busybox and similar reduced shells will NOT work, because this script
33 | # requires all of these POSIX shell features:
34 | # * functions;
35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»;
37 | # * compound commands having a testable exit status, especially «case»;
38 | # * various built-in commands including «command», «set», and «ulimit».
39 | #
40 | # Important for patching:
41 | #
42 | # (2) This script targets any POSIX shell, so it avoids extensions provided
43 | # by Bash, Ksh, etc; in particular arrays are avoided.
44 | #
45 | # The "traditional" practice of packing multiple parameters into a
46 | # space-separated string is a well documented source of bugs and security
47 | # problems, so this is (mostly) avoided, by progressively accumulating
48 | # options in "$@", and eventually passing that to Java.
49 | #
50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
52 | # see the in-line comments for details.
53 | #
54 | # There are tweaks for specific operating systems such as AIX, CygWin,
55 | # Darwin, MinGW, and NonStop.
56 | #
57 | # (3) This script is generated from the Groovy template
58 | # https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
59 | # within the Gradle project.
60 | #
61 | # You can find Gradle at https://github.com/gradle/gradle/.
62 | #
63 | ##############################################################################
64 |
65 | # Attempt to set APP_HOME
66 |
67 | # Resolve links: $0 may be a link
68 | app_path=$0
69 |
70 | # Need this for daisy-chained symlinks.
71 | while
72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
73 | [ -h "$app_path" ]
74 | do
75 | ls=$( ls -ld "$app_path" )
76 | link=${ls#*' -> '}
77 | case $link in #(
78 | /*) app_path=$link ;; #(
79 | *) app_path=$APP_HOME$link ;;
80 | esac
81 | done
82 |
83 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
84 |
85 | APP_NAME="Gradle"
86 | APP_BASE_NAME=${0##*/}
87 |
88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
90 |
91 | # Use the maximum available, or set MAX_FD != -1 to use that value.
92 | MAX_FD=maximum
93 |
94 | warn () {
95 | echo "$*"
96 | } >&2
97 |
98 | die () {
99 | echo
100 | echo "$*"
101 | echo
102 | exit 1
103 | } >&2
104 |
105 | # OS specific support (must be 'true' or 'false').
106 | cygwin=false
107 | msys=false
108 | darwin=false
109 | nonstop=false
110 | case "$( uname )" in #(
111 | CYGWIN* ) cygwin=true ;; #(
112 | Darwin* ) darwin=true ;; #(
113 | MSYS* | MINGW* ) msys=true ;; #(
114 | NONSTOP* ) nonstop=true ;;
115 | esac
116 |
117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
118 |
119 |
120 | # Determine the Java command to use to start the JVM.
121 | if [ -n "$JAVA_HOME" ] ; then
122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
123 | # IBM's JDK on AIX uses strange locations for the executables
124 | JAVACMD=$JAVA_HOME/jre/sh/java
125 | else
126 | JAVACMD=$JAVA_HOME/bin/java
127 | fi
128 | if [ ! -x "$JAVACMD" ] ; then
129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
130 |
131 | Please set the JAVA_HOME variable in your environment to match the
132 | location of your Java installation."
133 | fi
134 | else
135 | JAVACMD=java
136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
137 |
138 | Please set the JAVA_HOME variable in your environment to match the
139 | location of your Java installation."
140 | fi
141 |
142 | # Increase the maximum file descriptors if we can.
143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
144 | case $MAX_FD in #(
145 | max*)
146 | MAX_FD=$( ulimit -H -n ) ||
147 | warn "Could not query maximum file descriptor limit"
148 | esac
149 | case $MAX_FD in #(
150 | '' | soft) :;; #(
151 | *)
152 | ulimit -n "$MAX_FD" ||
153 | warn "Could not set maximum file descriptor limit to $MAX_FD"
154 | esac
155 | fi
156 |
157 | # Collect all arguments for the java command, stacking in reverse order:
158 | # * args from the command line
159 | # * the main class name
160 | # * -classpath
161 | # * -D...appname settings
162 | # * --module-path (only if needed)
163 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
164 |
165 | # For Cygwin or MSYS, switch paths to Windows format before running java
166 | if "$cygwin" || "$msys" ; then
167 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
168 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
169 |
170 | JAVACMD=$( cygpath --unix "$JAVACMD" )
171 |
172 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
173 | for arg do
174 | if
175 | case $arg in #(
176 | -*) false ;; # don't mess with options #(
177 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
178 | [ -e "$t" ] ;; #(
179 | *) false ;;
180 | esac
181 | then
182 | arg=$( cygpath --path --ignore --mixed "$arg" )
183 | fi
184 | # Roll the args list around exactly as many times as the number of
185 | # args, so each arg winds up back in the position where it started, but
186 | # possibly modified.
187 | #
188 | # NB: a `for` loop captures its iteration list before it begins, so
189 | # changing the positional parameters here affects neither the number of
190 | # iterations, nor the values presented in `arg`.
191 | shift # remove old arg
192 | set -- "$@" "$arg" # push replacement arg
193 | done
194 | fi
195 |
196 | # Collect all arguments for the java command;
197 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
198 | # shell script including quotes and variable substitutions, so put them in
199 | # double quotes to make sure that they get re-expanded; and
200 | # * put everything else in single quotes, so that it's not re-expanded.
201 |
202 | set -- \
203 | "-Dorg.gradle.appname=$APP_BASE_NAME" \
204 | -classpath "$CLASSPATH" \
205 | org.gradle.wrapper.GradleWrapperMain \
206 | "$@"
207 |
208 | # Use "xargs" to parse quoted args.
209 | #
210 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed.
211 | #
212 | # In Bash we could simply go:
213 | #
214 | # readarray ARGS < <( xargs -n1 <<<"$var" ) &&
215 | # set -- "${ARGS[@]}" "$@"
216 | #
217 | # but POSIX shell has neither arrays nor command substitution, so instead we
218 | # post-process each arg (as a line of input to sed) to backslash-escape any
219 | # character that might be a shell metacharacter, then use eval to reverse
220 | # that process (while maintaining the separation between arguments), and wrap
221 | # the whole thing up as a single "set" statement.
222 | #
223 | # This will of course break if any of these variables contains a newline or
224 | # an unmatched quote.
225 | #
226 |
227 | eval "set -- $(
228 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
229 | xargs -n1 |
230 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
231 | tr '\n' ' '
232 | )" '"$@"'
233 |
234 | exec "$JAVACMD" "$@"
235 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/gradle/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if "%ERRORLEVEL%"=="0" goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
84 | exit /b 1
85 |
86 | :mainEnd
87 | if "%OS%"=="Windows_NT" endlocal
88 |
89 | :omega
90 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/maven/.m2/maven-settings.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
3 |           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |           xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 https://maven.apache.org/xsd/settings-1.0.0.xsd">
5 |
6 |   <servers>
7 |     <server>
8 |       <id>github</id>
9 |       <username>${GITHUB_USERNAME}</username>
10 |       <password>${GITHUB_TOKEN}</password>
11 |     </server>
12 |   </servers>
13 | </settings>
14 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/maven/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |     <modelVersion>4.0.0</modelVersion>
5 |
6 |     <groupId>no.nav.nais.test</groupId>
7 |     <artifactId>salsa-maven-test</artifactId>
8 |     <version>0.0.1</version>
9 |     <packaging>jar</packaging>
10 |
11 |     <properties>
12 |         <java.version>11</java.version>
13 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
14 |         <google.version>2.8.6</google.version>
15 |     </properties>
16 |
17 |     <dependencies>
18 |         <dependency>
19 |             <groupId>com.google.code.gson</groupId>
20 |             <artifactId>gson</artifactId>
21 |             <version>${google.version}</version>
22 |         </dependency>
23 |     </dependencies>
24 | </project>
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/maven/target/dependency/com/google/code/gson/gson/2.8.6/gson-2.8.6.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nais/salsa/7cce2b37c43c6bc656617aee6dbc08043749a39c/pkg/build/jvm/testdata/jvm/maven/target/dependency/com/google/code/gson/gson/2.8.6/gson-2.8.6.jar
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/jvm/maven/target/dependency/com/google/code/gson/gson/maven-metadata-local.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <metadata>
3 |   <groupId>com.google.code.gson</groupId>
4 |   <artifactId>gson</artifactId>
5 |   <versioning>
6 |     <release>2.8.6</release>
7 |     <versions>
8 |       <version>2.8.6</version>
9 |     </versions>
10 |     <lastUpdated>20221116003142</lastUpdated>
11 |   </versioning>
12 | </metadata>
13 |
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/target/dependency/org/springframework/spring-core/5.3.16/spring-core-5.3.16.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nais/salsa/7cce2b37c43c6bc656617aee6dbc08043749a39c/pkg/build/jvm/testdata/target/dependency/org/springframework/spring-core/5.3.16/spring-core-5.3.16.jar
--------------------------------------------------------------------------------
/pkg/build/jvm/testdata/target/dependency/org/yaml/snakeyaml/1.26/snakeyaml-1.26.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nais/salsa/7cce2b37c43c6bc656617aee6dbc08043749a39c/pkg/build/jvm/testdata/target/dependency/org/yaml/snakeyaml/1.26/snakeyaml-1.26.jar
--------------------------------------------------------------------------------
/pkg/build/nodejs/npm.go:
--------------------------------------------------------------------------------
1 | package nodejs
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "github.com/nais/salsa/pkg/build"
7 | "github.com/nais/salsa/pkg/utils"
8 | "os"
9 | "strings"
10 | )
11 |
12 | const npmBuildFileName = "package-lock.json"
13 |
14 | type Npm struct {
15 | BuildFilePatterns []string
16 | }
17 |
18 | func BuildNpm() build.Tool {
19 | return &Npm{
20 | BuildFilePatterns: []string{npmBuildFileName},
21 | }
22 | }
23 |
24 | func (n Npm) BuildFiles() []string {
25 | return n.BuildFilePatterns
26 | }
27 |
28 | func (n Npm) ResolveDeps(workDir string) (*build.ArtifactDependencies, error) {
29 | path := fmt.Sprintf("%s/%s", workDir, npmBuildFileName)
30 | fileContent, err := os.ReadFile(path)
31 | if err != nil {
32 | return nil, fmt.Errorf("read file: %w", err)
33 | }
34 | deps, err := NpmDeps(string(fileContent))
35 | if err != nil {
36 | return nil, fmt.Errorf("error parsing deps: %w", err)
37 | }
38 | return build.ArtifactDependency(deps, path, npmBuildFileName), nil
39 | }
40 |
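// NpmDeps parses the top-level "dependencies" object of a package-lock.json (lockfile v1/v2)
// into pinned versions with decoded integrity checksums.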
41 | func NpmDeps(packageLockJsonContents string) (map[string]build.Dependency, error) {
42 | var f interface{}
43 | err := json.Unmarshal([]byte(packageLockJsonContents), &f)
44 | if err != nil {
45 | return nil, fmt.Errorf("unable to parse %s: %v", packageLockJsonContents, err)
46 | }
47 |
48 | raw := f.(map[string]interface{})
49 |
50 | trans, err := transform(raw["dependencies"].(map[string]interface{}))
51 | if err != nil {
52 | return nil, fmt.Errorf("could not derive dependencies from %s: %w", packageLockJsonContents, err)
53 | }
54 | return trans, nil
55 | }
56 |
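// transform converts each lock entry's integrity string ("<algorithm>-<base64 digest>")
// into a hex-encoded checksum via utils.DecodeDigest.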
57 | func transform(input map[string]interface{}) (map[string]build.Dependency, error) {
58 | deps := make(map[string]build.Dependency, 0)
59 | for key, value := range input {
60 | dependency := value.(map[string]interface{})
61 | integrity := fmt.Sprintf("%s", dependency["integrity"])
62 | shaDig := strings.Split(integrity, "-")
63 | decodedDigest, err := utils.DecodeDigest(shaDig[1])
64 | if err != nil {
65 | return nil, err
66 | }
67 | checksum := build.Verification(shaDig[0], decodedDigest)
68 | deps[key] = build.Dependence(key, fmt.Sprintf("%s", dependency["version"]), checksum)
69 | }
70 | return deps, nil
71 | }
72 |
--------------------------------------------------------------------------------
/pkg/build/nodejs/npm_test.go:
--------------------------------------------------------------------------------
1 | package nodejs
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/build"
5 | "github.com/nais/salsa/pkg/build/test"
6 | "testing"
7 | )
8 |
9 | func TestPackageLockJsonParsing(t *testing.T) {
10 | got, _ := NpmDeps(packageLockContents)
11 | want := map[string]build.Dependency{}
12 | want["js-tokens"] = test.Dependency("js-tokens", "4.0.0", "sha512", "45d2547e5704ddc5332a232a420b02bb4e853eef5474824ed1b7986cf84737893a6a9809b627dca02b53f5b7313a9601b690f690233a49bce0e026aeb16fcf29")
13 | want["loose-envify"] = test.Dependency("loose-envify", "1.4.0", "sha512", "972bb13c6aff59f86b95e9b608bfd472751cd7372a280226043cee918ed8e45ff242235d928ebe7d12debe5c351e03324b0edfeb5d54218e34f04b71452a0add")
14 | want["object-assign"] = test.Dependency("object-assign", "4.1.1", "sha1", "2109adc7965887cfc05cbbd442cac8bfbb360863")
15 | want["react"] = test.Dependency("react", "17.0.2", "sha512", "82784fb7be62fddabfcf7ffaabfd1ab0fefc0f4bb9f760f92f5a5deccf0ff9d724e85bbf8c978bea25552b6ddfa6d494663f158dffbeef05c0f1435c94641c6c")
16 |
17 | test.AssertEqual(t, got, want)
18 | }
19 |
20 | const packageLockContents = `
21 | {
22 | "name": "myproject",
23 | "version": "1.0.0",
24 | "lockfileVersion": 2,
25 | "requires": true,
26 | "packages": {
27 | "": {
28 | "name": "dillings",
29 | "version": "1.0.0",
30 | "license": "ISC",
31 | "dependencies": {
32 | "react": "^17.0.2"
33 | }
34 | },
35 | "node_modules/js-tokens": {
36 | "version": "4.0.0",
37 | "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
38 | "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
39 | },
40 | "node_modules/loose-envify": {
41 | "version": "1.4.0",
42 | "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
43 | "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
44 | "dependencies": {
45 | "js-tokens": "^3.0.0 || ^4.0.0"
46 | },
47 | "bin": {
48 | "loose-envify": "cli.js"
49 | }
50 | },
51 | "node_modules/object-assign": {
52 | "version": "4.1.1",
53 | "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
54 | "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
55 | "engines": {
56 | "node": ">=0.10.0"
57 | }
58 | },
59 | "node_modules/react": {
60 | "version": "17.0.2",
61 | "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz",
62 | "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==",
63 | "dependencies": {
64 | "loose-envify": "^1.1.0",
65 | "object-assign": "^4.1.1"
66 | },
67 | "engines": {
68 | "node": ">=0.10.0"
69 | }
70 | }
71 | },
72 | "dependencies": {
73 | "js-tokens": {
74 | "version": "4.0.0",
75 | "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
76 | "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
77 | },
78 | "loose-envify": {
79 | "version": "1.4.0",
80 | "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
81 | "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
82 | "requires": {
83 | "js-tokens": "^3.0.0 || ^4.0.0"
84 | }
85 | },
86 | "object-assign": {
87 | "version": "4.1.1",
88 | "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
89 | "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
90 | },
91 | "react": {
92 | "version": "17.0.2",
93 | "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz",
94 | "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==",
95 | "requires": {
96 | "loose-envify": "^1.1.0",
97 | "object-assign": "^4.1.1"
98 | }
99 | }
100 | }
101 | }`
102 |
103 | func TestBuildNpm(t *testing.T) {
104 | tests := []test.IntegrationTest{
105 | {
106 | Name: "find build file and parse output",
107 | BuildType: BuildNpm(),
108 | WorkDir: "testdata/nodejs/npm",
109 | BuildPath: "testdata/nodejs/npm/package-lock.json",
110 | Cmd: "package-lock.json",
111 | Want: test.Want{
112 | Key: "@ampproject/remapping",
113 | Version: "2.1.0",
114 | Algo: "sha512",
115 | Digest: "779472b13949ee19b0e53c38531831718de590c7bdda7f2c5c272e2cf0322001caea3f0379f0f0b469d554380e9eff919c4a2cba50c9f4d3ca40bdbb6c321dd2",
116 | },
117 | },
118 | {
119 | Name: "can't find build file",
120 | BuildType: BuildNpm(),
121 | WorkDir: "testdata/whatever",
122 | Error: true,
123 | ErrorMessage: "could not find match, reading dir open testdata/whatever: no such file or directory",
124 | },
125 | }
126 |
127 | test.Run(t, tests)
128 | }
129 |
--------------------------------------------------------------------------------
/pkg/build/nodejs/testdata/nodejs/yarn/yarn.lock:
--------------------------------------------------------------------------------
1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
2 | # yarn lockfile v1
3 |
4 | "js-tokens@^3.0.0 || ^4.0.0":
5 | version "4.0.0"
6 | resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
7 | integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
--------------------------------------------------------------------------------
/pkg/build/nodejs/yarn.go:
--------------------------------------------------------------------------------
1 | package nodejs
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "github.com/nais/salsa/pkg/build"
7 | "github.com/nais/salsa/pkg/utils"
8 | "os"
9 | "regexp"
10 | "strings"
11 | )
12 |
13 | const yarnBuildFileName = "yarn.lock"
14 |
15 | type Yarn struct {
16 | BuildFilePatterns []string
17 | }
18 |
19 | func BuildYarn() build.Tool {
20 | return &Yarn{
21 | BuildFilePatterns: []string{yarnBuildFileName},
22 | }
23 | }
24 |
25 | func (y Yarn) BuildFiles() []string {
26 | return y.BuildFilePatterns
27 | }
28 |
29 | func (y Yarn) ResolveDeps(workDir string) (*build.ArtifactDependencies, error) {
30 | path := fmt.Sprintf("%s/%s", workDir, yarnBuildFileName)
31 | fileContent, err := os.ReadFile(path)
32 |
33 | if err != nil {
34 | return nil, fmt.Errorf("read file: %w", err)
35 | }
36 |
37 | deps, err := YarnDeps(string(fileContent))
38 | if err != nil {
39 | return nil, fmt.Errorf("could not derive dependencies from %s: %w", path, err)
40 | }
41 |
42 | return build.ArtifactDependency(deps, path, yarnBuildFileName), nil
43 | }
44 |
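// YarnDeps parses a v1 yarn.lock, expecting each entry block to consist of a header line,
// a version line, a resolved line and an integrity line; it fails when the integrity line
// is missing.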
45 | func YarnDeps(yarnLockContents string) (map[string]build.Dependency, error) {
46 | deps := make(map[string]build.Dependency, 0)
47 | lines := strings.Split(yarnLockContents, "\n")
48 | blockLines := blockLineNumbers(lines)
49 | for _, startLine := range blockLines {
50 | depName := parseDependency(lines[startLine])
51 | depVersion := parseVersion(lines[startLine+1])
52 | if !strings.Contains(lines[startLine+3], "integrity") {
53 | return nil, errors.New("integrity is missing")
54 | }
55 | integrityLine := lines[startLine+3]
56 | checksum, err := buildChecksum(integrityLine)
57 | if err != nil {
58 | return nil, fmt.Errorf("building checksum %w", err)
59 | }
60 | deps[depName] = build.Dependence(depName, depVersion, checksum)
61 | }
62 | return deps, nil
63 | }
64 |
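// blockLineNumbers returns the indices of the lines that start a new lockfile entry
// (unindented lines ending with ":").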
65 | func blockLineNumbers(yarnLockLines []string) []int {
66 | var startsOfEntries []int
67 | for index, line := range yarnLockLines {
68 | if isNewEntry(line) {
69 | startsOfEntries = append(startsOfEntries, index)
70 | }
71 | }
72 | return startsOfEntries
73 | }
74 |
75 | func isNewEntry(str string) bool {
76 | return !strings.HasPrefix(str, " ") && strings.HasSuffix(str, ":")
77 | }
78 |
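// parseDependency extracts the package name from an entry header; when the header lists
// several version ranges, the name from the last one is used.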
79 | func parseDependency(depLine string) string {
80 | if len(strings.Split(depLine, ", ")) > 1 {
81 | depLine = parseName(depLine)
82 | allPossibilities := strings.Split(depLine, ", ")
83 | return lastElementInSlice(allPossibilities)
84 | } else {
85 | return parseName(depLine)
86 | }
87 | }
88 |
89 | func parseName(line string) string {
90 | regex := regexp.MustCompile(`^"?(?P<pkgname>.*)@[^~]?.*$`)
91 | matches := regex.FindStringSubmatch(line)
92 | pkgnameIndex := regex.SubexpIndex("pkgname")
93 | return matches[pkgnameIndex]
94 | }
95 |
96 | func lastElementInSlice(slice []string) string {
97 | return trim(fmt.Sprintf("%v", slice[len(slice)-1]))
98 | }
99 |
100 | func trim(line string) string {
101 | return strings.TrimPrefix(line, "\"")
102 | }
103 |
104 | func parseVersion(line string) string {
105 | regex := regexp.MustCompile(`.*"(?P<pkgversion>.*)"$`)
106 | matches := regex.FindStringSubmatch(line)
107 | pkgversionIndex := regex.SubexpIndex("pkgversion")
108 | return matches[pkgversionIndex]
109 | }
110 |
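// buildChecksum strips the " integrity " prefix, splits the remaining
// "<algorithm>-<base64 digest>" value and decodes the digest to hex.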
111 | func buildChecksum(line string) (build.CheckSum, error) {
112 | trimPrefixIntegrity := strings.TrimPrefix(line, " integrity ")
113 | fields := strings.Split(trimPrefixIntegrity, "-")
114 | decodedDigest, err := utils.DecodeDigest(fields[1])
115 | if err != nil {
116 | return build.CheckSum{}, err
117 | }
118 | return build.Verification(fields[0], decodedDigest), nil
119 | }
120 |
--------------------------------------------------------------------------------
/pkg/build/nodejs/yarn_test.go:
--------------------------------------------------------------------------------
1 | package nodejs
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/build"
5 | "github.com/nais/salsa/pkg/build/test"
6 | "github.com/stretchr/testify/assert"
7 | "testing"
8 | )
9 |
10 | func TestYarnLockParsing(t *testing.T) {
11 |
12 | got, err := YarnDeps(yarnLockContents)
13 | assert.NoError(t, err)
14 | want := map[string]build.Dependency{}
15 | want["js-tokens"] = test.Dependency("js-tokens", "4.0.0", "sha512", "45d2547e5704ddc5332a232a420b02bb4e853eef5474824ed1b7986cf84737893a6a9809b627dca02b53f5b7313a9601b690f690233a49bce0e026aeb16fcf29")
16 | want["loose-envify"] = test.Dependency("loose-envify", "1.4.0", "sha512", "972bb13c6aff59f86b95e9b608bfd472751cd7372a280226043cee918ed8e45ff242235d928ebe7d12debe5c351e03324b0edfeb5d54218e34f04b71452a0add")
17 | want["object-assign"] = test.Dependency("object-assign", "4.1.1", "sha1", "2109adc7965887cfc05cbbd442cac8bfbb360863")
18 | want["react"] = test.Dependency("react", "17.0.2", "sha512", "82784fb7be62fddabfcf7ffaabfd1ab0fefc0f4bb9f760f92f5a5deccf0ff9d724e85bbf8c978bea25552b6ddfa6d494663f158dffbeef05c0f1435c94641c6c")
19 | want["@babel/helper-annotate-as-pure"] = test.Dependency("@babel/helper-annotate-as-pure", "7.16.7", "sha512", "b3ab76c3f20f4154c0113d478ada28c11197a285fc98282db8fe75f79c03fd024f57ac188e9ba308617b26e30e0d55f664024e7f5193e834fd307119bcb3854b")
20 | want["@babel/helper-builder-binary-assignment-operator-visitor"] = test.Dependency("@babel/helper-builder-binary-assignment-operator-visitor", "7.16.7", "sha512", "0ba15d6d16b1623c15bbf81e296e19790d10df501fb6045c7529d9e7f8ec1fa0730896edbd7be1a5f91b9138584aebad640ee7097a4f47a003f73775769acc90")
21 | want["range-parser"] = test.Dependency("range-parser", "1.2.1", "sha512", "1eb82cc7ea2baa8ca09e68456ca68713a736f7a27e1d30105e8c4417a80dba944e9a6189468cb37c6ddc700bdea8206bc2bff6cb143905577f1939796a03b04a")
22 | want["webpack-sources"] = test.Dependency("webpack-sources", "1.4.3", "sha512", "9604d2dd786fd6508e2a8ed20394e3297323a52338b018cd579faad9ba9eb1b48fb391631a653a8e3b414a45fd6f8a96f3bbc322c0889543ce1216d9acc76379")
23 | want["util-deprecate"] = test.Dependency("util-deprecate", "1.0.2", "sha1", "450d4dc9fa70de732762fbd2d4a28981419a0ccf")
24 | want["unist-util-visit"] = test.Dependency("unist-util-visit", "2.0.3", "sha512", "889e3f45ccdb24c903d3bd7698692db86a66fd4e01cb815f0e89cbecdffdb404c6205e229a29233ae6a0c8c639ded9d9ab734fe8172696b1e110a01f1968e1ed")
25 | want["source-map"] = test.Dependency("source-map", "0.6.1", "sha512", "52381aa6e99695b3219018334fb624739617513e3a17488abbc4865ead1b7303f9773fe1d0f963e9e9c9aa3cf565bab697959aa989eb55bc16396332177178ee")
26 |
27 | test.AssertEqual(t, got, want)
28 | }
29 |
30 | const yarnLockContents = `
31 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
32 | # yarn lockfile v1
33 |
34 | "js-tokens@^3.0.0 || ^4.0.0":
35 | version "4.0.0"
36 | resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
37 | integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
38 |
39 | "loose-envify@^1.1.0":
40 | version "1.4.0"
41 | resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf"
42 | integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==
43 | dependencies:
44 | js-tokens "^3.0.0 || ^4.0.0"
45 |
46 | "object-assign@^4.1.1":
47 | version "4.1.1"
48 | resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
49 | integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
50 |
51 | "@babel/helper-annotate-as-pure@^7.14.5", "@babel/helper-annotate-as-pure@^7.15.4":
52 | version "7.15.4"
53 | resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.15.4.tgz#3d0e43b00c5e49fdb6c57e421601a7a658d5f835"
54 | integrity sha512-QwrtdNvUNsPCj2lfNQacsGSQvGX8ee1ttrBrcozUP2Sv/jylewBP/8QFe6ZkBsC8T/GYWonNAWJV4aRR9AL2DA==
55 | dependencies:
56 | "@babel/types" "^7.15.4"
57 |
58 | "@babel/helper-annotate-as-pure@^7.16.7":
59 | version "7.16.7"
60 | resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz#bb2339a7534a9c128e3102024c60760a3a7f3862"
61 | integrity sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw==
62 | dependencies:
63 | "@babel/types" "^7.16.7"
64 |
65 | "@babel/helper-builder-binary-assignment-operator-visitor@^7.14.5":
66 | version "7.15.4"
67 | resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.15.4.tgz#21ad815f609b84ee0e3058676c33cf6d1670525f"
68 | integrity sha512-P8o7JP2Mzi0SdC6eWr1zF+AEYvrsZa7GSY1lTayjF5XJhVH0kjLYUZPvTMflP7tBgZoe9gIhTa60QwFpqh/E0Q==
69 | dependencies:
70 | "@babel/helper-explode-assignable-expression" "^7.15.4"
71 | "@babel/types" "^7.15.4"
72 |
73 | "@babel/helper-builder-binary-assignment-operator-visitor@^7.16.7":
74 | version "7.16.7"
75 | resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz#38d138561ea207f0f69eb1626a418e4f7e6a580b"
76 | integrity sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA==
77 | dependencies:
78 | "@babel/helper-explode-assignable-expression" "^7.16.7"
79 | "@babel/types" "^7.16.7"
80 |
81 | "@react@>=0.5.1, react@^17.0.2":
82 | version "17.0.2"
83 | resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037"
84 | integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==
85 | dependencies:
86 | loose-envify "^1.1.0"
87 | object-assign "^4.1.1"
88 |
89 | range-parser@^1.2.1, range-parser@~1.2.1:
90 | version "1.2.1"
91 | resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
92 | integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
93 |
94 | webpack-sources@^1.1.0, webpack-sources@^1.4.3:
95 | version "1.4.3"
96 | resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933"
97 | integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==
98 | dependencies:
99 | source-list-map "^2.0.0"
100 | source-map "~0.6.1"
101 |
102 | util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
103 | version "1.0.2"
104 | resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
105 | integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
106 |
107 | unist-util-visit@2.0.3, unist-util-visit@^2.0.0, unist-util-visit@^2.0.1, unist-util-visit@^2.0.2, unist-util-visit@^2.0.3:
108 | version "2.0.3"
109 | resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-2.0.3.tgz#c3703893146df47203bb8a9795af47d7b971208c"
110 | integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==
111 | dependencies:
112 | "@types/unist" "^2.0.0"
113 | unist-util-is "^4.0.0"
114 | unist-util-visit-parents "^3.0.0"
115 |
116 | source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1:
117 | version "0.6.1"
118 | resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
119 | integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
120 | `
121 |
122 | func TestBuildYarn(t *testing.T) {
123 | tests := []test.IntegrationTest{
124 | {
125 | Name: "find build file and parse output",
126 | BuildType: BuildYarn(),
127 | WorkDir: "testdata/nodejs/yarn",
128 | BuildPath: "testdata/nodejs/yarn/yarn.lock",
129 | Cmd: "yarn.lock",
130 | Want: test.Want{
131 | Key: "js-tokens",
132 | Version: "4.0.0",
133 | Algo: "sha512",
134 | Digest: "45d2547e5704ddc5332a232a420b02bb4e853eef5474824ed1b7986cf84737893a6a9809b627dca02b53f5b7313a9601b690f690233a49bce0e026aeb16fcf29",
135 | },
136 | },
137 | {
138 | Name: "cant find build file",
139 | BuildType: BuildYarn(),
140 | WorkDir: "testdata/whatever",
141 | Error: true,
142 | ErrorMessage: "could not find match, reading dir open testdata/whatever: no such file or directory",
143 | },
144 | }
145 |
146 | test.Run(t, tests)
147 | }
148 |
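Note that the "want" digests above are lowercase hex, while the yarn.lock fixture stores each checksum as an "integrity" field of the form "<algorithm>-<base64>". A minimal sketch, not part of the repository (the real conversion lives in yarn.go), of how the two encodings relate:

// Minimal sketch, not part of the repository: illustrates how a yarn.lock
// integrity value ("sha512-<base64>") corresponds to the hex digests asserted above.
package main

import (
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"strings"
)

func integrityToHex(integrity string) (algo, digest string, err error) {
	parts := strings.SplitN(integrity, "-", 2)
	if len(parts) != 2 {
		return "", "", fmt.Errorf("unexpected integrity format: %s", integrity)
	}
	raw, err := base64.StdEncoding.DecodeString(parts[1])
	if err != nil {
		return "", "", err
	}
	return parts[0], hex.EncodeToString(raw), nil
}

func main() {
	// js-tokens from the yarn.lock fixture above.
	algo, digest, _ := integrityToHex("sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==")
	fmt.Println(algo, digest) // sha512 45d2547e5704... (matches want["js-tokens"])
}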
--------------------------------------------------------------------------------
/pkg/build/php/composer.go:
--------------------------------------------------------------------------------
1 | package php
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "github.com/nais/salsa/pkg/build"
7 | "os"
8 | )
9 |
10 | const composerLockFileName = "composer.lock"
11 |
12 | type Composer struct {
13 | BuildFilePatterns []string
14 | }
15 |
16 | func (c Composer) BuildFiles() []string {
17 | return c.BuildFilePatterns
18 | }
19 |
20 | func BuildComposer() build.Tool {
21 | return &Composer{
22 | BuildFilePatterns: []string{composerLockFileName},
23 | }
24 | }
25 |
26 | func (c Composer) ResolveDeps(workDir string) (*build.ArtifactDependencies, error) {
27 | path := fmt.Sprintf("%s/%s", workDir, composerLockFileName)
28 | fileContent, err := os.ReadFile(path)
29 | if err != nil {
30 | return nil, fmt.Errorf("read file: %w\n", err)
31 | }
32 | deps, err := ComposerDeps(string(fileContent))
33 | if err != nil {
34 | return nil, fmt.Errorf("scan: %v\n", err)
35 | }
36 | return build.ArtifactDependency(deps, path, composerLockFileName), nil
37 | }
38 |
39 | type dist struct {
40 | Shasum string `json:"shasum"`
41 | }
42 |
43 | type dep struct {
44 | Name string `json:"name"`
45 | Version string `json:"version"`
46 | Dist dist `json:"dist"`
47 | }
48 |
49 | type composerLock struct {
50 | Dependencies []dep `json:"packages"`
51 | }
52 |
53 | func ComposerDeps(composerLockJsonContents string) (map[string]build.Dependency, error) {
54 | var lock composerLock
55 | err := json.Unmarshal([]byte(composerLockJsonContents), &lock)
56 | if err != nil {
57 | return nil, fmt.Errorf("unable to parse composer.lock: %v", err)
58 | }
59 |
60 | return transform(lock.Dependencies), nil
61 | }
62 |
63 | func transform(dependencies []dep) map[string]build.Dependency {
64 | deps := make(map[string]build.Dependency, 0)
65 | for _, d := range dependencies {
66 | checksum := build.Verification(build.AlgorithmSHA1, d.Dist.Shasum)
67 | deps[d.Name] = build.Dependence(d.Name, d.Version, checksum)
68 | }
69 | return deps
70 | }
71 |
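A short usage sketch of ComposerDeps (not from the repository): only the fields declared in the structs above ("name", "version", "dist.shasum") are read; the lock content and shasum below are illustrative placeholders.

package main

import (
	"fmt"

	"github.com/nais/salsa/pkg/build/php"
)

// Trimmed-down, illustrative composer.lock content (placeholder values).
const lock = `{
  "packages": [
    {
      "name": "monolog/monolog",
      "version": "2.3.5",
      "dist": { "shasum": "0000000000000000000000000000000000000000" }
    }
  ]
}`

func main() {
	deps, err := php.ComposerDeps(lock)
	if err != nil {
		panic(err)
	}
	// Each entry is keyed by package name and carries the dist shasum as a sha1 checksum.
	fmt.Printf("%+v\n", deps["monolog/monolog"])
}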
--------------------------------------------------------------------------------
/pkg/build/test/buildtool_test_util.go:
--------------------------------------------------------------------------------
1 | package test
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/build"
5 | "github.com/stretchr/testify/assert"
6 | "reflect"
7 | "testing"
8 | )
9 |
10 | type Want struct {
11 | Key string
12 | Version string
13 | Algo string
14 | Digest string
15 | }
16 |
17 | type IntegrationTest struct {
18 | Name string
19 | BuildType build.Tool
20 | WorkDir string
21 | BuildPath string
22 | Cmd string
23 | Want Want
24 | Error bool
25 | ErrorMessage string
26 | }
27 |
28 | func Run(t *testing.T, tests []IntegrationTest) {
29 | for _, test := range tests {
30 | test.integrationTest(t)
31 | }
32 | }
33 |
34 | func (in IntegrationTest) integrationTest(t *testing.T) {
35 | t.Run(in.Name, func(t *testing.T) {
36 | tools := build.Tools{
37 | Tools: []build.Tool{in.BuildType},
38 | }
39 |
40 | // Check that one known dependency is present among the parsed dependencies.
41 | expected := map[string]build.Dependency{
42 | in.Want.Key: Dependency(in.Want.Key, in.Want.Version, in.Want.Algo, in.Want.Digest),
43 | }
44 |
45 | deps, err := tools.DetectDeps(in.WorkDir)
46 | if in.Error {
47 | assert.EqualError(t, err, in.ErrorMessage)
48 | } else {
49 | assert.NoError(t, err)
50 | assert.NotNil(t, deps)
51 | assert.Equal(t, in.Cmd, deps.CmdFlags())
52 | assert.NotEmpty(t, deps)
53 | assert.Equal(t, expected[in.Want.Key], deps.RuntimeDeps[in.Want.Key])
54 | }
55 | })
56 | }
57 |
58 | func Dependency(coordinates, version, algo, checksum string) build.Dependency {
59 | return build.Dependence(coordinates, version,
60 | build.Verification(algo, checksum),
61 | )
62 | }
63 |
64 | func AssertEqual(t *testing.T, got, want map[string]build.Dependency) {
65 | if !reflect.DeepEqual(got, want) {
66 | t.Errorf("got %q, wanted %q", got, want)
67 | }
68 | }
69 |
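A hypothetical example of how a build-tool package wires this helper into a table-driven test, mirroring TestBuildYarn above; the actual composer_test.go in the repository may differ, and the expected values are placeholders.

package php

import (
	"testing"

	"github.com/nais/salsa/pkg/build/test"
)

// TestBuildComposerSketch is an illustrative sketch, not the repository's real test.
func TestBuildComposerSketch(t *testing.T) {
	tests := []test.IntegrationTest{
		{
			Name:      "find build file and parse output",
			BuildType: BuildComposer(),
			WorkDir:   "testdata",
			BuildPath: "testdata/composer.lock",
			Cmd:       "composer.lock",
			Want: test.Want{
				Key:     "monolog/monolog",
				Version: "2.3.5",
				Algo:    "sha1",
				Digest:  "0000000000000000000000000000000000000000", // placeholder
			},
		},
	}
	test.Run(t, tests)
}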
--------------------------------------------------------------------------------
/pkg/build/tool.go:
--------------------------------------------------------------------------------
1 | package build
2 |
3 | import (
4 | "fmt"
5 | "os"
6 |
7 | log "github.com/sirupsen/logrus"
8 | )
9 |
10 | const (
11 | AlgorithmSHA256 = "sha256"
12 | AlgorithmSHA1 = "sha1"
13 | )
14 |
15 | type Tool interface {
16 | BuildFiles() []string
17 | ResolveDeps(workDir string) (*ArtifactDependencies, error)
18 | }
19 |
20 | type Tools struct {
21 | Tools []Tool
22 | }
23 |
24 | func (t Tools) DetectDeps(workDir string) (*ArtifactDependencies, error) {
25 | log.Info("search for build files\n")
26 | for _, tool := range t.Tools {
27 | foundMatch, buildFile, err := match(tool, workDir)
28 | if err != nil {
29 | return nil, fmt.Errorf("could not find match, %v", err)
30 | }
31 |
32 | if foundMatch {
33 | log.Infof("found build type '%s'\n", buildFile)
34 | deps, err := tool.ResolveDeps(workDir)
35 | if err != nil {
36 | return nil, fmt.Errorf("could not resolve deps: %v", err)
37 | }
38 |
39 | return deps, nil
40 | }
41 | }
42 |
43 | return nil, nil
44 | }
45 |
46 | func match(t Tool, workDir string) (bool, string, error) {
47 | for _, file := range t.BuildFiles() {
48 | buildFile, err := findBuildFile(workDir, file)
49 |
50 | if err != nil {
51 | return false, "", err
52 | }
53 |
54 | if file == buildFile && len(buildFile) != 0 {
55 | return true, buildFile, nil
56 | }
57 | }
58 | return false, "", nil
59 | }
60 |
61 | func findBuildFile(root, pattern string) (string, error) {
62 | var result = ""
63 | files, err := os.ReadDir(root)
64 | if err != nil {
65 | return "", fmt.Errorf("reading dir %v", err)
66 | }
67 |
68 | for _, file := range files {
69 | if file.Name() == pattern {
70 | result = file.Name()
71 | break
72 | }
73 | }
74 | return result, nil
75 | }
76 |
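A hypothetical Tool implementation (not in the repository) illustrating the contract above: BuildFiles lists the candidate file names consulted by match/findBuildFile, ResolveDeps only runs once one of them is found in workDir, and DetectDeps returns (nil, nil) when nothing matches.

package main

import (
	"fmt"

	"github.com/nais/salsa/pkg/build"
)

// pipTool is a hypothetical build tool used only for illustration.
type pipTool struct{}

func (p pipTool) BuildFiles() []string { return []string{"requirements.txt"} }

func (p pipTool) ResolveDeps(workDir string) (*build.ArtifactDependencies, error) {
	// A real implementation would parse workDir/requirements.txt here.
	return &build.ArtifactDependencies{}, nil
}

func main() {
	tools := build.Tools{Tools: []build.Tool{pipTool{}}}
	deps, err := tools.DetectDeps(".")
	if err != nil {
		panic(err)
	}
	// DetectDeps returns (nil, nil) when none of the build files are present in the directory.
	fmt.Println("matched a build file:", deps != nil)
}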
--------------------------------------------------------------------------------
/pkg/clone/clone.go:
--------------------------------------------------------------------------------
1 | package clone
2 |
3 | import (
4 | "fmt"
5 | "github.com/go-git/go-git/v5/plumbing/transport"
6 | "github.com/go-git/go-git/v5/plumbing/transport/http"
7 | log "github.com/sirupsen/logrus"
8 | "os"
9 |
10 | "github.com/go-git/go-git/v5"
11 | )
12 |
13 | const (
14 | GithubUrl = "https://github.com"
15 | )
16 |
17 | func Repo(owner, repo, path, token string) error {
18 | auth := transport.AuthMethod(nil)
19 | if token != "" {
20 | auth = &http.BasicAuth{
21 | Username: "github",
22 | Password: token,
23 | }
24 | }
25 | repoUrl := fmt.Sprintf("%s/%s/%s", GithubUrl, owner, repo)
26 | log.Printf("cloning repo %s", repoUrl)
27 | _, err := git.PlainClone(path, false, &git.CloneOptions{
28 | Auth: auth,
29 | URL: repoUrl,
30 | Progress: os.Stdout,
31 | })
32 |
33 | if err != nil {
34 | return fmt.Errorf("could not clone repo %s, %v", repoUrl, err)
35 | }
36 | return nil
37 | }
38 |
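A minimal usage sketch (not from the repository): cloning a public repository into a local path; pass a token for private repositories.

package main

import "github.com/nais/salsa/pkg/clone"

func main() {
	// Clones https://github.com/nais/salsa into ./tmp/salsa; the empty token means anonymous access.
	if err := clone.Repo("nais", "salsa", "tmp/salsa", ""); err != nil {
		panic(err)
	}
}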
--------------------------------------------------------------------------------
/pkg/commands/attest.go:
--------------------------------------------------------------------------------
1 | package commands
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "github.com/go-jose/go-jose/v3/jwt"
7 | "os"
8 | "strings"
9 |
10 | "github.com/nais/salsa/pkg/utils"
11 | log "github.com/sirupsen/logrus"
12 | "github.com/spf13/cobra"
13 | "github.com/spf13/viper"
14 | )
15 |
16 | type AttestOptions struct {
17 | Key string `mapstructure:"key"`
18 | IdentityToken string `mapstructure:"identity-token"`
19 | NoUpload bool `mapstructure:"no-upload"`
20 | RekorURL string `mapstructure:"rekor-url"`
21 | PredicateFile string `mapstructure:"predicate"`
22 | PredicateType string `mapstructure:"type"`
23 | }
24 |
25 | var verify bool
26 |
27 | type AttestCmd interface {
28 | Run(args []string, runner utils.CmdRunner) error
29 | }
30 |
31 | var attestCmd = &cobra.Command{
32 | Use: "attest",
33 | Short: "sign and upload in-toto attestation",
34 | RunE: func(cmd *cobra.Command, args []string) error {
35 | var options AttestOptions
36 | err := viper.Unmarshal(&options)
37 | if err != nil {
38 | return err
39 | }
40 |
41 | _, err = options.Run(args, utils.ExecCmd{})
42 | if err != nil {
43 | return err
44 | }
45 | return nil
46 | },
47 | }
48 |
49 | func (o AttestOptions) Run(args []string, runner utils.CmdRunner) (string, error) {
50 | if PathFlags.Repo == "" {
51 | return "", errors.New("repo name must be specified")
52 | }
53 |
54 | if o.PredicateFile == "" {
55 | file := utils.ProvenanceFile(PathFlags.Repo)
56 | log.Infof("no predicate specified, using default pattern %s", file)
57 | o.PredicateFile = file
58 | }
59 |
60 | workDir := PathFlags.WorkDir()
61 | s := utils.StartSpinner(fmt.Sprintf("finished attestation for %s\n", PathFlags.Repo))
62 | defer s.Stop()
63 | filePath := fmt.Sprintf("%s/%s.%s", workDir, PathFlags.Repo, "att")
64 | // TODO: could be a subcommand e.g bin/salsa attest verify
65 | if verify {
66 | cmd := o.verifyCmd(args, runner)
67 | out, err := cmd.Run()
68 | if err != nil {
69 | return "", err
70 | }
71 | docs := strings.Split(out, "\n")
72 | if len(docs) > 0 {
73 | // TODO: fix so that we don't have to make this assumption
74 | // remove the last line, which is a newline
75 | docs := docs[:len(docs)-1]
76 | if len(docs) == 0 {
77 | return "", fmt.Errorf("unexpected output from cosign command: %s", out)
78 | }
79 |
80 | doc := docs[len(docs)-1]
81 |
82 | err = os.WriteFile(filePath, []byte(doc), os.FileMode(0755))
83 | if err != nil {
84 | return "", fmt.Errorf("could not write file %s %w", filePath, err)
85 | }
86 |
87 | err := os.WriteFile(fmt.Sprintf("%s/%s.%s", workDir, PathFlags.Repo, "raw.txt"), []byte(out), os.FileMode(0755))
88 | if err != nil {
89 | return "", fmt.Errorf("could not write file %s %w", workDir, err)
90 | }
91 | } else {
92 | log.Infof("no attestations found from cosign verify-attest command")
93 | }
94 | return out, nil
95 | } else {
96 | cmd := o.attestCmd(args, runner)
97 | out, err := cmd.Run()
98 |
99 | if err != nil {
100 | return "", err
101 | }
102 | if o.NoUpload {
103 | err = os.WriteFile(filePath, []byte(out), os.FileMode(0755))
104 | if err != nil {
105 | return "", fmt.Errorf("could not write file %s %w", filePath, err)
106 | }
107 | }
108 | return out, nil
109 | }
110 | }
111 |
112 | // TODO: Verifying keyless requires the use of the flags --certificate-identity and --certificate-oidc-issuer to Cosign. These flags
113 | // will require input, such as the identity (email/DNS/IP) and the issuer used for receiving the identity token. This will probably be
114 | // Google for all salsa users.
115 | func (o AttestOptions) verifyCmd(a []string, runner utils.CmdRunner) utils.Cmd {
116 | return utils.Cmd{
117 | Name: "cosign",
118 | SubCmd: "verify-attestation",
119 | Flags: o.verifyFlags(),
120 | Args: a,
121 | WorkDir: PathFlags.WorkDir(),
122 | Runner: runner,
123 | }
124 | }
125 |
126 | func (o AttestOptions) verifyFlags() []string {
127 | if o.Key != "" {
128 | return []string{
129 | "--key", o.Key,
130 | "--type", o.PredicateType,
131 | }
132 | }
133 | return []string{
134 | "--type",
135 | o.PredicateType,
136 | }
137 | }
138 |
139 | func (o AttestOptions) attestCmd(a []string, runner utils.CmdRunner) utils.Cmd {
140 | flags, err := o.attestFlags()
141 | if err != nil {
142 | log.Fatal(err)
143 | }
144 |
145 | return utils.Cmd{
146 | Name: "cosign",
147 | SubCmd: "attest",
148 | Flags: flags,
149 | Args: a,
150 | WorkDir: PathFlags.WorkDir(),
151 | Runner: runner,
152 | }
153 | }
154 |
155 | func (o AttestOptions) attestFlags() ([]string, error) {
156 | var flags []string
157 |
158 | if o.Key != "" {
159 | flags = []string{
160 | "--key", o.Key,
161 | }
162 | return append(flags, o.defaultAttestFlags()...), nil
163 | }
164 |
165 | if o.IdentityToken == "" || os.Getenv("COSIGN_EXPERIMENTAL") == "" {
166 | return nil, fmt.Errorf("identity token must be specified with 'COSIGN_EXPERIMENTAL' enabled")
167 | }
168 |
169 | _, err := jwt.ParseSigned(o.IdentityToken)
170 | if err != nil {
171 | return nil, fmt.Errorf("invalid identity token: %w", err)
172 | }
173 |
174 | log.Infof("no key specified, using cosign expriemental keyless mode")
175 | flags = []string{
176 | "--identity-token", o.IdentityToken,
177 | }
178 |
179 | return append(flags, o.defaultAttestFlags()...), nil
180 | }
181 |
182 | func (o AttestOptions) defaultAttestFlags() []string {
183 | flags := []string{
184 | "--predicate", o.PredicateFile,
185 | "--type", o.PredicateType,
186 | "--rekor-url", o.RekorURL,
187 | }
188 |
189 | if !o.NoUpload {
190 | // Flag must be set to automatically upload to the default transparency log
191 | flags = append(flags, "--yes")
192 | } else {
193 | flags = append(flags, "--no-upload")
194 | }
195 |
196 | return flags
197 | }
198 |
199 | func init() {
200 | rootCmd.AddCommand(attestCmd)
201 | attestCmd.Flags().String("key", "",
202 | "path to the private key file, KMS URI or Kubernetes Secret")
203 | attestCmd.Flags().String("identity-token", "",
204 | "use short lived secrets for cosign keyless authentication")
205 | attestCmd.Flags().BoolVar(&verify, "verify", false, "if true, verifies attestations - default is false")
206 | attestCmd.Flags().Bool("no-upload", false,
207 | "do not upload the generated attestation")
208 | attestCmd.Flags().String("rekor-url", "https://rekor.sigstore.dev",
209 | "address of transparency log")
210 | attestCmd.Flags().String("predicate", "",
211 | "the predicate file used for attestation")
212 | attestCmd.Flags().String("type", "slsaprovenance",
213 | "specify a predicate type (slsaprovenance|link|spdx|custom) or an URI (default \"slsaprovenance\")\n")
214 | err := viper.BindPFlags(attestCmd.Flags())
215 | if err != nil {
216 | log.Errorf("setting viper flag: %v", err)
217 | }
218 | }
219 |
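As a sketch of the flag handling above (same package, illustrative only): with a key set, attestFlags produces the keyed cosign invocation asserted in attest_test.go below; without a key it switches to --identity-token and requires COSIGN_EXPERIMENTAL to be set.

package commands

import "fmt"

// exampleAttestFlags is an illustrative helper, not part of the CLI.
func exampleAttestFlags() {
	o := AttestOptions{
		Key:           "cosign.key",
		PredicateFile: "salsa.provenance",
		PredicateType: "slsaprovenance",
		RekorURL:      "https://rekor.sigstore.dev",
	}
	flags, _ := o.attestFlags()
	// Prints: [--key cosign.key --predicate salsa.provenance --type slsaprovenance --rekor-url https://rekor.sigstore.dev --yes]
	fmt.Println(flags)
}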
--------------------------------------------------------------------------------
/pkg/commands/attest_test.go:
--------------------------------------------------------------------------------
1 | package commands
2 |
3 | import (
4 | "flag"
5 | "fmt"
6 | "os"
7 | "os/exec"
8 | "strings"
9 | "testing"
10 |
11 | "github.com/nais/salsa/pkg/utils"
12 | "github.com/stretchr/testify/assert"
13 | )
14 |
15 | type fakeRunner struct {
16 | envVar string
17 | cmd string
18 | }
19 |
20 | func TestAttestCosignCommand(t *testing.T) {
21 | PathFlags.Repo = "."
22 | PathFlags.Remote = false
23 | PathFlags.RepoDir = "."
24 | o := AttestOptions{
25 | Key: "mykey",
26 | NoUpload: false,
27 | RekorURL: "http://rekor.example.com",
28 | PredicateFile: "file.json",
29 | PredicateType: "slsaprovenance",
30 | }
31 |
32 | runner := fakeRunner{
33 | envVar: "GO_TEST_DRYRUN",
34 | cmd: "-test.run=TestDryRunCmd",
35 | }
36 |
37 | out, err := o.Run([]string{"image"}, runner)
38 | assert.NoError(t, err)
39 | expectedCmd := "[cosign attest --key mykey --predicate file.json --type slsaprovenance --rekor-url http://rekor.example.com --yes image]\n"
40 | assert.Equal(t, expectedCmd, out)
41 | }
42 |
43 | func TestAttestVerifySuccess(t *testing.T) {
44 | workDir, err := os.MkdirTemp("testdata", "output")
45 | assert.NoError(t, err)
46 | defer os.RemoveAll(workDir)
47 |
48 | parts := strings.Split(workDir, "/")
49 | PathFlags.Repo = parts[1]
50 | PathFlags.Remote = false
51 | PathFlags.RepoDir = parts[0]
52 |
53 | o := AttestOptions{
54 | Key: "mykey",
55 | }
56 | verify = true
57 | runner := fakeRunner{
58 | envVar: "GO_TEST_ATTEST_VERIFY_OUTPUT",
59 | cmd: "-test.run=TestAttestVerifyOutput",
60 | }
61 |
62 | _, err = o.Run([]string{"image"}, runner)
63 | assert.NoError(t, err)
64 | }
65 |
66 | func TestDryRunCmd(t *testing.T) {
67 | if os.Getenv("GO_TEST_DRYRUN") != "1" {
68 | return
69 | }
70 | fmt.Printf("%s\n", flag.Args())
71 | os.Exit(0)
72 | }
73 |
74 | func TestAttestVerifyOutput(t *testing.T) {
75 | if os.Getenv("GO_TEST_ATTEST_VERIFY_OUTPUT") != "1" {
76 | return
77 | }
78 | path := "../cosign-verify-output.txt"
79 | output, err := os.ReadFile(path)
80 |
81 | if err != nil {
82 | fmt.Print("fail")
83 | t.Fatalf("could not read testdata file: %s", err)
84 | }
85 | fmt.Printf("%s", output)
86 | os.Exit(0)
87 | }
88 |
89 | func (r fakeRunner) CreateCmd() utils.CreateCmd {
90 | return func(command string, args ...string) *exec.Cmd {
91 | cs := []string{r.cmd, "--", command}
92 | cs = append(cs, args...)
93 | cmd := exec.Command(os.Args[0], cs...)
94 | cmd.Env = []string{r.envVar + "=1"}
95 | return cmd
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/pkg/commands/clone.go:
--------------------------------------------------------------------------------
1 | package commands
2 |
3 | import (
4 | "errors"
5 | "github.com/nais/salsa/pkg/clone"
6 | log "github.com/sirupsen/logrus"
7 | "github.com/spf13/cobra"
8 | "github.com/spf13/viper"
9 | )
10 |
11 | var owner string
12 |
13 | var cloneCmd = &cobra.Command{
14 | Use: "clone",
15 | Short: "clones the given project into user defined path",
16 | RunE: func(cmd *cobra.Command, args []string) error {
17 | cmd.SilenceErrors = true
18 | if PathFlags.Repo == "" || owner == "" {
19 | return errors.New("repo and owner must be specified")
20 | }
21 |
22 | path := PathFlags.WorkDir()
23 | log.Infof("prepare to checkout %s into path %s ...", PathFlags.Repo, path)
24 | err := clone.Repo(owner, PathFlags.Repo, path, Auth.GithubToken)
25 | if err != nil {
26 | return err
27 | }
28 | return nil
29 | },
30 | }
31 |
32 | func init() {
33 | rootCmd.AddCommand(cloneCmd)
34 | cloneCmd.Flags().StringVar(&owner, "owner", "", "owner of the repo")
35 | err := viper.BindPFlags(cloneCmd.Flags())
36 | if err != nil {
37 | log.Errorf("setting viper flag: %v", err)
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/pkg/commands/find.go:
--------------------------------------------------------------------------------
1 | package commands
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "github.com/nais/salsa/pkg/dsse"
7 | "github.com/nais/salsa/pkg/intoto"
8 | "os"
9 | "path/filepath"
10 | "strings"
11 |
12 | log "github.com/sirupsen/logrus"
13 | "github.com/spf13/cobra"
14 | "github.com/spf13/viper"
15 | )
16 |
17 | var artifact string
18 | var findCmd = &cobra.Command{
19 | Use: "find",
20 | Short: "find artifact from attestations",
21 | RunE: func(cmd *cobra.Command, args []string) error {
22 | if len(args) == 1 {
23 | artifact = args[0]
24 | }
25 |
26 | if artifact == "" {
27 | return errors.New("missing artifact")
28 | }
29 |
30 | path := PathFlags.RepoDir
31 | dirs, err := os.ReadDir(path)
32 | if err != nil {
33 | return fmt.Errorf("could not read dir %w", err)
34 | }
35 |
36 | for _, dir := range dirs {
37 | files, err := os.ReadDir(fmt.Sprintf("./%s/%s", path, dir.Name()))
38 | if err != nil {
39 | return fmt.Errorf("could not read dir %w", err)
40 | }
41 |
42 | for _, file := range files {
43 | var attFilePath = fmt.Sprintf("./%s/%s/%s", path, dir.Name(), file.Name())
44 |
45 | if ext := filepath.Ext(file.Name()); ext != ".att" {
46 | continue
47 | }
48 |
49 | fileContents, err := os.ReadFile(attFilePath)
50 | if err != nil {
51 | return fmt.Errorf("read .att file content %s, %w", attFilePath, err)
52 | }
53 |
54 | provenance, err := dsse.ParseEnvelope(fileContents)
55 | if err != nil {
56 | return fmt.Errorf("could not read file %s, %w", attFilePath, err)
57 | }
58 | result := intoto.FindMaterials(provenance.Predicate.Materials, artifact)
59 | app := strings.Split(file.Name(), ".")[0]
60 |
61 | if len(result) == 0 {
62 | log.Infof("no dependcies where found in app %s", app)
63 | } else {
64 | log.Infof("found dependency(ies) in app %s:", app)
65 | for _, f := range result {
66 | log.Infof("-uri: %s", f.URI)
67 | for k, d := range f.Digest {
68 | log.Infof("--digest: %s:%s", k, d)
69 | }
70 | }
71 | }
72 | }
73 | }
74 | return nil
75 | },
76 | }
77 |
78 | func init() {
79 | rootCmd.AddCommand(findCmd)
80 | findCmd.Flags().StringVar(&artifact, "artifact", "", "artifact to search for")
81 | err := viper.BindPFlags(findCmd.Flags())
82 | if err != nil {
83 | log.Errorf("setting viper flag: %v", err)
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/pkg/commands/root.go:
--------------------------------------------------------------------------------
1 | package commands
2 |
3 | import (
4 | "fmt"
5 | "os"
6 | "strings"
7 |
8 | log "github.com/sirupsen/logrus"
9 | "github.com/spf13/cobra"
10 | "github.com/spf13/pflag"
11 |
12 | "github.com/spf13/viper"
13 | )
14 |
15 | const (
16 | envVarPrefix = "SALSA"
17 | cmdName = "salsa"
18 | // defaultRepoDir = "tmp"
19 | )
20 |
21 | type RootFlags struct {
22 | Repo string
23 | RepoDir string
24 | SubDir string
25 | Remote bool
26 | }
27 |
28 | type Principal struct {
29 | GithubToken string
30 | }
31 |
32 | type ClientInformation struct {
33 | Version string
34 | Commit string
35 | Date string
36 | BuiltBy string
37 | }
38 |
39 | var (
40 | cfgFile string
41 | PathFlags *RootFlags
42 | Auth *Principal
43 | Client *ClientInformation
44 | )
45 |
46 | // rootCmd represents the base command when called without any subcommands
47 | var rootCmd = &cobra.Command{
48 | Use: cmdName,
49 | Short: "Command-line tool for SLSA (SALSA)",
50 | Long: `Scan files and dependencies, sign them and upload them`,
51 | PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
52 | return initConfig(cmd)
53 | },
54 | }
55 |
56 | func (r RootFlags) WorkDir() string {
57 | if r.Remote {
58 | current, _ := os.Getwd()
59 | return r.withSubDir(current)
60 | }
61 | return r.withSubDir(fmt.Sprintf("%s/%s", r.RepoDir, r.Repo))
62 | }
63 |
64 | func (r RootFlags) withSubDir(current string) string {
65 | if r.SubDir == "" {
66 | return current
67 | }
68 | return fmt.Sprintf("%s/%s", current, r.SubDir)
69 | }
70 |
71 | // Execute adds all child commands to the root command and sets flags appropriately.
72 | // This is called by main.main(). It only needs to happen once to the rootCmd.
73 | func Execute(version, commit, date, builtBy string) {
74 | Client = &ClientInformation{
75 | Version: version,
76 | Commit: commit,
77 | Date: date,
78 | BuiltBy: builtBy,
79 | }
80 | cobra.CheckErr(rootCmd.Execute())
81 | }
82 |
83 | func init() {
84 | PathFlags = &RootFlags{}
85 | Auth = &Principal{}
86 | rootCmd.PersistentFlags().StringVar(&PathFlags.Repo, "repo", "", "name of git repo")
87 | rootCmd.PersistentFlags().StringVar(&PathFlags.RepoDir, "repoDir", "tmp", "path to folder for cloned projects")
88 | rootCmd.PersistentFlags().StringVar(&PathFlags.SubDir, "subDir", "", "sub directory to scan when the build file is not in the root of the working directory")
89 | rootCmd.PersistentFlags().BoolVar(&PathFlags.Remote, "remote-run", false, "remote runs use the current working directory instead of repoDir (can be removed once containers are introduced)")
90 | rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/."+cmdName+".yaml)")
91 | rootCmd.PersistentFlags().StringVar(&Auth.GithubToken, "token", "", "GitHub token or PAT, used when cloning a private repo or a repo with private dependencies")
92 | }
93 |
94 | // initConfig reads in config file and ENV variables if set.
95 | func initConfig(cmd *cobra.Command) error {
96 | v := viper.New()
97 | if cfgFile != "" {
98 | // Use config file from the flag.
99 | v.SetConfigFile(cfgFile)
100 | } else {
101 | // Find home directory.
102 | home, err := os.UserHomeDir()
103 | cobra.CheckErr(err)
104 |
105 | // Search config in home directory with name ".salsa" (without extension).
106 | v.AddConfigPath(home)
107 | v.SetConfigType("yaml")
108 | v.SetConfigName("." + cmdName)
109 | }
110 |
111 | // If a config file is found, read it in.
112 | if err := v.ReadInConfig(); err == nil {
113 | log.Infof("using config file: %s", v.ConfigFileUsed())
114 | }
115 |
116 | v.SetEnvPrefix(envVarPrefix + "_" + cmd.Name())
117 |
118 | v.AutomaticEnv() // read in environment variables that match
119 | bindFlags(cmd, v)
120 |
121 | cmd.SilenceUsage = true
122 | cmd.SilenceErrors = true
123 |
124 | return nil
125 | }
126 |
127 | func bindFlags(cmd *cobra.Command, v *viper.Viper) {
128 | cmd.Flags().VisitAll(func(f *pflag.Flag) {
129 | bindEnv(v, cmd, f)
130 | if !f.Changed {
131 | setFlagIfPresent(v, cmd, f)
132 | }
133 | })
134 | }
135 |
136 | func bindEnv(v *viper.Viper, cmd *cobra.Command, f *pflag.Flag) {
137 | if strings.Contains(f.Name, "-") {
138 | suffix := strings.ToUpper(strings.ReplaceAll(f.Name, "-", "_"))
139 | prefix := envVarPrefix + "_" + strings.ToUpper(cmd.Name())
140 | env := fmt.Sprintf("%s_%s", prefix, suffix)
141 | err := v.BindEnv(f.Name, env)
142 | if err != nil {
143 | fmt.Printf("could not bind to env: %v", err)
144 | }
145 | }
146 | }
147 |
148 | func setFlagIfPresent(v *viper.Viper, cmd *cobra.Command, f *pflag.Flag) {
149 | val := v.Get(f.Name)
150 | if val == nil {
151 | val = v.Get(cmd.Name() + "." + f.Name)
152 | }
153 | if val != nil {
154 | err := cmd.Flags().Set(f.Name, fmt.Sprintf("%v", val))
155 | if err != nil {
156 | fmt.Printf("could not set flag: %v", err)
157 | return
158 | }
159 | }
160 | }
161 |
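bindFlags/bindEnv above mean every hyphenated flag can also be supplied through an environment variable named SALSA_<COMMAND>_<FLAG>, with hyphens replaced by underscores. A hypothetical helper mirroring that naming rule:

package main

import (
	"fmt"
	"strings"
)

// envVarFor is an illustrative helper mirroring the rule in bindEnv above.
func envVarFor(cmdName, flagName string) string {
	suffix := strings.ToUpper(strings.ReplaceAll(flagName, "-", "_"))
	return "SALSA_" + strings.ToUpper(cmdName) + "_" + suffix
}

func main() {
	fmt.Println(envVarFor("attest", "identity-token")) // SALSA_ATTEST_IDENTITY_TOKEN
	fmt.Println(envVarFor("attest", "rekor-url"))      // SALSA_ATTEST_REKOR_URL
}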
--------------------------------------------------------------------------------
/pkg/commands/scan.go:
--------------------------------------------------------------------------------
1 | package commands
2 |
3 | import (
4 | "encoding/json"
5 | "errors"
6 | "fmt"
7 | "os"
8 |
9 | "github.com/nais/salsa/pkg/build"
10 | "github.com/nais/salsa/pkg/build/golang"
11 | "github.com/nais/salsa/pkg/build/jvm"
12 | "github.com/nais/salsa/pkg/build/nodejs"
13 | "github.com/nais/salsa/pkg/build/php"
14 | "github.com/nais/salsa/pkg/config"
15 | "github.com/nais/salsa/pkg/intoto"
16 | "github.com/nais/salsa/pkg/utils"
17 | "github.com/nais/salsa/pkg/vcs"
18 | log "github.com/sirupsen/logrus"
19 | "github.com/spf13/cobra"
20 | )
21 |
22 | var (
23 | buildContext string
24 | runnerContext string
25 | mvnOpts string
26 | envContext string
27 | Config *ProvenanceConfig
28 | )
29 |
30 | type ProvenanceConfig struct {
31 | BuildStartedOn string
32 | }
33 |
34 | var scanCmd = &cobra.Command{
35 | Use: "scan",
36 | Short: "Scan files and dependencies for a given project",
37 | RunE: func(cmd *cobra.Command, args []string) error {
38 | cmd.SilenceErrors = true
39 | cmd.SilenceUsage = true
40 |
41 | if len(args) == 1 {
42 | artifact = args[0]
43 | }
44 |
45 | if PathFlags.Repo == "" {
46 | return errors.New("repo name must be specified")
47 | }
48 |
49 | workDir := PathFlags.WorkDir()
50 | log.Infof("prepare to scan path %s ...", workDir)
51 |
52 | deps := &build.ArtifactDependencies{}
53 | generatedDeps, err := InitBuildTools(mvnOpts).DetectDeps(workDir)
54 | if err != nil {
55 | return fmt.Errorf("detecting dependecies: %v", err)
56 | }
57 |
58 | if generatedDeps != nil {
59 | deps = generatedDeps
60 | } else {
61 | log.Infof("no supported build files found for directory: %s, proceeding", workDir)
62 | }
63 |
64 | contextEnvironment, err := vcs.ResolveBuildContext(&buildContext, &runnerContext, &envContext)
65 | if err != nil {
66 | return err
67 | }
68 |
69 | scanConfiguration := &config.ScanConfiguration{
70 | BuildStartedOn: Config.BuildStartedOn,
71 | WorkDir: workDir,
72 | RepoName: PathFlags.Repo,
73 | Dependencies: deps,
74 | ContextEnvironment: contextEnvironment,
75 | Cmd: cmd,
76 | }
77 |
78 | err = GenerateProvenance(scanConfiguration)
79 | if err != nil {
80 | return err
81 | }
82 | return nil
83 | },
84 | }
85 |
86 | func GenerateProvenance(scanCfg *config.ScanConfiguration) error {
87 | opts := intoto.CreateProvenanceOptions(scanCfg)
88 | predicate := intoto.GenerateSlsaPredicate(opts)
89 | statement, err := json.Marshal(predicate)
90 | if err != nil {
91 | return fmt.Errorf("marshal: %v\n", err)
92 | }
93 |
94 | provenanceFileName := utils.ProvenanceFile(scanCfg.RepoName)
95 | path := fmt.Sprintf("%s/%s", scanCfg.WorkDir, provenanceFileName)
96 | err = os.WriteFile(path, statement, 0644)
97 | if err != nil {
98 | return fmt.Errorf("write to file: %v\n", err)
99 | }
100 |
101 | log.Infof("generated provenance in file: %s", path)
102 | return nil
103 | }
104 |
105 | func InitBuildTools(mavenOpts string) build.Tools {
106 | return build.Tools{
107 | Tools: []build.Tool{
108 | jvm.BuildGradle(),
109 | jvm.BuildMaven(mavenOpts),
110 | golang.BuildGo(),
111 | nodejs.BuildNpm(),
112 | nodejs.BuildYarn(),
113 | php.BuildComposer(),
114 | },
115 | }
116 | }
117 |
118 | func init() {
119 | Config = &ProvenanceConfig{}
120 | rootCmd.AddCommand(scanCmd)
121 | scanCmd.Flags().StringVar(&buildContext, "build-context", "", "context of build tool")
122 | scanCmd.Flags().StringVar(&runnerContext, "runner-context", "", "context of runner")
123 | scanCmd.Flags().StringVar(&envContext, "env-context", "", "environmental variables of current context")
124 | scanCmd.Flags().StringVar(&mvnOpts, "mvn-opts", "", "comma-delimited list of additional options to pass to the Maven build tool")
125 | scanCmd.Flags().StringVar(&Config.BuildStartedOn, "build-started-on", "", "set start time for the build")
126 | }
127 |
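A minimal sketch of the dependency-detection step driven by scan, without the cobra and VCS-context wiring; the work directory below is a placeholder.

package main

import (
	"fmt"

	"github.com/nais/salsa/pkg/commands"
)

func main() {
	tools := commands.InitBuildTools("") // no extra Maven options
	deps, err := tools.DetectDeps("tmp/salsa") // placeholder work directory
	if err != nil {
		panic(err)
	}
	if deps == nil {
		fmt.Println("no supported build files found")
		return
	}
	fmt.Printf("resolved %d runtime dependencies\n", len(deps.RuntimeDeps))
}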
--------------------------------------------------------------------------------
/pkg/commands/version.go:
--------------------------------------------------------------------------------
1 | package commands
2 |
3 | import (
4 | log "github.com/sirupsen/logrus"
5 | "github.com/spf13/cobra"
6 | )
7 |
8 | var info bool
9 |
10 | var versionCmd = &cobra.Command{
11 | Use: "version [flags]",
12 | Short: "Show 'salsa' client version",
13 | RunE: func(cmd *cobra.Command, args []string) error {
14 | if info {
15 | log.Infof("%s: %s commit: %s date: %s builtBy: %s",
16 | cmd.CommandPath(),
17 | Client.Version,
18 | Client.Commit,
19 | Client.Date,
20 | Client.BuiltBy,
21 | )
22 | return nil
23 | }
24 | log.Infof("%s: %s", cmd.CommandPath(), Client.Version)
25 | return nil
26 | },
27 | }
28 |
29 | func init() {
30 | rootCmd.AddCommand(versionCmd)
31 | versionCmd.Flags().BoolVar(&info, "info", false, "Detailed commit information")
32 | }
33 |
--------------------------------------------------------------------------------
/pkg/config/scan.go:
--------------------------------------------------------------------------------
1 | package config
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/build"
5 | "github.com/nais/salsa/pkg/vcs"
6 | "github.com/spf13/cobra"
7 | )
8 |
9 | type ScanConfiguration struct {
10 | BuildStartedOn string
11 | Cmd *cobra.Command
12 | ContextEnvironment vcs.ContextEnvironment
13 | Dependencies *build.ArtifactDependencies
14 | WorkDir string
15 | RepoName string
16 | }
17 |
--------------------------------------------------------------------------------
/pkg/dsse/dsse.go:
--------------------------------------------------------------------------------
1 | package dsse
2 |
3 | import v02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2"
4 |
5 | type Envelope struct {
6 | Payload string `json:"payload"`
7 | }
8 |
9 | type Attestation struct {
10 | Predicate v02.ProvenancePredicate `json:"predicate"`
11 | }
12 |
--------------------------------------------------------------------------------
/pkg/dsse/parse.go:
--------------------------------------------------------------------------------
1 | package dsse
2 |
3 | import (
4 | "encoding/base64"
5 | "encoding/json"
6 | "github.com/in-toto/in-toto-golang/in_toto"
7 | )
8 |
9 | func ParseEnvelope(dsseEnvelope []byte) (*in_toto.ProvenanceStatement, error) {
10 | var env = Envelope{}
11 | err := json.Unmarshal(dsseEnvelope, &env)
12 | if err != nil {
13 | return nil, err
14 | }
15 | decodedPayload, err := base64.StdEncoding.DecodeString(env.Payload)
16 | if err != nil {
17 | return nil, err
18 | }
19 | var stat = &in_toto.ProvenanceStatement{}
20 |
21 | err = json.Unmarshal(decodedPayload, &stat)
22 | if err != nil {
23 | return nil, err
24 | }
25 | return stat, nil
26 | }
27 |
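A usage sketch of ParseEnvelope: the DSSE payload is base64-encoded JSON of an in-toto provenance statement, as in the testdata files below; the file path here is relative to the repository root.

package main

import (
	"fmt"
	"os"

	"github.com/nais/salsa/pkg/dsse"
)

func main() {
	envelope, err := os.ReadFile("pkg/dsse/testdata/cosign-dsse-attestation.json")
	if err != nil {
		panic(err)
	}
	statement, err := dsse.ParseEnvelope(envelope)
	if err != nil {
		panic(err)
	}
	fmt.Println(statement.PredicateType)            // https://slsa.dev/provenance/v0.2
	fmt.Println(len(statement.Predicate.Materials)) // number of resolved dependencies
}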
--------------------------------------------------------------------------------
/pkg/dsse/parse_test.go:
--------------------------------------------------------------------------------
1 | package dsse
2 |
3 | import (
4 | "github.com/stretchr/testify/assert"
5 | "os"
6 | "testing"
7 | )
8 |
9 | func TestParse(t *testing.T) {
10 | attPath := "testdata/cosign-dsse-attestation.json"
11 | fileContents, err := os.ReadFile(attPath)
12 |
13 | assert.NoError(t, err)
14 | statement, err := ParseEnvelope(fileContents)
15 |
16 | assert.NoError(t, err)
17 | assert.NotEmpty(t, statement.Predicate.Materials)
18 | }
19 |
--------------------------------------------------------------------------------
/pkg/dsse/testdata/cosign-attestation.json:
--------------------------------------------------------------------------------
1 | {
2 | "_type": "https://in-toto.io/Statement/v0.1",
3 | "predicateType": "https://slsa.dev/provenance/v0.2",
4 | "subject": [
5 | {
6 | "name": "ttl.sh/naisx",
7 | "digest": {
8 | "sha256": "ee89b00528ff4f02f2405e4ee221743ebc3f8e8dd0bfd5c4c20a2fa2aaa7ede3"
9 | }
10 | }
11 | ],
12 | "predicate": {
13 | "builder": {
14 | "id": ""
15 | },
16 | "buildType": "yolo",
17 | "invocation": {
18 | "configSource": {}
19 | },
20 | "metadata": {
21 | "buildStartedOn": "2022-01-06T14:01:03.632632+01:00",
22 | "buildFinishedOn": "2022-01-06T14:01:03.632632+01:00",
23 | "completeness": {
24 | "parameters": false,
25 | "environment": false,
26 | "materials": false
27 | },
28 | "reproducible": false
29 | },
30 | "materials": [
31 | {
32 | "uri": "junit:junit:4.10"
33 | },
34 | {
35 | "uri": "net.minidev:json-smart:[1.3.3,2.4.7] -> 2.4.7"
36 | },
37 | {
38 | "uri": "org.jetbrains.kotlin:kotlin-reflect:1.5.30 -> 1.6.10 (*)"
39 | },
40 | {
41 | "uri": "com.fasterxml.jackson.core:jackson-databind:2.13.0 (*)"
42 | },
43 | {
44 | "uri": "io.prometheus:simpleclient_tracer_otel:0.12.0"
45 | },
46 | {
47 | "uri": "com.github.seratch:kotliquery:1.6.1"
48 | },
49 | {
50 | "uri": "ch.qos.logback:logback-classic:1.2.9"
51 | },
52 | {
53 | "uri": "com.nimbusds:oauth2-oidc-sdk:9.20"
54 | },
55 | {
56 | "uri": "org.postgresql:postgresql:42.3.1"
57 | },
58 | {
59 | "uri": "org.jetbrains.kotlin:kotlin-script-runtime:1.6.10"
60 | },
61 | {
62 | "uri": "io.netty:netty-codec-http2:4.1.69.Final"
63 | },
64 | {
65 | "uri": "com.google.guava:guava:30.0-jre"
66 | },
67 | {
68 | "uri": "io.ktor:ktor-client-jackson:1.6.7"
69 | },
70 | {
71 | "uri": "io.prometheus:simpleclient_tracer_common:0.12.0"
72 | },
73 | {
74 | "uri": "io.ktor:ktor-jackson:1.6.7"
75 | },
76 | {
77 | "uri": "io.netty:netty-transport:4.1.69.Final (*)"
78 | },
79 | {
80 | "uri": "io.ktor:ktor-auth:1.6.7 (*)"
81 | },
82 | {
83 | "uri": "net.minidev:accessors-smart:2.4.7"
84 | },
85 | {
86 | "uri": "com.nimbusds:nimbus-jose-jwt:9.14"
87 | },
88 | {
89 | "uri": "io.ktor:ktor-client-json-jvm:1.6.7"
90 | },
91 | {
92 | "uri": "io.prometheus:simpleclient:0.12.0"
93 | },
94 | {
95 | "uri": "org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.6.0 (*)"
96 | },
97 | {
98 | "uri": "org.jetbrains.kotlinx:kotlinx-coroutines-core:1.5.2-native-mt (*)"
99 | },
100 | {
101 | "uri": "io.ktor:ktor-io-jvm:1.6.7"
102 | },
103 | {
104 | "uri": "io.netty:netty-handler:4.1.69.Final (*)"
105 | },
106 | {
107 | "uri": "com.fasterxml.jackson.core:jackson-annotations:2.13.0 (*)"
108 | },
109 | {
110 | "uri": "com.google.errorprone:error_prone_annotations:2.3.4"
111 | },
112 | {
113 | "uri": "com.nimbusds:content-type:2.1"
114 | },
115 | {
116 | "uri": "com.typesafe:config:1.4.1"
117 | },
118 | {
119 | "uri": "io.netty:netty-transport-native-kqueue:4.1.69.Final"
120 | },
121 | {
122 | "uri": "io.netty:netty-transport-native-epoll:4.1.69.Final"
123 | },
124 | {
125 | "uri": "com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava"
126 | },
127 | {
128 | "uri": "io.ktor:ktor-client-cio-jvm:1.6.7"
129 | },
130 | {
131 | "uri": "io.micrometer:micrometer-core:1.8.1 (*)"
132 | },
133 | {
134 | "uri": "io.ktor:ktor-http-cio:1.6.7 (*)"
135 | },
136 | {
137 | "uri": "com.fasterxml.jackson.core:jackson-core:2.13.0"
138 | },
139 | {
140 | "uri": "com.google.j2objc:j2objc-annotations:1.3"
141 | },
142 | {
143 | "uri": "io.ktor:ktor-client-cio:1.6.7"
144 | },
145 | {
146 | "uri": "ch.qos.logback:logback-core:1.2.9"
147 | },
148 | {
149 | "uri": "com.github.stephenc.jcip:jcip-annotations:1.0-1"
150 | },
151 | {
152 | "uri": "com.zaxxer:HikariCP:5.0.0 (*)"
153 | },
154 | {
155 | "uri": "org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.5.2-native-mt (*)"
156 | },
157 | {
158 | "uri": "io.ktor:ktor-server-host-common-kotlinMultiplatform:1.6.7 (*)"
159 | },
160 | {
161 | "uri": "io.ktor:ktor-server-core:1.6.7"
162 | },
163 | {
164 | "uri": "org.fusesource.jansi:jansi:2.4.0"
165 | },
166 | {
167 | "uri": "io.ktor:ktor-http-cio-jvm:1.6.7"
168 | },
169 | {
170 | "uri": "io.github.microutils:kotlin-logging-jvm:2.1.21"
171 | },
172 | {
173 | "uri": "org.flywaydb:flyway-core:8.2.2"
174 | },
175 | {
176 | "uri": "net.logstash.logback:logstash-logback-encoder:7.0.1"
177 | },
178 | {
179 | "uri": "org.jetbrains.kotlin:kotlin-stdlib:1.5.30 -> 1.6.10 (*)"
180 | },
181 | {
182 | "uri": "io.ktor:ktor-server-netty:1.6.7"
183 | },
184 | {
185 | "uri": "io.ktor:ktor-http-jvm:1.6.7"
186 | },
187 | {
188 | "uri": "org.hdrhistogram:HdrHistogram:2.1.12"
189 | },
190 | {
191 | "uri": "io.prometheus:simpleclient_common:0.12.0"
192 | },
193 | {
194 | "uri": "org.latencyutils:LatencyUtils:2.0.3"
195 | },
196 | {
197 | "uri": "org.jetbrains.kotlin:kotlin-stdlib-common:1.6.0 -> 1.6.10"
198 | },
199 | {
200 | "uri": "org.jetbrains.kotlinx:kotlinx-coroutines-core-jvm:1.5.2-native-mt"
201 | },
202 | {
203 | "uri": "io.netty:netty-codec-http:4.1.69.Final"
204 | },
205 | {
206 | "uri": "com.auth0:java-jwt:3.13.0"
207 | },
208 | {
209 | "uri": "com.google.code.findbugs:jsr305:3.0.2"
210 | },
211 | {
212 | "uri": "com.natpryce:konfig:1.6.10.0"
213 | },
214 | {
215 | "uri": "io.github.microutils:kotlin-logging:2.1.21"
216 | },
217 | {
218 | "uri": "io.ktor:ktor-metrics-micrometer:1.6.7"
219 | },
220 | {
221 | "uri": "io.ktor:ktor-io:1.6.7"
222 | },
223 | {
224 | "uri": "io.netty:netty-codec:4.1.69.Final (*)"
225 | },
226 | {
227 | "uri": "io.netty:netty-transport-native-unix-common:4.1.69.Final (*)"
228 | },
229 | {
230 | "uri": "io.ktor:ktor-auth-jwt:1.6.7"
231 | },
232 | {
233 | "uri": "com.auth0:jwks-rsa:0.17.0"
234 | },
235 | {
236 | "uri": "io.ktor:ktor-network-tls:1.6.7"
237 | },
238 | {
239 | "uri": "io.ktor:ktor-network-tls-jvm:1.6.7"
240 | },
241 | {
242 | "uri": "org.ow2.asm:asm:9.1"
243 | },
244 | {
245 | "uri": "org.slf4j:slf4j-api:1.7.32"
246 | },
247 | {
248 | "uri": "io.ktor:ktor-utils-jvm:1.6.7"
249 | },
250 | {
251 | "uri": "io.ktor:ktor-network:1.6.7 (*)"
252 | },
253 | {
254 | "uri": "io.netty:netty-buffer:4.1.69.Final (*)"
255 | },
256 | {
257 | "uri": "io.ktor:ktor-client-core:1.6.7 (*)"
258 | },
259 | {
260 | "uri": "com.fasterxml.jackson.module:jackson-module-kotlin:2.13.0 (*)"
261 | },
262 | {
263 | "uri": "com.nimbusds:lang-tag:1.5"
264 | },
265 | {
266 | "uri": "io.ktor:ktor-network-jvm:1.6.7"
267 | },
268 | {
269 | "uri": "org.eclipse.jetty.alpn:alpn-api:1.1.3.v20160715"
270 | },
271 | {
272 | "uri": "org.hamcrest:hamcrest-core:1.1"
273 | },
274 | {
275 | "uri": "org.jetbrains:annotations:13.0"
276 | },
277 | {
278 | "uri": "io.netty:netty-common:4.1.69.Final"
279 | },
280 | {
281 | "uri": "io.netty:netty-resolver:4.1.69.Final (*)"
282 | },
283 | {
284 | "uri": "io.ktor:ktor-auth-kotlinMultiplatform:1.6.7"
285 | },
286 | {
287 | "uri": "com.google.guava:failureaccess:1.0.1"
288 | },
289 | {
290 | "uri": "io.micrometer:micrometer-registry-prometheus:1.8.1"
291 | },
292 | {
293 | "uri": "org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.6.0 (*)"
294 | },
295 | {
296 | "uri": "io.ktor:ktor-utils:1.6.7 (*)"
297 | },
298 | {
299 | "uri": "com.fasterxml.jackson:jackson-bom:2.13.0 (*)"
300 | },
301 | {
302 | "uri": "org.checkerframework:checker-qual:3.5.0"
303 | },
304 | {
305 | "uri": "io.ktor:ktor-client-json:1.6.7 (*)"
306 | },
307 | {
308 | "uri": "io.ktor:ktor-server-host-common:1.6.7"
309 | },
310 | {
311 | "uri": "io.ktor:ktor-server-core-kotlinMultiplatform:1.6.7 (*)"
312 | },
313 | {
314 | "uri": "io.ktor:ktor-http:1.6.7 (*)"
315 | },
316 | {
317 | "uri": "io.ktor:ktor-client-core-jvm:1.6.7"
318 | },
319 | {
320 | "uri": "io.prometheus:simpleclient_tracer_otel_agent:0.12.0"
321 | }
322 | ]
323 | }
324 | }
325 |
--------------------------------------------------------------------------------
/pkg/dsse/testdata/cosign-dsse-attestation.json:
--------------------------------------------------------------------------------
1 | {
2 | "payloadType": "https://slsa.dev/provenance/v0.2",
3 | "payload": "eyJfdHlwZSI6Imh0dHBzOi8vaW4tdG90by5pby9TdGF0ZW1lbnQvdjAuMSIsInByZWRpY2F0ZVR5cGUiOiJodHRwczovL3Nsc2EuZGV2L3Byb3ZlbmFuY2UvdjAuMiIsInN1YmplY3QiOlt7Im5hbWUiOiJ0dGwuc2gvbmFpc3giLCJkaWdlc3QiOnsic2hhMjU2IjoiZWU4OWIwMDUyOGZmNGYwMmYyNDA1ZTRlZTIyMTc0M2ViYzNmOGU4ZGQwYmZkNWM0YzIwYTJmYTJhYWE3ZWRlMyJ9fV0sInByZWRpY2F0ZSI6eyJidWlsZGVyIjp7ImlkIjoiIn0sImJ1aWxkVHlwZSI6InlvbG8iLCJpbnZvY2F0aW9uIjp7ImNvbmZpZ1NvdXJjZSI6e319LCJtZXRhZGF0YSI6eyJidWlsZFN0YXJ0ZWRPbiI6IjIwMjItMDEtMDZUMTQ6MDE6MDMuNjMyNjMyKzAxOjAwIiwiYnVpbGRGaW5pc2hlZE9uIjoiMjAyMi0wMS0wNlQxNDowMTowMy42MzI2MzIrMDE6MDAiLCJjb21wbGV0ZW5lc3MiOnsicGFyYW1ldGVycyI6ZmFsc2UsImVudmlyb25tZW50IjpmYWxzZSwibWF0ZXJpYWxzIjpmYWxzZX0sInJlcHJvZHVjaWJsZSI6ZmFsc2V9LCJtYXRlcmlhbHMiOlt7InVyaSI6Imp1bml0Omp1bml0OjQuMTAifSx7InVyaSI6Im5ldC5taW5pZGV2Ompzb24tc21hcnQ6WzEuMy4zLDIuNC43XSAtXHUwMDNlIDIuNC43In0seyJ1cmkiOiJvcmcuamV0YnJhaW5zLmtvdGxpbjprb3RsaW4tcmVmbGVjdDoxLjUuMzAgLVx1MDAzZSAxLjYuMTAgKCopIn0seyJ1cmkiOiJjb20uZmFzdGVyeG1sLmphY2tzb24uY29yZTpqYWNrc29uLWRhdGFiaW5kOjIuMTMuMCAoKikifSx7InVyaSI6ImlvLnByb21ldGhldXM6c2ltcGxlY2xpZW50X3RyYWNlcl9vdGVsOjAuMTIuMCJ9LHsidXJpIjoiY29tLmdpdGh1Yi5zZXJhdGNoOmtvdGxpcXVlcnk6MS42LjEifSx7InVyaSI6ImNoLnFvcy5sb2diYWNrOmxvZ2JhY2stY2xhc3NpYzoxLjIuOSJ9LHsidXJpIjoiY29tLm5pbWJ1c2RzOm9hdXRoMi1vaWRjLXNkazo5LjIwIn0seyJ1cmkiOiJvcmcucG9zdGdyZXNxbDpwb3N0Z3Jlc3FsOjQyLjMuMSJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW46a290bGluLXNjcmlwdC1ydW50aW1lOjEuNi4xMCJ9LHsidXJpIjoiaW8ubmV0dHk6bmV0dHktY29kZWMtaHR0cDI6NC4xLjY5LkZpbmFsIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmd1YXZhOmd1YXZhOjMwLjAtanJlIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItY2xpZW50LWphY2tzb246MS42LjcifSx7InVyaSI6ImlvLnByb21ldGhldXM6c2ltcGxlY2xpZW50X3RyYWNlcl9jb21tb246MC4xMi4wIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItamFja3NvbjoxLjYuNyJ9LHsidXJpIjoiaW8ubmV0dHk6bmV0dHktdHJhbnNwb3J0OjQuMS42OS5GaW5hbCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1hdXRoOjEuNi43ICgqKSJ9LHsidXJpIjoibmV0Lm1pbmlkZXY6YWNjZXNzb3JzLXNtYXJ0OjIuNC43In0seyJ1cmkiOiJjb20ubmltYnVzZHM6bmltYnVzLWpvc2Utand0OjkuMTQifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1jbGllbnQtanNvbi1qdm06MS42LjcifSx7InVyaSI6ImlvLnByb21ldGhldXM6c2ltcGxlY2xpZW50OjAuMTIuMCJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW46a290bGluLXN0ZGxpYi1qZGs4OjEuNi4wICgqKSJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW54OmtvdGxpbngtY29yb3V0aW5lcy1jb3JlOjEuNS4yLW5hdGl2ZS1tdCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1pby1qdm06MS42LjcifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LWhhbmRsZXI6NC4xLjY5LkZpbmFsICgqKSJ9LHsidXJpIjoiY29tLmZhc3RlcnhtbC5qYWNrc29uLmNvcmU6amFja3Nvbi1hbm5vdGF0aW9uczoyLjEzLjAgKCopIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmVycm9ycHJvbmU6ZXJyb3JfcHJvbmVfYW5ub3RhdGlvbnM6Mi4zLjQifSx7InVyaSI6ImNvbS5uaW1idXNkczpjb250ZW50LXR5cGU6Mi4xIn0seyJ1cmkiOiJjb20udHlwZXNhZmU6Y29uZmlnOjEuNC4xIn0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS10cmFuc3BvcnQtbmF0aXZlLWtxdWV1ZTo0LjEuNjkuRmluYWwifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LXRyYW5zcG9ydC1uYXRpdmUtZXBvbGw6NC4xLjY5LkZpbmFsIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmd1YXZhOmxpc3RlbmFibGVmdXR1cmU6OTk5OS4wLWVtcHR5LXRvLWF2b2lkLWNvbmZsaWN0LXdpdGgtZ3VhdmEifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1jbGllbnQtY2lvLWp2bToxLjYuNyJ9LHsidXJpIjoiaW8ubWljcm9tZXRlcjptaWNyb21ldGVyLWNvcmU6MS44LjEgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItaHR0cC1jaW86MS42LjcgKCopIn0seyJ1cmkiOiJjb20uZmFzdGVyeG1sLmphY2tzb24uY29yZTpqYWNrc29uLWNvcmU6Mi4xMy4wIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmoyb2JqYzpqMm9iamMtYW5ub3RhdGlvbnM6MS4zIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItY2xpZW50LWNpbzoxLjYuNyJ9LHsidXJpIjoiY2gucW9zLmxvZ2JhY2s6bG9nYmFjay1jb3JlOjEuMi45In0seyJ1cmkiOiJjb20uZ2l0aHViLnN0ZXBoZW5jLmpjaXA6amNpcC1hbm5vdGF0aW9uczoxLjAtMSJ9LHsidXJpIjoiY29tLnpheHhlcjpIaWthcmlDUDo1LjAuMCAoKikifSx7InVyaSI6Im9yZy5qZXRicmFpbnMua290bGlueDprb3RsaW54LWNvcm9
1dGluZXMtamRrODoxLjUuMi1uYXRpdmUtbXQgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3Itc2VydmVyLWhvc3QtY29tbW9uLWtvdGxpbk11bHRpcGxhdGZvcm06MS42LjcgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3Itc2VydmVyLWNvcmU6MS42LjcifSx7InVyaSI6Im9yZy5mdXNlc291cmNlLmphbnNpOmphbnNpOjIuNC4wIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItaHR0cC1jaW8tanZtOjEuNi43In0seyJ1cmkiOiJpby5naXRodWIubWljcm91dGlsczprb3RsaW4tbG9nZ2luZy1qdm06Mi4xLjIxIn0seyJ1cmkiOiJvcmcuZmx5d2F5ZGI6Zmx5d2F5LWNvcmU6OC4yLjIifSx7InVyaSI6Im5ldC5sb2dzdGFzaC5sb2diYWNrOmxvZ3N0YXNoLWxvZ2JhY2stZW5jb2Rlcjo3LjAuMSJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW46a290bGluLXN0ZGxpYjoxLjUuMzAgLVx1MDAzZSAxLjYuMTAgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3Itc2VydmVyLW5ldHR5OjEuNi43In0seyJ1cmkiOiJpby5rdG9yOmt0b3ItaHR0cC1qdm06MS42LjcifSx7InVyaSI6Im9yZy5oZHJoaXN0b2dyYW06SGRySGlzdG9ncmFtOjIuMS4xMiJ9LHsidXJpIjoiaW8ucHJvbWV0aGV1czpzaW1wbGVjbGllbnRfY29tbW9uOjAuMTIuMCJ9LHsidXJpIjoib3JnLmxhdGVuY3l1dGlsczpMYXRlbmN5VXRpbHM6Mi4wLjMifSx7InVyaSI6Im9yZy5qZXRicmFpbnMua290bGluOmtvdGxpbi1zdGRsaWItY29tbW9uOjEuNi4wIC1cdTAwM2UgMS42LjEwIn0seyJ1cmkiOiJvcmcuamV0YnJhaW5zLmtvdGxpbng6a290bGlueC1jb3JvdXRpbmVzLWNvcmUtanZtOjEuNS4yLW5hdGl2ZS1tdCJ9LHsidXJpIjoiaW8ubmV0dHk6bmV0dHktY29kZWMtaHR0cDo0LjEuNjkuRmluYWwifSx7InVyaSI6ImNvbS5hdXRoMDpqYXZhLWp3dDozLjEzLjAifSx7InVyaSI6ImNvbS5nb29nbGUuY29kZS5maW5kYnVnczpqc3IzMDU6My4wLjIifSx7InVyaSI6ImNvbS5uYXRwcnljZTprb25maWc6MS42LjEwLjAifSx7InVyaSI6ImlvLmdpdGh1Yi5taWNyb3V0aWxzOmtvdGxpbi1sb2dnaW5nOjIuMS4yMSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW1ldHJpY3MtbWljcm9tZXRlcjoxLjYuNyJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLWlvOjEuNi43In0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS1jb2RlYzo0LjEuNjkuRmluYWwgKCopIn0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS10cmFuc3BvcnQtbmF0aXZlLXVuaXgtY29tbW9uOjQuMS42OS5GaW5hbCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1hdXRoLWp3dDoxLjYuNyJ9LHsidXJpIjoiY29tLmF1dGgwOmp3a3MtcnNhOjAuMTcuMCJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW5ldHdvcmstdGxzOjEuNi43In0seyJ1cmkiOiJpby5rdG9yOmt0b3ItbmV0d29yay10bHMtanZtOjEuNi43In0seyJ1cmkiOiJvcmcub3cyLmFzbTphc206OS4xIn0seyJ1cmkiOiJvcmcuc2xmNGo6c2xmNGotYXBpOjEuNy4zMiJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLXV0aWxzLWp2bToxLjYuNyJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW5ldHdvcms6MS42LjcgKCopIn0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS1idWZmZXI6NC4xLjY5LkZpbmFsICgqKSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLWNsaWVudC1jb3JlOjEuNi43ICgqKSJ9LHsidXJpIjoiY29tLmZhc3RlcnhtbC5qYWNrc29uLm1vZHVsZTpqYWNrc29uLW1vZHVsZS1rb3RsaW46Mi4xMy4wICgqKSJ9LHsidXJpIjoiY29tLm5pbWJ1c2RzOmxhbmctdGFnOjEuNSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW5ldHdvcmstanZtOjEuNi43In0seyJ1cmkiOiJvcmcuZWNsaXBzZS5qZXR0eS5hbHBuOmFscG4tYXBpOjEuMS4zLnYyMDE2MDcxNSJ9LHsidXJpIjoib3JnLmhhbWNyZXN0OmhhbWNyZXN0LWNvcmU6MS4xIn0seyJ1cmkiOiJvcmcuamV0YnJhaW5zOmFubm90YXRpb25zOjEzLjAifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LWNvbW1vbjo0LjEuNjkuRmluYWwifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LXJlc29sdmVyOjQuMS42OS5GaW5hbCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1hdXRoLWtvdGxpbk11bHRpcGxhdGZvcm06MS42LjcifSx7InVyaSI6ImNvbS5nb29nbGUuZ3VhdmE6ZmFpbHVyZWFjY2VzczoxLjAuMSJ9LHsidXJpIjoiaW8ubWljcm9tZXRlcjptaWNyb21ldGVyLXJlZ2lzdHJ5LXByb21ldGhldXM6MS44LjEifSx7InVyaSI6Im9yZy5qZXRicmFpbnMua290bGluOmtvdGxpbi1zdGRsaWItamRrNzoxLjYuMCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci11dGlsczoxLjYuNyAoKikifSx7InVyaSI6ImNvbS5mYXN0ZXJ4bWwuamFja3NvbjpqYWNrc29uLWJvbToyLjEzLjAgKCopIn0seyJ1cmkiOiJvcmcuY2hlY2tlcmZyYW1ld29yazpjaGVja2VyLXF1YWw6My41LjAifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1jbGllbnQtanNvbjoxLjYuNyAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1zZXJ2ZXItaG9zdC1jb21tb246MS42LjcifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1zZXJ2ZXItY29yZS1rb3RsaW5NdWx0aXBsYXRmb3JtOjEuNi43ICgqKSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLWh0dHA6MS42LjcgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItY2xpZW50LWNvcmUtanZtOjEuNi43In0seyJ1cmkiOiJpby5wcm
9tZXRoZXVzOnNpbXBsZWNsaWVudF90cmFjZXJfb3RlbF9hZ2VudDowLjEyLjAifV19fQ==",
4 | "signatures": [
5 | {
6 | "keyid": "",
7 | "sig": "MEUCIQDQ9kF3ifhd8xdtqAZ6YoLnR4T8FOQi+vx3UvpPB95ixAIgUlNhPqOIpdlNei3V+f4GYmoDfMCbLJbflBIjw27MxaU="
8 | }
9 | ]
10 | }
11 |
--------------------------------------------------------------------------------
/pkg/intoto/buildconfig.go:
--------------------------------------------------------------------------------
1 | package intoto
2 |
3 | import (
4 | "fmt"
5 | "github.com/nais/salsa/pkg/config"
6 | "github.com/spf13/cobra"
7 | "github.com/spf13/pflag"
8 | "strings"
9 | )
10 |
11 | type BuildConfig struct {
12 | Commands []string `json:"commands"`
13 | // Indicates how to parse the strings in commands.
14 | Shell string `json:"shell"`
15 | }
16 |
17 | func GenerateBuildConfig(scanConfig *config.ScanConfiguration) *BuildConfig {
18 | buildConfig := &BuildConfig{
19 | Commands: []string{
20 | fmt.Sprintf("%s %s",
21 | scanConfig.Cmd.CommandPath(),
22 | salsaCmdFlags(scanConfig.Cmd),
23 | ),
24 | },
25 | Shell: "bash",
26 | }
27 |
28 | if len(scanConfig.Dependencies.RuntimeDeps) > 0 {
29 | buildConfig.Commands = append(buildConfig.Commands, fmt.Sprintf("%s %s",
30 | scanConfig.Dependencies.CmdPath(),
31 | scanConfig.Dependencies.CmdFlags(),
32 | ))
33 | }
34 |
35 | return buildConfig
36 | }
37 |
38 | func salsaCmdFlags(cmd *cobra.Command) string {
39 | flagsUsed := make([]*pflag.Flag, 0)
40 | cmd.Flags().VisitAll(func(f *pflag.Flag) {
41 | if f.Changed {
42 | flagsUsed = append(flagsUsed, f)
43 | }
44 | })
45 |
46 | cmdFlags := ""
47 | for _, c := range flagsUsed {
48 | if strings.Contains(c.Name, "token") {
49 | cmdFlags += fmt.Sprintf(" --%s %s", c.Name, "***")
50 | } else {
51 | cmdFlags += fmt.Sprintf(" --%s %s", c.Name, c.Value.String())
52 | }
53 | }
54 |
55 | return cmdFlags
56 | }
57 |
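A minimal sketch (not from the repository) of what GenerateBuildConfig records: the salsa command path plus every changed flag, with token values masked.

package main

import (
	"fmt"

	"github.com/nais/salsa/pkg/build"
	"github.com/nais/salsa/pkg/config"
	"github.com/nais/salsa/pkg/intoto"
	"github.com/spf13/cobra"
)

func main() {
	root := &cobra.Command{Use: "salsa"}
	scan := &cobra.Command{Use: "scan"}
	root.AddCommand(scan)
	scan.Flags().String("repo", "", "")
	scan.Flags().String("token", "", "")
	_ = scan.Flags().Set("repo", "salsa")
	_ = scan.Flags().Set("token", "secret")

	cfg := &config.ScanConfiguration{
		Cmd:          scan,
		Dependencies: &build.ArtifactDependencies{},
	}
	bc := intoto.GenerateBuildConfig(cfg)
	// Prints something like: salsa scan --repo salsa --token *** (the token is masked).
	fmt.Println(bc.Commands[0])
}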
--------------------------------------------------------------------------------
/pkg/intoto/find.go:
--------------------------------------------------------------------------------
1 | package intoto
2 |
3 | import (
4 | slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
5 | "strings"
6 | )
7 |
8 | func FindMaterials(materials []slsa.ProvenanceMaterial, value string) []slsa.ProvenanceMaterial {
9 | found := make([]slsa.ProvenanceMaterial, 0)
10 | for _, m := range materials {
11 | if find(m, value) {
12 | found = append(found, m)
13 | }
14 | }
15 | return found
16 | }
17 |
18 | func find(material slsa.ProvenanceMaterial, value string) bool {
19 | return strings.Contains(material.URI, value)
20 | }
21 |
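A tiny sketch of FindMaterials: matching is a plain substring test against the material URI, so a group:artifact coordinate can be searched without pinning a version.

package main

import (
	"fmt"

	slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
	"github.com/nais/salsa/pkg/intoto"
)

func main() {
	materials := []slsa.ProvenanceMaterial{
		{URI: "com.google.guava:guava:30.0-jre"},
		{URI: "org.slf4j:slf4j-api:1.7.32"},
	}
	found := intoto.FindMaterials(materials, "com.google.guava:guava")
	fmt.Println(len(found), found[0].URI) // 1 com.google.guava:guava:30.0-jre
}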
--------------------------------------------------------------------------------
/pkg/intoto/find_test.go:
--------------------------------------------------------------------------------
1 | package intoto
2 |
3 | import (
4 | "github.com/nais/salsa/pkg/dsse"
5 | "github.com/stretchr/testify/assert"
6 | "os"
7 | "testing"
8 | )
9 |
10 | func TestFindMaterial(t *testing.T) {
11 | valueToFind := "com.google.guava:guava"
12 | attPath := "testdata/cosign-dsse-attestation.json"
13 | fileContents, err := os.ReadFile(attPath)
14 | assert.NoError(t, err)
15 |
16 | assert.NoError(t, err)
17 | statement, err := dsse.ParseEnvelope(fileContents)
18 |
19 | assert.NoError(t, err)
20 | assert.NotEmpty(t, statement.Predicate.Materials)
21 |
22 | foundMaterial := FindMaterials(statement.Predicate.Materials, valueToFind)
23 | assert.NotEmpty(t, foundMaterial)
24 | assert.Contains(t, foundMaterial[0].URI, valueToFind)
25 | assert.Equal(t, 1, len(foundMaterial))
26 | }
27 |
--------------------------------------------------------------------------------
/pkg/intoto/provenance.go:
--------------------------------------------------------------------------------
1 | package intoto
2 |
3 | import (
4 | "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
5 | slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2"
6 | )
7 |
8 | func GenerateSlsaPredicate(opts *ProvenanceOptions) *slsa.ProvenancePredicate {
9 | predicate := &slsa.ProvenancePredicate{
10 | Builder: common.ProvenanceBuilder{
11 | ID: opts.BuilderId,
12 | },
13 | BuildType: opts.BuildType,
14 | BuildConfig: opts.BuildConfig,
15 | Metadata: withMetadata(opts),
16 | Materials: withMaterials(opts),
17 | }
18 |
19 | if opts.Invocation != nil {
20 | predicate.Invocation = *opts.Invocation
21 | return predicate
22 | }
23 |
24 | return predicate
25 | }
26 |
27 | func withMetadata(opts *ProvenanceOptions) *slsa.ProvenanceMetadata {
28 | timeFinished := opts.GetBuildFinishedOn()
29 | return &slsa.ProvenanceMetadata{
30 | BuildInvocationID: opts.BuildInvocationId,
31 | BuildStartedOn: &opts.BuildStartedOn,
32 | BuildFinishedOn: &timeFinished,
33 | Completeness: withCompleteness(opts),
34 | Reproducible: opts.Reproducible(),
35 | }
36 | }
37 |
38 | func withCompleteness(opts *ProvenanceOptions) slsa.ProvenanceComplete {
39 | return slsa.ProvenanceComplete{
40 | Environment: opts.Environment(),
41 | Materials: opts.Materials(),
42 | Parameters: opts.Parameters(),
43 | }
44 | }
45 |
46 | func withMaterials(opts *ProvenanceOptions) []common.ProvenanceMaterial {
47 | materials := make([]common.ProvenanceMaterial, 0)
48 | AppendRuntimeDependencies(opts, &materials)
49 | AppendBuildContext(opts, &materials)
50 | return materials
51 | }
52 |
53 | func AppendRuntimeDependencies(opts *ProvenanceOptions, materials *[]common.ProvenanceMaterial) {
54 | if opts.Dependencies == nil {
55 | return
56 | }
57 |
58 | for _, dep := range opts.Dependencies.RuntimeDeps {
59 | m := common.ProvenanceMaterial{
60 | URI: dep.ToUri(),
61 | Digest: dep.ToDigestSet(),
62 | }
63 | *materials = append(*materials, m)
64 | }
65 | }
66 |
67 | func AppendBuildContext(opts *ProvenanceOptions, materials *[]common.ProvenanceMaterial) {
68 | if opts.BuilderRepoDigest != nil {
69 | *materials = append(*materials, *opts.BuilderRepoDigest)
70 | }
71 | }
72 |
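A minimal sketch of driving GenerateSlsaPredicate directly with hand-filled options and printing the predicate as JSON; in the real flow the options come from CreateProvenanceOptions, so every value below is illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/nais/salsa/pkg/intoto"
)

func main() {
	// Illustrative options only; CreateProvenanceOptions normally fills these in.
	opts := &intoto.ProvenanceOptions{
		BuilderId:      intoto.DefaultBuildId,
		BuildType:      intoto.AdHocBuildType,
		BuildStartedOn: time.Now().UTC().Round(time.Second),
		Name:           "artifact",
	}

	predicate := intoto.GenerateSlsaPredicate(opts)

	out, err := json.MarshalIndent(predicate, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```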
--------------------------------------------------------------------------------
/pkg/intoto/provenance_options.go:
--------------------------------------------------------------------------------
1 | package intoto
2 |
3 | import (
4 | "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
5 | "github.com/nais/salsa/pkg/build"
6 | "github.com/nais/salsa/pkg/config"
7 | "github.com/nais/salsa/pkg/vcs"
8 | log "github.com/sirupsen/logrus"
9 | "time"
10 |
11 | slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2"
12 | )
13 |
14 | const (
15 | // AdHocBuildType is used when there is no entry point and the commands were run in an ad-hoc fashion
16 | AdHocBuildType = "https://github.com/nais/salsa/ManuallyRunCommands@v1"
17 | DefaultBuildId = "https://github.com/nais/salsa"
18 | )
19 |
20 | type ProvenanceOptions struct {
21 | BuildConfig *BuildConfig
22 | BuilderId string
23 | BuilderRepoDigest *common.ProvenanceMaterial
24 | BuildInvocationId string
25 | BuildFinishedOn *time.Time
26 | BuildStartedOn time.Time
27 | BuildType string
28 | Dependencies *build.ArtifactDependencies
29 | Invocation *slsa.ProvenanceInvocation
30 | Name string
31 | }
32 |
33 | func CreateProvenanceOptions(scanCfg *config.ScanConfiguration) *ProvenanceOptions {
34 | opts := &ProvenanceOptions{
35 | BuilderId: DefaultBuildId,
36 | BuildType: AdHocBuildType,
37 | Dependencies: scanCfg.Dependencies,
38 | Name: scanCfg.RepoName,
39 | }
40 |
41 | context := scanCfg.ContextEnvironment
42 | opts.BuildStartedOn = buildStartedOn(context, scanCfg.BuildStartedOn)
43 |
44 | if context != nil {
45 | opts.BuildType = context.BuildType()
46 | opts.BuildInvocationId = context.BuildInvocationId()
47 | opts.BuilderId = context.BuilderId()
48 | opts.withBuilderRepoDigest(context).
49 | withBuilderInvocation(context)
50 | return opts
51 | }
52 |
53 | opts.BuildConfig = GenerateBuildConfig(scanCfg)
54 | opts.Invocation = nil
55 | return opts
56 | }
57 |
58 | func (in *ProvenanceOptions) withBuilderRepoDigest(env vcs.ContextEnvironment) *ProvenanceOptions {
59 | in.BuilderRepoDigest = &common.ProvenanceMaterial{
60 | URI: "git+" + env.RepoUri(),
61 | Digest: common.DigestSet{
62 | build.AlgorithmSHA1: env.Sha(),
63 | },
64 | }
65 | return in
66 | }
67 |
68 | func (in *ProvenanceOptions) withBuilderInvocation(env vcs.ContextEnvironment) *ProvenanceOptions {
69 | in.Invocation = &slsa.ProvenanceInvocation{
70 | ConfigSource: slsa.ConfigSource{
71 | URI: "git+" + env.RepoUri(),
72 | Digest: common.DigestSet{
73 | build.AlgorithmSHA1: env.Sha(),
74 | },
75 | EntryPoint: env.Context(),
76 | },
77 | Parameters: env.UserDefinedParameters(),
78 | Environment: env.NonReproducibleMetadata(),
79 | }
80 | return in
81 | }
82 |
83 | func (in *ProvenanceOptions) HasBuilderRepoDigest() bool {
84 | if in.BuilderRepoDigest == nil {
85 | return false
86 | }
87 |
88 | return in.BuilderRepoDigest.Digest != nil && in.BuilderRepoDigest.URI != ""
89 |
90 | }
91 |
92 | func (in *ProvenanceOptions) HasDependencies() bool {
93 | if in.Dependencies == nil {
94 | return false
95 | }
96 |
97 | return len(in.Dependencies.RuntimeDeps) > 0
98 | }
99 |
100 | func (in *ProvenanceOptions) Parameters() bool {
101 | if in.Invocation == nil {
102 | return false
103 | }
104 |
105 | if in.Invocation.Parameters == nil {
106 | return false
107 | }
108 |
109 | event, ok := in.Invocation.Parameters.(*vcs.Event)
110 | if !ok || event == nil {
111 | return false
112 | }
113 | return event.EventMetadata != nil
114 | }
115 |
116 | func (in *ProvenanceOptions) Environment() bool {
117 | if in.Invocation == nil {
118 | return false
119 | }
120 |
121 | return in.Invocation.Environment != nil
122 | }
123 |
124 | func (in *ProvenanceOptions) Materials() bool {
125 | return in.HasDependencies() && in.HasBuilderRepoDigest()
126 | }
127 |
128 | func (in *ProvenanceOptions) Reproducible() bool {
129 | return in.Environment() && in.Materials() && in.Parameters()
130 | }
131 |
132 | func (in *ProvenanceOptions) GetBuildFinishedOn() time.Time {
133 | if in.BuildFinishedOn == nil {
134 | return time.Now().UTC().Round(time.Second)
135 | }
136 | return *in.BuildFinishedOn
137 | }
138 |
139 | func buildStartedOn(context vcs.ContextEnvironment, inputBuildTime string) time.Time {
140 | if inputBuildTime != "" {
141 | return BuildStarted(inputBuildTime)
142 | }
143 |
144 | if context == nil {
145 | return time.Now().UTC().Round(time.Second)
146 | }
147 |
148 | event := context.GetEvent()
149 |
150 | if event == nil {
151 | return time.Now().UTC().Round(time.Second)
152 | }
153 |
154 | buildTime := event.GetHeadCommitTimestamp()
155 | return BuildStarted(buildTime)
156 | }
157 |
158 | func BuildStarted(buildTime string) time.Time {
159 | started, err := time.Parse(time.RFC3339, buildTime)
160 | if err != nil {
161 | log.Warnf("Failed to parse build time: %v, using default start time", err)
162 | return time.Now().UTC().Round(time.Second)
163 | }
164 |
165 | return started
166 | }
167 |
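A quick sketch of the BuildStarted fallback: a valid RFC3339 timestamp is used as-is, while anything else logs a warning and falls back to the current time (both inputs below are illustrative):

```go
package main

import (
	"fmt"

	"github.com/nais/salsa/pkg/intoto"
)

func main() {
	// Parsed as-is.
	fmt.Println(intoto.BuildStarted("2022-02-14T09:38:16+01:00"))

	// Not RFC3339: logs a warning and returns the current UTC time, rounded to seconds.
	fmt.Println(intoto.BuildStarted("not-a-timestamp"))
}
```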
--------------------------------------------------------------------------------
/pkg/intoto/provenance_options_test.go:
--------------------------------------------------------------------------------
1 | package intoto
2 |
3 | import (
4 | "fmt"
5 | "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
6 | "github.com/nais/salsa/pkg/build"
7 | "github.com/nais/salsa/pkg/config"
8 | "github.com/nais/salsa/pkg/vcs"
9 | "github.com/spf13/cobra"
10 | "os"
11 | "testing"
12 | "time"
13 |
14 | slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2"
15 | "github.com/stretchr/testify/assert"
16 | )
17 |
18 | func TestCreateProvenanceOptions(t *testing.T) {
19 | deps := ExpectedDeps()
20 | artDeps := ExpectedArtDeps(deps)
21 |
22 | for _, test := range []struct {
23 | name string
24 | buildType string
25 | buildInvocationId string
26 | builderId string
27 | buildConfig *BuildConfig
28 | builderRepoDigest *common.ProvenanceMaterial
29 | configSource slsa.ConfigSource
30 | buildTimerIsSet bool
31 | runnerContext bool
32 | }{
33 | {
34 | name: "create provenance artifact with default values",
35 | buildType: "https://github.com/nais/salsa/ManuallyRunCommands@v1",
36 | buildInvocationId: "",
37 | builderId: "https://github.com/nais/salsa",
38 | buildConfig: buildConfig(),
39 | builderRepoDigest: (*common.ProvenanceMaterial)(nil),
40 | configSource: slsa.ConfigSource{
41 | URI: "",
42 | Digest: common.DigestSet(nil),
43 | EntryPoint: "",
44 | },
45 | buildTimerIsSet: true,
46 | runnerContext: false,
47 | },
48 | {
49 | name: "create provenance artifact with runner context",
50 | buildType: "https://github.com/Attestations/GitHubActionsWorkflow@v1",
51 | buildInvocationId: "https://github.com/nais/salsa/actions/runs/1234",
52 | builderId: "https://github.com/nais/salsa/Attestations/GitHubHostedActions@v1",
53 | buildConfig: nil,
54 | builderRepoDigest: ExpectedBuilderRepoDigestMaterial(),
55 | configSource: ExpectedConfigSource(),
56 | buildTimerIsSet: true,
57 | runnerContext: true,
58 | },
59 | } {
60 | t.Run(test.name, func(t *testing.T) {
61 | if test.runnerContext {
62 | err := os.Setenv("GITHUB_ACTIONS", "true")
63 | assert.NoError(t, err)
64 | env := Environment()
65 | scanCfg := &config.ScanConfiguration{
66 | BuildStartedOn: "",
67 | WorkDir: "",
68 | RepoName: "artifact",
69 | Dependencies: artDeps,
70 | ContextEnvironment: env,
71 | Cmd: nil,
72 | }
73 | provenanceArtifact := CreateProvenanceOptions(scanCfg)
74 | assert.Equal(t, "artifact", provenanceArtifact.Name)
75 | assert.Equal(t, test.buildType, provenanceArtifact.BuildType)
76 | assert.Equal(t, deps, provenanceArtifact.Dependencies.RuntimeDeps)
77 | assert.Equal(t, "2022-02-14T09:38:16+01:00", provenanceArtifact.BuildStartedOn.Format(time.RFC3339))
78 | assert.Equal(t, test.buildInvocationId, provenanceArtifact.BuildInvocationId)
79 | assert.Equal(t, test.buildConfig, provenanceArtifact.BuildConfig)
80 | assert.NotEmpty(t, provenanceArtifact.Invocation)
81 | assert.NotEmpty(t, provenanceArtifact.Invocation.Parameters)
82 | assert.NotEmpty(t, provenanceArtifact.Invocation.Environment)
83 | assert.Equal(t, test.builderId, provenanceArtifact.BuilderId)
84 | assert.Equal(t, test.builderRepoDigest, provenanceArtifact.BuilderRepoDigest)
85 | assert.Equal(t, test.configSource, provenanceArtifact.Invocation.ConfigSource)
86 |
87 | } else {
88 |
89 | scanCfg := &config.ScanConfiguration{
90 | BuildStartedOn: time.Now().UTC().Round(time.Second).Add(-10 * time.Minute).Format(time.RFC3339),
91 | WorkDir: "",
92 | RepoName: "artifact",
93 | Dependencies: artDeps,
94 | ContextEnvironment: nil,
95 | Cmd: &cobra.Command{Use: "salsa"},
96 | }
97 |
98 | provenanceArtifact := CreateProvenanceOptions(scanCfg)
99 | assert.Equal(t, "artifact", provenanceArtifact.Name)
100 | assert.Equal(t, test.buildType, provenanceArtifact.BuildType)
101 | assert.Equal(t, deps, provenanceArtifact.Dependencies.RuntimeDeps)
102 | assert.Equal(t, test.buildTimerIsSet, time.Now().UTC().After(provenanceArtifact.BuildStartedOn))
103 | assert.Equal(t, test.buildInvocationId, provenanceArtifact.BuildInvocationId)
104 | assert.Equal(t, test.buildConfig, provenanceArtifact.BuildConfig)
105 | assert.Empty(t, provenanceArtifact.Invocation)
106 | assert.Equal(t, test.builderId, provenanceArtifact.BuilderId)
107 | assert.Equal(t, test.builderRepoDigest, provenanceArtifact.BuilderRepoDigest)
108 | }
109 | })
110 | }
111 | }
112 |
113 | func ExpectedBuilderRepoDigestMaterial() *common.ProvenanceMaterial {
114 | return &common.ProvenanceMaterial{
115 | URI: "git+https://github.com/nais/salsa",
116 | Digest: common.DigestSet{
117 | build.AlgorithmSHA1: "4321",
118 | },
119 | }
120 | }
121 |
122 | func ExpectedDeps() map[string]build.Dependency {
123 | deps := map[string]build.Dependency{}
124 | checksum := build.Verification("todo", "todo")
125 | deps[fmt.Sprintf("%s:%s", "groupId", "artifactId")] = build.Dependence(
126 | fmt.Sprintf("%s:%s", "groupId", "artifactId"),
127 | "v01",
128 | checksum,
129 | )
130 | return deps
131 | }
132 |
133 | func ExpectedArtDeps(deps map[string]build.Dependency) *build.ArtifactDependencies {
134 | return &build.ArtifactDependencies{
135 | Cmd: build.Cmd{
136 | Path: "lang",
137 | CmdFlags: "list:deps",
138 | },
139 | RuntimeDeps: deps,
140 | }
141 | }
142 |
143 | func Environment() *vcs.GithubCIEnvironment {
144 | return &vcs.GithubCIEnvironment{
145 | BuildContext: &vcs.GithubContext{
146 | Repository: "nais/salsa",
147 | RunId: "1234",
148 | SHA: "4321",
149 | Workflow: "Create a provenance",
150 | ServerUrl: "https://github.com",
151 | EventName: "workflow_dispatch",
152 | },
153 | Event: &vcs.Event{
154 | EventMetadata: &vcs.EventMetadata{
155 | HeadCommit: &vcs.HeadCommit{
156 | Timestamp: "2022-02-14T09:38:16+01:00",
157 | },
158 | },
159 | },
160 | RunnerContext: &vcs.RunnerContext{
161 | OS: "Linux",
162 | Temp: "/home/runner/work/_temp",
163 | ToolCache: "/opt/hostedtoolcache",
164 | },
165 | Actions: vcs.BuildId("v1"),
166 | }
167 | }
168 |
169 | func ExpectedConfigSource() slsa.ConfigSource {
170 | return slsa.ConfigSource{
171 | URI: "git+https://github.com/nais/salsa",
172 | Digest: common.DigestSet{
173 | build.AlgorithmSHA1: "4321",
174 | },
175 | EntryPoint: "Create a provenance",
176 | }
177 | }
178 |
179 | func buildConfig() *BuildConfig {
180 | return &BuildConfig{
181 | Commands: []string{
182 | "salsa ",
183 | "lang list:deps",
184 | },
185 | Shell: "bash",
186 | }
187 | }
188 |
--------------------------------------------------------------------------------
/pkg/intoto/provenance_test.go:
--------------------------------------------------------------------------------
1 | package intoto
2 |
3 | import (
4 | "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common"
5 | "github.com/nais/salsa/pkg/build"
6 | "github.com/nais/salsa/pkg/config"
7 | "github.com/nais/salsa/pkg/vcs"
8 | "github.com/spf13/cobra"
9 | "os"
10 | "testing"
11 | "time"
12 |
13 | slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2"
14 | "github.com/stretchr/testify/assert"
15 | )
16 |
17 | func TestGenerateSlsaPredicate(t *testing.T) {
18 | deps := ExpectedDeps()
19 | artDeps := ExpectedArtDeps(deps)
20 | for _, test := range []struct {
21 | name string
22 | buildType string
23 | buildInvocationId string
24 | builderId string
25 | buildConfig *BuildConfig
26 | materials []common.ProvenanceMaterial
27 | configSource slsa.ConfigSource
28 | buildTimerIsSet bool
29 | buildTimerFinishedIsSet bool
30 | runnerContext bool
31 | }{
32 | {
33 | name: "create slsa provenance artifact with default values",
34 | buildType: "https://github.com/nais/salsa/ManuallyRunCommands@v1",
35 | buildInvocationId: "",
36 | builderId: "https://github.com/nais/salsa",
37 | buildConfig: buildConfig(),
38 | materials: ExpectedDependenciesMaterial(),
39 | configSource: slsa.ConfigSource{
40 | URI: "",
41 | Digest: common.DigestSet(nil),
42 | EntryPoint: "",
43 | },
44 | buildTimerIsSet: true,
45 | buildTimerFinishedIsSet: true,
46 | runnerContext: false,
47 | },
48 | {
49 | name: "create slsa provenance with runner context",
50 | buildType: "https://github.com/Attestations/GitHubActionsWorkflow@v1",
51 | buildInvocationId: "https://github.com/nais/salsa/actions/runs/1234",
52 | builderId: "https://github.com/nais/salsa/Attestations/GitHubHostedActions@v1",
53 | buildConfig: nil,
54 | materials: ToExpectedMaterials(),
55 | configSource: ExpectedConfigSource(),
56 | buildTimerIsSet: true,
57 | buildTimerFinishedIsSet: true,
58 | runnerContext: true,
59 | },
60 | } {
61 | t.Run(test.name, func(t *testing.T) {
62 | if test.runnerContext {
63 | env := Environment()
64 |
65 | scanCfg := &config.ScanConfiguration{
66 | BuildStartedOn: time.Now().UTC().Round(time.Second).Add(-10 * time.Minute).Format(time.RFC3339),
67 | WorkDir: "",
68 | RepoName: "artifact",
69 | Dependencies: artDeps,
70 | ContextEnvironment: env,
71 | Cmd: nil,
72 | }
73 |
74 | err := os.Setenv("GITHUB_ACTIONS", "true")
75 | assert.NoError(t, err)
76 | opts := CreateProvenanceOptions(scanCfg)
77 | slsaPredicate := GenerateSlsaPredicate(opts)
78 |
79 | // VCS GithubContext
80 | assert.Equal(t, test.buildType, slsaPredicate.BuildType)
81 | assert.NotEmpty(t, slsaPredicate.Invocation)
82 | assert.Equal(t, "2022-02-14T09:38:16+01:00", slsaPredicate.Invocation.Parameters.(*vcs.Event).GetHeadCommitTimestamp())
83 | e := slsaPredicate.Invocation.Environment.(*vcs.Metadata)
84 | assert.NoError(t, err)
85 | assert.Equal(t, expectedMetadata(), e)
86 | assert.NotEmpty(t, slsaPredicate.Invocation.Environment)
87 | assert.Equal(t, test.configSource, slsaPredicate.Invocation.ConfigSource)
88 | assert.Equal(t, test.builderId, slsaPredicate.Builder.ID)
89 |
90 | // metadata
91 | assert.Equal(t, test.buildInvocationId, slsaPredicate.Metadata.BuildInvocationID)
92 | assert.Equal(t, test.buildTimerIsSet, *slsaPredicate.Metadata.BuildStartedOn != time.Time{})
93 | assert.Equal(t, test.buildTimerFinishedIsSet, *slsaPredicate.Metadata.BuildFinishedOn != time.Time{})
94 | assert.Equal(t, true, slsaPredicate.Metadata.Reproducible)
95 |
96 | // completeness
97 | assert.Equal(t, true, slsaPredicate.Metadata.Completeness.Environment)
98 | assert.Equal(t, true, slsaPredicate.Metadata.Completeness.Materials)
99 | assert.Equal(t, true, slsaPredicate.Metadata.Completeness.Parameters)
100 |
101 | // materials
102 | assert.Equal(t, 2, len(slsaPredicate.Materials))
103 | assert.Equal(t, test.materials, slsaPredicate.Materials)
104 |
105 | } else {
106 |
107 | scanCfg := &config.ScanConfiguration{
108 | BuildStartedOn: time.Now().UTC().Round(time.Second).Add(-10 * time.Minute).Format(time.RFC3339),
109 | WorkDir: "",
110 | RepoName: "artifact",
111 | Dependencies: artDeps,
112 | ContextEnvironment: nil,
113 | Cmd: &cobra.Command{Use: "salsa"},
114 | }
115 |
116 | opts := CreateProvenanceOptions(scanCfg)
117 | slsaPredicate := GenerateSlsaPredicate(opts)
118 |
119 | // Predicate
120 | assert.Equal(t, test.buildType, slsaPredicate.BuildType)
121 | assert.Equal(t, test.builderId, slsaPredicate.Builder.ID)
122 | assert.Equal(t, test.buildConfig, slsaPredicate.BuildConfig)
123 | assert.Equal(t, test.configSource, slsaPredicate.Invocation.ConfigSource)
124 | assert.Empty(t, slsaPredicate.Invocation.Parameters)
125 | assert.Empty(t, slsaPredicate.Invocation.Environment)
126 |
127 | // metadata
128 | assert.Equal(t, test.buildInvocationId, slsaPredicate.Metadata.BuildInvocationID)
129 | assert.Equal(t, test.buildTimerIsSet, time.Now().UTC().After(*slsaPredicate.Metadata.BuildStartedOn))
130 | assert.Equal(t, test.buildTimerFinishedIsSet, slsaPredicate.Metadata.BuildFinishedOn.After(*slsaPredicate.Metadata.BuildStartedOn))
131 | assert.Equal(t, false, slsaPredicate.Metadata.Reproducible)
132 |
133 | // completeness
134 | assert.Equal(t, false, slsaPredicate.Metadata.Completeness.Environment)
135 | assert.Equal(t, false, slsaPredicate.Metadata.Completeness.Materials)
136 | assert.Equal(t, false, slsaPredicate.Metadata.Completeness.Parameters)
137 |
138 | // materials
139 | assert.Equal(t, 1, len(slsaPredicate.Materials))
140 | assert.Equal(t, ExpectedDependenciesMaterial(), slsaPredicate.Materials)
141 | }
142 | })
143 |
144 | }
145 | }
146 |
147 | func expectedMetadata() *vcs.Metadata {
148 | return &vcs.Metadata{
149 | Arch: "",
150 | Env: map[string]string{},
151 | Context: vcs.Context{
152 | Github: vcs.Github{
153 | RunId: "1234",
154 | },
155 | Runner: vcs.Runner{
156 | Os: "Linux",
157 | Temp: "/home/runner/work/_temp"},
158 | },
159 | }
160 | }
161 |
162 | func ExpectedDependenciesMaterial() []common.ProvenanceMaterial {
163 | return []common.ProvenanceMaterial{
164 | {
165 | URI: "pkg:groupId:artifactId:v01",
166 | Digest: common.DigestSet{
167 | "todo": "todo",
168 | },
169 | },
170 | }
171 | }
172 |
173 | func ToExpectedMaterials() []common.ProvenanceMaterial {
174 | return []common.ProvenanceMaterial{
175 | {
176 | URI: "pkg:groupId:artifactId:v01",
177 | Digest: common.DigestSet{
178 | "todo": "todo",
179 | },
180 | },
181 | {
182 | URI: "git+https://github.com/nais/salsa",
183 | Digest: common.DigestSet{
184 | build.AlgorithmSHA1: "4321",
185 | },
186 | },
187 | }
188 | }
189 |
--------------------------------------------------------------------------------
/pkg/intoto/testdata/cosign-dsse-attestation.json:
--------------------------------------------------------------------------------
1 | {
2 | "payloadType": "https://slsa.dev/provenance/v0.2",
3 | "payload": "eyJfdHlwZSI6Imh0dHBzOi8vaW4tdG90by5pby9TdGF0ZW1lbnQvdjAuMSIsInByZWRpY2F0ZVR5cGUiOiJodHRwczovL3Nsc2EuZGV2L3Byb3ZlbmFuY2UvdjAuMiIsInN1YmplY3QiOlt7Im5hbWUiOiJ0dGwuc2gvbmFpc3giLCJkaWdlc3QiOnsic2hhMjU2IjoiZWU4OWIwMDUyOGZmNGYwMmYyNDA1ZTRlZTIyMTc0M2ViYzNmOGU4ZGQwYmZkNWM0YzIwYTJmYTJhYWE3ZWRlMyJ9fV0sInByZWRpY2F0ZSI6eyJidWlsZGVyIjp7ImlkIjoiIn0sImJ1aWxkVHlwZSI6InlvbG8iLCJpbnZvY2F0aW9uIjp7ImNvbmZpZ1NvdXJjZSI6e319LCJtZXRhZGF0YSI6eyJidWlsZFN0YXJ0ZWRPbiI6IjIwMjItMDEtMDZUMTQ6MDE6MDMuNjMyNjMyKzAxOjAwIiwiYnVpbGRGaW5pc2hlZE9uIjoiMjAyMi0wMS0wNlQxNDowMTowMy42MzI2MzIrMDE6MDAiLCJjb21wbGV0ZW5lc3MiOnsicGFyYW1ldGVycyI6ZmFsc2UsImVudmlyb25tZW50IjpmYWxzZSwibWF0ZXJpYWxzIjpmYWxzZX0sInJlcHJvZHVjaWJsZSI6ZmFsc2V9LCJtYXRlcmlhbHMiOlt7InVyaSI6Imp1bml0Omp1bml0OjQuMTAifSx7InVyaSI6Im5ldC5taW5pZGV2Ompzb24tc21hcnQ6WzEuMy4zLDIuNC43XSAtXHUwMDNlIDIuNC43In0seyJ1cmkiOiJvcmcuamV0YnJhaW5zLmtvdGxpbjprb3RsaW4tcmVmbGVjdDoxLjUuMzAgLVx1MDAzZSAxLjYuMTAgKCopIn0seyJ1cmkiOiJjb20uZmFzdGVyeG1sLmphY2tzb24uY29yZTpqYWNrc29uLWRhdGFiaW5kOjIuMTMuMCAoKikifSx7InVyaSI6ImlvLnByb21ldGhldXM6c2ltcGxlY2xpZW50X3RyYWNlcl9vdGVsOjAuMTIuMCJ9LHsidXJpIjoiY29tLmdpdGh1Yi5zZXJhdGNoOmtvdGxpcXVlcnk6MS42LjEifSx7InVyaSI6ImNoLnFvcy5sb2diYWNrOmxvZ2JhY2stY2xhc3NpYzoxLjIuOSJ9LHsidXJpIjoiY29tLm5pbWJ1c2RzOm9hdXRoMi1vaWRjLXNkazo5LjIwIn0seyJ1cmkiOiJvcmcucG9zdGdyZXNxbDpwb3N0Z3Jlc3FsOjQyLjMuMSJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW46a290bGluLXNjcmlwdC1ydW50aW1lOjEuNi4xMCJ9LHsidXJpIjoiaW8ubmV0dHk6bmV0dHktY29kZWMtaHR0cDI6NC4xLjY5LkZpbmFsIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmd1YXZhOmd1YXZhOjMwLjAtanJlIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItY2xpZW50LWphY2tzb246MS42LjcifSx7InVyaSI6ImlvLnByb21ldGhldXM6c2ltcGxlY2xpZW50X3RyYWNlcl9jb21tb246MC4xMi4wIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItamFja3NvbjoxLjYuNyJ9LHsidXJpIjoiaW8ubmV0dHk6bmV0dHktdHJhbnNwb3J0OjQuMS42OS5GaW5hbCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1hdXRoOjEuNi43ICgqKSJ9LHsidXJpIjoibmV0Lm1pbmlkZXY6YWNjZXNzb3JzLXNtYXJ0OjIuNC43In0seyJ1cmkiOiJjb20ubmltYnVzZHM6bmltYnVzLWpvc2Utand0OjkuMTQifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1jbGllbnQtanNvbi1qdm06MS42LjcifSx7InVyaSI6ImlvLnByb21ldGhldXM6c2ltcGxlY2xpZW50OjAuMTIuMCJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW46a290bGluLXN0ZGxpYi1qZGs4OjEuNi4wICgqKSJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW54OmtvdGxpbngtY29yb3V0aW5lcy1jb3JlOjEuNS4yLW5hdGl2ZS1tdCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1pby1qdm06MS42LjcifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LWhhbmRsZXI6NC4xLjY5LkZpbmFsICgqKSJ9LHsidXJpIjoiY29tLmZhc3RlcnhtbC5qYWNrc29uLmNvcmU6amFja3Nvbi1hbm5vdGF0aW9uczoyLjEzLjAgKCopIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmVycm9ycHJvbmU6ZXJyb3JfcHJvbmVfYW5ub3RhdGlvbnM6Mi4zLjQifSx7InVyaSI6ImNvbS5uaW1idXNkczpjb250ZW50LXR5cGU6Mi4xIn0seyJ1cmkiOiJjb20udHlwZXNhZmU6Y29uZmlnOjEuNC4xIn0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS10cmFuc3BvcnQtbmF0aXZlLWtxdWV1ZTo0LjEuNjkuRmluYWwifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LXRyYW5zcG9ydC1uYXRpdmUtZXBvbGw6NC4xLjY5LkZpbmFsIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmd1YXZhOmxpc3RlbmFibGVmdXR1cmU6OTk5OS4wLWVtcHR5LXRvLWF2b2lkLWNvbmZsaWN0LXdpdGgtZ3VhdmEifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1jbGllbnQtY2lvLWp2bToxLjYuNyJ9LHsidXJpIjoiaW8ubWljcm9tZXRlcjptaWNyb21ldGVyLWNvcmU6MS44LjEgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItaHR0cC1jaW86MS42LjcgKCopIn0seyJ1cmkiOiJjb20uZmFzdGVyeG1sLmphY2tzb24uY29yZTpqYWNrc29uLWNvcmU6Mi4xMy4wIn0seyJ1cmkiOiJjb20uZ29vZ2xlLmoyb2JqYzpqMm9iamMtYW5ub3RhdGlvbnM6MS4zIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItY2xpZW50LWNpbzoxLjYuNyJ9LHsidXJpIjoiY2gucW9zLmxvZ2JhY2s6bG9nYmFjay1jb3JlOjEuMi45In0seyJ1cmkiOiJjb20uZ2l0aHViLnN0ZXBoZW5jLmpjaXA6amNpcC1hbm5vdGF0aW9uczoxLjAtMSJ9LHsidXJpIjoiY29tLnpheHhlcjpIaWthcmlDUDo1LjAuMCAoKikifSx7InVyaSI6Im9yZy5qZXRicmFpbnMua290bGlueDprb3RsaW54LWNvcm9
1dGluZXMtamRrODoxLjUuMi1uYXRpdmUtbXQgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3Itc2VydmVyLWhvc3QtY29tbW9uLWtvdGxpbk11bHRpcGxhdGZvcm06MS42LjcgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3Itc2VydmVyLWNvcmU6MS42LjcifSx7InVyaSI6Im9yZy5mdXNlc291cmNlLmphbnNpOmphbnNpOjIuNC4wIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItaHR0cC1jaW8tanZtOjEuNi43In0seyJ1cmkiOiJpby5naXRodWIubWljcm91dGlsczprb3RsaW4tbG9nZ2luZy1qdm06Mi4xLjIxIn0seyJ1cmkiOiJvcmcuZmx5d2F5ZGI6Zmx5d2F5LWNvcmU6OC4yLjIifSx7InVyaSI6Im5ldC5sb2dzdGFzaC5sb2diYWNrOmxvZ3N0YXNoLWxvZ2JhY2stZW5jb2Rlcjo3LjAuMSJ9LHsidXJpIjoib3JnLmpldGJyYWlucy5rb3RsaW46a290bGluLXN0ZGxpYjoxLjUuMzAgLVx1MDAzZSAxLjYuMTAgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3Itc2VydmVyLW5ldHR5OjEuNi43In0seyJ1cmkiOiJpby5rdG9yOmt0b3ItaHR0cC1qdm06MS42LjcifSx7InVyaSI6Im9yZy5oZHJoaXN0b2dyYW06SGRySGlzdG9ncmFtOjIuMS4xMiJ9LHsidXJpIjoiaW8ucHJvbWV0aGV1czpzaW1wbGVjbGllbnRfY29tbW9uOjAuMTIuMCJ9LHsidXJpIjoib3JnLmxhdGVuY3l1dGlsczpMYXRlbmN5VXRpbHM6Mi4wLjMifSx7InVyaSI6Im9yZy5qZXRicmFpbnMua290bGluOmtvdGxpbi1zdGRsaWItY29tbW9uOjEuNi4wIC1cdTAwM2UgMS42LjEwIn0seyJ1cmkiOiJvcmcuamV0YnJhaW5zLmtvdGxpbng6a290bGlueC1jb3JvdXRpbmVzLWNvcmUtanZtOjEuNS4yLW5hdGl2ZS1tdCJ9LHsidXJpIjoiaW8ubmV0dHk6bmV0dHktY29kZWMtaHR0cDo0LjEuNjkuRmluYWwifSx7InVyaSI6ImNvbS5hdXRoMDpqYXZhLWp3dDozLjEzLjAifSx7InVyaSI6ImNvbS5nb29nbGUuY29kZS5maW5kYnVnczpqc3IzMDU6My4wLjIifSx7InVyaSI6ImNvbS5uYXRwcnljZTprb25maWc6MS42LjEwLjAifSx7InVyaSI6ImlvLmdpdGh1Yi5taWNyb3V0aWxzOmtvdGxpbi1sb2dnaW5nOjIuMS4yMSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW1ldHJpY3MtbWljcm9tZXRlcjoxLjYuNyJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLWlvOjEuNi43In0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS1jb2RlYzo0LjEuNjkuRmluYWwgKCopIn0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS10cmFuc3BvcnQtbmF0aXZlLXVuaXgtY29tbW9uOjQuMS42OS5GaW5hbCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1hdXRoLWp3dDoxLjYuNyJ9LHsidXJpIjoiY29tLmF1dGgwOmp3a3MtcnNhOjAuMTcuMCJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW5ldHdvcmstdGxzOjEuNi43In0seyJ1cmkiOiJpby5rdG9yOmt0b3ItbmV0d29yay10bHMtanZtOjEuNi43In0seyJ1cmkiOiJvcmcub3cyLmFzbTphc206OS4xIn0seyJ1cmkiOiJvcmcuc2xmNGo6c2xmNGotYXBpOjEuNy4zMiJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLXV0aWxzLWp2bToxLjYuNyJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW5ldHdvcms6MS42LjcgKCopIn0seyJ1cmkiOiJpby5uZXR0eTpuZXR0eS1idWZmZXI6NC4xLjY5LkZpbmFsICgqKSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLWNsaWVudC1jb3JlOjEuNi43ICgqKSJ9LHsidXJpIjoiY29tLmZhc3RlcnhtbC5qYWNrc29uLm1vZHVsZTpqYWNrc29uLW1vZHVsZS1rb3RsaW46Mi4xMy4wICgqKSJ9LHsidXJpIjoiY29tLm5pbWJ1c2RzOmxhbmctdGFnOjEuNSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLW5ldHdvcmstanZtOjEuNi43In0seyJ1cmkiOiJvcmcuZWNsaXBzZS5qZXR0eS5hbHBuOmFscG4tYXBpOjEuMS4zLnYyMDE2MDcxNSJ9LHsidXJpIjoib3JnLmhhbWNyZXN0OmhhbWNyZXN0LWNvcmU6MS4xIn0seyJ1cmkiOiJvcmcuamV0YnJhaW5zOmFubm90YXRpb25zOjEzLjAifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LWNvbW1vbjo0LjEuNjkuRmluYWwifSx7InVyaSI6ImlvLm5ldHR5Om5ldHR5LXJlc29sdmVyOjQuMS42OS5GaW5hbCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1hdXRoLWtvdGxpbk11bHRpcGxhdGZvcm06MS42LjcifSx7InVyaSI6ImNvbS5nb29nbGUuZ3VhdmE6ZmFpbHVyZWFjY2VzczoxLjAuMSJ9LHsidXJpIjoiaW8ubWljcm9tZXRlcjptaWNyb21ldGVyLXJlZ2lzdHJ5LXByb21ldGhldXM6MS44LjEifSx7InVyaSI6Im9yZy5qZXRicmFpbnMua290bGluOmtvdGxpbi1zdGRsaWItamRrNzoxLjYuMCAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci11dGlsczoxLjYuNyAoKikifSx7InVyaSI6ImNvbS5mYXN0ZXJ4bWwuamFja3NvbjpqYWNrc29uLWJvbToyLjEzLjAgKCopIn0seyJ1cmkiOiJvcmcuY2hlY2tlcmZyYW1ld29yazpjaGVja2VyLXF1YWw6My41LjAifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1jbGllbnQtanNvbjoxLjYuNyAoKikifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1zZXJ2ZXItaG9zdC1jb21tb246MS42LjcifSx7InVyaSI6ImlvLmt0b3I6a3Rvci1zZXJ2ZXItY29yZS1rb3RsaW5NdWx0aXBsYXRmb3JtOjEuNi43ICgqKSJ9LHsidXJpIjoiaW8ua3RvcjprdG9yLWh0dHA6MS42LjcgKCopIn0seyJ1cmkiOiJpby5rdG9yOmt0b3ItY2xpZW50LWNvcmUtanZtOjEuNi43In0seyJ1cmkiOiJpby5wcm
9tZXRoZXVzOnNpbXBsZWNsaWVudF90cmFjZXJfb3RlbF9hZ2VudDowLjEyLjAifV19fQ==",
4 | "signatures": [
5 | {
6 | "keyid": "",
7 | "sig": "MEUCIQDQ9kF3ifhd8xdtqAZ6YoLnR4T8FOQi+vx3UvpPB95ixAIgUlNhPqOIpdlNei3V+f4GYmoDfMCbLJbflBIjw27MxaU="
8 | }
9 | ]
10 | }
11 |
--------------------------------------------------------------------------------
/pkg/utils/exec.go:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import (
4 | "bytes"
5 | "fmt"
6 | "io"
7 | "os"
8 | "os/exec"
9 | )
10 |
11 | type CreateCmd = func(name string, arg ...string) *exec.Cmd
12 |
13 | type Cmd struct {
14 | Name string
15 | SubCmd string
16 | Flags []string
17 | Args []string
18 | WorkDir string
19 | Runner CmdRunner
20 | }
21 |
22 | func NewCmd(
23 | name string,
24 | subCmd string,
25 | flags []string,
26 | args []string,
27 | workDir string,
28 | ) Cmd {
29 | return Cmd{Name: name, SubCmd: subCmd, Flags: flags, Args: args, WorkDir: workDir, Runner: &ExecCmd{}}
30 | }
31 |
32 | func (c *Cmd) WithRunner(runner CmdRunner) {
33 | c.Runner = runner
34 | }
35 |
36 | type CmdRunner interface {
37 | CreateCmd() CreateCmd
38 | }
39 |
40 | type ExecCmd struct{}
41 |
42 | func (c ExecCmd) CreateCmd() CreateCmd {
43 | return exec.Command
44 | }
45 |
46 | func (c Cmd) Run() (string, error) {
47 | args := make([]string, 0)
48 | if c.SubCmd != "" {
49 | args = append(args, c.SubCmd)
50 | }
51 | if c.Flags != nil {
52 | args = append(args, c.Flags...)
53 | }
54 | if c.Args != nil {
55 | args = append(args, c.Args...)
56 | }
57 | cmd := c.Runner.CreateCmd()(c.Name, args...)
58 |
59 | err := requireCommand(cmd.Path)
60 | if err != nil {
61 | return "", err
62 | }
63 | cmd.Dir = c.WorkDir
64 | var stdoutBuf, stderrBuf bytes.Buffer
65 | cmd.Stdout = io.MultiWriter(os.Stdout, &stdoutBuf)
66 | cmd.Stderr = io.MultiWriter(os.Stderr, &stderrBuf)
67 |
68 | err = cmd.Run()
69 | if err != nil {
70 | return "", err
71 | }
72 | outStr := stdoutBuf.String()
73 | return outStr, nil
74 | }
75 |
76 | func requireCommand(cmd string) error {
77 | if _, err := exec.LookPath(cmd); err != nil {
78 | return fmt.Errorf("could not find required cmd: %w", err)
79 | }
80 | return nil
81 | }
82 |
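A rough sketch of wrapping an external tool in utils.Cmd; the binary ("go"), sub command and flags are illustrative, and the binary must be resolvable on PATH, which is what requireCommand enforces:

```go
package main

import (
	"fmt"
	"log"

	"github.com/nais/salsa/pkg/utils"
)

func main() {
	// Illustrative invocation: "go list -m" in the current directory.
	cmd := utils.NewCmd(
		"go",           // binary name, looked up on PATH
		"list",         // sub command
		[]string{"-m"}, // flags
		nil,            // positional args
		".",            // working directory
	)

	out, err := cmd.Run()
	if err != nil {
		log.Fatalf("running command: %v", err)
	}
	fmt.Print(out)
}
```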
--------------------------------------------------------------------------------
/pkg/utils/exec_test.go:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import (
4 | "flag"
5 | "fmt"
6 | "os"
7 | "os/exec"
8 | "testing"
9 |
10 | "github.com/stretchr/testify/assert"
11 | )
12 |
13 | type FakeRunner struct{}
14 |
15 | func TestRun(t *testing.T) {
16 | f := &Cmd{
17 | Name: "cosign",
18 | SubCmd: "",
19 | Flags: []string{"--key", "key", "--predicate", "provenance.json"},
20 | Args: []string{"image"},
21 | WorkDir: "",
22 | Runner: &FakeRunner{},
23 | }
24 | out, _ := f.Run()
25 |
26 | assert.Equal(t, "[cosign --key key --predicate provenance.json image]", out)
27 | }
28 |
29 | func TestDryRunCmd(t *testing.T) {
30 | if os.Getenv("GO_TEST_DRYRUN") != "1" {
31 | return
32 | }
33 | fmt.Printf("%s", flag.Args())
34 | os.Exit(0)
35 | }
36 |
37 | func (r FakeRunner) CreateCmd() CreateCmd {
38 | return func(command string, args ...string) *exec.Cmd {
39 | cs := []string{"-test.run=TestDryRunCmd", "--", command}
40 | cs = append(cs, args...)
41 | cmd := exec.Command(os.Args[0], cs...)
42 | cmd.Env = []string{"GO_TEST_DRYRUN=1"}
43 | return cmd
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/pkg/utils/utils.go:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import (
4 | "encoding/base64"
5 | "fmt"
6 | "time"
7 |
8 | "github.com/briandowns/spinner"
9 | )
10 |
11 | func ProvenanceFile(repoName string) string {
12 | return fmt.Sprintf("%s.provenance", repoName)
13 | }
14 |
15 | func StartSpinner(message string) *spinner.Spinner {
16 | s := spinner.New(spinner.CharSets[11], 150*time.Millisecond)
17 | s.FinalMSG = message
18 | s.Start()
19 | return s
20 | }
21 |
22 | func DecodeDigest(base64Encoded string) (string, error) {
23 | decoded, err := base64.StdEncoding.DecodeString(base64Encoded)
24 | if err != nil {
25 | return "", fmt.Errorf("decoding base64 encoded checksum")
26 | }
27 | return fmt.Sprintf("%x", decoded), nil
28 | }
29 |
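A small, self-contained sketch of DecodeDigest; the base64 checksum is generated on the fly purely for illustration:

```go
package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"

	"github.com/nais/salsa/pkg/utils"
)

func main() {
	// Build an illustrative base64 encoded sha256 checksum.
	sum := sha256.Sum256([]byte("example artifact"))
	encoded := base64.StdEncoding.EncodeToString(sum[:])

	// DecodeDigest returns the same bytes as a hex string.
	hexDigest, err := utils.DecodeDigest(encoded)
	if err != nil {
		panic(err)
	}
	fmt.Println(hexDigest)
}
```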
--------------------------------------------------------------------------------
/pkg/vcs/environment.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | type ContextEnvironment interface {
4 | BuilderId() string
5 | BuildInvocationId() string
6 | BuildType() string
7 | Context() string
8 | CurrentFilteredEnvironment() map[string]string
9 | NonReproducibleMetadata() *Metadata
10 | UserDefinedParameters() *Event
11 | RepoUri() string
12 | Sha() string
13 | GetEvent() *Event
14 | }
15 |
--------------------------------------------------------------------------------
/pkg/vcs/event.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/json"
5 | "time"
6 | )
7 |
8 | type Event struct {
9 | EventMetadata *EventMetadata `json:"event"`
10 | }
11 |
12 | type EventMetadata struct {
13 | HeadCommit *HeadCommit `json:"head_commit"`
14 | PullRequest *PullRequest `json:"pull_request"`
15 | WorkFlowRun *WorkFlow `json:"workflow_run"`
16 | }
17 |
18 | type HeadCommit struct {
19 | Timestamp string `json:"timestamp"`
20 | }
21 |
22 | type PullRequest struct {
23 | UpdatedAt string `json:"updated_at"`
24 | }
25 |
26 | type WorkFlow struct {
27 | HeadCommit *HeadCommit `json:"head_commit"`
28 | }
29 |
30 | func ParseEvent(inputs []byte) (*Event, error) {
31 | var event Event
32 | err := json.Unmarshal(inputs, &event.EventMetadata)
33 | if err != nil {
34 | return nil, err
35 | }
36 | return &event, nil
37 | }
38 |
39 | func (in *Event) GetHeadCommitTimestamp() string {
40 | if in.EventMetadata.HeadCommit != nil {
41 | return in.EventMetadata.HeadCommit.Timestamp
42 | }
43 |
44 | if in.EventMetadata.WorkFlowRun != nil {
45 | return in.EventMetadata.WorkFlowRun.HeadCommit.Timestamp
46 | }
47 |
48 | if in.EventMetadata.PullRequest != nil {
49 | return in.EventMetadata.PullRequest.UpdatedAt
50 | }
51 |
52 | return time.Now().UTC().Round(time.Second).Format(time.RFC3339)
53 | }
54 |
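A minimal sketch of ParseEvent: the raw event payload from the GitHub context is parsed and the head-commit timestamp extracted; the trimmed JSON below is illustrative:

```go
package main

import (
	"fmt"

	"github.com/nais/salsa/pkg/vcs"
)

func main() {
	// Trimmed, illustrative event payload containing only a head_commit.
	payload := []byte(`{"head_commit": {"timestamp": "2022-10-21T11:26:55+02:00"}}`)

	event, err := vcs.ParseEvent(payload)
	if err != nil {
		panic(err)
	}

	// Falls back to workflow_run.head_commit.timestamp, then pull_request.updated_at,
	// and finally the current time when no timestamp is present.
	fmt.Println(event.GetHeadCommitTimestamp())
}
```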
--------------------------------------------------------------------------------
/pkg/vcs/event_test.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "github.com/stretchr/testify/assert"
5 | "os"
6 | "testing"
7 | "time"
8 | )
9 |
10 | func TestEventHeadCommit(t *testing.T) {
11 | for _, test := range []struct {
12 | name string
13 | workFlowMetaData []byte
14 | WantTime string
15 | }{
16 | {
17 | name: "GitHub Event workflow_run with head_commit",
18 | workFlowMeatData: commitMetadata(t, "testdata/workflowrun-head-commit.json"),
19 | WantTime: "2022-10-21T11:26:55+02:00",
20 | },
21 | {
22 | name: "GitHub Event pull_request with updated_at",
23 | workFlowMeatData: commitMetadata(t, "testdata/pull-request-event.json"),
24 | WantTime: "2022-11-17T07:46:39Z",
25 | },
26 | {
27 | name: "GitHub Event workflow_dispatch with head_commit",
28 | workFlowMeatData: commitMetadata(t, "testdata/github-context.json"),
29 | WantTime: "2022-02-14T09:38:16+01:00",
30 | },
31 | {
32 | name: "No metadata found, should return default start time",
33 | workFlowMeatData: commitMetadata(t, "testdata/unknown-event.json"),
34 | },
35 | } {
36 | t.Run(test.name, func(t *testing.T) {
37 | context, err := ParseContext(test.workFlowMetaData)
38 | assert.NoError(t, err)
39 | parsedEvent, err := ParseEvent(context.Event)
40 | assert.NoError(t, err)
41 | assert.NotNil(t, parsedEvent)
42 | if test.WantTime != "" {
43 | assert.Equal(t, test.WantTime, parsedEvent.GetHeadCommitTimestamp())
44 | } else {
45 | _, err := time.Parse(time.RFC3339, parsedEvent.GetHeadCommitTimestamp())
46 | assert.NoError(t, err)
47 | }
48 | })
49 | }
50 | }
51 |
52 | func commitMetadata(t *testing.T, eventFile string) []byte {
53 | metadata, err := os.ReadFile(eventFile)
54 | assert.NoError(t, err)
55 | return metadata
56 | }
57 |
--------------------------------------------------------------------------------
/pkg/vcs/github.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | )
7 |
8 | type GithubContext struct {
9 | Action string `json:"action"`
10 | Actor string `json:"actor"`
11 | Event json.RawMessage `json:"event"`
12 | EventName string `json:"event_name"`
13 | EventPath string `json:"event_path"`
14 | Job string `json:"job"`
15 | Ref string `json:"ref"`
16 | Repository string `json:"repository"`
17 | RepositoryOwner string `json:"repository_owner"`
18 | RunId string `json:"run_id"`
19 | RunNumber string `json:"run_number"`
20 | ServerUrl string `json:"server_url"`
21 | SHA string `json:"sha"`
22 | Token string `json:"token,omitempty"`
23 | Workflow string `json:"workflow"`
24 | Workspace string `json:"workspace"`
25 | }
26 |
27 | func ParseContext(github []byte) (*GithubContext, error) {
28 | context := GithubContext{}
29 | if len(github) == 0 {
30 | return nil, nil
31 | }
32 |
33 | if err := json.Unmarshal(github, &context); err != nil {
34 | return nil, fmt.Errorf("unmarshal github context json: %w", err)
35 | }
36 | 
37 | // Ensure we don't misuse the token: clear it so it can never end up
38 | // in the generated provenance.
39 | context.Token = ""
40 | 
41 | return &context, nil
42 | }
43 |
44 | type Actions struct {
45 | HostedIdSuffix string
46 | SelfHostedIdSuffix string
47 | BuildType string
48 | }
49 |
50 | // BuildId returns the static GitHub Actions builder identifiers and build type.
51 | // The GitHub Actions team has not yet reviewed or approved this design,
52 | // and it is not yet implemented. Details are subject to change!
53 | func BuildId(version string) *Actions {
54 | return &Actions{
55 | HostedIdSuffix: fmt.Sprintf("/Attestations/GitHubHostedActions@%s", version),
56 | // Self-hosted runner: Not yet supported.
57 | SelfHostedIdSuffix: fmt.Sprintf("/Attestations/SelfHostedActions@%s", version),
58 | // BuildType describes how the invocation's buildConfig and materials were created
59 | BuildType: fmt.Sprintf("https://github.com/Attestations/GitHubActionsWorkflow@%s", version),
60 | }
61 | }
62 |
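A small sketch of ParseContext with a trimmed, illustrative GitHub context; note that the token field is always cleared so it cannot leak into the generated provenance:

```go
package main

import (
	"fmt"

	"github.com/nais/salsa/pkg/vcs"
)

func main() {
	// Trimmed, illustrative GitHub context.
	raw := []byte(`{
		"repository": "bolo/tomato",
		"run_id": "1839977840",
		"sha": "90dc9f2bc4007d1099a941ba3d408d2c896fe8dd",
		"server_url": "https://github.com",
		"token": "should-never-show-up"
	}`)

	context, err := vcs.ParseContext(raw)
	if err != nil {
		panic(err)
	}

	fmt.Println(context.Repository, context.RunId)
	fmt.Println(context.Token == "") // always true: the token is blanked out
}
```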
--------------------------------------------------------------------------------
/pkg/vcs/github_build.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/base64"
5 | "encoding/json"
6 | "fmt"
7 | "strings"
8 | )
9 |
10 | type CurrentBuildEnvironment struct {
11 | Envs map[string]string
12 | }
13 |
14 | func ParseBuild(envs *string) (*CurrentBuildEnvironment, error) {
15 | current := make(map[string]string)
16 |
17 | decodedEnvsBytes, err := base64.StdEncoding.DecodeString(*envs)
18 | if err != nil {
19 | return nil, fmt.Errorf("decoding envs context: %w", err)
20 | }
21 |
22 | if err := json.Unmarshal(decodedEnvsBytes, &current); err != nil {
23 | return nil, fmt.Errorf("unmarshal environmental context json: %w", err)
24 | }
25 |
26 | return &CurrentBuildEnvironment{
27 | Envs: current,
28 | }, nil
29 | }
30 |
31 | func (in *CurrentBuildEnvironment) FilterEnvs() map[string]string {
32 | if len(in.Envs) < 1 {
33 | return map[string]string{}
34 | }
35 |
36 | for key := range in.Envs {
37 | in.filterEnvsWithPrefix(key, "INPUT_", "GITHUB_", "RUNNER_", "ACTIONS_")
38 | in.filterEnv(key, "TOKEN")
39 | }
40 |
41 | in.removeDuplicateValues()
42 | return in.Envs
43 | }
44 |
45 | func (in *CurrentBuildEnvironment) filterEnv(key string, contains ...string) {
46 | for _, contain := range contains {
47 | if strings.Contains(key, contain) {
48 | delete(in.Envs, key)
49 | }
50 | }
51 | }
52 |
53 | func (in *CurrentBuildEnvironment) filterEnvsWithPrefix(key string, prefixes ...string) {
54 | for _, prefix := range prefixes {
55 | if strings.HasPrefix(key, prefix) {
56 | delete(in.Envs, key)
57 | }
58 | }
59 | }
60 |
61 | func (in *CurrentBuildEnvironment) GetEnvs() map[string]string {
62 | return in.Envs
63 | }
64 |
65 | func (in *CurrentBuildEnvironment) removeDuplicateValues() {
66 | var current = make(map[string]struct{})
67 | for key, v := range in.Envs {
68 | if _, has := current[v]; has {
69 | delete(in.Envs, key)
70 | }
71 | current[v] = struct{}{}
72 | }
73 | }
74 |
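A minimal sketch of ParseBuild and FilterEnvs: the environment arrives base64 encoded, and GitHub/runner internals plus anything token-like are filtered out before the map is attached to the provenance; the environment map below is made up for illustration:

```go
package main

import (
	"encoding/base64"
	"encoding/json"
	"fmt"

	"github.com/nais/salsa/pkg/vcs"
)

func main() {
	// Hypothetical build environment, for illustration only.
	envs := map[string]string{
		"GO_VERSION":   "1.17",
		"GITHUB_SHA":   "90dc9f2b", // filtered: GITHUB_ prefix
		"MY_API_TOKEN": "secret",   // filtered: key contains TOKEN
	}
	raw, _ := json.Marshal(envs)
	encoded := base64.StdEncoding.EncodeToString(raw)

	build, err := vcs.ParseBuild(&encoded)
	if err != nil {
		panic(err)
	}
	fmt.Println(build.FilterEnvs()) // map[GO_VERSION:1.17]
}
```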
--------------------------------------------------------------------------------
/pkg/vcs/github_ci.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "fmt"
5 | )
6 |
7 | const (
8 | GithubActionsBuildIdVersion = "v1"
9 | )
10 |
11 | type GithubCIEnvironment struct {
12 | BuildContext *GithubContext
13 | Event *Event
14 | RunnerContext *RunnerContext
15 | BuildEnvironment *CurrentBuildEnvironment
16 | Actions *Actions
17 | }
18 |
19 | func CreateGithubCIEnvironment(githubContext []byte, runnerContext, envsContext *string) (ContextEnvironment, error) {
20 | context, err := ParseContext(githubContext)
21 | if err != nil {
22 | return nil, fmt.Errorf("parsing context: %w", err)
23 | }
24 |
25 | runner, err := ParseRunner(runnerContext)
26 | if err != nil {
27 | return nil, fmt.Errorf("parsing runner: %w", err)
28 | }
29 |
30 | event, err := ParseEvent(context.Event)
31 | if err != nil {
32 | return nil, fmt.Errorf("parsing event: %w", err)
33 | }
34 |
35 | // The envs context is optional and not required to build a CI environment
36 | current := &CurrentBuildEnvironment{}
37 | if envsContext == nil || len(*envsContext) == 0 {
38 | return BuildEnvironment(context, runner, current, event), nil
39 | }
40 |
41 | current, err = ParseBuild(envsContext)
42 | if err != nil {
43 | return nil, fmt.Errorf("parsing envs: %w", err)
44 | }
45 |
46 | return BuildEnvironment(context, runner, current, event), nil
47 | }
48 |
49 | func BuildEnvironment(context *GithubContext, runner *RunnerContext, current *CurrentBuildEnvironment, event *Event) ContextEnvironment {
50 | return &GithubCIEnvironment{
51 | BuildContext: context,
52 | Event: event,
53 | RunnerContext: runner,
54 | BuildEnvironment: current,
55 | Actions: BuildId(GithubActionsBuildIdVersion),
56 | }
57 | }
58 |
59 | func (in *GithubCIEnvironment) Context() string {
60 | return in.BuildContext.Workflow
61 | }
62 |
63 | func (in *GithubCIEnvironment) BuildType() string {
64 | return in.Actions.BuildType
65 | }
66 |
67 | func (in *GithubCIEnvironment) RepoUri() string {
68 | return fmt.Sprintf("%s/%s", in.BuildContext.ServerUrl, in.BuildContext.Repository)
69 | }
70 |
71 | func (in *GithubCIEnvironment) BuildInvocationId() string {
72 | return fmt.Sprintf("%s/actions/runs/%s", in.RepoUri(), in.BuildContext.RunId)
73 | }
74 |
75 | func (in *GithubCIEnvironment) Sha() string {
76 | return in.BuildContext.SHA
77 | }
78 |
79 | func (in *GithubCIEnvironment) BuilderId() string {
80 | if ContextTypeGithub.Hosted() {
81 | return in.RepoUri() + in.Actions.HostedIdSuffix
82 | }
83 | return in.RepoUri() + in.Actions.SelfHostedIdSuffix
84 | }
85 |
86 | func (in *GithubCIEnvironment) UserDefinedParameters() *Event {
87 | // workflow_dispatch inputs are the only possible user-defined parameters;
88 | // the returned event is nil for all other event types.
89 | if in.BuildContext.EventName != "workflow_dispatch" {
90 | return nil
91 | }
92 |
93 | // The event should be filtered down to only the information needed
94 | return in.Event
95 | }
96 |
97 | func (in *GithubCIEnvironment) CurrentFilteredEnvironment() map[string]string {
98 | if in.BuildEnvironment == nil {
99 | return map[string]string{}
100 | }
101 |
102 | return in.BuildEnvironment.FilterEnvs()
103 | }
104 |
105 | func (in *GithubCIEnvironment) NonReproducibleMetadata() *Metadata {
106 | // Other variables that are required to reproduce the build and that cannot be
107 | // recomputed using existing information.
108 | // (Documentation would explain how to recompute the rest of the fields.)
109 | return &Metadata{
110 | Arch: in.RunnerContext.Arch,
111 | Env: in.CurrentFilteredEnvironment(),
112 | Context: Context{
113 | Github: Github{
114 | RunId: in.BuildContext.RunId,
115 | },
116 | Runner: Runner{
117 | Os: in.RunnerContext.OS,
118 | Temp: in.RunnerContext.Temp,
119 | },
120 | },
121 | }
122 | }
123 |
124 | func (in *GithubCIEnvironment) GetEvent() *Event {
125 | return in.Event
126 | }
127 |
--------------------------------------------------------------------------------
/pkg/vcs/github_ci_test.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/base64"
5 | "github.com/stretchr/testify/assert"
6 | "os"
7 | "testing"
8 | )
9 |
10 | func TestCreateCIEnvironment(t *testing.T) {
11 | err := os.Setenv("GITHUB_ACTIONS", "true")
12 | assert.NoError(t, err)
13 | context := githubContext(t)
14 | runner := runnerContext()
15 | env := envC()
16 | ci, err := CreateGithubCIEnvironment(context, &runner, &env)
17 | assert.NoError(t, err)
18 | assert.Equal(t, "https://github.com/bolo/tomato", ci.RepoUri())
19 | assert.Equal(t, "90dc9f2bc4007d1099a941ba3d408d2c896fe8dd", ci.Sha())
20 | assert.Equal(t, "https://github.com/bolo/tomato/Attestations/GitHubHostedActions@v1", ci.BuilderId())
21 | assert.Equal(t, "https://github.com/bolo/tomato/actions/runs/1839977840", ci.BuildInvocationId())
22 |
23 | metadata := Metadata{
24 | Arch: "X64",
25 | Env: map[string]string{
26 | "GO_ROOT": "/opt/hostedtoolcache/go/1.17.6/x64",
27 | "GO_VERSION": "1.17",
28 | },
29 | Context: Context{
30 | Github: Github{
31 | RunId: "1839977840",
32 | }, Runner: Runner{
33 | Os: "Linux",
34 | Temp: "/home/runner/work/_temp",
35 | },
36 | },
37 | }
38 | assert.Equal(t, &metadata, ci.NonReproducibleMetadata())
39 |
40 | current := map[string]string{
41 | "GO_ROOT": "/opt/hostedtoolcache/go/1.17.6/x64",
42 | "GO_VERSION": "1.17",
43 | }
44 | assert.Equal(t, current, ci.CurrentFilteredEnvironment())
45 |
46 | result := ci.UserDefinedParameters()
47 | assert.NotNil(t, result)
48 | assert.NotEmpty(t, "%s", result)
49 | assert.Equal(t, "tomato CI", ci.Context())
50 |
51 | }
52 |
53 | func TestGetHeadCommitTime(t *testing.T) {
54 | err := os.Setenv("GITHUB_ACTIONS", "true")
55 | assert.NoError(t, err)
56 | context := githubContext(t)
57 | runner := runnerContext()
58 | env := envC()
59 | ci, err := CreateGithubCIEnvironment(context, &runner, &env)
60 | assert.NoError(t, err)
61 | assert.Equal(t, "2022-02-14T09:38:16+01:00", ci.GetEvent().GetHeadCommitTimestamp())
62 | }
63 |
64 | func githubContext(t *testing.T) []byte {
65 | context, err := os.ReadFile("testdata/github-context.json")
66 | assert.NoError(t, err)
67 | return context
68 | }
69 |
70 | func runnerContext() string {
71 | return base64.StdEncoding.EncodeToString([]byte(RunnerTestContext))
72 | }
73 |
74 | func envC() string {
75 | return base64.StdEncoding.EncodeToString([]byte(envTestContext))
76 | }
77 |
78 | var envTestContext = `{
79 | "GO_VERSION": "1.17",
80 | "GO_ROOT": "/opt/hostedtoolcache/go/1.17.6/x64"
81 | }`
82 |
83 | var RunnerTestContext = `{
84 | "os": "Linux",
85 | "arch": "X64",
86 | "name": "Hosted Agent",
87 | "tool_cache": "/opt/hostedtoolcache",
88 | "temp": "/home/runner/work/_temp",
89 | "workspace": "/home/runner/work/nais-salsa-action"
90 | }`
91 |
--------------------------------------------------------------------------------
/pkg/vcs/github_runner.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/base64"
5 | "encoding/json"
6 | "fmt"
7 | )
8 |
9 | type RunnerContext struct {
10 | Name string `json:"name"`
11 | Arch string `json:"arch"`
12 | OS string `json:"os"`
13 | Temp string `json:"temp"`
14 | ToolCache string `json:"tool_cache"`
15 | }
16 |
17 | func ParseRunner(runner *string) (*RunnerContext, error) {
18 | context := RunnerContext{}
19 |
20 | if len(*runner) == 0 {
21 | return nil, nil
22 | }
23 |
24 | decodedRunnerBytes, err := base64.StdEncoding.DecodeString(*runner)
25 | if err != nil {
26 | return nil, fmt.Errorf("decoding runner context: %w", err)
27 | }
28 |
29 | if err := json.Unmarshal(decodedRunnerBytes, &context); err != nil {
30 | return nil, fmt.Errorf("unmarshal runner context json: %w", err)
31 | }
32 |
33 | return &context, nil
34 | }
35 |
--------------------------------------------------------------------------------
/pkg/vcs/github_runner_test.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/base64"
5 | "github.com/stretchr/testify/assert"
6 | "testing"
7 | )
8 |
9 | func TestParseRunnerContext(t *testing.T) {
10 | encodedContext := base64.StdEncoding.EncodeToString([]byte(RunnerTestContext))
11 | context, err := ParseRunner(&encodedContext)
12 | assert.NoError(t, err)
13 | assert.Equal(t, "Hosted Agent", context.Name)
14 | assert.Equal(t, "Linux", context.OS)
15 | assert.Equal(t, "X64", context.Arch)
16 | assert.Equal(t, "/opt/hostedtoolcache", context.ToolCache)
17 | assert.Equal(t, "/home/runner/work/_temp", context.Temp)
18 | }
19 |
20 | func TestParseRunnerNoContext(t *testing.T) {
21 | encodedContext := base64.StdEncoding.EncodeToString([]byte(""))
22 | context, err := ParseRunner(&encodedContext)
23 | assert.NoError(t, err)
24 | assert.Nil(t, context)
25 | }
26 |
27 | func TestParseRunnerFailContext(t *testing.T) {
28 | data := "yolo"
29 | context, err := ParseRunner(&data)
30 | assert.Nil(t, context)
31 | assert.EqualError(t, err, "unmarshal runner context json: invalid character 'Ê' looking for beginning of value")
32 | }
33 |
--------------------------------------------------------------------------------
/pkg/vcs/github_test.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "github.com/stretchr/testify/assert"
5 | "os"
6 | "testing"
7 | )
8 |
9 | func TestParseGithubContext(t *testing.T) {
10 | ctx, err := os.ReadFile("testdata/github-context.json")
11 | assert.NoError(t, err)
12 | context, err := ParseContext(ctx)
13 | assert.NoError(t, err)
14 |
15 | assert.Equal(t, "90dc9f2bc4007d1099a941ba3d408d2c896fe8dd", context.SHA)
16 | assert.Equal(t, "build", context.Job)
17 | assert.Equal(t, "refs/heads/main", context.Ref)
18 | assert.Equal(t, "bolo/tomato", context.Repository)
19 | assert.Equal(t, "bolo", context.RepositoryOwner)
20 | assert.Equal(t, "1839977840", context.RunId)
21 | assert.Equal(t, "57", context.RunNumber)
22 | assert.Equal(t, "jdoe", context.Actor)
23 | }
24 |
25 | func TestGithubStaticIdentification(t *testing.T) {
26 | static := BuildId("v1")
27 | assert.Equal(t, "https://github.com/Attestations/GitHubActionsWorkflow@v1", static.BuildType)
28 | assert.Equal(t, "/Attestations/SelfHostedActions@v1", static.SelfHostedIdSuffix)
29 | assert.Equal(t, "/Attestations/GitHubHostedActions@v1", static.HostedIdSuffix)
30 | }
31 |
--------------------------------------------------------------------------------
/pkg/vcs/nonereproduceible.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | type Metadata struct {
4 | Arch string `json:"arch"`
5 | Env map[string]string `json:"env"`
6 | Context Context `json:"context"`
7 | }
8 |
9 | type Context struct {
10 | Github Github
11 | Runner Runner
12 | }
13 |
14 | type Runner struct {
15 | Os string
16 | Temp string
17 | }
18 |
19 | type Github struct {
20 | RunId string
21 | }
22 |
--------------------------------------------------------------------------------
/pkg/vcs/resolve_context.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/base64"
5 | "encoding/json"
6 | "fmt"
7 | log "github.com/sirupsen/logrus"
8 | "os"
9 | )
10 |
11 | type ContextType string
12 |
13 | const (
14 | ContextTypeGithub ContextType = "GITHUB_ACTIONS"
15 | )
16 |
17 | func (in ContextType) String() string {
18 | return string(in)
19 | }
20 |
21 | func (in ContextType) Hosted() bool {
22 | return os.Getenv(in.String()) == "true"
23 | }
24 |
25 | func ResolveBuildContext(context, runner, env *string) (ContextEnvironment, error) {
26 | if !buildContext(context, runner) {
27 | return nil, nil
28 | }
29 |
30 | decodedContext, err := base64.StdEncoding.DecodeString(*context)
31 | if err != nil {
32 | return nil, fmt.Errorf("decoding context: %w", err)
33 | }
34 |
35 | if !isJSON(decodedContext) {
36 | return nil, fmt.Errorf("decoded build context is not in json format")
37 | }
38 |
39 | if ContextTypeGithub.Hosted() {
40 | log.Info("prepare Github CI environment...")
41 | return CreateGithubCIEnvironment(decodedContext, runner, env)
42 | }
43 |
44 | return nil, fmt.Errorf("build context is not supported")
45 | }
46 |
47 | // A build context and runner are required when creating a CI environment; without them the CLI is assumed to be run manually
48 | func buildContext(context, runner *string) bool {
49 | return (context != nil && len(*context) != 0) && (runner != nil && len(*runner) != 0)
50 | }
51 |
52 | func isJSON(str []byte) bool {
53 | var js json.RawMessage
54 | return json.Unmarshal(str, &js) == nil
55 | }
56 |
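A rough sketch of how ResolveBuildContext could be wired up from CLI flag values; the flag values are hypothetical, and a nil result simply means the CLI was run manually without a build context:

```go
package main

import (
	"fmt"
	"log"

	"github.com/nais/salsa/pkg/vcs"
)

func main() {
	// Hypothetical flag values, for illustration only. In CI these would
	// typically be base64 encoded GitHub and runner context JSON.
	githubContext := ""
	runnerContext := ""
	envContext := ""

	resolved, err := vcs.ResolveBuildContext(&githubContext, &runnerContext, &envContext)
	if err != nil {
		log.Fatalf("resolving build context: %v", err)
	}
	if resolved == nil {
		fmt.Println("no CI context detected; assuming a manual run")
		return
	}
	fmt.Println(resolved.BuildInvocationId())
}
```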
--------------------------------------------------------------------------------
/pkg/vcs/resolve_context_test.go:
--------------------------------------------------------------------------------
1 | package vcs
2 |
3 | import (
4 | "encoding/base64"
5 | "github.com/stretchr/testify/assert"
6 | "os"
7 | "testing"
8 | )
9 |
10 | func TestResolveBuildContext(t *testing.T) {
11 | context, err := os.ReadFile("testdata/github-context.json")
12 | runner := RunnerTestContext
13 | assert.NoError(t, err)
14 |
15 | for _, test := range []struct {
16 | name string
17 | context string
18 | runner string
19 | buildEnv bool
20 | error bool
21 | manually bool
22 | errorMessage string
23 | }{
24 | {
25 | name: "Resolve a CI environment with proper context and runner",
26 | context: encode(context),
27 | runner: encode([]byte(runner)),
28 | buildEnv: true,
29 | },
30 | {
31 | name: "CLI is run manually without build context or runner",
32 | context: "",
33 | runner: "",
34 | manually: true,
35 | },
36 | {
37 | name: "Not a valid build context",
38 | context: "yolo",
39 | runner: "yolo",
40 | error: true,
41 | errorMessage: "decoded build context is not in json format",
42 | },
43 | {
44 | name: "Valid input json context and runner but not a supported context",
45 | context: encode([]byte(`{"valid": "json"}`)),
46 | runner: encode([]byte(`{"valid": "json"}`)),
47 | error: true,
48 | errorMessage: "build context is not supported",
49 | },
50 | } {
51 | t.Run(test.name, func(t *testing.T) {
52 | if test.buildEnv {
53 | err = os.Setenv("GITHUB_ACTIONS", "true")
54 | assert.NoError(t, err)
55 | }
56 |
57 | resolved, err := ResolveBuildContext(&test.context, &test.runner, nil)
58 |
59 | switch {
60 | case test.error:
61 | assert.EqualError(t, err, test.errorMessage)
62 | case test.manually:
63 | assert.NoError(t, err)
64 | assert.Nil(t, resolved)
65 | default:
66 | assert.NoError(t, err)
67 | assert.NotNil(t, resolved)
68 | assert.Equal(t, "tomato CI", resolved.Context())
69 | assert.Equal(t, map[string]string{}, resolved.CurrentFilteredEnvironment())
70 | assert.Equal(t, "https://github.com/bolo/tomato/actions/runs/1839977840", resolved.BuildInvocationId())
71 | assert.Equal(t, "https://github.com/Attestations/GitHubActionsWorkflow@v1", resolved.BuildType())
72 | assert.Equal(t, "https://github.com/bolo/tomato", resolved.RepoUri())
73 | assert.Equal(t, "https://github.com/bolo/tomato/Attestations/GitHubHostedActions@v1", resolved.BuilderId())
74 | assert.Equal(t, "90dc9f2bc4007d1099a941ba3d408d2c896fe8dd", resolved.Sha())
75 | assert.NotNil(t, resolved.UserDefinedParameters())
76 | assert.NotEmpty(t, resolved.UserDefinedParameters())
77 | assert.Equal(t, "Linux", resolved.NonReproducibleMetadata().Context.Runner.Os)
78 | assert.Equal(t, "/home/runner/work/_temp", resolved.NonReproducibleMetadata().Context.Runner.Temp)
79 | assert.Equal(t, "1839977840", resolved.NonReproducibleMetadata().Context.Github.RunId)
80 | assert.Empty(t, "", resolved.NonReproducibleMetadata().Env)
81 | assert.Equal(t, "X64", resolved.NonReproducibleMetadata().Arch)
82 | // Unset for next test
83 | err = os.Unsetenv("GITHUB_ACTIONS")
84 | assert.NoError(t, err)
85 | }
86 | })
87 | }
88 | }
89 |
90 | func encode(b []byte) string {
91 | return base64.StdEncoding.EncodeToString(b)
92 | }
93 |
--------------------------------------------------------------------------------
/pkg/vcs/testdata/unknown-event.json:
--------------------------------------------------------------------------------
1 | {
2 | "actor": "jdoe",
3 | "workflow": "Salsa CI",
4 | "head_ref": "",
5 | "base_ref": "",
6 | "event": {
7 | "after": "d4cd018b2fe54d8308b78f2bb88db94ac57173ea",
8 | "base_ref": null,
9 | "before": "a88fd9ea948a6ea1278ebcfd4b238283a72e12b0",
10 | "commits": [
11 | {
12 | "author": {
13 | "email": "john.doe@emal.com",
14 | "name": "jDoe",
15 | "username": "jDoe"
16 | },
17 | "committer": {
18 | "email": "john.doe@emal.com",
19 | "name": "jDoe",
20 | "username": "jDoe"
21 | },
22 | "distinct": true,
23 | "id": "d4cd018b2fe54d8308b78f2bb88db94ac57173ea",
24 | "message": "master commit",
25 | "timestamp": "2022-10-21T11:26:55+02:00",
26 | "tree_id": "1eb5ac4d731daeb199755932a5a2e126e10c80cc",
27 | "url": "https://github.com/yolo"
28 | }
29 | ],
30 | "compare": "https://github.com/yolo/gandalf/compare/a88fd9ea948a...d4cd018b2fe5",
31 | "created": false,
32 | "deleted": false,
33 | "enterprise": {
34 | "avatar_url": "https://avatars.githubusercontent.com/b/371?v=4",
35 | "created_at": "2019-06-26T11:17:54Z",
36 | "description": "",
37 | "html_url": "https://github.com/enterprises/BOGUS",
38 | "id": 371,
39 | "name": "BOGUS",
40 | "node_id": "MDEwOkVudGVycHJpc2UzNzE=",
41 | "slug": "BOGUS",
42 | "updated_at": "2022-08-25T17:53:40Z",
43 | "website_url": "https://BOGUS.no"
44 | },
45 | "forced": false,
46 | "other_commit": {
47 | "author": {
48 | "email": "john.doe@emal.com",
49 | "name": "jDoe",
50 | "username": "jDoe"
51 | },
52 | "committer": {
53 | "email": "john.doe@emal.com",
54 | "name": "jDoe",
55 | "username": "jDoe"
56 | },
57 | "distinct": true,
58 | "id": "d4cd018b2fe54d8308b78f2bb88db94ac57173ea",
59 | "message": "master commit",
60 | "timestamp": "2022-10-21T11:26:55+02:00",
61 | "tree_id": "1eb5ac4d731daeb199755932a5a2e126e10c80cc",
62 | "url": "https://github.com/yolo"
63 | }
64 | }
65 | }
--------------------------------------------------------------------------------
/pkg/vcs/testdata/workflowrun-head-commit.json:
--------------------------------------------------------------------------------
1 | {
2 | "actor": "jdoe",
3 | "workflow": "Salsa CI",
4 | "head_ref": "",
5 | "base_ref": "",
6 | "event": {
7 | "after": "d4cd018b2fe54d8308b78f2bb88db94ac57173ea",
8 | "base_ref": null,
9 | "before": "a88fd9ea948a6ea1278ebcfd4b238283a72e12b0",
10 | "commits": [
11 | {
12 | "author": {
13 | "email": "john.doe@emal.com",
14 | "name": "jDoe",
15 | "username": "jDoe"
16 | },
17 | "committer": {
18 | "email": "john.doe@emal.com",
19 | "name": "jDoe",
20 | "username": "jDoe"
21 | },
22 | "distinct": true,
23 | "id": "d4cd018b2fe54d8308b78f2bb88db94ac57173ea",
24 | "message": "master commit",
25 | "timestamp": "2022-10-21T11:26:55+02:00",
26 | "tree_id": "1eb5ac4d731daeb199755932a5a2e126e10c80cc",
27 | "url": "https://github.com/yolo"
28 | }
29 | ],
30 | "compare": "https://github.com/yolo/gandalf/compare/a88fd9ea948a...d4cd018b2fe5",
31 | "created": false,
32 | "deleted": false,
33 | "enterprise": {
34 | "avatar_url": "https://avatars.githubusercontent.com/b/371?v=4",
35 | "created_at": "2019-06-26T11:17:54Z",
36 | "description": "",
37 | "html_url": "https://github.com/enterprises/BOGUS",
38 | "id": 371,
39 | "name": "BOGUS",
40 | "node_id": "MDEwOkVudGVycHJpc2UzNzE=",
41 | "slug": "BOGUS",
42 | "updated_at": "2022-08-25T17:53:40Z",
43 | "website_url": "https://BOGUS.no"
44 | },
45 | "forced": false,
46 | "head_commit": {
47 | "author": {
48 | "email": "john.doe@emal.com",
49 | "name": "jDoe",
50 | "username": "jDoe"
51 | },
52 | "committer": {
53 | "email": "john.doe@emal.com",
54 | "name": "jDoe",
55 | "username": "jDoe"
56 | },
57 | "distinct": true,
58 | "id": "d4cd018b2fe54d8308b78f2bb88db94ac57173ea",
59 | "message": "master commit",
60 | "timestamp": "2022-10-21T11:26:55+02:00",
61 | "tree_id": "1eb5ac4d731daeb199755932a5a2e126e10c80cc",
62 | "url": "https://github.com/yolo"
63 | }
64 | }
65 | }
--------------------------------------------------------------------------------
/salsa-sample.yaml:
--------------------------------------------------------------------------------
1 | attest:
2 | key: gcpkms://projects/plattformsikkerhet-dev-496e/locations/europe-north1/keyRings/cosign/cryptoKeys/cosign-test/versions/1
3 | rekor-url: https://rekor.sigstore.dev
4 |
5 |
--------------------------------------------------------------------------------