├── .nvmrc ├── .gitattributes ├── src ├── SyftVersion.ts ├── attachReleaseAssets.ts ├── downloadSyft.ts ├── runSyftAction.ts ├── github │ ├── Util.ts │ ├── Executor.ts │ ├── SyftDownloader.ts │ ├── GithubClient.ts │ └── SyftGithubAction.ts └── Syft.ts ├── .prettierignore ├── tests ├── fixtures │ ├── localbuild │ │ └── Dockerfile │ ├── image-alpine-match-coverage │ │ ├── Dockerfile │ │ ├── etc │ │ │ └── os-release │ │ └── lib │ │ │ └── apk │ │ │ └── db │ │ │ └── installed │ ├── image-centos-match-coverage │ │ ├── Dockerfile │ │ ├── var │ │ │ └── lib │ │ │ │ └── rpm │ │ │ │ ├── Packages │ │ │ │ └── generate-fixture.sh │ │ └── etc │ │ │ └── os-release │ ├── image-debian-match-coverage │ │ ├── Dockerfile │ │ ├── python │ │ │ └── dist-info │ │ │ │ ├── top_level.txt │ │ │ │ └── METADATA │ │ ├── java │ │ │ ├── generate-fixtures.md │ │ │ └── example-java-app-maven-0.1.0.jar │ │ ├── ruby │ │ │ ├── Gemfile.lock │ │ │ └── specifications │ │ │ │ └── bundler.gemspec │ │ ├── usr │ │ │ └── lib │ │ │ │ └── os-release │ │ ├── var │ │ │ └── lib │ │ │ │ └── dpkg │ │ │ │ └── status │ │ └── javascript │ │ │ └── pkg-json │ │ │ └── package.json │ ├── npm-project │ │ ├── package.json │ │ └── package-lock.json │ ├── yarn-project │ │ ├── package.json │ │ └── yarn.lock │ ├── content-merge.fixture.json │ └── policy_evaluation.fixture.json ├── integration │ ├── syft_config.yaml │ ├── GitHubSnapshot.test.ts │ └── __snapshots__ │ │ └── GitHubSnapshot.test.ts.snap ├── Util.test.ts ├── GithubClient.test.ts ├── mocks.ts └── SyftGithubAction.test.ts ├── .eslintignore ├── .husky └── pre-commit ├── .prettierrc.json ├── jest.env.js ├── .github ├── dependabot.yml ├── workflows │ ├── remove-awaiting-response-label.yaml │ ├── oss-project-board-add.yaml │ ├── release-draft.yml │ ├── release-tag.yml │ ├── update-syft-release.yml │ └── test.yml ├── ISSUE_TEMPLATE │ ├── feature_request.md │ └── bug_report.yml └── release-drafter.yml ├── .editorconfig ├── jest.config.js ├── download-syft └── action.yml ├── 
publish-sbom └── action.yml ├── .eslintrc.json ├── tsconfig.json ├── RELEASE.md ├── DEVELOPING.md ├── llms.txt ├── .gitignore ├── action.yml ├── package.json ├── CONTRIBUTING.md ├── CODE_OF_CONDUCT.md ├── README.md └── LICENSE /.nvmrc: -------------------------------------------------------------------------------- 1 | 20 2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/** linguist-generated=true 2 | -------------------------------------------------------------------------------- /src/SyftVersion.ts: -------------------------------------------------------------------------------- 1 | export const VERSION = "v1.39.0"; 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ 4 | tests/ 5 | -------------------------------------------------------------------------------- /tests/fixtures/localbuild/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.15.0 2 | -------------------------------------------------------------------------------- /tests/integration/syft_config.yaml: -------------------------------------------------------------------------------- 1 | format: 2 | pretty: true 3 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ 4 | jest.config.js 5 | -------------------------------------------------------------------------------- /tests/fixtures/image-alpine-match-coverage/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM scratch 2 | COPY . . 
-------------------------------------------------------------------------------- /tests/fixtures/image-centos-match-coverage/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM scratch 2 | COPY . . -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM scratch 2 | COPY . . -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/python/dist-info/top_level.txt: -------------------------------------------------------------------------------- 1 | pygments -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . "$(dirname "$0")/_/husky.sh" 3 | 4 | npm run precommit 5 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 80, 3 | "tabWidth": 2, 4 | "useTabs": false 5 | } 6 | -------------------------------------------------------------------------------- /jest.env.js: -------------------------------------------------------------------------------- 1 | process.env["RUNNER_TOOL_CACHE"] = "/tmp/actions/cache"; 2 | process.env["RUNNER_TEMP"] = "/tmp/actions/temp"; 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | -------------------------------------------------------------------------------- 
/tests/fixtures/image-centos-match-coverage/var/lib/rpm/Packages: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/anchore/sbom-action/HEAD/tests/fixtures/image-centos-match-coverage/var/lib/rpm/Packages -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/java/generate-fixtures.md: -------------------------------------------------------------------------------- 1 | See the syft/cataloger/java/test-fixtures/java-builds dir to generate test fixtures and copy to here manually. 2 | -------------------------------------------------------------------------------- /src/attachReleaseAssets.ts: -------------------------------------------------------------------------------- 1 | import { 2 | attachReleaseAssets, 3 | runAndFailBuildOnException, 4 | } from "./github/SyftGithubAction"; 5 | 6 | runAndFailBuildOnException(attachReleaseAssets); 7 | -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/java/example-java-app-maven-0.1.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/anchore/sbom-action/HEAD/tests/fixtures/image-debian-match-coverage/java/example-java-app-maven-0.1.0.jar -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | insert_final_newline = true 6 | indent_size = 2 7 | max_line_length = 80 8 | ij_javascript_spaces_within_brackets = true 9 | ij_javascript_enforce_trailing_comma = keep 10 | -------------------------------------------------------------------------------- /tests/fixtures/image-alpine-match-coverage/etc/os-release: -------------------------------------------------------------------------------- 1 | 
NAME="Alpine Linux" 2 | ID=alpine 3 | VERSION_ID=3.12.0 4 | PRETTY_NAME="Alpine Linux v3.12" 5 | HOME_URL="https://alpinelinux.org/" 6 | BUG_REPORT_URL="https://bugs.alpinelinux.org/" 7 | -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/ruby/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | rails (4.1.1) 5 | activerecord (= 4.1.1) 6 | 7 | PLATFORMS 8 | ruby 9 | 10 | DEPENDENCIES 11 | rails (= 4.1.1) -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | setupFiles: ["/jest.env.js"], 3 | clearMocks: true, 4 | moduleFileExtensions: ['js', 'ts'], 5 | testMatch: ['**/*.test.ts'], 6 | transform: { 7 | '^.+\\.ts$': 'ts-jest' 8 | }, 9 | verbose: true 10 | } 11 | -------------------------------------------------------------------------------- /src/downloadSyft.ts: -------------------------------------------------------------------------------- 1 | import * as core from "@actions/core"; 2 | import { 3 | getSyftCommand, 4 | runAndFailBuildOnException, 5 | } from "./github/SyftGithubAction"; 6 | 7 | runAndFailBuildOnException(async () => { 8 | const cmd = await getSyftCommand(); 9 | core.setOutput("cmd", cmd); 10 | }); 11 | -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/usr/lib/os-release: -------------------------------------------------------------------------------- 1 | PRETTY_NAME="Debian GNU/Linux 8 (jessie)" 2 | NAME="Debian GNU/Linux" 3 | VERSION_ID="8" 4 | VERSION="8 (jessie)" 5 | ID=debian 6 | HOME_URL="http://www.debian.org/" 7 | SUPPORT_URL="http://www.debian.org/support" 8 | BUG_REPORT_URL="https://bugs.debian.org/" 9 | 
-------------------------------------------------------------------------------- /tests/fixtures/npm-project/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "npm-project", 3 | "version": "0.12.1", 4 | "description": "Basic NPM-based project", 5 | "main": "index.js", 6 | "author": "test@test", 7 | "license": "MIT", 8 | "dependencies": { 9 | "react": "^16.14.0", 10 | "tar": "^6.1.0" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/fixtures/yarn-project/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "yarn-project", 3 | "version": "2.5.34", 4 | "description": "Basic Yarn-based project", 5 | "main": "index.js", 6 | "author": "test@test", 7 | "license": "MIT", 8 | "dependencies": { 9 | "react": "16", 10 | "trim": "0.0.2" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.github/workflows/remove-awaiting-response-label.yaml: -------------------------------------------------------------------------------- 1 | name: "Manage Awaiting Response Label" 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | 7 | jobs: 8 | run: 9 | uses: "anchore/workflows/.github/workflows/remove-awaiting-response-label.yaml@main" 10 | secrets: 11 | token: ${{ secrets.OSS_PROJECT_GH_TOKEN }} 12 | -------------------------------------------------------------------------------- /src/runSyftAction.ts: -------------------------------------------------------------------------------- 1 | import { 2 | attachReleaseAssets, 3 | runAndFailBuildOnException, 4 | runSyftAction, 5 | uploadDependencySnapshot, 6 | } from "./github/SyftGithubAction"; 7 | 8 | runAndFailBuildOnException(async () => { 9 | await runSyftAction(); 10 | await uploadDependencySnapshot(); 11 | await attachReleaseAssets(); 12 | }); 13 | 
-------------------------------------------------------------------------------- /tests/fixtures/image-centos-match-coverage/etc/os-release: -------------------------------------------------------------------------------- 1 | NAME="CentOS Linux" 2 | VERSION="8 (Core)" 3 | ID="centos" 4 | ID_LIKE="rhel fedora" 5 | VERSION_ID="8" 6 | PLATFORM_ID="platform:el8" 7 | PRETTY_NAME="CentOS Linux 8 (Core)" 8 | ANSI_COLOR="0;31" 9 | CPE_NAME="cpe:/o:centos:centos:8" 10 | HOME_URL="https://www.centos.org/" 11 | BUG_REPORT_URL="https://bugs.centos.org/" -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **What would you like to be added**: 11 | 12 | **Why is this needed**: 13 | 14 | **Additional context**: 15 | 16 | -------------------------------------------------------------------------------- /.github/workflows/oss-project-board-add.yaml: -------------------------------------------------------------------------------- 1 | name: Add to OSS board 2 | 3 | on: 4 | issues: 5 | types: 6 | - opened 7 | - reopened 8 | - transferred 9 | - labeled 10 | 11 | jobs: 12 | 13 | run: 14 | uses: "anchore/workflows/.github/workflows/oss-project-board-add.yaml@main" 15 | secrets: 16 | token: ${{ secrets.OSS_PROJECT_GH_TOKEN }} 17 | -------------------------------------------------------------------------------- /.github/workflows/release-draft.yml: -------------------------------------------------------------------------------- 1 | name: Draft Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | update_release_draft: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Draft release notes 13 | uses: 
release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5 # v6.1.0 14 | env: 15 | GITHUB_TOKEN: ${{ github.token }} 16 | -------------------------------------------------------------------------------- /.github/workflows/release-tag.yml: -------------------------------------------------------------------------------- 1 | name: Re-tag releases 2 | 3 | on: 4 | release: 5 | types: [published, edited] 6 | 7 | jobs: 8 | actions-tagger: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: Actions-R-Us/actions-tagger@330ddfac760021349fef7ff62b372f2f691c20fb # v2.0.3 12 | env: 13 | GITHUB_TOKEN: ${{ github.token }} 14 | with: 15 | publish_latest_tag: true 16 | -------------------------------------------------------------------------------- /download-syft/action.yml: -------------------------------------------------------------------------------- 1 | name: "Anchore SBOM Action / Download Syft" 2 | description: "Downloads Syft to the Action tool cache and provides a reference" 3 | author: "Anchore" 4 | 5 | inputs: 6 | syft-version: 7 | required: false 8 | description: "The version of Syft to download" 9 | 10 | outputs: 11 | cmd: 12 | description: "A reference to the Syft command" 13 | 14 | runs: 15 | using: "node20" 16 | main: "../dist/downloadSyft/index.js" 17 | -------------------------------------------------------------------------------- /src/github/Util.ts: -------------------------------------------------------------------------------- 1 | import s from "fast-safe-stringify"; 2 | 3 | export function stringify(o: any): string { 4 | return s(o, undefined, 2); 5 | } 6 | 7 | export function stripEmojis(text: string): string { 8 | // Regular expression to match emojis 9 | const emojiRegex = 10 | /(?:[\u2700-\u27BF]|[\uE000-\uF8FF]|[\uD83C-\uDBFF][\uDC00-\uDFFF]|\ud83d[\udc00-\ude4f\ude80-\udeff]|\ud83e[\udd10-\udd3f\udd40-\uddff])/g; 11 | return text.replace(emojiRegex, ""); 12 | } 13 | 
-------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name-template: "v$RESOLVED_VERSION" 2 | tag-template: "v$RESOLVED_VERSION" 3 | 4 | categories: 5 | - title: "🚀 Features" 6 | labels: [feature, enhancement] 7 | - title: "🐛 Bug Fixes" 8 | labels: [fix, bugfix, bug] 9 | 10 | change-template: "- $TITLE (#$NUMBER) [[$AUTHOR](https://github.com/$AUTHOR)]" 11 | 12 | version-resolver: 13 | major: 14 | labels: [major] 15 | minor: 16 | labels: [minor] 17 | patch: 18 | labels: [patch] 19 | default: patch 20 | 21 | exclude-labels: 22 | - "changelog-ignore" 23 | 24 | template: | 25 | ## Changes in v$RESOLVED_VERSION 26 | 27 | $CHANGES 28 | -------------------------------------------------------------------------------- /publish-sbom/action.yml: -------------------------------------------------------------------------------- 1 | name: "Anchore SBOM Action / Attach SBOM to Release" 2 | description: "Creates an SBOM (Software Bill Of Materials) from your code and container images" 3 | author: "Anchore" 4 | inputs: 5 | sbom-artifact-match: 6 | required: false 7 | description: "A regex to find SBOM artifacts to attach to a release" 8 | default: ".*\\.spdx\\.json$" 9 | 10 | github-token: 11 | description: "Authorized secret GitHub Personal Access Token. 
Defaults to github.token" 12 | required: false 13 | default: ${{ github.token }} 14 | 15 | runs: 16 | using: "node20" 17 | main: "../dist/attachReleaseAssets/index.js" 18 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "@typescript-eslint/parser", 3 | "parserOptions": { 4 | "ecmaVersion": 2020, 5 | "sourceType": "module" 6 | }, 7 | "extends": [ 8 | "plugin:@typescript-eslint/recommended", 9 | "plugin:prettier/recommended" 10 | ], 11 | "rules": { 12 | "@typescript-eslint/quotes": [ 13 | "error", 14 | "double", 15 | { "allowTemplateLiterals": true } 16 | ], 17 | "@typescript-eslint/comma-dangle": "off", 18 | "@typescript-eslint/explicit-module-boundary-types": "off", 19 | "@typescript-eslint/no-explicit-any": "off" 20 | }, 21 | "env": { 22 | "node": true, 23 | "commonjs": true, 24 | "es6": true 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/github/Executor.ts: -------------------------------------------------------------------------------- 1 | import * as exec from "@actions/exec"; 2 | 3 | /** 4 | * Execute directly for linux & macOS and use WSL for Windows 5 | * @param cmd command to execute 6 | * @param args command args 7 | * @param options command options 8 | */ 9 | export async function execute( 10 | cmd: string, 11 | args: string[], 12 | options?: exec.ExecOptions 13 | ) { 14 | return exec.exec(cmd, args, options); 15 | } 16 | 17 | /** 18 | * Maps the given parameter to a Windows Subsystem for Linux style path 19 | * @param arg 20 | */ 21 | export function mapToWSLPath(arg: string) { 22 | return arg.replace( 23 | /^([A-Z]):(.*)$/, 24 | (v, drive, path) => `/mnt/${drive.toLowerCase()}${path.replace(/\\/g, "/")}` 25 | ); 26 | } 27 | -------------------------------------------------------------------------------- /tests/Util.test.ts: 
-------------------------------------------------------------------------------- 1 | import { stripEmojis } from "../src/github/Util"; 2 | 3 | describe("stripEmojis", () => { 4 | it("Should not modify strings without emojis", () => { 5 | const input = "Workflow for building my awesome app"; 6 | const output = stripEmojis(input); 7 | expect(output).toBe(input); 8 | }); 9 | 10 | it("should remove single emojis from strings", () => { 11 | const input = "Workflow for building my awesome app🏗"; 12 | const output = stripEmojis(input); 13 | expect(output).toBe("Workflow for building my awesome app"); 14 | }); 15 | 16 | it("should remove multiple emojis from strings", () => { 17 | const input = "🚀Good 🧹morning 🏗!"; 18 | const output = stripEmojis(input); 19 | expect(output).toBe("Good morning !"); 20 | }); 21 | }); 22 | -------------------------------------------------------------------------------- /tests/fixtures/image-centos-match-coverage/var/lib/rpm/generate-fixture.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eux 3 | 4 | docker create --name generate-rpmdb-fixture centos:latest sh -c 'tail -f /dev/null' 5 | 6 | function cleanup { 7 | docker kill generate-rpmdb-fixture 8 | docker rm generate-rpmdb-fixture 9 | } 10 | trap cleanup EXIT 11 | 12 | docker start generate-rpmdb-fixture 13 | docker exec -i --tty=false generate-rpmdb-fixture bash <<-EOF 14 | mkdir -p /scratch 15 | cd /scratch 16 | rpm --initdb --dbpath /scratch 17 | curl -sSLO https://github.com/wagoodman/dive/releases/download/v0.9.2/dive_0.9.2_linux_amd64.rpm 18 | rpm --dbpath /scratch -ivh dive_0.9.2_linux_amd64.rpm 19 | rm dive_0.9.2_linux_amd64.rpm 20 | rpm --dbpath /scratch -qa 21 | EOF 22 | 23 | docker cp generate-rpmdb-fixture:/scratch/Packages . 
24 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */, 4 | "module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */, 5 | "moduleResolution": "node", 6 | "strict": true /* Enable all strict type-checking options. */, 7 | "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */, 8 | "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, 9 | "allowSyntheticDefaultImports": true, 10 | "types": ["node", "jest"] 11 | }, 12 | "include": ["./src/**/*.ts"], 13 | "exclude": ["node_modules"] 14 | } 15 | -------------------------------------------------------------------------------- /tests/fixtures/image-alpine-match-coverage/lib/apk/db/installed: -------------------------------------------------------------------------------- 1 | C:Q1z0MwWQKfva+S+q7XmOBYFfQgW/k= 2 | P:libvncserver 3 | V:0.9.9 4 | A:x86_64 5 | S:166239 6 | I:389120 7 | T:Library to make writing a vnc server easy 8 | U:http://libvncserver.sourceforge.net/ 9 | L:GPL-2.0-or-later 10 | o:libvncserver 11 | m:A. 
Wilcox 12 | t:1572818861 13 | c:bf1ec813f662f128fc6b70f37ef1c0474bb24488 14 | D:so:libc.musl-x86_64.so.1 so:libgcrypt.so.20 so:libgnutls.so.30 so:libjpeg.so.8 so:libpng16.so.16 so:libz.so.1 15 | p:so:libvncclient.so.1=1.0.0 so:libvncserver.so.1=1.0.0 16 | F:usr 17 | F:usr/lib 18 | R:libvncclient.so.1 19 | a:0:0:777 20 | Z:Q1quyp/JcSPFQhtQFjMUYdMwRvAWM= 21 | R:libvncserver.so.1.0.0 22 | a:0:0:755 23 | Z:Q16Pd1AqyqQRMwiFfbUt9XkYnkapw= 24 | R:libvncserver.so.1 25 | a:0:0:777 26 | Z:Q184HrHsxEBqnsH4QNxeU5w8alhKI= 27 | R:libvncclient.so.1.0.0 28 | a:0:0:755 29 | Z:Q1IEjCrEwVlQt2GjIsb3o39vcgqMg= 30 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Release 2 | 3 | A release of sbom-action comprises: 4 | - a new semver git tag from the current tip of the main branch 5 | - a new [github release](https://github.com/anchore/sbom-action/releases) with a changelog 6 | - the action distributable committed into the repo at `dist/` 7 | 8 | Ideally releasing should be done often with small increments when possible. Unless a 9 | breaking change is blocking the release, or no fixes/features have been merged, a good 10 | target release cadence is between every 1 or 2 weeks. 11 | 12 | 13 | ## Creating a release 14 | 15 | Releases are automatically drafted on every push to the main branch. Please see the [github releases page](https://github.com/anchore/sbom-action/releases) for the latest draft. 
To publish the release: 16 | 17 | - Click "edit" (the pencil icon) 18 | - Modify the changelog as needed (for instance, if syft was bumped multiple times, include only the latest version bump entry) 19 | - Click "publish" 20 | -------------------------------------------------------------------------------- /DEVELOPING.md: -------------------------------------------------------------------------------- 1 | # Developing 2 | 3 | Information to get started developing the `sbom-action`. 4 | 5 | ## Logging 6 | 7 | If you would like to get more extensive debug logging, it is 8 | possible to enable this as [described here](https://github.com/actions/toolkit/blob/master/docs/action-debugging.md) 9 | by setting a secret in your repository of `ACTIONS_STEP_DEBUG` to `true`. 10 | 11 | ## Update `dist/` 12 | 13 | Updates to `dist/` and handled via a commit hook. Install the hook by running `npm install`. 14 | 15 | ## Tests 16 | 17 | To run tests locally, you will need a local docker instance and registry along with a few 18 | known images from the fixtures. Just run: 19 | 20 | ```shell 21 | docker run -d -p 5000:5000 --name registry registry:2 22 | ``` 23 | 24 | ... 
and a set of images built: 25 | 26 | ```shell 27 | for distro in alpine centos debian; do 28 | docker build -t localhost:5000/match-coverage/$distro ./tests/fixtures/image-$distro-match-coverage 29 | docker push localhost:5000/match-coverage/$distro:latest 30 | done 31 | ``` 32 | -------------------------------------------------------------------------------- /src/Syft.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Used for filesystem directory input to Syft 3 | */ 4 | export interface SyftDirectoryInput { 5 | path: string; 6 | } 7 | 8 | /** 9 | * Used for file input to Syft 10 | */ 11 | export interface SyftFileInput { 12 | file: string; 13 | } 14 | 15 | /** 16 | * Used to point Syft to a registry to scan an image 17 | */ 18 | export interface SyftRegistryInput { 19 | registry: string; 20 | image: string; 21 | } 22 | 23 | /** 24 | * Used to point Syft to a local image 25 | */ 26 | export interface SyftImageInput { 27 | image: string; 28 | } 29 | 30 | /** 31 | * Syft invocation options 32 | */ 33 | export interface SyftOptions { 34 | input: 35 | | SyftDirectoryInput 36 | | SyftFileInput 37 | | SyftRegistryInput 38 | | SyftImageInput; 39 | format: 40 | | "spdx" 41 | | "spdx-tag-value" 42 | | "spdx-json" 43 | | "cyclonedx" 44 | | "cyclonedx-xml" 45 | | "cyclonedx-json" 46 | | "table" 47 | | "text" 48 | | "json"; 49 | uploadToDependencySnapshotAPI: boolean; 50 | configFile: string; 51 | } 52 | -------------------------------------------------------------------------------- /src/github/SyftDownloader.ts: -------------------------------------------------------------------------------- 1 | /* istanbul ignore file */ 2 | 3 | import fs from "fs"; 4 | import os from "os"; 5 | import path from "path"; 6 | import { execute } from "./Executor"; 7 | 8 | const repo = 9 | /https:..github.com.([-\w]+).([-\w]+).archive.refs.heads.([-\w]+).zip/; 10 | 11 | export async function downloadSyftFromZip(url: string): Promise { 12 | // 
This needs to be an archive download instead of a `go install` because it 13 | // may not be from the same repo, in which case `go install` fails 14 | // `https://github.com/anchore/syft/archive/refs/heads/main.zip` 15 | const groups = url.match(repo); 16 | if (groups && groups.length > 2) { 17 | const repo = groups[2]; 18 | const branch = groups[3]; 19 | const cwd = process.cwd(); 20 | try { 21 | const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "syft")); 22 | process.chdir(tmpDir); 23 | await execute("curl", ["-L", "-o", `${branch}.zip`, url]); 24 | await execute("unzip", [`${branch}.zip`]); 25 | const repoDir = path.join(tmpDir, `${repo}-${branch}`); 26 | process.chdir(repoDir); 27 | // go build -o syftbin 28 | await execute("go", ["build", "-o", "syftbin"]); 29 | return `${repoDir}/syftbin`; 30 | } finally { 31 | process.chdir(cwd); 32 | } 33 | } 34 | return ""; 35 | } 36 | -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/ruby/specifications/bundler.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | # -*- encoding: utf-8 -*- 3 | # stub: bundler 2.1.4 ruby lib 4 | 5 | Gem::Specification.new do |s| 6 | s.name = "bundler".freeze 7 | s.version = "2.1.4" 8 | 9 | s.required_rubygems_version = Gem::Requirement.new(">= 2.5.2".freeze) if s.respond_to? :required_rubygems_version= 10 | s.metadata = { "bug_tracker_uri" => "https://github.com/bundler/bundler/issues", "changelog_uri" => "https://github.com/bundler/bundler/blob/master/CHANGELOG.md", "homepage_uri" => "https://bundler.io/", "source_code_uri" => "https://github.com/bundler/bundler/" } if s.respond_to? 
:metadata= 11 | s.require_paths = ["lib".freeze] 12 | s.authors = ["Andr\u00E9 Arko".freeze, "Samuel Giddins".freeze, "Colby Swandale".freeze, "Hiroshi Shibata".freeze, "David Rodr\u00EDguez".freeze, "Grey Baker".freeze, "Stephanie Morillo".freeze, "Chris Morris".freeze, "James Wen".freeze, "Tim Moore".freeze, "Andr\u00E9 Medeiros".freeze, "Jessica Lynn Suttles".freeze, "Terence Lee".freeze, "Carl Lerche".freeze, "Yehuda Katz".freeze] 13 | s.bindir = "exe".freeze 14 | s.date = "2020-01-05" 15 | s.description = "Bundler manages an application's dependencies through its entire life, across many machines, systematically and repeatably".freeze 16 | s.email = ["team@bundler.io".freeze] 17 | s.executables = ["bundle".freeze, "bundler".freeze] 18 | s.files = ["exe/bundle".freeze, "exe/bundler".freeze] 19 | s.homepage = "https://bundler.io".freeze 20 | s.licenses = ["MIT".freeze] 21 | s.required_ruby_version = Gem::Requirement.new(">= 2.3.0".freeze) 22 | s.rubygems_version = "3.1.2".freeze 23 | s.summary = "The best way to manage your application's dependencies".freeze 24 | 25 | s.installed_by_version = "3.1.2" if s.respond_to? 
:installed_by_version 26 | end -------------------------------------------------------------------------------- /.github/workflows/update-syft-release.yml: -------------------------------------------------------------------------------- 1 | name: PR for latest Syft release 2 | on: 3 | schedule: 4 | # 7:04 UTC (2:04 am EST) 5 | - cron: "4 7 * * *" 6 | 7 | # Allows you to run this workflow manually from the Actions tab 8 | workflow_dispatch: 9 | 10 | jobs: 11 | upgrade-syft: 12 | runs-on: ubuntu-latest 13 | if: github.repository == 'anchore/sbom-action' 14 | steps: 15 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 16 | - name: Get latest Syft version 17 | id: latest-version 18 | env: 19 | GITHUB_TOKEN: ${{ github.token }} 20 | run: | 21 | LATEST_VERSION=$(gh release view --json name -q '.name' -R anchore/syft) 22 | echo "export const VERSION = \"$LATEST_VERSION\";" > src/SyftVersion.ts 23 | # install husky hooks and dependencies: 24 | npm install 25 | # export the version for use with create-pull-request: 26 | echo "LATEST_VERSION=$LATEST_VERSION" >> $GITHUB_OUTPUT 27 | - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a # v2.1.0 28 | id: generate-token 29 | with: 30 | app_id: ${{ secrets.TOKEN_APP_ID }} 31 | private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }} 32 | - uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0 33 | with: 34 | signoff: true 35 | delete-branch: true 36 | branch: auto/latest-syft 37 | labels: dependencies 38 | commit-message: "chore(deps): update Syft to ${{ steps.latest-version.outputs.LATEST_VERSION }}" 39 | title: "chore(deps): update Syft to ${{ steps.latest-version.outputs.LATEST_VERSION }}" 40 | body: "Update Syft to ${{ steps.latest-version.outputs.LATEST_VERSION }}" 41 | token: ${{ steps.generate-token.outputs.token }} 42 | -------------------------------------------------------------------------------- 
/tests/fixtures/image-debian-match-coverage/var/lib/dpkg/status: -------------------------------------------------------------------------------- 1 | Package: apt 2 | Status: install ok installed 3 | Priority: required 4 | Section: admin 5 | Installed-Size: 4064 6 | Maintainer: APT Development Team 7 | Architecture: amd64 8 | Version: 1.8.2 9 | Source: apt-dev 10 | Replaces: apt-transport-https (<< 1.5~alpha4~), apt-utils (<< 1.3~exp2~) 11 | Provides: apt-transport-https (= 1.8.2) 12 | Depends: adduser, gpgv | gpgv2 | gpgv1, debian-archive-keyring, libapt-pkg5.0 (>= 1.7.0~alpha3~), libc6 (>= 2.15), libgcc1 (>= 1:3.0), libgnutls30 (>= 3.6.6), libseccomp2 (>= 1.0.1), libstdc++6 (>= 5.2) 13 | Recommends: ca-certificates 14 | Suggests: apt-doc, aptitude | synaptic | wajig, dpkg-dev (>= 1.17.2), gnupg | gnupg2 | gnupg1, powermgmt-base 15 | Breaks: apt-transport-https (<< 1.5~alpha4~), apt-utils (<< 1.3~exp2~), aptitude (<< 0.8.10) 16 | Conffiles: 17 | /etc/apt/apt.conf.d/01autoremove 76120d358bc9037bb6358e737b3050b5 18 | /etc/cron.daily/apt-compat 49e9b2cfa17849700d4db735d04244f3 19 | /etc/kernel/postinst.d/apt-auto-removal 4ad976a68f045517cf4696cec7b8aa3a 20 | /etc/logrotate.d/apt 179f2ed4f85cbaca12fa3d69c2a4a1c3 21 | Description: commandline package manager 22 | This package provides commandline tools for searching and 23 | managing as well as querying information about packages 24 | as a low-level access to all features of the libapt-pkg library. 25 | . 
26 | These include: 27 | * apt-get for retrieval of packages and information about them 28 | from authenticated sources and for installation, upgrade and 29 | removal of packages together with their dependencies 30 | * apt-cache for querying available information about installed 31 | as well as installable packages 32 | * apt-cdrom to use removable media as a source for packages 33 | * apt-config as an interface to the configuration settings 34 | * apt-key as an interface to manage authentication keys 35 | 36 | -------------------------------------------------------------------------------- /llms.txt: -------------------------------------------------------------------------------- 1 | # SBOM Action 2 | 3 | GitHub Action for creating software bill of materials (SBOM) using Syft. 4 | 5 | ## Project Overview 6 | 7 | This is Anchore's official GitHub Action that generates Software Bill of Materials (SBOMs) using the Syft scanner. The action can scan container images, directories, and files to produce SBOMs in various formats including SPDX and CycloneDX. 
8 | 9 | ## Key Features 10 | 11 | - Generate SBOMs from container images, directories, or files 12 | - Support for multiple SBOM formats (SPDX, CycloneDX) 13 | - Automatic upload to GitHub releases as assets 14 | - Integration with GitHub's dependency submission API 15 | - Support for private container registries with authentication 16 | 17 | ## Architecture 18 | 19 | The action is built with TypeScript and consists of several main components: 20 | 21 | - `src/runSyftAction.ts` - Main entry point for SBOM generation 22 | - `src/Syft.ts` - Syft tool wrapper and configuration 23 | - `src/github/` - GitHub API integration for uploads and releases 24 | - `src/downloadSyft.ts` - Handles downloading the Syft binary 25 | - `src/attachReleaseAssets.ts` - Manages release asset uploads 26 | 27 | ## Usage 28 | 29 | Basic usage: 30 | ```yaml 31 | - uses: anchore/sbom-action@v0 32 | ``` 33 | 34 | Scan container image: 35 | ```yaml 36 | - uses: anchore/sbom-action@v0 37 | with: 38 | image: ghcr.io/example/image_name:tag 39 | ``` 40 | 41 | ## Development 42 | 43 | - Built with TypeScript and Node.js (v20.11.0+) 44 | - Uses GitHub Actions SDK (@actions/core, @actions/github) 45 | - Testing with Jest 46 | - Linting with ESLint and formatting with Prettier 47 | - Automated builds with ncc for distribution 48 | 49 | ## Repository Structure 50 | 51 | - `/src/` - TypeScript source code 52 | - `/dist/` - Compiled JavaScript for distribution 53 | - `/tests/` - Test files and fixtures 54 | - `/download-syft/`, `/publish-sbom/` - Sub-actions 55 | - `action.yml` - Main action configuration -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/python/dist-info/METADATA: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: Pygments 3 | Version: 2.6.1 4 | Summary: Pygments is a syntax highlighting package written in Python. 
5 | Home-page: https://pygments.org/ 6 | Author: Georg Brandl 7 | Author-email: georg@python.org 8 | License: BSD License 9 | Keywords: syntax highlighting 10 | Platform: any 11 | Classifier: License :: OSI Approved :: BSD License 12 | Classifier: Intended Audience :: Developers 13 | Classifier: Intended Audience :: End Users/Desktop 14 | Classifier: Intended Audience :: System Administrators 15 | Classifier: Development Status :: 6 - Mature 16 | Classifier: Programming Language :: Python 17 | Classifier: Programming Language :: Python :: 3 18 | Classifier: Programming Language :: Python :: 3.5 19 | Classifier: Programming Language :: Python :: 3.6 20 | Classifier: Programming Language :: Python :: 3.7 21 | Classifier: Programming Language :: Python :: 3.8 22 | Classifier: Programming Language :: Python :: Implementation :: CPython 23 | Classifier: Programming Language :: Python :: Implementation :: PyPy 24 | Classifier: Operating System :: OS Independent 25 | Classifier: Topic :: Text Processing :: Filters 26 | Classifier: Topic :: Utilities 27 | Requires-Python: >=3.5 28 | 29 | 30 | Pygments 31 | ~~~~~~~~ 32 | 33 | Pygments is a syntax highlighting package written in Python. 34 | 35 | It is a generic syntax highlighter suitable for use in code hosting, forums, 36 | wikis or other applications that need to prettify source code. Highlights 37 | are: 38 | 39 | * a wide range of over 500 languages and other text formats is supported 40 | * special attention is paid to details, increasing quality by a fair amount 41 | * support for new languages and formats are added easily 42 | * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image formats that PIL supports and ANSI sequences 43 | * it is usable as a command-line tool and as a library 44 | 45 | :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 46 | :license: BSD, see LICENSE for details. 
47 | 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependency directory 2 | node_modules 3 | 4 | # IDEs 5 | /.idea 6 | .vscode 7 | .idea 8 | 9 | # Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore 10 | # Logs 11 | logs 12 | *.log 13 | npm-debug.log* 14 | yarn-debug.log* 15 | yarn-error.log* 16 | lerna-debug.log* 17 | 18 | # Diagnostic reports (https://nodejs.org/api/report.html) 19 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 20 | 21 | # Runtime data 22 | pids 23 | *.pid 24 | *.seed 25 | *.pid.lock 26 | 27 | # Directory for instrumented libs generated by jscoverage/JSCover 28 | lib-cov 29 | 30 | # Coverage directory used by tools like istanbul 31 | coverage 32 | *.lcov 33 | 34 | # nyc test coverage 35 | .nyc_output 36 | 37 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 38 | .grunt 39 | 40 | # Bower dependency directory (https://bower.io/) 41 | bower_components 42 | 43 | # node-waf configuration 44 | .lock-wscript 45 | 46 | # Compiled binary addons (https://nodejs.org/api/addons.html) 47 | build/Release 48 | 49 | # Dependency directories 50 | jspm_packages/ 51 | 52 | # TypeScript v1 declaration files 53 | typings/ 54 | 55 | # TypeScript cache 56 | *.tsbuildinfo 57 | 58 | # Optional npm cache directory 59 | .npm 60 | 61 | # Optional eslint cache 62 | .eslintcache 63 | 64 | # Optional REPL history 65 | .node_repl_history 66 | 67 | # Output of 'npm pack' 68 | *.tgz 69 | 70 | # Yarn Integrity file 71 | .yarn-integrity 72 | 73 | # dotenv environment variables file 74 | .env 75 | .env.test 76 | 77 | # parcel-bundler cache (https://parceljs.org/) 78 | .cache 79 | 80 | # next.js build output 81 | .next 82 | 83 | # nuxt.js build output 84 | .nuxt 85 | 86 | # vuepress build output 87 | .vuepress/dist 88 | 89 | # Serverless directories 90 | .serverless/ 91 | 92 | # 
FuseBox cache 93 | .fusebox/ 94 | 95 | # DynamoDB Local files 96 | .dynamodb/ 97 | 98 | # OS metadata 99 | .DS_Store 100 | Thumbs.db 101 | 102 | # Ignore built ts files 103 | __tests__/runner/* 104 | lib/**/* 105 | 106 | # Test data 107 | sbom-action-*/ 108 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: "Anchore SBOM Action" 2 | description: "Creates an SBOM (Software Bill Of Materials) from your code and container images" 3 | author: "Anchore" 4 | branding: 5 | color: blue 6 | icon: check-circle 7 | inputs: 8 | path: 9 | required: false 10 | description: "A path to a directory on the filesystem to scan" 11 | default: "." 12 | 13 | file: 14 | required: false 15 | description: "A file on the filesystem to scan" 16 | 17 | image: 18 | required: false 19 | description: "A container image to scan" 20 | 21 | registry-username: 22 | required: false 23 | description: "The registry username" 24 | 25 | registry-password: 26 | required: false 27 | description: "The registry password" 28 | 29 | format: 30 | required: false 31 | description: "The SBOM format to export" 32 | default: "spdx-json" 33 | 34 | github-token: 35 | description: "Authorized secret GitHub Personal Access Token. 
Defaults to github.token" 36 | required: false 37 | default: ${{ github.token }} 38 | 39 | artifact-name: 40 | description: "The name to use for the SBOM file generated by this action" 41 | required: false 42 | 43 | output-file: 44 | required: false 45 | description: "A file location to output the SBOM" 46 | 47 | syft-version: 48 | required: false 49 | description: "The version of Syft to use" 50 | 51 | dependency-snapshot: 52 | required: false 53 | description: "Upload to GitHub dependency snapshot API" 54 | default: "false" 55 | 56 | upload-artifact: 57 | required: false 58 | description: "Upload artifact to workflow" 59 | default: "true" 60 | 61 | upload-artifact-retention: 62 | required: false 63 | description: > 64 | Retention policy for uploaded artifact to workflow. 65 | 66 | Minimum 1 day. 67 | Maximum 90 days unless changed from the repository settings page. 68 | An input of 0 assumes default retention value. 69 | default: 0 70 | 71 | upload-release-assets: 72 | required: false 73 | description: "Upload release assets" 74 | default: "true" 75 | 76 | config: 77 | required: false 78 | description: "Configuration file to use" 79 | 80 | runs: 81 | using: "node20" 82 | main: "dist/runSyftAction/index.js" 83 | -------------------------------------------------------------------------------- /tests/fixtures/yarn-project/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | "js-tokens@^3.0.0 || ^4.0.0": 6 | version "4.0.0" 7 | resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" 8 | integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== 9 | 10 | loose-envify@^1.1.0, loose-envify@^1.4.0: 11 | version "1.4.0" 12 | resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" 13 | integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== 14 | dependencies: 15 | js-tokens "^3.0.0 || ^4.0.0" 16 | 17 | object-assign@^4.1.1: 18 | version "4.1.1" 19 | resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" 20 | integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= 21 | 22 | prop-types@^15.6.2: 23 | version "15.7.2" 24 | resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" 25 | integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== 26 | dependencies: 27 | loose-envify "^1.4.0" 28 | object-assign "^4.1.1" 29 | react-is "^16.8.1" 30 | 31 | react-is@^16.8.1: 32 | version "16.13.1" 33 | resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" 34 | integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== 35 | 36 | react@16: 37 | version "16.14.0" 38 | resolved "https://registry.yarnpkg.com/react/-/react-16.14.0.tgz#94d776ddd0aaa37da3eda8fc5b6b18a4c9a3114d" 39 | integrity sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g== 40 | dependencies: 41 | loose-envify "^1.1.0" 42 | object-assign "^4.1.1" 43 | prop-types "^15.6.2" 44 | 45 | trim@0.0.2: 46 | version "0.0.2" 47 | resolved 
"https://registry.yarnpkg.com/trim/-/trim-0.0.2.tgz#b41afc68d6b5fc1a1fceb47b2ac91da258a071d4" 48 | integrity sha512-kTIK/cS0xM3jxJ7toUHlFTxHgix/kmmBgOiqc0gUAoW+NjIRsMB3vkjgAth5XEghYFCQxOdF0p/PHrv1BqTHgA== 49 | -------------------------------------------------------------------------------- /tests/fixtures/content-merge.fixture.json: -------------------------------------------------------------------------------- 1 | { 2 | "content-gem.json": { 3 | "content": [ 4 | { 5 | "license": "ruby", 6 | "location": "/usr/lib/ruby/gems/2.3.0/specifications/default/bigdecimal-1.2.8.gemspec", 7 | "origin": "Kenta Murata,Zachary Scott,Shigeo Kobayashi", 8 | "package": "bigdecimal", 9 | "type": "GEM", 10 | "version": "1.2.8" 11 | } 12 | ], 13 | "content_type": "gem", 14 | "imageDigest": "sha256:0a97ccb2868e3c54167317fe7a2fc58e5123290d6c5b653a725091cbf18ca1ea" 15 | }, 16 | "content-java.json": { 17 | "content": [ 18 | { 19 | "implementation-version": "N/A", 20 | "location": "/usr/share/java/java-atk-wrapper.jar", 21 | "maven-version": "N/A", 22 | "origin": "N/A", 23 | "package": "java-atk-wrapper", 24 | "specification-version": "N/A", 25 | "type": "JAVA-JAR" 26 | } 27 | ], 28 | "content_type": "java", 29 | "imageDigest": "sha256:0a97ccb2868e3c54167317fe7a2fc58e5123290d6c5b653a725091cbf18ca1ea" 30 | }, 31 | "content-npm.json": { 32 | "content": [ 33 | { 34 | "license": "BSD-2-Clause", 35 | "location": "/opt/yarn-v1.19.1/package.json", 36 | "origin": "Unknown", 37 | "package": "yarn", 38 | "type": "NPM", 39 | "version": "1.19.1" 40 | } 41 | ], 42 | "content_type": "npm", 43 | "imageDigest": "sha256:0a97ccb2868e3c54167317fe7a2fc58e5123290d6c5b653a725091cbf18ca1ea" 44 | }, 45 | "content-python.json": { 46 | "content": [ 47 | { 48 | "license": "Python Software Foundation License", 49 | "location": "/usr/lib/python2.7", 50 | "origin": "Steven Bethard ", 51 | "package": "argparse", 52 | "type": "PYTHON", 53 | "version": "1.2.1" 54 | } 55 | ], 56 | "content_type": "python", 57 | 
"imageDigest": "sha256:0a97ccb2868e3c54167317fe7a2fc58e5123290d6c5b653a725091cbf18ca1ea" 58 | }, 59 | "content-os.json": { 60 | "content": [ 61 | { 62 | "license": "GPLv2+", 63 | "origin": "APT Development Team (maintainer)", 64 | "package": "apt", 65 | "size": "3539000", 66 | "type": "dpkg", 67 | "version": "1.4.9" 68 | } 69 | ], 70 | "content_type": "os", 71 | "imageDigest": "sha256:0a97ccb2868e3c54167317fe7a2fc58e5123290d6c5b653a725091cbf18ca1ea" 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@anchore/sbom-action", 3 | "version": "0.1.0", 4 | "private": true, 5 | "description": "GitHub Action for creating software bill of materials", 6 | "main": "src/runSyftAction.ts", 7 | "scripts": { 8 | "build": "tsc --noEmit", 9 | "format": "prettier --write **/*.ts", 10 | "format-check": "prettier --check **/*.ts", 11 | "lint": "eslint src/**/*.ts", 12 | "package": "run-p package:* && run-s post-package:*", 13 | "package:sbom-action": "ncc build src/runSyftAction.ts -o dist/runSyftAction", 14 | "package:download-action": "ncc build src/attachReleaseAssets.ts -o dist/attachReleaseAssets", 15 | "package:release-action": "ncc build src/downloadSyft.ts -o dist/downloadSyft", 16 | "post-package:fix-line-endings": "eolConverter 'dist/**/*.js'", 17 | "test": "jest --collect-coverage --runInBand", 18 | "test:update-snapshots": "jest --updateSnapshot", 19 | "all": "npm run build && npm run format && npm run lint && npm run package && npm test", 20 | "prepare": "husky install", 21 | "prettier": "prettier -w src", 22 | "precommit": "pretty-quick --staged && npm run package && git add dist/", 23 | "update-deps": "ncu -u && npm i && npm audit fix" 24 | }, 25 | "repository": { 26 | "type": "git", 27 | "url": "git+https://github.com/anchore/sbom-action.git" 28 | }, 29 | "keywords": [ 30 | "actions", 31 | "node", 32 | 
"setup" 33 | ], 34 | "author": "Keith Zantow ", 35 | "license": "Apache-2.0", 36 | "dependencies": { 37 | "@actions/artifact": "^4.0.0", 38 | "@actions/core": "^1.11.1", 39 | "@actions/exec": "^1.1.1", 40 | "@actions/github": "^6.0.1", 41 | "@actions/http-client": "^2.1.0", 42 | "@actions/tool-cache": "^2.0.1", 43 | "@octokit/core": "^7.0.6", 44 | "@octokit/webhooks": "^14.2.0", 45 | "fast-safe-stringify": "^2.1.1", 46 | "npm-check-updates": "^17.1.3" 47 | }, 48 | "devDependencies": { 49 | "@octokit/webhooks-types": "^6.11.0", 50 | "@types/jest": "^29.5.1", 51 | "@types/node": "^20.11.17", 52 | "@typescript-eslint/eslint-plugin": "^5.59.2", 53 | "@typescript-eslint/parser": "^5.59.2", 54 | "@vercel/ncc": "^0.36.1", 55 | "eol-converter-cli": "^1.0.8", 56 | "eslint": "^8.39.0", 57 | "eslint-config-prettier": "^8.8.0", 58 | "eslint-plugin-jest": "^27.2.1", 59 | "eslint-plugin-node": "^11.1.0", 60 | "eslint-plugin-prettier": "^4.2.1", 61 | "husky": "^8.0.3", 62 | "jest": "^29.5.0", 63 | "js-yaml": "^4.1.1", 64 | "npm-run-all": "^4.1.5", 65 | "prettier": "2.8.8", 66 | "prettier-plugin-organize-imports": "^3.2.2", 67 | "pretty-quick": "^3.1.3", 68 | "ts-jest": "^29.1.0", 69 | "type-fest": "^3.9.0", 70 | "typescript": "^5.0.4" 71 | }, 72 | "engines": { 73 | "node": ">=v20.11.0" 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /tests/fixtures/image-debian-match-coverage/javascript/pkg-json/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "6.14.6", 3 | "name": "npm", 4 | "description": "a package manager for JavaScript", 5 | "keywords": [ 6 | "install", 7 | "modules", 8 | "package manager", 9 | "package.json" 10 | ], 11 | "preferGlobal": true, 12 | "config": { 13 | "publishtest": false 14 | }, 15 | "homepage": "https://docs.npmjs.com/", 16 | "author": "Isaac Z. 
Schlueter (http://blog.izs.me)", 17 | "repository": { 18 | "type": "git", 19 | "url": "https://github.com/npm/cli" 20 | }, 21 | "bugs": { 22 | "url": "https://npm.community/c/bugs" 23 | }, 24 | "directories": { 25 | "bin": "./bin", 26 | "doc": "./doc", 27 | "lib": "./lib", 28 | "man": "./man" 29 | }, 30 | "main": "./lib/npm.js", 31 | "bin": { 32 | "npm": "./bin/npm-cli.js", 33 | "npx": "./bin/npx-cli.js" 34 | }, 35 | "dependencies": { 36 | "JSONStream": "^1.3.5", 37 | "abbrev": "~1.1.1", 38 | "ansicolors": "~0.3.2", 39 | "write-file-atomic": "^2.4.3" 40 | }, 41 | "bundleDependencies": [ 42 | "abbrev", 43 | "ansicolors", 44 | "ansistyles", 45 | "write-file-atomic" 46 | ], 47 | "devDependencies": { 48 | "deep-equal": "^1.0.1", 49 | "get-stream": "^4.1.0", 50 | "licensee": "^7.0.3", 51 | "marked": "^0.6.3", 52 | "marked-man": "^0.6.0", 53 | "npm-registry-couchapp": "^2.7.4", 54 | "npm-registry-mock": "^1.3.1", 55 | "require-inject": "^1.4.4", 56 | "sprintf-js": "^1.1.2", 57 | "standard": "^11.0.1", 58 | "tacks": "^1.3.0", 59 | "tap": "^12.7.0", 60 | "tar-stream": "^2.1.0" 61 | }, 62 | "scripts": { 63 | "dumpconf": "env | grep npm | sort | uniq", 64 | "prepare": "node bin/npm-cli.js rebuild && node bin/npm-cli.js --no-audit --no-timing prune --prefix=. 
--no-global && rimraf test/*/*/node_modules && make -j4 mandocs", 65 | "preversion": "bash scripts/update-authors.sh && git add AUTHORS && git commit -m \"update AUTHORS\" || true", 66 | "licenses": "licensee --production --errors-only", 67 | "tap": "tap -J --timeout 300 --no-esm", 68 | "tap-cover": "tap -J --nyc-arg=--cache --coverage --timeout 600 --no-esm", 69 | "lint": "standard", 70 | "pretest": "npm run lint", 71 | "test": "npm run test-tap --", 72 | "test:nocleanup": "NO_TEST_CLEANUP=1 npm run test --", 73 | "sudotest": "sudo npm run tap -- \"test/tap/*.js\"", 74 | "sudotest:nocleanup": "sudo NO_TEST_CLEANUP=1 npm run tap -- \"test/tap/*.js\"", 75 | "posttest": "rimraf test/npm_cache*", 76 | "test-coverage": "npm run tap-cover -- \"test/tap/*.js\" \"test/network/*.js\"", 77 | "test-tap": "npm run tap -- \"test/tap/*.js\" \"test/network/*.js\"", 78 | "test-node": "tap --timeout 240 \"test/tap/*.js\" \"test/network/*.js\"" 79 | }, 80 | "license": "Artistic-2.0", 81 | "engines": { 82 | "node": "6 >=6.2.0 || 8 || >=9.3.0" 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | description: Create a bug report 4 | labels: 5 | - bug 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | Thanks for taking the time to fill out this bug report! 11 | - type: checkboxes 12 | attributes: 13 | label: Is there an existing issue for this? 14 | description: Search to see if an issue already exists for the bug you encountered. 15 | options: 16 | - label: I have searched the existing issues 17 | required: true 18 | - type: checkboxes 19 | attributes: 20 | label: Are you using the latest sbom-action version available? 21 | description: | 22 | Ensure that you're using the latest sbom-action version. 
23 | https://github.com/anchore/sbom-action/releases/latest 24 | options: 25 | - label: I am using the latest sbom-action version. 26 | required: true 27 | - label: | 28 | I can reproduce the issue running sbom-action using complete version identifier (example: vX.Y.Z), and not just with a partial one (example: vX) 29 | required: true 30 | - label: | 31 | I am using the anchore/sbom-action action. 32 | required: true 33 | - type: checkboxes 34 | attributes: 35 | label: Are you reasonably sure that it's a sbom-action issue, and not an issue related to a tool that sbom-action runs? 36 | description: | 37 | If you encounter a specific issue, ensure that the issue is about 38 | sbom-action, and not about a tool that sbom-action runs. For example, 39 | if the action reports an unexpected or a surprising error, you may check 40 | if there are similar issues reported in that component's issue tracker. 41 | options: 42 | - label: I think that this is a sbom-action issue. 43 | required: true 44 | - type: textarea 45 | attributes: 46 | label: Current Behavior 47 | description: A concise description of what you're experiencing. 48 | validations: 49 | required: true 50 | - type: textarea 51 | attributes: 52 | label: Expected Behavior 53 | description: A concise description of what you expected to happen. 54 | validations: 55 | required: true 56 | - type: textarea 57 | attributes: 58 | label: sbom-action version 59 | description: | 60 | sbom-action version where you observed this issue 61 | placeholder: | 62 | vX.Y.Z 63 | render: markdown 64 | validations: 65 | required: true 66 | - type: textarea 67 | id: logs 68 | attributes: 69 | label: Relevant log output 70 | description: | 71 | Copy and paste any relevant log output. 72 | This will be automatically formatted into code, so no need for backticks. 73 | Enable debug logging, either on GitHub Actions, or when running locally. 74 | Not attaching debug logging will delay the issue triaging process. 
75 | render: shell 76 | validations: 77 | required: true 78 | - type: textarea 79 | attributes: 80 | label: Anything else? 81 | description: | 82 | Links? References? Anything that will give us more context about the issue you are encountering! 83 | validations: 84 | required: false 85 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to sbom-action 2 | 3 | If you are looking to contribute to this project and want to open a GitHub pull request ("PR"), there are a few guidelines of what we are looking for in patches. Make sure you go through this document and ensure that your code proposal is aligned. 4 | 5 | ## Sign off your work 6 | 7 | The `sign-off` is an added line at the end of the explanation for the commit, certifying that you wrote it or otherwise have the right to submit it as an open-source patch. By submitting a contribution, you agree to be bound by the terms of the DCO Version 1.1 and Apache License Version 2.0. 
8 | 9 | Signing off a commit certifies the below Developer's Certificate of Origin (DCO): 10 | 11 | ```text 12 | Developer's Certificate of Origin 1.1 13 | 14 | By making a contribution to this project, I certify that: 15 | 16 | (a) The contribution was created in whole or in part by me and I 17 | have the right to submit it under the open source license 18 | indicated in the file; or 19 | 20 | (b) The contribution is based upon previous work that, to the best 21 | of my knowledge, is covered under an appropriate open source 22 | license and I have the right under that license to submit that 23 | work with modifications, whether created in whole or in part 24 | by me, under the same open source license (unless I am 25 | permitted to submit under a different license), as indicated 26 | in the file; or 27 | 28 | (c) The contribution was provided directly to me by some other 29 | person who certified (a), (b) or (c) and I have not modified 30 | it. 31 | 32 | (d) I understand and agree that this project and the contribution 33 | are public and that a record of the contribution (including all 34 | personal information I submit with it, including my sign-off) is 35 | maintained indefinitely and may be redistributed consistent with 36 | this project or the open source license(s) involved. 37 | ``` 38 | 39 | All contributions to this project are licensed under the [Apache License Version 2.0, January 2004](http://www.apache.org/licenses/). 
40 | 41 | When committing your change, you can add the required line manually so that it looks like this: 42 | 43 | ```text 44 | Signed-off-by: John Doe 45 | ``` 46 | 47 | Alternatively, configure your Git client with your name and email to use the `-s` flag when creating a commit: 48 | 49 | ```text 50 | $ git config --global user.name "John Doe" 51 | $ git config --global user.email "john.doe@example.com" 52 | ``` 53 | 54 | Creating a signed-off commit is then possible with `-s` or `--signoff`: 55 | 56 | ```text 57 | $ git commit -s -m "this is a commit message" 58 | ``` 59 | 60 | To double-check that the commit was signed-off, look at the log output: 61 | 62 | ```text 63 | $ git log -1 64 | commit 37ceh170e4hb283bb73d958f2036ee5k07e7fde7 (HEAD -> issue-35, origin/main, main) 65 | Author: John Doe 66 | Date: Mon Aug 1 11:27:13 2020 -0400 67 | 68 | this is a commit message 69 | 70 | Signed-off-by: John Doe 71 | ``` 72 | 73 | 74 | [//]: # (TODO: Commit guidelines, granular commits) 75 | 76 | 77 | [//]: # (TODO: Commit guidelines, descriptive messages) 78 | 79 | 80 | [//]: # (TODO: Commit guidelines, commit title, extra body description) 81 | 82 | 83 | [//]: # (TODO: PR title and description) 84 | 85 | ## Test your changes 86 | 87 | Ensure that your changes have passed the test suite. For more information on working with this project's tests, see https://github.com/anchore/sbom-action/blob/main/DEVELOPING.md#tests. 88 | 89 | ## Document your changes 90 | 91 | When proposed changes are modifying user-facing functionality or output, it is expected the PR will include updates to the documentation as well. 
92 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: "build-test" 2 | on: # rebuild any PRs and main branch changes 3 | pull_request: 4 | branches: [main] 5 | push: 6 | branches: 7 | - main 8 | release: 9 | types: 10 | - created 11 | - edited 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | jobs: 17 | build: # make sure build/ci work properly and there is no faked build ncc built scripts 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 21 | - run: npm ci 22 | - run: npm run package 23 | - run: git status --porcelain 24 | - run: git diff 25 | - run: git diff --exit-code 26 | 27 | test-on-fixture-dirs: 28 | strategy: 29 | matrix: 30 | os: [ubuntu-latest, windows-latest] 31 | runs-on: ${{ matrix.os }} 32 | steps: 33 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 34 | with: 35 | path: ./ 36 | artifact-name: ${{ matrix.os }}-1.spdx 37 | 38 | - uses: ./ 39 | with: 40 | path: ./tests/fixtures/npm-project 41 | artifact-name: ${{ matrix.os }}-2.spdx 42 | 43 | - uses: ./ 44 | with: 45 | path: ./tests/fixtures/yarn-project 46 | artifact-name: ${{ matrix.os }}-3.spdx 47 | 48 | - uses: ./ 49 | id: yarn-scan 50 | with: 51 | path: ./tests/fixtures/yarn-project 52 | artifact-name: ${{ matrix.os }}-4.spdx 53 | 54 | - uses: ./ 55 | with: 56 | path: ./tests/fixtures/yarn-project 57 | artifact-name: ${{ matrix.os }}-SBOM.txt 58 | 59 | test: 60 | runs-on: ubuntu-latest 61 | services: 62 | registry: 63 | image: registry:2 64 | ports: 65 | - 5000:5000 66 | steps: 67 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 68 | - name: Build images 69 | run: | 70 | for distro in alpine centos debian; do 71 | docker build -t localhost:5000/match-coverage/$distro 
./tests/fixtures/image-$distro-match-coverage 72 | docker push localhost:5000/match-coverage/${distro}:latest 73 | done 74 | - run: npm ci 75 | - run: npm test 76 | 77 | test-as-action: # make sure the action works on a clean machine without building 78 | runs-on: ubuntu-latest 79 | steps: 80 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 81 | with: 82 | path: ./ 83 | 84 | - uses: ./download-syft # anchore/sbom-action/download-syft 85 | id: syft 86 | 87 | - run: | 88 | echo "${{ steps.syft.outputs.cmd }}" 89 | "${{ steps.syft.outputs.cmd }}" dir:. 90 | 91 | - uses: ./ # anchore/sbom-action 92 | id: dirscan 93 | with: 94 | artifact-name: dirscan-sbom.spdx 95 | output-file: dirscan-sbom.spdx 96 | format: spdx 97 | - run: | 98 | echo DIR SCAN SBOM: 99 | cat dirscan-sbom.spdx 100 | 101 | - uses: ./ # anchore/sbom-action 102 | id: imagescan 103 | with: 104 | image: alpine:latest 105 | artifact-name: imagescan-sbom.spdx 106 | output-file: my.sbom 107 | - run: | 108 | echo IMAGE SCAN SBOM: 109 | cat my.sbom 110 | 111 | - uses: ./publish-sbom # anchore/sbom-action/publish-sbom 112 | with: 113 | sbom-artifact-match: imagescan-sbom.spdx 114 | 115 | - uses: ./publish-sbom # anchore/sbom-action/publish-sbom 116 | with: 117 | sbom-artifact-match: "^dont-match-anything$" 118 | 119 | - uses: ./ # anchore/sbom-action with artifact retention 120 | name: "One day artifact retention test" 121 | id: one-day 122 | with: 123 | image: alpine:latest 124 | upload-artifact-retention: 1 125 | artifact-name: one-day.sbom.spdx 126 | output-file: one-day-sbom.spdx 127 | format: spdx 128 | -------------------------------------------------------------------------------- /tests/fixtures/npm-project/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "npm-project", 3 | "version": "0.12.1", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "chownr": { 8 | "version": "2.0.0", 9 | 
"resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", 10 | "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" 11 | }, 12 | "fs-minipass": { 13 | "version": "2.1.0", 14 | "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", 15 | "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", 16 | "requires": { 17 | "minipass": "^3.0.0" 18 | } 19 | }, 20 | "js-tokens": { 21 | "version": "4.0.0", 22 | "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", 23 | "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" 24 | }, 25 | "loose-envify": { 26 | "version": "1.4.0", 27 | "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", 28 | "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", 29 | "requires": { 30 | "js-tokens": "^3.0.0 || ^4.0.0" 31 | } 32 | }, 33 | "minipass": { 34 | "version": "3.1.3", 35 | "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", 36 | "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", 37 | "requires": { 38 | "yallist": "^4.0.0" 39 | } 40 | }, 41 | "minizlib": { 42 | "version": "2.1.2", 43 | "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", 44 | "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", 45 | "requires": { 46 | "minipass": "^3.0.0", 47 | "yallist": "^4.0.0" 48 | } 49 | }, 50 | "mkdirp": { 51 | "version": "1.0.4", 52 | "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", 53 | "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" 54 | }, 55 | "object-assign": { 56 | "version": "4.1.1", 57 | "resolved": 
"https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", 58 | "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" 59 | }, 60 | "prop-types": { 61 | "version": "15.7.2", 62 | "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", 63 | "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", 64 | "requires": { 65 | "loose-envify": "^1.4.0", 66 | "object-assign": "^4.1.1", 67 | "react-is": "^16.8.1" 68 | } 69 | }, 70 | "react": { 71 | "version": "16.14.0", 72 | "resolved": "https://registry.npmjs.org/react/-/react-16.14.0.tgz", 73 | "integrity": "sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g==", 74 | "requires": { 75 | "loose-envify": "^1.1.0", 76 | "object-assign": "^4.1.1", 77 | "prop-types": "^15.6.2" 78 | } 79 | }, 80 | "react-is": { 81 | "version": "16.13.1", 82 | "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", 83 | "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" 84 | }, 85 | "tar": { 86 | "version": "6.1.0", 87 | "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.0.tgz", 88 | "integrity": "sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA==", 89 | "requires": { 90 | "chownr": "^2.0.0", 91 | "fs-minipass": "^2.0.0", 92 | "minipass": "^3.0.0", 93 | "minizlib": "^2.1.1", 94 | "mkdirp": "^1.0.3", 95 | "yallist": "^4.0.0" 96 | } 97 | }, 98 | "yallist": { 99 | "version": "4.0.0", 100 | "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", 101 | "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" 102 | } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /tests/integration/GitHubSnapshot.test.ts: -------------------------------------------------------------------------------- 1 
| import { context, getMocks } from "../mocks"; 2 | const { setData, restoreInitialData, mocks } = getMocks(); 3 | 4 | // actually run syft so we know if this output format is properly working 5 | delete mocks["@actions/tool-cache"]; 6 | delete mocks["@actions/exec"]; 7 | 8 | // set up a mock for octokit.request 9 | let requestArgs: any; 10 | const mockCreator = mocks["@actions/github"]; 11 | mocks["@actions/github"] = () => { 12 | const actionsBase = mockCreator() as any; 13 | return { 14 | ...actionsBase, 15 | getOctokit() { 16 | const kit = actionsBase.getOctokit(); 17 | kit.request = (...args: any[]) => { 18 | requestArgs = args; 19 | return args; 20 | } 21 | return kit; 22 | } 23 | } 24 | } 25 | for (const mock of Object.keys(mocks)) { 26 | jest.mock(mock, mocks[mock]); 27 | } 28 | 29 | // setting up mocks must happen before this import 30 | import * as action from "../../src/github/SyftGithubAction"; 31 | 32 | jest.setTimeout(30000); 33 | Date.now = jest.fn(() => 1482363367071); 34 | 35 | describe("GitHub Snapshot", () => { 36 | beforeEach(() => { 37 | restoreInitialData(); 38 | }); 39 | 40 | it("runs with default inputs", async () => { 41 | setData({ 42 | inputs: { 43 | path: "tests/fixtures/npm-project", 44 | "dependency-snapshot": "true", 45 | "upload-artifact": "false", 46 | }, 47 | context: { 48 | ...context.push({ 49 | ref: "main", 50 | }), 51 | sha: "f293f09uaw90gwa09f9wea", 52 | workflow: "my-workflow", 53 | job: "default-import-job", 54 | action: "__anchore_sbom-action", 55 | }, 56 | }); 57 | 58 | await action.runSyftAction(); 59 | await action.uploadDependencySnapshot(); 60 | 61 | // validate the request was made 62 | expect(requestArgs).toBeDefined(); 63 | expect(requestArgs).toHaveLength(2); 64 | expect(requestArgs[0]).toBe("POST /repos/test-org/test-repo/dependency-graph/snapshots"); 65 | 66 | // check the resulting snapshot file 67 | const data = requestArgs[1].data; 68 | const submission = JSON.parse(data); 69 | 70 | 
expect(submission.job.correlator).toEqual("my-workflow_default-import-job") 71 | expect(submission.scanned).toBeDefined(); 72 | 73 | // redact changing data 74 | submission.scanned = ""; 75 | submission.detector.version = ""; 76 | 77 | expect(submission).toMatchSnapshot(); 78 | }); 79 | 80 | it("runs with artifact-name input", async () => { 81 | setData({ 82 | inputs: { 83 | path: "tests/fixtures/npm-project", 84 | "dependency-snapshot": "true", 85 | "upload-artifact": "false", 86 | "artifact-name": "my-matrix-build-1", 87 | }, 88 | context: { 89 | ...context.push({ 90 | ref: "main", 91 | }), 92 | sha: "f293f09uaw90gwa09f9wea", 93 | workflow: "my-workflow", 94 | job: "default-import-job", 95 | action: "__anchore_sbom-action", 96 | }, 97 | }); 98 | 99 | await action.runSyftAction(); 100 | await action.uploadDependencySnapshot(); 101 | 102 | // validate the request was made 103 | expect(requestArgs).toBeDefined(); 104 | expect(requestArgs).toHaveLength(2); 105 | expect(requestArgs[0]).toBe("POST /repos/test-org/test-repo/dependency-graph/snapshots"); 106 | 107 | // check the resulting snapshot file 108 | const data = requestArgs[1].data; 109 | const submission = JSON.parse(data); 110 | 111 | expect(submission.scanned).toBeDefined(); 112 | 113 | // redact changing data 114 | submission.scanned = ""; 115 | submission.detector.version = ""; 116 | 117 | expect(submission.job).toBeDefined() 118 | expect(submission.job.correlator).toEqual("my-workflow_default-import-job_my-matrix-build-1") 119 | 120 | expect(submission).toMatchSnapshot(); 121 | }); 122 | 123 | it("runs with dependency-snapshot-correlator defined", async () => { 124 | setData({ 125 | inputs: { 126 | path: "tests/fixtures/npm-project", 127 | "dependency-snapshot": "true", 128 | "upload-artifact": "false", 129 | "dependency-snapshot-correlator": "some-correlator", 130 | }, 131 | context: { 132 | ...context.push({ 133 | ref: "main", 134 | }), 135 | sha: "f293f09uaw90gwa09f9wea", 136 | workflow: "my-workflow", 
137 | job: "default-import-job", 138 | action: "__anchore_sbom-action", 139 | }, 140 | }); 141 | 142 | await action.runSyftAction(); 143 | await action.uploadDependencySnapshot(); 144 | 145 | // validate the request was made 146 | expect(requestArgs).toBeDefined(); 147 | expect(requestArgs).toHaveLength(2); 148 | expect(requestArgs[0]).toBe("POST /repos/test-org/test-repo/dependency-graph/snapshots"); 149 | 150 | // check the resulting snapshot file 151 | const data = requestArgs[1].data; 152 | const submission = JSON.parse(data); 153 | 154 | expect(submission.scanned).toBeDefined(); 155 | 156 | // redact changing data 157 | submission.scanned = ""; 158 | submission.detector.version = ""; 159 | 160 | expect(submission.job).toBeDefined() 161 | expect(submission.job.correlator).toEqual("some-correlator") 162 | 163 | expect(submission).toMatchSnapshot(); 164 | }); 165 | }); 166 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 
55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | [opensource@anchore.com](mailto:opensource@anchore.com). 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 
99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 
129 | -------------------------------------------------------------------------------- /tests/GithubClient.test.ts: -------------------------------------------------------------------------------- 1 | import { getMocks } from "./mocks" 2 | const { data, mocks, setData, restoreInitialData } = getMocks(); 3 | for (const mock of Object.keys(mocks)) { 4 | jest.mock(mock, mocks[mock]); 5 | } 6 | 7 | import { Release } from "@octokit/webhooks-types"; 8 | import * as githubClient from "../src/github/GithubClient"; 9 | import { debugLog } from "../src/github/GithubClient"; 10 | 11 | jest.setTimeout(30000); 12 | Date.now = jest.fn(() => 1482363367071); 13 | 14 | describe("Github Client", () => { 15 | beforeEach(() => { 16 | restoreInitialData(); 17 | }); 18 | 19 | it("calls release asset methods", async () => { 20 | const client = githubClient.getClient( 21 | { owner: "test-owner", repo: "test-repo" }, 22 | "token" 23 | ); 24 | let assets = await client.listReleaseAssets({ 25 | release: { 26 | id: 1234, 27 | } as any, 28 | }); 29 | 30 | const startLength = assets.length; 31 | 32 | await client.uploadReleaseAsset({ 33 | release: { 34 | id: 1234, 35 | upload_url: "http://", 36 | } as Release, 37 | contents: "data", 38 | assetName: "test", 39 | }); 40 | 41 | assets = await client.listReleaseAssets({ 42 | release: { 43 | id: 1234, 44 | } as Release, 45 | }); 46 | 47 | expect(assets.length).toBe(startLength + 1); 48 | 49 | await client.deleteReleaseAsset({ 50 | release: { 51 | id: 1324, 52 | } as any, 53 | asset: { 54 | id: assets.length - 1, 55 | name: "test", 56 | }, 57 | }); 58 | 59 | assets = await client.listReleaseAssets({ 60 | release: { 61 | id: 1234, 62 | } as Release, 63 | }); 64 | 65 | expect(assets.length).toBe(startLength); 66 | }); 67 | 68 | it("calls workflow run for branch methods", async () => { 69 | setData({ 70 | workflowRuns: [{ 71 | id: 3, 72 | head_branch: "main", 73 | conclusion: "success" 74 | }], 75 | }); 76 | const client = githubClient.getClient( 77 | 
{ owner: "test-owner", repo: "test-repo" }, 78 | "token" 79 | ); 80 | const run: any = await client.findLatestWorkflowRunForBranch({ 81 | branch: "main", 82 | }); 83 | expect(run.id).toBe(3); 84 | }); 85 | 86 | it("calls findRelease methods", async () => { 87 | setData({ 88 | releases: [{ 89 | id: 2, 90 | tag_name: "main" 91 | }], 92 | }) 93 | const client = githubClient.getClient( 94 | { owner: "test-owner", repo: "test-repo" }, 95 | "token" 96 | ); 97 | const r: any = await client.findRelease({ 98 | tag: "main", 99 | }); 100 | expect(r.id).toBe(2); 101 | }); 102 | 103 | it("calls findDraftRelease methods", async () => { 104 | setData({ 105 | releases: [{ 106 | id: 1, 107 | tag_name: "main", 108 | draft: false 109 | },{ 110 | id: 2, 111 | tag_name: "main", 112 | draft: true 113 | }], 114 | }) 115 | const client = githubClient.getClient( 116 | { owner: "test-owner", repo: "test-repo" }, 117 | "token" 118 | ); 119 | const r: any = await client.findDraftRelease({ 120 | tag: "main", 121 | }); 122 | expect(r.id).toBe(2); 123 | }); 124 | 125 | it("calls artifact methods", async () => { 126 | setData({ 127 | artifacts: [{ 128 | runId: 1, 129 | id: 34534, 130 | },{ 131 | runId: 2, 132 | id: 34534, 133 | }] 134 | }); 135 | 136 | const client = githubClient.getClient( 137 | { owner: "test-owner", repo: "test-repo" }, 138 | "token" 139 | ); 140 | 141 | let artifacts = await client.listCurrentWorkflowArtifacts(); 142 | 143 | expect(artifacts.length).toBe(0); 144 | 145 | await client.uploadWorkflowArtifact({ 146 | name: "test", 147 | file: "file", 148 | }); 149 | 150 | artifacts = await client.listWorkflowRunArtifacts({ 151 | runId: 1, 152 | }); 153 | 154 | expect(artifacts.length).toBe(1); 155 | 156 | let artifact = await client.downloadWorkflowArtifact({ 157 | name: "test", 158 | }); 159 | 160 | expect(artifact).toBeDefined(); 161 | 162 | artifact = await client.downloadWorkflowRunArtifact({ 163 | artifactId: 1, 164 | }); 165 | 166 | expect(artifact).toBeDefined(); 167 | }); 
168 | 169 | it("fails when return status is error", async () => { 170 | setData({ 171 | returnStatus: { 172 | status: 500, 173 | }, 174 | }); 175 | const client = githubClient.getClient( 176 | { owner: "test-owner", repo: "test-repo" }, 177 | "token" 178 | ); 179 | try { 180 | await client.listWorkflowRunArtifacts({ 181 | runId: 1 182 | }); 183 | expect("exception thrown").toBeUndefined(); 184 | } catch(e) { 185 | expect(e).toBeDefined(); 186 | } 187 | 188 | try { 189 | await client.findLatestWorkflowRunForBranch({ 190 | branch: "main" 191 | }); 192 | expect("exception thrown").toBeUndefined(); 193 | } catch(e) { 194 | expect(e).toBeDefined(); 195 | } 196 | 197 | try { 198 | await client.listReleaseAssets({ 199 | release: { 200 | id: 2134 201 | } as any 202 | }); 203 | expect("exception thrown").toBeUndefined(); 204 | } catch(e) { 205 | expect(e).toBeDefined(); 206 | } 207 | }); 208 | 209 | it("debugLog works", () => { 210 | setData({ 211 | debug: { 212 | enabled: true, 213 | log: [], 214 | } 215 | }); 216 | 217 | debugLog("the_label", "string"); 218 | 219 | expect(data.debug.log.length).toBe(1); 220 | expect(data.debug.log[0]).toBe("string"); 221 | }); 222 | 223 | it("finds a draft release", async () => { 224 | setData({ 225 | releases: [{ 226 | id: 1234, 227 | draft: false, 228 | }, { 229 | id: 5432, 230 | draft: true, 231 | tag_name: "v9" 232 | }] 233 | }); 234 | 235 | const client = githubClient.getClient( 236 | { owner: "test-owner", repo: "test-repo" }, 237 | "token" 238 | ); 239 | 240 | const release: any = await client.findRelease({ tag: "v9" }); 241 | 242 | expect(release.id).toBe(5432); 243 | expect(release.draft).toBeTruthy(); 244 | }); 245 | }); 246 | -------------------------------------------------------------------------------- /tests/integration/__snapshots__/GitHubSnapshot.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`GitHub Snapshot 
runs with artifact-name input 1`] = ` 4 | { 5 | "detector": { 6 | "name": "syft", 7 | "url": "https://github.com/anchore/syft", 8 | "version": "", 9 | }, 10 | "job": { 11 | "correlator": "my-workflow_default-import-job_my-matrix-build-1", 12 | "id": "1", 13 | }, 14 | "manifests": { 15 | "tests/fixtures/npm-project/package-lock.json": { 16 | "file": { 17 | "source_location": "tests/fixtures/npm-project/package-lock.json", 18 | }, 19 | "name": "tests/fixtures/npm-project/package-lock.json", 20 | "resolved": { 21 | "pkg:npm/chownr@2.0.0": { 22 | "package_url": "pkg:npm/chownr@2.0.0", 23 | "relationship": "direct", 24 | "scope": "runtime", 25 | }, 26 | "pkg:npm/fs-minipass@2.1.0": { 27 | "package_url": "pkg:npm/fs-minipass@2.1.0", 28 | "relationship": "direct", 29 | "scope": "runtime", 30 | }, 31 | "pkg:npm/js-tokens@4.0.0": { 32 | "package_url": "pkg:npm/js-tokens@4.0.0", 33 | "relationship": "direct", 34 | "scope": "runtime", 35 | }, 36 | "pkg:npm/loose-envify@1.4.0": { 37 | "package_url": "pkg:npm/loose-envify@1.4.0", 38 | "relationship": "direct", 39 | "scope": "runtime", 40 | }, 41 | "pkg:npm/minipass@3.1.3": { 42 | "package_url": "pkg:npm/minipass@3.1.3", 43 | "relationship": "direct", 44 | "scope": "runtime", 45 | }, 46 | "pkg:npm/minizlib@2.1.2": { 47 | "package_url": "pkg:npm/minizlib@2.1.2", 48 | "relationship": "direct", 49 | "scope": "runtime", 50 | }, 51 | "pkg:npm/mkdirp@1.0.4": { 52 | "package_url": "pkg:npm/mkdirp@1.0.4", 53 | "relationship": "direct", 54 | "scope": "runtime", 55 | }, 56 | "pkg:npm/object-assign@4.1.1": { 57 | "package_url": "pkg:npm/object-assign@4.1.1", 58 | "relationship": "direct", 59 | "scope": "runtime", 60 | }, 61 | "pkg:npm/prop-types@15.7.2": { 62 | "package_url": "pkg:npm/prop-types@15.7.2", 63 | "relationship": "direct", 64 | "scope": "runtime", 65 | }, 66 | "pkg:npm/react-is@16.13.1": { 67 | "package_url": "pkg:npm/react-is@16.13.1", 68 | "relationship": "direct", 69 | "scope": "runtime", 70 | }, 71 | 
"pkg:npm/react@16.14.0": { 72 | "package_url": "pkg:npm/react@16.14.0", 73 | "relationship": "direct", 74 | "scope": "runtime", 75 | }, 76 | "pkg:npm/tar@6.1.0": { 77 | "package_url": "pkg:npm/tar@6.1.0", 78 | "relationship": "direct", 79 | "scope": "runtime", 80 | }, 81 | "pkg:npm/yallist@4.0.0": { 82 | "package_url": "pkg:npm/yallist@4.0.0", 83 | "relationship": "direct", 84 | "scope": "runtime", 85 | }, 86 | }, 87 | }, 88 | }, 89 | "ref": "v0.0.0", 90 | "scanned": "", 91 | "sha": "f293f09uaw90gwa09f9wea", 92 | "version": 0, 93 | } 94 | `; 95 | 96 | exports[`GitHub Snapshot runs with default inputs 1`] = ` 97 | { 98 | "detector": { 99 | "name": "syft", 100 | "url": "https://github.com/anchore/syft", 101 | "version": "", 102 | }, 103 | "job": { 104 | "correlator": "my-workflow_default-import-job", 105 | "id": "1", 106 | }, 107 | "manifests": { 108 | "tests/fixtures/npm-project/package-lock.json": { 109 | "file": { 110 | "source_location": "tests/fixtures/npm-project/package-lock.json", 111 | }, 112 | "name": "tests/fixtures/npm-project/package-lock.json", 113 | "resolved": { 114 | "pkg:npm/chownr@2.0.0": { 115 | "package_url": "pkg:npm/chownr@2.0.0", 116 | "relationship": "direct", 117 | "scope": "runtime", 118 | }, 119 | "pkg:npm/fs-minipass@2.1.0": { 120 | "package_url": "pkg:npm/fs-minipass@2.1.0", 121 | "relationship": "direct", 122 | "scope": "runtime", 123 | }, 124 | "pkg:npm/js-tokens@4.0.0": { 125 | "package_url": "pkg:npm/js-tokens@4.0.0", 126 | "relationship": "direct", 127 | "scope": "runtime", 128 | }, 129 | "pkg:npm/loose-envify@1.4.0": { 130 | "package_url": "pkg:npm/loose-envify@1.4.0", 131 | "relationship": "direct", 132 | "scope": "runtime", 133 | }, 134 | "pkg:npm/minipass@3.1.3": { 135 | "package_url": "pkg:npm/minipass@3.1.3", 136 | "relationship": "direct", 137 | "scope": "runtime", 138 | }, 139 | "pkg:npm/minizlib@2.1.2": { 140 | "package_url": "pkg:npm/minizlib@2.1.2", 141 | "relationship": "direct", 142 | "scope": "runtime", 143 | }, 144 | 
"pkg:npm/mkdirp@1.0.4": { 145 | "package_url": "pkg:npm/mkdirp@1.0.4", 146 | "relationship": "direct", 147 | "scope": "runtime", 148 | }, 149 | "pkg:npm/object-assign@4.1.1": { 150 | "package_url": "pkg:npm/object-assign@4.1.1", 151 | "relationship": "direct", 152 | "scope": "runtime", 153 | }, 154 | "pkg:npm/prop-types@15.7.2": { 155 | "package_url": "pkg:npm/prop-types@15.7.2", 156 | "relationship": "direct", 157 | "scope": "runtime", 158 | }, 159 | "pkg:npm/react-is@16.13.1": { 160 | "package_url": "pkg:npm/react-is@16.13.1", 161 | "relationship": "direct", 162 | "scope": "runtime", 163 | }, 164 | "pkg:npm/react@16.14.0": { 165 | "package_url": "pkg:npm/react@16.14.0", 166 | "relationship": "direct", 167 | "scope": "runtime", 168 | }, 169 | "pkg:npm/tar@6.1.0": { 170 | "package_url": "pkg:npm/tar@6.1.0", 171 | "relationship": "direct", 172 | "scope": "runtime", 173 | }, 174 | "pkg:npm/yallist@4.0.0": { 175 | "package_url": "pkg:npm/yallist@4.0.0", 176 | "relationship": "direct", 177 | "scope": "runtime", 178 | }, 179 | }, 180 | }, 181 | }, 182 | "ref": "v0.0.0", 183 | "scanned": "", 184 | "sha": "f293f09uaw90gwa09f9wea", 185 | "version": 0, 186 | } 187 | `; 188 | 189 | exports[`GitHub Snapshot runs with dependency-snapshot-correlator defined 1`] = ` 190 | { 191 | "detector": { 192 | "name": "syft", 193 | "url": "https://github.com/anchore/syft", 194 | "version": "", 195 | }, 196 | "job": { 197 | "correlator": "some-correlator", 198 | "id": "1", 199 | }, 200 | "manifests": { 201 | "tests/fixtures/npm-project/package-lock.json": { 202 | "file": { 203 | "source_location": "tests/fixtures/npm-project/package-lock.json", 204 | }, 205 | "name": "tests/fixtures/npm-project/package-lock.json", 206 | "resolved": { 207 | "pkg:npm/chownr@2.0.0": { 208 | "package_url": "pkg:npm/chownr@2.0.0", 209 | "relationship": "direct", 210 | "scope": "runtime", 211 | }, 212 | "pkg:npm/fs-minipass@2.1.0": { 213 | "package_url": "pkg:npm/fs-minipass@2.1.0", 214 | "relationship": 
"direct", 215 | "scope": "runtime", 216 | }, 217 | "pkg:npm/js-tokens@4.0.0": { 218 | "package_url": "pkg:npm/js-tokens@4.0.0", 219 | "relationship": "direct", 220 | "scope": "runtime", 221 | }, 222 | "pkg:npm/loose-envify@1.4.0": { 223 | "package_url": "pkg:npm/loose-envify@1.4.0", 224 | "relationship": "direct", 225 | "scope": "runtime", 226 | }, 227 | "pkg:npm/minipass@3.1.3": { 228 | "package_url": "pkg:npm/minipass@3.1.3", 229 | "relationship": "direct", 230 | "scope": "runtime", 231 | }, 232 | "pkg:npm/minizlib@2.1.2": { 233 | "package_url": "pkg:npm/minizlib@2.1.2", 234 | "relationship": "direct", 235 | "scope": "runtime", 236 | }, 237 | "pkg:npm/mkdirp@1.0.4": { 238 | "package_url": "pkg:npm/mkdirp@1.0.4", 239 | "relationship": "direct", 240 | "scope": "runtime", 241 | }, 242 | "pkg:npm/object-assign@4.1.1": { 243 | "package_url": "pkg:npm/object-assign@4.1.1", 244 | "relationship": "direct", 245 | "scope": "runtime", 246 | }, 247 | "pkg:npm/prop-types@15.7.2": { 248 | "package_url": "pkg:npm/prop-types@15.7.2", 249 | "relationship": "direct", 250 | "scope": "runtime", 251 | }, 252 | "pkg:npm/react-is@16.13.1": { 253 | "package_url": "pkg:npm/react-is@16.13.1", 254 | "relationship": "direct", 255 | "scope": "runtime", 256 | }, 257 | "pkg:npm/react@16.14.0": { 258 | "package_url": "pkg:npm/react@16.14.0", 259 | "relationship": "direct", 260 | "scope": "runtime", 261 | }, 262 | "pkg:npm/tar@6.1.0": { 263 | "package_url": "pkg:npm/tar@6.1.0", 264 | "relationship": "direct", 265 | "scope": "runtime", 266 | }, 267 | "pkg:npm/yallist@4.0.0": { 268 | "package_url": "pkg:npm/yallist@4.0.0", 269 | "relationship": "direct", 270 | "scope": "runtime", 271 | }, 272 | }, 273 | }, 274 | }, 275 | "ref": "v0.0.0", 276 | "scanned": "", 277 | "sha": "f293f09uaw90gwa09f9wea", 278 | "version": 0, 279 | } 280 | `; 281 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # GitHub Action for SBOM Generation 2 | 3 | **A GitHub Action for creating a software bill of materials (SBOM) using [Syft](https://github.com/anchore/syft).** 4 | 5 | [![GitHub release](https://img.shields.io/github/release/anchore/sbom-action.svg)](https://github.com/anchore/sbom-action/releases/latest) 6 | [![License: Apache-2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/anchore/sbom-action/blob/main/LICENSE) 7 | [![Join our Discourse](https://img.shields.io/badge/Discourse-Join-blue?logo=discourse)](https://anchore.com/discourse) 8 | 9 | ## Basic Usage 10 | 11 | ```yaml 12 | - uses: anchore/sbom-action@v0 13 | ``` 14 | 15 | By default, this action will execute a Syft scan in the workspace directory 16 | and upload a workflow artifact SBOM in SPDX format. It will also detect 17 | if being run during a [GitHub release](https://docs.github.com/en/repositories/releasing-projects-on-github/about-releases) 18 | and upload the SBOM as a release asset. 19 | 20 | > [!IMPORTANT] 21 | > To upload the SBOM to releases, you will need to give the action permission to read the artifact from the action, and write it to the release: 22 | > ```yaml 23 | > jobs: 24 | > build: 25 | > permissions: 26 | > actions: read 27 | > contents: write 28 | > steps: 29 | > ``` 30 | 31 | ## Example Usage 32 | 33 | ### Scan a container image 34 | 35 | To scan a container image, use the `image` parameter: 36 | 37 | ```yaml 38 | - uses: anchore/sbom-action@v0 39 | with: 40 | image: ghcr.io/example/image_name:tag 41 | ``` 42 | 43 | The image will be fetched using the Docker daemon if available, 44 | which will use any authentication available to the daemon. 45 | 46 | If the Docker daemon is not available, the action will retrieve the image 47 | directly from the container registry. 
48 | 49 | It is also possible to directly connect to the container registry with the 50 | `registry-username` and `registry-password` parameters. This will always bypass the 51 | Docker daemon: 52 | 53 | ```yaml 54 | - uses: anchore/sbom-action@v0 55 | with: 56 | image: my-registry.com/my/image 57 | registry-username: mr_awesome 58 | registry-password: ${{ secrets.REGISTRY_PASSWORD }} 59 | ``` 60 | 61 | ### Scan a specific directory 62 | 63 | Use the `path` parameter, relative to the repository root: 64 | 65 | ```yaml 66 | - uses: anchore/sbom-action@v0 67 | with: 68 | path: ./build/ 69 | ``` 70 | 71 | ### Scan a specific file 72 | 73 | Use the `file` parameter, relative to the repository root: 74 | 75 | ```yaml 76 | - uses: anchore/sbom-action@v0 77 | with: 78 | file: ./build/file 79 | ``` 80 | 81 | ### Publishing SBOMs with releases 82 | 83 | The `sbom-action` will detect being run during a 84 | [GitHub release](https://docs.github.com/en/repositories/releasing-projects-on-github/about-releases) 85 | and automatically upload all SBOMs as release assets. However, 86 | it may be desirable to upload SBOMs generated with other tools or using Syft 87 | outside this action. 
To do this, use the `anchore/sbom-action/publish-sbom` sub-action 88 | and specify a regular expression with the `sbom-artifact-match` 89 | parameter: 90 | 91 | ```yaml 92 | - uses: anchore/sbom-action/publish-sbom@v0 93 | with: 94 | sbom-artifact-match: ".*\\.spdx$" 95 | ``` 96 | 97 | ### Naming the SBOM output 98 | 99 | By default, this action will upload an artifact named 100 | `<repo name>-<job name>[-<step id>].<extension>`, for 101 | example: 102 | 103 | ```yaml 104 | build-sbom: 105 | steps: 106 | - uses: anchore/sbom-action@v0 107 | - uses: anchore/sbom-action@v0 108 | - uses: anchore/sbom-action@v0 109 | id: myid 110 | ``` 111 | 112 | Will create 3 artifacts: 113 | 114 | ```text 115 | my-repo-build-sbom.spdx.json 116 | my-repo-build-sbom-2.spdx.json 117 | my-repo-build-sbom-myid.spdx.json 118 | ``` 119 | 120 | You may need to name these artifacts differently, simply 121 | use the `artifact-name` parameter: 122 | 123 | ```yaml 124 | - uses: anchore/sbom-action@v0 125 | with: 126 | artifact-name: sbom.spdx 127 | ``` 128 | 129 | > [!IMPORTANT] 130 | > If using this action within a **matrix build**, you must specify a unique `artifact-name` 131 | > based on matrix parameters or the artifact upload will fail due to duplicate names. See 132 | > an [example here](.github/workflows/test.yml#L36). 133 | 134 | ## Permissions 135 | 136 | This action needs the following permissions, depending on how it is being used: 137 | 138 | ``` 139 | contents: write # for sbom-action artifact uploads 140 | ``` 141 | 142 | If attaching release assets, the `actions: read` permission is also required. 143 | This may be implicit for public repositories, but is likely to be necessary for 144 | private repositories.
145 | 146 | ``` 147 | actions: read # to find workflow artifacts when attaching release assets 148 | ``` 149 | 150 | ## Configuration 151 | 152 | ### anchore/sbom-action 153 | 154 | The main [SBOM action](action.yml), responsible for generating SBOMs 155 | and uploading them as workflow artifacts and release assets. 156 | 157 | | Parameter | Description | Default | 158 | | --------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------- | 159 | | `path` | A path on the filesystem to scan. This is mutually exclusive to `file` and `image`. | \<current directory\> | 160 | | `file` | A file on the filesystem to scan. This is mutually exclusive to `path` and `image`. | | 161 | | `image` | A container image to scan. This is mutually exclusive to `path` and `file`. See [Scan a container image](#scan-a-container-image) for more information. | | 162 | | `registry-username` | The registry username to use when authenticating to an external registry | | 163 | | `registry-password` | The registry password to use when authenticating to an external registry | | 164 | | `artifact-name` | The name to use for the generated SBOM artifact. See: [Naming the SBOM output](#naming-the-sbom-output) | `sbom-<repository>-<job>.spdx.json` | 165 | | `output-file` | The location to output a resulting SBOM | | 166 | | `format` | The SBOM format to export. One of: `spdx`, `spdx-json`, `cyclonedx`, `cyclonedx-json` | `spdx-json` | 167 | | `dependency-snapshot` | Whether to upload the SBOM to the GitHub Dependency submission API | `false` | 168 | | `upload-artifact` | Upload artifact to workflow | `true` | 169 | | `upload-artifact-retention` | Retention policy in days for uploaded artifact to workflow.
| | 170 | | `upload-release-assets` | Upload release assets | `true` | 171 | | `syft-version` | The version of Syft to use | | 172 | | `github-token` | Authorized secret GitHub Personal Access Token. | `github.token` | 173 | | `config` | Syft configuration file to use. | | 174 | 175 | ### anchore/sbom-action/publish-sbom 176 | 177 | A sub-action to [upload multiple SBOMs](publish-sbom/action.yml) to GitHub releases. 178 | 179 | | Parameter | Description | Default | 180 | | --------------------- | --------------------------------- | ------------------- | 181 | | `sbom-artifact-match` | A pattern to find SBOM artifacts. | `.*\\.spdx\\.json$` | 182 | 183 | ### anchore/sbom-action/download-syft 184 | 185 | A sub-action to [download Syft](download-syft/action.yml). 186 | 187 | | Parameter | Description | Default | 188 | | -------------- | ------------------------------- | ------- | 189 | | `syft-version` | The version of Syft to download | | 190 | 191 | Output parameters: 192 | 193 | | Parameter | Description | 194 | | --------- | ------------------------------------------------------------------ | 195 | | `cmd` | A reference to the [Syft](https://github.com/anchore/syft) binary. | 196 | 197 | `cmd` can be referenced in a workflow like other output parameters: 198 | `${{ steps.<step-id>.outputs.cmd }}` 199 | 200 | ## Windows 201 | 202 | This action is tested on Windows, and should work natively on Windows hosts 203 | without WSL. (Note that it previously required WSL, but should now be run 204 | natively on Windows.) 205 | 206 | ## Diagnostics 207 | 208 | This action makes extensive use of GitHub Action debug logging, 209 | which can be enabled as [described here](https://github.com/actions/toolkit/blob/master/docs/action-debugging.md) 210 | by setting a secret in your repository of `ACTIONS_STEP_DEBUG` to `true`. 
211 | -------------------------------------------------------------------------------- /tests/mocks.ts: -------------------------------------------------------------------------------- 1 | import { 2 | DownloadArtifactOptions, 3 | DownloadArtifactResponse, 4 | FindOptions, 5 | ListArtifactsResponse, 6 | UploadArtifactOptions, 7 | UploadArtifactResponse 8 | } from "@actions/artifact"; 9 | 10 | /** 11 | * Get all the mocks and mock data 12 | */ 13 | export function getMocks() { 14 | class Data { 15 | artifacts: Partial<(Artifact & { runId: number, id: number, files: string[] })>[] = []; 16 | 17 | assets: Partial[] = []; 18 | 19 | workflowRuns: Partial[] = []; 20 | 21 | inputs: { [key: string]: string | number } = {}; 22 | 23 | outputs: { [key: string]: string } = {}; 24 | 25 | releases: Partial[] = []; 26 | 27 | latestRun: Partial = {} as never; 28 | 29 | context: Omit & { payload?: PartialDeep } = context.push({}) as never; 30 | 31 | execArgs: { 32 | cmd: string, 33 | args: string[], 34 | opts: ExecOptions, 35 | env: { [key: string]: string } 36 | } = {} as never; 37 | 38 | returnStatus: { status: number } = { 39 | status: 200, 40 | }; 41 | 42 | failed: { message?: string } = {}; 43 | 44 | debug: { 45 | enabled: boolean, 46 | log: string[], 47 | } = { 48 | enabled: false, 49 | log: [], 50 | } 51 | } 52 | 53 | const data = Object.freeze(new Data()); 54 | const initialState = Object.freeze(JSON.parse(JSON.stringify(data))); 55 | 56 | const setData = (newData: PartialDeep) => { 57 | for (const d of Object.keys(newData)) { 58 | const prop: any = (data as any)[d]; 59 | const newProp: any = (newData as any)[d]; 60 | if (Array.isArray(prop)) { 61 | prop.splice(0, prop.length); 62 | prop.push(...newProp); 63 | } else if (typeof prop === "object") { 64 | for (const k of Object.keys(prop)) { 65 | delete prop[k]; 66 | } 67 | Object.assign(prop, newProp); 68 | // If this was a mutable object, we might want to do this: 69 | // } else { 70 | // (data as any)[d] = newProp; 71 | } 
72 | } 73 | }; 74 | 75 | const restoreInitialData = () => { 76 | setData(JSON.parse(JSON.stringify(initialState))); 77 | }; 78 | 79 | return { 80 | data, 81 | setData, 82 | restoreInitialData, 83 | mocks: { 84 | "@actions/core": () => { 85 | return { 86 | getInput(name: string) { 87 | return data.inputs[name]; 88 | }, 89 | setOutput(name: string, value: string) { 90 | data.outputs[name] = value; 91 | }, 92 | setFailed(msg: string) { 93 | data.failed.message = msg; 94 | }, 95 | info() { 96 | // ignore 97 | }, 98 | warning() { 99 | // ignore 100 | }, 101 | debug(msg: any) { 102 | if (data.debug.enabled) { 103 | data.debug.log.push(msg); 104 | } 105 | }, 106 | addPath() { 107 | // ignore 108 | }, 109 | isDebug() { 110 | return data.debug.enabled; 111 | }, 112 | exportVariable() { 113 | // ignore 114 | }, 115 | async group(_name: string, callback: () => Promise) { 116 | return callback(); 117 | } 118 | }; 119 | }, 120 | 121 | "@actions/artifact": () => { 122 | return { 123 | /* 124 | export interface ArtifactClient { 125 | uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadArtifactOptions): Promise; 126 | listArtifacts(options?: ListArtifactsOptions & FindOptions): Promise; 127 | getArtifact(artifactName: string, options?: FindOptions): Promise; 128 | downloadArtifact(artifactId: number, options?: DownloadArtifactOptions & FindOptions): Promise; 129 | deleteArtifact(artifactName: string, options?: FindOptions): Promise; 130 | } 131 | */ 132 | uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadArtifactOptions): UploadArtifactResponse { 133 | const id = data.artifacts.length; 134 | data.artifacts.push({ 135 | id, 136 | name: path.basename(name), 137 | files, 138 | rootDirectory, 139 | options, 140 | } as never); 141 | return { 142 | id, 143 | }; 144 | }, 145 | downloadArtifact(artifactId: number, options?: DownloadArtifactOptions & FindOptions): DownloadArtifactResponse { 146 | const tempPath = 
options?.path || "/tmp"; 147 | const artifact = data.artifacts.find(a => a.id == artifactId); 148 | if (artifact) { 149 | const name = "my-artifact-name"; 150 | fs.writeFileSync(`${tempPath}/${name}`, "file"); 151 | return { 152 | downloadPath: `${tempPath}/${name}`, 153 | }; 154 | } 155 | throw new Error(`no artifact for id: ${artifactId}`); 156 | }, 157 | listArtifacts() { 158 | return { 159 | artifacts: data.artifacts.filter(a => !a.runId), 160 | }; 161 | }, 162 | getArtifact(artifactName: string, options?: FindOptions) { 163 | return { 164 | artifact: data.artifacts.find(a => a.name == artifactName) 165 | } 166 | }, 167 | }; 168 | }, 169 | 170 | "@actions/tool-cache": () => ({ 171 | downloadTool() { 172 | return "download-tool"; 173 | }, 174 | extractZip() { 175 | const tempPath = fs.mkdtempSync(path.join(os.tmpdir(), "sbom-action-")); 176 | fs.writeFileSync(`${tempPath}/sbom-asdf.spdx`, "sbom"); 177 | return tempPath; 178 | }, 179 | find(name: string) { 180 | return name; 181 | }, 182 | }), 183 | 184 | "@actions/exec": () => ({ 185 | async exec(cmd: string, args: string[], opts: ExecOptions = {}) { 186 | data.execArgs.cmd = cmd; 187 | data.execArgs.args = args; 188 | data.execArgs.opts = opts; 189 | data.execArgs.env = opts.env as any; 190 | if (opts) { 191 | const out = opts.listeners?.stdout; 192 | if (out) { 193 | out(Buffer.from("syft output")); 194 | } 195 | } 196 | return 0; 197 | }, 198 | }), 199 | 200 | "@actions/github": () => { 201 | return { 202 | get context() { 203 | return data.context; 204 | }, 205 | getOctokit() { 206 | return { 207 | request(request: any): any { 208 | return request; 209 | }, 210 | rest: { 211 | actions: { 212 | async listWorkflowRunArtifacts({ run_id }: any) { 213 | return { 214 | status: data.returnStatus.status, 215 | data: { 216 | artifacts: data.artifacts.filter(a => a.runId === run_id), 217 | }, 218 | }; 219 | }, 220 | async downloadArtifact() { 221 | return { 222 | url: "http://artifact", 223 | }; 224 | }, 225 | async 
listWorkflowRunsForRepo({ branch, status }: any) { 226 | return { 227 | status: data.returnStatus.status, 228 | data: { 229 | workflow_runs: data.workflowRuns.filter(r => 230 | r.head_branch === branch && r.conclusion === status 231 | ), 232 | }, 233 | }; 234 | }, 235 | }, 236 | repos: { 237 | async listReleaseAssets() { 238 | return { 239 | status: data.returnStatus.status, 240 | data: data.assets, 241 | }; 242 | }, 243 | async uploadReleaseAsset({name}: ReleaseAsset) { 244 | data.assets.push({ 245 | id: data.assets.length, 246 | name, 247 | } as never); 248 | }, 249 | async deleteReleaseAsset({id}: ReleaseAsset) { 250 | const idx = data.assets.findIndex(a => a.id === id); 251 | data.assets.splice(idx, 1); 252 | }, 253 | async getReleaseByTag({ tag }: any) { 254 | return { 255 | data: data.releases.find(r => r.tag_name === tag), 256 | }; 257 | }, 258 | async listReleases() { 259 | return { 260 | data: data.releases, 261 | }; 262 | } 263 | }, 264 | }, 265 | }; 266 | }, 267 | }; 268 | }, 269 | } as { [key: string]: () => unknown } 270 | }; 271 | } 272 | 273 | const contextBase = { 274 | ref: "v0.0.0", 275 | sha: "a89b7d99c7097", 276 | payload: {}, 277 | repo: { 278 | owner: "test-org", 279 | repo: "test-repo", 280 | }, 281 | runId: 1, 282 | job: "my_job", 283 | action: "__anchore_sbom-action_2", 284 | }; 285 | 286 | export const context = { 287 | pull_request(payload: PartialDeep) { 288 | return { 289 | ...contextBase, 290 | eventName: "pull_request", 291 | payload, 292 | }; 293 | }, 294 | push(payload: PartialDeep) { 295 | return { 296 | ...contextBase, 297 | eventName: "push", 298 | payload, 299 | }; 300 | }, 301 | release(payload: PartialDeep) { 302 | return { 303 | ...contextBase, 304 | eventName: "release", 305 | payload, 306 | }; 307 | } 308 | }; 309 | 310 | import { PartialDeep } from "type-fest"; 311 | import { Artifact } from "../src/github/GithubClient"; 312 | import { ExecOptions } from "@actions/exec"; 313 | import { Context } from 
"@actions/github/lib/context"; 314 | import { 315 | PullRequestEvent, PushEvent, 316 | Release, 317 | ReleaseAsset, ReleaseEvent, 318 | WorkflowRun 319 | } from "@octokit/webhooks-types"; 320 | import * as fs from "fs"; 321 | import * as os from "os"; 322 | import * as path from "path"; 323 | -------------------------------------------------------------------------------- /tests/SyftGithubAction.test.ts: -------------------------------------------------------------------------------- 1 | import {context, getMocks} from "./mocks"; 2 | const { data, setData, restoreInitialData, mocks } = getMocks(); 3 | const { 4 | artifacts, 5 | assets, 6 | inputs, 7 | } = data; 8 | for (const mock of Object.keys(mocks)) { 9 | jest.mock(mock, mocks[mock]); 10 | } 11 | 12 | import * as fs from "fs"; 13 | import * as os from "os"; 14 | import * as path from "path"; 15 | import * as action from "../src/github/SyftGithubAction"; 16 | import { 17 | downloadSyft, 18 | runAndFailBuildOnException 19 | } from "../src/github/SyftGithubAction"; 20 | import {mapToWSLPath} from "../src/github/Executor"; 21 | 22 | jest.setTimeout(30000); 23 | Date.now = jest.fn(() => 1482363367071); 24 | 25 | describe("Action", () => { 26 | beforeEach(() => { 27 | restoreInitialData(); 28 | }); 29 | 30 | it("downloads syft", async () => { 31 | const path = await downloadSyft(); 32 | expect(path).toBe("download-tool_syft/syft") 33 | }); 34 | 35 | it("runs with default inputs on push", async () => { 36 | setData({ 37 | inputs: { 38 | path: ".", 39 | }, 40 | context: { 41 | ...context.push({ 42 | ref: "main", 43 | }), 44 | job: "default-import-job", 45 | action: "__anchore_sbom-action", 46 | }, 47 | }); 48 | 49 | await action.runSyftAction(); 50 | await action.attachReleaseAssets(); 51 | 52 | expect(artifacts.length).toBe(1); 53 | expect(assets.length).toBe(0); 54 | }); 55 | 56 | it("runs with image input", async () => { 57 | setData({ 58 | inputs: { 59 | image: "some-image:latest", 60 | }, 61 | }); 62 | 63 | await 
action.runSyftAction(); 64 | 65 | const { args } = data.execArgs; 66 | 67 | expect(args).toBeDefined() 68 | expect(args.length > 1).toBeTruthy(); 69 | expect(args[1]).toBe("some-image:latest") 70 | }); 71 | 72 | it("runs with path input", async () => { 73 | setData({ 74 | inputs: { 75 | path: "some-path", 76 | }, 77 | }); 78 | 79 | await action.runSyftAction(); 80 | 81 | const { args } = data.execArgs; 82 | 83 | expect(args).toBeDefined() 84 | expect(args.length > 1).toBeTruthy(); 85 | expect(args[1]).toBe("dir:some-path") 86 | }); 87 | 88 | it("runs with file input", async () => { 89 | setData({ 90 | inputs: { 91 | file: "some-file.jar", 92 | }, 93 | }); 94 | 95 | await action.runSyftAction(); 96 | 97 | const { args } = data.execArgs; 98 | 99 | expect(args).toBeDefined() 100 | expect(args.length > 1).toBeTruthy(); 101 | expect(args[1]).toBe("file:some-file.jar") 102 | }); 103 | 104 | it("runs with release uploads inputs", async () => { 105 | const outputFile = `${fs.mkdtempSync( 106 | path.join(os.tmpdir(), "sbom-action-") 107 | )}/sbom.spdx`; 108 | 109 | setData({ 110 | inputs: { 111 | image: "org/img", 112 | "upload-artifact": "true", 113 | "output-file": outputFile, 114 | "upload-release-assets": "true", 115 | }, 116 | context: context.release({ 117 | release: { 118 | id: 4095345, 119 | name: "v3.5.6", 120 | }, 121 | }), 122 | }); 123 | 124 | await action.runSyftAction(); 125 | 126 | expect(fs.existsSync(inputs["output-file"] as string)).toBeTruthy(); 127 | 128 | await action.attachReleaseAssets(); 129 | 130 | expect(artifacts.length).toBe(1); 131 | expect(assets.length).toBe(1); 132 | 133 | expect(fs.existsSync(outputFile)).toBeTruthy(); 134 | }); 135 | 136 | it("runs with retention input", async () => { 137 | setData({ 138 | inputs: { 139 | image: "org/img", 140 | "upload-artifact": "true", 141 | "upload-artifact-retention": "3", 142 | }, 143 | }); 144 | 145 | await action.runSyftAction(); 146 | 147 | const { artifacts } = data; 148 | 149 | 
expect(artifacts).toHaveLength(1); 150 | 151 | const opts = (artifacts[0] as any).options 152 | 153 | expect(opts.retentionDays).toEqual(3) 154 | }); 155 | 156 | it("runs without uploading anything", async () => { 157 | setData({ 158 | inputs: { 159 | image: "org/img", 160 | "upload-artifact": "false", 161 | "upload-release-assets": "false", 162 | }, 163 | context: context.release({ 164 | release: { 165 | id: 4095345, 166 | name: "v3.5.6", 167 | }, 168 | }), 169 | }); 170 | 171 | await action.runSyftAction(); 172 | await action.attachReleaseAssets(); 173 | 174 | expect(artifacts.length).toBe(0); 175 | expect(assets.length).toBe(0); 176 | }); 177 | 178 | it("runs pull-request compare", async () => { 179 | setData({ 180 | inputs:{ 181 | image: "org/img", 182 | "compare-pulls": "true", 183 | "artifact-name": "sbom.spdx.json" 184 | }, 185 | context: context.pull_request({ 186 | pull_request: { 187 | base: { 188 | ref: "main", 189 | }, 190 | }, 191 | }), 192 | workflowRuns: [{ 193 | id: 6, 194 | head_branch: "main", 195 | conclusion: "success", 196 | }], 197 | artifacts: [{ 198 | runId: 6, 199 | name: "sbom.spdx.json", 200 | files: ["the_sbom"], 201 | }], 202 | }); 203 | 204 | await action.runSyftAction(); 205 | 206 | expect(artifacts.length).toBe(2); 207 | }); 208 | 209 | it("runs in tag workflow", async () => { 210 | setData({ 211 | inputs:{ 212 | "sbom-artifact-match": ".*.spdx.json$" 213 | }, 214 | context: { 215 | ...context.push({}), 216 | ref: "refs/tags/v34.8451", 217 | }, 218 | releases: [{ 219 | tag_name: "v34.8451" 220 | }], 221 | artifacts: [{ 222 | name: "awesome.spdx.json" 223 | }], 224 | }); 225 | 226 | await action.attachReleaseAssets(); 227 | 228 | expect(assets.length).toBe(1); 229 | }); 230 | 231 | it("runs in tag workflow with draft release", async () => { 232 | setData({ 233 | inputs:{ 234 | "sbom-artifact-match": ".*.spdx.json$" 235 | }, 236 | context: { 237 | ...context.push({}), 238 | ref: "refs/tags/v34.8451", 239 | }, 240 | releases: [{ 241 | 
draft: true, 242 | tag_name: "v34.8451" 243 | }], 244 | artifacts: [{ 245 | name: "awesome.spdx.json" 246 | }], 247 | }); 248 | 249 | await action.attachReleaseAssets(); 250 | 251 | expect(assets.length).toBe(1); 252 | }); 253 | 254 | it("runs in release with prior workflow artifacts", async () => { 255 | setData({ 256 | inputs:{ 257 | "sbom-artifact-match": ".*.spdx.json$" 258 | }, 259 | context: { 260 | ...context.release({ 261 | release: { 262 | target_commitish: "main" 263 | } 264 | }), 265 | ref: "refs/tags/v34.8451", 266 | }, 267 | releases: [{ 268 | draft: true, 269 | tag_name: "v34.8451" 270 | }], 271 | artifacts: [{ 272 | runId: 9, 273 | name: "awesome.spdx.json" 274 | }], 275 | workflowRuns: [{ 276 | id: 9, 277 | head_branch: "main", 278 | conclusion: "success" 279 | }] 280 | }); 281 | 282 | await action.attachReleaseAssets(); 283 | 284 | expect(assets.length).toBe(1); 285 | }); 286 | 287 | it("fails build with runAndFailBuildOnException", async () => { 288 | try { 289 | await runAndFailBuildOnException(async () => { 290 | throw new Error(); 291 | }); 292 | expect(data.failed.message).toBeDefined(); 293 | } catch (e) { 294 | expect("should not throw exception").toBeUndefined(); 295 | } 296 | }); 297 | 298 | it("does not include docker scheme by default", async () => { 299 | setData({ 300 | inputs:{ 301 | image: "somewhere/org/img", 302 | } 303 | }); 304 | 305 | await action.runSyftAction(); 306 | 307 | const { cmd, args, env } = data.execArgs; 308 | 309 | expect(cmd.endsWith("syft")).toBeTruthy(); 310 | expect(args).toContain("somewhere/org/img"); 311 | expect(env.SYFT_REGISTRY_AUTH_USERNAME).toBeFalsy(); 312 | expect(env.SYFT_REGISTRY_AUTH_PASSWORD).toBeFalsy(); 313 | }); 314 | 315 | it("uses registry scheme with username and password", async () => { 316 | setData({ 317 | inputs:{ 318 | image: "somewhere/org/img", 319 | "registry-username": "mr_awesome", 320 | "registry-password": "super_secret", 321 | }, 322 | }); 323 | 324 | await 
action.runSyftAction(); 325 | 326 | const { cmd, args, env } = data.execArgs; 327 | 328 | expect(cmd.endsWith("syft")).toBeTruthy(); 329 | expect(args).toContain("registry:somewhere/org/img"); 330 | expect(env.SYFT_REGISTRY_AUTH_USERNAME).toBe("mr_awesome"); 331 | expect(env.SYFT_REGISTRY_AUTH_PASSWORD).toBe("super_secret"); 332 | }); 333 | 334 | it("uses image name for default artifact name", () => { 335 | setData({ 336 | inputs: { 337 | image: "something-something/image-image" 338 | } 339 | }); 340 | 341 | expect(action.getArtifactName()).toBe("something-something-image-image.spdx.json"); 342 | 343 | setData({ 344 | inputs: { 345 | image: "ghcr.io/something-something/image-image" 346 | } 347 | }); 348 | 349 | expect(action.getArtifactName()).toBe("something-something-image-image.spdx.json"); 350 | }); 351 | 352 | it("format informs artifact name", () => { 353 | setData({ 354 | inputs: { 355 | image: "img", 356 | format: "spdx", 357 | } 358 | }); 359 | 360 | expect(action.getArtifactName()).toBe("img.spdx"); 361 | 362 | setData({ 363 | inputs: { 364 | image: "img", 365 | format: "spdx-json", 366 | } 367 | }); 368 | 369 | expect(action.getArtifactName()).toBe("img.spdx.json"); 370 | 371 | setData({ 372 | inputs: { 373 | image: "img", 374 | format: "cyclonedx", 375 | } 376 | }); 377 | 378 | expect(action.getArtifactName()).toBe("img.cyclonedx.xml"); 379 | 380 | setData({ 381 | inputs: { 382 | image: "img", 383 | format: "cyclonedx-json", 384 | } 385 | }); 386 | 387 | expect(action.getArtifactName()).toBe("img.cyclonedx.json"); 388 | 389 | }); 390 | 391 | it("correctly encode tags", () => { 392 | setData({ 393 | inputs: { 394 | image: "ghcr.io/something-something/image-image:0.1.2-dev" 395 | } 396 | }); 397 | 398 | expect(action.getArtifactName()).toBe("something-something-image-image_0_1_2-dev.spdx.json"); 399 | }); 400 | 401 | it ("properly maps paths for WSL", () => { 402 | expect(mapToWSLPath("basic arg")).toBe("basic arg"); 403 | 
expect(mapToWSLPath("D:\\Some\\Path")).toBe("/mnt/d/Some/Path"); 404 | expect(mapToWSLPath("C:\\Some\\Path")).toBe("/mnt/c/Some/Path"); 405 | }); 406 | 407 | it("calls with config", async () => { 408 | setData({ 409 | inputs: { 410 | image: "some-image:latest", 411 | config: "syft-config.yaml", 412 | } 413 | }); 414 | 415 | await action.runSyftAction(); 416 | const { cmd, args, env } = data.execArgs; 417 | 418 | expect(args).toContain("-c"); 419 | expect(args).toContain("syft-config.yaml"); 420 | }); 421 | }); 422 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /tests/fixtures/policy_evaluation.fixture.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "sha256:0c24303": { 4 | "nginx:latest": [ 5 | { 6 | "detail": { 7 | "policy": { 8 | "blacklisted_images": [], 9 | "comment": "Default bundle", 10 | "id": "2c53a13c-1765-11e8-82ef-23527761d060", 11 | "mappings": [ 12 | { 13 | "id": "c4f9bf74-dc38-4ddf-b5cf-00e9c0074611", 14 | "image": { 15 | "type": "tag", 16 | "value": "*" 17 | }, 18 | "name": "default", 19 | "policy_id": "48e6f7d6-1765-11e8-b5f9-8b6f228548b6", 20 | "registry": "*", 21 | "repository": "*", 22 | "whitelist_ids": ["37fd763e-1765-11e8-add4-3b16c029ac5c"] 23 | } 24 | ], 25 | "name": "Default bundle", 26 | "policies": [ 27 | { 28 | "comment": "System default policy", 29 | "id": "48e6f7d6-1765-11e8-b5f9-8b6f228548b6", 30 | "name": "DefaultPolicy", 31 | "rules": [ 32 | { 33 | "action": "STOP", 34 | "gate": "dockerfile", 35 | "id": "ce7b8000-829b-4c27-8122-69cd59018400", 36 | "params": [ 37 | { 38 | "name": "ports", 39 | "value": "22" 40 | }, 
41 | { 42 | "name": "type", 43 | "value": "blacklist" 44 | } 45 | ], 46 | "trigger": "exposed_ports" 47 | }, 48 | { 49 | "action": "WARN", 50 | "gate": "dockerfile", 51 | "id": "312d9e41-1c05-4e2f-ad89-b7d34b0855bb", 52 | "params": [ 53 | { 54 | "name": "instruction", 55 | "value": "HEALTHCHECK" 56 | }, 57 | { 58 | "name": "check", 59 | "value": "not_exists" 60 | } 61 | ], 62 | "trigger": "instruction" 63 | }, 64 | { 65 | "action": "WARN", 66 | "gate": "vulnerabilities", 67 | "id": "6b5c14e7-a6f7-48cc-99d2-959273a2c6fa", 68 | "params": [ 69 | { 70 | "name": "max_days_since_sync", 71 | "value": "2" 72 | } 73 | ], 74 | "trigger": "stale_feed_data" 75 | }, 76 | { 77 | "action": "WARN", 78 | "gate": "vulnerabilities", 79 | "id": "3e79ea94-18c4-4d26-9e29-3b9172a62c2e", 80 | "params": [], 81 | "trigger": "vulnerability_data_unavailable" 82 | }, 83 | { 84 | "action": "WARN", 85 | "gate": "vulnerabilities", 86 | "id": "6063fdde-b1c5-46af-973a-915739451ac4", 87 | "params": [ 88 | { 89 | "name": "package_type", 90 | "value": "all" 91 | }, 92 | { 93 | "name": "severity_comparison", 94 | "value": "=" 95 | }, 96 | { 97 | "name": "severity", 98 | "value": "medium" 99 | } 100 | ], 101 | "trigger": "package" 102 | }, 103 | { 104 | "action": "STOP", 105 | "gate": "vulnerabilities", 106 | "id": "b30e8abc-444f-45b1-8a37-55be1b8c8bb5", 107 | "params": [ 108 | { 109 | "name": "package_type", 110 | "value": "all" 111 | }, 112 | { 113 | "name": "severity_comparison", 114 | "value": ">" 115 | }, 116 | { 117 | "name": "severity", 118 | "value": "medium" 119 | } 120 | ], 121 | "trigger": "package" 122 | } 123 | ], 124 | "version": "1_0" 125 | } 126 | ], 127 | "version": "1_0", 128 | "whitelisted_images": [], 129 | "whitelists": [ 130 | { 131 | "comment": "Default global whitelist", 132 | "id": "37fd763e-1765-11e8-add4-3b16c029ac5c", 133 | "items": [], 134 | "name": "Global Whitelist", 135 | "version": "1_0" 136 | } 137 | ] 138 | }, 139 | "result": { 140 | "bundle": { 141 | 
"blacklisted_images": [], 142 | "comment": "Default bundle", 143 | "id": "2c53a13c-1765-11e8-82ef-23527761d060", 144 | "mappings": [ 145 | { 146 | "id": "c4f9bf74-dc38-4ddf-b5cf-00e9c0074611", 147 | "image": { 148 | "type": "tag", 149 | "value": "*" 150 | }, 151 | "name": "default", 152 | "policy_id": "48e6f7d6-1765-11e8-b5f9-8b6f228548b6", 153 | "registry": "*", 154 | "repository": "*", 155 | "whitelist_ids": ["37fd763e-1765-11e8-add4-3b16c029ac5c"] 156 | } 157 | ], 158 | "name": "Default bundle", 159 | "policies": [ 160 | { 161 | "comment": "System default policy", 162 | "id": "48e6f7d6-1765-11e8-b5f9-8b6f228548b6", 163 | "name": "DefaultPolicy", 164 | "rules": [ 165 | { 166 | "action": "STOP", 167 | "gate": "dockerfile", 168 | "id": "ce7b8000-829b-4c27-8122-69cd59018400", 169 | "params": [ 170 | { 171 | "name": "ports", 172 | "value": "22" 173 | }, 174 | { 175 | "name": "type", 176 | "value": "blacklist" 177 | } 178 | ], 179 | "trigger": "exposed_ports" 180 | }, 181 | { 182 | "action": "WARN", 183 | "gate": "dockerfile", 184 | "id": "312d9e41-1c05-4e2f-ad89-b7d34b0855bb", 185 | "params": [ 186 | { 187 | "name": "instruction", 188 | "value": "HEALTHCHECK" 189 | }, 190 | { 191 | "name": "check", 192 | "value": "not_exists" 193 | } 194 | ], 195 | "trigger": "instruction" 196 | }, 197 | { 198 | "action": "WARN", 199 | "gate": "vulnerabilities", 200 | "id": "6b5c14e7-a6f7-48cc-99d2-959273a2c6fa", 201 | "params": [ 202 | { 203 | "name": "max_days_since_sync", 204 | "value": "2" 205 | } 206 | ], 207 | "trigger": "stale_feed_data" 208 | }, 209 | { 210 | "action": "WARN", 211 | "gate": "vulnerabilities", 212 | "id": "3e79ea94-18c4-4d26-9e29-3b9172a62c2e", 213 | "params": [], 214 | "trigger": "vulnerability_data_unavailable" 215 | }, 216 | { 217 | "action": "WARN", 218 | "gate": "vulnerabilities", 219 | "id": "6063fdde-b1c5-46af-973a-915739451ac4", 220 | "params": [ 221 | { 222 | "name": "package_type", 223 | "value": "all" 224 | }, 225 | { 226 | "name": 
"severity_comparison", 227 | "value": "=" 228 | }, 229 | { 230 | "name": "severity", 231 | "value": "medium" 232 | } 233 | ], 234 | "trigger": "package" 235 | }, 236 | { 237 | "action": "STOP", 238 | "gate": "vulnerabilities", 239 | "id": "b30e8abc-444f-45b1-8a37-55be1b8c8bb5", 240 | "params": [ 241 | { 242 | "name": "package_type", 243 | "value": "all" 244 | }, 245 | { 246 | "name": "severity_comparison", 247 | "value": ">" 248 | }, 249 | { 250 | "name": "severity", 251 | "value": "medium" 252 | } 253 | ], 254 | "trigger": "package" 255 | } 256 | ], 257 | "version": "1_0" 258 | } 259 | ], 260 | "version": "1_0", 261 | "whitelisted_images": [], 262 | "whitelists": [ 263 | { 264 | "comment": "Default global whitelist", 265 | "id": "37fd763e-1765-11e8-add4-3b16c029ac5c", 266 | "items": [], 267 | "name": "Global Whitelist", 268 | "version": "1_0" 269 | } 270 | ] 271 | }, 272 | "created_at": 1571949314, 273 | "evaluation_problems": [], 274 | "final_action": "warn", 275 | "final_action_reason": "policy_evaluation", 276 | "image_id": "ab56bba91343aafcdd94b7a44b42e12f32719b9a2b8579e93017c1280f48e8f3", 277 | "last_modified": 1571949314, 278 | "matched_blacklisted_images_rule": false, 279 | "matched_mapping_rule": { 280 | "id": "c4f9bf74-dc38-4ddf-b5cf-00e9c0074611", 281 | "image": { 282 | "type": "tag", 283 | "value": "*" 284 | }, 285 | "name": "default", 286 | "policy_id": "48e6f7d6-1765-11e8-b5f9-8b6f228548b6", 287 | "registry": "*", 288 | "repository": "*", 289 | "whitelist_ids": ["37fd763e-1765-11e8-add4-3b16c029ac5c"] 290 | }, 291 | "matched_whitelisted_images_rule": false, 292 | "result": { 293 | "ab56bba91343aafcdd94b7a44b42e12f32719b9a2b8579e93017c1280f48e8f3": { 294 | "result": { 295 | "final_action": "warn", 296 | "header": [ 297 | "Image_Id", 298 | "Repo_Tag", 299 | "Trigger_Id", 300 | "Gate", 301 | "Trigger", 302 | "Check_Output", 303 | "Gate_Action", 304 | "Whitelisted", 305 | "Policy_Id" 306 | ], 307 | "row_count": 1, 308 | "rows": [ 309 | [ 310 | 
"ab56bba91343aafcdd94b7a44b42e12f32719b9a2b8579e93017c1280f48e8f3", 311 | "localhost:5000/nginx:latest", 312 | "41cb7cdf04850e33a11f80c42bf660b3", 313 | "dockerfile", 314 | "instruction", 315 | "Dockerfile directive 'HEALTHCHECK' not found, matching condition 'not_exists' check", 316 | "warn", 317 | false, 318 | "48e6f7d6-1765-11e8-b5f9-8b6f228548b6" 319 | ] 320 | ] 321 | } 322 | }, 323 | "policy_data": [], 324 | "policy_name": "", 325 | "whitelist_data": [], 326 | "whitelist_names": [] 327 | }, 328 | "tag": "localhost:5000/nginx:latest", 329 | "user_id": "admin" 330 | } 331 | }, 332 | "last_evaluation": "2019-10-24T20:35:14Z", 333 | "policyId": "2c53a13c-1765-11e8-82ef-23527761d060", 334 | "status": "pass" 335 | } 336 | ] 337 | } 338 | } 339 | ] 340 | -------------------------------------------------------------------------------- /src/github/GithubClient.ts: -------------------------------------------------------------------------------- 1 | import artifactClient, { UploadArtifactOptions } from "@actions/artifact"; 2 | import * as core from "@actions/core"; 3 | import * as github from "@actions/github"; 4 | import { GitHub } from "@actions/github/lib/utils"; 5 | import * as cache from "@actions/tool-cache"; 6 | import { Release } from "@octokit/webhooks-types"; 7 | import fs from "fs"; 8 | import os from "os"; 9 | import path from "path"; 10 | import { stringify } from "./Util"; 11 | 12 | export type GithubRepo = { owner: string; repo: string }; 13 | 14 | /** 15 | * Basic release asset information 16 | */ 17 | export interface ReleaseAsset { 18 | id: number; 19 | name: string; 20 | } 21 | 22 | /** 23 | * Common interface for methods requiring a release 24 | */ 25 | interface ReleaseProps { 26 | release: Release; 27 | } 28 | 29 | /** 30 | * Basic artifact interface returned via listWorkflowArtifacts 31 | */ 32 | export interface Artifact { 33 | // Workflow run artifact will have an ID 34 | id?: number; 35 | name: string; 36 | } 37 | 38 | /** 39 | * Basic workflow 
run information 40 | */ 41 | export interface WorkflowRun { 42 | id: number; 43 | } 44 | 45 | /** 46 | * This is only a partial definition of the snapshot format, just including the 47 | * values we need to set from the workflow run 48 | */ 49 | export interface DependencySnapshot { 50 | job: { 51 | correlator: string; 52 | id: string; 53 | }; 54 | sha: string; 55 | ref: string; 56 | detector: { 57 | version: string; 58 | }; 59 | } 60 | 61 | /** 62 | * Suppress info output by redirecting to debug 63 | * @param fn function to call for duration of output suppression 64 | */ 65 | async function suppressOutput(fn: () => Promise): Promise { 66 | const info = core.info; 67 | try { 68 | try { 69 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 70 | // @ts-ignore 71 | core.info = core.debug; 72 | } catch (e) {} 73 | return await fn(); 74 | } finally { 75 | try { 76 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 77 | // @ts-ignore 78 | core.info = info; 79 | } catch (e) {} 80 | } 81 | } 82 | 83 | /** 84 | * Wraps a string in dashes 85 | */ 86 | export function dashWrap(str: string): string { 87 | let out = ` ${str} `; 88 | const width = 80; 89 | while (out.length < width) { 90 | out = `-${out}-`; 91 | } 92 | if (out.length > width) { 93 | out = out.substr(0, width); 94 | } 95 | return out; 96 | } 97 | 98 | /** 99 | * Attempts to intelligently log all objects passed in when debug is enabled 100 | */ 101 | export function debugLog(label: string, ...args: unknown[]): void { 102 | if (core.isDebug()) { 103 | core.group(label, async () => { 104 | for (const arg of args) { 105 | if (typeof arg === "string") { 106 | core.debug(arg); 107 | } else if (arg instanceof Error) { 108 | core.debug(arg.message); 109 | core.debug(stringify(arg.stack)); 110 | } else { 111 | core.debug(stringify(arg)); 112 | } 113 | } 114 | }); 115 | } 116 | } 117 | 118 | /** 119 | * Provides a basic shim to interact with the necessary Github APIs 120 | */ 121 | export class 
GithubClient {
  client: InstanceType;

  repo: GithubRepo;

  constructor(client: InstanceType, repo: GithubRepo) {
    this.client = client;
    this.repo = repo;
  }

  // --------------- WORKFLOW ARTIFACT METHODS ------------------

  /**
   * Lists the workflow artifacts for the current workflow.
   *
   * The REST listWorkflowRunArtifacts endpoint does not seem to work during
   * the workflow run -- much like the GitHub UI, artifacts only show up once
   * a run completes -- so we have to do a little bit of hackery here and use
   * the artifact client. We _could_ download all artifacts using a supported
   * API, but internally it's using this anyway.
   */
  async listCurrentWorkflowArtifacts(): Promise {
    const response = await artifactClient.listArtifacts();
    debugLog("listCurrentWorkflowArtifacts response:", response);
    return response.artifacts;
  }

  /**
   * Downloads a workflow artifact for the current workflow run
   * @param name artifact name
   * @param id specified if using a workflow run artifact
   * @return full path to the artifact
   */
  async downloadWorkflowArtifact({ name, id }: Artifact): Promise {
    // an explicit id means this is a completed workflow-run artifact
    if (id) {
      return this.downloadWorkflowRunArtifact({ artifactId: id });
    }

    const tempPath = fs.mkdtempSync(path.join(os.tmpdir(), "sbom-action-"));

    // quiet the artifact client's info-level chatter while downloading
    const response = await suppressOutput(async () => {
      const found = await artifactClient.getArtifact(name);
      return artifactClient.downloadArtifact(found.artifact.id, {
        path: tempPath,
      });
    });

    if (!response.downloadPath) {
      debugLog("downloadArtifact response empty", response);
      return "";
    }

    debugLog(
      "downloadArtifact response:",
      response,
      "dir:",
      core.isDebug() && fs.readdirSync(response.downloadPath)
    );

return `${response.downloadPath}`; 180 | } 181 | 182 | /** 183 | * Uploads a workflow artifact for the current workflow run 184 | * @param name name of the artifact 185 | * @param file file to upload 186 | * @param retention retention days of a artifact 187 | */ 188 | async uploadWorkflowArtifact({ 189 | name, 190 | file, 191 | retention, 192 | }: { 193 | name: string; 194 | file: string; 195 | retention?: number; 196 | }): Promise { 197 | const rootDirectory = path.dirname(file); 198 | 199 | debugLog( 200 | "uploadArtifact:", 201 | name, 202 | file, 203 | retention, 204 | rootDirectory, 205 | core.isDebug() && fs.readdirSync(rootDirectory) 206 | ); 207 | 208 | const options: UploadArtifactOptions = {}; 209 | if (retention) { 210 | options.retentionDays = retention; 211 | } 212 | 213 | const info = await suppressOutput(async () => 214 | artifactClient.uploadArtifact(name, [file], rootDirectory, options) 215 | ); 216 | 217 | debugLog("uploadArtifact response:", info); 218 | } 219 | 220 | // --------------- COMPLETED WORKFLOW METHODS ------------------ 221 | 222 | /** 223 | * Lists the workflow run artifacts for a completed workflow 224 | * @param runId the workflow run number 225 | */ 226 | async listWorkflowRunArtifacts({ 227 | runId, 228 | }: { 229 | runId: number; 230 | }): Promise<(Artifact & { id: number })[]> { 231 | const response = await this.client.rest.actions.listWorkflowRunArtifacts({ 232 | ...this.repo, 233 | run_id: runId, 234 | per_page: 100, 235 | page: 1, 236 | }); 237 | 238 | debugLog("listWorkflowRunArtifacts response:", response); 239 | 240 | if (response.status >= 400) { 241 | throw new Error("Unable to retrieve listWorkflowRunArtifacts"); 242 | } 243 | 244 | return response.data.artifacts; 245 | } 246 | 247 | /** 248 | * Lists the workflow run artifacts for a completed workflow 249 | * @param branch the branch name 250 | */ 251 | async findLatestWorkflowRunForBranch({ 252 | branch, 253 | }: { 254 | branch: string; 255 | }): Promise { 256 | 
const response = await this.client.rest.actions.listWorkflowRunsForRepo({ 257 | ...this.repo, 258 | branch, 259 | status: "success", 260 | per_page: 100, 261 | page: 1, 262 | }); 263 | 264 | debugLog("findLatestWorkflowRunForBranch response:", response); 265 | 266 | if (response.status >= 400) { 267 | throw new Error("Unable to findLatestWorkflowRunForBranch"); 268 | } 269 | 270 | return response.data.workflow_runs[0]; 271 | } 272 | 273 | /** 274 | * Downloads the artifact and returns a reference to the file 275 | * @param artifactId the artifact id to download 276 | */ 277 | async downloadWorkflowRunArtifact({ 278 | artifactId, 279 | }: { 280 | artifactId: number; 281 | }): Promise { 282 | const response = await this.client.rest.actions.downloadArtifact({ 283 | ...this.repo, 284 | artifact_id: artifactId, 285 | archive_format: "zip", 286 | }); 287 | 288 | debugLog("downloadWorkflowRunArtifact response:", response); 289 | 290 | const artifactZip = await cache.downloadTool(response.url); 291 | 292 | debugLog("downloadTool response:", artifactZip); 293 | 294 | const artifactPath = await cache.extractZip(artifactZip); 295 | 296 | debugLog("extractZip response:", artifactPath); 297 | 298 | for (const file of fs.readdirSync(artifactPath)) { 299 | const filePath = `${artifactPath}/${file}`; 300 | if (fs.existsSync(filePath)) { 301 | return filePath; 302 | } 303 | } 304 | 305 | return ""; 306 | } 307 | 308 | // --------------- RELEASE ASSET METHODS ------------------ 309 | 310 | /** 311 | * Uploads a release asset 312 | * @param release release object 313 | * @param fileName name of the asset 314 | * @param contents contents of the asset 315 | * @param contentType content type of the asset 316 | */ 317 | async uploadReleaseAsset({ 318 | release, 319 | assetName, 320 | contents, 321 | contentType, 322 | }: ReleaseProps & { 323 | assetName: string; 324 | contents: string; 325 | contentType?: string; 326 | }): Promise { 327 | await this.client.rest.repos.uploadReleaseAsset({ 
...this.repo,
      release_id: release.id,
      url: release.upload_url,
      name: assetName,
      data: contents,
      mediaType: contentType ? { format: contentType } : undefined,
    });
  }

  /**
   * Lists assets for a release, sorted by asset name
   */
  async listReleaseAssets({ release }: ReleaseProps): Promise {
    const response = await this.client.rest.repos.listReleaseAssets({
      ...this.repo,
      release_id: release.id,
    });
    if (response.status >= 400) {
      throw new Error("Bad response from listReleaseAssets");
    }

    debugLog("listReleaseAssets response:", response);

    const assets = response.data;
    return assets.sort((a, b) => a.name.localeCompare(b.name));
  }

  /**
   * Deletes a release asset
   * @param asset the asset to delete
   */
  async deleteReleaseAsset({
    asset,
  }: ReleaseProps & {
    asset: ReleaseAsset;
  }): Promise {
    await this.client.rest.repos.deleteReleaseAsset({
      ...this.repo,
      asset_id: asset.id,
    });
  }

  /**
   * Finds a release by tag name, falling back to a search of draft
   * releases when no published release matches.
   * @param tag the release tag to look up
   */
  async findRelease({ tag }: { tag: string }): Promise {
    core.debug(`Getting release by tag: ${tag}`);
    let release: Release | undefined;
    try {
      const response = await this.client.rest.repos.getReleaseByTag({
        ...this.repo,
        tag,
      });

      release = response.data as Release | undefined;
      debugLog(`getReleaseByTag response:`, release);
    } catch (e) {
      debugLog("Error while fetching release by tag name:", e);
    }

    if (!release) {
      core.debug(`No release found for ${tag}, looking for draft release...`);
      release = await this.findDraftRelease({ tag });
    }

    return release;
  }

  /**
   * Finds a draft release by tag name
   * @param tag release tag_name to search by
   */
  async findDraftRelease({
tag, 402 | }: { 403 | tag?: string; 404 | }): Promise { 405 | debugLog(`Getting draft release by tag: ${tag}`); 406 | try { 407 | const response = await this.client.rest.repos.listReleases({ 408 | ...this.repo, 409 | }); 410 | 411 | const release = (response.data as Release[]) 412 | .filter((r) => r.draft) 413 | .find((r) => r.tag_name === tag); 414 | 415 | debugLog(`listReleases filtered response:`, release); 416 | 417 | return release; 418 | } catch (e) { 419 | debugLog("Error while fetching draft release by tag name:", e); 420 | return undefined; 421 | } 422 | } 423 | 424 | // --------------- DEPENDENCY SNAPSHOT METHODS ------------------ 425 | 426 | /** 427 | * Posts a snapshot to the dependency submission api 428 | * @param snapshot 429 | */ 430 | async postDependencySnapshot(snapshot: DependencySnapshot) { 431 | const { repo } = github.context; 432 | const token = core.getInput("github-token"); 433 | 434 | try { 435 | const response = await this.client.request( 436 | `POST /repos/${repo.owner}/${repo.repo}/dependency-graph/snapshots`, 437 | { 438 | headers: { 439 | "content-type": "application/json", 440 | authorization: `token ${token}`, 441 | }, 442 | data: JSON.stringify(snapshot), 443 | } 444 | ); 445 | 446 | if (response.status >= 400) { 447 | core.warning( 448 | `Dependency snapshot upload failed: ${stringify(response)}` 449 | ); 450 | } else { 451 | debugLog(`Dependency snapshot upload successful:`, response); 452 | } 453 | } catch (e: any) { 454 | if ("response" in e) { 455 | e = e.response; 456 | } 457 | core.warning(`Error uploading depdendency snapshot: ${stringify(e)}`); 458 | } 459 | } 460 | } 461 | 462 | /** 463 | * Returns a GitHubClient 464 | * @param repo repository to use 465 | * @param githubToken authentication token 466 | */ 467 | export function getClient(repo: GithubRepo, githubToken: string): GithubClient { 468 | // This should be a token with access to your repository scoped in as a secret. 
469 | // The YML workflow will need to set myToken with the GitHub Secret Token 470 | // github-token: ${{ secrets.GITHUB_TOKEN }} 471 | // https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret 472 | const octokit = github.getOctokit(githubToken, { 473 | throttle: { 474 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 475 | // @ts-ignore 476 | onRateLimit: (retryAfter, options) => { 477 | core.warning( 478 | `Request quota exhausted for request ${options.method} ${options.url}` 479 | ); 480 | if (options.request.retryCount === 0) { 481 | // only retries once 482 | core.info(`Retrying after ${retryAfter} seconds!`); 483 | return true; 484 | } 485 | }, 486 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 487 | // @ts-ignore 488 | onAbuseLimit: (retryAfter, options) => { 489 | // does not retry, only logs a warning 490 | core.warning( 491 | `Abuse detected for request ${options.method} ${options.url}` 492 | ); 493 | }, 494 | }, 495 | }); 496 | 497 | return new GithubClient(octokit, repo); 498 | } 499 | -------------------------------------------------------------------------------- /src/github/SyftGithubAction.ts: -------------------------------------------------------------------------------- 1 | import * as core from "@actions/core"; 2 | import * as github from "@actions/github"; 3 | import * as cache from "@actions/tool-cache"; 4 | import { 5 | PullRequestEvent, 6 | Release, 7 | ReleaseEvent, 8 | } from "@octokit/webhooks-types"; 9 | import * as fs from "fs"; 10 | import os from "os"; 11 | import path from "path"; 12 | import stream from "stream"; 13 | import { SyftOptions } from "../Syft"; 14 | import { VERSION } from "../SyftVersion"; 15 | import { execute } from "./Executor"; 16 | import { 17 | DependencySnapshot, 18 | dashWrap, 19 | debugLog, 20 | getClient, 21 | } from "./GithubClient"; 22 | import { downloadSyftFromZip } from 
"./SyftDownloader"; 23 | import { stringify, stripEmojis } from "./Util"; 24 | 25 | export const SYFT_BINARY_NAME = "syft"; 26 | export const SYFT_VERSION = core.getInput("syft-version") || VERSION; 27 | 28 | const PRIOR_ARTIFACT_ENV_VAR = "ANCHORE_SBOM_ACTION_PRIOR_ARTIFACT"; 29 | 30 | const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "sbom-action-")); 31 | const githubDependencySnapshotFile = `${tempDir}/github.sbom.json`; 32 | 33 | const exeSuffix = process.platform == "win32" ? ".exe" : ""; 34 | 35 | /** 36 | * Tries to get a unique artifact name or otherwise as appropriate as possible 37 | */ 38 | export function getArtifactName(): string { 39 | const fileName = getArtifactNameInput(); 40 | 41 | // if there is an explicit filename just return it, this could cause issues 42 | // where earlier sboms are overwritten by later ones 43 | if (fileName) { 44 | return fileName; 45 | } 46 | 47 | const format = getSbomFormat(); 48 | let extension: string = format; 49 | switch (format) { 50 | case "spdx": 51 | case "spdx-tag-value": 52 | extension = "spdx"; 53 | break; 54 | case "spdx-json": 55 | extension = "spdx.json"; 56 | break; 57 | case "cyclonedx": 58 | case "cyclonedx-xml": 59 | extension = "cyclonedx.xml"; 60 | break; 61 | case "cyclonedx-json": 62 | extension = "cyclonedx.json"; 63 | break; 64 | case "json": 65 | extension = "syft.json"; 66 | break; 67 | } 68 | 69 | const imageName = core.getInput("image"); 70 | if (imageName) { 71 | const parts = imageName.split("/"); 72 | // remove the hostname 73 | if (parts.length > 2) { 74 | parts.splice(0, 1); 75 | } 76 | const prefix = parts.join("-").replace(/[^-a-zA-Z0-9]/g, "_"); 77 | return `${prefix}.${extension}`; 78 | } 79 | 80 | const { 81 | repo: { repo }, 82 | job, 83 | action, 84 | } = github.context; 85 | // when run without an id, we get various auto-generated names, like: 86 | // __self __self_2 __anchore_sbom-action __anchore_sbom-action_2 etc. 
87 | // so just keep the number at the end if there is one, otherwise 88 | // this will not match an id unless for some reason it starts with __ 89 | let stepName = action.replace(/__[-_a-z]+/, ""); 90 | if (stepName) { 91 | stepName = `-${stepName}`; 92 | } 93 | return `${repo}-${job}${stepName}.${extension}`; 94 | } 95 | 96 | /** 97 | * Returns the artifact-name input value 98 | */ 99 | function getArtifactNameInput() { 100 | return core.getInput("artifact-name"); 101 | } 102 | 103 | /** 104 | * Gets a reference to the syft command and executes the syft action 105 | * @param input syft input parameters 106 | * @param format syft output format 107 | * @param opts additional options 108 | */ 109 | async function executeSyft({ 110 | input, 111 | format, 112 | ...opts 113 | }: SyftOptions): Promise { 114 | let stdout = ""; 115 | 116 | const cmd = await getSyftCommand(); 117 | 118 | const env: { [key: string]: string } = { 119 | ...process.env, 120 | SYFT_CHECK_FOR_APP_UPDATE: "false", 121 | }; 122 | 123 | const registryUser = core.getInput("registry-username"); 124 | const registryPass = core.getInput("registry-password"); 125 | 126 | if (registryUser) { 127 | env.SYFT_REGISTRY_AUTH_USERNAME = registryUser; 128 | if (registryPass) { 129 | env.SYFT_REGISTRY_AUTH_PASSWORD = registryPass; 130 | } else { 131 | core.warning( 132 | "WARNING: registry-username specified without registry-password" 133 | ); 134 | } 135 | } 136 | 137 | // https://github.com/anchore/syft#configuration 138 | let args = ["scan"]; 139 | 140 | if (core.isDebug()) { 141 | args = [...args, "-vv"]; 142 | } 143 | 144 | if ("image" in input && input.image) { 145 | if (registryUser) { 146 | args = [...args, `registry:${input.image}`]; 147 | } else { 148 | args = [...args, `${input.image}`]; 149 | } 150 | } else if ("file" in input && input.file) { 151 | args = [...args, `file:${input.file}`]; 152 | } else if ("path" in input && input.path) { 153 | args = [...args, `dir:${input.path}`]; 154 | } else { 155 
| throw new Error("Invalid input, no image or path specified"); 156 | } 157 | 158 | args = [...args, "-o", format]; 159 | 160 | if (opts.uploadToDependencySnapshotAPI) { 161 | // generate github dependency format 162 | args = [...args, "-o", `github=${githubDependencySnapshotFile}`]; 163 | } 164 | 165 | if (opts.configFile) { 166 | args = [...args, "-c", opts.configFile]; 167 | } 168 | 169 | // Execute in a group so the syft output is collapsed in the GitHub log 170 | core.info(`[command]${cmd} ${args.join(" ")}`); 171 | 172 | // This /dev/null writable stream is required so the entire contents 173 | // of the SBOM is not written to the GitHub action log. the listener below 174 | // will actually capture the output 175 | const outStream = new stream.Writable({ 176 | write(buffer, encoding, next) { 177 | next(); 178 | }, 179 | }); 180 | 181 | const exitCode = await core.group("Executing Syft...", async () => 182 | execute(cmd, args, { 183 | env, 184 | outStream, 185 | listeners: { 186 | stdout(buffer) { 187 | stdout += buffer.toString(); 188 | }, 189 | stderr(buffer) { 190 | core.info(buffer.toString()); 191 | }, 192 | debug(message) { 193 | core.debug(message); 194 | }, 195 | }, 196 | }) 197 | ); 198 | 199 | if (exitCode > 0) { 200 | debugLog("Syft stdout:", stdout); 201 | throw new Error("An error occurred running Syft"); 202 | } else { 203 | return stdout; 204 | } 205 | } 206 | 207 | function isWindows(): boolean { 208 | return process.platform == "win32"; 209 | } 210 | 211 | async function downloadSyftWindowsWorkaround(version: string): Promise { 212 | const versionNoV = version.replace(/^v/, ""); 213 | const url = `https://github.com/anchore/syft/releases/download/${version}/syft_${versionNoV}_windows_amd64.zip`; 214 | core.info(`Downloading syft from ${url}`); 215 | const zipPath = await cache.downloadTool(url); 216 | const toolDir = await cache.extractZip(zipPath); 217 | return path.join(toolDir, `${SYFT_BINARY_NAME}${exeSuffix}`); 218 | } 219 | 220 | /** 221 
| * Downloads the appropriate Syft binary for the platform 222 | */ 223 | export async function downloadSyft(): Promise { 224 | const name = SYFT_BINARY_NAME; 225 | const version = SYFT_VERSION; 226 | if (isWindows()) { 227 | return downloadSyftWindowsWorkaround(version); 228 | } 229 | 230 | const url = `https://raw.githubusercontent.com/anchore/${name}/main/install.sh`; 231 | 232 | core.debug(`Installing ${name} ${version}`); 233 | 234 | // Download the installer, and run 235 | const installPath = await cache.downloadTool(url); 236 | 237 | const syftBinaryPath = `${installPath}_${name}`; 238 | 239 | await execute("sh", [installPath, "-d", "-b", syftBinaryPath, version]); 240 | 241 | return path.join(syftBinaryPath, name) + exeSuffix; 242 | } 243 | 244 | /** 245 | * Gets the Syft command to run via exec 246 | */ 247 | export async function getSyftCommand(): Promise { 248 | const name = SYFT_BINARY_NAME + exeSuffix; 249 | const version = SYFT_VERSION; 250 | 251 | const sourceSyft = await downloadSyftFromZip(version); 252 | if (sourceSyft) { 253 | core.info(`Using sourceSyft: '${sourceSyft}'`); 254 | return sourceSyft; 255 | } 256 | 257 | let syftPath = cache.find(name, version); 258 | if (!syftPath) { 259 | // Not found; download and install it; returns a path to the binary 260 | syftPath = await downloadSyft(); 261 | 262 | // Cache the downloaded file 263 | syftPath = await cache.cacheFile(syftPath, name, name, version); 264 | } 265 | 266 | core.debug(`Got Syft path: ${syftPath} binary at: ${syftPath}/${name}`); 267 | 268 | // Add tool to path for this and future actions to use 269 | core.addPath(syftPath); 270 | return `${syftPath}/${name}`; 271 | } 272 | 273 | /** 274 | * Returns the SBOM format as specified by the user, defaults to SPDX 275 | */ 276 | export function getSbomFormat(): SyftOptions["format"] { 277 | return (core.getInput("format") as SyftOptions["format"]) || "spdx-json"; 278 | } 279 | 280 | /** 281 | * Returns the SHA of the current commit, which 
will either be the head 282 | * of the pull request branch or the value of github.context.sha, depending 283 | * on the event type. 284 | */ 285 | export function getSha(): string { 286 | const pull_request_events = [ 287 | "pull_request", 288 | "pull_request_comment", 289 | "pull_request_review", 290 | "pull_request_review_comment", 291 | // Note that pull_request_target is omitted here. 292 | // That event runs in the context of the base commit of the PR, 293 | // so the snapshot should not be associated with the head commit. 294 | ]; 295 | if (pull_request_events.includes(github.context.eventName)) { 296 | const pr = (github.context.payload as PullRequestEvent).pull_request; 297 | return pr.head.sha; 298 | } else { 299 | return github.context.sha; 300 | } 301 | } 302 | 303 | /** 304 | * Uploads a SBOM as a workflow artifact 305 | * @param contents SBOM file contents 306 | */ 307 | export async function uploadSbomArtifact(contents: string): Promise { 308 | const { repo } = github.context; 309 | const client = getClient(repo, core.getInput("github-token")); 310 | 311 | const fileName = getArtifactName(); 312 | 313 | const filePath = `${tempDir}/${fileName}`; 314 | fs.writeFileSync(filePath, contents); 315 | 316 | const retentionDays = parseInt(core.getInput("upload-artifact-retention")); 317 | 318 | core.info(dashWrap("Uploading workflow artifacts")); 319 | core.info(filePath); 320 | 321 | await client.uploadWorkflowArtifact({ 322 | file: filePath, 323 | name: fileName, 324 | retention: retentionDays, 325 | }); 326 | } 327 | 328 | /** 329 | * Gets a boolean input value if supplied, otherwise returns the default 330 | * @param name name of the input 331 | * @param defaultValue default value to return if not set 332 | */ 333 | function getBooleanInput(name: string, defaultValue: boolean): boolean { 334 | const val = core.getInput(name); 335 | if (val === undefined || val === "") { 336 | return defaultValue; 337 | } 338 | return val.toLowerCase() === "true"; 339 | } 

/**
 * Optionally fetches the SBOM artifact for the PR's base branch so a
 * comparison against the current scan can be made. Only runs when the
 * `compare-pulls` input is true and the triggering event is `pull_request`.
 */
async function comparePullRequestTargetArtifact(): Promise<void> {
  const doCompare = getBooleanInput("compare-pulls", false);
  const { eventName, payload, repo } = github.context;
  if (doCompare && eventName === "pull_request") {
    const client = getClient(repo, core.getInput("github-token"));

    const pr = (payload as PullRequestEvent).pull_request;
    // Find the most recent workflow run on the PR base branch; its artifacts
    // are the candidates for the "before" SBOM.
    const branchWorkflow = await client.findLatestWorkflowRunForBranch({
      branch: pr.base.ref,
    });

    debugLog("Got branchWorkflow:", branchWorkflow);

    if (branchWorkflow) {
      const baseBranchArtifacts = await client.listWorkflowRunArtifacts({
        runId: branchWorkflow.id,
      });

      debugLog("Got baseBranchArtifacts:", baseBranchArtifacts);

      // Download any artifact whose name matches this run's artifact name;
      // the match must be exact because names encode repo/job/format.
      for (const artifact of baseBranchArtifacts) {
        if (artifact.name === getArtifactName()) {
          const baseArtifact = await client.downloadWorkflowRunArtifact({
            artifactId: artifact.id,
          });

          core.info(
            `Downloaded SBOM from ref '${pr.base.ref}' to ${baseArtifact}`
          );
        }
      }
    }
  }
}

// Whether the `dependency-snapshot` input requests upload to the GitHub
// Dependency Submission API (defaults to false).
function uploadToSnapshotAPI() {
  return getBooleanInput("dependency-snapshot", false);
}

/**
 * Runs the main SBOM action: executes Syft against the configured input
 * (path, file, or image), then optionally writes the SBOM to `output-file`
 * and uploads it as a workflow artifact.
 * @throws Error if Syft produced no output
 */
export async function runSyftAction(): Promise<void> {
  core.info(dashWrap("Running SBOM Action"));

  debugLog(`Got github context:`, github.context);

  const start = Date.now();

  const doUpload = getBooleanInput("upload-artifact", true);

  const output = await executeSyft({
    input: {
      path: core.getInput("path"),
      file: core.getInput("file"),
      image: core.getInput("image"),
    },
    format: getSbomFormat(),
    uploadToDependencySnapshotAPI: uploadToSnapshotAPI(),
    configFile: core.getInput("config"),
  });

  core.info(`SBOM scan completed in: ${(Date.now() - start) / 1000}s`);

  if (output) {
    await comparePullRequestTargetArtifact();

    // We may want to develop a supply chain during the build, this is one
    // potential way to do so:
    const priorArtifact = process.env[PRIOR_ARTIFACT_ENV_VAR];
    if (priorArtifact) {
      core.debug(`Prior artifact: ${priorArtifact}`);
    }

    const outputFile = core.getInput("output-file");
    if (outputFile) {
      fs.writeFileSync(outputFile, output);
    }

    if (doUpload) {
      await uploadSbomArtifact(output);

      // Record this artifact name so a later step in the same job can see
      // which SBOM was produced previously.
      core.exportVariable(PRIOR_ARTIFACT_ENV_VAR, getArtifactName());
    }
  } else {
    throw new Error(`No Syft output`);
  }
}

/**
 * Uploads the dependency snapshot produced by Syft to the GitHub Dependency
 * Submission API, filling in the job correlator, run id, sha, and ref.
 * No-op if the `dependency-snapshot` input is false or no snapshot file
 * was written.
 * (NOTE: the previous doc comment here was a copy-paste of the release-asset
 * function's description.)
 */
export async function uploadDependencySnapshot(): Promise<void> {
  if (!uploadToSnapshotAPI()) {
    return;
  }

  if (!fs.existsSync(githubDependencySnapshotFile)) {
    core.warning(
      `No dependency snapshot found at '${githubDependencySnapshotFile}'`
    );
    return;
  }
  const { workflow, job, runId, repo, ref } = github.context;
  const sha = getSha();
  const client = getClient(repo, core.getInput("github-token"));

  const snapshot = JSON.parse(
    fs.readFileSync(githubDependencySnapshotFile).toString("utf8")
  ) as DependencySnapshot;

  let correlator = `${workflow}_${job}`;
  // if running in a matrix build, it is not possible to determine a unique value,
  // so a user must explicitly specify the artifact-name input, there isn't any
  // other indicator of being run within a matrix build, so we must use that
  // here in order to properly correlate dependency snapshots
  const artifactInput = getArtifactNameInput();
  if (artifactInput) {
    correlator += `_${artifactInput}`;
  }

  // Need to add the job and repo details
  snapshot.job = {
    // An explicit `dependency-snapshot-correlator` input wins; emojis are
    // stripped because the API rejects them in correlators.
    correlator: stripEmojis(
      core.getInput("dependency-snapshot-correlator") || correlator
    ),
    id: `${runId}`,
  };
  snapshot.sha = sha;
  snapshot.ref = ref;

  core.info(
    `Uploading GitHub dependency snapshot from ${githubDependencySnapshotFile}`
  );
  debugLog("Snapshot:", snapshot);

  await client.postDependencySnapshot(snapshot);
}

/**
 * Attaches the SBOM assets to a release if run in release mode
 */
export async function attachReleaseAssets(): Promise<void> {
  const doRelease = getBooleanInput("upload-release-assets", true);

  if (!doRelease) {
    return;
  }

  debugLog("Got github context:", github.context);

  const { eventName, ref, payload, repo } = github.context;
  const client = getClient(repo, core.getInput("github-token"));

  let release: Release | undefined = undefined;

  // Try to detect a release
  if (eventName === "release") {
    // Obviously if this is run during a release
    release = (payload as ReleaseEvent).release;
    debugLog("Got releaseEvent:", release);
  } else {
    // We may have a tag-based workflow that creates releases or even drafts
    const releaseRefPrefix =
      core.getInput("release-ref-prefix") || "refs/tags/";
    const isRefPush = eventName === "push" && ref.startsWith(releaseRefPrefix);
    if (isRefPush) {
      // Look up the release (or draft) associated with the pushed tag.
      const tag = ref.substring(releaseRefPrefix.length);
      release = await client.findRelease({ tag });
      debugLog("Found release for ref push:", release);
    }
  }

  if (release) {
    // The default match is an exact artifact-name match; users may override
    // with a regex via `sbom-artifact-match`, e.g.: ^sbom.*\\.${format}$
    const sbomArtifactInput = core.getInput("sbom-artifact-match");
    const sbomArtifactPattern = sbomArtifactInput || `^${getArtifactName()}$`;
    const matcher = new RegExp(sbomArtifactPattern);

    const artifacts = await client.listCurrentWorkflowArtifacts();
    let matched = artifacts.filter((a) => {
      const matches = matcher.test(a.name);
      if (matches) {
        core.debug(`Found artifact: ${a.name}`);
      } else {
        core.debug(`Artifact: ${a.name} not matching ${sbomArtifactPattern}`);
      }
      return matches;
    });

    // We may have a release run based on a prior build from another workflow
    if (eventName === "release" && !matched.length) {
      core.info(
        "No artifacts found in this workflow. Searching for release artifacts from prior workflow..."
      );
      // Fall back to the most recent workflow run on the release's target
      // branch and search its artifacts instead.
      const latestRun = await client.findLatestWorkflowRunForBranch({
        branch: release.target_commitish,
      });

      debugLog("Got latest run for prior workflow", latestRun);

      if (latestRun) {
        const runArtifacts = await client.listWorkflowRunArtifacts({
          runId: latestRun.id,
        });

        matched = runArtifacts.filter((a) => {
          const matches = matcher.test(a.name);
          if (matches) {
            core.debug(`Found run artifact: ${a.name}`);
          } else {
            core.debug(
              `Run artifact: ${a.name} not matching ${sbomArtifactPattern}`
            );
          }
          return matches;
        });
      }
    }

    // Only warn when the user explicitly supplied a match pattern; an empty
    // default match is silently ignored.
    if (!matched.length && sbomArtifactInput) {
      core.warning(`WARNING: no SBOMs found matching ${sbomArtifactInput}`);
      return;
    }

    core.info(dashWrap(`Attaching SBOMs to release: '${release.tag_name}'`));
    for (const artifact of matched) {
      const file = await client.downloadWorkflowArtifact(artifact);

      core.info(file);
      const contents = fs.readFileSync(file);
      const assetName = path.basename(file);

      // Re-list assets each iteration so uploads from earlier iterations
      // are visible when checking for a name collision.
      const assets = await client.listReleaseAssets({
        release,
      });

      // Replace any existing asset of the same name so re-runs don't fail.
      const asset = assets.find((a) => a.name === assetName);
      if (asset) {
        await client.deleteReleaseAsset({
          release,
          asset,
        });
      }

      await client.uploadReleaseAsset({
        release,
        assetName,
        contents: contents.toString(),
        contentType: "text/plain",
      });
    }
  }
}

/**
 * Executes the provided callback and wraps any exceptions in a build failure
 * (via core.setFailed) rather than letting them propagate; returns the
 * callback's result, or undefined if it threw.
 */
export async function runAndFailBuildOnException<T>(
  fn: () => Promise<T>
): Promise<T | void> {
  try {
    return await fn();
  } catch (e) {
    if (e instanceof Error) {
      core.setFailed(e.message);
    } else if (e instanceof Object) {
      core.setFailed(`Action failed: ${stringify(e)}`);
    } else {
      core.setFailed(`An unknown error occurred: ${stringify(e)}`);
    }
  }
}