├── .gitignore
├── .github
├── workflows
│ ├── CODEOWNERS
│ ├── publish-oss-readme.yml
│ ├── publish-liquibase-secure-readme.yml
│ ├── trivy-scan-published-images.yml
│ └── trivy.yml
├── test
│ ├── scripts
│ │ ├── liquibase_version.sh
│ │ └── liquibase_command.sh
│ ├── databasechangelog.csv
│ ├── Dockerfile
│ ├── liquibase.properties
│ ├── changelog.sql
│ ├── liquibase-mssql.properties
│ └── example-changelog.xml
└── dependabot.yml
├── liquibase.docker.properties
├── examples
├── postgres
│ ├── Dockerfile.alpine.psql
│ └── Dockerfile.psql
├── docker-compose
│ ├── liquibase.properties
│ ├── changelog
│ │ ├── db.changelog-master.xml
│ │ ├── 002-insert-sample-data.xml
│ │ └── 001-create-users-table.xml
│ ├── docker-compose.yml
│ ├── docker-compose.local.yml
│ ├── docker-compose.secure.yml
│ └── README.md
├── aws
│ ├── Dockerfile.alpine.awscli
│ └── Dockerfile.awscli
├── mssql
│ ├── Dockerfile.sqlcmd
│ └── Dockerfile.alpine.sqlcmd
└── oracle
│ ├── Dockerfile.sqlplus
│ └── Dockerfile.alpine.sqlplus
├── pom.xml
├── scripts
├── check-file-exists.sh
├── lib
│ ├── vendor-severity.jq
│ └── vuln-filters.sh
├── save-grype-results.sh
├── convert-scan-results.sh
├── generate-dockerhub-matrix.sh
├── extract-nested-deps.sh
├── append-github-summary.sh
├── create-enhanced-report.sh
├── README.md
└── analyze-scan-results.sh
├── Dockerfile
├── .trivyignore
├── Dockerfile.alpine
├── DockerfileSecure
├── docker-entrypoint.sh
├── CLAUDE.md
├── SECURITY.md
├── scan-repo.sh
├── README-secure.md
├── LICENSE
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | my-app.iml
3 |
--------------------------------------------------------------------------------
/.github/workflows/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @mcred @jnewton03
--------------------------------------------------------------------------------
/liquibase.docker.properties:
--------------------------------------------------------------------------------
1 | liquibase.headless: true
2 |
--------------------------------------------------------------------------------
/.github/test/scripts/liquibase_version.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Print the installed Liquibase version (used by the image test harness).
printf '%s\n' "Checking liquibase version ..."
liquibase --version
--------------------------------------------------------------------------------
/.github/test/databasechangelog.csv:
--------------------------------------------------------------------------------
1 | "ID","AUTHOR","FILENAME","DATEEXECUTED","ORDEREXECUTED","EXECTYPE","MD5SUM","DESCRIPTION","COMMENTS","TAG","LIQUIBASE","CONTEXTS","LABELS","DEPLOYMENT_ID"
2 |
--------------------------------------------------------------------------------
/.github/test/Dockerfile:
--------------------------------------------------------------------------------
FROM liquibase:test-entrypoint

# Stage test scripts as root so they can be made executable, then drop
# back to the unprivileged liquibase user.
USER root

# COPY is preferred over ADD for local files/directories (no implicit
# archive auto-extraction or URL fetching).
COPY scripts /scripts
RUN chmod -R +x /scripts

USER liquibase

ENTRYPOINT ["/bin/bash"]
--------------------------------------------------------------------------------
/examples/postgres/Dockerfile.alpine.psql:
--------------------------------------------------------------------------------
# Extend the Alpine-based Liquibase image with the PostgreSQL client (psql).
FROM liquibase/liquibase:alpine

# Install PSQL
# Package installation requires root; the base image runs as 'liquibase'.
USER root

RUN apk --no-cache add postgresql-client

# Return to liquibase user space
USER liquibase
--------------------------------------------------------------------------------
/.github/test/scripts/liquibase_command.sh:
--------------------------------------------------------------------------------
#!/bin/bash -e
#
# Dispatch helper for the image test harness: runs the task named by $1.
# Supported tasks:
#   version - print the installed Liquibase version

echo "Processing liquibase tasks ..."
case "$1" in
  "version" )
    echo "Checking liquibase version ..."
    sh /scripts/liquibase_version.sh
    ;;
  * )
    # Surface typos/unsupported tasks instead of silently succeeding.
    echo "Unknown task: '$1'" >&2
    ;;
esac
--------------------------------------------------------------------------------
/examples/docker-compose/liquibase.properties:
--------------------------------------------------------------------------------
1 | changeLogFile=db.changelog-master.xml
2 | url=jdbc:postgresql://postgres:5432/liquibase_demo
3 | username=liquibase
4 | password=liquibase_password
5 | driver=org.postgresql.Driver
6 | searchPath=/liquibase/changelog
--------------------------------------------------------------------------------
/examples/postgres/Dockerfile.psql:
--------------------------------------------------------------------------------
# Extend the Debian/Ubuntu-based Liquibase image with the PostgreSQL client (psql).
FROM liquibase/liquibase:latest

# Install PSQL
# Package installation requires root; the base image runs as 'liquibase'.
USER root
RUN apt-get update -y && \
    apt-get install --no-install-recommends -y postgresql-client && \
    rm -rf /var/lib/apt/lists/*

# Return to liquibase user space
USER liquibase
--------------------------------------------------------------------------------
/examples/aws/Dockerfile.alpine.awscli:
--------------------------------------------------------------------------------
FROM liquibase/liquibase:latest-alpine

# Package installation requires root; the base image runs as 'liquibase'.
USER root

RUN apk add --no-cache wget unzip

# Download and run the AWS CLI v2 bundled installer, then remove both the
# archive and the extracted './aws' installer tree in the same layer so
# neither bloats the final image.
# NOTE(review): the bundled AWS CLI v2 binaries target glibc; confirm they
# run on this Alpine (musl) base or that compatibility libs are present.
RUN wget "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -O "awscliv2.zip" && \
    unzip awscliv2.zip && \
    ./aws/install && \
    rm -rf awscliv2.zip aws

USER liquibase
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |
4 | # Keep Dockerfile up to date, batching pull requests as updated
5 | - package-ecosystem: "docker"
6 | directory: "/"
7 | schedule:
8 | interval: "daily"
9 | - package-ecosystem: "github-actions"
10 | directory: "/"
11 | schedule:
12 | interval: "daily"
13 |
--------------------------------------------------------------------------------
/examples/aws/Dockerfile.awscli:
--------------------------------------------------------------------------------
FROM liquibase/liquibase:latest

# Package installation requires root; the base image runs as 'liquibase'.
USER root

# unzip is needed to unpack the AWS CLI bundled installer.
RUN apt-get update -y && \
    apt-get install --no-install-recommends -y unzip && \
    rm -rf /var/lib/apt/lists/*

# Download and run the AWS CLI v2 bundled installer, then remove both the
# archive and the extracted './aws' installer tree in the same layer so
# neither bloats the final image.
RUN wget "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -O "awscliv2.zip" && \
    unzip awscliv2.zip && \
    ./aws/install && \
    rm -rf awscliv2.zip aws

USER liquibase
--------------------------------------------------------------------------------
/.github/test/liquibase.properties:
--------------------------------------------------------------------------------
1 | #### Enter the Target database 'url' information ####
2 | liquibase.command.url: jdbc:h2:tcp://localhost:9090/mem:dev
3 | liquibase.command.username: dbuser
4 | liquibase.command.password: letmein
5 |
6 | #### Enter the Source Database 'referenceUrl' information ####
7 | liquibase.command.referenceUrl: jdbc:h2:tcp://localhost:9090/mem:integration
8 | liquibase.command.referenceUsername: dbuser
9 | liquibase.command.referencePassword: letmein
10 |
--------------------------------------------------------------------------------
/.github/test/changelog.sql:
--------------------------------------------------------------------------------
1 | USE master;
2 |
3 | -- Create a sample table in the dbo schema
4 | IF NOT EXISTS (SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = 'dbo' AND TABLE_NAME = 'SampleTable')
5 | BEGIN
6 | CREATE TABLE dbo.SampleTable (
7 | id INT PRIMARY KEY NOT NULL,
8 | name VARCHAR(255)
9 | -- Add more columns as needed
10 | );
11 |
12 | -- Example: Add an index on the 'name' column
13 | -- CREATE INDEX idx_name ON dbo.SampleTable (name);
14 | END;
15 |
--------------------------------------------------------------------------------
/.github/test/liquibase-mssql.properties:
--------------------------------------------------------------------------------
1 | # Enter the path for your changelog file.
2 | changeLogFile=changelog.sql
3 |
4 | # Enter the URL of the source database
5 | url=jdbc:sqlserver://mssql:1433;database=master;encrypt=false;
6 |
7 | # Enter the username for your source database.
username=SA
password=Letmein.8
10 |
11 | driver=com.microsoft.sqlserver.jdbc.SQLServerDriver
12 |
13 | #### Target Database Information ####
14 | ## The target database is the database you want to use to compare to your source database.
15 | logLevel: ERROR
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 |
5 | com.mycompany.app
6 | my-app
7 | 1.0-SNAPSHOT
8 |
9 |
10 | 1.7
11 | 1.7
12 |
13 |
14 |
--------------------------------------------------------------------------------
/examples/docker-compose/changelog/db.changelog-master.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/examples/mssql/Dockerfile.sqlcmd:
--------------------------------------------------------------------------------
# Extend the Debian/Ubuntu-based Liquibase image with Microsoft's sqlcmd tool.
FROM liquibase/liquibase:latest

# Install SQLCMD
# Package installation requires root; the base image runs as 'liquibase'.
USER root

# Register the Microsoft package repository and install mssql-tools.
# NOTE(review): 'apt-key add' is deprecated on modern Debian/Ubuntu; consider
# migrating to a keyring file referenced via 'signed-by=' in the sources list.
RUN wget -qO - https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
    wget -qO - https://packages.microsoft.com/config/ubuntu/20.04/prod.list | tee /etc/apt/sources.list.d/msprod.list && \
    apt-get update -y && \
    ACCEPT_EULA=Y apt-get install --no-install-recommends -y mssql-tools unixodbc-dev && \
    rm -rf /var/lib/apt/lists/*

# Set SQLCMD Env Vars
# Make sqlcmd/bcp available on PATH for all users.
ENV PATH="$PATH:/opt/mssql-tools/bin"

# Return to liquibase user space
USER liquibase
--------------------------------------------------------------------------------
/examples/oracle/Dockerfile.sqlplus:
--------------------------------------------------------------------------------
FROM liquibase/liquibase:latest

# Install SQLPlus
# Package installation requires root; the base image runs as 'liquibase'.
USER root
RUN apt-get update && apt-get -y install libaio1 libaio-dev && rm -rf /var/lib/apt/lists/*

# You must already have the sqlplus archives downloaded from Oracle
COPY instantclient-sqlplus-linux.x64-19.12.0.0.0dbru.zip instantclient-basic-linux.x64-19.12.0.0.0dbru.zip .

# Extract and delete the archives in a single layer: a separate 'RUN rm'
# layer cannot reclaim space already committed by an earlier RUN layer.
RUN mkdir /opt/oracle && \
    unzip -q "*.zip" -d /opt/oracle/ && \
    rm *.zip

# Set SQLPlus Env Vars
ENV PATH="$PATH:/opt/oracle/instantclient_19_12"
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/oracle/instantclient_19_12"

# Return to liquibase user space
USER liquibase
--------------------------------------------------------------------------------
/examples/docker-compose/changelog/002-insert-sample-data.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/examples/docker-compose/changelog/001-create-users-table.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/examples/docker-compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | # PostgreSQL database
3 | postgres:
4 | image: postgres:15-alpine
5 | container_name: liquibase_postgres
6 | environment:
7 | POSTGRES_DB: liquibase_demo
8 | POSTGRES_USER: liquibase
9 | POSTGRES_PASSWORD: liquibase_password
10 | ports:
11 | - "5432:5432"
12 | volumes:
13 | - postgres_data:/var/lib/postgresql/data
14 | healthcheck:
15 | test: ["CMD-SHELL", "pg_isready -U liquibase -d liquibase_demo"]
16 | interval: 10s
17 | timeout: 5s
18 | retries: 5
19 |
20 | # Liquibase service
21 | liquibase:
22 | image: liquibase:alpine
23 | container_name: liquibase_runner
24 | depends_on:
25 | postgres:
26 | condition: service_healthy
27 | volumes:
28 | - ./changelog:/liquibase/changelog
29 | - ./liquibase.properties:/liquibase/liquibase.properties
30 | environment:
31 | LIQUIBASE_COMMAND_URL: jdbc:postgresql://postgres:5432/liquibase_demo
32 | LIQUIBASE_COMMAND_USERNAME: liquibase
33 | LIQUIBASE_COMMAND_PASSWORD: liquibase_password
34 | command: ["--defaults-file=/liquibase/liquibase.properties", "update"]
35 |
36 | volumes:
37 | postgres_data:
--------------------------------------------------------------------------------
/examples/oracle/Dockerfile.alpine.sqlplus:
--------------------------------------------------------------------------------
FROM liquibase/liquibase:latest-alpine

# Oracle Instant Client locations. Uses the 'ENV key=value' form; the legacy
# space-separated 'ENV key value' form is deprecated in current Docker.
ENV ORACLE_BASE=/usr/lib/instantclient
ENV LD_LIBRARY_PATH=/usr/lib/instantclient
ENV TNS_ADMIN=/usr/lib/instantclient
ENV ORACLE_HOME=/usr/lib/instantclient

# Install SQLPlus
# Package installation requires root; the base image runs as 'liquibase'.
USER root

# Install Instantclient Basic Light Oracle and Dependencies
RUN apk --no-cache add libaio libnsl libc6-compat curl && \
    cd /tmp && \
    curl -o instantclient-basiclite.zip https://download.oracle.com/otn_software/linux/instantclient/instantclient-basiclite-linuxx64.zip -SL && \
    unzip instantclient-basiclite.zip && \
    mv instantclient*/ /usr/lib/instantclient && \
    rm instantclient-basiclite.zip && \
    ln -s /usr/lib/instantclient/libclntsh.so.19.1 /usr/lib/libclntsh.so && \
    ln -s /usr/lib/instantclient/libocci.so.19.1 /usr/lib/libocci.so && \
    ln -s /usr/lib/instantclient/libociicus.so /usr/lib/libociicus.so && \
    ln -s /usr/lib/instantclient/libnnz19.so /usr/lib/libnnz19.so && \
    ln -s /usr/lib/libnsl.so.2 /usr/lib/libnsl.so.1 && \
    ln -s /lib/libc.so.6 /usr/lib/libresolv.so.2 && \
    ln -s /lib64/ld-linux-x86-64.so.2 /usr/lib/ld-linux-x86-64.so.2

# Return to liquibase user space
USER liquibase
--------------------------------------------------------------------------------
/examples/docker-compose/docker-compose.local.yml:
--------------------------------------------------------------------------------
1 | services:
2 | # PostgreSQL database
3 | postgres:
4 | image: postgres:15-alpine
5 | container_name: liquibase_postgres
6 | environment:
7 | POSTGRES_DB: liquibase_demo
8 | POSTGRES_USER: liquibase
9 | POSTGRES_PASSWORD: liquibase_password
10 | ports:
11 | - "5432:5432"
12 | volumes:
13 | - postgres_data:/var/lib/postgresql/data
14 | healthcheck:
15 | test: ["CMD-SHELL", "pg_isready -U liquibase -d liquibase_demo"]
16 | interval: 10s
17 | timeout: 5s
18 | retries: 5
19 |
20 | # Liquibase service - built from local Dockerfile
21 | liquibase:
22 | build:
23 | context: ../../
24 | dockerfile: Dockerfile.alpine
25 | container_name: liquibase_runner
26 | depends_on:
27 | postgres:
28 | condition: service_healthy
29 | volumes:
30 | - ./changelog:/liquibase/changelog
31 | - ./liquibase.properties:/liquibase/liquibase.properties
32 | environment:
33 | LIQUIBASE_COMMAND_URL: jdbc:postgresql://postgres:5432/liquibase_demo
34 | LIQUIBASE_COMMAND_USERNAME: liquibase
35 | LIQUIBASE_COMMAND_PASSWORD: liquibase_password
36 | command: ["--defaults-file=/liquibase/liquibase.properties", "update"]
37 |
38 | volumes:
39 | postgres_data:
--------------------------------------------------------------------------------
/examples/docker-compose/docker-compose.secure.yml:
--------------------------------------------------------------------------------
1 | services:
2 | # PostgreSQL database
3 | postgres:
4 | image: postgres:15-alpine
5 | container_name: liquibase_postgres
6 | environment:
7 | POSTGRES_DB: liquibase_demo
8 | POSTGRES_USER: liquibase
9 | POSTGRES_PASSWORD: liquibase_password
10 | ports:
11 | - "5432:5432"
12 | volumes:
13 | - postgres_data:/var/lib/postgresql/data
14 | healthcheck:
15 | test: ["CMD-SHELL", "pg_isready -U liquibase -d liquibase_demo"]
16 | interval: 10s
17 | timeout: 5s
18 | retries: 5
19 |
20 | # Liquibase Secure service
21 | liquibase:
22 | image: liquibase/liquibase-secure:latest
23 | container_name: liquibase_secure_runner
24 | depends_on:
25 | postgres:
26 | condition: service_healthy
27 | volumes:
28 | - ./changelog:/liquibase/changelog
29 | - ./liquibase.properties:/liquibase/liquibase.properties
30 | environment:
31 | LIQUIBASE_COMMAND_URL: jdbc:postgresql://postgres:5432/liquibase_demo
32 | LIQUIBASE_COMMAND_USERNAME: liquibase
33 | LIQUIBASE_COMMAND_PASSWORD: liquibase_password
34 | LIQUIBASE_LICENSE_KEY: ${LIQUIBASE_LICENSE_KEY} # Set in your environment
35 | command: ["--defaults-file=/liquibase/liquibase.properties", "update"]
36 |
37 | volumes:
38 | postgres_data:
--------------------------------------------------------------------------------
/scripts/check-file-exists.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# check-file-exists.sh
#
# Utility script to check if a file exists and optionally set GitHub Actions output.
# This is commonly used to check for SARIF or JSON scan results before uploading.
#
# Usage:
#   check-file-exists.sh <filename> [output_name]
#
# Arguments:
#   filename:    Path to the file to check (required)
#   output_name: Name for GitHub Actions output variable (default: 'exists')
#
# Environment Variables:
#   GITHUB_OUTPUT: GitHub Actions output file path (optional)
#
# Outputs:
#   - GitHub Actions output: <output_name>=true or <output_name>=false
#   - Exit code 0 (always succeeds)

set -eu

# Arguments
FILENAME="${1:?Error: Filename required}"
OUTPUT_NAME="${2:-exists}"

echo "🔍 Checking if file exists: ${FILENAME}"

# Check if file exists (regular file only; a directory reports 'false')
if [ -f "$FILENAME" ]; then
  echo "✓ File exists: ${FILENAME}"
  EXISTS="true"
else
  echo "⚠ File not found: ${FILENAME}"
  EXISTS="false"
fi

# Set GitHub Actions output if available, otherwise just print the result
if [ -n "${GITHUB_OUTPUT:-}" ]; then
  echo "${OUTPUT_NAME}=${EXISTS}" >> "$GITHUB_OUTPUT"
  echo "✓ Set GitHub output: ${OUTPUT_NAME}=${EXISTS}"
else
  echo "Result: ${EXISTS}"
fi

exit 0
--------------------------------------------------------------------------------
/examples/mssql/Dockerfile.alpine.sqlcmd:
--------------------------------------------------------------------------------
FROM liquibase/liquibase:latest-alpine

# Install SQLCMD
# Package installation requires root; the base image runs as 'liquibase'.
USER root

RUN apk add --no-cache curl gnupg

# Download the desired package(s)
RUN curl -O https://download.microsoft.com/download/b/9/f/b9f3cce4-3925-46d4-9f46-da08869c6486/msodbcsql18_18.0.1.1-1_amd64.apk \
    && curl -O https://download.microsoft.com/download/b/9/f/b9f3cce4-3925-46d4-9f46-da08869c6486/mssql-tools18_18.0.1.1-1_amd64.apk

# Download detached signatures and verify both packages against Microsoft's
# key before installing them with --allow-untrusted below.
RUN curl -O https://download.microsoft.com/download/b/9/f/b9f3cce4-3925-46d4-9f46-da08869c6486/msodbcsql18_18.0.1.1-1_amd64.sig \
    && curl -O https://download.microsoft.com/download/b/9/f/b9f3cce4-3925-46d4-9f46-da08869c6486/mssql-tools18_18.0.1.1-1_amd64.sig

RUN curl https://packages.microsoft.com/keys/microsoft.asc | gpg --import - \
    && gpg --verify msodbcsql18_18.0.1.1-1_amd64.sig msodbcsql18_18.0.1.1-1_amd64.apk \
    && gpg --verify mssql-tools18_18.0.1.1-1_amd64.sig mssql-tools18_18.0.1.1-1_amd64.apk

# Install the package(s) and remove the downloaded archives
RUN apk add --allow-untrusted msodbcsql18_18.0.1.1-1_amd64.apk \
    && apk add --allow-untrusted mssql-tools18_18.0.1.1-1_amd64.apk \
    && rm -f msodbcsql18_18.0.1.1-1_amd64.apk mssql-tools18_18.0.1.1-1_amd64.apk

# Make sqlcmd/bcp available on PATH, mirroring the Debian-based
# Dockerfile.sqlcmd in this directory (mssql-tools18 installs under
# /opt/mssql-tools18 — TODO confirm path inside this package).
ENV PATH="$PATH:/opt/mssql-tools18/bin"

# Return to liquibase user space
USER liquibase
--------------------------------------------------------------------------------
/.github/workflows/publish-oss-readme.yml:
--------------------------------------------------------------------------------
1 | name: Publish Liquibase Community README to Docker Hub
2 |
3 | on:
4 | push:
5 | paths:
6 | - "README.md"
7 | branches:
8 | - main
9 | workflow_dispatch:
10 |
11 | permissions:
12 | id-token: write
13 |
14 | jobs:
  update-liquibase-community-readme:
16 | runs-on: ubuntu-latest
17 |
18 | steps:
19 | - name: Checkout repository
20 | uses: actions/checkout@v6
21 |
22 | - name: Configure AWS credentials for vault access
23 | uses: aws-actions/configure-aws-credentials@v5
24 | with:
25 | role-to-assume: ${{ secrets.LIQUIBASE_VAULT_OIDC_ROLE_ARN }}
26 | aws-region: us-east-1
27 |
28 | - name: Get secrets from vault
29 | id: vault-secrets
30 | uses: aws-actions/aws-secretsmanager-get-secrets@v2
31 | with:
32 | secret-ids: |
33 | ,/vault/liquibase
34 | parse-json-secrets: true
35 |
36 | - name: Decode DOCKERHUB_USERNAME
37 | run: |
38 | decoded_username=$(echo "${{ env.DOCKERHUB_USERNAME }}" | base64 -d)
39 | echo "DOCKERHUB_USERNAME_DECODED=$decoded_username" >> $GITHUB_ENV
40 |
41 | - name: Update Liquibase Community README on Docker Hub
42 | uses: peter-evans/dockerhub-description@v5
43 | with:
44 | username: ${{ env.DOCKERHUB_USERNAME_DECODED }}
45 | password: ${{ env.DOCKERHUB_UPDATE_README }}
46 | repository: liquibase/liquibase
47 | readme-filepath: README.md
48 | short-description: "Liquibase Community"
49 |
--------------------------------------------------------------------------------
/.github/workflows/publish-liquibase-secure-readme.yml:
--------------------------------------------------------------------------------
1 | name: Publish Liquibase Secure README to Docker Hub
2 |
3 | on:
4 | push:
5 | paths:
6 | - 'README-secure.md'
7 | branches:
8 | - main
9 | workflow_dispatch:
10 |
11 | permissions:
12 | contents: write
13 | id-token: write
14 |
15 | jobs:
16 | update-liquibase-secure-readme:
17 | runs-on: ubuntu-latest
18 |
19 | steps:
20 | - name: Checkout repository
21 | uses: actions/checkout@v6
22 |
23 | - name: Configure AWS credentials for vault access
24 | uses: aws-actions/configure-aws-credentials@v5
25 | with:
26 | role-to-assume: ${{ secrets.LIQUIBASE_VAULT_OIDC_ROLE_ARN }}
27 | aws-region: us-east-1
28 |
29 | - name: Get secrets from vault
30 | id: vault-secrets
31 | uses: aws-actions/aws-secretsmanager-get-secrets@v2
32 | with:
33 | secret-ids: |
34 | ,/vault/liquibase
35 | parse-json-secrets: true
36 |
37 | - name: Decode DOCKERHUB_USERNAME
38 | run: |
39 | decoded_username=$(echo "${{ env.DOCKERHUB_USERNAME }}" | base64 -d)
40 | echo "DOCKERHUB_USERNAME_DECODED=$decoded_username" >> $GITHUB_ENV
41 |
42 | - name: Update Liquibase Secure README on Docker Hub
43 | uses: peter-evans/dockerhub-description@v5
44 | with:
          username: ${{ env.DOCKERHUB_USERNAME_DECODED }}
46 | password: ${{ env.DOCKERHUB_UPDATE_README }}
47 | repository: liquibase/liquibase-secure
48 | readme-filepath: ./README-secure.md
49 | short-description: "Liquibase Secure"
50 |
--------------------------------------------------------------------------------
/scripts/lib/vendor-severity.jq:
--------------------------------------------------------------------------------
# vendor-severity.jq
# Shared jq functions for extracting vendor severity data.
#
# Usage: jq -L scripts/lib 'include "vendor-severity"; ...'

# Converts a numeric severity (1=LOW, 2=MEDIUM, 3=HIGH, 4=CRITICAL) to a
# single-letter code; any other value maps to "-".
def severity_letter:
  if . == 1 then "L"
  elif . == 2 then "M"
  elif . == 3 then "H"
  elif . == 4 then "C"
  else "-"
  end;

# Extracts the first available vendor severity as a [prefix, letter, url]
# array, checking vendors in priority order (nvd, ghsa, redhat, amazon,
# oracle-oval, bitnami, alma, rocky). $cve is the CVE identifier used to
# build the vendor-specific detail URL. Returns ["-", "-", ""] when no
# vendor data is present; bitnami has no public per-CVE URL, so its url
# slot is empty.
def vendor_severity($cve):
  if .VendorSeverity.nvd then
    ["nvd", (.VendorSeverity.nvd | severity_letter), "https://nvd.nist.gov/vuln/detail/\($cve)"]
  elif .VendorSeverity.ghsa then
    ["ghsa", (.VendorSeverity.ghsa | severity_letter), "https://github.com/advisories?query=\($cve)"]
  elif .VendorSeverity.redhat then
    ["rh", (.VendorSeverity.redhat | severity_letter), "https://access.redhat.com/security/cve/\($cve)"]
  elif .VendorSeverity.amazon then
    ["amz", (.VendorSeverity.amazon | severity_letter), "https://alas.aws.amazon.com/cve/html/\($cve).html"]
  elif .VendorSeverity["oracle-oval"] then
    ["ora", (.VendorSeverity["oracle-oval"] | severity_letter), "https://linux.oracle.com/cve/\($cve).html"]
  elif .VendorSeverity.bitnami then
    ["bit", (.VendorSeverity.bitnami | severity_letter), ""]
  elif .VendorSeverity.alma then
    ["alma", (.VendorSeverity.alma | severity_letter), "https://errata.almalinux.org/"]
  elif .VendorSeverity.rocky then
    ["rky", (.VendorSeverity.rocky | severity_letter), "https://errata.rockylinux.org/"]
  else
    ["-", "-", ""]
  end;

# Format vendor severity for markdown display.
# Returns "[prefix:letter](url)" if a url exists, "prefix:letter" if no url,
# or "-" if no vendor data.
def format_vendor($vendor):
  if $vendor[0] == "-" then
    "-"
  elif $vendor[2] != "" then
    "[\($vendor[0]):\($vendor[1])](\($vendor[2]))"
  else
    "\($vendor[0]):\($vendor[1])"
  end;
--------------------------------------------------------------------------------
/.github/test/example-changelog.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
11 | example-comment
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 | example-comment
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 | example-comment
42 |
43 |
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/scripts/save-grype-results.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# save-grype-results.sh
#
# Utility script to locate and save Grype scan results to a consistent filename.
# The anchore/scan-action outputs results to various locations depending on configuration.
#
# Usage:
#   save-grype-results.sh [output_filename]
#
# Arguments:
#   output_filename: Desired output filename (default: grype-results.sarif or grype-results.json)
#
# Environment Variables:
#   GRYPE_OUTPUT_FORMAT: Output format - 'sarif' or 'json' (default: sarif)
#
# Outputs:
#   - Grype results saved to specified filename
#   - Exit code 0 on success, 1 if no results found

set -e

# Configuration
OUTPUT_FORMAT="${GRYPE_OUTPUT_FORMAT:-sarif}"
OUTPUT_FILE="${1:-grype-results.${OUTPUT_FORMAT}}"

# Validate the requested format up front; the sarif/json handling below is
# otherwise identical, so a single code path is used for both.
case "$OUTPUT_FORMAT" in
  sarif) FORMAT_LABEL="SARIF" ;;
  json)  FORMAT_LABEL="JSON" ;;
  *)
    echo "❌ Unknown output format: $OUTPUT_FORMAT"
    echo "Supported formats: sarif, json"
    exit 1
    ;;
esac

echo "🔍 Locating Grype scan results (format: ${OUTPUT_FORMAT})..."

# Try the known output locations in order of likelihood.
for CANDIDATE in "results.${OUTPUT_FORMAT}" "anchore-scan-results.${OUTPUT_FORMAT}"; do
  if [ -f "$CANDIDATE" ]; then
    mv "$CANDIDATE" "$OUTPUT_FILE"
    echo "✓ Grype ${FORMAT_LABEL} results saved to $OUTPUT_FILE"
    exit 0
  fi
done

# The file may already be in place under the requested name.
if [ -f "$OUTPUT_FILE" ]; then
  echo "✓ Grype ${FORMAT_LABEL} results already at $OUTPUT_FILE"
  exit 0
fi

# Nothing found: list recently created files of the expected type to aid debugging.
echo "⚠ Grype ${FORMAT_LABEL} output file not found in expected locations"
echo "Checking for any ${FORMAT_LABEL} files created by Grype:"
find . -name "*.${OUTPUT_FORMAT}" -type f -mmin -5 | grep -v node_modules || true
exit 1
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# Single-stage image based on the Temurin 21 JRE (Ubuntu Noble)
FROM eclipse-temurin:21-jre-noble

# Create liquibase user
RUN groupadd --gid 1001 liquibase && \
    useradd --uid 1001 --gid liquibase --create-home --home-dir /liquibase liquibase && \
    chown liquibase:root /liquibase && \
    chmod g+rx /liquibase

# Download and install Liquibase
WORKDIR /liquibase

ARG LIQUIBASE_VERSION=5.0.1
# SHA-256 of the release tarball; verified below before extraction.
ARG LB_SHA256=3ae11ccdcd4c080e421e5fd043bdbd624d56fcfc9b294d5d9d898cb8b074e449

RUN wget -q -O liquibase-${LIQUIBASE_VERSION}.tar.gz "https://package.liquibase.com/downloads/dockerhub/official/liquibase-${LIQUIBASE_VERSION}.tar.gz" && \
    echo "$LB_SHA256 *liquibase-${LIQUIBASE_VERSION}.tar.gz" | sha256sum -c - && \
    tar -xzf liquibase-${LIQUIBASE_VERSION}.tar.gz && \
    rm liquibase-${LIQUIBASE_VERSION}.tar.gz && \
    ln -s /liquibase/liquibase /usr/local/bin/liquibase && \
    ln -s /liquibase/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh && \
    liquibase --version

ARG LPM_VERSION=0.2.17
# Separate checksums for the amd64 and arm64 lpm archives.
ARG LPM_SHA256=f58e69ec338f5ab6abb60af5a03d9151bf17ae569662e54001963b61a5ac02d7
ARG LPM_SHA256_ARM=020a2ccd0d9a63b97de6109aee7558946f16fa55cdebb34218676c54c0cf7464

# Add metadata labels
LABEL org.opencontainers.image.description="Liquibase Container Image"
LABEL org.opencontainers.image.licenses="FSL-1.1-ALv2"
LABEL org.opencontainers.image.vendor="Liquibase"
LABEL org.opencontainers.image.version="${LIQUIBASE_VERSION}"
LABEL org.opencontainers.image.documentation="https://docs.liquibase.com"

# Download and Install lpm (Liquibase Package Manager): select the
# architecture-specific archive and checksum, verify, unzip into
# /liquibase/bin, and purge unzip again so it does not remain in the image.
RUN apt-get update && \
    apt-get -yqq install unzip --no-install-recommends && \
    rm -rf /var/lib/apt/lists/* && \
    mkdir /liquibase/bin && \
    arch="$(dpkg --print-architecture)" && \
    case "$arch" in \
      amd64) DOWNLOAD_ARCH="" ;; \
      arm64) DOWNLOAD_ARCH="-arm64" && LPM_SHA256=$LPM_SHA256_ARM ;; \
      *) echo >&2 "error: unsupported architecture '$arch'" && exit 1 ;; \
    esac && wget -q -O lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip "https://github.com/liquibase/liquibase-package-manager/releases/download/v${LPM_VERSION}/lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip" && \
    echo "$LPM_SHA256 *lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip" | sha256sum -c - && \
    unzip lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip -d bin/ && \
    rm lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip && \
    apt-get purge -y --auto-remove unzip && \
    ln -s /liquibase/bin/lpm /usr/local/bin/lpm && \
    lpm --version

# Set LIQUIBASE_HOME environment variable
ENV LIQUIBASE_HOME=/liquibase
# Marker which indicates this is a Liquibase docker container
ENV DOCKER_LIQUIBASE=true

COPY docker-entrypoint.sh ./
COPY liquibase.docker.properties ./

# Set user and group
USER liquibase:liquibase

ENTRYPOINT ["/liquibase/docker-entrypoint.sh"]
CMD ["--help"]
--------------------------------------------------------------------------------
/.trivyignore:
--------------------------------------------------------------------------------
1 | # .trivyignore - Trivy vulnerability scanner suppressions
2 | # This file contains CVEs that are false positives or accepted risks
3 |
4 | # CVE-2025-59250 - Microsoft SQL Server JDBC Driver
5 | # Status: FALSE POSITIVE
6 | #
7 | # Description:
8 | # Trivy flags mssql-jdbc 12.10.2.jre8 as vulnerable to CVE-2025-59250, but this
9 | # version actually CONTAINS THE FIX for this vulnerability.
10 | #
11 | # Evidence:
12 | # - Liquibase Secure 5.0.2 upgraded mssql-jdbc from 12.10.1.jre8 to 12.10.2.jre8
13 | # - This upgrade explicitly addressed CVE-2025-59250 (see DAT-21214)
# - Microsoft released mssql-jdbc 12.10.2.jre8 on October 13, 2025 as a security update
15 | # - The vulnerability affects versions <12.10.2.jre8
16 | # - Version 12.10.2.jre8 contains the fix
17 | #
18 | # Root Cause of False Positive:
19 | # Trivy's vulnerability database only lists JRE11 variants as fixed versions:
20 | # (10.2.4.jre11, 11.2.4.jre11, 12.2.1.jre11, 12.6.5.jre11, 12.8.2.jre11,
21 | # 12.10.2.jre11, 13.2.1.jre11)
22 | # The database doesn't recognize that the JRE8 variant (12.10.2.jre8) is also patched.
23 | #
24 | # References:
25 | # - Liquibase Secure 5.0.2 changelog
26 | # - Jira ticket: DAT-21214 "Bump mssql driver versions to address CVE-2025-59250"
27 | # - GitHub PR: https://github.com/liquibase/liquibase-pro/pull/2976
28 | # - Microsoft JDBC Driver release notes:
29 | # https://learn.microsoft.com/en-us/sql/connect/jdbc/release-notes-for-the-jdbc-driver
30 | #
31 | # Date Added: 2025-11-20
32 | # Added By: DevOps Team
33 | # Review Date: 2026-02-20 (review when Trivy database is updated)
34 | CVE-2025-59250 exp:2026-02-20
35 |
36 | # GraalVM Python/setuptools - FALSE POSITIVES
37 | # Status: FALSE POSITIVE
38 | #
39 | # Description:
40 | # Trivy Deep Scan finds setuptools vulnerabilities in Python packages embedded
41 | # within GraalVM extensions (org.graalvm.python.vfs). These are false positives.
42 | #
43 | # Technical Justification:
44 | # GraalVM Python is used in liquibase-checks for custom validation scripts.
45 | # While network access is enabled for the Python runtime, setuptools package
46 | # management functions are NEVER invoked at runtime:
47 | #
48 | # - CVE-2022-40897 (ReDoS): Requires setuptools to parse HTML during package lookup.
49 | # No package lookups occur - all packages are pre-installed at build time via Maven.
50 | #
51 | # - CVE-2024-6345 (Command injection): Requires setuptools to process URLs.
52 | # setuptools download/install functions are never called at runtime.
53 | #
54 | # - CVE-2025-47273 (Path traversal): Requires setuptools to process package inputs.
55 | # No package installation or processing occurs at runtime.
56 | #
57 | # setuptools is bundled as a transitive dependency (standard Python tooling) but
58 | # its package management functionality is never used. Package installation occurs
59 | # only during Maven build via graalpy-maven-plugin, not at runtime.
60 | #
61 | # Source: Code review of liquibase-checks PythonScriptExecutor.java
62 | # Date Added: 2025-12-04
63 | # Added By: DevOps Team
64 | # Review Date: 2026-06-04
65 | CVE-2022-40897 exp:2026-06-04
66 | CVE-2024-6345 exp:2026-06-04
67 | CVE-2025-47273 exp:2026-06-04
68 |
--------------------------------------------------------------------------------
/Dockerfile.alpine:
--------------------------------------------------------------------------------
# Liquibase Community image — Alpine variant.
# (Single-stage build: there is only one FROM stage despite the historical
# "multi-stage" comment this line replaces.)
FROM alpine:3.23

# Create an unprivileged liquibase user/group (uid/gid 1001) with /liquibase as
# home; group read/execute on the home dir lets arbitrary-uid runtimes
# (e.g. OpenShift) still traverse the install directory.
RUN addgroup --gid 1001 liquibase && \
    adduser --disabled-password --uid 1001 --ingroup liquibase --home /liquibase liquibase && \
    chown liquibase:root /liquibase && \
    chmod g+rx /liquibase

# Install smaller JRE, if available and acceptable
RUN apk add --no-cache openjdk21-jre-headless bash

WORKDIR /liquibase

# Liquibase release to install and the sha256 of its release tarball.
ARG LIQUIBASE_VERSION=5.0.1
ARG LB_SHA256=3ae11ccdcd4c080e421e5fd043bdbd624d56fcfc9b294d5d9d898cb8b074e449

# Download, verify, extract. wget is installed into a virtual package
# (.fetch-deps) so it can be removed again in the same layer. Note the
# docker-entrypoint.sh symlink target does not exist yet — it dangles until
# the COPY near the end of this file provides the script.
RUN set -x && \
    apk add --no-cache --virtual .fetch-deps wget && \
    wget -q -O liquibase-${LIQUIBASE_VERSION}.tar.gz "https://github.com/liquibase/liquibase/releases/download/v${LIQUIBASE_VERSION}/liquibase-${LIQUIBASE_VERSION}.tar.gz" && \
    echo "$LB_SHA256 *liquibase-${LIQUIBASE_VERSION}.tar.gz" | sha256sum -c - && \
    tar -xzf liquibase-${LIQUIBASE_VERSION}.tar.gz && \
    rm liquibase-${LIQUIBASE_VERSION}.tar.gz && \
    apk del --no-network .fetch-deps && \
    ln -s /liquibase/liquibase /usr/local/bin/liquibase && \
    ln -s /liquibase/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh && \
    liquibase --version

# lpm (Liquibase Package Manager) version and per-architecture zip checksums.
ARG LPM_VERSION=0.2.17
ARG LPM_SHA256=f58e69ec338f5ab6abb60af5a03d9151bf17ae569662e54001963b61a5ac02d7
ARG LPM_SHA256_ARM=020a2ccd0d9a63b97de6109aee7558946f16fa55cdebb34218676c54c0cf7464

# Add metadata labels
LABEL org.opencontainers.image.description="Liquibase Container Image (Alpine)"
LABEL org.opencontainers.image.licenses="FSL-1.1-ALv2"
LABEL org.opencontainers.image.vendor="Liquibase"
LABEL org.opencontainers.image.version="${LIQUIBASE_VERSION}"
LABEL org.opencontainers.image.documentation="https://docs.liquibase.com"

# Download and Install lpm. Only x86_64 and aarch64 builds are published; the
# case statement selects the download suffix and matching checksum and fails
# the build on any other architecture.
RUN mkdir /liquibase/bin && \
    apk add --no-cache --virtual .fetch-deps wget unzip && \
    arch="$(apk --print-arch)" && \
    case "$arch" in \
    x86_64) DOWNLOAD_ARCH="" ;; \
    aarch64) DOWNLOAD_ARCH="-arm64" && LPM_SHA256=$LPM_SHA256_ARM ;; \
    *) echo >&2 "error: unsupported architecture '$arch'" && exit 1 ;; \
    esac && wget -q -O lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip "https://github.com/liquibase/liquibase-package-manager/releases/download/v${LPM_VERSION}/lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip" && \
    echo "$LPM_SHA256 *lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip" | sha256sum -c - && \
    unzip lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip -d bin/ && \
    rm lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip && \
    apk del --no-network .fetch-deps && \
    ln -s /liquibase/bin/lpm /usr/local/bin/lpm && \
    lpm --version

# Set LIQUIBASE_HOME environment variable
ENV LIQUIBASE_HOME=/liquibase
# Marker which indicates this is a Liquibase docker container
ENV DOCKER_LIQUIBASE=true

COPY docker-entrypoint.sh ./
COPY liquibase.docker.properties ./

# Drop privileges: everything from here on (and at runtime) runs as liquibase.
USER liquibase:liquibase

ENTRYPOINT ["/liquibase/docker-entrypoint.sh"]
CMD ["--help"]
69 |
--------------------------------------------------------------------------------
/DockerfileSecure:
--------------------------------------------------------------------------------
# Liquibase Secure image (single-stage build on Eclipse Temurin 21 JRE).
# The previous "Builder Stage" comment was inaccurate — there is only one stage.
FROM eclipse-temurin:21-jre-noble

# Create an unprivileged liquibase user/group (uid/gid 1001) with /liquibase as
# home; group read/execute on the home dir lets arbitrary-uid runtimes
# (e.g. OpenShift) still traverse the install directory.
RUN groupadd --gid 1001 liquibase && \
    useradd --uid 1001 --gid liquibase --create-home --home-dir /liquibase liquibase && \
    chown liquibase:root /liquibase && \
    chmod g+rx /liquibase

# Download and install Liquibase
WORKDIR /liquibase

# Liquibase Secure release to install and the sha256 of its release tarball.
ARG LIQUIBASE_SECURE_VERSION=5.0.3
ARG LB_SECURE_SHA256=274b84056a8350ec25fbcf35410385bc0451f3e986ee5f8ec523b1ec9c1f4fcf

# Add metadata labels
LABEL org.opencontainers.image.description="Liquibase Secure Container Image"
LABEL org.opencontainers.image.licenses="LicenseRef-Liquibase-EULA"
LABEL org.opencontainers.image.licenses.url="https://www.liquibase.com/eula"
LABEL org.opencontainers.image.vendor="Liquibase"
LABEL org.opencontainers.image.version="${LIQUIBASE_SECURE_VERSION}"
LABEL org.opencontainers.image.documentation="https://docs.liquibase.com"

# Fetch the release tarball, verify its checksum, extract it, and expose the
# CLI and entrypoint on PATH. (A duplicated "Download and install Liquibase" /
# WORKDIR /liquibase pair that previously sat here was redundant — WORKDIR is
# already /liquibase — and has been removed.) Note the docker-entrypoint.sh
# symlink target dangles until the COPY near the end of this file provides it.
RUN wget -q -O liquibase-secure-${LIQUIBASE_SECURE_VERSION}.tar.gz "https://repo.liquibase.com/releases/secure/${LIQUIBASE_SECURE_VERSION}/liquibase-secure-${LIQUIBASE_SECURE_VERSION}.tar.gz" && \
    echo "$LB_SECURE_SHA256 *liquibase-secure-${LIQUIBASE_SECURE_VERSION}.tar.gz" | sha256sum -c - && \
    tar -xzf liquibase-secure-${LIQUIBASE_SECURE_VERSION}.tar.gz && \
    rm liquibase-secure-${LIQUIBASE_SECURE_VERSION}.tar.gz && \
    ln -s /liquibase/liquibase /usr/local/bin/liquibase && \
    ln -s /liquibase/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh && \
    liquibase --version

# lpm (Liquibase Package Manager) version and per-architecture zip checksums.
ARG LPM_VERSION=0.2.17
ARG LPM_SHA256=f58e69ec338f5ab6abb60af5a03d9151bf17ae569662e54001963b61a5ac02d7
ARG LPM_SHA256_ARM=020a2ccd0d9a63b97de6109aee7558946f16fa55cdebb34218676c54c0cf7464

# Download and Install lpm. unzip is installed only for this step and purged in
# the same layer; the case statement maps the dpkg architecture to the published
# download suffix and checksum and fails the build on unsupported architectures.
RUN apt-get update && \
    apt-get -yqq install unzip --no-install-recommends && \
    rm -rf /var/lib/apt/lists/* && \
    mkdir /liquibase/bin && \
    arch="$(dpkg --print-architecture)" && \
    case "$arch" in \
    amd64) DOWNLOAD_ARCH="" ;; \
    arm64) DOWNLOAD_ARCH="-arm64" && LPM_SHA256=$LPM_SHA256_ARM ;; \
    *) echo >&2 "error: unsupported architecture '$arch'" && exit 1 ;; \
    esac && wget -q -O lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip "https://github.com/liquibase/liquibase-package-manager/releases/download/v${LPM_VERSION}/lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip" && \
    echo "$LPM_SHA256 *lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip" | sha256sum -c - && \
    unzip lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip -d bin/ && \
    rm lpm-${LPM_VERSION}-linux${DOWNLOAD_ARCH}.zip && \
    apt-get purge -y --auto-remove unzip && \
    ln -s /liquibase/bin/lpm /usr/local/bin/lpm && \
    lpm --version

# Set LIQUIBASE_HOME environment variable
ENV LIQUIBASE_HOME=/liquibase
# Marker which indicates this is a Liquibase docker container
ENV DOCKER_LIQUIBASE=true

COPY docker-entrypoint.sh ./
COPY liquibase.docker.properties ./

# Set user and group
USER liquibase:liquibase

ENTRYPOINT ["/liquibase/docker-entrypoint.sh"]
CMD ["--help"]
70 |
--------------------------------------------------------------------------------
/scripts/convert-scan-results.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# convert-scan-results.sh
#
# Converts Trivy JSON scan results to SARIF format and analyzes vulnerability counts.
# Requires Trivy CLI to be installed.
#
# Usage:
#   convert-scan-results.sh
#
# Expected Input Files:
#   - trivy-surface.json: Trivy surface scan results (optional)
#   - trivy-deep.json: Trivy deep scan results (optional)
#   - grype-results.json: Grype JSON results (optional)
#
# Outputs:
#   - trivy-surface.sarif: Converted SARIF format
#   - trivy-deep.sarif: Converted SARIF format
#   - Environment variables: surface_vulns, deep_vulns, grype_vulns, total_vulns
#   - Exit code 0 (always succeeds to allow workflow to continue)

set +e  # Don't fail immediately

# count_findings FILE FILTER
# Print the number of findings FILTER selects in FILE, or 0 when the file is
# missing/unparseable. Centralizes the jq counting previously repeated three
# times and guarantees the counters are never empty strings, which keeps the
# arithmetic and [ -gt ] tests below well-formed.
count_findings() {
  jq "$2" "$1" 2>/dev/null || echo 0
}

echo "🔍 Converting scan results to SARIF format..."
echo ""
echo "Available scan result files:"
ls -lh *.sarif *.json 2>/dev/null || echo "No scan result files found"
echo ""

# Initialize counters
surface_vulns=0
deep_vulns=0
grype_vulns=0

# Convert Trivy surface scan results
if [ -f trivy-surface.json ]; then
  trivy convert --format sarif --output trivy-surface.sarif trivy-surface.json
  surface_vulns=$(count_findings trivy-surface.json '[.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL")] | length')
  echo "✓ Trivy Surface Scan: $surface_vulns HIGH/CRITICAL vulnerabilities"
else
  echo "⚠ Trivy Surface Scan: JSON file not found"
fi

# Convert Trivy deep scan results
if [ -f trivy-deep.json ]; then
  trivy convert --format sarif --output trivy-deep.sarif trivy-deep.json
  deep_vulns=$(count_findings trivy-deep.json '[.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL")] | length')
  echo "✓ Trivy Deep Scan: $deep_vulns HIGH/CRITICAL vulnerabilities"
else
  echo "⚠ Trivy Deep Scan: JSON file not found"
fi

# Process Grype results (Grype capitalizes severities differently from Trivy)
if [ -f grype-results.json ]; then
  grype_vulns=$(count_findings grype-results.json '[.matches[]? | select(.vulnerability.severity == "High" or .vulnerability.severity == "Critical")] | length')
  echo "✓ Grype SBOM Scan: $grype_vulns HIGH/CRITICAL vulnerabilities"
else
  echo "⚠ Grype SBOM Scan: JSON file not found (scan may have failed or SBOM was empty)"
fi

total_vulns=$((surface_vulns + deep_vulns + grype_vulns))
echo ""
echo "📊 Total HIGH/CRITICAL vulnerabilities found: $total_vulns"

# Print detailed table if vulnerabilities found.
# Counters are quoted so [ -gt ] cannot see a word-split/empty operand.
if [ "$total_vulns" -gt 0 ]; then
  echo ""
  echo "==== Trivy Surface Scan Vulnerabilities ===="
  if [ -f trivy-surface.json ] && [ "$surface_vulns" -gt 0 ]; then
    trivy convert --format table trivy-surface.json
  fi

  echo ""
  echo "==== Trivy Deep Scan Vulnerabilities (Nested JARs) ===="
  if [ -f trivy-deep.json ] && [ "$deep_vulns" -gt 0 ]; then
    trivy convert --format table trivy-deep.json
  fi
fi

# Export to GitHub Actions environment if available (single grouped append)
if [ -n "${GITHUB_ENV:-}" ]; then
  {
    echo "surface_vulns=$surface_vulns"
    echo "deep_vulns=$deep_vulns"
    echo "grype_vulns=$grype_vulns"
    echo "total_vulns=$total_vulns"
  } >> "$GITHUB_ENV"
fi

echo "✅ Conversion complete"
exit 0
91 |
--------------------------------------------------------------------------------
/examples/docker-compose/README.md:
--------------------------------------------------------------------------------
1 | # Docker Compose Example
2 |
3 | This example demonstrates how to use Liquibase with Docker Compose to manage database changes alongside a PostgreSQL database.
4 |
5 | ## Prerequisites
6 |
7 | - Docker and Docker Compose installed
8 | - Basic understanding of Liquibase and database migrations
9 |
10 | ## Quick Start
11 |
12 | ### Option 1: Using Published Image (Recommended for End Users)
13 |
14 | 1. **Start the services:**
15 | ```bash
16 | docker-compose up
17 | ```
18 |
19 | ### Option 2: Building from Local Dockerfile (For Development/Testing)
20 |
21 | 1. **Start the services with local build:**
22 | ```bash
23 | docker-compose -f docker-compose.local.yml up --build
24 | ```
25 |
26 | 2. **Verify the migration:**
27 | The Liquibase service will automatically run the `update` command after PostgreSQL is ready. Check the logs to see the migration results:
28 | ```bash
29 | docker-compose logs liquibase
30 | ```
31 |
32 | 3. **Connect to the database to verify:**
33 | ```bash
34 | docker-compose exec postgres psql -U liquibase -d liquibase_demo -c "SELECT * FROM users;"
35 | ```
36 |
37 | 4. **Stop the services:**
38 | ```bash
39 | docker-compose down
40 | ```
41 |
42 | ## What This Example Does
43 |
44 | - **PostgreSQL**: Runs a PostgreSQL 15 Alpine container with a database named `liquibase_demo`
45 | - **Liquibase**: Uses the official Alpine Liquibase image to run database migrations
46 | - **Sample Migration**: Creates a `users` table and inserts sample data
47 | - **Health Checks**: Ensures PostgreSQL is ready before running Liquibase migrations
48 |
49 | ## File Structure
50 |
51 | ```
52 | docker-compose/
53 | ├── docker-compose.yml # Docker Compose with published image
54 | ├── docker-compose.local.yml # Docker Compose with local build
55 | ├── liquibase.properties # Liquibase configuration
56 | ├── changelog/
57 | │ ├── db.changelog-master.xml # Master changelog file
58 | │ ├── 001-create-users-table.xml
59 | │ └── 002-insert-sample-data.xml
60 | └── README.md # This file
61 | ```
62 |
63 | ## Configuration
64 |
65 | ### Environment Variables
66 |
67 | The example uses environment variables for database connection:
68 | - `LIQUIBASE_COMMAND_URL`: Database connection URL
69 | - `LIQUIBASE_COMMAND_USERNAME`: Database username
70 | - `LIQUIBASE_COMMAND_PASSWORD`: Database password
71 |
72 | ### Volumes
73 |
74 | - `./changelog:/liquibase/changelog`: Mounts local changelog files
75 | - `./liquibase.properties:/liquibase/liquibase.properties`: Mounts configuration file
76 | - `postgres_data`: Persists PostgreSQL data
77 |
78 | ## Running Other Liquibase Commands
79 |
80 | To run other Liquibase commands, you can override the default command:
81 |
82 | ```bash
83 | # Generate SQL for review
84 | docker-compose run --rm liquibase --defaults-file=/liquibase/liquibase.properties update-sql
85 |
86 | # Rollback last changeset
87 | docker-compose run --rm liquibase --defaults-file=/liquibase/liquibase.properties rollback-count 1
88 |
89 | # Check status
90 | docker-compose run --rm liquibase --defaults-file=/liquibase/liquibase.properties status
91 | ```
92 |
93 | ## Customization
94 |
95 | To adapt this example for your use case:
96 |
97 | 1. **Change Database**: Modify the `postgres` service in `docker-compose.yml`
98 | 2. **Update Connection**: Modify `liquibase.properties` with your database details
99 | 3. **Add Your Migrations**: Replace the sample changelog files with your own
100 | 4. **Environment**: Adjust environment variables as needed
--------------------------------------------------------------------------------
/scripts/generate-dockerhub-matrix.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# generate-dockerhub-matrix.sh
#
# Generates a JSON matrix of Docker images and tags to scan from Docker Hub.
# Fetches recent tags for both liquibase/liquibase and liquibase/liquibase-secure.
#
# Usage:
#   generate-dockerhub-matrix.sh [max_tags]
#
# Arguments:
#   max_tags: Maximum number of tags to scan per image (default: 10)
#
# Environment Variables:
#   MAX_TAGS: Maximum tags per image (overrides argument)
#
# Outputs:
#   - JSON matrix written to stdout and $GITHUB_OUTPUT if available
#   - Format: {"include":[{"image":"...","tag":"...","published":"..."},...]}
#   - published: ISO 8601 timestamp of when the tag was last updated

set -e

# Configuration: MAX_TAGS env var wins over the positional argument, which
# wins over the default of 10.
MAX_TAGS="${MAX_TAGS:-${1:-10}}"

echo "Generating matrix for scanning with max $MAX_TAGS tags per image..." >&2

# The include array is assembled by string concatenation rather than jq. This
# is safe only because every accepted tag matches TAG_REGEX below (digits and
# dots) and the dates come back from Docker Hub as ISO 8601 — neither needs
# JSON escaping.
MATRIX_INCLUDE="["
FIRST=true

for IMAGE in "liquibase/liquibase" "liquibase/liquibase-secure"; do
    echo "Getting tags for $IMAGE..." >&2
    REPO=$(basename "$IMAGE")
    TAGS=""
    URL="https://hub.docker.com/v2/namespaces/liquibase/repositories/${REPO}/tags?page_size=100"

    # Walk Docker Hub's paginated tag listing; each response carries the next
    # page URL in .next (JSON null on the last page, which jq -r prints as the
    # literal string "null" — hence the break condition at the bottom).
    while [ -n "$URL" ]; do
        RESPONSE=$(curl -s "$URL")

        # Only include semantic version tags (e.g., 5.0.1, 4.28)
        # Format: tag|last_updated (pipe-separated to preserve dates through filtering)
        # Match semver followed by pipe delimiter (the line continues with |last_updated)
        TAG_REGEX='^[0-9]+\.[0-9]+(\.[0-9]+)?(\||$)'
        NEW_TAGS=$(echo "$RESPONSE" | jq -r '.results[] | select(.tag_status == "active") | "\(.name)|\(.last_updated)"' | grep -E "$TAG_REGEX" || true)
        # Merge with tags accumulated from earlier pages: sort by the tag field
        # only (-t'|' -k1), version-aware (-V), dropping duplicates (-u).
        TAGS=$(echo -e "$TAGS\n$NEW_TAGS" | sort -t'|' -k1 -Vu)

        # Filter out minor version tags if we have the full version
        # e.g., if we have 4.28.0, skip 4.28
        # Preserves the |last_updated suffix through filtering
        # Note: Uses GNU awk match() with capture groups (Ubuntu default, not BSD awk)
        # First pass records every x.y.z seen; END pass drops any bare x.y whose
        # x.y.0 exists.
        TAGS=$(echo "$TAGS" | awk -F'|' '
            {
                tag = $1
                date = $2
                tags[NR] = $0
                tag_only[NR] = tag
                if (match(tag, /^([0-9]+)\.([0-9]+)\.([0-9]+)$/, m)) {
                    full = m[1] "." m[2] "." m[3]
                    has_full[full] = 1
                }
            }
            END {
                for (i = 1; i <= NR; i++) {
                    tag = tag_only[i]
                    if (match(tag, /^([0-9]+)\.([0-9]+)$/, m)) {
                        short = m[1] "." m[2] ".0"
                        if (has_full[short]) continue
                    }
                    print tags[i]
                }
            }
        ')

        # Get next page URL
        URL=$(echo "$RESPONSE" | jq -r '.next')
        [ "$URL" = "null" ] && break
    done

    # Get most recent tags (reverse sort and take first N)
    TAGS=$(echo "$TAGS" | tac | head -n "$MAX_TAGS")

    # Build matrix JSON
    # Each line is in format: tag|last_updated
    # The [ -n "$tag" ] guard also skips the blank line introduced when $TAGS
    # starts empty on the first page merge.
    while IFS='|' read -r tag published; do
        if [ -n "$tag" ]; then
            # Escape any special characters in the date string for JSON
            published="${published:-unknown}"
            if [ "$FIRST" = true ]; then
                MATRIX_INCLUDE="${MATRIX_INCLUDE}{\"image\":\"$IMAGE\",\"tag\":\"$tag\",\"published\":\"$published\"}"
                FIRST=false
            else
                MATRIX_INCLUDE="${MATRIX_INCLUDE},{\"image\":\"$IMAGE\",\"tag\":\"$tag\",\"published\":\"$published\"}"
            fi
        fi
    done <<< "$TAGS"
done

MATRIX_INCLUDE="${MATRIX_INCLUDE}]"
MATRIX="{\"include\":$MATRIX_INCLUDE}"

echo "Generated matrix: $MATRIX" >&2

# Output to GitHub Actions if running in CI
if [ -n "${GITHUB_OUTPUT:-}" ]; then
    echo "matrix=$MATRIX" >> "$GITHUB_OUTPUT"
fi

# Always output to stdout for testing/debugging
echo "$MATRIX"
111 |
--------------------------------------------------------------------------------
/docker-entrypoint.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# docker-entrypoint.sh — entrypoint for the Liquibase container images.
#
# Flow (as implemented below):
#   1. Optionally installs the MySQL driver via lpm.
#   2. If $1 is a real OS command (and not history/init/liquibase), execs it.
#   3. Otherwise runs /liquibase/liquibase, possibly cd-ing into the mounted
#      /liquibase/changelog directory and injecting --search-path and
#      --defaultsFile unless the caller already supplied their own.
set -e

# NOTE(review): any non-empty value — including "false" or "0" — triggers the
# install; callers must leave INSTALL_MYSQL unset to skip it. Confirm intended.
if [[ "$INSTALL_MYSQL" ]]; then
  lpm add mysql --global
fi

if [[ "$1" != "history" ]] && [[ "$1" != "init" ]] && [[ "$1" != "liquibase" ]] && type "$1" > /dev/null 2>&1; then
  ## First argument is an actual OS command (except if the command is history, init, or liquibase). Run it
  exec "$@"
else
  # If first argument is 'liquibase', remove it since we'll be calling /liquibase/liquibase anyway
  if [ "$1" = "liquibase" ]; then
    shift
  fi

  # Check if changelog directory exists (common mount point) and change to it
  # This makes Docker behavior match CLI behavior for relative paths
  # Allow SHOULD_CHANGE_DIR to be set via environment variable to override automatic detection
  if [ -z "$SHOULD_CHANGE_DIR" ]; then
    # Check if we should change directory based on relative paths being used
    SHOULD_CHANGE_DIR=false

    # Only change directory if changelog directory is mounted AND we detect relative paths
    if [ -d "/liquibase/changelog" ]; then
      # Check if the changelog directory appears to be a mount point (has files or is writable)
      if [ "$(ls -A /liquibase/changelog 2>/dev/null)" ] || touch /liquibase/changelog/.test 2>/dev/null; then
        # Remove test file if created
        rm -f /liquibase/changelog/.test 2>/dev/null

        # Check if any arguments contain relative paths (not starting with / or containing :/ for URLs)
        for arg in "$@"; do
          # Convert argument to lowercase for case-insensitive matching
          lower_arg=$(echo "$arg" | tr '[:upper:]' '[:lower:]')
          # Match any --*file= argument (e.g., --changelog-file, --flow-file, --output-file, etc.)
          case "$lower_arg" in
            --*file=*)
              value="${arg#*=}" # Use original arg to preserve case in the value
              # If the value doesn't start with / and doesn't contain :/ (for URLs), it's likely a relative path
              if [[ "$value" != /* && "$value" != *://* && "$value" != "" ]]; then
                SHOULD_CHANGE_DIR=true
                break
              fi
              ;;
          esac
        done

        # Also check environment variables ending in _FILE (e.g., LIQUIBASE_COMMAND_CHANGELOG_FILE)
        if [ "$SHOULD_CHANGE_DIR" = false ]; then
          # Process substitution (< <(env)) keeps this while loop in the current
          # shell, so the SHOULD_CHANGE_DIR assignment survives the loop —
          # `env | while ...` would run in a subshell and lose it.
          while IFS='=' read -r name value; do
            # Match any *_FILE environment variable
            case "$name" in
              *_FILE)
                # If the value doesn't start with / and doesn't contain :/ (for URLs), it's likely a relative path
                if [[ "$value" != /* && "$value" != *://* && "$value" != "" ]]; then
                  SHOULD_CHANGE_DIR=true
                  break
                fi
                ;;
            esac
          done < <(env)
        fi
      fi
    fi
  fi

  # Change directory to the changelog directory if it's mounted
  # This ensures all relative paths and generated files end up in the mounted volume
  if [ -d "/liquibase/changelog" ] && [ "$SHOULD_CHANGE_DIR" = true ]; then
    cd /liquibase/changelog
  fi

  # Set search path based on whether we changed directories
  # BUT: Only inject our default search path if user hasn't already provided one
  # This respects Liquibase's configuration precedence: CLI args > env vars > properties files
  EXTRA_SEARCH_PATH=""

  # Check if user already provided a search path
  USER_HAS_SEARCH_PATH=false
  for arg in "$@"; do
    # Check for both --search-path and --searchPath (case variations)
    case "$arg" in
      --search-path=*|--searchPath=*)
        USER_HAS_SEARCH_PATH=true
        break
        ;;
    esac
  done

  # Also check if user provided LIQUIBASE_SEARCH_PATH environment variable
  if [ -n "$LIQUIBASE_SEARCH_PATH" ]; then
    USER_HAS_SEARCH_PATH=true
  fi

  # Only inject our default search path if user didn't provide one
  if [ "$USER_HAS_SEARCH_PATH" = false ]; then
    if [ "$SHOULD_CHANGE_DIR" = true ]; then
      # If we changed to changelog directory, search current directory
      EXTRA_SEARCH_PATH="--search-path=."
    else
      # If we stayed in /liquibase and changelog directory exists, add it to search path
      # This helps when using absolute paths like /liquibase/changelog/file.xml
      if [ -d "/liquibase/changelog" ]; then
        EXTRA_SEARCH_PATH="--search-path=/liquibase/changelog"
      fi
    fi
  fi

  # Substring match on "$*" — a defaults-file/version flag anywhere in the
  # argument list suppresses the injected defaultsFile.
  if [[ "$*" == *--defaultsFile* ]] || [[ "$*" == *--defaults-file* ]] || [[ "$*" == *--version* ]]; then
    ## Just run as-is, but add search path if needed
    if [ -n "$EXTRA_SEARCH_PATH" ]; then
      exec /liquibase/liquibase "$EXTRA_SEARCH_PATH" "$@"
    else
      exec /liquibase/liquibase "$@"
    fi
  else
    ## Include standard defaultsFile and search path
    if [ -n "$EXTRA_SEARCH_PATH" ]; then
      exec /liquibase/liquibase "--defaultsFile=/liquibase/liquibase.docker.properties" "$EXTRA_SEARCH_PATH" "$@"
    else
      exec /liquibase/liquibase "--defaultsFile=/liquibase/liquibase.docker.properties" "$@"
    fi
  fi
fi
--------------------------------------------------------------------------------
/.github/workflows/trivy-scan-published-images.yml:
--------------------------------------------------------------------------------
1 | # Vulnerability scanning for published Docker images using Trivy
2 | # This workflow scans the published images on Docker Hub for vulnerabilities.
3 | # It generates a matrix of image/tag combinations and runs Trivy scans on them.
4 | #
5 | # NOTE: SARIF results are NOT uploaded to GitHub Security tab to avoid stale alerts.
6 | # Published image vulnerabilities are tracked via workflow artifacts and GitHub Actions summary.
7 | # Only the main branch workflow (trivy.yml) populates the Security tab.
8 |
9 | name: Published Images Vulnerability Scanning
10 |
11 | on:
12 | workflow_dispatch:
13 | inputs:
14 | max_tags_to_scan:
15 | description: "Maximum number of published tags to scan"
16 | required: false
17 | default: "20"
18 | schedule:
19 | # Run Monday-Friday at 10 AM UTC (published image monitoring)
20 | - cron: "0 10 * * 1-5" # Run every weekday at 10am UTC
21 |
22 | permissions:
23 | contents: read
24 | actions: read # Required for private repositories to get Action run status
25 |
26 | jobs:
27 | generate-matrix:
28 | name: Generate Scan Matrix
29 | runs-on: ubuntu-22.04
30 | outputs:
31 | matrix: ${{ steps.set-matrix.outputs.matrix }}
    env:
      # Manual dispatch uses the input (whose default is "20"); scheduled runs
      # have no inputs, so the expression deliberately falls back to 10.
      MAX_TAGS: ${{ github.event.inputs.max_tags_to_scan || '10' }}
34 | steps:
35 | - name: Checkout code
36 | uses: actions/checkout@v6
37 |
38 | - name: Generate matrix for all image/tag combinations
39 | id: set-matrix
40 | run: |
41 | scripts/generate-dockerhub-matrix.sh
42 |
43 | trivy-scan:
44 | name: ${{ matrix.image }}:${{ matrix.tag }}
45 | runs-on: ubuntu-22.04
46 | needs: generate-matrix
47 | permissions:
48 | contents: read
49 | actions: read
50 | strategy:
51 | fail-fast: false
52 | matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
53 | steps:
54 | - name: Checkout repository
55 | uses: actions/checkout@v6
56 |
57 | - name: Pull published image
58 | run: |
59 | echo "Pulling ${{ matrix.image }}:${{ matrix.tag }}..."
60 | docker pull ${{ matrix.image }}:${{ matrix.tag }}
61 |
62 | - name: Extract nested JARs and Python packages for deep scanning
63 | run: |
64 | scripts/extract-nested-deps.sh ${{ matrix.image }}:${{ matrix.tag }}
65 |
66 | - name: Generate SBOM with Syft
67 | uses: anchore/sbom-action@v0
68 | with:
69 | image: "${{ matrix.image }}:${{ matrix.tag }}"
70 | format: "spdx-json"
71 | output-file: "sbom.spdx.json"
72 | upload-artifact: false
73 |
74 | - name: Run Trivy vulnerability scanner (Surface Scan)
75 | id: trivy_scan
76 | uses: aquasecurity/trivy-action@0.33.1
77 | with:
78 | image-ref: "${{ matrix.image }}:${{ matrix.tag }}"
79 | vuln-type: "os,library"
80 | scanners: "vuln"
81 | format: "json"
82 | output: "trivy-surface.json"
83 | severity: "HIGH,CRITICAL"
84 | exit-code: "0"
85 | continue-on-error: true
86 |
87 | - name: Run Trivy scanner on extracted nested JARs (Deep Scan)
88 | id: trivy_deep
89 | uses: aquasecurity/trivy-action@0.33.1
90 | with:
91 | scan-type: "rootfs"
92 | scan-ref: "/tmp/extracted-deps"
93 | vuln-type: "library"
94 | format: "json"
95 | output: "trivy-deep.json"
96 | severity: "HIGH,CRITICAL"
97 | exit-code: "0"
98 | continue-on-error: true
99 |
100 | - name: Run Grype scanner on SBOM
101 | id: grype_scan
102 | uses: anchore/scan-action@v7
103 | with:
104 | sbom: "sbom.spdx.json"
105 | fail-build: false
106 | severity-cutoff: high
107 | output-format: json
108 | output-file: "grype-results.json"
109 | continue-on-error: true
110 |
111 | - name: Save Grype results to file
112 | if: always()
113 | env:
114 | GRYPE_OUTPUT_FORMAT: json
115 | run: |
116 | scripts/save-grype-results.sh
117 |
118 | # trivy convert steps assume trivy CLI is available, therefore install the CLI before using those commands across steps.
119 | - name: Install Trivy CLI
120 | run: |
121 | curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin
122 | trivy --version
123 |
124 | - name: Convert JSON to SARIF and analyze results
125 | if: always()
126 | run: |
127 | scripts/convert-scan-results.sh
128 |
129 | - name: Create enhanced vulnerability report with parent JAR mapping
130 | if: always()
131 | run: |
132 | scripts/create-enhanced-report.sh ${{ matrix.image }} ${{ matrix.tag }} "${{ matrix.published }}"
133 |
134 | - name: Upload enhanced vulnerability report
135 | if: always()
136 | uses: actions/upload-artifact@v5
137 | with:
138 | name: vulnerability-report-${{ matrix.image == 'liquibase/liquibase-secure' && 'secure' || 'community' }}-${{ matrix.tag }}
139 | path: vulnerability-report-enhanced.md
140 | retention-days: 20
141 |
142 | - name: Append summary to GitHub Actions summary
143 | if: always()
144 | run: |
145 | scripts/append-github-summary.sh ${{ matrix.image }} ${{ matrix.tag }} "${{ matrix.published }}"
146 |
--------------------------------------------------------------------------------
/CLAUDE.md:
--------------------------------------------------------------------------------
1 | # CLAUDE.md
2 |
3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4 |
5 | ## Repository Overview
6 |
7 | This is the official Liquibase Docker image repository that builds and publishes Docker images for both Liquibase Community and Liquibase Secure editions. The repository contains:
8 |
9 | - **Dockerfile**: Standard Liquibase Community image
10 | - **DockerfileSecure**: Liquibase Secure image (enterprise features)
11 | - **Dockerfile.alpine**: Alpine Linux variant (lightweight)
12 | - **Examples**: Database-specific extensions (AWS CLI, SQL Server, PostgreSQL, Oracle)
13 | - **Docker Compose**: Complete example with PostgreSQL
14 |
15 | ## Image Publishing
16 |
17 | Images are published to multiple registries:
18 |
19 | - Docker Hub: `liquibase/liquibase` (Community) and `liquibase/liquibase-secure` (Secure)
20 | - GitHub Container Registry: `ghcr.io/liquibase/liquibase*`
21 | - Amazon ECR Public: `public.ecr.aws/liquibase/liquibase*`
22 |
23 | ### Release Tagging Strategy
24 |
25 | The repository uses distinct tagging strategies for Community and SECURE releases to prevent conflicts:
26 |
27 | **Community Releases** (from `liquibase-release` workflow):
28 |
29 | - Git tag: `v{version}` (e.g., `v5.0.1`)
30 | - GitHub Release: `v{version}`
31 | - Docker images: `liquibase/liquibase:{version}`, `liquibase/liquibase:{major.minor}`, `liquibase/liquibase:latest`
32 |
33 | **SECURE Releases** (from `liquibase-secure-release` workflow):
34 |
35 | - Git tag: `v{version}-SECURE` (e.g., `v5.0.1-SECURE`)
36 | - GitHub Release: `v{version}-SECURE`
37 | - Docker images: `liquibase/liquibase-secure:{version}`, `liquibase/liquibase-secure:{major.minor}`, `liquibase/liquibase-secure:latest`
38 |
39 | This ensures that Community and SECURE releases maintain separate version histories and do not create conflicting tags in Git or GitHub releases.
40 |
41 | ## Common Development Commands
42 |
43 | ### Building Images
44 |
45 | ```bash
46 | # Build Community image
47 | docker build -f Dockerfile -t liquibase/liquibase:latest .
48 |
49 | # Build Secure image
50 | docker build -f DockerfileSecure -t liquibase/liquibase-secure:latest .
51 |
52 | # Build Alpine variant
53 | docker build -f Dockerfile.alpine -t liquibase/liquibase:latest-alpine .
54 | ```
55 |
56 | ### Testing Images
57 |
58 | ```bash
59 | # Test Community image
60 | docker run --rm liquibase/liquibase:latest --version
61 |
62 | # Test Secure image (requires license)
63 | docker run --rm -e LIQUIBASE_LICENSE_KEY="your-key" liquibase/liquibase-secure:latest --version
64 |
65 | # Run with example changelog
66 | docker run --rm -v $(pwd)/examples/docker-compose/changelog:/liquibase/changelog liquibase/liquibase:latest --changelog-file=db.changelog-master.xml validate
67 | ```
68 |
69 | ### Docker Compose Example
70 |
71 | ```bash
72 | # Run complete example with PostgreSQL
73 | cd examples/docker-compose
74 | docker-compose up
75 |
76 | # Use local build for testing
77 | docker-compose -f docker-compose.local.yml up --build
78 |
79 | # Run with Liquibase Secure
80 | docker-compose -f docker-compose.secure.yml up
81 | ```
82 |
83 | ## Architecture
84 |
85 | ### Base Image Structure
86 |
87 | - **Base**: Eclipse Temurin JRE 21 (Jammy)
88 | - **User**: Non-root `liquibase` user (UID/GID 1001)
89 | - **Working Directory**: `/liquibase`
90 | - **Entrypoint**: `docker-entrypoint.sh` with automatic MySQL driver installation
91 |
92 | ### Key Components
93 |
94 | - **Liquibase**: Database migration tool (Community: GitHub releases, Secure: repo.liquibase.com)
95 | - **LPM**: Liquibase Package Manager for extensions
96 | - **Default Config**: `liquibase.docker.properties` sets headless mode
97 | - **CLI-Docker Compatibility**: Auto-detects `/liquibase/changelog` mount and changes working directory for consistent behavior
98 |
99 | ### Version Management
100 |
101 | - Liquibase versions are controlled via `LIQUIBASE_VERSION` (Community) and `LIQUIBASE_PRO_VERSION` (Secure) ARGs
102 | - SHA256 checksums are validated for security
103 | - LPM version is specified via `LPM_VERSION` ARG
104 |
105 | ## Environment Variables
106 |
107 | ### Database Connection
108 |
109 | - `LIQUIBASE_COMMAND_URL`: JDBC connection string
110 | - `LIQUIBASE_COMMAND_USERNAME`: Database username
111 | - `LIQUIBASE_COMMAND_PASSWORD`: Database password
112 | - `LIQUIBASE_COMMAND_CHANGELOG_FILE`: Path to changelog file
113 |
114 | ### Secure Features (DockerfileSecure only)
115 |
116 | - `LIQUIBASE_LICENSE_KEY`: Required for Secure features
117 | - `LIQUIBASE_PRO_POLICY_CHECKS_ENABLED`: Enable policy checks
118 | - `LIQUIBASE_PRO_QUALITY_CHECKS_ENABLED`: Enable quality checks
119 |
120 | ### Special Options
121 |
122 | - `INSTALL_MYSQL=true`: Auto-install MySQL driver at runtime
123 | - `LIQUIBASE_HOME=/liquibase`: Liquibase installation directory
124 | - `DOCKER_LIQUIBASE=true`: Marker for Docker environment
125 | - `SHOULD_CHANGE_DIR`: Override automatic working directory detection (true/false). When set, prevents the entrypoint from guessing whether to change to `/liquibase/changelog` directory based on command arguments
126 |
127 | ## Extending Images
128 |
129 | ### Adding Database Drivers
130 |
131 | ```dockerfile
132 | FROM liquibase/liquibase:latest
133 | RUN lpm add mysql --global
134 | ```
135 |
136 | ### Using Liquibase Secure
137 |
138 | ```dockerfile
139 | FROM liquibase/liquibase-secure:latest
140 | ENV LIQUIBASE_LICENSE_KEY=your-license-key
141 | ```
142 |
143 | ### Adding Tools (e.g., AWS CLI)
144 |
145 | ```dockerfile
146 | FROM liquibase/liquibase:latest
147 | USER root
148 | RUN apt-get update && apt-get install -y awscli
149 | USER liquibase
150 | ```
151 |
152 | ## Maven Configuration
153 |
154 | The `pom.xml` is minimal and used for build processes. The repository primarily uses Docker for builds rather than Maven.
155 |
--------------------------------------------------------------------------------
/scripts/lib/vuln-filters.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # lib/vuln-filters.sh
4 | #
5 | # Shared jq filters and functions for vulnerability scanning scripts.
6 | # Source this file to use the functions in other scripts.
7 | #
8 | # Usage:
9 | # source "$(dirname "$0")/lib/vuln-filters.sh"
10 | #
11 | # Available variables:
12 | # JQ_VENDOR_FILTER - jq filter that extracts vendor severity as $vendor array
13 | #
14 | # Available functions:
15 | # jq_trivy_surface_vulns - Process Trivy surface scan vulnerabilities
16 | # jq_trivy_deep_vulns - Process Trivy deep scan vulnerabilities (with target)
17 | # jq_trivy_python_vulns - Process Python package vulnerabilities
18 | # format_vendor_display - Format vendor severity for markdown display
19 | # format_fix_indicator - Format Y/N as emoji
20 |
21 | # Vendor severity jq filter - extracts [prefix, letter, url] array
22 | # This is the core filter used by all Trivy vulnerability processing.
23 | # Requires $cve to be defined in jq context before this filter.
24 | # Result is stored in $vendor variable: [prefix, severity_letter, url]
25 | #
26 | # Supported vendors (in priority order):
27 | # nvd, ghsa, redhat, amazon, oracle-oval, bitnami, alma, rocky
28 | #
29 | # Usage in jq:
30 | # .VulnerabilityID as $cve | '"${JQ_VENDOR_FILTER}"' | ... use $vendor ...
31 | #
# The numeric-severity → letter mapping is defined once as a jq function
# (sev_letter) instead of being repeated inline for every vendor, so adding a
# vendor or changing the mapping is a one-line edit.
readonly JQ_VENDOR_FILTER='
def sev_letter: if . == 1 then "L" elif . == 2 then "M" elif . == 3 then "H" elif . == 4 then "C" else "-" end;
(if .VendorSeverity.nvd then
  ["nvd", (.VendorSeverity.nvd | sev_letter), "https://nvd.nist.gov/vuln/detail/\($cve)"]
elif .VendorSeverity.ghsa then
  ["ghsa", (.VendorSeverity.ghsa | sev_letter), "https://github.com/advisories?query=\($cve)"]
elif .VendorSeverity.redhat then
  ["rh", (.VendorSeverity.redhat | sev_letter), "https://access.redhat.com/security/cve/\($cve)"]
elif .VendorSeverity.amazon then
  ["amz", (.VendorSeverity.amazon | sev_letter), "https://alas.aws.amazon.com/cve/html/\($cve).html"]
elif .VendorSeverity["oracle-oval"] then
  ["ora", (.VendorSeverity["oracle-oval"] | sev_letter), "https://linux.oracle.com/cve/\($cve).html"]
elif .VendorSeverity.bitnami then
  ["bit", (.VendorSeverity.bitnami | sev_letter), ""]
elif .VendorSeverity.alma then
  ["alma", (.VendorSeverity.alma | sev_letter), "https://errata.almalinux.org/"]
elif .VendorSeverity.rocky then
  ["rky", (.VendorSeverity.rocky | sev_letter), "https://errata.rockylinux.org/"]
else
  ["-", "-", ""]
end) as $vendor'
52 |
53 | # Process Trivy surface scan results and output pipe-delimited rows
54 | # Output format: pkg|cve|cve_date|severity|vendor_prefix:letter|vendor_url|installed|fixed|has_fix|cvss
55 | # Usage: jq_trivy_surface_vulns trivy-surface.json
jq_trivy_surface_vulns() {
  # Emit one pipe-delimited row per HIGH/CRITICAL vulnerability from a Trivy
  # surface-scan JSON file. The jq program is assembled into a named variable
  # (interpolating JQ_VENDOR_FILTER at call time) before being run.
  local scan_file="$1"
  local query
  query='.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL") |
    .VulnerabilityID as $cve |
    (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
    '"${JQ_VENDOR_FILTER}"' |
    "\(.PkgName)|\($cve)|\((.PublishedDate // "-") | split("T")[0])|\(.Severity)|\($vendor[0]):\($vendor[1])|\($vendor[2])|\(.InstalledVersion)|\(.FixedVersion // "-")|\(if (.FixedVersion // "") != "" then "Y" else "N" end)|\($cvss)"'
  jq -r "$query" "$scan_file" 2>/dev/null
}
65 |
66 | # Process Trivy deep scan results and output pipe-delimited rows with target
67 | # Output format: target|pkgpath|pkg|cve|cve_date|severity|vendor_prefix:letter|vendor_url|installed|fixed|has_fix|cvss
68 | # Usage: jq_trivy_deep_vulns trivy-deep.json
jq_trivy_deep_vulns() {
  # Like jq_trivy_surface_vulns, but keeps the scan target and package path as
  # leading columns so nested-JAR findings can be traced back to their source.
  local scan_file="$1"
  local query
  query='.Results[]? | .Target as $target | .Vulnerabilities[]? |
    select(.Severity == "HIGH" or .Severity == "CRITICAL") |
    .VulnerabilityID as $cve |
    (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
    '"${JQ_VENDOR_FILTER}"' |
    "\($target)|\(.PkgPath // "")|\(.PkgName)|\($cve)|\((.PublishedDate // "-") | split("T")[0])|\(.Severity)|\($vendor[0]):\($vendor[1])|\($vendor[2])|\(.InstalledVersion)|\(.FixedVersion // "-")|\(if (.FixedVersion // "") != "" then "Y" else "N" end)|\($cvss)"'
  jq -r "$query" "$scan_file" 2>/dev/null
}
79 |
80 | # Process Trivy Python package vulnerabilities
81 | # Output format: pkg|cve|cve_date|severity|vendor_prefix:letter|vendor_url|installed|fixed|has_fix|cvss
82 | # Usage: jq_trivy_python_vulns trivy-deep.json
jq_trivy_python_vulns() {
  # Same row format as jq_trivy_surface_vulns, but restricted to results whose
  # Type is "python-pkg" (Python packages found in a deep scan).
  local scan_file="$1"
  local query
  query='.Results[]? | select(.Type == "python-pkg") | .Vulnerabilities[]? |
    select(.Severity == "HIGH" or .Severity == "CRITICAL") |
    .VulnerabilityID as $cve |
    (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
    '"${JQ_VENDOR_FILTER}"' |
    "\(.PkgName)|\($cve)|\((.PublishedDate // "-") | split("T")[0])|\(.Severity)|\($vendor[0]):\($vendor[1])|\($vendor[2])|\(.InstalledVersion)|\(.FixedVersion // "-")|\(if (.FixedVersion // "") != "" then "Y" else "N" end)|\($cvss)"'
  jq -r "$query" "$scan_file" 2>/dev/null
}
93 |
94 | # Format vendor severity for markdown display
95 | # Input: vendor_sev (e.g., "rh:H") and vendor_url
96 | # Output: "[rh:H](url)" if url exists, "-" if no vendor data, else "rh:H"
97 | # Usage: format_vendor_display "$vendor_sev" "$vendor_url"
format_vendor_display() {
  # Render a vendor severity (e.g. "rh:H") for a markdown table cell:
  #   "-"            when there is no vendor data (the "-:-" fallback value)
  #   "[sev](url)"   when a vendor URL is available
  #   "sev"          otherwise
  local sev="$1" url="$2"
  case "$sev" in
    "-:-")
      echo "-"
      ;;
    *)
      if [ -n "$url" ]; then
        echo "[$sev]($url)"
      else
        echo "$sev"
      fi
      ;;
  esac
}
110 |
111 | # Format fix indicator for markdown
112 | # Input: "Y" or "N"
113 | # Output: checkmark or x emoji
format_fix_indicator() {
  # Map the has-fix flag to an emoji: "Y" -> ✅, anything else -> ❌.
  case "$1" in
    Y) echo "✅" ;;
    *) echo "❌" ;;
  esac
}
121 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Understanding Vulnerability Reports
2 |
3 | This guide explains how to read and interpret the vulnerability scan reports generated by the Published Images Vulnerability Scanning workflow. It is intended for support, sales, and other non-developer audiences.
4 |
5 | ## What Gets Scanned
6 |
7 | The workflow performs three types of scans on each published Docker image:
8 |
9 | | Scan Type | What It Checks | Scanner |
10 | |-----------|----------------|---------|
11 | | **OS & Application Libraries** | Operating system packages (Debian/Ubuntu) and top-level Java libraries | Trivy |
12 | | **Nested JAR Dependencies** | Libraries bundled inside Liquibase JARs (Spring Boot fat JARs, extensions) | Trivy |
13 | | **SBOM-based Scan** | Software Bill of Materials - a complete inventory of all components | Grype |
14 |
15 | ## Glossary of Terms
16 |
17 | | Term | Definition |
18 | |------|------------|
19 | | **CVE** | Common Vulnerabilities and Exposures - a unique identifier for a security vulnerability (e.g., CVE-2025-12345) |
20 | | **CVSS** | Common Vulnerability Scoring System - a numeric score (0.0-10.0) that measures vulnerability severity. Scores: 0.0 = None, 0.1-3.9 = Low, 4.0-6.9 = Medium, 7.0-8.9 = High, 9.0-10.0 = Critical |
21 | | **NVD** | National Vulnerability Database - the U.S. government's repository of vulnerability data, managed by NIST |
22 | | **GHSA** | GitHub Security Advisory - GitHub's database of security vulnerabilities |
23 | | **Trivy** | An open-source vulnerability scanner by Aqua Security |
24 | | **Grype** | An open-source vulnerability scanner by Anchore |
25 | | **SBOM** | Software Bill of Materials - a complete list of all software components in an image |
26 | | **HIGH/CRITICAL** | Severity ratings indicating vulnerabilities that should be prioritized for remediation |
27 | | **Parent JAR** | The main Liquibase JAR file that contains nested dependencies |
28 | | **Nested JAR** | A library bundled inside another JAR file (common in Spring Boot applications) |
29 |
30 | ## Reading the Vulnerability Tables
31 |
32 | ### Column Definitions
33 |
34 | | Column | Description |
35 | |--------|-------------|
36 | | **Package** | The name of the vulnerable software component |
37 | | **Parent JAR** | Which Liquibase JAR contains this vulnerable dependency |
38 | | **NVD** | Link to the vulnerability details in the National Vulnerability Database |
39 | | **GitHub Advisories** | Link to search for related security advisories on GitHub |
40 | | **CVE Published** | Date the vulnerability was publicly disclosed |
41 | | **Trivy Severity** | Trivy's assessment of the vulnerability severity (HIGH or CRITICAL) |
42 | | **CVSS** | The CVSS v3 numeric score (0.0-10.0) from NVD or vendor database. Higher scores indicate more severe vulnerabilities |
43 | | **Trivy Vendor Data** | Severity rating from the software vendor's security team (may differ from Trivy's assessment) |
44 | | **Grype Severity** | Grype scanner's assessment of vulnerability severity |
45 | | **Installed** | The version currently installed in the image |
46 | | **Fixed** | The version that contains the fix (if available) |
47 | | **Fix?** | Whether a fix is available: ✅ = yes, ❌ = no (must wait for upstream fix) |
48 |
49 | ### Vendor Severity Prefixes
50 |
51 | The "Trivy Vendor Data" column shows severity ratings from different security databases:
52 |
53 | | Prefix | Source | Description |
54 | |--------|--------|-------------|
55 | | `nvd` | NVD | National Vulnerability Database (U.S. government) |
56 | | `ghsa` | GHSA | GitHub Security Advisories |
57 | | `rh` | Red Hat | Red Hat Product Security |
58 | | `amz` | Amazon | Amazon Linux Security Center |
59 | | `ora` | Oracle | Oracle Linux Security |
60 | | `bit` | Bitnami | Bitnami Vulnerability Database |
61 | | `alma` | AlmaLinux | AlmaLinux Errata |
62 | | `rky` | Rocky | Rocky Linux Errata |
63 |
64 | **Note:** Vendor severity ratings come from Trivy's database and may occasionally differ from the current assessment on the vendor's website.
65 |
66 | ### Severity Levels
67 |
68 | | Level | Code | Description |
69 | |-------|------|-------------|
70 | | LOW | L | Minor impact, low priority |
71 | | MEDIUM | M | Moderate impact, should be addressed |
72 | | HIGH | H | Significant impact, prioritize remediation |
73 | | CRITICAL | C | Severe impact, address immediately |
74 |
75 | ## Workflow Artifacts
76 |
77 | After each scan, the following artifact is available for download:
78 |
79 | - **`vulnerability-report-{community|secure}-{version}`** - Detailed markdown report with all vulnerability information
80 |
81 | To download: Go to the workflow run > scroll to "Artifacts" section > click the download icon.
82 |
83 | ## Interpreting Results
84 |
85 | 1. **No vulnerabilities found** - The image passed all scans with no HIGH/CRITICAL issues
86 | 2. **Vulnerabilities with fixes available (✅)** - These can be resolved by updating dependencies
87 | 3. **Vulnerabilities without fixes (❌)** - Must wait for upstream maintainers to release patches
88 | 4. **Same CVE in multiple scanners** - Normal; different scanners may detect the same issue
89 |
90 | ## Common Questions
91 |
92 | **Q: Why do Trivy and Grype sometimes show different results?**
93 | A: Each scanner uses different vulnerability databases and detection methods. Having multiple scanners provides more comprehensive coverage.
94 |
95 | **Q: What does "Parent JAR: (internal)" mean?**
96 | A: The vulnerable component is part of Liquibase's core libraries, not a nested dependency from an extension.
97 |
98 | **Q: Why is the vendor severity different from what I see on their website?**
99 | A: Trivy's database may not always be synchronized with the latest vendor assessments. The linked vendor URL shows the current rating.
100 |
101 | **Q: How often are published images scanned?**
102 | A: The workflow runs Monday-Friday at 10 AM UTC, scanning the most recent tags of each image.
103 |
104 | **Q: Where can I view the scan results?**
105 | A: Go to the repository's Actions tab > "Published Images Vulnerability Scanning" workflow > select a run > view the summary or download artifacts.
106 |
107 | ## Workflow Schedule
108 |
109 | | Schedule | Time | Description |
110 | |----------|------|-------------|
111 | | Weekdays | 10:00 AM UTC | Automatic scan of published images |
112 | | Manual | On-demand | Can be triggered manually via "Run workflow" button |
113 |
114 | ## Related Documentation
115 |
116 | - [Vulnerability Scanning Scripts](README.md) - Technical documentation for developers
117 | - [Trivy Documentation](https://trivy.dev/) - Official Trivy scanner documentation
118 | - [Grype Documentation](https://github.com/anchore/grype) - Official Grype scanner documentation
119 | - [NVD](https://nvd.nist.gov/) - National Vulnerability Database
120 |
--------------------------------------------------------------------------------
/scan-repo.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# scan-repo.sh - scan the most recent Docker Hub tags of a Liquibase image
# repository with Trivy, collecting SARIF results and a failure summary.
#
# Usage: scan-repo.sh <repo-name>   (e.g. "liquibase" or "liquibase-secure")
#
# Environment:
#   MAX_PARALLEL_JOBS - number of concurrent tag scans (default 4)
#   MAX_TAGS          - how many recent tags to scan (default 20)
set -euo pipefail

# Fail fast with a usage message instead of the opaque "unbound variable"
# error that `set -u` would produce when the argument is missing.
REPO_NAME="${1:?Usage: $0 <repo-name>}"
DOCKERHUB_NAMESPACE=liquibase
MAX_PARALLEL_JOBS=${MAX_PARALLEL_JOBS:-4}
MAX_TAGS=${MAX_TAGS:-20} # Limit recent tags only

echo "🔍 Scanning recent tags for $DOCKERHUB_NAMESPACE/$REPO_NAME (max: $MAX_TAGS tags)"

mkdir -p sarif-outputs
touch trivy-failures.txt

# Pre-install and warm up Trivy DB
echo "📦 Installing Trivy and warming up database..."
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin
# Download and cache the vulnerability database once
trivy image --download-db-only
trivy --version
20 |
fetch_recent_tags() {
  # Page through the Docker Hub v2 tags API (most recently updated first) and
  # print up to MAX_TAGS tag names, one per line.
  local url="https://hub.docker.com/v2/repositories/${DOCKERHUB_NAMESPACE}/${REPO_NAME}/tags?page_size=100&ordering=-last_updated"
  local count=0
  local response tag

  while [ -n "$url" ] && [ $count -lt $MAX_TAGS ]; do
    response=$(curl -s "$url")

    # Parse tags and return the most recent ones up to MAX_TAGS
    while IFS= read -r tag; do
      if [ $count -ge $MAX_TAGS ]; then break; fi
      if [ -z "$tag" ] || [ "$tag" = "null" ]; then continue; fi

      echo "$tag"
      # BUGFIX: must not use ((count++)) here. Under `set -e` the post-increment
      # evaluates to 0 on the first iteration, which is a failing exit status
      # and aborted the function after emitting a single tag.
      count=$((count + 1))
    done < <(echo "$response" | jq -r '.results[].name // empty')

    url=$(echo "$response" | jq -r '.next')
    [ "$url" = "null" ] && break
  done
}
41 |
scan_tag() {
  # Pull one tag, scan it with Trivy (HIGH/CRITICAL, OS + library vulns),
  # write SARIF output, record failing tags in trivy-failures.txt, and remove
  # the image afterwards. Runs as a background job under parallel control.
  local tag=$1
  local image="docker.io/${DOCKERHUB_NAMESPACE}/${REPO_NAME}:${tag}"
  local sarif="sarif-outputs/${REPO_NAME}--${tag//\//-}.sarif"
  # BUGFIX: one error log per tag. scan_tag instances run concurrently, so the
  # previous single shared docker-errors.log was clobbered by parallel writers.
  local log_file="docker-errors-${tag//\//-}.log"

  echo "🧪 [$$] Scanning $image"

  # Use --cache-dir for faster subsequent scans
  if docker pull "$image" 2>"$log_file"; then
    if ! trivy image \
      --cache-dir /tmp/trivy-cache \
      --vuln-type os,library \
      --scanners vuln \
      --format sarif \
      --output "$sarif" \
      --severity HIGH,CRITICAL \
      --exit-code 1 \
      --timeout 10m \
      "$image" 2>/dev/null; then
      echo "❌ VULNERABILITIES FOUND: $image"
      echo "$tag" >> trivy-failures.txt
    else
      echo "✅ CLEAN: $image"
    fi
  else
    echo "❌ Failed to pull $image"
    echo "PULL_FAILED:$tag" >> trivy-failures.txt
  fi

  # Clean up immediately to save disk space
  docker image rm "$image" 2>/dev/null || true
}
76 |
# Create parallel job control
echo "🚀 Starting parallel scans (max $MAX_PARALLEL_JOBS jobs)..."

# Get tags and filter valid ones into an array. Only tags matching Docker's
# tag grammar (alphanumerics separated by . _ -) are accepted.
valid_tags=()
while IFS= read -r tag; do
  if [[ "$tag" =~ ^[a-zA-Z0-9]+([._-][a-zA-Z0-9]+)*$ ]]; then
    valid_tags+=("$tag")
  else
    echo "⚠️ Skipping invalid tag: $tag"
  fi
done < <(fetch_recent_tags)

echo "Found ${#valid_tags[@]} valid tags to scan"

# Process tags with controlled parallelism
active_jobs=0
for tag in "${valid_tags[@]}"; do
  # Start scan in background
  scan_tag "$tag" &

  # BUGFIX: ((active_jobs++)) returns exit status 1 when the counter is 0
  # (post-increment evaluates to 0), which aborted the script under `set -e`
  # right after launching the first job. Plain arithmetic expansion is safe.
  active_jobs=$((active_jobs + 1))

  # Wait if we've reached the job limit
  if ((active_jobs >= MAX_PARALLEL_JOBS)); then
    wait -n # Wait for any job to complete
    active_jobs=$((active_jobs - 1))
  fi
done

# Wait for all remaining jobs to complete
wait

echo "Completed scanning ${#valid_tags[@]} tags"
111 |
# Clean up Docker to free space
docker system prune -f -a --volumes 2>/dev/null || true

echo "✅ Scanning complete for $REPO_NAME"

# === ALWAYS create artifacts directory first ===
echo "::group::Prepare artifacts"
mkdir -p artifacts
echo "Created artifacts directory"

# === SARIF upload (only if files exist) ===
if ls sarif-outputs/*.sarif 1> /dev/null 2>&1; then
  echo "::group::Upload SARIF results"

  # Combine SARIF files more efficiently: merge every run from every per-tag
  # SARIF file into a single 2.1.0 document.
  combined_sarif=$(find sarif-outputs -name "*.sarif" -exec cat {} \; | jq -s '{
    version: "2.1.0",
    runs: map(select(.runs) | .runs[])
  }')

  # BUGFIX: use ${VAR:-} defaults. Under `set -u`, referencing an unset
  # GITHUB_* variable aborted the script with "unbound variable" instead of
  # taking the intended "skip upload" branch when run outside GitHub Actions.
  if [ -n "${GITHUB_REPOSITORY:-}" ] && [ -n "${GITHUB_SHA:-}" ] && [ -n "${GITHUB_REF:-}" ]; then
    gh api \
      --method POST \
      -H "Accept: application/vnd.github+json" \
      /repos/${GITHUB_REPOSITORY}/code-scanning/sarifs \
      -f commit_sha="${GITHUB_SHA}" \
      -f ref="${GITHUB_REF}" \
      --input <(echo "$combined_sarif") \
      -F checkout_uri="file://$(pwd)" \
      -F started_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" || echo "SARIF upload failed, continuing..."
  else
    echo "GitHub environment variables missing, skipping SARIF upload"
  fi
  echo "::endgroup::"
else
  echo "⚠️ No SARIF files generated"
fi
149 |
# === Upload scan artifacts (ensure directory exists) ===
echo "::group::Upload artifacts"

# Stage per-tag SARIF files into artifacts/ and count how many were produced.
if [ ! -d sarif-outputs ]; then
  sarif_count=0
  echo "No sarif-outputs directory found"
else
  find sarif-outputs -name "*.sarif" -type f -exec cp {} artifacts/ \; 2>/dev/null || true
  sarif_count=$(find sarif-outputs -name "*.sarif" -type f 2>/dev/null | wc -l)
  echo "Copied $sarif_count SARIF files to artifacts/"
fi

# The failures file is always shipped, even when empty, so downstream steps
# can rely on its presence.
if [ ! -f trivy-failures.txt ]; then
  touch artifacts/trivy-failures.txt
  echo "Created empty trivy-failures.txt in artifacts/"
else
  cp trivy-failures.txt artifacts/
  echo "Copied trivy-failures.txt to artifacts/"
fi

# Write the machine-readable scan summary in one grouped redirection.
{
  echo "Scanned repository: $REPO_NAME"
  echo "Tags scanned: $sarif_count"
  echo "Scan date: $(date)"
  echo "Max tags limit: $MAX_TAGS"
  echo "Script completed successfully"
} > artifacts/scan-summary.txt

# Write the human-readable README for troubleshooting.
{
  echo "Liquibase Docker Scan Results"
  echo "============================="
  echo ""
  echo "Repository: $REPO_NAME"
  echo "Scan Date: $(date)"
  echo "SARIF Files: $sarif_count"
  echo ""
} > artifacts/README.md
if [ -s artifacts/trivy-failures.txt ]; then
  {
    echo "⚠️ Vulnerable Tags Found:"
    echo '```'
    cat artifacts/trivy-failures.txt
    echo '```'
  } >> artifacts/README.md
else
  echo "✅ No vulnerabilities found in scanned images!" >> artifacts/README.md
fi

# Verify artifacts directory content
echo "Artifacts directory contents:"
ls -la artifacts/ || echo "Failed to list artifacts directory"

echo "::endgroup::"

# === Print scan summary ===
# A non-empty failures file means at least one tag had HIGH/CRITICAL findings
# (or failed to pull); exit non-zero so CI marks the run as failed.
if [[ ! -s trivy-failures.txt ]]; then
  echo "✅ No HIGH or CRITICAL vulnerabilities found."
else
  echo "❌ The following tags had HIGH/CRITICAL vulnerabilities:"
  cat trivy-failures.txt
  exit 1
fi
--------------------------------------------------------------------------------
/scripts/extract-nested-deps.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # extract-nested-deps.sh
4 | #
5 | # Extracts nested JARs and Python packages from Liquibase Docker images for deep vulnerability scanning.
6 | # This script handles Spring Boot nested JARs (BOOT-INF/lib) and GraalVM Python packages.
7 | #
8 | # Usage:
9 | # extract-nested-deps.sh
10 | #
11 | # Arguments:
12 | # image_ref: Docker image reference (e.g., liquibase/liquibase:latest or image:sha)
13 | #
14 | # Environment Variables:
15 | # EXTRACT_DIR: Base directory for extraction (default: /tmp/extracted-deps)
16 | #
17 | # Outputs:
18 | # - Extracted JAR files in ${EXTRACT_DIR}/internal-jars/{lib,extensions}
19 | # - Nested JARs from archives in ${EXTRACT_DIR}/dist/
20 | # - Python packages in ${EXTRACT_DIR}/python-packages
21 | # - JAR mapping file in ${EXTRACT_DIR}/jar-mapping.txt
22 |
23 | set -e
24 |
25 | # Configuration
26 | IMAGE_REF="${1:?Error: Image reference required}"
27 | EXTRACT_DIR="${EXTRACT_DIR:-/tmp/extracted-deps}"
28 |
29 | echo "📦 Extracting nested dependencies from ${IMAGE_REF}..."
30 |
31 | # Create extraction directory
32 | mkdir -p "${EXTRACT_DIR}"
33 |
34 | # Create container from image to extract files
35 | container_id=$(docker create "${IMAGE_REF}")
36 | trap "docker rm ${container_id} > /dev/null 2>&1" EXIT
37 |
echo "🔍 Extracting all JAR files from container..."

# Extract distribution archives if they exist
echo "🔍 Checking /liquibase/dist for tar.gz archives..."
if docker cp "${container_id}:/liquibase/dist" /tmp/liquibase-dist 2>/dev/null; then
  echo "✓ Found /liquibase/dist directory"

  # Extract all tar.gz archives
  # NOTE: the `find | while` pipeline runs the loop body in a subshell, so
  # variables set here (archive_name, jar_count, nested_count, ...) do not
  # survive past the loop; they are only used within it.
  find /tmp/liquibase-dist -name "*.tar.gz" -type f | while read -r archive; do
    archive_name=$(basename "$archive" .tar.gz)
    echo "  📦 Extracting $archive_name..."
    extract_dir="/tmp/liquibase-dist/${archive_name}-extracted"
    mkdir -p "$extract_dir"
    # Best-effort: a corrupt archive must not abort the whole run (set -e).
    tar -xzf "$archive" -C "$extract_dir" 2>/dev/null || true

    # Find JARs in extracted archive
    jar_count=$(find "$extract_dir" -name "*.jar" -type f | wc -l)
    if [ "$jar_count" -gt 0 ]; then
      echo "    ✓ Found $jar_count JAR(s) in $archive_name"

      # Extract each JAR from the archive
      find "$extract_dir" -name "*.jar" -type f | while read -r jar_file; do
        jar_name=$(basename "$jar_file" .jar)
        jar_extract="${EXTRACT_DIR}/dist/${archive_name}/${jar_name}"
        mkdir -p "$jar_extract"
        # JARs are zip archives; unzip to inspect their layout.
        unzip -q "$jar_file" -d "$jar_extract" 2>/dev/null || true

        # Check for Spring Boot nested JARs and copy them as-is (don't extract)
        # so Trivy can scan each nested JAR as a distinct artifact.
        if [ -d "$jar_extract/BOOT-INF/lib" ]; then
          echo "    ✓ Spring Boot JAR: $jar_name - preserving nested JAR files"
          nested_count=0
          nested_jar_dir="${EXTRACT_DIR}/dist/${archive_name}/${jar_name}-nested-jars"
          mkdir -p "$nested_jar_dir"

          # Create a mapping file to track parent JAR relationships
          # (one "parent.jar|nested.jar" line per nested JAR, appended).
          mapping_file="${EXTRACT_DIR}/jar-mapping.txt"

          for nested_jar in "$jar_extract/BOOT-INF/lib"/*.jar; do
            if [ -f "$nested_jar" ]; then
              nested_count=$((nested_count + 1))
              nested_jar_name=$(basename "$nested_jar")

              # Copy the JAR file as-is, don't extract it
              cp "$nested_jar" "$nested_jar_dir/" 2>/dev/null || true

              # Record the parent → nested relationship
              echo "${jar_name}.jar|$nested_jar_name" >> "$mapping_file"
            fi
          done
          echo "      → Preserved $nested_count nested JAR file(s)"
        fi
      done
    fi
  done
else
  echo "⚠ No /liquibase/dist directory found"
fi
95 |
# Copy entire internal directory for comprehensive scanning
if docker cp "${container_id}:/liquibase/internal" /tmp/liquibase-internal 2>/dev/null; then
  echo "✓ Copied /liquibase/internal directory"

  # Count total JARs
  # NOTE: this counts every JAR anywhere under /liquibase/internal; the final
  # "Preserved $jar_count" message below reports this found total, which may
  # exceed the number actually copied from lib/ and extensions/.
  jar_count=$(find /tmp/liquibase-internal -name "*.jar" -type f | wc -l)
  echo "📊 Found $jar_count JAR files to scan"

  # Copy all JAR files preserving them for Trivy to scan
  mkdir -p "${EXTRACT_DIR}/internal-jars/lib"
  mkdir -p "${EXTRACT_DIR}/internal-jars/extensions"

  # Copy lib JARs as-is (cp failure tolerated, e.g. when the glob matches nothing)
  if [ -d /tmp/liquibase-internal/lib ]; then
    cp /tmp/liquibase-internal/lib/*.jar "${EXTRACT_DIR}/internal-jars/lib/" 2>/dev/null || true
    lib_jar_count=$(ls -1 "${EXTRACT_DIR}/internal-jars/lib/"*.jar 2>/dev/null | wc -l)
    echo "  ✓ Preserved $lib_jar_count lib JAR(s)"
  fi

  # Copy extension JARs as-is
  if [ -d /tmp/liquibase-internal/extensions ]; then
    cp /tmp/liquibase-internal/extensions/*.jar "${EXTRACT_DIR}/internal-jars/extensions/" 2>/dev/null || true
    ext_jar_count=$(ls -1 "${EXTRACT_DIR}/internal-jars/extensions/"*.jar 2>/dev/null | wc -l)
    echo "  ✓ Preserved $ext_jar_count extension JAR(s)"
  fi

  echo "✓ Preserved $jar_count JAR files for scanning"
else
  echo "⚠ Could not copy /liquibase/internal directory"
fi
126 |
# Extract all extension JARs and look for GraalVM Python embedded dependencies
echo "🔍 Scanning extension JARs for Python packages..."
mkdir -p "${EXTRACT_DIR}/python-packages"

# Scan all JARs in internal-jars/extensions directory
if [ -d "${EXTRACT_DIR}/internal-jars/extensions" ]; then
  for ext_jar in "${EXTRACT_DIR}/internal-jars/extensions"/*.jar; do
    if [ -f "$ext_jar" ]; then
      jar_name=$(basename "$ext_jar")
      # ${jar_name%.jar} strips the .jar suffix for the extraction directory name.
      jar_extract="${EXTRACT_DIR}/extension-scan/${jar_name%.jar}"
      mkdir -p "$jar_extract"
      unzip -q "$ext_jar" -d "$jar_extract" 2>/dev/null || true

      # Check if this JAR contains GraalVM Python packages
      if [ -d "$jar_extract/org.graalvm.python.vfs" ]; then
        echo "  ✓ Found Python packages in $jar_name"

        # Copy from both possible locations
        # (lib/python3.11 vs Lib look like POSIX- and Windows-style venv
        # layouts — presumably only one exists per JAR; verify if the bundled
        # Python version ever changes from 3.11, since the path is hard-coded)
        if [ -d "$jar_extract/org.graalvm.python.vfs/venv/lib/python3.11/site-packages" ]; then
          cp -r "$jar_extract/org.graalvm.python.vfs/venv/lib/python3.11/site-packages"/* "${EXTRACT_DIR}/python-packages/" 2>/dev/null || true
        fi
        if [ -d "$jar_extract/org.graalvm.python.vfs/venv/Lib/site-packages" ]; then
          cp -r "$jar_extract/org.graalvm.python.vfs/venv/Lib/site-packages"/* "${EXTRACT_DIR}/python-packages/" 2>/dev/null || true
        fi

        # Also extract bundled wheels (ensurepip's vendored .whl files)
        if [ -d "$jar_extract/META-INF/resources/libpython/ensurepip/_bundled" ]; then
          mkdir -p "${EXTRACT_DIR}/python-bundled"
          cp "$jar_extract/META-INF/resources/libpython/ensurepip/_bundled"/*.whl "${EXTRACT_DIR}/python-bundled/" 2>/dev/null || true
        fi
      fi
    fi
  done
fi
161 |
# Record the basename of every JAR that will be scanned, deduplicated, so the
# report generators can show exactly which artifacts were covered.
echo "📝 Creating JAR manifest..."
MANIFEST="${EXTRACT_DIR}/scanned-jars.txt"
{
  # Preserved lib/ and extensions/ JARs.
  for jar_dir in "${EXTRACT_DIR}/internal-jars/lib" "${EXTRACT_DIR}/internal-jars/extensions"; do
    if [ -d "$jar_dir" ]; then
      for jar in "$jar_dir"/*.jar; do
        if [ -f "$jar" ]; then
          basename "$jar"
        fi
      done
    fi
  done
  # Nested JARs unpacked from the distribution archives.
  if [ -d "${EXTRACT_DIR}/dist" ]; then
    find "${EXTRACT_DIR}/dist" -type d -name "*-nested-jars" -exec ls -1 {} \; 2>/dev/null | xargs -n1 basename 2>/dev/null || true
  fi
} | sort -u > "$MANIFEST"
manifest_count=$(wc -l < "$MANIFEST" | tr -d ' ')
echo "✓ Created manifest with ${manifest_count} JAR files"
181 |
# ---------------------------------------------------------------------------
# Extraction summary: report how much was pulled out of the image so the CI
# log makes the scan surface obvious.
# ---------------------------------------------------------------------------
echo ""
echo "📊 Extraction Summary:"
total_files=$(find "${EXTRACT_DIR}" -type f 2>/dev/null | wc -l)
echo "Total files extracted: $total_files"

if [ -d "${EXTRACT_DIR}/dist" ]; then
  # Archive/JAR counts come from the raw copy in /tmp/liquibase-dist
  # (populated earlier in this script), while nested-JAR counts come from the
  # unpacked ${EXTRACT_DIR}/dist tree.
  dist_archives=$(find /tmp/liquibase-dist -name "*.tar.gz" -type f 2>/dev/null | wc -l)
  dist_jars=$(find /tmp/liquibase-dist -name "*.jar" -type f 2>/dev/null | wc -l)
  # Sum the JARs inside every "*-nested-jars" directory.
  # FIX: pass {} to sh as a positional parameter instead of splicing it into
  # the -c command string (safe against special characters, portable across
  # find implementations), and use `print s+0` so awk emits "0" rather than an
  # empty line when find matches nothing — previously an empty $nested_jars
  # made the `-gt` test below a runtime error.
  nested_jars=$(find "${EXTRACT_DIR}/dist" -type d -name "*-nested-jars" \
    -exec sh -c 'ls -1 "$1"/*.jar 2>/dev/null | wc -l' sh {} \; 2>/dev/null \
    | awk '{s+=$1} END {print s+0}')
  echo "Distribution archives: $dist_archives"
  echo "  - JARs in archives: $dist_jars"
  if [ "${nested_jars:-0}" -gt 0 ]; then
    echo "  - Spring Boot nested JARs: $nested_jars"
  fi
fi

if [ -d "${EXTRACT_DIR}/internal-jars" ]; then
  lib_jars=$(ls -1 "${EXTRACT_DIR}/internal-jars/lib/"*.jar 2>/dev/null | wc -l)
  ext_jars=$(ls -1 "${EXTRACT_DIR}/internal-jars/extensions/"*.jar 2>/dev/null | wc -l)
  total_internal=$((lib_jars + ext_jars))
  echo "Internal JARs preserved: $total_internal"
  echo "  - Lib JARs: $lib_jars"
  echo "  - Extension JARs: $ext_jars"
fi

if [ -d "${EXTRACT_DIR}/python-packages" ]; then
  # Count installed Python distributions by their metadata directories.
  python_pkgs=$(ls -1 "${EXTRACT_DIR}/python-packages" 2>/dev/null | grep -E '\.(dist-info|egg-info)$' | wc -l)
  echo "Python packages found: $python_pkgs"
fi

echo "✅ Extraction complete"
214 |
--------------------------------------------------------------------------------
/README-secure.md:
--------------------------------------------------------------------------------
1 | # Official Liquibase Secure Docker Images (formerly Liquibase Pro)
2 |
3 | **Liquibase Secure** is the enterprise edition of Liquibase that provides advanced database DevOps capabilities for teams requiring enhanced security, performance, and governance features.
4 |
5 | ## ⚠️ License Requirements
6 |
7 | > **WARNING**: Liquibase Secure requires a valid license key to use Secure features. Without a license, the container will show an invalid license error.
8 | >
9 | > - Contact [Liquibase Sales](https://www.liquibase.com/contact-us) to obtain a Liquibase Secure license
10 | > - Existing customers receive their Secure license keys in an email.
11 |
12 | ## 📋 Secure Features
13 |
14 | Liquibase Secure is the enterprise edition of [Liquibase](https://www.liquibase.com/) that provides advanced database DevOps capabilities for teams requiring enhanced security, performance, and governance features.
15 |
16 | Liquibase Secure includes all Community features plus:
17 |
18 | ### 🔐 Security & Governance
19 |
20 | - **Policy Checks**: Enforce database standards and best practices and advanced validation rules for changesets
21 | - **Rollback SQL**: Generate rollback scripts for any deployment
22 | - **Targeted Rollback**: Rollback specific changesets without affecting others
23 | - **Advanced Database Support**: Enhanced support for Oracle, SQL Server, and other enterprise databases
24 | - **Audit Reports**: Comprehensive tracking of database changes
25 | - **Stored Logic**: Support for functions, procedures, packages, and triggers
26 |
27 | ## 🔧 Environment Variables
28 |
29 | ### Secure License Environment Variable
30 |
31 | | Variable | Description | Example |
32 | |----------|-------------|---------|
33 | | `LIQUIBASE_LICENSE_KEY` | Your Liquibase Secure license key | `ABcd-1234-EFGH-5678` |
34 |
35 | ### 🔧 Action Required
36 |
37 | Please update your Dockerfiles and scripts to pull from the new official image:
38 |
39 | ## Available Registries
40 |
41 | We publish this image to multiple registries:
42 |
43 | | Registry | Secure Image |
44 | |----------|-----------|
45 | | **Docker Hub (default)** | `liquibase/liquibase-secure` |
46 | | **GitHub Container Registry** | `ghcr.io/liquibase/liquibase-secure` |
47 | | **Amazon ECR Public** | `public.ecr.aws/liquibase/liquibase-secure` |
48 |
49 | ## Dockerfile
50 |
51 | ```dockerfile
52 | FROM liquibase/liquibase-secure:latest
53 | # OR ghcr.io/liquibase/liquibase-secure:latest # GHCR
54 | # OR public.ecr.aws/liquibase/liquibase-secure:latest # Amazon ECR Public
55 | ```
56 |
57 | ## Scripts
58 |
59 | ### Liquibase Secure Edition
60 |
61 | ```bash
62 | # Docker Hub (default)
63 | docker pull liquibase/liquibase-secure
64 |
65 | # GitHub Container Registry
66 | docker pull ghcr.io/liquibase/liquibase-secure
67 |
68 | # Amazon ECR Public
69 | docker pull public.ecr.aws/liquibase/liquibase-secure
70 | ```
71 |
72 | ### Pulling the Latest or Specific Version
73 |
74 | #### Pulling Liquibase Secure Edition Images
75 |
76 | ```bash
77 | # Latest
78 | docker pull liquibase/liquibase-secure:latest
79 | docker pull ghcr.io/liquibase/liquibase-secure:latest
80 | docker pull public.ecr.aws/liquibase/liquibase-secure:latest
81 |
82 | # Specific version (example: 5.0.0)
83 | docker pull liquibase/liquibase-secure:5.0.0
84 | docker pull ghcr.io/liquibase/liquibase-secure:5.0.0
85 | docker pull public.ecr.aws/liquibase/liquibase-secure:5.0.0
86 | ```
87 |
88 | For any questions or support, please visit our [Liquibase Support](https://forum.liquibase.org/).
89 |
90 | ## 🏷️ Supported Tags
91 |
92 | The following tags are officially supported and can be found on [Docker Hub](https://hub.docker.com/r/liquibase/liquibase-secure/tags):
93 |
94 | - `liquibase/liquibase-secure:<version>` (e.g. `latest`, `5.0.0`)
95 |
96 | ### Database Connection Variables
97 |
98 | | Variable | Description | Example |
99 | |----------|-------------|---------|
100 | | `LIQUIBASE_COMMAND_URL` | Database JDBC URL | `jdbc:postgresql://db:5432/mydb` |
101 | | `LIQUIBASE_COMMAND_USERNAME` | Database username | `dbuser` |
102 | | `LIQUIBASE_COMMAND_PASSWORD` | Database password | `dbpass` |
103 | | `LIQUIBASE_COMMAND_CHANGELOG_FILE` | Path to changelog file | `/liquibase/changelog/changelog.xml` |
104 |
105 | ### Liquibase Secure-Specific Configuration
106 |
107 | | Variable | Description | Default |
108 | |----------|-------------|---------|
109 | | `LIQUIBASE_SECURE_POLICY_CHECKS_ENABLED` | Enable policy checks | `true` |
110 | | `LIQUIBASE_SECURE_QUALITY_CHECKS_ENABLED` | Enable quality checks | `true` |
111 | | `LIQUIBASE_REPORTS_ENABLED` | Enable HTML reports | `true` |
112 | | `LIQUIBASE_REPORTS_PATH` | Reports output directory | `/tmp/reports` |
113 |
114 | ## Required License Configuration
115 |
116 | Set your Liquibase Secure license key using the `LIQUIBASE_LICENSE_KEY` environment variable:
117 |
118 | ```bash
119 | $ docker run --rm \
120 | -e LIQUIBASE_LICENSE_KEY="YOUR_LICENSE_KEY_HERE" \
121 | -v /path/to/changelog:/liquibase/changelog \
122 | liquibase/liquibase-secure \
123 | --changelog-file=example-changelog.xml \
124 | --url="jdbc:postgresql://host.docker.internal:5432/testdb" \
125 | --username=postgres \
126 | --password=password \
127 | --search-path=/liquibase/changelog/ \
128 | update
129 | ```
130 |
131 | ## Mounting Changelog Files
132 |
133 | Mount your changelog directory to the `/liquibase/changelog` volume and use the `--search-path` parameter to specify the location.
134 |
135 | ```bash
136 | $ docker run --rm \
137 | -e LIQUIBASE_LICENSE_KEY="YOUR_LICENSE_KEY_HERE" \
138 | -v "$(pwd)":/liquibase/changelog \
139 | liquibase/liquibase-secure \
140 | --changelog-file=example-changelog.xml \
141 | --search-path=/liquibase/changelog/ \
142 | update
143 | ```
144 |
145 | ## Using a Properties File
146 |
147 | To use a default configuration file, mount it in your changelog volume and reference it with the `--defaults-file` argument.
148 |
149 | ```bash
150 | $ docker run --rm \
151 | -e LIQUIBASE_LICENSE_KEY="YOUR_LICENSE_KEY_HERE" \
152 | -v /path/to/changelog:/liquibase/changelog \
153 | liquibase/liquibase-secure \
154 | --defaults-file=liquibase.properties update
155 | ```
156 |
157 | Example `liquibase.properties` file:
158 |
159 | ```bash
160 | url=jdbc:postgresql://host.docker.internal:5432/testdb
161 | username=postgres
162 | password=password
163 | changelog-file=example-changelog.xml
164 | search-path=/liquibase/changelog/
165 | licenseKey=
166 | ```
167 |
168 | ## Adding Additional JARs
169 |
170 | Mount a local directory containing additional jars to `/liquibase/lib`.
171 |
172 | ```bash
173 | $ docker run --rm \
174 | -e LIQUIBASE_LICENSE_KEY="YOUR_LICENSE_KEY_HERE" \
175 | -v /path/to/changelog:/liquibase/changelog \
176 | -v /path/to/lib:/liquibase/lib \
177 |   liquibase/liquibase-secure update
178 | ```
179 | ## 📦 Using the Docker Image
180 |
181 | ### 🏷️ Standard Image
182 |
183 | The `liquibase/liquibase-secure` image is the standard choice. Use it as a disposable container or a foundational building block for other images.
184 |
185 | For examples of extending the standard image, see the [standard image examples](https://github.com/liquibase/docker/tree/main/examples).
186 |
187 |
188 | **Usage:**
189 |
190 | ```bash
191 | # Build the image
192 | docker build . -t liquibase-secure-aws
193 |
194 | # Run with AWS credentials
195 | docker run --rm \
196 | -e AWS_ACCESS_KEY_ID="your-access-key" \
197 | -e AWS_SECRET_ACCESS_KEY="your-secret-key" \
198 | -e LIQUIBASE_LICENSE_KEY="your-license-key" \
199 | -v "$(pwd)":/liquibase/changelog \
200 | liquibase-secure-aws \
201 | --changelog-file=changelog.xml \
202 | --search-path=/liquibase/changelog/ \
203 | update
204 | ```
205 |
206 | ### 🐳 Docker Compose Example
207 |
208 | For a complete example using Docker Compose with PostgreSQL:
209 |
210 | ```yaml
211 | version: '3.8'
212 | services:
213 | liquibase:
214 | image: liquibase/liquibase-secure:latest
215 | environment:
216 | LIQUIBASE_LICENSE_KEY: "${LIQUIBASE_LICENSE_KEY}"
217 | LIQUIBASE_COMMAND_URL: "jdbc:postgresql://postgres:5432/example"
218 | LIQUIBASE_COMMAND_USERNAME: "liquibase"
219 | LIQUIBASE_COMMAND_PASSWORD: "liquibase"
220 | LIQUIBASE_COMMAND_CHANGELOG_FILE: "changelog.xml"
221 | volumes:
222 | - ./changelog:/liquibase/changelog
223 | depends_on:
224 | - postgres
225 | command: update
226 |
227 | postgres:
228 | image: postgres:15
229 | environment:
230 | POSTGRES_DB: example
231 | POSTGRES_USER: liquibase
232 | POSTGRES_PASSWORD: liquibase
233 | ports:
234 | - "5432:5432"
235 | ```
236 |
237 | ## License
238 |
239 | This Docker image contains Liquibase Secure software which requires a valid commercial license for use.
240 |
241 | For licensing questions, please contact [Liquibase Sales](https://www.liquibase.com/contact-us).
242 |
243 | View [license information](https://www.liquibase.com/eula) for the software contained in this image.
244 |
245 | As with all Docker images, these likely also contain other software which may be under other licenses (such as Bash, etc from the base distribution, along with any direct or indirect dependencies of the primary software being contained).
246 |
247 | Some additional license information which was able to be auto-detected might be found in [the `repo-info` repository's `liquibase/` directory](https://github.com/docker-library/repo-info/tree/master/repos/liquibase).
248 |
249 | As for any pre-built image usage, it is the image user's responsibility to ensure that any use of this image complies with any relevant licenses for all software contained within.
250 |
--------------------------------------------------------------------------------
/scripts/append-github-summary.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# append-github-summary.sh
#
# Appends detailed vulnerability information (summary table, scan targets and
# per-scanner detail tables) to the GitHub Actions step summary as markdown.
#
# Usage:
#   append-github-summary.sh <image> <tag> [published]
#
# Arguments:
#   image:      Docker image name (e.g., liquibase/liquibase)
#   tag:        Image tag (e.g., 4.28.0)
#   published:  ISO 8601 timestamp of when the image tag was last updated (optional)
#
# Environment Variables:
#   EXTRACT_DIR:          Directory containing jar-mapping.txt (default: /tmp/extracted-deps)
#   surface_vulns:        Number of surface vulnerabilities
#   deep_vulns:           Number of deep vulnerabilities
#   grype_vulns:          Number of Grype vulnerabilities
#   total_vulns:          Total vulnerabilities
#   GITHUB_STEP_SUMMARY:  GitHub Actions summary file path
#
# Expected Input Files:
#   - trivy-surface.json: Trivy surface scan results
#   - trivy-deep.json:    Trivy deep scan results
#   - grype-results.json: Grype JSON results
#
# Outputs:
#   - Appends to $GITHUB_STEP_SUMMARY

set -e

# Shared jq filters and formatting helpers live next to this script.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=lib/vuln-filters.sh
source "${SCRIPT_DIR}/lib/vuln-filters.sh"

# Positional arguments — image and tag are mandatory, publish date optional.
IMAGE="${1:?Error: Image name required}"
TAG="${2:?Error: Tag required}"
PUBLISHED="${3:-}"

# Reduce the ISO 8601 publish timestamp to its YYYY-MM-DD date part.
case "$PUBLISHED" in
  "" | unknown) PUBLISHED_DATE="unknown" ;;
  *)            PUBLISHED_DATE="${PUBLISHED%%T*}" ;;
esac

# Defaults for environment-provided values.
: "${EXTRACT_DIR:=/tmp/extracted-deps}"
: "${surface_vulns:=0}"
: "${deep_vulns:=0}"
: "${grype_vulns:=0}"
: "${total_vulns:=0}"

# A step summary only exists inside GitHub Actions; bail out quietly elsewhere.
if [ -z "${GITHUB_STEP_SUMMARY:-}" ]; then
  echo "⚠️ Not running in GitHub Actions, skipping summary generation"
  exit 0
fi
63 |
echo "📊 Appending vulnerability details to GitHub Actions summary..."

# Status emoji for a vulnerability count: green check when zero, warning otherwise.
status_icon() {
  if [ "$1" -eq 0 ]; then
    echo '✅'
  else
    echo '⚠️'
  fi
}

# Write the report header and the per-scanner summary table.
{
  echo "## 🛡️ Vulnerability Scan Results for \`${IMAGE}:${TAG}\`"
  echo ""
  echo "**Image Last Updated**: ${PUBLISHED_DATE}"
  echo ""
  echo "**Total HIGH/CRITICAL Vulnerabilities: ${total_vulns}**"
  echo ""
  echo "| Scanner | Vulnerabilities | Status |"
  echo "|---------|-----------------|--------|"
  echo "| 🔍 OS & Application Libraries | ${surface_vulns} | $(status_icon "$surface_vulns") |"
  echo "| 🔎 Nested JAR Dependencies | ${deep_vulns} | $(status_icon "$deep_vulns") |"
  echo "| 📋 Grype (SBOM-based) | ${grype_vulns} | $(status_icon "$grype_vulns") |"
  echo ""
} >> "$GITHUB_STEP_SUMMARY"
81 |
# Add scan targets section as a collapsible <details> block.
# FIX: the <details>/<summary> wrapper and closing </details> tag had been
# lost, leaving a stray multi-line echo — without them GitHub renders the
# section flat and "(click to expand)" is meaningless.
{
  echo ""
  echo "<details>"
  echo "<summary>📁 Scan Targets (click to expand)</summary>"
  echo ""
  echo "**OS & Application Libraries:**"
  if [ -f trivy-surface.json ]; then
    # Unique Trivy targets (OS package DBs, lockfiles, …) as a bullet list.
    jq -r '[.Results[].Target] | unique | .[]' trivy-surface.json 2>/dev/null | sed 's/^/- /' || echo "- (no targets found)"
  else
    echo "- (scan results not available)"
  fi
  echo ""
  echo "**Nested JAR Dependencies:**"
  if [ -f "${EXTRACT_DIR}/scanned-jars.txt" ]; then
    # Prefer the manifest written by the extraction step; cap at 20 entries.
    jar_count=$(wc -l < "${EXTRACT_DIR}/scanned-jars.txt" | tr -d ' ')
    echo "*(${jar_count} JAR files scanned)*"
    head -20 "${EXTRACT_DIR}/scanned-jars.txt" | sed 's/^/- /'
    if [ "$jar_count" -gt 20 ]; then
      echo "- ... and $((jar_count - 20)) more"
      echo ""
      echo "📎 *Full report available in workflow artifacts*"
    fi
  elif [ -f trivy-deep.json ]; then
    # Fallback to Trivy JSON if manifest not available.
    target_count=$(jq -r '[.Results[].Target] | unique | length' trivy-deep.json 2>/dev/null || echo 0)
    echo "*(${target_count} files scanned)*"
    jq -r '[.Results[].Target | split("/")[-1]] | unique | sort | .[]' trivy-deep.json 2>/dev/null | head -20 | sed 's/^/- /' || echo "- (no targets found)"
    if [ "$target_count" -gt 20 ]; then
      echo "- ... and $((target_count - 20)) more"
      echo ""
      echo "📎 *Full report available in workflow artifacts*"
    fi
  else
    echo "- (scan results not available)"
  fi
  echo ""
  echo "</details>"
  echo ""
} >> "$GITHUB_STEP_SUMMARY"
121 |
# Add detailed vulnerability tables, one per scanner, each emitted only when
# that scanner reported HIGH/CRITICAL findings and its JSON output exists.

if [ "$surface_vulns" -gt 0 ] && [ -f trivy-surface.json ]; then
  {
    echo "### 🔍 OS & Application Library Vulnerabilities"
    echo ""
    echo "| Package | NVD | GitHub Advisories | CVE Published | Trivy Severity | CVSS | Trivy Vendor Data | Installed | Fixed | Fix? |"
    echo "|---------|-----|-------------------|---------------|----------------|------|-----------------|-----------|-------|------|"
  } >> "$GITHUB_STEP_SUMMARY"

  # The jq program is built by interrupting the single-quoted string to splice
  # in ${JQ_VENDOR_FILTER} (defined in lib/vuln-filters.sh; binds $vendor).
  # CVSS prefers the NVD score, then Red Hat, then GHSA.  Output is capped at
  # 20 rows; a jq failure degrades to a single error row instead of aborting.
  jq -r '.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL") |
    .VulnerabilityID as $cve |
    (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
    '"${JQ_VENDOR_FILTER}"' |
    "| \(.PkgName) | [\($cve)](https://nvd.nist.gov/vuln/detail/\($cve)) | [Search](https://github.com/advisories?query=\($cve)) | \((.PublishedDate // "-") | split("T")[0]) | \(.Severity) | \($cvss) | \(if $vendor[2] != "" then "[\($vendor[0]):\($vendor[1])](\($vendor[2]))" else "\($vendor[0]):\($vendor[1])" end) | \(.InstalledVersion) | \(.FixedVersion // "-") | \(if (.FixedVersion // "") != "" then "✅" else "❌" end) |"' \
    trivy-surface.json 2>/dev/null | head -20 >> "$GITHUB_STEP_SUMMARY" || echo "| Error parsing results | - | - | - | - | - | - | - | - | - |" >> "$GITHUB_STEP_SUMMARY"

  echo "" >> "$GITHUB_STEP_SUMMARY"
fi

if [ "$deep_vulns" -gt 0 ] && [ -f trivy-deep.json ]; then
  {
    echo "### 🔎 Nested JAR Dependency Vulnerabilities"
    echo ""
    echo "| Parent JAR | Package | NVD | GitHub Advisories | CVE Published | Trivy Severity | CVSS | Trivy Vendor Data | Installed | Fixed | Fix? |"
    echo "|------------|---------|-----|-------------------|---------------|----------------|------|-----------------|-----------|-------|------|"
  } >> "$GITHUB_STEP_SUMMARY"

  # Process each vulnerability and look up parent JAR from mapping file.
  # Rows are first collected into a temp file then deduplicated, because the
  # same nested JAR can be reported under multiple targets.  ($$ makes the
  # temp file unique per process.)
  temp_table="/tmp/vuln-table-$$.txt"
  > "$temp_table" # Clear temp file

  # jq_trivy_deep_vulns (from lib/vuln-filters.sh) emits one '|'-separated
  # record per finding.  The while body runs in a pipeline subshell; that is
  # fine here since all output goes to $temp_table, not shell variables.
  jq_trivy_deep_vulns trivy-deep.json | while IFS='|' read -r target pkgpath pkg vuln cve_date severity vendor_sev vendor_url installed fixed has_fix cvss; do
    # Use PkgPath if available (contains JAR file path), otherwise use Target
    jar_path="${pkgpath:-$target}"

    # Extract JAR filename from path (handle both file paths and directory paths)
    if [[ "$jar_path" == *.jar ]]; then
      jar_file=$(basename "$jar_path" 2>/dev/null || echo "$jar_path")
    else
      # Path might be a directory containing a JAR; take the last *.jar
      # component that appears anywhere in the path.
      jar_file=$(echo "$jar_path" | grep -oE '[^/]+\.jar' | tail -1)
      if [ -z "$jar_file" ]; then
        jar_file=$(basename "$jar_path" 2>/dev/null || echo "$jar_path")
      fi
    fi

    # Look up parent JAR from mapping file; default when no mapping matches.
    parent_jar="(internal)"
    if [ -f "${EXTRACT_DIR}/jar-mapping.txt" ] && [ -n "$jar_file" ]; then
      # Fixed-string match; first matching line's first '|' field wins.
      # NOTE(review): -F matches substrings, so "a.jar" also matches
      # "extra-a.jar" lines — acceptable for display, but worth confirming.
      parent_match=$(grep -F "$jar_file" "${EXTRACT_DIR}/jar-mapping.txt" 2>/dev/null | cut -d'|' -f1 | tr -d ' ' | head -1)
      if [ -n "$parent_match" ]; then
        parent_jar="$parent_match"
      fi
    fi

    # format_vendor_display / format_fix_indicator come from vuln-filters.sh.
    vendor_display=$(format_vendor_display "$vendor_sev" "$vendor_url")
    fix_indicator=$(format_fix_indicator "$has_fix")
    echo "| $parent_jar | $pkg | [$vuln](https://nvd.nist.gov/vuln/detail/$vuln) | [Search](https://github.com/advisories?query=$vuln) | $cve_date | $severity | $cvss | $vendor_display | $installed | $fixed | $fix_indicator |" >> "$temp_table"
  done

  # Deduplicate and add to summary (limit to 40 entries)
  sort -u "$temp_table" | head -40 >> "$GITHUB_STEP_SUMMARY"
  rm -f "$temp_table"

  echo "" >> "$GITHUB_STEP_SUMMARY"
fi

if [ "$grype_vulns" -gt 0 ] && [ -f grype-results.json ]; then
  {
    echo "### 📋 Grype SBOM Scan Details"
    echo ""
    echo "| Package | NVD | GitHub Advisories | Grype Severity | CVSS | Installed | Fixed | Fix? |"
    echo "|---------|-----|-------------------|----------------|------|-----------|-------|------|"
  } >> "$GITHUB_STEP_SUMMARY"

  # Note: Grype JSON doesn't include CVE publish dates or vendor severity in the standard output.
  # Use suggestedVersion from matchDetails when available (filters to relevant version for installed package).
  # Extract CVSS from vulnerability.ratings[] - prefer NVD source.
  jq -r '.matches[]? | select(.vulnerability.severity == "High" or .vulnerability.severity == "Critical") |
    (.matchDetails[0].fix.suggestedVersion // .vulnerability.fix.versions[0] // "-") as $fixVersion |
    ((.vulnerability.ratings[]? | select(.source == "NVD" or .source == "nvd") | .score) // (.vulnerability.ratings[0]?.score) // "-") as $cvss |
    "| \(.artifact.name) | [\(.vulnerability.id)](https://nvd.nist.gov/vuln/detail/\(.vulnerability.id)) | [Search](https://github.com/advisories?query=\(.vulnerability.id)) | \(.vulnerability.severity) | \($cvss) | \(.artifact.version) | \($fixVersion) | \(if $fixVersion != "-" then "✅" else "❌" end) |"' \
    grype-results.json 2>/dev/null | head -20 >> "$GITHUB_STEP_SUMMARY" || echo "| Error parsing results | - | - | - | - | - | - | - |" >> "$GITHUB_STEP_SUMMARY"

  echo "" >> "$GITHUB_STEP_SUMMARY"
fi

echo "✅ GitHub Actions summary updated"
212 |
--------------------------------------------------------------------------------
/scripts/create-enhanced-report.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# create-enhanced-report.sh
#
# Creates an enhanced vulnerability report with parent JAR mapping for nested
# dependencies.  This script is specifically designed for the published-images
# scanning workflow.
#
# Usage:
#   create-enhanced-report.sh <image> <tag> [published]
#
# Arguments:
#   image:      Docker image name (e.g., liquibase/liquibase)
#   tag:        Image tag (e.g., 4.28.0)
#   published:  ISO 8601 timestamp of when the image tag was last updated (optional)
#
# Environment Variables:
#   EXTRACT_DIR:    Directory containing jar-mapping.txt (default: /tmp/extracted-deps)
#   surface_vulns:  Number of surface vulnerabilities (from previous step)
#   deep_vulns:     Number of deep vulnerabilities (from previous step)
#   grype_vulns:    Number of Grype vulnerabilities (from previous step)
#   total_vulns:    Total vulnerabilities (from previous step)
#
# Expected Input Files:
#   - trivy-deep.json: Trivy deep scan results
#   - ${EXTRACT_DIR}/jar-mapping.txt: Parent JAR mapping file
#
# Outputs:
#   - vulnerability-report-enhanced.md: Detailed report with JAR relationships

set -e

# Shared jq filters and formatting helpers live next to this script.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=lib/vuln-filters.sh
source "${SCRIPT_DIR}/lib/vuln-filters.sh"

# Positional arguments — image and tag are mandatory, publish date optional.
IMAGE="${1:?Error: Image name required}"
TAG="${2:?Error: Tag required}"
PUBLISHED="${3:-}"

# Reduce the ISO 8601 publish timestamp to its YYYY-MM-DD date part.
case "$PUBLISHED" in
  "" | unknown) PUBLISHED_DATE="unknown" ;;
  *)            PUBLISHED_DATE="${PUBLISHED%%T*}" ;;
esac

# Defaults for environment-provided values.
: "${EXTRACT_DIR:=/tmp/extracted-deps}"
: "${surface_vulns:=0}"
: "${deep_vulns:=0}"
: "${grype_vulns:=0}"
: "${total_vulns:=0}"
55 |
report_file="vulnerability-report-enhanced.md"

echo "📝 Creating enhanced vulnerability report for ${IMAGE}:${TAG}..."

# Create report header — note `>` truncates any previous report; all later
# sections append with `>>`.
{
  echo "# Enhanced Vulnerability Report"
  echo ""
  echo "**Image**: \`${IMAGE}:${TAG}\`"
  echo "**Image Last Updated**: ${PUBLISHED_DATE}"
  echo "**Scan Date**: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
  echo ""
  echo "## Summary"
  echo ""
  echo "| Scanner | HIGH/CRITICAL Vulnerabilities |"
  echo "|---------|-------------------------------|"
  echo "| OS & Application Libraries | ${surface_vulns} |"
  echo "| Nested JAR Dependencies | ${deep_vulns} |"
  echo "| Grype (SBOM-based) | ${grype_vulns} |"
  echo "| **Total** | **${total_vulns}** |"
  echo ""
  # Celebrate the clean case explicitly so an empty report is not ambiguous.
  if [ "$total_vulns" -eq 0 ]; then
    echo "**No HIGH/CRITICAL vulnerabilities found.**"
    echo ""
  fi
} > "$report_file"

# Add scan targets section listing everything the scanners looked at.
{
  echo "## Scan Targets"
  echo ""
  echo "### OS & Application Libraries"
  if [ -f trivy-surface.json ]; then
    # Unique Trivy targets (OS package DBs, lockfiles, …) as a bullet list.
    jq -r '[.Results[].Target] | unique | .[]' trivy-surface.json 2>/dev/null | sed 's/^/- /' || echo "- (no targets found)"
  else
    echo "- (scan results not available)"
  fi
  echo ""
  echo "### Nested JAR Dependencies"
  if [ -f "${EXTRACT_DIR}/scanned-jars.txt" ]; then
    # Prefer the manifest written by the extraction step (full list, no cap).
    jar_count=$(wc -l < "${EXTRACT_DIR}/scanned-jars.txt" | tr -d ' ')
    echo "*(${jar_count} JAR files scanned)*"
    echo ""
    cat "${EXTRACT_DIR}/scanned-jars.txt" | sed 's/^/- /'
  elif [ -f trivy-deep.json ]; then
    # Fallback to Trivy JSON if manifest not available
    target_count=$(jq -r '[.Results[].Target] | unique | length' trivy-deep.json 2>/dev/null || echo 0)
    echo "*(${target_count} files scanned)*"
    echo ""
    jq -r '[.Results[].Target | split("/")[-1]] | unique | sort | .[]' trivy-deep.json 2>/dev/null | sed 's/^/- /' || echo "- (no targets found)"
  else
    echo "- (scan results not available)"
  fi
  echo ""
} >> "$report_file"

# Add OS & Application Library vulnerabilities (only if found).
if [ "$surface_vulns" -gt 0 ] && [ -f trivy-surface.json ]; then
  {
    echo "## OS & Application Library Vulnerabilities"
    echo ""
    echo "| Package | NVD | GitHub Advisories | CVE Published | Trivy Severity | CVSS | Trivy Vendor Data | Installed | Fixed | Fix? |"
    echo "|---------|-----|-------------------|---------------|----------------|------|-----------------|-----------|-------|------|"
  } >> "$report_file"

  # jq_trivy_surface_vulns and the two format_* helpers come from
  # lib/vuln-filters.sh; each record is one '|'-separated finding.
  jq_trivy_surface_vulns trivy-surface.json | while IFS='|' read -r pkg vuln cve_date severity vendor_sev vendor_url installed fixed has_fix cvss; do
    vendor_display=$(format_vendor_display "$vendor_sev" "$vendor_url")
    fix_indicator=$(format_fix_indicator "$has_fix")
    echo "| $pkg | [$vuln](https://nvd.nist.gov/vuln/detail/$vuln) | [Search](https://github.com/advisories?query=$vuln) | $cve_date | $severity | $cvss | $vendor_display | $installed | $fixed | $fix_indicator |" >> "$report_file"
  done

  echo "" >> "$report_file"
fi
129 |
# Add parent JAR mapping section (only if nested vulnerabilities were found):
# a verbatim, deduplicated dump of the extraction step's mapping file showing
# which Liquibase JARs contain which nested dependencies.
if [ -f "${EXTRACT_DIR}/jar-mapping.txt" ] && [ "$deep_vulns" -gt 0 ]; then
  {
    echo "## Parent JAR Relationships"
    echo ""
    echo "The following shows which Liquibase JARs contain vulnerable nested dependencies:"
    echo ""
    echo "\`\`\`"
    # `sort -u` replaces the previous `cat … | sort | uniq` (useless cat plus
    # a redundant uniq); output is identical.
    sort -u "${EXTRACT_DIR}/jar-mapping.txt"
    echo "\`\`\`"
    echo ""
  } >> "$report_file"
fi
143 |
# Add detailed vulnerability table with parent JAR context (only if nested
# vulnerabilities were found).
if [ -f trivy-deep.json ] && [ "$deep_vulns" -gt 0 ]; then
  {
    echo "## Detailed Vulnerability Analysis"
    echo ""
    echo "### Nested JAR Vulnerabilities"
    echo ""
    echo "| Parent JAR | Nested JAR | NVD | GitHub Advisories | CVE Published | Trivy Severity | CVSS | Trivy Vendor Data | Installed | Fixed | Fix? |"
    echo "|------------|------------|-----|-------------------|---------------|----------------|------|-----------------|-----------|-------|------|"
  } >> "$report_file"

  # Process each vulnerability and match with its parent JAR.  The while body
  # runs in a pipeline subshell — fine here, since rows go straight to the
  # report file rather than into shell variables.
  jq_trivy_deep_vulns trivy-deep.json | while IFS='|' read -r target pkgpath pkg vuln cve_date severity vendor_sev vendor_url installed fixed has_fix cvss; do
    # Use PkgPath if available (contains JAR file path), otherwise use Target
    jar_path="${pkgpath:-$target}"

    # Extract JAR filename from path
    if [[ "$jar_path" == *.jar ]]; then
      jar_file=$(basename "$jar_path" 2>/dev/null || echo "$jar_path")
    else
      jar_file=$(basename "$target" 2>/dev/null || echo "$target")
    fi

    # Find parent JAR from mapping file.  The grep|cut|tr|head pipeline ends
    # in head, so a non-matching grep cannot trip `set -e`; "(internal)" is
    # the default when no mapping line matches, "(unknown)" when the mapping
    # file itself is missing.
    if [ -f "${EXTRACT_DIR}/jar-mapping.txt" ]; then
      parent_jar=$(grep -F "$jar_file" "${EXTRACT_DIR}/jar-mapping.txt" | cut -d'|' -f1 | tr -d ' ' | head -1)
      if [ -z "$parent_jar" ]; then
        parent_jar="(internal)"
      fi
    else
      parent_jar="(unknown)"
    fi

    # format_vendor_display / format_fix_indicator come from vuln-filters.sh.
    vendor_display=$(format_vendor_display "$vendor_sev" "$vendor_url")
    fix_indicator=$(format_fix_indicator "$has_fix")
    echo "| $parent_jar | $jar_file | [$vuln](https://nvd.nist.gov/vuln/detail/$vuln) | [Search](https://github.com/advisories?query=$vuln) | $cve_date | $severity | $cvss | $vendor_display | $installed | $fixed | $fix_indicator |" >> "$report_file"
  done

  echo "" >> "$report_file"
fi

# Add Python vulnerabilities detected among python-pkg results in the deep
# scan (these come from extension JARs' embedded GraalVM Python VFS).
if [ -f trivy-deep.json ]; then
  python_vulns=$(jq '[.Results[]? | select(.Type == "python-pkg") | .Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL")] | length' trivy-deep.json 2>/dev/null || echo 0)

  if [ "$python_vulns" -gt 0 ]; then
    {
      echo "### Python Package Vulnerabilities"
      echo ""
      echo "These are found in extension JARs (GraalVM Python VFS)"
      echo ""
      echo "| Package | NVD | GitHub Advisories | CVE Published | Trivy Severity | CVSS | Trivy Vendor Data | Installed | Fixed | Fix? |"
      echo "|---------|-----|-------------------|---------------|----------------|------|-----------------|-----------|-------|------|"
    } >> "$report_file"

    # Same record layout as the surface-vuln helper, restricted to python-pkg.
    jq_trivy_python_vulns trivy-deep.json | while IFS='|' read -r pkg vuln cve_date severity vendor_sev vendor_url installed fixed has_fix cvss; do
      vendor_display=$(format_vendor_display "$vendor_sev" "$vendor_url")
      fix_indicator=$(format_fix_indicator "$has_fix")
      echo "| $pkg | [$vuln](https://nvd.nist.gov/vuln/detail/$vuln) | [Search](https://github.com/advisories?query=$vuln) | $cve_date | $severity | $cvss | $vendor_display | $installed | $fixed | $fix_indicator |" >> "$report_file"
    done

    echo "" >> "$report_file"
  fi
fi
208 |
209 | # Add Grype SBOM scan vulnerabilities (only if found)
210 | if [ "$grype_vulns" -gt 0 ] && [ -f grype-results.json ]; then
211 | {
212 | echo "## Grype SBOM Scan Details"
213 | echo ""
214 | echo "| Package | NVD | GitHub Advisories | Grype Severity | CVSS | Installed | Fixed | Fix? |"
215 | echo "|---------|-----|-------------------|----------------|------|-----------|-------|------|"
216 | } >> "$report_file"
217 |
218 | # Use suggestedVersion from matchDetails when available (filters to relevant version for installed package)
219 | # Extract CVSS from vulnerability.ratings[] - prefer NVD source
220 | jq -r '.matches[]? | select(.vulnerability.severity == "High" or .vulnerability.severity == "Critical") |
221 | (.matchDetails[0].fix.suggestedVersion // .vulnerability.fix.versions[0] // "-") as $fixVersion |
222 | ((.vulnerability.ratings[]? | select(.source == "NVD" or .source == "nvd") | .score) // (.vulnerability.ratings[0]?.score) // "-") as $cvss |
223 | "\(.artifact.name)|\(.vulnerability.id)|\(.vulnerability.severity)|\($cvss)|\(.artifact.version)|\($fixVersion)|\(if $fixVersion != "-" then "Y" else "N" end)"' \
224 | grype-results.json 2>/dev/null | while IFS='|' read -r pkg vuln severity cvss installed fixed has_fix; do
225 | fix_indicator=$(format_fix_indicator "$has_fix")
226 | echo "| $pkg | [$vuln](https://nvd.nist.gov/vuln/detail/$vuln) | [Search](https://github.com/advisories?query=$vuln) | $severity | $cvss | $installed | $fixed | $fix_indicator |" >> "$report_file"
227 | done
228 |
229 | echo "" >> "$report_file"
230 | fi
231 |
232 | echo "✓ Enhanced vulnerability report created: $report_file"
233 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/scripts/README.md:
--------------------------------------------------------------------------------
1 | # Vulnerability Scanning Scripts
2 |
3 | This directory contains shell scripts extracted from GitHub Actions workflows for vulnerability scanning. These scripts are versioned, testable, and reusable across multiple workflows.
4 |
5 | > **For Support & Sales:** See [SECURITY.md](../SECURITY.md) for a guide on understanding vulnerability reports, terminology definitions, and how to interpret scan results.
6 |
7 | ## Overview
8 |
9 | The scripts handle various aspects of Docker image vulnerability scanning:
10 |
11 | - **Extraction**: Extracting nested JARs and Python packages from Docker images
12 | - **Analysis**: Analyzing and combining scan results from multiple scanners
13 | - **Reporting**: Generating detailed vulnerability reports
14 | - **Utilities**: Common operations like file checking and result conversion
15 |
16 | ## Scripts
17 |
18 | ### Core Scanning Scripts
19 |
20 | #### `extract-nested-deps.sh`
21 |
22 | Extracts nested JARs and Python packages from Liquibase Docker images for deep vulnerability scanning.
23 |
24 | **Usage:**
25 | ```bash
26 | ./extract-nested-deps.sh <image_ref>
27 | ```
28 |
29 | **Arguments:**
30 | - `image_ref`: Docker image reference (e.g., `liquibase/liquibase:latest`)
31 |
32 | **Environment Variables:**
33 | - `EXTRACT_DIR`: Base directory for extraction (default: `/tmp/extracted-deps`)
34 |
35 | **Outputs:**
36 | - Extracted JAR files in `${EXTRACT_DIR}/internal-jars/{lib,extensions}`
37 | - Nested JARs from archives in `${EXTRACT_DIR}/dist/`
38 | - Python packages in `${EXTRACT_DIR}/python-packages`
39 | - JAR mapping file in `${EXTRACT_DIR}/jar-mapping.txt`
40 |
41 | **Example:**
42 | ```bash
43 | # Extract from local image
44 | ./extract-nested-deps.sh liquibase/liquibase:latest
45 |
46 | # Extract from image with SHA
47 | ./extract-nested-deps.sh liquibase/liquibase:abc123
48 | ```
49 |
50 | ---
51 |
52 | #### `analyze-scan-results.sh`
53 |
54 | Analyzes and combines vulnerability scan results from Trivy and Grype scanners.
55 |
56 | **Usage:**
57 | ```bash
58 | IMAGE_NAME="liquibase/liquibase" IMAGE_SUFFIX="-alpine" ./analyze-scan-results.sh
59 | ```
60 |
61 | **Environment Variables:**
62 | - `EXTRACT_DIR`: Directory containing `jar-mapping.txt` (default: `/tmp/extracted-deps`)
63 | - `IMAGE_NAME`: Name of the image being scanned
64 | - `IMAGE_SUFFIX`: Suffix for the image variant (e.g., `-alpine`)
65 | - `GITHUB_STEP_SUMMARY`: GitHub Actions summary file path (optional)
66 | - `GITHUB_SHA`: Git commit SHA (optional)
67 |
68 | **Expected Input Files:**
69 | - `trivy-surface.json`: Trivy surface scan results
70 | - `trivy-deep.json`: Trivy deep scan results
71 | - `grype-results.sarif`: Grype SARIF results
72 |
73 | **Outputs:**
74 | - `vulnerability-report-enhanced.md`: Detailed vulnerability report
75 | - `scan-summary.txt`: Summary of scan results
76 | - Exit code 1 if vulnerabilities found, 0 otherwise
77 | - GitHub Actions step summary (if `GITHUB_STEP_SUMMARY` is set)
78 |
79 | ---
80 |
81 | #### `convert-scan-results.sh`
82 |
83 | Converts Trivy JSON scan results to SARIF format and counts vulnerabilities.
84 |
85 | **Usage:**
86 | ```bash
87 | ./convert-scan-results.sh
88 | ```
89 |
90 | **Requirements:**
91 | - Trivy CLI must be installed
92 |
93 | **Expected Input Files:**
94 | - `trivy-surface.json`: Trivy surface scan results (optional)
95 | - `trivy-deep.json`: Trivy deep scan results (optional)
96 | - `grype-results.json`: Grype JSON results (optional)
97 |
98 | **Outputs:**
99 | - `trivy-surface.sarif`: Converted SARIF format
100 | - `trivy-deep.sarif`: Converted SARIF format
101 | - Environment variables (if `GITHUB_ENV` is set): `surface_vulns`, `deep_vulns`, `grype_vulns`, `total_vulns`
102 |
103 | **Example:**
104 | ```bash
105 | # Convert scan results after running Trivy
106 | trivy image --format json --output trivy-surface.json liquibase/liquibase:latest
107 | ./convert-scan-results.sh
108 | ```
109 |
110 | ---
111 |
112 | ### Reporting Scripts
113 |
114 | #### `create-enhanced-report.sh`
115 |
116 | Creates an enhanced vulnerability report with parent JAR mapping for nested dependencies.
117 |
118 | **Usage:**
119 | ```bash
120 | ./create-enhanced-report.sh <image> <tag>
121 | ```
122 |
123 | **Arguments:**
124 | - `image`: Docker image name (e.g., `liquibase/liquibase`)
125 | - `tag`: Image tag (e.g., `4.28.0`)
126 |
127 | **Environment Variables:**
128 | - `EXTRACT_DIR`: Directory containing `jar-mapping.txt` (default: `/tmp/extracted-deps`)
129 | - `surface_vulns`: Number of surface vulnerabilities
130 | - `deep_vulns`: Number of deep vulnerabilities
131 | - `grype_vulns`: Number of Grype vulnerabilities
132 | - `total_vulns`: Total vulnerabilities
133 |
134 | **Expected Input Files:**
135 | - `trivy-deep.json`: Trivy deep scan results
136 | - `${EXTRACT_DIR}/jar-mapping.txt`: Parent JAR mapping file
137 |
138 | **Outputs:**
139 | - `vulnerability-report-enhanced.md`: Detailed report with JAR relationships
140 |
141 | ---
142 |
143 | #### `append-github-summary.sh`
144 |
145 | Appends detailed vulnerability information to GitHub Actions step summary.
146 |
147 | **Usage:**
148 | ```bash
149 | ./append-github-summary.sh <image> <tag>
150 | ```
151 |
152 | **Arguments:**
153 | - `image`: Docker image name (e.g., `liquibase/liquibase`)
154 | - `tag`: Image tag (e.g., `4.28.0`)
155 |
156 | **Environment Variables:**
157 | - `EXTRACT_DIR`: Directory containing `jar-mapping.txt` (default: `/tmp/extracted-deps`)
158 | - `surface_vulns`: Number of surface vulnerabilities
159 | - `deep_vulns`: Number of deep vulnerabilities
160 | - `grype_vulns`: Number of Grype vulnerabilities
161 | - `total_vulns`: Total vulnerabilities
162 | - `GITHUB_STEP_SUMMARY`: GitHub Actions summary file path (required)
163 |
164 | **Expected Input Files:**
165 | - `trivy-surface.json`: Trivy surface scan results
166 | - `trivy-deep.json`: Trivy deep scan results
167 | - `grype-results.json`: Grype JSON results
168 |
169 | **Note:** This script only runs in GitHub Actions environment.
170 |
171 | ---
172 |
173 | ### Utility Scripts
174 |
175 | #### `generate-dockerhub-matrix.sh`
176 |
177 | Generates a JSON matrix of Docker images and tags to scan from Docker Hub.
178 |
179 | **Usage:**
180 | ```bash
181 | ./generate-dockerhub-matrix.sh [max_tags]
182 | ```
183 |
184 | **Arguments:**
185 | - `max_tags`: Maximum number of tags to scan per image (default: 10)
186 |
187 | **Environment Variables:**
188 | - `MAX_TAGS`: Maximum tags per image (overrides argument)
189 | - `GITHUB_OUTPUT`: GitHub Actions output file path (optional)
190 |
191 | **Outputs:**
192 | - JSON matrix written to stdout and `$GITHUB_OUTPUT` if available
193 | - Format: `{"include":[{"image":"...","tag":"..."}]}`
194 |
195 | **Example:**
196 | ```bash
197 | # Generate matrix for 5 most recent tags
198 | ./generate-dockerhub-matrix.sh 5
199 |
200 | # Use in GitHub Actions
201 | MAX_TAGS=10 ./generate-dockerhub-matrix.sh
202 | ```
203 |
204 | ---
205 |
206 | #### `save-grype-results.sh`
207 |
208 | Locates and saves Grype scan results to a consistent filename.
209 |
210 | **Usage:**
211 | ```bash
212 | ./save-grype-results.sh [output_filename]
213 | ```
214 |
215 | **Arguments:**
216 | - `output_filename`: Desired output filename (default: `grype-results.sarif` or `grype-results.json`)
217 |
218 | **Environment Variables:**
219 | - `GRYPE_OUTPUT_FORMAT`: Output format - `sarif` or `json` (default: `sarif`)
220 |
221 | **Outputs:**
222 | - Grype results saved to specified filename
223 | - Exit code 0 on success, 1 if no results found
224 |
225 | **Example:**
226 | ```bash
227 | # Save SARIF results
228 | GRYPE_OUTPUT_FORMAT=sarif ./save-grype-results.sh
229 |
230 | # Save JSON results with custom name
231 | GRYPE_OUTPUT_FORMAT=json ./save-grype-results.sh my-grype-results.json
232 | ```
233 |
234 | ---
235 |
236 | #### `check-file-exists.sh`
237 |
238 | Checks if a file exists and optionally sets GitHub Actions output.
239 |
240 | **Usage:**
241 | ```bash
242 | ./check-file-exists.sh <filename> [output_name]
243 | ```
244 |
245 | **Arguments:**
246 | - `filename`: Path to the file to check
247 | - `output_name`: Name for GitHub Actions output variable (default: `exists`)
248 |
249 | **Environment Variables:**
250 | - `GITHUB_OUTPUT`: GitHub Actions output file path (optional)
251 |
252 | **Outputs:**
253 | - GitHub Actions output: `<output_name>=true` or `<output_name>=false`
254 | - Exit code 0 (always succeeds)
255 |
256 | **Example:**
257 | ```bash
258 | # Check if SARIF file exists
259 | ./check-file-exists.sh trivy-deep.sarif
260 |
261 | # In GitHub Actions workflow
262 | ./check-file-exists.sh grype-results.sarif grype_exists
263 | # Sets output: grype_exists=true or grype_exists=false
264 | ```
265 |
266 | ---
267 |
268 | ## Workflow Integration
269 |
270 | ### Example: Using in trivy.yml workflow
271 |
272 | ```yaml
273 | - name: Extract nested JARs and Python packages
274 | run: |
275 | scripts/extract-nested-deps.sh ${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}
276 |
277 | - name: Analyze scan results
278 | if: always()
279 | env:
280 | IMAGE_NAME: ${{ matrix.image.name }}
281 | IMAGE_SUFFIX: ${{ matrix.image.suffix }}
282 | run: |
283 | scripts/analyze-scan-results.sh
284 | ```
285 |
286 | ### Example: Using in trivy-scan-published-images.yml workflow
287 |
288 | ```yaml
289 | - name: Generate scan matrix
290 | id: set-matrix
291 | run: |
292 | MATRIX=$(scripts/generate-dockerhub-matrix.sh 10)
293 | echo "matrix=$MATRIX" >> $GITHUB_OUTPUT
294 |
295 | - name: Extract nested dependencies
296 | run: |
297 | scripts/extract-nested-deps.sh ${{ matrix.image }}:${{ matrix.tag }}
298 |
299 | - name: Convert and analyze results
300 | run: |
301 | scripts/convert-scan-results.sh
302 |
303 | - name: Create enhanced report
304 | env:
305 | surface_vulns: ${{ env.surface_vulns }}
306 | deep_vulns: ${{ env.deep_vulns }}
307 | grype_vulns: ${{ env.grype_vulns }}
308 | total_vulns: ${{ env.total_vulns }}
309 | run: |
310 | scripts/create-enhanced-report.sh ${{ matrix.image }} ${{ matrix.tag }}
311 |
312 | - name: Append to GitHub summary
313 | run: |
314 | scripts/append-github-summary.sh ${{ matrix.image }} ${{ matrix.tag }}
315 | ```
316 |
317 | ## Testing Scripts Locally
318 |
319 | All scripts can be tested locally outside of GitHub Actions:
320 |
321 | ```bash
322 | # Make scripts executable
323 | chmod +x scripts/*.sh
324 |
325 | # Test extraction
326 | docker build -t test-image:latest .
327 | scripts/extract-nested-deps.sh test-image:latest
328 |
329 | # Test report generation (create dummy scan results first)
330 | echo '{"Results":[]}' > trivy-surface.json
331 | echo '{"Results":[]}' > trivy-deep.json
332 | IMAGE_NAME="test-image" scripts/analyze-scan-results.sh
333 |
334 | # Test matrix generation
335 | scripts/generate-dockerhub-matrix.sh 5
336 | ```
337 |
338 | ## Dependencies
339 |
340 | ### Required Tools
341 |
342 | - **bash**: Shell interpreter (version 4.0+)
343 | - **jq**: JSON processor
344 | - **docker**: For image extraction operations
345 | - **curl**: For Docker Hub API access (matrix generation)
346 | - **trivy**: For SARIF conversion (convert-scan-results.sh only)
347 |
348 | ### Optional Tools
349 |
350 | - **unzip**: For JAR extraction (usually pre-installed)
351 | - **tar**: For archive extraction (usually pre-installed)
352 |
353 | ## Error Handling
354 |
355 | All scripts use appropriate error handling:
356 |
357 | - Scripts that should fail on errors use `set -e`
358 | - Analysis scripts use `set +e` to collect all results before exiting
359 | - Utility scripts always exit with code 0 to avoid breaking workflows
360 | - Missing files and tools are reported with clear error messages
361 |
362 | ## File Permissions
363 |
364 | After cloning the repository, make all scripts executable:
365 |
366 | ```bash
367 | chmod +x scripts/*.sh
368 | ```
369 |
370 | Or use git to track executable permissions:
371 |
372 | ```bash
373 | git update-index --chmod=+x scripts/*.sh
374 | ```
375 |
376 | ## Contributing
377 |
378 | When adding or modifying scripts:
379 |
380 | 1. **Add header comments**: Include purpose, usage, arguments, and outputs
381 | 2. **Use environment variables**: Make scripts configurable via environment
382 | 3. **Handle errors gracefully**: Don't fail workflows unnecessarily
383 | 4. **Test locally**: Verify scripts work outside GitHub Actions
384 | 5. **Update this README**: Document new scripts and changes
385 | 6. **Follow naming conventions**: Use descriptive, kebab-case names
386 |
387 | ## Migration Notes
388 |
389 | These scripts were extracted from inline shell code in:
390 | - `.github/workflows/trivy.yml`
391 | - `.github/workflows/trivy-scan-published-images.yml`
392 |
393 | Benefits of extraction:
394 | - ✅ Version control for scanning logic
395 | - ✅ Easier to test and debug
396 | - ✅ Reusable across workflows
397 | - ✅ Smaller, more readable workflow files
398 | - ✅ Consistent behavior between workflows
--------------------------------------------------------------------------------
/scripts/analyze-scan-results.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # analyze-scan-results.sh
4 | #
5 | # Analyzes and combines vulnerability scan results from multiple scanners (Trivy, Grype).
6 | # Generates detailed reports and determines overall scan status.
7 | #
8 | # Usage:
9 | # analyze-scan-results.sh
10 | #
11 | # Environment Variables:
12 | # EXTRACT_DIR: Directory containing jar-mapping.txt (default: /tmp/extracted-deps)
13 | # IMAGE_NAME: Name of the image being scanned
14 | # IMAGE_SUFFIX: Suffix for the image variant (e.g., -alpine)
15 | #
16 | # Expected Input Files:
17 | # - trivy-surface.json: Trivy surface scan results
18 | # - trivy-deep.json: Trivy deep scan results
19 | # - grype-results.sarif: Grype SARIF results
20 | #
21 | # Outputs:
22 | # - vulnerability-report-enhanced.md: Detailed vulnerability report
23 | # - scan-summary.txt: Summary of scan results
24 | # - Exit code 1 if vulnerabilities found, 0 otherwise
25 |
26 | set +e # Don't fail immediately - we want to collect all results
27 |
28 | EXTRACT_DIR="${EXTRACT_DIR:-/tmp/extracted-deps}"
29 | IMAGE_NAME="${IMAGE_NAME:-unknown}"
30 | IMAGE_SUFFIX="${IMAGE_SUFFIX:-}"
31 |
32 | echo "🔍 Analyzing scan results..."
33 | echo ""
34 | echo "Available scan result files:"
35 | ls -lh *.sarif *.json 2>/dev/null || echo "No scan result files found"
36 | echo ""
37 |
38 | # Count vulnerabilities from each scanner (using JSON for accuracy)
39 | surface_vulns=0
40 | deep_vulns=0
41 | grype_vulns=0
42 |
43 | if [ -f trivy-surface.json ]; then
44 | surface_vulns=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL")] | length' trivy-surface.json 2>/dev/null || echo 0)
45 | echo "✓ Trivy Surface Scan: $surface_vulns HIGH/CRITICAL vulnerabilities"
46 | else
47 | echo "⚠ Trivy Surface Scan: JSON file not found"
48 | fi
49 |
50 | if [ -f trivy-deep.json ]; then
51 | deep_vulns=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL")] | length' trivy-deep.json 2>/dev/null || echo 0)
52 | echo "✓ Trivy Deep Scan: $deep_vulns HIGH/CRITICAL vulnerabilities"
53 | else
54 | echo "⚠ Trivy Deep Scan: JSON file not found"
55 | fi
56 |
57 | if [ -f grype-results.sarif ]; then
58 | grype_vulns=$(jq '[.runs[].results[]? | select(.level == "error" or .level == "warning")] | length' grype-results.sarif 2>/dev/null || echo 0)
59 | echo "✓ Grype SBOM Scan: $grype_vulns HIGH/CRITICAL vulnerabilities"
60 | else
61 | echo "⚠ Grype SBOM Scan: SARIF file not found (scan may have failed or SBOM was empty)"
62 | fi
63 |
64 | total_vulns=$((surface_vulns + deep_vulns + grype_vulns))
65 | echo ""
66 | echo "📊 Total HIGH/CRITICAL vulnerabilities found: $total_vulns"
67 |
# Create GitHub Actions Summary if running in GitHub Actions.
# GITHUB_STEP_SUMMARY is the summary-file path provided by the Actions runner;
# it is unset when the script runs locally, so this whole section is skipped.
if [ -n "${GITHUB_STEP_SUMMARY:-}" ]; then
  {
    echo "## 🛡️ Vulnerability Scan Results for \`${IMAGE_NAME}${IMAGE_SUFFIX}\`"
    echo ""
    echo "**Total HIGH/CRITICAL Vulnerabilities: $total_vulns**"
    echo ""
    echo "| Scanner | Vulnerabilities | Status |"
    echo "|---------|-----------------|--------|"
    echo "| 🔍 Trivy Surface (OS + Top-level) | $surface_vulns | $([ $surface_vulns -eq 0 ] && echo '✅' || echo '⚠️') |"
    echo "| 🔎 Trivy Deep (Nested JARs + Python) | $deep_vulns | $([ $deep_vulns -eq 0 ] && echo '✅' || echo '⚠️') |"
    echo "| 📋 Grype (SBOM-based) | $grype_vulns | $([ $grype_vulns -eq 0 ] && echo '✅' || echo '⚠️') |"
    echo ""
  } >> "$GITHUB_STEP_SUMMARY"

  # Add detailed vulnerability tables using JSON format (more reliable than SARIF)
  if [ $surface_vulns -gt 0 ] && [ -f trivy-surface.json ]; then
    {
      echo "### 🔍 Trivy Surface Scan Details"
      echo ""
      echo "| Package | Vulnerability | Severity | CVSS | Installed | Fixed |"
      echo "|---------|---------------|----------|------|-----------|-------|"
    } >> "$GITHUB_STEP_SUMMARY"

    # One markdown row per HIGH/CRITICAL finding; CVSS score falls back
    # nvd -> redhat -> ghsa -> "-". head -20 caps the table so the job
    # summary stays readable.
    jq -r '.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL") |
      (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
      "| \(.PkgName) | \(.VulnerabilityID) | \(.Severity) | \($cvss) | \(.InstalledVersion) | \(.FixedVersion // "N/A") |"' \
      trivy-surface.json 2>/dev/null | head -20 >> "$GITHUB_STEP_SUMMARY" || echo "| Error parsing results | - | - | - | - | - |" >> "$GITHUB_STEP_SUMMARY"

    echo "" >> "$GITHUB_STEP_SUMMARY"
  fi

  if [ $deep_vulns -gt 0 ] && [ -f trivy-deep.json ]; then
    {
      echo "### 🔎 Trivy Deep Scan Details (Nested JARs & Python)"
      echo ""
      echo "| Parent JAR | Package | Vulnerability | Severity | CVSS | Installed | Fixed |"
      echo "|------------|---------|---------------|----------|------|-----------|-------|"
    } >> "$GITHUB_STEP_SUMMARY"

    # Process each vulnerability and look up parent JAR from mapping file
    # First, collect all rows into a temp file, then deduplicate
    # ($$ is the shell PID, keeping the temp file unique per concurrent run)
    temp_table="/tmp/vuln-table-$$.txt"
    > "$temp_table" # Clear temp file

    # Pipe-delimited records: target|pkgpath|pkg|vuln|severity|cvss|installed|fixed
    jq -r '.Results[]? | .Target as $target | .Vulnerabilities[]? |
      select(.Severity == "HIGH" or .Severity == "CRITICAL") |
      (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
      "\($target)|\(.PkgPath // "")|\(.PkgName)|\(.VulnerabilityID)|\(.Severity)|\($cvss)|\(.InstalledVersion)|\(.FixedVersion // "N/A")"' \
      trivy-deep.json 2>/dev/null | while IFS='|' read -r target pkgpath pkg vuln severity cvss installed fixed; do

      # Use PkgPath if available (contains JAR file path), otherwise use Target
      jar_path="${pkgpath:-$target}"

      # Extract JAR filename from path (handle both file paths and directory paths)
      if [[ "$jar_path" == *.jar ]]; then
        jar_file=$(basename "$jar_path" 2>/dev/null || echo "$jar_path")
      else
        # Path might be a directory containing a JAR, extract JAR name from path
        jar_file=$(echo "$jar_path" | grep -oE '[^/]+\.jar' | tail -1)
        if [ -z "$jar_file" ]; then
          jar_file=$(basename "$jar_path" 2>/dev/null || echo "$jar_path")
        fi
      fi

      # Look up parent JAR from mapping file
      parent_jar="(internal)"
      if [ -f "${EXTRACT_DIR}/jar-mapping.txt" ] && [ -n "$jar_file" ]; then
        # Try exact match first
        parent_match=$(grep -F "$jar_file" "${EXTRACT_DIR}/jar-mapping.txt" 2>/dev/null | cut -d'|' -f1 | tr -d ' ' | head -1)
        if [ -n "$parent_match" ]; then
          parent_jar="$parent_match"
        fi
      fi

      echo "| $parent_jar | $pkg | $vuln | $severity | $cvss | $installed | $fixed |" >> "$temp_table"
    done

    # Deduplicate and add to summary (limit to 40 entries)
    sort -u "$temp_table" | head -40 >> "$GITHUB_STEP_SUMMARY"
    rm -f "$temp_table"

    echo "" >> "$GITHUB_STEP_SUMMARY"
  fi

  if [ $grype_vulns -gt 0 ] && [ -f grype-results.sarif ]; then
    {
      echo "### 📋 Grype SBOM Scan Details"
      echo ""
      echo "| Package | Vulnerability | Severity | CVSS | Installed | Fixed |"
      echo "|---------|---------------|----------|------|-----------|-------|"
    } >> "$GITHUB_STEP_SUMMARY"

    # Grype SARIF has different structure - CVSS not directly available in SARIF, show "-"
    jq -r '.runs[].results[] |
      (.ruleId // "N/A") as $cve |
      (try (.properties.packageName // .locations[0].logicalLocations[0].name) // "N/A") as $pkg |
      (.level // "unknown") as $severity |
      (try (.properties.installedVersion // "N/A") catch "N/A") as $installed |
      (try (.properties.fixedVersion // "N/A") catch "N/A") as $fixed |
      "| \($pkg) | \($cve) | \($severity | ascii_upcase) | - | \($installed) | \($fixed) |"' \
      grype-results.sarif 2>/dev/null | head -20 >> "$GITHUB_STEP_SUMMARY" || echo "| Error parsing results | - | - | - | - | - |" >> "$GITHUB_STEP_SUMMARY"

    echo "" >> "$GITHUB_STEP_SUMMARY"
  fi

  # Add scanner information
  {
    echo "---"
    echo ""
    echo "### 📖 Scanner Information"
    echo ""
    echo "- **Trivy Surface**: Scans OS packages and top-level libraries"
    echo "- **Trivy Deep**: Extracts and scans nested Spring Boot JARs (BOOT-INF/lib) and GraalVM Python packages"
    echo "- **Grype**: SBOM-based validation for comprehensive dependency analysis"
    echo ""
    echo "💡 **Note**: Deep scan detects vulnerabilities in nested dependencies that standard scans miss."
  } >> "$GITHUB_STEP_SUMMARY"
fi
187 |
188 | # Create combined summary file
189 | cat > scan-summary.txt < "$report_file"
226 |
# Add parent JAR mapping section: embed the nested-JAR -> parent-JAR mapping
# (written by the extraction step to ${EXTRACT_DIR}/jar-mapping.txt) into the
# markdown report, if it exists.
if [ -f "${EXTRACT_DIR}/jar-mapping.txt" ]; then
    {
        echo "## Parent JAR Relationships"
        echo ""
        echo "The following shows which Liquibase JARs contain vulnerable nested dependencies:"
        echo ""
        echo "\`\`\`"
        # sort -u: sorted, deduplicated output in one step (replaces the
        # former useless-use-of-cat `cat file | sort | uniq` pipeline).
        sort -u "${EXTRACT_DIR}/jar-mapping.txt"
        echo "\`\`\`"
        echo ""
    } >> "$report_file"
fi
240 |
# Add detailed vulnerability table with parent JAR context
if [ -f trivy-deep.json ]; then
    {
        echo "## Detailed Vulnerability Analysis"
        echo ""
        echo "### Nested JAR Vulnerabilities"
        echo ""
        echo "| Parent JAR | Nested JAR | Vulnerability | Severity | CVSS | Installed | Fixed |"
        echo "|------------|------------|---------------|----------|------|-----------|-------|"
    } >> "$report_file"

    # Process each vulnerability and match with parent JAR.
    # jq emits one pipe-delimited record per HIGH/CRITICAL finding; the CVSS
    # score falls back nvd -> redhat -> ghsa -> "-".
    jq -r '.Results[]? | .Target as $target | .Vulnerabilities[]? |
        select(.Severity == "HIGH" or .Severity == "CRITICAL") |
        (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
        "\($target)|\(.PkgName)|\(.VulnerabilityID)|\(.Severity)|\($cvss)|\(.InstalledVersion)|\(.FixedVersion // "-")"' \
        trivy-deep.json 2>/dev/null | while IFS='|' read -r target pkg vuln severity cvss installed fixed; do

        # Extract JAR name from target path
        jar_file=$(basename "$target" 2>/dev/null || echo "$target")

        # Find parent JAR from mapping file.
        # grep -F treats the JAR name as a literal string (names contain dots,
        # which are regex metacharacters), and the empty-name guard stops an
        # empty pattern from matching every line of the mapping file — this
        # mirrors the lookup used for the GitHub Actions summary above.
        if [ -f "${EXTRACT_DIR}/jar-mapping.txt" ]; then
            parent_jar=""
            if [ -n "$jar_file" ]; then
                parent_jar=$(grep -F "$jar_file" "${EXTRACT_DIR}/jar-mapping.txt" 2>/dev/null | cut -d'|' -f1 | tr -d ' ' | head -1)
            fi
            if [ -z "$parent_jar" ]; then
                parent_jar="(internal)"
            fi
        else
            parent_jar="(unknown)"
        fi

        echo "| $parent_jar | $jar_file | $vuln | $severity | $cvss | $installed | $fixed |" >> "$report_file"
    done

    echo "" >> "$report_file"
fi
277 |
# Add Python vulnerabilities.
# Trivy labels these results Type == "python-pkg"; per the report text below
# they come from extension JARs (GraalVM Python VFS).
if [ -f trivy-deep.json ]; then
    # Count HIGH/CRITICAL python-pkg findings; fall back to 0 if jq fails.
    python_vulns=$(jq '[.Results[]? | select(.Type == "python-pkg") | .Vulnerabilities[]? | select(.Severity == "HIGH" or .Severity == "CRITICAL")] | length' trivy-deep.json 2>/dev/null || echo 0)

    if [ "$python_vulns" -gt 0 ]; then
        {
            echo "### Python Package Vulnerabilities"
            echo ""
            echo "These are found in extension JARs (GraalVM Python VFS)"
            echo ""
            echo "| Package | Vulnerability | Severity | CVSS | Installed | Fixed |"
            echo "|---------|---------------|----------|------|-----------|-------|"
        } >> "$report_file"

        # One row per finding; CVSS falls back nvd -> redhat -> ghsa -> "-".
        jq -r '.Results[]? | select(.Type == "python-pkg") | .Vulnerabilities[]? |
            select(.Severity == "HIGH" or .Severity == "CRITICAL") |
            (.CVSS.nvd.V3Score // .CVSS.redhat.V3Score // .CVSS.ghsa.V3Score // "-") as $cvss |
            "\(.PkgName)|\(.VulnerabilityID)|\(.Severity)|\($cvss)|\(.InstalledVersion)|\(.FixedVersion // "-")"' \
            trivy-deep.json 2>/dev/null | while IFS='|' read -r pkg vuln severity cvss installed fixed; do
            echo "| $pkg | $vuln | $severity | $cvss | $installed | $fixed |" >> "$report_file"
        done

        echo "" >> "$report_file"
    fi
fi
303 |
echo "✓ Enhanced vulnerability report created: $report_file"

# Exit with error if vulnerabilities found.
# "${total_vulns:-0}" guards against an unset/empty count: the former
# unquoted test expanded to `[ -gt 0 ]`, which errors out and falls through
# to the else branch, masking a broken count as a successful exit 0.
if [ "${total_vulns:-0}" -gt 0 ]; then
    echo "❌ Vulnerabilities detected - failing build"
    exit 1
else
    echo "✅ No HIGH/CRITICAL vulnerabilities found"
    exit 0
fi
314 |
--------------------------------------------------------------------------------
/.github/workflows/trivy.yml:
--------------------------------------------------------------------------------
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Vulnerability Scanning

# Triggers: manual dispatch, pushes/PRs to main touching image-affecting
# files, and a weekday schedule.
on:
  workflow_dispatch:
  push:
    branches: ["main"]
    paths:
      - 'Dockerfile*'
      - 'docker-entrypoint.sh'
      - '.github/workflows/trivy.yml'
      - 'examples/**'
      - 'pom.xml'
  pull_request:
    branches: ["main"]
    paths:
      - 'Dockerfile*'
      - 'docker-entrypoint.sh'
      - '.github/workflows/trivy.yml'
      - 'examples/**'
      - 'pom.xml'
  schedule:
    - cron: "0 7 * * 1-5" # Run every weekday at 7am UTC

permissions:
  contents: read
  id-token: write # Required for AWS OIDC authentication
  security-events: write # Required for uploading SARIF results to GitHub Security tab
  actions: read # Required for private repositories to get Action run status
  pull-requests: write # Required for writing comments on pull requests
35 |
jobs:
  # Trivy/Syft/Grype pipeline: builds each image flavor, extracts nested
  # dependencies, generates an SBOM, runs surface + deep Trivy scans and a
  # Grype SBOM scan, then publishes SARIF results, reports, and artifacts.
  trivy:
    strategy:
      fail-fast: false
      matrix:
        # One entry per published image flavor: Debian community, Alpine
        # community, and the secure image.
        image:
          [
            { dockerfile: Dockerfile, name: liquibase/liquibase, suffix: "" },
            {
              dockerfile: Dockerfile.alpine,
              name: liquibase/liquibase,
              suffix: "-alpine",
            },
            {
              dockerfile: DockerfileSecure,
              name: liquibase/liquibase-secure,
              suffix: "",
            },
          ]
    name: Trivy
    runs-on: "ubuntu-22.04"
    steps:
      - name: Checkout code
        uses: actions/checkout@v6

      - name: Configure AWS credentials for vault access
        uses: aws-actions/configure-aws-credentials@v5
        with:
          role-to-assume: ${{ secrets.LIQUIBASE_VAULT_OIDC_ROLE_ARN }}
          aws-region: us-east-1

      - name: Get secrets from vault
        id: vault-secrets
        uses: aws-actions/aws-secretsmanager-get-secrets@v2
        with:
          # Leading comma = empty alias, so exported env var names carry no
          # secret-name prefix.
          secret-ids: |
            ,/vault/liquibase
          parse-json-secrets: true

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build ${{ matrix.image.name }}${{ matrix.image.suffix }} from Dockerfile
        run: |
          docker build -f ${{ matrix.image.dockerfile }} -t ${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }} .

      - name: Extract nested JARs and Python packages for deep scanning
        run: |
          scripts/extract-nested-deps.sh ${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}

      - name: Generate SBOM with Syft
        uses: anchore/sbom-action@v0
        with:
          image: "${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}"
          format: "spdx-json"
          output-file: "sbom.spdx.json"

      - name: Upload SBOM as artifact
        if: always()
        uses: actions/upload-artifact@v5
        with:
          name: sbom-${{ matrix.image.dockerfile == 'DockerfileSecure' && 'secure' || matrix.image.suffix != '' && matrix.image.suffix || 'community' }}
          path: sbom.spdx.json
          retention-days: 30

      # Surface scans run twice: SARIF for the Security tab, JSON for the
      # detail tables built by analyze-scan-results.sh.
      - name: Run Trivy vulnerability scanner (Surface Scan - SARIF)
        uses: aquasecurity/trivy-action@0.33.1
        with:
          image-ref: "${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}"
          vuln-type: "os,library"
          format: "sarif"
          output: "trivy-surface.sarif"
          severity: "HIGH,CRITICAL"
          exit-code: "0"
          limit-severities-for-sarif: true

      - name: Run Trivy vulnerability scanner (Surface Scan - JSON)
        uses: aquasecurity/trivy-action@0.33.1
        with:
          image-ref: "${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}"
          vuln-type: "os,library"
          format: "json"
          output: "trivy-surface.json"
          severity: "HIGH,CRITICAL"
          exit-code: "0"

      - name: Run Trivy scanner on extracted nested JARs (Deep Scan - SARIF)
        if: always()
        uses: aquasecurity/trivy-action@0.33.1
        with:
          scan-type: "rootfs"
          scan-ref: "/tmp/extracted-deps"
          vuln-type: "library"
          format: "sarif"
          output: "trivy-deep.sarif"
          severity: "HIGH,CRITICAL"
          exit-code: "0"
          limit-severities-for-sarif: true

      - name: Run Trivy scanner on extracted nested JARs (Deep Scan - JSON)
        if: always()
        uses: aquasecurity/trivy-action@0.33.1
        with:
          scan-type: "rootfs"
          scan-ref: "/tmp/extracted-deps"
          vuln-type: "library"
          format: "json"
          output: "trivy-deep.json"
          severity: "HIGH,CRITICAL"
          exit-code: "0"

      - name: Run Grype scanner on SBOM
        if: always()
        id: grype_scan
        uses: anchore/scan-action@v7
        with:
          sbom: "sbom.spdx.json"
          fail-build: false
          severity-cutoff: high
          only-fixed: true
          output-format: sarif
          # NOTE(review): raw output is results.sarif, while later steps read
          # grype-results.sarif — presumably save-grype-results.sh performs the
          # rename; verify in that script.
          output-file: results.sarif
        continue-on-error: true

      - name: Save Grype results to file
        if: always()
        run: |
          scripts/save-grype-results.sh

      # This step fails the job (exit 1) when any HIGH/CRITICAL finding exists.
      - name: Combine scan results and check for failures
        if: always()
        env:
          IMAGE_NAME: ${{ matrix.image.name }}
          IMAGE_SUFFIX: ${{ matrix.image.suffix }}
        run: |
          scripts/analyze-scan-results.sh

      - name: Upload enhanced vulnerability report
        if: always()
        uses: actions/upload-artifact@v5
        with:
          name: vulnerability-report-${{ matrix.image.dockerfile == 'DockerfileSecure' && 'secure' || matrix.image.suffix != '' && matrix.image.suffix || 'community' }}
          path: vulnerability-report-enhanced.md
          retention-days: 90

      - name: Notify Slack on Build Failure
        if: failure()
        uses: rtCamp/action-slack-notify@v2
        env:
          SLACK_COLOR: "failure"
          SLACK_MESSAGE: "View details on GitHub Actions: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}. Triggered by repository: ${{ github.repository }} and job: ${{ github.job }}"
          SLACK_TITLE: "❌ ${{ github.repository }} ❌ Trivy failed on branch ${{ github.ref_name }} for commit ${{ github.sha }} in repository ${{ github.repository }}"
          SLACK_USERNAME: liquibot
          SLACK_WEBHOOK: ${{ env.DOCKER_SLACK_WEBHOOK_URL }}
          SLACK_ICON_EMOJI: ":whale:"
          SLACK_FOOTER: "${{ github.repository }} - ${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}"
          SLACK_LINK_NAMES: true

      - name: Upload Trivy Surface scan results to GitHub Security tab
        if: always()
        uses: github/codeql-action/upload-sarif@v4
        with:
          sarif_file: "trivy-surface.sarif"
          category: "${{ matrix.image.name }}${{ matrix.image.suffix }}-surface"

      - name: Upload Trivy Deep scan results to GitHub Security tab
        if: always()
        uses: github/codeql-action/upload-sarif@v4
        with:
          sarif_file: "trivy-deep.sarif"
          category: "${{ matrix.image.name }}${{ matrix.image.suffix }}-deep"

      - name: Check if Grype SARIF exists
        if: always()
        id: check_grype
        run: |
          scripts/check-file-exists.sh grype-results.sarif exists

      - name: Upload Grype scan results to GitHub Security tab
        if: always() && steps.check_grype.outputs.exists == 'true'
        uses: github/codeql-action/upload-sarif@v4
        with:
          sarif_file: "grype-results.sarif"
          category: "${{ matrix.image.name }}${{ matrix.image.suffix }}-grype"

      - name: Generate Security Report
        if: always()
        uses: rsdmike/github-security-report-action@v3.0.4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          outputDir: ./reports/trivy${{ matrix.image.dockerfile == 'DockerfileSecure' && '-secure' || matrix.image.suffix }}/
          sarifReportDir: .

      - name: Upload Security Report
        if: always()
        uses: actions/upload-artifact@v5
        with:
          name: security-report-trivy${{ matrix.image.dockerfile == 'DockerfileSecure' && '-secure' || matrix.image.suffix }}
          path: ./reports/trivy${{ matrix.image.dockerfile == 'DockerfileSecure' && '-secure' || matrix.image.suffix }}/summary.pdf

      - name: Upload Scan Summary
        if: always()
        uses: actions/upload-artifact@v5
        with:
          name: scan-summary-${{ matrix.image.dockerfile == 'DockerfileSecure' && 'secure' || matrix.image.suffix != '' && matrix.image.suffix || 'community' }}
          path: scan-summary.txt
          retention-days: 30
243 |
  # Docker Scout job: independent CVE scan of the same three image flavors;
  # exit-code: true fails the job on critical/high CVEs, and write-comment
  # posts results to the pull request.
  scout:
    strategy:
      fail-fast: false
      matrix:
        image:
          [
            { dockerfile: Dockerfile, name: liquibase/liquibase, suffix: "" },
            {
              dockerfile: Dockerfile.alpine,
              name: liquibase/liquibase,
              suffix: "-alpine",
            },
            {
              dockerfile: DockerfileSecure,
              name: liquibase/liquibase-secure,
              suffix: "",
            },
          ]
    name: Scout
    runs-on: "ubuntu-22.04"
    steps:
      - name: Checkout code
        uses: actions/checkout@v6

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build ${{ matrix.image.name }}${{ matrix.image.suffix }} from Dockerfile
        run: |
          docker build -f ${{ matrix.image.dockerfile }} -t ${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }} .

      - name: Configure AWS credentials for vault access
        uses: aws-actions/configure-aws-credentials@v5
        with:
          role-to-assume: ${{ secrets.LIQUIBASE_VAULT_OIDC_ROLE_ARN }}
          aws-region: us-east-1

      - name: Get secrets from vault
        id: vault-secrets
        uses: aws-actions/aws-secretsmanager-get-secrets@v2
        with:
          # Leading comma = empty alias, so exported env var names carry no
          # secret-name prefix.
          secret-ids: |
            ,/vault/liquibase
          parse-json-secrets: true

      # The vault stores DOCKERHUB_USERNAME base64-encoded; decode it before
      # using it for the registry login below.
      - name: Decode DOCKERHUB_USERNAME
        run: |
          decoded_username=$(echo "${{ env.DOCKERHUB_USERNAME }}" | base64 -d)
          echo "DOCKERHUB_USERNAME_DECODED=$decoded_username" >> $GITHUB_ENV

      - uses: docker/login-action@v3
        with:
          username: ${{ env.DOCKERHUB_USERNAME_DECODED }}
          password: ${{ env.DOCKERHUB_TOKEN }}

      - name: Docker Scout
        uses: docker/scout-action@v1.18.2
        with:
          command: cves
          image: "${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}"
          github-token: ${{ secrets.GITHUB_TOKEN }}
          write-comment: true
          sarif-file: "scout-results.sarif"
          summary: true
          exit-code: true
          only-severities: "critical,high"

      - name: Notify Slack on Build Failure
        if: failure()
        uses: rtCamp/action-slack-notify@v2
        env:
          SLACK_COLOR: "failure"
          SLACK_MESSAGE: "View details on GitHub Actions: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}. Triggered by repository: ${{ github.repository }} and job: ${{ github.job }}"
          SLACK_TITLE: "❌ ${{ github.repository }} ❌ Docker Scout failed on branch ${{ github.ref_name }} for commit ${{ github.sha }} in repository ${{ github.repository }}"
          SLACK_USERNAME: liquibot
          SLACK_WEBHOOK: ${{ env.DOCKER_SLACK_WEBHOOK_URL }}
          SLACK_ICON_EMOJI: ":whale:"
          SLACK_FOOTER: "${{ github.repository }} - ${{ matrix.image.name }}${{ matrix.image.suffix }}:${{ github.sha }}"
          SLACK_LINK_NAMES: true

      - name: Upload Scout scan results to GitHub Security tab
        if: always()
        uses: github/codeql-action/upload-sarif@v4
        with:
          sarif_file: "scout-results.sarif"
          category: "${{ matrix.image.name }}${{ matrix.image.suffix }}"

      - name: Generate Security Report
        if: always()
        uses: rsdmike/github-security-report-action@v3.0.4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          outputDir: ./reports/scout${{ matrix.image.dockerfile == 'DockerfileSecure' && '-secure' || matrix.image.suffix }}/
          sarifReportDir: .

      - name: Upload Security Report
        if: always()
        uses: actions/upload-artifact@v5
        with:
          name: security-report-scout${{ matrix.image.dockerfile == 'DockerfileSecure' && '-secure' || matrix.image.suffix }}
          path: ./reports/scout${{ matrix.image.dockerfile == 'DockerfileSecure' && '-secure' || matrix.image.suffix }}/summary.pdf
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Official Liquibase Docker Images
2 |
3 | ## 🚨 Important: Liquibase 5.0 Changes 🚨
4 |
5 | ### Liquibase Community vs Liquibase Secure
6 |
7 | Starting with **Liquibase 5.0**, we have introduced a clear separation between our open source Community edition and our commercial Secure offering:
8 |
9 | - **`liquibase/liquibase`** (Community Edition): Community version under the Functional Source License (FSL)
10 | - **`liquibase/liquibase-secure`** (Secure Edition): Commercial version with enterprise features
11 |
12 | **If you have a valid Liquibase License Key, you should now use `liquibase/liquibase-secure` instead of `liquibase/liquibase`.**
13 |
14 | ### 📋 Image Availability Matrix
15 |
16 | | Version Range | Community Image | Secure Image | License | Docker Official |
17 | | ------------- | ----------------------------------------------- | ---------------------------- | ----------------------- | ----------------------------- |
18 | | **5.0+** | `liquibase/liquibase` | `liquibase/liquibase-secure` | FSL\* / Commercial | ❌ No\*\* |
| **4.x**       | `liquibase:4.x`\*\*\*<br>`liquibase/liquibase`   | `liquibase/liquibase-secure` | Apache 2.0 / Commercial | ✅ Yes\*\*\* (Community only) |
20 |
21 | - \*FSL = Functional Source License (See [Liquibase License Information](#license-information))
22 | - \*\*For Liquibase 5.0+, use the community registry image `liquibase/liquibase` (not available as official Docker image).
23 | - \*\*\*Liquibase 4 community image is available as the official Docker image at [https://hub.docker.com/\_/liquibase](https://hub.docker.com/_/liquibase). Pull using `docker pull liquibase:4.x`.
24 |
25 | ### 🚨 Breaking Change: Drivers and Extensions No Longer Included
26 |
27 | As of **Liquibase 5.0**, the Community edition (`liquibase/liquibase`) and the official Docker Community liquibase image **no longer include database drivers or extensions by default**.
28 |
29 | **What this means for you:**
30 |
31 | - You must now explicitly add database drivers using the Liquibase Package Manager (LPM)
32 | - Extensions must be manually installed or mounted into the container
33 | - MySQL driver installation via `INSTALL_MYSQL=true` environment variable is still supported
34 |
35 | **Learn more:** [Liquibase 5.0 Release Announcement](https://www.liquibase.com/blog/liquibase-5-0-release)
36 |
37 | ### Adding Drivers with LPM
38 |
39 | ```dockerfile
40 | FROM liquibase/liquibase:latest
41 | # Add database drivers as needed
42 | RUN lpm add mysql --global
43 | RUN lpm add postgresql --global
44 | RUN lpm add mssql --global
45 | ```
46 |
47 | ---
48 |
49 | ## 🌍 Available Registries
50 |
51 | We publish Liquibase images to multiple registries for flexibility:
52 |
53 | | Registry | Community Image | Secure Image |
54 | | ----------------------------- | ------------------------------------ | ------------------------------------------- |
55 | | **Docker Hub (default)** | `liquibase/liquibase` | `liquibase/liquibase-secure` |
56 | | **GitHub Container Registry** | `ghcr.io/liquibase/liquibase` | `ghcr.io/liquibase/liquibase-secure` |
57 | | **Amazon ECR Public** | `public.ecr.aws/liquibase/liquibase` | `public.ecr.aws/liquibase/liquibase-secure` |
58 |
59 | ## 🚀 Quick Start
60 |
61 | ### For Community Users (Liquibase 5.0+)
62 |
63 | ```bash
64 | # Pull the community image
65 | docker pull liquibase/liquibase:5.0.1
66 |
67 | # Run with a changelog
68 | docker run --rm \
69 | -v /path/to/changelog:/liquibase/changelog \
70 | -e LIQUIBASE_COMMAND_URL="jdbc:postgresql://localhost:5432/mydb" \
71 | -e LIQUIBASE_COMMAND_USERNAME="username" \
72 | -e LIQUIBASE_COMMAND_PASSWORD="password" \
  liquibase/liquibase:5.0.1 update
74 | ```
75 |
76 | ### For Secure Edition Users
77 |
78 | ```bash
79 | # Pull the secure image
80 | docker pull liquibase/liquibase-secure:5.0.1
81 |
82 | # Run with a changelog and license key
83 | docker run --rm \
84 | -v /path/to/changelog:/liquibase/changelog \
85 | -e LIQUIBASE_COMMAND_URL="jdbc:postgresql://localhost:5432/mydb" \
86 | -e LIQUIBASE_COMMAND_USERNAME="username" \
87 | -e LIQUIBASE_COMMAND_PASSWORD="password" \
88 | -e LIQUIBASE_LICENSE_KEY="your-license-key" \
89 | liquibase/liquibase-secure:5.0.1 update
90 | ```
91 |
92 | ### For Liquibase 4 Users
93 |
94 | If you're still using Liquibase 4, you can pull from either the official Docker repository or the community registry:
95 |
96 | **Official Docker Repository:**
97 |
98 | ```bash
99 | # Pull the latest Liquibase 4 image
100 | docker pull liquibase:latest
101 |
102 | # Or pull a specific version
103 | docker pull liquibase:4.x
104 | ```
105 |
106 | **Community Registry:**
107 |
108 | ```bash
109 | # Pull from community registry
110 | docker pull liquibase/liquibase:4.x
111 | ```
112 |
113 | ---
114 |
115 | ## 📖 Upgrading from Liquibase 4 to 5.0
116 |
117 | If you're upgrading from Liquibase 4 to 5.0, follow these steps:
118 |
119 | ### Step 1: Understand License Requirements
120 |
121 | - **Liquibase 4**: Uses Apache 2.0 license (always available)
122 | - **Liquibase 5.0 Community**: Uses Functional Source License (FSL)
123 | - **Liquibase 5.0 Secure**: Requires a commercial license
124 |
125 | Read more: [Liquibase License Information](#license-information)
126 |
127 | ### Step 2: Determine Which Edition You Need
128 |
129 | **Use Community Edition if:**
130 |
131 | - You are an open source user
132 | - You accept the Functional Source License terms
133 | - You do not require enterprise features
134 |
135 | **Use Secure Edition if:**
136 |
137 | - You have a commercial Liquibase license
138 | - You need enterprise features like Policy Checks, Quality Checks, or Advanced Rollback
139 | - Your organization requires commercial support
140 |
141 | ### Step 3: Update Your Image Reference
142 |
143 | **If using Community Edition:**
144 |
```dockerfile
146 | # Before (Liquibase 4)
147 | FROM liquibase/liquibase:4.x
148 |
149 | # After (Liquibase 5.0+)
150 | FROM liquibase/liquibase:5.0 # or :latest
151 | ```
152 |
153 | **If using PRO Edition:**
154 |
```dockerfile
156 | # Before (Liquibase 4)
157 | FROM liquibase/liquibase-pro:4.x
158 |
159 | # After (Liquibase 5.0+)
160 | FROM liquibase/liquibase-secure:5.0 # or :latest
161 | ```
162 |
163 | ### Step 4: Update Driver Installation
164 |
165 | **Liquibase 5.0+ no longer includes drivers by default.** Add drivers explicitly:
166 |
167 | ```dockerfile
168 | FROM liquibase/liquibase:latest
169 |
170 | # Add required database drivers
171 | RUN lpm add postgresql --global
172 | RUN lpm add mysql --global
173 | RUN lpm add mssql --global
174 | ```
175 |
176 | Or at runtime using environment variables:
177 |
178 | ```bash
179 | docker run -e INSTALL_MYSQL=true liquibase/liquibase:latest update
180 | ```
181 |
182 | ### Step 5: Test in Non-Production First
183 |
184 | ```bash
185 | # Test your changelogs against a test database
186 | docker run --rm \
187 | -v /path/to/changelog:/liquibase/changelog \
188 | -e LIQUIBASE_COMMAND_URL="jdbc:postgresql://test-db:5432/testdb" \
189 | -e LIQUIBASE_COMMAND_USERNAME="username" \
190 | -e LIQUIBASE_COMMAND_PASSWORD="password" \
191 | liquibase/liquibase:5.0 validate
192 | ```
193 |
194 | ### Step 6: Complete Production Migration
195 |
196 | Once testing is successful, update your production deployments to use the new image.
197 |
198 | ---
199 |
200 | ## 🔐 License Information
201 |
202 | ### Functional Source License (FSL) - Liquibase 5.0 Community
203 |
204 | The Liquibase 5.0 Community edition is available under the Functional Source License (FSL). This license:
205 |
206 | - Allows you to freely use Liquibase for database migrations
207 | - Prohibits commercial use that competes with Liquibase’s products or services
208 | - Automatically transitions to the Apache 2.0 license after two years
209 | - Provides full source code access (but not OSI-approved open source)
210 |
211 | Read the full license: [Functional Source License on fsl.software](https://fsl.software/)
212 |
213 | ### Apache 2.0 License - Liquibase 4
214 |
215 | Liquibase 4 versions continue to use the [Apache 2.0 license](https://www.apache.org/licenses/LICENSE-2.0).
216 |
217 | ### Commercial License - Liquibase Secure
218 |
219 | The [Liquibase Secure edition](https://www.liquibase.com/liquibase-secure) requires a commercial license and provides enterprise-grade database change management with advanced capabilities:
220 |
221 | **Developer Productivity:**
222 | - VS Code Extension for IDE-native operations
223 | - Liquibase Flows for environment consistency
224 | - Policy Checks to enforce standards and block risky changes
225 | - Support for 60+ database types
226 |
227 | **Secure Automation:**
228 | - CI/CD deployment automation with policy enforcement
229 | - Targeted rollbacks for precise recovery
230 | - Advanced drift detection and alerting
231 | - Secrets management and RBAC enforcement
232 | - Multi-environment deployment capabilities
233 |
234 | **Change Insights & Compliance:**
235 | - Unified change tracking with full audit context
236 | - Real-time drift detection
237 | - Audit-ready compliance reporting (SOX, HIPAA, PCI, SOC2, GDPR)
238 | - Risk scoring for change assessment
239 |
240 | For more information and licensing inquiries, visit [liquibase.com/get-liquibase](https://www.liquibase.com/get-liquibase)
241 |
242 | ---
243 |
244 | ## Dockerfile
245 |
246 | ```dockerfile
247 | FROM liquibase/liquibase:latest
248 | # OR ghcr.io/liquibase/liquibase:latest # GHCR
249 | # OR public.ecr.aws/liquibase/liquibase:latest # Amazon ECR Public
250 | ```
251 |
252 | ## Scripts
253 |
254 | ### Community Edition
255 |
256 | ```bash
257 | # Docker Hub (default)
258 | docker pull liquibase/liquibase
259 |
260 | # GitHub Container Registry
261 | docker pull ghcr.io/liquibase/liquibase
262 |
263 | # Amazon ECR Public
264 | docker pull public.ecr.aws/liquibase/liquibase
265 | ```
266 |
267 | ### Liquibase Secure Edition
268 |
269 | ```bash
270 | # Docker Hub (default)
271 | docker pull liquibase/liquibase-secure
272 |
273 | # GitHub Container Registry
274 | docker pull ghcr.io/liquibase/liquibase-secure
275 |
276 | # Amazon ECR Public
277 | docker pull public.ecr.aws/liquibase/liquibase-secure
278 | ```
279 |
280 | ### Pulling the Latest or Specific Version
281 |
282 | #### Community Edition
283 |
284 | ```bash
285 | # Latest
286 | docker pull liquibase/liquibase:latest
287 | docker pull ghcr.io/liquibase/liquibase:latest
288 | docker pull public.ecr.aws/liquibase/liquibase:latest
289 |
290 | # Specific version (example: 4.32.0)
291 | docker pull liquibase/liquibase:4.32.0
292 | docker pull ghcr.io/liquibase/liquibase:4.32.0
293 | docker pull public.ecr.aws/liquibase/liquibase:4.32.0
294 | ```
295 |
296 | #### Liquibase Secure Edition
297 |
298 | ```bash
299 | # Latest
300 | docker pull liquibase/liquibase-secure:latest
301 | docker pull ghcr.io/liquibase/liquibase-secure:latest
302 | docker pull public.ecr.aws/liquibase/liquibase-secure:latest
303 |
304 | # Specific version (example: 4.32.0)
305 | docker pull liquibase/liquibase-secure:4.32.0
306 | docker pull ghcr.io/liquibase/liquibase-secure:4.32.0
307 | docker pull public.ecr.aws/liquibase/liquibase-secure:4.32.0
308 | ```
309 |
310 | For any questions or support, please visit our [Liquibase Community Forum](https://forum.liquibase.org/).
311 |
312 | ---
313 |
314 | This is the community repository for [Liquibase](https://download.liquibase.org/) images.
315 |
316 | ## 🚨 BREAKING CHANGE
317 |
318 | Support for Snowflake database has been moved from the external extension liquibase-snowflake into the main Liquibase artifact. This means that Snowflake is now included in the main docker image. If you are using the snowflake extension, remove it from your lib directory or however you are including it in your project. If you are using the Docker image, use the main v4.12+ as there will no longer be a snowflake separate docker image produced. The latest separate Snowflake image will be v4.11. You need to update your reference to either latest to use the main one that includes Snowflake or the version tag you prefer.
319 |
320 | ## 🏷️ Image Tags and Versions
321 |
322 | Liquibase Docker images use semantic versioning with the following tag strategies:
323 |
324 | ### Tag Formats
325 |
326 | | Tag Format          | Example                             | Description                    |
327 | | ------------------- | ----------------------------------- | ------------------------------ |
328 | | `latest`            | `liquibase/liquibase:latest`        | Latest stable release          |
329 | | `latest-alpine`     | `liquibase/liquibase:latest-alpine` | Latest stable Alpine variant   |
330 | | `<version>`         | `liquibase/liquibase:5.0.0`         | Specific version (exact match) |
331 | | `<version>-alpine`  | `liquibase/liquibase:5.0.0-alpine`  | Specific Alpine version        |
332 | | `<major>.<minor>`   | `liquibase/liquibase:5.0`           | Latest patch for major.minor   |
333 |
334 | ### Community vs Secure Image Tags
335 |
336 | The same tag structure applies to both image types:
337 |
338 | - **Community**: `liquibase/liquibase:5.0.0`
339 | - **Secure**: `liquibase/liquibase-secure:5.0.0`
340 |
341 | Both are available across all registries (Docker Hub, GHCR, Amazon ECR Public).
342 |
343 | ### Supported Tags
344 |
345 | The following tags are officially supported and can be found on [Docker Hub](https://hub.docker.com/r/liquibase/liquibase/tags):
346 |
347 | **Community Image:**
348 |
349 | - `liquibase/liquibase:latest` - Latest 5.0+ release
350 | - `liquibase/liquibase:5.0` - Latest 5.0.x release
351 | - `liquibase/liquibase:latest-alpine` - Latest Alpine variant
352 | - `liquibase/liquibase:4.x` - Liquibase 4 versions (Apache 2.0)
353 |
354 | **Secure Image:**
355 |
356 | - `liquibase/liquibase-secure:latest` - Latest Secure release
357 | - `liquibase/liquibase-secure:5.0` - Latest 5.0.x release
358 | - `liquibase/liquibase-secure:latest-alpine` - Latest Secure Alpine variant
359 |
360 | ### Choosing the Right Tag
361 |
362 | - **For production**: Use major.minor tags (e.g., `5.0`) for reproducibility with latest patches
363 | - **For development**: Use `latest` or `latest-alpine` for convenience
364 | - **For Alpine Linux**: Append `-alpine` for smaller image size
365 | - **For Liquibase 4**: Use `4.x` versions (Apache 2.0 license)
366 |
367 | ## 📦 Using the Docker Image
368 |
369 | ### 🏷️ Standard Image
370 |
371 | The `liquibase/liquibase:<version>` image is the standard choice. Use it as a disposable container or a foundational building block for other images.
372 |
373 | For examples of extending the standard image, see the [standard image examples](https://github.com/liquibase/docker/tree/main/examples).
374 |
375 | ### 🏷️ Alpine Image
376 |
377 | The `liquibase/liquibase:<version>-alpine` image is a lightweight version designed for environments with limited resources. It is built on Alpine Linux and has a smaller footprint.
378 |
379 | For examples of extending the alpine image, see the [alpine image examples](https://github.com/liquibase/docker/tree/main/examples).
380 |
381 | ### 🐳 Docker Compose Example
382 |
383 | For a complete example using Docker Compose with PostgreSQL, see the [docker-compose example](https://github.com/liquibase/docker/tree/main/examples/docker-compose).
384 |
385 | ### 📄 Using the Changelog File
386 |
387 | Mount your changelog directory to the `/liquibase/changelog` volume and use relative paths for the `--changeLogFile` argument.
388 |
389 | #### Example
390 |
391 | ```shell
392 | docker run --rm -v /path/to/changelog:/liquibase/changelog liquibase/liquibase --changeLogFile=changelog.xml update
393 | ```
394 |
395 | ### 🔄 CLI-Docker Compatibility
396 |
397 | Starting with this version, Docker containers now behave consistently with CLI usage for file path handling. When you mount your changelog directory to `/liquibase/changelog`, the container automatically changes its working directory to match, making relative file paths work the same way in both CLI and Docker environments.
398 |
399 | **Before this enhancement:**
400 |
401 | - CLI: `liquibase generateChangeLog --changelogFile=mychangelog.xml` (creates file in current directory)
402 | - Docker: `liquibase generateChangeLog --changelogFile=changelog/mychangelog.xml` (had to include path prefix)
403 |
404 | **Now (improved):**
405 |
406 | - CLI: `liquibase generateChangeLog --changelogFile=mychangelog.xml` (creates file in current directory)
407 | - Docker: `liquibase generateChangeLog --changelogFile=mychangelog.xml` (creates file in mounted changelog directory)
408 |
409 | Both approaches now work identically, making it easier to switch between local CLI and CI/CD Docker usage without modifying your commands or file paths.
410 |
411 | #### How it works
412 |
413 | When you mount a directory to `/liquibase/changelog`, the container automatically:
414 |
415 | 1. Detects the presence of the mounted changelog directory
416 | 2. Changes the working directory to `/liquibase/changelog`
417 | 3. Executes Liquibase commands from that location
418 |
419 | This ensures that relative paths in your commands work consistently whether you're using CLI locally or Docker containers in CI/CD pipelines. In most cases, this automatic behavior works seamlessly without any manual intervention.
420 |
421 | ### 🔍 Search Path Configuration
422 |
423 | Liquibase Docker images automatically manage the search path to help locate changelog files and dependencies. The search path is configured with the following priority (highest to lowest):
424 |
425 | 1. **User-provided `--search-path` CLI argument** (highest priority)
426 | 2. **User-provided `LIQUIBASE_SEARCH_PATH` environment variable**
427 | 3. **Automatic search path injection** (lowest priority)
428 |
429 | #### Understanding Search Path Behavior
430 |
431 | When you mount a changelog directory to `/liquibase/changelog`:
432 |
433 | - **With relative paths** (`--changelogFile=mychangelog.xml`): The container automatically sets `--search-path=.` to search the current directory (working directory).
434 | - **Without mount or with absolute paths**: The container sets `--search-path=/liquibase/changelog` to help locate files in the default location.
435 |
436 | #### Custom Search Paths
437 |
438 | If you need to use a custom search path (for example, to include S3 buckets or remote storage locations), the container respects your configuration and **will not override** user-provided search paths:
439 |
440 | **Example 1: Using environment variable with multiple search paths**
441 |
442 | ```bash
443 | docker run --rm \
444 | --env LIQUIBASE_SEARCH_PATH="/liquibase/changelog,s3://my-bucket/snapshots/" \
445 | -v /path/to/changelog:/liquibase/changelog \
446 | liquibase/liquibase --changelogFile=mychangelog.xml update
447 | ```
448 |
449 | **Example 2: Using CLI argument**
450 |
451 | ```bash
452 | docker run --rm \
453 | -v /path/to/changelog:/liquibase/changelog \
454 | liquibase/liquibase \
455 | --changelogFile=mychangelog.xml \
456 | --search-path=/custom/path \
457 | update
458 | ```
459 |
460 | **Example 3: Combining relative paths with custom search paths (Correct approach)**
461 |
462 | ```bash
463 | docker run --rm \
464 | --env LIQUIBASE_SEARCH_PATH="/liquibase/changelog,/liquibase/shared-changesets" \
465 | -v /path/to/changelog:/liquibase/changelog \
466 | -v /path/to/shared:/liquibase/shared-changesets \
467 | liquibase/liquibase --changelogFile=main.xml update
468 | ```
469 |
470 | In this example:
471 | - The relative path `main.xml` is found in the working directory (`/liquibase/changelog`)
472 | - Included files are searched **only** in the paths specified by `LIQUIBASE_SEARCH_PATH` (here, `/liquibase/changelog` and `/liquibase/shared-changesets`). The current directory (`.`) is **not** automatically added when you supply a custom search path, which is why `/liquibase/changelog` must be listed explicitly alongside `/liquibase/shared-changesets`.
473 |
474 | #### Troubleshooting Search Path Issues
475 |
476 | If you're experiencing file-not-found errors with custom search paths:
477 |
478 | 1. **Verify the environment variable is set correctly**: Check that `LIQUIBASE_SEARCH_PATH` is properly formatted (comma-separated for multiple paths)
479 | 2. **Check path permissions**: Ensure the Docker container can access mounted directories
480 | 3. **Use absolute paths**: For clarity, use absolute paths in your search path configuration
481 | 4. **Review Liquibase logs**: Liquibase will output which search path it's using during execution
482 |
483 | #### Important: Search Path Behavior with Custom Paths
484 |
485 | When you set `LIQUIBASE_SEARCH_PATH` to a custom value:
486 |
487 | - **Only the paths you specify are searched** for included files
488 | - The current directory (`.`) is **not automatically added**
489 | - If you want to search multiple locations, **include all of them** in your `LIQUIBASE_SEARCH_PATH` configuration
490 |
491 | **Example:** If you want to search both `/liquibase/changelog` and `/liquibase/shared-changesets`:
492 |
493 | ```bash
494 | # ✓ CORRECT: Include both paths
495 | --env LIQUIBASE_SEARCH_PATH="/liquibase/changelog,/liquibase/shared-changesets"
496 |
497 | # ✗ INCORRECT: Only includes shared-changesets, NOT the current directory
498 | --env LIQUIBASE_SEARCH_PATH="/liquibase/shared-changesets"
499 | ```
500 |
501 | ### ⚙️ Using a Configuration File
502 |
503 | To use a default configuration file, mount it in your changelog volume and reference it with the `--defaultsFile` argument.
504 |
505 | #### Example
506 |
507 | ```shell
508 | docker run --rm -v /path/to/changelog:/liquibase/changelog liquibase/liquibase --defaultsFile=liquibase.properties update
509 | ```
510 |
511 | ### 📚 Including Drivers and Extensions
512 |
513 | Mount a local directory containing additional jars to `/liquibase/lib`.
514 |
515 | #### Example
516 |
517 | ```shell
518 | docker run --rm -v /path/to/changelog:/liquibase/changelog -v /path/to/lib:/liquibase/lib liquibase/liquibase update
519 | ```
520 |
521 | ### 🔍 MySQL Users
522 |
523 | Due to licensing restrictions, the MySQL driver is not included. Add it either by extending the image or during runtime via an environment variable.
524 |
525 | #### Extending the Image
526 |
527 | Dockerfile:
528 |
529 | ```dockerfile
530 | FROM liquibase/liquibase:latest
531 |
532 | RUN lpm add mysql --global
533 | ```
534 |
535 | Build:
536 |
537 | ```shell
538 | docker build . -t liquibase-mysql
539 | ```
540 |
541 | #### Runtime
542 |
543 | ```shell
544 | docker run -e INSTALL_MYSQL=true liquibase/liquibase update
545 | ```
546 |
547 | ## 🛠️ Complete Example
548 |
549 | Here is a complete example using environment variables and a properties file:
550 |
551 | ### Environment Variables Example
552 |
553 | ```shell
554 | docker run --env LIQUIBASE_COMMAND_USERNAME --env LIQUIBASE_COMMAND_PASSWORD --env LIQUIBASE_COMMAND_URL --env LIQUIBASE_PRO_LICENSE_KEY --env LIQUIBASE_COMMAND_CHANGELOG_FILE --rm -v /path/to/changelog:/liquibase/changelog liquibase/liquibase --log-level=info update
555 | ```
556 |
557 | ### Properties File Example
558 |
559 | `liquibase.docker.properties` file:
560 |
561 | ```properties
562 | searchPath: /liquibase/changelog
563 | url: jdbc:postgresql://<host>:5432/<dbname>?currentSchema=<schema>
564 | changeLogFile: changelog.xml
565 | username: <username>
566 | password: <password>
567 | liquibaseSecureLicenseKey: <license-key>
568 | ```
569 |
570 | CLI:
571 |
572 | ```shell
573 | docker run --rm -v /path/to/changelog:/liquibase/changelog liquibase/liquibase --defaultsFile=liquibase.docker.properties update
574 | ```
575 |
576 | ## 🔗 Example JDBC URLs
577 |
578 | - MS SQL Server: `jdbc:sqlserver://<host>:1433;database=<dbname>`
579 | - PostgreSQL: `jdbc:postgresql://<host>:5432/<dbname>?currentSchema=<schema>`
580 | - MySQL: `jdbc:mysql://<host>:3306/<dbname>`
581 | - MariaDB: `jdbc:mariadb://<host>:3306/<dbname>`
582 | - DB2: `jdbc:db2://<host>:50000/<dbname>`
583 | - Snowflake: `jdbc:snowflake://<host>/?db=<dbname>&schema=<schema>`
584 | - Sybase: `jdbc:jtds:sybase://<host>:<port>/<dbname>`
585 | - SQLite: `jdbc:sqlite:/tmp/<dbname>.db`
586 |
587 | For more details, visit our [Liquibase Documentation](https://docs.liquibase.com/).
588 |
589 |
590 |
--------------------------------------------------------------------------------