├── .gitignore ├── .gitattributes ├── rootfs └── homeassistant │ └── etc │ ├── secrets.yaml │ ├── packages │ └── global │ │ └── example.yaml │ └── configuration.yaml ├── .dockerignore ├── .github └── workflows │ ├── tags.yml │ ├── org.readme.yml │ ├── cron.yml │ ├── org.version.yml │ ├── org.update.yml │ └── docker.yml ├── .json ├── LICENSE ├── compose.yml ├── project.md ├── arch.dockerfile └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # default 2 | maintain/ 3 | node_modules/ 4 | 5 | # custom 6 | .env -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # default 2 | * text=auto 3 | *.sh eol=lf -------------------------------------------------------------------------------- /rootfs/homeassistant/etc/secrets.yaml: -------------------------------------------------------------------------------- 1 | postgresuri: postgresql://postgres:homeassistant@postgres/postgres -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # default 2 | .git* 3 | maintain/ 4 | LICENSE 5 | *.md 6 | img/ 7 | node_modules/ 8 | 9 | # custom 10 | .env -------------------------------------------------------------------------------- /rootfs/homeassistant/etc/packages/global/example.yaml: -------------------------------------------------------------------------------- 1 | input_text: 2 | system_tts_voice: 3 | name: "TTS AI Voice" 4 | initial: "Sarah" -------------------------------------------------------------------------------- /.github/workflows/tags.yml: -------------------------------------------------------------------------------- 1 | name: tags 2 | on: 3 | push: 4 | tags: 5 | - 'v*' 6 | jobs: 7 | image: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: build docker image 11 | uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7 12 | with: 13 | workflow: docker.yml 14 | wait-for-completion: false 15 | token: "${{ secrets.REPOSITORY_TOKEN }}" 16 | inputs: '{ "release":"true", "readme":"true", "run-name":"build ${{ github.ref_name }}" }' -------------------------------------------------------------------------------- /rootfs/homeassistant/etc/configuration.yaml: -------------------------------------------------------------------------------- 1 | config: 2 | ssdp: 3 | zeroconf: 4 | sun: 5 | logbook: 6 | history: 7 | media_source: 8 | mobile_app: 9 | system_health: 10 | 11 | homeassistant: 12 | packages: !include_dir_named packages 13 | 14 | http: 15 | server_port: 8123 16 | use_x_forwarded_for: true 17 | trusted_proxies: 18 | - 10.0.0.0/8 19 | - 172.16.0.0/12 20 | - 192.168.0.0/16 21 | 22 | recorder: 23 | auto_purge: true 24 | purge_keep_days: 365 25 | db_url: !secret postgresuri 26 | 27 | logger: 28 | default: critical 29 | logs: 30 | homeassistant.core: fatal -------------------------------------------------------------------------------- /.github/workflows/org.readme.yml: -------------------------------------------------------------------------------- 1 | name: org.readme 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | # ╔═════════════════════════════════════════════════════╗ 8 | # ║ CREATE README.md ║ 9 | # ╚═════════════════════════════════════════════════════╝ 10 | readme: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: create README.md 14 | uses: 
the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7 15 | with: 16 | wait-for-completion: false 17 | workflow: docker.yml 18 | token: "${{ secrets.REPOSITORY_TOKEN }}" 19 | inputs: '{ "build":"false", "release":"false", "readme":"true", "run-name":"readme" }' -------------------------------------------------------------------------------- /.json: -------------------------------------------------------------------------------- 1 | { 2 | "image": "11notes/homeassistant", 3 | "platform": "amd64,arm64", 4 | "name": "homeassistant", 5 | "root": "/homeassistant", 6 | "semver": { 7 | "version": "2025.12.3" 8 | }, 9 | "readme": { 10 | "description": "run home assistant rootless", 11 | "introduction": "[Home Assistant](https://github.com/home-assistant/core) (created by [home-assistant](https://github.com/home-assistant/)) is an open source home automation that puts local control and privacy first. Powered by a worldwide community of tinkerers and DIY enthusiasts. Perfect to run on a Raspberry Pi or a local server.", 12 | "parent": { 13 | "image": "11notes/alpine:stable" 14 | }, 15 | "built": { 16 | "home-assistant/core": "https://github.com/home-assistant/core" 17 | } 18 | }, 19 | "grype": { 20 | "severity": "critical", 21 | "fail": false 22 | } 23 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 11notes 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /.github/workflows/cron.yml: -------------------------------------------------------------------------------- 1 | name: cron 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: "0 5 * * *" 7 | 8 | jobs: 9 | update: 10 | runs-on: ubuntu-latest 11 | 12 | permissions: 13 | actions: read 14 | contents: write 15 | 16 | steps: 17 | - name: checkout all tags 18 | uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2 19 | with: 20 | ref: 'master' 21 | fetch-depth: 0 22 | 23 | - name: get latest version and last tag 24 | run: | 25 | export LATEST_VERSION=$(curl -s https://api.github.com/repos/home-assistant/core/releases/latest | jq -r '.tag_name' | sed 's|v||') 26 | export LATEST_TAG=$(git describe --abbrev=0 --tags `git rev-list --tags --max-count=1` | sed 's|v||') 27 | echo "WORKFLOW_UPDATE_BASE64JSON=$(echo '{"version":"'${LATEST_VERSION}'","tag":"'${LATEST_TAG}'"}' | base64 -w 0)" >> "${GITHUB_ENV}" 28 | 29 | - name: call org.update 30 | uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7 31 | with: 32 | wait-for-completion: false 33 | workflow: org.update.yml 34 | token: "${{ secrets.REPOSITORY_TOKEN }}" 35 | inputs: '{ "etc":"${{ env.WORKFLOW_UPDATE_BASE64JSON }}" }' -------------------------------------------------------------------------------- /.github/workflows/org.version.yml: -------------------------------------------------------------------------------- 1 | name: org.version 2 | run-name: org.version ${{ inputs.version }} 3 | 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | version: 8 | description: 'set version for build' 9 | type: string 10 | required: true 11 | 12 | jobs: 13 | # ╔═════════════════════════════════════════════════════╗ 14 | # ║ BUILD VERSION {N} IMAGE ║ 15 | # ╚═════════════════════════════════════════════════════╝ 16 | version: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: setup config 20 | uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298 21 | with: 22 | script: | 23 | const { Buffer } = require('node:buffer'); 24 | const etc = { 25 | version:"${{ github.event.inputs.version }}", 26 | semver:{disable:{rolling:true}} 27 | }; 28 | core.exportVariable('WORKFLOW_BASE64JSON', Buffer.from(JSON.stringify(etc)).toString('base64')); 29 | 30 | - name: build container image 31 | uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7 32 | with: 33 | wait-for-completion: false 34 | workflow: docker.yml 35 | token: "${{ secrets.REPOSITORY_TOKEN }}" 36 | inputs: '{ "release":"false", "readme":"false", "etc":"${{ env.WORKFLOW_BASE64JSON }}" }' -------------------------------------------------------------------------------- /compose.yml: -------------------------------------------------------------------------------- 1 | name: "iot" 2 | 3 | x-lockdown: &lockdown 4 | # prevents write access to the image itself 5 | read_only: true 6 | # prevents any process within the container to gain more privileges 7 | security_opt: 8 | - "no-new-privileges=true" 9 | 10 | services: 11 | postgres: 12 | # for more information about this image checkout: 13 | # https://github.com/11notes/docker-postgres 14 | image: "11notes/postgres:17" 15 | <<: *lockdown 16 | environment: 17 | TZ: "Europe/Zurich" 18 | POSTGRES_PASSWORD: "${POSTGRES_PASSWORD}" 19 | networks: 20 | backend: 21 | volumes: 22 | - "postgres.etc:/postgres/etc" 23 | - "postgres.var:/postgres/var" 24 | - "postgres.backup:/postgres/backup" 25 | tmpfs: 26 | - 
"/postgres/run:uid=1000,gid=1000" 27 | - "/postgres/log:uid=1000,gid=1000" 28 | restart: "always" 29 | 30 | homeassistant: 31 | depends_on: 32 | postgres: 33 | condition: "service_healthy" 34 | restart: true 35 | image: "11notes/homeassistant:2025.11.2" 36 | <<: *lockdown 37 | environment: 38 | TZ: "Europe/Zurich" 39 | volumes: 40 | - "homeassistant.etc:/homeassistant/etc" 41 | networks: 42 | frontend: 43 | backend: 44 | ports: 45 | - "3000:8123/tcp" 46 | restart: "always" 47 | 48 | volumes: 49 | postgres.etc: 50 | postgres.var: 51 | postgres.backup: 52 | homeassistant.etc: 53 | 54 | networks: 55 | frontend: 56 | backend: 57 | internal: true -------------------------------------------------------------------------------- /project.md: -------------------------------------------------------------------------------- 1 | ${{ content_synopsis }} This image will give you a [rootless](https://github.com/11notes/RTFM/blob/main/linux/container/image/rootless.md) Home Assistant installation. 2 | 3 | ${{ content_uvp }} Good question! Because ... 4 | 5 | ${{ github:> [!IMPORTANT] }} 6 | ${{ github:> }}* ... this image runs [rootless](https://github.com/11notes/RTFM/blob/main/linux/container/image/rootless.md) as 1000:1000 7 | ${{ github:> }}* ... this image is auto updated to the latest version via CI/CD 8 | ${{ github:> }}* ... this image is built and compiled from source 9 | ${{ github:> }}* ... this image has a health check 10 | ${{ github:> }}* ... this image runs read-only 11 | ${{ github:> }}* ... this image is automatically scanned for CVEs before and after publishing 12 | ${{ github:> }}* ... this image is created via a secure and pinned CI/CD process 13 | ${{ github:> }}* ... this image is very small 14 | 15 | If you value security, simplicity and optimizations to the extreme, then this image might be for you. 16 | 17 | ${{ title_volumes }} 18 | * **${{ json_root }}/etc** - Directory of all config and custom files 19 | 20 | ${{ content_compose }} 21 | 22 | ${{ content_defaults }} 23 | 24 | ${{ content_environment }} 25 | 26 | ${{ content_source }} 27 | 28 | ${{ content_parent }} 29 | 30 | ${{ content_built }} 31 | 32 | ${{ content_tips }} 33 | 34 | ${{ title_caution }} 35 | ${{ github:> [!CAUTION] }} 36 | ${{ github:> }}* This image comes with a default configuration with some default settings and examples. Please provide your own configuration if used in production. 
-------------------------------------------------------------------------------- /arch.dockerfile: -------------------------------------------------------------------------------- 1 | # ╔═════════════════════════════════════════════════════╗ 2 | # ║ SETUP ║ 3 | # ╚═════════════════════════════════════════════════════╝ 4 | # GLOBAL 5 | ARG APP_UID=1000 \ 6 | APP_GID=1000 7 | ARG PYTHON_VERSION=3.13 8 | 9 | # :: FOREIGN IMAGES 10 | FROM 11notes/util:bin AS util-bin 11 | FROM 11notes/util AS util 12 | FROM 11notes/distroless:localhealth AS distroless-localhealth 13 | FROM 11notes/distroless:go2rtc AS distroless-go2rtc 14 | 15 | 16 | # ╔═════════════════════════════════════════════════════╗ 17 | # ║ BUILD ║ 18 | # ╚═════════════════════════════════════════════════════╝ 19 | # :: HOMEASSISTANT 20 | FROM 11notes/python:${PYTHON_VERSION} AS build 21 | ARG APP_VERSION 22 | USER root 23 | 24 | RUN set -ex; \ 25 | apk --no-cache --update add \ 26 | bash \ 27 | binutils \ 28 | bluez \ 29 | bluez-deprecated \ 30 | bluez-libs \ 31 | ca-certificates \ 32 | catatonit \ 33 | coreutils \ 34 | cups-libs \ 35 | curl \ 36 | eudev-libs \ 37 | ffmpeg \ 38 | git \ 39 | grep \ 40 | hwdata-usb \ 41 | imlib2 \ 42 | iperf3 \ 43 | iputils \ 44 | jq \ 45 | libcap \ 46 | libftdi1 \ 47 | libgpiod \ 48 | libturbojpeg \ 49 | libpulse \ 50 | libstdc++ \ 51 | libxslt \ 52 | libzbar \ 53 | mailcap \ 54 | mariadb-connector-c \ 55 | nano \ 56 | net-tools \ 57 | nmap \ 58 | openssh-client \ 59 | openssl \ 60 | pianobar \ 61 | postgresql-libs \ 62 | pulseaudio-alsa \ 63 | socat \ 64 | tiff \ 65 | tzdata \ 66 | unzip \ 67 | xz; 68 | 69 | RUN set -ex; \ 70 | pip install \ 71 | --only-binary=:all: \ 72 | -f https://wheels.home-assistant.io/musllinux/ \ 73 | -f https://11notes.github.io/python-wheels/ \ 74 | -r https://raw.githubusercontent.com/home-assistant/core/refs/tags/${APP_VERSION}/requirements.txt \ 75 | -r https://raw.githubusercontent.com/home-assistant/core/refs/tags/${APP_VERSION}/requirements_all.txt \ 76 | -r https://raw.githubusercontent.com/home-assistant/docker/refs/heads/master/requirements.txt \ 77 | homeassistant=="${APP_VERSION}"; 78 | 79 | # :: FILE-SYSTEM 80 | FROM alpine AS file-system 81 | ARG APP_ROOT 82 | 83 | RUN set -ex; \ 84 | mkdir -p /distroless${APP_ROOT}/etc; 85 | 86 | # ╔═════════════════════════════════════════════════════╗ 87 | # ║ IMAGE ║ 88 | # ╚═════════════════════════════════════════════════════╝ 89 | # :: HEADER 90 | FROM scratch 91 | 92 | # :: default arguments 93 | ARG TARGETPLATFORM \ 94 | TARGETOS \ 95 | TARGETARCH \ 96 | TARGETVARIANT \ 97 | APP_IMAGE \ 98 | APP_NAME \ 99 | APP_VERSION \ 100 | APP_ROOT \ 101 | APP_UID \ 102 | APP_GID \ 103 | APP_NO_CACHE 104 | 105 | # :: default environment 106 | ENV APP_IMAGE=${APP_IMAGE} \ 107 | APP_NAME=${APP_NAME} \ 108 | APP_VERSION=${APP_VERSION} \ 109 | APP_ROOT=${APP_ROOT} 110 | 111 | # :: multi-stage 112 | COPY --from=distroless-localhealth / / 113 | COPY --from=distroless-go2rtc / / 114 | COPY --from=util / / 115 | COPY --from=build / / 116 | COPY --from=file-system --chown=${APP_UID}:${APP_GID} /distroless/ / 117 | COPY --chown=${APP_UID}:${APP_GID} ./rootfs/ / 118 | 119 | # :: PERSISTENT DATA 120 | VOLUME ["${APP_ROOT}/etc"] 121 | 122 | # :: MONITORING 123 | HEALTHCHECK --interval=5s --timeout=2s --start-period=5s \ 124 | CMD ["/usr/local/bin/localhealth", "http://127.0.0.1:8123/"] 125 | 126 | # :: EXECUTE 127 | USER ${APP_UID}:${APP_GID} 128 | ENTRYPOINT ["/usr/local/bin/tini", "--", "python", "-m", "homeassistant", "--config", 
"/homeassistant/etc"] -------------------------------------------------------------------------------- /.github/workflows/org.update.yml: -------------------------------------------------------------------------------- 1 | name: org.update 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | etc: 7 | description: 'base64 encoded json string' 8 | required: true 9 | 10 | jobs: 11 | update: 12 | runs-on: ubuntu-latest 13 | 14 | permissions: 15 | actions: read 16 | contents: write 17 | 18 | steps: 19 | - name: init / checkout 20 | uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2 21 | with: 22 | ref: 'master' 23 | fetch-depth: 0 24 | 25 | - name: update / setup node 26 | uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 27 | with: 28 | node-version: '20' 29 | - run: npm i semver 30 | 31 | - name: update / compare latest with current version 32 | uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298 33 | with: 34 | script: | 35 | (async()=>{ 36 | const { Buffer } = require('node:buffer'); 37 | const { inspect } = require('node:util'); 38 | const { existsSync, readFileSync, writeFileSync } = require('node:fs'); 39 | const { resolve } = require('node:path'); 40 | const semver = require('semver') 41 | 42 | // defaults 43 | const json = `${{ toJSON(github.event.inputs) }}`; 44 | const job = {inputs:{}, json:{}}; 45 | 46 | // check if inputs is valid base64 encoded json 47 | try{ 48 | if(json.length > 0){ 49 | const n = JSON.parse(json); 50 | if(n?.etc){ 51 | try{ 52 | job.inputs = JSON.parse(Buffer.from(n.etc, 'base64').toString('ascii')); 53 | if(!job.inputs?.version){ 54 | core.setFailed(`input does not contain valid semver version: ${inspect(job.inputs, {showHidden:false, depth:null, colors:true})}`); 55 | }else if(!job.inputs?.tag){ 56 | core.setFailed(`input does not contain valid git tag: ${inspect(job.inputs, {showHidden:false, depth:null, colors:true})}`); 57 | } 58 | }catch(e){ 59 | core.setFailed(`could not parse github.event.inputs.etc: ${n.etc} (${Buffer.from(n.etc, 'base64').toString('ascii')})`); 60 | } 61 | } 62 | } 63 | }catch(e){ 64 | core.setFailed(`could not parse github.event.inputs: ${json}`); 65 | } 66 | 67 | // check if .json exists 68 | try{ 69 | const path = resolve('.json'); 70 | if(existsSync(path)){ 71 | try{ 72 | job.json = JSON.parse(readFileSync(path).toString()); 73 | }catch(e){ 74 | throw new Error('could not parse .json'); 75 | } 76 | }else{ 77 | throw new Error('.json does not exist!'); 78 | } 79 | }catch(e){ 80 | core.setFailed(e); 81 | } 82 | 83 | // semver 84 | const latest = semver.valid(semver.coerce(job.inputs.version)); 85 | const current = semver.valid(semver.coerce(job.json.semver.version)); 86 | const tag = semver.valid(semver.coerce(job.inputs.tag)); 87 | const checks = {latestTagExists:true}; 88 | 89 | try{ 90 | const tag = await fetch(`https://hub.docker.com/v2/repositories/${job.json.image}/tags/${latest}`); 91 | if(tag.status === 404){ 92 | checks.latestTagExists = false; 93 | } 94 | }catch(e){ 95 | core.warning(e); 96 | } 97 | 98 | // compare 99 | if((latest && latest !== current) || !checks.latestTagExists){ 100 | core.info(`new ${semver.diff(current, latest)} release found (${latest}), updating ...`) 101 | job.json.semver.version = latest; 102 | 103 | // check if app has a build version 104 | if(job.inputs?.build){ 105 | job.json.build.args.version_build = job.inputs.build; 106 | } 107 | 108 | // update .json 109 | try{ 110 | writeFileSync(resolve('.json'), JSON.stringify(job.json, null, 2)); 111 | 112 | 
// export variables 113 | core.exportVariable('WORKFLOW_UPDATE', true); 114 | if(job.inputs?.unraid){ 115 | core.exportVariable('WORKFLOW_UPDATE_UNRAID', 'true'); 116 | core.exportVariable('WORKFLOW_UPDATE_UNRAID_BASE64JSON', Buffer.from(JSON.stringify({semversuffix:"unraid", uid:99, gid:100})).toString('base64')); 117 | } 118 | core.exportVariable('LATEST_TAG', semver.inc(tag, semver.diff(current, latest))); 119 | core.exportVariable('LATEST_VERSION', latest); 120 | if(job.inputs?.build) core.exportVariable('LATEST_BUILD', job.inputs.build); 121 | }catch(e){ 122 | core.setFailed(e); 123 | } 124 | }else{ 125 | core.info('no update required') 126 | } 127 | 128 | core.info(inspect(job, {showHidden:false, depth:null, colors:true})); 129 | })(); 130 | 131 | 132 | 133 | - name: update / checkout 134 | id: checkout 135 | if: env.WORKFLOW_UPDATE == 'true' 136 | run: | 137 | git config user.name "github-actions[bot]" 138 | git config user.email "41898282+github-actions[bot]@users.noreply.github.com" 139 | git add .json 140 | git commit -m "chore: auto upgrade to v${{ env.LATEST_VERSION }}" 141 | git push origin HEAD:master 142 | 143 | - name: update / tag 144 | if: env.WORKFLOW_UPDATE == 'true' && steps.checkout.outcome == 'success' 145 | run: | 146 | SHA256=$(git rev-list --branches --max-count=1) 147 | git tag -a v${{ env.LATEST_TAG }} -m "v${{ env.LATEST_TAG }}" ${SHA256} 148 | git push --follow-tags 149 | 150 | - name: update / build container image 151 | id: build 152 | if: env.WORKFLOW_UPDATE == 'true' && steps.checkout.outcome == 'success' 153 | uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7 154 | with: 155 | workflow: docker.yml 156 | wait-for-completion: false 157 | token: "${{ secrets.REPOSITORY_TOKEN }}" 158 | inputs: '{ "release":"true", "readme":"true", "run-name":"update v${{ env.LATEST_VERSION }}" }' 159 | ref: "v${{ env.LATEST_TAG }}" 160 | 161 | - name: update / build container image for unraid 162 | if: env.WORKFLOW_UPDATE_UNRAID == 'true' && steps.checkout.outcome == 'success' && steps.build.outcome == 'success' 163 | uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7 164 | with: 165 | workflow: docker.yml 166 | wait-for-completion: false 167 | token: "${{ secrets.REPOSITORY_TOKEN }}" 168 | inputs: '{ "release":"false", "readme":"false", "run-name":"update unraid v${{ env.LATEST_VERSION }}", "etc":"${{ env.WORKFLOW_UPDATE_UNRAID_BASE64JSON }}" }' 169 | ref: "v${{ env.LATEST_TAG }}" -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![banner](https://raw.githubusercontent.com/11notes/static/refs/heads/main/img/banner/README.png) 2 | 3 | # HOMEASSISTANT 4 | 
![size](https://img.shields.io/badge/image_size-2GB-green?color=%2338ad2d)![5px](https://raw.githubusercontent.com/11notes/static/refs/heads/main/img/markdown/transparent5x2px.png)![pulls](https://img.shields.io/docker/pulls/11notes/homeassistant?color=2b75d6)![5px](https://raw.githubusercontent.com/11notes/static/refs/heads/main/img/markdown/transparent5x2px.png)[](https://github.com/11notes/docker-homeassistant/issues)![5px](https://raw.githubusercontent.com/11notes/static/refs/heads/main/img/markdown/transparent5x2px.png)![swiss_made](https://img.shields.io/badge/Swiss_Made-FFFFFF?labelColor=FF0000&logo=data:image/svg%2bxml;base64,PHN2ZyB2ZXJzaW9uPSIxIiB3aWR0aD0iNTEyIiBoZWlnaHQ9IjUxMiIgdmlld0JveD0iMCAwIDMyIDMyIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgogIDxyZWN0IHdpZHRoPSIzMiIgaGVpZ2h0PSIzMiIgZmlsbD0idHJhbnNwYXJlbnQiLz4KICA8cGF0aCBkPSJtMTMgNmg2djdoN3Y2aC03djdoLTZ2LTdoLTd2LTZoN3oiIGZpbGw9IiNmZmYiLz4KPC9zdmc+) 5 | 6 | run home assistant rootless 7 | 8 | # INTRODUCTION 📢 9 | 10 | [Home Assistant](https://github.com/home-assistant/core) (created by [home-assistant](https://github.com/home-assistant/)) is an open source home automation that puts local control and privacy first. Powered by a worldwide community of tinkerers and DIY enthusiasts. Perfect to run on a Raspberry Pi or a local server. 11 | 12 | # SYNOPSIS 📖 13 | **What can I do with this?** This image will give you a [rootless](https://github.com/11notes/RTFM/blob/main/linux/container/image/rootless.md) Home Assistant installation. 14 | 15 | # UNIQUE VALUE PROPOSITION 💶 16 | **Why should I run this image and not the other image(s) that already exist?** Good question! Because ... 17 | 18 | > [!IMPORTANT] 19 | >* ... this image runs [rootless](https://github.com/11notes/RTFM/blob/main/linux/container/image/rootless.md) as 1000:1000 20 | >* ... this image is auto updated to the latest version via CI/CD 21 | >* ... this image is built and compiled from source 22 | >* ... this image has a health check 23 | >* ... this image runs read-only 24 | >* ... this image is automatically scanned for CVEs before and after publishing 25 | >* ... this image is created via a secure and pinned CI/CD process 26 | >* ... this image is very small 27 | 28 | If you value security, simplicity and optimizations to the extreme, then this image might be for you. 
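A quick way to check the rootless and read-only claims yourself, once the compose example further down is up and running (the service name `homeassistant` comes from that example; `id` and `touch` are assumed to be present, which the Alpine-based build stage installs via coreutils):

```sh
# should report uid=1000 gid=1000, i.e. not root
docker compose exec homeassistant id

# should fail with "Read-only file system", because only the mounted /homeassistant/etc volume is writable
docker compose exec homeassistant touch /usr/local/bin/probe
```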
29 | 30 | # VOLUMES 📁 31 | * **/homeassistant/etc** - Directory of all config and custom files 32 | 33 | # COMPOSE ✂️ 34 | ```yaml 35 | name: "iot" 36 | 37 | x-lockdown: &lockdown 38 | # prevents write access to the image itself 39 | read_only: true 40 | # prevents any process within the container to gain more privileges 41 | security_opt: 42 | - "no-new-privileges=true" 43 | 44 | services: 45 | postgres: 46 | # for more information about this image checkout: 47 | # https://github.com/11notes/docker-postgres 48 | image: "11notes/postgres:17" 49 | <<: *lockdown 50 | environment: 51 | TZ: "Europe/Zurich" 52 | POSTGRES_PASSWORD: "${POSTGRES_PASSWORD}" 53 | networks: 54 | backend: 55 | volumes: 56 | - "postgres.etc:/postgres/etc" 57 | - "postgres.var:/postgres/var" 58 | - "postgres.backup:/postgres/backup" 59 | tmpfs: 60 | - "/postgres/run:uid=1000,gid=1000" 61 | - "/postgres/log:uid=1000,gid=1000" 62 | restart: "always" 63 | 64 | homeassistant: 65 | depends_on: 66 | postgres: 67 | condition: "service_healthy" 68 | restart: true 69 | image: "11notes/homeassistant:2025.11.2" 70 | <<: *lockdown 71 | environment: 72 | TZ: "Europe/Zurich" 73 | volumes: 74 | - "homeassistant.etc:/homeassistant/etc" 75 | networks: 76 | frontend: 77 | backend: 78 | ports: 79 | - "3000:8123/tcp" 80 | restart: "always" 81 | 82 | volumes: 83 | postgres.etc: 84 | postgres.var: 85 | postgres.backup: 86 | homeassistant.etc: 87 | 88 | networks: 89 | frontend: 90 | backend: 91 | internal: true 92 | ``` 93 | To find out how you can change the default UID/GID of this container image, consult the [RTFM](https://github.com/11notes/RTFM/blob/main/linux/container/image/11notes/how-to.changeUIDGID.md#change-uidgid-the-correct-way). 94 | 95 | # DEFAULT SETTINGS 🗃️ 96 | | Parameter | Value | Description | 97 | | --- | --- | --- | 98 | | `user` | docker | user name | 99 | | `uid` | 1000 | [user identifier](https://en.wikipedia.org/wiki/User_identifier) | 100 | | `gid` | 1000 | [group identifier](https://en.wikipedia.org/wiki/Group_identifier) | 101 | | `home` | /homeassistant | home directory of user docker | 102 | 103 | # ENVIRONMENT 📝 104 | | Parameter | Value | Default | 105 | | --- | --- | --- | 106 | | `TZ` | [Time Zone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) | | 107 | | `DEBUG` | Will activate debug option for container image and app (if available) | | 108 | 109 | # MAIN TAGS 🏷️ 110 | These are the main tags for the image. There is also a tag for each commit and its shorthand sha256 value. 111 | 112 | * [2025.11.2](https://hub.docker.com/r/11notes/homeassistant/tags?name=2025.11.2) 113 | 114 | ### There is no latest tag, what am I supposed to do about updates? 115 | It is my opinion that the ```:latest``` tag is a bad habit and should not be used at all. Many developers introduce **breaking changes** in new releases. This would mess up everything for people who use ```:latest```. If you don’t want to change the tag to the latest [semver](https://semver.org/), simply use the short versions of [semver](https://semver.org/). Instead of using ```:2025.11.2``` you can use ```:2025``` or ```:2025.11```. Since these tags are updated to the latest version of the software on each new release, using them is identical to using ```:latest``` but at least fixed to a major or minor version, which in theory should not introduce breaking changes. 116 | 117 | If you still insist on having the bleeding edge release of this app, simply use the ```:rolling``` tag, but be warned!
You will get the latest version of the app instantly, regardless of breaking changes or security issues whatsoever. You do this at your own risk! 118 | 119 | # REGISTRIES ☁️ 120 | ``` 121 | docker pull 11notes/homeassistant:2025.11.2 122 | docker pull ghcr.io/11notes/homeassistant:2025.11.2 123 | docker pull quay.io/11notes/homeassistant:2025.11.2 124 | ``` 125 | 126 | # SOURCE 💾 127 | * [11notes/homeassistant](https://github.com/11notes/docker-homeassistant) 128 | 129 | # PARENT IMAGE 🏛️ 130 | * [11notes/alpine:stable](https://hub.docker.com/r/11notes/alpine) 131 | 132 | # BUILT WITH 🧰 133 | * [home-assistant/core](https://github.com/home-assistant/core) 134 | * [11notes/util](https://github.com/11notes/docker-util) 135 | 136 | # GENERAL TIPS 📌 137 | > [!TIP] 138 | >* Use a reverse proxy like Traefik, Nginx or HAProxy to terminate TLS and to protect your endpoints 139 | >* Use Let’s Encrypt DNS-01 challenge to obtain valid SSL certificates for your services 140 | 141 | # CAUTION ⚠️ 142 | > [!CAUTION] 143 | >* This image comes with a default configuration with some default settings and examples. Please provide your own configuration if used in production. 144 | 145 | # ElevenNotes™️ 146 | This image is provided to you at your own risk. Always make backups before updating an image to a different version. Check the [releases](https://github.com/11notes/docker-homeassistant/releases) for breaking changes. If you have any problems using this image, simply raise an [issue](https://github.com/11notes/docker-homeassistant/issues), thanks. If you have a question or input, please create a new [discussion](https://github.com/11notes/docker-homeassistant/discussions) instead of an issue. You can find all my other repositories on [github](https://github.com/11notes?tab=repositories).
147 | 148 | *created 19.11.2025, 12:15:24 (CET)* -------------------------------------------------------------------------------- /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | name: docker 2 | run-name: ${{ inputs.run-name }} 3 | 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | run-name: 8 | description: 'set run-name for workflow (multiple calls)' 9 | type: string 10 | required: false 11 | default: 'docker' 12 | 13 | platform: 14 | description: 'list of platforms to build for' 15 | type: string 16 | required: false 17 | default: "amd64,arm64,arm/v7" 18 | 19 | build: 20 | description: 'set WORKFLOW_BUILD' 21 | required: false 22 | default: 'true' 23 | 24 | release: 25 | description: 'set WORKFLOW_GITHUB_RELEASE' 26 | required: false 27 | default: 'false' 28 | 29 | readme: 30 | description: 'set WORKFLOW_GITHUB_README' 31 | required: false 32 | default: 'false' 33 | 34 | etc: 35 | description: 'base64 encoded json string' 36 | required: false 37 | 38 | jobs: 39 | # ╔═════════════════════════════════════════════════════╗ 40 | # ║ ║ 41 | # ║ ║ 42 | # ║ CREATE PLATFORM MATRIX ║ 43 | # ║ ║ 44 | # ║ ║ 45 | # ╚═════════════════════════════════════════════════════╝ 46 | matrix: 47 | name: create job matrix 48 | runs-on: ubuntu-latest 49 | outputs: 50 | stringify: ${{ steps.setup-matrix.outputs.stringify }} 51 | 52 | steps: 53 | # CHECKOUT REPOSITORY 54 | - name: init / checkout 55 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 56 | with: 57 | ref: ${{ github.ref_name }} 58 | 59 | - name: matrix / setup list 60 | id: setup-matrix 61 | uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 62 | with: 63 | script: | 64 | const { existsSync, readFileSync } = require('node:fs'); 65 | const { inspect } = require('node:util'); 66 | const { resolve } = require('node:path'); 67 | const opt = {dot:{}}; 68 | 69 | try{ 70 | const path = resolve('.json'); 71 | if(existsSync(path)){ 72 | try{ 73 | opt.dot = JSON.parse(readFileSync(path).toString()); 74 | }catch(e){ 75 | throw new Error('could not parse .json'); 76 | } 77 | }else{ 78 | throw new Error('.json does not exist'); 79 | } 80 | }catch(e){ 81 | core.setFailed(e); 82 | } 83 | 84 | const platforms = ( 85 | ("${{ github.event.inputs.platform }}" != "amd64,arm64,arm/v7") ? "${{ github.event.inputs.platform }}".split(",") : ( 86 | (opt.dot?.platform) ? 
opt.dot.platform.split(",") : "${{ github.event.inputs.platform }}".split(",") 87 | ) 88 | ); 89 | 90 | const matrix = {include:[]}; 91 | if("${{ github.event.inputs.readme }}" === "true" && "${{ github.event.inputs.build }}" === "false"){ 92 | matrix.include.push({platform:"amd64", runner:"ubuntu-24.04"}); 93 | }else{ 94 | for(const platform of platforms){ 95 | switch(platform){ 96 | case "amd64": matrix.include.push({platform:platform, runner:"ubuntu-24.04"}); break; 97 | case "arm64": matrix.include.push({platform:platform, runner:"ubuntu-24.04-arm"}); break; 98 | case "arm/v7": matrix.include.push({platform:platform, runner:"ubuntu-24.04-arm"}); break; 99 | } 100 | } 101 | } 102 | 103 | const stringify = JSON.stringify(matrix); 104 | core.setOutput('stringify', stringify); 105 | 106 | // print 107 | core.info(inspect({opt:opt, matrix:matrix, platforms:platforms}, {showHidden:false, depth:null, colors:true})); 108 | 109 | 110 | # ╔═════════════════════════════════════════════════════╗ 111 | # ║ ║ 112 | # ║ ║ 113 | # ║ BUILD CONTAINER IMAGE ║ 114 | # ║ ║ 115 | # ║ ║ 116 | # ╚═════════════════════════════════════════════════════╝ 117 | docker: 118 | name: create container image 119 | runs-on: ${{ matrix.runner }} 120 | strategy: 121 | fail-fast: false 122 | matrix: ${{ fromJSON(needs.matrix.outputs.stringify) }} 123 | outputs: 124 | DOCKER_IMAGE_NAME: ${{ steps.setup-environment.outputs.DOCKER_IMAGE_NAME }} 125 | DOCKER_IMAGE_MERGE_TAGS: ${{ steps.setup-environment.outputs.DOCKER_IMAGE_MERGE_TAGS }} 126 | DOCKER_IMAGE_DESCRIPTION: ${{ steps.setup-environment.outputs.DOCKER_IMAGE_DESCRIPTION }} 127 | DOCKER_IMAGE_NAME_AND_VERSION: ${{ steps.setup-environment.outputs.DOCKER_IMAGE_NAME_AND_VERSION }} 128 | DOCKER_IMAGE_ARGUMENTS: ${{ steps.setup-environment.outputs.DOCKER_IMAGE_ARGUMENTS }} 129 | WORKFLOW_BUILD: ${{ steps.setup-environment.outputs.WORKFLOW_BUILD }} 130 | 131 | timeout-minutes: 1440 132 | 133 | services: 134 | registry: 135 | image: registry:2 136 | ports: 137 | - 5000:5000 138 | 139 | permissions: 140 | actions: write 141 | contents: write 142 | packages: write 143 | attestations: write 144 | id-token: write 145 | security-events: write 146 | 147 | needs: matrix 148 | 149 | steps: 150 | # ╔═════════════════════════════════════════════════════╗ 151 | # ║ SETUP ENVIRONMENT ║ 152 | # ╚═════════════════════════════════════════════════════╝ 153 | # CHECKOUT ALL DEPTHS (ALL TAGS) 154 | - name: init / checkout 155 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 156 | with: 157 | ref: ${{ github.ref_name }} 158 | fetch-depth: 0 159 | 160 | # SETUP ENVIRONMENT VARIABLES AND INPUTS 161 | - name: init / setup environment 162 | id: setup-environment 163 | uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 164 | with: 165 | script: | 166 | const { existsSync, readFileSync } = require('node:fs'); 167 | const { resolve } = require('node:path'); 168 | const { inspect } = require('node:util'); 169 | const { Buffer } = require('node:buffer'); 170 | const inputs = `${{ toJSON(github.event.inputs) }}`. 
171 | replace(/"platform":\s*"\[(.+)\]",/i, `"platform": [$1],`); 172 | const opt = {input:{}, dot:{}}; 173 | 174 | try{ 175 | if(inputs.length > 0){ 176 | opt.input = JSON.parse(inputs); 177 | if(opt.input?.etc){ 178 | opt.input.etc = JSON.parse(Buffer.from(opt.input.etc, 'base64').toString('ascii')); 179 | } 180 | } 181 | }catch(e){ 182 | core.warning('could not parse github.event.inputs'); 183 | core.warning(inputs); 184 | } 185 | 186 | try{ 187 | const path = resolve('.json'); 188 | if(existsSync(path)){ 189 | try{ 190 | opt.dot = JSON.parse(readFileSync(path).toString()); 191 | }catch(e){ 192 | throw new Error('could not parse .json'); 193 | } 194 | }else{ 195 | throw new Error('.json does not exist'); 196 | } 197 | }catch(e){ 198 | core.setFailed(e); 199 | } 200 | 201 | const docker = { 202 | image:{ 203 | name:opt.dot.image, 204 | arch:(opt.input?.etc?.arch || opt.dot?.arch || 'linux/amd64,linux/arm64'), 205 | prefix:((opt.input?.etc?.semverprefix) ? `${opt.input?.etc?.semverprefix}-` : ''), 206 | suffix:((opt.input?.etc?.semversuffix) ? `-${opt.input?.etc?.semversuffix}` : ''), 207 | description:(opt.dot?.readme?.description || ''), 208 | platform:{ 209 | sanitized:"${{ matrix.platform }}".replace(/[^A-Z-a-z0-9]+/i, ""), 210 | }, 211 | tags:[], 212 | build:(opt.input?.build === undefined) ? false : opt.input.build, 213 | }, 214 | app:{ 215 | image:opt.dot.image, 216 | name:opt.dot.name, 217 | version:(opt.input?.etc?.version || opt.dot?.semver?.version), 218 | root:opt.dot.root, 219 | UID:(opt.input?.etc?.uid || 1000), 220 | GID:(opt.input?.etc?.gid || 1000), 221 | no_cache:new Date().getTime(), 222 | }, 223 | cache:{ 224 | registry:'localhost:5000/', 225 | enable:(opt.input?.etc?.cache === undefined) ? true : opt.input.etc.cache, 226 | }, 227 | tags:[], 228 | merge_tags:[], 229 | }; 230 | 231 | docker.cache.name = `${docker.image.name}:${docker.image.prefix}buildcache${docker.image.suffix}`; 232 | docker.cache.grype = `${docker.cache.registry}${docker.image.name}:${docker.image.prefix}grype${docker.image.suffix}`; 233 | docker.app.prefix = docker.image.prefix; 234 | docker.app.suffix = docker.image.suffix; 235 | 236 | const semver = docker.app.version.split('.'); 237 | // setup tags 238 | if(!opt.dot?.semver?.disable?.rolling && !opt.input.etc?.semver?.disable?.rolling){ 239 | docker.image.tags.push('rolling'); 240 | } 241 | if(opt.input?.etc?.dockerfile !== 'arch.dockerfile' && opt.input?.etc?.tag){ 242 | docker.image.tags.push(opt.input.etc.tag); 243 | if(Array.isArray(semver)){ 244 | if(semver.length >= 1) docker.image.tags.push(`${opt.input.etc.tag}-${semver[0]}`); 245 | if(semver.length >= 2) docker.image.tags.push(`${opt.input.etc.tag}-${semver[0]}.${semver[1]}`); 246 | if(semver.length >= 3) docker.image.tags.push(`${opt.input.etc.tag}-${semver[0]}.${semver[1]}.${semver[2]}`); 247 | }else{ 248 | docker.image.tags.push(`${opt.input.etc.tag}-${docker.app.version}`); 249 | } 250 | docker.cache.name = `${docker.image.name}:buildcache-${opt.input.etc.tag}`; 251 | }else if(docker.app.version !== 'latest'){ 252 | if(Array.isArray(semver)){ 253 | if(semver.length >= 1) docker.image.tags.push(`${semver[0]}`); 254 | if(semver.length >= 2) docker.image.tags.push(`${semver[0]}.${semver[1]}`); 255 | if(semver.length >= 3) docker.image.tags.push(`${semver[0]}.${semver[1]}.${semver[2]}`); 256 | } 257 | if(opt.dot?.semver?.stable && new RegExp(opt.dot?.semver.stable, 'ig').test(docker.image.tags.join(','))) docker.image.tags.push('stable'); 258 | if(opt.dot?.semver?.latest && new 
RegExp(opt.dot?.semver.latest, 'ig').test(docker.image.tags.join(','))) docker.image.tags.push('latest'); 259 | }else{ 260 | docker.image.tags.push('latest'); 261 | } 262 | 263 | for(const tag of docker.image.tags){ 264 | docker.tags.push(`${docker.image.name}:${docker.image.prefix}${tag}${docker.image.suffix}-${docker.image.platform.sanitized}`); 265 | docker.tags.push(`ghcr.io/${docker.image.name}:${docker.image.prefix}${tag}${docker.image.suffix}-${docker.image.platform.sanitized}`); 266 | docker.tags.push(`quay.io/${docker.image.name}:${docker.image.prefix}${tag}${docker.image.suffix}-${docker.image.platform.sanitized}`); 267 | docker.merge_tags.push(`${docker.image.prefix}${tag}${docker.image.suffix}`); 268 | } 269 | 270 | // setup build arguments 271 | if(opt.input?.etc?.build?.args){ 272 | for(const arg in opt.input.etc.build.args){ 273 | docker.app[arg] = opt.input.etc.build.args[arg]; 274 | } 275 | } 276 | if(opt.dot?.build?.args){ 277 | for(const arg in opt.dot.build.args){ 278 | docker.app[arg] = opt.dot.build.args[arg]; 279 | } 280 | } 281 | const arguments = []; 282 | for(const argument in docker.app){ 283 | arguments.push(`APP_${argument.toUpperCase()}=${docker.app[argument]}`); 284 | } 285 | 286 | // export to environment 287 | core.exportVariable('DOCKER_CACHE_REGISTRY', docker.cache.registry); 288 | core.exportVariable('DOCKER_CACHE_NAME', `${docker.cache.name}-${docker.image.platform.sanitized}`); 289 | core.exportVariable('DOCKER_CACHE_GRYPE', docker.cache.grype); 290 | 291 | core.exportVariable('DOCKER_IMAGE_NAME', docker.image.name); 292 | core.setOutput('DOCKER_IMAGE_NAME', docker.image.name); 293 | core.exportVariable('DOCKER_IMAGE_TAGS', docker.tags.join(',')); 294 | core.exportVariable('DOCKER_IMAGE_MERGE_TAGS', docker.merge_tags.join("\r\n")); 295 | core.setOutput('DOCKER_IMAGE_MERGE_TAGS', docker.merge_tags.join("\r\n")); 296 | core.exportVariable('DOCKER_IMAGE_DESCRIPTION', docker.image.description); 297 | core.setOutput('DOCKER_IMAGE_DESCRIPTION', docker.image.description); 298 | core.exportVariable('DOCKER_IMAGE_ARGUMENTS', arguments.join("\r\n")); 299 | core.setOutput('DOCKER_IMAGE_ARGUMENTS', arguments.join("\r\n")); 300 | core.exportVariable('DOCKER_IMAGE_DOCKERFILE', opt.input?.etc?.dockerfile || 'arch.dockerfile'); 301 | core.exportVariable('DOCKER_IMAGE_PLATFORM_SANITIZED', docker.image.platform.sanitized); 302 | core.exportVariable('DOCKER_IMAGE_NAME_AND_VERSION', `${docker.image.name}:${docker.app.version}`); 303 | core.setOutput('DOCKER_IMAGE_NAME_AND_VERSION', `${docker.image.name}:${docker.app.version}`); 304 | 305 | core.exportVariable('WORKFLOW_BUILD', docker.image.build); 306 | core.setOutput('WORKFLOW_BUILD', docker.image.build); 307 | core.exportVariable('WORKFLOW_BUILD_NO_CACHE', !docker.cache.enable); 308 | 309 | core.exportVariable('WORKFLOW_CREATE_RELEASE', (opt.input?.release === undefined) ? false : opt.input.release); 310 | core.exportVariable('WORKFLOW_CREATE_README', (opt.input?.readme === undefined) ? false : opt.input.readme); 311 | core.exportVariable('WORKFLOW_GRYPE_FAIL_ON_SEVERITY', (opt.dot?.grype?.fail === undefined) ? 
true : opt.dot.grype.fail); 312 | core.exportVariable('WORKFLOW_GRYPE_SEVERITY_CUTOFF', (opt.dot?.grype?.severity || 'critical')); 313 | 314 | // print 315 | core.info(inspect({opt:opt, docker:docker}, {showHidden:false, depth:null, colors:true})); 316 | 317 | 318 | # ╔═════════════════════════════════════════════════════╗ 319 | # ║ CONTAINER REGISTRY LOGIN ║ 320 | # ╚═════════════════════════════════════════════════════╝ 321 | # DOCKER HUB 322 | - name: docker / login to hub 323 | uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 324 | with: 325 | username: 11notes 326 | password: ${{ secrets.DOCKER_TOKEN }} 327 | 328 | # GITHUB CONTAINER REGISTRY 329 | - name: github / login to ghcr 330 | uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 331 | with: 332 | registry: ghcr.io 333 | username: 11notes 334 | password: ${{ secrets.GITHUB_TOKEN }} 335 | 336 | # REDHAT QUAY 337 | - name: quay / login to quay 338 | uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 339 | with: 340 | registry: quay.io 341 | username: 11notes+github 342 | password: ${{ secrets.QUAY_TOKEN }} 343 | 344 | 345 | # ╔═════════════════════════════════════════════════════╗ 346 | # ║ BUILD CONTAINER IMAGE ║ 347 | # ╚═════════════════════════════════════════════════════╝ 348 | # SETUP QEMU 349 | - name: container image / setup qemu 350 | if: env.WORKFLOW_BUILD == 'true' && matrix.platform == 'arm/v7' 351 | uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 352 | with: 353 | image: tonistiigi/binfmt:qemu-v8.1.5 354 | cache-image: false 355 | 356 | # SETUP BUILDX BUILDER WITH USING LOCAL REGISTRY 357 | - name: container image / setup buildx 358 | if: env.WORKFLOW_BUILD == 'true' 359 | uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 360 | with: 361 | driver-opts: network=host 362 | 363 | # BUILD CONTAINER IMAGE FROM GLOBAL CACHE (DOCKER HUB) AND PUSH TO LOCAL CACHE 364 | - name: container image / build 365 | if: env.WORKFLOW_BUILD == 'true' 366 | id: image-build 367 | uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 368 | with: 369 | context: . 
370 | no-cache: ${{ env.WORKFLOW_BUILD_NO_CACHE }} 371 | file: ${{ env.DOCKER_IMAGE_DOCKERFILE }} 372 | push: true 373 | platforms: linux/${{ matrix.platform }} 374 | cache-from: type=registry,ref=${{ env.DOCKER_CACHE_NAME }} 375 | cache-to: type=registry,ref=${{ env.DOCKER_CACHE_REGISTRY }}${{ env.DOCKER_CACHE_NAME }},mode=max,compression=zstd,force-compression=true 376 | build-args: | 377 | ${{ env.DOCKER_IMAGE_ARGUMENTS }} 378 | tags: | 379 | ${{ env.DOCKER_CACHE_GRYPE }} 380 | 381 | # SCAN LOCAL CONTAINER IMAGE WITH GRYPE 382 | - name: container image / scan with grype 383 | if: env.WORKFLOW_BUILD == 'true' 384 | id: grype 385 | uses: anchore/scan-action@1638637db639e0ade3258b51db49a9a137574c3e # v6.5.1 386 | with: 387 | image: ${{ env.DOCKER_CACHE_GRYPE }} 388 | fail-build: ${{ env.WORKFLOW_GRYPE_FAIL_ON_SEVERITY }} 389 | severity-cutoff: ${{ env.WORKFLOW_GRYPE_SEVERITY_CUTOFF }} 390 | output-format: 'sarif' 391 | by-cve: true 392 | cache-db: true 393 | 394 | # OUTPUT CVE REPORT IF SCAN FAILS 395 | - name: container image / scan with grype FAILED 396 | if: env.WORKFLOW_BUILD == 'true' && (failure() || steps.grype.outcome == 'failure') && steps.image-build.outcome == 'success' 397 | uses: anchore/scan-action@1638637db639e0ade3258b51db49a9a137574c3e # v6.5.1 398 | with: 399 | image: ${{ env.DOCKER_CACHE_GRYPE }} 400 | fail-build: false 401 | severity-cutoff: ${{ env.WORKFLOW_GRYPE_SEVERITY_CUTOFF }} 402 | output-format: 'table' 403 | by-cve: true 404 | cache-db: true 405 | 406 | # PUSH IMAGE TO ALL REGISTRIES IF CLEAN 407 | - name: container image / push to registries 408 | id: image-push 409 | if: env.WORKFLOW_BUILD == 'true' 410 | uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 411 | with: 412 | context: . 413 | no-cache: ${{ env.WORKFLOW_BUILD_NO_CACHE }} 414 | file: ${{ env.DOCKER_IMAGE_DOCKERFILE }} 415 | push: true 416 | sbom: true 417 | provenance: mode=max 418 | platforms: linux/${{ matrix.platform }} 419 | cache-from: type=registry,ref=${{ env.DOCKER_CACHE_REGISTRY }}${{ env.DOCKER_CACHE_NAME }} 420 | cache-to: type=registry,ref=${{ env.DOCKER_CACHE_NAME }},mode=max,compression=zstd,force-compression=true 421 | build-args: | 422 | ${{ env.DOCKER_IMAGE_ARGUMENTS }} 423 | tags: | 424 | ${{ env.DOCKER_IMAGE_TAGS }} 425 | 426 | # CREATE ATTESTATION ARTIFACTS 427 | - name: container image / create attestation artifacts 428 | if: env.WORKFLOW_BUILD == 'true' && steps.image-push.outcome == 'success' 429 | uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 430 | with: 431 | subject-name: docker.io/${{ env.DOCKER_IMAGE_NAME }} 432 | subject-digest: ${{ steps.image-push.outputs.digest }} 433 | push-to-registry: false 434 | 435 | # EXPORT DIGEST 436 | - name: container image / export digest 437 | if: env.WORKFLOW_BUILD == 'true' && steps.image-push.outcome == 'success' 438 | run: | 439 | mkdir -p ${{ runner.temp }}/digests 440 | digest="${{ steps.image-push.outputs.digest }}" 441 | touch "${{ runner.temp }}/digests/${digest#sha256:}" 442 | 443 | # UPLOAD DIGEST 444 | - name: container image / upload 445 | if: env.WORKFLOW_BUILD == 'true' && steps.image-push.outcome == 'success' 446 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 447 | with: 448 | name: digests-linux-${{ env.DOCKER_IMAGE_PLATFORM_SANITIZED }} 449 | path: ${{ runner.temp }}/digests/* 450 | if-no-files-found: error 451 | 452 | 453 | # ╔═════════════════════════════════════════════════════╗ 454 | # ║ CREATE GITHUB RELEASE 
║ 455 | # ╚═════════════════════════════════════════════════════╝ 456 | # CREATE RELEASE MARKUP 457 | - name: github release / prepare markdown 458 | if: env.WORKFLOW_CREATE_RELEASE == 'true' && matrix.platform == 'amd64' 459 | id: git-release 460 | uses: 11notes/action-docker-release@v1 461 | 462 | # CREATE GITHUB RELEASE 463 | - name: github release / create 464 | if: env.WORKFLOW_CREATE_RELEASE == 'true' && steps.git-release.outcome == 'success' 465 | uses: actions/create-release@0cb9c9b65d5d1901c1f53e5e66eaf4afd303e70e # v1.1.4 466 | env: 467 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 468 | with: 469 | tag_name: ${{ github.ref }} 470 | release_name: ${{ github.ref }} 471 | body: ${{ steps.git-release.outputs.release }} 472 | draft: false 473 | prerelease: false 474 | 475 | 476 | # ╔═════════════════════════════════════════════════════╗ 477 | # ║ ║ 478 | # ║ ║ 479 | # ║ MERGE IMAGES INTO SINGLE MANIFEST ║ 480 | # ║ ║ 481 | # ║ ║ 482 | # ╚═════════════════════════════════════════════════════╝ 483 | merge_platform_images: 484 | needs: docker 485 | if: needs.docker.outputs.WORKFLOW_BUILD == 'true' 486 | name: merge platform images to a single manifest 487 | runs-on: ubuntu-latest 488 | strategy: 489 | fail-fast: false 490 | matrix: 491 | registry: [docker.io, ghcr.io, quay.io] 492 | 493 | env: 494 | DOCKER_IMAGE_NAME: ${{ needs.docker.outputs.DOCKER_IMAGE_NAME }} 495 | DOCKER_IMAGE_MERGE_TAGS: ${{ needs.docker.outputs.DOCKER_IMAGE_MERGE_TAGS }} 496 | 497 | permissions: 498 | contents: read 499 | packages: write 500 | attestations: write 501 | id-token: write 502 | 503 | steps: 504 | # ╔═════════════════════════════════════════════════════╗ 505 | # ║ CONTAINER REGISTRY LOGIN ║ 506 | # ╚═════════════════════════════════════════════════════╝ 507 | # DOCKER HUB 508 | - name: docker / login to hub 509 | uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 510 | with: 511 | username: 11notes 512 | password: ${{ secrets.DOCKER_TOKEN }} 513 | 514 | # GITHUB CONTAINER REGISTRY 515 | - name: github / login to ghcr 516 | uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 517 | with: 518 | registry: ghcr.io 519 | username: 11notes 520 | password: ${{ secrets.GITHUB_TOKEN }} 521 | 522 | # REDHAT QUAY 523 | - name: quay / login to quay 524 | uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 525 | with: 526 | registry: quay.io 527 | username: 11notes+github 528 | password: ${{ secrets.QUAY_TOKEN }} 529 | 530 | 531 | # ╔═════════════════════════════════════════════════════╗ 532 | # ║ MERGE PLATFORM IMAGES MANIFEST ║ 533 | # ╚═════════════════════════════════════════════════════╝ 534 | # DOWNLOAD DIGESTS 535 | - name: platform merge / digest 536 | uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 537 | with: 538 | path: ${{ runner.temp }}/digests 539 | pattern: digests-* 540 | merge-multiple: true 541 | 542 | # SETUP BUILDX BUILDER 543 | - name: platform merge / buildx 544 | uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 545 | 546 | # GET META DATA 547 | - name: platform merge / meta 548 | id: meta 549 | uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0 550 | with: 551 | images: ${{ matrix.registry }}/${{ env.DOCKER_IMAGE_NAME }} 552 | tags: | 553 | ${{ env.DOCKER_IMAGE_MERGE_TAGS }} 554 | 555 | # CREATE MANIFEST 556 | - name: platform merge / create manifest and push 557 | working-directory: ${{ runner.temp }}/digests 558 | run: | 559 
| docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ 560 | $(printf 'docker.io/${{ env.DOCKER_IMAGE_NAME }}@sha256:%s ' *) 561 | 562 | # INSPECT MANIFEST 563 | - name: platform merge / inspect 564 | run: | 565 | docker buildx imagetools inspect ${{ matrix.registry }}/${{ env.DOCKER_IMAGE_NAME }}:${{ steps.meta.outputs.version }} 566 | 567 | 568 | # ╔═════════════════════════════════════════════════════╗ 569 | # ║ ║ 570 | # ║ ║ 571 | # ║ FINALIZE IMAGE CREATION ║ 572 | # ║ ║ 573 | # ║ ║ 574 | # ╚═════════════════════════════════════════════════════╝ 575 | finally: 576 | if: ${{ always() }} 577 | needs: 578 | - docker 579 | - merge_platform_images 580 | name: finalize image creation 581 | runs-on: ubuntu-latest 582 | 583 | env: 584 | DOCKER_IMAGE_NAME: ${{ needs.docker.outputs.DOCKER_IMAGE_NAME }} 585 | DOCKER_IMAGE_DESCRIPTION: ${{ needs.docker.outputs.DOCKER_IMAGE_DESCRIPTION }} 586 | DOCKER_IMAGE_NAME_AND_VERSION: ${{ needs.docker.outputs.DOCKER_IMAGE_NAME_AND_VERSION }} 587 | DOCKER_IMAGE_ARGUMENTS: ${{ needs.docker.outputs.DOCKER_IMAGE_ARGUMENTS }} 588 | 589 | permissions: 590 | contents: write 591 | 592 | steps: 593 | # ╔═════════════════════════════════════════════════════╗ 594 | # ║ SETUP ENVIRONMENT ║ 595 | # ╚═════════════════════════════════════════════════════╝ 596 | # CHECKOUT ALL DEPTHS (ALL TAGS) 597 | - name: init / checkout 598 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 599 | with: 600 | ref: master 601 | fetch-depth: 0 602 | 603 | # ╔═════════════════════════════════════════════════════╗ 604 | # ║ CONTAINER REGISTRY LOGIN ║ 605 | # ╚═════════════════════════════════════════════════════╝ 606 | # DOCKER HUB 607 | - name: docker / login to hub 608 | uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 609 | with: 610 | username: 11notes 611 | password: ${{ secrets.DOCKER_TOKEN }} 612 | 613 | # ╔═════════════════════════════════════════════════════╗ 614 | # ║ CREATE README.md ║ 615 | # ╚═════════════════════════════════════════════════════╝ 616 | # CHECKOUT HEAD TO BE UP TO DATE WITH EVERYTHING 617 | - name: README.md / checkout 618 | if: github.event.inputs.readme == 'true' 619 | continue-on-error: true 620 | run: | 621 | git checkout HEAD 622 | 623 | # CREATE MAKRDOWN OF README.md 624 | - name: README.md / create 625 | if: github.event.inputs.readme == 'true' 626 | id: github-readme 627 | continue-on-error: true 628 | uses: 11notes/action-docker-readme@v1 629 | 630 | # UPLOAD README.md to DOCKER HUB 631 | - name: README.md / push to Docker Hub 632 | if: github.event.inputs.readme == 'true' && steps.github-readme.outcome == 'success' && hashFiles('README_NONGITHUB.md') != '' 633 | continue-on-error: true 634 | uses: christian-korneck/update-container-description-action@d36005551adeaba9698d8d67a296bd16fa91f8e8 # v1 635 | env: 636 | DOCKER_USER: 11notes 637 | DOCKER_PASS: ${{ secrets.DOCKER_TOKEN }} 638 | with: 639 | destination_container_repo: ${{ env.DOCKER_IMAGE_NAME }} 640 | provider: dockerhub 641 | short_description: ${{ env.DOCKER_IMAGE_DESCRIPTION }} 642 | readme_file: 'README_NONGITHUB.md' 643 | 644 | # COMMIT NEW README.md, LICENSE and compose 645 | - name: README.md / github commit & push 646 | if: github.event.inputs.readme == 'true' && steps.github-readme.outcome == 'success' && hashFiles('README.md') != '' 647 | continue-on-error: true 648 | run: | 649 | git config user.name "github-actions[bot]" 650 | git config user.email 
"41898282+github-actions[bot]@users.noreply.github.com" 651 | git add README.md 652 | if [ -f compose.yaml ]; then 653 | git add compose.yaml 654 | fi 655 | if [ -f compose.yml ]; then 656 | git add compose.yml 657 | fi 658 | if [ -f LICENSE ]; then 659 | git add LICENSE 660 | fi 661 | git commit -m "update README.md" 662 | git push origin HEAD:master 663 | 664 | # ╔═════════════════════════════════════════════════════╗ 665 | # ║ GITHUB REPOSITORY DEFAULT SETTINGS ║ 666 | # ╚═════════════════════════════════════════════════════╝ 667 | # UPDATE REPO WITH DEFAULT SETTINGS FOR CONTAINER IMAGE 668 | - name: github / update description and set repo defaults 669 | run: | 670 | curl --request PATCH \ 671 | --url https://api.github.com/repos/${{ github.repository }} \ 672 | --header 'authorization: Bearer ${{ secrets.REPOSITORY_TOKEN }}' \ 673 | --header 'content-type: application/json' \ 674 | --data '{ 675 | "description":"${{ env.DOCKER_IMAGE_DESCRIPTION }}", 676 | "homepage":"", 677 | "has_issues":true, 678 | "has_discussions":true, 679 | "has_projects":false, 680 | "has_wiki":false 681 | }' \ 682 | --fail --------------------------------------------------------------------------------