├── .gitignore ├── .github ├── FUNDING.yml └── workflows │ ├── clear-cache.yml │ ├── delete-untagged-images.yml │ ├── delete-old-images.yml │ └── docker-build.yml ├── config └── provisioning │ ├── README.md │ ├── default.sh │ └── flux.sh ├── NOTICE.md ├── LICENSE.md ├── docker-compose.yaml └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | workspace 2 | *__pycache__ 3 | config/authorized_keys 4 | config/rclone 5 | tpdocs 6 | .env 7 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [ai-dock, robballantyne] 4 | -------------------------------------------------------------------------------- /config/provisioning/README.md: -------------------------------------------------------------------------------- 1 | This directory contains example provisioning scripts for auto-configuration of your container. 2 | 3 | Recent builds no longer contain micromamba for python package installation, so old builds are not fully compatible with the new format. If you are using an older build you should upgrade as soon as possible. 4 | 5 | The previous default, if you need it for reference can be found in `./mamba_base`. 6 | -------------------------------------------------------------------------------- /NOTICE.md: -------------------------------------------------------------------------------- 1 | ## Notice: 2 | 3 | I have chosen to apply a custom license to this software for the following reasons: 4 | 5 | - **Uniqueness of Containers:** Common open-source licenses may not adequately address the nuances of software distributed within containers. My custom license ensures clarity regarding the separation of my code from bundled software, thereby respecting the rights of other authors. 
6 | 7 | - **Preservation of Source Code Integrity:** I am committed to maintaining the integrity of the source code while adhering to the spirit of open-source software. My custom license helps ensure transparency and accountability in my development practices. 8 | 9 | - **Funding and Control of Distribution:** Some of the funding for this project comes from maintaining control of distribution. This funding model wouldn't be possible without limiting distribution in certain ways, ultimately supporting the project's mission. 10 | 11 | - **Empowering Access:** Supported by controlled distribution, the mission of this project is to empower users with access to valuable tools and resources in the cloud, enabling them to utilize software that may otherwise require hardware resources beyond their reach. 12 | 13 | I welcome sponsorship from commercial entities utilizing this software, although it is not mandatory. Your support helps sustain the ongoing development and improvement of this project. 14 | 15 | You can sponsor this project at https://github.com/sponsors/ai-dock. 16 | 17 | Your understanding and support are greatly appreciated. 
-------------------------------------------------------------------------------- /.github/workflows/clear-cache.yml: -------------------------------------------------------------------------------- 1 | # https://stackoverflow.com/a/73556714 2 | name: Clear Cache 3 | 4 | on: 5 | workflow_dispatch: 6 | 7 | permissions: 8 | actions: write 9 | 10 | jobs: 11 | clear-cache: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Clear cache 15 | uses: actions/github-script@v6 16 | with: 17 | script: | 18 | console.log("About to clear") 19 | const response = await github.rest.actions.getActionsCacheList({ 20 | owner: context.repo.owner, 21 | repo: context.repo.repo, 22 | page: 1, 23 | per_page: 100 24 | }); 25 | 26 | const pages = (function() { 27 | if (typeof response.headers.link !== 'undefined') { 28 | return response.headers.link.split(">").slice(-2)[0].split('=').slice(-1)[0] 29 | } 30 | return 1; 31 | })(); 32 | 33 | console.log("Total pages: " + pages); 34 | 35 | for (let page = pages; page >= 1; page--) { 36 | console.log("Processing page " + page) 37 | 38 | const response = await github.rest.actions.getActionsCacheList({ 39 | owner: context.repo.owner, 40 | repo: context.repo.repo, 41 | page: page, 42 | per_page: 100 43 | }); 44 | 45 | for (const cache of response.data.actions_caches) { 46 | console.log(cache) 47 | github.rest.actions.deleteActionsCacheById({ 48 | owner: context.repo.owner, 49 | repo: context.repo.repo, 50 | cache_id: cache.id, 51 | }) 52 | } 53 | } 54 | 55 | console.log("Clear completed") -------------------------------------------------------------------------------- /.github/workflows/delete-untagged-images.yml: -------------------------------------------------------------------------------- 1 | name: Delete Untagged Packages 2 | 3 | env: 4 | PER_PAGE: 100 5 | 6 | on: 7 | workflow_dispatch: 8 | workflow_run: 9 | workflows: ["Docker Build"] 10 | types: 11 | - completed 12 | 13 | jobs: 14 | delete-untagged: 15 | runs-on: ubuntu-latest 16 | steps: 
17 | - 18 | run: | 19 | echo "PACKAGE_NAME=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} 20 | echo "OWNER=orgs/${GITHUB_REPOSITORY_OWNER,,}" >> ${GITHUB_ENV} 21 | - 22 | uses: actions/github-script@v6 23 | with: 24 | github-token: ${{ secrets.DELETE_PACKAGES_TOKEN }} 25 | script: | 26 | const response = await github.request("GET /${{ env.OWNER }}/packages/container/${{ github.event.repository.name }}/versions", 27 | { per_page: ${{ env.PER_PAGE }} 28 | }); 29 | 30 | const pages = (function() { 31 | if (typeof response.headers.link !== 'undefined') { 32 | return response.headers.link.split(">").slice(-2)[0].split('=').slice(-1)[0] 33 | } 34 | return 1; 35 | })(); 36 | 37 | console.log("Total pages: " + pages); 38 | 39 | for (let page = pages; page >= 1; page--) { 40 | console.log("Processing page " + page) 41 | 42 | const response = await github.request("GET /${{ env.OWNER }}/packages/container/${{ github.event.repository.name }}/versions", 43 | { 44 | per_page: ${{ env.PER_PAGE }}, 45 | page: page 46 | }); 47 | 48 | for (version of response.data) { 49 | if (version.metadata.container.tags.length == 0) { 50 | console.log("delete " + version.id) 51 | const deleteResponse = await github.request("DELETE /${{ env.OWNER }}/packages/container/${{ github.event.repository.name }}/versions/" + version.id, { }); 52 | console.log("status " + deleteResponse.status) 53 | } 54 | } 55 | } 56 | 57 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Custom Software License 2 | 3 | Copyright © 2022-present Robert Ballantyne, trading as AI-Dock. All rights reserved. 4 | 5 | Author and Licensor: Robert Ballantyne. 
6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software") to use the Software for personal or commercial purposes, subject to the following conditions: 8 | 9 | 1. Users may not modify the Software in any way that overrides the original code written by the author, except as explicitly instructed in the accompanying documentation provided by the author. 10 | 11 | 2. Users may add additional code or modifications for their custom builds, provided that such additions do not override the original code written by the author. 12 | 13 | 3. Distribution of the Software, including forks and source code, is permitted without explicit permission from the author. Hosting derivatives on a public registry, such as Docker Hub, is allowed, but users are not permitted to actively encourage the use of these derivatives by others without explicit permission from the author. Distribution of Docker images and templates derived from the Software is permitted only with explicit permission from the author. Permission may be revoked at any time without prior notice. To obtain permission for distribution of Docker images and templates, users must enter into a separate licensing agreement with the author. 14 | 15 | 4. Users may not remove or alter any branding, trademarks, or copyright notices present in the Software, including hyperlinks to external resources such as the author's website or documentation, and links to third-party services. These hyperlinks and links shall remain intact and unaltered. 16 | 17 | 5. Distribution of modified versions of the Software must prominently display a notice indicating that the Software has been modified from the original version and include appropriate attribution to the original author. 18 | 19 | 6. 
Users may not engage in any activities that could lead to malicious imitation or misrepresentation of the Software, including but not limited to creating derivative works that attempt to pass off as the original Software or using the Software to mislead or deceive others. 20 | 21 | 7. The author must ensure that the complete corresponding source code for the Software, including any modifications made by the author, remains publicly available at all times. 22 | 23 | 8. Users who have been granted permission to modify and distribute the Software are responsible for ensuring that the complete corresponding source code for any modifications they make to the Software remains publicly available at all times when they distribute their versions of the Software. This requirement applies to both the original Software and any derivative works created based on the Software. 24 | 25 | 9. This license applies only to the code originating from AI-Dock repositories, both inside and outside of containers. Other bundled software or dependencies should be viewed as separate entities and may be subject to their own respective licenses. 26 | 27 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
28 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | # Compose file build variables set in .env 3 | services: 4 | supervisor: 5 | platform: linux/amd64 6 | build: 7 | context: ./build 8 | args: 9 | PYTHON_VERSION: ${PYTHON_VERSION:-3.10} 10 | PYTORCH_VERSION: ${PYTORCH_VERSION:-2.4.1} 11 | FORGE_TAG: ${FORGE_TAG:-} 12 | IMAGE_BASE: ${IMAGE_BASE:-ghcr.io/ai-dock/python:${PYTHON_VERSION:-3.10}-v2-cuda-12.1.1-base-22.04} 13 | tags: 14 | - "ghcr.io/ai-dock/stable-diffusion-webui-forge:${IMAGE_TAG:-cuda-12.1.1-base-22.04}" 15 | 16 | image: ghcr.io/ai-dock/stable-diffusion-webui-forge:${IMAGE_TAG:-cuda-12.1.1-base-22.04} 17 | 18 | 19 | ## For Nvidia GPU's - You probably want to uncomment this 20 | #deploy: 21 | # resources: 22 | # reservations: 23 | # devices: 24 | # - driver: nvidia 25 | # count: all 26 | # capabilities: [gpu] 27 | 28 | devices: 29 | - "/dev/dri:/dev/dri" 30 | # For AMD GPU 31 | #- "/dev/kfd:/dev/kfd" 32 | 33 | volumes: 34 | # Workspace 35 | - ./workspace:${WORKSPACE:-/workspace/}:rshared 36 | # You can share /workspace/storage with other non-WEBUI containers. See README 37 | #- /path/to/common_storage:${WORKSPACE:-/workspace/}storage/:rshared 38 | # Will echo to root-owned authorized_keys file; 39 | # Avoids changing local file owner 40 | - ./config/authorized_keys:/root/.ssh/authorized_keys_mount 41 | - ./config/provisioning/default.sh:/opt/ai-dock/bin/provisioning.sh 42 | 43 | ports: 44 | # SSH available on host machine port 2222 to avoid conflict. 
Change to suit 45 | - ${SSH_PORT_HOST:-2222}:${SSH_PORT_LOCAL:-22} 46 | # Caddy port for service portal 47 | - ${SERVICEPORTAL_PORT_HOST:-1111}:${SERVICEPORTAL_PORT_HOST:-1111} 48 | # WEBUI web interface 49 | - ${FORGE_PORT_HOST:-7860}:${FORGE_PORT_HOST:-7860} 50 | # Jupyter server 51 | - ${JUPYTER_PORT_HOST:-8888}:${JUPYTER_PORT_HOST:-8888} 52 | # Syncthing 53 | - ${SYNCTHING_UI_PORT_HOST:-8384}:${SYNCTHING_UI_PORT_HOST:-8384} 54 | - ${SYNCTHING_TRANSPORT_PORT_HOST:-22999}:${SYNCTHING_TRANSPORT_PORT_HOST:-22999} 55 | 56 | environment: 57 | # Don't enclose values in quotes 58 | - DIRECT_ADDRESS=${DIRECT_ADDRESS:-127.0.0.1} 59 | - DIRECT_ADDRESS_GET_WAN=${DIRECT_ADDRESS_GET_WAN:-false} 60 | - WORKSPACE=${WORKSPACE:-/workspace} 61 | - WORKSPACE_SYNC=${WORKSPACE_SYNC:-false} 62 | - CF_TUNNEL_TOKEN=${CF_TUNNEL_TOKEN:-} 63 | - CF_QUICK_TUNNELS=${CF_QUICK_TUNNELS:-true} 64 | - CIVITAI_TOKEN=${CIVITAI_TOKEN:-} 65 | - HF_TOKEN=${HF_TOKEN:-} 66 | - WEB_ENABLE_AUTH=${WEB_ENABLE_AUTH:-true} 67 | - WEB_ENABLE_HTTPS=${WEB_ENABLE_HTTPS:-false} 68 | - WEB_USER=${WEB_USER:-user} 69 | - WEB_PASSWORD=${WEB_PASSWORD:-password} 70 | - SSH_PORT_HOST=${SSH_PORT_HOST:-2222} 71 | - SSH_PORT_LOCAL=${SSH_PORT_LOCAL:-22} 72 | - SERVICEPORTAL_PORT_HOST=${SERVICEPORTAL_PORT_HOST:-1111} 73 | - SERVICEPORTAL_METRICS_PORT=${SERVICEPORTAL_METRICS_PORT:-21111} 74 | - SERVICEPORTAL_URL=${SERVICEPORTAL_URL:-} 75 | - FORGE_BRANCH=${FORGE_BRANCH:-} 76 | - FORGE_ARGS=${FORGE_ARGS:-} 77 | - FORGE_PORT_HOST=${FORGE_PORT_HOST:-7860} 78 | - FORGE_PORT_LOCAL=${FORGE_PORT_LOCAL:-17860} 79 | - FORGE_METRICS_PORT=${FORGE_METRICS_PORT:-27860} 80 | - FORGE_URL=${FORGE_URL:-} 81 | - JUPYTER_PORT_HOST=${JUPYTER_PORT_HOST:-8888} 82 | - JUPYTER_METRICS_PORT=${JUPYTER_METRICS_PORT:-28888} 83 | - JUPYTER_URL=${JUPYTER_URL:-} 84 | - SERVERLESS=${SERVERLESS:-false} 85 | - SYNCTHING_UI_PORT_HOST=${SYNCTHING_UI_PORT_HOST:-8384} 86 | - SYNCTHING_TRANSPORT_PORT_HOST=${SYNCTHING_TRANSPORT_PORT_HOST:-22999} 87 | - 
SYNCTHING_URL=${SYNCTHING_URL:-} 88 | #- PROVISIONING_SCRIPT=${PROVISIONING_SCRIPT:-} 89 | -------------------------------------------------------------------------------- /.github/workflows/delete-old-images.yml: -------------------------------------------------------------------------------- 1 | name: Delete Old Packages 2 | 3 | env: 4 | PER_PAGE: 100 5 | 6 | on: 7 | workflow_dispatch: 8 | inputs: 9 | age: 10 | type: choice 11 | required: true 12 | description: Delete older than 13 | options: 14 | - 1 Hour 15 | - 12 Hours 16 | - 1 Day 17 | - 1 Week 18 | - 2 Weeks 19 | - 1 Month 20 | - 6 Months 21 | - 1 Year 22 | - 2 Years 23 | - 3 Years 24 | - 4 Years 25 | - 5 Years 26 | - All Packages 27 | 28 | jobs: 29 | delete-old-packages: 30 | runs-on: ubuntu-latest 31 | steps: 32 | - 33 | run: | 34 | echo "PACKAGE_NAME=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} 35 | echo "OWNER=orgs/${GITHUB_REPOSITORY_OWNER,,}" >> ${GITHUB_ENV} 36 | - 37 | uses: actions/github-script@v6 38 | with: 39 | github-token: ${{ secrets.DELETE_PACKAGES_TOKEN }} 40 | script: | 41 | const delete_age = (function() { 42 | switch ("${{ github.event.inputs.age }}") { 43 | case "All Packages": 44 | return 0; 45 | case "1 Hour": 46 | return 60; 47 | case "12 Hours": 48 | return 720; 49 | case "1 Day": 50 | return 1440; 51 | case "1 Week": 52 | return 10080; 53 | case "2 Weeks": 54 | return 20160; 55 | case "1 Month": 56 | return 43800; 57 | case "6 Months": 58 | return 262800; 59 | case "1 Year": 60 | return 525600; 61 | case "2 Years": 62 | return 525600 * 2; 63 | case "3 Years": 64 | return 525600 * 3; 65 | case "4 Years": 66 | return 525600 * 4; 67 | case "5 Years": 68 | return 525600 * 5; 69 | default: 70 | return 157680000; 71 | } 72 | })(); 73 | 74 | const now = new Date(); 75 | const epoch_minutes = Math.round(now.getTime() / 1000 / 60); 76 | 77 | const response = await github.request("GET /${{ env.OWNER }}/packages/container/${{ github.event.repository.name }}/versions", 78 | { per_page: ${{ 
env.PER_PAGE }} 79 | }); 80 | 81 | const pages = (function() { 82 | if (typeof response.headers.link !== 'undefined') { 83 | return response.headers.link.split(">").slice(-2)[0].split('=').slice(-1)[0] 84 | } 85 | return 1; 86 | })(); 87 | 88 | console.log("Total pages: " + pages); 89 | 90 | for (let page = pages; page >= 1; page--) { 91 | console.log("Processing page " + page) 92 | 93 | const response = await github.request("GET /${{ env.OWNER }}/packages/container/${{ github.event.repository.name }}/versions", 94 | { 95 | per_page: ${{ env.PER_PAGE }}, 96 | page: page 97 | }); 98 | 99 | console.log("Deleting packages updated more than " + delete_age + " minutes ago...") 100 | for (version of response.data) { 101 | let updated_at = new Date(version.updated_at) 102 | let minutes_old = epoch_minutes - Math.round(updated_at.getTime() / 1000 / 60); 103 | console.log("Package is " + minutes_old + " minutes old") 104 | if (minutes_old > delete_age) { 105 | console.log("delete " + version.id) 106 | const deleteResponse = await github.request("DELETE /${{ env.OWNER }}/packages/container/${{ github.event.repository.name }}/versions/" + version.id, { }); 107 | console.log("status " + deleteResponse.status) 108 | } 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Docker Build](https://github.com/ai-dock/stable-diffusion-webui-forge/actions/workflows/docker-build.yml/badge.svg)](https://github.com/ai-dock/stable-diffusion-webui-forge/actions/workflows/docker-build.yml) 2 | 3 | # Stable Diffusion WebUI Forge Docker Image 4 | 5 | Run [Stable Diffusion WebUI Forge](https://github.com/lllyasviel/stable-diffusion-webui-forge) in a docker container locally or in the cloud. 6 | 7 | >[!NOTE] 8 | >These images do not bundle models or third-party configurations. 
You should use a [provisioning script](https://github.com/ai-dock/base-image/wiki/4.0-Running-the-Image#provisioning-script) to automatically configure your container. You can find examples in `config/provisioning`. 9 | 10 | ## Documentation 11 | 12 | All AI-Dock containers share a common base which is designed to make running on cloud services such as [vast.ai](https://link.ai-dock.org/vast.ai) as straightforward and user friendly as possible. 13 | 14 | Common features and options are documented in the [base wiki](https://github.com/ai-dock/base-image/wiki) but any additional features unique to this image will be detailed below. 15 | 16 | 17 | #### Version Tags 18 | 19 | The `:latest` tag points to `:latest-cuda` 20 | 21 | Tags follow these patterns: 22 | 23 | ##### _CUDA_ 24 | - `:v2-cuda-[x.x.x]-[base|runtime]-[ubuntu-version]` 25 | 26 | - `:latest-cuda` → `:v2-cuda-12.1.1-base-22.04` 27 | 28 | ##### _ROCm_ 29 | - `:rocm-[x.x.x]-runtime-[ubuntu-version]` 30 | 31 | - `:latest-rocm` → `:v2-rocm-6.0-core-22.04` 32 | 33 | ##### _CPU_ 34 | - `:cpu-ubuntu-[ubuntu-version]` 35 | 36 | - `:latest-cpu` → `:v2-cpu-22.04` 37 | 38 | Browse [ghcr.io](https://github.com/ai-dock/stable-diffusion-webui-forge/pkgs/container/stable-diffusion-webui) for an image suitable for your target environment. Alternatively, view a select range of [CUDA](https://hub.docker.com/r/aidockorg/stable-diffusion-webui-forge-cuda) and [ROCm](https://hub.docker.com/r/aidockorg/stable-diffusion-webui-forge-rocm) builds at DockerHub. 
39 | 40 | Supported Python versions: `3.10` 41 | 42 | Supported Platforms: `NVIDIA CUDA`, `AMD ROCm`, `CPU` 43 | 44 | ## Additional Environment Variables 45 | 46 | | Variable | Description | 47 | | ------------------------ | ----------- | 48 | | `AUTO_UPDATE` | Update Web UI Forge on startup (default `false`) | 49 | | `CIVITAI_TOKEN` | Authenticate download requests from Civitai - Required for gated models | 50 | | `HF_TOKEN` | Authenticate download requests from HuggingFace - Required for gated models (SD3, FLUX, etc.) | 51 | | `FORGE_ARGS` | Startup arguments. eg. `--no-half --api` | 52 | | `FORGE_PORT_HOST` | Web UI port (default `7860`) | 53 | | `FORGE_REF` | Git reference for auto update. Accepts branch, tag or commit hash. Default: latest release | 54 | | `FORGE_URL` | Override `$DIRECT_ADDRESS:port` with URL for Web UI | 55 | 56 | See the base environment variables [here](https://github.com/ai-dock/base-image/wiki/2.0-Environment-Variables) for more configuration options. 57 | 58 | ### Additional Python Environments 59 | 60 | | Environment | Packages | 61 | | -------------- | ----------------------------------------- | 62 | | `forge` | SD WebUI Forge and dependencies | 63 | 64 | This environment will be activated on shell login. 65 | 66 | ~~See the base micromamba environments [here](https://github.com/ai-dock/base-image/wiki/1.0-Included-Software#installed-micromamba-environments).~~ 67 | 68 | 69 | ## Additional Services 70 | 71 | The following services will be launched alongside the [default services](https://github.com/ai-dock/base-image/wiki/1.0-Included-Software) provided by the base image. 72 | 73 | ### Stable Diffusion WebUI Forge 74 | 75 | The service will launch on port `7860` unless you have specified an override with `FORGE_PORT_HOST`. 76 | 77 | You can set startup arguments by using variable `FORGE_ARGS`. 78 | 79 | To manage this service you can use `supervisorctl [start|stop|restart] forge` or via the Service Portal application. 
80 | 81 | >[!NOTE] 82 | >All services are password protected by default and HTTPS is available optionally. See the [security](https://github.com/ai-dock/base-image/wiki#security) and [environment variables](https://github.com/ai-dock/base-image/wiki/2.0-Environment-Variables) documentation for more information. 83 | 84 | 85 | ## Pre-Configured Templates 86 | 87 | **Vast.​ai** 88 | 89 | - [SD WebUI Forge:latest-cuda](https://link.ai-dock.org/template-vast-sd-webui-forge) 90 | 91 | - [SD WebUI Forge:latest-cuda + FLUX.1](https://link.ai-dock.org/template-vast-webui-forge-flux) 92 | 93 | --- 94 | 95 | _The author ([@robballantyne](https://github.com/robballantyne)) may be compensated if you sign up to services linked in this document. Testing multiple variants of GPU images in many different environments is both costly and time-consuming; This helps to offset costs_ -------------------------------------------------------------------------------- /config/provisioning/default.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This file will be sourced in init.sh 3 | # Namespace functions with provisioning_ 4 | 5 | # https://raw.githubusercontent.com/ai-dock/stable-diffusion-webui/main/config/provisioning/default.sh 6 | 7 | ### Edit the following arrays to suit your workflow - values must be quoted and separated by newlines or spaces. 
8 | ### If you specify gated models you'll need to set environment variables HF_TOKEN and/or CIVITAI_TOKEN 9 | 10 | DISK_GB_REQUIRED=30 11 | 12 | APT_PACKAGES=( 13 | #"package-1" 14 | #"package-2" 15 | ) 16 | 17 | PIP_PACKAGES=( 18 | "onnxruntime-gpu" 19 | ) 20 | 21 | EXTENSIONS=( 22 | "https://github.com/deforum-art/sd-webui-deforum" 23 | "https://github.com/adieyal/sd-dynamic-prompts" 24 | "https://github.com/ototadana/sd-face-editor" 25 | "https://github.com/AlUlkesh/stable-diffusion-webui-images-browser" 26 | "https://github.com/hako-mikan/sd-webui-regional-prompter" 27 | "https://github.com/Coyote-A/ultimate-upscale-for-automatic1111" 28 | ) 29 | 30 | CHECKPOINT_MODELS=( 31 | #"https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.ckpt" 32 | #"https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.ckpt" 33 | "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors" 34 | "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors" 35 | ) 36 | 37 | LORA_MODELS=( 38 | #"https://civitai.com/api/download/models/16576" 39 | ) 40 | 41 | VAE_MODELS=( 42 | #"https://huggingface.co/stabilityai/sd-vae-ft-ema-original/resolve/main/vae-ft-ema-560000-ema-pruned.safetensors" 43 | #"https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors" 44 | "https://huggingface.co/stabilityai/sdxl-vae/resolve/main/sdxl_vae.safetensors" 45 | ) 46 | 47 | ESRGAN_MODELS=( 48 | "https://huggingface.co/ai-forever/Real-ESRGAN/resolve/main/RealESRGAN_x4.pth" 49 | "https://huggingface.co/FacehugmanIII/4x_foolhardy_Remacri/resolve/main/4x_foolhardy_Remacri.pth" 50 | "https://huggingface.co/Akumetsu971/SD_Anime_Futuristic_Armor/resolve/main/4x_NMKD-Siax_200k.pth" 51 | ) 52 | 53 | CONTROLNET_MODELS=( 54 | 
"https://huggingface.co/lllyasviel/sd_control_collection/resolve/main/diffusers_xl_canny_mid.safetensors" 55 | "https://huggingface.co/lllyasviel/sd_control_collection/resolve/main/diffusers_xl_depth_mid.safetensors?download" 56 | "https://huggingface.co/lllyasviel/sd_control_collection/resolve/main/t2i-adapter_diffusers_xl_openpose.safetensors" 57 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_canny-fp16.safetensors" 58 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_depth-fp16.safetensors" 59 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_hed-fp16.safetensors" 60 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_mlsd-fp16.safetensors" 61 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_normal-fp16.safetensors" 62 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_openpose-fp16.safetensors" 63 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_scribble-fp16.safetensors" 64 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_seg-fp16.safetensors" 65 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_canny-fp16.safetensors" 66 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_color-fp16.safetensors" 67 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_depth-fp16.safetensors" 68 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_keypose-fp16.safetensors" 69 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_openpose-fp16.safetensors" 70 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_seg-fp16.safetensors" 71 | 
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_sketch-fp16.safetensors" 72 | #"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_style-fp16.safetensors" 73 | ) 74 | 75 | 76 | ### DO NOT EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU ARE DOING ### 77 | 78 | function provisioning_start() { 79 | # We need to apply some workarounds to make old builds work with the new default 80 | if [[ ! -d /opt/environments/python ]]; then 81 | export MAMBA_BASE=true 82 | fi 83 | source /opt/ai-dock/etc/environment.sh 84 | source /opt/ai-dock/bin/venv-set.sh webui 85 | 86 | DISK_GB_AVAILABLE=$(($(df --output=avail -m "${WORKSPACE}" | tail -n1) / 1000)) 87 | DISK_GB_USED=$(($(df --output=used -m "${WORKSPACE}" | tail -n1) / 1000)) 88 | DISK_GB_ALLOCATED=$(($DISK_GB_AVAILABLE + $DISK_GB_USED)) 89 | provisioning_print_header 90 | provisioning_get_apt_packages 91 | provisioning_get_pip_packages 92 | provisioning_get_extensions 93 | provisioning_get_models \ 94 | "${WORKSPACE}/storage/stable_diffusion/models/ckpt" \ 95 | "${CHECKPOINT_MODELS[@]}" 96 | provisioning_get_models \ 97 | "${WORKSPACE}/storage/stable_diffusion/models/lora" \ 98 | "${LORA_MODELS[@]}" 99 | provisioning_get_models \ 100 | "${WORKSPACE}/storage/stable_diffusion/models/controlnet" \ 101 | "${CONTROLNET_MODELS[@]}" 102 | provisioning_get_models \ 103 | "${WORKSPACE}/storage/stable_diffusion/models/vae" \ 104 | "${VAE_MODELS[@]}" 105 | provisioning_get_models \ 106 | "${WORKSPACE}/storage/stable_diffusion/models/esrgan" \ 107 | "${ESRGAN_MODELS[@]}" 108 | 109 | PLATFORM_ARGS="" 110 | if [[ $XPU_TARGET = "CPU" ]]; then 111 | PLATFORM_ARGS="--use-cpu all --skip-torch-cuda-test --no-half" 112 | fi 113 | PROVISIONING_ARGS="--skip-python-version-check --no-download-sd-model --do-not-download-clip --port 11404 --exit" 114 | ARGS_COMBINED="${PLATFORM_ARGS} $(cat /etc/forge_args.conf) ${PROVISIONING_ARGS}" 115 | 116 | # Start and exit because webui will 
probably require a restart 117 | cd /opt/stable-diffusion-webui-forge 118 | source "$FORGE_VENV/bin/activate" 119 | LD_PRELOAD=libtcmalloc.so python launch.py \ 120 | ${ARGS_COMBINED} 121 | deactivate 122 | 123 | 124 | provisioning_print_end 125 | } 126 | 127 | function pip_install() { 128 | "$FORGE_VENV_PIP" install --no-cache-dir "$@" 129 | } 130 | 131 | function provisioning_get_apt_packages() { 132 | if [[ -n $APT_PACKAGES ]]; then 133 | sudo $APT_INSTALL ${APT_PACKAGES[@]} 134 | fi 135 | } 136 | 137 | function provisioning_get_pip_packages() { 138 | if [[ -n $PIP_PACKAGES ]]; then 139 | pip_install ${PIP_PACKAGES[@]} 140 | fi 141 | } 142 | 143 | function provisioning_get_extensions() { 144 | for repo in "${EXTENSIONS[@]}"; do 145 | dir="${repo##*/}" 146 | path="/opt/stable-diffusion-webui-forge/extensions/${dir}" 147 | if [[ -d $path ]]; then 148 | # Pull only if AUTO_UPDATE 149 | if [[ ${AUTO_UPDATE,,} == "true" ]]; then 150 | printf "Updating extension: %s...\n" "${repo}" 151 | ( cd "$path" && git pull ) 152 | fi 153 | else 154 | printf "Downloading extension: %s...\n" "${repo}" 155 | git clone "${repo}" "${path}" --recursive 156 | fi 157 | done 158 | } 159 | 160 | function provisioning_get_models() { 161 | if [[ -z $2 ]]; then return 1; fi 162 | dir="$1" 163 | mkdir -p "$dir" 164 | shift 165 | if [[ $DISK_GB_ALLOCATED -ge $DISK_GB_REQUIRED ]]; then 166 | arr=("$@") 167 | else 168 | printf "WARNING: Low disk space allocation - Only the first model will be downloaded!\n" 169 | arr=("$1") 170 | fi 171 | 172 | printf "Downloading %s model(s) to %s...\n" "${#arr[@]}" "$dir" 173 | for url in "${arr[@]}"; do 174 | printf "Downloading: %s\n" "${url}" 175 | provisioning_download "${url}" "${dir}" 176 | printf "\n" 177 | done 178 | } 179 | 180 | function provisioning_print_header() { 181 | printf "\n##############################################\n# #\n# Provisioning container #\n# #\n# This will take some time #\n# #\n# Your container will be ready on completion #\n# 
#\n##############################################\n\n" 182 | if [[ $DISK_GB_ALLOCATED -lt $DISK_GB_REQUIRED ]]; then 183 | printf "WARNING: Your allocated disk size (%sGB) is below the recommended %sGB - Some models will not be downloaded\n" "$DISK_GB_ALLOCATED" "$DISK_GB_REQUIRED" 184 | fi 185 | } 186 | 187 | function provisioning_print_end() { 188 | printf "\nProvisioning complete: Web UI will start now\n\n" 189 | } 190 | 191 | 192 | # Download from $1 URL to $2 file path 193 | function provisioning_download() { 194 | if [[ -n $HF_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?huggingface\.co(/|$|\?) ]]; then 195 | auth_token="$HF_TOKEN" 196 | elif 197 | [[ -n $CIVITAI_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?civitai\.com(/|$|\?) ]]; then 198 | auth_token="$CIVITAI_TOKEN" 199 | fi 200 | if [[ -n $auth_token ]];then 201 | wget --header="Authorization: Bearer $auth_token" -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1" 202 | else 203 | wget -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1" 204 | fi 205 | } 206 | 207 | provisioning_start -------------------------------------------------------------------------------- /config/provisioning/flux.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This file will be sourced in init.sh 3 | # Namespace functions with provisioning_ 4 | 5 | # https://raw.githubusercontent.com/ai-dock/stable-diffusion-webui/main/config/provisioning/default.sh 6 | 7 | ### Edit the following arrays to suit your workflow - values must be quoted and separated by newlines or spaces. 
8 | ### If you specify gated models you'll need to set environment variables HF_TOKEN and/orf CIVITAI_TOKEN 9 | 10 | DISK_GB_REQUIRED=30 11 | 12 | APT_PACKAGES=( 13 | #"package-1" 14 | #"package-2" 15 | ) 16 | 17 | PIP_PACKAGES=( 18 | "onnxruntime-gpu" 19 | ) 20 | 21 | EXTENSIONS=( 22 | "https://github.com/deforum-art/sd-webui-deforum" 23 | "https://github.com/adieyal/sd-dynamic-prompts" 24 | "https://github.com/ototadana/sd-face-editor" 25 | "https://github.com/AlUlkesh/stable-diffusion-webui-images-browser" 26 | "https://github.com/hako-mikan/sd-webui-regional-prompter" 27 | "https://github.com/Coyote-A/ultimate-upscale-for-automatic1111" 28 | ) 29 | 30 | CHECKPOINT_MODELS=( 31 | ) 32 | 33 | LORA_MODELS=( 34 | #"https://civitai.com/api/download/models/16576" 35 | ) 36 | 37 | CLIP_MODELS=( 38 | "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors" 39 | "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors" 40 | ) 41 | 42 | VAE_MODELS=( 43 | ) 44 | 45 | ESRGAN_MODELS=( 46 | "https://huggingface.co/ai-forever/Real-ESRGAN/resolve/main/RealESRGAN_x4.pth" 47 | "https://huggingface.co/FacehugmanIII/4x_foolhardy_Remacri/resolve/main/4x_foolhardy_Remacri.pth" 48 | "https://huggingface.co/Akumetsu971/SD_Anime_Futuristic_Armor/resolve/main/4x_NMKD-Siax_200k.pth" 49 | ) 50 | 51 | CONTROLNET_MODELS=( 52 | ) 53 | 54 | 55 | ### DO NOT EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU ARE DOING ### 56 | 57 | function provisioning_start() { 58 | # We need to apply some workarounds to make old builds work with the new default 59 | if [[ ! 
-d /opt/environments/python ]]; then 60 | export MAMBA_BASE=true 61 | fi 62 | source /opt/ai-dock/etc/environment.sh 63 | source /opt/ai-dock/bin/venv-set.sh webui 64 | 65 | # Get licensed models if HF_TOKEN set & valid 66 | if provisioning_has_valid_hf_token; then 67 | CHECKPOINT_MODELS+=("https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors") 68 | VAE_MODELS+=("https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/ae.safetensors") 69 | else 70 | CHECKPOINT_MODELS+=("https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/flux1-schnell.safetensors") 71 | VAE_MODELS+=("https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors") 72 | fi 73 | 74 | DISK_GB_AVAILABLE=$(($(df --output=avail -m "${WORKSPACE}" | tail -n1) / 1000)) 75 | DISK_GB_USED=$(($(df --output=used -m "${WORKSPACE}" | tail -n1) / 1000)) 76 | DISK_GB_ALLOCATED=$(($DISK_GB_AVAILABLE + $DISK_GB_USED)) 77 | provisioning_print_header 78 | provisioning_get_apt_packages 79 | provisioning_get_pip_packages 80 | provisioning_get_extensions 81 | provisioning_get_models \ 82 | "${WORKSPACE}/storage/stable_diffusion/models/ckpt" \ 83 | "${CHECKPOINT_MODELS[@]}" 84 | provisioning_get_models \ 85 | "${WORKSPACE}/storage/stable_diffusion/models/lora" \ 86 | "${LORA_MODELS[@]}" 87 | provisioning_get_models \ 88 | "${WORKSPACE}/storage/stable_diffusion/models/controlnet" \ 89 | "${CONTROLNET_MODELS[@]}" 90 | provisioning_get_models \ 91 | "${WORKSPACE}/storage/stable_diffusion/models/vae" \ 92 | "${VAE_MODELS[@]}" 93 | provisioning_get_models \ 94 | "${WORKSPACE}/storage/stable_diffusion/models/esrgan" \ 95 | "${ESRGAN_MODELS[@]}" 96 | provisioning_get_models \ 97 | "${WORKSPACE}/storage/stable_diffusion/models/clip" \ 98 | "${CLIP_MODELS[@]}" 99 | 100 | PLATFORM_ARGS="" 101 | if [[ $XPU_TARGET = "CPU" ]]; then 102 | PLATFORM_ARGS="--use-cpu all --skip-torch-cuda-test --no-half" 103 | fi 104 | 
PROVISIONING_ARGS="--skip-python-version-check --no-download-sd-model --do-not-download-clip --port 11404 --exit" 105 | ARGS_COMBINED="${PLATFORM_ARGS} $(cat /etc/forge_args.conf) ${PROVISIONING_ARGS}" 106 | 107 | # Start and exit because webui will probably require a restart 108 | cd /opt/stable-diffusion-webui-forge 109 | source "$FORGE_VENV/bin/activate" 110 | LD_PRELOAD=libtcmalloc.so python launch.py \ 111 | ${ARGS_COMBINED} 112 | deactivate 113 | 114 | 115 | provisioning_print_end 116 | } 117 | 118 | function pip_install() { 119 | "$FORGE_VENV_PIP" install --no-cache-dir "$@" 120 | } 121 | 122 | function provisioning_get_apt_packages() { 123 | if [[ -n $APT_PACKAGES ]]; then 124 | sudo $APT_INSTALL ${APT_PACKAGES[@]} 125 | fi 126 | } 127 | 128 | function provisioning_get_pip_packages() { 129 | if [[ -n $PIP_PACKAGES ]]; then 130 | pip_install ${PIP_PACKAGES[@]} 131 | fi 132 | } 133 | 134 | function provisioning_get_extensions() { 135 | for repo in "${EXTENSIONS[@]}"; do 136 | dir="${repo##*/}" 137 | path="/opt/stable-diffusion-webui-forge/extensions/${dir}" 138 | if [[ -d $path ]]; then 139 | # Pull only if AUTO_UPDATE 140 | if [[ ${AUTO_UPDATE,,} == "true" ]]; then 141 | printf "Updating extension: %s...\n" "${repo}" 142 | ( cd "$path" && git pull ) 143 | fi 144 | else 145 | printf "Downloading extension: %s...\n" "${repo}" 146 | git clone "${repo}" "${path}" --recursive 147 | fi 148 | done 149 | } 150 | 151 | function provisioning_get_models() { 152 | if [[ -z $2 ]]; then return 1; fi 153 | dir="$1" 154 | mkdir -p "$dir" 155 | shift 156 | if [[ $DISK_GB_ALLOCATED -ge $DISK_GB_REQUIRED ]]; then 157 | arr=("$@") 158 | else 159 | printf "WARNING: Low disk space allocation - Only the first model will be downloaded!\n" 160 | arr=("$1") 161 | fi 162 | 163 | printf "Downloading %s model(s) to %s...\n" "${#arr[@]}" "$dir" 164 | for url in "${arr[@]}"; do 165 | printf "Downloading: %s\n" "${url}" 166 | provisioning_download "${url}" "${dir}" 167 | printf "\n" 168 | 
done
}

# Print the provisioning banner and a warning when the allocated disk
# is below DISK_GB_REQUIRED.
function provisioning_print_header() {
    printf "\n##############################################\n# #\n# Provisioning container #\n# #\n# This will take some time #\n# #\n# Your container will be ready on completion #\n# #\n##############################################\n\n"
    if [[ $DISK_GB_ALLOCATED -lt $DISK_GB_REQUIRED ]]; then
        printf "WARNING: Your allocated disk size (%sGB) is below the recommended %sGB - Some models will not be downloaded\n" "$DISK_GB_ALLOCATED" "$DISK_GB_REQUIRED"
    fi
}

function provisioning_print_end() {
    printf "\nProvisioning complete: Web UI will start now\n\n"
}

# Returns 0 only when HF_TOKEN is set AND huggingface.co's whoami-v2
# endpoint answers HTTP 200 for it.
function provisioning_has_valid_hf_token() {
    [[ -n "$HF_TOKEN" ]] || return 1
    url="https://huggingface.co/api/whoami-v2"

    response=$(curl -o /dev/null -s -w "%{http_code}" -X GET "$url" \
        -H "Authorization: Bearer $HF_TOKEN" \
        -H "Content-Type: application/json")

    # Check if the token is valid
    if [ "$response" -eq 200 ]; then
        return 0
    else
        return 1
    fi
}

# Returns 0 only when CIVITAI_TOKEN is set AND civitai.com's hidden-models
# listing answers HTTP 200 for it.
function provisioning_has_valid_civitai_token() {
    [[ -n "$CIVITAI_TOKEN" ]] || return 1
    url="https://civitai.com/api/v1/models?hidden=1&limit=1"

    response=$(curl -o /dev/null -s -w "%{http_code}" -X GET "$url" \
        -H "Authorization: Bearer $CIVITAI_TOKEN" \
        -H "Content-Type: application/json")

    # Check if the token is valid
    if [ "$response" -eq 200 ]; then
        return 0
    else
        return 1
    fi
}

# Download $1 (URL) into directory $2 via curl, attaching a bearer token
# only for huggingface.co / civitai.com hosts, and echo coarse progress.
function provisioning_download() {
    local url=$1
    local output_dir=$2
    # Reset per call so a matched token never leaks to another host
    local auth_token=""
    if [[ -n $HF_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?huggingface\.co(/|$|\?) ]]; then
        auth_token="$HF_TOKEN"
    elif
        [[ -n $CIVITAI_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?civitai\.com(/|$|\?) ]]; then
        auth_token="$CIVITAI_TOKEN"
    fi

    # Build the curl command as an array
    local cmd=("curl" "-L" "-H" "Content-Type: application/json")
    if [[ -n $auth_token ]]; then
        cmd+=("-H" "Authorization: Bearer $auth_token")
    fi
    cmd+=("$url" "--create-dirs" "--output-dir" "$output_dir" "-O" "-J" "--progress-bar")

    # Initial percentage
    local last_percentage=0
    local current_percentage=0
    local current_int=0

    # Use curl to download the file and process the output
    # (--progress-bar redraws are separated by \r, hence read -d $'\r')
    "${cmd[@]}" 2>&1 |
    while IFS= read -d $'\r' -r p; do
        # Extract the percentage from the progress bar output
        if [[ $p =~ ([0-9]+(\.[0-9]+)?)% ]]; then
            current_percentage=${BASH_REMATCH[1]}

            # Extract the integer part of the percentage
            current_int=${current_percentage%.*}

            # Print the percentage only if it has increased by at least 5%
            if [[ $current_int -lt 100 ]] && (( current_int >= last_percentage + 5 )); then
                echo "Downloading $url (${current_percentage}%)..."
                last_percentage=$current_int
            fi
        fi
    done
}

provisioning_start
--------------------------------------------------------------------------------
/.github/workflows/docker-build.yml:
--------------------------------------------------------------------------------
name: Docker Build

on:
  workflow_dispatch:
  #push:
  #  branches: [ "main" ]

env:
  UBUNTU_VERSION: 22.04
  BUILDX_NO_DEFAULT_ATTESTATIONS: 1

jobs:
  cpu-base:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        build:
          # Undeclared release tag finds latest from GitHub tags
          - {latest: "true", forge: "2d3903d", python: "3.10", pytorch: "2.4.1"}
    steps:
      -
        name: Free Space
        run: |
          df -h
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf /usr/local/.ghcup
          sudo rm -rf /usr/local/share/boost
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
          df -h
      -
        name: Env Setter
        run: |
          REPO=${GITHUB_REPOSITORY,,}
          echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV}
          echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV}
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Permissions fixes
        run: |
          target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*"
          chmod -R ug+rwX ${target}
      -
        name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set tags
        run: |
          img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}"
          if [[ -z '${{ matrix.build.forge }}' ]]; then
            FORGE_BUILD_REF="$(curl -s
https://api.github.com/repos/lllyasviel/stable-diffusion-webui-forge/tags | jq -r '.[0].name')"
          else
            FORGE_BUILD_REF="${{ matrix.build.forge }}"
          fi
          [ -z "$FORGE_BUILD_REF" ] && { echo "Error: FORGE_BUILD_REF is empty. Exiting script." >&2; exit 1; }
          echo "FORGE_BUILD_REF=${FORGE_BUILD_REF}" >> ${GITHUB_ENV}

          base_tag="v2-cpu-${{ env.UBUNTU_VERSION }}"

          if [[ ${{ matrix.build.latest }} == "true" ]]; then
            echo "Marking latest"
            TAGS="${img_path_ghcr}:${base_tag}-${FORGE_BUILD_REF}, ${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-cpu"
          else
            TAGS="${img_path_ghcr}:${base_tag}-${FORGE_BUILD_REF}"
          fi
          echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
      -
        name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: build
          build-args: |
            IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-v2-cpu-${{ env.UBUNTU_VERSION }}
            PYTHON_VERSION=${{ matrix.build.python }}
            PYTORCH_VERSION=${{ matrix.build.pytorch }}
            FORGE_BUILD_REF=${{ env.FORGE_BUILD_REF }}
          push: true
          # Avoids unknown/unknown architecture and extra metadata
          provenance: false
          tags: ${{ env.TAGS }}

  # CUDA variant: same steps as cpu-base but tags carry the CUDA version and
  # images are additionally pushed to Docker Hub
  nvidia-base:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        build:
          # Undeclared release tag finds latest from GitHub tags
          - {latest: "true", forge: "2d3903d", python: "3.10", pytorch: "2.4.1", cuda: "12.1.1-base"}

    steps:
      -
        name: Free Space
        run: |
          df -h
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf /usr/local/.ghcup
          sudo rm -rf /usr/local/share/boost
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
          df -h
      -
        name: Env Setter
        run: |
          REPO=${GITHUB_REPOSITORY,,}
          echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV}
          echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV}
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Permissions fixes
        run: |
          target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*"
          chmod -R ug+rwX ${target}
      -
        name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set tags
        run: |
          img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}"
          img_path_dhub="${{ vars.DOCKERHUB_USER }}/${{ env.REPO_NAME }}-cuda"
          if [[ -z '${{ matrix.build.forge }}' ]]; then
            FORGE_BUILD_REF="$(curl -s https://api.github.com/repos/lllyasviel/stable-diffusion-webui-forge/tags | jq -r '.[0].name')"
          else
            FORGE_BUILD_REF="${{ matrix.build.forge }}"
          fi
          [ -z "$FORGE_BUILD_REF" ] && { echo "Error: FORGE_BUILD_REF is empty. Exiting script." >&2; exit 1; }
          echo "FORGE_BUILD_REF=${FORGE_BUILD_REF}" >> ${GITHUB_ENV}

          base_tag="v2-cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}"

          if [[ ${{ matrix.build.latest }} == "true" ]]; then
            echo "Marking latest"
            # GHCR.io Tags
            TAGS="${img_path_ghcr}:${base_tag}-${FORGE_BUILD_REF}, ${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest, ${img_path_ghcr}:latest-cuda"
            # Docker.io Tags
            TAGS="${TAGS}, ${img_path_dhub}:${FORGE_BUILD_REF}, ${img_path_dhub}:latest"
          else
            TAGS="${img_path_ghcr}:${base_tag}-${FORGE_BUILD_REF}, ${img_path_dhub}:${FORGE_BUILD_REF}"
          fi
          echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
      -
        name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: build
          build-args: |
            IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-v2-cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}
            PYTHON_VERSION=${{ matrix.build.python }}
            PYTORCH_VERSION=${{ matrix.build.pytorch }}
            FORGE_BUILD_REF=${{ env.FORGE_BUILD_REF }}
          push: true
          provenance: false
          tags: ${{ env.TAGS }}

  # ROCm variant for AMD GPUs; matrix declares `rocm` instead of `cuda`
  amd-base:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        build:
          - {latest: "true", forge: "2d3903d", python: "3.10", pytorch: "2.3.1", rocm: "6.0-core"}
    steps:
      -
        name: Free Space
        run: |
          df -h
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf /usr/local/.ghcup
          sudo rm -rf /usr/local/share/boost
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
          df -h
      -
        name: Env Setter
        run: |
          REPO=${GITHUB_REPOSITORY,,}
          echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV}
          echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV}
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Permissions fixes
        run: |
          target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*"
          chmod -R ug+rwX ${target}
      -
        name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set tags
        run: |
          img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}"
          img_path_dhub="${{ vars.DOCKERHUB_USER }}/${{ env.REPO_NAME }}-rocm"
          if [[ -z '${{ matrix.build.forge }}' ]]; then
            FORGE_BUILD_REF="$(curl -s https://api.github.com/repos/lllyasviel/stable-diffusion-webui-forge/tags | jq -r '.[0].name')"
          else
            FORGE_BUILD_REF="${{ matrix.build.forge }}"
          fi
          [ -z "$FORGE_BUILD_REF" ] && { echo "Error: FORGE_BUILD_REF is empty. Exiting script."
>&2; exit 1; }
          echo "FORGE_BUILD_REF=${FORGE_BUILD_REF}" >> ${GITHUB_ENV}

          # FIX: this is the ROCm job but base_tag previously used the CUDA
          # pattern (matrix.build.cuda), which is undefined in the amd matrix
          # and produced a malformed tag like "v2-cuda--22.04". The build-args
          # below already use matrix.build.rocm, confirming the intent.
          base_tag="v2-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}"

          if [[ ${{ matrix.build.latest }} == "true" ]]; then
            echo "Marking latest"
            # GHCR.io Tags
            TAGS="${img_path_ghcr}:${base_tag}-${FORGE_BUILD_REF}, ${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-rocm"
            # Docker.io Tags
            TAGS="${TAGS}, ${img_path_dhub}:${FORGE_BUILD_REF}, ${img_path_dhub}:latest"
          else
            TAGS="${img_path_ghcr}:${base_tag}-${FORGE_BUILD_REF}, ${img_path_dhub}:${FORGE_BUILD_REF}"
          fi
          echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
      -
        name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: build
          build-args: |
            IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-v2-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}
            PYTHON_VERSION=${{ matrix.build.python }}
            PYTORCH_VERSION=${{ matrix.build.pytorch }}
            FORGE_BUILD_REF=${{ env.FORGE_BUILD_REF }}
          push: true
          provenance: false
          tags: ${{ env.TAGS }}
--------------------------------------------------------------------------------