├── requirements.txt
├── assets
│   ├── node_summary.png
│   ├── smartgallery-3.jpg
│   ├── gallery_from_pc_screen.png
│   ├── node_summary_with_image.png
│   ├── gallery_from_mobile_screen.png
│   └── smart-comfyui-gallery-unraidCA.png
├── static
│   └── galleryout
│       └── favicon.ico
├── CONTRIBUTING.md
├── .gitignore
├── compose.yaml
├── LICENSE
├── Dockerfile
├── Makefile
├── docker_init.bash
├── CHANGELOG.md
├── README.md
├── DOCKER_HELP.md
└── smartgallery.py
/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | Pillow
3 | opencv-python
4 | tqdm
5 |
--------------------------------------------------------------------------------
/assets/node_summary.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/HEAD/assets/node_summary.png
--------------------------------------------------------------------------------
/assets/smartgallery-3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/HEAD/assets/smartgallery-3.jpg
--------------------------------------------------------------------------------
/static/galleryout/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/HEAD/static/galleryout/favicon.ico
--------------------------------------------------------------------------------
/assets/gallery_from_pc_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/HEAD/assets/gallery_from_pc_screen.png
--------------------------------------------------------------------------------
/assets/node_summary_with_image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/HEAD/assets/node_summary_with_image.png
--------------------------------------------------------------------------------
/assets/gallery_from_mobile_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/HEAD/assets/gallery_from_mobile_screen.png
--------------------------------------------------------------------------------
/assets/smart-comfyui-gallery-unraidCA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/HEAD/assets/smart-comfyui-gallery-unraidCA.png
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to SmartGallery
2 |
3 | Thank you for considering contributing to SmartGallery!
4 |
5 | ## How to Contribute
6 |
7 | 1. Fork the repository
8 | 2. Create a feature branch (`git checkout -b feature/amazing-feature`)
9 | 3. Commit your changes (`git commit -m 'Add amazing feature'`)
10 | 4. Push to the branch (`git push origin feature/amazing-feature`)
11 | 5. Open a Pull Request
12 |
13 | ## Reporting Issues
14 |
15 | Please use the GitHub issue tracker to report bugs or request features.
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Python
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | *.so
6 | .Python
7 | build/
8 | develop-eggs/
9 | dist/
10 | downloads/
11 | eggs/
12 | .eggs/
13 | lib/
14 | lib64/
15 | parts/
16 | sdist/
17 | var/
18 | wheels/
19 | *.egg-info/
20 | .installed.cfg
21 | *.egg
22 |
23 | # Virtual Environment
24 | venv/
25 | env/
26 | ENV/
27 |
28 | # Database
29 | *.db
30 | *.sqlite3
31 |
32 | # Thumbnails
33 | thumbs/
34 | thumbnails/
35 |
36 | # IDE
37 | .vscode/
38 | .idea/
39 | *.swp
40 | *.swo
41 |
42 | # OS
43 | .DS_Store
44 | Thumbs.db
45 |
46 | # Build
47 | *.cmd
48 | *.log
49 | .env
50 | *.temp
51 |
52 |
--------------------------------------------------------------------------------
/compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | comfy-smartgallery:
3 | image: mmartial/smart-comfyui-gallery:latest
4 | container_name: comfy-smartgallery
5 | ports:
6 | - 8189:8189
7 | volumes:
8 | - /comfyui-nvidia/basedir/output:/mnt/output
9 | - /comfyui-nvidia/basedir/input:/mnt/input
10 | - /comfyui-nvidia/SmartGallery:/mnt/SmartGallery
11 | # - /tmp:/deleteto
12 | # - /etc/localtime:/etc/localtime:ro
13 | restart: unless-stopped
14 | environment:
15 | - BASE_OUTPUT_PATH=/mnt/output
16 | - BASE_INPUT_PATH=/mnt/input
17 | - BASE_SMARTGALLERY_PATH=/mnt/SmartGallery
18 | # Adapt to wanted values (use id -u and id -g to obtain values)
19 | - WANTED_UID=1000
20 | - WANTED_GID=1000
21 | # Set to true to force chown of the BASE_SMARTGALLERY_PATH folder only
22 | # - FORCE_CHOWN=true
23 | # - DELETE_TO=/deleteto
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Biagio Maffettone
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim
2 |
3 | # Install system packages
4 | ENV DEBIAN_FRONTEND=noninteractive
5 | RUN apt-get update -y --fix-missing \
6 | && apt-get install -y \
7 | apt-utils \
8 | locales \
9 | ca-certificates \
10 | sudo \
11 | && apt-get upgrade -y \
12 | && apt-get clean
13 |
14 | # UTF-8
15 | RUN localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
16 | ENV LANG=en_US.utf8
17 | ENV LC_ALL=C
18 |
19 | # Install ffprobe
20 | RUN apt-get update && apt-get install -y ffmpeg \
21 | && test -x /usr/bin/ffprobe
22 | ENV FFPROBE_MANUAL_PATH=/usr/bin/ffprobe
23 |
24 | # Set environment variables
25 | ENV PYTHONDONTWRITEBYTECODE=1 \
26 | PYTHONUNBUFFERED=1 \
27 | PYTHONPATH=/app \
28 | PYTHONIOENCODING=utf-8
29 |
30 | RUN mkdir -p /app/templates
31 |
32 | WORKDIR /app
33 |
34 | # Members of the sudo group do not need a password
35 | RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
36 |
37 | # Create a new group for the smartgallery and smartgallerytoo users
38 | RUN groupadd -g 1024 smartgallery \
39 | && groupadd -g 1025 smartgallerytoo
40 |
41 | # The smartgallery (resp. smartgallerytoo) user will have UID 1024 (resp. 1025),
42 | # be part of the smartgallery (resp. smartgallerytoo) and users groups and be sudo capable (passwordless)
43 | RUN useradd -u 1024 -d /home/smartgallery -g smartgallery -s /bin/bash -m smartgallery \
44 | && usermod -G users smartgallery \
45 | && adduser smartgallery sudo
46 | RUN useradd -u 1025 -d /home/smartgallerytoo -g smartgallerytoo -s /bin/bash -m smartgallerytoo \
47 | && usermod -G users smartgallerytoo \
48 | && adduser smartgallerytoo sudo
49 |
50 | COPY requirements.txt /app/requirements.txt
51 | RUN pip install --no-cache-dir -r requirements.txt
52 |
53 | COPY smartgallery.py /app/smartgallery.py
54 | COPY templates/* /app/templates/
55 | COPY static /app/static
56 |
57 | COPY --chmod=555 docker_init.bash /smartgallery_init.bash
58 |
59 | EXPOSE 8189
60 |
61 | USER smartgallerytoo
62 |
63 | CMD ["/smartgallery_init.bash"]
64 |
65 | LABEL org.opencontainers.image.source=https://github.com/biagiomaf/smart-comfyui-gallery
66 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | ifneq (,$(wildcard .env))
2 | include .env
3 | export $(shell sed -n 's/^\([A-Za-z_][A-Za-z0-9_]*\)=.*/\1/p' .env)
4 | endif
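
# An optional .env file can override any of the variables below, e.g.:
#   WANTED_UID=1001
#   WANTED_GID=1001
#   EXPOSED_PORT=8290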
5 |
6 | SMARTGALLERY_VERSION = 1.51
7 |
8 | DOCKERFILE = Dockerfile
9 | DOCKER_TAG_PRE = smartgallery
10 |
11 | DOCKER_TAG = ${SMARTGALLERY_VERSION}
12 | DOCKER_LATEST_TAG = latest
13 |
14 | SMARTGALLERY_CONTAINER_NAME = ${DOCKER_TAG_PRE}:${DOCKER_TAG}
15 | SMARTGALLERY_NAME = $(shell echo ${SMARTGALLERY_CONTAINER_NAME} | tr -cd '[:alnum:]-_.')
16 |
17 | DOCKER_CMD=docker
18 | DOCKER_PRE="NVIDIA_VISIBLE_DEVICES=void"
19 | DOCKER_BUILD_ARGS=
20 |
21 | # Avoid modifying the _PATH variables
22 | BASE_OUTPUT_PATH=/mnt/output
23 | BASE_INPUT_PATH=/mnt/input
24 | BASE_SMARTGALLERY_PATH=/mnt/SmartGallery
25 |
26 | # Adapt the _REAL_PATH variables to match the locations on your system (these are passed as environment variables to the container)
27 | BASE_OUTPUT_PATH_REAL=/comfyui-nvidia/basedir/output
28 | BASE_INPUT_PATH_REAL=/comfyui-nvidia/basedir/input
29 | BASE_SMARTGALLERY_PATH_REAL=/comfyui-nvidia/SmartGallery_Temp
30 |
31 | # set WANTED_UID and WANTED_GID
32 | WANTED_UID=1000
33 | WANTED_GID=1000
34 |
35 | # Modify the alternate options as needed
36 | EXPOSED_PORT=8189
37 | THUMBNAIL_WIDTH=300
38 | WEBP_ANIMATED_FPS=16.0
39 | PAGE_SIZE=100
40 | BATCH_SIZE=500
41 | # see MAX_PARALLEL_WORKERS in smartgallery.py; if not set, "None" is used (i.e., all available CPU cores)
42 |
43 | all:
44 | @echo "Available targets: build run kill buildx_rm"
45 |
46 | build:
47 | @echo ""; echo ""; echo "===== Building ${SMARTGALLERY_CONTAINER_NAME}"
48 | @$(eval VAR_NT="${SMARTGALLERY_NAME}")
49 | @echo "-- Docker command to be run:"
50 | @echo "docker buildx ls | grep -q ${SMARTGALLERY_NAME} && echo \"builder already exists -- to delete it, use: docker buildx rm ${SMARTGALLERY_NAME}\" || docker buildx create --name ${SMARTGALLERY_NAME}" > ${VAR_NT}.cmd
51 | @echo "docker buildx use ${SMARTGALLERY_NAME} || exit 1" >> ${VAR_NT}.cmd
52 | @echo "BUILDX_EXPERIMENTAL=1 ${DOCKER_PRE} docker buildx debug --on=error build --progress plain --platform linux/amd64 ${DOCKER_BUILD_ARGS} \\" >> ${VAR_NT}.cmd
53 | @echo " --tag=\"${SMARTGALLERY_CONTAINER_NAME}\" \\" >> ${VAR_NT}.cmd
54 | @echo " -f ${DOCKERFILE} \\" >> ${VAR_NT}.cmd
55 | @echo " --load \\" >> ${VAR_NT}.cmd
56 | @echo " ." >> ${VAR_NT}.cmd
57 | @echo "docker buildx use default" >> ${VAR_NT}.cmd
58 | @cat ${VAR_NT}.cmd | tee ${VAR_NT}.log.temp
59 | @echo "" | tee -a ${VAR_NT}.log.temp
60 | @echo "Press Ctrl+C within 5 seconds to cancel"
61 | @for i in 5 4 3 2 1; do echo -n "$$i "; sleep 1; done; echo ""
62 | # Actual build
63 | @chmod +x ./${VAR_NT}.cmd
64 | @script -a -e -c ./${VAR_NT}.cmd ${VAR_NT}.log.temp
65 | @mv ${VAR_NT}.log.temp ${VAR_NT}.log
66 | @rm -f ./${VAR_NT}.cmd
67 |
68 | run:
69 | docker run --name ${SMARTGALLERY_NAME} -v $(BASE_OUTPUT_PATH_REAL):$(BASE_OUTPUT_PATH) -v $(BASE_INPUT_PATH_REAL):$(BASE_INPUT_PATH) -v $(BASE_SMARTGALLERY_PATH_REAL):$(BASE_SMARTGALLERY_PATH) -e BASE_OUTPUT_PATH=$(BASE_OUTPUT_PATH) -e BASE_INPUT_PATH=$(BASE_INPUT_PATH) -e BASE_SMARTGALLERY_PATH=$(BASE_SMARTGALLERY_PATH) -p $(EXPOSED_PORT):8189 -e WANTED_UID=${WANTED_UID} -e WANTED_GID=${WANTED_GID} ${SMARTGALLERY_CONTAINER_NAME}
70 |
71 | kill:
72 | (docker kill ${SMARTGALLERY_NAME} && docker rm ${SMARTGALLERY_NAME}) || docker rm ${SMARTGALLERY_NAME}
73 |
74 | buildx_rm:
75 | @docker buildx rm ${SMARTGALLERY_NAME}
76 |
77 |
78 | ##### Docker Container registry (maintainers only)
79 | DOCKERHUB_REPO="mmartial/smart-comfyui-gallery"
80 | DOCKER_PRESENT=$(shell image="${SMARTGALLERY_CONTAINER_NAME}"; if docker images --format "{{.Repository}}:{{.Tag}}" | grep -v ${DOCKERHUB_REPO} | grep -q $$image; then echo $$image; fi)
81 |
82 | docker_tag:
83 | @if [ `echo ${DOCKER_PRESENT} | wc -w` -eq 0 ]; then echo "No images to tag"; exit 1; fi
84 | @echo "== About to tag ${SMARTGALLERY_CONTAINER_NAME} as:"
85 | @echo "${DOCKERHUB_REPO}:${DOCKER_TAG}"
86 | @echo "${DOCKERHUB_REPO}:${DOCKER_LATEST_TAG}"
87 | @echo ""
88 | @echo "Press Ctrl+C within 5 seconds to cancel"
89 | @for i in 5 4 3 2 1; do echo -n "$$i "; sleep 1; done; echo ""
90 | @docker tag ${SMARTGALLERY_CONTAINER_NAME} ${DOCKERHUB_REPO}:${DOCKER_TAG}
91 | @docker tag ${SMARTGALLERY_CONTAINER_NAME} ${DOCKERHUB_REPO}:${DOCKER_LATEST_TAG}
92 |
93 | docker_push:
94 | @echo "== Pushing ${DOCKERHUB_REPO}:${DOCKER_TAG} and ${DOCKERHUB_REPO}:${DOCKER_LATEST_TAG}"
95 | @echo ""
96 | @echo "Press Ctrl+C within 5 seconds to cancel"
97 | @for i in 5 4 3 2 1; do echo -n "$$i "; sleep 1; done; echo ""
98 | @docker push ${DOCKERHUB_REPO}:${DOCKER_TAG}
99 | @docker push ${DOCKERHUB_REPO}:${DOCKER_LATEST_TAG}
100 |
101 | ##### Maintainer
102 | # Docker images (mmartial/smart-comfyui-gallery):
103 | # - Build the images:
104 | # % make build
105 | # - Confirm tags are correct, esp. latest (be ready to Ctrl+C before re-running)
106 | # % make docker_tag
107 | # - Push the images (here too be ready to Ctrl+C before re-running)
108 | # % make docker_push
109 | #
110 | # GitHub release:
111 | # - on the build system, checkout main and pull the changes
112 | # % git checkout main
113 | # % git pull
114 | # - if needed: delete the development branch
115 | # % git branch -d dev_branch_name
116 | # - Tag the release on GitHub
117 | # % git tag 1.41
118 | # % git push origin 1.41
119 | # - Create a release on GitHub using the 1.41 tag, add the release notes, and publish
120 | #
121 | # Cleanup:
122 | # - Erase build logs
123 | # % rm *.log
124 | # - Erase the buildx builder
125 | # % make buildx_rm
126 |
--------------------------------------------------------------------------------
/docker_init.bash:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | error_exit() {
6 | echo -n "!! ERROR: "
7 | echo $*
8 | echo "!! Exiting script (ID: $$)"
9 | exit 1
10 | }
11 |
12 | ok_exit() {
13 | echo $*
14 | echo "++ Exiting script (ID: $$)"
15 | exit 0
16 | }
17 |
18 | ## Environment variables loaded when passing environment variables from user to user
19 | # Ignore list: variables to ignore when loading environment variables from user to user
20 | export ENV_IGNORELIST="HOME PWD USER SHLVL TERM OLDPWD SHELL _ SUDO_COMMAND HOSTNAME LOGNAME MAIL SUDO_GID SUDO_UID SUDO_USER CHECK_NV_CUDNN_VERSION VIRTUAL_ENV VIRTUAL_ENV_PROMPT ENV_IGNORELIST ENV_OBFUSCATE_PART"
21 | # Obfuscate part: part of the key to obfuscate when loading environment variables from user to user, ex: HF_TOKEN, ...
22 | export ENV_OBFUSCATE_PART="TOKEN API KEY"
23 |
24 | # Check for ENV_IGNORELIST and ENV_OBFUSCATE_PART
25 | if [ -z "${ENV_IGNORELIST+x}" ]; then error_exit "ENV_IGNORELIST not set"; fi
26 | if [ -z "${ENV_OBFUSCATE_PART+x}" ]; then error_exit "ENV_OBFUSCATE_PART not set"; fi
27 |
28 | whoami=`whoami`
29 | script_dir=$(dirname $0)
30 | script_name=$(basename $0)
31 | echo ""; echo ""
32 | echo "======================================"
33 | echo "=================== Starting script (ID: $$)"
34 | echo "== Running ${script_name} in ${script_dir} as ${whoami}"
35 | script_fullname=$0
36 | echo " - script_fullname: ${script_fullname}"
37 | ignore_value="VALUE_TO_IGNORE"
38 |
39 | # everyone can read our files by default
40 | umask 0022
41 |
42 | # Write a world-writable file (preferably inside /tmp -- i.e., within the container)
43 | write_worldtmpfile() {
44 | tmpfile=$1
45 | if [ -z "${tmpfile}" ]; then error_exit "write_worldtmpfile: missing argument"; fi
46 | if [ -f $tmpfile ]; then rm -f $tmpfile; fi
47 | echo -n $2 > ${tmpfile}
48 | chmod 777 ${tmpfile}
49 | }
50 |
51 | itdir=/tmp/smartgallery_init
52 | if [ ! -d $itdir ]; then mkdir $itdir; chmod 777 $itdir; fi
53 | if [ ! -d $itdir ]; then error_exit "Failed to create $itdir"; fi
54 |
55 | # Set user and group id
56 | # logic: if not set and file exists, use file value, else use default. Create file for persistence when the container is re-run
57 | # reasoning: needed when using docker compose as the file will exist in the stopped container, and a value changed via environment variables or the configuration file must be propagated across the smartgallerytoo to smartgallery transition (those values are the only ones loaded before the environment variable dump file is loaded)
58 | it=$itdir/smartgallery_user_uid
59 | if [ -z "${WANTED_UID+x}" ]; then
60 | if [ -f $it ]; then WANTED_UID=$(cat $it); fi
61 | fi
62 | WANTED_UID=${WANTED_UID:-1024}
63 | write_worldtmpfile $it "$WANTED_UID"
64 | echo "-- WANTED_UID: \"${WANTED_UID}\""
65 |
66 | it=$itdir/smartgallery_user_gid
67 | if [ -z "${WANTED_GID+x}" ]; then
68 | if [ -f $it ]; then WANTED_GID=$(cat $it); fi
69 | fi
70 | WANTED_GID=${WANTED_GID:-1024}
71 | write_worldtmpfile $it "$WANTED_GID"
72 | echo "-- WANTED_GID: \"${WANTED_GID}\""
73 |
74 | echo "== Most Environment variables set"
75 |
76 | # Check user id and group id
77 | new_gid=`id -g`
78 | new_uid=`id -u`
79 | echo "== user ($whoami)"
80 | echo " uid: $new_uid / WANTED_UID: $WANTED_UID"
81 | echo " gid: $new_gid / WANTED_GID: $WANTED_GID"
82 |
83 | save_env() {
84 | tosave=$1
85 | echo "-- Saving environment variables to $tosave"
86 | env | sort > "$tosave"
87 | }
88 |
89 | load_env() {
90 | tocheck=$1
91 | overwrite_if_different=$2
92 | ignore_list="${ENV_IGNORELIST}"
93 | obfuscate_part="${ENV_OBFUSCATE_PART}"
94 | if [ -f "$tocheck" ]; then
95 | echo "-- Loading environment variables from $tocheck (overwrite existing: $overwrite_if_different) (ignorelist: $ignore_list) (obfuscate: $obfuscate_part)"
96 | while IFS='=' read -r key value; do
97 | doit=false
98 | # checking if the key is in the ignorelist
99 | for i in $ignore_list; do
100 | if [[ "A$key" == "A$i" ]]; then doit=ignore; break; fi
101 | done
102 | if [[ "A$doit" == "Aignore" ]]; then continue; fi
103 | rvalue=$value
104 | # checking if part of the key is in the obfuscate list
105 | doobs=false
106 | for i in $obfuscate_part; do
107 | if [[ "A$key" == *"$i"* ]]; then doobs=obfuscate; break; fi
108 | done
109 | if [[ "A$doobs" == "Aobfuscate" ]]; then rvalue="**OBFUSCATED**"; fi
110 |
111 | if [ -z "${!key}" ]; then
112 | echo " ++ Setting environment variable $key [$rvalue]"
113 | doit=true
114 | elif [ "A$overwrite_if_different" == "Atrue" ]; then
115 | cvalue="${!key}"
116 | if [[ "A${doobs}" == "Aobfuscate" ]]; then cvalue="**OBFUSCATED**"; fi
117 | if [[ "A${!key}" != "A${value}" ]]; then
118 | echo " @@ Overwriting environment variable $key [$cvalue] -> [$rvalue]"
119 | doit=true
120 | else
121 | echo " == Environment variable $key [$rvalue] already set and value is unchanged"
122 | fi
123 | fi
124 | if [[ "A$doit" == "Atrue" ]]; then
125 | export "$key=$value"
126 | fi
127 | done < "$tocheck"
128 | fi
129 | }
130 |
131 | # smartgallerytoo is a specific user that does not exist by default on Ubuntu, so we can check against its whoami
132 | if [ "A${whoami}" == "Asmartgallerytoo" ]; then
133 | echo "-- Running as smartgallerytoo, will switch smartgallery to the desired UID/GID"
134 | # The script is started as smartgallerytoo -- UID/GID 1025/1025
135 |
136 | # We are altering the UID/GID of the smartgallery user to the desired ones and restarting as that user
137 | # using usermod on the already created smartgallery user, knowing it is not already in use
138 | # per usermod manual: "You must make certain that the named user is not executing any processes when this command is being executed"
139 | sudo groupmod -o -g ${WANTED_GID} smartgallery || error_exit "Failed to set GID of smartgallery user"
140 | sudo usermod -o -u ${WANTED_UID} smartgallery || error_exit "Failed to set UID of smartgallery user"
141 | sudo chown -R ${WANTED_UID}:${WANTED_GID} /home/smartgallery || error_exit "Failed to set owner of /home/smartgallery"
142 | save_env /tmp/smartgallerytoo_env.txt
143 | # restart the script as smartgallery set with the correct UID/GID this time
144 | echo "-- Restarting as smartgallery user with UID ${WANTED_UID} GID ${WANTED_GID}"
145 | sudo su smartgallery $script_fullname || error_exit "subscript failed"
146 | ok_exit "Clean exit"
147 | fi
148 |
149 | # If we are here, the script was started as a user other than smartgallerytoo.
150 | # Because the whoami value for the smartgallery user can be any existing user, we cannot check against it;
151 | # instead we check whether the UID/GID are the expected ones
152 | if [ "$WANTED_GID" != "$new_gid" ]; then error_exit "smartgallery MUST be running as UID ${WANTED_UID} GID ${WANTED_GID}, current UID ${new_uid} GID ${new_gid}"; fi
153 | if [ "$WANTED_UID" != "$new_uid" ]; then error_exit "smartgallery MUST be running as UID ${WANTED_UID} GID ${WANTED_GID}, current UID ${new_uid} GID ${new_gid}"; fi
154 |
155 | ########## 'smartgallery' specific section below
156 |
157 | # We are therefore running as smartgallery
158 | echo ""; echo "== Running as smartgallery"
159 |
160 | # Load environment variables one by one from /tmp/smartgallerytoo_env.txt if they are not already set
161 | it=/tmp/smartgallerytoo_env.txt
162 | if [ -f $it ]; then
163 |   echo "-- Loading environment variables not already set from $it"
164 | load_env $it true
165 | fi
166 |
167 | ######## Environment variables (consume AFTER the load_env)
168 |
169 | lc() { echo "$1" | tr '[:upper:]' '[:lower:]'; }
170 | FORCE_CHOWN=${FORCE_CHOWN:-"false"} # any value works, empty value or false means disabled
171 | FORCE_CHOWN=`lc "${FORCE_CHOWN}"`
172 |
173 | if [ -z "${BASE_SMARTGALLERY_PATH+x}" ]; then error_exit "BASE_SMARTGALLERY_PATH is not set"; fi
174 | if [ -z "${BASE_INPUT_PATH+x}" ]; then error_exit "BASE_INPUT_PATH is not set"; fi
175 | if [ -z "${BASE_OUTPUT_PATH+x}" ]; then error_exit "BASE_OUTPUT_PATH is not set"; fi
176 |
177 | it_dir=${BASE_INPUT_PATH}
178 | if [ ! -d "${it_dir}" ]; then error_exit "BASE_INPUT_PATH is not a directory"; fi
179 | it="${it_dir}/.testfile"; touch "$it" && rm -f "$it" || echo "Failed to write to BASE_INPUT_PATH directory as the smartgallery user, we will not be able to delete files from there"
180 |
181 | it_dir=${BASE_OUTPUT_PATH}
182 | if [ ! -d "${it_dir}" ]; then error_exit "BASE_OUTPUT_PATH is not a directory"; fi
183 | it="${it_dir}/.testfile"; touch "$it" && rm -f "$it" || echo "Failed to write to BASE_OUTPUT_PATH directory as the smartgallery user, we will not be able to delete files from there"
184 |
185 | it_dir=${BASE_SMARTGALLERY_PATH}
186 | if [ ! -d "${it_dir}" ]; then error_exit "BASE_SMARTGALLERY_PATH is not a directory"; fi
187 | if [ "${FORCE_CHOWN}" == "false" ]; then it="${it_dir}/.testfile"; touch "$it" && rm -f "$it" || error_exit "Failed to write to the required $it_dir directory as the smartgallery user"; fi
188 |
189 | for i in .sqlite_cache .thumbnails_cache .zip_downloads; do
190 | it_dir="${BASE_SMARTGALLERY_PATH}/$i"
191 | if [ "${FORCE_CHOWN}" == "true" ]; then
192 | if [ ! -d "${it_dir}" ]; then sudo mkdir -p "${it_dir}"; fi
193 | echo "-- FORCE_CHOWN set, forcing ownership of ${it_dir}"; sudo chown -R smartgallery:smartgallery "${it_dir}"
194 | else
195 | if [ ! -d "${it_dir}" ]; then mkdir -p "${it_dir}"; fi
196 | fi
197 | it="${it_dir}/.testfile"; touch "$it" && rm -f "$it" || error_exit "Failed to write to the required $it_dir directory as the smartgallery user"
198 | done
199 |
200 | echo ""; echo "==================="
201 | echo "== Running SmartGallery"
202 | cd /app; python smartgallery.py || error_exit "SmartGallery failed or exited with an error"
203 |
204 | ok_exit "Clean exit"
205 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## [1.51] - 2025-12-17
4 |
5 | ### Added
6 |
7 | #### Search & Filtering
9 | - **Prompt Keywords Search**: New filter to search for text strings directly within the generation prompt. Supports multiple comma-separated keywords (e.g., "woman, kimono").
10 | - **Deep Workflow Search**: Added a new `Workflow Files` search field. This searches specifically within the metadata of the generated files to find references to models, LoRAs, and input images used in the workflow (e.g., search for "sd_xl").
11 | - **Global Search**: Users can now toggle between searching the "Current Folder" or performing a "Global" search across the entire library.
12 | - **Date Range Filters**: Added `From` and `To` date pickers to filter files by their creation/modification time.
13 | - **"No Workflow" Filter**: A new checkbox option to quickly identify files that do not contain embedded workflow metadata.
14 | - **Redesigned Filter Panel**: The search and filter options have been moved to a collapsible overlay panel for a cleaner UI on both desktop and mobile.
15 |
16 | #### Backend & Database
17 | - **Database Migration (v26)**: Added `workflow_files` column to the database.
18 | - **Metadata Backfilling**: On first startup after update, the system automatically scans existing files to populate the new `workflow_files` search data for deep searching.
19 | - **Optimized SQL**: Improved query performance for filtered searches using `WAL` journal mode and optimized synchronous settings.
20 |
21 | ### Fixed
22 | - **Filter Dropdown Performance**: Added a limit (`MAX_PREFIX_DROPDOWN_ITEMS`) to the Prefix dropdown to prevent UI freezing in folders with thousands of unique prefixes.
23 | - **Navigation Logic**: Fixed state retention issues when switching between global search results and folder navigation.
24 |
25 | ## [1.41.1] - 2025-12-05
26 |
27 | ### Fixed
28 | - **Image Size**: Fixed an issue with the image size used for thumbnail generation.
29 | - **Docker**: Added `FORCE_CHOWN` environment variable to force chown of the BASE_SMARTGALLERY_PATH folder only. Pre-checked permissions for the BASE_SMARTGALLERY_PATH to avoid permission errors.
30 |
31 | ## [1.41] - 2025-11-24
32 |
33 | ### Added
34 |
35 | #### Core & Configuration
36 | - **Batch Zip Download**: Users can now select multiple files and download them as a single `.zip` archive. The generation happens in the background to prevent timeouts, with a notification appearing when the download is ready.
37 | - **Environment Variable Support**: All major configuration settings (`BASE_OUTPUT_PATH`, `SERVER_PORT`, etc.) can now be set via OS environment variables, making deployment and containerization easier.
38 | - **Startup Diagnostics (GUI)**: Added graphical popup alerts on startup to immediately warn users about critical errors (e.g., invalid Output Path) or missing optional dependencies (FFmpeg) without needing to check the console.
39 | - **Automatic Update Check**: The application now checks the GitHub repository upon launch and notifies the console if a newer version of `smartgallery.py` is available.
40 | - **Safe Deletion (`DELETE_TO`)**: Introduced a new `DELETE_TO` environment variable. If set, deleting a file moves it to the specified path (e.g., `/tmp` or a Trash folder) instead of permanently removing it. This is ideal for Unix systems with auto-cleanup policies for temporary files.
41 |
42 | #### Gallery & File Management
43 | - **Workflow Input Visualization**: The Node Summary tool now intelligently detects input media (Images, Videos, Audio) used in the workflow (referenced in nodes like `Load Image`, `LoadAudio`, `VHS_LoadVideo`, etc.) located in the `BASE_INPUT_PATH`.
44 | - **Source Media Gallery**: Added a dedicated "Source Media" section at the top of the Node Summary overlay. It displays previews for all detected inputs in a responsive grid layout.
45 | - **Audio Input Support**: Added a native audio player within the Node Summary to listen to audio files used as workflow inputs.
46 | - **Advanced Folder Rescan**: Added a "Rescan" button with a modal dialog allowing users to choose between scanning "All Files" or only "Recent Files" (files checked > 1 hour ago). This utilizes a new `last_scanned` database column for optimization.
47 | - **Range Selection**: Added a "Range" button (`↔️`) to the selection bar. When exactly two files are selected, this button appears and allows selecting all files between them.
48 | - **Enhanced Node Summary**: The workflow parser has been updated to support both ComfyUI "UI format" and "API format" JSONs, ensuring node summaries work for a wider range of generated files.
49 | - **Smart File Counter**: Added a dynamic badge in the toolbar that displays the count of currently visible files. If filters are active (or viewing a subset), it explicitly shows the total number of files in the folder (e.g., "10 Files (50 Total)").
50 |
51 | #### User Interface & Lightbox
52 | - **Keyboard Shortcuts Help**: Added a help overlay (accessible via the `?` key) listing all available keyboard shortcuts for navigation and file management.
53 | - **Visual Shortcut Bar**: Added a floating shortcuts bar inside the Lightbox view to guide users on available controls (Zoom, Pan, Rename, etc.).
54 | - **Advanced Lightbox Navigation**:
55 | - Added **Numpad Panning**: Use Numpad keys (1-9) to pan around zoomed images.
56 | - Added **Pan Step Cycling**: Press `.` to change the speed/distance of keyboard panning.
57 | - Added **Smart Loader**: New visual loader for high-res images in the lightbox for a smoother experience.
58 |
59 | #### Docker & Deployment
60 | - **Containerization Support**: Added full Docker support to run SmartGallery in an isolated environment.
61 | - **Docker Compose & Makefile**: Included `compose.yaml` for easy deployment and a `Makefile` for advanced build management.
62 | - **Permission Handling**: Implemented `WANTED_UID` and `WANTED_GID` environment variables to ensure the container can correctly read/write files on the host system without permission errors.
63 |
64 | ### Fixed
65 | - **Security Patch**: Implemented robust checks to prevent potential path traversal vulnerabilities.
66 | - **FFprobe in Multiprocessing**: Fixed an issue where the path to `ffprobe` was not correctly passed to worker processes during parallel scanning on some systems.
67 |
68 | ## [1.31] - 2025-10-27
69 |
70 | ### Performance
71 | - **Massive Performance Boost with Parallel Processing**: Thumbnail generation and metadata analysis have been completely parallelized for both the initial database build and on-demand folder syncing. This drastically reduces waiting times (from many minutes to mere seconds or a few minutes, depending on hardware) by leveraging all available CPU cores.
72 | - **Configurable CPU Usage**: A new `MAX_PARALLEL_WORKERS` setting has been added to allow users to specify the number of parallel processes to use. Set to `None` for maximum speed (using all cores) or to a specific number to limit CPU usage.
73 |
74 | ### Added
75 | - **File Renaming from Lightbox**: Users can now rename files directly from the lightbox view using a new pencil icon in the toolbar. The new name is immediately reflected in the gallery view and all associated links without requiring a page reload. Includes validation to prevent conflicts with existing files.
76 | - **Persistent Folder Sort**: Folder sort preferences (by name or date) are now saved to the browser's `localStorage`. The chosen sort order now persists across page reloads and navigation to other folders.
77 | - **Console Progress Bar for Initial Scan**: During the initial database build (the offline process), a detailed progress bar (`tqdm`) is now displayed in the console. It provides real-time feedback on completion percentage, processing speed, and estimated time remaining.
78 |
79 | ### Fixed
80 | - **Critical 'Out of Memory' Error**: Fixed a critical 'out of memory' error that occurred during the initial scan of tens of thousands of files. The issue was resolved by implementing batch processing (`BATCH_SIZE`) for database writes.
81 |
82 | ### Changed
83 | - **Code Refactoring**: File processing logic was centralized into a `process_single_file` worker function to improve code maintainability and support parallel execution.
84 |
85 | ## [1.30] - 2025-10-26
86 |
87 | ### Added
88 |
89 | #### Folder Navigation & Management (`index.html`)
90 | - **Expandable Sidebar**: Added an "Expand" button (`↔️`) to widen the folder sidebar, making long folder names fully visible. On mobile, this opens a full-screen overlay for maximum readability.
91 | - **Real-time Folder Search**: Implemented a search bar above the folder tree to filter folders by name instantly.
92 | - **Bi-directional Folder Sorting**: Added buttons to sort the folder tree by Name (A-Z / Z-A) or Modification Date (Newest / Oldest). The current sort order is indicated by an arrow (↑↓).
93 | - **Enhanced "Move File" Panel**: All new folder navigation features (Search, and Bi-directional Sorting) have been fully integrated into the "Move File" dialog for a consistent experience.
94 |
95 | #### Gallery View (`index.html`)
96 | - **Bi-directional Thumbnail Sorting**: Added sort buttons for "Date" and "Name" to the main gallery view. Each button toggles between ascending and descending order on click, indicated by an arrow.
97 |
98 | #### Lightbox Experience (`index.html`)
99 | - **Zoom with Mouse Wheel**: Implemented zooming in and out of images in the lightbox using the mouse scroll wheel.
100 | - **Persistent Zoom Level**: The current zoom level is now maintained when navigating to the next or previous image, or after deleting an item.
101 | - **Zoom Percentage Display**: The current zoom level is now displayed next to the filename in the lightbox title (e.g., `my_image.png (120%)`).
102 | - **Delete Functionality**: Added a delete button (`🗑️`) to the lightbox toolbar and enabled the `Delete` key on the keyboard for quick deletion (no confirmation required with the key).
103 |
104 | #### System & Feedback (`smartgallery.py` & `index.html`)
105 | - **Real-time Sync Feedback**: Implemented a non-blocking, real-time folder synchronization process using Server-Sent Events (SSE).
106 | - **Sync Progress Overlay**: When new or modified files are detected, a progress overlay is now displayed, showing the status and a progress bar of the indexing and thumbnailing operation. The check is silent if no changes are found.
107 |
108 | ### Changed
109 |
110 | #### `smartgallery.py`
111 | - **Dynamic Workflow Filename**: When downloading a workflow, the file is now named after the original image (e.g., `my_image.png` -> `my_image.json`) instead of a generic `workflow.json`.
112 | - **Folder Metadata**: The backend now retrieves the modification time for each folder to enable sorting by date.
113 |
114 |
115 | ## [1.22] - 2025-10-08
116 |
117 | ### Changed
118 |
119 | #### index.html
120 | - Minor aesthetic improvements
121 |
122 | #### smartgallery.py
123 | - Implemented intelligent file management for moving files between folders
124 | - Added automatic file renaming when destination file already exists
125 | - Files are now renamed with progressive numbers (e.g., `myfile.png` → `myfile(1).png`, `myfile(2).png`, etc.)
126 |
127 | ### Fixed
128 | - Fixed issue where file move operations would fail when a file with the same name already existed in the destination folder
129 | - Files are now successfully moved with the new name instead of failing the operation
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SmartGallery for ComfyUI ✨
2 | ### Your lightweight, browser-based visual hub for ComfyUI outputs
3 |
4 | **SmartGallery** is a fast, mobile-friendly web gallery that gives you
5 | **complete control over your ComfyUI outputs** — even when ComfyUI is not running.
6 |
7 | Browse, search, organize, and instantly recall the exact workflow behind every image or video,
8 | from any device, on Windows, Linux, or Docker.
9 |
10 | ---
11 |
17 | 🎨 A beautiful, lightning-fast gallery that remembers the workflow behind every creation
18 |
28 | ---
29 |
30 | ## 🎯 Why SmartGallery?
31 |
32 | If you use ComfyUI, you already know the pain:
33 |
34 | - Thousands of generated files
35 | - Forgotten workflows
36 | - Generic filenames
37 | - No easy way to browse from your phone
38 | - No fast way to find *that* image again
39 |
40 | **SmartGallery fixes all of this.**
41 |
42 | It automatically links every generated file (PNG, JPG, MP4, WebP)
43 | to its **exact workflow**, making your entire creative history searchable and explorable.
44 |
45 | ---
46 |
47 | ## ⚡ Key Features
48 |
49 | - 📝 **$\color{red}{\text{(new)}}$** **Prompt Keywords Search**: Instantly find generations by searching for specific words inside your prompt. Supports multiple comma-separated keywords (e.g., "woman, kimono").
50 |
51 | - 🧬 **$\color{red}{\text{(new)}}$** **Deep Workflow Search**: Search for generated images, videos, and animations based on the **filenames** inside the workflow (Models, LoRAs, Inputs).
52 | *Supports multiple comma-separated keywords (e.g., "wan2.1, portrait.png").*
53 |
54 | - 🏃‍♂️ **Blazing Fast**
55 | SQLite database + smart caching = instant browsing, even with huge libraries
56 |
57 | - 📱 **Mobile-First Experience**
58 | Perfect UI on desktop, tablet, and smartphone
59 |
60 | - 🔍 **Powerful Search**
61 | Search by filename, prefix, extension, date range, or globally across folders
62 |
63 | - 🔎 **Node Summary**
64 | Instantly see model, seed, parameters, and source media used to generate each file
65 |
70 | - 📁 **Smart Organization**
71 | Real-time folder browsing, sorting, filtering, and file management
72 |
73 | - 📦 **Batch Operations**
74 | Multi-select, ZIP download, range selection
75 |
76 | - 🆕 **Universal Upload Magic**
77 | Upload any ComfyUI-generated image or video and instantly discover its workflow
78 |
79 | - 🔄 **Real-time Sync**
80 | Background scanning with visual progress when new files are detected
81 |
82 | - 🐳 **Docker Ready**
83 | Run it anywhere, cleanly and reliably
84 |
85 | ---
86 |
87 | ## 🆕 What's New in Version 1.51?
88 |
89 | Recent updates focus on search, performance, and usability.
90 | Highlights:
91 |
92 | ### 📝 Powerful Prompt Text Search
93 | Finding that one specific generation is now easier than ever. We've added a **Prompt Keywords** search that digs into the actual text used in your prompts.
94 | * **How it works:** It scans the workflow metadata for the text prompts you wrote.
95 | * **Multiple Keywords:** You can search for several words at once.
96 | * **Example:** You want to find all your previous tests involving a woman wearing a specific garment.
97 | * **Solution:** Just type `woman, kimono` in the Prompt Keywords field, and SmartGallery will filter all matching images, animations, and videos instantly!
98 |
99 | ### 🧬 Deep Workflow Search
100 | We've added a powerful new way to find your creations. The **"Workflow Files"** search field digs inside the hidden metadata to find specific **filenames** used in the generation.
101 |
102 | * **How it works:** It searches specifically for the names of **Checkpoints, LoRAs, Upscalers, and Input Images** referenced in the nodes.
103 | * **Multiple Keywords:** You can search for multiple items at once by separating them with commas.
104 | * **Example:** You want to find images generated with the **Wan2.1** model that also used **portrait.png** as an input.
105 | * **Solution:** Just type `wan2.1, portrait.png` in the Workflow Files search, and SmartGallery will find matches containing both!
106 | * *(Note: This searches for filenames, not numeric parameters like Seed or CFG).*
107 |
108 | 🌐 **Global search across all folders**
109 |
110 | 📅 **Date range filtering**
111 |
112 | 🚀 **Optimized UI for large libraries**
113 |
114 | 👉 See [CHANGELOG.md](CHANGELOG.md) for full details.
115 |
116 |
117 | 
118 |
119 | 
120 |
121 |
122 | 📱 Perfect mobile experience
123 |
124 |
125 | ---
126 |
127 | ## 🎮 Installation: Ridiculously Simple
128 |
129 | ### Step 1: Get the Code
130 | ```bash
131 | git clone https://github.com/biagiomaf/smart-comfyui-gallery
132 | cd smart-comfyui-gallery
133 | ```
134 |
135 | ### Step 2: Quick Setup
136 | ```bash
137 | # Create virtual environment (recommended)
138 | python -m venv venv
139 |
140 | # Activate it
141 | # Windows Command Prompt: call venv\Scripts\activate.bat
142 | # Mac/Linux: source venv/bin/activate
143 |
144 | # Install dependencies
145 | pip install -r requirements.txt
146 | ```
147 |
148 | ### Step 3: Configure Your Paths
149 |
150 | You have **two easy options** to configure SmartGallery:
151 |
152 | #### 🅰️ Option A: Environment Variables (recommended)
153 | Create a startup script to keep your settings organized.
154 |
155 | Perfect if you want to keep your settings separate or run multiple configurations.
156 |
157 | **Windows:**
158 | Create a file named `start_gallery.bat` inside the `smart-comfyui-gallery` folder with the following content:
159 | ```cmd
160 | @echo off
161 | cd /d %~dp0
162 | call venv\Scripts\activate.bat
163 | REM Path to your ComfyUI Output folder (Where images are generated)
164 | set "BASE_OUTPUT_PATH=C:/ComfyUI/output"
165 | REM Path to your ComfyUI Input folder (For source media in Node Summary)
166 | set "BASE_INPUT_PATH=C:/ComfyUI/input"
167 | REM Where SmartGallery stores the SQLite Database and Thumbnails Cache
168 | set "BASE_SMARTGALLERY_PATH=C:/ComfyUI/output"
169 | REM Path to ffprobe.exe (ffmpeg required for extracting workflows from video files)
170 | set "FFPROBE_MANUAL_PATH=C:/ffmpeg/bin/ffprobe.exe"
171 | set SERVER_PORT=8189
172 | REM Leave MAX_PARALLEL_WORKERS empty to use all CPU cores (recommended)
173 | set "MAX_PARALLEL_WORKERS="
174 | python smartgallery.py
175 | ```
176 |
177 | **Linux/Mac:**
178 | Create a file named `start_gallery.sh` with the following content:
179 | ```bash
180 | #!/bin/bash
181 | source venv/bin/activate
182 | # Path to your ComfyUI Output folder (Where images are generated)
183 | export BASE_OUTPUT_PATH="$HOME/ComfyUI/output"
184 | # Path to your ComfyUI Input folder (For source media in Node Summary)
185 | export BASE_INPUT_PATH="$HOME/ComfyUI/input"
186 | # Where SmartGallery stores the SQLite Database and Thumbnails Cache
187 | export BASE_SMARTGALLERY_PATH="$HOME/ComfyUI/output"
188 | # Path to ffprobe executable (Required for extracting workflows from video files)
189 | export FFPROBE_MANUAL_PATH="/usr/bin/ffprobe"
190 | # The port where SmartGallery will run
191 | export SERVER_PORT=8189
192 | # Leave empty to use all CPU cores, or set a number (e.g., 4) to limit usage
193 | export MAX_PARALLEL_WORKERS=""
194 | python smartgallery.py
195 | ```
196 | Then make it executable and run it:
197 | ```bash
198 | chmod +x start_gallery.sh
199 | ./start_gallery.sh
200 | ```
201 |
202 | > 💡 **Tip**: See the complete configuration guide at the top of `smartgallery.py` for all available settings and detailed examples!
203 |
204 | #### 🅱️ Option B: Direct File Edit
205 |
206 | Open `smartgallery.py` and find the **USER CONFIGURATION** section. A detailed guide is included at the top of the file. Update just the paths after the commas:
207 | ```python
208 | # Find this section and change ONLY the values after the commas:
209 | BASE_OUTPUT_PATH = os.environ.get('BASE_OUTPUT_PATH', 'C:/ComfyUI/output')
210 | BASE_INPUT_PATH = os.environ.get('BASE_INPUT_PATH', 'C:/ComfyUI/input')
211 | BASE_SMARTGALLERY_PATH = os.environ.get('BASE_SMARTGALLERY_PATH', BASE_OUTPUT_PATH) # DB & Cache location
212 | FFPROBE_MANUAL_PATH = os.environ.get('FFPROBE_MANUAL_PATH', "C:/ffmpeg/bin/ffprobe.exe")
213 | SERVER_PORT = int(os.environ.get('SERVER_PORT', 8189))
214 | ```
215 |
216 | > 💡 **Important**: Always use forward slashes (`/`) even on Windows! If your paths contain spaces, use quotes.
217 |
218 | > 📹 **FFmpeg Note**: Recommended for extracting workflows from MP4 files. Download from [ffmpeg.org](https://ffmpeg.org/) if needed. Common locations:
219 | > - Windows: `C:/ffmpeg/bin/ffprobe.exe` or `C:/Program Files/ffmpeg/bin/ffprobe.exe`
220 | > - Linux: `/usr/bin/ffprobe` or `/usr/local/bin/ffprobe`
221 | > - Mac: `/usr/local/bin/ffprobe` or `/opt/homebrew/bin/ffprobe`
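>
> To verify where ffprobe lives, a quick check (the Windows equivalent of `which` is `where ffprobe`):
>
> ```bash
> which ffprobe     # prints the path if ffprobe is on your PATH
> ffprobe -version  # confirms it runs and reports the version
> ```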
222 |
223 | ### Step 4: Launch & Enjoy
224 |
225 | Run your startup script (or `python smartgallery.py` with the environment configured), then visit **`http://127.0.0.1:8189/galleryout`** and watch the magic happen!
226 |
227 | > **⏱️ First Run**: The initial launch scans your files and generates thumbnails. Thanks to parallel processing, this is now incredibly fast (seconds to a few minutes depending on your collection size). After that? Lightning fast! ⚡
228 |
229 | ---
230 |
231 |
232 | ## 🐳 Docker Deployment (Advanced Users)
233 |
234 | Want to run SmartGallery in a containerized environment? We've got you covered!
235 |
236 | > 🎖️ **Special Thanks**: A huge shout-out to **[Martial Michel](https://github.com/mmartial)** for orchestrating the Docker support and contributing significant improvements to the core application logic.
237 |
238 | > **Note for Windows Users**: The standard installation (Steps 1-4 above) is much simpler and works perfectly on Windows! Docker is completely optional and mainly useful for Linux servers or advanced deployment scenarios.
239 |
240 | Docker deployment provides isolation, easier deployment, and consistent environments across different systems. However, it requires some familiarity with Docker concepts.
241 |
242 | **🗄️ Pre-built images**
243 |
244 | Pre-built images are available on DockerHub at [mmartial/smart-comfyui-gallery](https://hub.docker.com/r/mmartial/smart-comfyui-gallery) and Unraid's Community Apps.
245 |
246 | 
247 |
248 | Example `docker run` command:
249 |
250 | ```bash
251 | # Adapt the mounts and WANTED_UID/WANTED_GID variables to match your system
252 | docker run \
253 | --name smartgallery \
254 | -v /comfyui-nvidia/basedir/output:/mnt/output \
255 | -v /comfyui-nvidia/basedir/input:/mnt/input \
256 | -v /comfyui-nvidia/SmartGallery:/mnt/SmartGallery \
257 | -e BASE_OUTPUT_PATH=/mnt/output \
258 | -e BASE_INPUT_PATH=/mnt/input \
259 | -e BASE_SMARTGALLERY_PATH=/mnt/SmartGallery \
260 | -p 8189:8189 \
261 | -e WANTED_UID=`id -u` \
262 | -e WANTED_GID=`id -g` \
263 | mmartial/smart-comfyui-gallery
264 | ```
265 |
266 | > **Note**: The `id -u` and `id -g` commands return the user and group IDs of the current user, respectively. This ensures that the container runs with the same permissions as the host user, which is important for file permissions and access to mounted volumes.
267 |
268 | A [compose.yaml](compose.yaml) file is provided for convenience. Place it in a directory of your choice, adapt the paths and environment variables to match your system, then pull the published image and start the container with:
269 | ```bash
270 | docker compose up -d
271 | ```
272 |
273 | See "All available environment variables" in the Docker guide below for the full list.
274 |
275 | **📚 [Complete Docker Setup Guide →](DOCKER_HELP.md)**
276 |
277 | Our comprehensive Docker guide covers:
278 | - 🏗️ Building the Docker image
279 | - 🚀 Running with Docker Compose (recommended for beginners)
280 | - ⚙️ Using Makefile (For advanced control and automation)
281 | - 🔐 Understanding permissions and volume mapping
282 | - 🛠️ Troubleshooting common Docker issues
283 | - 📋 All available environment variables
284 |
285 |
286 | ---
287 | ## 🌐 Reverse Proxy Setup
288 |
289 | Running behind Nginx or Apache? Point your proxy to:
290 | ```
291 | http://127.0.0.1:8189/galleryout
292 | ```
293 |
294 | **Example Nginx configuration:**
295 | ```nginx
296 | location /gallery/ {
297 | proxy_pass http://127.0.0.1:8189/galleryout/;
298 | proxy_set_header Host $host;
299 | proxy_set_header X-Real-IP $remote_addr;
300 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
301 | }
302 | ```
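
If you serve through Apache instead, a minimal equivalent sketch (assuming `mod_proxy` and `mod_proxy_http` are enabled) might be:

```apache
<Location "/gallery/">
    ProxyPass "http://127.0.0.1:8189/galleryout/"
    ProxyPassReverse "http://127.0.0.1:8189/galleryout/"
</Location>
```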
303 |
304 | ---
305 | ## 🚀 Coming Soon: Announcing the AI Features (Optional)
306 |
307 | SmartGallery is designed to stay **lightweight by default**.
308 |
309 | Advanced AI-powered features will be provided soon by a **separate optional component**:
310 |
311 | ### **SmartGallery AI Service (Optional)**
312 |
313 | > 🔌 A dedicated service, completely independent from the SmartGallery core.
314 |
315 | ### 🧠 AI Search (Coming Soon)
316 |
317 | Search your gallery by **describing what you remember** — not filenames.
318 |
319 | Examples:
320 | ```text
321 | "cyberpunk portrait with neon lights"
322 | "dark fantasy illustration"
323 | "portrait with red background"
324 | ```
325 | No manual tagging.
326 | No heavy dependencies in the core.
327 | No cloud. Fully local and private.
328 |
329 | > ⚠️ **Important**: SmartGallery works perfectly without AI.
330 | > The AI Service is optional, local, and free!
331 | > It runs in a separate Docker container or Python environment.
332 | > If you don't install it, nothing changes.
333 |
334 | **The AI Service is currently under development and not released yet.**
335 |
336 | [SmartGallery Core (lightweight)] ---> [SmartGallery AI Service (optional) - Docker / separate environment]
337 |
338 | ---
339 | ## 🤝 Join the Community
340 |
341 | ### Found a Bug? Have an Idea?
342 | **[➡️ Open an Issue](../../issues)** - I read every single one!
343 |
344 | ### Want to Contribute?
345 | 1. Fork the repo
346 | 2. Create your feature branch (`git checkout -b amazing-feature`)
347 | 3. Commit your changes (`git commit -m 'Add amazing feature'`)
348 | 4. Push to the branch (`git push origin amazing-feature`)
349 | 5. Open a Pull Request
350 |
351 | Let's build something incredible together! 🚀
352 |
353 | ---
354 |
355 | ## 🔥 License
356 |
357 | SmartGallery is released under the **MIT License** - see [LICENSE](LICENSE) for details.
358 |
359 | This software is provided "as is" without warranty. Use responsibly and in compliance with applicable laws.
360 |
361 | ---
362 |
363 | ## ❤️ Show Some Love
364 |
365 | If SmartGallery has transformed your ComfyUI workflow, **please give it a ⭐ star!**
366 |
367 | It takes 2 seconds but means the world to me and helps other creators discover this tool.
368 |
369 | **[⭐ Star this repo now!](https://github.com/biagiomaf/smart-comfyui-gallery/stargazers)**
370 |
371 | ---
372 |
373 |
374 | Made with ❤️ for the ComfyUI community
375 |
376 |
--------------------------------------------------------------------------------
/DOCKER_HELP.md:
--------------------------------------------------------------------------------
1 | # 🐳 SmartGallery Docker Deployment Guide
2 |
3 | This guide covers everything you need to know to run SmartGallery in a Docker container.
4 |
5 | ## 📋 Table of Contents
6 |
7 | - [Prerequisites](#prerequisites)
8 | - [Quick Start](#quick-start)
9 | - [Method 1: Docker Compose (Recommended for beginners)](#method-1-docker-compose-recommended)
10 | - [Method 2: Makefile (Power Users)](#method-2-makefile-power-users)
11 | - [Understanding Docker Permissions](#understanding-docker-permissions)
12 | - [Environment Variables Reference](#environment-variables-reference)
13 | - [Docker Volumes Explained](#docker-volumes-explained)
14 | - [Troubleshooting](#troubleshooting)
15 | - [Advanced Topics](#advanced-topics)
16 |
17 | ---
18 |
19 | ## Prerequisites
20 |
21 | Before you begin, ensure you have:
22 |
23 | - **Docker installed**:
24 | - **Linux**: Docker Engine ([installation guide](https://docs.docker.com/engine/install/))
25 | - **Windows/Mac**: Docker Desktop ([download](https://www.docker.com/products/docker-desktop/))
26 | - **Basic Docker knowledge**: Understanding of containers, images, and volumes
27 | - **Your ComfyUI output and input folders path** ready
28 | - **Sufficient disk space** for Docker images and volumes
29 |
30 | > **⚠️ Important**: Docker on Windows adds complexity. The standard Python installation is recommended for Windows users unless you specifically need containerization.
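
You can confirm Docker is installed and the daemon is reachable before going further:

```bash
docker --version                                   # client is installed
docker info > /dev/null 2>&1 && echo "daemon OK"   # daemon is running
```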
31 |
32 | ---
33 |
34 | ## Quick Start
35 |
36 | For those who just want to get started quickly:
37 | ```bash
38 | # 1. Clone the repository
39 | git clone https://github.com/biagiomaf/smart-comfyui-gallery
40 | cd smart-comfyui-gallery
41 |
42 | # 2. Build the Docker image
43 | docker build -t smartgallery:latest .
44 |
45 | # 3. Edit compose.yaml with your paths (see below)
46 | nano compose.yaml # or use your favorite editor
47 | # if building your own image, replace mmartial/smart-comfyui-gallery:latest with smartgallery:latest
48 |
49 | # 4. Start the container
50 | docker compose up -d
51 |
52 | # 5. Access the gallery
53 | # Open http://localhost:8189/galleryout in your browser
54 | ```
55 |
56 | ---
57 |
58 | ## Method 1: Docker Compose (Recommended)
59 |
60 | Docker Compose is the easiest way to run SmartGallery in Docker.
61 |
62 | ### Step 1: Build the Docker Image
63 |
64 | From the project directory:
65 | ```bash
66 | docker build -t smartgallery:latest .
67 | ```
68 |
69 | This creates a Docker image named `smartgallery:latest` containing:
70 | - Python 3.12
71 | - All required dependencies
72 | - FFmpeg (with ffprobe for video workflow extraction)
73 | - Pre-configured environment
74 |
75 | **Build time**: ~2-5 minutes depending on your internet connection.
76 |
77 | ### Step 2: Configure `compose.yaml`
78 |
79 | Open `compose.yaml` in your favorite text editor and adjust the configuration:
80 | ```yaml
81 | services:
82 | comfy-smartgallery:
83 | image: smartgallery:latest
84 | container_name: comfy-smartgallery
85 | ports:
86 | - 8189:8189
87 | volumes:
88 | # CHANGE THESE PATHS TO MATCH YOUR SYSTEM
89 | - /path/to/your/ComfyUI/output:/mnt/output
90 | - /path/to/your/ComfyUI/input:/mnt/input
91 | - /path/to/your/SmartGallery:/mnt/SmartGallery
92 | restart: unless-stopped
93 | environment:
94 | # Container paths (DO NOT CHANGE)
95 | - BASE_OUTPUT_PATH=/mnt/output
96 | - BASE_INPUT_PATH=/mnt/input
97 | - BASE_SMARTGALLERY_PATH=/mnt/SmartGallery
98 | # File permissions (CHANGE TO YOUR UID/GID)
99 | - WANTED_UID=1000
100 | - WANTED_GID=1000
101 | ```
102 |
103 | #### What to Change:
104 |
105 | **1. Volume Paths (Required)**
106 |
107 | Replace these with your actual paths:
108 | ```yaml
109 | volumes:
110 | # Your ComfyUI output folder
111 | - /home/username/ComfyUI/output:/mnt/output
112 | # Your ComfyUI input folder
113 | - /home/username/ComfyUI/input:/mnt/input
114 |
115 | # Where SmartGallery stores database/cache (can be anywhere)
116 | - /home/username/SmartGallery_Data:/mnt/SmartGallery
117 | ```
118 |
119 | **Linux Examples:**
120 | ```yaml
121 | - /home/john/ComfyUI/output:/mnt/output
122 | - /home/john/ComfyUI/input:/mnt/input
123 | - /home/john/SmartGallery:/mnt/SmartGallery
124 | ```
125 |
126 | **Windows Examples (using WSL paths):**
127 | ```yaml
128 | - /mnt/c/Users/YourName/ComfyUI/output:/mnt/output
129 | - /mnt/c/Users/YourName/ComfyUI/input:/mnt/input
130 | - /mnt/c/Users/YourName/SmartGallery:/mnt/SmartGallery
131 | ```
132 |
133 | **Mac Examples:**
134 | ```yaml
135 | - /Users/yourname/ComfyUI/output:/mnt/output
136 | - /Users/yourname/ComfyUI/input:/mnt/input
137 | - /Users/yourname/SmartGallery:/mnt/SmartGallery
138 | ```
139 |
140 | **2. User Permissions (Linux/Mac Only)**
141 |
142 | Find your user ID and group ID:
143 | ```bash
144 | id -u # Your User ID (UID)
145 | id -g # Your Group ID (GID)
146 | ```
147 |
148 | Update in `compose.yaml`:
149 | ```yaml
150 | environment:
151 | - WANTED_UID=1000 # Replace with your UID
152 | - WANTED_GID=1000 # Replace with your GID
153 | ```
154 |
155 | > **Windows Users**: Leave these as `1000` (default values work fine on Windows).
156 |
157 | **3. Optional: Change Port**
158 |
159 | Only if port 8189 is already in use:
160 | ```yaml
161 | ports:
162 | - 8190:8189 # Maps host port 8190 to container port 8189
163 | ```
164 |
165 | ### Step 3: Start the Container
166 | ```bash
167 | docker compose up -d
168 | ```
169 |
170 | The `-d` flag runs it in detached mode (background).
171 |
172 | **What happens:**
173 | 1. Container starts with name `comfy-smartgallery`
174 | 2. Mounts your specified volumes
175 | 3. Adjusts internal user permissions to match your UID/GID
176 | 4. Starts SmartGallery web server
177 | 5. Makes it accessible at `http://localhost:8189/galleryout`
178 |
179 | ### Step 4: Verify It's Running
180 |
181 | **Check container status:**
182 | ```bash
183 | docker ps
184 | ```
185 |
186 | You should see `comfy-smartgallery` in the list.
187 |
188 | **View logs:**
189 | ```bash
190 | docker compose logs -f
191 | ```
192 |
193 | Press `Ctrl+C` to stop following logs.
194 |
195 | **Access the gallery:**
196 | Open your browser and navigate to:
197 | ```
198 | http://localhost:8189/galleryout
199 | ```
200 |
201 | ### Managing the Container
202 |
203 | **Stop the container:**
204 | ```bash
205 | docker compose down
206 | ```
207 |
208 | **Restart the container:**
209 | ```bash
210 | docker compose restart
211 | ```
212 |
213 | **View live logs:**
214 | ```bash
215 | docker compose logs -f comfy-smartgallery
216 | ```
217 |
218 | **Update after code changes:**
219 | ```bash
220 | docker compose down
221 | docker build -t smartgallery:latest .
222 | docker compose up -d
223 | ```
224 |
225 | ---
226 |
227 | ## Method 2: Makefile (Power Users)
228 |
229 | The Makefile provides more control and is ideal for developers or advanced users.
230 |
231 | ### Step 1: Configure the Makefile
232 |
233 | Open `Makefile` and adjust these variables:
234 | ```makefile
235 | # === CHANGE THESE VALUES ===
236 |
237 | # Your actual paths on the host system
238 | BASE_OUTPUT_PATH_REAL=/home/username/ComfyUI/output
239 | BASE_INPUT_PATH_REAL=/home/username/ComfyUI/input
240 | BASE_SMARTGALLERY_PATH_REAL=/home/username/SmartGallery
241 |
242 | # Your user permissions (use: id -u and id -g)
243 | WANTED_UID=1000
244 | WANTED_GID=1000
245 |
246 | # === OPTIONAL CUSTOMIZATIONS ===
247 |
248 | # Port to expose
249 | EXPOSED_PORT=8189
250 |
251 | # SmartGallery settings
252 | THUMBNAIL_WIDTH=300
253 | PAGE_SIZE=100
254 | BATCH_SIZE=500
255 |
256 | # === DO NOT CHANGE (container internal paths) ===
257 | BASE_OUTPUT_PATH=/mnt/output
258 | BASE_INPUT_PATH=/mnt/input
259 | BASE_SMARTGALLERY_PATH=/mnt/SmartGallery
260 | ```
261 |
262 | ### Step 2: Build the Image
263 | ```bash
264 | make build
265 | ```
266 |
267 | This builds the Docker image with detailed logging. The build log is saved to `smartgallery.log`.
268 |
269 | ### Step 3: Run the Container
270 | ```bash
271 | make run
272 | ```
273 |
274 | This starts the container with all your configured settings.
275 |
276 | ### Step 4: Manage the Container
277 |
278 | **Stop and remove:**
279 | ```bash
280 | make kill
281 | ```
282 |
283 | **Remove buildx builder (if needed):**
284 | ```bash
285 | make buildx_rm
286 | ```
287 |
288 | ### Makefile Commands Reference
289 |
290 | | Command | Description |
291 | |---------|-------------|
292 | | `make build` | Build the Docker image with logging |
293 | | `make run` | Start the container with configured settings |
294 | | `make kill` | Stop and remove the container |
295 | | `make buildx_rm` | Remove the buildx builder |
296 |
297 | ---
298 |
299 | ## Understanding Docker Permissions
300 |
301 | SmartGallery's Docker setup uses a **two-user system** to handle Linux file permissions correctly.
302 |
303 | ### Why Permissions Matter
304 |
305 | When Docker creates files inside a container, they're owned by the container's user (typically UID 1000 or root). This can cause problems:
306 |
307 | ❌ **Without proper UID/GID mapping:**
308 | - Files created by the container are owned by a different user on your host
309 | - You can't edit or delete files created by SmartGallery
310 | - SmartGallery might not be able to read your ComfyUI files
311 |
312 | ✅ **With proper UID/GID mapping:**
313 | - Files created inside the container match your host user
314 | - Full read/write access from both container and host
315 | - No permission errors
316 |
317 | ### The Two-User System
318 |
319 | SmartGallery uses two users to achieve this:
320 |
321 | 1. **`smartgallerytoo`** (UID 1025)
322 | - Initial user that starts the container
323 | - Adjusts the `smartgallery` user's UID/GID
324 | - Restarts the script as `smartgallery`
325 |
326 | 2. **`smartgallery`** (UID adjustable)
327 | - Actual user running SmartGallery
328 | - UID/GID is changed to match your `WANTED_UID`/`WANTED_GID`
329 | - All files are created with your host user's permissions
330 |
331 | ### How It Works
332 |
333 | The `docker_init.bash` script automatically does the following (a simplified sketch follows the list):
334 |
335 | 1. Checks if running as `smartgallerytoo` (initial startup)
336 | 2. Uses `sudo` to modify `smartgallery` user's UID/GID
337 | 3. Changes ownership of `/home/smartgallery` directory
338 | 4. Saves environment variables
339 | 5. Restarts the script as the `smartgallery` user
340 | 6. Verifies UID/GID match the expected values
341 | 7. Starts SmartGallery application
342 |
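Roughly, the startup logic looks like the sketch below. This is a simplified illustration, not the actual `docker_init.bash` (the real script also saves environment variables and performs extra checks), and the exact `usermod`/`groupmod` flags and the final launch command are assumptions:

```bash
#!/bin/bash
# Sketch only: two-user startup logic (assumes sudo, usermod and groupmod exist in the image)
if [ "$(id -un)" = "smartgallerytoo" ]; then
  sudo usermod  -o -u "${WANTED_UID}" smartgallery   # retag the runtime user's UID
  sudo groupmod -o -g "${WANTED_GID}" smartgallery   # retag its group's GID
  sudo chown -R "${WANTED_UID}:${WANTED_GID}" /home/smartgallery
  exec sudo -E -u smartgallery bash "$0" "$@"        # re-run this script as 'smartgallery'
fi
# Now running as 'smartgallery': verify the IDs, then start the app
if [ "$(id -u)" != "${WANTED_UID}" ] || [ "$(id -g)" != "${WANTED_GID}" ]; then
  echo "ERROR: UID/GID mismatch" >&2
  exit 1
fi
exec python3 smartgallery.py
```
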
343 | ### Setting Your UID/GID
344 |
345 | **On Linux/Mac:**
346 | ```bash
347 | # Find your UID and GID
348 | id -u # Example output: 1000
349 | id -g # Example output: 1000
350 |
351 | # Use these values in compose.yaml or Makefile
352 | WANTED_UID=1000
353 | WANTED_GID=1000
354 | ```
355 |
356 | **On Windows:**
357 |
358 | Windows handles Docker permissions differently. Use the default values:
359 | ```yaml
360 | WANTED_UID=1000
361 | WANTED_GID=1000
362 | ```
363 |
364 | ---
365 |
366 | ## Environment Variables Reference
367 |
368 | All SmartGallery configuration can be set via environment variables in `compose.yaml`:
369 |
370 | ### Core Configuration
371 |
372 | | Variable | Description | Default | Example |
373 | |----------|-------------|---------|---------|
374 | | `BASE_OUTPUT_PATH` | ComfyUI output folder (container path) | Required | `/mnt/output` |
375 | | `BASE_INPUT_PATH` | ComfyUI input folder (container path) | Required | `/mnt/input` |
376 | | `BASE_SMARTGALLERY_PATH` | Database/cache location (container path) | Same as output | `/mnt/SmartGallery` |
377 | | `FFPROBE_MANUAL_PATH` | Path to ffprobe executable | `/usr/bin/ffprobe` | `/usr/bin/ffprobe` |
378 | | `SERVER_PORT` | Web server port inside container | `8189` | `8189` |
379 |
380 | ### Gallery Settings
381 |
382 | | Variable | Description | Default | Example |
383 | |----------|-------------|---------|---------|
384 | | `THUMBNAIL_WIDTH` | Thumbnail width in pixels | `300` | `300` |
385 | | `PAGE_SIZE` | Files to load initially | `100` | `100` |
386 | | `WEBP_ANIMATED_FPS` | WebP animation frame rate | `16.0` | `16.0` |
387 | | `BATCH_SIZE` | Database sync batch size | `500` | `500` |
388 |
389 | ### Performance
390 |
391 | | Variable | Description | Default | Example |
392 | |----------|-------------|---------|---------|
393 | | `MAX_PARALLEL_WORKERS` | CPU cores for processing | `""` (all cores) | `4` or `""` |
394 |
395 | **Options for `MAX_PARALLEL_WORKERS`:**
396 | - `""` (empty string): Use all available CPU cores (fastest)
397 | - `1`: Single-threaded processing (slowest, lowest CPU usage)
398 | - `4`: Use 4 CPU cores (balanced)
399 |
400 | ### Docker-Specific
401 |
402 | | Variable | Description | Default | Example |
403 | |----------|-------------|---------|---------|
404 | | `WANTED_UID` | Host user ID for file permissions | `1000` | `1000` |
405 | | `WANTED_GID` | Host group ID for file permissions | `1000` | `1000` |
406 |
407 | ### Adding Custom Environment Variables
408 |
409 | In `compose.yaml`:
410 | ```yaml
411 | environment:
412 | - BASE_OUTPUT_PATH=/mnt/output
413 | - BASE_INPUT_PATH=/mnt/input
414 | - BASE_SMARTGALLERY_PATH=/mnt/SmartGallery
415 | - THUMBNAIL_WIDTH=400
416 | - PAGE_SIZE=200
417 | - MAX_PARALLEL_WORKERS=4
418 | - WANTED_UID=1000
419 | - WANTED_GID=1000
420 | ```
421 |
422 | ---
423 |
424 | ## Docker Volumes Explained
425 |
426 | Docker volumes map folders from your host system into the container.
427 |
428 | ### Volume Configuration
429 | ```yaml
430 | volumes:
431 | - /host/path:/container/path
432 | ```
433 |
434 | - **Left side (before `:`)**: the path on your host computer
435 | - **Right side (after `:`)**: the path inside the container
436 |
437 | ### SmartGallery Volumes
438 | ```yaml
439 | volumes:
440 | - /home/user/ComfyUI/output:/mnt/output
441 | - /home/user/ComfyUI/input:/mnt/input
442 | - /home/user/SmartGallery:/mnt/SmartGallery
443 | ```
444 |
445 | **First Volume** (`/mnt/output`):
446 | - Your ComfyUI generated files
447 | - Images, videos, workflows
448 | - SmartGallery reads from here
449 | - Can be read-only if desired: `/path/to/output:/mnt/output:ro`
450 |
451 | **Second Volume** (`/mnt/input`):
452 | - Your ComfyUI input folder (source images, videos, audio)
453 | - Required for the Node Summary to display source media
454 | - SmartGallery reads from here
455 | - Can be read-only if desired: `/path/to/input:/mnt/input:ro`
456 |
457 | **Third Volume** (`/mnt/SmartGallery`):
458 | - SmartGallery's working directory
459 | - SQLite database
460 | - Thumbnail cache
461 | - ZIP downloads
462 | - Needs read-write access
463 |
464 | ### Volume Best Practices
465 |
466 | ✅ **Do:**
467 | - Use absolute paths: `/home/user/...`
468 | - Ensure folders exist before starting the container (see the example below)
469 | - Keep SmartGallery data separate from ComfyUI output
470 | - Use descriptive folder names: `SmartGallery_Data`
471 |
472 | ❌ **Don't:**
473 | - Use relative paths like `../ComfyUI/output` (Compose resolves them against the compose file, not your shell, and `docker run` requires absolute paths)
474 | - Mount the same folder to multiple paths
475 | - Store SmartGallery data inside ComfyUI output folder
476 |
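For example, to pre-create the host-side folders using the Linux example paths from earlier in this guide (adjust to your system):

```bash
mkdir -p /home/user/ComfyUI/output /home/user/ComfyUI/input
mkdir -p /home/user/SmartGallery
```
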
477 | ### Checking Volume Contents
478 |
479 | **From host:**
480 | ```bash
481 | ls -la /home/user/SmartGallery
482 | ```
483 |
484 | **From inside container:**
485 | ```bash
486 | docker exec -it comfy-smartgallery ls -la /mnt/SmartGallery
487 | ```
488 |
489 | ---
490 |
491 | ## Troubleshooting
492 |
493 | ### Container Won't Start
494 |
495 | **Check logs:**
496 | ```bash
497 | docker compose logs comfy-smartgallery
498 | ```
499 |
500 | **Common issues:**
501 |
502 | 1. **Port already in use:**
503 | ```
504 | Error: bind: address already in use
505 | ```
506 | **Solution**: Change the port in `compose.yaml`:
507 | ```yaml
508 | ports:
509 | - 8190:8189 # Use 8190 instead
510 | ```
511 |
512 | 2. **Volume path doesn't exist:**
513 | ```
514 | Error: invalid mount config
515 | ```
516 | **Solution**: Create the folders first:
517 | ```bash
518 | mkdir -p /home/user/ComfyUI/output
519 | mkdir -p /home/user/SmartGallery
520 | ```
521 |
522 | 3. **Image not found:**
523 | ```
524 | Error: No such image: smartgallery:latest
525 | ```
526 | **Solution**: Build the image first:
527 | ```bash
528 | docker build -t smartgallery:latest .
529 | ```
530 |
531 | ### Permission Denied Errors
532 |
533 | **Symptom**: Container can't read ComfyUI files or write database.
534 |
535 | **Check your UID/GID:**
536 | ```bash
537 | id -u # Should match WANTED_UID
538 | id -g # Should match WANTED_GID
539 | ```
540 |
541 | **Verify volume permissions:**
542 | ```bash
543 | ls -la /home/user/ComfyUI/output
544 | ls -la /home/user/SmartGallery
545 | ```
546 |
547 | **Fix permissions:**
548 | ```bash
549 | # Make folders accessible
550 | chmod 755 /home/user/ComfyUI/output
551 | chmod 755 /home/user/SmartGallery
552 |
553 | # Change ownership (if needed)
554 | sudo chown -R $(id -u):$(id -g) /home/user/SmartGallery
555 | ```
556 |
557 | **Update UID/GID in compose.yaml:**
558 | ```yaml
559 | environment:
560 | - WANTED_UID=1000 # Your actual UID
561 | - WANTED_GID=1000 # Your actual GID
562 | ```
563 |
564 | Then restart:
565 | ```bash
566 | docker compose down
567 | docker compose up -d
568 | ```
569 |
570 | ### Can't Access Gallery
571 |
572 | **Check if container is running:**
573 | ```bash
574 | docker ps
575 | ```
576 |
577 | **Check if port is accessible:**
578 | ```bash
579 | curl http://localhost:8189/galleryout
580 | ```
581 |
582 | **Check firewall (Linux):**
583 | ```bash
584 | sudo ufw allow 8189
585 | ```
586 |
587 | **Try a different browser or an incognito window** (avoids a stale cache).
588 |
589 | ### Database or Thumbnail Issues
590 |
591 | **Reset SmartGallery data:**
592 | ```bash
593 | # Stop container
594 | docker compose down
595 |
596 | # Delete SmartGallery data (not ComfyUI files!)
597 | rm -rf /home/user/SmartGallery/*
598 |
599 | # Start container (will rebuild database)
600 | docker compose up -d
601 | ```
602 |
603 | ### Container Exits Immediately
604 |
605 | **View exit logs:**
606 | ```bash
607 | docker compose logs comfy-smartgallery
608 | ```
609 |
610 | **Common causes:**
611 | - Missing or incorrect `BASE_OUTPUT_PATH`
612 | - UID/GID mismatch causing permission errors
613 | - Python dependency issues
614 |
615 | **Try running interactively:**
616 | ```bash
617 | docker run -it --rm \
618 | -v /path/to/output:/mnt/output \
619 | -v /path/to/smartgallery:/mnt/SmartGallery \
620 | -e BASE_OUTPUT_PATH=/mnt/output \
621 | -e WANTED_UID=1000 \
622 | -e WANTED_GID=1000 \
623 | smartgallery:latest \
624 | /bin/bash
625 | ```
626 |
627 | This drops you into a shell inside the container for debugging.
628 |
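Once inside, a couple of quick sanity checks (the mount points follow the compose examples above):

```bash
env | grep -E 'BASE_|WANTED_'          # confirm the environment variables arrived
ls -la /mnt/output /mnt/SmartGallery   # confirm the volumes are mounted and readable
```
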
629 | ### Rebuild After Code Changes
630 | ```bash
631 | # Stop container
632 | docker compose down
633 |
634 | # Rebuild image
635 | docker build -t smartgallery:latest .
636 |
637 | # Start container
638 | docker compose up -d
639 |
640 | # Check logs
641 | docker compose logs -f
642 | ```
643 |
644 | ### Still Having Issues?
645 |
646 | 1. **Check the main troubleshooting section** in [README.md](README.md#-troubleshooting)
647 | 2. **Open an issue** on GitHub with:
648 | - Your `docker compose logs` output
649 | - Your `compose.yaml` configuration (remove sensitive paths)
650 | - Operating system and Docker version
651 | - Steps to reproduce the problem
652 |
653 | ---
654 |
655 | ## Advanced Topics
656 |
657 | ### Running Multiple Instances
658 |
659 | To run multiple SmartGallery instances (e.g., for different ComfyUI installations):
660 |
661 | **1. Create separate compose files:**
662 |
663 | `compose-instance1.yaml`:
664 | ```yaml
665 | services:
666 | smartgallery-instance1:
667 | image: smartgallery:latest
668 | container_name: smartgallery-instance1
669 | ports:
670 | - 8189:8189
671 | volumes:
672 | - /path/to/comfyui1/output:/mnt/output
673 | - /path/to/comfyui1/input:/mnt/input
674 | - /path/to/smartgallery1:/mnt/SmartGallery
675 | environment:
676 | - BASE_OUTPUT_PATH=/mnt/output
677 | - BASE_INPUT_PATH=/mnt/input
678 | - BASE_SMARTGALLERY_PATH=/mnt/SmartGallery
679 | - WANTED_UID=1000
680 | - WANTED_GID=1000
681 | ```
682 |
683 | `compose-instance2.yaml`:
684 | ```yaml
685 | services:
686 | smartgallery-instance2:
687 | image: smartgallery:latest
688 | container_name: smartgallery-instance2
689 | ports:
690 | - 8190:8189 # Different port!
691 | volumes:
692 | - /path/to/comfyui2/output:/mnt/output
693 | - /path/to/comfyui2/input:/mnt/input
694 | - /path/to/smartgallery2:/mnt/SmartGallery
695 | environment:
696 | - BASE_OUTPUT_PATH=/mnt/output
697 | - BASE_INPUT_PATH=/mnt/input
698 | - BASE_SMARTGALLERY_PATH=/mnt/SmartGallery
699 | - WANTED_UID=1000
700 | - WANTED_GID=1000
701 | ```
702 |
703 | **2. Start both:**
704 | ```bash
705 | docker compose -f compose-instance1.yaml up -d
706 | docker compose -f compose-instance2.yaml up -d
707 | ```
708 |
709 | **3. Access:**
710 | - Instance 1: `http://localhost:8189/galleryout`
711 | - Instance 2: `http://localhost:8190/galleryout`
712 |
713 | ### Using Docker Run Instead of Compose
714 |
715 | If you prefer `docker run` over compose:
716 | ```bash
717 | docker run -d \
718 | --name smartgallery \
719 | -p 8189:8189 \
720 | -v /home/user/ComfyUI/output:/mnt/output \
721 | -v /home/user/ComfyUI/input:/mnt/input \
722 | -v /home/user/SmartGallery:/mnt/SmartGallery \
723 | -e BASE_OUTPUT_PATH=/mnt/output \
724 | -e BASE_INPUT_PATH=/mnt/input \
725 | -e BASE_SMARTGALLERY_PATH=/mnt/SmartGallery \
726 | -e WANTED_UID=1000 \
727 | -e WANTED_GID=1000 \
728 | --restart unless-stopped \
729 | smartgallery:latest
730 | ```
731 |
732 | ### Read-Only Output Volume
733 |
734 | If you want to prevent SmartGallery from modifying your ComfyUI outputs:
735 | ```yaml
736 | volumes:
737 | - /path/to/output:/mnt/output:ro # :ro = read-only
738 | - /path/to/input:/mnt/input:ro # :ro = read-only
739 | - /path/to/smartgallery:/mnt/SmartGallery
740 | ```
741 |
742 | ### Custom Network Configuration
743 |
744 | To run SmartGallery on a custom Docker network:
745 | ```yaml
746 | services:
747 | comfy-smartgallery:
748 | image: smartgallery:latest
749 | networks:
750 | - comfyui_network
751 | # ... rest of configuration ...
752 |
753 | networks:
754 | comfyui_network:
755 | external: true
756 | ```
757 |
758 | ### Resource Limits
759 |
760 | Limit CPU and memory usage:
761 | ```yaml
762 | services:
763 | comfy-smartgallery:
764 | image: smartgallery:latest
765 | # ... other config ...
766 | deploy:
767 | resources:
768 | limits:
769 | cpus: '2.0'
770 | memory: 4G
771 | reservations:
772 | cpus: '1.0'
773 | memory: 2G
774 | ```
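
While the container runs, you can confirm the limits are being respected with `docker stats` (a standard Docker command):

```bash
docker stats comfy-smartgallery --no-stream
```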
775 |
776 | ### Health Checks
777 |
778 | Add a health check to monitor container status:
779 | ```yaml
780 | services:
781 | comfy-smartgallery:
782 | image: smartgallery:latest
783 | # ... other config ...
784 | healthcheck:
785 | test: ["CMD", "curl", "-f", "http://localhost:8189/galleryout"]
786 | interval: 30s
787 | timeout: 10s
788 | retries: 3
789 | start_period: 40s
790 | ```
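
Note that this health check assumes `curl` is available inside the image. Once the container is up, you can query the reported health state with:

```bash
docker inspect --format '{{.State.Health.Status}}' comfy-smartgallery
```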
791 |
792 | ---
793 |
794 | ## Need More Help?
795 |
796 | - **Main README**: [README.md](README.md)
797 | - **Report Issues**: [GitHub Issues](../../issues)
798 | - **Changelog**: [CHANGELOG.md](CHANGELOG.md)
799 |
800 | ---
801 |
802 |
803 | Made with ❤️ for the ComfyUI community
804 |
--------------------------------------------------------------------------------
/smartgallery.py:
--------------------------------------------------------------------------------
1 | # Smart Gallery for ComfyUI
2 | # Author: Biagio Maffettone © 2025 — MIT License (free to use and modify)
3 | #
4 | # Version: 1.51 - December 18, 2025
5 | # Check the GitHub repository for updates, bug fixes, and contributions.
6 | #
7 | # Contact: biagiomaf@gmail.com
8 | # GitHub: https://github.com/biagiomaf/smart-comfyui-gallery
9 |
10 | import os
11 | import hashlib
12 | import cv2
13 | import json
14 | import shutil
15 | import re
16 | import sqlite3
17 | import time
18 | from datetime import datetime
19 | import glob
20 | import sys
21 | import subprocess
22 | import base64
23 | import zipfile
24 | import io
25 | from flask import Flask, render_template, send_from_directory, abort, send_file, url_for, redirect, request, jsonify, Response
26 | from PIL import Image, ImageSequence
27 | import colorsys
28 | from werkzeug.utils import secure_filename
29 | import concurrent.futures
30 | from tqdm import tqdm
31 | import threading
32 | import uuid
33 | # Try to import tkinter for GUI dialogs, but make it optional for Docker/headless environments
34 | try:
35 | import tkinter as tk
36 | from tkinter import messagebox
37 | TKINTER_AVAILABLE = True
38 | except ImportError:
39 | TKINTER_AVAILABLE = False
40 | # tkinter not available (e.g., in Docker containers) - will fall back to console output
41 | import urllib.request
42 | import secrets
43 |
44 |
45 | # ============================================================================
46 | # CONFIGURATION GUIDE - PLEASE READ BEFORE SETTING UP
47 | # ============================================================================
48 | #
49 | # CONFIGURATION PRIORITY:
50 | # All settings below first check for environment variables. If an environment
51 | # variable is set, its value will be used automatically.
52 | # If you have NOT set environment variables, you only need to modify the
53 | # values AFTER the comma in the os.environ.get() statements.
54 | #
55 | # Example: os.environ.get('BASE_OUTPUT_PATH', 'C:/your/path/here')
56 | # - If BASE_OUTPUT_PATH environment variable exists → it will be used
57 | # - If NOT → the value 'C:/your/path/here' will be used instead
58 | # - ONLY CHANGE 'C:/your/path/here' if you haven't set environment variables
59 | #
60 | # ----------------------------------------------------------------------------
61 | # HOW TO SET ENVIRONMENT VARIABLES (before running python smartgallery.py):
62 | # ----------------------------------------------------------------------------
63 | #
64 | # IMPORTANT: If your paths contain SPACES, you MUST use quotes around them!
65 | # Replace the example paths below with YOUR actual paths!
66 | #
67 | # Windows (Command Prompt):
68 | # call venv\Scripts\activate.bat
69 | # set "BASE_OUTPUT_PATH=C:/ComfyUI/output"
70 | #    set "BASE_INPUT_PATH=C:/ComfyUI/input"
71 | # set "BASE_SMARTGALLERY_PATH=C:/ComfyUI/output"
72 | # set "FFPROBE_MANUAL_PATH=C:/ffmpeg/bin/ffprobe.exe"
73 | # set SERVER_PORT=8189
74 | # set THUMBNAIL_WIDTH=300
75 | # set WEBP_ANIMATED_FPS=16.0
76 | # set PAGE_SIZE=100
77 | # set BATCH_SIZE=500
78 | # set ENABLE_AI_SEARCH=false
79 | # REM Leave MAX_PARALLEL_WORKERS empty to use all CPU cores (recommended)
80 | # set "MAX_PARALLEL_WORKERS="
81 | # python smartgallery.py
82 | #
83 | # Windows (PowerShell):
84 | # venv\Scripts\Activate.ps1
85 | # $env:BASE_OUTPUT_PATH="C:/ComfyUI/output"
86 | #   $env:BASE_INPUT_PATH="C:/ComfyUI/input"
87 | # $env:BASE_SMARTGALLERY_PATH="C:/ComfyUI/output"
88 | # $env:FFPROBE_MANUAL_PATH="C:/ffmpeg/bin/ffprobe.exe"
89 | # $env:SERVER_PORT="8189"
90 | # $env:THUMBNAIL_WIDTH="300"
91 | # $env:WEBP_ANIMATED_FPS="16.0"
92 | # $env:PAGE_SIZE="100"
93 | # $env:BATCH_SIZE="500"
94 | # $env:ENABLE_AI_SEARCH="false"
95 | # # Leave MAX_PARALLEL_WORKERS empty to use all CPU cores (recommended)
96 | # $env:MAX_PARALLEL_WORKERS=""
97 | # python smartgallery.py
98 | #
99 | # Linux/Mac (bash/zsh):
100 | # source venv/bin/activate
101 | # export BASE_OUTPUT_PATH="$HOME/ComfyUI/output"
102 | # export BASE_INPUT_PATH="/path/to/ComfyUI/input"
103 | # export BASE_SMARTGALLERY_PATH="$HOME/ComfyUI/output"
104 | # export FFPROBE_MANUAL_PATH="/usr/bin/ffprobe"
105 | #    export DELETE_TO="/path/to/trash" # Optional: when set, files go here instead of being permanently deleted
106 | # export SERVER_PORT=8189
107 | # export THUMBNAIL_WIDTH=300
108 | # export WEBP_ANIMATED_FPS=16.0
109 | # export PAGE_SIZE=100
110 | # export BATCH_SIZE=500
111 | # export ENABLE_AI_SEARCH=false
112 | # # Leave MAX_PARALLEL_WORKERS empty to use all CPU cores (recommended)
113 | # export MAX_PARALLEL_WORKERS=""
114 | # python smartgallery.py
115 | #
116 | #
117 | # IMPORTANT NOTES:
118 | # - Even on Windows, always use forward slashes (/) in paths,
119 | # not backslashes (\), to ensure compatibility.
120 | # - Use QUOTES around paths containing spaces to avoid errors.
121 | # - Replace example paths (C:/ComfyUI/, $HOME/ComfyUI/) with YOUR actual paths!
122 | # - Set MAX_PARALLEL_WORKERS="" (empty string) to use all available CPU cores.
123 | # Set it to a number (e.g., 4) to limit CPU usage.
124 | # - It is strongly recommended to have ffmpeg installed,
125 | # since some features depend on it.
126 | #
127 | # ============================================================================
128 |
129 |
130 | # ============================================================================
131 | # USER CONFIGURATION
132 | # ============================================================================
133 | # Adjust the parameters below to customize the gallery.
134 | # Remember: environment variables take priority over these default values.
135 | # ============================================================================
136 |
137 | # Path to the ComfyUI 'output' folder.
138 | # Common locations:
139 | # Windows: C:/ComfyUI/output or C:/Users/YourName/ComfyUI/output
140 | # Linux/Mac: /home/username/ComfyUI/output or ~/ComfyUI/output
141 | BASE_OUTPUT_PATH = os.environ.get('BASE_OUTPUT_PATH', 'C:/ComfyUI/output')
142 |
143 | # Path to the ComfyUI 'input' folder
144 | BASE_INPUT_PATH = os.environ.get('BASE_INPUT_PATH', 'C:/ComfyUI/input')
145 |
146 | # Path for service folders (database, cache, zip files).
147 | # If not specified, the ComfyUI output path will be used.
148 | # These sub-folders won't appear in the gallery.
149 | # Change this if you want the cache stored separately for better performance
150 | # or to keep system files separate from gallery content.
151 | # Leave as-is if you are unsure.
152 | BASE_SMARTGALLERY_PATH = os.environ.get('BASE_SMARTGALLERY_PATH', BASE_OUTPUT_PATH)
153 |
154 | # Path to ffprobe executable (part of ffmpeg).
155 | # Common locations:
156 | # Windows: C:/ffmpeg/bin/ffprobe.exe or C:/Program Files/ffmpeg/bin/ffprobe.exe
157 | # Linux: /usr/bin/ffprobe or /usr/local/bin/ffprobe
158 | # Mac: /usr/local/bin/ffprobe or /opt/homebrew/bin/ffprobe
159 | # Required for extracting workflows from .mp4 files.
160 | # NOTE: A full ffmpeg installation is highly recommended.
161 | FFPROBE_MANUAL_PATH = os.environ.get('FFPROBE_MANUAL_PATH', "C:/ffmpeg/bin/ffprobe.exe")
162 |
163 | # Port on which the gallery web server will run.
164 | # Must be different from the ComfyUI port (usually 8188).
165 | # The gallery does not require ComfyUI to be running; it works independently.
166 | SERVER_PORT = int(os.environ.get('SERVER_PORT', 8189))
167 |
168 | # Width (in pixels) of the generated thumbnails.
169 | THUMBNAIL_WIDTH = int(os.environ.get('THUMBNAIL_WIDTH', 300))
170 |
171 | # Assumed frame rate for animated WebP files.
172 | # Many tools, including ComfyUI, generate WebP animations at ~16 FPS.
173 | # Adjust this value if your WebPs use a different frame rate,
174 | # so that animation durations are calculated correctly.
175 | WEBP_ANIMATED_FPS = float(os.environ.get('WEBP_ANIMATED_FPS', 16.0))
176 |
177 | # Maximum number of files to load initially before showing a "Load more" button.
178 | # Use a very large number (e.g., 9999999) for "infinite" loading.
179 | PAGE_SIZE = int(os.environ.get('PAGE_SIZE', 100))
180 |
181 | # Names of special folders (e.g., 'video', 'audio').
182 | # These folders will appear in the menu only if they exist inside BASE_OUTPUT_PATH.
183 | # Leave as-is if unsure.
184 | SPECIAL_FOLDERS = ['video', 'audio']
185 |
186 | # Number of files to process at once during database sync.
187 | # Higher values use more memory but may be faster.
188 | # Lower this if you run out of memory.
189 | BATCH_SIZE = int(os.environ.get('BATCH_SIZE', 500))
190 |
191 | # Number of parallel processes to use for thumbnail and metadata generation.
192 | # - None or empty string: use all available CPU cores (fastest, recommended)
193 | # - 1: disable parallel processing (slowest, like in previous versions)
194 | # - Specific number (e.g., 4): limit CPU usage on multi-core machines
195 | MAX_PARALLEL_WORKERS = os.environ.get('MAX_PARALLEL_WORKERS', None)
196 | if MAX_PARALLEL_WORKERS is not None and MAX_PARALLEL_WORKERS != "":
197 | MAX_PARALLEL_WORKERS = int(MAX_PARALLEL_WORKERS)
198 | else:
199 | MAX_PARALLEL_WORKERS = None
200 |
201 | # Flask secret key
202 | # You can set it in the environment variable SECRET_KEY
203 | # If not set, it will be generated randomly
204 | SECRET_KEY = os.environ.get('SECRET_KEY', secrets.token_hex(32))
205 |
206 | # Maximum number of items allowed in the "Prefix" dropdown to prevent UI lag.
207 | MAX_PREFIX_DROPDOWN_ITEMS = 100
208 |
209 |
210 | # Optional path where deleted files will be moved instead of being permanently deleted.
211 | # If set, files will be moved to DELETE_TO/SmartGallery/<timestamp>_<original filename>
212 | # If not set (None or empty string), files will be permanently deleted as before.
213 | # The path MUST exist and be writable, or the application will exit with an error.
214 | # Example: /path/to/trash or C:/Trash
215 | DELETE_TO = os.environ.get('DELETE_TO', None)
216 | if DELETE_TO and DELETE_TO.strip():
217 | DELETE_TO = DELETE_TO.strip()
218 | TRASH_FOLDER = os.path.join(DELETE_TO, 'SmartGallery')
219 |
220 |     # Validate that DELETE_TO path exists (plain prints are used in this block: the Colors class is defined further down and is not available yet at import time)
221 |     if not os.path.exists(DELETE_TO):
222 |         print(f"CRITICAL ERROR: DELETE_TO path does not exist: {DELETE_TO}")
223 |         print("Please create the directory or unset the DELETE_TO environment variable.")
224 |         sys.exit(1)
225 |
226 |     # Validate that DELETE_TO is writable
227 |     if not os.access(DELETE_TO, os.W_OK):
228 |         print(f"CRITICAL ERROR: DELETE_TO path is not writable: {DELETE_TO}")
229 |         print("Please check permissions or unset the DELETE_TO environment variable.")
230 |         sys.exit(1)
231 |
232 |     # Validate that SmartGallery subfolder exists or can be created
233 |     if not os.path.exists(TRASH_FOLDER):
234 |         try:
235 |             os.makedirs(TRASH_FOLDER)
236 |             print(f"Created trash folder: {TRASH_FOLDER}")
237 |         except OSError as e:
238 |             print(f"CRITICAL ERROR: Cannot create trash folder: {TRASH_FOLDER}")
239 |             print(f"Error: {e}")
240 |             sys.exit(1)
241 | else:
242 | DELETE_TO = None
243 | TRASH_FOLDER = None
244 |
245 | # ============================================================================
246 | # WORKFLOW PROMPT EXTRACTION SETTINGS
247 | # ============================================================================
248 | # List of specific text phrases to EXCLUDE from the 'Prompt Keywords' search index.
249 | # Some custom nodes (e.g., Wan2.1, text boxes, primitives) come with long default
250 | # example prompts or placeholder text that gets saved in the workflow metadata
251 | # even if not actually used in the generation.
252 | # Add those specific strings here to prevent them from cluttering your search results.
253 | WORKFLOW_PROMPT_BLACKLIST = {
254 | "The white dragon warrior stands still, eyes full of determination and strength. The camera slowly moves closer or circles around the warrior, highlighting the powerful presence and heroic spirit of the character.",
255 | "undefined",
256 | "null",
257 | "None"
258 | }
259 |
260 | # ============================================================================
261 | # AI SEARCH CONFIGURATION (FUTURE FEATURE)
262 | # ============================================================================
263 | # Enable or disable the AI Search UI features.
264 | #
265 | # IMPORTANT:
266 | # The SmartGallery AI Service (Optional) required for this feature
267 | # is currently UNDER DEVELOPMENT and HAS NOT BEEN RELEASED yet.
268 | #
269 | # SmartGallery works fully out-of-the-box without any AI components.
270 | #
271 | # Advanced features such as AI Search will be provided by a separate,
272 | # optional service that can be installed via Docker or in a separate, dedicated Python virtual environment.
273 | #
274 | # PLEASE KEEP THIS SETTING DISABLED (default).
275 | # Do NOT enable this option unless the AI Service has been officially
276 | # released and correctly installed alongside SmartGallery.
277 | #
278 | # Check the GitHub repository for official announcements and
279 | # installation instructions regarding the optional AI Service.
280 | #
281 | # Windows: set ENABLE_AI_SEARCH=false
282 | # Linux / Mac: export ENABLE_AI_SEARCH=false
283 | # Docker: -e ENABLE_AI_SEARCH=false
284 | #
285 | ENABLE_AI_SEARCH = os.environ.get('ENABLE_AI_SEARCH', 'false').lower() == 'true'
286 |
287 | # ============================================================================
288 | # END OF USER CONFIGURATION
289 | # ============================================================================
290 |
291 |
292 | # --- CACHE AND FOLDER NAMES ---
293 | THUMBNAIL_CACHE_FOLDER_NAME = '.thumbnails_cache'
294 | SQLITE_CACHE_FOLDER_NAME = '.sqlite_cache'
295 | DATABASE_FILENAME = 'gallery_cache.sqlite'
296 | ZIP_CACHE_FOLDER_NAME = '.zip_downloads'
297 | AI_MODELS_FOLDER_NAME = '.AImodels'
298 |
299 | # --- APP INFO ---
300 | APP_VERSION = "1.51"
301 | APP_VERSION_DATE = "December 18, 2025"
302 | GITHUB_REPO_URL = "https://github.com/biagiomaf/smart-comfyui-gallery"
303 | GITHUB_RAW_URL = "https://raw.githubusercontent.com/biagiomaf/smart-comfyui-gallery/main/smartgallery.py"
304 |
305 |
306 | # --- HELPER FUNCTIONS (DEFINED FIRST) ---
307 | def path_to_key(relative_path):
308 | if not relative_path: return '_root_'
309 | return base64.urlsafe_b64encode(relative_path.replace(os.sep, '/').encode()).decode()
310 |
311 | def key_to_path(key):
312 | if key == '_root_': return ''
313 | try:
314 | return base64.urlsafe_b64decode(key.encode()).decode().replace('/', os.sep)
315 | except Exception: return None
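
# Illustrative round-trip (on systems where os.sep == '/'):
#   path_to_key('video/clips')       -> 'dmlkZW8vY2xpcHM='
#   key_to_path('dmlkZW8vY2xpcHM=')  -> 'video/clips'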
316 |
317 | # --- DERIVED SETTINGS ---
318 | DB_SCHEMA_VERSION = 26
319 | THUMBNAIL_CACHE_DIR = os.path.join(BASE_SMARTGALLERY_PATH, THUMBNAIL_CACHE_FOLDER_NAME)
320 | SQLITE_CACHE_DIR = os.path.join(BASE_SMARTGALLERY_PATH, SQLITE_CACHE_FOLDER_NAME)
321 | DATABASE_FILE = os.path.join(SQLITE_CACHE_DIR, DATABASE_FILENAME)
322 | ZIP_CACHE_DIR = os.path.join(BASE_SMARTGALLERY_PATH, ZIP_CACHE_FOLDER_NAME)
323 | PROTECTED_FOLDER_KEYS = {path_to_key(f) for f in SPECIAL_FOLDERS}
324 | PROTECTED_FOLDER_KEYS.add('_root_')
325 |
326 |
327 | # --- CONSOLE STYLING ---
328 | class Colors:
329 | HEADER = '\033[95m'
330 | BLUE = '\033[94m'
331 | CYAN = '\033[96m'
332 | GREEN = '\033[92m'
333 | YELLOW = '\033[93m'
334 | RED = '\033[91m'
335 | RESET = '\033[0m'
336 | BOLD = '\033[1m'
337 | DIM = '\033[2m'
338 |
339 | def normalize_smart_path(path_str):
340 | """
341 | Normalizes a path string for search comparison:
342 | 1. Converts to lowercase.
343 | 2. Replaces all backslashes (\\) with forward slashes (/).
344 | """
345 | if not path_str: return ""
346 | return str(path_str).lower().replace('\\', '/')
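
# Illustrative example:
#   normalize_smart_path('C:\Models\SDXL.safetensors') -> 'c:/models/sdxl.safetensors'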
347 |
348 | def print_configuration():
349 | """Prints the current configuration in a neat, aligned table."""
350 | print(f"\n{Colors.HEADER}{Colors.BOLD}--- CURRENT CONFIGURATION ---{Colors.RESET}")
351 |
352 | # Helper for aligned printing
353 | def print_row(key, value, is_path=False):
354 | color = Colors.CYAN if is_path else Colors.GREEN
355 | print(f" {Colors.BOLD}{key:<25}{Colors.RESET} : {color}{value}{Colors.RESET}")
356 |
357 | print_row("Server Port", SERVER_PORT)
358 | print_row("Base Output Path", BASE_OUTPUT_PATH, True)
359 | print_row("Base Input Path", BASE_INPUT_PATH, True)
360 | print_row("SmartGallery Path", BASE_SMARTGALLERY_PATH, True)
361 | print_row("FFprobe Path", FFPROBE_MANUAL_PATH, True)
362 | print_row("Delete To (Trash)", DELETE_TO if DELETE_TO else "Disabled (Permanent Delete)", DELETE_TO is not None)
363 | print_row("Thumbnail Width", f"{THUMBNAIL_WIDTH}px")
364 | print_row("WebP Animated FPS", WEBP_ANIMATED_FPS)
365 | print_row("Page Size", PAGE_SIZE)
366 | print_row("Batch Size", BATCH_SIZE)
367 | print_row("Max Parallel Workers", MAX_PARALLEL_WORKERS if MAX_PARALLEL_WORKERS else "All Cores")
368 | print_row("AI Search", "Enabled" if ENABLE_AI_SEARCH else "Disabled")
369 | print(f"{Colors.HEADER}-----------------------------{Colors.RESET}\n")
370 |
371 | # --- FLASK APP INITIALIZATION ---
372 | app = Flask(__name__)
373 | app.secret_key = SECRET_KEY
374 | gallery_view_cache = []
375 | folder_config_cache = None
376 | FFPROBE_EXECUTABLE_PATH = None
377 |
378 |
379 | # Data structures for node categorization and analysis
380 | NODE_CATEGORIES_ORDER = ["input", "model", "processing", "output", "others"]
381 | NODE_CATEGORIES = {
382 | "Load Checkpoint": "input", "CheckpointLoaderSimple": "input", "Empty Latent Image": "input",
383 | "CLIPTextEncode": "input", "Load Image": "input",
384 | "ModelMerger": "model",
385 | "KSampler": "processing", "KSamplerAdvanced": "processing", "VAEDecode": "processing",
386 | "VAEEncode": "processing", "LatentUpscale": "processing", "ConditioningCombine": "processing",
387 | "PreviewImage": "output", "SaveImage": "output",
388 | "LoadImageOutput": "input"
389 | }
390 | NODE_PARAM_NAMES = {
391 | "CLIPTextEncode": ["text"],
392 | "KSampler": ["seed", "steps", "cfg", "sampler_name", "scheduler", "denoise"],
393 | "KSamplerAdvanced": ["add_noise", "noise_seed", "steps", "cfg", "sampler_name", "scheduler", "start_at_step", "end_at_step", "return_with_leftover_noise"],
394 | "Load Checkpoint": ["ckpt_name"],
395 | "CheckpointLoaderSimple": ["ckpt_name"],
396 | "Empty Latent Image": ["width", "height", "batch_size"],
397 | "LatentUpscale": ["upscale_method", "width", "height"],
398 | "SaveImage": ["filename_prefix"],
399 | "ModelMerger": ["ckpt_name1", "ckpt_name2", "ratio"],
400 | "Load Image": ["image"],
401 | "LoadImageMask": ["image"],
402 | "VHS_LoadVideo": ["video"],
403 | "LoadAudio": ["audio"],
404 | "AudioLoader": ["audio"],
405 | "LoadImageOutput": ["image"]
406 | }
407 |
408 | # Cache for node colors
409 | _node_colors_cache = {}
410 |
411 | def get_node_color(node_type):
412 |     """Generates a unique and consistent color for a node type."""
413 |     if node_type not in _node_colors_cache:
414 |         # Use a stable hash (md5) so colors stay consistent across runs; the built-in hash() is randomized per process
415 |         hue = (int(hashlib.md5((node_type + "a_salt_string").encode()).hexdigest(), 16) % 360) / 360.0
416 |         rgb = [int(c * 255) for c in colorsys.hsv_to_rgb(hue, 0.7, 0.85)]
417 |         _node_colors_cache[node_type] = f"#{rgb[0]:02x}{rgb[1]:02x}{rgb[2]:02x}"
418 |     return _node_colors_cache[node_type]
419 |
420 | def filter_enabled_nodes(workflow_data):
421 | """Filters and returns only active nodes and links (mode=0) from a workflow."""
422 | if not isinstance(workflow_data, dict): return {'nodes': [], 'links': []}
423 |
424 | active_nodes = [n for n in workflow_data.get("nodes", []) if n.get("mode", 0) == 0]
425 | active_node_ids = {str(n["id"]) for n in active_nodes}
426 |
427 | active_links = [
428 | l for l in workflow_data.get("links", [])
429 | if str(l[1]) in active_node_ids and str(l[3]) in active_node_ids
430 | ]
431 | return {"nodes": active_nodes, "links": active_links}
432 |
433 | def generate_node_summary(workflow_json_string):
434 | """
435 | Analyzes a workflow JSON, extracts active nodes, and identifies input media.
436 | Robust version: handles ComfyUI specific suffixes like ' [output]'.
437 | """
438 | try:
439 | workflow_data = json.loads(workflow_json_string)
440 | except json.JSONDecodeError:
441 | return None
442 |
443 | nodes = []
444 | is_api_format = False
445 |
446 | if 'nodes' in workflow_data and isinstance(workflow_data['nodes'], list):
447 | active_workflow = filter_enabled_nodes(workflow_data)
448 | nodes = active_workflow.get('nodes', [])
449 | else:
450 | is_api_format = True
451 | for node_id, node_data in workflow_data.items():
452 | if isinstance(node_data, dict) and 'class_type' in node_data:
453 | node_entry = node_data.copy()
454 | node_entry['id'] = node_id
455 | node_entry['type'] = node_data['class_type']
456 | node_entry['inputs'] = node_data.get('inputs', {})
457 | nodes.append(node_entry)
458 |
459 | if not nodes:
460 | return []
461 |
462 |     def get_id_safe(n):
463 |         try: return (0, int(n.get('id', 0)), '')  # numeric IDs sort first; tuple keeps the key type-stable
464 |         except (TypeError, ValueError): return (1, 0, str(n.get('id', 0)))  # string IDs fall back here, avoiding int/str TypeError in sorted()
465 |
466 | sorted_nodes = sorted(nodes, key=lambda n: (
467 | NODE_CATEGORIES_ORDER.index(NODE_CATEGORIES.get(n.get('type'), 'others')),
468 | get_id_safe(n)
469 | ))
470 |
471 | summary_list = []
472 |
473 | valid_media_exts = {
474 | '.png', '.jpg', '.jpeg', '.webp', '.gif', '.jfif', '.bmp', '.tiff',
475 | '.mp4', '.mov', '.webm', '.mkv', '.avi',
476 | '.mp3', '.wav', '.ogg', '.flac', '.m4a', '.aac'
477 | }
478 |
479 | base_input_norm = os.path.normpath(BASE_INPUT_PATH)
480 |
481 | for node in sorted_nodes:
482 | node_type = node.get('type', 'Unknown')
483 | params_list = []
484 |
485 | raw_params = {}
486 | if is_api_format:
487 | raw_params = node.get('inputs', {})
488 | else:
489 | widgets_values = node.get('widgets_values', [])
490 | param_names_list = NODE_PARAM_NAMES.get(node_type, [])
491 | for i, value in enumerate(widgets_values):
492 | name = param_names_list[i] if i < len(param_names_list) else f"param_{i+1}"
493 | raw_params[name] = value
494 |
495 | for name, value in raw_params.items():
496 | display_value = value
497 | is_input_file = False
498 | input_url = None
499 |
500 | if isinstance(value, list):
501 | if len(value) == 2 and isinstance(value[0], str):
502 | display_value = f"(Link to {value[0]})"
503 | else:
504 | display_value = str(value)
505 |
506 | if isinstance(value, str) and value.strip():
507 |                 # 1. Aggressively clean the value to strip suffixes like " [output]" or " [input]"
508 |                 clean_value = value.replace('\\', '/').strip()
509 |                 # Remove common bracketed suffixes at the end of the string
510 | clean_value = re.sub(r'\s*\[.*?\]$', '', clean_value)
511 |
512 | _, ext = os.path.splitext(clean_value)
513 |
514 | if ext.lower() in valid_media_exts:
515 | filename_only = os.path.basename(clean_value)
516 |
517 | candidates = [
518 | os.path.join(BASE_INPUT_PATH, clean_value),
519 | os.path.join(BASE_INPUT_PATH, filename_only),
520 | os.path.normpath(os.path.join(BASE_INPUT_PATH, clean_value))
521 | ]
522 |
523 | for candidate_path in candidates:
524 | try:
525 | if os.path.isfile(candidate_path):
526 | abs_candidate = os.path.abspath(candidate_path)
527 | abs_base = os.path.abspath(BASE_INPUT_PATH)
528 |
529 |                                 if abs_candidate.startswith(os.path.join(abs_base, '')):  # trailing separator: '/mnt/input2' must not match '/mnt/input'
530 | is_input_file = True
531 | rel_path = os.path.relpath(abs_candidate, abs_base).replace('\\', '/')
532 | input_url = f"/galleryout/input_file/{rel_path}"
533 |                                     # Also update the value shown in the UI to the cleaned-up version
534 | display_value = clean_value
535 | break
536 | except Exception:
537 | continue
538 |
539 | params_list.append({
540 | "name": name,
541 | "value": display_value,
542 | "is_input_file": is_input_file,
543 | "input_url": input_url
544 | })
545 |
546 | summary_list.append({
547 | "id": node.get('id', 'N/A'),
548 | "type": node_type,
549 | "category": NODE_CATEGORIES.get(node_type, 'others'),
550 | "color": get_node_color(node_type),
551 | "params": params_list
552 | })
553 |
554 | return summary_list
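
# Each entry returned by generate_node_summary() has this shape (illustrative values):
#   {"id": 3, "type": "KSampler", "category": "processing", "color": "#aabbcc",
#    "params": [{"name": "seed", "value": 42, "is_input_file": False, "input_url": None}]}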
555 |
556 | # --- ALL UTILITY AND HELPER FUNCTIONS ARE DEFINED HERE, BEFORE ANY ROUTES ---
557 |
558 | def safe_delete_file(filepath):
559 | """
560 | Safely delete a file by either moving it to trash (if DELETE_TO is configured)
561 | or permanently deleting it.
562 |
563 | Args:
564 | filepath: Path to the file to delete
565 |
566 | Raises:
567 | OSError: If deletion/move fails
568 | """
569 | if DELETE_TO and TRASH_FOLDER:
570 | # Move to trash (folder already validated at startup)
571 | timestamp = time.strftime('%Y%m%d_%H%M%S')
572 | filename = os.path.basename(filepath)
573 | trash_filename = f"{timestamp}_{filename}"
574 | trash_path = os.path.join(TRASH_FOLDER, trash_filename)
575 |
576 | # Handle duplicate filenames in trash
577 | counter = 1
578 | while os.path.exists(trash_path):
579 | name_without_ext, ext = os.path.splitext(filename)
580 | trash_filename = f"{timestamp}_{name_without_ext}_{counter}{ext}"
581 | trash_path = os.path.join(TRASH_FOLDER, trash_filename)
582 | counter += 1
583 |
584 | shutil.move(filepath, trash_path)
585 | print(f"INFO: Moved file to trash: {trash_path}")
586 | else:
587 | # Permanently delete
588 | os.remove(filepath)
589 |
590 | def find_ffprobe_path():
591 | if FFPROBE_MANUAL_PATH and os.path.isfile(FFPROBE_MANUAL_PATH):
592 | try:
593 | subprocess.run([FFPROBE_MANUAL_PATH, "-version"], capture_output=True, check=True, creationflags=subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0)
594 | return FFPROBE_MANUAL_PATH
595 | except Exception: pass
596 | base_name = "ffprobe.exe" if sys.platform == "win32" else "ffprobe"
597 | try:
598 | subprocess.run([base_name, "-version"], capture_output=True, check=True, creationflags=subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0)
599 | return base_name
600 | except Exception: pass
601 | print("WARNING: ffprobe not found. Video metadata analysis will be disabled.")
602 | return None
603 |
604 | def _validate_and_get_workflow(json_string):
605 | try:
606 | data = json.loads(json_string)
607 | # Check for UI format (has 'nodes')
608 | workflow_data = data.get('workflow', data.get('prompt', data))
609 |
610 | if isinstance(workflow_data, dict):
611 | if 'nodes' in workflow_data:
612 | return json.dumps(workflow_data), 'ui'
613 |
614 | # Check for API format (keys are IDs, values have class_type)
615 | # Heuristic: Check if it looks like a dict of nodes
616 | is_api = False
617 | for k, v in workflow_data.items():
618 | if isinstance(v, dict) and 'class_type' in v:
619 | is_api = True
620 | break
621 | if is_api:
622 | return json.dumps(workflow_data), 'api'
623 |
624 | except Exception:
625 | pass
626 |
627 | return None, None
628 |
629 | def _scan_bytes_for_workflow(content_bytes):
630 | """
631 | Generator that yields all valid JSON objects found in the byte stream.
632 | Searches for matching curly braces.
633 | """
634 | try:
635 | stream_str = content_bytes.decode('utf-8', errors='ignore')
636 | except Exception:
637 | return
638 |
639 | start_pos = 0
640 | while True:
641 | first_brace = stream_str.find('{', start_pos)
642 | if first_brace == -1:
643 | break
644 |
645 | open_braces = 0
646 | start_index = first_brace
647 |
648 | for i in range(start_index, len(stream_str)):
649 | char = stream_str[i]
650 | if char == '{':
651 | open_braces += 1
652 | elif char == '}':
653 | open_braces -= 1
654 |
655 | if open_braces == 0:
656 | candidate = stream_str[start_index : i + 1]
657 | # FIX: Use 'except Exception' to allow GeneratorExit to pass through
658 | try:
659 | json.loads(candidate)
660 | yield candidate
661 | except Exception:
662 | pass
663 |
664 | # Move start_pos to after this candidate to find the next one
665 | start_pos = i + 1
666 | break
667 | else:
668 | # If loop finishes without open_braces hitting 0, no more valid JSON here
669 | break
670 |
671 | def extract_workflow(filepath):
672 | ext = os.path.splitext(filepath)[1].lower()
673 | video_exts = ['.mp4', '.mkv', '.webm', '.mov', '.avi']
674 |
675 | best_workflow = None
676 |
677 | def update_best(wf, wf_type):
678 | nonlocal best_workflow
679 | if wf_type == 'ui':
680 | best_workflow = wf
681 | return True # Found best, stop searching
682 | if wf_type == 'api' and best_workflow is None:
683 | best_workflow = wf
684 | return False
685 |
686 | if ext in video_exts:
687 |         # --- FIX: Resolve the ffprobe path inside worker processes too ---
688 |         # If the global variable is empty (this happens under multiprocessing), look it up now.
689 | current_ffprobe_path = FFPROBE_EXECUTABLE_PATH
690 | if not current_ffprobe_path:
691 | current_ffprobe_path = find_ffprobe_path()
692 | # -----------------------------------------------------------
693 |
694 | if current_ffprobe_path:
695 | try:
696 |                 # Use current_ffprobe_path instead of the global variable
697 | cmd = [current_ffprobe_path, '-v', 'quiet', '-print_format', 'json', '-show_format', filepath]
698 | result = subprocess.run(cmd, capture_output=True, text=True, encoding='utf-8', errors='ignore', check=True, creationflags=subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0)
699 | data = json.loads(result.stdout)
700 | if 'format' in data and 'tags' in data['format']:
701 | for value in data['format']['tags'].values():
702 | if isinstance(value, str) and value.strip().startswith('{'):
703 | wf, wf_type = _validate_and_get_workflow(value)
704 | if wf:
705 | if update_best(wf, wf_type): return best_workflow
706 | except Exception: pass
707 | else:
708 | try:
709 | with Image.open(filepath) as img:
710 | # Check standard keys first
711 | for key in ['workflow', 'prompt']:
712 | val = img.info.get(key)
713 | if val:
714 | wf, wf_type = _validate_and_get_workflow(val)
715 | if wf:
716 | if update_best(wf, wf_type): return best_workflow
717 |
718 | exif_data = img.info.get('exif')
719 | if exif_data and isinstance(exif_data, bytes):
720 | # Check for "workflow:" prefix which some tools use
721 | try:
722 | exif_str = exif_data.decode('utf-8', errors='ignore')
723 | if 'workflow:{' in exif_str:
724 | # Extract the JSON part after "workflow:"
725 | start = exif_str.find('workflow:{') + len('workflow:')
726 | # Try to parse this specific part first
727 | for json_candidate in _scan_bytes_for_workflow(exif_str[start:].encode('utf-8')):
728 | wf, wf_type = _validate_and_get_workflow(json_candidate)
729 | if wf:
730 | if update_best(wf, wf_type): return best_workflow
731 | break
732 | except Exception: pass
733 |
734 | # Fallback to standard scan of the entire exif_data if not already returned
735 | if best_workflow is None:
736 | for json_str in _scan_bytes_for_workflow(exif_data):
737 | wf, wf_type = _validate_and_get_workflow(json_str)
738 | if wf:
739 | if update_best(wf, wf_type): return best_workflow
740 | except Exception: pass
741 |
742 | # Raw byte scan (fallback for any file type)
743 | try:
744 | with open(filepath, 'rb') as f:
745 | content = f.read()
746 | for json_str in _scan_bytes_for_workflow(content):
747 | wf, wf_type = _validate_and_get_workflow(json_str)
748 | if wf:
749 | if update_best(wf, wf_type): return best_workflow
750 | except Exception: pass
751 |
752 | return best_workflow
753 |
754 | def is_webp_animated(filepath):
755 | try:
756 | with Image.open(filepath) as img: return getattr(img, 'is_animated', False)
757 | except: return False
758 |
759 | def format_duration(seconds):
760 | if not seconds or seconds < 0: return ""
761 | m, s = divmod(int(seconds), 60); h, m = divmod(m, 60)
762 | return f"{h}:{m:02d}:{s:02d}" if h > 0 else f"{m:02d}:{s:02d}"
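
# Examples: format_duration(75) -> '01:15'; format_duration(3725) -> '1:02:05';
#           format_duration(0) -> ''.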
763 |
764 | def analyze_file_metadata(filepath):
765 | details = {'type': 'unknown', 'duration': '', 'dimensions': '', 'has_workflow': 0}
766 | ext_lower = os.path.splitext(filepath)[1].lower()
767 | type_map = {'.png': 'image', '.jpg': 'image', '.jpeg': 'image', '.gif': 'animated_image', '.mp4': 'video', '.webm': 'video', '.mov': 'video', '.mp3': 'audio', '.wav': 'audio', '.ogg': 'audio', '.flac': 'audio'}
768 | details['type'] = type_map.get(ext_lower, 'unknown')
769 | if details['type'] == 'unknown' and ext_lower == '.webp': details['type'] = 'animated_image' if is_webp_animated(filepath) else 'image'
770 | if 'image' in details['type']:
771 | try:
772 | with Image.open(filepath) as img: details['dimensions'] = f"{img.width}x{img.height}"
773 | except Exception: pass
774 | if extract_workflow(filepath): details['has_workflow'] = 1
775 | total_duration_sec = 0
776 | if details['type'] == 'video':
777 | try:
778 | cap = cv2.VideoCapture(filepath)
779 | if cap.isOpened():
780 | fps, count = cap.get(cv2.CAP_PROP_FPS), cap.get(cv2.CAP_PROP_FRAME_COUNT)
781 | if fps > 0 and count > 0: total_duration_sec = count / fps
782 | details['dimensions'] = f"{int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))}x{int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))}"
783 | cap.release()
784 | except Exception: pass
785 | elif details['type'] == 'animated_image':
786 | try:
787 | with Image.open(filepath) as img:
788 | if getattr(img, 'is_animated', False):
789 | if ext_lower == '.gif': total_duration_sec = sum(frame.info.get('duration', 100) for frame in ImageSequence.Iterator(img)) / 1000
790 | elif ext_lower == '.webp': total_duration_sec = getattr(img, 'n_frames', 1) / WEBP_ANIMATED_FPS
791 | except Exception: pass
792 | if total_duration_sec > 0: details['duration'] = format_duration(total_duration_sec)
793 | return details
794 |
795 | def create_thumbnail(filepath, file_hash, file_type):
796 | Image.MAX_IMAGE_PIXELS = None
797 | if file_type in ['image', 'animated_image']:
798 | try:
799 | with Image.open(filepath) as img:
800 | fmt = 'gif' if img.format == 'GIF' else 'webp' if img.format == 'WEBP' else 'jpeg'
801 | cache_path = os.path.join(THUMBNAIL_CACHE_DIR, f"{file_hash}.{fmt}")
802 | if file_type == 'animated_image' and getattr(img, 'is_animated', False):
803 | frames = [fr.copy() for fr in ImageSequence.Iterator(img)]
804 | if frames:
805 | for frame in frames: frame.thumbnail((THUMBNAIL_WIDTH, THUMBNAIL_WIDTH * 2), Image.Resampling.LANCZOS)
806 | processed_frames = [frame.convert('RGBA').convert('RGB') for frame in frames]
807 | if processed_frames:
808 | processed_frames[0].save(cache_path, save_all=True, append_images=processed_frames[1:], duration=img.info.get('duration', 100), loop=img.info.get('loop', 0), optimize=True)
809 | else:
810 | img.thumbnail((THUMBNAIL_WIDTH, THUMBNAIL_WIDTH * 2), Image.Resampling.LANCZOS)
811 | if img.mode != 'RGB': img = img.convert('RGB')
812 | img.save(cache_path, 'JPEG', quality=85)
813 | return cache_path
814 | except Exception as e: print(f"ERROR (Pillow): Could not create thumbnail for {os.path.basename(filepath)}: {e}")
815 | elif file_type == 'video':
816 | try:
817 | cap = cv2.VideoCapture(filepath)
818 | success, frame = cap.read()
819 | cap.release()
820 | if success:
821 | cache_path = os.path.join(THUMBNAIL_CACHE_DIR, f"{file_hash}.jpeg")
822 | frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
823 | img = Image.fromarray(frame_rgb)
824 | img.thumbnail((THUMBNAIL_WIDTH, THUMBNAIL_WIDTH * 2), Image.Resampling.LANCZOS)
825 | img.save(cache_path, 'JPEG', quality=80)
826 | return cache_path
827 | except Exception as e: print(f"ERROR (OpenCV): Could not create thumbnail for {os.path.basename(filepath)}: {e}")
828 | return None
829 |
830 | def extract_workflow_files_string(workflow_json_string):
831 | """
832 | Parses workflow and returns a normalized string containing ONLY filenames
833 | (models, images, videos) used in the workflow.
834 | Filters out prompts, settings, and comments based on extensions and path structure.
835 | """
836 | if not workflow_json_string: return ""
837 |
838 | try:
839 | data = json.loads(workflow_json_string)
840 | except:
841 | return ""
842 |
843 | # Normalize structure (UI vs API format)
844 | nodes = []
845 | if 'nodes' in data and isinstance(data['nodes'], list):
846 | nodes = data['nodes'] # UI Format
847 | else:
848 | # API Format fallback
849 | for nid, n in data.items():
850 | if isinstance(n, dict):
851 | n['id'] = nid
852 | nodes.append(n)
853 |
854 | # 1. Blocklist Nodes (Comments)
855 | ignored_types = {'Note', 'NotePrimitive', 'Reroute', 'PrimitiveNode'}
856 |
857 | # 2. Whitelist Extensions (The most important filter)
858 | valid_extensions = {
859 | # Models
860 | '.safetensors', '.ckpt', '.pt', '.pth', '.bin', '.gguf', '.lora', '.sft',
861 | # Images
862 | '.png', '.jpg', '.jpeg', '.webp', '.gif', '.bmp', '.tiff',
863 | # Video/Audio
864 | '.mp4', '.mov', '.webm', '.mkv', '.avi', '.mp3', '.wav', '.ogg', '.flac', '.m4a'
865 | }
866 |
867 | found_tokens = set()
868 |
869 | for node in nodes:
870 | node_type = node.get('type', node.get('class_type', ''))
871 |
872 | # Skip comment nodes
873 | if node_type in ignored_types:
874 | continue
875 |
876 | # Collect values from widgets_values (UI) or inputs (API)
877 | values_to_check = []
878 |
879 | # UI Format values
880 | if 'widgets_values' in node and isinstance(node['widgets_values'], list):
881 | values_to_check.extend(node['widgets_values'])
882 |
883 | # API Format inputs
884 | if 'inputs' in node and isinstance(node['inputs'], dict):
885 | values_to_check.extend(node['inputs'].values())
886 |
887 | for val in values_to_check:
888 | if isinstance(val, str) and val.strip():
889 | # Normalize immediately
890 | norm_val = normalize_smart_path(val.strip())
891 |
892 | # --- FILTER LOGIC ---
893 |
894 | # Check A: Valid Extension?
895 | # We check if the string ends with one of the valid extensions
896 | has_valid_ext = any(norm_val.endswith(ext) for ext in valid_extensions)
897 |
898 | # Check B: Absolute Path? (For folders or files without standard extensions)
899 | # Matches "c:/..." or "/home/..."
900 | # Must be shorter than 260 chars to avoid catching long prompts starting with /
901 | is_abs_path = (len(norm_val) < 260) and (
902 | (len(norm_val) > 2 and norm_val[1] == ':') or # Windows Drive (c:)
903 | norm_val.startswith('/') # Unix/Linux root
904 | )
905 |
906 | # Keep ONLY if it looks like a file/path
907 | if has_valid_ext or is_abs_path:
908 | found_tokens.add(norm_val)
909 |
910 | return " ||| ".join(sorted(list(found_tokens)))
911 |
912 | def extract_workflow_prompt_string(workflow_json_string):
913 | """
914 | Parses workflow and extracts ALL text prompts found in nodes.
915 |
916 | New Logic (Broad Extraction with Blacklist):
917 | Scans all nodes for text parameters, filtering out technical values,
918 | filenames, specific default prompt examples defined in global config,
919 | and strictly ignoring Comment/Note nodes (including Markdown notes).
920 |
921 | Returns: A joined string of all found text prompts.
922 | """
923 | if not workflow_json_string: return ""
924 |
925 | try:
926 | data = json.loads(workflow_json_string)
927 | except:
928 | return ""
929 |
930 | nodes = []
931 |
932 | # Normalize Structure
933 | if 'nodes' in data and isinstance(data['nodes'], list):
934 | nodes = data['nodes'] # UI Format
935 | else:
936 | # API Format fallback
937 | for nid, n in data.items():
938 | if isinstance(n, dict):
939 | n['id'] = nid
940 | nodes.append(n)
941 |
942 | found_texts = set()
943 |
944 | # 1. Types to strictly ignore (Comments, Routing, structural nodes)
945 | # Updated to include MarkdownNote and other common note types
946 | ignored_types = {
947 | 'Note', 'NotePrimitive', 'Reroute', 'PrimitiveNode',
948 | 'ShowText', 'PreviewText', 'ViewInfo', 'SaveImage', 'PreviewImage',
949 | 'MarkdownNote', 'Text Note', 'StickyNote'
950 | }
951 |
952 | for node in nodes:
953 | node_type = node.get('type', node.get('class_type', '')).strip()
954 |
955 | # Skip ignored node types
956 | if node_type in ignored_types: continue
957 |
958 | # Gather values to check
959 | values_to_check = []
960 |
961 | # UI Format: check 'widgets_values'
962 | if 'widgets_values' in node and isinstance(node['widgets_values'], list):
963 | values_to_check.extend(node['widgets_values'])
964 |
965 | # API Format: check 'inputs' values
966 | if 'inputs' in node and isinstance(node['inputs'], dict):
967 | values_to_check.extend(node['inputs'].values())
968 |
969 | # Analyze values
970 | for val in values_to_check:
971 | # We are only interested in Strings
972 | if isinstance(val, str) and val.strip():
973 | text = val.strip()
974 |
975 | # --- FILTERING LOGIC ---
976 |
977 | # A. Blacklist Check (Uses the Global Configuration Variable)
978 | if text in WORKFLOW_PROMPT_BLACKLIST:
979 | continue
980 |
981 | # B. Ignore short strings (likely garbage or symbols)
982 | if len(text) < 2: continue
983 |
984 | # C. Ignore numeric strings (seeds, steps, cfg, dimensions)
985 | try:
986 | float(text)
987 | continue
988 | except ValueError:
989 | pass
990 |
991 | # D. Ignore filenames (extensions)
992 | if '.' in text and ' ' not in text:
993 | ext = os.path.splitext(text)[1].lower()
994 | if ext in ['.safetensors', '.ckpt', '.pt', '.png', '.jpg', '.webp']:
995 | continue
996 |
997 | # E. Ignore common tech keywords
998 | tech_keywords = {'euler', 'dpm', 'normal', 'karras', 'gpu', 'cpu', 'auto', 'enable', 'disable', 'fixed', 'increment', 'randomized'}
999 | if text.lower() in tech_keywords:
1000 | continue
1001 |
1002 | # If passed all filters, it's likely a prompt
1003 | found_texts.add(text)
1004 |
1005 | # Join with a separator
1006 | return " , ".join(list(found_texts))
1007 |
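# Worked example of the filter chain above (values invented for illustration):
# given widgets_values ["a cat in the rain", "euler", "20", "model.safetensors"],
# only "a cat in the rain" survives; "euler" is a tech keyword, "20" parses as
# a number, and "model.safetensors" is caught by the filename check.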
1008 | def process_single_file(filepath):
1009 | """
1010 | Worker function to perform all heavy processing for a single file.
1011 | Designed to be run in a parallel process pool.
1012 | """
1013 | try:
1014 | mtime = os.path.getmtime(filepath)
1015 | metadata = analyze_file_metadata(filepath)
1016 | file_hash_for_thumbnail = hashlib.md5((filepath + str(mtime)).encode()).hexdigest()
1017 |
1018 | if not glob.glob(os.path.join(THUMBNAIL_CACHE_DIR, f"{file_hash_for_thumbnail}.*")):
1019 | create_thumbnail(filepath, file_hash_for_thumbnail, metadata['type'])
1020 |
1021 | file_id = hashlib.md5(filepath.encode()).hexdigest()
1022 | file_size = os.path.getsize(filepath)
1023 |
1024 | # Extract workflow data
1025 | workflow_files_content = ""
1026 | workflow_prompt_content = ""
1027 |
1028 | if metadata['has_workflow']:
1029 | wf_json = extract_workflow(filepath)
1030 | if wf_json:
1031 | workflow_files_content = extract_workflow_files_string(wf_json)
1032 | workflow_prompt_content = extract_workflow_prompt_string(wf_json) # NEW
1033 |
1034 | return (
1035 | file_id, filepath, mtime, os.path.basename(filepath),
1036 | metadata['type'], metadata['duration'], metadata['dimensions'],
1037 | metadata['has_workflow'], file_size, time.time(),
1038 | workflow_files_content,
1039 | workflow_prompt_content # NEW return value
1040 | )
1041 | except Exception as e:
1042 | print(f"ERROR: Failed to process file {os.path.basename(filepath)} in worker: {e}")
1043 | return None
1044 |
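# The 12-tuple returned above maps positionally onto the INSERT statements in
# full_sync_database / sync_folder_on_demand / rescan_folder:
#   (id, path, mtime, name, type, duration, dimensions, has_workflow,
#    size, last_scanned, workflow_files, workflow_prompt)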
1045 | def get_db_connection():
1046 | # Timeout increased to 50s to be patient with the Indexer
1047 | conn = sqlite3.connect(DATABASE_FILE, timeout=50)
1048 | conn.row_factory = sqlite3.Row
1049 |
1050 | # CONCURRENCY OPTIMIZATION:
1051 | # WAL: Allows non-blocking reads.
1052 | # NORMAL: Makes transactions (commits) instant, reducing lock time drastically.
1053 | conn.execute('PRAGMA journal_mode=WAL;')
1054 | conn.execute('PRAGMA synchronous=NORMAL;')
1055 | return conn
1056 |
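# Concurrency sketch: with WAL and synchronous=NORMAL, a reader such as
#
#   with get_db_connection() as conn:
#       total = conn.execute("SELECT COUNT(*) FROM files").fetchone()[0]
#
# does not block while the indexer holds a write transaction; it simply sees
# the last committed snapshot (standard SQLite WAL behaviour).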
1057 | def init_db(conn=None):
1058 | close_conn = False
1059 | if conn is None:
1060 | conn = get_db_connection()
1061 | close_conn = True
1062 |
1063 | # Main files table - UPDATED SCHEMA with all new columns
1064 | conn.execute('''
1065 | CREATE TABLE IF NOT EXISTS files (
1066 | id TEXT PRIMARY KEY,
1067 | path TEXT NOT NULL UNIQUE,
1068 | mtime REAL NOT NULL,
1069 | name TEXT NOT NULL,
1070 | type TEXT,
1071 | duration TEXT,
1072 | dimensions TEXT,
1073 | has_workflow INTEGER,
1074 | is_favorite INTEGER DEFAULT 0,
1075 | size INTEGER DEFAULT 0,
1076 |
1077 | -- Version 24+ Columns included natively for fresh installs
1078 | last_scanned REAL DEFAULT 0,
1079 | workflow_files TEXT DEFAULT '',
1080 | workflow_prompt TEXT DEFAULT '',
1081 |
1082 | -- AI Columns
1083 | ai_last_scanned REAL DEFAULT 0,
1084 | ai_caption TEXT,
1085 | ai_embedding BLOB,
1086 | ai_error TEXT
1087 | )
1088 | ''')
1089 |
1090 | # AI Search Queue Table
1091 | conn.execute('''
1092 | CREATE TABLE IF NOT EXISTS ai_search_queue (
1093 | id INTEGER PRIMARY KEY AUTOINCREMENT,
1094 | session_id TEXT NOT NULL UNIQUE,
1095 | query TEXT NOT NULL,
1096 | limit_results INTEGER DEFAULT 100,
1097 | status TEXT DEFAULT 'pending',
1098 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
1099 | completed_at TIMESTAMP NULL
1100 | );
1101 | ''')
1102 |
1103 | # AI Search Results Table
1104 | conn.execute('''
1105 | CREATE TABLE IF NOT EXISTS ai_search_results (
1106 | id INTEGER PRIMARY KEY AUTOINCREMENT,
1107 | session_id TEXT NOT NULL,
1108 | file_id TEXT NOT NULL,
1109 | score REAL NOT NULL,
1110 | FOREIGN KEY (session_id) REFERENCES ai_search_queue(session_id)
1111 | );
1112 | ''')
1113 |
1114 | # AI Metadata Table
1115 | conn.execute("CREATE TABLE IF NOT EXISTS ai_metadata (key TEXT PRIMARY KEY, value TEXT, updated_at REAL)")
1116 |
1117 | # Indices
1118 | conn.execute('CREATE INDEX IF NOT EXISTS idx_queue_status ON ai_search_queue(status);')
1119 | conn.execute('CREATE INDEX IF NOT EXISTS idx_results_session ON ai_search_results(session_id);')
1120 |
1121 | conn.commit()
1122 | if close_conn: conn.close()
1123 |
1124 | def get_dynamic_folder_config(force_refresh=False):
1125 | global folder_config_cache
1126 | if folder_config_cache is not None and not force_refresh:
1127 | return folder_config_cache
1128 |
1129 | print("INFO: Refreshing folder configuration by scanning directory tree...")
1130 |
1131 | base_path_normalized = os.path.normpath(BASE_OUTPUT_PATH).replace('\\', '/')
1132 |
1133 | try:
1134 | root_mtime = os.path.getmtime(BASE_OUTPUT_PATH)
1135 | except OSError:
1136 | root_mtime = time.time()
1137 |
1138 | dynamic_config = {
1139 | '_root_': {
1140 | 'display_name': 'Main',
1141 | 'path': base_path_normalized,
1142 | 'relative_path': '',
1143 | 'parent': None,
1144 | 'children': [],
1145 | 'mtime': root_mtime
1146 | }
1147 | }
1148 |
1149 | try:
1150 | all_folders = {}
1151 | for dirpath, dirnames, _ in os.walk(BASE_OUTPUT_PATH):
1152 | dirnames[:] = [d for d in dirnames if d not in [THUMBNAIL_CACHE_FOLDER_NAME, SQLITE_CACHE_FOLDER_NAME, ZIP_CACHE_FOLDER_NAME, AI_MODELS_FOLDER_NAME]]
1153 | for dirname in dirnames:
1154 | full_path = os.path.normpath(os.path.join(dirpath, dirname)).replace('\\', '/')
1155 | relative_path = os.path.relpath(full_path, BASE_OUTPUT_PATH).replace('\\', '/')
1156 | try:
1157 | mtime = os.path.getmtime(full_path)
1158 | except OSError:
1159 | mtime = time.time()
1160 |
1161 | all_folders[relative_path] = {
1162 | 'full_path': full_path,
1163 | 'display_name': dirname,
1164 | 'mtime': mtime
1165 | }
1166 |
1167 | sorted_paths = sorted(all_folders.keys(), key=lambda x: x.count('/'))
1168 |
1169 | for rel_path in sorted_paths:
1170 | folder_data = all_folders[rel_path]
1171 | key = path_to_key(rel_path)
1172 | parent_rel_path = os.path.dirname(rel_path).replace('\\', '/')
1173 | parent_key = '_root_' if parent_rel_path == '.' or parent_rel_path == '' else path_to_key(parent_rel_path)
1174 |
1175 | if parent_key in dynamic_config:
1176 | dynamic_config[parent_key]['children'].append(key)
1177 |
1178 | dynamic_config[key] = {
1179 | 'display_name': folder_data['display_name'],
1180 | 'path': folder_data['full_path'],
1181 | 'relative_path': rel_path,
1182 | 'parent': parent_key,
1183 | 'children': [],
1184 | 'mtime': folder_data['mtime']
1185 | }
1186 | except FileNotFoundError:
1187 | print(f"WARNING: The base directory '{BASE_OUTPUT_PATH}' was not found.")
1188 |
1189 | folder_config_cache = dynamic_config
1190 | return dynamic_config
1191 |
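# Illustrative shape of the returned config for a tree with one subfolder
# "video" (keys shown as produced by path_to_key, defined earlier in this
# file, so the exact key strings depend on that helper):
#
#   {'_root_': {'display_name': 'Main', 'parent': None,
#               'children': [path_to_key('video')], ...},
#    path_to_key('video'): {'display_name': 'video', 'parent': '_root_',
#                           'children': [], ...}}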
1192 | def full_sync_database(conn):
1193 | print("INFO: Starting full file scan...")
1194 | start_time = time.time()
1195 |
1196 | all_folders = get_dynamic_folder_config(force_refresh=True)
1197 | db_files = {row['path']: row['mtime'] for row in conn.execute('SELECT path, mtime FROM files').fetchall()}
1198 |
1199 | disk_files = {}
1200 | print("INFO: Scanning directories on disk...")
1201 | for folder_data in all_folders.values():
1202 | folder_path = folder_data['path']
1203 | if not os.path.isdir(folder_path): continue
1204 | try:
1205 | for name in os.listdir(folder_path):
1206 | filepath = os.path.join(folder_path, name)
1207 | if os.path.isfile(filepath) and os.path.splitext(name)[1].lower() not in ['.json', '.sqlite']:
1208 | disk_files[filepath] = os.path.getmtime(filepath)
1209 | except OSError as e:
1210 | print(f"WARNING: Could not access folder {folder_path}: {e}")
1211 |
1212 | db_paths = set(db_files.keys())
1213 | disk_paths = set(disk_files.keys())
1214 |
1215 | to_delete = db_paths - disk_paths
1216 | to_add = disk_paths - db_paths
1217 | to_check = disk_paths & db_paths
1218 | to_update = {path for path in to_check if int(disk_files.get(path, 0)) > int(db_files.get(path, 0))}
1219 |
1220 | files_to_process = list(to_add.union(to_update))
1221 |
1222 | if files_to_process:
1223 | print(f"INFO: Processing {len(files_to_process)} files in parallel using up to {MAX_PARALLEL_WORKERS or 'all'} CPU cores...")
1224 |
1225 | results = []
1226 |         # --- Progress bar over the parallel workers ---
1227 | with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_PARALLEL_WORKERS) as executor:
1228 | # Submit all jobs to the pool and get future objects
1229 | futures = {executor.submit(process_single_file, path): path for path in files_to_process}
1230 |
1231 | # Create the progress bar with the correct total
1232 | with tqdm(total=len(files_to_process), desc="Processing files") as pbar:
1233 | # Iterate over the jobs as they are COMPLETED
1234 | for future in concurrent.futures.as_completed(futures):
1235 | result = future.result()
1236 | if result:
1237 | results.append(result)
1238 | # Update the bar by 1 step for each completed job
1239 | pbar.update(1)
1240 |
1241 | if results:
1242 | print(f"INFO: Inserting {len(results)} processed records into the database...")
1243 | for i in range(0, len(results), BATCH_SIZE):
1244 | batch = results[i:i + BATCH_SIZE]
1245 | conn.executemany("""
1246 | INSERT INTO files (id, path, mtime, name, type, duration, dimensions, has_workflow, size, last_scanned, workflow_files, workflow_prompt)
1247 | VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
1248 | ON CONFLICT(id) DO UPDATE SET
1249 | path = excluded.path,
1250 | name = excluded.name,
1251 | type = excluded.type,
1252 | duration = excluded.duration,
1253 | dimensions = excluded.dimensions,
1254 | has_workflow = excluded.has_workflow,
1255 | size = excluded.size,
1256 | last_scanned = excluded.last_scanned,
1257 | workflow_files = excluded.workflow_files,
1258 | workflow_prompt = excluded.workflow_prompt,
1259 |
1260 |                 -- CONDITIONAL LOGIC: reset user/AI state when the content changed
1261 | is_favorite = CASE
1262 | WHEN files.mtime != excluded.mtime THEN 0
1263 | ELSE files.is_favorite
1264 | END,
1265 |
1266 | ai_caption = CASE
1267 | WHEN files.mtime != excluded.mtime THEN NULL
1268 | ELSE files.ai_caption
1269 | END,
1270 |
1271 | ai_embedding = CASE
1272 | WHEN files.mtime != excluded.mtime THEN NULL
1273 | ELSE files.ai_embedding
1274 | END,
1275 |
1276 | ai_last_scanned = CASE
1277 | WHEN files.mtime != excluded.mtime THEN 0
1278 | ELSE files.ai_last_scanned
1279 | END,
1280 |
1281 |                 -- Update mtime last
1282 | mtime = excluded.mtime
1283 | """, batch)
1284 | conn.commit()
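        # Note on the CASE expressions above: a changed mtime means the file
        # content itself changed, so favorite status and cached AI data are
        # reset; otherwise the existing values are carried over unchanged.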
1285 |
1286 | if to_delete:
1287 | print(f"INFO: Removing {len(to_delete)} obsolete file entries from the database...")
1288 | conn.executemany("DELETE FROM files WHERE path = ?", [(p,) for p in to_delete])
1289 | conn.commit()
1290 |
1291 | print(f"INFO: Full scan completed in {time.time() - start_time:.2f} seconds.")
1292 |
1293 | def sync_folder_on_demand(folder_path):
1294 | yield f"data: {json.dumps({'message': 'Checking folder for changes...', 'current': 0, 'total': 1})}\n\n"
1295 |
1296 | try:
1297 | with get_db_connection() as conn:
1298 | disk_files, valid_extensions = {}, {'.png', '.jpg', '.jpeg', '.gif', '.webp', '.mp4', '.mkv', '.webm', '.mov', '.avi', '.mp3', '.wav', '.ogg', '.flac'}
1299 | if os.path.isdir(folder_path):
1300 | for name in os.listdir(folder_path):
1301 | filepath = os.path.join(folder_path, name)
1302 | if os.path.isfile(filepath) and os.path.splitext(name)[1].lower() in valid_extensions:
1303 | disk_files[filepath] = os.path.getmtime(filepath)
1304 |
1305 | db_files_query = conn.execute("SELECT path, mtime FROM files WHERE path LIKE ?", (folder_path + os.sep + '%',)).fetchall()
1306 | db_files = {row['path']: row['mtime'] for row in db_files_query if os.path.normpath(os.path.dirname(row['path'])) == os.path.normpath(folder_path)}
1307 |
1308 | disk_filepaths, db_filepaths = set(disk_files.keys()), set(db_files.keys())
1309 | files_to_add = disk_filepaths - db_filepaths
1310 | files_to_delete = db_filepaths - disk_filepaths
1311 | files_to_update = {path for path in (disk_filepaths & db_filepaths) if int(disk_files[path]) > int(db_files[path])}
1312 |
1313 | if not files_to_add and not files_to_update and not files_to_delete:
1314 | yield f"data: {json.dumps({'message': 'Folder is up-to-date.', 'status': 'no_changes', 'current': 1, 'total': 1})}\n\n"
1315 | return
1316 |
1317 | files_to_process = list(files_to_add.union(files_to_update))
1318 | total_files = len(files_to_process)
1319 |
1320 | if total_files > 0:
1321 | yield f"data: {json.dumps({'message': f'Found {total_files} new/modified files. Processing...', 'current': 0, 'total': total_files})}\n\n"
1322 |
1323 | data_to_upsert = []
1324 | processed_count = 0
1325 |
1326 | with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_PARALLEL_WORKERS) as executor:
1327 | futures = {executor.submit(process_single_file, path): path for path in files_to_process}
1328 |
1329 | for future in concurrent.futures.as_completed(futures):
1330 | result = future.result()
1331 | if result:
1332 | data_to_upsert.append(result)
1333 |
1334 | processed_count += 1
1335 | path = futures[future]
1336 | progress_data = {
1337 | 'message': f'Processing: {os.path.basename(path)}',
1338 | 'current': processed_count,
1339 | 'total': total_files
1340 | }
1341 | yield f"data: {json.dumps(progress_data)}\n\n"
1342 |
1343 | if data_to_upsert:
1344 | conn.executemany("""
1345 | INSERT INTO files (id, path, mtime, name, type, duration, dimensions, has_workflow, size, last_scanned, workflow_files, workflow_prompt)
1346 | VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
1347 | ON CONFLICT(id) DO UPDATE SET
1348 | path = excluded.path,
1349 | name = excluded.name,
1350 | type = excluded.type,
1351 | duration = excluded.duration,
1352 | dimensions = excluded.dimensions,
1353 | has_workflow = excluded.has_workflow,
1354 | size = excluded.size,
1355 | last_scanned = excluded.last_scanned,
1356 | workflow_files = excluded.workflow_files,
1357 | workflow_prompt = excluded.workflow_prompt,
1358 |
1359 |                     -- CONDITIONAL LOGIC: reset user/AI state when the content changed
1360 | is_favorite = CASE
1361 | WHEN files.mtime != excluded.mtime THEN 0
1362 | ELSE files.is_favorite
1363 | END,
1364 |
1365 | ai_caption = CASE
1366 | WHEN files.mtime != excluded.mtime THEN NULL
1367 | ELSE files.ai_caption
1368 | END,
1369 |
1370 | ai_embedding = CASE
1371 | WHEN files.mtime != excluded.mtime THEN NULL
1372 | ELSE files.ai_embedding
1373 | END,
1374 |
1375 | ai_last_scanned = CASE
1376 | WHEN files.mtime != excluded.mtime THEN 0
1377 | ELSE files.ai_last_scanned
1378 | END,
1379 |
1380 |                     -- Update mtime last
1381 | mtime = excluded.mtime
1382 | """, data_to_upsert)
1383 |
1384 | if files_to_delete:
1385 |                 conn.executemany("DELETE FROM files WHERE path = ?", [(p,) for p in files_to_delete])
1386 |
1387 | conn.commit()
1388 | yield f"data: {json.dumps({'message': 'Sync complete. Reloading...', 'status': 'reloading', 'current': total_files, 'total': total_files})}\n\n"
1389 |
1390 | except Exception as e:
1391 | error_message = f"Error during sync: {e}"
1392 | print(f"ERROR: {error_message}")
1393 | yield f"data: {json.dumps({'message': error_message, 'current': 1, 'total': 1, 'error': True})}\n\n"
1394 |
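# Note: sync_folder_on_demand is a generator of Server-Sent-Event frames and
# only performs work while it is iterated. Outside an SSE response it must be
# drained explicitly, for example:
#
#   for _event in sync_folder_on_demand(folder_path):
#       pass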
1395 | def scan_folder_and_extract_options(folder_path):
1396 | extensions, prefixes = set(), set()
1397 | file_count = 0
1398 | try:
1399 | if not os.path.isdir(folder_path): return 0, [], []
1400 | for filename in os.listdir(folder_path):
1401 | if os.path.isfile(os.path.join(folder_path, filename)):
1402 | ext = os.path.splitext(filename)[1]
1403 | if ext and ext.lower() not in ['.json', '.sqlite']:
1404 | extensions.add(ext.lstrip('.').lower())
1405 | file_count += 1
1406 | if '_' in filename: prefixes.add(filename.split('_')[0])
1407 | except Exception as e: print(f"ERROR: Could not scan folder '{folder_path}': {e}")
1408 | return file_count, sorted(list(extensions)), sorted(list(prefixes))
1409 |
1410 | def _worker_extract_wf_string(filepath):
1411 | """
1412 | Worker helper for migration: Extracts just the workflow string.
1413 | """
1414 | try:
1415 | wf_json = extract_workflow(filepath)
1416 | if wf_json:
1417 | return extract_workflow_files_string(wf_json)
1418 | except Exception:
1419 | pass
1420 | return ""
1421 |
1422 | def _worker_extract_wf_prompt(filepath):
1423 | """
1424 |     Worker helper for migration: extracts just the workflow prompt text.
1425 | """
1426 | try:
1427 | wf_json = extract_workflow(filepath)
1428 | if wf_json:
1429 | return extract_workflow_prompt_string(wf_json)
1430 | except Exception:
1431 | pass
1432 | return ""
1433 |
1434 | def initialize_gallery():
1435 | print("INFO: Initializing gallery...")
1436 | global FFPROBE_EXECUTABLE_PATH
1437 | FFPROBE_EXECUTABLE_PATH = find_ffprobe_path()
1438 | os.makedirs(THUMBNAIL_CACHE_DIR, exist_ok=True)
1439 | os.makedirs(SQLITE_CACHE_DIR, exist_ok=True)
1440 |
1441 | with get_db_connection() as conn:
1442 | try:
1443 | # ==========================================
1444 | # SCENARIO A: FRESH INSTALL (No Database)
1445 | # ==========================================
1446 | table_check = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='files'").fetchone()
1447 |
1448 | if not table_check:
1449 | print(f"{Colors.GREEN}INFO: New installation detected. Creating database (v{DB_SCHEMA_VERSION})...{Colors.RESET}")
1450 | # 1. Create Tables (schema is already up to date in init_db)
1451 | init_db(conn)
1452 | # 2. Initial Scan
1453 | print(f"{Colors.BLUE}INFO: Performing initial file scan...{Colors.RESET}")
1454 | full_sync_database(conn)
1455 | # 3. Set Version
1456 | conn.execute(f'PRAGMA user_version = {DB_SCHEMA_VERSION}')
1457 | conn.commit()
1458 | print(f"{Colors.GREEN}INFO: Initialization complete.{Colors.RESET}")
1459 | return # Exit function, everything is ready.
1460 |
1461 | # ==========================================
1462 | # SCENARIO B: UPGRADE / EXISTING DATABASE
1463 | # ==========================================
1464 |
1465 | # 1. Check & Add Missing Columns (Non-destructive Migration)
1466 | cursor = conn.execute("PRAGMA table_info(files)")
1467 | columns = [row[1] for row in cursor.fetchall()]
1468 |
1469 | # List of columns to check/add
1470 | migrations = {
1471 | 'last_scanned': 'REAL DEFAULT 0',
1472 | 'ai_last_scanned': 'REAL DEFAULT 0',
1473 | 'ai_caption': 'TEXT',
1474 | 'ai_embedding': 'BLOB',
1475 | 'ai_error': 'TEXT',
1476 | 'workflow_files': "TEXT DEFAULT ''",
1477 | 'workflow_prompt': "TEXT DEFAULT ''"
1478 | }
1479 |
1480 | for col_name, col_def in migrations.items():
1481 | if col_name not in columns:
1482 | print(f"INFO: Migrating DB... Adding column '{col_name}'")
1483 | conn.execute(f"ALTER TABLE files ADD COLUMN {col_name} {col_def}")
1484 |
1485 | conn.commit()
1486 |
1487 | # 2. Data Backfill (Populate new columns for existing files)
1488 |
1489 | # Backfill: Workflow Files
1490 | missing_wf_data = conn.execute(
1491 | "SELECT id, path FROM files WHERE has_workflow = 1 AND (workflow_files IS NULL OR workflow_files = '')"
1492 | ).fetchall()
1493 |
1494 | if missing_wf_data:
1495 | count = len(missing_wf_data)
1496 | print(f"{Colors.YELLOW}INFO: Migrating {count} files to populate 'workflow_files'...{Colors.RESET}")
1497 | updates = []
1498 | with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_PARALLEL_WORKERS) as executor:
1499 | futures = {executor.submit(_worker_extract_wf_string, row['path']): row['id'] for row in missing_wf_data}
1500 | for future in tqdm(concurrent.futures.as_completed(futures), total=count, desc="Migrating Files", unit="files"):
1501 | try:
1502 | wf_string = future.result()
1503 | if wf_string: updates.append((wf_string, futures[future]))
1504 |                         except Exception: pass
1505 | if updates:
1506 | conn.executemany("UPDATE files SET workflow_files = ? WHERE id = ?", updates)
1507 | conn.commit()
1508 |
1509 | # Backfill: Workflow Prompt
1510 | missing_prompt_data = conn.execute(
1511 | "SELECT id, path FROM files WHERE has_workflow = 1 AND (workflow_prompt IS NULL OR workflow_prompt = '')"
1512 | ).fetchall()
1513 |
1514 | if missing_prompt_data:
1515 | count = len(missing_prompt_data)
1516 | print(f"{Colors.YELLOW}INFO: Migrating {count} files to populate 'workflow_prompt'...{Colors.RESET}")
1517 | print(f"{Colors.DIM} This runs only once. Please wait...{Colors.RESET}")
1518 | updates = []
1519 | with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_PARALLEL_WORKERS) as executor:
1520 | futures = {executor.submit(_worker_extract_wf_prompt, row['path']): row['id'] for row in missing_prompt_data}
1521 | for future in tqdm(concurrent.futures.as_completed(futures), total=count, desc="Migrating Prompts", unit="files"):
1522 | try:
1523 | wf_prompt = future.result()
1524 | updates.append((wf_prompt, futures[future]))
1525 |                         except Exception: pass
1526 | if updates:
1527 | conn.executemany("UPDATE files SET workflow_prompt = ? WHERE id = ?", updates)
1528 | conn.commit()
1529 | print(f"{Colors.GREEN}INFO: Prompt migration complete.{Colors.RESET}")
1530 |
1531 | # 3. Final Version Update
1532 | # We update the version number only after migrations are successful
1533 | try:
1534 | stored_version = conn.execute('PRAGMA user_version').fetchone()[0]
1535 |             except Exception:
1536 | stored_version = 0
1537 |
1538 | if stored_version < DB_SCHEMA_VERSION:
1539 | print(f"INFO: Updating DB Internal Version: {stored_version} -> {DB_SCHEMA_VERSION}")
1540 | conn.execute(f'PRAGMA user_version = {DB_SCHEMA_VERSION}')
1541 | conn.commit()
1542 |
1543 | # 4. Fallback check for empty DB on existing install
1544 | # (In case a user has the DB file but 0 records for some reason)
1545 | file_count = conn.execute("SELECT COUNT(*) FROM files").fetchone()[0]
1546 | if file_count == 0:
1547 | print(f"{Colors.BLUE}INFO: Database file exists but is empty. Scanning...{Colors.RESET}")
1548 | full_sync_database(conn)
1549 |
1550 | except sqlite3.DatabaseError as e:
1551 | print(f"ERROR initializing database: {e}")
1552 |
1553 | def get_filter_options_from_db(conn, scope, folder_path=None):
1554 | """
1555 | Extracts available extensions and prefixes from the database based on scope.
1556 | Enforces a limit on prefixes to prevent UI issues.
1557 | """
1558 | extensions = set()
1559 | prefixes = set()
1560 | prefix_limit_reached = False
1561 |
1562 | try:
1563 | # Determine Query based on Scope
1564 | if scope == 'global':
1565 | cursor = conn.execute("SELECT name FROM files")
1566 | else:
1567 |             # Local scope: only files whose parent directory is exactly this
1568 |             # folder. A strict parent check in pure SQL is awkward across OS
1569 |             # path separators, so we narrow the candidate set with LIKE here
1570 |             # and enforce the exact parent match in Python below, consistent
1571 |             # with the logic used by gallery_view.
1576 | cursor = conn.execute("SELECT name, path FROM files WHERE path LIKE ?", (folder_path + os.sep + '%',))
1577 |
1578 | # Process Results
1579 | for row in cursor:
1580 | # For local scope, ensure strict containment (no subfolders)
1581 | if scope != 'global':
1582 | file_dir = os.path.dirname(row['path'])
1583 | # OS-agnostic comparison
1584 | if os.path.normpath(file_dir) != os.path.normpath(folder_path):
1585 | continue
1586 |
1587 | name = row['name']
1588 |
1589 | # 1. Extensions
1590 | _, ext = os.path.splitext(name)
1591 | if ext:
1592 | extensions.add(ext.lstrip('.').lower())
1593 |
1594 | # 2. Prefixes (Only if limit not reached)
1595 | if not prefix_limit_reached and '_' in name:
1596 | pfx = name.split('_')[0]
1597 | if pfx:
1598 | prefixes.add(pfx)
1599 | if len(prefixes) > MAX_PREFIX_DROPDOWN_ITEMS:
1600 | prefix_limit_reached = True
1601 | prefixes.clear() # Discard to save memory, UI will show fallback
1602 |
1603 | except Exception as e:
1604 | print(f"Error extracting options: {e}")
1605 |
1606 | return sorted(list(extensions)), sorted(list(prefixes)), prefix_limit_reached
1607 |
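# Example of the extraction above: "ComfyUI_00042_.png" yields the prefix
# "ComfyUI" and the extension "png". Once more than MAX_PREFIX_DROPDOWN_ITEMS
# distinct prefixes are seen, the set is discarded and prefix_limit_reached
# tells the UI to render its fallback instead.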
1608 | # --- FLASK ROUTES ---
1609 | @app.route('/galleryout/')
1610 | @app.route('/')
1611 | def gallery_redirect_base():
1612 | return redirect(url_for('gallery_view', folder_key='_root_'))
1613 |
1614 | # AI QUEUE SUBMISSION ROUTE
1615 | @app.route('/galleryout/ai_queue', methods=['POST'])
1616 | def ai_queue_search():
1617 | """
1618 | Receives a search query from the frontend and adds it to the DB queue.
1619 | Also performs basic housekeeping (cleaning old requests).
1620 | """
1621 | data = request.json
1622 | query = data.get('query', '').strip()
1623 |     # FIX: read the result limit from the JSON payload (default 100 if absent)
1624 | limit = int(data.get('limit', 100))
1625 |
1626 | if not query:
1627 | return jsonify({'status': 'error', 'message': 'Query cannot be empty'}), 400
1628 |
1629 | session_id = str(uuid.uuid4())
1630 |
1631 | try:
1632 | with get_db_connection() as conn:
1633 | # 1. Housekeeping
1634 | conn.execute("DELETE FROM ai_search_queue WHERE created_at < datetime('now', '-1 hour')")
1635 | conn.execute("DELETE FROM ai_search_results WHERE session_id NOT IN (SELECT session_id FROM ai_search_queue)")
1636 |
1637 | # 2. Insert new request WITH LIMIT
1638 |             # The INSERT must include the limit_results column
1639 | conn.execute('''
1640 | INSERT INTO ai_search_queue (session_id, query, limit_results, status)
1641 | VALUES (?, ?, ?, 'pending')
1642 | ''', (session_id, query, limit))
1643 | conn.commit()
1644 |
1645 | return jsonify({'status': 'queued', 'session_id': session_id})
1646 | except Exception as e:
1647 | print(f"AI Queue Error: {e}")
1648 | return jsonify({'status': 'error', 'message': str(e)}), 500
1649 |
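# Typical client flow (sketch; the worker that consumes 'pending' rows lives
# outside this file):
#   1. POST /galleryout/ai_queue with {"query": "a red car", "limit": 50}
#      -> {"status": "queued", "session_id": "..."}
#   2. Poll GET /galleryout/ai_check/<session_id> until status is 'completed'.
#   3. Reload the gallery view with ?ai_session_id=<session_id> for results.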
1650 | # AI STATUS CHECK ROUTE (POLLING)
1651 | @app.route('/galleryout/ai_check/<session_id>', methods=['GET'])
1652 | def ai_check_status(session_id):
1653 | """Checks the status of a specific search session."""
1654 | with get_db_connection() as conn:
1655 | row = conn.execute("SELECT status FROM ai_search_queue WHERE session_id = ?", (session_id,)).fetchone()
1656 |
1657 | if not row:
1658 | return jsonify({'status': 'not_found'})
1659 |
1660 | return jsonify({'status': row['status']})
1661 |
1662 | @app.route('/galleryout/sync_status/<folder_key>')
1663 | def sync_status(folder_key):
1664 | folders = get_dynamic_folder_config()
1665 | if folder_key not in folders:
1666 | abort(404)
1667 | folder_path = folders[folder_key]['path']
1668 | return Response(sync_folder_on_demand(folder_path), mimetype='text/event-stream')
1669 |
1670 | @app.route('/galleryout/api/search_options')
1671 | def api_search_options():
1672 | """
1673 | API Endpoint to fetch filter options (extensions/prefixes) dynamically
1674 | without reloading the page.
1675 | """
1676 | scope = request.args.get('scope', 'local')
1677 | folder_key = request.args.get('folder_key', '_root_')
1678 |
1679 | folders = get_dynamic_folder_config()
1680 | # Resolve folder path safely
1681 | folder_path = folders.get(folder_key, {}).get('path', BASE_OUTPUT_PATH)
1682 |
1683 | with get_db_connection() as conn:
1684 | exts, pfxs, limit_reached = get_filter_options_from_db(conn, scope, folder_path)
1685 |
1686 | return jsonify({
1687 | 'extensions': exts,
1688 | 'prefixes': pfxs,
1689 | 'prefix_limit_reached': limit_reached
1690 | })
1691 |
1692 | @app.route('/galleryout/view/<folder_key>')
1693 | def gallery_view(folder_key):
1694 | global gallery_view_cache
1695 | folders = get_dynamic_folder_config(force_refresh=True)
1696 | if folder_key not in folders:
1697 | return redirect(url_for('gallery_view', folder_key='_root_'))
1698 |
1699 | current_folder_info = folders[folder_key]
1700 | folder_path = current_folder_info['path']
1701 |
1702 | # Check if this is an AI Result View (Only if enabled)
1703 | ai_session_id = request.args.get('ai_session_id')
1704 | is_ai_search = False
1705 | ai_query_text = ""
1706 | is_global_search = False
1707 |
1708 | # AI Logic runs only if explicitly enabled
1709 | if ENABLE_AI_SEARCH:
1710 | with get_db_connection() as conn:
1711 | # --- PATH A: AI SEARCH RESULTS ---
1712 | if ai_session_id:
1713 | # Verify session completion
1714 | try:
1715 | queue_info = conn.execute("SELECT query, status FROM ai_search_queue WHERE session_id = ?", (ai_session_id,)).fetchone()
1716 |
1717 | if queue_info and queue_info['status'] == 'completed':
1718 | is_ai_search = True
1719 | ai_query_text = queue_info['query']
1720 |
1721 | # Retrieve files joined with search results, ordered by score
1722 | query_sql = '''
1723 | SELECT f.*, r.score
1724 | FROM ai_search_results r
1725 | JOIN files f ON r.file_id = f.id
1726 | WHERE r.session_id = ?
1727 | ORDER BY r.score DESC
1728 | '''
1729 | all_files_raw = conn.execute(query_sql, (ai_session_id,)).fetchall()
1730 |
1731 | # Convert to dict and clean blob
1732 | files_list = []
1733 | for row in all_files_raw:
1734 | d = dict(row)
1735 | if 'ai_embedding' in d: del d['ai_embedding']
1736 | files_list.append(d)
1737 |
1738 | gallery_view_cache = files_list
1739 | except Exception as e:
1740 | print(f"AI Search Error: {e}")
1741 | is_ai_search = False
1742 |
1743 | # --- PATH B: STANDARD FOLDER VIEW OR GLOBAL STANDARD SEARCH ---
1744 | if not is_ai_search:
1745 | with get_db_connection() as conn:
1746 | conditions, params = [], []
1747 |
1748 | # Check for Global Search Scope
1749 | search_scope = request.args.get('scope', 'local')
1750 | if search_scope == 'global':
1751 | is_global_search = True
1752 | else:
1753 | # Local scope: filter by path
1754 | conditions.append("path LIKE ?")
1755 | params.append(folder_path + os.sep + '%')
1756 |
1757 | sort_by = 'name' if request.args.get('sort_by') == 'name' else 'mtime'
1758 | sort_order = 'asc' if request.args.get('sort_order', 'desc').lower() == 'asc' else 'desc'
1759 |
1760 | # 1. Text Search
1761 | search_term = request.args.get('search', '').strip()
1762 | if search_term:
1763 | conditions.append("name LIKE ?")
1764 | params.append(f"%{search_term}%")
1765 |
1766 | # 2. Workflow Files Search
1767 | wf_search_raw = request.args.get('workflow_files', '').strip()
1768 | if wf_search_raw:
1769 | keywords = [k.strip() for k in wf_search_raw.split(',') if k.strip()]
1770 | for kw in keywords:
1771 | smart_kw = normalize_smart_path(kw)
1772 | conditions.append("workflow_files LIKE ?")
1773 | params.append(f"%{smart_kw}%")
1774 |
1775 | # 3. Workflow PROMPT Search (NEW)
1776 | wf_prompt_raw = request.args.get('workflow_prompt', '').strip()
1777 | if wf_prompt_raw:
1778 | keywords = [k.strip() for k in wf_prompt_raw.split(',') if k.strip()]
1779 | for kw in keywords:
1780 | # Use standard LIKE for text matching
1781 | conditions.append("workflow_prompt LIKE ?")
1782 | params.append(f"%{kw}%")
1783 |
1784 | # 4. Boolean Options
1785 | # Favorites
1786 | if request.args.get('favorites', 'false').lower() == 'true':
1787 | conditions.append("is_favorite = 1")
1788 |
1789 | # No Workflow (New)
1790 | if request.args.get('no_workflow', 'false').lower() == 'true':
1791 | conditions.append("has_workflow = 0")
1792 |
1793 | # No AI Caption (New - Only if AI Enabled)
1794 | if ENABLE_AI_SEARCH and request.args.get('no_ai_caption', 'false').lower() == 'true':
1795 | conditions.append("(ai_caption IS NULL OR ai_caption = '')")
1796 |
1797 | # 5. Date Range Search (New)
1798 | start_date_str = request.args.get('start_date', '').strip()
1799 | end_date_str = request.args.get('end_date', '').strip()
1800 |
1801 | if start_date_str:
1802 | try:
1803 | # Convert 'YYYY-MM-DD' to timestamp at 00:00:00
1804 | dt_start = datetime.strptime(start_date_str, '%Y-%m-%d')
1805 | conditions.append("mtime >= ?")
1806 | params.append(dt_start.timestamp())
1807 | except ValueError:
1808 | pass # Ignore invalid date format
1809 |
1810 | if end_date_str:
1811 | try:
1812 | # Convert 'YYYY-MM-DD' to timestamp at 23:59:59
1813 | dt_end = datetime.strptime(end_date_str, '%Y-%m-%d')
1814 | # Add almost one day (86399 seconds) to include the whole end day
1815 | end_ts = dt_end.timestamp() + 86399
1816 | conditions.append("mtime <= ?")
1817 | params.append(end_ts)
1818 | except ValueError:
1819 | pass
1820 |
1821 | # 6. Dropdown Filters (Prefix/Extensions)
1822 | selected_prefixes = request.args.getlist('prefix')
1823 | if selected_prefixes:
1824 |                 prefix_conditions = ["name LIKE ?" for p in selected_prefixes if p.strip()]
1825 | params.extend([f"{p.strip()}_%" for p in selected_prefixes if p.strip()])
1826 | if prefix_conditions: conditions.append(f"({' OR '.join(prefix_conditions)})")
1827 |
1828 | selected_extensions = request.args.getlist('extension')
1829 | if selected_extensions:
1830 |                 ext_conditions = ["name LIKE ?" for ext in selected_extensions if ext.strip()]
1831 | params.extend([f"%.{ext.lstrip('.').lower()}" for ext in selected_extensions if ext.strip()])
1832 | if ext_conditions: conditions.append(f"({' OR '.join(ext_conditions)})")
1833 |
1834 | sort_direction = "ASC" if sort_order == 'asc' else "DESC"
1835 |
1836 | where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
1837 | query = f"SELECT * FROM files {where_clause} ORDER BY {sort_by} {sort_direction}"
1838 |
1839 | all_files_raw = conn.execute(query, params).fetchall()
1840 |
1841 | # Local Scope strict filtering
1842 | if not is_global_search:
1843 | folder_path_norm = os.path.normpath(folder_path)
1844 | all_files_filtered = [dict(row) for row in all_files_raw if os.path.normpath(os.path.dirname(row['path'])) == folder_path_norm]
1845 | else:
1846 | all_files_filtered = [dict(row) for row in all_files_raw]
1847 |
1848 | # Cleanup blobs
1849 | for f in all_files_filtered:
1850 | if 'ai_embedding' in f: del f['ai_embedding']
1851 |
1852 | gallery_view_cache = all_files_filtered
1853 |
1854 | # Count active filters for UI Feedback
1855 | active_filters_count = 0
1856 | if request.args.get('search', '').strip(): active_filters_count += 1
1857 | if request.args.get('workflow_files', '').strip(): active_filters_count += 1
1858 | if request.args.get('workflow_prompt', '').strip(): active_filters_count += 1
1859 | if request.args.get('favorites', 'false').lower() == 'true': active_filters_count += 1
1860 | if request.args.get('no_workflow', 'false').lower() == 'true': active_filters_count += 1
1861 | if request.args.get('no_ai_caption', 'false').lower() == 'true': active_filters_count += 1
1862 | if request.args.get('start_date', '').strip(): active_filters_count += 1
1863 | if request.args.getlist('extension'): active_filters_count += 1
1864 | if request.args.getlist('prefix'): active_filters_count += 1
1865 | if request.args.get('scope', 'local') == 'global': active_filters_count += 1
1866 |
1867 | # Pagination Logic (Shared)
1868 | initial_files = gallery_view_cache[:PAGE_SIZE]
1869 |
1870 | # --- Metadata and Options Logic ---
1871 |
1872 | # 1. Get total files count for the badge (Standard Local Scan)
1873 | # We ignore the list-based options from the scan (using _) because we get them from DB now
1874 | total_folder_files, _, _ = scan_folder_and_extract_options(folder_path)
1875 |
1876 | # 2. Get Filter Dropdown Options (DB Based, Scope Aware, Limited)
1877 | scope_for_options = 'global' if is_global_search else 'local'
1878 |
1879 | # Initialize variables to ensure they exist even if branches are skipped
1880 | extensions = []
1881 | prefixes = []
1882 | prefix_limit_reached = False
1883 |
1884 | # Check if 'conn' variable exists and is open from previous blocks (PATH B)
1885 | if 'conn' in locals() and not is_ai_search:
1886 | # Re-use existing connection
1887 | extensions, prefixes, prefix_limit_reached = get_filter_options_from_db(conn, scope_for_options, folder_path)
1888 | else:
1889 | # Open temp connection (e.g. inside AI path or error cases)
1890 | with get_db_connection() as db_conn_for_opts:
1891 | extensions, prefixes, prefix_limit_reached = get_filter_options_from_db(db_conn_for_opts, scope_for_options, folder_path)
1892 |
1893 | # --- Breadcrumbs Logic ---
1894 | breadcrumbs, ancestor_keys = [], set()
1895 | curr_key = folder_key
1896 | while curr_key is not None and curr_key in folders:
1897 | folder_info = folders[curr_key]
1898 | breadcrumbs.append({'key': curr_key, 'display_name': folder_info['display_name']})
1899 | ancestor_keys.add(curr_key)
1900 | curr_key = folder_info.get('parent')
1901 | breadcrumbs.reverse()
1902 |
1903 | return render_template('index.html',
1904 | files=initial_files,
1905 | total_files=len(gallery_view_cache),
1906 | total_folder_files=total_folder_files,
1907 | folders=folders,
1908 | current_folder_key=folder_key,
1909 | current_folder_info=current_folder_info,
1910 | breadcrumbs=breadcrumbs,
1911 | ancestor_keys=list(ancestor_keys),
1912 | available_extensions=extensions,
1913 | available_prefixes=prefixes,
1914 | prefix_limit_reached=prefix_limit_reached,
1915 | selected_extensions=request.args.getlist('extension'),
1916 | selected_prefixes=request.args.getlist('prefix'),
1917 | show_favorites=request.args.get('favorites', 'false').lower() == 'true',
1918 | protected_folder_keys=list(PROTECTED_FOLDER_KEYS),
1919 | enable_ai_search=ENABLE_AI_SEARCH,
1920 | is_ai_search=is_ai_search,
1921 | ai_query=ai_query_text,
1922 | is_global_search=is_global_search,
1923 | active_filters_count=active_filters_count,
1924 | current_scope=request.args.get('scope', 'local'))
1925 |
1926 | @app.route('/galleryout/upload', methods=['POST'])
1927 | def upload_files():
1928 | folder_key = request.form.get('folder_key')
1929 | if not folder_key: return jsonify({'status': 'error', 'message': 'No destination folder provided.'}), 400
1930 | folders = get_dynamic_folder_config()
1931 | if folder_key not in folders: return jsonify({'status': 'error', 'message': 'Destination folder not found.'}), 404
1932 | destination_path = folders[folder_key]['path']
1933 | if 'files' not in request.files: return jsonify({'status': 'error', 'message': 'No files were uploaded.'}), 400
1934 | uploaded_files, errors, success_count = request.files.getlist('files'), {}, 0
1935 | for file in uploaded_files:
1936 | if file and file.filename:
1937 | filename = secure_filename(file.filename)
1938 | try:
1939 | file.save(os.path.join(destination_path, filename))
1940 | success_count += 1
1941 | except Exception as e: errors[filename] = str(e)
1942 |     if success_count > 0: list(sync_folder_on_demand(destination_path))  # generator: must be drained to actually run the sync
1943 | if errors: return jsonify({'status': 'partial_success', 'message': f'Successfully uploaded {success_count} files. The following files failed: {", ".join(errors.keys())}'}), 207
1944 | return jsonify({'status': 'success', 'message': f'Successfully uploaded {success_count} files.'})
1945 |
1946 | @app.route('/galleryout/rescan_folder', methods=['POST'])
1947 | def rescan_folder():
1948 | data = request.json
1949 | folder_key = data.get('folder_key')
1950 | mode = data.get('mode', 'all') # 'all' or 'recent'
1951 |
1952 | if not folder_key: return jsonify({'status': 'error', 'message': 'No folder provided.'}), 400
1953 | folders = get_dynamic_folder_config()
1954 | if folder_key not in folders: return jsonify({'status': 'error', 'message': 'Folder not found.'}), 404
1955 |
1956 | folder_path = folders[folder_key]['path']
1957 |
1958 | try:
1959 | with get_db_connection() as conn:
1960 | # Get all files in this folder
1961 | query = "SELECT path, last_scanned FROM files WHERE path LIKE ?"
1962 | params = (folder_path + os.sep + '%',)
1963 | rows = conn.execute(query, params).fetchall()
1964 |
1965 | # Filter files strictly within this folder (not subfolders)
1966 | folder_path_norm = os.path.normpath(folder_path)
1967 | files_in_folder = [
1968 | {'path': row['path'], 'last_scanned': row['last_scanned']}
1969 | for row in rows
1970 | if os.path.normpath(os.path.dirname(row['path'])) == folder_path_norm
1971 | ]
1972 |
1973 | files_to_process = []
1974 | current_time = time.time()
1975 |
1976 | if mode == 'recent':
1977 | # Process files not scanned in the last 60 minutes (3600 seconds)
1978 | cutoff_time = current_time - 3600
1979 | files_to_process = [f['path'] for f in files_in_folder if (f['last_scanned'] or 0) < cutoff_time]
1980 | else:
1981 | # Process all files
1982 | files_to_process = [f['path'] for f in files_in_folder]
1983 |
1984 | if not files_to_process:
1985 | return jsonify({'status': 'success', 'message': 'No files needed rescanning.', 'count': 0})
1986 |
1987 | print(f"INFO: Rescanning {len(files_to_process)} files in '{folder_path}' (Mode: {mode})...")
1988 |
1989 | processed_count = 0
1990 | results = []
1991 |
1992 | with concurrent.futures.ProcessPoolExecutor(max_workers=MAX_PARALLEL_WORKERS) as executor:
1993 | futures = {executor.submit(process_single_file, path): path for path in files_to_process}
1994 | for future in concurrent.futures.as_completed(futures):
1995 | result = future.result()
1996 | if result:
1997 | results.append(result)
1998 | processed_count += 1
1999 |
2000 | if results:
2001 | conn.executemany("""
2002 | INSERT INTO files (id, path, mtime, name, type, duration, dimensions, has_workflow, size, last_scanned, workflow_files, workflow_prompt)
2003 | VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
2004 | ON CONFLICT(id) DO UPDATE SET
2005 | path = excluded.path,
2006 | name = excluded.name,
2007 | type = excluded.type,
2008 | duration = excluded.duration,
2009 | dimensions = excluded.dimensions,
2010 | has_workflow = excluded.has_workflow,
2011 | size = excluded.size,
2012 | last_scanned = excluded.last_scanned,
2013 | workflow_files = excluded.workflow_files,
2014 | workflow_prompt = excluded.workflow_prompt,
2015 |
2016 |                     -- CONDITIONAL LOGIC: reset user/AI state when the content changed
2017 | is_favorite = CASE
2018 | WHEN files.mtime != excluded.mtime THEN 0
2019 | ELSE files.is_favorite
2020 | END,
2021 |
2022 | ai_caption = CASE
2023 | WHEN files.mtime != excluded.mtime THEN NULL
2024 | ELSE files.ai_caption
2025 | END,
2026 |
2027 | ai_embedding = CASE
2028 | WHEN files.mtime != excluded.mtime THEN NULL
2029 | ELSE files.ai_embedding
2030 | END,
2031 |
2032 | ai_last_scanned = CASE
2033 | WHEN files.mtime != excluded.mtime THEN 0
2034 | ELSE files.ai_last_scanned
2035 | END,
2036 |
2037 |                     -- Update mtime last
2038 | mtime = excluded.mtime
2039 | """, results)
2040 | conn.commit()
2041 |
2042 | return jsonify({'status': 'success', 'message': f'Successfully rescanned {len(results)} files.', 'count': len(results)})
2043 |
2044 | except Exception as e:
2045 | print(f"ERROR: Rescan failed: {e}")
2046 | return jsonify({'status': 'error', 'message': str(e)}), 500
2047 |
2048 | @app.route('/galleryout/create_folder', methods=['POST'])
2049 | def create_folder():
2050 | data = request.json
2051 | parent_key = data.get('parent_key', '_root_')
2052 |
2053 | raw_name = data.get('folder_name', '').strip()
2054 | folder_name = re.sub(r'[\\/:*?"<>|]', '', raw_name)
2055 |
2056 | if not folder_name or folder_name in ['.', '..']:
2057 | return jsonify({'status': 'error', 'message': 'Invalid folder name provided.'}), 400
2058 |
2059 | folders = get_dynamic_folder_config()
2060 | if parent_key not in folders: return jsonify({'status': 'error', 'message': 'Parent folder not found.'}), 404
2061 | parent_path = folders[parent_key]['path']
2062 | new_folder_path = os.path.join(parent_path, folder_name)
2063 | try:
2064 | os.makedirs(new_folder_path, exist_ok=False)
2065 |         list(sync_folder_on_demand(parent_path))  # generator: must be drained to actually run the sync
2066 | return jsonify({'status': 'success', 'message': f'Folder "{folder_name}" created successfully.'})
2067 | except FileExistsError: return jsonify({'status': 'error', 'message': 'Folder already exists.'}), 400
2068 | except Exception as e: return jsonify({'status': 'error', 'message': str(e)}), 500
2069 |
2070 | # --- ZIP BACKGROUND JOB MANAGEMENT ---
2071 | zip_jobs = {}
2072 | def background_zip_task(job_id, file_ids):
2073 | try:
2074 | if not os.path.exists(ZIP_CACHE_DIR):
2075 | try:
2076 | os.makedirs(ZIP_CACHE_DIR, exist_ok=True)
2077 | except Exception as e:
2078 | print(f"ERROR: Could not create zip directory: {e}")
2079 | zip_jobs[job_id] = {'status': 'error', 'message': f'Server permission error: {e}'}
2080 | return
2081 |
2082 | zip_filename = f"smartgallery_{job_id}.zip"
2083 | zip_filepath = os.path.join(ZIP_CACHE_DIR, zip_filename)
2084 |
2085 | with get_db_connection() as conn:
2086 | placeholders = ','.join(['?'] * len(file_ids))
2087 | query = f"SELECT path, name FROM files WHERE id IN ({placeholders})"
2088 | files_to_zip = conn.execute(query, file_ids).fetchall()
2089 |
2090 | if not files_to_zip:
2091 | zip_jobs[job_id] = {'status': 'error', 'message': 'No valid files found.'}
2092 | return
2093 |
2094 | with zipfile.ZipFile(zip_filepath, 'w', zipfile.ZIP_DEFLATED) as zf:
2095 | for file_row in files_to_zip:
2096 | file_path = file_row['path']
2097 | file_name = file_row['name']
2098 |                 # Check that the file exists
2099 | if os.path.exists(file_path):
2100 | # Add file to zip
2101 | zf.write(file_path, file_name)
2102 |
2103 |         # Job completed successfully
2104 | zip_jobs[job_id] = {
2105 | 'status': 'ready',
2106 | 'filename': zip_filename
2107 | }
2108 |
2109 |         # Automatic cleanup: delete zips older than 24 hours
2110 | try:
2111 | now = time.time()
2112 | for f in os.listdir(ZIP_CACHE_DIR):
2113 | fp = os.path.join(ZIP_CACHE_DIR, f)
2114 | if os.path.isfile(fp) and os.stat(fp).st_mtime < now - 86400:
2115 | os.remove(fp)
2116 | except Exception:
2117 | pass
2118 |
2119 | except Exception as e:
2120 | print(f"Zip Error: {e}")
2121 | zip_jobs[job_id] = {'status': 'error', 'message': str(e)}
2122 |
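# zip_jobs lifecycle (in-memory, per-process): prepare_batch_zip() seeds
# {'status': 'processing'}; background_zip_task() replaces the entry with
# {'status': 'ready', 'filename': ...} or {'status': 'error', 'message': ...},
# which check_zip_status() below reports to the polling client.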
2123 | @app.route('/galleryout/prepare_batch_zip', methods=['POST'])
2124 | def prepare_batch_zip():
2125 | data = request.json
2126 | file_ids = data.get('file_ids', [])
2127 | if not file_ids:
2128 | return jsonify({'status': 'error', 'message': 'No files specified.'}), 400
2129 |
2130 | job_id = str(uuid.uuid4())
2131 | zip_jobs[job_id] = {'status': 'processing'}
2132 |
2133 | thread = threading.Thread(target=background_zip_task, args=(job_id, file_ids))
2134 | thread.daemon = True
2135 | thread.start()
2136 |
2137 | return jsonify({'status': 'success', 'job_id': job_id, 'message': 'Zip generation started.'})
2138 |
2139 | @app.route('/galleryout/check_zip_status/<job_id>')
2140 | def check_zip_status(job_id):
2141 | job = zip_jobs.get(job_id)
2142 | if not job:
2143 | return jsonify({'status': 'error', 'message': 'Job not found'}), 404
2144 | response_data = job.copy()
2145 | if job['status'] == 'ready' and 'filename' in job:
2146 | response_data['download_url'] = url_for('serve_zip_file', filename=job['filename'])
2147 |
2148 | return jsonify(response_data)
2149 |
2150 | @app.route('/galleryout/serve_zip/<filename>')
2151 | def serve_zip_file(filename):
2152 | return send_from_directory(ZIP_CACHE_DIR, filename, as_attachment=True)
2153 |
2154 | @app.route('/galleryout/rename_folder/<folder_key>', methods=['POST'])
2155 | def rename_folder(folder_key):
2156 | if folder_key in PROTECTED_FOLDER_KEYS: return jsonify({'status': 'error', 'message': 'This folder cannot be renamed.'}), 403
2157 |
2158 | raw_name = request.json.get('new_name', '').strip()
2159 | new_name = re.sub(r'[\\/:*?"<>|]', '', raw_name)
2160 |
2161 | if not new_name or new_name in ['.', '..']:
2162 | return jsonify({'status': 'error', 'message': 'Invalid name.'}), 400
2163 |
2164 | folders = get_dynamic_folder_config()
2165 | if folder_key not in folders: return jsonify({'status': 'error', 'message': 'Folder not found.'}), 400
2166 | old_path = folders[folder_key]['path']
2167 | new_path = os.path.join(os.path.dirname(old_path), new_name)
2168 | if os.path.exists(new_path): return jsonify({'status': 'error', 'message': 'A folder with this name already exists.'}), 400
2169 | try:
2170 | with get_db_connection() as conn:
2171 | old_path_like = old_path + os.sep + '%'
2172 | files_to_update = conn.execute("SELECT id, path FROM files WHERE path LIKE ?", (old_path_like,)).fetchall()
2173 | update_data = []
2174 | for row in files_to_update:
2175 | new_file_path = row['path'].replace(old_path, new_path, 1)
2176 | new_id = hashlib.md5(new_file_path.encode()).hexdigest()
2177 | update_data.append((new_id, new_file_path, row['id']))
2178 | os.rename(old_path, new_path)
2179 | if update_data: conn.executemany("UPDATE files SET id = ?, path = ? WHERE id = ?", update_data)
2180 | conn.commit()
2181 | get_dynamic_folder_config(force_refresh=True)
2182 | return jsonify({'status': 'success', 'message': 'Folder renamed.'})
2183 | except Exception as e: return jsonify({'status': 'error', 'message': f'Error: {e}'}), 500
2184 |
2185 | @app.route('/galleryout/delete_folder/<folder_key>', methods=['POST'])
2186 | def delete_folder(folder_key):
2187 | if folder_key in PROTECTED_FOLDER_KEYS: return jsonify({'status': 'error', 'message': 'This folder cannot be deleted.'}), 403
2188 | folders = get_dynamic_folder_config()
2189 | if folder_key not in folders: return jsonify({'status': 'error', 'message': 'Folder not found.'}), 404
2190 | try:
2191 | folder_path = folders[folder_key]['path']
2192 | with get_db_connection() as conn:
2193 | conn.execute("DELETE FROM files WHERE path LIKE ?", (folder_path + os.sep + '%',))
2194 | conn.commit()
2195 | shutil.rmtree(folder_path)
2196 | get_dynamic_folder_config(force_refresh=True)
2197 | return jsonify({'status': 'success', 'message': 'Folder deleted.'})
2198 | except Exception as e: return jsonify({'status': 'error', 'message': f'Error: {e}'}), 500
2199 |
2200 | @app.route('/galleryout/load_more')
2201 | def load_more():
2202 | offset = request.args.get('offset', 0, type=int)
2203 | if offset >= len(gallery_view_cache): return jsonify(files=[])
2204 | return jsonify(files=gallery_view_cache[offset:offset + PAGE_SIZE])
2205 |
2206 | def get_file_info_from_db(file_id, column='*'):
2207 | with get_db_connection() as conn:
2208 | row = conn.execute(f"SELECT {column} FROM files WHERE id = ?", (file_id,)).fetchone()
2209 | if not row: abort(404)
2210 | return dict(row) if column == '*' else row[0]
2211 |
2212 | def _get_unique_filepath(destination_folder, filename):
2213 | """
2214 | Generates a unique filepath using the NATIVE OS separator.
2215 | This ensures that the path matches exactly what the Scanner generates,
2216 | preventing duplicate records in the database.
2217 | """
2218 | base, ext = os.path.splitext(filename)
2219 | counter = 1
2220 |
2221 |     # Use standard os.path.join: on Windows a base path "C:/A" yields
2222 |     # "C:/A\file.txt", on Linux it yields "C:/A/file.txt". In both cases the
2223 |     # result matches exactly what the scanner stores in the database.
2224 | full_path = os.path.join(destination_folder, filename)
2225 |
2226 | while os.path.exists(full_path):
2227 | new_filename = f"{base}({counter}){ext}"
2228 | full_path = os.path.join(destination_folder, new_filename)
2229 | counter += 1
2230 |
2231 | return full_path
2232 |
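# Example: if "render.png" and "render(1).png" already exist in the
# destination folder, _get_unique_filepath(folder, "render.png") returns
# the path for "render(2).png".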
2233 | @app.route('/galleryout/move_batch', methods=['POST'])
2234 | def move_batch():
2235 | data = request.json
2236 | file_ids = data.get('file_ids', [])
2237 | dest_key = data.get('destination_folder')
2238 |
2239 | folders = get_dynamic_folder_config()
2240 |
2241 | if not all([file_ids, dest_key, dest_key in folders]):
2242 | return jsonify({'status': 'error', 'message': 'Invalid data provided.'}), 400
2243 |
2244 | moved_count, renamed_count, skipped_count = 0, 0, 0
2245 | failed_files = []
2246 |
2247 | # Get destination path from config
2248 | dest_path_raw = folders[dest_key]['path']
2249 |
2250 | with get_db_connection() as conn:
2251 | for file_id in file_ids:
2252 | source_path = None
2253 | try:
2254 | # 1. Fetch Source Data + AI Metadata
2255 | query_fetch = """
2256 | SELECT
2257 | path, name, size, has_workflow, is_favorite, type, duration, dimensions,
2258 | ai_last_scanned, ai_caption, ai_embedding, ai_error, workflow_files, workflow_prompt
2259 | FROM files WHERE id = ?
2260 | """
2261 | file_info = conn.execute(query_fetch, (file_id,)).fetchone()
2262 |
2263 | if not file_info:
2264 | failed_files.append(f"ID {file_id} not found in DB")
2265 | continue
2266 |
2267 | source_path = file_info['path']
2268 | source_filename = file_info['name']
2269 |
2270 | # Metadata Pack
2271 | meta = {
2272 | 'size': file_info['size'],
2273 | 'has_workflow': file_info['has_workflow'],
2274 | 'is_favorite': file_info['is_favorite'],
2275 | 'type': file_info['type'],
2276 | 'duration': file_info['duration'],
2277 | 'dimensions': file_info['dimensions'],
2278 | 'ai_last_scanned': file_info['ai_last_scanned'],
2279 | 'ai_caption': file_info['ai_caption'],
2280 | 'ai_embedding': file_info['ai_embedding'],
2281 | 'ai_error': file_info['ai_error'],
2282 | 'workflow_files': file_info['workflow_files'],
2283 | 'workflow_prompt': file_info['workflow_prompt']
2284 | }
2285 |
2286 | # Check Source vs Dest (OS Agnostic comparison)
2287 | source_dir_norm = os.path.normpath(os.path.dirname(source_path))
2288 | dest_dir_norm = os.path.normpath(dest_path_raw)
2289 | is_same_folder = (source_dir_norm.lower() == dest_dir_norm.lower()) if os.name == 'nt' else (source_dir_norm == dest_dir_norm)
2290 |
2291 | if is_same_folder:
2292 | skipped_count += 1
2293 | continue
2294 |
2295 | if not os.path.exists(source_path):
2296 | failed_files.append(f"{source_filename} (not found on disk)")
2297 | conn.execute("DELETE FROM files WHERE id = ?", (file_id,))
2298 | continue
2299 |
2300 | # 2. Calculate unique path NATIVELY (No separator forcing)
2301 | # This guarantees the path string matches what the Scanner will see.
2302 | final_dest_path = _get_unique_filepath(dest_path_raw, source_filename)
2303 | final_filename = os.path.basename(final_dest_path)
2304 |
2305 | if final_filename != source_filename:
2306 | renamed_count += 1
2307 |
2308 | # 3. Move file on disk
2309 | shutil.move(source_path, final_dest_path)
2310 |
2311 | # 4. Calculate New ID based on the NATIVE path
2312 | new_id = hashlib.md5(final_dest_path.encode()).hexdigest()
2313 |
2314 | # 5. DB Update / Merge Logic
2315 | existing_target = conn.execute("SELECT id FROM files WHERE id = ?", (new_id,)).fetchone()
2316 |
2317 | if existing_target:
2318 | # MERGE: Target exists (e.g. ghost record). Overwrite with source metadata.
2319 | query_merge = """
2320 | UPDATE files
2321 | SET path = ?, name = ?, mtime = ?,
2322 | size = ?, has_workflow = ?, is_favorite = ?,
2323 | type = ?, duration = ?, dimensions = ?,
2324 | ai_last_scanned = ?, ai_caption = ?, ai_embedding = ?, ai_error = ?,
2325 | workflow_files = ?, workflow_prompt = ?
2326 | WHERE id = ?
2327 | """
2328 | conn.execute(query_merge, (
2329 | final_dest_path, final_filename, time.time(),
2330 | meta['size'], meta['has_workflow'], meta['is_favorite'],
2331 | meta['type'], meta['duration'], meta['dimensions'],
2332 | meta['ai_last_scanned'], meta['ai_caption'], meta['ai_embedding'], meta['ai_error'],
2333 | meta['workflow_files'],
2334 | meta['workflow_prompt'],
2335 | new_id
2336 | ))
2337 | conn.execute("DELETE FROM files WHERE id = ?", (file_id,))
2338 | else:
2339 | # STANDARD: Update existing record path/name.
2340 | conn.execute("UPDATE files SET id = ?, path = ?, name = ? WHERE id = ?",
2341 | (new_id, final_dest_path, final_filename, file_id))
2342 |
2343 | moved_count += 1
2344 |
2345 | except Exception as e:
2346 | filename_for_error = os.path.basename(source_path) if source_path else f"ID {file_id}"
2347 | failed_files.append(filename_for_error)
2348 | print(f"ERROR: Failed to move file {filename_for_error}. Reason: {e}")
2349 | continue
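     |         # A single commit covers the whole batch: moves that already succeeded
     |         # are persisted even when later files in the loop failed.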
2350 | conn.commit()
2351 |
2352 | message = f"Successfully moved {moved_count} file(s)."
2353 | if skipped_count > 0: message += f" {skipped_count} skipped (same folder)."
2354 | if renamed_count > 0: message += f" {renamed_count} renamed."
2355 | if failed_files: message += f" Failed: {len(failed_files)}."
2356 |
2357 | status = 'success'
2358 | if failed_files or (skipped_count > 0 and moved_count == 0): status = 'partial_success'
2359 |
2360 | return jsonify({'status': status, 'message': message})
2361 |
2362 | @app.route('/galleryout/delete_batch', methods=['POST'])
2363 | def delete_batch():
2364 | try:
2365 |         # Handle everything in a try/except block so a failure cannot crash the request
2366 | data = request.json
2367 | file_ids = data.get('file_ids', [])
2368 |
2369 | if not file_ids:
2370 | return jsonify({'status': 'error', 'message': 'No files selected.'}), 400
2371 |
2372 | deleted_count = 0
2373 | failed_files = []
2374 | ids_to_remove_from_db = []
2375 |
2376 | with get_db_connection() as conn:
2377 |             # 1. Build the SQL placeholders (?,?,?) correctly and safely.
2378 |             # An explicit list keeps the generation simple and avoids syntax slips.
2379 | placeholders = ','.join(['?'] * len(file_ids))
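     |             # e.g. 3 ids -> "?,?,?", keeping the IN clause fully parameterized:
     |             #   SELECT id, path FROM files WHERE id IN (?,?,?)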
2380 |
2381 |             # Select the files so their paths can be verified
2382 | query_select = f"SELECT id, path FROM files WHERE id IN ({placeholders})"
2383 | files_to_delete = conn.execute(query_select, file_ids).fetchall()
2384 |
2385 | for row in files_to_delete:
2386 | file_path = row['path']
2387 | file_id = row['id']
2388 |
2389 | try:
2390 |                     # Physical deletion (or move to the trash)
2391 | if os.path.exists(file_path):
2392 | safe_delete_file(file_path)
2393 |
2394 |                     # If the disk operation succeeded (or the file was already gone),
2395 |                     # mark the ID for removal from the database
2396 | ids_to_remove_from_db.append(file_id)
2397 | deleted_count += 1
2398 |
2399 | except Exception as e:
2400 |                     # If physical deletion fails for one file, record it and keep going
2401 | print(f"ERROR: Could not delete {file_path}: {e}")
2402 | failed_files.append(os.path.basename(file_path))
2403 |
2404 |             # 2. Bulk database cleanup
2405 | if ids_to_remove_from_db:
2406 |                 # Build fresh placeholders only for the IDs that were actually deleted
2407 | db_placeholders = ','.join(['?'] * len(ids_to_remove_from_db))
2408 | query_delete = f"DELETE FROM files WHERE id IN ({db_placeholders})"
2409 | conn.execute(query_delete, ids_to_remove_from_db)
2410 | conn.commit()
2411 |
2412 |         # Build the final message
2413 | action = "moved to trash" if DELETE_TO else "deleted"
2414 | message = f'Successfully {action} {deleted_count} files.'
2415 |
2416 | status = 'success'
2417 | if failed_files:
2418 | message += f" Failed to delete {len(failed_files)} files."
2419 | status = 'partial_success'
2420 |
2421 | return jsonify({'status': status, 'message': message})
2422 |
2423 | except Exception as e:
2424 |         # THIS fixes the "doctype is not json" error:
2425 |         # on a fatal error we return JSON instead of a broken HTML page.
2426 | print(f"CRITICAL ERROR in delete_batch: {e}")
2427 | return jsonify({'status': 'error', 'message': str(e)}), 500
2428 |
2429 | @app.route('/galleryout/favorite_batch', methods=['POST'])
2430 | def favorite_batch():
2431 | data = request.json
2432 | file_ids, status = data.get('file_ids', []), data.get('status', False)
2433 | if not file_ids: return jsonify({'status': 'error', 'message': 'No files selected'}), 400
2434 | with get_db_connection() as conn:
2435 | placeholders = ','.join('?' * len(file_ids))
2436 | conn.execute(f"UPDATE files SET is_favorite = ? WHERE id IN ({placeholders})", [1 if status else 0] + file_ids)
2437 | conn.commit()
2438 | return jsonify({'status': 'success', 'message': f"Updated favorites for {len(file_ids)} files."})
2439 |
2440 | @app.route('/galleryout/toggle_favorite/<file_id>', methods=['POST'])
2441 | def toggle_favorite(file_id):
2442 | with get_db_connection() as conn:
2443 | current = conn.execute("SELECT is_favorite FROM files WHERE id = ?", (file_id,)).fetchone()
2444 | if not current: abort(404)
2445 | new_status = 1 - current['is_favorite']
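     |         # 1 - 0 = 1 and 1 - 1 = 0: flips the integer flag stored by SQLite.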
2446 | conn.execute("UPDATE files SET is_favorite = ? WHERE id = ?", (new_status, file_id))
2447 | conn.commit()
2448 | return jsonify({'status': 'success', 'is_favorite': bool(new_status)})
2449 |
2450 | # --- FIX: ROBUST DELETE ROUTE ---
2451 | @app.route('/galleryout/delete/<file_id>', methods=['POST'])
2452 | def delete_file(file_id):
2453 | with get_db_connection() as conn:
2454 | file_info = conn.execute("SELECT path FROM files WHERE id = ?", (file_id,)).fetchone()
2455 | if not file_info:
2456 | return jsonify({'status': 'success', 'message': 'File already deleted from database.'})
2457 |
2458 | filepath = file_info['path']
2459 |
2460 | try:
2461 | if os.path.exists(filepath):
2462 | safe_delete_file(filepath)
2463 | # If file doesn't exist on disk, we still proceed to remove the DB entry, which is the desired state.
2464 | except OSError as e:
2465 | # A real OS error occurred (e.g., permissions).
2466 | print(f"ERROR: Could not delete file {filepath} from disk: {e}")
2467 | return jsonify({'status': 'error', 'message': f'Could not delete file from disk: {e}'}), 500
2468 |
2469 | # Whether the file was deleted now or was already gone, we clean up the DB.
2470 | conn.execute("DELETE FROM files WHERE id = ?", (file_id,))
2471 | conn.commit()
2472 | action = "moved to trash" if DELETE_TO else "deleted"
2473 | return jsonify({'status': 'success', 'message': f'File {action} successfully.'})
2474 |
2475 | # --- RENAME FILE ---
2476 | @app.route('/galleryout/rename_file/<file_id>', methods=['POST'])
2477 | def rename_file(file_id):
2478 | data = request.json
2479 | new_name = data.get('new_name', '').strip()
2480 |
2481 | if not new_name or len(new_name) > 250:
2482 | return jsonify({'status': 'error', 'message': 'Invalid filename.'}), 400
2483 | if re.search(r'[\\/:"*?<>|]', new_name):
2484 | return jsonify({'status': 'error', 'message': 'Invalid characters.'}), 400
2485 |
2486 | try:
2487 | with get_db_connection() as conn:
2488 | # 1. Fetch All Metadata
2489 | query_fetch = """
2490 | SELECT
2491 | path, name, size, has_workflow, is_favorite, type, duration, dimensions,
2492 | ai_last_scanned, ai_caption, ai_embedding, ai_error, workflow_files, workflow_prompt
2493 | FROM files WHERE id = ?
2494 | """
2495 | file_info = conn.execute(query_fetch, (file_id,)).fetchone()
2496 |
2497 | if not file_info:
2498 | return jsonify({'status': 'error', 'message': 'File not found.'}), 404
2499 |
2500 | old_path = file_info['path']
2501 | old_name = file_info['name']
2502 |
2503 | # Metadata Pack
2504 | meta = {
2505 | 'size': file_info['size'],
2506 | 'has_workflow': file_info['has_workflow'],
2507 | 'is_favorite': file_info['is_favorite'],
2508 | 'type': file_info['type'],
2509 | 'duration': file_info['duration'],
2510 | 'dimensions': file_info['dimensions'],
2511 | 'ai_last_scanned': file_info['ai_last_scanned'],
2512 | 'ai_caption': file_info['ai_caption'],
2513 | 'ai_embedding': file_info['ai_embedding'],
2514 | 'ai_error': file_info['ai_error'],
2515 | 'workflow_files': file_info['workflow_files'],
2516 | 'workflow_prompt': file_info['workflow_prompt']
2517 | }
2518 |
2519 | # Extension logic
2520 | _, old_ext = os.path.splitext(old_name)
2521 |             _, new_ext = os.path.splitext(new_name)
2522 | final_new_name = new_name if new_ext else new_name + old_ext
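     |             # e.g. renaming "img_0001.png" to "hero" yields "hero.png", while
     |             # "hero.jpg" is kept as-is because it already carries an extension.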
2523 |
2524 | if final_new_name == old_name:
2525 | return jsonify({'status': 'error', 'message': 'Name unchanged.'}), 400
2526 |
2527 | # 2. Construct Path NATIVELY using os.path.join
2528 | # This respects the OS separator (Mixed on Win, Forward on Linux)
2529 | # ensuring the Hash ID matches future Scans.
2530 | dir_name = os.path.dirname(old_path)
2531 | new_path = os.path.join(dir_name, final_new_name)
2532 |
2533 | if os.path.exists(new_path):
2534 | return jsonify({'status': 'error', 'message': f'File "{final_new_name}" already exists.'}), 409
2535 |
2536 | new_id = hashlib.md5(new_path.encode()).hexdigest()
2537 | existing_db = conn.execute("SELECT id FROM files WHERE id = ?", (new_id,)).fetchone()
2538 |
2539 | os.rename(old_path, new_path)
2540 |
2541 | if existing_db:
2542 | # MERGE SCENARIO
2543 | query_merge = """
2544 | UPDATE files
2545 | SET path = ?, name = ?, mtime = ?,
2546 | size = ?, has_workflow = ?, is_favorite = ?,
2547 | type = ?, duration = ?, dimensions = ?,
2548 | ai_last_scanned = ?, ai_caption = ?, ai_embedding = ?, ai_error = ?,
2549 | workflow_files = ?, workflow_prompt = ?
2550 | WHERE id = ?
2551 | """
2552 | conn.execute(query_merge, (
2553 |                     new_path, final_new_name, time.time(),
2554 | meta['size'], meta['has_workflow'], meta['is_favorite'],
2555 | meta['type'], meta['duration'], meta['dimensions'],
2556 | meta['ai_last_scanned'], meta['ai_caption'], meta['ai_embedding'], meta['ai_error'],
2557 | meta['workflow_files'],
2558 | meta['workflow_prompt'],
2559 | new_id
2560 | ))
2561 | conn.execute("DELETE FROM files WHERE id = ?", (file_id,))
2562 | else:
2563 | # STANDARD SCENARIO
2564 | conn.execute("UPDATE files SET id = ?, path = ?, name = ? WHERE id = ?",
2565 | (new_id, new_path, final_new_name, file_id))
2566 |
2567 | conn.commit()
2568 |
2569 | return jsonify({
2570 | 'status': 'success',
2571 | 'message': 'File renamed.',
2572 | 'new_name': final_new_name,
2573 | 'new_id': new_id
2574 | })
2575 |
2576 | except Exception as e:
2577 | print(f"ERROR: Rename failed: {e}")
2578 | return jsonify({'status': 'error', 'message': f'Error: {e}'}), 500
2579 |
2580 | @app.route('/galleryout/file/<file_id>')
2581 | def serve_file(file_id):
2582 | filepath = get_file_info_from_db(file_id, 'path')
2583 | if filepath.lower().endswith('.webp'): return send_file(filepath, mimetype='image/webp')
2584 | return send_file(filepath)
2585 |
2586 | @app.route('/galleryout/download/<file_id>')
2587 | def download_file(file_id):
2588 | filepath = get_file_info_from_db(file_id, 'path')
2589 | return send_file(filepath, as_attachment=True)
2590 |
2591 | @app.route('/galleryout/workflow/<file_id>')
2592 | def download_workflow(file_id):
2593 | info = get_file_info_from_db(file_id)
2594 | filepath = info['path']
2595 | original_filename = info['name']
2596 | workflow_json = extract_workflow(filepath)
2597 | if workflow_json:
2598 | base_name, _ = os.path.splitext(original_filename)
2599 | new_filename = f"{base_name}.json"
2600 | headers = {'Content-Disposition': f'attachment;filename="{new_filename}"'}
2601 | return Response(workflow_json, mimetype='application/json', headers=headers)
2602 | abort(404)
2603 |
2604 | @app.route('/galleryout/node_summary/<file_id>')
2605 | def get_node_summary(file_id):
2606 | try:
2607 | filepath = get_file_info_from_db(file_id, 'path')
2608 | workflow_json = extract_workflow(filepath)
2609 | if not workflow_json:
2610 | return jsonify({'status': 'error', 'message': 'Workflow not found for this file.'}), 404
2611 | summary_data = generate_node_summary(workflow_json)
2612 | if summary_data is None:
2613 | return jsonify({'status': 'error', 'message': 'Failed to parse workflow JSON.'}), 400
2614 | return jsonify({'status': 'success', 'summary': summary_data})
2615 | except Exception as e:
2616 | print(f"ERROR generating node summary for {file_id}: {e}")
2617 | return jsonify({'status': 'error', 'message': f'An internal error occurred: {e}'}), 500
2618 |
2619 | @app.route('/galleryout/thumbnail/<file_id>')
2620 | def serve_thumbnail(file_id):
2621 | info = get_file_info_from_db(file_id)
2622 | filepath, mtime = info['path'], info['mtime']
2623 | file_hash = hashlib.md5((filepath + str(mtime)).encode()).hexdigest()
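     |     # Cache key = md5(path + mtime): touching or re-saving the file changes
     |     # mtime, which automatically invalidates the stale thumbnail.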
2624 | existing_thumbnails = glob.glob(os.path.join(THUMBNAIL_CACHE_DIR, f"{file_hash}.*"))
2625 | if existing_thumbnails: return send_file(existing_thumbnails[0])
2626 | print(f"WARN: Thumbnail not found for {os.path.basename(filepath)}, generating...")
2627 | cache_path = create_thumbnail(filepath, file_hash, info['type'])
2628 | if cache_path and os.path.exists(cache_path): return send_file(cache_path)
2629 | return "Thumbnail generation failed", 404
2630 |
2631 | @app.route('/favicon.ico')
2632 | def favicon():
2633 | return send_file('static/galleryout/favicon.ico')
2634 |
2635 | @app.route('/galleryout/input_file/<filename>')
2636 | def serve_input_file(filename):
2637 | """Serves input files directly from the ComfyUI Input folder."""
2638 | try:
2639 | # Prevent path traversal
2640 | filename = secure_filename(filename)
2641 | filepath = os.path.abspath(os.path.join(BASE_INPUT_PATH, filename))
2642 | if not filepath.startswith(os.path.abspath(BASE_INPUT_PATH)):
2643 | abort(403)
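     |         # Defense in depth: secure_filename() already strips path separators,
     |         # and the abspath prefix check rejects anything that still resolves
     |         # outside BASE_INPUT_PATH.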
2644 |
2645 |         # For webp, force the correct mimetype
2646 | if filename.lower().endswith('.webp'):
2647 | return send_from_directory(BASE_INPUT_PATH, filename, mimetype='image/webp', as_attachment=False)
2648 |
2649 |         # For all other files, let Flask guess the mimetype but disable attachment mode so they render inline
2650 | return send_from_directory(BASE_INPUT_PATH, filename, as_attachment=False)
2651 |     except Exception:
2652 | abort(404)
2653 |
2654 | @app.route('/galleryout/check_metadata/<file_id>')
2655 | def check_metadata(file_id):
2656 | """
2657 | Lightweight endpoint to check real-time status of metadata
2658 | (Workflow and AI Caption) for the Lightbox.
2659 | """
2660 | try:
2661 | with get_db_connection() as conn:
2662 | row = conn.execute("SELECT has_workflow, ai_caption FROM files WHERE id = ?", (file_id,)).fetchone()
2663 |
2664 | if not row:
2665 | return jsonify({'status': 'error', 'message': 'File not found'}), 404
2666 |
2667 | return jsonify({
2668 | 'status': 'success',
2669 | 'has_workflow': bool(row['has_workflow']),
2670 | 'has_ai_caption': bool(row['ai_caption']),
2671 | 'ai_caption': row['ai_caption'] or "" # Return actual text to update cache
2672 | })
2673 | except Exception as e:
2674 | print(f"Metadata Check Error: {e}")
2675 | return jsonify({'status': 'error', 'message': str(e)}), 500
2676 |
2677 | def print_startup_banner():
2678 | banner = rf"""
2679 | {Colors.GREEN}{Colors.BOLD} _____ _ _____ _ _
2680 | / ____| | | / ____| | | |
2681 | | (___ _ __ ___ __ _ _ __| |_ | | __ __ _| | | ___ _ __ _ _
2682 | \___ \| '_ ` _ \ / _` | '__| __| | | |_ |/ _` | | |/ _ \ '__| | | |
2683 | ____) | | | | | | (_| | | | |_ | |__| | (_| | | | __/ | | |_| |
2684 | |_____/|_| |_| |_|\__,_|_| \__| \_____|\__,_|_|_|\___|_| \__, |
2685 | __/ |
2686 | |___/ {Colors.RESET}
2687 | """
2688 | print(banner)
2689 | print(f" {Colors.BOLD}Smart Gallery for ComfyUI{Colors.RESET}")
2690 | print(f" Author : {Colors.BLUE}Biagio Maffettone{Colors.RESET}")
2691 | print(f" Version : {Colors.YELLOW}{APP_VERSION}{Colors.RESET} ({APP_VERSION_DATE})")
2692 | print(f" GitHub : {Colors.CYAN}{GITHUB_REPO_URL}{Colors.RESET}")
2693 | print(f" Contributor: {Colors.CYAN}Martial Michel (Docker & Codebase){Colors.RESET}")
2694 | print("")
2695 |
2696 | def check_for_updates():
2697 | """Checks the GitHub repo for a newer version without external libs."""
2698 | print("Checking for updates...", end=" ", flush=True)
2699 | try:
2700 |         # 3-second timeout so startup is not blocked when there is no internet connection
2701 | with urllib.request.urlopen(GITHUB_RAW_URL, timeout=3) as response:
2702 | content = response.read().decode('utf-8')
2703 |
2704 |         # The regex handles APP_VERSION = "1.41" (string) as well as APP_VERSION = 1.41 (number)
2705 | match = re.search(r'APP_VERSION\s*=\s*["\']?([0-9.]+)["\']?', content)
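     |         # Matches e.g. APP_VERSION = "1.51.01", APP_VERSION='1.41' or APP_VERSION=1.41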
2706 |
2707 | remote_version_str = None
2708 | if match:
2709 | remote_version_str = match.group(1)
2710 | else:
2711 | # Fallback: Check header comment if variable not found
2712 | match_header = re.search(r'#\s*Version:\s*([0-9.]+)', content)
2713 | if match_header:
2714 | remote_version_str = match_header.group(1)
2715 |
2716 | if remote_version_str:
2717 | # --- HYBRID COMPARISON LOGIC ---
2718 | # 1. Clean both versions from non-numeric chars (except dots)
2719 | local_clean = re.sub(r'[^0-9.]', '', str(APP_VERSION))
2720 | remote_clean = re.sub(r'[^0-9.]', '', str(remote_version_str))
2721 |
2722 | # 2. Check if they are Legacy Float style (max 1 dot, e.g. "1.41", "1.4099")
2723 | # or Modern SemVer style (2+ dots, e.g. "1.51.01")
2724 | local_dots = local_clean.count('.')
2725 | remote_dots = remote_clean.count('.')
2726 |
2727 | is_update_available = False
2728 |
2729 | if local_dots <= 1 and remote_dots <= 1:
2730 | # Use Float logic (Legacy) to support 1.41 > 1.4099
2731 | try:
2732 | is_update_available = float(remote_clean) > float(local_clean)
2733 | except ValueError:
2734 | # Fallback to tuple comparison if float conversion fails
2735 | pass
2736 |
2737 | if not is_update_available:
2738 | # Use Semantic Tuple logic (Modern) if float check failed or didn't apply
2739 | # Examples: 1.51.1 > 1.51
2740 | local_v = tuple(map(int, local_clean.split('.'))) if local_clean else (0,)
2741 | remote_v = tuple(map(int, remote_clean.split('.'))) if remote_clean else (0,)
2742 | is_update_available = remote_v > local_v
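     |             # Worked examples of the hybrid comparison:
     |             #   legacy float : "1.41" vs "1.4099" -> 1.41 > 1.4099 (tuple logic
     |             #                  would wrongly see (1,41) < (1,4099))
     |             #   semver tuple : "1.51.1" vs "1.51" -> (1,51,1) > (1,51) (float()
     |             #                  would raise ValueError on "1.51.1")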
2743 |
2744 | if is_update_available:
2745 | print(f"\n{Colors.YELLOW}{Colors.BOLD}NOTICE: A new version ({remote_version_str}) is available!{Colors.RESET}")
2746 | print(f"Please update from: {GITHUB_REPO_URL}\n")
2747 | else:
2748 | print("You are up to date.")
2749 | else:
2750 | print("Could not parse remote version.")
2751 |
2752 | except Exception:
2753 | print("Skipped (Offline or GitHub unreachable).")
2754 |
2755 | # --- STARTUP CHECKS AND MAIN ENTRY POINT ---
2756 | def show_config_error_and_exit(path):
2757 | """Shows a critical error message and exits the program."""
2758 | msg = (
2759 | f"❌ CRITICAL ERROR: The specified path does not exist or is not accessible:\n\n"
2760 | f"👉 {path}\n\n"
2761 | f"INSTRUCTIONS:\n"
2762 | f"1. If you are launching via a script (e.g., .bat file), please edit it and set the correct 'BASE_OUTPUT_PATH' variable.\n"
2763 | f"2. Or edit 'smartgallery.py' (USER CONFIGURATION section) and ensure the path points to an existing folder.\n\n"
2764 | f"The program cannot continue and will now exit."
2765 | )
2766 |
2767 | if TKINTER_AVAILABLE:
2768 | root = tk.Tk()
2769 | root.withdraw()
2770 | root.attributes('-topmost', True)
2771 | messagebox.showerror("SmartGallery - Configuration Error", msg)
2772 | root.destroy()
2773 | else:
2774 | # Fallback for headless environments (Docker, etc.)
2775 | print(f"\n{Colors.RED}{Colors.BOLD}" + "="*70 + f"{Colors.RESET}")
2776 | print(f"{Colors.RED}{Colors.BOLD}{msg}{Colors.RESET}")
2777 | print(f"{Colors.RED}{Colors.BOLD}" + "="*70 + f"{Colors.RESET}\n")
2778 |
2779 | sys.exit(1)
2780 |
2781 | def show_ffmpeg_warning():
2782 | """Shows a non-blocking warning message for missing FFmpeg."""
2783 | msg = (
2784 | "WARNING: FFmpeg/FFprobe not found\n\n"
2785 | "The system uses the 'ffprobe' utility to analyze video files. "
2786 | "It seems it is missing or not configured correctly.\n\n"
2787 | "CONSEQUENCES:\n"
2788 | "❌ You will NOT be able to extract ComfyUI workflows from video files (.mp4, .mov, etc).\n"
2789 | "✅ Gallery browsing, playback, and image features will still work perfectly.\n\n"
2790 | "To fix this, install FFmpeg or check the 'FFPROBE_MANUAL_PATH' in the configuration."
2791 | )
2792 |
2793 | if TKINTER_AVAILABLE:
2794 | root = tk.Tk()
2795 | root.withdraw()
2796 | root.attributes('-topmost', True)
2797 | messagebox.showwarning("SmartGallery - Feature Limitation", msg)
2798 | root.destroy()
2799 | else:
2800 | # Fallback for headless environments (Docker, etc.)
2801 | print(f"\n{Colors.YELLOW}{Colors.BOLD}" + "="*70 + f"{Colors.RESET}")
2802 | print(f"{Colors.YELLOW}{msg}{Colors.RESET}")
2803 | print(f"{Colors.YELLOW}{Colors.BOLD}" + "="*70 + f"{Colors.RESET}\n")
2804 |
2805 | if __name__ == '__main__':
2806 |
2807 | print_startup_banner()
2808 | check_for_updates()
2809 | print_configuration()
2810 |
2811 | # --- CHECK: CRITICAL OUTPUT PATH CHECK (Blocking) ---
2812 | if not os.path.exists(BASE_OUTPUT_PATH):
2813 | show_config_error_and_exit(BASE_OUTPUT_PATH)
2814 |
2815 | # --- CHECK: INPUT PATH CHECK (Non-Blocking / Warning) ---
2816 | if not os.path.exists(BASE_INPUT_PATH):
2817 | print(f"{Colors.YELLOW}{Colors.BOLD}WARNING: Input Path not found!{Colors.RESET}")
2818 | print(f"{Colors.YELLOW} The path '{BASE_INPUT_PATH}' does not exist.{Colors.RESET}")
2819 | print(f"{Colors.YELLOW} > Source media visualization in Node Summary will be DISABLED.{Colors.RESET}")
2820 | print(f"{Colors.YELLOW} > The gallery will still function normally for output files.{Colors.RESET}\n")
2821 |
2822 | # Initialize the gallery
2823 | initialize_gallery()
2824 |
2825 | # --- CHECK: FFMPEG WARNING ---
2826 | if not FFPROBE_EXECUTABLE_PATH:
2827 | # Check if we are in a headless environment (like Docker) where tk might fail
2828 | if os.environ.get('DISPLAY') or os.name == 'nt':
2829 | try:
2830 | show_ffmpeg_warning()
2831 |             except Exception:
2832 | print(f"{Colors.RED}WARNING: FFmpeg not found. Video workflows extraction disabled.{Colors.RESET}")
2833 | else:
2834 | print(f"{Colors.RED}WARNING: FFmpeg not found. Video workflows extraction disabled.{Colors.RESET}")
2835 |
2836 | print(f"{Colors.GREEN}{Colors.BOLD}🚀 Gallery started successfully!{Colors.RESET}")
2837 | print(f"👉 Access URL: {Colors.CYAN}{Colors.BOLD}http://127.0.0.1:{SERVER_PORT}/galleryout/{Colors.RESET}")
2838 | print(f" (Press CTRL+C to stop)")
2839 |
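     |     # 0.0.0.0 binds all network interfaces, so other devices on the LAN can
     |     # also reach the gallery at http://<host-ip>:<port>/galleryout/.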
2840 | app.run(host='0.0.0.0', port=SERVER_PORT, debug=False)
--------------------------------------------------------------------------------