├── .last_release
├── src
│   ├── utils
│   │   ├── __init__.py
│   │   ├── notify.py
│   │   ├── config.py
│   │   ├── validate_config.py
│   │   ├── logger.py
│   │   └── regions.py
│   ├── __init__.py
│   ├── updater.py
│   ├── entrypoint.py
│   ├── check_remote.py
│   ├── filesystem.py
│   ├── process_manager.py
│   └── downloader.py
├── .python-version
├── .github
│   ├── dependabot.yml
│   ├── ISSUE_TEMPLATE
│   │   └── bug_report.md
│   └── workflows
│       ├── lint.yml
│       ├── full-test.yml
│       ├── check-releases.yml
│       └── build-and-push.yml
├── docker-compose.yml
├── docker-compose.build.yml
├── .gitignore
├── entrypoint.sh
├── Makefile
├── Dockerfile
├── pyproject.toml
├── LICENSE
├── README.md
└── uv.lock
/.last_release:
--------------------------------------------------------------------------------
1 | 0.7.4
2 |
--------------------------------------------------------------------------------
/src/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.13
2 |
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | from .downloader import InsufficientSpaceError
2 |
3 | __all__ = ["InsufficientSpaceError"]
4 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |   - package-ecosystem: "github-actions"
4 |     directory: "/"
5 |     schedule:
6 |       interval: "weekly"
7 | 
8 |   - package-ecosystem: "docker"
9 |     directory: "/"
10 |     schedule:
11 |       interval: "weekly"
12 | 
13 |   - package-ecosystem: "uv"
14 |     directory: "/"
15 |     schedule:
16 |       interval: "weekly"
17 | 
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 |   photon:
3 |     image: ghcr.io/rtuszik/photon-docker:latest
4 |     container_name: "photon-docker"
5 |     environment:
6 |       - UPDATE_STRATEGY=PARALLEL
7 |       - UPDATE_INTERVAL=30d
8 |       - LOG_LEVEL=INFO # Options: DEBUG, INFO, ERROR
9 |       # - BASE_URL=https://r2.koalasec.org/public
10 |       # - REGION=andorra
11 |     volumes:
12 |       - photon_data:/photon/data
13 |     restart: unless-stopped
14 |     ports:
15 |       - "2322:2322"
16 | volumes:
17 |   photon_data:
18 |
--------------------------------------------------------------------------------
/docker-compose.build.yml:
--------------------------------------------------------------------------------
1 | services:
2 |   photon:
3 |     build:
4 |       context: .
5 |       dockerfile: Dockerfile
6 |       args:
7 |         - PHOTON_VERSION=${PHOTON_VERSION}
8 |     environment:
9 |       - UPDATE_STRATEGY=SEQUENTIAL
10 |       - UPDATE_INTERVAL=3m
11 |       # - LOG_LEVEL=DEBUG
12 |       # - FORCE_UPDATE=TRUE
13 |       # - REGION=andorra
14 |       # - BASE_URL=https://r2.koalasec.org/public
15 |       # - SKIP_MD5_CHECK=TRUE
16 |     volumes:
17 |       - photon_data:/photon/data
18 |     restart: unless-stopped
19 |     ports:
20 |       - "2322:2322"
21 | volumes:
22 |   photon_data:
23 |
--------------------------------------------------------------------------------
/src/utils/notify.py:
--------------------------------------------------------------------------------
1 | # notification module for apprise notifications
2 |
3 | import apprise
4 |
5 | from . import config
6 | from .logger import get_logger
7 |
8 | logging = get_logger()
9 |
10 |
11 | def send_notification(message: str, title: str = "Photon Status"):
12 |     apprise_urls = config.APPRISE_URLS
13 |     if not apprise_urls:
14 |         logging.info("No APPRISE_URLS set, skipping notification.")
15 |         return
16 | 
17 |     apobj = apprise.Apprise()
18 | 
19 |     for url in apprise_urls.split(","):
20 |         if url.strip():
21 |             apobj.add(url.strip())
22 | 
23 |     if len(apobj) == 0:
24 |         logging.warning("No valid Apprise URLs were found after processing the APPRISE_URLS variable.")
25 |         return
26 | 
27 |     if not apobj.notify(body=message, title=title):
28 |         logging.error("Failed to send notification to one or more Apprise targets.")
29 |     else:
30 |         logging.info("Successfully sent notification.")
31 |
--------------------------------------------------------------------------------
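Usage note for /src/utils/notify.py — a minimal smoke test sketch, assuming the project's dependencies are installed and it is run from the repository root. The `json://` target below is a placeholder, not a repo default:

```python
# Hypothetical local check for send_notification(); not part of the repo.
import os

# config.py reads APPRISE_URLS at import time, so set it before importing.
os.environ["APPRISE_URLS"] = "json://localhost:8080/notify"

from src.utils.logger import setup_logging
from src.utils.notify import send_notification

setup_logging()
send_notification("Test message from photon-docker", title="Photon Status")
```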
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: "[BUG]"
5 | labels: bug
6 | assignees: rtuszik
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 |
16 | **Expected behavior**
17 | A clear and concise description of what you expected to happen.
18 |
19 | ### **`photon-docker` configuration:**
20 | Please add your Docker Compose file and environment variables.
21 |
22 | ### System Info
23 | - **Host OS:**
24 | - **Host Type:** [e.g., Bare-metal, LXC, VM, Synology]
25 | - **Hardware details:**
26 | - **CPU:** [e.g., Intel Core i7-9700K]
27 | - **Available RAM:**
28 | - **Storage Type:** [e.g., SSD, NVMe, NFS, Samba]
29 | - **Storage Size:**
30 |
31 |
32 | **Debug Logs**
33 | Please provide any relevant logs. To get more detailed logs, you can set the `LOG_LEVEL` environment variable to `DEBUG` in your `docker-compose.yml` file.
34 |
--------------------------------------------------------------------------------
/src/utils/config.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | # USER CONFIG
4 | UPDATE_STRATEGY = os.getenv("UPDATE_STRATEGY", "SEQUENTIAL")
5 | UPDATE_INTERVAL = os.getenv("UPDATE_INTERVAL", "30d")
6 | REGION = os.getenv("REGION")
7 | FORCE_UPDATE = os.getenv("FORCE_UPDATE", "False").lower() in ("true", "1", "t")
8 | DOWNLOAD_MAX_RETRIES = os.getenv("DOWNLOAD_MAX_RETRIES", "3")
9 | FILE_URL = os.getenv("FILE_URL")
10 | PHOTON_PARAMS = os.getenv("PHOTON_PARAMS")
11 | JAVA_PARAMS = os.getenv("JAVA_PARAMS")
12 | LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
13 | BASE_URL = os.getenv("BASE_URL", "https://r2.koalasec.org/public").rstrip("/")
14 | SKIP_MD5_CHECK = os.getenv("SKIP_MD5_CHECK", "False").lower() in ("true", "1", "t")
15 | INITIAL_DOWNLOAD = os.getenv("INITIAL_DOWNLOAD", "True").lower() in ("true", "1", "t")
16 | APPRISE_URLS = os.getenv("APPRISE_URLS")
17 |
18 | # APP CONFIG
19 | PHOTON_DIR = "/photon"
20 | DATA_DIR = "/photon/data"
21 | PHOTON_DATA_DIR = os.path.join(DATA_DIR, "photon_data")
22 | TEMP_DIR = os.path.join(DATA_DIR, "temp")
23 | OS_NODE_DIR = os.path.join(PHOTON_DATA_DIR, "node_1")
24 |
--------------------------------------------------------------------------------
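A note on /src/utils/config.py: the boolean flags (FORCE_UPDATE, SKIP_MD5_CHECK, INITIAL_DOWNLOAD) accept "true", "1", or "t" in any case; every other value, including "yes", parses as False. A standalone sketch of the same parsing rule (`env_flag` is a hypothetical helper, not repo code):

```python
import os

def env_flag(name: str, default: str = "False") -> bool:
    # Mirrors config.py: only "true", "1", or "t" (case-insensitive) enable a flag.
    return os.getenv(name, default).lower() in ("true", "1", "t")

os.environ["FORCE_UPDATE"] = "TRUE"
print(env_flag("FORCE_UPDATE"))    # True
print(env_flag("SKIP_MD5_CHECK"))  # False -- unset, so the default "False" applies
os.environ["SKIP_MD5_CHECK"] = "yes"
print(env_flag("SKIP_MD5_CHECK"))  # False -- "yes" is not an accepted truthy value
```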
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | .env
3 |
4 | dev_cmd.md
5 |
6 | # Created by https://www.toptal.com/developers/gitignore/api/python
7 | # Edit at https://www.toptal.com/developers/gitignore?templates=python
8 |
9 | ### Python ###
10 | # Byte-compiled / optimized / DLL files
11 | __pycache__/
12 | *.py[cod]
13 | *$py.class
14 |
15 | # Distribution / packaging
16 | .Python
17 | build/
18 | develop-eggs/
19 | dist/
20 | downloads/
21 | eggs/
22 | .eggs/
23 | lib/
24 | lib64/
25 | parts/
26 | sdist/
27 | var/
28 | wheels/
29 | share/python-wheels/
30 | *.egg-info/
31 | .installed.cfg
32 | *.egg
33 | MANIFEST
34 |
35 |
36 | # Unit test / coverage reports
37 | htmlcov/
38 | .tox/
39 | .nox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *.cover
46 | *.py,cover
47 | .hypothesis/
48 | .pytest_cache/
49 | cover/
50 |
51 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
52 | __pypackages__/
53 |
54 | # Environments
55 | .env
56 | .venv
57 | env/
58 | venv/
59 | ENV/
60 | env.bak/
61 | venv.bak/
62 |
63 | # ruff
64 | .ruff_cache/
65 |
66 | # LSP config files
67 | pyrightconfig.json
68 |
69 |
70 |
--------------------------------------------------------------------------------
/src/updater.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | from .downloader import parallel_update, sequential_update
4 | from .utils import config
5 | from .utils.logger import get_logger, setup_logging
6 | from .utils.notify import send_notification
7 |
8 | logger = get_logger()
9 |
10 |
11 | def main():
12 |     logger.info("Starting update process...")
13 | 
14 |     try:
15 |         if config.UPDATE_STRATEGY == "PARALLEL":
16 |             logger.info("Running parallel update...")
17 |             parallel_update()
18 |         elif config.UPDATE_STRATEGY == "SEQUENTIAL":
19 |             logger.info("Running sequential update...")
20 |             sequential_update()
21 |         else:
22 |             logger.error(f"Unknown update strategy: {config.UPDATE_STRATEGY}")
23 |             sys.exit(1)
24 | 
25 |         logger.info("Update completed successfully")
26 |         send_notification("Photon Index Updated Successfully")
27 | 
28 |     except Exception as e:
29 |         error_msg = f"Update failed: {e!s}"
30 |         logger.exception(error_msg)
31 |         send_notification(f"Photon Update Failed - {error_msg}")
32 |         sys.exit(1)
33 | 
34 | 
35 | if __name__ == "__main__":
36 |     setup_logging()
37 |     main()
38 |
--------------------------------------------------------------------------------
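/src/updater.py runs a single update cycle and exits; the periodic loop lives in src/process_manager.py, which is not included in this section. A hedged sketch of how such a loop could look using the `schedule` dependency declared in pyproject.toml (the 30-day interval simply mirrors UPDATE_INTERVAL=30d; the repo's actual loop may differ):

```python
# Illustrative scheduling loop only; the real one is in src/process_manager.py.
import time

import schedule

from src.updater import main as run_update

schedule.every(30).days.do(run_update)  # e.g. UPDATE_INTERVAL=30d

while True:
    schedule.run_pending()
    time.sleep(60)
```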
/src/utils/validate_config.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | from . import config
4 | from .logger import get_logger
5 | from .regions import is_valid_region
6 |
7 | logging = get_logger()
8 |
9 |
10 | def validate_config():
11 |     logging.info("Validating environment variables...")
12 |     error_messages = []
13 | 
14 |     valid_strategies = ["SEQUENTIAL", "PARALLEL", "DISABLED"]
15 |     if config.UPDATE_STRATEGY not in valid_strategies:
16 |         error_messages.append(
17 |             f"Invalid UPDATE_STRATEGY: '{config.UPDATE_STRATEGY}'. Must be one of {valid_strategies}."
18 |         )
19 | 
20 |     if not re.match(r"^\d+[dhm]$", config.UPDATE_INTERVAL):
21 |         error_messages.append(
22 |             f"Invalid UPDATE_INTERVAL format: '{config.UPDATE_INTERVAL}'. Expected format like '30d', '12h', or '30m'."
23 |         )
24 | 
25 |     if config.REGION and not is_valid_region(config.REGION):
26 |         error_messages.append(f"Invalid REGION: '{config.REGION}'. Must be a valid continent, sub-region, or 'planet'.")
27 | 
28 |     if error_messages:
29 |         full_error_message = "Configuration validation failed:\n" + "\n".join(error_messages)
30 |         raise ValueError(full_error_message)
31 | 
32 |     logging.info("Environment variables are valid.")
33 |
--------------------------------------------------------------------------------
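validate_config() only checks that UPDATE_INTERVAL matches `^\d+[dhm]$`; converting the value into an actual duration happens elsewhere, in code not shown in this section. A hypothetical `interval_to_seconds` helper illustrating what that format encodes:

```python
# Hypothetical parser for the UPDATE_INTERVAL shape validated above.
import re

_UNIT_SECONDS = {"d": 86400, "h": 3600, "m": 60}

def interval_to_seconds(interval: str) -> int:
    # Accepts the same shape validate_config() enforces: digits + d/h/m.
    match = re.fullmatch(r"(\d+)([dhm])", interval)
    if not match:
        raise ValueError(f"Invalid UPDATE_INTERVAL: {interval!r}")
    value, unit = match.groups()
    return int(value) * _UNIT_SECONDS[unit]

assert interval_to_seconds("30d") == 30 * 86400
assert interval_to_seconds("12h") == 12 * 3600
assert interval_to_seconds("30m") == 30 * 60
```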
/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | PUID=${PUID:-9011}
4 | PGID=${PGID:-9011}
5 |
6 | CURRENT_UID=$(id -u photon 2>/dev/null || echo "0")
7 | CURRENT_GID=$(id -g photon 2>/dev/null || echo "0")
8 |
9 | if [ "$CURRENT_GID" != "$PGID" ]; then
10 | echo "Updating photon group GID from $CURRENT_GID to $PGID"
11 | groupmod -o -g "$PGID" photon
12 | echo "Updating ownership of files from GID $CURRENT_GID to $PGID"
13 | find / -group "$CURRENT_GID" -exec chgrp -h "$PGID" {} \; 2>/dev/null
14 | fi
15 |
16 | if [ "$CURRENT_UID" != "$PUID" ]; then
17 | echo "Updating photon user UID from $CURRENT_UID to $PUID"
18 | usermod -o -u "$PUID" photon
19 | echo "Updating ownership of files from UID $CURRENT_UID to $PUID"
20 | find / -user "$CURRENT_UID" -exec chown -h "$PUID" {} \; 2>/dev/null
21 | fi
22 |
23 | if [ -d "/photon/data/photon_data/node_1" ]; then
24 | if [ -d "/photon/data/node_1" ]; then
25 | echo "Removing old index..."
26 | rm -rf /photon/data/node_1
27 | echo "Cleanup complete: removed /photon/data/node_1"
28 | fi
29 | elif [ -d "/photon/data/node_1" ]; then
30 | echo "Migrating data structure..."
31 | mkdir -p /photon/data/photon_data
32 | mv /photon/data/node_1 /photon/data/photon_data/
33 | echo "Migration complete: moved node_1 to /photon/data/photon_data/"
34 | fi
35 |
36 | chown -R photon:photon /photon
37 | exec gosu photon "$@"
38 |
--------------------------------------------------------------------------------
/src/utils/logger.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import logging.handlers
3 | import sys
4 | from pathlib import Path
5 |
6 | from . import config
7 |
8 | LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
9 |
10 |
11 | def setup_logging() -> None:
12 |     root_logger = logging.getLogger()
13 | 
14 |     if root_logger.handlers:
15 |         return
16 | 
17 |     root_logger.setLevel(config.LOG_LEVEL)
18 | 
19 |     formatter = logging.Formatter(LOG_FORMAT)
20 | 
21 |     console_handler = logging.StreamHandler(sys.stdout)
22 |     console_handler.setLevel(config.LOG_LEVEL)
23 |     console_handler.setFormatter(formatter)
24 |     root_logger.addHandler(console_handler)
25 | 
26 |     try:
27 |         log_dir = Path(config.DATA_DIR) / "logs"
28 |         log_dir.mkdir(exist_ok=True)
29 | 
30 |         file_handler = logging.handlers.RotatingFileHandler(
31 |             log_dir / "photon.log",
32 |             maxBytes=50 * 1024 * 1024,  # 50MB
33 |             backupCount=5,
34 |         )
35 |         file_handler.setLevel(logging.INFO)
36 |         file_handler.setFormatter(formatter)
37 |         root_logger.addHandler(file_handler)
38 |     except (OSError, PermissionError):
39 |         pass
40 | 
41 |     logging.getLogger("urllib3").setLevel(logging.WARNING)
42 |     logging.getLogger("requests").setLevel(logging.WARNING)
43 | 
44 | 
45 | def get_logger(name: str = "") -> logging.Logger:
46 |     if name:
47 |         return logging.getLogger(name)
48 |     return logging.getLogger()
49 |
--------------------------------------------------------------------------------
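setup_logging() is idempotent (it returns early once the root logger has handlers), and the rotating file handler is best-effort: if /photon/data is not writable, logging silently falls back to stdout only. A minimal usage sketch:

```python
from src.utils.logger import get_logger, setup_logging

setup_logging()  # safe to call repeatedly; later calls return early
log = get_logger(__name__)
log.info("Goes to stdout, and to /photon/data/logs/photon.log when writable")
```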
/Makefile:
--------------------------------------------------------------------------------
1 | PHOTON_VERSION := $(shell cat .last_release | tr -d '[:space:]')
2 |
3 | .PHONY: help check lint format typecheck deadcode rebuild clean
4 |
5 | help:
6 | @echo "Available targets:"
7 | @echo " make check - Run all quality checks (lint, format, typecheck, deadcode)"
8 | @echo " make lint - Run ruff linter with auto-fix"
9 | @echo " make format - Run ruff formatter"
10 | @echo " make typecheck - Run ty type checker"
11 | @echo " make deadcode - Run vulture dead code checker"
12 | @echo " make rebuild - Build and run Docker containers (with prompts)"
13 | @echo " make clean - Stop and remove Docker containers"
14 |
15 | check: lint format typecheck deadcode
16 |
17 | lint:
18 | uv run ruff check --fix
19 |
20 | format:
21 | uv run ruff format
22 |
23 | typecheck:
24 | uv run ty check
25 |
26 | deadcode:
27 | uv run vulture --min-confidence 100 --exclude ".venv" .
28 |
29 | rebuild:
30 | @read -p "Rebuild without cache? (y/n): " nocache; \
31 | read -p "Remove volumes before rebuild? (y/n): " volumes; \
32 | if [ "$$volumes" = "y" ]; then \
33 | docker compose -f docker-compose.build.yml down -v; \
34 | else \
35 | docker compose -f docker-compose.build.yml down; \
36 | fi; \
37 | if [ "$$nocache" = "y" ]; then \
38 | PHOTON_VERSION=$(PHOTON_VERSION) docker compose -f docker-compose.build.yml build --no-cache; \
39 | else \
40 | PHOTON_VERSION=$(PHOTON_VERSION) docker compose -f docker-compose.build.yml build; \
41 | fi; \
42 | PHOTON_VERSION=$(PHOTON_VERSION) docker compose -f docker-compose.build.yml up
43 |
44 | clean:
45 | docker compose -f docker-compose.build.yml down
46 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM eclipse-temurin:21.0.9_10-jre-noble
2 |
3 | # install astral uv
4 | COPY --from=ghcr.io/astral-sh/uv:0.8 /uv /usr/local/bin/
5 |
6 | ARG DEBIAN_FRONTEND=noninteractive
7 | ARG PHOTON_VERSION
8 | ARG PUID=9011
9 | ARG PGID=9011
10 |
11 | RUN apt-get update \
12 |     && apt-get -y install --no-install-recommends \
13 |     lbzip2 \
14 |     gosu \
15 |     python3.12 \
16 |     curl \
17 |     && rm -rf /var/lib/apt/lists/*
18 | 
19 | RUN groupadd -g ${PGID} -o photon && \
20 |     useradd -l -u ${PUID} -g photon -o -s /bin/false -m -d /photon photon
21 | 
22 | WORKDIR /photon
23 | 
24 | RUN mkdir -p /photon/data/
25 | 
26 | ADD https://github.com/komoot/photon/releases/download/${PHOTON_VERSION}/photon-opensearch-${PHOTON_VERSION}.jar /photon/photon.jar
27 | 
28 | COPY src/ ./src/
29 | COPY entrypoint.sh .
30 | COPY pyproject.toml .
31 | COPY uv.lock .
32 | RUN gosu photon uv sync --locked
33 | 
34 | 
35 | RUN chmod 644 /photon/photon.jar && \
36 |     chown -R photon:photon /photon
37 | 
38 | LABEL org.opencontainers.image.title="photon-docker" \
39 |     org.opencontainers.image.description="Unofficial docker image for the Photon Geocoder" \
40 |     org.opencontainers.image.url="https://github.com/rtuszik/photon-docker" \
41 |     org.opencontainers.image.source="https://github.com/rtuszik/photon-docker" \
42 |     org.opencontainers.image.documentation="https://github.com/rtuszik/photon-docker#readme"
43 | 
44 | EXPOSE 2322
45 | 
46 | HEALTHCHECK --interval=30s --timeout=10s --start-period=240s --retries=3 \
47 |     CMD curl -f http://localhost:2322/status || exit 1
48 | 
49 | ENTRYPOINT ["/bin/sh", "entrypoint.sh"]
50 | CMD ["uv", "run", "-m", "src.process_manager"]
51 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "photon-docker"
3 | version = "1.2.1"
4 | description = "Unofficial docker image for the Photon Geocoder"
5 | readme = "README.md"
6 | requires-python = ">=3.12"
7 | dependencies = [
8 | "apprise>=1.9.3",
9 | "psutil>=5.9.0",
10 | "python-dateutil>=2.9.0.post0",
11 | "requests==2.32.5",
12 | "schedule>=1.2.2",
13 | "tqdm==4.67.1",
14 | ]
15 |
16 | [dependency-groups]
17 | dev = ["bandit>=1.8.6", "ruff>=0.12.7", "ty>=0.0.1a16", "vulture>=2.14"]
18 |
19 |
20 | [tool.ruff]
21 | indent-width = 4
22 | line-length = 120
23 |
24 |
25 | [tool.ruff.lint]
26 | # select = ["ALL"]
27 | ignore = [
28 |     "ANN",  # flake8-annotations
29 |     "COM",  # flake8-commas
30 |     "C90",  # mccabe complexity
31 |     "DJ",   # django
32 |     "EXE",  # flake8-executable
33 |     "BLE",  # blind except
34 |     "PTH",  # flake8-pathlib
35 |     "T10",  # debugger
36 |     "TID",  # flake8-tidy-imports
37 |     "D100", # missing docstring in public module
38 |     "D101", # missing docstring in public class
39 |     "D102", # missing docstring in public method
40 |     "D103", # missing docstring in public function
41 |     "D104", # missing docstring in public package
42 |     "D105", # missing docstring in magic method
43 |     "D106", # missing docstring in public nested class
44 |     "D107", # missing docstring in __init__
45 |     "D213",
46 |     "D203",
47 |     "D400",
48 |     "D415",
49 |     "G004",
50 |     "PLR2004",
51 |     "E501", # line too long
52 |     "TRY",
53 |     "SIM105", # faster without contextlib
54 | ]
55 | 
56 | fixable = ["ALL"]
57 | unfixable = []
58 | 
59 | # Allow unused variables when underscore-prefixed.
60 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
61 | 
62 | [tool.ruff.format]
63 | quote-style = "double"
64 | indent-style = "space"
65 | line-ending = "auto"
66 | 
67 | docstring-code-format = false
68 | 
--------------------------------------------------------------------------------
/src/entrypoint.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | from .downloader import InsufficientSpaceError, parallel_update, sequential_update
5 | from .utils import config
6 | from .utils.logger import get_logger, setup_logging
7 | from .utils.notify import send_notification
8 | from .utils.validate_config import validate_config
9 |
10 | logger = get_logger()
11 |
12 |
13 | def main():
14 |     send_notification("Photon-Docker Initializing")
15 | 
16 |     logger.debug("Entrypoint setup called")
17 |     logger.info("=== CONFIG VARIABLES ===")
18 |     logger.info(f"UPDATE_STRATEGY: {config.UPDATE_STRATEGY}")
19 |     logger.info(f"UPDATE_INTERVAL: {config.UPDATE_INTERVAL}")
20 |     logger.info(f"REGION: {config.REGION}")
21 |     logger.info(f"FORCE_UPDATE: {config.FORCE_UPDATE}")
22 |     logger.info(f"DOWNLOAD_MAX_RETRIES: {config.DOWNLOAD_MAX_RETRIES}")
23 |     # TODO: some people may use HTTP Basic Auth in FILE_URL, so only debug-log it for now and possibly revisit later. The same goes for BASE_URL, though credentials there are less likely.
24 |     logger.debug(f"FILE_URL: {config.FILE_URL}")
25 |     logger.info(f"PHOTON_PARAMS: {config.PHOTON_PARAMS}")
26 |     logger.info(f"JAVA_PARAMS: {config.JAVA_PARAMS}")
27 |     logger.info(f"LOG_LEVEL: {config.LOG_LEVEL}")
28 |     logger.info(f"BASE_URL: {config.BASE_URL}")
29 |     logger.info(f"SKIP_MD5_CHECK: {config.SKIP_MD5_CHECK}")
30 |     logger.info(f"INITIAL_DOWNLOAD: {config.INITIAL_DOWNLOAD}")
31 | 
32 |     logger.info("=== END CONFIG VARIABLES ===")
33 | 
34 |     try:
35 |         validate_config()
36 |     except ValueError as e:
37 |         logger.error(f"Stopping due to invalid configuration.\n{e}")
38 |         sys.exit(1)
39 | 
40 |     if config.FORCE_UPDATE:
41 |         logger.info("Starting forced update")
42 |         try:
43 |             if config.UPDATE_STRATEGY == "PARALLEL":
44 |                 parallel_update()
45 |             else:
46 |                 sequential_update()
47 |         except InsufficientSpaceError as e:
48 |             logger.error(f"Cannot proceed with force update: {e}")
49 |             send_notification(f"Photon-Docker force update failed: {e}")
50 |             sys.exit(75)
51 |         except Exception:
52 |             logger.error("Force update failed")
53 |             raise
54 |     elif not os.path.isdir(config.OS_NODE_DIR):
55 |         if not config.INITIAL_DOWNLOAD:
56 |             logger.warning("Initial download is disabled but no existing Photon index was found.")
57 |             return
58 |         logger.info("Starting initial download using sequential strategy")
59 |         logger.info("Note: Initial download will use sequential strategy regardless of config setting")
60 |         try:
61 |             sequential_update()
62 |         except InsufficientSpaceError as e:
63 |             logger.error(f"Cannot proceed: {e}")
64 |             send_notification(f"Photon-Docker cannot start: {e}")
65 |             sys.exit(75)
66 |     else:
67 |         logger.info("Existing index found, skipping download")
68 | 
69 | 
70 | if __name__ == "__main__":
71 |     setup_logging()
72 |     main()
73 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: checks
2 | on:
3 |   pull_request:
4 |   workflow_dispatch:
5 | 
6 | jobs:
7 |   setup:
8 |     runs-on: ubuntu-latest
9 |     steps:
10 |       - uses: actions/checkout@v5
11 |       - name: "Set up Python"
12 |         uses: actions/setup-python@v6
13 |         with:
14 |           python-version-file: "pyproject.toml"
15 |       - name: Install uv
16 |         uses: astral-sh/setup-uv@v7
17 |         with:
18 |           enable-cache: true
19 |           version: 0.8.*
20 |       - name: Install dependencies
21 |         run: uv sync --locked
22 |       - name: Cache dependencies
23 |         uses: actions/cache/save@v4
24 |         with:
25 |           path: |
26 |             .venv
27 |             ~/.cache/uv
28 |           key: ${{ runner.os }}-uv-${{ hashFiles('uv.lock') }}
29 | 
30 |   lint:
31 |     runs-on: ubuntu-latest
32 |     needs: setup
33 |     steps:
34 |       - uses: actions/checkout@v5
35 |       - name: "Set up Python"
36 |         uses: actions/setup-python@v6
37 |         with:
38 |           python-version-file: "pyproject.toml"
39 |       - name: Install uv
40 |         uses: astral-sh/setup-uv@v7
41 |         with:
42 |           enable-cache: true
43 |           version: 0.8.*
44 |       - name: Restore dependencies
45 |         uses: actions/cache/restore@v4
46 |         with:
47 |           path: |
48 |             .venv
49 |             ~/.cache/uv
50 |           key: ${{ runner.os }}-uv-${{ hashFiles('uv.lock') }}
51 |           fail-on-cache-miss: true
52 |       - name: Run linting
53 |         run: |
54 |           uv run ruff check --fix
55 |           uv run ruff format
56 | 
57 |   typecheck:
58 |     runs-on: ubuntu-latest
59 |     needs: setup
60 |     steps:
61 |       - uses: actions/checkout@v5
62 |       - name: "Set up Python"
63 |         uses: actions/setup-python@v6
64 |         with:
65 |           python-version-file: "pyproject.toml"
66 |       - name: Install uv
67 |         uses: astral-sh/setup-uv@v7
68 |         with:
69 |           enable-cache: true
70 |           version: 0.8.*
71 |       - name: Restore dependencies
72 |         uses: actions/cache/restore@v4
73 |         with:
74 |           path: |
75 |             .venv
76 |             ~/.cache/uv
77 |           key: ${{ runner.os }}-uv-${{ hashFiles('uv.lock') }}
78 |           fail-on-cache-miss: true
79 |       - name: Run type checking
80 |         run: uv run ty check
81 |   vulture:
82 |     runs-on: ubuntu-latest
83 |     needs: setup
84 |     steps:
85 |       - uses: actions/checkout@v5
86 |       - name: "Set up Python"
87 |         uses: actions/setup-python@v6
88 |         with:
89 |           python-version-file: "pyproject.toml"
90 |       - name: Install uv
91 |         uses: astral-sh/setup-uv@v7
92 |         with:
93 |           enable-cache: true
94 |           version: 0.8.*
95 |       - name: Restore dependencies
96 |         uses: actions/cache/restore@v4
97 |         with:
98 |           path: |
99 |             .venv
100 |             ~/.cache/uv
101 |           key: ${{ runner.os }}-uv-${{ hashFiles('uv.lock') }}
102 |           fail-on-cache-miss: true
103 |       - name: Run vulture
104 |         run: uv run vulture --min-confidence 100 --exclude ".venv" .
105 |
--------------------------------------------------------------------------------
/src/utils/regions.py:
--------------------------------------------------------------------------------
1 | REGION_MAPPING = {
2 | "planet": {
3 | "type": "planet",
4 | "continent": None,
5 | "available": True,
6 | },
7 | "africa": {
8 | "type": "continent",
9 | "continent": "africa",
10 | "available": True,
11 | },
12 | "asia": {
13 | "type": "continent",
14 | "continent": "asia",
15 | "available": True,
16 | },
17 | "australia-oceania": {
18 | "type": "continent",
19 | "continent": "australia-oceania",
20 | "available": True,
21 | },
22 | "europe": {
23 | "type": "continent",
24 | "continent": "europe",
25 | "available": True,
26 | },
27 | "north-america": {
28 | "type": "continent",
29 | "continent": "north-america",
30 | "available": True,
31 | },
32 | "south-america": {
33 | "type": "continent",
34 | "continent": "south-america",
35 | "available": True,
36 | },
37 | "india": {"type": "sub-region", "continent": "asia", "available": True},
38 | "japan": {"type": "sub-region", "continent": "asia", "available": True},
39 | "andorra": {"type": "sub-region", "continent": "europe", "available": True},
40 | "austria": {"type": "sub-region", "continent": "europe", "available": True},
41 | "denmark": {"type": "sub-region", "continent": "europe", "available": True},
42 | "france-monacco": {"type": "sub-region", "continent": "europe", "available": True},
43 | "germany": {"type": "sub-region", "continent": "europe", "available": True},
44 | "luxemburg": {"type": "sub-region", "continent": "europe", "available": True},
45 | "netherlands": {"type": "sub-region", "continent": "europe", "available": True},
46 | "russia": {"type": "sub-region", "continent": "europe", "available": True},
47 | "slovakia": {"type": "sub-region", "continent": "europe", "available": True},
48 | "spain": {"type": "sub-region", "continent": "europe", "available": True},
49 | "canada": {"type": "sub-region", "continent": "north-america", "available": True},
50 | "mexico": {"type": "sub-region", "continent": "north-america", "available": True},
51 | "usa": {"type": "sub-region", "continent": "north-america", "available": True},
52 | "argentina": {"type": "sub-region", "continent": "south-america", "available": True},
53 | }
54 |
55 | REGION_ALIASES = {
56 | "in": "india",
57 | "jp": "japan",
58 | "ad": "andorra",
59 | "at": "austria",
60 | "dk": "denmark",
61 | "fr": "france-monacco",
62 | "de": "germany",
63 | "lu": "luxemburg",
64 | "nl": "netherlands",
65 | "ru": "russia",
66 | "sk": "slovakia",
67 | "es": "spain",
68 | "ca": "canada",
69 | "mx": "mexico",
70 | "us": "usa",
71 | "ar": "argentina",
72 | "united states": "usa",
73 | "united states of america": "usa",
74 | "deutschland": "germany",
75 | "france": "france-monacco",
76 | "monaco": "france-monacco",
77 | "luxembourg": "luxemburg",
78 | "the netherlands": "netherlands",
79 | "holland": "netherlands",
80 | "espana": "spain",
81 | "españa": "spain",
82 | }
83 |
84 |
85 | def normalize_region(region: str) -> str | None:
86 | if not region:
87 | return None
88 |
89 | region_lower = region.lower().strip()
90 |
91 | if region_lower in REGION_MAPPING:
92 | return region_lower
93 |
94 | if region_lower in REGION_ALIASES:
95 | return REGION_ALIASES[region_lower]
96 |
97 | return None
98 |
99 |
100 | def get_region_info(region: str) -> dict | None:
101 | normalized = normalize_region(region)
102 | return REGION_MAPPING.get(normalized) if normalized else None
103 |
104 |
105 | def is_valid_region(region: str) -> bool:
106 | return get_region_info(region) is not None
107 |
--------------------------------------------------------------------------------
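A quick illustration of the region helpers: inputs are lowercased and trimmed, and aliases resolve to the repo's canonical keys (including its "france-monacco" and "luxemburg" spellings, which are kept as-is because they appear in the download URLs):

```python
from src.utils.regions import get_region_info, is_valid_region, normalize_region

print(normalize_region("  Deutschland "))  # 'germany' -- alias, case/whitespace tolerant
print(normalize_region("fr"))              # 'france-monacco' (the repo's canonical spelling)
print(get_region_info("monaco"))           # {'type': 'sub-region', 'continent': 'europe', 'available': True}
print(is_valid_region("atlantis"))         # False
```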
/src/check_remote.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 |
4 | import requests
5 | from dateutil.parser import parse as parsedate
6 | from requests.exceptions import RequestException
7 |
8 | from src.utils import config
9 | from src.utils.logger import get_logger
10 | from src.utils.regions import get_region_info, normalize_region
11 |
12 | logging = get_logger()
13 |
14 |
15 | def get_remote_file_size(url: str) -> int:
16 |     try:
17 |         response = requests.head(url, allow_redirects=True, timeout=15)
18 |         response.raise_for_status()
19 | 
20 |         content_length = response.headers.get("content-length")
21 |         if content_length:
22 |             return int(content_length)
23 | 
24 |         response = requests.get(url, headers={"Range": "bytes=0-0"}, stream=True, timeout=15)
25 |         response.raise_for_status()
26 | 
27 |         content_range = response.headers.get("content-range")
28 |         if content_range and "/" in content_range:
29 |             total_size = content_range.split("/")[-1]
30 |             if total_size.isdigit():
31 |                 return int(total_size)
32 | 
33 |     except Exception as e:
34 |         logging.warning(f"Could not determine remote file size for {url}: {e}")
35 | 
36 |     return 0
37 | 
38 | 
39 | def get_remote_time(remote_url: str):
40 |     try:
41 |         r = requests.head(remote_url, timeout=10)
42 |         r.raise_for_status()
43 |         urltime = r.headers.get("last-modified")
44 |         if urltime:
45 |             return parsedate(urltime)
46 |     except RequestException as e:
47 |         logging.exception(f"Error fetching remote URL: {e}")
48 |     return None
49 | 
50 | 
51 | def get_local_time(local_path: str):
52 |     marker_file = os.path.join(config.DATA_DIR, ".photon-index-updated")
53 |     if os.path.exists(marker_file):
54 |         return os.path.getmtime(marker_file)
55 | 
56 |     if not os.path.exists(local_path):
57 |         return 0.0
58 |     return os.path.getmtime(local_path)
59 | 
60 | 
61 | def compare_mtime() -> bool:
62 |     if config.REGION:
63 |         normalized = normalize_region(config.REGION)
64 |         region_info = get_region_info(config.REGION)
65 |         if not region_info:
66 |             logging.error(f"Unknown region: {config.REGION}")
67 |             return False
68 | 
69 |         region_type = region_info["type"]
70 | 
71 |         if region_type == "planet":
72 |             index_file = "/photon-db-planet-0.7OS-latest.tar.bz2"
73 |         elif region_type == "continent":
74 |             index_file = f"/{normalized}/photon-db-{normalized}-0.7OS-latest.tar.bz2"
75 |         elif region_type == "sub-region":
76 |             continent = region_info["continent"]
77 |             index_file = f"/{continent}/{normalized}/photon-db-{normalized}-0.7OS-latest.tar.bz2"
78 |         else:
79 |             logging.error(f"Invalid region type: {region_type}")
80 |             return False
81 |     else:
82 |         index_file = "/photon-db-planet-0.7OS-latest.tar.bz2"
83 | 
84 |     remote_url = config.BASE_URL + index_file
85 | 
86 |     remote_dt = get_remote_time(remote_url)
87 | 
88 |     if remote_dt is None:
89 |         logging.warning("Could not determine remote time. Assuming no update is needed.")
90 |         return False
91 | 
92 |     marker_file = os.path.join(config.DATA_DIR, ".photon-index-updated")
93 |     using_marker_file = os.path.exists(marker_file)
94 | 
95 |     local_timestamp = get_local_time(config.OS_NODE_DIR)
96 |     local_dt = datetime.datetime.fromtimestamp(local_timestamp, tz=datetime.UTC)
97 | 
98 |     logging.debug(f"Remote index time: {remote_dt}")
99 |     logging.debug(f"Local index time: {local_dt}")
100 | 
101 |     if using_marker_file:
102 |         logging.debug("Using marker file timestamp - comparing directly without grace period")
103 |         return remote_dt > local_dt
104 |     else:
105 |         logging.debug("Using directory timestamp - applying 144-hour grace period")
106 |         grace_period = datetime.timedelta(hours=144)
107 |         return remote_dt > (local_dt + grace_period)
108 |
--------------------------------------------------------------------------------
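The decision at the end of compare_mtime(), isolated for clarity: when the .photon-index-updated marker file exists, timestamps are compared directly; without it the directory mtime is unreliable, so a 144-hour grace period is applied. A standalone sketch (not repo code):

```python
import datetime

def update_needed(remote_dt, local_dt, have_marker_file):
    # Mirrors compare_mtime(): direct comparison with the marker file,
    # 144h grace period when only the directory timestamp is available.
    if have_marker_file:
        return remote_dt > local_dt
    return remote_dt > local_dt + datetime.timedelta(hours=144)

now = datetime.datetime.now(tz=datetime.UTC)
two_days_old = now - datetime.timedelta(days=2)
print(update_needed(now, two_days_old, True))   # True: marker compares directly
print(update_needed(now, two_days_old, False))  # False: inside the 144h grace period
```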
/.github/workflows/full-test.yml:
--------------------------------------------------------------------------------
1 | name: Container Test
2 |
3 | on:
4 |   pull_request:
5 |     branches:
6 |       - main
7 |     paths:
8 |       - "Dockerfile"
9 |       - "src/**"
10 |       - "docker-compose*.yml"
11 |       - ".last_release"
12 |       - "pyproject.toml"
13 |       - "uv.lock"
14 | 
15 | jobs:
16 |   test-container:
17 |     runs-on: ubuntu-latest
18 |     steps:
19 |       - name: Checkout Repository
20 |         uses: actions/checkout@v5
21 | 
22 |       - name: Set up Docker Buildx
23 |         uses: docker/setup-buildx-action@v3
24 | 
25 |       - name: Read Photon version from .last_release
26 |         id: photon_version
27 |         run: |
28 |           PHOTON_VERSION=$(cat .last_release | tr -d '[:space:]')
29 |           if [[ -z "$PHOTON_VERSION" || ! "$PHOTON_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
30 |             echo "Error: .last_release is missing, empty, or contains an invalid version: '$PHOTON_VERSION'"
31 |             exit 1
32 |           fi
33 |           echo "PHOTON_VERSION=$PHOTON_VERSION" >> "$GITHUB_ENV"
34 |           echo "Photon Version: $PHOTON_VERSION"
35 | 
36 |       - name: Build test image
37 |         uses: docker/build-push-action@v6
38 |         with:
39 |           context: .
40 |           file: ./Dockerfile
41 |           build-args: |
42 |             PHOTON_VERSION=${{ env.PHOTON_VERSION }}
43 |           push: false
44 |           load: true
45 |           tags: photon-test:pr-${{ github.event.pull_request.number }}
46 |           platforms: linux/amd64
47 |           cache-from: type=gha
48 |           cache-to: type=gha,mode=max
49 | 
50 |       - name: Start container
51 |         run: |
52 |           docker run -d \
53 |             --name photon-test-pr-${{ github.event.pull_request.number }} \
54 |             -e REGION=andorra \
55 |             -e UPDATE_STRATEGY=DISABLED \
56 |             photon-test:pr-${{ github.event.pull_request.number }}
57 | 
58 |       - name: Wait for container to be healthy
59 |         run: |
60 |           echo "Waiting for container to become healthy (timeout: 6 minutes)..."
61 |           CONTAINER_NAME=photon-test-pr-${{ github.event.pull_request.number }}
62 | 
63 |           docker logs -f $CONTAINER_NAME &
64 |           LOGS_PID=$!
65 | 
66 |           SECONDS=0
67 |           TIMEOUT=360
68 | 
69 |           while [ $SECONDS -lt $TIMEOUT ]; do
70 |             HEALTH_STATUS=$(docker inspect --format='{{.State.Health.Status}}' $CONTAINER_NAME 2>/dev/null || echo "unknown")
71 | 
72 |             if [ "$HEALTH_STATUS" = "healthy" ]; then
73 |               echo "Container is healthy after $SECONDS seconds"
74 |               kill $LOGS_PID 2>/dev/null || true
75 |               exit 0
76 |             fi
77 | 
78 |             echo "Health status: $HEALTH_STATUS (elapsed: ${SECONDS}s)"
79 |             sleep 10
80 |             SECONDS=$((SECONDS + 10))
81 |           done
82 | 
83 |           kill $LOGS_PID 2>/dev/null || true
84 |           echo "Container failed to become healthy within $TIMEOUT seconds"
85 |           docker logs $CONTAINER_NAME
86 |           exit 1
87 | 
88 |       - name: Cleanup
89 |         if: always()
90 |         run: |
91 |           docker stop photon-test-pr-${{ github.event.pull_request.number }} || true
92 |           docker rm photon-test-pr-${{ github.event.pull_request.number }} || true
93 |           docker rmi photon-test:pr-${{ github.event.pull_request.number }} || true
94 | 
95 |       - name: Output summary
96 |         if: always()
97 |         run: |
98 |           echo "## Container Test Summary" >> $GITHUB_STEP_SUMMARY
99 |           echo "- **PR Number:** ${{ github.event.pull_request.number }}" >> $GITHUB_STEP_SUMMARY
100 |           echo "- **Photon Version:** ${{ env.PHOTON_VERSION }}" >> $GITHUB_STEP_SUMMARY
101 |           echo "- **Status:** ${{ job.status }}" >> $GITHUB_STEP_SUMMARY
102 |
--------------------------------------------------------------------------------
/.github/workflows/check-releases.yml:
--------------------------------------------------------------------------------
1 | name: Check for New Releases and Propose Update
2 |
3 | on:
4 |   schedule:
5 |     - cron: "0 * * * *"
6 |   workflow_dispatch:
7 | 
8 | jobs:
9 |   check_and_propose_update:
10 |     runs-on: ubuntu-latest
11 |     permissions:
12 |       contents: write
13 |       pull-requests: write
14 | 
15 |     steps:
16 |       - name: Checkout Repository
17 |         uses: actions/checkout@v5
18 | 
19 |       - name: Check for new Photon release
20 |         id: check_release
21 |         run: |
22 |           latest_release=$(curl -s https://github.com/komoot/photon/releases.atom | grep '<title>' | sed -n '2p' | sed -E 's/.*Release ([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
23 |           if [ -z "$latest_release" ]; then
24 |             echo "Error: Failed to fetch the latest Photon release version."
25 |             exit 1
26 |           else
27 |             echo "Latest Photon release version: $latest_release"
28 |             echo "latest_release_version=${latest_release}" >> "$GITHUB_ENV"
29 |           fi
30 | 
31 |       - name: Get last processed release from file
32 |         id: get_last_release
33 |         run: |
34 |           if [ -f .last_release ]; then
35 |             current_version_in_file=$(cat .last_release)
36 |             echo "Current version in .last_release file: $current_version_in_file"
37 |             echo "last_processed_version=$current_version_in_file" >> "$GITHUB_ENV"
38 |           else
39 |             echo ".last_release file not found."
40 |             exit 1
41 |           fi
42 | 
43 |       - name: Determine if update is needed
44 |         id: prepare_update
45 |         run: |
46 |           if [[ -n "${{ env.latest_release_version }}" && "${{ env.latest_release_version }}" != "${{ env.last_processed_version }}" ]]; then
47 |             echo "New version found: ${{ env.latest_release_version }}. (Previous: ${{ env.last_processed_version }})"
48 |             {
49 |               echo "update_needed=true"
50 |               echo "new_version=${{ env.latest_release_version }}"
51 |               echo "new_branch_name=update-photon-${{ env.latest_release_version }}"
52 |             } >> "$GITHUB_OUTPUT"
53 |           else
54 |             echo "No new Photon release detected or version is already up-to-date. Latest fetched: '${{ env.latest_release_version }}', last processed: '${{ env.last_processed_version }}'."
55 |             {
56 |               echo "update_needed=false"
57 |               echo "new_version=${{ env.last_processed_version }}"
58 |             } >> "$GITHUB_OUTPUT"
59 |           fi
60 | 
61 |       - name: Update release file(s) locally
62 |         if: steps.prepare_update.outputs.update_needed == 'true'
63 |         run: |
64 |           echo "Updating .last_release to ${{ steps.prepare_update.outputs.new_version }}"
65 |           echo "${{ steps.prepare_update.outputs.new_version }}" > .last_release
66 | 
67 |       - name: Create Pull Request
68 |         if: steps.prepare_update.outputs.update_needed == 'true'
69 |         uses: peter-evans/create-pull-request@v7
70 |         with:
71 |           token: ${{ secrets.MY_PAT_TOKEN }}
72 |           commit-message: |
73 |             Update Photon version to ${{ steps.prepare_update.outputs.new_version }}
74 | 
75 |             Automated update of the .last_release file (and potentially other version files)
76 |             to reflect the new Photon release: ${{ steps.prepare_update.outputs.new_version }}.
77 |           committer: GitHub Actions <41898282+github-actions[bot]@users.noreply.github.com>
78 |           author: GitHub Actions <41898282+github-actions[bot]@users.noreply.github.com>
79 |           branch: ${{ steps.prepare_update.outputs.new_branch_name }}
80 |           delete-branch: true
81 |           title: "Update Photon to version ${{ steps.prepare_update.outputs.new_version }}"
82 |           body: |
83 |             A new version of Photon (${{ steps.prepare_update.outputs.new_version }}) has been released.
84 | 
85 |             This Pull Request proposes updating our tracked version.
86 | 
87 |             **Release File(s) Updated:**
88 |             * `.last_release` has been updated to `${{ steps.prepare_update.outputs.new_version }}`.
89 |             * (Mention any other files updated here, e.g., Dockerfile, if applicable)
90 | 
91 |             **Next Steps:**
92 |             1. Review the changes in this PR.
93 |             2. Merge this PR if everything looks good.
94 |             3. Build New Image: Merging this PR should (ideally) trigger the separate workflow responsible for building and publishing the new Docker image with Photon version `${{ steps.prepare_update.outputs.new_version }}`.
95 | 
96 |             ---
97 |             Upstream release notes for Photon ${{ steps.prepare_update.outputs.new_version }}: https://github.com/komoot/photon/releases/tag/${{ steps.prepare_update.outputs.new_version }}
98 |           labels: |
99 |             update
100 |             automated-pr
101 | 
102 |       - name: No update needed
103 |         if: steps.prepare_update.outputs.update_needed == 'false'
104 |         run: echo "No new Photon release was found or the version is already current. No action taken."
105 |
--------------------------------------------------------------------------------
/.github/workflows/build-and-push.yml:
--------------------------------------------------------------------------------
1 | name: Build and Publish Docker Image
2 |
3 | on:
4 |   workflow_dispatch:
5 |     inputs:
6 |       container_version:
7 |         description: "SemVer version to build (e.g., 1.2.3 or 1.2.3-beta)"
8 |         required: true
9 |       photon_version:
10 |         description: "Photon version (optional - will read from .last_release if not provided)"
11 |         required: false
12 | 
13 |   release:
14 |     types: [published]
15 | 
16 | jobs:
17 |   build-and-push:
18 |     runs-on: ubuntu-latest
19 |     steps:
20 |       - name: Checkout Repository
21 |         uses: actions/checkout@v5
22 |         with:
23 |           ref: ${{ github.ref }}
24 | 
25 |       - name: Set up Docker Buildx
26 |         uses: docker/setup-buildx-action@v3
27 | 
28 |       - name: Login to DockerHub
29 |         uses: docker/login-action@v3
30 |         with:
31 |           username: ${{ secrets.DOCKER_USERNAME }}
32 |           password: ${{ secrets.DOCKER_PASSWORD }}
33 | 
34 |       - name: Login to GitHub Container Registry
35 |         uses: docker/login-action@v3
36 |         with:
37 |           registry: ghcr.io
38 |           username: ${{ github.repository_owner }}
39 |           password: ${{ secrets.GITHUB_TOKEN }}
40 | 
41 |       - name: Extract version information
42 |         id: version_info
43 |         run: |
44 |           if [ -f ".last_release" ]; then
45 |             PHOTON_VERSION=$(cat .last_release | tr -d '[:space:]')
46 | 
47 |             if [[ -z "$PHOTON_VERSION" || ! "$PHOTON_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
48 |               echo "Error: .last_release is missing, empty, or contains an invalid version: '$PHOTON_VERSION'"
49 |               exit 1
50 |             fi
51 |             echo "Read photon version from .last_release: $PHOTON_VERSION"
52 |           else
53 |             PHOTON_VERSION="${{ github.event.inputs.photon_version }}"
54 |             if [[ -z "$PHOTON_VERSION" ]]; then
55 |               echo "Error: PHOTON_VERSION must be provided when .last_release file is missing"
56 |               exit 1
57 |             fi
58 |           fi
59 | 
60 |           if [ "${{ github.event_name }}" == "release" ]; then
61 |             CONTAINER_VERSION="${{ github.event.release.tag_name }}"
62 |             CONTAINER_VERSION="${CONTAINER_VERSION#v}"
63 | 
64 |             IS_PRERELEASE="${{ github.event.release.prerelease }}"
65 |           elif [ "${{ github.event_name }}" == "pull_request" ]; then
66 |             CONTAINER_VERSION="pr-${{ github.event.pull_request.number }}"
67 |             IS_PRERELEASE="true"
68 |           else
69 |             CONTAINER_VERSION="${{ github.event.inputs.container_version }}"
70 | 
71 |             CONTAINER_VERSION="${CONTAINER_VERSION#v}"
72 | 
73 |             if [[ "$CONTAINER_VERSION" == *"-beta"* ]]; then
74 |               IS_PRERELEASE="true"
75 |             else
76 |               IS_PRERELEASE="false"
77 |             fi
78 |           fi
79 | 
80 |           echo "CONTAINER_VERSION=$CONTAINER_VERSION" >> "$GITHUB_ENV"
81 |           echo "PHOTON_VERSION=$PHOTON_VERSION" >> "$GITHUB_ENV"
82 |           echo "IS_PRERELEASE=$IS_PRERELEASE" >> "$GITHUB_ENV"
83 | 
84 |           echo "Container Version: $CONTAINER_VERSION"
85 |           echo "Photon Version: $PHOTON_VERSION"
86 |           echo "Is Prerelease: $IS_PRERELEASE"
87 | 
88 |       - name: Generate Docker tags with semver support
89 |         id: generate_tags
90 |         run: |
91 |           CONTAINER_VERSION="${{ env.CONTAINER_VERSION }}"
92 |           IS_PRERELEASE="${{ env.IS_PRERELEASE }}"
93 | 
94 |           REPO_NAME="${{ github.repository }}"
95 |           DOCKERHUB_REPO="${REPO_NAME,,}"
96 |           GHCR_REPO="ghcr.io/${REPO_NAME,,}"
97 | 
98 |           TAGS="$DOCKERHUB_REPO:$CONTAINER_VERSION,$GHCR_REPO:$CONTAINER_VERSION"
99 | 
100 | 
101 |           if [ "$IS_PRERELEASE" == "true" ]; then
102 |             TAGS="$TAGS,$DOCKERHUB_REPO:beta,$GHCR_REPO:beta"
103 |           else
104 | 
105 |             if [[ "$CONTAINER_VERSION" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
106 |               MAJOR="${BASH_REMATCH[1]}"
107 |               MINOR="${BASH_REMATCH[2]}"
108 |               PATCH="${BASH_REMATCH[3]}"
109 | 
110 |               TAGS="$TAGS,$DOCKERHUB_REPO:$MAJOR,$GHCR_REPO:$MAJOR"
111 |               TAGS="$TAGS,$DOCKERHUB_REPO:$MAJOR.$MINOR,$GHCR_REPO:$MAJOR.$MINOR"
112 |               TAGS="$TAGS,$DOCKERHUB_REPO:$MAJOR.$MINOR.$PATCH,$GHCR_REPO:$MAJOR.$MINOR.$PATCH"
113 |               TAGS="$TAGS,$DOCKERHUB_REPO:latest,$GHCR_REPO:latest"
114 | 
115 |               echo "Generated semver tags for version $MAJOR.$MINOR.$PATCH"
116 |             else
117 |               echo "Version doesn't match semver pattern, skipping semver tags"
118 |               TAGS="$TAGS,$DOCKERHUB_REPO:latest,$GHCR_REPO:latest"
119 |             fi
120 |           fi
121 | 
122 |           echo "DOCKER_TAGS=$TAGS" >> "$GITHUB_ENV"
123 |           echo "Generated tags: $TAGS"
124 | 
125 |       - name: Build and push Docker image
126 |         uses: docker/build-push-action@v6
127 |         with:
128 |           build-args: |
129 |             PHOTON_VERSION=${{ env.PHOTON_VERSION }}
130 |           push: true
131 |           tags: ${{ env.DOCKER_TAGS }}
132 |           platforms: linux/amd64,linux/arm64
133 |           cache-from: type=gha
134 |           cache-to: type=gha,mode=max
135 | 
136 |       - name: Output summary
137 |         run: |
138 |           echo "## Docker Build Summary" >> $GITHUB_STEP_SUMMARY
139 |           echo "- **Event:** ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
140 |           echo "- **Container Version:** ${{ env.CONTAINER_VERSION }}" >> $GITHUB_STEP_SUMMARY
141 |           echo "- **Photon Version:** ${{ env.PHOTON_VERSION }}" >> $GITHUB_STEP_SUMMARY
142 |           echo "- **Is Prerelease:** ${{ env.IS_PRERELEASE }}" >> $GITHUB_STEP_SUMMARY
143 |           echo "- **Tags:** ${{ env.DOCKER_TAGS }}" >> $GITHUB_STEP_SUMMARY
144 |
--------------------------------------------------------------------------------
/src/filesystem.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import os
3 | import shutil
4 | import subprocess
5 | from pathlib import Path
6 |
7 | from src.utils import config
8 | from src.utils.logger import get_logger
9 |
10 | logging = get_logger()
11 |
12 |
13 | def extract_index(index_file: str):
14 | logging.info("Extracting Index")
15 | logging.debug(f"Index file: {index_file}")
16 | logging.debug(f"Index file exists: {os.path.exists(index_file)}")
17 | logging.debug(f"Index file size: {os.path.getsize(index_file) if os.path.exists(index_file) else 'N/A'}")
18 | logging.debug(f"Temp directory: {config.TEMP_DIR}")
19 | logging.debug(f"Temp directory exists: {os.path.exists(config.TEMP_DIR)}")
20 |
21 | if not os.path.exists(config.TEMP_DIR):
22 | logging.debug(f"Creating temp directory: {config.TEMP_DIR}")
23 | os.makedirs(config.TEMP_DIR, exist_ok=True)
24 |
25 | install_command = f"lbzip2 -d -c {index_file} | tar x -C {config.TEMP_DIR}"
26 | logging.debug(f"Extraction command: {install_command}")
27 |
28 | try:
29 | logging.debug("Starting extraction process...")
30 | result = subprocess.run(install_command, shell=True, capture_output=True, text=True, check=True)
31 | logging.debug("Extraction process completed successfully")
32 |
33 | if result.stdout:
34 | logging.debug(f"Extraction stdout: {result.stdout}")
35 | if result.stderr:
36 | logging.debug(f"Extraction stderr: {result.stderr}")
37 |
38 | logging.debug(f"Contents of {config.TEMP_DIR} after extraction:")
39 | try:
40 | for item in os.listdir(config.TEMP_DIR):
41 | item_path = os.path.join(config.TEMP_DIR, item)
42 | if os.path.isdir(item_path):
43 | logging.debug(f" DIR: {item}")
44 | try:
45 | sub_items = os.listdir(item_path)
46 | logging.debug(f" Contains {len(sub_items)} items")
47 | for sub_item in sub_items[:5]:
48 | logging.debug(f" {sub_item}")
49 | if len(sub_items) > 5:
50 | logging.debug(f" ... and {len(sub_items) - 5} more items")
51 | except Exception as e:
52 | logging.debug(f" Could not list subdirectory contents: {e}")
53 | else:
54 | logging.debug(f" FILE: {item} ({os.path.getsize(item_path)} bytes)")
55 | except Exception as e:
56 | logging.debug(f"Could not list contents of {config.TEMP_DIR}: {e}")
57 |
58 | except subprocess.CalledProcessError as e:
59 | logging.error(f"Index extraction failed with return code {e.returncode}")
60 | logging.error(f"Command: {e.cmd}")
61 | logging.error(f"Stdout: {e.stdout}")
62 | logging.error(f"Stderr: {e.stderr}")
63 | raise
64 | except Exception:
65 | logging.exception("Index extraction failed")
66 | raise
67 |
68 |
69 | def move_index():
70 | temp_photon_dir = os.path.join(config.TEMP_DIR, "photon_data")
71 | target_node_dir = os.path.join(config.PHOTON_DATA_DIR)
72 |
73 | logging.info(f"Moving index from {temp_photon_dir} to {target_node_dir}")
74 | result = move_index_atomic(temp_photon_dir, target_node_dir)
75 |
76 | if result:
77 | update_timestamp_marker()
78 |
79 | return result
80 |
81 |
82 | def move_index_atomic(source_dir: str, target_dir: str) -> bool:
83 | try:
84 | logging.info("Starting atomic index move operation")
85 |
86 | os.makedirs(os.path.dirname(target_dir), exist_ok=True)
87 |
88 | staging_dir = target_dir + ".staging"
89 | backup_dir = target_dir + ".backup"
90 |
91 | cleanup_staging_and_temp_backup(staging_dir, backup_dir)
92 |
93 | shutil.move(source_dir, staging_dir)
94 |
95 | if os.path.exists(target_dir):
96 | os.rename(target_dir, backup_dir)
97 |
98 | os.rename(staging_dir, target_dir)
99 | logging.info("Atomic index move completed successfully")
100 |
101 | return True
102 |
103 | except Exception as e:
104 | logging.error(f"Atomic move failed: {e}")
105 | rollback_atomic_move(source_dir, target_dir, staging_dir, backup_dir)
106 | raise
107 |
108 |
109 | def rollback_atomic_move(original_source: str, target_dir: str, staging_dir: str, backup_dir: str):
110 | logging.error("Rolling back atomic move operation")
111 |
112 | try:
113 | if os.path.exists(target_dir) and not os.path.exists(backup_dir):
114 | logging.debug("New index was successfully moved, keeping it")
115 | return
116 |
117 | if os.path.exists(target_dir):
118 | shutil.rmtree(target_dir)
119 |
120 | if os.path.exists(backup_dir):
121 | logging.info("Restoring backup after failed atomic move")
122 | os.rename(backup_dir, target_dir)
123 |
124 | if os.path.exists(staging_dir):
125 | shutil.move(staging_dir, original_source)
126 |
127 | logging.info("Rollback completed successfully")
128 |
129 | except Exception as rollback_error:
130 | logging.critical(f"Rollback failed: {rollback_error}")
131 |
132 |
133 | def cleanup_staging_and_temp_backup(staging_dir: str, backup_dir: str):
134 | for dir_path in [staging_dir, backup_dir]:
135 | if os.path.exists(dir_path):
136 | try:
137 | shutil.rmtree(dir_path)
138 | except Exception as e:
139 | logging.warning(f"Failed to cleanup {dir_path}: {e}")
140 |
141 |
142 | def cleanup_backup_after_verification(target_dir: str) -> bool:
143 | backup_dir = target_dir + ".backup"
144 | if os.path.exists(backup_dir):
145 | try:
146 | logging.info("Removing backup after successful verification")
147 | shutil.rmtree(backup_dir)
148 | return True
149 | except Exception as e:
150 | logging.warning(f"Failed to cleanup backup: {e}")
151 | return False
152 | return True
153 |
154 |
155 | def verify_checksum(md5_file, index_file):
156 | hash_md5 = hashlib.md5()
157 | try:
158 | with open(index_file, "rb") as f:
159 | for chunk in iter(lambda: f.read(4096), b""):
160 | hash_md5.update(chunk)
161 | dl_sum = hash_md5.hexdigest()
162 | except FileNotFoundError:
163 | logging.error(f"Index file not found for checksum generation: {index_file}")
164 | raise
165 |
166 | try:
167 | with open(md5_file) as f:
168 | md5_sum = f.read().split()[0].strip()
169 | except FileNotFoundError:
170 | logging.error(f"MD5 file not found: {md5_file}")
171 | raise
172 | except IndexError:
173 | logging.error(f"MD5 file is empty or malformed: {md5_file}")
174 | raise
175 |
176 | if dl_sum == md5_sum:
177 | logging.info("Checksum verified successfully.")
178 | return True
179 |
180 | raise Exception(f"Checksum mismatch for {index_file}. Expected: {md5_sum}, Got: {dl_sum}")
181 |
182 |
183 | def clear_temp_dir():
184 | logging.info("Removing TEMP dir")
185 | if os.path.exists(config.TEMP_DIR):
186 | logging.debug(f"Contents of TEMP directory {config.TEMP_DIR}:")
187 | try:
188 | for item in os.listdir(config.TEMP_DIR):
189 | item_path = os.path.join(config.TEMP_DIR, item)
190 | if os.path.isdir(item_path):
191 | logging.debug(f" DIR: {item}")
192 | else:
193 | logging.debug(f" FILE: {item}")
194 | except Exception as e:
195 | logging.debug(f"Could not list contents of {config.TEMP_DIR}: {e}")
196 |
197 | try:
198 | shutil.rmtree(config.TEMP_DIR)
199 | except Exception:
200 | logging.exception("Failed to Remove TEMP_DIR")
201 |
202 |
203 | def update_timestamp_marker():
204 | marker_file = os.path.join(config.DATA_DIR, ".photon-index-updated")
205 | try:
206 | Path(marker_file).touch()
207 | logging.info(f"Updated timestamp marker: {marker_file}")
208 | except Exception as e:
209 | logging.warning(f"Failed to update timestamp marker: {e}")
210 |
--------------------------------------------------------------------------------
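verify_checksum() expects the .md5 file in standard md5sum format, with the digest as the first whitespace-separated token. A self-contained demonstration of that layout (file names here are hypothetical):

```python
import hashlib

from src.filesystem import verify_checksum

payload = b"example archive bytes"
with open("index.tar.bz2", "wb") as f:
    f.write(payload)

digest = hashlib.md5(payload).hexdigest()
with open("index.tar.bz2.md5", "w") as f:
    f.write(f"{digest}  index.tar.bz2\n")  # md5sum layout: "<digest>  <filename>"

print(verify_checksum("index.tar.bz2.md5", "index.tar.bz2"))  # True; raises on mismatch
```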
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/src/process_manager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import os
3 | import shlex
4 | import signal
5 | import subprocess
6 | import sys
7 | import threading
8 | import time
9 | from enum import Enum
10 |
11 | import psutil
12 | import requests
13 | import schedule
14 | from requests.exceptions import RequestException
15 |
16 | from src.check_remote import compare_mtime
17 |
18 | from .filesystem import cleanup_backup_after_verification
19 | from .utils import config
20 | from .utils.logger import get_logger, setup_logging
21 |
22 | logger = get_logger()
23 |
24 |
25 | def check_photon_health(timeout=30, max_retries=10) -> bool:
26 | url = "http://localhost:2322/status"
27 |
28 | for attempt in range(max_retries):
29 | try:
30 | response = requests.get(url, timeout=timeout)
31 | if response.status_code == 200:
32 | logger.info("Photon health check passed")
33 | return True
34 | logger.warning(f"Photon health check failed with status {response.status_code}")
35 | except RequestException as e:
36 | logger.debug(f"Health check attempt {attempt + 1} failed: {e}")
37 |
38 | if attempt < max_retries - 1:
39 | time.sleep(3)
40 |
41 | logger.error(f"Photon health check failed after {max_retries} attempts")
42 | return False
43 |
44 |
45 | def wait_for_photon_ready(timeout=120) -> bool:
46 | start_time = time.time()
47 | logger.info("Waiting for Photon to become ready...")
48 |
49 | while time.time() - start_time < timeout:
50 | if check_photon_health(timeout=5, max_retries=1):
51 | elapsed = time.time() - start_time
52 | logger.info(f"Photon ready after {elapsed:.1f} seconds")
53 | return True
54 | time.sleep(5)
55 |
56 | logger.error(f"Photon failed to become ready within {timeout} seconds")
57 | return False
58 |
59 |
60 | class AppState(Enum):
61 | INITIALIZING = 1
62 | RUNNING = 2
63 | UPDATING = 3
64 | SHUTTING_DOWN = 4
65 |
66 |
67 | class PhotonManager:
68 | def __init__(self):
69 | self.state = AppState.INITIALIZING
70 | self.photon_process = None
71 | self.should_exit = False
72 |
73 | signal.signal(signal.SIGTERM, self.handle_shutdown)
74 | signal.signal(signal.SIGINT, self.handle_shutdown)
75 |
76 | def handle_shutdown(self, signum, _frame):
77 | logger.info(f"Received shutdown signal {signum}")
78 | self.should_exit = True
79 | self.shutdown()
80 |
81 | def run_initial_setup(self):
82 | logger.info("Running initial setup...")
83 | result = subprocess.run(["uv", "run", "-m", "src.entrypoint", "setup"], check=False, cwd="/photon")
84 |
85 | if result.returncode != 0:
86 | logger.error("Setup failed!")
87 | sys.exit(1)
88 |
89 | def start_photon(self, max_startup_retries=3):
90 | for attempt in range(max_startup_retries):
91 | logger.info(f"Starting Photon (attempt {attempt + 1}/{max_startup_retries})...")
92 | self.state = AppState.RUNNING
93 |
94 | java_params = config.JAVA_PARAMS or ""
95 | photon_params = config.PHOTON_PARAMS or ""
96 |
97 | cmd = [
98 | "java",
99 | "--add-modules",
100 | "jdk.incubator.vector",
101 | "--enable-native-access=ALL-UNNAMED",
102 | "-Des.gateway.auto_import_dangling_indices=true",
103 | "-Des.cluster.routing.allocation.batch_mode=true",
104 | "-Dlog4j2.disable.jmx=true",
105 | ]
106 |
107 | if java_params:
108 | cmd.extend(shlex.split(java_params))
109 |
110 | cmd.extend(["-jar", "/photon/photon.jar", "-data-dir", config.DATA_DIR])
111 |
112 | if photon_params:
113 | cmd.extend(shlex.split(photon_params))
114 |
115 | self.photon_process = subprocess.Popen(cmd, cwd="/photon", preexec_fn=os.setsid)
116 |
117 | logger.info(f"Photon started with PID: {self.photon_process.pid}")
118 |
119 | if wait_for_photon_ready():
120 | logger.info("Photon startup successful")
121 | return True
122 | logger.error(f"Photon health check failed on attempt {attempt + 1}")
123 | self.stop_photon()
124 |
125 | if attempt < max_startup_retries - 1:
126 | logger.info("Retrying Photon startup...")
127 | time.sleep(5)
128 |
129 | logger.error(f"Photon failed to start successfully after {max_startup_retries} attempts")
130 | return False
131 |
132 | def stop_photon(self):
133 | if self.photon_process:
134 | logger.info("Stopping Photon...")
135 |
136 | try:
137 | os.killpg(os.getpgid(self.photon_process.pid), signal.SIGTERM)
138 | self.photon_process.wait(timeout=30)
139 | except subprocess.TimeoutExpired:
140 | logger.warning("Photon didn't stop gracefully, force killing...")
141 | # Force kill
142 | try:
143 | os.killpg(os.getpgid(self.photon_process.pid), signal.SIGKILL)
144 | except ProcessLookupError:
145 | pass # Process dead
146 | self.photon_process.wait()
147 | except ProcessLookupError:
148 | # Process dead
149 | pass
150 |
151 | self.photon_process = None
152 |
153 | self.cleanup_orphaned_photon_processes()
154 |
155 | self._cleanup_lock_files()
156 |
157 |         time.sleep(2)  # brief pause so the port and index files are fully released before any restart
158 |
159 | def cleanup_orphaned_photon_processes(self):
160 | try:
161 | for proc in psutil.process_iter(["pid", "name", "cmdline"]):
162 | if (
163 | proc.info["name"] == "java"
164 | and proc.info["cmdline"]
165 | and any("photon.jar" in arg for arg in proc.info["cmdline"])
166 | ):
167 | logger.warning(f"Found orphaned Photon process PID {proc.info['pid']}, terminating...")
168 | proc.terminate()
169 | try:
170 | proc.wait(timeout=5)
171 | except psutil.TimeoutExpired:
172 | proc.kill()
173 | except Exception as e:
174 | logger.debug(f"Error checking for orphaned processes: {e}")
175 |
176 | def _cleanup_lock_files(self):
177 | lock_files = [
178 | os.path.join(config.OS_NODE_DIR, "node.lock"),
179 | os.path.join(config.OS_NODE_DIR, "data", "node.lock"),
180 | ]
181 |
182 | for lock_file in lock_files:
183 | if os.path.exists(lock_file):
184 | try:
185 | os.remove(lock_file)
186 | logger.debug(f"Removed lock file: {lock_file}")
187 | except Exception as e:
188 | logger.debug(f"Could not remove lock file {lock_file}: {e}")
189 |
190 | def run_update(self):
191 | if config.UPDATE_STRATEGY == "DISABLED":
192 | logger.info("Updates disabled, skipping")
193 | return
194 |
195 | self.state = AppState.UPDATING
196 | logger.info(f"Running {config.UPDATE_STRATEGY.lower()} update...")
197 | update_start = time.time()
198 |
199 | if not compare_mtime():
200 | update_duration = time.time() - update_start
201 | logger.info(f"Index already up to date - no restart needed ({update_duration:.1f}s)")
202 | self.state = AppState.RUNNING
203 | return
204 |
205 | if config.UPDATE_STRATEGY == "SEQUENTIAL":
206 | self.stop_photon()
207 |
208 | result = subprocess.run(["uv", "run", "-m", "src.updater"], check=False, cwd="/photon")
209 |
210 | if result.returncode == 0:
211 | logger.info("Update process completed, verifying Photon health...")
212 |
213 | if config.UPDATE_STRATEGY == "PARALLEL":
214 | self.stop_photon()
215 | if self.start_photon():
216 | update_duration = time.time() - update_start
217 | logger.info(f"Update completed successfully - Photon healthy ({update_duration:.1f}s)")
218 | target_node_dir = os.path.join(config.PHOTON_DATA_DIR, "node_1")
219 | cleanup_backup_after_verification(target_node_dir)
220 | else:
221 | update_duration = time.time() - update_start
222 | logger.error(f"Update failed - Photon health check failed after restart ({update_duration:.1f}s)")
223 | elif config.UPDATE_STRATEGY == "SEQUENTIAL":
224 | if self.start_photon():
225 | update_duration = time.time() - update_start
226 | logger.info(f"Update completed successfully - Photon healthy ({update_duration:.1f}s)")
227 | target_node_dir = os.path.join(config.PHOTON_DATA_DIR, "node_1")
228 | cleanup_backup_after_verification(target_node_dir)
229 | else:
230 | update_duration = time.time() - update_start
231 | logger.error(f"Update failed - Photon health check failed after restart ({update_duration:.1f}s)")
232 | else:
233 | update_duration = time.time() - update_start
234 | logger.error(f"Update process failed with code {result.returncode} ({update_duration:.1f}s)")
235 | if config.UPDATE_STRATEGY == "SEQUENTIAL" and not self.photon_process:
236 | logger.info("Attempting to restart Photon after failed update")
237 | if not self.start_photon():
238 | logger.error("Failed to restart Photon after update failure")
239 |
240 | self.state = AppState.RUNNING
241 |
242 | def schedule_updates(self):
243 | if config.UPDATE_STRATEGY == "DISABLED":
244 | logger.info("Updates disabled, not scheduling")
245 | return
246 |
247 | interval = config.UPDATE_INTERVAL.lower()
248 |
249 | if interval.endswith("d"):
250 | days = int(interval[:-1])
251 | schedule.every(days).days.do(self.run_update)
252 | logger.info(f"Scheduling updates every {days} days")
253 | elif interval.endswith("h"):
254 | hours = int(interval[:-1])
255 | schedule.every(hours).hours.do(self.run_update)
256 | logger.info(f"Scheduling updates every {hours} hours")
257 | elif interval.endswith("m"):
258 | minutes = int(interval[:-1])
259 | schedule.every(minutes).minutes.do(self.run_update)
260 | logger.info(f"Scheduling updates every {minutes} minutes")
261 | else:
262 | logger.warning(f"Invalid UPDATE_INTERVAL format: {interval}, defaulting to daily")
263 | schedule.every().day.do(self.run_update)
264 |
265 | def scheduler_loop():
266 | while not self.should_exit:
267 | schedule.run_pending()
268 | time.sleep(1)
269 |
270 | thread = threading.Thread(target=scheduler_loop, daemon=True)
271 | thread.start()
272 |
273 | def monitor_photon(self):
274 | while not self.should_exit:
275 | if self.photon_process and self.state == AppState.RUNNING:
276 | ret = self.photon_process.poll()
277 | if ret is not None:
278 | logger.warning(f"Photon exited with code {ret}, restarting...")
279 | if not self.start_photon():
280 | logger.error("Failed to restart Photon after unexpected exit")
281 | time.sleep(5)
282 |
283 | def shutdown(self):
284 | logger.info("Shutting down...")
285 | self.state = AppState.SHUTTING_DOWN
286 | self.stop_photon()
287 | sys.exit(0)
288 |
289 | def run(self):
290 | logger.info("Photon Manager starting...")
291 |
292 | if not config.FORCE_UPDATE and os.path.isdir(config.OS_NODE_DIR):
293 | logger.info("Existing index found, skipping initial setup")
294 | else:
295 | self.run_initial_setup()
296 |
297 | if not self.start_photon():
298 | logger.error("Failed to start Photon during initial startup")
299 | sys.exit(1)
300 |
301 | self.schedule_updates()
302 |
303 | self.monitor_photon()
304 |
305 |
306 | if __name__ == "__main__":
307 | setup_logging()
308 | manager = PhotonManager()
309 | manager.run()
310 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |     
2 |
3 | # Photon Docker Image
4 |
5 | ## Overview
6 |
7 | This is an _unofficial_ Docker image for [Photon](https://github.com/komoot/photon).
8 |
9 | Photon is an open-source geocoding solution built for OpenStreetMap (OSM) data,
10 | providing features such as search-as-you-type and reverse geocoding.
11 | This repository offers a Docker image for running Photon locally,
12 | enhancing data privacy and integration capabilities with services like [Dawarich](https://github.com/Freika/dawarich).
13 |
14 | ## Important Notes
15 |
16 | ⚠️ **Warning: Large File Sizes** ⚠️
17 |
18 | - The Photon index file is fairly large and growing steadily.
19 |   As of the beginning of 2025, around 200GB of free disk space is needed for the full extracted index
20 |   (the planet download itself is ~116GB; see [Available Regions](#available-regions)), and it is growing by 10-20GB per year.
21 | - Ensure you have sufficient disk space available before running the container (see the quick check below).
22 | - The initial download and extraction process may take a considerable amount of time.
23 | Depending on your hardware, checksum verification and decompression may take multiple hours.
24 |
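25 | A quick pre-flight check (a minimal sketch; the paths are examples, so substitute your Docker data root or the bind-mount path you plan to use):
26 |
27 | ```bash
28 | # Free space where Docker keeps named volumes (default location; yours may differ)
29 | df -h /var/lib/docker
30 |
31 | # For a bind mount, check the mount point you intend to use instead
32 | df -h /path/to/photon_data
33 | ```
34 |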
25 | ♻️ **Change in Default Download Source** ♻️
26 |
27 | - To reduce the load on the official Photon servers,
28 | the default `BASE_URL` for downloading the index files now points to a community-hosted mirror.
29 | Please see the **Community Mirrors** section for more details.
30 |
31 | ## Usage
32 |
33 | ### Example Docker Compose
34 |
35 | ```yaml
36 | services:
37 | photon:
38 | image: rtuszik/photon-docker:latest
39 | environment:
40 | - UPDATE_STRATEGY=PARALLEL
41 | - UPDATE_INTERVAL=720h # Check for updates every 30 days
42 | # - REGION=andorra # Optional: specific region (continent, country, or planet)
43 | # - APPRISE_URLS=pover://user@token # Optional: notifications
44 | volumes:
45 | - photon_data:/photon/data
46 | restart: unless-stopped
47 | ports:
48 | - "2322:2322"
49 | volumes:
50 | photon_data:
51 | ```
52 |
53 | ```bash
54 | docker compose up -d
55 | ```
56 |
57 | ### Configuration Options
58 |
59 | The container can be configured using the following environment variables; a `docker run` equivalent follows the table:
60 |
61 | | Variable | Parameters | Default | Description |
62 | | ---------------------- | -------------------------------------- | -------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
63 | | `UPDATE_STRATEGY` | `PARALLEL`, `SEQUENTIAL`, `DISABLED` | `SEQUENTIAL` | Controls how index updates are handled. `PARALLEL` downloads the new index in the background then swaps with minimal downtime (requires 2x space). `SEQUENTIAL` stops Photon, deletes the existing index, downloads the new one, then restarts. `DISABLED` prevents automatic updates. |
64 | | `UPDATE_INTERVAL` | Time string (e.g., "720h", "30d") | `30d` | How often to check for updates. To reduce server load, it is recommended to set this to a long interval (e.g., `720h` for 30 days) or disable updates altogether if you do not need the latest data. |
65 | | `REGION` | Region name, country code, or `planet` | `planet` | Optional region for a specific dataset. Can be a continent (`europe`, `asia`), individual country/region (`germany`, `usa`, `japan`), country code (`de`, `us`, `jp`), or `planet` for worldwide data. See [Available Regions](#available-regions) section for details. |
66 | | `LOG_LEVEL` | `DEBUG`, `INFO`, `ERROR` | `INFO` | Controls logging verbosity. |
67 | | `FORCE_UPDATE` | `TRUE`, `FALSE` | `FALSE` | Forces an index update on container startup, regardless of `UPDATE_STRATEGY`. |
68 | | `DOWNLOAD_MAX_RETRIES` | Number | `3` | Maximum number of retries for failed downloads. |
69 | | `INITIAL_DOWNLOAD` | `TRUE`, `FALSE` | `TRUE` | Controls whether the container performs the initial index download when the Photon data directory is empty. Useful for manual imports. |
70 | | `BASE_URL` | Valid URL | `https://r2.koalasec.org/public` | Custom base URL for index data downloads. Should point to the parent directory of index files. The default has been changed to a community mirror to reduce load on the GraphHopper servers. |
71 | | `SKIP_MD5_CHECK` | `TRUE`, `FALSE` | `FALSE` | Optionally skip MD5 verification of downloaded index files. |
72 | | `FILE_URL`             | URL to a .tar.bz2 file                 | -                                | Set a custom URL for the index file to be downloaded (e.g., "https://download1.graphhopper.com/public/experimental/photon-db-latest.tar.bz2"). The file must be in tar.bz2 format. Make sure to set `UPDATE_STRATEGY` to `DISABLED` when using this option. |
73 | | `PHOTON_PARAMS`        | Photon executable parameters           | -                                | See [Running Photon](https://github.com/komoot/photon#running-photon). |
74 | | `APPRISE_URLS` | Comma-separated Apprise URLs | - | Optional notification URLs for [Apprise](https://github.com/caronc/apprise) to send status updates (e.g., download completion, errors). Supports multiple services like Pushover, Slack, email, etc. Example: `pover://user@token,mailto://user:pass@gmail.com` |
75 | | `PUID`                 | User ID                                | `9011`                           | The User ID for the Photon process. Set this to your host user's ID (`id -u`) to prevent permission errors when using bind mounts. |
76 | | `PGID`                 | Group ID                               | `9011`                           | The Group ID for the Photon process. Set this to your host group's ID (`id -g`) to prevent permission errors when using bind mounts. |
77 |
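78 | If you prefer `docker run` over Compose, a rough equivalent of the Compose example above looks like this (illustrative; pick only the variables you need from the table):
79 |
80 | ```bash
81 | # Rough docker run equivalent of the Compose example (values are illustrative)
82 | docker run -d \
83 |   --name photon-docker \
84 |   -e UPDATE_STRATEGY=PARALLEL \
85 |   -e UPDATE_INTERVAL=720h \
86 |   -e PUID=$(id -u) \
87 |   -e PGID=$(id -g) \
88 |   -v photon_data:/photon/data \
89 |   -p 2322:2322 \
90 |   rtuszik/photon-docker:latest
91 | ```
92 |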
78 | ## Available Regions
79 |
80 | ### 1. Planet-wide Data
81 |
82 | (This is the default if no region is specified)
83 |
84 | - **Region**: `planet`
85 | - **Size**: ~116GB
86 | - **Coverage**: Worldwide
87 |
88 | ### 2. Continental Data
89 |
90 | - **africa** (~2.8GB)
91 | - **asia** (~13.5GB)
92 | - **australia-oceania** (~2.9GB)
93 | - **europe** (~60.7GB)
94 | - **north-america** (~29.5GB)
95 | - **south-america** (~13.8GB)
96 |
97 | ### 3. Individual Countries/Regions
98 |
99 | Only **16 regions** have individual database downloads available:
100 |
101 | #### Asia (2 regions)
102 |
103 | - **india** (also: `in`)
104 | - **japan** (also: `jp`)
105 |
106 | #### Europe (10 regions)
107 |
108 | - **andorra** (also: `ad`)
109 | - **austria** (also: `at`)
110 | - **denmark** (also: `dk`)
111 | - **france-monacco** (also: `fr`, `france`, `monaco`)
112 | - **germany** (also: `de`, `deutschland`)
113 | - **luxemburg** (also: `lu`, `luxembourg`)
114 | - **netherlands** (also: `nl`, `holland`, `the netherlands`)
115 | - **russia** (also: `ru`)
116 | - **slovakia** (also: `sk`)
117 | - **spain** (also: `es`, `españa`, `espana`)
118 |
119 | #### North America (3 regions)
120 |
121 | - **canada** (also: `ca`)
122 | - **mexico** (also: `mx`)
123 | - **usa** (also: `us`, `united states`, `united states of america`)
124 |
125 | #### South America (1 region)
126 |
127 | - **argentina** (also: `ar`)
128 |
129 | ### Usage Examples
130 |
131 | ```yaml
132 | # Continental download
133 | - REGION=europe
134 |
135 | # Individual country by name
136 | - REGION=andorra
137 |
138 | # Individual country by code
139 | - REGION=de
140 | ```
141 |
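142 | Per the URL scheme in `src/downloader.py`, a region resolves to a download path under `BASE_URL`: `REGION=de` is an alias for `germany`, a sub-region of `europe`, so the index comes from a URL like the one below (a hypothetical availability check; the file name follows the `photon-db-<region>-0.7OS-latest.tar.bz2` pattern):
143 |
144 | ```bash
145 | # Check that the regional index exists on the default mirror without downloading it (illustrative URL)
146 | curl -I "https://r2.koalasec.org/public/europe/germany/photon-db-germany-0.7OS-latest.tar.bz2"
147 | ```
148 |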
142 | ## Community Mirrors
143 |
144 | To ensure the sustainability of the Photon project and reduce the load on the official GraphHopper download servers,
145 | this Docker image now defaults to using community-hosted mirrors.
146 |
147 | > ⚠️ **Disclaimer:** Community mirrors are not officially managed by the Photon team or the maintainer of this Docker image.
148 | > There are **no guarantees regarding the availability, performance, or integrity of the data** provided by these mirrors. Use them at your own discretion.
149 |
150 | If you are hosting a public mirror, please open an issue or pull request to have it added to this list.
151 |
152 | | URL | Maintained By | Status |
153 | | ------------------------------------------- | ------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
154 | | `https://download1.graphhopper.com/public/` | [GraphHopper](https://www.graphhopper.com/) (Official) |  |
155 | | `https://r2.koalasec.org/public/` | [rtuszik](https://github.com/rtuszik) |  |
156 |
157 | ### Use with Dawarich
158 |
159 | This Docker container for Photon can be used as a reverse geocoder for the [Dawarich Location History Tracker](https://github.com/Freika/dawarich).
160 |
161 | To connect Dawarich to your Photon instance, set the following environment variables in your Dawarich docker-compose.yml:
162 |
163 | ```yaml
164 | PHOTON_API_HOST={PHOTON-IP}:{PORT}
165 | PHOTON_API_USE_HTTPS=false
166 | ```
167 |
168 | For example:
169 |
170 | ```yaml
171 | PHOTON_API_HOST=192.168.10.10:2322
172 | PHOTON_API_USE_HTTPS=false
173 | ```
174 |
175 | - Do _not_ set `PHOTON_API_USE_HTTPS` to `true` unless your Photon instance is served over HTTPS.
176 | - Use only the host address of your Photon instance; do not append `/api`.
177 |
178 | ### Build and Run Locally
179 |
180 | ```bash
181 | docker compose -f docker-compose.build.yml build --build-arg PHOTON_VERSION=0.6.2
182 | docker compose -f docker-compose.build.yml up -d
182 | ```
183 |
184 | ### Accessing the API
185 |
186 | The Photon API is available at:
187 |
188 | ```
189 | http://localhost:2322/api?q=Harare
190 | ```
191 |
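192 | For example, with `curl` (the `/reverse` endpoint is part of upstream Photon's API; the coordinates below are illustrative):
193 |
194 | ```bash
195 | # Forward geocoding (search-as-you-type)
196 | curl "http://localhost:2322/api?q=Harare&limit=1"
197 |
198 | # Reverse geocoding: coordinates to the nearest address (illustrative coordinates)
199 | curl "http://localhost:2322/reverse?lon=31.05&lat=-17.83"
200 | ```
201 |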
192 | ## Contributing
193 |
194 | Contributions are welcome. Please submit pull requests or open issues for suggestions and improvements.
195 |
196 | ## License
197 |
198 | This project is licensed under the Apache License, Version 2.0.
199 |
200 | ## Acknowledgments
201 |
202 | - [Photon](https://github.com/komoot/photon)
203 | - [Dawarich](https://github.com/Freika/dawarich)
204 |
205 |
206 |
--------------------------------------------------------------------------------
/src/downloader.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import shutil
4 | import sys
5 | import time
6 |
7 | import requests
8 | from requests.exceptions import RequestException
9 | from tqdm import tqdm
10 |
11 | from src.check_remote import get_local_time, get_remote_file_size
12 | from src.filesystem import clear_temp_dir, extract_index, move_index, verify_checksum
13 | from src.utils import config
14 | from src.utils.logger import get_logger
15 | from src.utils.regions import get_region_info, normalize_region
16 |
17 |
18 | class InsufficientSpaceError(Exception):
19 | pass
20 |
21 |
22 | logging = get_logger()
23 |
24 |
25 | def get_available_space(path: str) -> int:
26 | try:
27 | statvfs = os.statvfs(path)
28 | return statvfs.f_frsize * statvfs.f_bavail
29 | except (OSError, AttributeError):
30 | return 0
31 |
32 |
33 | def check_disk_space_requirements(download_size: int, is_parallel: bool = True) -> bool:
34 | temp_available = get_available_space(config.TEMP_DIR if os.path.exists(config.TEMP_DIR) else config.DATA_DIR)
35 | data_available = get_available_space(
36 | config.PHOTON_DATA_DIR if os.path.exists(config.PHOTON_DATA_DIR) else config.DATA_DIR
37 | )
38 |
39 | compressed_size = download_size
40 |     extracted_size = int(download_size * 1.63)  # estimated extracted size (~1.63x the compressed archive)
41 |
42 | if is_parallel:
43 | temp_needed = compressed_size + extracted_size
44 | data_needed = extracted_size
45 | total_needed = int(download_size * 1.7)
46 |
47 | logging.info("Parallel update space requirements:")
48 | logging.info(f" Download size: {compressed_size / (1024**3):.2f} GB")
49 | logging.info(f" Estimated extracted size: {extracted_size / (1024**3):.2f} GB")
50 | logging.info(f" Total space needed: {total_needed / (1024**3):.2f} GB")
51 | logging.info(f" Temp space available: {temp_available / (1024**3):.2f} GB")
52 | logging.info(f" Data space available: {data_available / (1024**3):.2f} GB")
53 |
54 | if temp_available < temp_needed:
55 | logging.error(
56 | f"Insufficient temp space: need {temp_needed / (1024**3):.2f} GB, have {temp_available / (1024**3):.2f} GB"
57 | )
58 | return False
59 |
60 | if data_available < data_needed:
61 | logging.error(
62 | f"Insufficient data space: need {data_needed / (1024**3):.2f} GB, have {data_available / (1024**3):.2f} GB"
63 | )
64 | return False
65 |
66 | else:
67 | temp_needed = compressed_size + extracted_size
68 |
69 | logging.info("Sequential update space requirements:")
70 | logging.info(f" Download size: {compressed_size / (1024**3):.2f} GB")
71 | logging.info(f" Estimated extracted size: {extracted_size / (1024**3):.2f} GB")
72 | logging.info(f" Temp space needed: {temp_needed / (1024**3):.2f} GB")
73 | logging.info(f" Temp space available: {temp_available / (1024**3):.2f} GB")
74 |
75 | if temp_available < temp_needed:
76 | logging.error(
77 | f"Insufficient temp space: need {temp_needed / (1024**3):.2f} GB, have {temp_available / (1024**3):.2f} GB"
78 | )
79 | return False
80 |
81 | logging.info("Sufficient disk space available for update")
82 | return True
83 |
84 |
85 | def get_download_state_file(destination: str) -> str:
86 | return destination + ".download_state"
87 |
88 |
89 | def save_download_state(destination: str, url: str, downloaded_bytes: int, total_size: int):
90 | state_file = get_download_state_file(destination)
91 | state = {
92 | "url": url,
93 | "destination": destination,
94 | "downloaded_bytes": downloaded_bytes,
95 | "total_size": total_size,
96 | "file_size": os.path.getsize(destination) if os.path.exists(destination) else 0,
97 | }
98 | try:
99 | with open(state_file, "w") as f:
100 | json.dump(state, f)
101 | except Exception as e:
102 | logging.warning(f"Failed to save download state: {e}")
103 |
104 |
105 | def load_download_state(destination: str) -> dict:
106 | state_file = get_download_state_file(destination)
107 | if not os.path.exists(state_file):
108 | return {}
109 |
110 | try:
111 | with open(state_file) as f:
112 | state = json.load(f)
113 |
114 | if os.path.exists(destination):
115 | actual_size = os.path.getsize(destination)
116 | saved_size = state.get("file_size", 0)
117 | if actual_size >= saved_size:
118 | state["file_size"] = actual_size
119 | state["downloaded_bytes"] = actual_size
120 | logging.info(f"Resuming download: file size {actual_size} bytes (saved state: {saved_size} bytes)")
121 | return state
122 | logging.warning(
123 | f"File size mismatch: actual {actual_size} < expected {saved_size}, starting fresh download"
124 | )
125 | cleanup_download_state(destination)
126 |
127 | except Exception as e:
128 | logging.warning(f"Failed to load download state: {e}")
129 | cleanup_download_state(destination)
130 |
131 | return {}
132 |
133 |
134 | def cleanup_download_state(destination: str):
135 | state_file = get_download_state_file(destination)
136 | try:
137 | if os.path.exists(state_file):
138 | os.remove(state_file)
139 | except Exception as e:
140 | logging.warning(f"Failed to cleanup download state: {e}")
141 |
142 |
143 | def supports_range_requests(url: str) -> bool:
144 | try:
145 | response = requests.head(url, allow_redirects=True, timeout=5)
146 | response.raise_for_status()
147 | return response.headers.get("accept-ranges", "").lower() == "bytes"
148 | except Exception as e:
149 | logging.warning(f"Could not determine range support for {url}: {e}")
150 | return False
151 |
152 |
153 | def get_download_url() -> str:
154 | if config.FILE_URL:
155 | return config.FILE_URL
156 |
157 | if config.REGION:
158 | normalized = normalize_region(config.REGION)
159 | region_info = get_region_info(config.REGION)
160 | if not region_info:
161 | raise ValueError(f"Unknown region: {config.REGION}")
162 |
163 | region_type = region_info["type"]
164 |
165 | if region_type == "planet":
166 | index_url = "/photon-db-planet-0.7OS-latest.tar.bz2"
167 | elif region_type == "continent":
168 | index_url = f"/{normalized}/photon-db-{normalized}-0.7OS-latest.tar.bz2"
169 | elif region_type == "sub-region":
170 | continent = region_info["continent"]
171 | index_url = f"/{continent}/{normalized}/photon-db-{normalized}-0.7OS-latest.tar.bz2"
172 | else:
173 | raise ValueError(f"Invalid region type: {region_type}")
174 | else:
175 | index_url = "/photon-db-planet-0.7OS-latest.tar.bz2"
176 |
177 | return config.BASE_URL + index_url
178 |
179 |
180 | def parallel_update():
181 | logging.info("Starting parallel update process...")
182 |
183 | try:
184 | if os.path.isdir(config.TEMP_DIR):
185 | logging.debug(f"Temporary directory {config.TEMP_DIR} exists. Attempting to remove it.")
186 | try:
187 | shutil.rmtree(config.TEMP_DIR)
188 | logging.debug(f"Successfully removed directory: {config.TEMP_DIR}")
189 | except Exception as e:
190 | logging.error(f"Failed to remove existing TEMP_DIR: {e}")
191 | raise
192 |
193 | logging.debug(f"Creating temporary directory: {config.TEMP_DIR}")
194 | os.makedirs(config.TEMP_DIR, exist_ok=True)
195 |
196 | download_url = get_download_url()
197 | file_size = get_remote_file_size(download_url)
198 |
199 | if file_size > 0:
200 | if not check_disk_space_requirements(file_size, is_parallel=True):
201 | logging.error("Insufficient disk space for parallel update")
202 | raise InsufficientSpaceError("Insufficient disk space for parallel update")
203 | else:
204 | logging.warning("Could not determine download size, proceeding without space check")
205 |
206 | logging.info("Downloading index")
207 |
208 | index_file = download_index()
209 |
210 | extract_index(index_file)
211 |
212 | if not config.SKIP_MD5_CHECK:
213 | md5_file = download_md5()
214 |
215 | logging.info("Verifying checksum...")
216 | verify_checksum(md5_file, index_file)
217 |
218 | logging.debug("Checksum verification successful.")
219 |
220 | logging.info("Moving Index")
221 | move_index()
222 | clear_temp_dir()
223 |
224 | logging.info("Parallel update process completed successfully.")
225 |
226 | except Exception as e:
227 | logging.error(f"FATAL: Update process failed with an error: {e}")
228 | logging.error("Aborting script.")
229 | sys.exit(1)
230 |
231 |
232 | def sequential_update():
233 | logging.info("Starting sequential download process...")
234 |
235 | try:
236 | if os.path.isdir(config.TEMP_DIR):
237 | logging.debug(f"Temporary directory {config.TEMP_DIR} exists. Attempting to remove it.")
238 | try:
239 | shutil.rmtree(config.TEMP_DIR)
240 | logging.debug(f"Successfully removed directory: {config.TEMP_DIR}")
241 | except Exception as e:
242 | logging.error(f"Failed to remove existing TEMP_DIR: {e}")
243 | raise
244 |
245 | logging.debug(f"Creating temporary directory: {config.TEMP_DIR}")
246 | os.makedirs(config.TEMP_DIR, exist_ok=True)
247 |
248 | download_url = get_download_url()
249 | file_size = get_remote_file_size(download_url)
250 |
251 | if file_size > 0:
252 | if not check_disk_space_requirements(file_size, is_parallel=False):
253 | logging.error("Insufficient disk space for sequential update")
254 | raise InsufficientSpaceError("Insufficient disk space for sequential update")
255 | else:
256 | logging.warning("Could not determine download size, proceeding without space check")
257 |
258 | logging.info("Downloading new index and MD5 checksum...")
259 | index_file = download_index()
260 | extract_index(index_file)
261 |
262 | if not config.SKIP_MD5_CHECK:
263 | md5_file = download_md5()
264 |
265 | logging.info("Verifying checksum...")
266 | verify_checksum(md5_file, index_file)
267 |
268 | logging.debug("Checksum verification successful.")
269 |
270 | logging.info("Moving new index into place...")
271 | move_index()
272 |
273 | clear_temp_dir()
274 |
275 | logging.info("Sequential download process completed successfully.")
276 |
277 | except Exception as e:
278 | logging.critical(f"FATAL: Update process failed with an error: {e}")
279 | logging.critical("Aborting script.")
280 | sys.exit(1)
281 |
282 |
283 | def download_index() -> str:
284 | output_file = "photon-db-latest.tar.bz2"
285 | download_url = get_download_url()
286 |
287 | output = os.path.join(config.TEMP_DIR, output_file)
288 |
289 | if not download_file(download_url, output):
290 | raise Exception(f"Failed to download index from {download_url}")
291 |
292 | local_timestamp = get_local_time(config.OS_NODE_DIR)
293 |
294 | logging.debug(f"New index timestamp: {local_timestamp}")
295 | return output
296 |
297 |
298 | def download_md5():
299 | if config.REGION:
300 | normalized = normalize_region(config.REGION)
301 | region_info = get_region_info(config.REGION)
302 | if not region_info:
303 | raise ValueError(f"Unknown region: {config.REGION}")
304 |
305 | region_type = region_info["type"]
306 |
307 | if region_type == "planet":
308 | md5_url = "/photon-db-planet-0.7OS-latest.tar.bz2.md5"
309 | elif region_type == "continent":
310 | md5_url = f"/{normalized}/photon-db-{normalized}-0.7OS-latest.tar.bz2.md5"
311 | elif region_type == "sub-region":
312 | continent = region_info["continent"]
313 | md5_url = f"/{continent}/{normalized}/photon-db-{normalized}-0.7OS-latest.tar.bz2.md5"
314 | else:
315 | raise ValueError(f"Invalid region type: {region_type}")
316 | else:
317 | md5_url = "/photon-db-planet-0.7OS-latest.tar.bz2.md5"
318 |
319 | download_url = config.BASE_URL + md5_url
320 |
321 | output_file = "photon-db-latest.tar.bz2.md5"
322 | output = os.path.join(config.TEMP_DIR, output_file)
323 |
324 | if not download_file(download_url, output):
325 | raise Exception(f"Failed to download MD5 checksum from {download_url}")
326 |
327 | return output
328 |
329 |
330 | def _prepare_download(url, destination):
331 | """Prepare download parameters including resume position."""
332 | state = load_download_state(destination)
333 | resume_byte_pos = 0
334 | mode = "wb"
335 |
336 | if state and state.get("url") == url:
337 | resume_byte_pos = state.get("downloaded_bytes", 0)
338 | if resume_byte_pos > 0 and os.path.exists(destination):
339 | mode = "ab"
340 | logging.info(f"Resuming download from byte {resume_byte_pos}")
341 |
342 | return resume_byte_pos, mode
343 |
344 |
345 | def _get_download_headers(resume_byte_pos, url):
346 | if resume_byte_pos > 0 and supports_range_requests(url):
347 | return {"Range": f"bytes={resume_byte_pos}-"}
348 | return {}
349 |
350 |
351 | def _calculate_total_size(response, headers, resume_byte_pos):
352 | if headers and response.status_code == 206:
353 | content_range = response.headers.get("content-range", "")
354 | if content_range:
355 | return int(content_range.split("/")[-1])
356 | return resume_byte_pos + int(response.headers.get("content-length", 0))
357 | return int(response.headers.get("content-length", 0))
358 |
359 |
360 | def _handle_no_range_support(resume_byte_pos, destination):
361 | if resume_byte_pos > 0:
362 | logging.warning("Server doesn't support range requests, restarting download")
363 | if os.path.exists(destination):
364 | os.remove(destination)
365 | return 0, "wb"
366 | return resume_byte_pos, None
367 |
368 |
369 | def _create_progress_bar(total_size, resume_byte_pos, destination):
370 | if total_size > 0:
371 | try:
372 | return tqdm(
373 | desc=f"Downloading {os.path.basename(destination)}",
374 | total=total_size,
375 | initial=resume_byte_pos,
376 | unit="B",
377 | unit_scale=True,
378 | unit_divisor=1024,
379 | leave=True,
380 | disable=None,
381 | file=sys.stderr,
382 | )
383 | except Exception:
384 | return None
385 | return None
386 |
387 |
388 | def _download_content(response, destination, mode, url, total_size, resume_byte_pos, progress_bar):
389 | downloaded = resume_byte_pos
390 | chunk_size = 8192
391 | save_interval = 1024 * 1024
392 | last_save = downloaded
393 | last_log = time.time()
394 | log_interval = 10
395 | last_log_bytes = downloaded
396 |
397 | try:
398 | with open(destination, mode) as f:
399 | for chunk in response.iter_content(chunk_size=chunk_size):
400 | if not chunk:
401 | continue
402 |
403 | size = f.write(chunk)
404 | downloaded += size
405 |
406 | if progress_bar:
407 | progress_bar.update(size)
408 |
409 | current_time = time.time()
410 | if current_time - last_log >= log_interval and total_size > 0:
411 | percent = (downloaded / total_size) * 100
412 | interval_bytes = downloaded - last_log_bytes
413 | interval_time = current_time - last_log
414 | speed_mbps = (interval_bytes * 8) / (interval_time * 1_000_000) if interval_time > 0 else 0
415 | eta = ((total_size - downloaded) / (interval_bytes / interval_time)) if interval_bytes > 0 else 0
416 | eta_str = f"{int(eta // 3600)}h {int((eta % 3600) // 60)}m" if eta > 0 else "calculating..."
417 |
418 | logging.info(
419 | f"Download progress: {percent:.1f}% ({downloaded / (1024**3):.2f}GB / {total_size / (1024**3):.2f}GB) - {speed_mbps:.1f} Mbps - ETA: {eta_str}"
420 | )
421 | last_log = current_time
422 | last_log_bytes = downloaded
423 |
424 | if downloaded - last_save >= save_interval:
425 | save_download_state(destination, url, downloaded, total_size)
426 | last_save = downloaded
427 |
428 | save_download_state(destination, url, downloaded, total_size)
429 |
430 | except Exception:
431 | save_download_state(destination, url, downloaded, total_size)
432 | raise
433 |
434 | return downloaded
435 |
436 |
437 | def _log_download_metrics(total_size, start_time, destination):
438 | if total_size > 0:
439 | speed_mbps = (total_size * 8) / ((time.time() - start_time) * 1_000_000)
440 | size_gb = total_size / (1024**3)
441 | duration = time.time() - start_time
442 | duration_minutes = duration / 60
443 | if duration_minutes > 120:
444 | duration_hours = duration_minutes / 60
445 | logging.info(
446 | f"Download completed: {size_gb:.2f}GB in {duration:.1f}s ({duration_minutes:.1f}m, {duration_hours:.1f}h) at {speed_mbps:.1f} Mbps"
447 | )
448 | else:
449 | logging.info(
450 | f"Download completed: {size_gb:.2f}GB in {duration:.1f}s ({duration_minutes:.1f}m) at {speed_mbps:.1f} Mbps"
451 | )
452 | else:
453 | logging.info(f"Downloaded {destination} successfully.")
454 |
455 |
456 | def _perform_download(url, destination, resume_byte_pos, mode, start_time):
457 | headers = _get_download_headers(resume_byte_pos, url)
458 |
459 | with requests.get(url, stream=True, headers=headers, timeout=(30, 60)) as response:
460 | response.raise_for_status()
461 |
462 | total_size = _calculate_total_size(response, headers, resume_byte_pos)
463 |
464 | if total_size > 0:
465 | logging.info(f"Starting download of {total_size / (1024**3):.2f}GB to {os.path.basename(destination)}")
466 |
467 |         if resume_byte_pos > 0 and response.status_code != 206:  # Range not honoured (or not sent): discard the partial file and restart
468 | new_pos, new_mode = _handle_no_range_support(resume_byte_pos, destination)
469 | if new_mode:
470 | resume_byte_pos = new_pos
471 | mode = new_mode
472 |
473 | progress_bar = _create_progress_bar(total_size, resume_byte_pos, destination)
474 |
475 | try:
476 | downloaded = _download_content(
477 | response,
478 | destination,
479 | mode,
480 | url,
481 | total_size,
482 | resume_byte_pos,
483 | progress_bar,
484 | )
485 |
486 | if progress_bar:
487 | progress_bar.close()
488 |
489 | save_download_state(destination, url, downloaded, total_size)
490 |
491 | if total_size > 0 and downloaded < total_size:
492 | raise Exception(f"Download incomplete: {downloaded}/{total_size} bytes")
493 |
494 | cleanup_download_state(destination)
495 | _log_download_metrics(total_size, start_time, destination)
496 | return True
497 |
498 | finally:
499 | if progress_bar:
500 | progress_bar.close()
501 |
502 |
503 | def download_file(url, destination):
504 | start_time = time.time()
505 | max_retries = int(config.DOWNLOAD_MAX_RETRIES)
506 |
507 | for attempt in range(max_retries):
508 | resume_byte_pos, mode = _prepare_download(url, destination)
509 | try:
510 | return _perform_download(url, destination, resume_byte_pos, mode, start_time)
511 |
512 | except RequestException as e:
513 | logging.warning(f"Download attempt {attempt + 1} failed: {e}")
514 | if attempt < max_retries - 1:
515 | wait_time = 2**attempt # 1s, 2s, 4s
516 | logging.info(f"Waiting {wait_time}s before retry...")
517 | time.sleep(wait_time)
518 | logging.info(f"Retrying download (attempt {attempt + 2}/{max_retries})...")
519 | continue
520 | logging.exception(f"Download failed after {max_retries} attempts")
521 | return False
522 |
523 | except Exception:
524 | logging.exception("Download failed")
525 | return False
526 |
527 | return False
528 |
--------------------------------------------------------------------------------
/uv.lock:
--------------------------------------------------------------------------------
1 | version = 1
2 | revision = 3
3 | requires-python = ">=3.12"
4 |
5 | [[package]]
6 | name = "apprise"
7 | version = "1.9.5"
8 | source = { registry = "https://pypi.org/simple" }
9 | dependencies = [
10 | { name = "certifi" },
11 | { name = "click" },
12 | { name = "markdown" },
13 | { name = "pyyaml" },
14 | { name = "requests" },
15 | { name = "requests-oauthlib" },
16 | { name = "tzdata", marker = "sys_platform == 'win32'" },
17 | ]
18 | sdist = { url = "https://files.pythonhosted.org/packages/60/16/e39338b8310af9466fab6f4482b542e24cb1fcbb7e36bf00c089c4e015e7/apprise-1.9.5.tar.gz", hash = "sha256:8f3be318bb429c2017470e33928a2e313cbf7600fc74b8184782a37060db366a", size = 1877134, upload-time = "2025-09-30T15:57:28.046Z" }
19 | wheels = [
20 | { url = "https://files.pythonhosted.org/packages/0d/f1/318762320d966e528dfb9e6be5953fe7df2952156f15ba857cbccafb630c/apprise-1.9.5-py3-none-any.whl", hash = "sha256:1873a8a1b8cf9e44fcbefe0486ed260b590652aea12427f545b37c8566142961", size = 1421011, upload-time = "2025-09-30T15:57:26.268Z" },
21 | ]
22 |
23 | [[package]]
24 | name = "bandit"
25 | version = "1.8.6"
26 | source = { registry = "https://pypi.org/simple" }
27 | dependencies = [
28 | { name = "colorama", marker = "sys_platform == 'win32'" },
29 | { name = "pyyaml" },
30 | { name = "rich" },
31 | { name = "stevedore" },
32 | ]
33 | sdist = { url = "https://files.pythonhosted.org/packages/fb/b5/7eb834e213d6f73aace21938e5e90425c92e5f42abafaf8a6d5d21beed51/bandit-1.8.6.tar.gz", hash = "sha256:dbfe9c25fc6961c2078593de55fd19f2559f9e45b99f1272341f5b95dea4e56b", size = 4240271, upload-time = "2025-07-06T03:10:50.9Z" }
34 | wheels = [
35 | { url = "https://files.pythonhosted.org/packages/48/ca/ba5f909b40ea12ec542d5d7bdd13ee31c4d65f3beed20211ef81c18fa1f3/bandit-1.8.6-py3-none-any.whl", hash = "sha256:3348e934d736fcdb68b6aa4030487097e23a501adf3e7827b63658df464dddd0", size = 133808, upload-time = "2025-07-06T03:10:49.134Z" },
36 | ]
37 |
38 | [[package]]
39 | name = "certifi"
40 | version = "2025.6.15"
41 | source = { registry = "https://pypi.org/simple" }
42 | sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" }
43 | wheels = [
44 | { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" },
45 | ]
46 |
47 | [[package]]
48 | name = "charset-normalizer"
49 | version = "3.4.2"
50 | source = { registry = "https://pypi.org/simple" }
51 | sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" }
52 | wheels = [
53 | { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" },
54 | { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" },
55 | { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" },
56 | { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" },
57 | { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" },
58 | { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" },
59 | { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" },
60 | { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" },
61 | { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" },
62 | { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" },
63 | { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" },
64 | { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" },
65 | { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" },
66 | { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" },
67 | { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" },
68 | { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" },
69 | { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" },
70 | { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" },
71 | { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" },
72 | { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" },
73 | { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" },
74 | { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" },
75 | { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" },
76 | { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" },
77 | { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" },
78 | { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" },
79 | { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" },
80 | ]
81 |
82 | [[package]]
83 | name = "click"
84 | version = "8.2.1"
85 | source = { registry = "https://pypi.org/simple" }
86 | dependencies = [
87 | { name = "colorama", marker = "sys_platform == 'win32'" },
88 | ]
89 | sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" }
90 | wheels = [
91 | { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
92 | ]
93 |
94 | [[package]]
95 | name = "colorama"
96 | version = "0.4.6"
97 | source = { registry = "https://pypi.org/simple" }
98 | sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
99 | wheels = [
100 | { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
101 | ]
102 |
103 | [[package]]
104 | name = "idna"
105 | version = "3.10"
106 | source = { registry = "https://pypi.org/simple" }
107 | sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
108 | wheels = [
109 | { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
110 | ]
111 |
112 | [[package]]
113 | name = "markdown"
114 | version = "3.8.2"
115 | source = { registry = "https://pypi.org/simple" }
116 | sdist = { url = "https://files.pythonhosted.org/packages/d7/c2/4ab49206c17f75cb08d6311171f2d65798988db4360c4d1485bd0eedd67c/markdown-3.8.2.tar.gz", hash = "sha256:247b9a70dd12e27f67431ce62523e675b866d254f900c4fe75ce3dda62237c45", size = 362071, upload-time = "2025-06-19T17:12:44.483Z" }
117 | wheels = [
118 | { url = "https://files.pythonhosted.org/packages/96/2b/34cc11786bc00d0f04d0f5fdc3a2b1ae0b6239eef72d3d345805f9ad92a1/markdown-3.8.2-py3-none-any.whl", hash = "sha256:5c83764dbd4e00bdd94d85a19b8d55ccca20fe35b2e678a1422b380324dd5f24", size = 106827, upload-time = "2025-06-19T17:12:42.994Z" },
119 | ]
120 |
121 | [[package]]
122 | name = "markdown-it-py"
123 | version = "4.0.0"
124 | source = { registry = "https://pypi.org/simple" }
125 | dependencies = [
126 | { name = "mdurl" },
127 | ]
128 | sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
129 | wheels = [
130 | { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
131 | ]
132 |
133 | [[package]]
134 | name = "mdurl"
135 | version = "0.1.2"
136 | source = { registry = "https://pypi.org/simple" }
137 | sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
138 | wheels = [
139 | { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
140 | ]
141 |
142 | [[package]]
143 | name = "oauthlib"
144 | version = "3.3.1"
145 | source = { registry = "https://pypi.org/simple" }
146 | sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" }
147 | wheels = [
148 | { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" },
149 | ]
150 |
151 | [[package]]
152 | name = "photon-docker"
153 | version = "1.2.1"
154 | source = { virtual = "." }
155 | dependencies = [
156 | { name = "apprise" },
157 | { name = "psutil" },
158 | { name = "python-dateutil" },
159 | { name = "requests" },
160 | { name = "schedule" },
161 | { name = "tqdm" },
162 | ]
163 |
164 | [package.dev-dependencies]
165 | dev = [
166 | { name = "bandit" },
167 | { name = "ruff" },
168 | { name = "ty" },
169 | { name = "vulture" },
170 | ]
171 |
172 | [package.metadata]
173 | requires-dist = [
174 | { name = "apprise", specifier = ">=1.9.3" },
175 | { name = "psutil", specifier = ">=5.9.0" },
176 | { name = "python-dateutil", specifier = ">=2.9.0.post0" },
177 | { name = "requests", specifier = "==2.32.5" },
178 | { name = "schedule", specifier = ">=1.2.2" },
179 | { name = "tqdm", specifier = "==4.67.1" },
180 | ]
181 |
182 | [package.metadata.requires-dev]
183 | dev = [
184 | { name = "bandit", specifier = ">=1.8.6" },
185 | { name = "ruff", specifier = ">=0.12.7" },
186 | { name = "ty", specifier = ">=0.0.1a16" },
187 | { name = "vulture", specifier = ">=2.14" },
188 | ]
189 |
190 | [[package]]
191 | name = "psutil"
192 | version = "7.1.3"
193 | source = { registry = "https://pypi.org/simple" }
194 | sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
195 | wheels = [
196 | { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
197 | { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
198 | { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
199 | { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" },
200 | { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" },
201 | { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" },
202 | { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" },
203 | { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" },
204 | { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" },
205 | { url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" },
206 | { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" },
207 | { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" },
208 | { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" },
209 | { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" },
210 | { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" },
211 | { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" },
212 | { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" },
213 | { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
214 | ]
215 |
216 | [[package]]
217 | name = "pygments"
218 | version = "2.19.2"
219 | source = { registry = "https://pypi.org/simple" }
220 | sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
221 | wheels = [
222 | { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
223 | ]
224 |
225 | [[package]]
226 | name = "python-dateutil"
227 | version = "2.9.0.post0"
228 | source = { registry = "https://pypi.org/simple" }
229 | dependencies = [
230 | { name = "six" },
231 | ]
232 | sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
233 | wheels = [
234 | { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
235 | ]
236 |
237 | [[package]]
238 | name = "pyyaml"
239 | version = "6.0.2"
240 | source = { registry = "https://pypi.org/simple" }
241 | sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
242 | wheels = [
243 | { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" },
244 | { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" },
245 | { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" },
246 | { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" },
247 | { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" },
248 | { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" },
249 | { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" },
250 | { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" },
251 | { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" },
252 | { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" },
253 | { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" },
254 | { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" },
255 | { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" },
256 | { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" },
257 | { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" },
258 | { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" },
259 | { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" },
260 | { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
261 | ]
262 |
263 | [[package]]
264 | name = "requests"
265 | version = "2.32.5"
266 | source = { registry = "https://pypi.org/simple" }
267 | dependencies = [
268 | { name = "certifi" },
269 | { name = "charset-normalizer" },
270 | { name = "idna" },
271 | { name = "urllib3" },
272 | ]
273 | sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
274 | wheels = [
275 | { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
276 | ]
277 |
278 | [[package]]
279 | name = "requests-oauthlib"
280 | version = "2.0.0"
281 | source = { registry = "https://pypi.org/simple" }
282 | dependencies = [
283 | { name = "oauthlib" },
284 | { name = "requests" },
285 | ]
286 | sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" }
287 | wheels = [
288 | { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" },
289 | ]
290 |
291 | [[package]]
292 | name = "rich"
293 | version = "14.1.0"
294 | source = { registry = "https://pypi.org/simple" }
295 | dependencies = [
296 | { name = "markdown-it-py" },
297 | { name = "pygments" },
298 | ]
299 | sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" }
300 | wheels = [
301 | { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" },
302 | ]
303 |
304 | [[package]]
305 | name = "ruff"
306 | version = "0.14.6"
307 | source = { registry = "https://pypi.org/simple" }
308 | sdist = { url = "https://files.pythonhosted.org/packages/52/f0/62b5a1a723fe183650109407fa56abb433b00aa1c0b9ba555f9c4efec2c6/ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc", size = 5669501, upload-time = "2025-11-21T14:26:17.903Z" }
309 | wheels = [
310 | { url = "https://files.pythonhosted.org/packages/67/d2/7dd544116d107fffb24a0064d41a5d2ed1c9d6372d142f9ba108c8e39207/ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3", size = 13326119, upload-time = "2025-11-21T14:25:24.2Z" },
311 | { url = "https://files.pythonhosted.org/packages/36/6a/ad66d0a3315d6327ed6b01f759d83df3c4d5f86c30462121024361137b6a/ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004", size = 13526007, upload-time = "2025-11-21T14:25:26.906Z" },
312 | { url = "https://files.pythonhosted.org/packages/a3/9d/dae6db96df28e0a15dea8e986ee393af70fc97fd57669808728080529c37/ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332", size = 12676572, upload-time = "2025-11-21T14:25:29.826Z" },
313 | { url = "https://files.pythonhosted.org/packages/76/a4/f319e87759949062cfee1b26245048e92e2acce900ad3a909285f9db1859/ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef", size = 13140745, upload-time = "2025-11-21T14:25:32.788Z" },
314 | { url = "https://files.pythonhosted.org/packages/95/d3/248c1efc71a0a8ed4e8e10b4b2266845d7dfc7a0ab64354afe049eaa1310/ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775", size = 13076486, upload-time = "2025-11-21T14:25:35.601Z" },
315 | { url = "https://files.pythonhosted.org/packages/a5/19/b68d4563fe50eba4b8c92aa842149bb56dd24d198389c0ed12e7faff4f7d/ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce", size = 13727563, upload-time = "2025-11-21T14:25:38.514Z" },
316 | { url = "https://files.pythonhosted.org/packages/47/ac/943169436832d4b0e867235abbdb57ce3a82367b47e0280fa7b4eabb7593/ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f", size = 15199755, upload-time = "2025-11-21T14:25:41.516Z" },
317 | { url = "https://files.pythonhosted.org/packages/c9/b9/288bb2399860a36d4bb0541cb66cce3c0f4156aaff009dc8499be0c24bf2/ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d", size = 14850608, upload-time = "2025-11-21T14:25:44.428Z" },
318 | { url = "https://files.pythonhosted.org/packages/ee/b1/a0d549dd4364e240f37e7d2907e97ee80587480d98c7799d2d8dc7a2f605/ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440", size = 14118754, upload-time = "2025-11-21T14:25:47.214Z" },
319 | { url = "https://files.pythonhosted.org/packages/13/ac/9b9fe63716af8bdfddfacd0882bc1586f29985d3b988b3c62ddce2e202c3/ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105", size = 13949214, upload-time = "2025-11-21T14:25:50.002Z" },
320 | { url = "https://files.pythonhosted.org/packages/12/27/4dad6c6a77fede9560b7df6802b1b697e97e49ceabe1f12baf3ea20862e9/ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821", size = 14106112, upload-time = "2025-11-21T14:25:52.841Z" },
321 | { url = "https://files.pythonhosted.org/packages/6a/db/23e322d7177873eaedea59a7932ca5084ec5b7e20cb30f341ab594130a71/ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55", size = 13035010, upload-time = "2025-11-21T14:25:55.536Z" },
322 | { url = "https://files.pythonhosted.org/packages/a8/9c/20e21d4d69dbb35e6a1df7691e02f363423658a20a2afacf2a2c011800dc/ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71", size = 13054082, upload-time = "2025-11-21T14:25:58.625Z" },
323 | { url = "https://files.pythonhosted.org/packages/66/25/906ee6a0464c3125c8d673c589771a974965c2be1a1e28b5c3b96cb6ef88/ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b", size = 13303354, upload-time = "2025-11-21T14:26:01.816Z" },
324 | { url = "https://files.pythonhosted.org/packages/4c/58/60577569e198d56922b7ead07b465f559002b7b11d53f40937e95067ca1c/ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185", size = 14054487, upload-time = "2025-11-21T14:26:05.058Z" },
325 | { url = "https://files.pythonhosted.org/packages/67/0b/8e4e0639e4cc12547f41cb771b0b44ec8225b6b6a93393176d75fe6f7d40/ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85", size = 13013361, upload-time = "2025-11-21T14:26:08.152Z" },
326 | { url = "https://files.pythonhosted.org/packages/fb/02/82240553b77fd1341f80ebb3eaae43ba011c7a91b4224a9f317d8e6591af/ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9", size = 14432087, upload-time = "2025-11-21T14:26:10.891Z" },
327 | { url = "https://files.pythonhosted.org/packages/a5/1f/93f9b0fad9470e4c829a5bb678da4012f0c710d09331b860ee555216f4ea/ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2", size = 13520930, upload-time = "2025-11-21T14:26:13.951Z" },
328 | ]
329 |
330 | [[package]]
331 | name = "schedule"
332 | version = "1.2.2"
333 | source = { registry = "https://pypi.org/simple" }
334 | sdist = { url = "https://files.pythonhosted.org/packages/0c/91/b525790063015759f34447d4cf9d2ccb52cdee0f1dd6ff8764e863bcb74c/schedule-1.2.2.tar.gz", hash = "sha256:15fe9c75fe5fd9b9627f3f19cc0ef1420508f9f9a46f45cd0769ef75ede5f0b7", size = 26452, upload-time = "2024-06-18T20:03:14.633Z" }
335 | wheels = [
336 | { url = "https://files.pythonhosted.org/packages/20/a7/84c96b61fd13205f2cafbe263cdb2745965974bdf3e0078f121dfeca5f02/schedule-1.2.2-py3-none-any.whl", hash = "sha256:5bef4a2a0183abf44046ae0d164cadcac21b1db011bdd8102e4a0c1e91e06a7d", size = 12220, upload-time = "2024-05-25T18:41:59.121Z" },
337 | ]
338 |
339 | [[package]]
340 | name = "six"
341 | version = "1.17.0"
342 | source = { registry = "https://pypi.org/simple" }
343 | sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
344 | wheels = [
345 | { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
346 | ]
347 |
348 | [[package]]
349 | name = "stevedore"
350 | version = "5.5.0"
351 | source = { registry = "https://pypi.org/simple" }
352 | sdist = { url = "https://files.pythonhosted.org/packages/2a/5f/8418daad5c353300b7661dd8ce2574b0410a6316a8be650a189d5c68d938/stevedore-5.5.0.tar.gz", hash = "sha256:d31496a4f4df9825e1a1e4f1f74d19abb0154aff311c3b376fcc89dae8fccd73", size = 513878, upload-time = "2025-08-25T12:54:26.806Z" }
353 | wheels = [
354 | { url = "https://files.pythonhosted.org/packages/80/c5/0c06759b95747882bb50abda18f5fb48c3e9b0fbfc6ebc0e23550b52415d/stevedore-5.5.0-py3-none-any.whl", hash = "sha256:18363d4d268181e8e8452e71a38cd77630f345b2ef6b4a8d5614dac5ee0d18cf", size = 49518, upload-time = "2025-08-25T12:54:25.445Z" },
355 | ]
356 |
357 | [[package]]
358 | name = "tqdm"
359 | version = "4.67.1"
360 | source = { registry = "https://pypi.org/simple" }
361 | dependencies = [
362 | { name = "colorama", marker = "sys_platform == 'win32'" },
363 | ]
364 | sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" }
365 | wheels = [
366 | { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
367 | ]
368 |
369 | [[package]]
370 | name = "ty"
371 | version = "0.0.1a20"
372 | source = { registry = "https://pypi.org/simple" }
373 | sdist = { url = "https://files.pythonhosted.org/packages/7a/82/a5e3b4bc5280ec49c4b0b43d0ff727d58c7df128752c9c6f97ad0b5f575f/ty-0.0.1a20.tar.gz", hash = "sha256:933b65a152f277aa0e23ba9027e5df2c2cc09e18293e87f2a918658634db5f15", size = 4194773, upload-time = "2025-09-03T12:35:46.775Z" }
374 | wheels = [
375 | { url = "https://files.pythonhosted.org/packages/45/c8/f7d39392043d5c04936f6cad90e50eb661965ed092ca4bfc01db917d7b8a/ty-0.0.1a20-py3-none-linux_armv6l.whl", hash = "sha256:f73a7aca1f0d38af4d6999b375eb00553f3bfcba102ae976756cc142e14f3450", size = 8443599, upload-time = "2025-09-03T12:35:04.289Z" },
376 | { url = "https://files.pythonhosted.org/packages/1e/57/5aec78f9b8a677b7439ccded7d66c3361e61247e0f6b14e659b00dd01008/ty-0.0.1a20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cad12c857ea4b97bf61e02f6796e13061ccca5e41f054cbd657862d80aa43bae", size = 8618102, upload-time = "2025-09-03T12:35:07.448Z" },
377 | { url = "https://files.pythonhosted.org/packages/15/20/50c9107d93cdb55676473d9dc4e2339af6af606660c9428d3b86a1b2a476/ty-0.0.1a20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f153b65c7fcb6b8b59547ddb6353761b3e8d8bb6f0edd15e3e3ac14405949f7a", size = 8192167, upload-time = "2025-09-03T12:35:09.706Z" },
378 | { url = "https://files.pythonhosted.org/packages/85/28/018b2f330109cee19e81c5ca9df3dc29f06c5778440eb9af05d4550c4302/ty-0.0.1a20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c4336987a6a781d4392a9fd7b3a39edb7e4f3dd4f860e03f46c932b52aefa2", size = 8349256, upload-time = "2025-09-03T12:35:11.76Z" },
379 | { url = "https://files.pythonhosted.org/packages/cd/c9/2f8797a05587158f52b142278796ffd72c893bc5ad41840fce5aeb65c6f2/ty-0.0.1a20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ff75cd4c744d09914e8c9db8d99e02f82c9379ad56b0a3fc4c5c9c923cfa84e", size = 8271214, upload-time = "2025-09-03T12:35:13.741Z" },
380 | { url = "https://files.pythonhosted.org/packages/30/d4/2cac5e5eb9ee51941358cb3139aadadb59520cfaec94e4fcd2b166969748/ty-0.0.1a20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e26437772be7f7808868701f2bf9e14e706a6ec4c7d02dbd377ff94d7ba60c11", size = 9264939, upload-time = "2025-09-03T12:35:16.896Z" },
381 | { url = "https://files.pythonhosted.org/packages/93/96/a6f2b54e484b2c6a5488f217882237dbdf10f0fdbdb6cd31333d57afe494/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83a7ee12465841619b5eb3ca962ffc7d576bb1c1ac812638681aee241acbfbbe", size = 9743137, upload-time = "2025-09-03T12:35:19.799Z" },
382 | { url = "https://files.pythonhosted.org/packages/6e/67/95b40dcbec3d222f3af5fe5dd1ce066d42f8a25a2f70d5724490457048e7/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:726d0738be4459ac7ffae312ba96c5f486d6cbc082723f322555d7cba9397871", size = 9368153, upload-time = "2025-09-03T12:35:22.569Z" },
383 | { url = "https://files.pythonhosted.org/packages/2c/24/689fa4c4270b9ef9a53dc2b1d6ffade259ba2c4127e451f0629e130ea46a/ty-0.0.1a20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b481f26513f38543df514189fb16744690bcba8d23afee95a01927d93b46e36", size = 9099637, upload-time = "2025-09-03T12:35:24.94Z" },
384 | { url = "https://files.pythonhosted.org/packages/a1/5b/913011cbf3ea4030097fb3c4ce751856114c9e1a5e1075561a4c5242af9b/ty-0.0.1a20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abbe3c02218c12228b1d7c5f98c57240029cc3bcb15b6997b707c19be3908c1", size = 8952000, upload-time = "2025-09-03T12:35:27.288Z" },
385 | { url = "https://files.pythonhosted.org/packages/df/f9/f5ba2ae455b20c5bb003f9940ef8142a8c4ed9e27de16e8f7472013609db/ty-0.0.1a20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fff51c75ee3f7cc6d7722f2f15789ef8ffe6fd2af70e7269ac785763c906688e", size = 8217938, upload-time = "2025-09-03T12:35:29.54Z" },
386 | { url = "https://files.pythonhosted.org/packages/eb/62/17002cf9032f0981cdb8c898d02422c095c30eefd69ca62a8b705d15bd0f/ty-0.0.1a20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b4124ab75e0e6f09fe7bc9df4a77ee43c5e0ef7e61b0c149d7c089d971437cbd", size = 8292369, upload-time = "2025-09-03T12:35:31.748Z" },
387 | { url = "https://files.pythonhosted.org/packages/28/d6/0879b1fb66afe1d01d45c7658f3849aa641ac4ea10679404094f3b40053e/ty-0.0.1a20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8a138fa4f74e6ed34e9fd14652d132409700c7ff57682c2fed656109ebfba42f", size = 8811973, upload-time = "2025-09-03T12:35:33.997Z" },
388 | { url = "https://files.pythonhosted.org/packages/60/1e/70bf0348cfe8ba5f7532983f53c508c293ddf5fa9f942ed79a3c4d576df3/ty-0.0.1a20-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8eff8871d6b88d150e2a67beba2c57048f20c090c219f38ed02eebaada04c124", size = 9010990, upload-time = "2025-09-03T12:35:36.766Z" },
389 | { url = "https://files.pythonhosted.org/packages/b7/ca/03d85c7650359247b1ca3f38a0d869a608ef540450151920e7014ed58292/ty-0.0.1a20-py3-none-win32.whl", hash = "sha256:3c2ace3a22fab4bd79f84c74e3dab26e798bfba7006bea4008d6321c1bd6efc6", size = 8100746, upload-time = "2025-09-03T12:35:40.007Z" },
390 | { url = "https://files.pythonhosted.org/packages/94/53/7a1937b8c7a66d0c8ed7493de49ed454a850396fe137d2ae12ed247e0b2f/ty-0.0.1a20-py3-none-win_amd64.whl", hash = "sha256:f41e77ff118da3385915e13c3f366b3a2f823461de54abd2e0ca72b170ba0f19", size = 8748861, upload-time = "2025-09-03T12:35:42.175Z" },
391 | { url = "https://files.pythonhosted.org/packages/27/36/5a3a70c5d497d3332f9e63cabc9c6f13484783b832fecc393f4f1c0c4aa8/ty-0.0.1a20-py3-none-win_arm64.whl", hash = "sha256:d8ac1c5a14cda5fad1a8b53959d9a5d979fe16ce1cc2785ea8676fed143ac85f", size = 8269906, upload-time = "2025-09-03T12:35:45.045Z" },
392 | ]
393 |
394 | [[package]]
395 | name = "tzdata"
396 | version = "2025.2"
397 | source = { registry = "https://pypi.org/simple" }
398 | sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
399 | wheels = [
400 | { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
401 | ]
402 |
403 | [[package]]
404 | name = "urllib3"
405 | version = "2.5.0"
406 | source = { registry = "https://pypi.org/simple" }
407 | sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
408 | wheels = [
409 | { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
410 | ]
411 |
412 | [[package]]
413 | name = "vulture"
414 | version = "2.14"
415 | source = { registry = "https://pypi.org/simple" }
416 | sdist = { url = "https://files.pythonhosted.org/packages/8e/25/925f35db758a0f9199113aaf61d703de891676b082bd7cf73ea01d6000f7/vulture-2.14.tar.gz", hash = "sha256:cb8277902a1138deeab796ec5bef7076a6e0248ca3607a3f3dee0b6d9e9b8415", size = 58823, upload-time = "2024-12-08T17:39:43.319Z" }
417 | wheels = [
418 | { url = "https://files.pythonhosted.org/packages/a0/56/0cc15b8ff2613c1d5c3dc1f3f576ede1c43868c1bc2e5ccaa2d4bcd7974d/vulture-2.14-py2.py3-none-any.whl", hash = "sha256:d9a90dba89607489548a49d557f8bac8112bd25d3cbc8aeef23e860811bd5ed9", size = 28915, upload-time = "2024-12-08T17:39:40.573Z" },
419 | ]
420 |
--------------------------------------------------------------------------------