├── .python-version
├── frontend
├── src
│ ├── lib
│ │ ├── index.ts
│ │ ├── components
│ │ │ ├── timeline
│ │ │ │ ├── types.ts
│ │ │ │ ├── timeline-outcome-filters.svelte
│ │ │ │ └── utils.ts
│ │ │ ├── mappings
│ │ │ │ └── capabilities-cache.ts
│ │ │ ├── json-code-block.svelte
│ │ │ └── toast-host.svelte
│ │ ├── ui
│ │ │ ├── tooltip.svelte
│ │ │ └── modal.svelte
│ │ ├── utils
│ │ │ ├── pin-options.ts
│ │ │ ├── anilist.ts
│ │ │ ├── human.ts
│ │ │ ├── notify.ts
│ │ │ └── api.ts
│ │ ├── types
│ │ │ └── anilist.ts
│ │ └── assets
│ │ │ └── favicon.svg
│ ├── app.d.ts
│ ├── app.html
│ └── app.css
├── .npmrc
├── pnpm-workspace.yaml
├── static
│ ├── favicon.ico
│ ├── pwa-192x192.png
│ ├── pwa-512x512.png
│ ├── apple-touch-icon.png
│ ├── pwa-maskable-192x192.png
│ └── pwa-maskable-512x512.png
├── .prettierignore
├── .gitignore
├── svelte.config.js
├── tsconfig.json
├── .prettierrc
├── eslint.config.js
├── package.json
└── vite.config.ts
├── docs
├── CNAME
├── favicon.ico
├── img
│ ├── logo.png
│ └── screenshots
│ │ ├── logs.png
│ │ ├── backups.png
│ │ ├── dashboard.png
│ │ ├── mappings.png
│ │ └── timeline.png
├── pyproject.toml
├── web
│ ├── screenshots.md
│ ├── api.md
│ ├── backups.md
│ ├── logs.md
│ ├── mappings.md
│ └── timeline.md
├── .nav.yml
├── quick-start
│ ├── source.md
│ └── docker.md
├── compose.yaml
├── mappings
│ └── custom-mappings.md
├── faq.md
├── recovery
│ └── disaster-recovery.md
└── css
│ └── htpasswd-generator.css
├── src
├── plex
│ └── __init__.py
├── web
│ ├── services
│ │ └── __init__.py
│ ├── __init__.py
│ ├── middlewares
│ │ ├── __init__.py
│ │ └── request_logging.py
│ ├── routes
│ │ ├── webhook
│ │ │ └── __init__.py
│ │ ├── __init__.py
│ │ ├── ws
│ │ │ ├── __init__.py
│ │ │ ├── logs.py
│ │ │ ├── status.py
│ │ │ └── history.py
│ │ └── api
│ │ │ ├── __init__.py
│ │ │ ├── status.py
│ │ │ ├── backups.py
│ │ │ ├── sync.py
│ │ │ └── history.py
│ └── state.py
├── utils
│ ├── __init__.py
│ ├── types.py
│ ├── requests.py
│ ├── terminal.py
│ ├── version.py
│ └── htpasswd.py
├── config
│ └── __init__.py
├── models
│ ├── __init__.py
│ ├── schemas
│ │ ├── __init__.py
│ │ └── plex.py
│ └── db
│ │ ├── __init__.py
│ │ ├── housekeeping.py
│ │ ├── provenance.py
│ │ ├── pin.py
│ │ └── base.py
├── core
│ ├── sync
│ │ └── __init__.py
│ └── __init__.py
└── __init__.py
├── tests
├── __init__.py
├── config
│ └── __init__.py
├── utils
│ ├── __init__.py
│ ├── test_version.py
│ ├── test_cache.py
│ ├── test_requests.py
│ ├── test_logging.py
│ └── test_terminal.py
├── core
│ ├── __init__.py
│ └── sync
│ │ └── __init__.py
├── conftest.py
├── test_pyproject.py
└── web
│ └── test_mappings_service_filters.py
├── .github
├── ISSUE_TEMPLATE
│ ├── config.yml
│ ├── feature_request.yml
│ └── bug_report.yml
├── workflows
│ ├── tests.yml
│ └── docs-publish.yml
├── PULL_REQUEST_TEMPLATE.md
├── FUNDING.yml
└── CONTRIBUTING.md
├── scripts
├── __init__.py
├── docker_init.sh
└── openapi.py
├── typings
└── plexapi
│ ├── __init__.py
│ ├── settings.pyi
│ └── utils.pyi
├── .prettierrc
├── .vscode
├── launch.json
├── extensions.json
└── settings.json
├── data
├── mappings.example.yaml
├── mappings.example.json
└── config.example.yaml
├── .dockerignore
├── alembic
├── script.py.mako
├── versions
│ ├── 2025-02-04-01-57_424fe94c2c03.py
│ ├── 2025-02-17-20-24_6b471e97e780.py
│ ├── 2025-08-12-06-30_213f9be1534f.py
│ ├── 2025-02-15-02-10_ddbadb26481f.py
│ ├── 2025-02-17-14-29_c5581ec025a7.py
│ ├── 2025-10-20-10-41_90496c989bdd.py
│ ├── 2025-03-24-02-37_e89ead9178d7.py
│ ├── 2025-09-30-04-03_12add4c4ffa9.py
│ ├── 2025-06-26-05-56_08f39c25b391.py
│ ├── 2025-09-19-00-29_8387b3fd8a6a.py
│ ├── 2025-10-08-19-00_db24057a61c9.py
│ ├── 2024-12-21-11-56_6e710e6677c0.py
│ ├── 2025-08-08-05-10_cd371e53adcb.py
│ └── 2025-01-15-15-52_b2ad27e14048.py
└── env.py
├── .env.example
├── LICENSE
├── .devcontainer
└── devcontainer.json
├── alembic.ini
├── Dockerfile
├── mkdocs.yml
├── pyproject.toml
├── README.md
└── .gitignore
/.python-version:
--------------------------------------------------------------------------------
1 | 3.13
2 |
--------------------------------------------------------------------------------
/frontend/src/lib/index.ts:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/CNAME:
--------------------------------------------------------------------------------
1 | plexanibridge.elias.eu.org
--------------------------------------------------------------------------------
/frontend/.npmrc:
--------------------------------------------------------------------------------
1 | engine-strict=true
2 |
--------------------------------------------------------------------------------
/src/plex/__init__.py:
--------------------------------------------------------------------------------
1 | """Plex API Module."""
2 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """PlexAniBridge tests."""
2 |
--------------------------------------------------------------------------------
/src/web/services/__init__.py:
--------------------------------------------------------------------------------
1 | """Web services."""
2 |
--------------------------------------------------------------------------------
/src/utils/__init__.py:
--------------------------------------------------------------------------------
1 | """Utility functions module."""
2 |
--------------------------------------------------------------------------------
/tests/config/__init__.py:
--------------------------------------------------------------------------------
1 | """Configuration tests."""
2 |
--------------------------------------------------------------------------------
/tests/utils/__init__.py:
--------------------------------------------------------------------------------
1 | """Utility function tests."""
2 |
--------------------------------------------------------------------------------
/src/web/__init__.py:
--------------------------------------------------------------------------------
1 | """PlexAniBridge web server module."""
2 |
--------------------------------------------------------------------------------
/src/web/middlewares/__init__.py:
--------------------------------------------------------------------------------
1 | """FastAPI middlewares."""
2 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 |
--------------------------------------------------------------------------------
/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | """Development scripts for PlexAniBridge."""
2 |
--------------------------------------------------------------------------------
/src/config/__init__.py:
--------------------------------------------------------------------------------
1 | """PlexAniBridge configuration module."""
2 |
--------------------------------------------------------------------------------
/src/models/__init__.py:
--------------------------------------------------------------------------------
1 | """PlexAniBridge application models."""
2 |
--------------------------------------------------------------------------------
/tests/core/__init__.py:
--------------------------------------------------------------------------------
1 | """Core test utilities for PlexAniBridge."""
2 |
--------------------------------------------------------------------------------
/tests/core/sync/__init__.py:
--------------------------------------------------------------------------------
1 | """Test suite for the core synchronization module."""
2 |
--------------------------------------------------------------------------------
/docs/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/docs/favicon.ico
--------------------------------------------------------------------------------
/docs/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/docs/img/logo.png
--------------------------------------------------------------------------------
/src/models/schemas/__init__.py:
--------------------------------------------------------------------------------
1 | """Schemas for external APIs used in PlexAniBridge."""
2 |
--------------------------------------------------------------------------------
/typings/plexapi/__init__.py:
--------------------------------------------------------------------------------
1 | """PlexAPI type stubs for type checking and autocompletion."""
2 |
--------------------------------------------------------------------------------
/frontend/pnpm-workspace.yaml:
--------------------------------------------------------------------------------
1 | onlyBuiltDependencies:
2 | - esbuild
3 | - "@tailwindcss/oxide"
4 |
--------------------------------------------------------------------------------
/frontend/static/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/frontend/static/favicon.ico
--------------------------------------------------------------------------------
/docs/img/screenshots/logs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/docs/img/screenshots/logs.png
--------------------------------------------------------------------------------
/docs/img/screenshots/backups.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/docs/img/screenshots/backups.png
--------------------------------------------------------------------------------
/frontend/static/pwa-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/frontend/static/pwa-192x192.png
--------------------------------------------------------------------------------
/frontend/static/pwa-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/frontend/static/pwa-512x512.png
--------------------------------------------------------------------------------
/docs/img/screenshots/dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/docs/img/screenshots/dashboard.png
--------------------------------------------------------------------------------
/docs/img/screenshots/mappings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/docs/img/screenshots/mappings.png
--------------------------------------------------------------------------------
/docs/img/screenshots/timeline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/docs/img/screenshots/timeline.png
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "useTabs": false,
3 | "tabWidth": 4,
4 | "singleQuote": false,
5 | "trailingComma": "all"
6 | }
7 |
--------------------------------------------------------------------------------
/frontend/static/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/frontend/static/apple-touch-icon.png
--------------------------------------------------------------------------------
/frontend/static/pwa-maskable-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/frontend/static/pwa-maskable-192x192.png
--------------------------------------------------------------------------------
/frontend/static/pwa-maskable-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eliasbenb/PlexAniBridge/HEAD/frontend/static/pwa-maskable-512x512.png
--------------------------------------------------------------------------------
/frontend/.prettierignore:
--------------------------------------------------------------------------------
1 | # Package Managers
2 | package-lock.json
3 | pnpm-lock.yaml
4 | yarn.lock
5 | bun.lock
6 | bun.lockb
7 |
8 | # Miscellaneous
9 | /static/
10 |
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "configurations": [
3 | {
4 | "type": "debugpy",
5 | "request": "launch",
6 | "name": "Run PlexAniBridge",
7 | "program": "main.py"
8 | }
9 | ]
10 | }
11 |
--------------------------------------------------------------------------------
/src/core/sync/__init__.py:
--------------------------------------------------------------------------------
1 | """Synchronization Module Initialization."""
2 |
3 | from src.core.sync.base import BaseSyncClient
4 | from src.core.sync.movie import MovieSyncClient
5 | from src.core.sync.show import ShowSyncClient
6 |
7 | __all__ = ["BaseSyncClient", "MovieSyncClient", "ShowSyncClient"]
8 |
--------------------------------------------------------------------------------
/src/web/routes/webhook/__init__.py:
--------------------------------------------------------------------------------
1 | """Webhook route aggregator."""
2 |
3 | from fastapi.routing import APIRouter
4 |
5 | from .plex import router as plex_router
6 |
7 | __all__ = ["router"]
8 |
9 | router = APIRouter()
10 | router.include_router(plex_router, prefix="/plex", tags=["plex"])
11 |
--------------------------------------------------------------------------------
/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 |
3 | # Output
4 | .output
5 | .vercel
6 | .netlify
7 | .wrangler
8 | /.svelte-kit
9 | /build
10 |
11 | # OS
12 | .DS_Store
13 | Thumbs.db
14 |
15 | # Env
16 | .env
17 | .env.*
18 | !.env.example
19 | !.env.test
20 |
21 | # Vite
22 | vite.config.js.timestamp-*
23 | vite.config.ts.timestamp-*
24 |
--------------------------------------------------------------------------------
/frontend/src/app.d.ts:
--------------------------------------------------------------------------------
1 | // See https://svelte.dev/docs/kit/types#app.d.ts
2 | // for information about these interfaces
3 | declare global {
4 | namespace App {
5 | // interface Error {}
6 | // interface Locals {}
7 | // interface PageData {}
8 | // interface PageState {}
9 | // interface Platform {}
10 | }
11 | }
12 |
13 | export {};
14 |
--------------------------------------------------------------------------------
/docs/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "docs"
3 | version = "0.1.0"
4 | description = "PlexAniBridge Documentation"
5 | readme = "index.md"
6 | requires-python = ">=3.10"
7 |
8 | dependencies = [
9 | "mkdocs-awesome-nav>=3.2.0",
10 | "mkdocs-git-revision-date-localized-plugin>=1.4.7",
11 | "mkdocs-git-tag-plugin>=0.1.0",
12 | "mkdocs-material>=9.6.22",
13 | "mkdocs-redoc-tag>=0.2.0",
14 | ]
15 |
--------------------------------------------------------------------------------
/src/core/__init__.py:
--------------------------------------------------------------------------------
1 | """Core Module Initialization."""
2 |
3 | from src.core.anilist import AniListClient
4 | from src.core.animap import AniMapClient
5 | from src.core.plex import PlexClient
6 |
7 | from src.core.bridge import BridgeClient # isort:skip
8 | from src.core.sched import SchedulerClient
9 |
10 | __all__ = [
11 | "AniListClient",
12 | "AniMapClient",
13 | "BridgeClient",
14 | "PlexClient",
15 | "SchedulerClient",
16 | ]
17 |
--------------------------------------------------------------------------------
/frontend/svelte.config.js:
--------------------------------------------------------------------------------
1 | import adapter from "@sveltejs/adapter-static";
2 | import { vitePreprocess } from "@sveltejs/vite-plugin-svelte";
3 |
4 | /** @type {import('@sveltejs/kit').Config} */
5 | const config = {
6 | // Consult https://svelte.dev/docs/kit/integrations
7 | // for more information about preprocessors
8 | preprocess: vitePreprocess(),
9 | kit: { adapter: adapter({ fallback: "index.html", strict: false }) },
10 | };
11 |
12 | export default config;
13 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "charliermarsh.ruff",
4 | "dbaeumer.vscode-eslint",
5 | "eamodio.gitlens",
6 | "esbenp.prettier-vscode",
7 | "ms-python.python",
8 | "rafaelha.vscode-flamegraph",
9 | "redhat.vscode-yaml",
10 | "tamasfe.even-better-toml",
11 | "yy0931.vscode-sqlite3-editor",
12 | "yzhang.markdown-all-in-one",
13 | "svelte.svelte-vscode",
14 | "bradlc.vscode-tailwindcss"
15 | ]
16 | }
17 |
--------------------------------------------------------------------------------
/docs/web/screenshots.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Screenshots
3 | icon: material/monitor-shimmer
4 | ---
5 |
6 | ## Dashboard
7 |
8 | 
9 |
10 | ## Timeline
11 |
12 | 
13 |
14 | ## Mappings
15 |
16 | 
17 |
18 | ## Logs
19 |
20 | 
21 |
22 | ## Backups
23 |
24 | 
25 |
--------------------------------------------------------------------------------
/src/models/db/__init__.py:
--------------------------------------------------------------------------------
1 | """Models for PlexAniBridge database tables."""
2 |
3 | from src.models.db.animap import AniMap
4 | from src.models.db.base import Base
5 | from src.models.db.housekeeping import Housekeeping
6 | from src.models.db.pin import Pin
7 | from src.models.db.provenance import AniMapProvenance
8 | from src.models.db.sync_history import SyncHistory
9 |
10 | __all__ = [
11 | "AniMap",
12 | "AniMapProvenance",
13 | "Base",
14 | "Housekeeping",
15 | "Pin",
16 | "SyncHistory",
17 | ]
18 |
--------------------------------------------------------------------------------
/src/web/routes/__init__.py:
--------------------------------------------------------------------------------
1 | """Route aggregators for the web application."""
2 |
3 | from fastapi.routing import APIRouter
4 |
5 | from src.web.routes.api import router as api_router
6 | from src.web.routes.webhook import router as webhook_router
7 | from src.web.routes.ws import router as ws_router
8 |
9 | __all__ = ["router"]
10 |
11 | router = APIRouter()
12 |
13 | router.include_router(api_router, prefix="/api", tags=[])
14 | router.include_router(webhook_router, prefix="/webhook", tags=[])
15 | router.include_router(ws_router, prefix="/ws", tags=[])
16 |
--------------------------------------------------------------------------------
/docs/.nav.yml:
--------------------------------------------------------------------------------
1 | nav:
2 | - index.md
3 | - configuration.md
4 | - faq.md
5 | - Quick start:
6 | - quick-start/docker.md
7 | - quick-start/source.md
8 | - Web:
9 | - web/screenshots.md
10 | - web/api.md
11 | - web/timeline.md
12 | - web/mappings.md
13 | - web/logs.md
14 | - web/backups.md
15 | - Mappings:
16 | - mappings/custom-mappings.md
17 | - mappings/mappings-querying-language.md
18 | - Recovery:
19 | - recovery/disaster-recovery.md
20 |
--------------------------------------------------------------------------------
/data/mappings.example.yaml:
--------------------------------------------------------------------------------
1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/eliasbenb/PlexAniBridge-Mappings/v2/mappings.schema.json
2 |
3 | $includes:
4 | - /path/to/another/mappings.json
5 | - https://url.to/another/mappings.json
6 |
7 | 99999:
8 | anidb_id: 9999
9 | imdb_id: tt9999999
10 | mal_id: 9999
11 | tmdb_show_id: 9999
12 | tvdb_id: 9999
13 | tvdb_mappings:
14 | s0: e1
15 | s1: e1-e13
16 |
17 | 99998:
18 | anidb_id: 9998
19 | imdb_id: tt9999998
20 | mal_id: 9998
21 | tmdb_movie_id: 9998
22 |
--------------------------------------------------------------------------------
/src/web/routes/ws/__init__.py:
--------------------------------------------------------------------------------
1 | """Websocket routes."""
2 |
3 | from fastapi.routing import APIRouter
4 |
5 | from src.web.routes.ws.history import router as history_router
6 | from src.web.routes.ws.logs import router as logs_router
7 | from src.web.routes.ws.status import router as status_router
8 |
9 | __all__ = ["router"]
10 |
11 | router = APIRouter()
12 |
13 | router.include_router(history_router, prefix="/history", tags=["history"])
14 | router.include_router(logs_router, prefix="/logs", tags=["logs"])
15 | router.include_router(status_router, prefix="/status", tags=["status"])
16 |
--------------------------------------------------------------------------------
/src/models/db/housekeeping.py:
--------------------------------------------------------------------------------
1 | """Housekeeping Model Module."""
2 |
3 | from sqlalchemy import String
4 | from sqlalchemy.orm import Mapped, mapped_column
5 |
6 | from src.models.db.base import Base
7 |
8 | __all__ = ["Housekeeping"]
9 |
10 |
11 | class Housekeeping(Base):
12 | """Model for the Housekeeping table.
13 |
14 | This table is used to store miscellaneous data such as timestamps and hashes.
15 | """
16 |
17 | __tablename__ = "house_keeping"
18 |
19 | key: Mapped[str] = mapped_column(String, primary_key=True)
20 | value: Mapped[str | None] = mapped_column(String, nullable=True)
21 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | pull_request:
6 |
7 | jobs:
8 | pytest:
9 | runs-on: ubuntu-latest
10 |
11 | timeout-minutes: 10
12 |
13 | permissions:
14 | contents: read
15 |
16 | steps:
17 | - name: Checkout repository
18 | uses: actions/checkout@v5
19 |
20 | - name: Install uv
21 | uses: astral-sh/setup-uv@v7
22 |
23 | - name: Sync dependencies
24 | run: uv sync --group dev --frozen
25 |
26 | - name: Run pytest
27 | run: uv run pytest
28 |
--------------------------------------------------------------------------------
/docs/web/api.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: API
3 | icon: material/api
4 | ---
5 |
6 | The API is self-documenting via the OpenAPI specification. You can explore endpoints, view schemas, and try out requests directly from either the built-in [Swagger UI](https://swagger.io/tools/swagger-ui/) or [Redoc](https://redocly.github.io/redoc/) interfaces (available at `/docs` and `/redoc`, respectively, on your PlexAniBridge deployment).
7 |
8 | Below is an embedded Redoc viewer for convenience. _Note: the embedded viewer may not always be up to date with the live API; for the most accurate docs, visit `/redoc` directly on your PlexAniBridge instance._
9 |
10 |
11 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Data
2 | /data
3 |
4 | # Database
5 | *.db
6 | *.db-journal
7 | *.sqlite3
8 |
9 | # Installer logs
10 | pip-log.txt
11 | pip-delete-this-directory.txt
12 |
13 | # Docs
14 | /docs
15 | /mkdocs.yml
16 |
17 | # Environments
18 | .env
19 | .venv
20 | env/
21 | venv/
22 | ENV/
23 | env.bak/
24 | venv.bak/
25 |
26 | # Logs
27 | *.log
28 |
29 | # IDE
30 | /.devcontainer
31 | /.vscode
32 |
33 | # Git
34 | /.git
35 | !/.git/HEAD
36 | !/.git/refs/heads/
37 |
38 | # GitHub
39 | /.github
40 |
41 | # Ruff
42 | /.ruff_cache
43 |
44 | # Python
45 | __pycache__
46 | *.pyc
47 | *.pyo
48 | *.pyd
49 |
50 | # Node
51 | .pnpm-store
52 | .svelte-kit
53 | build
54 | node_modules
55 | frontend/node_modules
56 |
--------------------------------------------------------------------------------
/data/mappings.example.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://raw.githubusercontent.com/eliasbenb/PlexAniBridge-Mappings/v2/mappings.schema.json",
3 | "$includes": [
4 | "/path/to/another/mappings.json",
5 | "https://url.to/another/mappings.json"
6 | ],
7 | "99999": {
8 | "anidb_id": 9999,
9 | "imdb_id": "tt9999999",
10 | "mal_id": 9999,
11 | "tmdb_show_id": 9999,
12 | "tvdb_id": 9999,
13 | "tvdb_mappings": {
14 | "s0": "e1",
15 | "s1": "e1-e13"
16 | }
17 | },
18 | "99998": {
19 | "anidb_id": 9998,
20 | "imdb_id": "tt9999998",
21 | "mal_id": 9998,
22 | "tmdb_movie_id": 9998
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/alembic/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 | ${imports if imports else ""}
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = ${repr(up_revision)}
16 | down_revision: Union[str, None] = ${repr(down_revision)}
17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19 |
20 |
21 | def upgrade() -> None:
22 | ${upgrades if upgrades else "pass"}
23 |
24 |
25 | def downgrade() -> None:
26 | ${downgrades if downgrades else "pass"}
27 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """Shared pytest configuration for core tests."""
2 |
3 | import atexit
4 | import os
5 | import shutil
6 | import tempfile
7 | from pathlib import Path
8 |
9 | _TEST_DATA_DIR = Path(tempfile.mkdtemp(prefix="pab-tests-"))
10 |
11 | os.environ.setdefault("PAB_DATA_PATH", str(_TEST_DATA_DIR))
12 | os.environ.setdefault("PAB_ANILIST_TOKEN", "anilist-token")
13 | os.environ.setdefault("PAB_PLEX_TOKEN", "plex-token")
14 | os.environ.setdefault("PAB_PLEX_USER", "eliasbenb")
15 | os.environ.setdefault("PAB_PLEX_URL", "http://plex:32400")
16 |
17 |
18 | @atexit.register
19 | def _cleanup_test_data_dir() -> None:
20 | """Remove the temporary test data directory after the test session."""
21 | shutil.rmtree(_TEST_DATA_DIR, ignore_errors=True)
22 |
--------------------------------------------------------------------------------
/frontend/src/lib/components/timeline/types.ts:
--------------------------------------------------------------------------------
1 | import type { Circle } from "@lucide/svelte";
2 |
3 | import type { HistoryItem } from "$lib/types/api";
4 |
5 | export type DiffStatus = "added" | "removed" | "changed" | "unchanged";
6 |
7 | export interface DiffEntry {
8 | path: string;
9 | before: unknown;
10 | after: unknown;
11 | status: DiffStatus;
12 | }
13 |
14 | export interface ItemDiffUi {
15 | tab: "changes" | "compare";
16 | filter: string;
17 | showUnchanged: boolean;
18 | }
19 |
20 | export interface OutcomeMeta {
21 | label: string;
22 | color: string;
23 | icon: typeof Circle;
24 | order: number;
25 | }
26 |
27 | export interface TimelineItemContext {
28 | item: HistoryItem;
29 | meta: OutcomeMeta;
30 | }
31 |
--------------------------------------------------------------------------------
/docs/quick-start/source.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Source
3 | icon: material/wrench
4 | ---
5 |
6 | ## Requirements
7 |
8 | - [Python 3.13+](https://www.python.org/downloads/)
9 | - [uv](https://docs.astral.sh/uv/getting-started/installation/)
10 | - [SQLite3](https://www.sqlite.org/download.html)
11 | - [Git](https://git-scm.com/downloads)
12 |
13 | ## Setup
14 |
15 | !!! tip
16 |
17 | Have a look at [the configuration page](../configuration.md) for a detailed list of configurable environment variables.
18 |
19 | ```shell
20 | git clone https://github.com/eliasbenb/PlexAniBridge.git
21 | cd PlexAniBridge
22 |
23 | cp .env.example .env # Edit the .env file
24 |
25 | # Setup environment
26 | uv sync
27 | uv run pab-deps-install
28 | uv run pab-build
29 |
30 | # Run PlexAniBridge
31 | uv run pab-start
32 | ```
33 |
--------------------------------------------------------------------------------
/src/utils/types.py:
--------------------------------------------------------------------------------
1 | """Generic types and protocols for type hinting."""
2 |
3 | from typing import Any, Protocol
4 |
5 | __all__ = ["Comparable"]
6 |
7 |
8 | class Comparable(Protocol):
9 | """Protocol for objects that can be compared using <, >, <=, >= operators."""
10 |
11 | def __lt__(self, other: Any) -> bool:
12 | """Return True if this object is less than other."""
13 | ...
14 |
15 | def __gt__(self, other: Any) -> bool:
16 | """Return True if this object is greater than other."""
17 | ...
18 |
19 | def __le__(self, other: Any) -> bool:
20 | """Return True if this object is less than or equal to other."""
21 | ...
22 |
23 | def __ge__(self, other: Any) -> bool:
24 | """Return True if this object is greater than or equal to other."""
25 | ...
26 |
--------------------------------------------------------------------------------
/frontend/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./.svelte-kit/tsconfig.json",
3 | "compilerOptions": {
4 | "allowJs": true,
5 | "checkJs": true,
6 | "esModuleInterop": true,
7 | "forceConsistentCasingInFileNames": true,
8 | "resolveJsonModule": true,
9 | "skipLibCheck": true,
10 | "sourceMap": true,
11 | "strict": true,
12 | "moduleResolution": "bundler",
13 | "allowArbitraryExtensions": true
14 | }
15 | // Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias
16 | // except $lib which is handled by https://svelte.dev/docs/kit/configuration#files
17 | //
18 | // To make changes to top-level options such as include and exclude, we recommend extending
19 | // the generated config; see https://svelte.dev/docs/kit/configuration#typescript
20 | }
21 |
--------------------------------------------------------------------------------
/scripts/docker_init.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh
# Container entrypoint: create the runtime user/group, fix ownership of the
# app (and optional config) directories, apply UMASK, then drop privileges
# and exec the requested command.

PUID=${PUID:-1000}
PGID=${PGID:-1000}
UMASK=${UMASK:-022}

# Emit a timestamped line matching the application's log format.
log() {
    printf "%s - init - INFO\t%s\n" "$(date '+%Y-%m-%d %H:%M:%S')" "$1"
}

# Create the 'abc' group and user only if they do not already exist.
getent group abc >/dev/null 2>&1 || addgroup -g "$PGID" abc
getent passwd abc >/dev/null 2>&1 || adduser -u "$PUID" -G abc -s /bin/sh -D abc

# Ensure the runtime user owns the application files and the config volume.
chown -R "$PUID:$PGID" /app
if [ -d "/config" ]; then
    chown -R "$PUID:$PGID" /config
fi

# Accept UMASK only when it is a 3- or 4-digit octal mask.
case "$UMASK" in
    [0-7][0-7][0-7] | [0-7][0-7][0-7][0-7])
        umask "$UMASK"
        ;;
    *)
        log "Invalid UMASK '$UMASK' provided, falling back to 022"
        umask 022
        ;;
esac

CURRENT_UMASK=$(umask)
log "Starting PlexAniBridge (UID: $PUID, GID: $PGID, UMASK: $CURRENT_UMASK)"

# Replace this shell with the target process running as the unprivileged user.
exec su-exec abc "$@"
35 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ### Description
2 |
3 | A few sentences describing the overall goals of the pull request's commits.
4 |
5 |
6 |
7 | **What's new:**
8 |
9 | **Improvements:**
10 |
11 | **Fixes:**
12 |
13 | ### Database Migration
14 |
15 | YES / NO — if YES, reference the Alembic migration revision (e.g. `424fe94c2c03`)
16 |
17 | ### Issues Fixed or Closed by this PR
18 |
19 | - Closes #
20 |
21 | ### Checklist
22 |
23 | - [ ] I have performed a self-review of my own code
24 | - [ ] My code passes the test suite (`pytest`)
25 | - [ ] My code passes the code style checks of this project (`ruff check && (cd frontend && pnpm lint)`)
26 | - [ ] I have added tests that prove my fix is effective or that my feature works
27 | - [ ] I have commented my code, particularly in hard-to-understand areas
28 | - [ ] I have made changes to the documentation in `docs/` if applicable
29 |
--------------------------------------------------------------------------------
/frontend/src/app.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
8 |
11 |
14 |
17 |
20 |
23 | %sveltekit.head%
24 |
25 |
26 | %sveltekit.body%
27 |
28 |
29 |
--------------------------------------------------------------------------------
/frontend/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "useTabs": false,
3 | "tabWidth": 4,
4 | "singleQuote": false,
5 | "trailingComma": "all",
6 | "printWidth": 88,
7 | "objectWrap": "collapse",
8 | "bracketSameLine": true,
9 | "singleAttributePerLine": true,
10 | "plugins": [
11 | "prettier-plugin-svelte",
12 | "prettier-plugin-tailwindcss",
13 | "@ianvs/prettier-plugin-sort-imports"
14 | ],
15 | "importOrder": [
16 | "^svelte$",
17 | "",
18 | "",
19 | "",
20 | "",
21 | "^[$]",
22 | "^[.]",
23 | "",
24 | ".css$"
25 | ],
26 | "overrides": [
27 | { "files": "*.svelte", "options": { "parser": "svelte" } },
28 | { "files": "*.json", "options": { "objectWrap": "preserve" } }
29 | ],
30 | "tailwindStylesheet": "./src/app.css"
31 | }
32 |
--------------------------------------------------------------------------------
/src/web/routes/ws/logs.py:
--------------------------------------------------------------------------------
1 | """Websocket endpoint for live logs."""
2 |
3 | from fastapi.routing import APIRouter
4 | from fastapi.websockets import WebSocket, WebSocketDisconnect
5 |
6 | from src.web.services.logging_handler import get_log_ws_handler
7 |
8 | __all__ = ["router"]
9 |
10 | router = APIRouter()
11 |
12 |
@router.websocket("")
async def logs_ws(ws: WebSocket) -> None:
    """Websocket endpoint for live logs.

    Accepts the connection, registers it with the shared log handler so it
    receives broadcast log records, and then blocks reading (and discarding)
    client frames purely to detect disconnection.

    Args:
        ws (WebSocket): The WebSocket connection instance.
    """
    handler = get_log_ws_handler()

    await ws.accept()
    await handler.add(ws)
    try:
        # No inbound messages are expected; receiving keeps the connection
        # open and raises WebSocketDisconnect when the client goes away.
        while True:
            await ws.receive_text()
    except WebSocketDisconnect:
        pass  # Normal client disconnect — nothing to report.
    finally:
        # Always deregister so the handler stops broadcasting to this socket.
        await handler.remove(ws)
32 |
--------------------------------------------------------------------------------
/alembic/versions/2025-02-04-01-57_424fe94c2c03.py:
--------------------------------------------------------------------------------
1 | """Drop AniDB unique index
2 |
3 | Revision ID: 424fe94c2c03
4 | Revises: b2ad27e14048
5 | Create Date: 2025-02-04 01:57:53.836952
6 |
7 | """
8 |
9 | from typing import Sequence, Union
10 |
11 | import sqlalchemy as sa
12 |
13 | from alembic import op
14 |
15 | # revision identifiers, used by Alembic.
16 | revision: str = "424fe94c2c03"
17 | down_revision: Union[str, None] = "b2ad27e14048"
18 | branch_labels: Union[str, Sequence[str], None] = None
19 | depends_on: Union[str, Sequence[str], None] = None
20 |
21 |
def upgrade() -> None:
    """Recreate ``ix_animap_anidb_id`` as a non-unique index."""
    op.drop_index("ix_animap_anidb_id", table_name="animap")
    op.create_index("ix_animap_anidb_id", "animap", ["anidb_id"], unique=False)


def downgrade() -> None:
    """Restore the unique constraint on ``ix_animap_anidb_id``."""
    op.drop_index("ix_animap_anidb_id", table_name="animap")
    op.create_index("ix_animap_anidb_id", "animap", ["anidb_id"], unique=True)
30 |
--------------------------------------------------------------------------------
/tests/test_pyproject.py:
--------------------------------------------------------------------------------
1 | """Basic pytest smoke tests for PlexAniBridge."""
2 |
3 | import re
4 | import tomllib
5 | from pathlib import Path
6 |
7 | SEMVER_PATTERN = re.compile(r"^\d+\.\d+\.\d+(?:[+-][0-9A-Za-z-.]+)?$")
8 |
9 |
def test_project_metadata() -> None:
    """Ensure core project metadata is present and well-formed."""
    path = Path("pyproject.toml")
    assert path.exists(), "pyproject.toml should exist at the project root"

    # TOML is always UTF-8, so decoding the raw bytes is equivalent to
    # loading from a binary file handle.
    data = tomllib.loads(path.read_bytes().decode("utf-8"))

    project = data.get("project")
    assert isinstance(project, dict), "[project] table must exist in pyproject.toml"

    assert project.get("name") == "PlexAniBridge"

    version = project.get("version")
    assert isinstance(version, str) and SEMVER_PATTERN.fullmatch(version), (
        "Version must follow semantic versioning"
    )
27 |
--------------------------------------------------------------------------------
/src/models/db/provenance.py:
--------------------------------------------------------------------------------
1 | """AniMap Provenance Model."""
2 |
3 | from __future__ import annotations
4 |
5 | from sqlalchemy import ForeignKey, Integer, String
6 | from sqlalchemy.orm import Mapped, mapped_column
7 |
8 | from src.models.db.base import Base
9 |
10 | __all__ = ["AniMapProvenance"]
11 |
12 |
class AniMapProvenance(Base):
    """Tracks the provenance (source paths/URLs) for each AniMap row.

    Stores one row per source with an order column ``n`` to preserve the
    original order of sources for a given ``anilist_id``.
    """

    __tablename__ = "animap_provenance"

    # AniList ID of the mapped entry; provenance rows are removed
    # automatically when the parent ``animap`` row is deleted (CASCADE).
    anilist_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey("animap.anilist_id", ondelete="CASCADE"),
        primary_key=True,
        index=True,
    )
    # Position of this source within the entry's source list; together with
    # ``anilist_id`` it forms the composite primary key.
    n: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Path or URL the mapping entry was loaded from.
    source: Mapped[str] = mapped_column(String, nullable=False)
30 |
--------------------------------------------------------------------------------
/tests/utils/test_version.py:
--------------------------------------------------------------------------------
1 | """Tests for version utility helpers."""
2 |
3 | import string
4 | import tomllib
5 | from pathlib import Path
6 |
7 | from src.utils.version import get_git_hash, get_pyproject_version
8 |
9 |
def test_get_pyproject_version_matches_pyproject() -> None:
    """get_pyproject_version() must agree with the version in pyproject.toml."""
    # TOML files are UTF-8 by specification.
    raw = Path("pyproject.toml").read_bytes()
    pyproject = tomllib.loads(raw.decode("utf-8"))

    assert get_pyproject_version() == pyproject["project"]["version"]
18 |
19 |
def test_get_git_hash_returns_hex_or_unknown() -> None:
    """get_git_hash() must return 'unknown' or a short/full hex commit hash."""
    git_hash = get_git_hash()

    assert isinstance(git_hash, str)
    if git_hash == "unknown":
        return
    # A real hash is either the 7-character short form or the 40-character
    # full SHA-1 form, made up entirely of hex digits.
    assert len(git_hash) in {7, 40}
    assert set(git_hash) <= set(string.hexdigits)
28 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [eliasbenb] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
12 | polar: # Replace with a single Polar username
13 | buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
14 | thanks_dev: # Replace with a single thanks.dev username
15 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
16 |
--------------------------------------------------------------------------------
/data/config.example.yaml:
--------------------------------------------------------------------------------
1 | #== Global Defaults ==#
2 | anilist_token: ...
3 | plex_token: ...
4 | plex_user: ...
5 | plex_url: ...
6 | # plex_sections: []
7 | # plex_genres: []
8 | # plex_metadata_source: "local"
9 | # sync_interval: 86400
10 | # sync_modes: ["periodic", "poll", "webhook"]
11 | # full_scan: false
12 | # destructive_sync: false
13 | # excluded_sync_fields: ["notes", "score"]
14 | # dry_run: false
15 | # backup_retention_days: 30
16 | # batch_requests: false
17 | # search_fallback_threshold: -1
18 |
19 | #== Profile Overrides ==#
20 | # profiles:
21 | # example:
22 | # $field: $value
23 |
24 | #== Global Settings ==#
25 | # data_path: "./data"
26 | # log_level: "INFO"
27 | # mappings_url: "https://raw.githubusercontent.com/eliasbenb/PlexAniBridge-Mappings/v2/mappings.json"
28 | # web_enabled: true
29 | # web_host: "0.0.0.0"
30 | # web_port: 4848
31 | # web_basic_auth_username: null
32 | # web_basic_auth_password: null
33 | # web_basic_auth_htpasswd_path: null
34 | # web_basic_auth_realm: "PlexAniBridge"
35 |
--------------------------------------------------------------------------------
/frontend/src/lib/ui/tooltip.svelte:
--------------------------------------------------------------------------------
1 |
22 |
23 |
24 |
25 | {@render trigger?.()}
26 |
27 |
28 |
31 | {@render children?.()}
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | #== Global Defaults ==#
2 | PAB_ANILIST_TOKEN=...
3 | PAB_PLEX_TOKEN=...
4 | PAB_PLEX_USER=...
5 | PAB_PLEX_URL=...
6 | # PAB_PLEX_SECTIONS=[]
7 | # PAB_PLEX_GENRES=[]
8 | # PAB_PLEX_METADATA_SOURCE="local"
9 | # PAB_SYNC_INTERVAL=86400
10 | # PAB_SYNC_MODES=["periodic", "poll", "webhook"]
11 | # PAB_FULL_SCAN=false
12 | # PAB_DESTRUCTIVE_SYNC=false
13 | # PAB_EXCLUDED_SYNC_FIELDS=["notes", "score"]
14 | # PAB_DRY_RUN=false
15 | # PAB_BACKUP_RETENTION_DAYS=30
16 | # PAB_BATCH_REQUESTS=false
17 | # PAB_SEARCH_FALLBACK_THRESHOLD=-1
18 |
19 | #== Profile Overrides ==#
20 | # PAB_PROFILES__example__$FIELD=$VALUE
21 |
22 | #== Global Settings ==#
23 | # PAB_DATA_PATH="./data"
24 | # PAB_LOG_LEVEL="INFO"
25 | # PAB_MAPPINGS_URL="https://raw.githubusercontent.com/eliasbenb/PlexAniBridge-Mappings/v2/mappings.json"
26 | # PAB_WEB_ENABLED=true
27 | # PAB_WEB_HOST="0.0.0.0"
28 | # PAB_WEB_PORT=4848
29 | # PAB_WEB_BASIC_AUTH_USERNAME=null
30 | # PAB_WEB_BASIC_AUTH_PASSWORD=null
31 | # PAB_WEB_BASIC_AUTH_HTPASSWD_PATH=null
32 | # PAB_WEB_BASIC_AUTH_REALM="PlexAniBridge"
33 |
--------------------------------------------------------------------------------
/docs/web/backups.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Backups
3 | icon: material/archive-refresh
4 | ---
5 |
6 | 
7 |
8 | Use the backups page to browse, preview, and restore the AniList snapshots PlexAniBridge creates for you. The page has two views: a profile picker at `/backups` and a detailed table at `/backups/{profile}`. Everything you see reflects the real files stored in `PAB_DATA_PATH/backups`, so you always know what is available.
9 |
10 | _Note: backups are kept for 30 days by default. Adjust [BACKUP_RETENTION_DAYS](../configuration.md#backup_retention_days) if you need a longer retention window._
11 |
12 | The profile view lists every backup with its filename, when it was created, how old it is, file size, and which AniList user it belongs to.
13 |
14 | Actions on each row:
15 |
16 | - `Preview` opens the backup in a modal so you can skim the JSON of your AniList backup before doing anything permanent.
17 | - `Restore` triggers the restore job, and shows a toast summarising how many entries were brought back plus any warnings.
18 |
--------------------------------------------------------------------------------
/alembic/versions/2025-02-17-20-24_6b471e97e780.py:
--------------------------------------------------------------------------------
1 | """Convert TVDB mappings to a dict
2 |
3 | Revision ID: 6b471e97e780
4 | Revises: c5581ec025a7
5 | Create Date: 2025-02-17 20:24:50.010412
6 |
7 | """
8 |
9 | from typing import Sequence, Union
10 |
11 | import sqlalchemy as sa
12 |
13 | from alembic import op
14 |
15 | # revision identifiers, used by Alembic.
16 | revision: str = "6b471e97e780"
17 | down_revision: Union[str, None] = "c5581ec025a7"
18 | branch_labels: Union[str, Sequence[str], None] = None
19 | depends_on: Union[str, Sequence[str], None] = None
20 |
21 |
def upgrade() -> None:
    """Purge all cached mappings so they are re-imported in the dict format."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute("DELETE FROM animap")
    # Removing the stored content hashes forces the next sync to re-fetch
    # and re-import the mappings from scratch.
    op.execute("DELETE FROM house_keeping WHERE key = 'animap_cdn_hash'")
    op.execute("DELETE FROM house_keeping WHERE key = 'animap_custom_hash'")
    # ### end Alembic commands ###


def downgrade() -> None:
    """No-op: the deleted rows cannot be restored."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
34 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Elias Benbourenane
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/web/logs.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Logs
3 | icon: material/file-chart
4 | ---
5 |
6 | 
7 |
8 | The logs page has two tabs for real-time streaming output and archived log files. It connects to `/ws/logs` for live events and `/api/logs/*` for history.
9 |
10 | ## Live Stream
11 |
12 | - The `Live` tab subscribes to the websocket and appends new entries as they happen.
13 | - The toolbar actions let you clear the current buffer, toggle auto-scroll, switch message wrapping, and download the in-memory log slice as a text file.
14 |
15 | ## Log History
16 |
17 | - Switch to the `History` tab to browse archived log files stored on disk.
18 | - Choose how many trailing lines to load (100–2000 or all).
19 | - Wrap toggles and downloads are available here too, letting you export just the excerpt you reviewed.
20 |
21 | ## Search & Filtering
22 |
23 | - The global search box filters both live and history buffers across message text with a case-insensitive match.
24 | - The level selector is ranked (`DEBUG` < `INFO` < `SUCCESS` < `WARNING` < `ERROR`). Selecting a level hides anything less severe so you can focus on warnings or failures.
25 |
--------------------------------------------------------------------------------
/typings/plexapi/settings.pyi:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from xml.etree.ElementTree import Element
3 |
4 | from plexapi.base import PlexObject
5 | from plexapi.server import PlexServer
6 |
class Settings(PlexObject):
    """Type stub for plexapi's container of Plex server settings."""

    def __init__(
        self, server: PlexServer, data: Element, initpath: str | None = None
    ) -> None: ...
    # Individual settings are exposed as dynamic attributes upstream, hence
    # the untyped __getattr__/__setattr__ overrides.
    def __getattr__(self, attr): ...  # TODO: type
    def __setattr__(self, attr, value): ...  # TODO: type
    def all(self) -> list[Setting]: ...
    def get(self, id) -> Setting: ...
    def groups(self) -> dict[str, list[Setting]]: ...
    def group(self, group) -> list[Setting]: ...
    def save(self) -> None: ...
18 |
class Setting(PlexObject):
    """Type stub for a single Plex server setting entry."""

    TYPES: dict  # TODO: type
    type: str
    advanced: bool
    default: Any
    enumValues: Any
    group: str
    hidden: bool
    id: str
    label: str
    option: str
    secure: bool
    summary: str
    value: Any
    _setValue: Any = None
    def set(self, value: Any) -> None: ...
    def toUrl(self) -> str: ...
36 |
class Preferences(Setting):
    """Type stub for a Plex library preference (a filtered Setting)."""

    FILTER: str
39 |
--------------------------------------------------------------------------------
/src/models/db/pin.py:
--------------------------------------------------------------------------------
1 | """Pin model for per-profile AniList field pinning."""
2 |
3 | from datetime import UTC, datetime
4 |
5 | from sqlalchemy import JSON, DateTime, Index, Integer, String
6 | from sqlalchemy.orm import Mapped, mapped_column
7 |
8 | from src.models.db.base import Base
9 |
10 | __all__ = ["Pin"]
11 |
12 |
class Pin(Base):
    """Model representing pinned AniList fields for a profile entry.

    At most one pin row exists per (profile_name, anilist_id) pair, enforced
    by the unique composite index below.
    """

    __tablename__ = "pin"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Name of the profile that owns this pin.
    profile_name: Mapped[str] = mapped_column(String, index=True)
    # AniList media ID the pinned fields apply to.
    anilist_id: Mapped[int] = mapped_column(Integer, index=True)
    # Names of the pinned AniList fields, stored as a JSON list.
    fields: Mapped[list[str]] = mapped_column(JSON, default=list)
    # Row creation time (timezone-aware UTC).
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(UTC)
    )
    # Last modification time (UTC); refreshed automatically on update.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=lambda: datetime.now(UTC),
        onupdate=lambda: datetime.now(UTC),
    )

    __table_args__ = (
        Index("ix_pin_profile_anilist", "profile_name", "anilist_id", unique=True),
    )
34 |
--------------------------------------------------------------------------------
/frontend/src/lib/utils/pin-options.ts:
--------------------------------------------------------------------------------
1 | import type { PinFieldOption, PinOptionsResponse } from "$lib/types/api";
2 | import { apiFetch } from "$lib/utils/api";
3 |
4 | interface PinOptionsState {
5 | cache: PinFieldOption[] | null;
6 | inflight: Promise | null;
7 | }
8 |
9 | const state: PinOptionsState = { cache: null, inflight: null };
10 |
11 | export async function loadPinOptions(force = false): Promise {
12 | if (!force && state.cache) return [...state.cache];
13 | if (!force && state.inflight) return state.inflight;
14 |
15 | state.inflight = (async () => {
16 | const res = await apiFetch("/api/pins/fields", undefined, { silent: true });
17 | if (!res.ok) throw new Error(`HTTP ${res.status}`);
18 | const payload = (await res.json()) as PinOptionsResponse;
19 | const options = payload.options ?? [];
20 | state.cache = [...options];
21 | return [...options];
22 | })();
23 |
24 | try {
25 | return await state.inflight;
26 | } finally {
27 | state.inflight = null;
28 | }
29 | }
30 |
/** Drop the cached option list so the next load refetches from the API. */
export function clearPinOptionsCache(): void {
    state.cache = null;
}
34 |
--------------------------------------------------------------------------------
/alembic/versions/2025-08-12-06-30_213f9be1534f.py:
--------------------------------------------------------------------------------
1 | """sync_history composite indexes
2 |
3 | Revision ID: 213f9be1534f
4 | Revises: cd371e53adcb
5 | Create Date: 2025-08-12 06:30:14.506677
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = '213f9be1534f'
16 | down_revision: Union[str, None] = 'cd371e53adcb'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Add a composite index covering the sync_history upsert lookup keys."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('sync_history', schema=None) as batch_op:
        batch_op.create_index('ix_sync_history_upsert_keys', ['profile_name', 'plex_rating_key', 'plex_child_rating_key', 'plex_type', 'outcome'], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    """Drop the sync_history composite upsert index."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('sync_history', schema=None) as batch_op:
        batch_op.drop_index('ix_sync_history_upsert_keys')

    # ### end Alembic commands ###
35 |
--------------------------------------------------------------------------------
/alembic/versions/2025-02-15-02-10_ddbadb26481f.py:
--------------------------------------------------------------------------------
1 | """AniMap v2 schema migration
2 |
3 | Revision ID: ddbadb26481f
4 | Revises: 424fe94c2c03
5 | Create Date: 2025-02-15 02:10:50.182654
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 | # revision identifiers, used by Alembic.
14 | revision: str = 'ddbadb26481f'
15 | down_revision: Union[str, None] = '424fe94c2c03'
16 | branch_labels: Union[str, Sequence[str], None] = None
17 | depends_on: Union[str, Sequence[str], None] = None
18 |
19 |
def upgrade() -> None:
    """Drop the legacy per-row TVDB season/offset columns from animap."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.drop_column('tvdb_epoffset')
        batch_op.drop_column('tvdb_season')

    # ### end Alembic commands ###


def downgrade() -> None:
    """Re-add the TVDB season/offset columns (dropped data is not restored)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tvdb_season', sa.INTEGER(), nullable=True))
        batch_op.add_column(sa.Column('tvdb_epoffset', sa.INTEGER(), nullable=True))

    # ### end Alembic commands ###
36 |
--------------------------------------------------------------------------------
/src/web/routes/api/__init__.py:
--------------------------------------------------------------------------------
1 | """API routes."""
2 |
3 | from fastapi.routing import APIRouter
4 |
5 | from src.web.routes.api.backups import router as backups_router
6 | from src.web.routes.api.history import router as history_router
7 | from src.web.routes.api.logs import router as logs_history_router
8 | from src.web.routes.api.mappings import router as mappings_router
9 | from src.web.routes.api.pins import router as pins_router
10 | from src.web.routes.api.status import router as status_router
11 | from src.web.routes.api.sync import router as sync_router
12 | from src.web.routes.api.system import router as system_router
13 |
14 | __all__ = ["router"]
15 |
router = APIRouter()


# Aggregate every feature-specific sub-router under a single API router.
# Each area is mounted at its own URL prefix and tagged for the OpenAPI docs.
router.include_router(history_router, prefix="/history", tags=["history"])
router.include_router(backups_router, prefix="/backups", tags=["backups"])
router.include_router(mappings_router, prefix="/mappings", tags=["mappings"])
router.include_router(pins_router, prefix="/pins", tags=["pins"])
router.include_router(logs_history_router, prefix="/logs", tags=["logs"])
router.include_router(status_router, prefix="/status", tags=["status"])
router.include_router(sync_router, prefix="/sync", tags=["sync"])
router.include_router(system_router, prefix="/system", tags=["system"])
27 |
--------------------------------------------------------------------------------
/alembic/versions/2025-02-17-14-29_c5581ec025a7.py:
--------------------------------------------------------------------------------
1 | """TVDB Mappings
2 |
3 | Revision ID: c5581ec025a7
4 | Revises: ddbadb26481f
5 | Create Date: 2025-02-17 14:29:10.113697
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | import sqlalchemy as sa
11 |
12 | from alembic import op
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = 'c5581ec025a7'
16 | down_revision: Union[str, None] = 'ddbadb26481f'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Add the JSON ``tvdb_mappings`` column and its index to animap."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tvdb_mappings', sa.JSON(none_as_null=True), nullable=True))
        batch_op.create_index(batch_op.f('ix_animap_tvdb_mappings'), ['tvdb_mappings'], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    """Drop the ``tvdb_mappings`` column and its index."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_animap_tvdb_mappings'))
        batch_op.drop_column('tvdb_mappings')

    # ### end Alembic commands ###
37 |
--------------------------------------------------------------------------------
/alembic/versions/2025-10-20-10-41_90496c989bdd.py:
--------------------------------------------------------------------------------
1 | """scalar tmdb_show_id
2 |
3 | Revision ID: 90496c989bdd
4 | Revises: db24057a61c9
5 | Create Date: 2025-10-20 10:41:57.362134
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | import sqlalchemy as sa
11 | from sqlalchemy.dialects import sqlite
12 |
13 | from alembic import op
14 |
15 | # revision identifiers, used by Alembic.
16 | revision: str = '90496c989bdd'
17 | down_revision: Union[str, None] = 'db24057a61c9'
18 | branch_labels: Union[str, Sequence[str], None] = None
19 | depends_on: Union[str, Sequence[str], None] = None
20 |
21 |
def upgrade() -> None:
    """Narrow ``animap.tmdb_show_id`` from a JSON list to a scalar integer."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.alter_column('tmdb_show_id',
               existing_type=sqlite.JSON(),
               type_=sa.Integer(),
               existing_nullable=True)

    # ### end Alembic commands ###


def downgrade() -> None:
    """Widen ``animap.tmdb_show_id`` back to a JSON column."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.alter_column('tmdb_show_id',
               existing_type=sa.Integer(),
               type_=sqlite.JSON(),
               existing_nullable=True)

    # ### end Alembic commands ###
42 |
--------------------------------------------------------------------------------
/alembic/versions/2025-03-24-02-37_e89ead9178d7.py:
--------------------------------------------------------------------------------
1 | """Delete unutilized indexes
2 |
3 | Revision ID: e89ead9178d7
4 | Revises: 6b471e97e780
5 | Create Date: 2025-03-24 02:37:18.107721
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | import sqlalchemy as sa
11 |
12 | from alembic import op
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = 'e89ead9178d7'
16 | down_revision: Union[str, None] = '6b471e97e780'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Drop single-column animap indexes that query plans no longer use."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.drop_index('ix_animap_anidb_id')
        batch_op.drop_index('ix_animap_mal_id')
        batch_op.drop_index('ix_animap_tvdb_mappings')

    # ### end Alembic commands ###


def downgrade() -> None:
    """Recreate the dropped single-column animap indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.create_index('ix_animap_tvdb_mappings', ['tvdb_mappings'], unique=False)
        batch_op.create_index('ix_animap_mal_id', ['mal_id'], unique=False)
        batch_op.create_index('ix_animap_anidb_id', ['anidb_id'], unique=False)

    # ### end Alembic commands ###
39 |
--------------------------------------------------------------------------------
/tests/web/test_mappings_service_filters.py:
--------------------------------------------------------------------------------
1 | """Tests covering AniList filter construction for mappings service."""
2 |
3 | import pytest
4 |
5 | from src.exceptions import AniListFilterError
6 | from src.web.services.mappings_query_spec import get_query_field_map
7 | from src.web.services.mappings_service import MappingsService
8 |
9 |
# Module scope: one service instance is shared by all tests in this file.
@pytest.fixture(scope="module")
def mappings_service() -> MappingsService:
    """Provide a MappingsService instance for filter tests."""
    return MappingsService()
14 |
15 |
def test_build_anilist_filters_multi_enum(mappings_service: MappingsService) -> None:
    """Multiple AniList enum values should resolve and deduplicate."""
    spec = get_query_field_map()["anilist.format"]
    # Values are case-insensitive and repeated entries collapse to one.
    result = mappings_service._build_anilist_term_filters(
        spec, "TV,Movie,TV", ("tv", "Movie", "TV")
    )
    assert result == {"format_in": ["TV", "MOVIE"]}
25 |
26 |
def test_build_anilist_filters_multi_not_supported(
    mappings_service: MappingsService,
) -> None:
    """Fields without *_in support should raise when given multiple values."""
    spec = get_query_field_map()["anilist.title"]
    values = ("Naruto", "Bleach")
    with pytest.raises(AniListFilterError):
        mappings_service._build_anilist_term_filters(spec, "Naruto,Bleach", values)
38 |
--------------------------------------------------------------------------------
/alembic/versions/2025-09-30-04-03_12add4c4ffa9.py:
--------------------------------------------------------------------------------
1 | """tmdb_mappings column
2 |
3 | Revision ID: 12add4c4ffa9
4 | Revises: 8387b3fd8a6a
5 | Create Date: 2025-09-30 04:03:04.619505
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = '12add4c4ffa9'
16 | down_revision: Union[str, None] = '8387b3fd8a6a'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
21 | def upgrade() -> None:
22 | # ### commands auto generated by Alembic - please adjust! ###
23 | with op.batch_alter_table('animap', schema=None) as batch_op:
24 | batch_op.add_column(sa.Column('tmdb_mappings', sa.JSON(), nullable=True))
25 | batch_op.create_index('idx_tmdb_season', ['tmdb_show_id', 'tmdb_mappings'], unique=False)
26 | batch_op.create_index(batch_op.f('ix_animap_tmdb_mappings'), ['tmdb_mappings'], unique=False)
27 |
28 | # ### end Alembic commands ###
29 |
30 |
def downgrade() -> None:
    """Revert the migration: drop both indexes, then the column itself."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_animap_tmdb_mappings'))
        batch_op.drop_index('idx_tmdb_season')
        batch_op.drop_column('tmdb_mappings')

    # ### end Alembic commands ###
39 |
--------------------------------------------------------------------------------
/alembic/versions/2025-06-26-05-56_08f39c25b391.py:
--------------------------------------------------------------------------------
1 | """Add composite mapping indexes
2 |
3 | Revision ID: 08f39c25b391
4 | Revises: e89ead9178d7
5 | Create Date: 2025-06-26 05:56:19.495133
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | import sqlalchemy as sa
11 |
12 | from alembic import op
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = '08f39c25b391'
16 | down_revision: Union[str, None] = 'e89ead9178d7'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create composite and single-column mapping indexes on ``animap``."""
    # ### commands auto generated by Alembic - please adjust! ###
    # batch_alter_table rebuilds the table, which SQLite needs for ALTER ops.
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.create_index('idx_imdb_tmdb', ['imdb_id', 'tmdb_movie_id'], unique=False)
        batch_op.create_index('idx_tvdb_season', ['tvdb_id', 'tvdb_mappings'], unique=False)
        batch_op.create_index(batch_op.f('ix_animap_tvdb_mappings'), ['tvdb_mappings'], unique=False)

    # ### end Alembic commands ###
29 |
30 |
def downgrade() -> None:
    """Revert the migration: drop the three indexes added in ``upgrade``."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_animap_tvdb_mappings'))
        batch_op.drop_index('idx_tvdb_season')
        batch_op.drop_index('idx_imdb_tmdb')

    # ### end Alembic commands ###
39 |
--------------------------------------------------------------------------------
/frontend/src/lib/components/mappings/capabilities-cache.ts:
--------------------------------------------------------------------------------
1 | import { apiJson } from "$lib/utils/api";
2 |
3 | export type OperatorToken = "=" | ">" | ">=" | "<" | "<=" | "*" | "?" | "in" | "range";
4 |
5 | export type FieldCapability = {
6 | key: string;
7 | aliases?: string[];
8 | type: "int" | "string" | "enum" | string;
9 | operators: OperatorToken[];
10 | values?: string[] | null;
11 | desc?: string | null;
12 | };
13 |
14 | type CapabilitiesResponse = { fields: FieldCapability[] };
15 |
16 | let cachedCapabilities: FieldCapability[] | null | undefined;
17 | let inFlight: Promise | null = null;
18 |
19 | export async function loadCapabilities(): Promise {
20 | if (cachedCapabilities !== undefined) {
21 | return cachedCapabilities;
22 | }
23 |
24 | if (!inFlight) {
25 | inFlight = apiJson("/api/mappings/query-capabilities")
26 | .then((res) => {
27 | if (res && Array.isArray(res.fields)) {
28 | cachedCapabilities = res.fields as FieldCapability[];
29 | } else {
30 | cachedCapabilities = null;
31 | }
32 | return cachedCapabilities;
33 | })
34 | .catch((error) => {
35 | cachedCapabilities = undefined;
36 | inFlight = null;
37 | throw error;
38 | });
39 | }
40 |
41 | return inFlight;
42 | }
43 |
--------------------------------------------------------------------------------
/src/web/routes/ws/status.py:
--------------------------------------------------------------------------------
1 | """Websocket endpoint for periodic status snapshots."""
2 |
3 | import asyncio
4 |
5 | from fastapi.routing import APIRouter
6 | from fastapi.websockets import WebSocket, WebSocketDisconnect
7 |
8 | from src.web.state import get_app_state
9 |
10 | __all__ = ["router"]
11 |
12 | router = APIRouter()
13 |
14 |
@router.websocket("")
async def status_ws(ws: WebSocket) -> None:
    """Websocket endpoint for periodic status snapshots.

    Pushes a ``{"profiles": ...}`` payload in a loop, speeding up the poll
    interval from 5s to 0.5s while any profile reports a running sync.

    Args:
        ws (WebSocket): The WebSocket connection instance.
    """
    await ws.accept()
    try:
        while True:
            # Re-read app state each iteration so a scheduler created or torn
            # down after connect is picked up without a reconnect.
            scheduler = get_app_state().scheduler
            data = (
                {"profiles": await scheduler.get_status()}
                if scheduler
                else {"profiles": {}}
            )
            await ws.send_json(data)

            # If any profile reports an active current_sync, increase refresh rate
            refresh = 5.0
            try:
                profiles = data.get("profiles", {})
                if any(
                    (p.get("status", {}).get("current_sync") or {}).get("state")
                    == "running"
                    for p in profiles.values()
                ):
                    refresh = 0.5
            except Exception:
                # Best-effort inspection only: a malformed status payload falls
                # back to the slow refresh rate instead of killing the socket.
                pass

            await asyncio.sleep(refresh)
    except WebSocketDisconnect:
        # Client went away; nothing to clean up.
        pass
49 |
--------------------------------------------------------------------------------
/.github/workflows/docs-publish.yml:
--------------------------------------------------------------------------------
1 | name: MkDocs
2 |
3 | on:
4 | push:
5 | branches: [main]
6 | paths:
7 | - "data/mappings.example.*"
8 | - "docs/**"
9 | - "mkdocs.yml"
10 | - ".env.example"
11 | workflow_dispatch:
12 |
13 | concurrency:
14 | group: ${{ github.workflow }}-${{ github.ref }}
15 | cancel-in-progress: true
16 |
17 | jobs:
18 | build-and-publish:
19 | runs-on: ubuntu-latest
20 |
21 | permissions:
22 | contents: write
23 |
24 | steps:
25 | - name: Checkout Repository
26 | uses: actions/checkout@v5
27 |
28 | - name: Configure Git Credentials
29 | run: |
30 | git config --local user.email "action@github.com"
31 | git config --local user.name "GitHub Action"
32 |
33 | - name: Setup uv
34 | uses: astral-sh/setup-uv@v7
35 |
36 | - name: Determine Cache ID
37 | run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
38 |
39 | - name: Setup Cache
40 | uses: actions/cache@v4
41 | with:
42 | key: mkdocs-material-${{ env.cache_id }}
43 | path: .cache
44 | restore-keys: |
45 | mkdocs-material-
46 |
47 | - name: Install Dependencies
48 | run: uv sync --package docs --frozen
49 |
50 | - name: Build and Deploy Docs
51 | run: uv run mkdocs gh-deploy --force
52 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "PlexAniBridge",
3 | "image": "mcr.microsoft.com/devcontainers/base:ubuntu-24.04",
4 | "features": {
5 | "ghcr.io/devcontainers/features/node:1": {
6 | "version": "25"
7 | },
8 | "ghcr.io/devcontainers/features/github-cli:1": {},
9 | "ghcr.io/devcontainers-extra/features/apt-packages:1": {
10 | "packages": "sqlite3"
11 | },
12 | "ghcr.io/devcontainers-extra/features/uv": {}
13 | },
14 | "customizations": {
15 | "vscode": {
16 | "extensions": [
17 | "charliermarsh.ruff",
18 | "dbaeumer.vscode-eslint",
19 | "eamodio.gitlens",
20 | "esbenp.prettier-vscode",
21 | "ms-python.python",
22 | "rafaelha.vscode-flamegraph",
23 | "redhat.vscode-yaml",
24 | "tamasfe.even-better-toml",
25 | "yy0931.vscode-sqlite3-editor",
26 | "yzhang.markdown-all-in-one",
27 | "svelte.svelte-vscode",
28 | "bradlc.vscode-tailwindcss"
29 | ]
30 | }
31 | },
32 | "forwardPorts": [4848, 5173],
33 | "portsAttributes": {
34 | "4848": {
35 | "label": "PlexAniBridge Server",
36 | "onAutoForward": "notify"
37 | },
38 | "5173": {
39 | "label": "PlexAniBridge Frontend",
40 | "onAutoForward": "notify"
41 | }
42 | },
43 | "postCreateCommand": "uv run pab-deps-install",
44 | "containerEnv": {
45 | "UV_LINK_MODE": "copy"
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/frontend/eslint.config.js:
--------------------------------------------------------------------------------
1 | import { fileURLToPath } from "node:url";
2 | import { includeIgnoreFile } from "@eslint/compat";
3 | import js from "@eslint/js";
4 | import prettier from "eslint-config-prettier";
5 | import svelte from "eslint-plugin-svelte";
6 | import { defineConfig } from "eslint/config";
7 | import globals from "globals";
8 | import ts from "typescript-eslint";
9 |
10 | import svelteConfig from "./svelte.config.js";
11 |
// Resolve .gitignore relative to this config so version-control ignores are
// also excluded from linting.
const gitignorePath = fileURLToPath(new URL("./.gitignore", import.meta.url));

export default defineConfig(
  includeIgnoreFile(gitignorePath),
  js.configs.recommended,
  ...ts.configs.recommended,
  ...svelte.configs.recommended,
  // Prettier configs come after the rule sets so they can disable any
  // formatting rules that would conflict with Prettier's output.
  prettier,
  ...svelte.configs.prettier,
  {
    // Code runs both in the browser (frontend) and in Node (config/tooling).
    languageOptions: { globals: { ...globals.browser, ...globals.node } },
    rules: {
      // typescript-eslint strongly recommend that you do not use the no-undef lint rule on TypeScript projects.
      // see: https://typescript-eslint.io/troubleshooting/faqs/eslint/#i-get-errors-from-the-no-undef-rule-about-global-variables-not-being-defined-even-though-there-are-no-typescript-errors
      "no-undef": "off",
      "no-empty": "off",
    },
  },
  {
    // Svelte files need the TS parser wired through svelte-eslint-parser.
    files: ["**/*.svelte", "**/*.svelte.ts", "**/*.svelte.js"],
    languageOptions: {
      parserOptions: {
        projectService: true,
        extraFileExtensions: [".svelte"],
        parser: ts.parser,
        svelteConfig,
      },
    },
  },
);
42 |
--------------------------------------------------------------------------------
/frontend/src/lib/utils/anilist.ts:
--------------------------------------------------------------------------------
1 | import { writable } from "svelte/store";
2 |
3 | import type { MediaTitle } from "$lib/types/anilist";
4 |
5 | export type TitleLanguage = "romaji" | "english" | "native" | "userPreferred";
6 |
7 | const STORAGE_KEY = "anilist.lang";
8 | const DEFAULT_LANG: TitleLanguage = "romaji";
9 |
10 | function loadInitial(): TitleLanguage {
11 | try {
12 | const v = localStorage.getItem(STORAGE_KEY) as TitleLanguage | null;
13 | if (
14 | v === "romaji" ||
15 | v === "english" ||
16 | v === "native" ||
17 | v === "userPreferred"
18 | )
19 | return v;
20 | } catch {}
21 | return DEFAULT_LANG;
22 | }
23 |
// Module-level snapshot of the active language so `preferredTitle` can read
// it synchronously without a per-call store subscription.
let currentLang: TitleLanguage = loadInitial();
export const anilistTitleLang = writable(currentLang);

// Keep the snapshot in sync and best-effort persist the choice; storage
// failures (e.g. SSR or blocked storage) are deliberately ignored.
anilistTitleLang.subscribe((v) => {
  currentLang = v;
  try {
    localStorage.setItem(STORAGE_KEY, v);
  } catch {}
});

/** Update the preferred AniList title language application-wide. */
export function setAniListTitleLang(lang: TitleLanguage) {
  anilistTitleLang.set(lang);
}
37 |
38 | export function preferredTitle(t?: MediaTitle | null): string | null {
39 | if (!t) return null;
40 | const baseOrder: TitleLanguage[] = ["romaji", "english", "native"];
41 | let order: TitleLanguage[];
42 | if (currentLang === "userPreferred") order = ["userPreferred", ...baseOrder];
43 | else order = [currentLang, ...baseOrder.filter((l) => l !== currentLang)];
44 | for (const key of order) {
45 | const val = (t as Record)[key];
46 | if (val) return val;
47 | }
48 | return null;
49 | }
50 |
--------------------------------------------------------------------------------
/frontend/src/lib/types/anilist.ts:
--------------------------------------------------------------------------------
/** AniList media category. */
export type MediaType = "ANIME" | "MANGA";

/** Release format of a media entry (TV, movie, OVA, manga, ...). */
export type MediaFormat =
  | "TV"
  | "TV_SHORT"
  | "MOVIE"
  | "SPECIAL"
  | "OVA"
  | "ONA"
  | "MUSIC"
  | "MANGA"
  | "NOVEL"
  | "ONE_SHOT";

/** Publication/airing status of a media entry. */
export type MediaStatus =
  | "FINISHED"
  | "RELEASING"
  | "NOT_YET_RELEASED"
  | "CANCELLED"
  | "HIATUS";

/** Airing season of a media entry. */
export type MediaSeason = "WINTER" | "SPRING" | "SUMMER" | "FALL";

/** Title variants of a media entry; any of them may be absent. */
export interface MediaTitle {
  romaji?: string | null;
  english?: string | null;
  native?: string | null;
  userPreferred?: string | null;
}

/** Cover art URLs at various sizes plus a dominant accent color. */
export interface MediaCoverImage {
  extraLarge?: string | null;
  large?: string | null;
  medium?: string | null;
  color?: string | null;
}

/**
 * Subset of the AniList `Media` object used by this app, without the user's
 * list entry. Commented-out fields exist upstream but are not fetched.
 */
export interface MediaWithoutList {
  id: number;
  // id_mal?: number | null;
  // type?: MediaType | null;
  format?: MediaFormat | null;
  status?: MediaStatus | null;
  season?: MediaSeason | null;
  seasonYear?: number | null;
  episodes?: number | null;
  duration?: number | null;
  coverImage?: MediaCoverImage | null;
  // bannerImage?: string | null;
  // synonyms?: string[] | null;
  // isLocked?: boolean | null;
  isAdult?: boolean | null;
  title?: MediaTitle | null;
  // startDate?: FuzzyDate | null;
  // endDate?: FuzzyDate | null;
  // nextAiringEpisode?: AiringSchedule | null;
}

/** Media plus the user's list-entry id, when the user has the entry listed. */
export interface Media extends MediaWithoutList {
  media_list_entry?: { id: number };
}
--------------------------------------------------------------------------------
/src/web/routes/ws/history.py:
--------------------------------------------------------------------------------
1 | """WebSocket endpoint for real-time timeline updates."""
2 |
3 | import asyncio
4 |
5 | from fastapi.routing import APIRouter
6 | from fastapi.websockets import WebSocket, WebSocketDisconnect
7 |
8 | from src.web.services.history_service import get_history_service
9 |
10 | __all__ = ["router"]
11 |
12 | router = APIRouter()
13 |
14 |
@router.websocket("/{profile}")
async def history_websocket(websocket: WebSocket, profile: str) -> None:
    """Stream live history updates to client.

    Polls for changes every 5 seconds and pushes updates when items change.

    Args:
        websocket (WebSocket): The accepted WebSocket connection.
        profile (str): Name of the profile whose history is streamed.
    """
    await websocket.accept()

    # IDs of the items sent in the previous push; used as a cheap change check.
    last_item_ids: set[int] = set()

    try:
        while True:
            # Always fetch the first page (25 newest items, all outcomes).
            page_data = await get_history_service().get_page(
                profile=profile, page=1, per_page=25, outcome=None
            )

            # Check if items have changed
            current_ids = {item.id for item in page_data.items}

            if current_ids != last_item_ids:
                last_item_ids = current_ids

                await websocket.send_json(
                    {
                        "items": [
                            item.model_dump(mode="json") for item in page_data.items
                        ],
                        "stats": page_data.stats,
                        "profile": profile,
                        "total": page_data.total,
                    }
                )

            await asyncio.sleep(5)

    except WebSocketDisconnect:
        pass
    except Exception:
        # Best-effort close on unexpected errors. NOTE(review): the error is
        # silently discarded here — consider logging before closing.
        await websocket.close()
54 |
--------------------------------------------------------------------------------
/src/utils/requests.py:
--------------------------------------------------------------------------------
1 | """Selective SSL Verification for Requests Module."""
2 |
3 | import warnings
4 | from urllib.parse import urlparse
5 |
6 | import requests
7 | from urllib3.exceptions import InsecureRequestWarning
8 |
9 | from src import log
10 |
11 | __all__ = ["SelectiveVerifySession"]
12 |
13 |
class SelectiveVerifySession(requests.Session):
    """Session that selectively disables SSL verification for whitelisted domains."""

    def __init__(self, whitelist=None) -> None:
        """Initialize the session with a whitelist of domains.

        Args:
            whitelist: Iterable of hostnames for which SSL certificate
                verification is skipped. Defaults to no hosts.
        """
        super().__init__()
        self.whitelist = set(whitelist or [])
        if self.whitelist:
            log.debug(
                "SSL verify disabled for domains: "
                + ", ".join([f"$$'{d}'$$" for d in sorted(self.whitelist)])
            )

    def request(self, method, url, *args, **kwargs):
        """Override the request method to selectively disable SSL verification.

        For hosts in the whitelist, forces ``verify=False`` and suppresses the
        resulting ``InsecureRequestWarning``; all other hosts use the default
        verification behavior. Request errors on the whitelisted path are
        logged with traceback and re-raised.
        """
        domain = urlparse(url).hostname
        # Disable SSL verification for whitelisted domains
        if domain in self.whitelist:
            kwargs["verify"] = False
            # Suppress SSL warnings
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", InsecureRequestWarning)
                try:
                    # FIX: forward positional args on this path too; previously
                    # they were dropped for whitelisted domains.
                    return super().request(method, url, *args, **kwargs)
                except Exception as e:
                    log.error(
                        (f"Error during request to $$'{domain}'$$: {e}"), exc_info=True
                    )
                    raise
        return super().request(method, url, *args, **kwargs)
44 |
--------------------------------------------------------------------------------
/alembic/versions/2025-09-19-00-29_8387b3fd8a6a.py:
--------------------------------------------------------------------------------
1 | """Added provenance tracking table
2 |
3 | Revision ID: 8387b3fd8a6a
4 | Revises: 213f9be1534f
5 | Create Date: 2025-09-19 00:29:08.882127
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = '8387b3fd8a6a'
16 | down_revision: Union[str, None] = '213f9be1534f'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create the ``animap_provenance`` table and clear cached mapping data."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('animap_provenance',
    sa.Column('anilist_id', sa.Integer(), nullable=False),
    sa.Column('n', sa.Integer(), nullable=False),
    sa.Column('source', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['anilist_id'], ['animap.anilist_id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('anilist_id', 'n')
    )
    with op.batch_alter_table('animap_provenance', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_animap_provenance_anilist_id'), ['anilist_id'], unique=False)

    # ### end Alembic commands ###

    # Wipe the cached mappings and their stored hash so the mapping data is
    # re-imported (presumably repopulating provenance) on next run —
    # NOTE(review): inferred from the hash-key name; confirm against the
    # mappings loader.
    op.execute("DELETE FROM animap")
    op.execute("DELETE FROM house_keeping WHERE key = 'animap_mappings_hash'")
37 |
38 |
def downgrade() -> None:
    """Revert the migration: drop the provenance index, then the table."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('animap_provenance', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_animap_provenance_anilist_id'))

    op.drop_table('animap_provenance')
    # ### end Alembic commands ###
46 |
--------------------------------------------------------------------------------
/frontend/src/lib/utils/human.ts:
--------------------------------------------------------------------------------
// Duration units ordered largest-to-smallest; `humanDuration` picks the first
// one whose span fits the input. Values are in seconds.
const DURATION_UNITS = [
  { name: "year", value: 31536000 }, // 365 * 24 * 60 * 60
  { name: "month", value: 2592000 }, // 30 * 24 * 60 * 60
  { name: "week", value: 604800 }, // 7 * 24 * 60 * 60
  { name: "day", value: 86400 }, // 24 * 60 * 60
  { name: "hour", value: 3600 }, // 60 * 60
  { name: "minute", value: 60 },
  { name: "second", value: 1 },
];

// Byte-size units ordered smallest-to-largest; values are each unit's size in
// bytes (powers of 1024).
const SIZE_UNITS = [
  { name: "B", value: 1 },
  { name: "KB", value: 1024 },
  { name: "MB", value: 1048576 }, // 1024 * 1024
  { name: "GB", value: 1073741824 }, // 1024 * 1024 * 1024
  { name: "TB", value: 1099511627776 }, // 1024 * 1024 * 1024 * 1024
];
18 |
19 | export const humanDuration = (seconds: number): string => {
20 | const abs = Math.abs(Math.floor(seconds));
21 | if (abs === 0) return "0 seconds";
22 | const unit = DURATION_UNITS.find((u) => abs >= u.value)!;
23 | const count = Math.floor(abs / unit.value);
24 | return `${count} ${unit.name}${count === 1 ? "" : "s"}`;
25 | };
26 |
27 | export const humanSize = (size: number): string => {
28 | const abs = Math.abs(size);
29 | if (abs === 0) return "0 B";
30 | const unit = SIZE_UNITS.find((u) => abs < u.value * 1024)!;
31 | const count = size / unit.value;
32 | return `${count.toFixed(2)} ${unit.name}`;
33 | };
34 |
35 | export const humanTimestamp = (timestamp: string | number | Date): string => {
36 | const date = new Date(timestamp);
37 | return date.toLocaleString(undefined, {
38 | year: "numeric",
39 | month: "short",
40 | day: "2-digit",
41 | hour: "2-digit",
42 | minute: "2-digit",
43 | second: "2-digit",
44 | });
45 | };
46 |
--------------------------------------------------------------------------------
/docs/compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | plexanibridge:
3 | image: ghcr.io/eliasbenb/plexanibridge:v1 # :vX, :vX.Y, :vX.Y.Z, :main, :develop, :experimental, :latest
4 | environment:
5 | PUID: 1000
6 | PGID: 1000
7 | UMASK: 022
8 | TZ: "Etc/UTC"
9 | PAB_ANILIST_TOKEN: ...
10 | PAB_PLEX_TOKEN: ...
11 | PAB_PLEX_USER: ...
12 | PAB_PLEX_URL: ...
13 | # PAB_PLEX_SECTIONS: '[]'
14 | # PAB_PLEX_GENRES: '[]'
15 | # PAB_PLEX_METADATA_SOURCE: "local"
16 | # PAB_SYNC_INTERVAL: 86400
17 | # PAB_SYNC_MODES: '["periodic", "poll", "webhook"]'
18 | # PAB_FULL_SCAN: false
19 | # PAB_DESTRUCTIVE_SYNC: false
20 | # PAB_EXCLUDED_SYNC_FIELDS: '["notes", "score"]'
21 | # PAB_DRY_RUN: false
22 | # PAB_BACKUP_RETENTION_DAYS: 30
23 | # PAB_BATCH_REQUESTS: false
      # PAB_SEARCH_FALLBACK_THRESHOLD: -1
      # PAB_PROFILES__example__$FIELD: $VALUE
26 | # PAB_DATA_PATH: "/config"
27 | # PAB_LOG_LEVEL: "INFO"
28 | # PAB_MAPPINGS_URL: "https://raw.githubusercontent.com/eliasbenb/PlexAniBridge-Mappings/v2/mappings.json"
29 | # PAB_WEB_ENABLED: true
30 | # PAB_WEB_HOST: "0.0.0.0"
31 | # PAB_WEB_PORT: 4848
32 | # PAB_WEB_BASIC_AUTH_USERNAME: null
33 | # PAB_WEB_BASIC_AUTH_PASSWORD: null
34 | # PAB_WEB_BASIC_AUTH_HTPASSWD_PATH: null
35 | # PAB_WEB_BASIC_AUTH_REALM: "PlexAniBridge"
36 | volumes:
37 | - /path/to/plexanibridge/data:/config
38 | ports:
39 | - 4848:4848
40 | restart: unless-stopped
41 |
--------------------------------------------------------------------------------
/docs/web/mappings.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Mappings
3 | icon: material/table-large
4 | ---
5 |
6 | 
7 |
8 | The mappings page lets you browse and manage the AniList ↔ Plex mapping database. You can search existing records with a powerful query language and manage mapping overrides.
9 |
10 | ## Searching & Filtering
11 |
12 | - The search bar uses the [Mappings Query Language](../mappings/mappings-querying-language.md) with autocomplete hints as you type.
13 | - Some useful queries to try: `"One Piece"`, `anilist:12345`, `tvdb:67890`, `has:tvdb_mappings`.
14 | - Press `Enter` or click the search icon to run a query.
- Toggle `Custom Only` to limit results to entries sourced from your [custom mappings](../mappings/custom-mappings.md) (`mappings.custom.(json|yaml|yml)`).
16 |
17 | ## Table Controls
18 |
19 | - Column visibility can be set by clicking the eye icon popover; toggles persist in local storage.
20 | - Drag column separators to resize widths as needed; column widths persist in local storage.
21 | - Use `Show All`, `Hide All`, or `(reset)` inside the eye icon popover to restore the default column layout.
22 | - Pagination controls (page navigation, per page size) sit at the foot of the table.
23 |
24 | ## Editing & Creating Overrides
25 |
26 | - `New Override` (plus icon in toolbar) opens the editor with a blank template; `Edit` in a row pre-fills fields from the current mapping.
27 | - The override editor has two tabs: Form and Raw JSON; Form mode exposes all supported identifiers with per-field modes (`omit`, `null`, `value`) and helper controls for TMDB/TVDB season mappings.
28 | - Switch to `Raw JSON` for a schema-backed Monaco editor that validates against the override schema. Changes sync back to the form when you return.
29 |
--------------------------------------------------------------------------------
/frontend/src/lib/components/json-code-block.svelte:
--------------------------------------------------------------------------------
1 |
33 |
34 |
39 |
40 |
41 | {@html highlightJson(value ?? {})}
42 |
43 |
44 |
--------------------------------------------------------------------------------
/frontend/src/app.css:
--------------------------------------------------------------------------------
1 | @import "tailwindcss";
2 |
3 | @theme {
4 | --color-bg: #05070d;
5 | --color-bg-alt: #0f1522;
6 | --color-surface: #141c2b;
7 | --color-surface-alt: #1d293b;
8 | --color-border: #243347;
9 | --color-accent: #3b82f6;
10 | --color-accent-muted: #1d4ed8;
11 | --color-accent-foreground: #e3f2ff;
12 | --color-danger: #ef4444;
13 | --color-warning: #f59e0b;
14 | --color-success: #10b981;
15 | }
16 |
17 | html {
18 | color-scheme: dark;
19 | }
20 |
21 | body {
22 | background: var(--color-bg);
23 | color: var(--color-accent-foreground);
24 | }
25 |
26 | * {
27 | scrollbar-width: thin;
28 | scrollbar-color: #334155 #020617;
29 | }
30 | *::-webkit-scrollbar {
31 | width: 10px;
32 | height: 10px;
33 | }
34 | *::-webkit-scrollbar-track {
35 | background: #020617;
36 | }
37 | *::-webkit-scrollbar-thumb {
38 | background: #334155;
39 | border-radius: 999px;
40 | }
41 | *::-webkit-scrollbar-thumb:hover {
42 | background: #475569;
43 | }
44 |
45 | .btn-base {
46 | @apply inline-flex items-center gap-2 rounded-md px-3 py-1.5 text-sm font-medium transition-colors;
47 | }
48 | .nav-link {
49 | @apply flex items-center gap-2 rounded-md px-3 py-2 text-sm font-medium whitespace-nowrap text-slate-400 transition-colors hover:bg-slate-700/40 hover:text-white;
50 | }
51 | .nav-link-active {
52 | @apply bg-slate-700/50 text-white;
53 | }
54 | .nav-link[aria-current="page"] {
55 | @apply bg-slate-800/70 text-slate-100 ring-1 ring-slate-700/70;
56 | }
57 | .badge {
58 | @apply inline-flex items-center rounded-full border border-slate-700 bg-slate-800/60 px-2 py-0.5 text-[11px] font-medium tracking-wider text-slate-300 uppercase;
59 | }
60 | .fade-in {
61 | animation: fade-in 0.35s ease-out;
62 | }
63 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.yml:
--------------------------------------------------------------------------------
1 | name: Feature Request
2 | description: "Suggest an idea for PlexAniBridge"
3 | labels: ["enhancement"]
4 | body:
5 | - type: checkboxes
6 | attributes:
7 | label: Is there an existing issue for this?
8 | description: Please search to see if an open or closed issue already exists for the feature you are requesting. If a feature request exists and it is closed as complete it may not yet be in a stable release.
9 | options:
10 | - label: I have searched the existing open and closed issues
11 | required: true
12 | - type: textarea
13 | attributes:
14 | label: Is your feature request related to a problem? Please describe
15 | description: A clear and concise description of what the problem is.
16 | validations:
17 | required: true
18 | - type: textarea
19 | attributes:
20 | label: Describe the solution you'd like
21 | description: A clear and concise description of what you want to happen.
22 | validations:
23 | required: true
24 | - type: textarea
25 | attributes:
26 | label: Describe alternatives you've considered
27 | description: A clear and concise description of any alternative solutions or features you've considered.
28 | validations:
29 | required: true
30 | - type: textarea
31 | attributes:
32 | label: Anything else?
33 | description: |
          Links? References? Mockups? Anything that will give us more context about the feature you are requesting!
35 | Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
36 | validations:
37 | required: false
38 |
--------------------------------------------------------------------------------
/alembic/env.py:
--------------------------------------------------------------------------------
1 | """Alembic environment script for managing database migrations."""
2 |
3 | import pathlib
4 | import sys
5 | from logging.config import fileConfig
6 |
7 | from sqlalchemy import create_engine
8 |
9 | from alembic import context
10 |
11 | sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent))
12 |
13 | import src.models.db
14 | from src.config.settings import get_config
15 |
# Alembic runtime configuration, populated from alembic.ini.
config = context.config

# Configure Python logging from the ini file when one is in use.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Autogenerate diffs against the application's declarative metadata.
target_metadata = src.models.db.Base.metadata

# Point Alembic at the app's SQLite database inside the configured data path.
db_url = f"sqlite:///{get_config().data_path / 'plexanibridge.db'}"
config.set_main_option("sqlalchemy.url", db_url)
25 |
26 |
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Emits migration SQL without connecting to the database.
    """
    context.configure(
        url=db_url,
        target_metadata=target_metadata,
        literal_binds=True,
        compare_type=True,
        render_as_batch=True,  # needed so SQLite ALTERs render as table rebuilds
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
38 | context.run_migrations()
39 |
40 |
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Creates an engine, opens a connection, and executes migrations against it.
    """
    connectable = create_engine(
        db_url,
        echo=False,
        future=True,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
            render_as_batch=True,  # needed so SQLite ALTERs render as table rebuilds
        )

        with context.begin_transaction():
            context.run_migrations()
59 |
60 |
61 | if context.is_offline_mode():
62 | run_migrations_offline()
63 | else:
64 | run_migrations_online()
65 |
--------------------------------------------------------------------------------
/scripts/openapi.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Export the FastAPI OpenAPI schema to a JSON file."""
4 |
5 | import argparse
6 | import json
7 | import sys
8 | from pathlib import Path
9 |
10 | from scripts import __file__ as scripts_file
11 |
12 | ROOT_DIR = Path(scripts_file).parent.parent.resolve()
13 | DEFAULT_OUTPUT = ROOT_DIR / "docs" / "web" / "openapi.json"
14 |
15 |
def build_openapi_json() -> str:
    """Return the OpenAPI specification as a JSON string.

    Returns:
        The OpenAPI specification in JSON format.
    """
    # Imported lazily so importing this module alone does not load the app.
    from src.web.app import create_app

    schema = create_app().openapi()
    return json.dumps(schema, ensure_ascii=True, indent=4, sort_keys=True)
33 |
34 |
def main(argv: list[str] | None = None) -> int:
    """CLI entry point for exporting the OpenAPI schema.

    Args:
        argv: Optional argument list; defaults to ``sys.argv[1:]``.

    Returns:
        Exit code: 0 on success, 1 if schema generation fails.
    """
    parser = argparse.ArgumentParser(
        description="Export the FastAPI OpenAPI schema to docs/web/openapi.json."
    )
    parser.add_argument(
        "--output",
        type=Path,
        default=DEFAULT_OUTPUT,
        help=f"Path to write the schema (default: {DEFAULT_OUTPUT}).",
    )
    parsed = parser.parse_args(argv)
    destination = parsed.output.resolve()

    try:
        document = build_openapi_json()
    except Exception as exc:
        print(f"Failed to generate OpenAPI schema: {exc}", file=sys.stderr)
        return 1

    destination.parent.mkdir(parents=True, exist_ok=True)
    destination.write_text(document + "\n", encoding="utf-8")
    print(f"Wrote OpenAPI schema to {destination}")
    return 0
60 |
61 |
62 | if __name__ == "__main__":
63 | sys.exit(main())
64 |
--------------------------------------------------------------------------------
/alembic/versions/2025-10-08-19-00_db24057a61c9.py:
--------------------------------------------------------------------------------
1 | """pin table
2 |
3 | Revision ID: db24057a61c9
4 | Revises: 12add4c4ffa9
5 | Create Date: 2025-10-08 19:00:30.934679
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = 'db24057a61c9'
16 | down_revision: Union[str, None] = '12add4c4ffa9'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create the ``pin`` table with its per-profile uniqueness index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('pin',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('profile_name', sa.String(), nullable=False),
    sa.Column('anilist_id', sa.Integer(), nullable=False),
    sa.Column('fields', sa.JSON(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('pin', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_pin_anilist_id'), ['anilist_id'], unique=False)
        # Unique: at most one pin per (profile, AniList entry) pair.
        batch_op.create_index('ix_pin_profile_anilist', ['profile_name', 'anilist_id'], unique=True)
        batch_op.create_index(batch_op.f('ix_pin_profile_name'), ['profile_name'], unique=False)

    # ### end Alembic commands ###
38 |
39 |
def downgrade() -> None:
    """Drop the ``pin`` table indexes, then the table itself."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pin', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_pin_profile_name'))
        batch_op.drop_index('ix_pin_profile_anilist')
        batch_op.drop_index(batch_op.f('ix_pin_anilist_id'))

    op.drop_table('pin')
    # ### end Alembic commands ###
--------------------------------------------------------------------------------
/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = alembic
6 |
7 | # template used to generate migration files
8 | # file_template = %%(rev)s_%%(slug)s
9 | file_template = %%(year)d-%%(month).2d-%%(day).2d-%%(hour).2d-%%(minute).2d_%%(rev)s
10 |
11 | # timezone to use when rendering the date
12 | # within the migration file as well as the filename.
13 | # string value is passed to dateutil.tz.gettz()
14 | # leave blank for localtime
15 | # timezone =
16 |
17 | # max length of characters to apply to the
18 | # "slug" field
# truncate_slug_length = 40
20 |
21 | # set to 'true' to run the environment during
22 | # the 'revision' command, regardless of autogenerate
23 | # revision_environment = false
24 |
25 | # set to 'true' to allow .pyc and .pyo files without
26 | # a source .py file to be detected as revisions in the
27 | # versions/ directory
28 | # sourceless = false
29 |
30 | # version location specification; this defaults
31 | # to alembic/versions. When using multiple version
32 | # directories, initial revisions must be specified with --version-path
33 | # version_locations = %(here)s/bar %(here)s/bat alembic/versions
34 |
35 | # the output encoding used when revision files
36 | # are written from script.py.mako
37 | # output_encoding = utf-8
38 |
39 | # Logging configuration
40 | [loggers]
41 | keys = root,sqlalchemy,alembic
42 |
43 | [handlers]
44 | keys = console
45 |
46 | [formatters]
47 | keys = generic
48 |
49 | [logger_root]
50 | level = WARNING
51 | handlers = console
52 | qualname =
53 |
54 | [logger_sqlalchemy]
55 | level = WARNING
56 | handlers =
57 | qualname = sqlalchemy.engine
58 |
59 | [logger_alembic]
60 | level = INFO
61 | handlers =
62 | qualname = alembic
63 |
64 | [handler_console]
65 | class = StreamHandler
66 | args = (sys.stderr,)
67 | level = NOTSET
68 | formatter = generic
69 |
70 | [formatter_generic]
71 | format = %(asctime)s - %(name)s - %(levelname)s\t%(message)s
72 | datefmt = %Y-%m-%d %H:%M:%S
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "[css]": {
3 | "editor.defaultFormatter": "esbenp.prettier-vscode"
4 | },
5 | "[html]": {
6 | "editor.defaultFormatter": "esbenp.prettier-vscode"
7 | },
8 | "[javascript]": {
9 | "editor.defaultFormatter": "esbenp.prettier-vscode"
10 | },
11 | "[json]": {
12 | "editor.defaultFormatter": "esbenp.prettier-vscode"
13 | },
14 | "[markdown]": {
15 | "editor.defaultFormatter": "esbenp.prettier-vscode"
16 | },
17 | "[python]": {
18 | "editor.defaultFormatter": "charliermarsh.ruff"
19 | },
20 | "[svelte]": {
21 | "editor.defaultFormatter": "esbenp.prettier-vscode"
22 | },
23 | "[tailwindcss]": {
24 | "editor.defaultFormatter": "esbenp.prettier-vscode"
25 | },
26 | "[typescript]": {
27 | "editor.defaultFormatter": "esbenp.prettier-vscode"
28 | },
29 | "[yaml]": {
30 | "editor.defaultFormatter": "esbenp.prettier-vscode",
31 | "editor.tabSize": 4
32 | },
33 | "files.associations": {
34 | "*.css": "tailwindcss"
35 | },
36 | "editor.detectIndentation": false,
37 | "editor.formatOnSave": true,
38 | "editor.insertSpaces": true,
39 | "editor.tabSize": 4,
40 | "eslint.validate": ["javascript", "typescript", "svelte", "html"],
41 | "eslint.workingDirectories": ["./frontend"],
42 | "prettier.enable": true,
43 | "prettier.prettierPath": "./frontend/node_modules/prettier",
44 | "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
45 | "python.envFile": "",
46 | "python.terminal.activateEnvInCurrentTerminal": true,
47 | "python.testing.pytestArgs": ["tests"],
48 | "python.testing.pytestEnabled": true,
49 | "python.testing.unittestEnabled": false,
50 | "ruff.fixAll": true,
51 | "ruff.importStrategy": "fromEnvironment",
52 | "ruff.organizeImports": true,
53 | "yaml.schemas": {
54 | "https://squidfunk.github.io/mkdocs-material/schema.json": "mkdocs.yml"
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/frontend/src/lib/assets/favicon.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/web/routes/api/status.py:
--------------------------------------------------------------------------------
1 | """API status endpoints."""
2 |
3 | from fastapi.routing import APIRouter
4 | from pydantic import BaseModel
5 |
6 | from src.web.state import get_app_state
7 |
8 | __all__ = [
9 | "ProfileConfigModel",
10 | "ProfileRuntimeStatusModel",
11 | "ProfileStatusModel",
12 | "router",
13 | ]
14 |
15 |
class ProfileConfigModel(BaseModel):
    """Serialized profile configuration exposed to the web UI."""

    plex_user: str | None = None
    anilist_user: str | None = None
    # Interval between scheduled syncs; units are defined by the scheduler
    # configuration (presumably seconds — confirm against the scheduler).
    sync_interval: int | None = None
    # NOTE: mutable default is safe here — pydantic deep-copies field defaults.
    sync_modes: list[str] = []
    full_scan: bool | None = None
    destructive_sync: bool | None = None
25 |
26 |
class ProfileRuntimeStatusModel(BaseModel):
    """Runtime status of a profile exposed to the web UI."""

    running: bool  # whether the profile's sync task is currently active
    last_synced: str | None = None  # last completed sync, serialized as a string
    current_sync: dict | None = None  # details of the in-flight sync, if any
33 |
34 |
class ProfileStatusModel(BaseModel):
    """Combined profile configuration and runtime status exposed to the web UI."""

    config: ProfileConfigModel  # static configuration
    status: ProfileRuntimeStatusModel  # live runtime state
40 |
41 |
class StatusResponse(BaseModel):
    """Response body for the status endpoint, keyed by profile name."""

    profiles: dict[str, ProfileStatusModel]
44 |
45 |
46 | router = APIRouter()
47 |
48 |
@router.get("", response_model=StatusResponse)
async def status() -> StatusResponse:
    """Get the status of the application.

    Returns:
        StatusResponse: Per-profile configuration and runtime status.
            Empty when the scheduler has not been initialized yet.
    """
    scheduler = get_app_state().scheduler
    if not scheduler:
        # No scheduler (e.g. during startup): report zero profiles.
        return StatusResponse(profiles={})
    raw = await scheduler.get_status()
    # Validate each raw entry into the typed models; missing sections
    # default to empty dicts so field defaults apply.
    profiles = {
        name: ProfileStatusModel(
            config=ProfileConfigModel(**data.get("config", {})),
            status=ProfileRuntimeStatusModel(**data.get("status", {})),
        )
        for name, data in raw.items()
    }
    return StatusResponse(profiles=profiles)
68 |
--------------------------------------------------------------------------------
/src/utils/terminal.py:
--------------------------------------------------------------------------------
1 | """Terminal Utilities Module."""
2 |
3 | import locale
4 | import os
5 | import sys
6 | from functools import lru_cache
7 |
8 | import colorama
9 |
10 | __all__ = ["supports_color", "supports_utf8"]
11 |
12 |
@lru_cache(maxsize=1)
def supports_utf8() -> bool:
    """Report whether stdout appears to use a UTF encoding.

    Falls back to the locale's preferred encoding when ``sys.stdout`` does
    not expose one. The result is computed once and cached.

    Returns:
        bool: True if the detected encoding name starts with "utf".
    """
    detected = sys.stdout.encoding
    if not detected:
        detected = locale.getpreferredencoding(False)
    return detected.lower().startswith("utf")
22 |
23 |
@lru_cache(maxsize=1)
def supports_color() -> bool:
    """Check if the terminal supports ANSI color codes.

    Detects if the terminal supports ANSI color codes by checking platform-specific
    conditions and environment variables. On Windows, it also checks the Windows
    registry for the VirtualTerminalLevel key.

    Returns:
        bool: True if the terminal supports color, False otherwise
    """

    def vt_codes_enabled_in_windows_registry():
        """Check HKCU\\Console\\VirtualTerminalLevel for VT processing support."""
        if sys.platform != "win32":
            return False

        try:
            import winreg
        except ImportError:
            return False

        try:
            # Context manager closes the registry key deterministically
            # instead of relying on garbage collection.
            with winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Console") as reg_key:
                reg_key_value, _ = winreg.QueryValueEx(reg_key, "VirtualTerminalLevel")
        except OSError:
            # Covers FileNotFoundError (missing key/value) as well as
            # PermissionError and other registry access failures.
            return False
        return reg_key_value == 1

    is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty()

    # Non-interactive output (pipes, files) never gets color codes.
    if not is_a_tty:
        return False

    if sys.platform == "win32":
        return (
            getattr(colorama, "fixed_windows_console", False)
            or "ANSICON" in os.environ
            or "WT_SESSION" in os.environ  # Windows Terminal
            or os.environ.get("TERM_PROGRAM") == "vscode"
            or vt_codes_enabled_in_windows_registry()
        )

    # Any non-Windows TTY is assumed to understand ANSI escapes.
    return True
67 |
--------------------------------------------------------------------------------
/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "frontend",
3 | "private": true,
4 | "version": "0.1.0",
5 | "type": "module",
6 | "scripts": {
7 | "dev": "vite dev --host",
8 | "build": "svelte-kit sync && vite build",
9 | "preview": "vite preview --host",
10 | "prepare": "svelte-kit sync || echo ''",
11 | "clean": "rm -rf build",
12 | "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
13 | "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
14 | "format": "prettier --write .",
15 | "lint": "prettier --check . && eslint ."
16 | },
17 | "devDependencies": {
18 | "@eslint/compat": "^1.4.1",
19 | "@eslint/js": "^9.39.1",
20 | "@ianvs/prettier-plugin-sort-imports": "^4.7.0",
21 | "@monaco-editor/loader": "^1.6.1",
22 | "@sveltejs/adapter-static": "3.0.10",
23 | "@sveltejs/kit": "2.48.4",
24 | "@sveltejs/vite-plugin-svelte": "6.2.1",
25 | "@tailwindcss/vite": "^4.1.17",
26 | "@types/node": "^24.10.0",
27 | "@vite-pwa/sveltekit": "^1.0.1",
28 | "eslint": "^9.39.1",
29 | "eslint-config-prettier": "^10.1.8",
30 | "eslint-plugin-svelte": "^3.13.0",
31 | "globals": "^16.5.0",
32 | "prettier": "^3.6.2",
33 | "prettier-plugin-svelte": "^3.4.0",
34 | "prettier-plugin-tailwindcss": "^0.7.1",
35 | "svelte": "5.43.2",
36 | "svelte-check": "4.3.3",
37 | "tailwindcss": "^4.1.17",
38 | "typescript": "5.9.3",
39 | "typescript-eslint": "^8.46.4",
40 | "vite": "7.1.12"
41 | },
42 | "dependencies": {
43 | "@lucide/svelte": "^0.552.0",
44 | "bits-ui": "^2.14.3",
45 | "monaco-editor": "^0.54.0",
46 | "tailwind-merge": "^3.4.0"
47 | },
48 | "pnpm": {
49 | "onlyBuiltDependencies": [
50 | "@tailwindcss/oxide",
51 | "esbuild"
52 | ],
53 | "overrides": {
54 | "cookie": "^0.7.2",
55 | "dompurify": "^3.2.7"
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/tests/utils/test_cache.py:
--------------------------------------------------------------------------------
1 | """Tests for caching utilities."""
2 |
3 | import aiocache
4 | import pytest
5 |
6 | from src.utils.cache import gattl_cache, generic_hash, glru_cache, gttl_cache
7 |
8 |
def test_generic_hash_order_insensitive_for_dicts():
    """generic_hash must ignore dict key ordering."""
    first = {"b": [1, 2], "a": {"x": 1}}
    second = {"a": {"x": 1}, "b": [1, 2]}

    assert generic_hash(first) == generic_hash(second)
15 |
16 |
def test_generic_hash_handles_cycles():
    """generic_hash must terminate on self-referential structures."""
    cyclic = []
    cyclic.append(cyclic)

    assert isinstance(generic_hash(cyclic), int)
25 |
26 |
def test_glru_cache_caches_unhashable_arguments():
    """glru_cache must memoize calls whose arguments are unhashable."""
    calls = 0

    @glru_cache(maxsize=8)
    def compute(values):
        nonlocal calls
        calls += 1
        return sum(values)

    # Second call with an equal list must hit the cache.
    for _ in range(2):
        assert compute([1, 2, 3]) == 6
    assert calls == 1
40 |
41 |
def test_gttl_cache_caches_unhashable_arguments():
    """gttl_cache must memoize calls whose arguments are unhashable."""
    calls = 0

    @gttl_cache(maxsize=8, ttl=60)
    def compute(values):
        nonlocal calls
        calls += 1
        return sum(values)

    # Second call with an equal list must hit the cache within the TTL.
    for _ in range(2):
        assert compute([4, 5]) == 9
    assert calls == 1
55 |
56 |
@pytest.mark.asyncio
async def test_gattl_cache_caches_async_functions():
    """gattl_cache must memoize async results for unhashable arguments."""
    calls = 0

    @gattl_cache(ttl=60)
    async def fetch(value):
        nonlocal calls
        calls += 1
        return value

    assert await fetch(["a", "b"]) == ["a", "b"]
    assert await fetch(["a", "b"]) == ["a", "b"]
    assert calls == 1

    # Drop the module-level aiocache registry so later tests start clean.
    aiocache.caches._caches.clear()
73 |
--------------------------------------------------------------------------------
/frontend/src/lib/components/toast-host.svelte:
--------------------------------------------------------------------------------
1 |
20 |
21 |
25 | {#each list as t (t.id)}
26 |
29 |
{t.message}
30 |
37 |
44 |
45 | {/each}
46 |
47 |
48 |
58 |
--------------------------------------------------------------------------------
/docs/web/timeline.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Timeline
3 | icon: material/timeline-clock
4 | ---
5 |
6 | 
7 |
8 | The sync timeline gives you a live feed of Plex to AniList activity for a specific profile. It combines current sync progress, websocket updates, and stored history so you can see exactly what changed, when, and why.
9 |
10 | ## Header Controls
11 |
12 | - `Full Sync` runs a complete rescan of the selected profile.
13 | - `Poll Sync` asks the backend to poll Plex for new or recently changed items without resetting library state.
14 | - When a sync is running the header shows stage, library section, processed counts, and a progress meter.
15 |
16 | ## Outcome Filters & Stats
17 |
18 | - The filter cards summarize counts for `Synced`, `Failed`, `Not Found`, `Deleted`, and `Undone` outcomes.
19 | - Click a card to filter by that outcome; click the `Clear` badge to remove the filter.
20 | - Counts update as history entries stream in, so you can watch failures or retries in real time.
21 |
22 | ## History Feed
23 |
24 | - New entries arrive via the `/ws/history/{profile}` websocket and appear at the top of the feed.
25 | - Infinite scrolling loads older history as you reach the sentinel at the bottom.
26 | - Each card displays AniList and Plex links (when available), library metadata, timestamp, and any backend error message.
27 |
28 | ## Entry Actions
29 |
30 | - `Retry` re-queues a failed or not found item.
31 | - `Undo` reverses successful syncs by returning the AniList entry to its prior state.
32 | - `Delete` removes the history entry from the database (purely cosmetic; does not affect AniList data).
33 |
34 | ## Diff Viewer
35 |
36 | - The diff viewer highlights changes between the `before` and `after` AniList states.
37 | - The default `Changes` tab shows only modified fields with color-coded additions and deletions.
38 | - Use the search box to filter by JSON path, show unchanged values, or switch to the side-by-side `Compare` tab for a holistic view.
39 |
40 | ## Pinning Controls
41 |
42 | - Use `Show pins` on an entry to pick AniList fields that must stay untouched for that title.
43 | - The global `Pins` button opens the pins manager, letting you search AniList titles, review every pinned record, and edit multiple entries without leaving the timeline.
44 |
--------------------------------------------------------------------------------
/src/models/db/base.py:
--------------------------------------------------------------------------------
1 | """Base Model Module."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | from datetime import datetime
7 | from typing import TYPE_CHECKING, Any, Literal
8 |
9 | from sqlalchemy.orm import DeclarativeBase
10 |
11 | from src.exceptions import UnsupportedModeError
12 |
13 | __all__ = ["Base"]
14 |
15 | if TYPE_CHECKING:
16 | from pydantic.main import IncEx
17 |
18 |
19 | def _generic_serialize(obj: Any) -> Any:
20 | """Recursively convert an object to a JSON-serializable format.
21 |
22 | Args:
23 | obj: The object to convert.
24 |
25 | Returns:
26 | A JSON-serializable representation of the object.
27 | """
28 | if obj is None:
29 | return None
30 | if isinstance(obj, datetime):
31 | return obj.isoformat()
32 | return str(obj)
33 |
34 |
class Base(DeclarativeBase):
    """Base class for all database models."""

    def model_dump(
        self,
        *,
        mode: Literal["json", "python"] | str = "python",
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        exclude_none: bool = False,
    ) -> dict[str, Any]:
        """Dump the model fields to a dictionary.

        Imitates the behavior of Pydantic's model_dump method.

        Args:
            mode: "python" returns attribute values as-is; "json" round-trips
                the result through ``json.dumps``/``json.loads`` so every
                value is JSON-safe (non-serializable values go through
                ``_generic_serialize``).
            include: Optional collection of field names to keep.
            exclude: Optional collection of field names to drop.
            exclude_none: When True, omit fields whose value is None.

        Returns:
            dict[str, Any]: Attribute name -> value, skipping names with a
                leading underscore (SQLAlchemy-internal state).

        Raises:
            UnsupportedModeError: If ``mode`` is neither "python" nor "json".
        """
        if not exclude_none and not include and not exclude:
            # Fast path: no filtering requested, copy all public attributes.
            result = {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
        else:
            # Normalize sequences to sets; dicts pass through since `in`
            # checks their keys. NOTE(review): an *empty* include/exclude is
            # falsy and therefore ignored (everything is included) — pydantic
            # returns {} for include=set(); confirm callers expect this.
            inc = set(include) if include and not isinstance(include, dict) else include
            exc = set(exclude) if exclude and not isinstance(exclude, dict) else exclude

            result = {}
            for k, v in self.__dict__.items():
                if k.startswith("_"):
                    continue
                if exclude_none and v is None:
                    continue
                if inc and k not in inc:
                    continue
                if exc and k in exc:
                    continue
                result[k] = v

        if mode == "python":
            return result
        if mode == "json":
            # Round-trip to coerce every value into plain JSON types.
            return json.loads(json.dumps(result, default=_generic_serialize))
        raise UnsupportedModeError(f"Unsupported mode: {mode}")
73 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# ---- Stage 1: resolve Python dependencies into /opt/venv using uv ----
FROM python:3.13-alpine AS python-builder

COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/

ENV UV_LINK_MODE=copy \
    UV_PROJECT_ENVIRONMENT=/opt/venv \
    UV_PYTHON_DOWNLOADS=never

RUN apk add --no-cache git

WORKDIR /tmp

# Bind-mount the lockfiles read-only and cache uv downloads between builds.
RUN --mount=type=bind,source=uv.lock,target=/tmp/uv.lock,ro \
    --mount=type=bind,source=pyproject.toml,target=/tmp/pyproject.toml,ro \
    --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-install-project

# ---- Stage 2: build the SvelteKit frontend with pnpm ----
FROM node:25-alpine AS node-builder

WORKDIR /app

ENV CI=1 \
    PNPM_HOME=/pnpm \
    PNPM_STORE_DIR=/pnpm/store
ENV PATH="$PNPM_HOME:$PATH"

RUN npm install -g pnpm

# Install JS dependencies from the lockfile with a persistent pnpm store.
RUN --mount=type=bind,source=frontend/pnpm-lock.yaml,target=/app/pnpm-lock.yaml,ro \
    --mount=type=bind,source=frontend/package.json,target=/app/package.json,ro \
    --mount=type=cache,id=pnpm-store,target=/pnpm/store \
    pnpm install --frozen-lockfile

COPY ./frontend /app

RUN pnpm build

# ---- Stage 3: runtime image ----
FROM python:3.13-alpine

# shadow/su-exec are used by the /init script to drop privileges to PUID/PGID.
RUN apk add --no-cache shadow su-exec

LABEL maintainer="Elias Benbourenane " \
    org.opencontainers.image.title="PlexAniBridge" \
    org.opencontainers.image.description="The smart way to keep your AniList profile perfectly synchronized with your Plex library." \
    org.opencontainers.image.authors="Elias Benbourenane " \
    org.opencontainers.image.url="https://plexanibridge.elias.eu.org" \
    org.opencontainers.image.documentation="https://plexanibridge.elias.eu.org" \
    org.opencontainers.image.source="https://github.com/eliasbenb/PlexAniBridge" \
    org.opencontainers.image.licenses="MIT"

ENV PYTHONPATH=/opt/venv/lib/python3.13/site-packages \
    PYTHONUNBUFFERED=1 \
    PUID=1000 \
    PGID=1000 \
    UMASK=022 \
    PAB_DATA_PATH=/config

WORKDIR /app

COPY . /app
COPY ./scripts/docker_init.sh /init

# Frontend sources are dropped; the prebuilt bundle is copied in below.
RUN rm -rf /app/frontend && \
    mkdir -p /config

COPY --from=python-builder /opt/venv /opt/venv
COPY --from=node-builder /app/build /app/frontend/build

VOLUME ["/config"]

EXPOSE 4848

ENTRYPOINT ["/init"]
CMD ["python", "/app/main.py"]
75 |
--------------------------------------------------------------------------------
/src/web/routes/api/backups.py:
--------------------------------------------------------------------------------
1 | """Backup API endpoints."""
2 |
3 | from typing import Any
4 |
5 | from fastapi.routing import APIRouter
6 | from pydantic import BaseModel
7 |
8 | from src.web.services.backup_service import (
9 | BackupMeta,
10 | RestoreSummary,
11 | get_backup_service,
12 | )
13 |
14 | router = APIRouter()
15 |
16 |
class ListBackupsResponse(BaseModel):
    """Response model for listing backups."""

    # Metadata for every backup file available for the requested profile.
    backups: list[BackupMeta]
21 |
22 |
class RestoreRequest(BaseModel):
    """Request body for triggering a restore."""

    # Name of the backup file to restore; validated by the backup service.
    filename: str
27 |
28 |
@router.get("/{profile}", response_model=ListBackupsResponse)
def list_backups(profile: str) -> ListBackupsResponse:
    """List backups for a profile.

    Args:
        profile (str): Profile name

    Returns:
        ListBackupsResponse: List of available backups

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
        ProfileNotFoundError: If the profile is unknown.
    """
    service = get_backup_service()
    return ListBackupsResponse(backups=service.list_backups(profile))
45 |
46 |
@router.post("/{profile}/restore", response_model=RestoreSummary)
async def restore_backup(profile: str, req: RestoreRequest) -> RestoreSummary:
    """Restore a backup file (no dry-run mode).

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
        ProfileNotFoundError: If the profile is unknown.
        InvalidBackupFilenameError: If the filename is invalid (e.g., path traversal).
        BackupFileNotFoundError: If the backup file does not exist.
    """
    service = get_backup_service()
    summary = await service.restore_backup(profile=profile, filename=req.filename)
    return summary
60 |
61 |
# The path template must declare {filename} so the function's `filename`
# argument binds to the final path segment (previously a literal placeholder).
@router.get("/{profile}/raw/{filename}")
def get_backup_raw(profile: str, filename: str) -> dict[str, Any]:
    """Return raw JSON content of a backup.

    The response is unvalidated JSON so the UI can present a preview.

    Args:
        profile (str): Profile name.
        filename (str): Backup file name (final path segment).

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
        ProfileNotFoundError: If the profile is unknown.
        InvalidBackupFilenameError: If the filename is invalid.
        BackupFileNotFoundError: If the backup file was not found.
    """
    return get_backup_service().read_backup_raw(profile, filename)
75 |
--------------------------------------------------------------------------------
/frontend/vite.config.ts:
--------------------------------------------------------------------------------
import { sveltekit } from "@sveltejs/kit/vite";
import tailwindcss from "@tailwindcss/vite";
import { SvelteKitPWA } from "@vite-pwa/sveltekit";
import { defineConfig } from "vite";

// Vite build configuration: Tailwind, SvelteKit, and PWA generation.
export default defineConfig({
  plugins: [
    tailwindcss(),
    sveltekit(),
    // Generates the service worker and web app manifest.
    SvelteKitPWA({
      registerType: "autoUpdate",
      includeAssets: [
        "favicon.ico",
        "apple-touch-icon.png",
        "pwa-192x192.png",
        "pwa-512x512.png",
        "pwa-maskable-192x192.png",
        "pwa-maskable-512x512.png",
      ],
      // Service worker stays disabled during `vite dev`.
      devOptions: { enabled: false },
      // Raise the precache size limit to 8 MB for large bundles.
      workbox: { maximumFileSizeToCacheInBytes: 8000000 },
      manifest: {
        name: "PlexAniBridge",
        short_name: "PAB",
        icons: [
          {
            src: "/pwa-192x192.png",
            sizes: "192x192",
            type: "image/png",
            purpose: "any",
          },
          {
            src: "/pwa-512x512.png",
            sizes: "512x512",
            type: "image/png",
            purpose: "any",
          },
          {
            src: "/pwa-maskable-192x192.png",
            sizes: "192x192",
            type: "image/png",
            purpose: "maskable",
          },
          {
            src: "/pwa-maskable-512x512.png",
            sizes: "512x512",
            type: "image/png",
            purpose: "maskable",
          },
        ],
        start_url: "/",
        display: "standalone",
        background_color: "#05070d",
        theme_color: "#020618",
        description:
          "The smart way to keep your AniList profile perfectly synchronized with your Plex library.",
      },
    }),
  ],
  server: {
    // Dev-only proxy: forward API and websocket traffic to the local backend.
    proxy: {
      "/api": { target: "http://localhost:4848", changeOrigin: true },
      "/ws": { target: "http://localhost:4848", changeOrigin: true, ws: true },
    },
  },
});
67 |
--------------------------------------------------------------------------------
/frontend/src/lib/components/timeline/timeline-outcome-filters.svelte:
--------------------------------------------------------------------------------
1 |
26 |
27 |
28 | {#each Object.entries(meta) as [k, value] (k)}
29 |
50 | {/each}
51 |
52 | {#if activeMeta()}
53 |
54 |
55 | Filtering by
56 | {activeMeta()?.label}
57 |
58 |
62 |
63 | {/if}
64 |
--------------------------------------------------------------------------------
/alembic/versions/2024-12-21-11-56_6e710e6677c0.py:
--------------------------------------------------------------------------------
1 | """Init
2 |
3 | Revision ID: 6e710e6677c0
4 | Revises:
5 | Create Date: 2024-12-21 11:56:14.628540
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = '6e710e6677c0'
16 | down_revision: Union[str, None] = None
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create the ``animap`` mapping table and ``house_keeping`` key/value table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('animap',
        sa.Column('anidb_id', sa.Integer(), nullable=False),
        sa.Column('anilist_id', sa.Integer(), nullable=True),
        sa.Column('imdb_id', sa.JSON(none_as_null=True), nullable=True),
        sa.Column('mal_id', sa.JSON(none_as_null=True), nullable=True),
        sa.Column('tmdb_movie_id', sa.Integer(), nullable=True),
        sa.Column('tmdb_show_id', sa.Integer(), nullable=True),
        sa.Column('tvdb_id', sa.Integer(), nullable=True),
        sa.Column('tvdb_epoffset', sa.Integer(), nullable=True),
        sa.Column('tvdb_season', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('anidb_id')
    )
    op.create_index(op.f('ix_animap_anilist_id'), 'animap', ['anilist_id'], unique=False)
    op.create_index(op.f('ix_animap_imdb_id'), 'animap', ['imdb_id'], unique=False)
    op.create_index(op.f('ix_animap_mal_id'), 'animap', ['mal_id'], unique=False)
    op.create_index(op.f('ix_animap_tmdb_movie_id'), 'animap', ['tmdb_movie_id'], unique=False)
    op.create_index(op.f('ix_animap_tmdb_show_id'), 'animap', ['tmdb_show_id'], unique=False)
    op.create_index(op.f('ix_animap_tvdb_id'), 'animap', ['tvdb_id'], unique=False)
    op.create_table('house_keeping',
        sa.Column('key', sa.String(), nullable=False),
        sa.Column('value', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('key')
    )
    # ### end Alembic commands ###
47 |
48 |
def downgrade() -> None:
    """Drop the tables and indexes created by ``upgrade``, in reverse order."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('house_keeping')
    op.drop_index(op.f('ix_animap_tvdb_id'), table_name='animap')
    op.drop_index(op.f('ix_animap_tmdb_show_id'), table_name='animap')
    op.drop_index(op.f('ix_animap_tmdb_movie_id'), table_name='animap')
    op.drop_index(op.f('ix_animap_mal_id'), table_name='animap')
    op.drop_index(op.f('ix_animap_imdb_id'), table_name='animap')
    op.drop_index(op.f('ix_animap_anilist_id'), table_name='animap')
    op.drop_table('animap')
    # ### end Alembic commands ###
60 |
--------------------------------------------------------------------------------
/src/web/routes/api/sync.py:
--------------------------------------------------------------------------------
1 | """API endpoints to trigger sync operations."""
2 |
3 | from fastapi.param_functions import Body, Path, Query
4 | from fastapi.routing import APIRouter
5 | from pydantic import BaseModel
6 |
7 | from src.exceptions import SchedulerNotInitializedError
8 | from src.web.state import get_app_state
9 |
10 | __all__ = ["router"]
11 |
12 |
class OkResponse(BaseModel):
    """Simple acknowledgement payload returned by the sync endpoints."""

    # Always True in practice; failures surface as raised exceptions instead.
    ok: bool = True
15 |
16 |
17 | router = APIRouter()
18 |
19 |
@router.post("", response_model=OkResponse)
async def sync_all(poll: bool = Query(False)) -> OkResponse:
    """Kick off a sync run covering every configured profile.

    Args:
        poll (bool): Whether to poll for updates.

    Returns:
        OkResponse: Acknowledgement that the sync was triggered.

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
    """
    state = get_app_state()
    if not state.scheduler:
        raise SchedulerNotInitializedError("Scheduler not available")
    await state.scheduler.trigger_sync(poll=poll)
    return OkResponse(ok=True)
38 |
39 |
@router.post("/database", response_model=OkResponse)
async def sync_database() -> OkResponse:
    """Trigger a refresh of the shared AniMap database.

    Returns:
        OkResponse: Acknowledgement that the database sync was triggered.

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
    """
    state = get_app_state()
    if not state.scheduler:
        raise SchedulerNotInitializedError("Scheduler not available")
    await state.scheduler.shared_animap_client.sync_db()
    return OkResponse(ok=True)
55 |
56 |
@router.post("/profile/{profile}", response_model=OkResponse)
async def sync_profile(
    profile: str = Path(...),
    poll: bool = Query(False),
    rating_keys: list[str] | None = Body(default=None, embed=True),
) -> OkResponse:
    """Trigger a sync for one profile, optionally limited to specific items.

    Args:
        profile (str): The profile to sync.
        poll (bool): Whether to poll for updates.
        rating_keys (list[str] | None): Specific rating keys to sync (if any).

    Returns:
        OkResponse: Acknowledgement that the sync was triggered.

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
        ProfileNotFoundError: If the profile does not exist.
    """
    state = get_app_state()
    if not state.scheduler:
        raise SchedulerNotInitializedError("Scheduler not available")
    await state.scheduler.trigger_sync(profile, poll=poll, rating_keys=rating_keys)
    return OkResponse(ok=True)
82 |
--------------------------------------------------------------------------------
/alembic/versions/2025-08-08-05-10_cd371e53adcb.py:
--------------------------------------------------------------------------------
1 | """Sync history table
2 |
3 | Revision ID: cd371e53adcb
4 | Revises: 08f39c25b391
5 | Create Date: 2025-08-08 05:10:15.509159
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = 'cd371e53adcb'
16 | down_revision: Union[str, None] = '08f39c25b391'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create the ``sync_history`` audit table and its lookup indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('sync_history',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('profile_name', sa.String(), nullable=False),
        sa.Column('plex_guid', sa.String(), nullable=True),
        sa.Column('plex_rating_key', sa.String(), nullable=False),
        sa.Column('plex_child_rating_key', sa.String(), nullable=True),
        sa.Column('plex_type', sa.Enum('MOVIE', 'SHOW', 'SEASON', 'EPISODE', name='mediatype'), nullable=False),
        sa.Column('anilist_id', sa.Integer(), nullable=True),
        sa.Column('outcome', sa.Enum('SYNCED', 'SKIPPED', 'FAILED', 'NOT_FOUND', 'DELETED', 'PENDING', name='syncoutcome'), nullable=False),
        sa.Column('before_state', sa.JSON(), nullable=True),
        sa.Column('after_state', sa.JSON(), nullable=True),
        sa.Column('error_message', sa.String(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # batch_alter_table keeps the index DDL compatible with SQLite.
    with op.batch_alter_table('sync_history', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_sync_history_outcome'), ['outcome'], unique=False)
        batch_op.create_index(batch_op.f('ix_sync_history_plex_guid'), ['plex_guid'], unique=False)
        batch_op.create_index(batch_op.f('ix_sync_history_plex_type'), ['plex_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_sync_history_profile_name'), ['profile_name'], unique=False)
        batch_op.create_index(batch_op.f('ix_sync_history_timestamp'), ['timestamp'], unique=False)

    # ### end Alembic commands ###
46 |
47 |
def downgrade() -> None:
    """Drop the ``sync_history`` table after removing its indexes."""
    # Drop indexes in reverse creation order, then the table itself.
    with op.batch_alter_table("sync_history", schema=None) as batch_op:
        for column in ("timestamp", "profile_name", "plex_type", "plex_guid", "outcome"):
            batch_op.drop_index(batch_op.f(f"ix_sync_history_{column}"))

    op.drop_table("sync_history")
59 |
--------------------------------------------------------------------------------
/frontend/src/lib/ui/modal.svelte:
--------------------------------------------------------------------------------
1 |
41 |
42 |
43 |
44 |
45 |
49 | {#if titleChildren}
50 |
64 | {/if}
65 |
66 |
67 | {@render children?.()}
68 |
69 |
70 |
73 |
74 |
75 |
76 |
--------------------------------------------------------------------------------
/docs/mappings/custom-mappings.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Custom Mappings
3 | icon: material/map
4 | ---
5 |
6 | # Custom Mappings
7 |
8 | PlexAniBridge allows you to define custom mappings for Plex content to AniList, supplementing the [default mappings database](https://github.com/eliasbenb/PlexAniBridge-Mappings). This feature is particularly helpful for content that is missing or incorrectly mapped in the default database.
9 |
10 | !!! note
11 |
12 |     Custom mappings *merge* with the default mappings; they do not override them. This means that if you add a custom mapping for a series that is already in the default database, only the fields specified in the custom mapping will be updated. Any pre-existing fields not specified in the custom mapping will remain unchanged.
13 |
14 | Below is an example mappings file. You can use [the JSON schema](https://github.com/eliasbenb/PlexAniBridge-Mappings/blob/HEAD/mappings.schema.json) or the [PlexAniBridge-Mappings database](https://github.com/eliasbenb/PlexAniBridge-Mappings) as reference.
15 |
16 | ```yaml title="mappings.custom.yaml"
17 | --8<-- "data/mappings.example.yaml"
18 | ```
19 |
20 | ??? tip "JSON Format for Mappings"
21 |
22 | The mappings file can also be written in JSON format. Here is the same example in JSON:
23 |
24 | ```json title="mappings.custom.json"
25 | --8<-- "data/mappings.example.json"
26 | ```
27 |
28 | !!! tip "Including External Mappings"
29 |
30 | ```yaml title="mappings.custom.yaml"
31 | $includes:
32 | - "https://example.com/mappings.json"
33 | - "/path/to/mappings.yaml"
34 | - "./relative/path/to/mappings.yml"
35 | ```
36 |
37 | ## Local Custom Mappings
38 |
39 | PlexAniBridge will look for a custom mappings file with the name `mappings.custom.(json|yaml|yml)` in the `PAB_DATA_PATH` directory. The file extension determines the format of the file (YAML or JSON).
40 |
41 | ## Community Custom Mappings
42 |
43 | There are community-maintained mappings repositories that you can use to get pre-made mappings for your content. You can include these mappings in your custom mappings file using the `$includes` key as explained above.
44 |
45 | - [LuceoEtzio/PlexAniBridge-Custom-Mappings](https://github.com/LuceoEtzio/PlexAniBridge-Custom-Mappings)
46 |
50 | ## Contributing to the Default Mappings
51 |
52 | If you want to contribute your custom mappings to the community, you can submit a pull request to the [PlexAniBridge-Mappings](https://github.com/eliasbenb/PlexAniBridge-Mappings) repository. Your pull request should modify the [`mappings.edits.yaml`](https://github.com/eliasbenb/PlexAniBridge-Mappings/blob/HEAD/mappings.edits.yaml) and **not** the [`mappings.json`](https://github.com/eliasbenb/PlexAniBridge-Mappings/blob/HEAD/mappings.json) file.
53 |
--------------------------------------------------------------------------------
/src/utils/version.py:
--------------------------------------------------------------------------------
1 | """Version Utilities Module."""
2 |
3 | import tomllib
4 | from pathlib import Path
5 |
6 | from src import __file__ as src_file
7 |
8 | __all__ = ["get_docker_status", "get_git_hash", "get_pyproject_version"]
9 |
10 |
11 | def get_pyproject_version() -> str:
12 | """Get the PlexAniBridge's version from the pyproject.toml file.
13 |
14 | Returns:
15 | str: PlexAniBridge's version
16 | """
17 | try:
18 | project_root = Path(src_file).resolve().parent.parent
19 | toml_file = project_root / "pyproject.toml"
20 |
21 | if not toml_file.exists() or not toml_file.is_file():
22 | return "unknown"
23 |
24 | with toml_file.open("rb") as f:
25 | toml_data = tomllib.load(f)
26 |
27 | project_data = toml_data.get("project") or {}
28 | if isinstance(project_data, dict) and "version" in project_data:
29 | return project_data["version"]
30 | return "unknown"
31 | except Exception:
32 | return "unknown"
33 |
34 |
def get_git_hash() -> str:
    """Resolve the current git commit hash of the PlexAniBridge checkout.

    Handles three layouts: a detached HEAD (hash stored directly in
    ``.git/HEAD``), a branch ref stored as a loose file under ``.git``, and a
    branch ref recorded in ``.git/packed-refs``.

    Returns:
        str: PlexAniBridge's current commit hash, or "unknown" on failure.
    """
    try:
        git_dir = Path(src_file).resolve().parent.parent / ".git"
        if not (git_dir.exists() and git_dir.is_dir()):
            return "unknown"

        with open(git_dir / "HEAD") as f:
            head = f.read().strip()

        # Detached HEAD: the file holds the commit hash itself.
        if not head.startswith("ref:"):
            return head

        ref = head.split("ref: ")[1]

        # Branch ref stored as a loose file under .git/.
        loose_ref = git_dir / ref
        if loose_ref.exists() and loose_ref.is_file():
            with open(loose_ref) as f:
                return f.read().strip()

        # Branch ref packed into .git/packed-refs.
        packed = git_dir / "packed-refs"
        if packed.exists() and packed.is_file():
            with open(packed) as f:
                for raw_line in f:
                    entry = raw_line.strip()
                    if entry and not entry.startswith("#") and entry.endswith(ref):
                        return entry.split()[0]

        return "unknown"
    except Exception:
        return "unknown"
74 |
75 |
def get_docker_status() -> bool:
    """Check if PlexAniBridge is running inside a Docker container.

    Docker creates a `/.dockerenv` sentinel file inside containers.

    Returns:
        bool: True if running inside a Docker container, False otherwise
    """
    sentinel = Path("/.dockerenv")
    return sentinel.exists() and sentinel.is_file()
84 |
--------------------------------------------------------------------------------
/frontend/src/lib/utils/notify.ts:
--------------------------------------------------------------------------------
1 | import { writable } from "svelte/store";
2 |
3 | export type ToastType = "info" | "success" | "error" | "warn";
4 |
5 | export interface ToastOptions {
6 | timeout?: number;
7 | id?: string;
8 | }
9 |
10 | export interface Toast {
11 | id: string;
12 | message: string;
13 | type: ToastType;
14 | created: number;
15 | timeout: number;
16 | }
17 |
18 | export interface ToastConfig {
19 | durations: Record;
20 | }
21 |
22 | const DEFAULT_CONFIG: ToastConfig = {
23 | durations: { info: 4000, success: 3000, warn: 6500, error: 9000 },
24 | };
25 |
26 | const CONFIG_KEY = "toast.config";
27 |
28 | function loadConfig(): ToastConfig {
29 | if (typeof localStorage === "undefined") return DEFAULT_CONFIG;
30 | try {
31 | const raw = localStorage.getItem(CONFIG_KEY);
32 | if (!raw) return DEFAULT_CONFIG;
33 | const parsed = JSON.parse(raw);
34 | if (!parsed || typeof parsed !== "object") return DEFAULT_CONFIG;
35 | const d = (parsed as ToastConfig).durations || {};
36 | return { durations: { ...DEFAULT_CONFIG.durations, ...d } };
37 | } catch {
38 | return DEFAULT_CONFIG;
39 | }
40 | }
41 |
42 | export const toastConfig = writable(loadConfig());
43 | let currentConfig = loadConfig();
44 | toastConfig.subscribe((c) => {
45 | currentConfig = c;
46 | try {
47 | if (typeof localStorage !== "undefined")
48 | localStorage.setItem(CONFIG_KEY, JSON.stringify(c));
49 | } catch {}
50 | });
51 |
52 | export const toasts = writable([]);
53 |
54 | const DEDUPE_WINDOW = 2500;
55 |
56 | let lastShown: { message: string; at: number } | null = null;
57 |
58 | export function toast(
59 | message: string,
60 | type: ToastType = "info",
61 | opts: ToastOptions = {},
62 | ) {
63 | if (!message) return;
64 | const now = Date.now();
65 | if (
66 | lastShown &&
67 | lastShown.message === message &&
68 | now - lastShown.at < DEDUPE_WINDOW
69 | ) {
70 | return; // suppress duplicate
71 | }
72 | lastShown = { message, at: now };
73 | const t: Toast = {
74 | id: opts.id || Math.random().toString(36).slice(2, 10),
75 | message,
76 | type,
77 | created: now,
78 | timeout: opts.timeout ?? currentConfig.durations[type],
79 | };
80 | toasts.update((list) => [t, ...list]);
81 | if (t.timeout > 0) {
82 | setTimeout(() => {
83 | toasts.update((list) => list.filter((x) => x.id !== t.id));
84 | }, t.timeout);
85 | }
86 | }
87 |
88 | export function dismiss(id: string) {
89 | toasts.update((list) => list.filter((t) => t.id !== id));
90 | }
91 |
92 | export function updateToastDurations(partial: Partial>) {
93 | toastConfig.update((c) => ({ durations: { ...c.durations, ...partial } }));
94 | }
95 |
--------------------------------------------------------------------------------
/src/utils/htpasswd.py:
--------------------------------------------------------------------------------
1 | """A standalone, minimal htpasswd parser.
2 |
3 | Source: https://github.com/mitmproxy/mitmproxy/pull/7906
4 | """
5 |
from __future__ import annotations

import base64
import hashlib
import hmac
from pathlib import Path

import bcrypt
13 |
14 | __all__ = ["HtpasswdFile"]
15 |
16 |
class HtpasswdFile:
    """A minimal htpasswd file parser supporting bcrypt and SHA1."""

    def __init__(self, content: str) -> None:
        """Create a HtpasswdFile from a string.

        Args:
            content: The raw htpasswd file contents.

        Raises:
            ValueError: If a line is malformed or uses an unsupported hash format.
        """
        # Maps username -> raw password hash field.
        self.users: dict[str, str] = {}
        for line in content.splitlines():
            line = line.strip()
            # Skip blank lines and comments.
            if not line or line.startswith("#"):
                continue
            if ":" not in line:
                raise ValueError(f"Malformed htpasswd line: {line!r}")
            user, pwhash = line.split(":", 1)
            if not user:
                raise ValueError(f"Malformed htpasswd line: {line!r}")

            is_sha = pwhash.startswith("{SHA}")
            is_bcrypt = pwhash.startswith(("$2y$", "$2b$", "$2a$"))
            if not is_sha and not is_bcrypt:
                raise ValueError(f"Unsupported htpasswd format for user {user!r}")

            self.users[user] = pwhash

    @classmethod
    def from_file(cls, path: Path) -> "HtpasswdFile":
        """Initializes and loads an htpasswd file.

        Args:
            path: The path to the htpasswd file.

        Returns:
            The parsed htpasswd file.

        Raises:
            OSError: If the file cannot be read.
            ValueError: If the file is malformed.
        """
        try:
            content = path.read_text("utf-8")
        except FileNotFoundError:
            raise OSError(f"Htpasswd file not found: {path}") from None
        return cls(content)

    def check_password(self, username: str, password: str) -> bool:
        """Checks if a username and password combination is valid.

        Args:
            username: The username to check.
            password: The password to check.

        Returns:
            True if the password is valid, False otherwise.
        """
        pwhash = self.users.get(username)
        if pwhash is None:
            return False

        # Defensively drop anything after a ':' in the hash field.
        pwhash = pwhash.split(":", 1)[0]

        if pwhash.startswith("{SHA}"):
            # Apache's {SHA} is base64-encoded SHA-1.
            # https://httpd.apache.org/docs/2.4/misc/password_encryptions.html
            digest = hashlib.sha1(password.encode("utf-8")).digest()
            expected = base64.b64encode(digest).decode("ascii")
            # FIX: use a constant-time comparison instead of `==` so the check
            # does not leak hash-prefix information via timing.
            return hmac.compare_digest(pwhash[5:], expected)
        else:  # pwhash.startswith(("$2y$", "$2b$", "$2a$")):
            # bcrypt.checkpw is already constant-time.
            return bcrypt.checkpw(password.encode("utf-8"), pwhash.encode("utf-8"))
81 |
--------------------------------------------------------------------------------
/frontend/src/lib/utils/api.ts:
--------------------------------------------------------------------------------
1 | import { toast, type ToastType } from "$lib/utils/notify";
2 |
3 | export function isAbortError(error: unknown): boolean {
4 | if (!error) return false;
5 | if (error instanceof DOMException && error.name === "AbortError") return true;
6 | return (error as { name?: string }).name === "AbortError";
7 | }
8 |
/** Shape of error payloads the backend may return. */
export interface ApiErrorData {
  message?: string;
  error?: string;
  // FastAPI-style detail: either a plain string or an object carrying a message.
  detail?: string | { message?: string } | unknown;
}
14 |
15 | function extractMessage(data: ApiErrorData, status: number): string {
16 | if (!data) return `Request failed (${status})`;
17 | if (data.message) return data.message;
18 | if (typeof data.detail === "string") return data.detail;
19 | if (data.error) return data.error;
20 | if (data.detail && typeof data.detail === "object" && "message" in data.detail) {
21 | // @ts-expect-error best attempt
22 | return data.detail.message || `Request failed (${status})`;
23 | }
24 | return `Request failed (${status})`;
25 | }
26 |
/** Behavior flags for apiFetch. */
export interface ApiOptions {
  silent?: boolean; // Suppress error toasts for this request.
  successMessage?: string; // Toast to show when the response is ok.
  successType?: ToastType; // Toast type for successMessage (defaults to "success").
}
32 |
33 | export async function apiFetch(
34 | input: RequestInfo | URL,
35 | init?: RequestInit,
36 | opts: ApiOptions = {},
37 | ): Promise {
38 | let res: Response;
39 | try {
40 | res = await fetch(input, init);
41 | } catch (e) {
42 | if (isAbortError(e)) throw e;
43 | if (!opts.silent) toast(`Network error: ${(e as Error).message || e}`, "error");
44 | throw e;
45 | }
46 | if (!res.ok) {
47 | let msg = `HTTP ${res.status}`;
48 | const ct = res.headers.get("content-type") || "";
49 | if (ct.includes("application/json")) {
50 | try {
51 | const data = (await res.clone().json()) as ApiErrorData;
52 | msg = extractMessage(data, res.status);
53 | } catch {}
54 | } else {
55 | try {
56 | const text = await res.clone().text();
57 | if (text.trim()) msg = text.slice(0, 300);
58 | } catch {}
59 | }
60 | if (!opts.silent) toast(msg, "error");
61 | } else if (opts.successMessage) {
62 | toast(opts.successMessage, opts.successType || "success");
63 | }
64 | return res;
65 | }
66 |
67 | export async function apiJson(
68 | input: RequestInfo | URL,
69 | init?: RequestInit,
70 | ): Promise {
71 | const r = await apiFetch(input, init);
72 | // If backend returned error body but still non-ok, apiFetch already toasted; allow caller to decide what to do.
73 | const ct = r.headers.get("content-type") || "";
74 | if (!ct.includes("application/json")) {
75 | const text = await r.text();
76 | return JSON.parse(text) as T; // may throw which is fine
77 | }
78 | return (await r.json()) as T;
79 | }
80 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: PlexAniBridge
2 | site_url: https://plexanibridge.elias.eu.org
3 | repo_url: https://github.com/eliasbenb/PlexAniBridge
4 | repo_name: eliasbenb/PlexAniBridge
5 | edit_uri: edit/main/docs/
6 |
7 | extra_javascript:
8 | - https://cdn.jsdelivr.net/npm/bcryptjs@3/umd/index.min.js
9 | - js/htpasswd-generator.js
10 | extra_css:
11 | - css/htpasswd-generator.css
12 |
13 | extra:
14 | generator: false
15 | plugins:
16 | - git-tag
17 | - git-revision-date-localized
18 | - search
19 | - awesome-nav
20 | - redoc-tag
21 | theme:
22 | name: material
23 | logo: img/logo.png
24 | favicon: favicon.ico
25 | icon:
26 | repo: fontawesome/brands/github
27 | features:
28 | - announce.dismiss
29 | - content.action.edit
30 | - content.code.annotate
31 | - content.code.copy
32 | # - content.code.select
33 | # - content.footnote.tooltips
34 | # - content.tabs.link
35 | - content.tooltips
36 | # - header.autohide
37 | # - navigation.expand
38 | - navigation.footer
39 | - navigation.indexes
40 | # - navigation.instant
41 | # - navigation.instant.prefetch
42 | # - navigation.instant.progress
43 | # - navigation.prune
44 | - navigation.sections
45 | # - navigation.tabs
46 | # - navigation.tabs.sticky
47 | - navigation.top
48 | - navigation.tracking
49 | - search.highlight
50 | - search.share
51 | - search.suggest
52 | - toc.follow
53 | # - toc.integrate
54 | font:
55 | text: Roboto
56 | code: Roboto Mono
57 | palette:
58 | - media: "(prefers-color-scheme)"
59 | toggle:
60 | icon: material/brightness-auto
61 | name: Switch to light mode
62 |
63 | - media: "(prefers-color-scheme: light)"
64 | scheme: default
65 | toggle:
66 | icon: material/brightness-7
67 | name: Switch to dark mode
68 |
69 | - media: "(prefers-color-scheme: dark)"
70 | scheme: slate
71 | toggle:
72 | icon: material/brightness-4
73 | name: Switch to system preference
74 | markdown_extensions:
75 | - admonition
76 | - attr_list
77 | - def_list
78 | - pymdownx.betterem
79 | - pymdownx.details
80 | - pymdownx.emoji:
81 | emoji_index: !!python/name:material.extensions.emoji.twemoji
82 | emoji_generator: !!python/name:material.extensions.emoji.to_svg
83 | - pymdownx.highlight:
84 | anchor_linenums: true
85 | - pymdownx.inlinehilite
86 | - pymdownx.smartsymbols
87 | - pymdownx.snippets
88 | - pymdownx.superfences:
89 | custom_fences:
90 | - name: mermaid
91 | class: mermaid
92 | format: !!python/name:pymdownx.superfences.fence_code_format
93 | - pymdownx.tasklist:
94 | custom_checkbox: true
95 |
--------------------------------------------------------------------------------
/tests/utils/test_requests.py:
--------------------------------------------------------------------------------
1 | """Tests for selective request session behavior."""
2 |
3 | import types
4 | from typing import Any
5 |
6 | import pytest
7 | import requests
8 |
9 | from src.utils import requests as requests_module
10 | from src.utils.requests import SelectiveVerifySession
11 |
12 |
@pytest.fixture(autouse=True)
def restore_log(monkeypatch):
    """Replace the module logger with a no-op stub for every test.

    monkeypatch automatically restores the real logger after each test.
    """
    # SimpleNamespace stands in for the logger; only debug/error are exercised.
    dummy_logger = types.SimpleNamespace(
        debug=lambda *_, **__: None,
        error=lambda *_, **__: None,
    )
    monkeypatch.setattr(requests_module, "log", dummy_logger)
21 |
22 |
def test_selective_verify_session_sets_verify_false_for_whitelist(monkeypatch):
    """Test that SelectiveVerifySession sets verify=False for whitelisted domains."""
    seen: dict[str, Any] = {}

    def record_request(self, method, url, *args, **kwargs):
        seen["method"] = method
        seen["url"] = url
        seen["kwargs"] = kwargs
        return "ok"

    monkeypatch.setattr(requests.Session, "request", record_request, raising=False)

    session = SelectiveVerifySession(whitelist={"example.com"})

    assert session.request("GET", "https://example.com/api") == "ok"
    assert seen["kwargs"].get("verify") is False
40 |
41 |
def test_selective_verify_session_leaves_verify_for_other_domains(monkeypatch):
    """Test that SelectiveVerifySession does not modify verify for other domains."""
    seen: dict[str, Any] = {}

    def record_request(self, method, url, *args, **kwargs):
        seen["kwargs"] = kwargs
        return "resp"

    monkeypatch.setattr(requests.Session, "request", record_request, raising=False)

    session = SelectiveVerifySession(whitelist={"whitelisted.com"})

    assert session.request("GET", "https://other.com/resource") == "resp"
    assert "verify" not in seen["kwargs"]
57 |
58 |
def test_selective_verify_session_logs_error_on_failure(monkeypatch):
    """Test that SelectiveVerifySession logs an error when a request fails."""

    class BoomError(RuntimeError):
        pass

    logged_errors: list[str] = []
    stub_logger = types.SimpleNamespace(
        debug=lambda *_, **__: None,
        error=lambda message, **_: logged_errors.append(message),
    )
    monkeypatch.setattr(requests_module, "log", stub_logger)

    def failing_request(self, method, url, *args, **kwargs):
        raise BoomError("boom")

    monkeypatch.setattr(requests.Session, "request", failing_request, raising=False)

    session = SelectiveVerifySession(whitelist={"fail.com"})

    with pytest.raises(BoomError):
        session.request("GET", "https://fail.com/endpoint")

    # The failing URL's host must appear in the logged error message.
    assert logged_errors and "fail.com" in logged_errors[0]
86 |
--------------------------------------------------------------------------------
/src/web/routes/api/history.py:
--------------------------------------------------------------------------------
1 | """History API endpoints."""
2 |
3 | from fastapi.param_functions import Query
4 | from fastapi.routing import APIRouter
5 | from pydantic import BaseModel
6 |
7 | from src.web.services.history_service import (
8 | HistoryItem,
9 | HistoryPage,
10 | get_history_service,
11 | )
12 |
13 | router = APIRouter()
14 |
15 |
class GetHistoryResponse(BaseModel):
    """Paginated history response (flattened)."""

    items: list[HistoryItem]  # History entries for the requested page.
    page: int  # Page number that was returned.
    per_page: int  # Page size used for pagination.
    total: int  # Total number of matching history entries.
    pages: int  # Total number of pages available.
    # Aggregate counts (presumably keyed by outcome — confirm against the
    # history service); pydantic deep-copies this default per instance.
    stats: dict[str, int] = {}
25 |
26 |
class OkResponse(BaseModel):
    """Response model for successful operations."""

    ok: bool = True  # Defaults to True; a returned response signals success.
31 |
32 |
class UndoResponse(BaseModel):
    """Response model for undo operation."""

    item: HistoryItem  # The history item as returned by the undo operation.
37 |
38 |
@router.get("/{profile}", response_model=GetHistoryResponse)
async def get_history(
    profile: str,
    page: int = 1,
    per_page: int = 25,
    outcome: str | None = Query(None, description="Filter by outcome"),
) -> GetHistoryResponse:
    """Get paginated timeline for profile.

    Args:
        profile (str): The profile name.
        page (int): The page number.
        per_page (int): The number of items per page.
        outcome (str | None): Filter by outcome.

    Returns:
        GetHistoryResponse: The paginated history response.

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
        ProfileNotFoundError: If the profile is unknown.
    """
    service = get_history_service()
    history_page: HistoryPage = await service.get_page(
        profile=profile, page=page, per_page=per_page, outcome=outcome
    )
    # HistoryPage and GetHistoryResponse share the same flattened field set.
    return GetHistoryResponse(**history_page.model_dump())
65 |
66 |
@router.delete("/{profile}/{item_id}", response_model=OkResponse)
async def delete_history(profile: str, item_id: int) -> OkResponse:
    """Delete a history item.

    Args:
        profile (str): The profile name.
        item_id (int): The ID of the history item to delete.

    Returns:
        OkResponse: The response indicating success.

    Raises:
        HistoryItemNotFoundError: If the specified item does not exist.
    """
    service = get_history_service()
    await service.delete_item(profile, item_id)
    return OkResponse()
83 |
84 |
@router.post("/{profile}/{item_id}/undo", response_model=UndoResponse)
async def undo_history(profile: str, item_id: int) -> UndoResponse:
    """Undo a history item if possible.

    Args:
        profile (str): The profile name.
        item_id (int): The ID of the history item to undo.

    Returns:
        UndoResponse: The response containing the undone history item.

    Raises:
        SchedulerNotInitializedError: If the scheduler is not running.
        ProfileNotFoundError: If the profile is unknown.
        HistoryItemNotFoundError: If the specified item does not exist.
    """
    item = await get_history_service().undo_item(profile, item_id)
    return UndoResponse(item=item)
96 |
--------------------------------------------------------------------------------
/frontend/src/lib/components/timeline/utils.ts:
--------------------------------------------------------------------------------
1 | import { SvelteSet } from "svelte/reactivity";
2 |
3 | import type { DiffEntry } from "$lib/components/timeline/types";
4 | import type { HistoryItem } from "$lib/types/api";
5 |
6 | export function buildDiff(item: HistoryItem): DiffEntry[] {
7 | const before = item.before_state || {};
8 | const after = item.after_state || {};
9 | const paths = new SvelteSet();
10 | const visit = (obj: unknown, base = "") => {
11 | if (!obj || typeof obj !== "object") return;
12 | for (const k of Object.keys(obj as Record)) {
13 | const val = (obj as Record)[k];
14 | const path = base ? `${base}.${k}` : k;
15 | if (val && typeof val === "object" && !Array.isArray(val)) visit(val, path);
16 | else paths.add(path);
17 | }
18 | };
19 | visit(before);
20 | visit(after);
21 | const diff: DiffEntry[] = [];
22 | for (const p of paths) {
23 | const segs = p.split(".");
24 | const get = (root: unknown) =>
25 | segs.reduce(
26 | (o, k) =>
27 | o && typeof o === "object" && k in (o as Record)
28 | ? (o as Record)[k]
29 | : undefined,
30 | root,
31 | );
32 | const bv = get(before);
33 | const av = get(after);
34 | let status: DiffEntry["status"] = "unchanged";
35 | if (bv === undefined && av !== undefined) status = "added";
36 | else if (bv !== undefined && av === undefined) status = "removed";
37 | else if (JSON.stringify(bv) !== JSON.stringify(av)) status = "changed";
38 | diff.push({ path: p, before: bv, after: av, status });
39 | }
40 | const weight: Record = {
41 | changed: 0,
42 | added: 1,
43 | removed: 2,
44 | unchanged: 3,
45 | };
46 | diff.sort(
47 | (a, b) => weight[a.status] - weight[b.status] || a.path.localeCompare(b.path),
48 | );
49 | return diff;
50 | }
51 |
52 | export function truncateValue(value: unknown, max = 120): string {
53 | if (value === null) return "null";
54 | if (value === undefined) return "undefined";
55 | const text = typeof value === "string" ? value : JSON.stringify(value);
56 | return text.length > max ? `${text.slice(0, max - 1)}…` : text;
57 | }
58 |
59 | export function sizeLabel(obj: unknown): string {
60 | if (!obj) return "0 keys";
61 | let count = 0;
62 | const scan = (input: unknown) => {
63 | if (input && typeof input === "object")
64 | Object.keys(input as Record).forEach((key) => {
65 | count++;
66 | const child = (input as Record)[key];
67 | if (child && typeof child === "object" && !Array.isArray(child))
68 | scan(child);
69 | });
70 | };
71 | scan(obj);
72 | return `${count} keys`;
73 | }
74 |
--------------------------------------------------------------------------------
/tests/utils/test_logging.py:
--------------------------------------------------------------------------------
1 | """Tests for logging utilities."""
2 |
3 | import logging
4 |
5 | import colorama
6 |
7 | from src.utils.logging import CleanFormatter, ColorFormatter, Logger
8 |
9 |
def test_color_formatter_applies_color_codes():
    """Test that ColorFormatter applies color codes to marked sections."""
    formatter = ColorFormatter("%(levelname)s:%(message)s")
    message = "$$'value'$$ $${key: value}$$ message"
    record = logging.LogRecord(
        name="test",
        level=logging.INFO,
        pathname=__file__,
        lineno=10,
        msg=message,
        args=(),
        exc_info=None,
    )

    rendered = formatter.format(record)

    # Quoted and dict-like marked sections pick up colors plus dim styling.
    assert colorama.Fore.GREEN in rendered
    assert colorama.Fore.LIGHTBLUE_EX in rendered
    assert colorama.Style.DIM in rendered
    # Formatting must not mutate the record's original message.
    assert record.msg == message
30 |
31 |
def test_clean_formatter_removes_markers():
    """Test that CleanFormatter removes special markers from the message."""
    formatter = CleanFormatter("%(message)s")
    message = "wrapped $$'value'$$ and $${key: 1}$$"
    record = logging.LogRecord(
        name="test",
        level=logging.INFO,
        pathname=__file__,
        lineno=10,
        msg=message,
        args=(),
        exc_info=None,
    )

    rendered = formatter.format(record)

    # Markers are stripped while the wrapped content survives.
    assert "$$" not in rendered
    assert "'value'" in rendered
    assert "{key: 1}" in rendered
    # The record itself must be left untouched.
    assert record.msg == message
52 |
53 |
def test_logger_prefixes_class_name():
    """Test that Logger prefixes messages with the class name."""
    logger = Logger("test")
    logger.setLevel(logging.DEBUG)
    messages: list[str] = []

    class ListHandler(logging.Handler):
        def emit(self, record):
            messages.append(record.getMessage())

    logger.addHandler(ListHandler())

    class Sample:
        def __init__(self, bound_logger: Logger):
            self.log = bound_logger

        def run(self):
            self.log.info("hello")

    Sample(logger).run()

    # The emitting class's name is prepended to the message.
    assert messages and messages[0] == "Sample: hello"
76 |
77 |
def test_logger_success_level_records_message():
    """Test that Logger logs messages at SUCCESS level."""
    logger = Logger("test")
    logger.setLevel(Logger.SUCCESS)
    emitted: list[logging.LogRecord] = []

    class CaptureHandler(logging.Handler):
        def emit(self, record):
            emitted.append(record)

    logger.addHandler(CaptureHandler())

    logger.success("operation complete")

    assert emitted, "Expected at least one log record"
    first = emitted[0]
    assert first.levelno == Logger.SUCCESS
    assert first.levelname == "SUCCESS"
    assert first.getMessage() == "operation complete"
97 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yml:
--------------------------------------------------------------------------------
1 | name: Bug Report
2 | description: "Make sure to read through all instructions in the template below before submitting an issue."
3 | labels: ["bug"]
4 | body:
5 | - type: checkboxes
6 | attributes:
7 | label: Is there an existing issue for this?
8 | description: Please search to see if an open or closed issue already exists for the bug you encountered. If a bug exists and it is closed as complete it may not yet be in a stable release.
9 | options:
10 | - label: I have searched the existing open and closed issues
11 | required: true
12 | - type: textarea
13 | attributes:
14 | label: Current Behavior
15 | description: A concise description of what you're experiencing.
16 | validations:
17 | required: true
18 | - type: textarea
19 | attributes:
20 | label: Expected Behavior
21 | description: A concise description of what you expected to happen.
22 | validations:
23 | required: true
24 | - type: textarea
25 | attributes:
26 | label: Steps To Reproduce
27 | description: Steps to reproduce the behavior.
28 | placeholder: |
29 | 1. In this environment...
30 | 2. With this config...
31 | 3. Run '...'
32 | 4. See error...
33 | validations:
34 | required: false
35 | - type: textarea
36 | attributes:
37 | label: Environment
38 | description: |
39 | examples:
40 | - **OS**: Ubuntu 22.04
41 | - **PlexAniBridge**: PlexAniBridge 0.2.2
42 | - **Docker Install**: Yes
43 | - **Database**: Sqlite 3.41.1
44 | value: |
45 | - OS:
46 | - PlexAniBridge:
47 | - Docker Install:
48 | - Database:
49 | render: markdown
50 | validations:
51 | required: true
52 | - type: dropdown
53 | attributes:
54 | label: What branch are you running?
55 | options:
56 | - Main
57 | - Develop
58 | - Other (This issue will be closed)
59 | validations:
60 | required: true
61 | - type: textarea
62 | attributes:
63 | label: Debug Logs?
64 | description: |
65 | Debug Logs
66 | ***Generally speaking, all bug reports must have debug logs provided.***
***Info Logs are not debug logs. If the logs do not say debug and are not from a file like `*.DEBUG.log`, they are not debug logs.***
68 | validations:
69 | required: true
70 | - type: textarea
71 | attributes:
72 | label: Anything else?
73 | description: |
74 | Links? Screenshots? References? Anything that will give us more context about the issue you are encountering!
75 | Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
76 | validations:
77 | required: false
78 |
--------------------------------------------------------------------------------
/tests/utils/test_terminal.py:
--------------------------------------------------------------------------------
1 | """Tests for terminal capability helpers."""
2 |
3 | import locale
4 | import os
5 | import sys
6 | from types import SimpleNamespace
7 |
8 | import pytest
9 |
10 | from src.utils.terminal import supports_color, supports_utf8
11 |
12 |
@pytest.fixture(autouse=True)
def clear_terminal_caches() -> None:
    """Reset memoized terminal capability results so every test starts fresh."""
    for cached_fn in (supports_utf8, supports_color):
        cached_fn.cache_clear()
18 |
19 |
20 | def _fake_stdout(*, encoding: str | None, isatty: bool) -> SimpleNamespace:
21 | """Create a fake stdout object with specified encoding and isatty behavior."""
22 | return SimpleNamespace(encoding=encoding, isatty=lambda: isatty)
23 |
24 |
def test_supports_utf8_true_with_stdout_encoding(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A stdout reporting a UTF-8 encoding should enable UTF-8 support."""
    fake_stream = _fake_stdout(encoding="UTF-8", isatty=True)
    monkeypatch.setattr(sys, "stdout", fake_stream)

    assert supports_utf8()
32 |
33 |
def test_supports_utf8_uses_locale_fallback(monkeypatch: pytest.MonkeyPatch) -> None:
    """With no stdout encoding, the locale's preferred encoding decides."""
    monkeypatch.setattr(locale, "getpreferredencoding", lambda do_setlocale: "latin-1")
    monkeypatch.setattr(sys, "stdout", _fake_stdout(encoding=None, isatty=True))

    # latin-1 is not UTF-8 compatible, so capability detection must fail.
    assert not supports_utf8()
40 |
41 |
def test_supports_color_false_when_not_tty(monkeypatch: pytest.MonkeyPatch) -> None:
    """Color support is disabled when stdout is not attached to a TTY."""
    monkeypatch.setattr(sys, "platform", "linux")
    monkeypatch.setattr(sys, "stdout", _fake_stdout(encoding="UTF-8", isatty=False))

    assert not supports_color()
48 |
49 |
def test_supports_color_true_on_linux_tty(monkeypatch: pytest.MonkeyPatch) -> None:
    """A UTF-8 capable TTY on Linux should report color support."""
    monkeypatch.setattr(sys, "platform", "linux")
    monkeypatch.setattr(sys, "stdout", _fake_stdout(encoding="UTF-8", isatty=True))

    assert supports_color()
56 |
57 |
def test_supports_color_windows_session(monkeypatch: pytest.MonkeyPatch) -> None:
    """Windows Terminal sessions (WT_SESSION set) are treated as color-capable."""
    monkeypatch.setenv("WT_SESSION", "1")
    monkeypatch.setattr(sys, "platform", "win32")
    monkeypatch.setattr(sys, "stdout", _fake_stdout(encoding="UTF-8", isatty=True))

    assert supports_color()
65 |
66 |
def test_supports_color_windows_without_capabilities(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A plain Windows console without ANSI markers reports no color support."""
    monkeypatch.setattr(sys, "stdout", _fake_stdout(encoding="UTF-8", isatty=True))
    monkeypatch.setattr(sys, "platform", "win32")
    for marker in ("WT_SESSION", "ANSICON", "TERM_PROGRAM"):
        monkeypatch.delenv(marker, raising=False)
    # Make os.environ.copy() return an empty mapping so no stray terminal
    # markers from the host environment leak into the capability check.
    monkeypatch.setattr(os.environ, "copy", lambda: {})

    assert not supports_color()
78 |
--------------------------------------------------------------------------------
/src/web/middlewares/request_logging.py:
--------------------------------------------------------------------------------
1 | """Middlewares for handling requests and responses."""
2 |
3 | import time
4 | from collections.abc import Callable
5 | from io import BytesIO
6 |
7 | from fastapi.responses import Response
8 | from starlette.middleware.base import BaseHTTPMiddleware
9 | from starlette.requests import Request
10 |
11 | from src import log
12 |
13 | __all__ = ["RequestLoggingMiddleware"]
14 |
15 |
class RequestLoggingMiddleware(BaseHTTPMiddleware):
    """Middleware to log all incoming requests and responses."""

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        """Process the incoming request, log details, and capture the response.

        Textual bodies (JSON or ``text/*``) are truncated to 1000 characters;
        any other content type is summarized by type and size so binary
        payloads never end up in the logs.

        Args:
            request (Request): The incoming HTTP request.
            call_next (Callable): Function to call the next middleware or endpoint.

        Returns:
            Response: The HTTP response generated by the endpoint.
        """
        # perf_counter() is monotonic and meant for measuring durations;
        # time() can jump if the wall clock is adjusted.
        start_time = time.perf_counter()

        request_info = (
            f"{request.method} {request.url.path}"
            f"{f'?{request.url.query}' if request.url.query else ''} "
            f"from {request.client.host if request.client else 'unknown'}"
        )

        # Capture the request body without consuming the stream for handlers.
        body_info = ""
        if request.method in ("POST", "PUT", "PATCH"):
            try:
                body = await request.body()

                # Re-prime the request so downstream handlers can still read
                # the body: Starlette caches it on `_body`.
                # NOTE(review): `scope["body"]` is not a standard ASGI key --
                # confirm what downstream consumer expects a BytesIO here.
                request._body = body
                request.scope["body"] = BytesIO(body)

                if body:
                    content_type = request.headers.get("content-type", "").lower()
                    if "application/json" in content_type or "text/" in content_type:
                        try:
                            body_str = body.decode("utf-8")
                            if len(body_str) > 1000:
                                body_str = body_str[:1000] + "..."
                            body_info = f" Body: {body_str}"
                        except UnicodeDecodeError:
                            # Declared textual but not valid UTF-8; log size only.
                            body_info = f" Body: <non-UTF-8 text, {len(body)} bytes>"
                    else:
                        body_info = (
                            f" Body: <{content_type or 'unknown'}, {len(body)} bytes>"
                        )
            except Exception as e:
                # Never let logging break request handling; record the failure.
                body_info = f" Body: <unavailable: {e}>"

        full_request_info = request_info + body_info

        try:
            response = await call_next(request)
            process_time = time.perf_counter() - start_time

            log.debug(
                f"Request: {full_request_info} -> "
                f"Response: {response.status_code} ({process_time:.3f}s)"
            )

            return response
        except Exception:
            process_time = time.perf_counter() - start_time
            log.debug(f"Request: {full_request_info} -> Failed ({process_time:.3f}s)")
            raise
79 |
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | """PlexAniBridge Initialization Module."""
2 |
3 | import os
4 | import uuid
5 |
6 | from src.config.settings import get_config
7 | from src.utils.logging import get_logger
8 | from src.utils.terminal import supports_utf8
9 | from src.utils.version import get_docker_status, get_git_hash, get_pyproject_version
10 |
11 | __author__ = "Elias Benbourenane "
12 | __credits__ = ["eliasbenb"]
13 | __license__ = "MIT"
14 | __maintainer__ = "eliasbenb"
15 | __email__ = "eliasbenbourenane@gmail.com"
16 | __version__ = get_pyproject_version()
17 | __git_hash__ = get_git_hash()
18 |
19 |
20 | config = get_config()
21 | log = get_logger()
22 |
23 | if supports_utf8():
24 | PLEXANIBDRIGE_HEADER = f"""
25 | ╔═══════════════════════════════════════════════════════════════════════════════╗
26 | ║ P L E X A N I B R I D G E ║
27 | ╠═══════════════════════════════════════════════════════════════════════════════╣
28 | ║ ║
29 | ║ Version: {__version__:<68}║
30 | ║ Git Hash: {__git_hash__:<67}║
31 | ║ Docker: {"Yes" if get_docker_status() else "No":<69}║
32 | ║ Author: {f"{__author__} @{__maintainer__}":<69}║
33 | ║ License: {__license__:<68}║
34 | ║ Repository: https://github.com/eliasbenb/PlexAniBridge ║
35 | ║ Documentation: https://plexanibridge.elias.eu.org ║
36 | ║ ║
37 | ╚═══════════════════════════════════════════════════════════════════════════════╝
38 | """.strip()
39 | else:
40 | PLEXANIBDRIGE_HEADER = f"""
41 | +-------------------------------------------------------------------------------+
42 | | P L E X A N I B R I D G E |
43 | +-------------------------------------------------------------------------------+
44 | | |
45 | | Version: {__version__:<68}|
46 | | Git Hash: {__git_hash__:<67}|
47 | | Docker: {"Yes" if get_docker_status() else "No":<69}|
48 | | Author: {f"{__author__} @{__maintainer__}":<69}|
49 | | License: {__license__:<68}|
50 | | Repository: https://github.com/eliasbenb/PlexAniBridge |
51 | | Documentation: https://plexanibridge.elias.eu.org |
52 | | |
53 | +-------------------------------------------------------------------------------+
54 | """.strip()
55 |
56 | # The below environment variables are consumed by the python-plexapi library
57 | # and are used to identify the client making the requests to the Plex server.
58 | # Having a consistent identifier is important so that the server doesn't think
59 | # the client is a new one every time it starts (which causes "New Device"
60 | # notifications)
61 | os.environ["PLEXAPI_HEADER_IDENTIFIER"] = uuid.uuid3(
62 | uuid.NAMESPACE_DNS, "PlexAniBridge"
63 | ).hex
64 | os.environ["PLEXAPI_HEADER_DEVICE_NAME"] = "PlexAniBridge"
65 | os.environ["PLEXAPI_HEADER_VERSION"] = __version__
66 | os.environ["PLEXAPI_HEADER_PROVIDES"] = ""
67 | os.environ["PLEXAPI_PLEXAPI_AUTORELOAD"] = "0"
68 |
--------------------------------------------------------------------------------
/docs/faq.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: FAQ
3 | icon: material/frequently-asked-questions
4 | ---
5 |
6 | ## Why is my repeat count incorrect?
7 |
8 | PlexAniBridge relies on your Plex server to provide accurate `viewCount` attributes when calculating the repeat count. It is a known issue that Plex may not always update this count reliably or in a way users might expect.
9 |
10 | Certain actions can make the `viewCount` behave unexpectedly or become corrupted. Common causes include deleting and re-adding items, syncing play history across multiple devices, and manually marking an item as watched/unwatched.
11 |
12 | If you notice discrepancies in repeat counts, consider querying your Plex server directly to verify the `viewCount` values for specific items. If the counts are incorrect at the source, PlexAniBridge will reflect those inaccuracies. See [#174](https://github.com/eliasbenb/PlexAniBridge/issues/174) for more details.
13 |
14 | _Note: the `viewCount` attribute **is not** equivalent to the number of items under "View Play History" in the Plex UI._
15 |
16 | ## Why are there no mappings for X?
17 |
18 | While PlexAniBridge aims to cover as many titles as possible (and we are proud to say we have one of the most comprehensive mapping databases available), there are still some titles that may not be mapped. If you get a "not found" message for one of your titles, it could be due to several reasons:
19 |
20 | - The title is very new or obscure and has not yet been added to the mapping database.
21 | - The title is uncorrectable due to mismatches across databases (see [PlexAniBridge-Mappings#known-issues](https://github.com/eliasbenb/PlexAniBridge-Mappings#known-issues)).
22 | - We just missed it!
23 |
24 | If you find a title that is not mapped, please consider submitting a pull request to the [PlexAniBridge-Mappings](https://github.com/eliasbenb/PlexAniBridge-Mappings) repository with your corrections or additions.
25 |
26 | ## Why doesn't X sync when it's in the mappings?
27 |
28 | If Plex is not correctly identifying or updating the metadata for an item, it may not sync properly. Ensure that item is correctly matched to the TVDB/TMDB/IMDb ID in your mappings and try refreshing the metadata in Plex.
29 |
30 | ## Is the HAMA agent supported?
31 |
32 | No, PlexAniBridge does not support the HAMA agent. It is recommended to use the default Plex TV and Movie agents for best compatibility (we recommend the "TheTVDB" episode ordering for TV shows).
33 |
34 | Support is not planned for HAMA since it is [slated for deprecation](https://forums.plex.tv/t/important-information-for-users-running-plex-media-server-on-nvidia-shield-devices/883484) in the near future.
35 |
36 | ## 401 Unauthorized when using webhooks with HTTP Basic Authentication
37 |
38 | If you are attempting to use Plex webhooks with PlexAniBridge and have enabled HTTP Basic Authentication for the web UI, you may encounter `401 Unauthorized` errors.
39 |
To resolve this, ensure that you include the credentials in the webhook URL used in Plex. Example webhook URL: `http://username:password@<host>:<port>/webhook/plex`.
41 |
42 | You may need to encode special characters in your username or password using [URL encoding](https://meyerweb.com/eric/tools/dencoder/). For example, if your password is `p@ssw0rd`, you would encode it as `p%40ssw0rd`.
43 |
--------------------------------------------------------------------------------
/src/web/state.py:
--------------------------------------------------------------------------------
1 | """Global web application state utilities.
2 |
3 | Holds references to long-lived singletons (scheduler, log broadcaster, etc.) needed by
4 | route handlers and websocket endpoints.
5 | """
6 |
7 | from __future__ import annotations
8 |
9 | from collections.abc import Callable
10 | from contextlib import suppress
11 | from datetime import UTC, datetime
12 | from functools import lru_cache
13 | from typing import TYPE_CHECKING, Any
14 |
15 | from src.core.anilist import AniListClient
16 |
17 | __all__ = ["AppState", "get_app_state"]
18 |
19 | if TYPE_CHECKING:
20 | from plexapi.server import PlexServer
21 |
22 | from src.core.sched import SchedulerClient
23 |
24 |
class AppState:
    """Container for global web application state."""

    def __init__(self) -> None:
        """Initialize empty state containers and record process start time."""
        self.plex: PlexServer | None = None
        self.scheduler: SchedulerClient | None = None
        self.public_anilist: AniListClient | None = None
        self.on_shutdown_callbacks: list[Callable[[], Any]] = []
        self.started_at: datetime = datetime.now(UTC)

    def set_scheduler(self, scheduler: SchedulerClient) -> None:
        """Attach the scheduler client used by route handlers.

        Args:
            scheduler (SchedulerClient): The scheduler client instance to set.
        """
        self.scheduler = scheduler

    def add_shutdown_callback(self, cb: Callable[[], Any]) -> None:
        """Register a callback to run during application shutdown.

        Args:
            cb (Callable[[], Any]): The callback function to register.
        """
        self.on_shutdown_callbacks.append(cb)

    async def ensure_public_anilist(self) -> AniListClient:
        """Get or lazily create the shared public AniList client.

        Returns:
            AniListClient: A tokenless AniList client suitable for public
                queries.
        """
        client = self.public_anilist
        if client is None:
            client = AniListClient(
                anilist_token=None,
                backup_dir=None,
                dry_run=False,
                profile_name="public",
            )
            # Store before awaiting initialize() so concurrent callers reuse
            # the same instance (matches the original assignment order).
            self.public_anilist = client
            await client.initialize()
        return client

    async def shutdown(self) -> None:
        """Run registered shutdown callbacks, ignoring individual failures,
        then close the shared public AniList client if one was created.
        """
        for callback in self.on_shutdown_callbacks:
            with suppress(Exception):
                result = callback()
                # Callbacks may be sync or async; await only awaitables.
                if hasattr(result, "__await__"):
                    await result

        if self.public_anilist is not None:
            with suppress(Exception):
                await self.public_anilist.close()
            self.public_anilist = None
86 |
87 |
@lru_cache(maxsize=1)
def get_app_state() -> AppState:
    """Return the process-wide application state singleton.

    The ``lru_cache(maxsize=1)`` wrapper guarantees every caller receives
    the same lazily constructed ``AppState`` instance.

    Returns:
        AppState: The application state instance.
    """
    return AppState()
96 |
--------------------------------------------------------------------------------
/typings/plexapi/utils.pyi:
--------------------------------------------------------------------------------
1 | import io
2 | import logging
3 | from collections.abc import Callable, Iterator
4 | from datetime import datetime
5 | from logging import Logger
6 | from typing import Any
7 | from xml.etree.ElementTree import Element, ElementTree
8 |
9 | from _typeshed import Incomplete
10 |
11 | from plexapi.base import PlexObject
12 | from plexapi.library import LibrarySection
13 | from plexapi.myplex import MyPlexAccount, MyPlexDevice
14 | from plexapi.server import PlexServer
15 |
# Module logger plus the lookup tables pairing Plex search/tag names with
# numeric codes; the REVERSE* dicts invert the corresponding forward tables.
log: Logger
SEARCHTYPES: dict[str, int]
REVERSESEARCHTYPES: dict[int, str]
TAGTYPES: dict[str, int]
REVERSETAGTYPES: dict[int, str]
PLEXOBJECTS: dict[str, PlexObject]

# logging.Filter that redacts registered secret strings from log records.
class SecretsFilter(logging.Filter):
    # Secret strings to scrub from emitted log messages.
    secrets: set[str]
    def __init__(self, secrets: set[str] | None = None) -> None: ...
    def add_secret(self, secret: str) -> str: ...
    def filter(self, record: logging.LogRecord) -> bool: ...
28 |
def registerPlexObject(cls: PlexObject) -> PlexObject: ...
def getPlexObject(ehash: str, default: PlexObject) -> PlexObject | None: ...
def cast(
    func: Callable[[Any], str | int | float | bool], value: Any
) -> str | int | float | bool: ...
def joinArgs(args: dict) -> str: ...
def lowerFirst(s: str) -> str: ...
def rget(obj: Any, attrstr: str, default: Any = None, delim: str = ".") -> Any: ...

# searchType/tagType resolve a name to its numeric code via SEARCHTYPES /
# TAGTYPES (dict[str, int]); reverseSearchType/reverseTagType map a numeric
# code back to its name via REVERSESEARCHTYPES / REVERSETAGTYPES
# (dict[int, str]). Return types follow those table declarations.
def searchType(libtype: str) -> int: ...
def reverseSearchType(libtype: int | str) -> str: ...
def tagType(tag: str) -> int: ...
def reverseTagType(tag: int | str) -> str: ...
def threaded(callback: Callable[[Any], Any], listargs: list[list]) -> Any: ...
def toDatetime(value: str, format: str | None = None) -> datetime | None: ...
def millisecondToHumanstr(milliseconds: int) -> str: ...
def toList(
    value: Any, itemcast: Callable[[str], Any] | None = None, delim: str = ","
) -> list[Any]: ...
def cleanFilename(filename: str, replace: str = "_") -> str: ...
def downloadSessionImages(
    server: PlexServer,
    filename: Incomplete | None = None,  # TODO: type
    height: int = 150,
    width: int = 150,
    opacity: int = 100,
    saturation: int = 100,
) -> dict: ...
def download(
    url: str,
    token: str,
    filename: str | None = None,
    savepath: str | None = None,
    session: int | None = None,
    chunksize: int = 4096,
    unpack: bool = False,
    mocked: bool = False,
    showstatus: bool = False,
) -> str: ...
def getMyPlexAccount(opts: Incomplete | None = None) -> MyPlexAccount: ...  # TODO: type
def createMyPlexDevice(
    headers: dict, account: MyPlexAccount, timeout: int = 10
) -> MyPlexDevice: ...
def plexOAuth(headers: dict, forwardUrl: str | None = None, timeout: int = 120): ...
def choose(msg: str, items: list, attr): ...
def getAgentIdentifier(section: LibrarySection, agent: str) -> str: ...
def base64str(text: str) -> str: ...
def deprecated(message: str, stacklevel: int = 2): ...
def iterXMLBFS(root: ElementTree, tag: str | None = None) -> Iterator[Element]: ...
def toJson(obj: object, **kwargs) -> str: ...
def openOrRead(file: str | io.BufferedReader): ...
def sha1hash(guid: str) -> str: ...
def cleanXMLString(s: str) -> str: ...
81 |
--------------------------------------------------------------------------------
/docs/recovery/disaster-recovery.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Disaster Recovery
3 | icon: material/weather-cloudy-alert
4 | ---
5 |
6 | Given that software will always be susceptible to bugs, PlexAniBridge offers multiple recovery features: daily automatic AniList backups, in-app restore, and a per‑sync undo capability on the timeline.
7 |
8 | !!! tip "Prevention"
9 |
10 | Before running PlexAniBridge for the first time, it is recommended to try a [dry run](../configuration.md#dry_run) to see what changes will be made without actually making them. This can help you identify any potential issues before they occur.
11 |
12 | ## Backups
13 |
14 | PlexAniBridge creates a JSON snapshot of the current AniList list data on startup and on a daily schedule. These backups are stored under the data folder (defined in `PAB_DATA_PATH`) in the `backups` directory as JSON files named like:
15 |
16 | ```
plexanibridge-<profile>.<timestamp>.json
18 | ```
19 |
20 | You can work with these backups in two ways:
21 |
22 | 1. Web UI (recommended for most cases) - browse, preview, and restore directly.
23 | 2. CLI [restore script](https://github.com/eliasbenb/PlexAniBridge/blob/HEAD/scripts/anilist_restore.py) (legacy, deprecated).
24 |
25 | !!! warning
26 |
27 | Backups are kept for 30 days by default. Adjust [BACKUP_RETENTION_DAYS](../configuration.md#backup_retention_days) if you need a longer retention window.
28 |
29 | ### Viewing & Restoring Backups in the Web UI
30 |
31 | 1. Open the Web UI and navigate to: Backups → select a profile.
32 | 2. You will see a table of recent backups (filename, created time, size, age, detected user if available).
33 | 3. Click Preview to open a highlighted JSON view (no data is changed).
34 | 4. Click Restore to apply that snapshot back to AniList for the profile.
35 | 5. A toast will indicate success; any individual sync outcomes will appear later on the timeline.
36 |
37 | !!! warning
38 |
39 | Initiating a restore will **overwrite all current AniList entries** for that profile.
40 |
41 | ## Restoring from Backups (CLI Script)
42 |
43 | _This method is no longer recommended for typical users; prefer the Web UI above._
44 |
To restore from a backup without the Web UI, use the [restore script](https://github.com/eliasbenb/PlexAniBridge/blob/HEAD/scripts/anilist_restore.py) in the `scripts` folder. You will need to pass the backup file and AniList token as arguments; run the script with `--help` for the exact usage.
46 |
47 | ## Undoing Individual Sync Changes
48 |
49 | In addition to full restores, you can undo specific sync operations directly from the Timeline page.
50 |
51 | Each timeline entry representing a change (e.g. a creation, update, or deletion) exposes an Undo button when it is logically reversible. When clicked, PlexAniBridge applies an inverse operation to restore the previous state and creates a new timeline entry marked as `undone`.
52 |
53 | ### Undo Is Available When
54 |
55 | | Original Outcome | Before State | After State | Meaning | Undo Action |
56 | | ---------------- | ------------ | ----------- | ------------- | ---------------- |
57 | | synced | present | present | Updated entry | Revert to before |
58 | | synced | null | present | Created entry | Delete it |
59 | | deleted | present | null | Deleted entry | Restore it |
60 |
61 | _Note: Undos that are supposed to cause an entry deletion will not take effect if [DESTRUCTIVE_SYNC](../configuration.md#destructive_sync) is disabled._
62 |
--------------------------------------------------------------------------------
/docs/quick-start/docker.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Docker
3 | icon: material/docker
4 | ---
5 |
6 | ## Requirements
7 |
8 | - [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/)
9 | - AMD64 or ARM64 CPU architecture (or build the image yourself for other architectures)
10 |
11 | ## Setup
12 |
13 | ### Docker Compose
14 |
15 | Below is a PlexAniBridge Docker compose file with example values. Optional environment variables are commented out.
16 |
17 | ```yaml title="compose.yaml"
18 | --8<-- "docs/compose.yaml"
19 | ```
20 |
21 | !!! tip "PlexAniBridge Configuration"
22 |
23 | Have a look at [the configuration page](../configuration.md) for a detailed list of configurable environment variables.
24 |
25 | !!! tip "Docker Variables"
26 |
While configuring the Docker variables is not required, it is highly recommended to improve file permission handling and debugging.
28 |
29 | Setting the `PUID` and `PGID` variables allows PlexAniBridge to run with the same permissions as the user running the container, which is important if you want to access files on the host system. You can find your user ID and group ID by running `id -u` and `id -g` in the terminal.
30 |
31 | The `UMASK` variable sets the default file permissions for new files created by the container. A common value is `022`, which gives read and execute permissions to everyone, but only write permissions to the owner.
32 |
33 | The `TZ` variable sets the timezone for the container, which is useful for logging and scheduling tasks. You can search for your timezone in the [list of tz database time zones](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) Wikipedia page.
34 |
35 | ```yaml
36 | environment:
37 | PUID: 1000
38 | PGID: 1000
39 | UMASK: 022
40 | TZ: "Etc/UTC"
41 | ```
42 |
43 | To start the container, run:
44 |
45 | ```shell
46 | docker compose -f compose.yaml up -d
47 | ```
48 |
49 | !!! tip "Image Tags"
50 |
51 | You can pin the image to a specific version or branch by changing `latest` to a specific tag. Some available tags are:
52 |
53 | - `vX.Y.Z`, `X.Y.Z`: A specific version from the [releases page](https://github.com/eliasbenb/PlexAniBridge/releases) (e.g. `v1.0.0`)
54 | - `vX.Y`, `X.Y`: The latest release in a specific minor version series (e.g. `v1.0` for the latest `1.0.x` release)
55 | - `vX`, `X`: The latest release in a specific major version series (e.g. `v1` for the latest `1.x.x` release)
56 | - `beta`: The latest beta release (may be unstable)
57 | - `alpha`: The latest alpha release (may be unstable)
58 | - `main`: The latest commit on the `main` branch, which is usually tied to the latest release
59 | - `develop`: The latest commit on the `develop` branch (may be unstable)
60 | - `experimental`: The latest commit on the `experimental` branch (may be unstable)
61 | - `latest`: The latest stable release
62 |
63 | ### Docker CLI
64 |
65 | Below is a minimal example of a Docker run command with only the required variables.
66 |
67 | ```shell
68 | docker run \
69 | --name plexanibridge \
70 | -e PUID=1000 \
71 | -e PGID=1000 \
72 | -e UMASK=022 \
73 | -e TZ=Etc/UTC \
74 | -e PAB_ANILIST_TOKEN=... \
75 | -e PAB_PLEX_TOKEN=... \
76 | -e PAB_PLEX_USER=... \
77 | -e PAB_PLEX_URL=... \
78 | -p 4848:4848 \
79 | -v /path/to/plexanibridge/data:/config \
80 | ghcr.io/eliasbenb/plexanibridge:v1
81 | ```
82 |
--------------------------------------------------------------------------------
/docs/css/htpasswd-generator.css:
--------------------------------------------------------------------------------
/*
 * Styling for the htpasswd generator widget embedded in the documentation.
 * Colors, shadows, and accents come from Material for MkDocs CSS custom
 * properties (--md-*), so the widget automatically tracks the site theme.
 */
.htpasswd-generator {
  /* Local design tokens: border radius, input padding, small/large gaps. */
  --r: 0.5rem;
  --p: 0.6rem 0.75rem;
  --g: 0.8rem;
  --g-lg: 1rem;

  display: grid;
  gap: var(--g-lg);
  padding: 1rem;
  background: var(--md-default-bg-color--lightest);
  border: 1px solid var(--md-default-bg-color--lighter);
  border-radius: 0.75rem;
  box-shadow: var(--md-shadow-z1);
}

/* Layout */
.htpasswd-generator__grid {
  display: grid;
  grid-template-columns: 1fr 1fr auto;
  gap: var(--g);
  align-items: end;
}

/* Stack the form fields vertically on narrow viewports. */
@media (max-width: 640px) {
  .htpasswd-generator__grid {
    grid-template-columns: 1fr;
  }
}

/* Labels & Inputs */
.htpasswd-generator label {
  display: grid;
  gap: 0.35rem;
  font-size: 0.64rem;
  color: var(--md-default-fg-color--light);
}

.htpasswd-generator input,
.htpasswd-generator textarea {
  width: 100%;
  padding: var(--p);
  border: 1px solid var(--md-default-bg-color--lighter);
  border-radius: var(--r);
  background: var(--md-code-bg-color);
  color: var(--md-typeset-color);
  outline: none;
  transition:
    border-color 0.15s,
    box-shadow 0.15s,
    background 0.15s;
}

.htpasswd-generator ::placeholder {
  color: var(--md-default-fg-color--lighter);
}

/* Custom focus ring using the theme accent instead of the UA default. */
.htpasswd-generator input:focus,
.htpasswd-generator textarea:focus {
  border-color: var(--md-typeset-a-color);
  box-shadow: 0 0 0 0.2rem var(--md-accent-fg-color--transparent);
}

/* Output */
.htpasswd-generator__output {
  display: grid;
  gap: var(--g);
}

.htpasswd-generator textarea {
  resize: vertical;
  color: var(--md-code-fg-color);
  box-shadow: inset var(--md-shadow-z1);
}

/* Buttons */
.htpasswd-generator__actions {
  display: flex;
  gap: 0.5rem;
}

/* Outline style by default; the submit button below is filled. */
.htpasswd-generator__actions button {
  appearance: none;
  padding: 0.55rem 0.9rem;
  font-weight: 600;
  border-radius: 0.6rem;
  border: 1px solid var(--md-primary-fg-color);
  background: transparent;
  color: var(--md-primary-fg-color);
  cursor: pointer;
  transition:
    background 0.15s,
    color 0.15s,
    border-color 0.15s,
    box-shadow 0.15s;
  box-shadow: var(--md-shadow-z1);
}

.htpasswd-generator__actions button[type="submit"] {
  background: var(--md-primary-fg-color);
  color: var(--md-primary-bg-color);
}

.htpasswd-generator__actions button:hover {
  box-shadow: var(--md-shadow-z2);
}

.htpasswd-generator__actions button[type="submit"]:hover {
  filter: brightness(1.05);
}

/* Keyboard focus gets the accent ring on top of the hover shadow. */
.htpasswd-generator__actions button:focus-visible {
  box-shadow:
    0 0 0 0.2rem var(--md-accent-fg-color--transparent),
    var(--md-shadow-z2);
}

.htpasswd-generator__actions button:disabled,
.htpasswd-generator__actions button[aria-disabled="true"] {
  opacity: 0.6;
  cursor: not-allowed;
  box-shadow: none;
}

.htpasswd-generator__feedback {
  font-size: 0.64rem;
  color: var(--md-default-fg-color--light);
}

/* Allow one-click select-all of the generated htpasswd line. */
[data-htpasswd-output][readonly] {
  user-select: all;
}
132 |
--------------------------------------------------------------------------------
/alembic/versions/2025-01-15-15-52_b2ad27e14048.py:
--------------------------------------------------------------------------------
1 | """Migration to the PlexAniBridge mapping database
2 |
3 | Revision ID: b2ad27e14048
4 | Revises: 6e710e6677c0
5 | Create Date: 2025-01-15 15:52:56.167462
6 |
7 | """
8 |
9 | import sqlalchemy as sa
10 |
11 | from alembic import op
12 |
13 | # revision identifiers, used by Alembic.
14 | revision = "b2ad27e14048"
15 | down_revision = "6e710e6677c0"
16 | branch_labels = None
17 | depends_on = None
18 |
19 |
def upgrade() -> None:
    """Recreate the ``animap`` table keyed on ``anilist_id``.

    A temporary ``animap_new`` table is built with the new schema, swapped
    into place, and the indexes are rebuilt. The existing ``animap`` rows are
    dropped, not copied.
    """
    # Drop any leftover temp table from a previously interrupted migration.
    op.drop_table("animap_new", if_exists=True)
    op.create_table(
        "animap_new",
        sa.Column("anilist_id", sa.Integer, primary_key=True),
        sa.Column("anidb_id", sa.Integer, primary_key=False, nullable=True),
        sa.Column("imdb_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("mal_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("tmdb_movie_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("tmdb_show_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("tvdb_id", sa.Integer, nullable=True),
        sa.Column("tvdb_epoffset", sa.Integer, nullable=True),
        sa.Column("tvdb_season", sa.Integer, nullable=True),
    )

    # Swap the freshly built table into place (old data is discarded).
    op.drop_table("animap")
    op.rename_table("animap_new", "animap")

    # anidb_id is indexed unique; the remaining ID columns may repeat.
    op.create_index("ix_animap_anidb_id", "animap", ["anidb_id"], unique=True)
    op.create_index("ix_animap_imdb_id", "animap", ["imdb_id"], unique=False)
    op.create_index("ix_animap_mal_id", "animap", ["mal_id"], unique=False)
    op.create_index(
        "ix_animap_tmdb_movie_id", "animap", ["tmdb_movie_id"], unique=False
    )
    op.create_index("ix_animap_tmdb_show_id", "animap", ["tmdb_show_id"], unique=False)
    op.create_index("ix_animap_tvdb_id", "animap", ["tvdb_id"], unique=False)

    # Clear the data in the house_keeping table
    # (presumably to trigger a fresh mapping download on next start -- confirm).
    op.execute("DELETE FROM house_keeping")
49 |
50 |
def downgrade() -> None:
    """Restore the pre-migration ``animap`` schema.

    Only the table structure and indexes are reverted; the data dropped
    by :func:`upgrade` is not recoverable.
    """
    # Rebuild the old layout under a temporary name first.
    op.create_table(
        "animap_old",
        # anilist_id reverts to being the original non-nullable primary key.
        sa.Column("anilist_id", sa.Integer, primary_key=True, nullable=False),
        sa.Column("anidb_id", sa.Integer, nullable=True),
        sa.Column("imdb_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("mal_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("tmdb_movie_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("tmdb_show_id", sa.JSON(none_as_null=True), nullable=True),
        sa.Column("tvdb_id", sa.Integer, nullable=True),
        sa.Column("tvdb_epoffset", sa.Integer, nullable=True),
        sa.Column("tvdb_season", sa.Integer, nullable=True),
    )

    # Replace the current table with the rebuilt one.
    op.drop_table("animap")
    op.rename_table("animap_old", "animap")

    # Recreate the old index set; anilist_id and anidb_id were unique.
    for name, unique in (
        ("anilist_id", True),
        ("anidb_id", True),
        ("imdb_id", False),
        ("mal_id", False),
        ("tmdb_movie_id", False),
        ("tmdb_show_id", False),
        ("tvdb_id", False),
    ):
        op.create_index(f"ix_animap_{name}", "animap", [name], unique=unique)
84 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "PlexAniBridge"
3 | version = "1.10.1"
4 | description = "The smart way to keep your AniList profile perfectly synchronized with your Plex library."
5 | readme = "README.md"
6 | requires-python = ">=3.13"
7 | license = { text = "MIT" }
8 |
9 | authors = [
10 | { name = "Elias Benbourenane", email = "eliasbenbourenane@gmail.com" },
11 | ]
12 | maintainers = [
13 | { name = "Elias Benbourenane", email = "eliasbenbourenane@gmail.com" },
14 | ]
15 |
16 | keywords = [
17 | "docker",
18 | "sync",
19 | "scrobble",
20 | "synchronization",
21 | "monitoring",
22 | "anime",
23 | "plex",
24 | "anilist",
25 | "plexpy",
26 | "plexanibridge",
27 | ]
28 | classifiers = [
29 | "Programming Language :: Python :: 3",
30 | "Programming Language :: Python :: 3.13",
31 | "License :: OSI Approved :: MIT License",
32 | "Operating System :: OS Independent",
33 | ]
34 | dependencies = [
35 | "aiocache>=0.12.3",
36 | "aiohttp>=3.13.1",
37 | "alembic>=1.17.0",
38 | "async-lru>=2.0.5",
39 | "cachetools>=6.2.1",
40 | "colorama>=0.4.6",
41 | "fastapi[all]>=0.120.0",
42 | "limiter>=0.5.0",
43 | "plexapi>=4.17.1",
44 | "pydantic>=2.12.3",
45 | "pydantic-settings>=2.11.0",
46 | "pyparsing>=3.2.5",
47 | "pyyaml>=6.0.3",
48 | "rapidfuzz>=3.14.1",
49 | "requests>=2.32.5",
50 | "ruamel-yaml>=0.18.16",
51 | "sqlalchemy>=2.0.44",
52 | "tzlocal>=5.3.1",
53 | "bcrypt>=5.0.0",
54 | ]
55 |
56 | [project.scripts]
57 | pab-build = "scripts.dev:build"
58 | pab-bump-version = "scripts.dev:bump_version"
59 | pab-clean = "scripts.dev:clean"
60 | pab-deps-install = "scripts.dev:deps_install"
61 | pab-deps-upgrade = "scripts.dev:deps_upgrade"
62 | pab-dev = "scripts.dev:dev"
63 | pab-format = "scripts.dev:format"
64 | pab-lint = "scripts.dev:lint"
65 | pab-start = "scripts.dev:start"
66 | pab-test = "scripts.dev:test"
67 | pab-openapi = "scripts.openapi:main"
68 | pab-docs-build = "scripts.dev:docs_build"
69 | pab-docs-serve = "scripts.dev:docs_serve"
70 |
71 | [project.urls]
72 | Documentation = "https://plexanibridge.elias.eu.org"
73 | Homepage = "https://plexanibridge.elias.eu.org"
74 | Issues = "https://github.com/eliasbenb/PlexAniBridge/issues"
75 | Source = "https://github.com/eliasbenb/PlexAniBridge"
76 |
77 | [tool.hatch.build.targets.wheel]
78 | packages = ["scripts"]
79 |
80 | [tool.pyright]
81 | stubPath = "typings"
82 |
83 | [tool.ruff]
84 | exclude = ["alembic"]
85 | indent-width = 4
86 | line-length = 88
87 |
88 | [tool.ruff.format]
89 | docstring-code-format = true
90 | indent-style = "space"
91 | quote-style = "double"
92 |
93 | [tool.ruff.lint]
94 | select = ["B", "D", "DOC", "E", "F", "I", "RUF", "SIM", "UP", "W"]
95 |
96 | [tool.ruff.lint.flake8-bugbear]
97 | extend-immutable-calls = [
98 | "fastapi.param_functions.Body",
99 | "fastapi.param_functions.Cookie",
100 | "fastapi.param_functions.Depends",
101 | "fastapi.param_functions.File",
102 | "fastapi.param_functions.Form",
103 | "fastapi.param_functions.Header",
104 | "fastapi.param_functions.Path",
105 | "fastapi.param_functions.Query",
106 | "fastapi.param_functions.Security",
107 | ]
108 |
109 | [tool.ruff.lint.pydocstyle]
110 | convention = "google"
111 |
112 | [tool.pytest.ini_options]
113 | pythonpath = ["src"]
114 | testpaths = ["tests"]
115 | addopts = "--cov=src"
116 |
117 | [tool.uv.workspace]
118 | members = ["docs"]
119 |
120 | [build-system]
121 | requires = ["hatchling"]
122 | build-backend = "hatchling.build"
123 |
124 | [dependency-groups]
125 | dev = [
126 | "py-spy>=0.4.1",
127 | "pytest>=8.3.3",
128 | "pytest-asyncio>=1.2.0",
129 | "pytest-cov>=7.0.0",
130 | "ruff>=0.14.2",
131 | ]
132 |
--------------------------------------------------------------------------------
/src/models/schemas/plex.py:
--------------------------------------------------------------------------------
1 | """Plex schema definitions.
2 |
3 | The models stored here won't be defined in the python-plexapi or the custom metadata
4 | server implementation and are reserved for more niche use cases.
5 | """
6 |
7 | from enum import StrEnum
8 | from functools import cached_property
9 |
10 | from pydantic import BaseModel, Field
11 |
12 |
class PlexWebhookEventType(StrEnum):
    """Enumeration of Plex webhook event types.

    Members wrap the raw ``event`` strings Plex sends in webhook
    payloads, so ``PlexWebhookEventType(value)`` parses an incoming
    event string directly.
    """

    MEDIA_ADDED = "library.new"  # new item added to a library
    ON_DECK = "library.on.deck"  # item added to the "On Deck" list
    PLAY = "media.play"  # playback started
    PAUSE = "media.pause"  # playback paused
    STOP = "media.stop"  # playback stopped
    RESUME = "media.resume"  # playback resumed
    SCROBBLE = "media.scrobble"  # item passed the ~90% watched threshold
    RATE = "media.rate"  # item rated by a user
    DATABASE_BACKUP = "admin.database.backup"  # server database backed up
    DATABASE_CORRUPTED = "admin.database.corrupted"  # database corruption detected
    NEW_ADMIN_DEVICE = "device.new"  # new device connected to the server
    SHARED_PLAYBACK_STARTED = "playback.started"  # playback started on a shared server
28 |
29 |
class Account(BaseModel):
    """Represents a Plex account involved in a webhook event."""

    id: int | None = None  # plex.tv account ID
    thumb: str | None = None  # avatar/thumbnail URL
    title: str | None = None  # account display name
36 |
37 |
class Server(BaseModel):
    """Represents a Plex server involved in a webhook event."""

    title: str | None = None  # server's friendly name
    uuid: str | None = None  # server's unique machine identifier
43 |
44 |
class Player(BaseModel):
    """Represents a Plex player involved in a webhook event."""

    # Whether the player is on the server's local network; the only
    # field without a default, so Plex must always provide it.
    local: bool
    publicAddress: str | None = None  # player's public IP address
    title: str | None = None  # player/device name
    uuid: str | None = None  # unique client identifier
52 |
53 |
class Metadata(BaseModel):
    """Represents metadata information received from a Plex webhook event.

    Field names mirror the camelCase keys of the Plex webhook payload.
    For hierarchical media, ``parent*`` and ``grandparent*`` fields refer
    to the item's ancestors (e.g. season and show for an episode).
    """

    librarySectionType: str | None = None  # e.g. "movie" or "show"
    ratingKey: str | None = None  # server-unique key of this item
    key: str | None = None  # API path to this item
    parentRatingKey: str | None = None
    grandparentRatingKey: str | None = None
    guid: str | None = None  # globally unique identifier (agent URI)
    librarySectionID: int | None = None
    type: str | None = None  # item type, e.g. "movie" or "episode"
    title: str | None = None
    year: int | None = None
    grandparentKey: str | None = None
    parentKey: str | None = None
    grandparentTitle: str | None = None
    parentTitle: str | None = None
    summary: str | None = None
    index: int | None = None  # e.g. episode number within its season
    parentIndex: int | None = None  # e.g. season number within its show
    ratingCount: int | None = None
    thumb: str | None = None  # image paths for this item and its ancestors
    art: str | None = None
    parentThumb: str | None = None
    grandparentThumb: str | None = None
    grandparentArt: str | None = None
    addedAt: int | None = None  # Unix timestamp (presumably seconds — confirm)
    updatedAt: int | None = None  # Unix timestamp (presumably seconds — confirm)
82 |
83 |
class PlexWebhook(BaseModel):
    """Represents a Plex webhook event."""

    event: str | None = None
    user: bool
    owner: bool
    account: Account | None = Field(None, alias="Account")
    server: Server | None = Field(None, alias="Server")
    player: Player | None = Field(None, alias="Player")
    metadata: Metadata | None = Field(None, alias="Metadata")

    @cached_property
    def event_type(self) -> PlexWebhookEventType | None:
        """The webhook event type (None for missing or unknown events)."""
        if self.event is None:
            return None
        # Scan the enum by value; unknown event strings yield None.
        return next(
            (member for member in PlexWebhookEventType if member.value == self.event),
            None,
        )

    @cached_property
    def account_id(self) -> int | None:
        """The webhook owner's Plex account ID."""
        if self.account is None:
            return None
        return self.account.id

    @cached_property
    def top_level_rating_key(self) -> str | None:
        """The top-level rating key for the media item."""
        meta = self.metadata
        if meta is None:
            return None
        # Prefer the outermost ancestor key (show > season > item).
        candidates = (
            meta.grandparentRatingKey,
            meta.parentRatingKey,
            meta.ratingKey,
        )
        for key in candidates[:-1]:
            if key:
                return key
        return candidates[-1]
120 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Contributions to PlexAniBridge are always appreciated! Please follow the guidelines below to help us maintain a high-quality codebase.
4 |
5 | ## Development
6 |
7 | This guide is for developers who want to contribute to the backend of the PlexAniBridge project. It assumes you'll be using Visual Studio Code with Dev Containers (requires Docker). Of course, you are free to use any IDE you prefer, but the VS Code IDE and Devcontainer settings are already pre-configured with linting and dependency management for you.
8 |
9 | _If you decide to use a different IDE, you will need to set up the environment manually and infer the project's dependencies yourself. Take a look at [`.devcontainer/devcontainer.json`](/.devcontainer/devcontainer.json) and [`.vscode/settings.json`](/.vscode/settings.json) for an idea of what's required._
10 |
11 | ### Tools required
12 |
13 | - [Visual Studio Code](https://code.visualstudio.com/) with the [Dev Containers](https://code.visualstudio.com/docs/remote/containers) extension
14 | - [Docker](https://www.docker.com/)
15 |
16 | ### Getting started
17 |
18 | 1. Fork PlexAniBridge.
19 | 2. Clone the repository into your development machine ([_info_](https://docs.github.com/en/get-started/quickstart/fork-a-repo)).
20 | 3. Open the project in Visual Studio Code, it will prompt you to reopen in a dev container, do so.
21 | 4. If you didn't receive a prompt, you can also open the command palette (Ctrl+Shift+P) and select "Reopen in Container".
22 | 5. The dev container will build and setup all required packages and dependencies.
23 | 6. Once the dev container is ready, you can activate the Python virtual environment with `source .venv/bin/activate`.
24 |
25 | ## Documentation
26 |
27 | If you are only making changes to the documentation, you can opt to clone the repository and edit the documentation files in your preferred text editor or directly in the GitHub web UI. The documentation is located in the `docs/` directory.
28 |
29 | 1. Fork PlexAniBridge.
30 | 2. Clone the repository into your development machine ([_info_](https://docs.github.com/en/get-started/quickstart/fork-a-repo)) or edit directly in the GitHub web UI.
31 | 3. Make your changes to the documentation files in the `docs/` directory.
32 |
33 | ## Contributing Code
34 |
35 | The PlexAniBridge codebase is primarily written in Python for the backend and Svelte/TypeScript for the frontend.
36 |
The project has a set of developer scripts at `scripts/dev.py` to help with common tasks. You can run these scripts with `python scripts/dev.py <command>`. Run `python scripts/dev.py --help` to see a list of available commands.
38 |
39 | - Follow the coding standard. We use [ruff](https://docs.astral.sh/ruff/) for Python linting and [ESLint](https://eslint.org/) for JavaScript/TypeScript linting.
40 | - Run `python scripts/dev.py lint` to ensure your Python and Svelte code passes the linting rules.
41 | - Run `python scripts/dev.py format` to auto-format your Python and Svelte code.
42 | - Run `python scripts/dev.py test` to run the test suite and ensure all tests pass.
43 | - Update or add documentation in the `docs/` directory if it affects usage.
44 | - Make sure any complex or non-obvious code is explained with comments. This helps maintain readability and ease of review.
45 |
46 | ## Pull Requesting
47 |
48 | - Make pull requests to the default/HEAD branch.
49 | - Please ensure your pull request has a clear title and description.
50 | - Fill out the pull request template in its entirety, it helps us understand what your changes are and why they are needed.
- Each PR should come from its own [feature branch](http://martinfowler.com/bliki/FeatureBranch.html), not the `develop` branch of your fork, and it should have a meaningful branch name describing what is being added or fixed.
52 | - new-feature (Good)
53 | - fix-bug (Good)
54 | - patch (Bad)
55 | - develop (Bad)
56 | - Each PR should solve one issue or add one feature (or a group of meaningfully connected issues/features), if you have multiple, please create a separate PR for each.
57 | - Check for existing pull requests at https://github.com/eliasbenb/PlexAniBridge/pulls to avoid duplicates.
58 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# PlexAniBridge
2 |
3 | The smart way to keep your AniList profile perfectly synchronized with your Plex library.
4 |
5 | > [!IMPORTANT]
6 | > Visit the [PlexAniBridge documentation](https://plexanibridge.elias.eu.org) for detailed setup instructions and usage information.
7 |
8 | [](https://discord.gg/ey8kyQU9aD) [](https://github.com/eliasbenb/PlexAniBridge) [](https://github.com/eliasbenb/PlexAniBridge/pkgs/container/plexanibridge)
9 |
10 |
11 | ## Key Features
12 |
13 | - **🔄 Comprehensive Synchronization**: Automatically syncs watch status, progress, ratings, reviews, and start/completion dates between Plex and AniList.
14 | - **🎯 Smart Content Matching**: Uses a [curated mappings database](https://github.com/eliasbenb/PlexAniBridge-Mappings) with fuzzy title search fallback and support for custom mapping overrides.
15 | - **⚡ Optimized Performance**: Intelligent batch processing, rate limiting, and caching to minimize API usage while maximizing sync speed.
16 | - **👥 Multi-User & Multi-Profile**: Supports multiple Plex users (including Home users) with individual AniList accounts and configurable sync profiles.
17 | - **🖥️ Web Dashboard**: Intuitive web interface with a real-time sync timeline, profile management, custom mapping editor, and log viewer.
18 | - **🔧 Flexible Configuration**: Filter by library sections or genres and configure scanning modes (polling or periodic).
19 | - **🛡️ Safe & Reliable**: Built-in dry run mode for testing and automatic AniList backups with a [restore script](./scripts/anilist_restore.py) for easy recovery.
20 | - **🐳 Easy Deployment**: Docker-ready with easy and comprehensive environment variable configuration.
21 |
22 | 
23 |
24 | ## Docker Quick Start
25 |
26 | ```yaml
27 | services:
28 | plexanibridge:
29 | image: ghcr.io/eliasbenb/plexanibridge:v1
30 | environment:
31 | PUID: 1000
32 | PGID: 1000
33 | UMASK: 022
34 | TZ: "Etc/UTC"
35 | PAB_ANILIST_TOKEN: ...
36 | PAB_PLEX_TOKEN: ...
37 | PAB_PLEX_USER: ...
38 | PAB_PLEX_URL: ...
39 | volumes:
40 | - /path/to/plexanibridge/data:/config
41 | ports:
42 | - 4848:4848
43 | restart: unless-stopped
44 | ```
45 |
46 | ## Web UI Screenshot
47 |
48 | 
49 |
50 | _View more screenshots in the [documentation](https://plexanibridge.elias.eu.org/web/screenshots)_
51 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Data
2 | /data/**
3 | !/data/config.example.yaml
4 | !/data/mappings.example.*
5 |
6 | # Database
7 | *.db
8 | *.db-journal
9 | *.sqlite3
10 |
11 | # Byte-compiled / optimized / DLL files
12 | __pycache__/
13 | *.py[cod]
14 | *$py.class
15 |
16 | # C extensions
17 | *.so
18 |
19 | # Distribution / packaging
20 | .Python
21 | build/
22 | develop-eggs/
23 | dist/
24 | downloads/
25 | eggs/
26 | .eggs/
27 | /lib/
28 | /lib64/
29 | parts/
30 | sdist/
31 | var/
32 | wheels/
33 | share/python-wheels/
34 | *.egg-info/
35 | .installed.cfg
36 | *.egg
37 | MANIFEST
38 |
39 | # PyInstaller
40 | # Usually these files are written by a python script from a template
41 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
42 | *.manifest
43 | *.spec
44 |
45 | # Installer logs
46 | pip-log.txt
47 | pip-delete-this-directory.txt
48 |
49 | # Unit test / coverage reports
50 | htmlcov/
51 | .tox/
52 | .nox/
53 | .coverage
54 | .coverage.*
55 | .cache
56 | nosetests.xml
57 | coverage.xml
58 | *.cover
59 | *.py,cover
60 | .hypothesis/
61 | .pytest_cache/
62 | cover/
63 |
64 | # Translations
65 | *.mo
66 | *.pot
67 |
68 | # Django stuff:
69 | *.log
70 | local_settings.py
71 | db.sqlite3
72 | db.sqlite3-journal
73 |
74 | # Flask stuff:
75 | instance/
76 | .webassets-cache
77 |
78 | # Scrapy stuff:
79 | .scrapy
80 |
81 | # Sphinx documentation
82 | docs/_build/
83 |
84 | # PyBuilder
85 | .pybuilder/
86 | target/
87 |
88 | # Jupyter Notebook
89 | .ipynb_checkpoints
90 |
91 | # IPython
92 | profile_default/
93 | ipython_config.py
94 |
95 | # pyenv
96 | # For a library or package, you might want to ignore these files since the code is
97 | # intended to run in multiple environments; otherwise, check them in:
98 | # .python-version
99 |
100 | # pipenv
101 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
102 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
103 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
104 | # install all needed dependencies.
105 | #Pipfile.lock
106 |
107 | # poetry
108 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
109 | # This is especially recommended for binary packages to ensure reproducibility, and is more
110 | # commonly ignored for libraries.
111 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
112 | #poetry.lock
113 |
114 | # pdm
115 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
116 | #pdm.lock
117 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
118 | # in version control.
119 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
120 | .pdm.toml
121 | .pdm-python
122 | .pdm-build/
123 |
124 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
125 | __pypackages__/
126 |
127 | # Celery stuff
128 | celerybeat-schedule
129 | celerybeat.pid
130 |
131 | # SageMath parsed files
132 | *.sage.py
133 |
134 | # Environments
135 | .env
136 | .venv
137 | env/
138 | venv/
139 | ENV/
140 | env.bak/
141 | venv.bak/
142 |
143 | # Spyder project settings
144 | .spyderproject
145 | .spyproject
146 |
147 | # Rope project settings
148 | .ropeproject
149 |
150 | # mkdocs documentation
151 | /site
152 |
153 | # mypy
154 | .mypy_cache/
155 | .dmypy.json
156 | dmypy.json
157 |
158 | # Pyre type checker
159 | .pyre/
160 |
161 | # pytype static type analyzer
162 | .pytype/
163 |
164 | # Cython debug symbols
165 | cython_debug/
166 |
167 | # PyCharm
168 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
169 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
170 | # and can be added to the global gitignore or merged into this file. For a more nuclear
171 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
172 | #.idea/
173 |
174 | # Profilers
175 | *.pyspy
176 |
177 | # Node
178 | node_modules/
179 | .pnpm-store/
180 |
--------------------------------------------------------------------------------