├── server ├── static │ └── .gitkeep └── src │ ├── tests │ ├── __init__.py │ ├── unit │ │ ├── __init__.py │ │ └── test_discovarr_prompt.py │ ├── integration │ │ ├── __init__.py │ │ ├── test_jellyfin_provider.py │ │ ├── test_plex_provider.py │ │ ├── test_openai_provider.py │ │ ├── test_ollama_provider.py │ │ ├── test_gemini_provider.py │ │ ├── test_settings.py │ │ ├── base │ │ │ ├── base_live_request_provider_tests.py │ │ │ └── base_live_library_provider_tests.py │ │ ├── test_postgres.py │ │ ├── test_trakt_provider.py │ │ ├── test_radarr_provider.py │ │ └── test_sonarr_provider.py │ ├── requirements.txt │ └── README.md │ ├── providers │ ├── __init__.py │ ├── radarr.py │ └── sonarr.py │ ├── services │ ├── __init__.py │ ├── migrations │ │ ├── 006_search.py │ │ ├── 013_settings.py │ │ ├── 012_watchhistory.py │ │ ├── 009_watchhistory.py │ │ ├── 007_search.py │ │ ├── 010_watchhistory.py │ │ ├── 004_media.py │ │ ├── 005_search.py │ │ ├── 011_poster_url_source.py │ │ ├── 008_watchhistory.py │ │ ├── 003_searchstat.py │ │ ├── 016_media_favorite.py │ │ ├── 015_media_source_provider.py │ │ ├── 002_media.py │ │ ├── 018_llmstat.py │ │ ├── __init__.py │ │ ├── 001_media.py │ │ └── 014_media_entity_type.py │ ├── response.py │ ├── tmdb.py │ ├── image_cache.py │ ├── api.py │ └── scheduler.py │ ├── requirements.txt │ ├── env.example.sh │ └── base │ ├── library_provider_base.py │ └── llm_provider_base.py ├── CONTRIBUTE.md ├── client ├── src │ ├── assets │ │ └── main.css │ ├── config.js │ ├── main.js │ ├── stores │ │ ├── movie.js │ │ ├── searchStore.js │ │ ├── toast.js │ │ └── settings.js │ ├── router │ │ └── index.js │ ├── components │ │ ├── GlobalToast.vue │ │ ├── Markdown.vue │ │ ├── EditMediaNameModal.vue │ │ ├── ExamplePrompts.vue │ │ ├── RequestModal.vue │ │ └── VideoCarousel.vue │ ├── App.vue │ └── views │ │ └── WatchHistoryView.vue ├── public │ ├── logo.png │ ├── logo1.png │ ├── logo2.png │ ├── logo3.png │ ├── aiarr_favicon.png │ └── placeholder-image.jpg ├── postcss.config.js ├── index.html ├── tailwind.config.js ├── vite.config.js └── package.json ├── .assets ├── home_page.png ├── search_page.png └── settings_page.png ├── .dockerignore ├── .vscode └── extensions.json ├── scripts ├── movies.csv └── import_watch_history.py ├── compose.example.yml ├── .gitignore ├── MIGRATE.md ├── RELEASE.md ├── start-dev.sh ├── compose.example.env.yml ├── compose.example.qa.yml ├── compose.example.dev.yml ├── Dockerfile.dev ├── CHANGELOG.md ├── entrypoint.sh ├── .github └── workflows │ └── docker-publish.yml └── Dockerfile /server/static/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /server/src/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /server/src/providers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /server/src/services/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /server/src/tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/server/src/tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /server/src/tests/requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | pytest-asyncio -------------------------------------------------------------------------------- /CONTRIBUTE.md: -------------------------------------------------------------------------------- 1 | # Please submit PRs to the dev branch, thank you! -------------------------------------------------------------------------------- /client/src/assets/main.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; -------------------------------------------------------------------------------- /.assets/home_page.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/.assets/home_page.png -------------------------------------------------------------------------------- /client/public/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/client/public/logo.png -------------------------------------------------------------------------------- /.assets/search_page.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/.assets/search_page.png -------------------------------------------------------------------------------- /.assets/settings_page.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/.assets/settings_page.png -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__ 3 | **/__pycache__/ 4 | **/*.py[cod] -------------------------------------------------------------------------------- /client/public/logo1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/client/public/logo1.png -------------------------------------------------------------------------------- /client/public/logo2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/client/public/logo2.png -------------------------------------------------------------------------------- /client/public/logo3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/client/public/logo3.png -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["Vue.volar", "Vue.vscode-typescript-vue-plugin"] 3 | } 4 | -------------------------------------------------------------------------------- /client/public/aiarr_favicon.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/client/public/aiarr_favicon.png -------------------------------------------------------------------------------- /client/public/placeholder-image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqrlmstr5000/discovarr/HEAD/client/public/placeholder-image.jpg -------------------------------------------------------------------------------- /client/src/config.js: -------------------------------------------------------------------------------- 1 | export const config = { 2 | apiUrl: import.meta.env.VITE_DISCOVARR_URL || "__API_ENDPOINT__" 3 | }; 4 | -------------------------------------------------------------------------------- /client/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /scripts/movies.csv: -------------------------------------------------------------------------------- 1 | title,watched_by,media_type 2 | "The Grand Budapest Hotel",test,movie 3 | "Pulp Fiction",test,movie 4 | "Interstellar",test,movie -------------------------------------------------------------------------------- /server/src/requirements.txt: -------------------------------------------------------------------------------- 1 | fastapi 2 | uvicorn[standard] 3 | requests 4 | google-genai 5 | openai 6 | APScheduler>=3.10.1 7 | peewee==3.18.* 8 | Jinja2 9 | plexapi 10 | ollama 11 | trakt.py 12 | aiohttp 13 | aiofiles 14 | psycopg2-binary 15 | #pgvector 16 | #sqlite-vec -------------------------------------------------------------------------------- /client/src/main.js: -------------------------------------------------------------------------------- 1 | import { createApp } from 'vue' 2 | import { createPinia } from 'pinia' 3 | 4 | import 'animate.css'; 5 | 6 | import App from './App.vue' 7 | import router from './router' 8 | 9 | import './assets/main.css' 10 | 11 | const app = createApp(App) 12 | 13 | app.use(createPinia()) 14 | app.use(router) 15 | 16 | app.mount('#app') 17 | -------------------------------------------------------------------------------- /client/index.html: -------------------------------------------------------------------------------- 1 | <!DOCTYPE html> 2 | <html lang="en"> 3 | <head> 4 | <meta charset="UTF-8" /> 5 | <link rel="icon" href="/aiarr_favicon.png" /> 6 | <meta name="viewport" content="width=device-width, initial-scale=1.0" /> 7 | <title>Discovarr</title> 8 | </head> 9 | <body> 10 | <div id="app"></div>
11 | <script type="module" src="/src/main.js"></script> 12 | </body> 13 | </html> 14 | -------------------------------------------------------------------------------- /client/src/stores/movie.js: -------------------------------------------------------------------------------- 1 | import { defineStore } from 'pinia' 2 | 3 | export const useMovieStore = defineStore('movie', { 4 | state: () => ({ 5 | movie: null, 6 | showFullVideo: false, 7 | }), 8 | actions: { 9 | setMovie(movie) { 10 | this.movie = movie 11 | }, 12 | clearMovie() { 13 | this.movie = null 14 | } 15 | }, 16 | getters: { 17 | currentMovie: (state) => state.movie || null 18 | } 19 | }) 20 | -------------------------------------------------------------------------------- /client/tailwind.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('tailwindcss').Config} */ 2 | module.exports = { 3 | content: [ 4 | "./index.html", 5 | "./src/**/*.{vue,js,ts,jsx,tsx}", 6 | ], 7 | theme: { 8 | extend: { 9 | colors: { 10 | 'discovarr': { 11 | DEFAULT: '#1d8bfa', 12 | 600: '#1d8bfa' 13 | } 14 | } 15 | }, 16 | }, 17 | plugins: [ 18 | require('@tailwindcss/typography'), 19 | ], 20 | 21 | } 22 | -------------------------------------------------------------------------------- /server/src/services/migrations/006_search.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 3 | 4 | def upgrade(migrator: SchemaMigrator): 5 | kwargs = pw.CharField(null=True) 6 | 7 | run_migrations( 8 | migrator.add_column('search', 'kwargs', kwargs), 9 | ) 10 | 11 | def rollback(migrator: SchemaMigrator): 12 | run_migrations( 13 | migrator.drop_column('search', 'kwargs'), 14 | ) 15 | -------------------------------------------------------------------------------- /compose.example.yml: -------------------------------------------------------------------------------- 1 | services: 2 | discovarr: 3 | image: ghcr.io/sqrlmstr5000/discovarr:latest 4 | container_name: discovarr 5 | restart: unless-stopped 6 | ports: 7 | - "8000:8000" 8 | environment: 9 | # Client needs to know where the API is. This will be your host machine IP or hostname since the client is connecting from your browser 10 | - VITE_DISCOVARR_URL=http://192.168.0.100:8000/api 11 | volumes: 12 | - ./config:/config 13 | - ./cache:/cache -------------------------------------------------------------------------------- /server/src/services/response.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Dict, Any 2 | from pydantic import BaseModel # Or just use a dataclass/regular class 3 | 4 | class APIResponse(BaseModel): 5 | success: bool 6 | data: Optional[Any] = None # The successful response body (dict, list, etc.) 7 | error: Optional[Dict[str, Any]] = None # Structured error details 8 | status_code: Optional[int] = None # HTTP status from the external API 9 | message: Optional[str] = None # A human-readable message
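10 | 11 | # --- Illustrative usage (a sketch, not part of the Discovarr codebase) --- 12 | # Shows how a call to an external service might be wrapped into APIResponse. 13 | # The fetch_movie helper and its URL are hypothetical. 14 | def fetch_movie(movie_id: int) -> APIResponse: 15 | import requests 16 | try: 17 | resp = requests.get(f"https://api.example.com/movies/{movie_id}", timeout=10) 18 | if resp.ok: 19 | return APIResponse(success=True, data=resp.json(), status_code=resp.status_code) 20 | return APIResponse(success=False, error={"body": resp.text}, status_code=resp.status_code, message="Upstream API returned an error") 21 | except requests.RequestException as exc: 22 | return APIResponse(success=False, error={"exception": str(exc)}, message="Request failed")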
-------------------------------------------------------------------------------- /client/vite.config.js: -------------------------------------------------------------------------------- 1 | import { fileURLToPath, URL } from 'node:url' 2 | 3 | import { defineConfig } from 'vite' 4 | import vue from '@vitejs/plugin-vue' 5 | 6 | // https://vitejs.dev/config/ 7 | export default defineConfig({ 8 | server: { 9 | watch: { 10 | usePolling: true, 11 | }, 12 | host: "0.0.0.0", 13 | port: 5173, 14 | }, 15 | plugins: [vue()], 16 | resolve: { 17 | alias: { 18 | '@': fileURLToPath(new URL('./src', import.meta.url)) 19 | } 20 | } 21 | }) 22 | -------------------------------------------------------------------------------- /server/src/services/migrations/013_settings.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | 5 | def upgrade(migrator: SchemaMigrator): 6 | 7 | run_migrations( 8 | migrator.alter_column_type('settings', 'value', TextField(null=True)) 9 | ) 10 | 11 | def rollback(migrator: SchemaMigrator): 12 | run_migrations( 13 | migrator.alter_column_type('settings', 'value', CharField(null=True)) 14 | ) 15 | 16 | -------------------------------------------------------------------------------- /server/src/services/migrations/012_watchhistory.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | 5 | def upgrade(migrator: SchemaMigrator): 6 | source = CharField(null=True) 7 | 8 | run_migrations( 9 | migrator.add_column('watchhistory', 'source', source), 10 | ) 11 | 12 | def rollback(migrator: SchemaMigrator): 13 | run_migrations( 14 | migrator.drop_column('watchhistory', 'source'), 15 | ) 16 | 17 | -------------------------------------------------------------------------------- /server/src/services/migrations/009_watchhistory.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | 5 | def upgrade(migrator: SchemaMigrator): 6 | media_id = CharField(null=True) 7 | 8 | run_migrations( 9 | migrator.add_column('watchhistory', 'media_id', media_id), 10 | ) 11 | 12 | def rollback(migrator: SchemaMigrator): 13 | run_migrations( 14 | migrator.drop_column('watchhistory', 'media_id'), 15 | ) 16 | 17 | -------------------------------------------------------------------------------- /server/src/services/migrations/007_search.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | 5 | def upgrade(migrator: SchemaMigrator): 6 | last_run_date = DateTimeField(null=True) 7 | 8 | run_migrations( 9 | migrator.add_column('search', 'last_run_date', last_run_date), 10 | ) 11 | 12 | def rollback(migrator: SchemaMigrator): 13 | run_migrations( 14 | migrator.drop_column('search', 'last_run_date'), 15 | ) 16 | 17 |
-------------------------------------------------------------------------------- /server/src/services/migrations/010_watchhistory.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | 5 | def upgrade(migrator: SchemaMigrator): 6 | poster_url = CharField(null=True) 7 | 8 | run_migrations( 9 | migrator.add_column('watchhistory', 'poster_url', poster_url), 10 | ) 11 | 12 | def rollback(migrator: SchemaMigrator): 13 | run_migrations( 14 | migrator.drop_column('watchhistory', 'poster_url'), 15 | ) 16 | 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | .DS_Store 12 | dist 13 | dist-ssr 14 | coverage 15 | *.local 16 | 17 | /cypress/videos/ 18 | /cypress/screenshots/ 19 | 20 | # Editor directories and files 21 | .vscode/* 22 | !.vscode/extensions.json 23 | .idea 24 | *.suo 25 | *.ntvs* 26 | *.njsproj 27 | *.sln 28 | *.sw? 29 | 30 | # Python 31 | __pycache__/ 32 | .pytest_cache/ 33 | 34 | # Docker 35 | /compose.yml 36 | **/env.sh 37 | .env* -------------------------------------------------------------------------------- /server/src/tests/README.md: -------------------------------------------------------------------------------- 1 | 2 | ## Install Python Dependencies 3 | ``` 4 | python -m venv ~/venv/discovarr 5 | source ~/venv/discovarr/bin/activate 6 | pip install -r server/src/requirements.txt 7 | pip install -r server/src/tests/requirements.txt 8 | ``` 9 | 10 | ## Run Tests 11 | ``` 12 | cd server/src 13 | source env.sh 14 | pytest -s --log-cli-level=DEBUG tests/integration/test_trakt_provider.py 15 | 16 | pytest -s --log-cli-level=DEBUG tests/integration/test_trakt_provider.py::TestTraktProviderLive::test_get_items_filtered_from_live_history 17 | ```
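18 | 19 | ## Run All Live Integration Tests 20 | Live integration tests hit real services; they are marked with `integration_live` and skip themselves when their `*_TEST_*` environment variables are unset. A sketch for running the whole marked suite at once, assuming the marker is registered in your pytest configuration: 21 | ``` 22 | cd server/src 23 | source env.sh 24 | pytest -s --log-cli-level=DEBUG -m integration_live tests/integration 25 | ```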
-------------------------------------------------------------------------------- /server/src/services/migrations/004_media.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 3 | 4 | def upgrade(migrator: SchemaMigrator): 5 | run_migrations( 6 | migrator.rename_column('media', 'genre', 'genres'), 7 | ) 8 | 9 | def downgrade(migrator: SchemaMigrator): 10 | # Revert the rename: 'genres' back to 'genre'. 11 | run_migrations( 12 | migrator.rename_column('media', 'genres', 'genre'), 13 | ) -------------------------------------------------------------------------------- /server/src/env.example.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export LOGLEVEL= 3 | export TRAKT_TEST_CLIENT_ID= 4 | export TRAKT_TEST_CLIENT_SECRET= 5 | export TRAKT_TEST_REDIRECT_URI= 6 | export TRAKT_TEST_AUTH_JSON= 7 | export JELLYFIN_TEST_URL= 8 | export JELLYFIN_TEST_API_KEY= 9 | export PLEX_TEST_API_KEY= 10 | export PLEX_TEST_URL= 11 | export GEMINI_TEST_API_KEY= 12 | export GEMINI_TEST_MODEL= 13 | export OLLAMA_TEST_BASE_URL= 14 | export OLLAMA_TEST_MODEL= 15 | export RADARR_TEST_URL= 16 | export RADARR_TEST_API_KEY= 17 | export SONARR_TEST_URL= 18 | export SONARR_TEST_API_KEY= 19 | export TMDB_TEST_API_KEY= -------------------------------------------------------------------------------- /server/src/services/migrations/005_search.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 3 | 4 | def upgrade(migrator: SchemaMigrator): 5 | run_migrations( 6 | migrator.rename_column('search', 'query', 'prompt'), 7 | ) 8 | 9 | def downgrade(migrator: SchemaMigrator): 10 | # Revert the rename: 'prompt' back to 'query'. 11 | run_migrations( 12 | migrator.rename_column('search', 'prompt', 'query'), 13 | ) -------------------------------------------------------------------------------- /MIGRATE.md: -------------------------------------------------------------------------------- 1 | # Migrate 2 | 3 | The goal here is to create a way to migrate from an existing sqlite database to postgres. TBD 4 | 5 | ### Migrate sqlite database to postgres (*Doesn't work. pgloader seems to be abandoned) 6 | ``` 7 | psql: CREATE DATABASE discovarr_test_migrate; 8 | 9 | cat <<EOF > /tmp/migrate.pgloader 10 | load database 11 | from '/app/discovarr-dev/discovarr.db' 12 | into postgresql://admin:admin@localhost:5432/discovarr_test_migrate 13 | 14 | set work_mem to '16MB', maintenance_work_mem to '512 MB'; 15 | EOF 16 | 17 | sudo pgloader -vd /tmp/migrate.pgloader 18 | ```
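19 | 20 | ### Alternative: copy rows with peewee (untested sketch) 21 | Since pgloader looks abandoned, one option is a small Python script using peewee, which is already a server dependency. A minimal sketch, assuming the Discovarr server has already created the postgres schema (tables and columns matching the sqlite database) and that tables are copied in foreign-key order: 22 | ``` 23 | import peewee as pw 24 | 25 | sqlite_db = pw.SqliteDatabase("/app/discovarr-dev/discovarr.db") 26 | pg_db = pw.PostgresqlDatabase("discovarr_test_migrate", user="admin", password="admin", host="localhost", port=5432) 27 | 28 | def copy_table(table): 29 | # Read every row from sqlite and replay it into postgres. 30 | cursor = sqlite_db.execute_sql(f"SELECT * FROM {table}") 31 | columns = [d[0] for d in cursor.description] 32 | insert_sql = f"INSERT INTO {table} ({', '.join(columns)}) VALUES ({', '.join(['%s'] * len(columns))})" 33 | for row in cursor.fetchall(): 34 | pg_db.execute_sql(insert_sql, row) 35 | 36 | # Parent tables first so foreign keys resolve; adjust the list to the real schema. 37 | for table in ("media", "search", "watchhistory", "settings", "migrations"): 38 | copy_table(table) 39 | ``` 40 | After copying, postgres primary-key sequences would still need to be reset (e.g. with setval) before the app writes new rows.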
-------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Release Process 2 | 3 | ## Run Tests 4 | ``` 5 | TODO: see server/src/tests/README.md for current pytest invocations 6 | ``` 7 | 8 | ## Update CHANGELOG.md 9 | ``` 10 | ## [x.x.x]() - YYYY-MM-DD 11 | ### Added 12 | - Added new commandlet 13 | 14 | ### Changed 15 | - Changed existing commandlet 16 | 17 | ### Fixed 18 | - Fixed issue on existing commandlet 19 | ``` 20 | 21 | ## Commit Changes 22 | ``` 23 | git commit -m 'Release 2.3.1' 24 | git push 25 | git checkout main 26 | git merge dev 27 | 28 | # Only tag releases as this triggers a GitHub Actions workflow (.github/workflows/*) 29 | git tag v2.3.1 30 | git push origin v2.3.1 31 | git push origin main 32 | git checkout dev 33 | ``` -------------------------------------------------------------------------------- /server/src/services/migrations/011_poster_url_source.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | 5 | def upgrade(migrator: SchemaMigrator): 6 | poster_url_source = CharField(null=True) 7 | 8 | run_migrations( 9 | migrator.add_column('media', 'poster_url_source', poster_url_source), 10 | migrator.add_column('watchhistory', 'poster_url_source', poster_url_source), 11 | ) 12 | 13 | def rollback(migrator: SchemaMigrator): 14 | run_migrations( 15 | migrator.drop_column('media', 'poster_url_source'), 16 | migrator.drop_column('watchhistory', 'poster_url_source'), 17 | ) 18 | 19 | -------------------------------------------------------------------------------- /server/src/services/migrations/008_watchhistory.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | 5 | def upgrade(migrator: SchemaMigrator): 6 | processed = BooleanField(default=False) 7 | processed_at = DateTimeField(null=True) 8 | 9 | run_migrations( 10 | migrator.add_column('watchhistory', 'processed', processed), 11 | migrator.add_column('watchhistory', 'processed_at', processed_at), 12 | ) 13 | 14 | def rollback(migrator: SchemaMigrator): 15 | run_migrations( 16 | migrator.drop_column('watchhistory', 'processed'), 17 | migrator.drop_column('watchhistory', 'processed_at'), 18 | ) 19 | 20 | -------------------------------------------------------------------------------- /client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "discovarr", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "vite --host", 7 | "build": "vite build", 8 | "preview": "vite preview" 9 | }, 10 | "dependencies": { 11 | "animate.css": "^4.1.1", 12 | "pinia": "^2.0.28", 13 | "markdown-it": "^14.0.0", 14 | "vue": "^3.2.45", 15 | "vue-material-design-icons": "^5.2.0", 16 | "vue-router": "^4.1.6", 17 | "vue3-carousel": "^0.2.9" 18 | }, 19 | "devDependencies": { 20 | "@vitejs/plugin-vue": "^4.0.0",
21 | "@tailwindcss/typography": "^0.5.10", 22 | "autoprefixer": "^10.4.13", 23 | "postcss": "^8.4.21", 24 | "tailwindcss": "^3.2.7", 25 | "vite": "^4.0.0" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /client/src/stores/searchStore.js: -------------------------------------------------------------------------------- 1 | import { defineStore } from 'pinia'; 2 | 3 | export const useSearchStore = defineStore('search', { 4 | state: () => ({ 5 | results: null, 6 | // Context for which the current 'results' are valid 7 | resultsContext: { 8 | searchId: null, 9 | prompt: null, 10 | mediaName: null, 11 | }, 12 | }), 13 | actions: { 14 | setSearchResults(results, context) { 15 | // context should be an object like { searchId, prompt, mediaName } 16 | this.results = results; 17 | this.resultsContext = { ...context }; 18 | }, 19 | clearSearchResults() { 20 | this.results = null; 21 | this.resultsContext = { searchId: null, prompt: null, mediaName: null }; 22 | }, 23 | }, 24 | }); 25 | -------------------------------------------------------------------------------- /start-dev.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -m # Enable Job Control 3 | 4 | echo "Starting FastAPI backend (port 8000) with auto-reload..." 5 | cd /app/server/src 6 | # Your main.py is in /app/server/src/main.py and the FastAPI instance is 'app' 7 | uvicorn main:app --host 0.0.0.0 --port 8000 --reload --reload-dir /app/server/src & 8 | 9 | echo "Starting Vite frontend dev server (port 5173)..." 10 | cd /app/client 11 | # npm run dev will use vite.config.js which sets host 0.0.0.0 and port 5173 12 | # The --host flag here ensures Vite listens on all interfaces within the container. 13 | npm run dev 14 | 15 | # When the foreground process (npm run dev) exits (e.g., Ctrl+C), 16 | # this script will terminate. Docker will then stop the container, 17 | # which will also stop the background Uvicorn process. -------------------------------------------------------------------------------- /server/src/services/migrations/003_searchstat.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 3 | 4 | def upgrade(migrator: SchemaMigrator): 5 | """Set the search_id column in the searchstat table to allow NULLs.""" 6 | run_migrations( 7 | migrator.drop_not_null('searchstat', 'search_id'), # 'search_id' is the typical column name for a ForeignKeyField named 'search' 8 | ) 9 | 10 | def downgrade(migrator: SchemaMigrator): 11 | """Revert SearchStat.search to NOT NULL (if possible and desired).""" 12 | # Note: Downgrading to NOT NULL might fail if there are existing NULL values. 13 | # You might need to handle or clean up NULLs before applying this. 
14 | run_migrations( 15 | migrator.add_not_null('searchstat', 'search_id') 16 | ) -------------------------------------------------------------------------------- /server/src/services/migrations/016_media_favorite.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | # Import necessary models and the database proxy 5 | from services.models import Media, WatchHistory, database 6 | 7 | def upgrade(migrator: SchemaMigrator): 8 | # === Media table changes === 9 | # Define the new column with a default so existing rows get a value 10 | favorite = BooleanField(default=False) 11 | 12 | run_migrations( 13 | migrator.add_column('media', 'favorite', favorite), 14 | ) 15 | 16 | def rollback(migrator: SchemaMigrator): 17 | # === Media table rollback === 18 | # Drop the added column 19 | run_migrations( 20 | migrator.drop_column('media', 'favorite'), 21 | ) 22 | 23 | -------------------------------------------------------------------------------- /server/src/services/migrations/015_media_source_provider.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | # Import necessary models and the database proxy 5 | from services.models import Media, WatchHistory, database 6 | 7 | def upgrade(migrator: SchemaMigrator): 8 | # === Media table changes === 9 | # Define the new column with null=True initially 10 | source_provider = CharField(null=True) 11 | 12 | run_migrations( 13 | migrator.add_column('media', 'source_provider', source_provider), 14 | ) 15 | 16 | def rollback(migrator: SchemaMigrator): 17 | # === Media table rollback === 18 | # Drop the added column 19 | run_migrations( 20 | migrator.drop_column('media', 'source_provider'), 21 | ) 22 | 23 | -------------------------------------------------------------------------------- /server/src/services/migrations/002_media.py: -------------------------------------------------------------------------------- 1 | """ 2 | Migration 002: Add the 'genre' field to the Media table. 3 | 4 | New field: 5 | - genre (nullable TextField; renamed to 'genres' by migration 004) 6 | """ 7 | import peewee as pw 8 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 9 | 10 | def upgrade(migrator: SchemaMigrator): 11 | """ 12 | Applies the migration to add the 'genre' field to the 'media' table. 13 | """ 14 | genre = pw.TextField(null=True) 15 | 16 | run_migrations( 17 | migrator.add_column('media', 'genre', genre), 18 | ) 19 | 20 | def rollback(migrator: SchemaMigrator): 21 | """ 22 | Reverts the migration by removing the 'genre' field from the 'media' table.
31 | """ 32 | run_migrations( 33 | migrator.drop_column('media', 'genre'), 34 | ) 35 | 36 | -------------------------------------------------------------------------------- /client/src/stores/toast.js: -------------------------------------------------------------------------------- 1 | import { defineStore } from 'pinia'; 2 | 3 | export const useToastStore = defineStore('toast', { 4 | state: () => ({ 5 | message: '', 6 | type: 'success', // 'success', 'error', 'info', 'warning' 7 | isVisible: false, 8 | timeoutId: null, 9 | }), 10 | actions: { 11 | show(message, type = 'success', duration = 10000) { 12 | this.message = message; 13 | this.type = type; 14 | this.isVisible = true; 15 | 16 | if (this.timeoutId) { 17 | clearTimeout(this.timeoutId); 18 | } 19 | 20 | this.timeoutId = setTimeout(() => { 21 | this.hide(); 22 | }, duration); 23 | }, 24 | hide() { 25 | this.isVisible = false; 26 | this.message = ''; 27 | this.type = 'success'; // Reset to default 28 | if (this.timeoutId) { 29 | clearTimeout(this.timeoutId); 30 | this.timeoutId = null; 31 | } 32 | }, 33 | }, 34 | }); 35 | -------------------------------------------------------------------------------- /compose.example.env.yml: -------------------------------------------------------------------------------- 1 | # Requires a .env file in the same directory as this file 2 | 3 | services: 4 | discovarr: 5 | container_name: discovarr 6 | environment: 7 | LOGLEVEL: ${LOGLEVEL} 8 | TZ: ${TZ} 9 | VITE_DISCOVARR_URL: ${VITE_DISCOVARR_URL} 10 | JELLYFIN_URL: ${JELLYFIN_URL} 11 | JELLYFIN_API_KEY: ${JELLYFIN_API_KEY} 12 | PLEX_API_TOKEN: ${PLEX_API_TOKEN} 13 | PLEX_URL: ${PLEX_URL} 14 | APP_RECENT_LIMIT: ${APP_RECENT_LIMIT} 15 | GEMINI_API_KEY: ${GEMINI_API_KEY} 16 | GEMINI_MODEL: ${GEMINI_MODEL} 17 | GEMINI_LIMIT: ${GEMINI_LIMIT} 18 | RADARR_URL: ${RADARR_URL} 19 | RADARR_API_KEY: ${RADARR_API_KEY} 20 | SONARR_URL: ${SONARR_URL} 21 | SONARR_API_KEY: ${SONARR_API_KEY} 22 | TMDB_API_KEY: ${TMDB_API_KEY} 23 | image: ghcr.io/sqrlmstr5000/discovarr:latest 24 | network_mode: bridge 25 | ports: 26 | - 8000:8000 27 | restart: unless-stopped 28 | volumes: 29 | - ./config:/config 30 | - ./cache:/cache -------------------------------------------------------------------------------- /client/src/router/index.js: -------------------------------------------------------------------------------- 1 | import { createRouter, createWebHistory } from 'vue-router' 2 | import MediaListView from '../views/MediaListView.vue' 3 | import SettingsView from '../views/SettingsView.vue' 4 | import SearchView from '../views/SearchView.vue' 5 | import ResearchView from '../views/ResearchView.vue' 6 | import WatchHistory from '../views/WatchHistoryView.vue' 7 | 8 | const router = createRouter({ 9 | history: createWebHistory(import.meta.env.BASE_URL), 10 | routes: [ 11 | { 12 | path: '/', 13 | name: 'home', 14 | component: MediaListView 15 | }, 16 | { 17 | path: '/settings', 18 | name: 'settings', 19 | component: SettingsView 20 | }, 21 | { 22 | path: '/search/:searchId?', // Optional searchId parameter 23 | name: 'search-view', 24 | component: SearchView, 25 | props: true // Automatically pass route params as props to the component 26 | }, 27 | { 28 | path: '/research', 29 | name: 'research', 30 | component: ResearchView 31 | }, 32 | { 33 | path: '/watch-history', 34 | name: 'watch-history', 35 | component: WatchHistory 36 | } 37 | ] 38 | }) 39 | 40 | export default router 41 | -------------------------------------------------------------------------------- 
/compose.example.qa.yml: -------------------------------------------------------------------------------- 1 | # Local build of the production Dockerfile 2 | 3 | networks: 4 | jellyfin_htpc: 5 | external: true 6 | 7 | services: 8 | discovarr-qa: 9 | build: 10 | context: . 11 | dockerfile: Dockerfile 12 | container_name: discovarr-qa 13 | ports: 14 | - "8001:8000" # FastAPI backend (host port 8001 avoids conflicts with a dev instance on 8000) 15 | environment: 16 | # Common environment variables 17 | - LOGLEVEL=DEBUG 18 | - TZ=America/Denver 19 | # Client needs to know where the API is (exposed on host port 8001) 20 | - VITE_DISCOVARR_URL=http://192.168.0.100:8001/api 21 | 22 | # Backend API Keys and URLs (copy from your existing discovarr-api or discovarr service) 23 | - JELLYFIN_URL=http://jellyfin:8096 24 | - JELLYFIN_API_KEY= 25 | - GEMINI_API_KEY= # NO DOUBLE QUOTES 26 | - GEMINI_MODEL=gemini-2.5-flash-preview-04-17 27 | - RADARR_URL=http://radarr:7878 28 | - RADARR_API_KEY= 29 | - SONARR_URL=http://sonarr:8989 30 | - SONARR_API_KEY= 31 | - TMDB_API_KEY= 32 | networks: 33 | - jellyfin_htpc # Ensure this network is defined or external as in your example 34 | restart: unless-stopped # Or 'no' for typical dev workflows -------------------------------------------------------------------------------- /server/src/services/migrations/018_llmstat.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | # Import necessary models and the database proxy 5 | from services.models import SearchStat, LLMStat, database 6 | 7 | def upgrade(migrator: SchemaMigrator): 8 | # === SearchStat table changes === 9 | # Drop the SearchStat table if it exists 10 | # This will be executed within the transaction and with PRAGMA foreign_keys=OFF for SQLite 11 | # by the migration runner.
12 | if SearchStat.table_exists(): 13 | # For PostgreSQL, if other tables have foreign keys pointing to searchstat, 14 | # cascade=True might be needed: SearchStat.drop_table(cascade=True, safe=True) 15 | SearchStat.drop_table(safe=True) 16 | 17 | # Create the LLMStat table based on its current model definition 18 | database.create_tables([LLMStat], safe=True) 19 | 20 | def rollback(migrator: SchemaMigrator): 21 | if not SearchStat.table_exists(): 22 | # Recreate the SearchStat table based on its current model definition 23 | database.create_tables([SearchStat], safe=True) 24 | 25 | -------------------------------------------------------------------------------- /server/src/services/migrations/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from playhouse.migrate import SchemaMigrator # Generic migrator type 3 | from peewee import Database as PeeweeDatabase # Generic Peewee Database type 4 | from datetime import datetime 5 | 6 | class Migration: 7 | def __init__(self, database: PeeweeDatabase, migrator: SchemaMigrator): 8 | self.logger = logging.getLogger(__name__) 9 | self.database = database 10 | self.migrator = migrator # Use the passed migrator instance 11 | 12 | def get_current_version(self) -> int: 13 | try: 14 | cursor = self.database.execute_sql( 15 | "SELECT version FROM migrations ORDER BY version DESC LIMIT 1" 16 | ) 17 | result = cursor.fetchone() 18 | return result[0] if result else 0 19 | except Exception: 20 | self.logger.warning("Could not read current version, assuming no migrations applied yet") 21 | return 0 22 | 23 | def set_version(self, version: int): 24 | placeholder = self.database.param # Gets '?' for SQLite, '%s' for Postgres 25 | sql_query = f"INSERT INTO migrations (version, applied_at) VALUES ({placeholder}, {placeholder})" 26 | self.database.execute_sql( 27 | sql_query, 28 | (version, datetime.now()) 29 | )
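30 | 31 | # Illustrative sketch (not the actual Discovarr migration runner): how a caller 32 | # might apply the numbered modules in this package using the Migration class 33 | # above. Assumes each module is named NNN_description.py and exposes 34 | # upgrade(migrator), as the files in this package do. 35 | def apply_pending_migrations(migration: "Migration", package: str = "services.migrations"): 36 | import importlib 37 | import pkgutil 38 | 39 | current = migration.get_current_version() 40 | names = sorted(m.name for m in pkgutil.iter_modules(importlib.import_module(package).__path__) if m.name[:3].isdigit()) 41 | for name in names: 42 | version = int(name[:3]) 43 | if version <= current: 44 | continue # Already applied 45 | module = importlib.import_module(f"{package}.{name}") 46 | module.upgrade(migration.migrator) 47 | migration.set_version(version)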
21 | """ 22 | media_status_field = pw.CharField(null=True) 23 | release_date_field = pw.DateField(null=True) 24 | networks_field = pw.TextField(null=True) 25 | original_language_field = pw.CharField(null=True) 26 | 27 | run_migrations( 28 | migrator.add_column('media', 'media_status', media_status_field), 29 | migrator.add_column('media', 'release_date', release_date_field), 30 | migrator.add_column('media', 'networks', networks_field), 31 | migrator.add_column('media', 'original_language', original_language_field), 32 | ) 33 | 34 | def rollback(migrator: SchemaMigrator): 35 | """ 36 | Reverts the migration by removing the added fields from the 'media' table. 37 | """ 38 | run_migrations( 39 | migrator.drop_column('media', 'original_language'), 40 | migrator.drop_column('media', 'networks'), 41 | migrator.drop_column('media', 'release_date'), 42 | migrator.drop_column('media', 'media_status'), 43 | ) 44 | 45 | -------------------------------------------------------------------------------- /compose.example.dev.yml: -------------------------------------------------------------------------------- 1 | # Local build of the development environment using Dockerfile.dev 2 | 3 | networks: 4 | arr: 5 | external: true # Assumes you already have a network named arr 6 | 7 | services: 8 | discovarr-dev: 9 | build: 10 | context: . 11 | dockerfile: Dockerfile.dev 12 | container_name: discovarr-dev 13 | ports: 14 | - "8000:5173" # Vite frontend dev server 15 | - "8001:8000" # FastAPI backend dev server (using host port 8001 to avoid conflicts) 16 | volumes: 17 | - ./client/src:/app/client/src 18 | - ./client/public:/app/client/public 19 | - ./client/tailwind.config.js:/app/client/tailwind.config.js 20 | - ./server:/app/server 21 | - /app/discovarr-dev/config:/config 22 | environment: 23 | # Common environment variables 24 | - LOGLEVEL=DEBUG 25 | - TZ=America/Denver 26 | # Client needs to know where the API is (exposed on host port 8001) 27 | - VITE_DISCOVARR_URL=http://192.168.0.100:8001/api 28 | # Backend API Keys and URLs (copy from your existing discovarr-api or discovarr service) 29 | - JELLYFIN_URL=http://jellyfin:8096 30 | - JELLYFIN_API_KEY= 31 | - GEMINI_API_KEY= # NO DOUBLE QUOTES 32 | - GEMINI_MODEL=gemini-2.5-flash-preview-04-17 33 | - RADARR_URL=http://radarr:7878 34 | - RADARR_API_KEY= 35 | - SONARR_URL=http://sonarr:8989 36 | - SONARR_API_KEY= 37 | - TMDB_API_KEY= 38 | networks: 39 | - arr # Ensure this network is defined or external as in your example 40 | restart: unless-stopped # Or 'no' for typical dev workflows 41 | -------------------------------------------------------------------------------- /client/src/components/EditMediaNameModal.vue: -------------------------------------------------------------------------------- 1 | 29 | 30 | -------------------------------------------------------------------------------- /Dockerfile.dev: -------------------------------------------------------------------------------- 1 | # Development build. Uses vite dev server to host client application. Supports hot reloading of both client and server. 
6 | 7 | FROM python:3.12-slim 8 | 9 | # Set environment variables 10 | ENV PYTHONDONTWRITEBYTECODE=1 11 | ENV PYTHONUNBUFFERED=1 12 | ENV PYTHONTRACEMALLOC=1 13 | 14 | # Install Node.js (v20) and npm 15 | RUN apt-get update && \ 16 | apt-get install -y curl gnupg && \ 17 | curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && \ 18 | apt-get install -y nodejs && \ 19 | apt-get clean && rm -rf /var/lib/apt/lists/* 20 | 21 | WORKDIR /app 22 | 23 | # --- Server Setup --- 24 | # Copy only requirements.txt first to leverage Docker cache 25 | COPY ./server/src/requirements.txt /app/requirements.txt 26 | RUN pip install --no-cache-dir --upgrade pip && \ 27 | pip install --no-cache-dir -r /app/requirements.txt 28 | 29 | # Install PostgreSQL client for pg_dump/psql 30 | RUN apt-get update && apt-get install -y --no-install-recommends postgresql-client && rm -rf /var/lib/apt/lists/* 31 | 32 | # Copy server application code (will be volume-mounted in dev) 33 | COPY ./server /app/server 34 | 35 | # --- Client Setup --- 36 | # Copy only package.json and lock file first to leverage Docker cache 37 | WORKDIR /app/client 38 | COPY ./client/package.json ./client/package-lock.json* ./ 39 | # npm ci installs from package-lock.json and includes devDependencies by default. 40 | RUN npm ci 41 | # The rest of the client code will be volume-mounted in dev, 42 | # but COPYing it here pre-populates the image layer. 43 | COPY ./client/ ./ 44 | 45 | # Create directories 46 | RUN mkdir -p /config /backups /cache 47 | 48 | # Expose ports 49 | # For FastAPI backend dev server 50 | EXPOSE 8000 51 | # For Vite frontend dev server 52 | EXPOSE 5173 53 | 54 | # Copy the startup script and make it executable 55 | COPY ./start-dev.sh /app/start-dev.sh 56 | RUN chmod +x /app/start-dev.sh 57 | 58 | # Note: For development, we're running as root for simplicity with volume mounts. 59 | # The production Dockerfile already handles non-root user setup. 60 | 61 | # Command to run the startup script 62 | CMD ["/app/start-dev.sh"] -------------------------------------------------------------------------------- /client/src/stores/settings.js: -------------------------------------------------------------------------------- 1 | // src/stores/settings.js 2 | import { defineStore } from 'pinia'; 3 | import { ref } from 'vue'; 4 | import { config } from '../config'; // Assuming your API URL config is here 5 | 6 | export const useSettingsStore = defineStore('settings', () => { 7 | // State 8 | const allSettings = ref({}); // Settings keyed by group, then name: { group: { settingName: settingObject, ...
} } 9 | const isLoading = ref(false); 10 | const error = ref(null); 11 | 12 | // Actions 13 | async function fetchSettings() { 14 | isLoading.value = true; 15 | error.value = null; 16 | try { 17 | const response = await fetch(`${config.apiUrl}/settings`); 18 | if (!response.ok) { 19 | const errorData = await response.json().catch(() => ({ message: response.statusText })); 20 | throw new Error(`Failed to fetch settings: ${response.status} ${errorData.message || ''}`); 21 | } 22 | const settingsArray = await response.json(); // API returns settings grouped by group, then name (see getSettingValue) 23 | 24 | allSettings.value = settingsArray 25 | // Transform the array into an object keyed by setting name for easier access 26 | // allSettings.value = settingsArray.reduce((acc, setting) => { 27 | //   acc[setting.name] = setting; // Store the whole setting object 28 | //   return acc; 29 | //}, {}); 30 | 31 | console.log('Settings fetched successfully:', allSettings.value); 32 | 33 | } catch (e) { 34 | console.error('Error fetching settings:', e); 35 | error.value = e.message || 'An unknown error occurred while fetching settings.'; 36 | allSettings.value = {}; // Reset or keep stale data, depending on preference 37 | } finally { 38 | isLoading.value = false; 39 | } 40 | } 41 | 42 | // Getters (optional, but can be useful) 43 | // Example: Get a specific setting's value 44 | function getSettingValue(group, name, defaultValue = null) { 45 | return allSettings.value[group]?.[name]?.value ?? defaultValue; 46 | } 47 | 48 | return { 49 | allSettings, 50 | isLoading, 51 | error, 52 | fetchSettings, 53 | getSettingValue 54 | }; 55 | }); 56 | -------------------------------------------------------------------------------- /server/src/tests/integration/test_jellyfin_provider.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | from typing import List # For type hinting 4 | 5 | from providers.jellyfin import JellyfinProvider 6 | from services.models import ItemsFiltered, LibraryUser # Ensure LibraryUser is imported if not already 7 | from tests.integration.base.base_live_library_provider_tests import BaseLiveLibraryProviderTests 8 | 9 | # --- Fixtures for Jellyfin --- 10 | 11 | @pytest.fixture(scope="module") 12 | def live_jellyfin_url() -> str: 13 | """Retrieves Jellyfin URL from environment variable.""" 14 | url = os.environ.get("JELLYFIN_TEST_URL") 15 | if not url: 16 | pytest.skip("JELLYFIN_TEST_URL environment variable not set. Skipping live Jellyfin tests.") 17 | return url 18 | 19 | @pytest.fixture(scope="module") 20 | def live_jellyfin_api_key() -> str: 21 | """Retrieves Jellyfin API Key from environment variable.""" 22 | api_key = os.environ.get("JELLYFIN_TEST_API_KEY") 23 | if not api_key: 24 | pytest.skip("JELLYFIN_TEST_API_KEY environment variable not set. Skipping live Jellyfin tests.") 25 | return api_key 26 | 27 | @pytest.mark.integration_live # Mark tests that hit the live API 28 | class TestJellyfinProviderLive(BaseLiveLibraryProviderTests): 29 | """Groups live integration tests for JellyfinProvider.""" 30 | 31 | # Override the live_provider fixture from the base class and make it module-scoped 32 | @pytest.fixture(scope="module") 33 | def live_provider(self, live_jellyfin_url: str, live_jellyfin_api_key: str) -> JellyfinProvider: 34 | """ 35 | Provides a live instance of the JellyfinProvider, scoped for the module.
36 | """ 37 | provider = JellyfinProvider( 38 | jellyfin_url=live_jellyfin_url, 39 | jellyfin_api_key=live_jellyfin_api_key 40 | # limit is optional and defaults in JellyfinProvider constructor 41 | ) 42 | assert provider.jellyfin_url == live_jellyfin_url, "Provider URL should be set." 43 | return provider 44 | 45 | # Other tests like test_provider_name, test_get_users, test_get_user_by_name, 46 | # test_get_all_items_filtered_as_objects, and test_get_all_items_filtered_as_names 47 | # are inherited from BaseLiveLibraryProviderTests and should work as expected. -------------------------------------------------------------------------------- /server/src/tests/integration/test_plex_provider.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | from typing import List # For type hinting 4 | 5 | from providers.plex import PlexProvider 6 | from services.models import ItemsFiltered, LibraryUser 7 | from tests.integration.base.base_live_library_provider_tests import BaseLiveLibraryProviderTests 8 | 9 | # --- Fixtures for Plex --- 10 | 11 | @pytest.fixture(scope="module") 12 | def live_plex_url() -> str: 13 | """Retrieves Plex URL from environment variable.""" 14 | url = os.environ.get("PLEX_TEST_URL") 15 | if not url: 16 | pytest.skip("PLEX_TEST_URL environment variable not set. Skipping live Plex tests.") 17 | return url 18 | 19 | @pytest.fixture(scope="module") 20 | def live_plex_api_key() -> str: 21 | """Retrieves Plex API Key (Token) from environment variable.""" 22 | api_key = os.environ.get("PLEX_TEST_API_KEY") # Using API_KEY for consistency, though it's a token for Plex 23 | if not api_key: 24 | pytest.skip("PLEX_TEST_API_KEY environment variable not set. Skipping live Plex tests.") 25 | return api_key 26 | 27 | @pytest.mark.integration_live # Mark tests that hit the live API 28 | class TestPlexProviderLive(BaseLiveLibraryProviderTests): 29 | """Groups live integration tests for PlexProvider.""" 30 | 31 | # Override the live_provider fixture from the base class and make it module-scoped 32 | @pytest.fixture(scope="module") 33 | def live_provider(self, live_plex_url: str, live_plex_api_key: str) -> PlexProvider: 34 | """ 35 | Provides a live instance of the PlexProvider, scoped for the module. 36 | """ 37 | provider = PlexProvider( 38 | plex_url=live_plex_url, 39 | plex_api_key=live_plex_api_key 40 | # limit is optional and defaults in PlexProvider constructor 41 | ) 42 | assert provider.server is not None, "Plex server should be connected." 43 | return provider 44 | 45 | # All tests (test_provider_name, test_get_users, test_get_user_by_name, 46 | # test_get_recently_watched, test_get_favorites, 47 | # test_get_all_items_filtered_as_objects, and test_get_all_items_filtered_as_names) 48 | # are inherited from BaseLiveLibraryProviderTests and should work as expected 49 | # if PlexProvider correctly implements the LibraryProviderBase interface. 
-------------------------------------------------------------------------------- /server/src/services/migrations/014_media_entity_type.py: -------------------------------------------------------------------------------- 1 | import peewee as pw 2 | from peewee import * 3 | from playhouse.migrate import migrate as run_migrations, SchemaMigrator 4 | # Import necessary models and the database proxy 5 | from services.models import Media, WatchHistory, database 6 | 7 | def upgrade(migrator: SchemaMigrator): 8 | # === Media table changes === 9 | # Define the new column with null=True initially 10 | entity_type_field = CharField(null=True) 11 | watched_field = BooleanField(default=False) 12 | watch_count_field = IntegerField(default=0) 13 | 14 | run_migrations( 15 | migrator.add_column('media', 'entity_type', entity_type_field), 16 | migrator.add_column('media', 'watched', watched_field), 17 | migrator.add_column('media', 'watch_count', watch_count_field), 18 | ) 19 | 20 | # Update all existing rows to set entity_type = "suggestion" 21 | # This needs to be done before making the column NOT NULL 22 | if Media.table_exists(): # Check if table exists, though it should in a migration context 23 | Media.update(entity_type="suggestion").execute() 24 | 25 | # Now alter the column to be NOT NULL 26 | run_migrations( 27 | migrator.add_not_null('media', 'entity_type') 28 | ) 29 | 30 | # === WatchHistory table changes === 31 | # Drop the WatchHistory table if it exists 32 | # This will be executed within the transaction and with PRAGMA foreign_keys=OFF for SQLite 33 | # by the migration runner. 34 | if WatchHistory.table_exists(): 35 | # For PostgreSQL, if WatchHistory has foreign keys pointing to it from other tables, 36 | # cascade=True might be needed: WatchHistory.drop_table(cascade=True, safe=True) 37 | WatchHistory.drop_table(safe=True) 38 | 39 | # Recreate the WatchHistory table based on its current model definition 40 | database.create_tables([WatchHistory], safe=True) 41 | 42 | def rollback(migrator: SchemaMigrator): 43 | # === Media table rollback === 44 | # To properly rollback, first allow NULLs again, then drop the column 45 | run_migrations( 46 | migrator.drop_not_null('media', 'entity_type'), # Make it nullable before dropping 47 | migrator.drop_column('media', 'entity_type'), 48 | migrator.drop_column('media', 'watched'), 49 | migrator.drop_column('media', 'watch_count'), 50 | ) 51 | 52 | -------------------------------------------------------------------------------- /server/src/tests/integration/test_openai_provider.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from typing import Any, Optional, List, Dict 4 | 5 | from providers.openai import OpenAIProvider # The class we are testing 6 | from tests.integration.base.base_live_llm_provider_tests import BaseLiveLlmProviderTests # The base test class 7 | 8 | # Environment variable names 9 | OPENAI_API_KEY_ENV_VAR = "OPENAI_TEST_API_KEY" 10 | OPENAI_BASE_URL_ENV_VAR = "OPENAI_TEST_BASE_URL" 11 | OPENAI_TEST_MODEL_ENV_VAR = "OPENAI_TEST_MODEL" # Optional, for overriding the default test model 12 | OPENAI_TEST_EMBEDDING_MODEL_ENV_VAR = "OPENAI_TEST_EMBEDDING_MODEL" 13 | 14 | # Default model to use for tests if not overridden by environment variable 15 | DEFAULT_OPENAI_TEST_MODEL = "gpt-4o" 16 | DEFAULT_OPENAI_TEST_EMBEDDING_MODEL = "text-embedding-3-small" 17 | DEFAULT_OPENAI_TEST_EMBEDDING_DIMENSIONS = 1536 18 | 19 | class TestOpenAIProviderLive(BaseLiveLlmProviderTests): 
20 | """ 21 | Live integration tests for the OpenAIProvider. 22 | Requires the OPENAI_API_KEY environment variable to be set. 23 | """ 24 | 25 | def _get_provider_instance(self) -> OpenAIProvider: 26 | api_key = os.getenv(OPENAI_API_KEY_ENV_VAR) 27 | base_url = os.getenv(OPENAI_BASE_URL_ENV_VAR) # Can be None, will use provider default 28 | if not api_key: 29 | self.skipTest(f"{OPENAI_API_KEY_ENV_VAR} environment variable not set.") 30 | # If base_url is None, the provider's __init__ will use its default. 31 | return OpenAIProvider(api_key=api_key, base_url=base_url) 32 | 33 | def _get_model_name(self) -> str: 34 | return os.getenv(OPENAI_TEST_MODEL_ENV_VAR, DEFAULT_OPENAI_TEST_MODEL) 35 | 36 | def _get_generate_content_prompt_data(self) -> List[Dict[str, str]]: 37 | """Return a list of messages for OpenAI's _generate_content.""" 38 | return [{'role': 'user', 'content': 'Tell me a short, one-sentence joke.'}] 39 | 40 | def _get_embedding_model_name(self) -> str: 41 | return os.getenv(OPENAI_TEST_EMBEDDING_MODEL_ENV_VAR, DEFAULT_OPENAI_TEST_EMBEDDING_MODEL) 42 | 43 | def _get_embedding_dimensions(self) -> Optional[int]: 44 | # text-embedding-3-small can have variable dimensions, but let's test a fixed one. 45 | return DEFAULT_OPENAI_TEST_EMBEDDING_DIMENSIONS 46 | 47 | def _get_required_env_vars(self) -> list[str]: 48 | return [OPENAI_API_KEY_ENV_VAR] 49 | 50 | if __name__ == '__main__': 51 | unittest.main() -------------------------------------------------------------------------------- /server/src/tests/integration/test_ollama_provider.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from typing import Any, List, Dict, Optional 4 | 5 | from providers.ollama import OllamaProvider # The class we are testing 6 | from tests.integration.base.base_live_llm_provider_tests import BaseLiveLlmProviderTests # The base test class 7 | 8 | # Environment variable names 9 | OLLAMA_TEST_BASE_URL_ENV_VAR = "OLLAMA_TEST_BASE_URL" 10 | OLLAMA_TEST_MODEL_ENV_VAR = "OLLAMA_TEST_MODEL" # Optional, for overriding the default test model 11 | 12 | # Default model to use for tests if not overridden by environment variable 13 | # Using the default from OllamaProvider.get_default_settings() 14 | DEFAULT_OLLAMA_TEST_MODEL = "llama3" 15 | 16 | class TestOllamaProviderLive(BaseLiveLlmProviderTests): 17 | """ 18 | Live integration tests for the OllamaProvider. 19 | Requires the OLLAMA_TEST_BASE_URL environment variable to be set. 20 | Optionally, OLLAMA_TEST_MODEL can be set to use a specific model for testing. 21 | """ 22 | 23 | def _get_provider_instance(self) -> OllamaProvider: 24 | base_url = os.getenv(OLLAMA_TEST_BASE_URL_ENV_VAR) 25 | if not base_url: 26 | self.skipTest(f"{OLLAMA_TEST_BASE_URL_ENV_VAR} environment variable not set.") 27 | 28 | # OllamaProvider constructor takes ollama_base_url 29 | return OllamaProvider(ollama_base_url=base_url) 30 | 31 | def _get_model_name(self) -> str: 32 | return os.getenv(OLLAMA_TEST_MODEL_ENV_VAR, DEFAULT_OLLAMA_TEST_MODEL) 33 | 34 | def _get_required_env_vars(self) -> list[str]: 35 | return [OLLAMA_TEST_BASE_URL_ENV_VAR] 36 | 37 | def _get_generate_content_prompt_data(self) -> List[Dict[str, str]]: 38 | """Return a list of messages for Ollama's _generate_content.""" 39 | return "Tell me a short, one-sentence joke." 
40 | 
41 |     def _get_embedding_model_name(self) -> str:
42 |         return "nomic-embed-text"
43 | 
44 |     def _get_embedding_dimensions(self) -> Optional[int]:
45 |         return 768
46 | 
47 |     # BaseLiveLlmProviderTests provides:
48 |     # - asyncSetUp
49 |     # - test_get_models_live
50 |     # - test_get_similar_media_live_basic
51 |     #   (OllamaProvider.get_similar_media returns a dict matching SuggestionList, so this should pass)
52 |     # - test_get_similar_media_live_with_suggestion_list_schema
53 |     #   (OllamaProvider.get_similar_media uses SuggestionList schema by default)
54 | 
55 | if __name__ == '__main__':
56 |     # This allows running the tests directly from this file:
57 |     # python -m tests.integration.test_ollama_provider
58 |     unittest.main()
-------------------------------------------------------------------------------- /server/src/tests/integration/test_gemini_provider.py: --------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 | from typing import Optional
4 | 
5 | from providers.gemini import GeminiProvider  # The class we are testing
6 | from tests.integration.base.base_live_llm_provider_tests import BaseLiveLlmProviderTests  # The base test class
7 | 
8 | from services.models import SuggestionList
9 | # Environment variable names
10 | GEMINI_API_KEY_ENV_VAR = "GEMINI_TEST_API_KEY"
11 | GEMINI_TEST_MODEL_ENV_VAR = "GEMINI_TEST_MODEL"  # Optional, for overriding the default test model
12 | GEMINI_TEST_EMBEDDING_MODEL_ENV_VAR = "GEMINI_TEST_EMBEDDING_MODEL"
13 | 
14 | # Default model to use for tests if not overridden by environment variable
15 | # Using the default from GeminiProvider.get_default_settings()
16 | DEFAULT_GEMINI_TEST_MODEL = "gemini-2.5-flash"
17 | DEFAULT_GEMINI_TEST_EMBEDDING_MODEL = "gemini-embedding-exp"
18 | 
19 | class TestGeminiProviderLive(BaseLiveLlmProviderTests):
20 |     """
21 |     Live integration tests for the GeminiProvider.
22 |     Requires the GEMINI_TEST_API_KEY environment variable to be set.
23 |     Optionally, GEMINI_TEST_MODEL can be set to use a specific model for testing.
24 |     """
25 | 
26 |     def _get_provider_instance(self) -> GeminiProvider:
27 |         api_key = os.getenv(GEMINI_API_KEY_ENV_VAR)
28 |         # The asyncSetUp in BaseLiveLlmProviderTests will skip if api_key is None
29 |         # based on _get_required_env_vars, so direct check here is mostly a safeguard.
30 |         if not api_key:
31 |             self.skipTest(f"{GEMINI_API_KEY_ENV_VAR} environment variable not set.")
32 |         return GeminiProvider(gemini_api_key=api_key)
33 | 
34 |     def _get_model_name(self) -> str:
35 |         return os.getenv(GEMINI_TEST_MODEL_ENV_VAR, DEFAULT_GEMINI_TEST_MODEL)
36 | 
37 |     def _get_required_env_vars(self) -> list[str]:
38 |         return [GEMINI_API_KEY_ENV_VAR]
39 | 
40 |     def _get_generate_content_prompt_data(self) -> str:
41 |         """Return a simple string prompt for Gemini's _generate_content."""
42 |         return "Tell me a short, one-sentence joke."
43 | 
44 |     def _get_embedding_model_name(self) -> str:
45 |         return os.getenv(GEMINI_TEST_EMBEDDING_MODEL_ENV_VAR, DEFAULT_GEMINI_TEST_EMBEDDING_MODEL)
46 | 
47 |     def _get_embedding_dimensions(self) -> Optional[int]:
48 |         return 768
49 | 
50 |     # BaseLiveLlmProviderTests provides:
51 |     # - asyncSetUp
52 |     # - test_get_models_live
53 |     # - test_get_similar_media_live_basic
54 | 
55 | if __name__ == '__main__':
56 |     # This allows running the tests directly from this file:
57 |     # python -m tests.integration.test_gemini_provider
58 |     unittest.main()
-------------------------------------------------------------------------------- /server/src/base/library_provider_base.py: --------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Optional, Dict, List, Any, Union
3 | from services.models import ItemsFiltered, LibraryUser
4 | 
5 | class LibraryProviderBase(ABC):
6 |     """
7 |     Abstract base class for media library providers (e.g., Jellyfin, Plex).
8 |     Defines a common interface for interacting with different library services.
9 |     """
10 | 
11 |     @property
12 |     @abstractmethod
13 |     def name(self) -> str:
14 |         """
15 |         Returns the name of the library provider (e.g., "jellyfin", "plex").
16 |         """
17 |         pass
18 | 
19 |     @abstractmethod
20 |     def get_users(self) -> Optional[List[LibraryUser]]:
21 |         """
22 |         Retrieves all users from the library provider.
23 | 
24 |         Returns:
25 |             Optional[List[LibraryUser]]: A list of user objects,
26 |             or None if an error occurs.
27 |         """
28 |         pass
29 | 
30 |     @abstractmethod
31 |     def get_user_by_name(self, username: str) -> Optional[LibraryUser]:
32 |         """
33 |         Retrieves a specific user by their username.
34 | 
35 |         Args:
36 |             username (str): The username to search for.
37 | 
38 |         Returns:
39 |             Optional[LibraryUser]: The user object if found,
40 |             or None otherwise.
41 |         """
42 |         pass
43 | 
44 |     @abstractmethod
45 |     def get_recently_watched(self, user_id: str, limit: Optional[int] = None) -> Optional[List[ItemsFiltered]]:
46 |         """
47 |         Retrieves recently watched items for a specific user.
48 | 
49 |         Args:
50 |             user_id (str): The unique identifier for the user.
51 |             limit (Optional[int]): The maximum number of items to retrieve.
52 | 
53 |         Returns:
54 |             Optional[List[ItemsFiltered]]: A list of recently watched items,
55 |             or None if an error occurs.
56 |         """
57 |         pass
58 | 
59 |     @abstractmethod
60 |     def get_favorites(self, user_id: str, limit: Optional[int] = None) -> Optional[List[ItemsFiltered]]:
61 |         """
62 |         Retrieves favorite items for a specific user.
63 | 
64 |         Args:
65 |             user_id (str): The unique identifier for the user.
66 |             limit (Optional[int]): The maximum number of items to retrieve.
67 | 
68 |         Returns:
69 |             Optional[List[ItemsFiltered]]: A list of favorite items,
70 |             or None if an error occurs.
71 |         """
72 |         pass
73 | 
74 |     @abstractmethod
75 |     def get_all_items_filtered(self, attribute_filter: Optional[str] = None) -> Optional[Union[List[ItemsFiltered], List[str]]]:
76 |         """
77 |         Retrieves all relevant items (e.g., movies, shows) from the library and filters them.
78 |         """
79 |         pass
80 | 
81 |     @classmethod
82 |     @abstractmethod
83 |     def get_default_settings(cls) -> Dict[str, Dict[str, Any]]:
84 |         """
85 |         Returns the default settings specific to this library provider.
86 |         This method should be implemented by each concrete provider class.
87 |         The structure should align with how settings are defined in SettingsService.
88 | """ 89 | pass -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [1.2.0]() - 2025-06-23 8 | ### Added 9 | - OpenAI integration (Supports all services that comply with the OpenAI API spec. Requires the Responses API for structured output. Continue to use the separate Ollama provider until they add the Responses API. See https://github.com/ollama/ollama/issues/9659) 10 | - Jellyseer integration 11 | - Overseerr integration 12 | 13 | ### Changed 14 | - OpenAI provider now supports custom base URLs for API proxying. 15 | - Refactored Radarr and Sonarr into Providers of type "request" 16 | 17 | ## [1.1.2]() - 2025-06-21 18 | ### Fixed 19 | - ENVs overwrite Settings in database properly now 20 | - Library providers sync all watch history on first run, settings.app.recent_limit after. 21 | 22 | ## [1.1.1]() - 2025-06-20 23 | ### Fixed 24 | - Fixed issue with favorite returning None in Plex Provider causing watch history sync to fail. 25 | 26 | ## [1.1.0]() - 2025-06-19 27 | ### Added 28 | - Research page for movie/tv series analysis 29 | - Postgres support (no migration, you'll need to start fresh) 30 | 31 | ### Changed 32 | - Drop searchstat and replaced it with a generic llmstat table. Existing stats will be wiped. 33 | 34 | ## [1.0.2]() - 2025-06-13 35 | ### Added 36 | - /watch-history/import api endpoint to allow importing watch history manually; created scripts/import_watch_history.py 37 | 38 | ### Changed 39 | - Search template variable media_exclude to all_media. media_exclude will continue to work 40 | 41 | ### Fixed 42 | - Enable thinking_budget for gemini-2.5-flash and gemini-2.5-pro only 43 | 44 | ## [1.0.1]() - 2025-06-13 45 | ### Fixed 46 | - Fix plex watch history id AttributeError; Fixes #10 47 | 48 | ## [1.0.0]() - 2025-06-12 49 | ### Changed 50 | - Version to 1.0.0 51 | 52 | ## [0.0.8]() - 2025-06-12 53 | ### Fixed 54 | - media_id in migration_009 55 | 56 | ## [0.0.7]() - 2025-06-12 57 | ### Added 58 | - Trakt integration (watch history only) 59 | - Image caching service using /cache directory 60 | - Added watch_history template variable to Search 61 | - Integration tests for Library and LLM Providers. Some basic unit testing 62 | - Settings page enhancements 63 | - Allow changing PUID/PGUI in container 64 | 65 | ### Changed 66 | - Using LibraryProviderBase and LLMProviderBase to standardize providers. Refactored providers. 67 | - Added Default User Setting to Library providers. Removed this from the Search page. 
68 | - Plex Provider enhancements
69 | - sync_watch_history syncs all history on the first attempt, then only the last settings.recent_limit items afterwards
70 | 
71 | ### Fixed
72 | - ai_arr replacements, Closes PR #9
73 | 
74 | ## [0.0.6]() - 2025-06-09
75 | ### Fixed
76 | - entrypoint.sh now properly displays an error message when VITE_DISCOVARR_URL is missing
77 | 
78 | ## [0.0.5]() - 2025-06-09
79 | ### Changed
80 | - Modified the Dockerfile to support defining a PUID/PGID at runtime and set up /config permissions accordingly
81 | 
82 | ## [0.0.4]() - 2025-06-09
83 | ### Changed
84 | - Renamed app to discovarr
85 | - Added setting for root_dir_path; Fixes #5
86 | - Moved gemini.limit setting to app.suggestion_limit
87 | 
88 | ## [0.0.3]() - 2025-06-03
89 | ### Added
90 | - Ollama support
91 | 
92 | ## [0.0.1]() - 2025-06-01
93 | ### Added
94 | - Initial beta release
-------------------------------------------------------------------------------- /server/src/tests/integration/test_settings.py: --------------------------------------------------------------------------------
1 | import pytest
2 | from unittest.mock import patch
3 | 
4 | from discovarr import Discovarr
5 | from services.models import Settings
6 | 
7 | # Mark all tests in this file as integration tests
8 | pytestmark = pytest.mark.integration
9 | 
10 | 
11 | def test_env_variable_overwrites_default_on_creation(tmp_path, monkeypatch):
12 |     """
13 |     Tests that an environment variable correctly overwrites a default setting
14 |     during the initial settings creation in a fresh database.
15 |     """
16 |     # 1. Setup: Set environment variable and prepare a temporary database path.
17 |     test_db_path = tmp_path / "test_settings_env_creation.db"
18 |     expected_url = "http://my-custom-jellyfin:8096"
19 |     monkeypatch.setenv("JELLYFIN_URL", expected_url)
20 | 
21 |     # Patch _validate_configuration to prevent it from raising errors about other
22 |     # missing settings, allowing us to focus only on the settings initialization logic.
23 |     with patch('discovarr.Discovarr._validate_configuration', return_value=None):
24 |         # 2. Execute: Initialize Discovarr, which triggers the settings initialization.
25 |         discovarr_instance = Discovarr(db_path=str(test_db_path))
26 | 
27 |     # 3. Assert: Check the value in the database.
28 |     # The database connection is managed by the Discovarr instance's db object.
29 |     # We can query the Settings model directly.
30 |     jellyfin_url_setting = Settings.get_or_none(
31 |         (Settings.group == "jellyfin") & (Settings.name == "url")
32 |     )
33 | 
34 |     assert jellyfin_url_setting is not None, "The 'jellyfin.url' setting should have been created."
35 |     assert jellyfin_url_setting.value == expected_url, "The setting value should match the environment variable."
36 | 
37 |     # Clean up the instance to close the DB connection.
38 |     discovarr_instance.db.cleanup()
39 | 
40 | 
41 | def test_env_variable_overwrites_existing_db_value(tmp_path, monkeypatch):
42 |     """
43 |     Tests that an environment variable correctly overwrites an existing setting
44 |     value in the database when the application starts up.
45 |     """
46 |     # 1. Setup: Prepare a temporary database path and initialize Discovarr once
47 |     #    to create the initial settings with default values.
48 |     test_db_path = tmp_path / "test_settings_env_overwrite.db"
49 | 
50 |     with patch('discovarr.Discovarr._validate_configuration', return_value=None):
51 |         discovarr_instance_1 = Discovarr(db_path=str(test_db_path))
52 | 
53 |     # Verify the initial default value is in the database.
54 | initial_setting = Settings.get((Settings.group == "jellyfin") & (Settings.name == "url")) 55 | assert initial_setting.value == "http://jellyfin:8096", "Initial value should be the default." 56 | 57 | # Clean up the first instance to release the DB file lock. 58 | discovarr_instance_1.db.cleanup() 59 | 60 | # 2. Execute: Set the environment variable and re-initialize Discovarr. 61 | expected_url = "http://my-overwritten-jellyfin:8096" 62 | monkeypatch.setenv("JELLYFIN_URL", expected_url) 63 | 64 | with patch('discovarr.Discovarr._validate_configuration', return_value=None): 65 | discovarr_instance_2 = Discovarr(db_path=str(test_db_path)) 66 | 67 | # 3. Assert: Check that the value in the database has been updated. 68 | updated_setting = Settings.get((Settings.group == "jellyfin") & (Settings.name == "url")) 69 | 70 | assert updated_setting is not None, "The setting should still exist." 71 | assert updated_setting.value == expected_url, "The setting value should have been overwritten by the environment variable." 72 | 73 | # Clean up the second instance. 74 | discovarr_instance_2.db.cleanup() -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # --- PUID/PGID Runtime Configuration --- 3 | # Use environment variables PUID and PGID, with defaults if not set. 4 | # These defaults should ideally match the ones used at build time if no runtime vars are provided. 5 | PUID_TO_SET=${PUID:-1884} 6 | PGID_TO_SET=${PGID:-1884} 7 | 8 | APP_USER="discovarr" 9 | 10 | # Check if the user exists before trying to get its ID 11 | if id "$APP_USER" >/dev/null 2>&1; then 12 | CURRENT_UID=$(id -u "$APP_USER") 13 | CURRENT_GID=$(id -g "$APP_USER") 14 | 15 | echo "INFO: Desired PUID=${PUID_TO_SET}, PGID=${PGID_TO_SET}" 16 | echo "INFO: Current ${APP_USER} UID=${CURRENT_UID}, GID=${CURRENT_GID}" 17 | 18 | # Modify group GID if necessary 19 | if [ "$PGID_TO_SET" != "$CURRENT_GID" ]; then 20 | echo "INFO: Modifying group ${APP_USER} GID from $CURRENT_GID to $PGID_TO_SET" 21 | groupmod -o -g "$PGID_TO_SET" "$APP_USER" 22 | fi 23 | 24 | # Modify user UID if necessary 25 | if [ "$PUID_TO_SET" != "$CURRENT_UID" ]; then 26 | echo "INFO: Modifying user ${APP_USER} UID from $CURRENT_UID to $PUID_TO_SET" 27 | usermod -o -u "$PUID_TO_SET" "$APP_USER" 28 | fi 29 | else 30 | echo "WARNING: User ${APP_USER} not found. Skipping PUID/PGID modification. This might indicate an issue with the Docker image build." 31 | fi 32 | # --- End PUID/PGID Runtime Configuration --- 33 | 34 | # The runtime environment variable that holds the API URL 35 | RUNTIME_API_URL_ENV_VAR="VITE_DISCOVARR_URL" 36 | 37 | # The placeholder string in your JS/HTML code 38 | PLACEHOLDER="__API_ENDPOINT__" 39 | 40 | # Default API URL if the environment variable is not set or is empty 41 | # This will be used if VITE_DISCOVARR_URL is not provided when running the container. 42 | DEFAULT_FALLBACK_URL="http://localhost:8000/api" # Adjust if your default API path is different 43 | 44 | # Directory where your built frontend assets are located (copied from client/dist) 45 | # This path is inside the Docker container. 46 | STATIC_ASSETS_DIR="/app/server/static" 47 | 48 | # Determine the target URL 49 | TARGET_URL=$(printenv "${RUNTIME_API_URL_ENV_VAR}") 50 | 51 | if [ -z "${TARGET_URL}" ]; then 52 | echo "INFO: Environment variable '${RUNTIME_API_URL_ENV_VAR}' is not set or is empty." 
53 | echo "INFO: Using default fallback API URL: '${DEFAULT_FALLBACK_URL}'" 54 | TARGET_URL="${DEFAULT_FALLBACK_URL}" 55 | else 56 | echo "INFO: Using API URL from environment variable '${RUNTIME_API_URL_ENV_VAR}': '${TARGET_URL}'" 57 | fi 58 | 59 | echo "INFO: Replacing placeholder '${PLACEHOLDER}' with '${TARGET_URL}' in JS and HTML files..." 60 | 61 | # Find all .js and .html files in the static assets directory and its subdirectories 62 | # and replace the placeholder. 63 | # Using '#' as a delimiter for sed to avoid issues with slashes ('/') in the URL. 64 | # The -print0 and xargs -0 pattern handles filenames with spaces or special characters. 65 | find "${STATIC_ASSETS_DIR}" -type f \( -name "*.js" -o -name "*.html" \) -print0 | \ 66 | xargs -0 sed -i "s#${PLACEHOLDER}#${TARGET_URL}#g" 67 | 68 | echo "INFO: Placeholder replacement complete." 69 | 70 | # Update ownership of key directories after potential UID/GID changes and before switching user. 71 | # This ensures the application user can read/write necessary files. 72 | echo "INFO: Ensuring ownership of /app, /config, /backups, /cache for UID ${PUID_TO_SET} and GID ${PGID_TO_SET}" 73 | chown -R "${PUID_TO_SET}:${PGID_TO_SET}" /app /config /backups /cache 74 | 75 | # Execute the CMD passed to the entrypoint (e.g., uvicorn ...) 76 | # Use gosu to drop privileges and execute the command as the APP_USER 77 | echo "INFO: Executing command as user ${APP_USER} (UID: $(id -u ${APP_USER}), GID: $(id -g ${APP_USER})): $@" 78 | exec gosu "$APP_USER" "$@" -------------------------------------------------------------------------------- /client/src/components/ExamplePrompts.vue: -------------------------------------------------------------------------------- 1 | 33 | 34 | -------------------------------------------------------------------------------- /server/src/tests/unit/test_discovarr_prompt.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest.mock import MagicMock, patch 3 | from discovarr import Discovarr # Keep this for type hinting if needed 4 | from services.models import LibraryUser, ItemsFiltered 5 | 6 | from tests.unit.base.base_discovarr_tests import mocked_discovarr_instance # Import the new base fixture 7 | 8 | 9 | def test_get_prompt_basic_scenario(mocked_discovarr_instance: Discovarr): 10 | dv = mocked_discovarr_instance 11 | 12 | # --- Configure mocks for this specific test --- 13 | # Mock the LLMService's get_prompt method directly 14 | mock_rendered_prompt = "Mocked prompt string for basic scenario" 15 | dv.llm_service.get_prompt.return_value = mock_rendered_prompt 16 | 17 | # Mock settings to provide a default prompt template 18 | default_prompt_template = 'Default template' 19 | dv.settings.get.side_effect = lambda group, key, default=None: \ 20 | default_prompt_template if group == "app" and key == "default_prompt" else default 21 | 22 | # --- Call the method under test --- 23 | prompt = dv.get_prompt(limit=5, media_name="Test Movie") 24 | 25 | # --- Assertions --- 26 | # Assert that Discovarr.get_prompt returned the value from the mocked LLMService.get_prompt 27 | assert prompt == mock_rendered_prompt 28 | 29 | # Verify mock calls 30 | # Assert that Discovarr.get_prompt called LLMService.get_prompt with the correct arguments 31 | dv.llm_service.get_prompt.assert_called_once_with( 32 | limit=5, 33 | media_name="Test Movie", 34 | template_string=None # Discovarr.get_prompt passes None if not specified 35 | ) 36 | 37 | 38 | def 
test_get_prompt_with_default_plex_user(mocked_discovarr_instance: Discovarr): 39 | dv = mocked_discovarr_instance # dv is already the mocked instance 40 | 41 | default_prompt_template = "Favs: {{ favorites }}" 42 | test_specific_settings = { 43 | # The default_user setting is used *inside* LLMService.get_prompt, 44 | # so we don't need to mock get_user_by_name here. 45 | # We just need to ensure the default_prompt is available via settings. 46 | ("app", "default_prompt"): default_prompt_template, 47 | } 48 | dv.settings.get.side_effect = lambda group, key, default=None: test_specific_settings.get((group, key), default) 49 | 50 | # Mock the LLMService's get_prompt method 51 | mock_rendered_prompt = "Mocked prompt string with default user logic handled internally" 52 | dv.llm_service.get_prompt.return_value = mock_rendered_prompt 53 | 54 | prompt = dv.get_prompt(limit=3, media_name="Another Movie") 55 | 56 | assert prompt == mock_rendered_prompt 57 | dv.llm_service.get_prompt.assert_called_once_with( 58 | limit=3, media_name="Another Movie", template_string=None # Discovarr.get_prompt passes None 59 | ) 60 | 61 | 62 | def test_get_prompt_custom_template_string(mocked_discovarr_instance: Discovarr): 63 | dv = mocked_discovarr_instance # dv is already the mocked instance 64 | 65 | # Ensure providers are disabled via settings mock so they don't interfere 66 | dv.settings.get.side_effect = lambda group, key, default=None: { 67 | # Only need to mock the default prompt setting if the test might fall back to it 68 | ("app", "default_prompt"): "Should not be used", 69 | }.get((group, key), default) 70 | 71 | # Mock the LLMService's get_prompt method 72 | mock_rendered_prompt = "Mocked prompt string from custom template" 73 | dv.llm_service.get_prompt.return_value = mock_rendered_prompt 74 | 75 | custom_template = "Custom: {{ media_name }} | Exclude: {{ all_media }} | Favs: {{ favorites }} | History: {{ watch_history }}" 76 | prompt = dv.get_prompt(limit=10, media_name="Custom Media", template_string=custom_template) 77 | 78 | assert prompt == mock_rendered_prompt 79 | dv.llm_service.get_prompt.assert_called_once_with( 80 | limit=10, media_name="Custom Media", template_string=custom_template 81 | ) 82 | -------------------------------------------------------------------------------- /server/src/base/llm_provider_base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Optional, Dict, Any, List 3 | 4 | class LLMProviderBase(ABC): 5 | """ 6 | Abstract base class for LLM providers. 7 | Defines a common interface for interacting with different LLM services. 8 | """ 9 | 10 | @property 11 | @abstractmethod 12 | def name(self) -> str: 13 | """ 14 | Returns the name of the LLM provider (e.g., "Gemini", "Ollama"). 15 | """ 16 | pass 17 | 18 | @abstractmethod 19 | async def get_similar_media( 20 | self, 21 | model: str, 22 | prompt: str, 23 | system_prompt: Optional[str] = None, 24 | temperature: Optional[float] = 0.7, 25 | **kwargs: Any 26 | ) -> Optional[Dict[str, Any]]: 27 | """ 28 | Generates media suggestions based on a prompt. 29 | 30 | Args: 31 | model (str): The specific model to use for this LLM provider. 32 | prompt (str): The user's prompt for suggestions. 33 | system_prompt (Optional[str]): System instructions for the LLM. 34 | temperature (Optional[float]): Controls randomness. 35 | **kwargs: Additional provider-specific parameters (e.g., thinking_budget for Gemini). 
36 | 
37 |         Returns:
38 |             Optional[Dict[str, Any]]: A dictionary containing the LLM's response
39 |             and token counts, or None on error.
40 |             Expected structure:
41 |             {
42 |                 'response': Dict[str, Any],      # Parsed LLM output
43 |                 'token_counts': Dict[str, int]   # Token usage
44 |             }
45 |         """
46 |         pass
47 | 
48 |     @abstractmethod
49 |     async def _generate_content(
50 |         self,
51 |         model: str,
52 |         prompt_data: Any,
53 |         system_prompt: Optional[str] = None,
54 |         temperature: Optional[float] = 0.7,
55 |         response_format_details: Optional[Any] = None,
56 |         **kwargs: Any
57 |     ) -> Dict[str, Any]:
58 |         """
59 |         Low-level content generation method to be implemented by each provider.
60 | 
61 |         Args:
62 |             model (str): The specific model to use.
63 |             prompt_data (Any): Provider-specific prompt input (e.g., string, list of messages).
64 |             system_prompt (Optional[str]): System instructions for the LLM.
65 |             temperature (Optional[float]): Controls randomness.
66 |             response_format_details (Optional[Any]): Provider-specific details for response formatting (e.g., Pydantic model, JSON schema).
67 |             **kwargs: Additional provider-specific parameters.
68 | 
69 |         Returns:
70 |             Dict[str, Any]: A dictionary containing 'success' (bool), 'content' (Any), 'token_counts' (Optional[Dict]), and 'message' (Optional[str]).
71 |         """
72 |         pass
73 | 
74 |     @abstractmethod
75 |     async def get_models(self) -> Optional[List[str]]:
76 |         """
77 |         Lists available model names for this LLM provider.
78 | 
79 |         Returns:
80 |             Optional[List[str]]: A list of model names, or None on error.
81 |         """
82 |         pass
83 | 
84 |     @abstractmethod
85 |     async def get_embedding(self, text_content: str, model: Optional[str] = None, dimensions: Optional[int] = None) -> Optional[List[float]]:
86 |         """
87 |         Generates an embedding for the given text using the LLM provider's embedding model.
88 | 
89 |         Args:
90 |             text_content (str): The text to embed.
91 |             model (Optional[str]): The embedding model to use; the provider's default is used if None.
92 |             dimensions (Optional[int]): Desired embedding dimensionality, for providers that support it.
93 | 
94 |         Returns:
95 |             Optional[List[float]]: A list of floats representing the embedding, or None on error.
96 |         """
97 |         pass
98 | 
99 |     @classmethod
100 |     @abstractmethod
101 |     def get_default_settings(cls) -> Dict[str, Dict[str, Any]]:
102 |         """
103 |         Returns the default settings specific to this LLM provider.
104 |         This method should be implemented by each concrete provider class.
105 |         The structure should align with how settings are defined in SettingsService.
106 |         """
107 |         pass
108 | 
-------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: --------------------------------------------------------------------------------
1 | name: Docker Build, Publish, and Release
2 | 
3 | on:
4 |   push:
5 |     tags:
6 |       - 'v*' # Trigger on tags like v1.0, v1.2.3, v2.0-alpha, etc.
7 | workflow_dispatch: # Allows manual triggering 8 | 9 | jobs: 10 | build-and-push: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: read 14 | packages: write # Needed to push to GHCR 15 | 16 | steps: 17 | - name: Checkout repository 18 | uses: actions/checkout@v4 19 | 20 | - name: Set up Docker Buildx 21 | uses: docker/setup-buildx-action@v3 22 | 23 | - name: Log in to GitHub Container Registry 24 | uses: docker/login-action@v3 25 | with: 26 | registry: ghcr.io 27 | username: ${{ github.actor }} 28 | password: ${{ secrets.GITHUB_TOKEN }} 29 | 30 | - name: Extract metadata (tags, labels) for Docker 31 | id: meta 32 | uses: docker/metadata-action@v5 33 | with: 34 | images: ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }} # e.g., ghcr.io/your-username/discovarr 35 | tags: | 36 | type=ref,event=tag 37 | type=raw,value=latest 38 | 39 | - name: Build and push Docker image 40 | uses: docker/build-push-action@v5 41 | id: docker_build # Add an ID to reference outputs 42 | with: 43 | context: . # Build context is the root of your repository 44 | file: ./Dockerfile # Explicitly point to your production Dockerfile 45 | push: true 46 | tags: ${{ steps.meta.outputs.tags }} 47 | labels: ${{ steps.meta.outputs.labels }} 48 | cache-from: type=gha 49 | cache-to: type=gha,mode=max 50 | 51 | - name: Create image digest file 52 | run: echo "${{ steps.docker_build.outputs.digest }}" > image-digest.txt 53 | 54 | - name: Create image tags file 55 | run: echo "${{ steps.meta.outputs.tags }}" > image-tags.txt 56 | 57 | - name: Upload image digest and tags as artifacts 58 | uses: actions/upload-artifact@v4 59 | with: 60 | name: image-info-artifact 61 | path: | 62 | image-digest.txt 63 | image-tags.txt 64 | 65 | release: 66 | name: Create Release 67 | needs: build-and-push 68 | runs-on: ubuntu-latest 69 | permissions: 70 | contents: write # Required to create a release 71 | steps: 72 | - name: Get version information from tag 73 | id: tag_info 74 | run: | 75 | # GITHUB_REF_NAME is the short ref name, e.g., "v1.0.0" for a tag 76 | TAG_NAME="${{ github.ref_name }}" 77 | # Remove 'v' prefix for changelog reader if needed, or use full tag if reader supports it 78 | VERSION_NUMBER="${TAG_NAME#v}" 79 | echo "tag_name=${TAG_NAME}" >> $GITHUB_OUTPUT 80 | echo "version_number=${VERSION_NUMBER}" >> $GITHUB_OUTPUT 81 | shell: bash 82 | 83 | - name: Checkout code 84 | uses: actions/checkout@v4 85 | 86 | - name: Get Changelog Entry 87 | id: changelog_reader 88 | uses: mindsers/changelog-reader-action@v2 89 | with: 90 | version: ${{ steps.tag_info.outputs.version_number }} # Use version without 'v' prefix 91 | path: ./CHANGELOG.md 92 | 93 | - name: Create directory for release assets 94 | run: mkdir ./release-assets 95 | 96 | - name: Download image information artifact 97 | uses: actions/download-artifact@v4 98 | with: 99 | name: image-info-artifact 100 | path: ./release-assets/ 101 | 102 | - name: Create release 103 | uses: ncipollo/release-action@v1 104 | with: 105 | tag: ${{ steps.tag_info.outputs.tag_name }} # Use the full Git tag (e.g., v1.0.0) 106 | name: Release ${{ steps.tag_info.outputs.tag_name }} # Release title using the full tag 107 | body: ${{ steps.changelog_reader.outputs.changes }} 108 | artifacts: ./release-assets/* # Upload all files from the release-assets directory 109 | prerelease: ${{ steps.changelog_reader.outputs.status == 'prereleased' }} 110 | draft: ${{ steps.changelog_reader.outputs.status == 'unreleased' }} 111 | token: ${{ secrets.GITHUB_TOKEN }} 
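The release job above couples each Git tag to a CHANGELOG.md entry: the workflow strips the tag's leading "v" (VERSION_NUMBER="${TAG_NAME#v}") and changelog-reader-action then looks for a "## [x.y.z]" heading with that version, so pushing a tag without a matching changelog entry fails the release step. A quick local pre-tag check can catch this before pushing. The sketch below is illustrative only — check_changelog.py is a hypothetical helper, not a script that ships with this repository — and it assumes the Keep a Changelog heading format used in CHANGELOG.md:

#!/usr/bin/env python3
# check_changelog.py (hypothetical pre-tag helper): verify that CHANGELOG.md
# contains an entry for a given tag, mirroring how the release workflow
# derives the version by stripping the tag's leading "v".
import re
import sys
from pathlib import Path

def has_changelog_entry(tag: str, changelog: Path = Path("CHANGELOG.md")) -> bool:
    version = tag.removeprefix("v")  # same as VERSION_NUMBER="${TAG_NAME#v}" in the workflow
    # Matches headings like: ## [1.2.0]() - 2025-06-23
    heading = re.compile(rf"^## \[{re.escape(version)}\]", re.MULTILINE)
    return bool(heading.search(changelog.read_text(encoding="utf-8")))

if __name__ == "__main__":
    if len(sys.argv) != 2:
        sys.exit("usage: check_changelog.py <tag>, e.g. check_changelog.py v1.2.0")
    tag = sys.argv[1]
    if not has_changelog_entry(tag):
        sys.exit(f"No CHANGELOG.md entry found for {tag}; the release job would fail.")
    print(f"Found CHANGELOG.md entry for {tag}.")

Running python check_changelog.py v1.2.0 from the repository root before git push --tags confirms that the changelog lookup in the release job will succeed.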
-------------------------------------------------------------------------------- /server/src/services/tmdb.py: --------------------------------------------------------------------------------
1 | import requests
2 | import logging
3 | import json
4 | 
5 | from typing import Optional
6 | 
7 | class TMDB:
8 |     """
9 |     A class to interact with the TMDB API for retrieving media information, such as poster art.
10 |     """
11 | 
12 |     def __init__(self, tmdb_api_key: str):
13 |         """
14 |         Initializes the TMDB class with the API key.
15 | 
16 |         Args:
17 |             tmdb_api_key (str): The API key for TMDB.
18 |         """
19 |         # Setup Logging
20 |         self.logger = logging.getLogger(__name__)
21 | 
22 |         self.tmdb_api_key = tmdb_api_key
23 |         if not self.tmdb_api_key:
24 |             self.logger.error("TMDB API key is not configured.")
25 | 
26 |     def get_media_detail(self, tmdb_id: str, media_type: str) -> Optional[dict]:
27 |         if not self.tmdb_api_key:
28 |             self.logger.error("TMDB API key is not configured.")
29 |             return None
30 | 
31 |         base_url = "https://api.themoviedb.org/3"
32 |         endpoint = f"{base_url}/{media_type}/{tmdb_id}"
33 |         params = {
34 |             "language": "en-US",
35 |         }
36 |         headers = {
37 |             "Authorization": f"Bearer {self.tmdb_api_key}",
38 |         }
39 | 
40 |         try:
41 |             response = requests.get(endpoint, params=params, headers=headers)
42 |             response.raise_for_status()
43 |             data = response.json()
44 |             return data
45 |         except requests.exceptions.RequestException as e:
46 |             self.logger.error(f"Failed to fetch {tmdb_id} from TMDB: {e}")
47 |             return None
48 |         except json.JSONDecodeError:
49 |             self.logger.error("Error decoding JSON response from TMDB.")
50 |             return None
51 |         except Exception as e:
52 |             self.logger.exception(f"An unexpected error occurred while fetching {tmdb_id}: {e}")
53 |             return None
54 | 
55 |     def lookup_media(self, query: str, media_type: str) -> Optional[dict]:
56 |         """
57 |         Searches for media (TV show or movie) using the TMDB search API.
58 | 
59 |         Args:
60 |             query (str): The search query (title of movie or TV show)
61 |             media_type (str): Type of media to search for ('tv' or 'movie')
62 | 
63 |         Returns:
64 |             Optional[dict]: First search result containing media details or None if not found
65 |             Returns fields like:
66 |                 - id: TMDB ID
67 |                 - title/name: Title of movie/show
68 |                 - overview: Plot description
69 |                 - first_air_date/release_date: Release date
70 |                 - poster_path: Poster image path
71 |         """
72 |         if not self.tmdb_api_key:
73 |             self.logger.error("TMDB API key is not configured.")
74 |             return None
75 | 
76 |         if media_type not in ['tv', 'movie']:
77 |             self.logger.error(f"Invalid media type: {media_type}. Must be 'tv' or 'movie'")
78 |             return None
79 | 
80 |         base_url = "https://api.themoviedb.org/3"
81 |         endpoint = f"{base_url}/search/{media_type}"
82 | 
83 |         params = {
84 |             "query": query,
85 |             "language": "en-US",
86 |             "page": 1,
87 |             "include_adult": False
88 |         }
89 | 
90 |         headers = {
91 |             "Authorization": f"Bearer {self.tmdb_api_key}",
92 |         }
93 | 
94 |         try:
95 |             response = requests.get(endpoint, params=params, headers=headers)
96 |             response.raise_for_status()
97 |             data = response.json()
98 | 
99 |             results = data.get('results', [])
100 |             if not results:
101 |                 self.logger.warning(f"No {media_type} found for query: {query}")
102 |                 return None
103 | 
104 |             # Return the first result
105 |             return results[0]
106 | 
107 |         except requests.exceptions.RequestException as e:
108 |             self.logger.error(f"Failed to search {media_type} on TMDB: {e}")
109 |             return None
110 |         except json.JSONDecodeError:
111 |             self.logger.error("Error decoding JSON response from TMDB.")
112 |             return None
113 |         except Exception as e:
114 |             self.logger.exception(f"An unexpected error occurred while searching {media_type}: {e}")
115 |             return None
-------------------------------------------------------------------------------- /Dockerfile: --------------------------------------------------------------------------------
1 | # Production build. Serves everything on FastAPI. The Vite app is built and served from the ./server/static directory in the container.
2 | 
3 | # Stage 1: Build Frontend Assets
4 | # Assumes Node.js for the client build. Adjust if using a different environment.
5 | FROM node:lts-alpine AS client-builder
6 | WORKDIR /app/client
7 | 
8 | # Copy package.json and package-lock.json (or yarn.lock)
9 | COPY client/package.json client/package-lock.json* ./
10 | 
11 | # Install client dependencies
12 | # Using 'npm ci' for reproducible builds if package-lock.json is present and up-to-date
13 | # Otherwise, 'npm install' can be used.
14 | RUN npm ci || npm install
15 | 
16 | # Copy the rest of the client application code
17 | COPY client/ ./
18 | 
19 | # Build the client application
20 | # Adjust 'npm run build' if you use a different command (e.g., yarn build)
21 | # Adjust the output directory if it's not 'dist' (e.g., 'build')
22 | RUN npm run build
23 | 
24 | # Stage 2: Setup Backend Application
25 | FROM python:3.12-slim AS backend-builder
26 | WORKDIR /app
27 | 
28 | # Set environment variables to prevent Python from writing .pyc files and to keep output unbuffered
29 | ENV PYTHONDONTWRITEBYTECODE=1
30 | ENV PYTHONUNBUFFERED=1
31 | 
32 | # Install system dependencies if your Python packages need them
33 | # Example: RUN apt-get update && apt-get install -y --no-install-recommends some-build-dep && rm -rf /var/lib/apt/lists/*
34 | 
35 | # Copy requirements file and install Python dependencies
36 | COPY ./server/src/requirements.txt .
37 | RUN pip install --no-cache-dir --upgrade pip
38 | RUN pip install --no-cache-dir -r requirements.txt
39 | 
40 | # Copy the backend application code
41 | # Assumes your backend code is in a 'server' directory relative to the Dockerfile
42 | COPY ./server ./server
43 | 
44 | # Stage 3: Final Production Image
45 | FROM python:3.12-slim
46 | WORKDIR /app
47 | 
48 | ENV PYTHONDONTWRITEBYTECODE=1
49 | ENV PYTHONUNBUFFERED=1
50 | 
51 | # Define build arguments for PUID/PGID with defaults
52 | # These are used for creating the user and setting initial permissions during the build.
53 | ARG PUID_BUILD=1884 54 | ARG PGID_BUILD=1884 55 | 56 | # Create a non-root user and group 'discovarr' with specific GID and UID for security 57 | RUN groupadd -g ${PGID_BUILD} discovarr && \ 58 | useradd --no-create-home -s /bin/false -u ${PUID_BUILD} -g ${PGID_BUILD} discovarr 59 | 60 | # Copy installed Python packages from the backend-builder stage 61 | COPY --from=backend-builder /usr/local/lib/python3.12/site-packages/ /usr/local/lib/python3.12/site-packages/ 62 | COPY --from=backend-builder /usr/local/bin/ /usr/local/bin/ 63 | # Copy the backend application code from the backend-builder stage 64 | COPY --from=backend-builder /app/server ./server 65 | 66 | # Copy built frontend assets from the client-builder stage 67 | # These assets should be served by your FastAPI application. 68 | # Assumes client build output is in '/app/client/dist' in the client-builder stage. 69 | # Assumes FastAPI will serve static files from a 'static' subdirectory within the 'server' directory. 70 | COPY --from=client-builder /app/client/dist ./server/static 71 | 72 | # Create directories for persistent data (config, backups) and set ownership 73 | # These paths align with the volumes in your docker-compose.yml 74 | RUN mkdir -p /config /backups /cache && \ 75 | chown -R ${PUID_BUILD}:${PGID_BUILD} /config /backups /app /cache 76 | 77 | # Install gosu for dropping privileges 78 | RUN apt-get update && \ 79 | apt-get install -y --no-install-recommends gosu postgresql-client && \ 80 | rm -rf /var/lib/apt/lists/* 81 | 82 | 83 | # Set default PUID/PGID environment variables if not provided at runtime 84 | # These are used by the entrypoint.sh script. 85 | ENV PUID=${PUID_BUILD} 86 | ENV PGID=${PGID_BUILD} 87 | 88 | # Copy the entrypoint script and make it executable 89 | COPY entrypoint.sh /usr/local/bin/entrypoint.sh 90 | RUN chmod +x /usr/local/bin/entrypoint.sh 91 | 92 | # The entrypoint script will handle switching to the non-root user 93 | # USER discovarr 94 | # Expose the port the application runs on (should match your FastAPI config and docker-compose) 95 | EXPOSE 8000 96 | 97 | WORKDIR /app/server/src 98 | 99 | # Set the entrypoint 100 | ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] 101 | # Command to run the FastAPI application 102 | # Adjust 'server.main:app' if your FastAPI app instance is named differently or located in a different file/module. 103 | CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] -------------------------------------------------------------------------------- /server/src/tests/integration/base/base_live_request_provider_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from typing import Dict, Any 3 | from services.response import APIResponse 4 | from base.request_provider_base import RequestProviderBase 5 | 6 | class BaseLiveRequestProviderTests: 7 | """ 8 | Abstract base class for **live integration tests** of RequestProviderBase implementations. 9 | 10 | Subclasses must: 11 | 1. Implement the `live_provider` fixture to return an initialized instance of the provider. 12 | 2. Optionally override fixtures like `movie_tmdb_id` and `tv_tmdb_id` if specific media 13 | is needed for testing against a particular server instance. 
14 | """ 15 | 16 | # This fixture must be overridden by concrete test classes 17 | @pytest.fixture 18 | def live_provider(self) -> RequestProviderBase: 19 | """Provides a live instance of the request provider.""" 20 | raise NotImplementedError("Subclasses must implement this fixture.") 21 | 22 | @pytest.fixture 23 | def movie_tmdb_id(self) -> int: 24 | """Provides a valid TMDB ID for a well-known movie.""" 25 | return 550 # Fight Club 26 | 27 | @pytest.fixture 28 | def tv_tmdb_id(self) -> int: 29 | """Provides a valid TMDB ID for a well-known TV show.""" 30 | return 1396 # Breaking Bad 31 | 32 | def test_provider_name(self, live_provider: RequestProviderBase): 33 | """Tests that the provider has a valid name.""" 34 | assert hasattr(live_provider, 'PROVIDER_NAME') 35 | assert isinstance(live_provider.PROVIDER_NAME, str) 36 | assert len(live_provider.PROVIDER_NAME) > 0 37 | 38 | def test_get_quality_profiles(self, live_provider: RequestProviderBase): 39 | """Tests fetching quality profiles.""" 40 | response = live_provider.get_quality_profiles() 41 | 42 | assert isinstance(response, APIResponse), "get_quality_profiles should return an APIResponse object." 43 | assert response.success, f"API call to get quality profiles failed: {response.message}" 44 | 45 | profiles = response.data 46 | assert isinstance(profiles, list), "The 'data' in the response should be a list of profiles." 47 | 48 | # Jellyseerr returns an empty list with a message, which is valid. 49 | if live_provider.PROVIDER_NAME == 'jellyseerr': 50 | assert 'manages quality profiles internally' in response.message 51 | return 52 | 53 | # Overseerr returns an empty list with a message, which is valid. 54 | if live_provider.PROVIDER_NAME == 'overseerr': 55 | assert 'manages quality profiles internally' in response.message 56 | return 57 | 58 | # For other providers, we expect profiles. 59 | assert len(profiles) > 0, "Expected at least one quality profile." 60 | 61 | profile = profiles[0] 62 | assert isinstance(profile, dict) 63 | assert 'id' in profile 64 | assert 'name' in profile 65 | 66 | def test_lookup_movie(self, live_provider: RequestProviderBase, movie_tmdb_id: int): 67 | """Tests looking up a movie by its TMDB ID.""" 68 | # Sonarr doesn't handle movies. 69 | if live_provider.PROVIDER_NAME == 'sonarr': 70 | pytest.skip("Sonarr does not handle movies.") 71 | 72 | response = live_provider.lookup_media(tmdb_id=movie_tmdb_id, media_type='movie') 73 | 74 | assert isinstance(response, APIResponse) 75 | assert response.success, f"Movie lookup failed: {response.message}" 76 | 77 | movie_data = response.data 78 | assert isinstance(movie_data, dict) 79 | assert 'title' in movie_data 80 | # Radarr uses tmdbId, Jellyseerr uses id for tmdb_id in response 81 | assert movie_data.get('tmdbId') == movie_tmdb_id or movie_data.get('id') == movie_tmdb_id 82 | 83 | def test_lookup_tv_show(self, live_provider: RequestProviderBase, tv_tmdb_id: int): 84 | """Tests looking up a TV show by its TMDB ID.""" 85 | # Radarr doesn't handle TV shows. 
86 |         if live_provider.PROVIDER_NAME == 'radarr':
87 |             pytest.skip("Radarr does not handle TV shows.")
88 | 
89 |         response = live_provider.lookup_media(tmdb_id=tv_tmdb_id, media_type='tv')
90 | 
91 |         assert isinstance(response, APIResponse)
92 |         assert response.success, f"TV show lookup failed: {response.message}"
93 | 
94 |         tv_data = response.data
95 |         assert isinstance(tv_data, dict)
96 |         #assert 'title' in tv_data
97 |         # Sonarr uses tmdbId, Jellyseerr uses id for tmdb_id in response
98 |         assert tv_data.get('tmdbId') == tv_tmdb_id or tv_data.get('id') == tv_tmdb_id
-------------------------------------------------------------------------------- /server/src/services/image_cache.py: --------------------------------------------------------------------------------
1 | import logging
2 | from pathlib import Path
3 | from typing import Optional
4 | from urllib.parse import urlparse
5 | import os
6 | import aiohttp
7 | import aiofiles
8 | 
9 | 
10 | class ImageCacheService:
11 |     """
12 |     A service to download and cache images locally.
13 |     """
14 | 
15 |     def __init__(self, cache_base_dir: str = "/cache/image"):
16 |         """
17 |         Initializes the ImageCacheService.
18 | 
19 |         Args:
20 |             cache_base_dir (str): The base directory where images will be cached.
21 |         """
22 |         self.logger = logging.getLogger(__name__)
23 |         self.cache_base_dir = Path(cache_base_dir)
24 |         self._ensure_cache_dir_exists()
25 | 
26 |     def _ensure_cache_dir_exists(self) -> None:
27 |         """Ensures that the cache directory exists."""
28 |         try:
29 |             self.cache_base_dir.mkdir(parents=True, exist_ok=True)
30 |             self.logger.info(f"Cache directory ensured at: {self.cache_base_dir}")
31 |         except Exception as e:
32 |             self.logger.error(f"Failed to create cache directory at {self.cache_base_dir}: {e}", exc_info=True)
33 | 
34 |     def _get_file_extension_from_url(self, image_url: str) -> Optional[str]:
35 |         """
36 |         Attempts to extract a file extension from the image URL.
37 |         """
38 |         try:
39 |             path = urlparse(image_url).path
40 |             ext = os.path.splitext(path)[1]
41 |             return ext if ext else '.jpg'  # Default to .jpg if no extension found
42 |         except Exception:
43 |             return '.jpg'  # Default on any parsing error
44 | 
45 |     async def save_image_from_url(self, session: aiohttp.ClientSession, image_url: str, provider_name: str, item_id: str) -> Optional[str]:
46 |         """
47 |         Downloads an image from a URL and saves it to the local cache.
48 | 
49 |         Args:
50 |             session (aiohttp.ClientSession): An active aiohttp client session.
51 |             image_url (str): The URL of the image to download.
52 |             provider_name (str): The name of the provider (e.g., 'plex', 'tmdb').
53 |             item_id (str): The unique ID of the item associated with the image.
54 | 
55 |         Returns:
56 |             Optional[str]: The cached image's filename (e.g., "plex_123.jpg") if successful,
57 |             otherwise None.
58 |         """
59 |         if not image_url:
60 |             self.logger.warning("No image URL provided. Skipping cache.")
61 |             return None
62 | 
63 |         try:
64 |             extension = self._get_file_extension_from_url(image_url)
65 |             # Sanitize item_id to be safe for filenames (e.g., replace slashes if any)
66 |             safe_item_id = str(item_id).replace('/', '_').replace('\\', '_')
67 |             filename = f"{provider_name.lower()}_{safe_item_id}{extension}"
68 |             local_image_path = self.cache_base_dir / filename
69 |             #web_accessible_path = f"/{self.cache_base_dir.name}/{filename}"
70 | 
71 |             if local_image_path.exists():
72 |                 self.logger.info(f"Image already cached at {local_image_path}.
Using existing file.") 73 | return filename 74 | 75 | async with session.get(image_url, timeout=10) as response: 76 | response.raise_for_status() # Will raise an ClientResponseError for bad responses (4XX or 5XX) 77 | 78 | async with aiofiles.open(local_image_path, 'wb') as f: 79 | async for chunk in response.content.iter_chunked(8192): 80 | await f.write(chunk) 81 | 82 | self.logger.info(f"Successfully cached image to {local_image_path}") 83 | return filename 84 | except aiohttp.ClientError as e: 85 | self.logger.error(f"Failed to download image from {image_url}: {e}") 86 | except IOError as e: 87 | self.logger.error(f"Failed to save image to {local_image_path}: {e}") 88 | except Exception as e: 89 | self.logger.error(f"An unexpected error occurred while caching image from {image_url}: {e}", exc_info=True) 90 | return None 91 | 92 | def delete_cached_image(self, filename: str) -> bool: 93 | """ 94 | Deletes a specific image file from the cache. 95 | 96 | Args: 97 | filename (str): The name of the file to delete (e.g., "plex_123.jpg"). 98 | 99 | Returns: 100 | bool: True if the file was deleted successfully or did not exist, False on error. 101 | """ 102 | if not filename: 103 | self.logger.warning("No filename provided for deletion. Skipping.") 104 | return True # Consider it success if no action needed 105 | 106 | local_image_path = self.cache_base_dir / filename 107 | if local_image_path.exists(): 108 | try: 109 | local_image_path.unlink() 110 | self.logger.info(f"Successfully deleted cached image: {local_image_path}") 111 | return True 112 | except OSError as e: 113 | self.logger.error(f"Failed to delete cached image {local_image_path}: {e}") 114 | return False 115 | else: 116 | self.logger.info(f"Cached image {local_image_path} not found. No deletion needed.") 117 | return True -------------------------------------------------------------------------------- /scripts/import_watch_history.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | from datetime import datetime, timezone 4 | import csv 5 | import argparse 6 | import sys 7 | import time # Import the time module 8 | 9 | # Global variable for the endpoint, to be formatted with base_url 10 | WATCH_HISTORY_ENDPOINT_PATH = "/api/watch-history/import" 11 | 12 | def add_watch_history_item(data: dict, base_url: str): 13 | """ 14 | Sends a POST request to add a new watch history item. 15 | """ 16 | try: 17 | print(f"Sending POST request to: {base_url}{WATCH_HISTORY_ENDPOINT_PATH}") 18 | # Ensure last_played_date is in ISO format if present 19 | if data.get("last_played_date") and isinstance(data["last_played_date"], datetime): 20 | data["last_played_date"] = data["last_played_date"].isoformat() 21 | 22 | print(f"Payload: {json.dumps(data, indent=2)}") 23 | 24 | response = requests.post(f"{base_url}{WATCH_HISTORY_ENDPOINT_PATH}", json=data) 25 | 26 | print(f"\nResponse Status Code: {response.status_code}") 27 | 28 | # Try to parse JSON response, otherwise print text 29 | try: 30 | response_json = response.json() 31 | print("Response JSON:") 32 | print(json.dumps(response_json, indent=2)) 33 | except json.JSONDecodeError: 34 | print("Response Text (not JSON):") 35 | print(response.text) 36 | 37 | if response.ok: # Checks for 2xx status codes 38 | print("\nWatch history item added/updated successfully!") 39 | else: 40 | print(f"\nFailed to add/update watch history item. 
Server responded with error.") 41 | 42 | except requests.exceptions.ConnectionError as e: 43 | print(f"\nConnection Error: Could not connect to the server at {base_url}.") 44 | print(f"Please ensure the Discovarr server is running and accessible.") 45 | print(f"Details: {e}") 46 | except Exception as e: 47 | print(f"\nAn unexpected error occurred: {e}") 48 | 49 | def process_csv(csv_filepath: str, base_url: str): 50 | """ 51 | Reads a CSV file and sends a POST request for each row. 52 | Required columns: title, watched_by, media_type 53 | Optional columns: media_id, last_played_date (ISO format), source, poster_url_source 54 | """ 55 | try: 56 | with open(csv_filepath, mode='r', encoding='utf-8') as csvfile: 57 | reader = csv.DictReader(csvfile) 58 | if not all(col in reader.fieldnames for col in ['title', 'watched_by', 'media_type']): 59 | print("Error: CSV file must contain 'title', 'watched_by', and 'media_type' columns.") 60 | sys.exit(1) 61 | 62 | for i, row in enumerate(reader): 63 | print(f"\n--- Processing row {i+1} ---") 64 | item_data = { 65 | "title": row.get('title'), 66 | "watched_by": row.get('watched_by'), 67 | "media_type": row.get('media_type', '').lower(), # Ensure lowercase for 'tv'/'movie' 68 | # Optional fields from CSV 69 | "media_id": row.get('media_id') if row.get('media_id') else None, 70 | # If last_played_date is not in CSV, server will default to now() 71 | "last_played_date": row.get('last_played_date') if row.get('last_played_date') else None, 72 | "source": row.get('source', 'csv_import'), # Default source if not in CSV 73 | "poster_url_source": row.get('poster_url_source') if row.get('poster_url_source') else None 74 | } 75 | 76 | # Basic validation for required fields from CSV 77 | if not item_data["title"] or not item_data["watched_by"] or not item_data["media_type"]: 78 | print(f"Skipping row {i+1} due to missing required fields (title, watched_by, media_type): {row}") 79 | continue 80 | 81 | if item_data["media_type"] not in ["movie", "tv"]: 82 | print(f"Skipping row {i+1} due to invalid media_type '{item_data['media_type']}'. 
Must be 'movie' or 'tv'.") 83 | continue 84 | 85 | add_watch_history_item(item_data, base_url) 86 | print("Waiting for 2 seconds before next request...") 87 | time.sleep(2) # Add a 2-second delay to avoid hitting the rate limit on the TMDB API 88 | 89 | except FileNotFoundError: 90 | print(f"Error: CSV file not found at {csv_filepath}") 91 | sys.exit(1) 92 | except Exception as e: 93 | print(f"An error occurred while processing the CSV file: {e}") 94 | sys.exit(1) 95 | 96 | if __name__ == "__main__": 97 | parser = argparse.ArgumentParser(description="Import watch history from a CSV file.") 98 | parser.add_argument("csv_file", help="Path to the CSV file to import.") 99 | parser.add_argument("--base_url", default="http://localhost:8000", help="Base URL of the Discovarr API (e.g., http://localhost:8000)") 100 | args = parser.parse_args() 101 | 102 | process_csv(args.csv_file, args.base_url) 103 | 104 | # To run this script: 105 | # python import_watch_history.py /path/to/your/watch_history.csv 106 | # python import_watch_history.py /path/to/your/watch_history.csv --base_url http://your_discovarr_host:port 107 | # 108 | # Example CSV with all supported fields: 109 | # title,media_id,media_type,watched_by,last_played_date,source,poster_url_source 110 | # "The Matrix",tt0133093,movie,user1,2023-01-15T10:00:00Z,my_csv_import,https://image.tmdb.org/t/p/w500/f89U3ADr1oiB1s9GkdPOEpXUk5H.jpg 111 | # "Breaking Bad",,tv,user2,,, # media_id, last_played_date, poster_url_source are optional 112 | # "Another Movie",,movie,user1,2023-02-20T12:30:00+02:00,, 113 | 114 | # Example CSV with minimal required fields: 115 | #title,watched_by,media_type 116 | #"The Grand Budapest Hotel",test,movie 117 | #"Pulp Fiction",test,movie 118 | #"Interstellar",test,movie 119 | -------------------------------------------------------------------------------- /server/src/services/api.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | import logging 4 | from typing import Optional, Dict, Any 5 | from .response import APIResponse # Import the APIResponse class 6 | 7 | class Api: 8 | def __init__(self, url: str, api_key: str, api_key_header_name: str = "X-Api-Key", api_base_path: str = "api/v3"): 9 | """Initialize a generic API client. 10 | 11 | Args: 12 | url (str): The base URL of the API server (e.g., "http://your-server:8080") 13 | api_key (str): The API key for authentication. 14 | api_key_header_name (str): The name of the HTTP header used to send the API key. 15 | api_base_path (str): The base path for all API endpoints (e.g., "api/v3"). 16 | If empty, requests will be made directly against the base URL. 17 | """ 18 | self.logger = logging.getLogger(__name__) 19 | self.url = url.rstrip('/') # Ensure no trailing slash for consistent URL building 20 | self.api_key = api_key 21 | self.api_key_header_name = api_key_header_name 22 | # Ensure api_base_path is clean and doesn't affect URL construction negatively if empty 23 | self.api_base_path = api_base_path.strip('/') 24 | self.headers = { 25 | self.api_key_header_name: api_key, 26 | "Content-Type": "application/json", 27 | "Accept": "application/json", 28 | } 29 | 30 | def _make_request(self, method: str, endpoint: str, params: Optional[Dict] = None, data: Optional[Dict] = None) -> APIResponse: 31 | """Make a request to the configured API. 32 | 33 | Args: 34 | method (str): HTTP method (GET, POST, PUT, DELETE, etc.) 35 | endpoint (str): API endpoint path (e.g., "users", "items/1"). 
36 |                 It will be appended to the base URL and API base path.
37 |             params (Optional[Dict]): Query parameters for the request.
38 |             data (Optional[Dict]): Data to send with request (will be JSON serialized for POST/PUT).
39 | 
40 |         Returns:
41 |             APIResponse: An APIResponse object containing the result of the API call.
42 |         """
43 |         path_parts = [self.api_base_path, endpoint.lstrip('/')]
44 |         actual_path = "/".join(p for p in path_parts if p)  # Joins non-empty parts
45 |         full_url = f"{self.url}/{actual_path}"
46 | 
47 |         self.logger.debug(f"Making {method} request to {full_url} with params={params}, data is_present={data is not None}")
48 | 
49 |         try:
50 |             response = requests.request(method, full_url, headers=self.headers, params=params, json=data)
51 |             response.raise_for_status()  # Raises HTTPError for 4xx/5xx responses
52 | 
53 |             if response.status_code == 204:  # No Content
54 |                 return APIResponse(success=True, data=None, status_code=response.status_code)
55 | 
56 |             # Attempt to parse JSON, assuming most successful responses are JSON
57 |             # If response is empty but status is 2xx (e.g. 200 with empty body), .json() might fail
58 |             try:
59 |                 response_data = response.json()
60 |             except json.JSONDecodeError as je:
61 |                 if not response.text:  # Empty body with a 2xx status other than 204
62 |                     self.logger.debug(f"Successful response {response.status_code} with empty body from {method} {full_url}.")
63 |                     return APIResponse(success=True, data=None, status_code=response.status_code)
64 |                 # Non-empty, non-JSON successful response (should be rare for APIs this client targets)
65 |                 self.logger.warning(f"Could not decode JSON from successful response for {method} {full_url}: {je}. Body: {response.text[:100]}")
66 |                 return APIResponse(success=False, status_code=response.status_code, message="Failed to decode JSON from successful response.", error={"details": response.text})
67 | 
68 |             return APIResponse(success=True, data=response_data, status_code=response.status_code)
69 | 
70 |         except requests.exceptions.HTTPError as e:
71 |             response_content = None  # Could be dict, list from JSON, or str
72 |             response_text_data = e.response.text if e.response is not None else "No response body"
73 |             status_code_data = e.response.status_code if e.response is not None else None
74 | 
75 |             if e.response is not None:
76 |                 try:
77 |                     response_content = e.response.json()
78 |                     if isinstance(response_content, list):
79 |                         errors = []
80 |                         for error in response_content:
81 |                             errors.append(f"{error.get('errorCode')}: {error.get('errorMessage')}")
82 | 
83 |                         if errors:
84 |                             response_content = errors
85 |                 except json.JSONDecodeError:
86 |                     self.logger.debug(f"Could not decode JSON from error response body: {response_text_data}")
87 | 
88 |             error_payload = response_content if response_content is not None else response_text_data
89 | 
90 |             err_msg = f"API HTTP Error for {method} {full_url}"
91 |             self.logger.error(f"{err_msg}. Status: {status_code_data}. Body: {response_text_data[:500]}")  # Log potentially large body truncated
92 |             return APIResponse(
93 |                 success=False,
94 |                 status_code=status_code_data,
95 |                 message=err_msg,
96 |                 error={"details": error_payload}
97 |             )
98 |         except requests.exceptions.RequestException as e:  # Covers DNS errors, connection timeouts, etc.
99 |             err_msg = f"API Request Error for {method} {full_url}: {e}"
100 |             self.logger.error(err_msg)
101 |             return APIResponse(success=False, message=err_msg, error={"details": str(e)})
102 |         except json.JSONDecodeError as e:  # Should be caught by specific try-except for response.json() above.
103 |             # This is a fallback for unexpected JSON errors during success.
104 |             status_code_data = response.status_code if 'response' in locals() and hasattr(response, 'status_code') else None
105 |             response_text_data = response.text if 'response' in locals() and hasattr(response, 'text') else str(e)
106 |             err_msg = f"Failed to decode successful JSON response from API for {method} {full_url}: {e}"
107 |             self.logger.error(f"{err_msg} Body: {response_text_data[:500]}")
108 |             return APIResponse(
109 |                 success=False,
110 |                 status_code=status_code_data,
111 |                 message=err_msg,
112 |                 error={"details": response_text_data}
113 |             )
--------------------------------------------------------------------------------
/client/src/components/RequestModal.vue:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/server/src/tests/integration/base/base_live_library_provider_tests.py:
--------------------------------------------------------------------------------
1 | # in a new file, e.g., tests/base/base_library_provider_tests.py
2 | import pytest
3 | from typing import List, Optional, Union
4 | from services.models import LibraryUser, ItemsFiltered
5 | from base.library_provider_base import LibraryProviderBase  # Your abstract base class
6 | 
7 | class BaseLiveLibraryProviderTests:
8 |     # This fixture must be overridden by concrete test classes
9 |     @pytest.fixture
10 |     def live_provider(self) -> LibraryProviderBase:
11 |         """Provides a live instance of the library provider."""
12 |         raise NotImplementedError("Subclasses must implement this fixture.")
13 | 
14 |     @pytest.fixture
15 |     def valid_user_id(self, live_provider: LibraryProviderBase) -> str:
16 |         """
17 |         Provides a valid user ID for the specific provider.
18 |         Subclasses might need to fetch this dynamically.
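        A minimal override sketch for a concrete test class (the environment
        variable name here is hypothetical, for illustration only):

            @pytest.fixture
            def valid_user_id(self) -> str:
                return os.environ["MYPROVIDER_TEST_USER_ID"]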
19 |         """
20 |         # Example: Fetch the first user; subclasses might need more specific logic
21 |         users = live_provider.get_users()
22 |         if not users:
23 |             pytest.skip(f"No users found for provider {live_provider.name} to get a valid_user_id.")
24 |         return users[0].id
25 | 
26 |     @pytest.fixture
27 |     def valid_username(self, live_provider: LibraryProviderBase) -> str:
28 |         """Provides a valid username for the specific provider."""
29 |         users = live_provider.get_users()
30 |         if not users:
31 |             pytest.skip(f"No users found for provider {live_provider.name} to get a valid_username.")
32 |         return users[0].name
33 | 
34 |     def test_provider_name(self, live_provider: LibraryProviderBase):
35 |         assert isinstance(live_provider.name, str)
36 |         assert len(live_provider.name) > 0
37 | 
38 |     def test_get_users(self, live_provider: LibraryProviderBase):
39 |         users = live_provider.get_users()
40 |         assert users is not None, "get_users should return a list (possibly empty), not None"
41 |         if users:  # Okay if it's an empty list
42 |             assert isinstance(users, list)
43 |             for user in users:
44 |                 assert isinstance(user, LibraryUser)
45 |                 assert hasattr(user, 'id')  # Check if the attribute exists
46 |                 # Allow id to be None, but if it's not None, check its type (e.g., str)
47 |                 if user.id is not None:
48 |                     assert isinstance(user.id, str), f"User ID for {user.name} should be a string if not None, got {type(user.id)}"
49 |                 assert hasattr(user, 'name') and user.name is not None
50 |                 assert hasattr(user, 'source_provider') and user.source_provider == live_provider.name
51 | 
52 |     def test_get_user_by_name(self, live_provider: LibraryProviderBase, valid_username: str):
53 |         user = live_provider.get_user_by_name(valid_username)
54 |         # This assertion depends on valid_username actually existing for the provider
55 |         if live_provider.get_users():  # Only assert if we could get users to begin with
56 |             assert user is not None, f"User '{valid_username}' should be found for provider {live_provider.name}"
57 |         if user:
58 |             assert isinstance(user, LibraryUser)
59 |             assert user.name == valid_username
60 | 
61 |         non_existent_user = live_provider.get_user_by_name("a_user_that_REALLY_does_not_exist_12345abc_xyz")
62 |         assert non_existent_user is None, "Non-existent user should return None"
63 | 
64 |     def test_get_recently_watched(self, live_provider: LibraryProviderBase, valid_user_id: str):
65 |         watched_items_raw = live_provider.get_recently_watched(user_id=valid_user_id, limit=10)
66 |         assert watched_items_raw is not None, "Expected recently watched items (raw) or an empty list"
67 |         assert isinstance(watched_items_raw, list)
68 |         assert len(watched_items_raw) <= 10, "Expected at most 10 items when limit is 10"
69 |         if watched_items_raw:
70 |             for item_dict in watched_items_raw:
71 |                 assert isinstance(item_dict, ItemsFiltered)
72 |                 # Add basic checks for expected keys if common across providers' raw output
73 | 
74 |     def test_get_recently_watched_all(self, live_provider: LibraryProviderBase, valid_user_id: str):
75 |         watched_items_raw = live_provider.get_recently_watched(user_id=valid_user_id, limit=None)
76 |         assert watched_items_raw is not None, "Expected recently watched items (raw) or an empty list"
77 |         assert isinstance(watched_items_raw, list)
78 |         assert len(watched_items_raw) > 1, "Expected more than one item when no limit is applied"
79 |         if watched_items_raw:
80 |             for item_dict in watched_items_raw:
81 |                 assert isinstance(item_dict, ItemsFiltered)
82 |                 # Add basic checks for expected keys if common across providers' raw output
83 | 
84 |     def 
test_get_favorites(self, live_provider: LibraryProviderBase, valid_user_id: str): 85 | # Similar to get_recently_watched, testing the List[Dict[str, Any]] contract 86 | favorite_items_raw = live_provider.get_favorites(user_id=valid_user_id, limit=5) 87 | assert favorite_items_raw is not None, "Expected favorite items (raw) or an empty list" 88 | assert isinstance(favorite_items_raw, list) 89 | if favorite_items_raw: 90 | for item_dict in favorite_items_raw: 91 | assert isinstance(item_dict, ItemsFiltered) 92 | 93 | def test_get_all_items_filtered_as_objects(self, live_provider: LibraryProviderBase): 94 | # Test when attribute_filter is None (should return List[ItemsFiltered]) 95 | filtered_items = live_provider.get_all_items_filtered() 96 | assert filtered_items is not None 97 | assert isinstance(filtered_items, list) 98 | if filtered_items: 99 | for item in filtered_items: 100 | assert isinstance(item, ItemsFiltered) 101 | assert hasattr(item, 'name') and item.name is not None 102 | assert hasattr(item, 'id') # Ensure the attribute exists 103 | if item.id is not None: # If ID is present, check its type 104 | assert isinstance(item.id, str), f"Item ID for '{item.name}' should be a string if not None, got {type(item.id)}" 105 | # ... other common ItemsFiltered attributes 106 | 107 | def test_get_all_items_filtered_as_names(self, live_provider: LibraryProviderBase): 108 | # Test when attribute_filter is "Name" (should return List[str]) 109 | # Note: Ensure the attribute_filter value ("Name" or "name") is consistent. 110 | filtered_names = live_provider.get_all_items_filtered(attribute_filter="Name") 111 | assert filtered_names is not None 112 | assert isinstance(filtered_names, list) 113 | if filtered_names: 114 | for name in filtered_names: 115 | assert isinstance(name, str) 116 | -------------------------------------------------------------------------------- /server/src/services/scheduler.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Optional, Dict, Any, Callable 3 | from .schedule import Schedule 4 | import json 5 | from datetime import datetime 6 | import asyncio 7 | class DiscovarrScheduler(Schedule): 8 | """ 9 | Discovarr-specific implementation of the Schedule class. 10 | Maps schedule function names to actual Discovarr instance methods. 11 | """ 12 | 13 | def __init__(self, db, discovarr_instance): 14 | """ 15 | Initialize the Discovarr scheduler. 16 | 17 | Args: 18 | db: Database instance for loading and saving schedules 19 | discovarr_instance: Instance of Discovarr class containing the methods to be scheduled 20 | """ 21 | self.discovarr = discovarr_instance 22 | super().__init__(db) 23 | self.logger = logging.getLogger(__name__) 24 | # Schedules will be loaded explicitly after the scheduler is started 25 | # in the main application's startup event. 26 | 27 | # def _setup_default_schedules(self): 28 | # """ 29 | # Set up default schedules for common tasks. 30 | # Only called if no schedules exist in the database. 
31 | # """ 32 | # # Process requests daily at 3 AM 33 | # self.add_job( 34 | # job_id="daily_process", 35 | # func=self.discovarr.process_watch_history, 36 | # hour=3, 37 | # minute=0 38 | # ) 39 | 40 | # # Check recently watched every 30 minutes 41 | # self.add_job( 42 | # job_id="check_recently_watched", 43 | # func=self.discovarr.jellyfin_get_recently_watched, 44 | # minute="*/30" 45 | # ) 46 | 47 | # self.logger.info("Default schedules have been set up") 48 | 49 | def get_function(self, func_name: str) -> Optional[Callable]: 50 | """ 51 | Map function names to actual Discovarr instance methods. 52 | The returned callable will receive args/kwargs from the Schedule table entry at runtime. 53 | 54 | Args: 55 | func_name (str): Name of the function to get 56 | 57 | Returns: 58 | Optional[callable]: The function if found, None otherwise 59 | """ 60 | # Map of function names to actual methods 61 | if func_name == 'sync_watch_history': 62 | # This is now an async function. AsyncIOScheduler can call it directly. 63 | # Expects no runtime args/kwargs from the DB schedule for this specific task. 64 | return self.discovarr.sync_watch_history 65 | elif func_name == 'process_watch_history': 66 | # _create_process_function returns an async function directly. 67 | return self._create_process_function() 68 | elif func_name == 'get_active_media': 69 | # Synchronous, expects no runtime args/kwargs. 70 | return self.discovarr.get_active_media 71 | elif func_name == 'get_ignored_suggestions': 72 | # Synchronous, expects no runtime args/kwargs. 73 | return self.discovarr.get_ignored_suggestions 74 | elif func_name == 'get_similar_media': 75 | # _create_search_function returns an async function directly. 76 | # It expects runtime kwargs from Schedule.kwargs in DB. 77 | return self._create_search_function() 78 | else: 79 | self.logger.warning(f"No function mapping found for {func_name}") 80 | return None 81 | 82 | def trigger_job_now(self, job_id: str) -> bool: 83 | """ 84 | Triggers a scheduled job to run immediately by modifying its next_run_time. 85 | 86 | Args: 87 | job_id: The ID of the job to trigger. 88 | 89 | Returns: 90 | True if the job was found and modified to run now, False otherwise. 91 | """ 92 | job = self.get_job(job_id) # Uses get_job from the base Schedule class 93 | if job: 94 | try: 95 | # self.scheduler is the APScheduler BackgroundScheduler instance from the base Schedule class 96 | self.scheduler.modify_job(job_id, next_run_time=datetime.now(self.scheduler.timezone)) 97 | self.logger.info(f"Job '{job_id}' (task: {job.name}) modified to run now.") 98 | return True 99 | except Exception as e: 100 | self.logger.error(f"Error modifying job '{job_id}' to run now: {e}", exc_info=True) 101 | return False 102 | else: 103 | self.logger.warning(f"Job '{job_id}' not found. Cannot trigger.") 104 | return False 105 | 106 | def _create_search_function(self) -> Callable: 107 | """ 108 | Returns the asynchronous 'get_similar_media' task. 109 | AsyncIOScheduler will call this directly with kwargs from the schedule DB entry. 110 | 111 | Returns: 112 | Callable: An asynchronous function that AsyncIOScheduler can execute. 
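        An illustrative Schedule.kwargs payload for this job (the field names match
        what async_search_task reads below; the values are made up):

            {"media_name": null, "search_id": 42, "custom_prompt": "movies like Heat"}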
113 | """ 114 | async def async_search_task(**runtime_job_kwargs): 115 | self.logger.info(f"Executing scheduled 'get_similar_media' with runtime_kwargs: {runtime_job_kwargs}") 116 | try: 117 | # These kwargs come from the 'kwargs' column of the Schedule table for this job 118 | media_name = runtime_job_kwargs.get('media_name') # Typically None for scheduled searches 119 | search_id = runtime_job_kwargs.get('search_id') 120 | custom_prompt = runtime_job_kwargs.get('custom_prompt') 121 | 122 | await self.discovarr.get_similar_media( 123 | media_name=media_name, 124 | search_id=search_id, 125 | custom_prompt=custom_prompt 126 | ) 127 | except Exception as e: 128 | self.logger.error(f"Error executing scheduled 'get_similar_media' with {runtime_job_kwargs}: {e}", exc_info=True) 129 | 130 | return async_search_task 131 | 132 | def _create_process_function(self) -> Callable: 133 | """ 134 | Returns the asynchronous 'process_watch_history' task. 135 | AsyncIOScheduler will call this directly. 136 | 137 | Returns: 138 | Callable: An asynchronous function that AsyncIOScheduler can execute. 139 | """ 140 | async def async_process_task(*args, **kwargs): # Accept args/kwargs passed by scheduler 141 | job_name = "process_watch_history" 142 | current_loop_id_in_task = id(asyncio.get_event_loop()) 143 | self.logger.info(f"Scheduled task '{job_name}' running in event loop ID: {current_loop_id_in_task}") 144 | self.logger.info(f"Executing scheduled '{job_name}' with args: {args}, kwargs: {kwargs}") 145 | # Log if unexpected arguments are passed, as this task doesn't use them 146 | if args or kwargs: 147 | self.logger.warning(f"Scheduled '{job_name}' received unexpected arguments. Args: {args}, Kwargs: {kwargs}. These will be ignored by the task.") 148 | 149 | try: 150 | await self.discovarr.process_watch_history() 151 | except Exception as e: 152 | self.logger.error(f"Error executing scheduled '{job_name}': {e}", exc_info=True) 153 | 154 | return async_process_task 155 | 156 | -------------------------------------------------------------------------------- /server/src/providers/radarr.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from typing import Optional, Dict, Any 4 | from base.request_provider_base import RequestProviderBase 5 | from services.response import APIResponse 6 | from services.models import SettingType # Import SettingType 7 | 8 | class RadarrProvider(RequestProviderBase): 9 | PROVIDER_NAME = "radarr" 10 | 11 | def __init__(self, url: str, api_key: str): 12 | """Initialize Radarr client. 13 | 14 | Args: 15 | url (str): The URL of your Radarr server (e.g., "http://your-radarr-server:7878") 16 | api_key (str): Your Radarr API key 17 | """ 18 | super().__init__(url=url, api_key=api_key, api_key_header_name="X-Api-Key", api_base_path="api/v3") 19 | self.logger = logging.getLogger(__name__) 20 | # self.url, self.api_key, and self.headers are now managed by the Api base class 21 | 22 | def get_quality_profiles(self, default_profile_id: Optional[int] = None) -> APIResponse: 23 | """Get all quality profiles configured in Radarr. 24 | 25 | Returns: 26 | APIResponse: An APIResponse object. If successful, `data` contains a list of 27 | simplified quality profiles. 
Each profile contains: 28 | - id: The profile ID 29 | - name: The profile name 30 | - allowed_qualities: List of allowed quality names 31 | - is_default: Boolean indicating if it's the default 32 | """ 33 | profiles_response = self._make_request("GET", "qualityprofile") 34 | if not profiles_response.success: 35 | return profiles_response # Propagate error 36 | 37 | if not profiles_response.data or not isinstance(profiles_response.data, list): 38 | self.logger.warning("No quality profiles data received from Radarr or data is not a list.") 39 | return APIResponse( 40 | success=False, 41 | message="No quality profiles data received or in unexpected format from Radarr.", 42 | status_code=profiles_response.status_code 43 | ) 44 | 45 | simplified_profiles = [] 46 | for profile in profiles_response.data: 47 | allowed_qualities = [] 48 | for item in profile.get('items', []): 49 | # Check if the item has a direct allowed flag 50 | if item.get('allowed', False): 51 | # Try to get the quality name, fallback to item name if available 52 | quality_name = (item.get('quality', {}).get('name') or 53 | item.get('name') or 54 | f"Resolution {item.get('quality', {}).get('resolution')}p") 55 | allowed_qualities.append(quality_name) 56 | 57 | simplified_profiles.append({ 58 | 'id': profile['id'], 59 | 'name': profile['name'], 60 | 'allowed_qualities': allowed_qualities, 61 | 'is_default': profile['id'] == default_profile_id if default_profile_id is not None else False 62 | }) 63 | 64 | return APIResponse(success=True, data=simplified_profiles, status_code=profiles_response.status_code) 65 | 66 | def lookup_media(self, tmdb_id: int, media_type: Optional[str] = "movie") -> APIResponse: 67 | """Look up movie details from TMDB ID. 68 | 69 | Args: 70 | tmdb_id (int): The TMDb ID of the movie 71 | 72 | Returns: 73 | APIResponse: An APIResponse object. If successful, `data` contains movie details. 74 | """ 75 | # Radarr's lookup endpoint for a single movie by TMDB ID usually returns a single movie object, not a list. 76 | api_response = self._make_request("GET", "movie/lookup/tmdb", params={"tmdbId": tmdb_id}) 77 | 78 | if not api_response.success: 79 | return api_response # Propagate error 80 | 81 | # Check if the data from a successful call is valid (Radarr returns a single object for this lookup) 82 | if not api_response.data or not isinstance(api_response.data, dict): # Expecting a dict 83 | msg = f"Movie with TMDB ID {tmdb_id} not found in Radarr's lookup or lookup returned invalid data." 84 | self.logger.warning(msg) 85 | return APIResponse(success=False, message=msg, status_code=404, error={"details": "Lookup returned no or invalid results."}) 86 | 87 | return api_response 88 | 89 | def add_media(self, tmdb_id: int, quality_profile_id: int, root_dir_path: str = "/movies", 90 | monitor: bool = True, search_for_movie: bool = True) -> APIResponse: 91 | """Add a movie to Radarr using TMDb ID. 92 | 93 | Args: 94 | tmdb_id (int): The TMDb ID of the movie to add 95 | quality_profile_id (int): The ID of the quality profile to use 96 | root_dir_path (str): The Root Directory path in Radarr. Defaults to "/movies" 97 | monitor (bool, optional): Whether to monitor the movie. Defaults to True 98 | search_for_movie (bool, optional): Whether to search for movie after adding. Defaults to True 99 | 100 | Returns: 101 | APIResponse: An APIResponse object. If successful, `data` contains the Radarr response. 
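        Example (hypothetical URL, API key, and profile ID, for illustration only):

            radarr = RadarrProvider(url="http://radarr:7878", api_key="your-api-key")
            resp = radarr.add_media(tmdb_id=603, quality_profile_id=1, search_for_movie=False)
            if resp.success:
                print(resp.data["id"])  # Radarr's internal ID for the newly added movie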
102 | """ 103 | # First lookup the movie details 104 | lookup_response = self.lookup_media(tmdb_id) 105 | if not lookup_response.success: 106 | return lookup_response # Propagate error from lookup 107 | 108 | movie_info = lookup_response.data # This is now the movie details dict 109 | # Prepare the payload with movie details 110 | data = { 111 | **movie_info, # Include all movie details from lookup 112 | "qualityProfileId": quality_profile_id, 113 | "rootFolderPath": root_dir_path, 114 | "monitored": monitor, 115 | "addOptions": { 116 | "searchForMovie": search_for_movie, 117 | }, 118 | } 119 | 120 | self.logger.info(f"Radarr add movie request: {json.dumps(data, indent=2)}") 121 | return self._make_request("POST", "movie", data=data) 122 | 123 | def delete_media(self, id: int) -> APIResponse: 124 | 125 | self.logger.info(f"Deleting Radarr movie with Radarr ID: {id}") 126 | return self._make_request("DELETE", f"movie/{id}") 127 | 128 | 129 | 130 | @classmethod 131 | def get_default_settings(cls) -> Dict[str, Dict[str, Any]]: 132 | """ 133 | Returns the default settings for the Radarr provider. 134 | """ 135 | return { 136 | "enabled": {"value": True, "type": SettingType.BOOLEAN, "description": "Enable or disable Radarr integration."}, 137 | "url": {"value": "http://radarr:7878", "type": SettingType.URL, "description": "Radarr server URL", "required": True}, 138 | "api_key": {"value": None, "type": SettingType.STRING, "description": "Radarr API key", "required": True}, 139 | "default_quality_profile_id": {"value": None, "type": SettingType.INTEGER, "description": "Radarr Default quality profile ID"}, 140 | "root_dir_path": {"value": "/movies", "type": SettingType.STRING, "description": "Root directory path for Radarr"}, 141 | "base_provider": {"value": "request", "type": SettingType.STRING, "show": False, "description": "Base Provider Type."}, 142 | } 143 | -------------------------------------------------------------------------------- /server/src/tests/integration/test_postgres.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | import logging 4 | from pathlib import Path 5 | from peewee import PeeweeException, PostgresqlDatabase 6 | 7 | from services.database import Database 8 | from services.models import Settings, database as db_proxy, MODELS, Media, MediaResearch 9 | from services.settings import SettingsService 10 | from services.research import ResearchService 11 | 12 | # Configure basic logging for tests 13 | logging.basicConfig(level=logging.DEBUG) 14 | logger = logging.getLogger(__name__) 15 | 16 | # --- Test Configuration --- 17 | # IMPORTANT: These should point to a TEST PostgreSQL instance and database. 18 | # DO NOT run these tests against a production database. 19 | TEST_PG_HOST = os.environ.get("POSTGRES_TEST_HOST", "localhost") 20 | TEST_PG_PORT = os.environ.get("POSTGRES_TEST_PORT", "5432") 21 | TEST_PG_USER = os.environ.get("POSTGRES_TEST_USER", "testuser") 22 | TEST_PG_PASSWORD = os.environ.get("POSTGRES_TEST_PASSWORD", "testpassword") 23 | TEST_PG_DBNAME = os.environ.get("POSTGRES_TEST_DBNAME", "discovarr_test_db") 24 | 25 | @pytest.fixture(scope="function") 26 | def postgres_db_setup(monkeypatch, tmp_path: Path, caplog): 27 | """ 28 | Fixture to set up a PostgreSQL database for testing. 29 | It sets environment variables for PostgreSQL connection, 30 | initializes the database, creates tables, and cleans up afterwards. 
31 | It also monkeypatches pathlib.Path to redirect "/backups" to a temporary directory. 32 | """ 33 | # Set caplog level early to capture logs from Database initialization 34 | caplog.set_level(logging.INFO) 35 | 36 | temp_sqlite_db_for_fallback = tmp_path / "dummy_sqlite_for_pg_test.db" 37 | temp_backup_dir_for_test = tmp_path / "test_fixture_backups_pg" 38 | 39 | # Monkeypatch pathlib.Path.__init__ to redirect "/backups" 40 | original_path_init = Path.__init__ 41 | 42 | def mocked_path_init(self_path, *args, **kwargs): 43 | if args and str(args[0]) == "/backups": 44 | # Call original __init__ with the temporary backup path 45 | original_path_init(self_path, temp_backup_dir_for_test) 46 | # BackupService will call mkdir on this path, so we don't strictly need to do it here, 47 | # but it's good practice if the patch needs to ensure existence. 48 | # temp_backup_dir_for_test.mkdir(parents=True, exist_ok=True) 49 | else: 50 | original_path_init(self_path, *args, **kwargs) 51 | 52 | monkeypatch.setattr(Path, "__init__", mocked_path_init) 53 | 54 | monkeypatch.setenv("DISCOVARR_DATABASE", "postgres") 55 | monkeypatch.setenv("POSTGRES_HOST", TEST_PG_HOST) 56 | monkeypatch.setenv("POSTGRES_PORT", TEST_PG_PORT) 57 | monkeypatch.setenv("POSTGRES_USER", TEST_PG_USER) 58 | monkeypatch.setenv("POSTGRES_PASSWORD", TEST_PG_PASSWORD) 59 | monkeypatch.setenv("POSTGRES_DBNAME", TEST_PG_DBNAME) 60 | 61 | # Ensure the test database exists or can be created 62 | # This part is tricky as Database.__init__ tries to create it. 63 | # For a robust test, the test DB should ideally be managed externally 64 | # or by a more sophisticated fixture. 65 | 66 | db_instance = None 67 | try: 68 | # Initialize Database, which connects and creates tables 69 | # The Database class itself will attempt to create the DB if it doesn't exist. 70 | db_instance = Database(db_path=str(temp_sqlite_db_for_fallback)) 71 | 72 | # Ensure we are connected to Postgres 73 | if db_instance.db_type != "postgres": 74 | pytest.skip("PostgreSQL not configured or connection failed, skipping test.") 75 | 76 | # At this point, tables should be created by Database.__init__ 77 | # including the Settings table. 78 | # SettingsService._initialize_settings() is called by Discovarr, 79 | # but Database._run_migrations and _add_default_tasks might interact with settings. 80 | # For this test, we'll directly initialize settings after DB setup. 
81 | settings_service = SettingsService() 82 | settings_service._initialize_settings() 83 | 84 | yield db_instance 85 | 86 | except PeeweeException as e: 87 | logger.error(f"PeeweeException during PostgreSQL test setup: {e}", exc_info=True) 88 | pytest.skip(f"Skipping PostgreSQL test due to PeeweeException: {e}") 89 | except RuntimeError as e: # Catch RuntimeError from Database connection failure 90 | logger.error(f"RuntimeError during PostgreSQL test setup: {e}", exc_info=True) 91 | pytest.skip(f"Skipping PostgreSQL test due to RuntimeError: {e}") 92 | if db_instance and db_proxy.obj: 93 | try: 94 | # Connect to a maintenance database (e.g., 'postgres') to drop the test database 95 | maintenance_db_conn = None 96 | try: 97 | maintenance_db_conn = PostgresqlDatabase( 98 | "postgres", # Or 'template1' 99 | user=TEST_PG_USER, 100 | password=TEST_PG_PASSWORD, 101 | host=TEST_PG_HOST, 102 | port=int(TEST_PG_PORT) # Ensure port is an integer 103 | ) 104 | maintenance_db_conn.connect() 105 | # Terminate other connections to the database before dropping 106 | # This might require superuser privileges or specific grants for the test user. 107 | # Use with caution and ensure the test user has minimal necessary privileges. 108 | maintenance_db_conn.execute_sql(f"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '{TEST_PG_DBNAME}' AND pid <> pg_backend_pid();") 109 | maintenance_db_conn.execute_sql(f"DROP DATABASE IF EXISTS \"{TEST_PG_DBNAME}\";") 110 | logger.info(f"Successfully dropped PostgreSQL test database: {TEST_PG_DBNAME}") 111 | except Exception as drop_db_e: 112 | logger.error(f"Error dropping PostgreSQL test database '{TEST_PG_DBNAME}': {drop_db_e}", exc_info=True) 113 | finally: 114 | if maintenance_db_conn and not maintenance_db_conn.is_closed(): 115 | maintenance_db_conn.close() 116 | logger.info("Closed connection to maintenance database.") 117 | except Exception as cleanup_e: 118 | logger.error(f"Error during PostgreSQL test cleanup (table drop or initial close): {cleanup_e}", exc_info=True) 119 | 120 | def test_postgres_settings_initialization(postgres_db_setup): 121 | """ 122 | Tests that default settings are initialized in the PostgreSQL database. 123 | """ 124 | db_instance = postgres_db_setup # Get the initialized Database instance from the fixture 125 | assert db_instance.db_type == "postgres", "Database type should be PostgreSQL" 126 | 127 | # Check if a known default setting exists 128 | app_setting = Settings.get_or_none((Settings.group == "app") & (Settings.name == "recent_limit")) 129 | assert app_setting is not None, "Default 'app.recent_limit' setting should exist" 130 | assert app_setting.value == str(SettingsService.DEFAULT_SETTINGS["app"]["recent_limit"]["value"]), \ 131 | "Default 'app.recent_limit' value should match" 132 | 133 | logger.info("PostgreSQL settings initialization test passed.") 134 | 135 | @pytest.mark.integration_live # Mark this as a live integration test 136 | def test_postgres_backup(postgres_db_setup): 137 | """ 138 | Tests that the PostgreSQL database can be backed up using BackupService. 
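    Example invocation (placeholder values; assumes the 'integration_live'
    marker is registered in your pytest configuration):

        POSTGRES_TEST_HOST=localhost POSTGRES_TEST_USER=testuser \
        POSTGRES_TEST_PASSWORD=testpassword pytest -m integration_live \
        src/tests/integration/test_postgres.py::test_postgres_backup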
139 |     """
140 |     db_instance = postgres_db_setup  # Get the initialized Database instance from the fixture
141 |     assert db_instance.db_type == "postgres", "Database type should be PostgreSQL for backup test"
142 | 
143 |     backup_path_str = db_instance.backup_service.backup_db(name="test_backup")
144 | 
145 |     assert backup_path_str is not None, "Backup should return a valid path on success"
146 |     assert Path(backup_path_str).exists(), "Backup file should exist at the returned path"
147 | 
--------------------------------------------------------------------------------
/client/src/App.vue:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/server/src/providers/sonarr.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from typing import Optional, Dict, Any, List
4 | from base.request_provider_base import RequestProviderBase
5 | from services.response import APIResponse
6 | from services.models import SettingType  # Import SettingType
7 | 
8 | class SonarrProvider(RequestProviderBase):
9 |     PROVIDER_NAME = "sonarr"
10 | 
11 |     def __init__(self, url: str, api_key: str):
12 |         """Initialize Sonarr client.
13 | 
14 |         Args:
15 |             url (str): The URL of your Sonarr server (e.g., "http://your-sonarr-server:8989")
16 |             api_key (str): Your Sonarr API key
17 |         """
18 |         super().__init__(url=url, api_key=api_key, api_key_header_name="X-Api-Key", api_base_path="api/v3")
19 |         self.logger = logging.getLogger(__name__)
20 |         # self.url, self.api_key, and self.headers are now managed by the Api base class
21 | 
22 |     def lookup_media(self, tmdb_id: str, media_type: Optional[str] = "tv") -> APIResponse:
23 |         """Look up series details using a TMDB ID.
24 | 
25 |         Args:
26 |             tmdb_id (str): The TMDB ID of the series
27 | 
28 |         Returns:
29 |             APIResponse: An APIResponse object. If successful, `data` contains series details.
30 |         """
31 |         lookup_params = {"term": f"tmdb:{tmdb_id}"}
32 |         api_response = self._make_request("GET", "series/lookup", params=lookup_params)
33 | 
34 |         if not api_response.success:
35 |             return api_response  # Propagate the error response
36 | 
37 |         # Sonarr specific error handling for list of errors if they were returned with a 2xx status
38 |         # but are actually application-level errors (though usually these come with 4xx/5xx).
39 |         # The new Api class puts the parsed JSON error directly into error['details'] for HTTPError.
40 |         # For successful responses, if Sonarr returns a list of error objects in `data`,
41 |         # we might want to format them here or ensure the calling code handles it.
42 |         # However, `_make_request` in the `Api` class already handles JSON decoding for success.
43 |         # If Sonarr returns a list of errors with a 200 OK, it would be in `api_response.data`.
44 |         # The current logic below assumes `api_response.data` is the list of series for a successful lookup.
45 | 
46 |         # Check if the data from a successful call is valid
47 |         if not api_response.data or not isinstance(api_response.data, list) or len(api_response.data) == 0:
48 |             msg = f"Series with TMDB ID {tmdb_id} not found in Sonarr's lookup or lookup returned empty/invalid."
49 |             self.logger.warning(msg)
50 |             return APIResponse(success=False, message=msg, status_code=404, error={"details": "Lookup returned no results."})
51 | 
52 |         # If successful and data is valid, update the data field with the first series
53 |         api_response.data = api_response.data[0]
54 |         return api_response
55 | 
56 |     def add_media(self, tmdb_id: str, quality_profile_id: int, root_dir_path: str = "/tv",
57 |                   language_profile_id: int = 1, season_folder: bool = True,
58 |                   monitor: bool = True, search_for_missing: bool = True) -> APIResponse:
59 |         """Add a series to Sonarr using a TMDB ID.
60 | 
61 |         Args:
62 |             tmdb_id (str): The TMDB ID of the series to add
63 |             quality_profile_id (int): The ID of the quality profile to use
64 |             root_dir_path (str): The Root Directory path in Sonarr. Defaults to "/tv"
65 |             language_profile_id (int, optional): The ID of the language profile. Defaults to 1=English
66 |             season_folder (bool, optional): Whether to create season folders. Defaults to True
67 |             monitor (bool, optional): Whether to monitor the series. Defaults to True
68 |             search_for_missing (bool, optional): Whether to search for missing episodes. Defaults to True
69 | 
70 |         Returns:
71 |             APIResponse: An APIResponse object. If successful, `data` contains the Sonarr response.
72 |         """
73 |         lookup_response = self.lookup_media(tmdb_id)
74 |         if not lookup_response.success:
75 |             return lookup_response  # Propagate error from lookup
76 | 
77 |         series_data = lookup_response.data  # This is now the series details dict
78 | 
79 |         # Prepare the payload with series details
80 |         data = {
81 |             **series_data,
82 |             "title": series_data["title"],
83 |             "tmdbId": tmdb_id,
84 |             "qualityProfileId": quality_profile_id,
85 |             "rootFolderPath": root_dir_path,
86 |             "languageProfileId": language_profile_id,
87 |             "seasonFolder": season_folder,
88 |             "monitored": monitor,
89 |             "addOptions": {
90 |                 "searchForMissingEpisodes": search_for_missing,
91 |             },
92 |             "titleSlug": series_data["titleSlug"],
93 |             "images": series_data["images"],
94 |             "seasons": series_data["seasons"],
95 |         }
96 | 
97 |         self.logger.info(f"Sonarr request: {json.dumps(data, indent=2)}")
98 |         return self._make_request("POST", "series", data=data)
99 | 
100 |     def delete_media(self, id: str) -> APIResponse:
101 |         self.logger.info(f"Deleting Sonarr series with Sonarr ID: {id}")
102 |         return self._make_request("DELETE", f"series/{id}")
103 | 
104 |     def get_quality_profiles(self, default_profile_id: Optional[int] = None) -> APIResponse:
105 |         """Get all quality profiles configured in Sonarr with their allowed qualities.
106 | 
107 |         Returns:
108 |             APIResponse: An APIResponse object. If successful, `data` contains a list of simplified quality profiles.
109 | Each profile contains: 110 | - id: The profile ID 111 | - name: The profile name 112 | - allowed_qualities: List of allowed quality names 113 | """ 114 | profiles_response = self._make_request("GET", "qualityprofile") 115 | if not profiles_response.success: 116 | return profiles_response # Propagate error 117 | 118 | if not profiles_response.data or not isinstance(profiles_response.data, list): 119 | self.logger.warning("No quality profiles data received or data is not a list.") 120 | return APIResponse(success=False, message="No quality profiles data received or in unexpected format.", status_code=profiles_response.status_code) 121 | 122 | simplified_profiles = [] 123 | for profile in profiles_response.data: 124 | allowed_qualities = [] 125 | for item in profile.get('items', []): 126 | # Check if the item has a direct allowed flag 127 | if item.get('allowed', False): 128 | # Try to get the quality name, fallback to item name if available 129 | quality_name = (item.get('quality', {}).get('name') or 130 | item.get('name') or 131 | f"Resolution {item.get('quality', {}).get('resolution')}p") 132 | allowed_qualities.append(quality_name) 133 | 134 | simplified_profiles.append({ 135 | 'id': profile['id'], 136 | 'name': profile['name'], 137 | 'allowed_qualities': allowed_qualities, 138 | 'is_default': profile['id'] == default_profile_id if default_profile_id is not None else False 139 | }) 140 | 141 | return APIResponse(success=True, data=simplified_profiles, status_code=profiles_response.status_code) 142 | 143 | @classmethod 144 | def get_default_settings(cls) -> Dict[str, Dict[str, Any]]: 145 | """ 146 | Returns the default settings for the Sonarr provider. 147 | """ 148 | return { 149 | "enabled": {"value": True, "type": SettingType.BOOLEAN, "description": "Enable or disable Sonarr integration."}, 150 | "url": {"value": "http://sonarr:8989", "type": SettingType.URL, "description": "Sonarr server URL", "required": True}, 151 | "api_key": {"value": None, "type": SettingType.STRING, "description": "Sonarr API key", "required": True}, 152 | "default_quality_profile_id": {"value": None, "type": SettingType.INTEGER, "description": "Sonarr Default quality profile ID"}, # Corrected description from Radarr to Sonarr 153 | "root_dir_path": {"value": "/tv", "type": SettingType.STRING, "description": "Root directory path for Sonarr"}, 154 | "base_provider": {"value": "request", "type": SettingType.STRING, "show": False, "description": "Base Provider Type."}, 155 | } 156 | -------------------------------------------------------------------------------- /server/src/tests/integration/test_trakt_provider.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | import json 4 | 5 | from providers.trakt import TraktProvider 6 | from services.models import ItemsFiltered, LibraryUser # Import LibraryUser 7 | from tests.integration.base.base_live_library_provider_tests import BaseLiveLibraryProviderTests 8 | 9 | 10 | # --- Fixtures --- 11 | 12 | @pytest.fixture(scope="module") 13 | def live_trakt_client_id() -> str: 14 | """Retrieves Trakt Client ID from environment variable.""" 15 | client_id = os.environ.get("TRAKT_TEST_CLIENT_ID") 16 | if not client_id: 17 | pytest.skip("TRAKT_TEST_CLIENT_ID environment variable not set. 
Skipping live Trakt tests.") 18 | return client_id 19 | 20 | @pytest.fixture(scope="module") 21 | def live_trakt_client_secret() -> str: 22 | """Retrieves Trakt Client Secret from environment variable.""" 23 | secret = os.environ.get("TRAKT_TEST_CLIENT_SECRET") 24 | if not secret: 25 | pytest.skip("TRAKT_TEST_CLIENT_SECRET environment variable not set. Skipping live Trakt tests.") 26 | return secret 27 | 28 | @pytest.fixture(scope="module") 29 | def live_trakt_redirect_uri() -> str: 30 | """Retrieves Trakt Redirect URI from environment variable, defaulting for device auth.""" 31 | return os.environ.get("TRAKT_TEST_REDIRECT_URI", "urn:ietf:wg:oauth:2.0:oob") 32 | 33 | @pytest.fixture(scope="module") 34 | def live_trakt_initial_authorization() -> dict: 35 | """ 36 | Loads pre-obtained Trakt authorization details from an environment variable. 37 | The TRAKT_TEST_AUTH_JSON should be a JSON string containing the full authorization dict. 38 | """ 39 | auth_json_str = os.environ.get("TRAKT_TEST_AUTH_JSON") 40 | if not auth_json_str: 41 | pytest.skip("TRAKT_TEST_AUTH_JSON environment variable not set. Skipping live Trakt tests.") 42 | try: 43 | auth_dict = json.loads(auth_json_str) 44 | required_keys = ["access_token", "refresh_token", "created_at", "expires_in", "token_type", "scope"] 45 | if not all(key in auth_dict for key in required_keys): 46 | pytest.fail(f"TRAKT_TEST_AUTH_JSON is missing one or more required keys: {required_keys}") 47 | return auth_dict 48 | except json.JSONDecodeError: 49 | pytest.fail("TRAKT_TEST_AUTH_JSON is not valid JSON.") 50 | return {} 51 | 52 | # --- Test Cases --- 53 | 54 | @pytest.mark.integration_live # Mark tests that hit the live API 55 | class TestTraktProviderLive(BaseLiveLibraryProviderTests): # Inherit from base 56 | """Groups live integration tests for TraktProvider.""" 57 | 58 | # Override the live_provider fixture from the base class and make it module-scoped 59 | @pytest.fixture(scope="module") 60 | def live_provider(self, live_trakt_client_id, live_trakt_client_secret, live_trakt_redirect_uri, live_trakt_initial_authorization) -> TraktProvider: 61 | provider = TraktProvider( 62 | client_id=live_trakt_client_id, 63 | client_secret=live_trakt_client_secret, 64 | redirect_uri=live_trakt_redirect_uri, 65 | initial_authorization=live_trakt_initial_authorization 66 | ) 67 | assert provider.authorization is not None, "Provider authorization should be set via initial_authorization." 
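        # Illustrative shape of TRAKT_TEST_AUTH_JSON (placeholder values; the required
        # keys are the ones checked in the live_trakt_initial_authorization fixture):
        #   {"access_token": "...", "refresh_token": "...", "created_at": 1700000000,
        #    "expires_in": 7776000, "token_type": "bearer", "scope": "public"}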
68 | return provider 69 | 70 | # Specific Trakt assertions for get_users can remain if they check Trakt-specific details 71 | def test_get_users_live_specific_trakt_assertions(self, live_provider: TraktProvider): 72 | """Tests fetching the authenticated user's details.""" 73 | users = live_provider.get_users() 74 | assert users is not None, "get_users should return a list, not None" 75 | assert isinstance(users, list) 76 | assert len(users) > 0, "Expected at least one user (the authenticated user)" # TraktProvider returns a list 77 | 78 | user = users[0] 79 | assert isinstance(user, LibraryUser), "Expected user object to be an instance of LibraryUser" 80 | assert hasattr(user, 'id') and isinstance(user.id, str), "User 'id' (slug) should exist and be a string" 81 | assert hasattr(user, 'name') and isinstance(user.name, str), "User 'name' (username) should exist and be a string" 82 | assert hasattr(user, 'thumb'), "User 'thumb' attribute should exist (can be None or str)" 83 | assert user.source_provider == live_provider.PROVIDER_NAME 84 | 85 | # Specific Trakt assertions for get_user_by_name 86 | def test_get_user_by_name_live_specific_trakt_assertions(self, live_provider: TraktProvider): 87 | """Tests fetching a user by their name (should be the authenticated user).""" 88 | auth_user_list = live_provider.get_users() 89 | assert auth_user_list and len(auth_user_list) > 0, "Could not get authenticated user for this test" 90 | authenticated_username = auth_user_list[0].name # Access .name attribute 91 | 92 | user = live_provider.get_user_by_name(authenticated_username) 93 | assert user is not None, f"User '{authenticated_username}' should be found" 94 | assert isinstance(user, LibraryUser), "Expected user object to be an instance of LibraryUser" 95 | assert user.name == authenticated_username 96 | assert hasattr(user, 'id') 97 | 98 | non_existent_user = live_provider.get_user_by_name("a_user_that_REALLY_does_not_exist_12345abc") 99 | assert non_existent_user is None, "Non-existent user should return None" 100 | 101 | # The following tests for get_items_filtered are specific to Trakt's data transformation 102 | # and should remain. They use the `live_provider` fixture. 103 | def test_get_items_filtered_from_live_history(self, live_provider: TraktProvider): 104 | """Tests filtering of live recently watched items.""" 105 | users = live_provider.get_users() 106 | assert users and len(users) > 0 107 | user_id_slug = users[0].id 108 | 109 | raw_watched_items = live_provider.get_recently_watched(user_id=user_id_slug, limit=10) # user_id is slug 110 | # raw_watched_items is already List[ItemsFiltered] as get_recently_watched calls get_items_filtered internally. 111 | assert raw_watched_items is not None 112 | 113 | if not raw_watched_items: 114 | pytest.skip("No recently watched items found on Trakt for filtering test.") 115 | 116 | # No need to call get_items_filtered again. Assert directly on raw_watched_items. 117 | assert isinstance(raw_watched_items, list) 118 | 119 | item = raw_watched_items[0] 120 | assert isinstance(item, ItemsFiltered) 121 | assert item.name is not None 122 | assert item.id is not None 123 | assert item.type in ['movie', 'tv', None] 124 | assert hasattr(item, 'last_played_date') 125 | assert hasattr(item, 'play_count') 126 | assert item.is_favorite is True # This assertion might fail if a history item is also rated >= 8 by the user. 
127 | 128 | def test_get_items_filtered_from_live_history_by_name(self, live_provider: TraktProvider): 129 | """Tests filtering of live recently watched items, returning only names.""" 130 | users = live_provider.get_users() 131 | assert users and len(users) > 0 132 | user_id_slug = users[0].id 133 | 134 | raw_watched_items = live_provider.get_recently_watched(user_id=user_id_slug, limit=10) # user_id is slug 135 | # raw_watched_items is List[ItemsFiltered] 136 | assert raw_watched_items is not None 137 | 138 | if not raw_watched_items: 139 | pytest.skip("No recently watched items found on Trakt for filtering by name test.") 140 | 141 | # To get names from the List[ItemsFiltered], iterate through it. 142 | # The get_items_filtered method with attribute_filter="Name" expects raw Trakt objects. 143 | # If you need to test that specific path of get_items_filtered, you'd need to mock raw Trakt objects. 144 | # For this test, let's assume we want names from the already filtered items. 145 | filtered_names = [item.name for item in raw_watched_items if item.name] 146 | assert isinstance(filtered_names, list) 147 | 148 | if filtered_names: 149 | item_name = filtered_names[0] 150 | assert isinstance(item_name, str), "Expected a list of strings (names)" 151 | else: 152 | live_provider.logger.info("No names returned after filtering, but test passed structurally.") 153 | 154 | def test_get_all_items_filtered_as_objects(self, live_provider: TraktProvider): 155 | pytest.skip("TraktProvider.get_all_items_filtered is not yet implemented.") 156 | 157 | def test_get_all_items_filtered_as_names(self, live_provider: TraktProvider): 158 | pytest.skip("TraktProvider.get_all_items_filtered is not yet implemented.") 159 | -------------------------------------------------------------------------------- /server/src/tests/integration/test_radarr_provider.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | from tests.integration.base.base_live_request_provider_tests import BaseLiveRequestProviderTests 4 | from providers.radarr import RadarrProvider 5 | from services.response import APIResponse 6 | 7 | # Mark all tests in this module as 'live' to be run with `pytest -m live` 8 | pytestmark = pytest.mark.live 9 | 10 | class TestLiveRadarrProvider(BaseLiveRequestProviderTests): 11 | """ 12 | Live integration tests for the RadarrProvider. 13 | 14 | These tests require a running Radarr instance and are marked as 'live'. 15 | They use environment variables for configuration: 16 | - RADARR_TEST_URL: URL of the Radarr server. 17 | - RADARR_TEST_API_KEY: API key for the Radarr server. 18 | """ 19 | 20 | @pytest.fixture(scope="class") 21 | def live_provider(self) -> RadarrProvider: 22 | """ 23 | Provides a live instance of the RadarrProvider, skipping tests if not configured. 
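        Example configuration (placeholder values):

            export RADARR_TEST_URL=http://localhost:7878
            export RADARR_TEST_API_KEY=your-radarr-api-key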
24 | """ 25 | radarr_url = os.getenv("RADARR_TEST_URL") 26 | radarr_api_key = os.getenv("RADARR_TEST_API_KEY") 27 | 28 | if not radarr_url or not radarr_api_key: 29 | pytest.skip("RADARR_TEST_URL and RADARR_TEST_API_KEY environment variables are required for live tests.") 30 | 31 | return RadarrProvider(url=radarr_url, api_key=radarr_api_key) 32 | 33 | @pytest.fixture(scope="class") 34 | def movie_for_add_delete_test_tmdb_id(self) -> int: 35 | """Provides the TMDB ID for a movie to be added and then deleted.""" 36 | return 597 # Titanic 37 | 38 | @pytest.fixture(scope="class") 39 | def movie_for_existing_test_tmdb_id(self) -> int: 40 | """Provides the TMDB ID for a movie to be added twice.""" 41 | return 8587 # The Lion King 42 | 43 | def test_add_and_delete_movie(self, live_provider: RadarrProvider, movie_for_add_delete_test_tmdb_id: int): 44 | """ 45 | Tests the full lifecycle of adding a movie and then deleting it. 46 | This ensures add_media and delete_media are working correctly. 47 | """ 48 | # 1. Get necessary info for adding the movie 49 | profiles_response = live_provider.get_quality_profiles() 50 | assert profiles_response.success, "Failed to get quality profiles before adding movie." 51 | quality_profiles = profiles_response.data 52 | assert len(quality_profiles) > 0, "No quality profiles found in Radarr instance." 53 | first_profile_id = quality_profiles[0]['id'] 54 | 55 | default_settings = live_provider.get_default_settings() 56 | root_dir_path = default_settings['root_dir_path']['value'] 57 | 58 | # 2. Add the movie 59 | add_response = live_provider.add_media( 60 | tmdb_id=movie_for_add_delete_test_tmdb_id, 61 | quality_profile_id=first_profile_id, 62 | root_dir_path=root_dir_path, 63 | search_for_movie=False # Don't search to make the test faster and less resource-intensive 64 | ) 65 | 66 | assert isinstance(add_response, APIResponse) 67 | assert add_response.success, f"Failed to add movie: {add_response.message} - {add_response.error}" 68 | 69 | added_movie_data = add_response.data 70 | assert isinstance(added_movie_data, dict) 71 | assert 'id' in added_movie_data, "Response for adding a movie did not contain a Radarr ID." 72 | radarr_movie_id = added_movie_data['id'] 73 | 74 | # 3. Delete the movie using its Radarr ID 75 | delete_response = live_provider.delete_media(id=radarr_movie_id) 76 | 77 | assert isinstance(delete_response, APIResponse) 78 | assert delete_response.success, f"Failed to delete movie: {delete_response.message}" 79 | 80 | # 4. Verify the movie is actually gone 81 | verify_delete_response = live_provider._make_request("GET", f"movie/{radarr_movie_id}") 82 | assert not verify_delete_response.success and verify_delete_response.status_code == 404, \ 83 | f"Movie was not successfully deleted from Radarr. Status: {verify_delete_response.status_code}, Message: {verify_delete_response.message}" 84 | 85 | def test_add_existing_movie(self, live_provider: RadarrProvider, movie_for_existing_test_tmdb_id: int): 86 | """ 87 | Tests attempting to add a movie that already exists in Radarr. 88 | """ 89 | # 1. Get necessary info for adding the movie 90 | profiles_response = live_provider.get_quality_profiles() 91 | assert profiles_response.success, "Failed to get quality profiles before adding movie." 92 | quality_profiles = profiles_response.data 93 | assert len(quality_profiles) > 0, "No quality profiles found in Radarr instance." 
94 |         first_profile_id = quality_profiles[0]['id']
95 | 
96 |         default_settings = live_provider.get_default_settings()
97 |         root_dir_path = default_settings['root_dir_path']['value']
98 | 
99 |         radarr_movie_id = None
100 |         try:
101 |             # 2. Add the movie for the first time
102 |             live_provider.logger.info(f"Attempting to add movie {movie_for_existing_test_tmdb_id} for the first time.")
103 |             first_add_response = live_provider.add_media(
104 |                 tmdb_id=movie_for_existing_test_tmdb_id,
105 |                 quality_profile_id=first_profile_id,
106 |                 root_dir_path=root_dir_path,
107 |                 search_for_movie=False
108 |             )
109 |             assert first_add_response.success, f"First attempt to add movie failed: {first_add_response.message} - {first_add_response.error}"
110 |             radarr_movie_id = first_add_response.data['id']
111 |             live_provider.logger.info(f"Successfully added movie {movie_for_existing_test_tmdb_id} with Radarr ID: {radarr_movie_id}")
112 | 
113 |             # 3. Attempt to add the same movie again
114 |             live_provider.logger.info(f"Attempting to add movie {movie_for_existing_test_tmdb_id} again (should fail).")
115 |             second_add_response = live_provider.add_media(
116 |                 tmdb_id=movie_for_existing_test_tmdb_id,
117 |                 quality_profile_id=first_profile_id,
118 |                 root_dir_path=root_dir_path,
119 |                 search_for_movie=False
120 |             )
121 | 
122 |             # 4. Assert that the second add fails with a 400 Bad Request
123 |             assert not second_add_response.success, f"Second attempt to add movie should have failed but succeeded: {second_add_response.message}"
124 |             assert second_add_response.status_code == 400, f"Expected 400 Bad Request for existing movie, got {second_add_response.status_code}"
125 |             assert "already been added" in str(second_add_response.error["details"]), f"Expected 'already been added' in message, got: {second_add_response.message}"
126 |             live_provider.logger.info(f"Second add attempt correctly failed with status {second_add_response.status_code} and message: {second_add_response.message}")
127 | 
128 |         finally:
129 |             # 5. Clean up: Delete the movie if it was added
130 |             if radarr_movie_id:
131 |                 live_provider.logger.info(f"Cleaning up: Deleting movie with Radarr ID: {radarr_movie_id}")
132 |                 delete_response = live_provider.delete_media(id=radarr_movie_id)
133 |                 assert delete_response.success, f"Failed to delete movie during cleanup: {delete_response.message}"
134 |                 live_provider.logger.info(f"Successfully deleted movie {radarr_movie_id} during cleanup.")
135 |             else:
136 |                 live_provider.logger.warning(f"No Radarr movie ID obtained, skipping cleanup for TMDB ID {movie_for_existing_test_tmdb_id}.")
137 | 
138 |     def test_get_quality_profiles_structure(self, live_provider: RadarrProvider):
139 |         """
140 |         Tests the detailed structure of the quality profiles returned by Radarr.
141 |         """
142 |         # The base test `test_get_quality_profiles` already checks for basic presence.
143 |         # This test verifies the simplified structure returned by the provider.
144 |         response = live_provider.get_quality_profiles()
145 |         assert response.success, "API call to get quality profiles failed."
146 | 
147 |         profiles = response.data
148 |         assert isinstance(profiles, list)
149 |         assert len(profiles) > 0, "Expected at least one quality profile from Radarr."
150 | 151 | profile = profiles[0] 152 | assert 'id' in profile and isinstance(profile['id'], int) 153 | assert 'name' in profile and isinstance(profile['name'], str) 154 | assert 'allowed_qualities' in profile and isinstance(profile['allowed_qualities'], list) 155 | assert 'is_default' in profile and isinstance(profile['is_default'], bool) 156 | 157 | if profile['allowed_qualities']: 158 | assert isinstance(profile['allowed_qualities'][0], str) -------------------------------------------------------------------------------- /server/src/tests/integration/test_sonarr_provider.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | from tests.integration.base.base_live_request_provider_tests import BaseLiveRequestProviderTests 4 | from providers.sonarr import SonarrProvider 5 | from services.response import APIResponse 6 | 7 | # Mark all tests in this module as 'live' to be run with `pytest -m live` 8 | pytestmark = pytest.mark.live 9 | 10 | class TestLiveSonarrProvider(BaseLiveRequestProviderTests): 11 | """ 12 | Live integration tests for the SonarrProvider. 13 | 14 | These tests require a running Sonarr instance and are marked as 'live'. 15 | They use environment variables for configuration: 16 | - SONARR_TEST_URL: URL of the Sonarr server. 17 | - SONARR_TEST_API_KEY: API key for the Sonarr server. 18 | """ 19 | 20 | @pytest.fixture(scope="class") 21 | def live_provider(self) -> SonarrProvider: 22 | """ 23 | Provides a live instance of the SonarrProvider, skipping tests if not configured. 24 | """ 25 | sonarr_url = os.getenv("SONARR_TEST_URL") 26 | sonarr_api_key = os.getenv("SONARR_TEST_API_KEY") 27 | 28 | if not sonarr_url or not sonarr_api_key: 29 | pytest.skip("SONARR_TEST_URL and SONARR_TEST_API_KEY environment variables are required for live tests.") 30 | 31 | return SonarrProvider(url=sonarr_url, api_key=sonarr_api_key) 32 | 33 | @pytest.fixture(scope="class") 34 | def tv_for_add_delete_test_tmdb_id(self) -> int: 35 | """Provides the TMDB ID for a TV show to be added and then deleted.""" 36 | return 456 # The Simpsons 37 | 38 | @pytest.fixture(scope="class") 39 | def tv_for_existing_test_tmdb_id(self) -> int: 40 | """Provides the TMDB ID for a TV show to be added twice.""" 41 | return 269 # One Tree Hill 42 | 43 | def test_add_and_delete_tv(self, live_provider: SonarrProvider, tv_for_add_delete_test_tmdb_id: int): 44 | """ 45 | Tests the full lifecycle of adding a series and then deleting it. 46 | This ensures add_media and delete_media are working correctly. 47 | """ 48 | # 1. Get necessary info for adding the series 49 | profiles_response = live_provider.get_quality_profiles() 50 | assert profiles_response.success, "Failed to get quality profiles before adding series." 51 | quality_profiles = profiles_response.data 52 | assert len(quality_profiles) > 0, "No quality profiles found in Sonarr instance." 53 | first_profile_id = quality_profiles[0]['id'] 54 | 55 | default_settings = live_provider.get_default_settings() 56 | root_dir_path = default_settings['root_dir_path']['value'] 57 | 58 | # 2. 
Add the series 59 | add_response = live_provider.add_media( 60 | tmdb_id=tv_for_add_delete_test_tmdb_id, 61 | quality_profile_id=first_profile_id, 62 | root_dir_path=root_dir_path, 63 | search_for_missing=False # Don't search to make the test faster 64 | ) 65 | 66 | assert isinstance(add_response, APIResponse) 67 | assert add_response.success, f"Failed to add series: {add_response.message} - {add_response.error}" 68 | 69 | added_series_data = add_response.data 70 | assert isinstance(added_series_data, dict) 71 | assert 'id' in added_series_data, "Response for adding a series did not contain a Sonarr ID." 72 | sonarr_series_id = added_series_data['id'] 73 | 74 | # 3. Delete the series using its Sonarr ID 75 | delete_response = live_provider.delete_media(id=sonarr_series_id) 76 | 77 | assert isinstance(delete_response, APIResponse) 78 | assert delete_response.success, f"Failed to delete series: {delete_response.message}" 79 | 80 | # 4. Verify the series is actually gone 81 | verify_delete_response = live_provider._make_request("GET", f"series/{sonarr_series_id}") 82 | assert not verify_delete_response.success and verify_delete_response.status_code == 404, \ 83 | f"Series was not successfully deleted from Sonarr. Status: {verify_delete_response.status_code}, Message: {verify_delete_response.message}" 84 | 85 | def test_add_existing_tv(self, live_provider: SonarrProvider, tv_for_existing_test_tmdb_id: int): 86 | """ 87 | Tests attempting to add a series that already exists in Sonarr. 88 | """ 89 | # 1. Get necessary info for adding the series 90 | profiles_response = live_provider.get_quality_profiles() 91 | assert profiles_response.success, "Failed to get quality profiles before adding series." 92 | quality_profiles = profiles_response.data 93 | assert len(quality_profiles) > 0, "No quality profiles found in Sonarr instance." 94 | first_profile_id = quality_profiles[0]['id'] 95 | 96 | default_settings = live_provider.get_default_settings() 97 | root_dir_path = default_settings['root_dir_path']['value'] 98 | 99 | sonarr_series_id = None 100 | try: 101 | # 2. Add the series for the first time 102 | live_provider.logger.info(f"Attempting to add series {tv_for_existing_test_tmdb_id} for the first time.") 103 | first_add_response = live_provider.add_media( 104 | tmdb_id=tv_for_existing_test_tmdb_id, 105 | quality_profile_id=first_profile_id, 106 | root_dir_path=root_dir_path, 107 | search_for_missing=False 108 | ) 109 | assert first_add_response.success, f"First attempt to add series failed: {first_add_response.message} - {first_add_response.error}" 110 | sonarr_series_id = first_add_response.data['id'] 111 | live_provider.logger.info(f"Successfully added series {tv_for_existing_test_tmdb_id} with Sonarr ID: {sonarr_series_id}") 112 | 113 | # 3. Attempt to add the same series again 114 | live_provider.logger.info(f"Attempting to add series {tv_for_existing_test_tmdb_id} again (should fail).") 115 | second_add_response = live_provider.add_media( 116 | tmdb_id=tv_for_existing_test_tmdb_id, 117 | quality_profile_id=first_profile_id, 118 | root_dir_path=root_dir_path, 119 | search_for_missing=False 120 | ) 121 | 122 | # 4. 
Assert that the second add fails with a 400 Bad Request
123 |             assert not second_add_response.success, f"Second attempt to add series should have failed but succeeded: {second_add_response.message}"
124 |             assert second_add_response.status_code == 400, f"Expected 400 Bad Request for existing series, got {second_add_response.status_code}"
125 |             assert "already been added" in str(second_add_response.error["details"]).lower(), f"Expected 'already been added' in message, got: {second_add_response.message}"
126 |             live_provider.logger.info(f"Second add attempt correctly failed with status {second_add_response.status_code} and message: {second_add_response.message}")
127 | 
128 |         finally:
129 |             # 5. Clean up: Delete the series if it was added
130 |             if sonarr_series_id:
131 |                 live_provider.logger.info(f"Cleaning up: Deleting series with Sonarr ID: {sonarr_series_id}")
132 |                 delete_response = live_provider.delete_media(id=sonarr_series_id)
133 |                 assert delete_response.success, f"Failed to delete series during cleanup: {delete_response.message}"
134 |                 live_provider.logger.info(f"Successfully deleted series {sonarr_series_id} during cleanup.")
135 |             else:
136 |                 live_provider.logger.warning(f"No Sonarr series ID obtained, skipping cleanup for TMDB ID {tv_for_existing_test_tmdb_id}.")
137 | 
138 |     def test_get_quality_profiles_structure(self, live_provider: SonarrProvider):
139 |         """
140 |         Tests the detailed structure of the quality profiles returned by Sonarr.
141 |         """
142 |         # The base test `test_get_quality_profiles` already checks for basic presence.
143 |         # This test verifies the simplified structure returned by the provider.
144 |         response = live_provider.get_quality_profiles()
145 |         assert response.success, "API call to get quality profiles failed."
146 | 
147 |         profiles = response.data
148 |         assert isinstance(profiles, list)
149 |         assert len(profiles) > 0, "Expected at least one quality profile from Sonarr."
150 | 
151 |         profile = profiles[0]
152 |         assert 'id' in profile and isinstance(profile['id'], int)
153 |         assert 'name' in profile and isinstance(profile['name'], str)
154 |         assert 'allowed_qualities' in profile and isinstance(profile['allowed_qualities'], list)
155 |         assert 'is_default' in profile and isinstance(profile['is_default'], bool)
156 | 
157 |         if profile['allowed_qualities']:
158 |             assert isinstance(profile['allowed_qualities'][0], str)
--------------------------------------------------------------------------------
/client/src/views/WatchHistoryView.vue:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/client/src/components/VideoCarousel.vue:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------