├── tests ├── unit │ └── __init__.py ├── __init__.py ├── e2e │ └── __init__.py ├── integration │ ├── __init__.py │ ├── test_health_endpoints.py │ ├── test_account_deletion.py │ ├── test_prompt_endpoints.py │ ├── test_user_endpoints.py │ ├── test_cascade_delete.py │ ├── helpers.py │ ├── test_journal_endpoints.py │ └── test_auth_endpoints.py ├── upgrade │ └── __init__.py ├── lib │ └── __init__.py └── conftest.py ├── app ├── api │ ├── __init__.py │ ├── v1 │ │ ├── __init__.py │ │ ├── endpoints │ │ │ ├── __init__.py │ │ │ ├── security.py │ │ │ └── health.py │ │ └── api.py │ └── dependencies.py ├── core │ ├── __init__.py │ ├── oidc.py │ ├── celery_app.py │ ├── exceptions.py │ └── security.py ├── services │ └── __init__.py ├── schemas │ ├── __init__.py │ ├── analytics.py │ ├── journal.py │ ├── base.py │ ├── tag.py │ ├── auth.py │ ├── mood.py │ ├── prompt.py │ └── entry.py ├── middleware │ ├── __init__.py │ └── csp_middleware.py ├── __init__.py ├── tasks │ └── __init__.py ├── utils │ └── import_export │ │ ├── __init__.py │ │ ├── constants.py │ │ ├── progress_utils.py │ │ ├── date_utils.py │ │ └── id_mapper.py └── models │ ├── __init__.py │ ├── entry_tag_link.py │ ├── tag.py │ ├── base.py │ ├── external_identity.py │ ├── prompt.py │ ├── enums.py │ ├── journal.py │ ├── import_job.py │ ├── analytics.py │ └── mood.py ├── web ├── .last_build_id ├── favicon.png ├── icons │ ├── AppIcon@3x.png │ ├── Icon-192.png │ ├── Icon-512.png │ ├── AppIcon~ipad.png │ ├── AppIcon@2x~ipad.png │ ├── Icon-maskable-192.png │ ├── Icon-maskable-512.png │ └── AppIcon~ios-marketing.png ├── canvaskit │ ├── skwasm.wasm │ ├── canvaskit.wasm │ ├── skwasm_heavy.wasm │ └── chromium │ │ └── canvaskit.wasm ├── version.json ├── assets │ ├── assets │ │ ├── icons │ │ │ └── Icon-512.png │ │ ├── fonts │ │ │ ├── Manrope-Bold.ttf │ │ │ ├── Manrope-Medium.ttf │ │ │ ├── Manrope-Regular.ttf │ │ │ ├── Manrope-SemiBold.ttf │ │ │ ├── NotoSans-Regular.ttf │ │ │ └── NotoEmoji-Regular.ttf │ │ └── images │ │ │ └── journiv-logo.png │ ├── fonts │ │ └── MaterialIcons-Regular.otf │ ├── packages │ │ └── cupertino_icons │ │ │ └── assets │ │ │ └── CupertinoIcons.ttf │ ├── FontManifest.json │ ├── AssetManifest.bin │ └── AssetManifest.bin.json ├── manifest.json └── LICENSE.md ├── .coderabbit.yaml ├── requirements ├── prod.txt ├── test.txt ├── dev.txt └── base.txt ├── init.sql ├── docker-compose.override.old-version.yml ├── alembic ├── script.py.mako ├── versions │ ├── 4fbf758e7995_add_externalidentity_model_for_oidc_.py │ ├── abc123def456_add_user_role_column.py │ ├── def789abc123_convert_user_role_to_enum.py │ ├── 6b2d62d09dd8_add_import_export_job_model.py │ └── 7c52fcc89c83_make_user_name_required.py └── env.py ├── docker-compose.yml ├── scripts ├── docker-entrypoint.sh ├── docker-entrypoint-dev.sh ├── migrate.sh ├── moods.json ├── fix_migration_imports.py ├── setup.sh ├── fresh_migration.sh └── deploy.sh ├── docker-compose.override.ci.yml ├── .github ├── FUNDING.yml └── workflows │ └── docker-release-main.yml ├── pytest.ini ├── docker-compose.dev.sqlite.yml ├── Dockerfile ├── docker-compose.prod.sqlite.yml ├── .dockerignore ├── alembic.ini ├── docker-compose.dev.postgres.yml ├── .gitignore ├── docker-compose.prod.postgres.yml └── README.md /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/api/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | API package. 
3 | """ 4 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Test package initialization 2 | -------------------------------------------------------------------------------- /tests/e2e/__init__.py: -------------------------------------------------------------------------------- 1 | # End-to-end tests package 2 | -------------------------------------------------------------------------------- /web/.last_build_id: -------------------------------------------------------------------------------- 1 | ad5beb76471fc4b688507308cec5da9e -------------------------------------------------------------------------------- /app/api/v1/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | API v1 package. 3 | """ 4 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | # Integration tests package 2 | -------------------------------------------------------------------------------- /app/core/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Core application components. 3 | """ 4 | -------------------------------------------------------------------------------- /tests/upgrade/__init__.py: -------------------------------------------------------------------------------- 1 | """Upgrade testing package for Journiv.""" 2 | -------------------------------------------------------------------------------- /app/api/v1/endpoints/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | API v1 endpoints package. 3 | """ 4 | -------------------------------------------------------------------------------- /app/services/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Service layer for business logic. 3 | """ 4 | -------------------------------------------------------------------------------- /web/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/favicon.png -------------------------------------------------------------------------------- /app/schemas/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pydantic schemas for API requests and responses. 
3 | """ 4 | -------------------------------------------------------------------------------- /.coderabbit.yaml: -------------------------------------------------------------------------------- 1 | reviews: 2 | path_filters: 3 | - "!web/**" # Ignore Flutter web build output 4 | -------------------------------------------------------------------------------- /web/icons/AppIcon@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/AppIcon@3x.png -------------------------------------------------------------------------------- /web/icons/Icon-192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/Icon-192.png -------------------------------------------------------------------------------- /web/icons/Icon-512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/Icon-512.png -------------------------------------------------------------------------------- /web/canvaskit/skwasm.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/canvaskit/skwasm.wasm -------------------------------------------------------------------------------- /web/icons/AppIcon~ipad.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/AppIcon~ipad.png -------------------------------------------------------------------------------- /web/version.json: -------------------------------------------------------------------------------- 1 | {"app_name":"journiv","version":"0.1.0-beta.10","build_number":"1","package_name":"journiv"} -------------------------------------------------------------------------------- /web/canvaskit/canvaskit.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/canvaskit/canvaskit.wasm -------------------------------------------------------------------------------- /web/icons/AppIcon@2x~ipad.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/AppIcon@2x~ipad.png -------------------------------------------------------------------------------- /web/canvaskit/skwasm_heavy.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/canvaskit/skwasm_heavy.wasm -------------------------------------------------------------------------------- /web/icons/Icon-maskable-192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/Icon-maskable-192.png -------------------------------------------------------------------------------- /web/icons/Icon-maskable-512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/Icon-maskable-512.png -------------------------------------------------------------------------------- /requirements/prod.txt: -------------------------------------------------------------------------------- 1 | # Include base 
requirements 2 | -r base.txt 3 | 4 | # Production Server 5 | gunicorn==23.0.0 6 | -------------------------------------------------------------------------------- /web/assets/assets/icons/Icon-512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/icons/Icon-512.png -------------------------------------------------------------------------------- /web/icons/AppIcon~ios-marketing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/icons/AppIcon~ios-marketing.png -------------------------------------------------------------------------------- /web/canvaskit/chromium/canvaskit.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/canvaskit/chromium/canvaskit.wasm -------------------------------------------------------------------------------- /web/assets/assets/fonts/Manrope-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/fonts/Manrope-Bold.ttf -------------------------------------------------------------------------------- /web/assets/assets/images/journiv-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/images/journiv-logo.png -------------------------------------------------------------------------------- /web/assets/assets/fonts/Manrope-Medium.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/fonts/Manrope-Medium.ttf -------------------------------------------------------------------------------- /web/assets/assets/fonts/Manrope-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/fonts/Manrope-Regular.ttf -------------------------------------------------------------------------------- /web/assets/assets/fonts/Manrope-SemiBold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/fonts/Manrope-SemiBold.ttf -------------------------------------------------------------------------------- /web/assets/assets/fonts/NotoSans-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/fonts/NotoSans-Regular.ttf -------------------------------------------------------------------------------- /web/assets/fonts/MaterialIcons-Regular.otf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/fonts/MaterialIcons-Regular.otf -------------------------------------------------------------------------------- /web/assets/assets/fonts/NotoEmoji-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/assets/fonts/NotoEmoji-Regular.ttf -------------------------------------------------------------------------------- 
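A note on the requirements layout above: requirements/prod.txt layers on the shared pins through pip's `-r base.txt` include, so a single install command resolves base plus the production server. A minimal sketch of consuming that layer (the virtualenv path is an assumption for illustration):

    python -m venv .venv                    # hypothetical env location
    . .venv/bin/activate
    pip install -r requirements/prod.txt    # resolves base.txt, then adds gunicorn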
/app/middleware/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Middleware modules. 3 | """ 4 | from .request_logging import RequestLoggingMiddleware 5 | 6 | __all__ = ["RequestLoggingMiddleware"] 7 | -------------------------------------------------------------------------------- /web/assets/packages/cupertino_icons/assets/CupertinoIcons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/journiv/journiv-app/HEAD/web/assets/packages/cupertino_icons/assets/CupertinoIcons.ttf -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Journiv App Backend - A private journal application. 3 | """ 4 | 5 | __version__ = "0.1.0-beta.10" 6 | __author__ = "Swalab Tech" 7 | __description__ = "Private Journal" 8 | -------------------------------------------------------------------------------- /init.sql: -------------------------------------------------------------------------------- 1 | -- Initialize database with extensions 2 | CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; 3 | CREATE EXTENSION IF NOT EXISTS "pg_trgm"; 4 | 5 | -- Create database if it doesn't exist (this will be handled by Docker) 6 | -- CREATE DATABASE journiv_dev; 7 | -------------------------------------------------------------------------------- /app/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Celery tasks for async operations. 3 | """ 4 | from .export_tasks import process_export_job 5 | from .import_tasks import process_import_job 6 | 7 | __all__ = [ 8 | "process_export_job", 9 | "process_import_job", 10 | ] 11 | -------------------------------------------------------------------------------- /tests/lib/__init__.py: -------------------------------------------------------------------------------- 1 | """Shared helpers for Journiv test suites.""" 2 | 3 | from .api import ApiUser, JournivApiClient, JournivApiError, make_api_user 4 | 5 | __all__ = [ 6 | "ApiUser", 7 | "JournivApiClient", 8 | "JournivApiError", 9 | "make_api_user", 10 | ] 11 | -------------------------------------------------------------------------------- /requirements/test.txt: -------------------------------------------------------------------------------- 1 | # Test dependencies 2 | pytest==8.3.5 3 | pytest-asyncio==0.25.2 4 | pytest-cov==5.0.0 5 | pytest-mock==3.15.1 6 | pytest-httpx==0.35.0 7 | 8 | # HTTP testing 9 | httpx==0.28.1 10 | requests==2.32.5 # For upgrade tests (external HTTP calls) 11 | 12 | # Database testing - using psycopg2-binary for compatibility 13 | psycopg2-binary==2.9.11 14 | 15 | # Authentication testing - ensure argon2 backend is available 16 | argon2-cffi==25.1.0 17 | -------------------------------------------------------------------------------- /web/assets/FontManifest.json: -------------------------------------------------------------------------------- 1 | 
[{"family":"MaterialIcons","fonts":[{"asset":"fonts/MaterialIcons-Regular.otf"}]},{"family":"Manrope","fonts":[{"weight":400,"asset":"assets/fonts/Manrope-Regular.ttf"},{"weight":500,"asset":"assets/fonts/Manrope-Medium.ttf"},{"weight":600,"asset":"assets/fonts/Manrope-SemiBold.ttf"},{"weight":700,"asset":"assets/fonts/Manrope-Bold.ttf"}]},{"family":"packages/cupertino_icons/CupertinoIcons","fonts":[{"asset":"packages/cupertino_icons/assets/CupertinoIcons.ttf"}]}] -------------------------------------------------------------------------------- /requirements/dev.txt: -------------------------------------------------------------------------------- 1 | # Include base requirements 2 | -r base.txt 3 | 4 | # Include test requirements 5 | -r test.txt 6 | 7 | # Development conveniences for OIDC/Redis 8 | types-redis>=4.6.0 # Type stubs for mypy/pyright 9 | fakeredis>=2.32,<3 # In-memory Redis for development without real Redis 10 | 11 | # Development Tools (optional - uncomment if needed) 12 | # black==23.11.0 13 | # isort==5.12.0 14 | # flake8==6.1.0 15 | # mypy==1.7.1 16 | # pre-commit==3.5.0 17 | # watchfiles==0.21.0 18 | -------------------------------------------------------------------------------- /docker-compose.override.old-version.yml: -------------------------------------------------------------------------------- 1 | # Docker Compose Override for OLD version in Upgrade Tests 2 | # 3 | # This override is used when starting OLD versions during upgrade testing. 4 | # Older versions may not have celery-worker service, so we scale it to 0. 5 | # 6 | # Usage: 7 | # docker compose -f docker-compose.prod.sqlite.yml \ 8 | # -f docker-compose.override.ci.yml \ 9 | # -f docker-compose.override.old-version.yml \ 10 | # up -d 11 | 12 | services: 13 | # Don't start celery-worker for OLD versions (may not exist in old releases) 14 | celery-worker: 15 | deploy: 16 | replicas: 0 17 | -------------------------------------------------------------------------------- /alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade() -> None: 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | journiv: 3 | image: swalabtech/journiv-app:latest 4 | container_name: journiv 5 | ports: 6 | # If you want to expose the app on a different port, change the left-hand side of the colon 7 | # to the port you want; do not change the 8000 on the right-hand side 8 | - "8000:8000" 9 | environment: 10 | - SECRET_KEY=your-secret-key-here # (REQUIRED) Replace with a strong secret key 11 | - DOMAIN_NAME=192.168.1.1 # (REQUIRED) Your server IP or domain 12 | volumes: 13 | - journiv_data:/data 14 | restart: unless-stopped 15 | 16 | volumes: 17 | journiv_data: 18 | -------------------------------------------------------------------------------- /scripts/docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | echo "Ensuring data directories exist..." 5 | mkdir -p /data/media /data/logs 6 | 7 | echo "Running database migrations in entrypoint script..." 8 | alembic upgrade head 9 | 10 | echo "Seeding initial data in entrypoint script..." 11 | SKIP_DATA_SEEDING=false python -c "from app.core.database import seed_initial_data; seed_initial_data()" 12 | 13 | echo "Starting Gunicorn..." 14 | # Production uses 2 workers for optimal resource usage 15 | # Increase the -w flag if you need higher concurrency 16 | exec gunicorn app.main:app -w 2 -k uvicorn.workers.UvicornWorker --worker-connections 1000 --max-requests 1000 --max-requests-jitter 100 --timeout 120 --access-logfile - -b 0.0.0.0:8000 17 | -------------------------------------------------------------------------------- /web/assets/AssetManifest.bin: -------------------------------------------------------------------------------- 1 | [binary Flutter asset manifest omitted; the same entries appear base64-encoded in web/assets/AssetManifest.bin.json] -------------------------------------------------------------------------------- /app/utils/import_export/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Import/Export utility modules. 
3 | """ 4 | from .id_mapper import IDMapper 5 | from .media_handler import MediaHandler 6 | from .zip_handler import ZipHandler 7 | from .date_utils import parse_datetime, ensure_utc, format_datetime, normalize_datetime 8 | from .validators import validate_import_data, validate_export_data 9 | from .progress_utils import create_throttled_progress_callback 10 | 11 | __all__ = [ 12 | "create_throttled_progress_callback", 13 | "ensure_utc", 14 | "format_datetime", 15 | "IDMapper", 16 | "MediaHandler", 17 | "normalize_datetime", 18 | "parse_datetime", 19 | "validate_export_data", 20 | "validate_import_data", 21 | "ZipHandler", 22 | ] 23 | -------------------------------------------------------------------------------- /docker-compose.override.ci.yml: -------------------------------------------------------------------------------- 1 | # Docker Compose Override for CI/CD Environments 2 | # 3 | # ⚠️ WARNING: DO NOT USE IN PRODUCTION ⚠️ 4 | # 5 | # 6 | # Usage in CI workflows: 7 | # docker compose -f docker-compose.prod.sqlite.yml -f docker-compose.override.ci.yml up -d 8 | # 9 | # Multiple -f flags merge the compose files, with later files overriding earlier ones. 10 | 11 | services: 12 | app: 13 | volumes: 14 | - journiv_ci_data:/data 15 | environment: 16 | RATE_LIMITING_ENABLED: "false" # Disable rate limiting in CI tests 17 | 18 | celery-worker: 19 | volumes: 20 | - journiv_ci_data:/data 21 | depends_on: 22 | - app # Wait for the app to be ready to avoid race conditions in dir creation 23 | 24 | volumes: 25 | journiv_ci_data: 26 | -------------------------------------------------------------------------------- /scripts/docker-entrypoint-dev.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | echo "Ensuring data directories exist..." 5 | mkdir -p /data/media /data/logs 6 | 7 | echo "Running database migrations in entrypoint script..." 8 | alembic upgrade head 9 | 10 | echo "Seeding initial data in entrypoint script..." 11 | SKIP_DATA_SEEDING=false python -c "from app.core.database import seed_initial_data; seed_initial_data()" 12 | 13 | echo "Starting Gunicorn in development mode with hot reload..." 
14 | exec gunicorn app.main:app \ 15 | -w ${GUNICORN_WORKERS:-2} \ 16 | -k uvicorn.workers.UvicornWorker \ 17 | --worker-connections 1000 \ 18 | --max-requests 1000 \ 19 | --max-requests-jitter 100 \ 20 | --timeout ${GUNICORN_TIMEOUT:-120} \ 21 | --access-logfile - \ 22 | -b 0.0.0.0:8000 23 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: swalabtech 4 | buy_me_a_coffee: journiv 5 | patreon: # Replace with a single Patreon username 6 | open_collective: # Replace with a single Open Collective username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 12 | polar: # Replace with a single Polar username 13 | thanks_dev: # Replace with a single thanks.dev username 14 | custom: https://buy.stripe.com/4gM9AVeGk3lE6PbfYRffy00 15 | ko_fi: journiv 16 | -------------------------------------------------------------------------------- /app/models/__init__.py: -------------------------------------------------------------------------------- 1 | # Import all models for easy access 2 | from .analytics import WritingStreak 3 | from .base import BaseModel 4 | from .entry import Entry, EntryMedia 5 | from .entry_tag_link import EntryTagLink 6 | from .export_job import ExportJob 7 | from .external_identity import ExternalIdentity 8 | from .import_job import ImportJob 9 | from .journal import Journal 10 | from .mood import Mood, MoodLog 11 | from .prompt import Prompt 12 | from .tag import Tag 13 | from .user import User, UserSettings 14 | 15 | __all__ = [ 16 | "BaseModel", 17 | "User", 18 | "UserSettings", 19 | "Journal", 20 | "Entry", 21 | "EntryMedia", 22 | "Mood", 23 | "MoodLog", 24 | "Prompt", 25 | "Tag", 26 | "EntryTagLink", 27 | "WritingStreak", 28 | "ExternalIdentity", 29 | "ImportJob", 30 | "ExportJob", 31 | ] 32 | -------------------------------------------------------------------------------- /web/assets/AssetManifest.bin.json: -------------------------------------------------------------------------------- 1 | "DQkHHWFzc2V0cy9mb250cy9NYW5yb3BlLUJvbGQudHRmDAENAQcFYXNzZXQHHWFzc2V0cy9mb250cy9NYW5yb3BlLUJvbGQudHRmBx9hc3NldHMvZm9udHMvTWFucm9wZS1NZWRpdW0udHRmDAENAQcFYXNzZXQHH2Fzc2V0cy9mb250cy9NYW5yb3BlLU1lZGl1bS50dGYHIGFzc2V0cy9mb250cy9NYW5yb3BlLVJlZ3VsYXIudHRmDAENAQcFYXNzZXQHIGFzc2V0cy9mb250cy9NYW5yb3BlLVJlZ3VsYXIudHRmByFhc3NldHMvZm9udHMvTWFucm9wZS1TZW1pQm9sZC50dGYMAQ0BBwVhc3NldAchYXNzZXRzL2ZvbnRzL01hbnJvcGUtU2VtaUJvbGQudHRmByJhc3NldHMvZm9udHMvTm90b0Vtb2ppLVJlZ3VsYXIudHRmDAENAQcFYXNzZXQHImFzc2V0cy9mb250cy9Ob3RvRW1vamktUmVndWxhci50dGYHIWFzc2V0cy9mb250cy9Ob3RvU2Fucy1SZWd1bGFyLnR0ZgwBDQEHBWFzc2V0ByFhc3NldHMvZm9udHMvTm90b1NhbnMtUmVndWxhci50dGYHGWFzc2V0cy9pY29ucy9JY29uLTUxMi5wbmcMAQ0BBwVhc3NldAcZYXNzZXRzL2ljb25zL0ljb24tNTEyLnBuZwceYXNzZXRzL2ltYWdlcy9qb3Vybml2LWxvZ28ucG5nDAENAQcFYXNzZXQHHmFzc2V0cy9pbWFnZXMvam91cm5pdi1sb2dvLnBuZwcycGFja2FnZXMvY3VwZXJ0aW5vX2ljb25zL2Fzc2V0cy9DdXBlcnRpbm9JY29ucy50dGYMAQ0BBwVhc3NldAcycGFja2FnZXMvY3VwZXJ0aW5vX2ljb25zL2Fzc2V0cy9DdXBlcnRpbm9JY29ucy50dGY=" -------------------------------------------------------------------------------- 
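The development entrypoint above reads its worker count and timeout from GUNICORN_WORKERS and GUNICORN_TIMEOUT with safe defaults, so tuning needs no script edits. A quick sketch of overriding them at container start (docker-compose.dev.sqlite.yml exists in this repo, but the `app` service name and env pass-through are assumptions borrowed from the CI override):

    docker compose -f docker-compose.dev.sqlite.yml run \
      -e GUNICORN_WORKERS=4 -e GUNICORN_TIMEOUT=300 app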
/requirements/base.txt: -------------------------------------------------------------------------------- 1 | # Web Framework 2 | fastapi==0.121.1 3 | uvicorn[standard]==0.38.0 4 | pydantic==2.12.0 5 | 6 | # Database 7 | sqlmodel==0.0.14 8 | sqlalchemy==2.0.44 9 | alembic==1.17.1 10 | psycopg2-binary==2.9.11 11 | 12 | # Authentication 13 | python-jose[cryptography]==3.5.0 14 | passlib[argon2]==1.7.4 15 | argon2-cffi==25.1.0 16 | python-multipart==0.0.6 17 | 18 | # Configuration 19 | pydantic-settings==2.0.3 20 | python-dotenv==1.0.0 21 | 22 | 23 | # Rate Limiting 24 | slowapi==0.1.9 25 | 26 | 27 | # Media Processing 28 | Pillow==11.3.0 29 | python-magic==0.4.27 30 | aiofiles==23.2.1 31 | ffmpeg-python==0.2.0 32 | 33 | # HTTP Client 34 | httpx==0.28.1 35 | 36 | # OIDC / OAuth 37 | authlib==1.6.5 38 | redis==5.0.1 39 | itsdangerous==2.1.2 # Required by Authlib for session state management 40 | 41 | # Task Queue 42 | celery==5.4.0 43 | 44 | # Logging - using standard Python logging 45 | 46 | # Validation 47 | email-validator==2.1.0 48 | 49 | # Utilities 50 | python-dateutil==2.8.2 51 | psutil==5.9.6 52 | -------------------------------------------------------------------------------- /web/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Journiv", 3 | "short_name": "Journiv", 4 | "description": "Journiv - Private Journal", 5 | "start_url": "/?source=pwa", 6 | "scope": "/", 7 | "display": "standalone", 8 | "orientation": "portrait-primary", 9 | "background_color": "#16181D", 10 | "theme_color": "#16181D", 11 | "prefer_related_applications": false, 12 | "icons": [ 13 | { 14 | "src": "icons/Icon-192.png", 15 | "sizes": "192x192", 16 | "type": "image/png" 17 | }, 18 | { 19 | "src": "icons/Icon-512.png", 20 | "sizes": "512x512", 21 | "type": "image/png" 22 | }, 23 | { 24 | "src": "icons/Icon-maskable-192.png", 25 | "sizes": "192x192", 26 | "type": "image/png", 27 | "purpose": "maskable" 28 | }, 29 | { 30 | "src": "icons/Icon-maskable-512.png", 31 | "sizes": "512x512", 32 | "type": "image/png", 33 | "purpose": "maskable" 34 | } 35 | ], 36 | "categories": ["productivity", "journal", "lifestyle"] 37 | } 38 | -------------------------------------------------------------------------------- /app/utils/import_export/constants.py: -------------------------------------------------------------------------------- 1 | """ 2 | Constants for import/export operations. 3 | 4 | Centralizes magic numbers and configuration values for better maintainability. 
5 | """ 6 | 7 | 8 | class ProgressStages: 9 | """Progress percentage milestones for job tracking.""" 10 | 11 | # Export job stages 12 | EXPORT_BUILDING_DATA = 10 13 | EXPORT_CREATING_ZIP = 50 14 | EXPORT_FINALIZING = 90 15 | 16 | # Import job stages 17 | IMPORT_EXTRACTING = 10 18 | IMPORT_PROCESSING = 30 19 | IMPORT_FINALIZING = 90 20 | 21 | # Common 22 | COMPLETED = 100 23 | 24 | 25 | class ExportConfig: 26 | """Configuration constants for export operations.""" 27 | 28 | EXPORT_VERSION = "1.0" 29 | DATA_FILENAME = "data.json" 30 | 31 | 32 | class ImportConfig: 33 | """Configuration constants for import operations.""" 34 | 35 | # File validation 36 | MAX_FILENAME_LENGTH = 255 37 | ALLOWED_EXTENSIONS = frozenset({".zip"}) 38 | 39 | # Batch processing (for future optimization) 40 | ENTRY_BATCH_SIZE = 100 41 | MEDIA_BATCH_SIZE = 50 42 | -------------------------------------------------------------------------------- /app/models/entry_tag_link.py: -------------------------------------------------------------------------------- 1 | """ 2 | Entry-Tag link model. 3 | """ 4 | import uuid 5 | 6 | from sqlalchemy import Column, ForeignKey 7 | from sqlmodel import Field, Index, SQLModel 8 | 9 | from .base import TimestampMixin 10 | 11 | 12 | class EntryTagLink(TimestampMixin, SQLModel, table=True): 13 | """ 14 | Link table for many-to-many relationship between entries and tags. 15 | """ 16 | __tablename__ = "entry_tag_link" 17 | 18 | entry_id: uuid.UUID = Field( 19 | sa_column=Column( 20 | ForeignKey("entry.id", ondelete="CASCADE"), 21 | primary_key=True, 22 | nullable=False 23 | ) 24 | ) 25 | tag_id: uuid.UUID = Field( 26 | sa_column=Column( 27 | ForeignKey("tag.id", ondelete="CASCADE"), 28 | primary_key=True, 29 | nullable=False 30 | ) 31 | ) 32 | 33 | # Table constraints and indexes 34 | __table_args__ = ( 35 | # We only need a separate index for tag_id lookups to efficiently find all entries for a given tag. 36 | Index('idx_entry_tag_link_tag_id', 'tag_id'), 37 | ) 38 | -------------------------------------------------------------------------------- /app/schemas/analytics.py: -------------------------------------------------------------------------------- 1 | """ 2 | Analytics schemas. 
3 | """ 4 | import uuid 5 | from datetime import datetime, date 6 | from typing import Optional 7 | 8 | from pydantic import BaseModel 9 | 10 | from app.schemas.base import TimestampMixin 11 | 12 | 13 | class WritingStreakBase(BaseModel): 14 | """Base writing streak schema.""" 15 | current_streak: int = 0 16 | longest_streak: int = 0 17 | last_entry_date: Optional[date] = None 18 | streak_start_date: Optional[date] = None 19 | total_entries: int = 0 20 | total_words: int = 0 21 | average_words_per_entry: float = 0.0 22 | 23 | 24 | class WritingStreakResponse(WritingStreakBase, TimestampMixin): 25 | """Writing streak response schema.""" 26 | id: uuid.UUID 27 | user_id: uuid.UUID 28 | created_at: datetime 29 | updated_at: datetime 30 | 31 | 32 | class AnalyticsSummary(BaseModel): 33 | """Analytics summary schema.""" 34 | total_entries: int 35 | total_words: int 36 | current_streak: int 37 | longest_streak: int 38 | average_words_per_entry: float 39 | entries_this_month: int 40 | entries_this_week: int 41 | most_used_tags: list[dict] 42 | mood_distribution: dict 43 | writing_frequency: dict 44 | -------------------------------------------------------------------------------- /app/core/oidc.py: -------------------------------------------------------------------------------- 1 | """ 2 | OIDC client configuration and utilities. 3 | 4 | Provides OAuth2/OIDC client setup using Authlib and PKCE helpers. 5 | """ 6 | import os 7 | import base64 8 | import hashlib 9 | import secrets 10 | from authlib.integrations.starlette_client import OAuth 11 | from starlette.config import Config 12 | 13 | # Initialize Authlib OAuth client 14 | config = Config(environ=os.environ) 15 | oauth = OAuth(config) 16 | 17 | 18 | def build_pkce() -> tuple[str, str]: 19 | """ 20 | Build PKCE (Proof Key for Code Exchange) verifier and challenge. 21 | 22 | PKCE adds security for the authorization code flow by ensuring that 23 | the entity that requested the authorization code is the same entity 24 | that exchanges it for tokens. 
25 | 26 | Returns: 27 | Tuple of (code_verifier, code_challenge) 28 | """ 29 | # Generate a cryptographically random verifier (43-128 characters) 30 | verifier = base64.urlsafe_b64encode(secrets.token_bytes(32)).decode().rstrip("=") 31 | 32 | # Create SHA256 challenge from the verifier 33 | challenge = base64.urlsafe_b64encode( 34 | hashlib.sha256(verifier.encode()).digest() 35 | ).decode().rstrip("=") 36 | 37 | return verifier, challenge 38 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | testpaths = tests 3 | python_files = test_*.py 4 | python_classes = Test* 5 | python_functions = test_* 6 | addopts = 7 | --strict-markers 8 | --strict-config 9 | --verbose 10 | --tb=short 11 | --cov=app 12 | --cov-report=term-missing 13 | --cov-report=html:htmlcov 14 | --cov-report=xml 15 | --cov-fail-under=60 16 | --asyncio-mode=auto 17 | --disable-warnings 18 | markers = 19 | unit: Unit tests (tests/unit/) 20 | integration: Integration tests (tests/integration/) 21 | e2e: End-to-end tests (tests/e2e/) 22 | upgrade: Upgrade tests (tests/upgrade/) 23 | slow: Slow tests that take more than 5 seconds 24 | auth: Authentication and authorization tests 25 | database: Database-related tests 26 | api: API endpoint tests 27 | security: Security-related tests 28 | performance: Performance and load tests 29 | media: Media processing and file upload tests 30 | analytics: Analytics and insights tests 31 | health: Health check endpoint tests 32 | file_processing: File processing background tasks 33 | filterwarnings = 34 | ignore::DeprecationWarning 35 | ignore::PendingDeprecationWarning 36 | ignore::pytest.PytestUnraisableExceptionWarning 37 | -------------------------------------------------------------------------------- /scripts/migrate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Database Migration Script 4 | set -e 5 | 6 | echo "🗄️ Running database migrations..." 7 | 8 | # Check if .env exists 9 | if [ ! -f .env ]; then 10 | echo "❌ .env file not found. Please run ./scripts/setup.sh first." 11 | exit 1 12 | fi 13 | 14 | # Determine compose file based on environment 15 | if [[ "${ENVIRONMENT:-development}" == "production" ]]; then 16 | COMPOSE_FILE="docker-compose.yml" 17 | else 18 | COMPOSE_FILE="docker-compose.dev.sqlite.yml" 19 | fi 20 | 21 | # Check if compose file exists 22 | if [[ ! -f "$COMPOSE_FILE" ]]; then 23 | echo "❌ Compose file not found: $COMPOSE_FILE" 24 | exit 1 25 | fi 26 | 27 | echo "📋 Using compose file: $COMPOSE_FILE" 28 | 29 | # Check if services are running 30 | if ! docker-compose -f "$COMPOSE_FILE" ps | grep -q "Up"; then 31 | echo "🐳 Starting services..." 32 | docker-compose -f "$COMPOSE_FILE" up -d 33 | echo "⏳ Waiting for services to be ready..." 34 | sleep 10 35 | fi 36 | 37 | # Run migrations 38 | echo "📝 Applying database migrations..." 39 | if docker-compose -f "$COMPOSE_FILE" exec app alembic upgrade head; then 40 | echo "✅ Migrations completed successfully!" 41 | else 42 | echo "❌ Migration failed!" 43 | exit 1 44 | fi 45 | -------------------------------------------------------------------------------- /app/schemas/journal.py: -------------------------------------------------------------------------------- 1 | """ 2 | Journal schemas. 
3 | """ 4 | import uuid 5 | from datetime import datetime 6 | from typing import Optional 7 | 8 | from pydantic import BaseModel 9 | 10 | from app.models.enums import JournalColor 11 | from app.schemas.base import TimestampMixin 12 | 13 | 14 | class JournalBase(BaseModel): 15 | """Base journal schema.""" 16 | title: str 17 | description: Optional[str] = None 18 | color: Optional[JournalColor] = None 19 | icon: Optional[str] = None 20 | 21 | 22 | class JournalCreate(JournalBase): 23 | """Journal creation schema.""" 24 | pass 25 | 26 | 27 | class JournalUpdate(BaseModel): 28 | """Journal update schema.""" 29 | title: Optional[str] = None 30 | description: Optional[str] = None 31 | color: Optional[JournalColor] = None 32 | icon: Optional[str] = None 33 | is_favorite: Optional[bool] = None 34 | is_archived: Optional[bool] = None 35 | 36 | 37 | class JournalResponse(JournalBase, TimestampMixin): 38 | """Journal response schema.""" 39 | id: uuid.UUID 40 | user_id: uuid.UUID 41 | is_favorite: bool 42 | is_archived: bool 43 | entry_count: int 44 | total_words: int 45 | last_entry_at: Optional[datetime] = None 46 | created_at: datetime 47 | updated_at: datetime 48 | -------------------------------------------------------------------------------- /.github/workflows/docker-release-main.yml: -------------------------------------------------------------------------------- 1 | name: Build & Push Docker Image (main tag) 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | # Only run when PRs are merged — not when commits are pushed directly 8 | # (direct pushes to main should be disabled) 9 | workflow_dispatch: 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout code 17 | uses: actions/checkout@v4 18 | 19 | - name: Set up QEMU 20 | uses: docker/setup-qemu-action@v3 21 | 22 | - name: Set up Docker Buildx 23 | uses: docker/setup-buildx-action@v3 24 | 25 | - name: Login to Docker Hub 26 | uses: docker/login-action@v3 27 | with: 28 | username: ${{ secrets.DOCKERHUB_USERNAME }} 29 | password: ${{ secrets.DOCKERHUB_TOKEN }} 30 | 31 | - name: Build and Push (main tag) 32 | uses: docker/build-push-action@v5 33 | with: 34 | context: . 35 | push: true 36 | # Add a tag based on the short commit SHA for immutability 37 | tags: | 38 | swalabtech/journiv-app:main 39 | swalabtech/journiv-app:${{ github.sha }} 40 | platforms: linux/amd64,linux/arm64 41 | cache-from: type=gha 42 | cache-to: type=gha,mode=max 43 | -------------------------------------------------------------------------------- /app/schemas/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base schemas with common functionality. 3 | """ 4 | from datetime import datetime, timezone 5 | from pydantic import BaseModel, field_serializer 6 | 7 | 8 | class TimestampMixin(BaseModel): 9 | """Mixin for models with created_at/updated_at timestamps. 10 | 11 | Ensures datetime fields are always serialized as UTC ISO 8601 with 'Z' suffix. 
12 | """ 13 | 14 | @field_serializer('created_at', 'updated_at', check_fields=False) 15 | def serialize_datetime(self, dt: datetime, _info): 16 | """Ensure datetime is serialized as UTC ISO 8601 with 'Z' suffix.""" 17 | if dt is None: 18 | return None 19 | if dt.tzinfo is None: 20 | # If naive datetime, assume it's UTC 21 | dt = dt.replace(tzinfo=timezone.utc) 22 | # Convert to UTC and format as ISO 8601 with 'Z' suffix 23 | return dt.astimezone(timezone.utc).isoformat().replace('+00:00', 'Z') 24 | 25 | class Config: 26 | from_attributes = True 27 | json_encoders = { 28 | datetime: lambda dt: ( 29 | dt.astimezone(timezone.utc).isoformat().replace('+00:00', 'Z') 30 | if dt and dt.tzinfo 31 | else dt.replace(tzinfo=timezone.utc).isoformat().replace('+00:00', 'Z') 32 | if dt 33 | else None 34 | ) 35 | } 36 | -------------------------------------------------------------------------------- /app/core/celery_app.py: -------------------------------------------------------------------------------- 1 | """ 2 | Celery application configuration for async import/export tasks. 3 | """ 4 | from celery import Celery 5 | from app.core.config import settings 6 | 7 | # Create Celery app instance 8 | celery_app = Celery("journiv") 9 | 10 | # Configure Celery from settings 11 | celery_app.conf.update( 12 | broker_url=settings.celery_broker_url, 13 | result_backend=settings.celery_result_backend, 14 | task_serializer=settings.celery_task_serializer, 15 | result_serializer=settings.celery_result_serializer, 16 | accept_content=settings.celery_accept_content, 17 | timezone=settings.celery_timezone, 18 | enable_utc=settings.celery_enable_utc, 19 | task_track_started=True, 20 | task_time_limit=3600, # 1 hour hard limit for tasks 21 | task_soft_time_limit=3300, # 55 minutes soft limit 22 | worker_prefetch_multiplier=1, # One task at a time 23 | worker_max_tasks_per_child=1000, # Restart worker after 1000 tasks 24 | task_acks_late=True, # Acknowledge tasks after completion 25 | task_reject_on_worker_lost=True, # Requeue tasks if worker dies 26 | broker_connection_retry_on_startup=True, # Retry broker connection on startup 27 | ) 28 | 29 | # Auto-discover tasks from app.tasks module 30 | celery_app.autodiscover_tasks(["app.tasks"]) 31 | 32 | 33 | def get_celery_app() -> Celery: 34 | """Get Celery app instance.""" 35 | return celery_app 36 | -------------------------------------------------------------------------------- /app/schemas/tag.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tag schemas. 
3 | """ 4 | import uuid 5 | from datetime import datetime 6 | from typing import Optional 7 | 8 | from pydantic import BaseModel, validator 9 | 10 | from app.schemas.base import TimestampMixin 11 | 12 | 13 | class TagBase(BaseModel): 14 | """Base tag schema.""" 15 | name: str 16 | 17 | 18 | class TagCreate(TagBase): 19 | """Tag creation schema.""" 20 | 21 | @validator('name') 22 | def validate_name(cls, v): 23 | if not v or not v.strip(): 24 | raise ValueError('Tag name cannot be empty') 25 | return v.strip().lower() 26 | 27 | 28 | class TagUpdate(BaseModel): 29 | """Tag update schema.""" 30 | name: Optional[str] = None 31 | 32 | @validator('name') 33 | def validate_name(cls, v): 34 | if v is None: 35 | return v 36 | if not v.strip(): 37 | raise ValueError('Tag name cannot be empty') 38 | return v.strip().lower() 39 | 40 | 41 | class TagResponse(TagBase, TimestampMixin): 42 | """Tag response schema.""" 43 | id: uuid.UUID 44 | user_id: uuid.UUID 45 | usage_count: int 46 | created_at: datetime 47 | updated_at: datetime 48 | 49 | 50 | class EntryTagLinkBase(BaseModel): 51 | """Base entry tag link schema.""" 52 | entry_id: uuid.UUID 53 | tag_id: uuid.UUID 54 | 55 | 56 | class EntryTagLinkCreate(EntryTagLinkBase): 57 | """Entry tag link creation schema.""" 58 | pass 59 | 60 | 61 | class EntryTagLinkResponse(EntryTagLinkBase, TimestampMixin): 62 | """Entry tag link response schema.""" 63 | created_at: datetime 64 | updated_at: datetime 65 | -------------------------------------------------------------------------------- /app/api/v1/api.py: -------------------------------------------------------------------------------- 1 | """ 2 | API v1 router. 3 | """ 4 | from fastapi import APIRouter 5 | from app.api.v1.endpoints import ( 6 | auth, users, journals, entries, moods, prompts, tags, 7 | analytics, media, health, security, oidc, admin 8 | ) 9 | # Import/Export routers 10 | from app.api.v1.endpoints.export_data import router as export_router 11 | from app.api.v1.endpoints.import_data import router as import_router 12 | 13 | api_router = APIRouter() 14 | 15 | # Include all endpoint routers 16 | api_router.include_router(auth.router, prefix="/auth", tags=["authentication"]) 17 | api_router.include_router(oidc.router, tags=["authentication"]) # OIDC routes (prefix already in router) 18 | api_router.include_router(users.router, prefix="/users", tags=["users"]) 19 | api_router.include_router(journals.router, prefix="/journals", tags=["journals"]) 20 | api_router.include_router(entries.router, prefix="/entries", tags=["entries"]) 21 | api_router.include_router(moods.router, prefix="/moods", tags=["moods"]) 22 | api_router.include_router(prompts.router, prefix="/prompts", tags=["prompts"]) 23 | api_router.include_router(tags.router, prefix="/tags", tags=["tags"]) 24 | api_router.include_router(analytics.router, prefix="/analytics", tags=["analytics"]) 25 | api_router.include_router(media.router, prefix="/media", tags=["media"]) 26 | api_router.include_router(export_router, prefix="/export", tags=["import-export"]) 27 | api_router.include_router(import_router, prefix="/import", tags=["import-export"]) 28 | api_router.include_router(health.router, tags=["health"]) 29 | api_router.include_router(security.router, prefix="/security", tags=["security"]) 30 | api_router.include_router(admin.router, tags=["admin"]) # Admin routes (prefix already in router) 31 | -------------------------------------------------------------------------------- /web/LICENSE.md: 
-------------------------------------------------------------------------------- 1 | # Journiv Web Client License 2 | 3 | Copyright (c) 2025 Swalab Tech 4 | 5 | 6 | The Journiv Web Client (the “Web Client”) located in this `web/` directory is proprietary 7 | software owned and copyrighted by Swalab Tech. It is **not** licensed under the PolyForm 8 | Noncommercial License 1.0.0 or any open-source license. 9 | 10 | Use of the Web Client is permitted **only** for personal, non-commercial self-hosting, 11 | testing, or evaluation in conjunction with the Journiv backend. 12 | Personal or household self-hosting for non-commercial use is expressly permitted. 13 | 14 | Any other use, including but not limited to: 15 | - publicly hosting or providing the Web Client as a service, 16 | - offering it to others outside your immediate family or household, 17 | - or integrating it into another product or service is strictly prohibited without the prior written consent of Swalab Tech. 18 | 19 | You may **not** copy, distribute, sublicense, sell, lease, lend, modify, adapt, translate, 20 | reverse-engineer, decompile, disassemble, or otherwise attempt to derive source code from 21 | the Web Client, nor may you create derivative works based upon it, except to the limited 22 | extent required for personal, non-commercial self-hosting in unmodified form. 23 | 24 | By downloading, using, or deploying the Web Client, you acknowledge that it remains the 25 | exclusive property of Swalab Tech and agree to these terms as a condition of such use. 26 | 27 | All rights not expressly granted are reserved under applicable copyright law and 28 | international treaties. 29 | 30 | --- 31 | **Note:** “Swalab Tech” refers to the creator and copyright holder of Journiv, identified by the GitHub handle [`swalabtech`](https://github.com/swalabtech). 32 | -------------------------------------------------------------------------------- /app/schemas/auth.py: -------------------------------------------------------------------------------- 1 | """ 2 | Authentication schemas. 3 | """ 4 | from typing import Optional 5 | 6 | from pydantic import BaseModel, validator 7 | 8 | 9 | class Token(BaseModel): 10 | """ 11 | Token response schema. 12 | 13 | refresh_token is optional - only included during login, not during token refresh. 14 | This ensures refresh tokens eventually expire and require re-login. 
15 | """ 16 | access_token: str 17 | refresh_token: Optional[str] = None 18 | token_type: str = "bearer" 19 | 20 | 21 | class LoginResponse(BaseModel): 22 | """Login response schema with tokens and user info.""" 23 | access_token: str 24 | refresh_token: str 25 | token_type: str = "bearer" 26 | user: dict 27 | 28 | 29 | class TokenData(BaseModel): 30 | """Token data schema.""" 31 | user_id: Optional[str] = None 32 | 33 | 34 | class UserCreate(BaseModel): 35 | """User creation schema.""" 36 | email: str 37 | password: str 38 | name: str 39 | 40 | @validator('email') 41 | def validate_email(cls, v): 42 | if v and '@' not in v: 43 | raise ValueError('Invalid email address') 44 | return v.lower().strip() if v else v 45 | 46 | @validator('name') 47 | def validate_name(cls, v): 48 | if not v or len(v.strip()) == 0: 49 | raise ValueError('Name cannot be empty') 50 | return v.strip() 51 | 52 | 53 | class UserLogin(BaseModel): 54 | """User login schema.""" 55 | email: str 56 | password: str 57 | 58 | @validator('email') 59 | def validate_email(cls, v): 60 | if v and '@' not in v: 61 | raise ValueError('Invalid email address') 62 | return v.lower().strip() if v else v 63 | 64 | 65 | class TokenRefresh(BaseModel): 66 | """Token refresh schema.""" 67 | refresh_token: str 68 | -------------------------------------------------------------------------------- /app/models/tag.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tag-related models. 3 | """ 4 | from sqlmodel import SQLModel, Field, Relationship, Index, UniqueConstraint, CheckConstraint 5 | from sqlalchemy import Column, ForeignKey 6 | from datetime import datetime 7 | from typing import List, TYPE_CHECKING 8 | from pydantic import field_validator 9 | import uuid 10 | from .base import BaseModel, TimestampMixin 11 | 12 | if TYPE_CHECKING: 13 | from .user import User 14 | from .entry import Entry 15 | 16 | # Import EntryTagLink from separate file to avoid circular imports 17 | from .entry_tag_link import EntryTagLink 18 | 19 | 20 | class Tag(BaseModel, table=True): 21 | """ 22 | Tag model for categorizing journal entries. 23 | """ 24 | __tablename__ = "tag" 25 | 26 | name: str = Field(..., min_length=1, max_length=100, index=True) 27 | user_id: uuid.UUID = Field( 28 | sa_column=Column( 29 | ForeignKey("user.id", ondelete="CASCADE"), 30 | nullable=False 31 | ) 32 | ) # Tags are user-specific 33 | usage_count: int = Field(default=0, ge=0) 34 | 35 | # Relations 36 | user: "User" = Relationship(back_populates="tags") 37 | entries: List["Entry"] = Relationship( 38 | back_populates="tags", 39 | link_model=EntryTagLink 40 | ) 41 | 42 | # Table constraints and indexes 43 | __table_args__ = ( 44 | Index('idx_tags_usage_count', 'user_id', 'usage_count'), 45 | # Constraints 46 | # Ensures a user cannot have two tags with the same name. 47 | UniqueConstraint('user_id', 'name', name='uq_tag_user_name'), 48 | CheckConstraint('length(name) > 0', name='check_tag_name_not_empty'), 49 | CheckConstraint('usage_count >= 0', name='check_usage_count_non_negative'), 50 | ) 51 | 52 | @field_validator('name') 53 | @classmethod 54 | def validate_name(cls, v): 55 | if not v or len(v.strip()) == 0: 56 | raise ValueError('Tag name cannot be empty') 57 | return v.strip().lower() 58 | -------------------------------------------------------------------------------- /app/schemas/mood.py: -------------------------------------------------------------------------------- 1 | """ 2 | Mood schemas. 
3 | """ 4 | import uuid 5 | from datetime import datetime, date 6 | from typing import Optional 7 | 8 | from pydantic import BaseModel, Field, field_serializer 9 | 10 | from app.schemas.base import TimestampMixin 11 | 12 | 13 | class MoodBase(BaseModel): 14 | """Base mood schema.""" 15 | name: str 16 | icon: Optional[str] = None 17 | category: str 18 | 19 | 20 | class MoodResponse(MoodBase, TimestampMixin): 21 | """Mood response schema.""" 22 | id: uuid.UUID 23 | created_at: datetime 24 | updated_at: datetime 25 | 26 | 27 | class MoodLogBase(BaseModel): 28 | """Base mood log schema.""" 29 | mood_id: uuid.UUID 30 | note: Optional[str] = None 31 | logged_datetime_utc: Optional[datetime] = None 32 | logged_timezone: Optional[str] = None 33 | 34 | 35 | class MoodLogCreate(MoodLogBase): 36 | """Mood log creation schema.""" 37 | entry_id: Optional[uuid.UUID] = None 38 | 39 | 40 | class MoodLogUpdate(BaseModel): 41 | """Mood log update schema.""" 42 | mood_id: Optional[uuid.UUID] = None 43 | note: Optional[str] = None 44 | logged_datetime_utc: Optional[datetime] = None 45 | logged_timezone: Optional[str] = None 46 | 47 | 48 | class MoodLogResponse(MoodLogBase, TimestampMixin): 49 | """Mood log response schema.""" 50 | id: uuid.UUID 51 | user_id: uuid.UUID 52 | entry_id: Optional[uuid.UUID] = None 53 | created_at: datetime 54 | logged_date: date = Field(description="The date this mood represents") 55 | logged_datetime_utc: datetime 56 | logged_timezone: str 57 | mood: Optional[MoodResponse] = None 58 | entry_date: Optional[date] = Field(None, description="Date from associated entry if available (deprecated, use logged_date)") 59 | 60 | @field_serializer('logged_date', 'entry_date') 61 | def serialize_dates(self, v: Optional[date], _info) -> Optional[str]: 62 | """Serialize date to ISO format string.""" 63 | return v.isoformat() if v else None 64 | -------------------------------------------------------------------------------- /scripts/moods.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "Happy", 4 | "icon": "😊", 5 | "category": "positive" 6 | }, 7 | { 8 | "name": "Excited", 9 | "icon": "🤩", 10 | "category": "positive" 11 | }, 12 | { 13 | "name": "Grateful", 14 | "icon": "🙏", 15 | "category": "positive" 16 | }, 17 | { 18 | "name": "Calm", 19 | "icon": "😌", 20 | "category": "positive" 21 | }, 22 | { 23 | "name": "Focused", 24 | "icon": "🎯", 25 | "category": "positive" 26 | }, 27 | { 28 | "name": "Sad", 29 | "icon": "😢", 30 | "category": "negative" 31 | }, 32 | { 33 | "name": "Angry", 34 | "icon": "😠", 35 | "category": "negative" 36 | }, 37 | { 38 | "name": "Stressed", 39 | "icon": "😫", 40 | "category": "negative" 41 | }, 42 | { 43 | "name": "Lonely", 44 | "icon": "😔", 45 | "category": "negative" 46 | }, 47 | { 48 | "name": "Tired", 49 | "icon": "😴", 50 | "category": "negative" 51 | }, 52 | { 53 | "name": "Neutral", 54 | "icon": "😐", 55 | "category": "neutral" 56 | }, 57 | { 58 | "name": "Confused", 59 | "icon": "😕", 60 | "category": "neutral" 61 | }, 62 | { 63 | "name": "Curious", 64 | "icon": "🤔", 65 | "category": "neutral" 66 | }, 67 | { 68 | "name": "Surprised", 69 | "icon": "😮", 70 | "category": "neutral" 71 | }, 72 | { 73 | "name": "Anxious", 74 | "icon": "😰", 75 | "category": "negative" 76 | }, 77 | { 78 | "name": "Proud", 79 | "icon": "😤", 80 | "category": "positive" 81 | }, 82 | { 83 | "name": "Hopeful", 84 | "icon": "🌟", 85 | "category": "positive" 86 | }, 87 | { 88 | "name": "Disappointed", 89 | "icon": "😞", 90 | "category": 
"negative" 91 | }, 92 | { 93 | "name": "Relaxed", 94 | "icon": "😌", 95 | "category": "positive" 96 | }, 97 | { 98 | "name": "Motivated", 99 | "icon": "💪", 100 | "category": "positive" 101 | } 102 | ] 103 | -------------------------------------------------------------------------------- /tests/integration/test_health_endpoints.py: -------------------------------------------------------------------------------- 1 | """ 2 | Health check verification against the running stack. 3 | """ 4 | import time 5 | 6 | import pytest 7 | 8 | from tests.lib import JournivApiClient 9 | 10 | 11 | def _get_health(api_client: JournivApiClient): 12 | response = api_client.request("GET", "/health") 13 | assert response.status_code == 200 14 | payload = response.json() 15 | assert "status" in payload 16 | assert "database" in payload 17 | assert "version" in payload 18 | return payload 19 | 20 | 21 | def _get_memory(api_client: JournivApiClient): 22 | response = api_client.request("GET", "/memory") 23 | assert response.status_code == 200 24 | payload = response.json() 25 | assert "status" in payload 26 | assert "timestamp" in payload 27 | assert "system_memory" in payload 28 | assert "process_memory" in payload 29 | return payload 30 | 31 | def test_health_endpoint_reports_status(api_client: JournivApiClient): 32 | """The exposed health endpoint should report healthy status.""" 33 | payload = _get_health(api_client) 34 | assert payload["status"] in {"healthy", "degraded"} 35 | assert "connected" in payload["database"] 36 | 37 | 38 | def test_health_endpoint_is_fast(api_client: JournivApiClient): 39 | """Response time should stay within a snappy SLA.""" 40 | start = time.time() 41 | _get_health(api_client) 42 | duration = time.time() - start 43 | assert duration < 2.0 44 | 45 | 46 | def test_health_timestamp_format(api_client: JournivApiClient): 47 | """Timestamp should be ISO 8601 formatted.""" 48 | payload = _get_health(api_client) 49 | timestamp = payload["timestamp"] 50 | assert "T" in timestamp 51 | assert timestamp.endswith("Z") or "+" in timestamp 52 | 53 | 54 | 55 | @pytest.mark.parametrize("path", ["/health", "/memory"]) 56 | def test_health_endpoints_accessible_without_auth(api_client: JournivApiClient, path: str): 57 | """Public health checks should not require authentication.""" 58 | response = api_client.request("GET", path) 59 | assert response.status_code == 200 60 | -------------------------------------------------------------------------------- /alembic/versions/4fbf758e7995_add_externalidentity_model_for_oidc_.py: -------------------------------------------------------------------------------- 1 | """Add ExternalIdentity model for OIDC authentication 2 | 3 | Revision ID: 4fbf758e7995 4 | Revises: 31316b8d8d5f 5 | Create Date: 2025-11-05 14:08:51.079296 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '4fbf758e7995' 14 | down_revision = '31316b8d8d5f' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('external_identities', 22 | sa.Column('created_at', sa.DateTime(), nullable=False), 23 | sa.Column('updated_at', sa.DateTime(), nullable=False), 24 | sa.Column('id', sa.Uuid(), nullable=False), 25 | sa.Column('user_id', sa.Uuid(), nullable=False), 26 | sa.Column('issuer', sa.String(length=512), nullable=False), 27 | sa.Column('subject', sa.String(length=255), nullable=False), 28 | sa.Column('email', sa.String(length=255), nullable=True), 29 | sa.Column('name', sa.String(length=255), nullable=True), 30 | sa.Column('picture', sa.String(length=512), nullable=True), 31 | sa.Column('last_login_at', sa.DateTime(), nullable=False), 32 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'), 33 | sa.PrimaryKeyConstraint('id'), 34 | sa.UniqueConstraint('issuer', 'subject', name='uq_issuer_subject') 35 | ) 36 | op.create_index(op.f('ix_external_identities_id'), 'external_identities', ['id'], unique=False) 37 | op.create_index(op.f('ix_external_identities_issuer'), 'external_identities', ['issuer'], unique=False) 38 | op.create_index(op.f('ix_external_identities_user_id'), 'external_identities', ['user_id'], unique=False) 39 | # ### end Alembic commands ### 40 | 41 | 42 | def downgrade() -> None: 43 | # ### commands auto generated by Alembic - please adjust! ### 44 | op.drop_index(op.f('ix_external_identities_user_id'), table_name='external_identities') 45 | op.drop_index(op.f('ix_external_identities_issuer'), table_name='external_identities') 46 | op.drop_index(op.f('ix_external_identities_id'), table_name='external_identities') 47 | op.drop_table('external_identities') 48 | # ### end Alembic commands ### 49 | -------------------------------------------------------------------------------- /scripts/fix_migration_imports.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Fix migration files by replacing sqlmodel.sql.sqltypes.AutoString with sa.String 4 | and ensuring proper imports. 
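For context, a minimal, runnable illustration of the rewrite this script performs; the migration line below is hypothetical, but the substitution uses the script's own regex.

```python
import re

# Hypothetical line as sqlmodel's autogenerate would emit it.
line = "sa.Column('title', sqlmodel.sql.sqltypes.AutoString(length=200), nullable=False)"

# The same substitution the script applies file-wide.
fixed = re.sub(
    r"sqlmodel\.sql\.sqltypes\.AutoString\(length=(\d+)\)",
    r"sa.String(length=\1)",
    line,
)
print(fixed)  # sa.Column('title', sa.String(length=200), nullable=False)
```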
5 | """ 6 | import os 7 | import re 8 | from pathlib import Path 9 | 10 | 11 | def fix_migration_file(file_path: Path) -> bool: 12 | """Fix a single migration file.""" 13 | print(f"Processing {file_path}") 14 | 15 | with open(file_path, 'r') as f: 16 | content = f.read() 17 | 18 | # Check if file needs fixing 19 | if 'sqlmodel.sql.sqltypes.AutoString' not in content: 20 | print(f" ✓ No AutoString found, skipping") 21 | return False 22 | 23 | # Add sqlmodel import if not present 24 | if 'import sqlmodel' not in content: 25 | # Find the import section and add sqlmodel import 26 | import_pattern = r'(from alembic import op\nimport sqlalchemy as sa)' 27 | replacement = r'\1\nimport sqlmodel' 28 | content = re.sub(import_pattern, replacement, content) 29 | print(f" ✓ Added sqlmodel import") 30 | 31 | # Replace sqlmodel.sql.sqltypes.AutoString with sa.String 32 | # Handle AutoString with length parameter 33 | content = re.sub( 34 | r'sqlmodel\.sql\.sqltypes\.AutoString\(length=(\d+)\)', 35 | r'sa.String(length=\1)', 36 | content 37 | ) 38 | 39 | # Handle AutoString without length parameter 40 | content = re.sub( 41 | r'sqlmodel\.sql\.sqltypes\.AutoString\(\)', 42 | r'sa.String()', 43 | content 44 | ) 45 | 46 | # Write the fixed content back 47 | with open(file_path, 'w') as f: 48 | f.write(content) 49 | 50 | print(f" ✓ Fixed AutoString references") 51 | return True 52 | 53 | 54 | def main(): 55 | """Fix all migration files in the versions directory.""" 56 | versions_dir = Path("alembic/versions") 57 | 58 | if not versions_dir.exists(): 59 | print(f"Error: {versions_dir} does not exist") 60 | return 1 61 | 62 | fixed_count = 0 63 | for file_path in versions_dir.glob("*.py"): 64 | if file_path.name == "__init__.py": 65 | continue 66 | 67 | if fix_migration_file(file_path): 68 | fixed_count += 1 69 | 70 | print(f"\n✓ Fixed {fixed_count} migration files") 71 | return 0 72 | 73 | 74 | if __name__ == "__main__": 75 | exit(main()) 76 | 77 | -------------------------------------------------------------------------------- /app/models/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base model classes and common functionality. 3 | 4 | This module provides base classes for all database models in the application. 5 | Uses timezone-aware UTC datetimes following modern Python best practices. 6 | """ 7 | import uuid 8 | from datetime import datetime 9 | 10 | from sqlmodel import SQLModel, Field 11 | from app.core.time_utils import utc_now 12 | 13 | 14 | class BaseModel(SQLModel): 15 | """ 16 | Base model with common fields for all entities. 17 | 18 | Provides: 19 | - UUID primary key for globally unique identifiers 20 | - created_at: Timestamp when record was created (auto-set, immutable) 21 | - updated_at: Timestamp when record was last modified (auto-updated in application code) 22 | 23 | Note: The updated_at field is managed in application code (services) rather than 24 | database triggers for better portability and explicit control. 
25 | """ 26 | __abstract__ = True 27 | id: uuid.UUID = Field( 28 | default_factory=uuid.uuid4, 29 | primary_key=True, 30 | index=True, 31 | description="Unique identifier for this record" 32 | ) 33 | created_at: datetime = Field( 34 | default_factory=utc_now, 35 | nullable=False, 36 | description="UTC timestamp when this record was created" 37 | ) 38 | updated_at: datetime = Field( 39 | default_factory=utc_now, 40 | nullable=False, 41 | description="UTC timestamp when this record was last updated" 42 | ) 43 | 44 | 45 | class TimestampMixin(SQLModel): 46 | """ 47 | Mixin for models that need timestamps but have their own primary key strategy. 48 | 49 | Used by models like UserSettings that don't use the standard UUID primary key 50 | pattern but still need creation and update timestamps. 51 | 52 | Note: The updated_at field is managed in application code (services) for 53 | consistency and explicit control. Services should call datetime.now(timezone.utc) 54 | and set updated_at when modifying records. 55 | """ 56 | created_at: datetime = Field( 57 | default_factory=utc_now, 58 | nullable=False, 59 | description="UTC timestamp when this record was created" 60 | ) 61 | updated_at: datetime = Field( 62 | default_factory=utc_now, 63 | nullable=False, 64 | description="UTC timestamp when this record was last updated" 65 | ) 66 | -------------------------------------------------------------------------------- /app/core/exceptions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Custom application exceptions. 3 | """ 4 | 5 | class JournivAppException(Exception): 6 | """Base exception for the journal app.""" 7 | pass 8 | 9 | 10 | class UserNotFoundError(JournivAppException): 11 | """Raised when a user is not found.""" 12 | pass 13 | 14 | 15 | class UserAlreadyExistsError(JournivAppException): 16 | """Raised when a user already exists.""" 17 | pass 18 | 19 | 20 | class InvalidCredentialsError(JournivAppException): 21 | """Raised when credentials are invalid.""" 22 | pass 23 | 24 | 25 | class JournalNotFoundError(JournivAppException): 26 | """Raised when a journal is not found.""" 27 | pass 28 | 29 | 30 | class EntryNotFoundError(JournivAppException): 31 | """Raised when an entry is not found.""" 32 | pass 33 | 34 | 35 | class MoodNotFoundError(JournivAppException): 36 | """Raised when a mood is not found.""" 37 | pass 38 | 39 | 40 | class PromptNotFoundError(JournivAppException): 41 | """Raised when a prompt is not found.""" 42 | pass 43 | 44 | 45 | class MediaNotFoundError(JournivAppException): 46 | """Raised when a media file is not found.""" 47 | pass 48 | 49 | 50 | class FileTooLargeError(JournivAppException): 51 | """Raised when uploaded file exceeds size limit.""" 52 | pass 53 | 54 | 55 | class InvalidFileTypeError(JournivAppException): 56 | """Raised when file type is not supported.""" 57 | pass 58 | 59 | 60 | class FileValidationError(JournivAppException): 61 | """Raised when file validation fails.""" 62 | pass 63 | 64 | 65 | class TagNotFoundError(JournivAppException): 66 | """Raised when a tag is not found.""" 67 | pass 68 | 69 | 70 | class UnauthorizedError(JournivAppException): 71 | """Raised when user is not authorized.""" 72 | pass 73 | 74 | 75 | class UserSettingsNotFoundError(JournivAppException): 76 | """Raised when user settings are not found.""" 77 | pass 78 | 79 | 80 | class FileProcessingError(JournivAppException): 81 | """Raised when file processing fails.""" 82 | pass 83 | 84 | 85 | class TokenNotFoundError(JournivAppException): 
86 | """Raised when a token is not found.""" 87 | pass 88 | 89 | 90 | class TokenAlreadyRevokedError(JournivAppException): 91 | """Raised when a token is already revoked.""" 92 | pass 93 | 94 | 95 | class ValidationError(JournivAppException): 96 | """Raised when validation fails.""" 97 | pass 98 | -------------------------------------------------------------------------------- /app/models/external_identity.py: -------------------------------------------------------------------------------- 1 | """ 2 | External identity model for OIDC/OAuth authentication. 3 | """ 4 | import uuid 5 | from datetime import datetime 6 | from typing import Optional, TYPE_CHECKING 7 | 8 | from sqlalchemy import Column, ForeignKey, UniqueConstraint 9 | from sqlmodel import Field, Relationship, String 10 | 11 | from .base import TimestampMixin 12 | 13 | if TYPE_CHECKING: 14 | from .user import User 15 | 16 | 17 | class ExternalIdentity(TimestampMixin, table=True): 18 | """ 19 | External identity linking OIDC/OAuth accounts to internal users. 20 | 21 | Links external authentication providers (issuer + subject) to internal user accounts. 22 | Supports multiple external identities per user and OIDC auto-provisioning. 23 | """ 24 | 25 | __tablename__ = "external_identities" 26 | 27 | id: uuid.UUID = Field( 28 | default_factory=uuid.uuid4, 29 | primary_key=True, 30 | index=True 31 | ) 32 | user_id: uuid.UUID = Field( 33 | sa_column=Column( 34 | ForeignKey("user.id", ondelete="CASCADE"), 35 | nullable=False, 36 | index=True 37 | ) 38 | ) 39 | issuer: str = Field( 40 | ..., 41 | sa_column=Column(String(512), nullable=False, index=True), 42 | description="OIDC issuer URL (e.g., https://accounts.myhomelab.com)" 43 | ) 44 | subject: str = Field( 45 | ..., 46 | sa_column=Column(String(255), nullable=False), 47 | description="OIDC subject identifier (unique per issuer)" 48 | ) 49 | email: Optional[str] = Field( 50 | default=None, 51 | sa_column=Column(String(255), nullable=True), 52 | description="Email from OIDC provider" 53 | ) 54 | name: Optional[str] = Field( 55 | default=None, 56 | max_length=255, 57 | description="Display name from OIDC provider" 58 | ) 59 | picture: Optional[str] = Field( 60 | default=None, 61 | sa_column=Column(String(512), nullable=True), 62 | description="Profile picture URL from OIDC provider" 63 | ) 64 | last_login_at: datetime = Field( 65 | default_factory=datetime.utcnow, 66 | description="Last successful login via this external identity" 67 | ) 68 | 69 | # Relationships 70 | user: "User" = Relationship(back_populates="external_identities") 71 | 72 | # Table constraints 73 | __table_args__ = ( 74 | UniqueConstraint( 75 | "issuer", 76 | "subject", 77 | name="uq_issuer_subject" 78 | ), 79 | ) 80 | -------------------------------------------------------------------------------- /tests/integration/test_account_deletion.py: -------------------------------------------------------------------------------- 1 | """ 2 | End-to-end account deletion scenarios. 
3 | """ 4 | from datetime import date 5 | 6 | from tests.integration.helpers import ( 7 | EndpointCase, 8 | assert_requires_authentication, 9 | assert_status_codes, 10 | ) 11 | from tests.lib import ApiUser, JournivApiClient 12 | 13 | 14 | def test_account_deletion_removes_all_access( 15 | api_client: JournivApiClient, 16 | api_user: ApiUser, 17 | journal_factory, 18 | entry_factory, 19 | ): 20 | """Deleting the account should revoke access and make data inaccessible.""" 21 | journal = journal_factory(title="Goodbye Journal") 22 | entry = entry_factory(journal=journal, title="Last entry") 23 | tag = api_client.create_tag(api_user.access_token, name="farewell") 24 | moods = api_client.list_moods(api_user.access_token) 25 | if moods: 26 | api_client.create_mood_log( 27 | api_user.access_token, 28 | entry_id=entry["id"], 29 | mood_id=moods[0]["id"], 30 | logged_date=date.today().isoformat(), 31 | notes="Log before deletion", 32 | ) 33 | 34 | deletion = api_client.delete_account(api_user.access_token) 35 | assert "deleted" in deletion["message"].lower() 36 | 37 | # The previous access token should now fail for every endpoint. 38 | protected_cases = [ 39 | EndpointCase("GET", "/users/me"), 40 | EndpointCase("GET", "/journals/"), 41 | EndpointCase("GET", "/entries/"), 42 | EndpointCase("GET", "/tags/"), 43 | EndpointCase("GET", "/moods/"), 44 | EndpointCase("GET", "/moods/logs"), 45 | EndpointCase("GET", "/prompts/"), 46 | EndpointCase("GET", "/analytics/writing-streak"), 47 | EndpointCase("GET", "/analytics/productivity"), 48 | EndpointCase("GET", "/analytics/journals"), 49 | EndpointCase("GET", "/media/formats"), 50 | EndpointCase("GET", "/import/"), 51 | EndpointCase("GET", "/export/"), 52 | ] 53 | assert_status_codes( 54 | api_client, 55 | protected_cases, 56 | token=api_user.access_token, 57 | expected_status=(401,), 58 | ) 59 | 60 | # Attempting to login again should fail. 61 | login = api_client.request( 62 | "POST", 63 | "/auth/login", 64 | json={"email": api_user.email, "password": api_user.password}, 65 | ) 66 | assert login.status_code == 401 67 | 68 | 69 | def test_account_deletion_requires_auth(api_client: JournivApiClient): 70 | """Deleting an account without a token must return 401.""" 71 | assert_requires_authentication( 72 | api_client, 73 | [EndpointCase("DELETE", "/users/me")], 74 | ) 75 | -------------------------------------------------------------------------------- /docker-compose.dev.sqlite.yml: -------------------------------------------------------------------------------- 1 | # Journiv Development Docker Compose (SQLite) 2 | # Recommended for quick local development. 3 | # 4 | # Usage: 5 | # docker compose -f docker-compose.dev.sqlite.yml up -d 6 | # Note: dev compose files uses the main tag (docker image from main # branch) for development purposes. Do not run this for production # use prod compose files instead which uses the latest tag. 7 | 8 | services: 9 | redis: 10 | image: redis:7 11 | container_name: journiv-dev-redis 12 | restart: unless-stopped 13 | volumes: 14 | - redis_data:/data 15 | healthcheck: 16 | test: ["CMD", "redis-cli", "ping"] 17 | interval: 10s 18 | timeout: 5s 19 | retries: 5 20 | start_period: 10s 21 | 22 | celery-worker: 23 | build: . 
24 | container_name: journiv-dev-celery-worker 25 | command: celery -A app.core.celery_app worker --loglevel=info 26 | depends_on: 27 | redis: 28 | condition: service_healthy 29 | env_file: 30 | - .env 31 | environment: 32 | # Docker-specific: service names 33 | - SERVICE_ROLE=celery-worker 34 | - REDIS_URL=redis://redis:6379/0 35 | - CELERY_BROKER_URL=redis://redis:6379/0 36 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 37 | 38 | volumes: 39 | - .:/app 40 | - ./data:/data 41 | 42 | healthcheck: 43 | test: ["CMD-SHELL", "celery -A app.core.celery_app inspect ping --timeout=5 | grep -q pong"] 44 | interval: 30s 45 | timeout: 10s 46 | retries: 5 47 | start_period: 40s 48 | 49 | deploy: 50 | resources: 51 | limits: 52 | memory: 512MB 53 | reservations: 54 | memory: 256MB 55 | 56 | app: 57 | build: . 58 | container_name: journiv-dev-sqlite-app 59 | entrypoint: ["/app/scripts/docker-entrypoint-dev.sh"] 60 | ports: 61 | - "${APP_PORT:-8000}:8000" 62 | depends_on: 63 | redis: 64 | condition: service_healthy 65 | env_file: 66 | - .env 67 | environment: 68 | # Docker-specific: service names 69 | - SERVICE_ROLE=app 70 | - REDIS_URL=redis://redis:6379/0 71 | - CELERY_BROKER_URL=redis://redis:6379/0 72 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 73 | 74 | volumes: 75 | - .:/app 76 | - ./data:/data 77 | 78 | deploy: 79 | resources: 80 | limits: 81 | memory: 512MB 82 | reservations: 83 | memory: 256MB 84 | 85 | healthcheck: 86 | test: ["CMD-SHELL", "curl -f http://localhost:8000/api/v1/health"] 87 | interval: 30s 88 | timeout: 10s 89 | retries: 3 90 | start_period: 40s 91 | 92 | volumes: 93 | redis_data: 94 | 95 | networks: 96 | default: 97 | driver: bridge 98 | -------------------------------------------------------------------------------- /tests/integration/test_prompt_endpoints.py: -------------------------------------------------------------------------------- 1 | """ 2 | Prompt API integration coverage. 
3 | """ 4 | import pytest 5 | 6 | from tests.integration.helpers import EndpointCase, assert_requires_authentication 7 | from tests.lib import ApiUser, JournivApiClient 8 | 9 | 10 | def _first_prompt(api_client: JournivApiClient, token: str) -> dict: 11 | prompts = api_client.list_prompts(token, limit=5) 12 | if not prompts: 13 | pytest.skip("No prompts available in the system") 14 | return prompts[0] 15 | 16 | 17 | def test_prompt_catalog_and_details(api_client: JournivApiClient, api_user: ApiUser): 18 | """System prompts should support filtering, detail fetching, and searching.""" 19 | prompt = _first_prompt(api_client, api_user.access_token) 20 | detail = api_client.request( 21 | "GET", f"/prompts/{prompt['id']}", token=api_user.access_token 22 | ).json() 23 | assert detail["id"] == prompt["id"] 24 | 25 | params = {"category": prompt.get("category"), "difficulty_level": prompt.get("difficulty_level")} 26 | listing = api_client.list_prompts( 27 | api_user.access_token, limit=3, **{k: v for k, v in params.items() if v} 28 | ) 29 | assert isinstance(listing, list) 30 | 31 | search_term = (prompt.get("text") or prompt.get("category") or "prompt").split()[0] 32 | search = api_client.request( 33 | "GET", 34 | "/prompts/search", 35 | token=api_user.access_token, 36 | params={"q": search_term[:5]}, 37 | ).json() 38 | assert isinstance(search, list) 39 | 40 | 41 | def test_prompt_random_daily_and_statistics(api_client: JournivApiClient, api_user: ApiUser): 42 | """Random, daily, and analytics endpoints should respond with structured data.""" 43 | random_prompt = api_client.request( 44 | "GET", 45 | "/prompts/random", 46 | token=api_user.access_token, 47 | ) 48 | assert random_prompt.status_code in (200, 404) 49 | 50 | daily_prompt = api_client.request( 51 | "GET", "/prompts/daily", token=api_user.access_token 52 | ) 53 | assert daily_prompt.status_code in (200, 204) 54 | 55 | stats = api_client.request( 56 | "GET", "/prompts/analytics/statistics", token=api_user.access_token 57 | ).json() 58 | assert "total_prompts" in stats 59 | 60 | 61 | def test_prompt_endpoints_require_auth(api_client: JournivApiClient): 62 | assert_requires_authentication( 63 | api_client, 64 | [ 65 | EndpointCase("GET", "/prompts/"), 66 | EndpointCase("GET", "/prompts/random"), 67 | EndpointCase("GET", "/prompts/daily"), 68 | EndpointCase("GET", "/prompts/search", params={"q": "test"}), 69 | EndpointCase("GET", "/prompts/analytics/statistics"), 70 | ], 71 | ) 72 | -------------------------------------------------------------------------------- /tests/integration/test_user_endpoints.py: -------------------------------------------------------------------------------- 1 | """ 2 | Integration coverage for /users endpoints. 
3 | """ 4 | from tests.integration.helpers import EndpointCase, assert_requires_authentication 5 | from tests.lib import ApiUser, JournivApiClient, make_api_user 6 | 7 | 8 | def test_get_and_update_profile(api_client: JournivApiClient, api_user: ApiUser): 9 | """Users can retrieve and update their profile information.""" 10 | profile = api_client.current_user(api_user.access_token) 11 | assert profile["id"] == api_user.user_id 12 | assert profile["email"] == api_user.email 13 | 14 | updated = api_client.update_profile( 15 | api_user.access_token, 16 | {"name": "Updated Test User"}, 17 | ) 18 | assert updated["name"] == "Updated Test User" 19 | 20 | 21 | def test_settings_round_trip(api_client: JournivApiClient, api_user: ApiUser): 22 | """Settings endpoint should return and persist preferences.""" 23 | current_settings = api_client.get_user_settings(api_user.access_token) 24 | assert "time_zone" in current_settings 25 | 26 | desired = { 27 | "time_zone": "America/New_York", 28 | "daily_prompt_enabled": False, 29 | "theme": "dark", 30 | } 31 | updated = api_client.update_user_settings(api_user.access_token, desired) 32 | 33 | assert updated["time_zone"] == desired["time_zone"] 34 | assert updated["daily_prompt_enabled"] is False 35 | assert updated["theme"] == "dark" 36 | 37 | 38 | def test_account_deletion_revokes_access(api_client: JournivApiClient): 39 | """Deleting the account immediately revokes existing tokens.""" 40 | user = make_api_user(api_client) 41 | 42 | response = api_client.delete_account(user.access_token) 43 | assert "deleted" in response["message"].lower() 44 | 45 | # Existing token should now be rejected 46 | unauthorized = api_client.request("GET", "/users/me", token=user.access_token) 47 | assert unauthorized.status_code == 401 48 | 49 | # Logging in again should also fail 50 | login_attempt = api_client.request( 51 | "POST", 52 | "/auth/login", 53 | json={"email": user.email, "password": user.password}, 54 | ) 55 | assert login_attempt.status_code == 401 56 | 57 | 58 | def test_user_endpoints_require_authentication(api_client: JournivApiClient): 59 | """Endpoints under /users/me should reject missing tokens.""" 60 | assert_requires_authentication( 61 | api_client, 62 | [ 63 | EndpointCase("GET", "/users/me"), 64 | EndpointCase("PUT", "/users/me", json={"name": "Nope"}), 65 | EndpointCase("DELETE", "/users/me"), 66 | EndpointCase("GET", "/users/me/settings"), 67 | EndpointCase("PUT", "/users/me/settings", json={}), 68 | ], 69 | ) 70 | -------------------------------------------------------------------------------- /scripts/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Journal App Setup Script 4 | set -e 5 | 6 | echo "🚀 Setting up Journal App Backend..." 7 | 8 | # Check if Docker is installed 9 | if ! command -v docker &> /dev/null; then 10 | echo "❌ Docker is not installed. Please install Docker first." 11 | exit 1 12 | fi 13 | 14 | # Check if Docker Compose is installed 15 | if ! command -v docker-compose &> /dev/null; then 16 | echo "❌ Docker Compose is not installed. Please install Docker Compose first." 17 | exit 1 18 | fi 19 | 20 | # Create .env file if it doesn't exist 21 | if [ ! -f .env ]; then 22 | echo "📝 Creating .env file from template..." 23 | if [ -f .env.template ]; then 24 | cp .env.template .env 25 | echo "✅ .env file created. Please edit it with your configuration." 26 | else 27 | echo "❌ .env.template file not found. Cannot create .env file." 
28 | exit 1 29 | fi 30 | else 31 | echo "✅ .env file already exists." 32 | fi 33 | 34 | # Create required directories 35 | echo "📁 Creating required directories..." 36 | mkdir -p media logs data 37 | 38 | # Determine compose file (default to SQLite development) 39 | COMPOSE_FILE="${COMPOSE_FILE:-docker-compose.dev.sqlite.yml}" 40 | 41 | # Check if compose file exists 42 | if [[ ! -f "$COMPOSE_FILE" ]]; then 43 | echo "❌ Compose file not found: $COMPOSE_FILE" 44 | echo "Available files:" 45 | ls -la docker-compose*.yml 2>/dev/null || echo "No docker-compose files found" 46 | exit 1 47 | fi 48 | 49 | echo "📋 Using compose file: $COMPOSE_FILE" 50 | 51 | # Start services 52 | echo "🐳 Starting Docker services..." 53 | if docker-compose -f "$COMPOSE_FILE" up -d; then 54 | echo "✅ Services started successfully." 55 | else 56 | echo "❌ Failed to start services." 57 | exit 1 58 | fi 59 | 60 | # Wait for services to be ready 61 | echo "⏳ Waiting for services to be ready..." 62 | sleep 15 63 | 64 | # Check if services are running 65 | if ! docker-compose -f "$COMPOSE_FILE" ps | grep -q "Up"; then 66 | echo "❌ Services are not running properly." 67 | echo "Service status:" 68 | docker-compose -f "$COMPOSE_FILE" ps 69 | exit 1 70 | fi 71 | 72 | # Run database migrations 73 | echo "🗄️ Running database migrations..." 74 | if docker-compose -f "$COMPOSE_FILE" exec app alembic upgrade head; then 75 | echo "✅ Database migrations completed successfully." 76 | else 77 | echo "❌ Database migrations failed." 78 | exit 1 79 | fi 80 | 81 | echo "" 82 | echo "✅ Setup complete!" 83 | echo "" 84 | echo "🌐 Your Journal App Backend is ready!" 85 | echo " - API: http://localhost:8000" 86 | echo " - Docs: http://localhost:8000/docs" 87 | echo " - ReDoc: http://localhost:8000/redoc" 88 | echo "" 89 | echo "📋 Next steps:" 90 | echo " - Edit .env file with your configuration" 91 | echo " - Use ./scripts/deploy.sh for future deployments" 92 | echo "" 93 | echo "🚀 Quick commands:" 94 | echo " - Start: ./scripts/deploy.sh --env development" 95 | echo " - Stop: docker-compose -f $COMPOSE_FILE down" 96 | echo " - Logs: docker-compose -f $COMPOSE_FILE logs -f" 97 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pytest fixtures shared across the integration and upgrade suites. 3 | 4 | All integration tests must exercise the running Journiv stack through the 5 | public HTTP API. 6 | """ 7 | from __future__ import annotations 8 | 9 | import os 10 | import uuid 11 | from datetime import date 12 | from typing import Callable, Dict 13 | 14 | import pytest 15 | 16 | from tests.lib import ApiUser, JournivApiClient, make_api_user 17 | 18 | 19 | @pytest.fixture(scope="session") 20 | def api_client() -> JournivApiClient: 21 | """ 22 | Session scoped API client against the running Journiv instance. 23 | 24 | The base URL can be overridden through JOURNIV_API_BASE_URL. CI sets 25 | it to http://localhost:8000/api/v1 which points to the docker compose 26 | stack started in the workflows. 27 | """ 28 | base_url = os.getenv("JOURNIV_API_BASE_URL") 29 | client = JournivApiClient(base_url=base_url) 30 | 31 | # Wait for health once per session to fail fast if the stack is broken. 
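tests.lib is not reproduced in this dump, so the following is only a plausible sketch of what a wait_for_health helper does: poll the endpoint until it answers 200 or a deadline passes (the httpx dependency is an assumption, not confirmed by the source).

```python
import time

import httpx  # assumption: the test client is httpx-based


def wait_for_health(base_url: str, path: str, timeout: float = 60.0) -> None:
    """Poll a health endpoint until it returns 200 or the deadline expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            if httpx.get(base_url + path, timeout=5.0).status_code == 200:
                return
        except httpx.HTTPError:
            pass  # stack still booting; keep polling
        time.sleep(1.0)
    raise RuntimeError(f"Service at {base_url}{path} never became healthy")
```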
32 | client.wait_for_health("/api/v1/health") 33 | 34 | yield client 35 | client.close() 36 | 37 | 38 | @pytest.fixture 39 | def api_user(api_client: JournivApiClient) -> ApiUser: 40 | """ 41 | Create a unique test user via the public API. 42 | """ 43 | return make_api_user(api_client) 44 | 45 | 46 | @pytest.fixture 47 | def journal_factory( 48 | api_client: JournivApiClient, api_user: ApiUser 49 | ) -> Callable[..., Dict]: 50 | """ 51 | Factory that creates journals owned by the current test user. 52 | """ 53 | 54 | def _create(**overrides: Dict) -> Dict: 55 | title = overrides.pop("title", f"Journal {uuid.uuid4().hex[:6]}") 56 | journal = api_client.create_journal( 57 | api_user.access_token, 58 | title=title, 59 | color=overrides.pop("color", "#3B82F6"), 60 | description=overrides.pop( 61 | "description", "Journal created during integration tests" 62 | ), 63 | icon=overrides.pop("icon", "📝"), 64 | ) 65 | return journal 66 | 67 | return _create 68 | 69 | 70 | @pytest.fixture 71 | def entry_factory( 72 | api_client: JournivApiClient, api_user: ApiUser, journal_factory: Callable[..., Dict] 73 | ) -> Callable[..., Dict]: 74 | """ 75 | Factory that creates entries ensuring the journal exists. 76 | """ 77 | 78 | def _create(**overrides: Dict) -> Dict: 79 | journal = overrides.pop("journal", None) 80 | if journal is None: 81 | journal = journal_factory() 82 | 83 | entry = api_client.create_entry( 84 | api_user.access_token, 85 | journal_id=journal["id"], 86 | title=overrides.pop("title", f"Entry {uuid.uuid4().hex[:6]}"), 87 | content=overrides.pop( 88 | "content", "Content written by the integration test suite." 89 | ), 90 | entry_date=overrides.pop( 91 | "entry_date", date.today().isoformat() 92 | ), 93 | **overrides, 94 | ) 95 | entry["journal"] = journal 96 | return entry 97 | 98 | return _create 99 | -------------------------------------------------------------------------------- /app/models/prompt.py: -------------------------------------------------------------------------------- 1 | """ 2 | Prompt-related models. 3 | """ 4 | import uuid 5 | from typing import List, Optional, TYPE_CHECKING 6 | 7 | from pydantic import field_validator 8 | from sqlalchemy import Column, ForeignKey 9 | from sqlmodel import Field, Relationship, Index, CheckConstraint 10 | 11 | from .base import BaseModel 12 | from .enums import PromptCategory 13 | 14 | if TYPE_CHECKING: 15 | from .user import User 16 | from .entry import Entry 17 | 18 | 19 | class Prompt(BaseModel, table=True): 20 | """ 21 | Prompt model for journaling prompts with categorization. 
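As a quick, runnable illustration of the category normalization the validators below enforce (PromptCategory is the enum from app/models/enums.py):

```python
from app.models.enums import PromptCategory

raw = "  Gratitude "
normalized = raw.strip().lower()

# The same membership check validate_category performs.
allowed = {category.value for category in PromptCategory}
assert normalized in allowed
assert normalized == PromptCategory.GRATITUDE.value  # "gratitude"
```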
22 | """ 23 | __tablename__ = "prompt" 24 | 25 | text: str = Field(..., min_length=1, max_length=1000) 26 | category: Optional[str] = Field(None, max_length=100) # Should be a PromptCategory enum value 27 | difficulty_level: int = Field(default=1, ge=1, le=5) # 1=easy, 5=complex 28 | estimated_time_minutes: Optional[int] = Field(None, ge=1, le=120) 29 | is_active: bool = Field(default=True) 30 | usage_count: int = Field(default=0, ge=0) 31 | # A user_id of NULL means it's a system prompt 32 | user_id: Optional[uuid.UUID] = Field( 33 | sa_column=Column( 34 | ForeignKey("user.id", ondelete="CASCADE"), 35 | nullable=True 36 | ) 37 | ) 38 | 39 | # Relations 40 | user: Optional["User"] = Relationship(back_populates="user_prompts") 41 | entries: List["Entry"] = Relationship(back_populates="prompt") 42 | 43 | # Table constraints and indexes 44 | __table_args__ = ( 45 | Index('idx_prompts_category', 'category'), 46 | Index('idx_prompts_difficulty_level', 'difficulty_level'), 47 | Index('idx_prompts_user_active', 'user_id', 'is_active'), 48 | Index('idx_prompts_popular', 'is_active', 'usage_count'), # For popular prompts queries 49 | # Constraints 50 | CheckConstraint('length(text) > 0', name='check_prompt_text_not_empty'), 51 | CheckConstraint('difficulty_level >= 1 AND difficulty_level <= 5', name='check_difficulty_level_range'), 52 | CheckConstraint('estimated_time_minutes IS NULL OR estimated_time_minutes > 0', name='check_estimated_time_positive'), 53 | CheckConstraint('usage_count >= 0', name='check_usage_count_positive'), 54 | ) 55 | 56 | @field_validator('text') 57 | @classmethod 58 | def validate_text(cls, v): 59 | if not v or len(v.strip()) == 0: 60 | raise ValueError('Prompt text cannot be empty') 61 | return v.strip() 62 | 63 | @field_validator('category') 64 | @classmethod 65 | def validate_category(cls, v): 66 | """Validate and normalize category to lowercase.""" 67 | if v and len(v.strip()) == 0: 68 | return None 69 | 70 | if v: 71 | # Normalize to lowercase 72 | normalized = v.strip().lower() 73 | 74 | # Validate against allowed categories 75 | allowed_categories = {cat.value for cat in PromptCategory} 76 | if normalized not in allowed_categories: 77 | raise ValueError( 78 | f'Invalid category: {v}. Must be one of {sorted(allowed_categories)}' 79 | ) 80 | 81 | return normalized 82 | 83 | return v 84 | -------------------------------------------------------------------------------- /app/schemas/prompt.py: -------------------------------------------------------------------------------- 1 | """ 2 | Prompt schemas. 3 | """ 4 | import uuid 5 | from datetime import datetime 6 | from typing import Optional 7 | 8 | from pydantic import BaseModel, validator 9 | 10 | from app.models.enums import PromptCategory 11 | from app.schemas.base import TimestampMixin 12 | 13 | 14 | class PromptBase(BaseModel): 15 | """Base prompt schema.""" 16 | 17 | text: str 18 | category: Optional[str] = None 19 | difficulty_level: int = 1 20 | estimated_time_minutes: Optional[int] = None 21 | 22 | @validator("text") 23 | def validate_text_not_empty(cls, value: str) -> str: 24 | if not value or not value.strip(): 25 | raise ValueError("Text cannot be empty") 26 | return value.strip() 27 | 28 | @validator("category") 29 | def validate_category(cls, value: Optional[str]) -> Optional[str]: 30 | if value is None: 31 | return value 32 | normalized = value.strip().lower() 33 | allowed = {category.value for category in PromptCategory} 34 | if normalized not in allowed: 35 | raise ValueError(f"Invalid category: {value}. 
Must be one of {sorted(allowed)}") 36 | return normalized 37 | 38 | 39 | class PromptResponse(PromptBase, TimestampMixin): 40 | """Prompt response schema.""" 41 | 42 | id: uuid.UUID 43 | is_active: bool 44 | usage_count: int 45 | user_id: Optional[uuid.UUID] = None 46 | created_at: datetime 47 | updated_at: datetime 48 | 49 | 50 | class PromptCreate(PromptBase): 51 | """Prompt creation schema.""" 52 | pass 53 | 54 | 55 | class PromptUpdate(BaseModel): 56 | """Prompt update schema.""" 57 | 58 | text: Optional[str] = None 59 | category: Optional[str] = None 60 | difficulty_level: Optional[int] = None 61 | estimated_time_minutes: Optional[int] = None 62 | is_active: Optional[bool] = None 63 | 64 | @validator("text") 65 | def validate_text(cls, value: Optional[str]) -> Optional[str]: 66 | if value is None: 67 | return value 68 | if not value.strip(): 69 | raise ValueError("Text cannot be empty") 70 | return value.strip() 71 | 72 | @validator("category") 73 | def validate_category(cls, value: Optional[str]) -> Optional[str]: 74 | if value is None: 75 | return value 76 | normalized = value.strip().lower() 77 | allowed = {category.value for category in PromptCategory} 78 | if normalized not in allowed: 79 | raise ValueError(f"Invalid category: {value}. Must be one of {sorted(allowed)}") 80 | return normalized 81 | 82 | @validator("difficulty_level") 83 | def validate_difficulty(cls, value: Optional[int]) -> Optional[int]: 84 | if value is None: 85 | return value 86 | if value < 1 or value > 5: 87 | raise ValueError("difficulty_level must be between 1 and 5") 88 | return value 89 | 90 | @validator("estimated_time_minutes") 91 | def validate_estimated_time(cls, value: Optional[int]) -> Optional[int]: 92 | if value is None: 93 | return value 94 | if value <= 0: 95 | raise ValueError("estimated_time_minutes must be greater than 0") 96 | return value 97 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # ========================= 2 | # Stage 1: Builder 3 | # ========================= 4 | FROM python:3.11-alpine AS builder 5 | 6 | ENV PYTHONDONTWRITEBYTECODE=1 \ 7 | PYTHONUNBUFFERED=1 \ 8 | PYTHONPATH=/app \ 9 | PATH=/root/.local/bin:$PATH 10 | 11 | WORKDIR /app 12 | 13 | # Install build dependencies (includes ffmpeg) 14 | RUN apk add --no-cache --virtual .build-deps \ 15 | gcc \ 16 | musl-dev \ 17 | libffi-dev \ 18 | postgresql-dev \ 19 | libmagic \ 20 | curl \ 21 | ffmpeg \ 22 | build-base \ 23 | && echo "🔍 Checking FFmpeg license (builder stage)..." \ 24 | && if ffmpeg -version | grep -qE "enable-gpl|enable-nonfree"; then echo "❌ GPL/nonfree FFmpeg detected!"; exit 1; else echo "✅ LGPL FFmpeg build verified."; fi
25 | 26 | # Copy requirements and install Python deps 27 | COPY requirements/ requirements/ 28 | RUN pip install --no-cache-dir --upgrade pip \ 29 | && pip install --no-cache-dir -r requirements/prod.txt 30 | 31 | # ========================= 32 | # Stage 2: Runtime 33 | # ========================= 34 | FROM python:3.11-alpine AS runtime 35 | 36 | ENV PYTHONDONTWRITEBYTECODE=1 \ 37 | PYTHONUNBUFFERED=1 \ 38 | PYTHONPATH=/app \ 39 | PATH=/root/.local/bin:$PATH \ 40 | ENVIRONMENT=production \ 41 | LOG_LEVEL=INFO 42 | 43 | WORKDIR /app 44 | 45 | # Install runtime dependencies and verify ffmpeg license 46 | RUN apk add --no-cache \ 47 | libmagic \ 48 | curl \ 49 | ffmpeg \ 50 | libffi \ 51 | postgresql-libs \ 52 | libpq \ 53 | && echo "🔍 Checking FFmpeg license (runtime stage)..." \ 54 | && if ffmpeg -version | grep -qE "enable-gpl|enable-nonfree"; then echo "❌ GPL/nonfree FFmpeg detected!"; exit 1; else echo "✅ LGPL FFmpeg build verified."; fi 55 | 56 | # Copy installed Python packages from builder 57 | COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages 58 | COPY --from=builder /usr/local/bin /usr/local/bin 59 | 60 | # Copy app code and assets 61 | COPY app/ app/ 62 | 63 | # Copy database migration files 64 | COPY alembic/ alembic/ 65 | COPY alembic.ini . 66 | 67 | # Copy scripts directory (seed data and entrypoint) 68 | COPY scripts/moods.json scripts/moods.json 69 | COPY scripts/prompts.json scripts/prompts.json 70 | COPY scripts/docker-entrypoint.sh scripts/docker-entrypoint.sh 71 | 72 | # Copy prebuilt Flutter web app 73 | COPY web/ web/ 74 | 75 | # Copy license 76 | COPY LICENSE.md . 77 | 78 | # Create non-root user and set up data directories 79 | RUN adduser -D -u 1000 appuser \ 80 | && mkdir -p /data/media /data/logs \ 81 | && chmod +x scripts/docker-entrypoint.sh \ 82 | # Fix permissions in case some directories get copied as 700 83 | && chmod -R a+rX /app \ 84 | && chmod -R a+rwX /data \ 85 | && chown -R appuser:appuser /app /data 86 | 87 | USER appuser 88 | 89 | EXPOSE 8000 90 | 91 | HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \ 92 | CMD sh -c '\ 93 | if [ "${SERVICE_ROLE:-app}" = "celery-worker" ]; then \ 94 | celery -A app.core.celery_app inspect ping --timeout=5 | grep -q "pong"; \ 95 | else \ 96 | curl -f http://localhost:8000/api/v1/health; \ 97 | fi' 98 | 99 | CMD ["/app/scripts/docker-entrypoint.sh"] 100 | -------------------------------------------------------------------------------- /app/utils/import_export/progress_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Progress callback utilities for import/export operations. 3 | """ 4 | from typing import Callable 5 | from sqlalchemy.orm import Session 6 | 7 | 8 | def create_throttled_progress_callback( 9 | job, 10 | db: Session, 11 | start_progress: int = 0, 12 | end_progress: int = 90, 13 | commit_interval: int = 10, 14 | percentage_threshold: int = 5, 15 | ) -> Callable[[int, int], None]: 16 | """ 17 | Create a throttled progress callback that commits to DB efficiently. 18 | 19 | Progress is guaranteed to be monotonic (never decreases) and works within 20 | the specified range [start_progress, end_progress].
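A self-contained usage sketch for this factory; the SimpleNamespace job and no-op session are stand-ins for the real import-job row and SQLAlchemy session.

```python
from types import SimpleNamespace

from app.utils.import_export.progress_utils import create_throttled_progress_callback

# Stand-ins exposing only the attributes the factory touches.
job = SimpleNamespace(processed_items=0, total_items=0, progress=0)
job.set_progress = lambda value: setattr(job, "progress", value)
db = SimpleNamespace(commit=lambda: None)

on_progress = create_throttled_progress_callback(job, db, start_progress=10, end_progress=90)

for index in range(1, 101):
    on_progress(index, 100)  # commits are throttled to every 10 items or 5% change

assert job.progress == 90  # clamped to end_progress once everything is processed
```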
21 | 22 | Args: 23 | job: Job object with processed_items, total_items, and set_progress method 24 | db: Database session 25 | start_progress: Starting progress percentage (default 0) 26 | end_progress: Ending progress percentage (default 90) 27 | commit_interval: Commit every N entries (default 10) 28 | percentage_threshold: Commit on N% progress changes (default 5) 29 | 30 | Returns: 31 | Progress callback function that ensures monotonic progress 32 | """ 33 | last_committed_progress = 0 34 | last_committed_percentage = start_progress 35 | progress_range = end_progress - start_progress 36 | zero_total_committed = False 37 | 38 | def handle_progress(processed: int, total: int): 39 | nonlocal last_committed_progress, last_committed_percentage, zero_total_committed 40 | job.processed_items = processed 41 | job.total_items = total 42 | 43 | if total > 0: 44 | # Reset zero_total flag when we have a valid total 45 | zero_total_committed = False 46 | 47 | # Calculate progress within the range [start_progress, end_progress] 48 | ratio = processed / total 49 | calculated_progress = start_progress + int(ratio * progress_range) 50 | 51 | # Ensure progress never decreases (monotonic guarantee) 52 | current_progress = job.progress or start_progress 53 | new_progress = max(current_progress, calculated_progress) 54 | 55 | # Clamp to end_progress to avoid exceeding range 56 | new_progress = min(new_progress, end_progress) 57 | 58 | job.set_progress(new_progress) 59 | 60 | should_commit = ( 61 | (processed - last_committed_progress) >= commit_interval or 62 | (new_progress - last_committed_percentage) >= percentage_threshold or 63 | processed == total 64 | ) 65 | 66 | if should_commit: 67 | db.commit() 68 | last_committed_progress = processed 69 | last_committed_percentage = new_progress 70 | else: 71 | # No total yet, ensure we're at least at start_progress 72 | # Only commit once for zero-total case to avoid repeated commits 73 | if not zero_total_committed: 74 | current_progress = job.progress or start_progress 75 | if current_progress < start_progress: 76 | job.set_progress(start_progress) 77 | db.commit() 78 | zero_total_committed = True 79 | 80 | return handle_progress 81 | 82 | -------------------------------------------------------------------------------- /app/core/security.py: -------------------------------------------------------------------------------- 1 | """ 2 | Security utilities for authentication and password hashing. 3 | """ 4 | import uuid 5 | from datetime import datetime, timedelta, timezone 6 | from typing import Optional 7 | 8 | from jose import ExpiredSignatureError, JWTError, jwt 9 | from passlib.context import CryptContext 10 | 11 | from app.core.config import settings 12 | from app.core.logging_config import log_error 13 | 14 | pwd_context = CryptContext(schemes=["argon2"], deprecated="auto") 15 | 16 | 17 | def verify_password(plain_password: str, hashed_password: str) -> bool: 18 | """Verify a password against its hash.""" 19 | if plain_password is None: 20 | return False 21 | try: 22 | return pwd_context.verify(plain_password, hashed_password) 23 | except (ValueError, TypeError): 24 | return False 25 | 26 | 27 | def get_password_hash(password: str) -> str: 28 | """Hash a password.""" 29 | return pwd_context.hash(password) 30 | 31 | 32 | def _create_token(data: dict, token_type: str, expires_delta: timedelta) -> str: 33 | """ 34 | Internal helper to create a JWT. 35 | 36 | Includes a JTI (JWT ID) claim for future compatibility with token revocation. 
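To make the JTI claim concrete, a short round-trip using create_access_token and verify_token defined below (assuming a configured secret_key; the subject string is illustrative):

```python
from app.core.security import create_access_token, verify_token

token = create_access_token({"sub": "user-uuid-here"})  # illustrative subject
claims = verify_token(token, token_type="access")

assert claims["sub"] == "user-uuid-here"
assert claims["type"] == "access"
assert "jti" in claims  # unique per token; reserved for future revocation lists
```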
37 | Currently, JTI is not validated against a database, but including it now 38 | allows for future implementation of token blacklisting without breaking 39 | existing clients. 40 | """ 41 | if not settings.secret_key: 42 | raise ValueError("Secret key is required for token creation") 43 | 44 | to_encode = data.copy() 45 | if "sub" not in to_encode: 46 | raise ValueError("Token payload must include a 'sub' claim") 47 | 48 | now = datetime.now(timezone.utc) 49 | expire = now + expires_delta 50 | 51 | to_encode.update({ 52 | "exp": expire, 53 | "type": token_type, 54 | "iat": now, 55 | "jti": str(uuid.uuid4()) # JWT ID for future token revocation support 56 | }) 57 | encoded_jwt = jwt.encode(to_encode, settings.secret_key, algorithm=settings.algorithm) 58 | return encoded_jwt 59 | 60 | 61 | def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str: 62 | """Create JWT access token.""" 63 | delta = expires_delta or timedelta(minutes=settings.access_token_expire_minutes) 64 | return _create_token(data, "access", delta) 65 | 66 | 67 | def create_refresh_token(data: dict, expires_delta: Optional[timedelta] = None) -> str: 68 | """Create JWT refresh token.""" 69 | delta = expires_delta or timedelta(days=settings.refresh_token_expire_days) 70 | return _create_token(data, "refresh", delta) 71 | 72 | 73 | def verify_token(token: str, token_type: str = "access") -> dict: 74 | """Verify and decode JWT token.""" 75 | try: 76 | payload = jwt.decode( 77 | token, 78 | settings.secret_key, 79 | algorithms=[settings.algorithm], 80 | options={"verify_aud": False} 81 | ) 82 | 83 | if payload.get("type") != token_type: 84 | raise JWTError("Invalid token type") 85 | # Check for required 'sub' field 86 | if "sub" not in payload: 87 | raise JWTError("Token missing required 'sub' field") 88 | return payload 89 | except ExpiredSignatureError: 90 | raise 91 | except JWTError as e: 92 | log_error(e) 93 | raise 94 | -------------------------------------------------------------------------------- /app/models/enums.py: -------------------------------------------------------------------------------- 1 | """ 2 | Enums and constants for the application. 
3 | """ 4 | from enum import Enum 5 | 6 | 7 | class MediaType(str, Enum): 8 | """Media types for journal entries.""" 9 | IMAGE = "image" 10 | VIDEO = "video" 11 | AUDIO = "audio" 12 | UNKNOWN = "unknown" 13 | 14 | 15 | class UploadStatus(str, Enum): 16 | """Upload status for media files.""" 17 | PENDING = "pending" 18 | PROCESSING = "processing" 19 | COMPLETED = "completed" 20 | FAILED = "failed" 21 | 22 | 23 | class MoodCategory(str, Enum): 24 | """Mood categories.""" 25 | POSITIVE = "positive" 26 | NEGATIVE = "negative" 27 | NEUTRAL = "neutral" 28 | 29 | 30 | class PromptCategory(str, Enum): 31 | """Categories for journaling prompts.""" 32 | # Self-awareness & emotional growth 33 | GRATITUDE = "gratitude" 34 | REFLECTION = "reflection" 35 | EMOTIONS = "emotions" 36 | MINDFULNESS = "mindfulness" 37 | SELF_DISCOVERY = "self_discovery" 38 | 39 | # Goals & productivity 40 | GOALS = "goals" 41 | PRODUCTIVITY = "productivity" 42 | GROWTH = "growth" # personal/professional improvement 43 | 44 | # Relationships & connection 45 | RELATIONSHIPS = "relationships" 46 | FAMILY = "family" 47 | LOVE = "love" 48 | SOCIAL = "social" 49 | 50 | # Creativity & imagination 51 | CREATIVITY = "creativity" 52 | DREAMS = "dreams" 53 | MEMORIES = "memories" 54 | 55 | # Well-being 56 | SELF_CARE = "self_care" 57 | HEALTH = "health" 58 | SPIRITUALITY = "spirituality" 59 | 60 | # Misc / catch-all 61 | GENERAL = "general" 62 | 63 | 64 | class Theme(str, Enum): 65 | """UI themes.""" 66 | LIGHT = "light" 67 | DARK = "dark" 68 | AUTO = "auto" 69 | 70 | 71 | class TokenType(str, Enum): 72 | """JWT token types.""" 73 | ACCESS = "access" 74 | REFRESH = "refresh" 75 | 76 | 77 | class UserRole(str, Enum): 78 | """User roles for authorization.""" 79 | ADMIN = "admin" 80 | USER = "user" 81 | 82 | 83 | class JournalColor(str, Enum): 84 | """Preset colors for journals.""" 85 | RED = "#EF4444" 86 | ORANGE = "#F97316" 87 | AMBER = "#F59E0B" 88 | YELLOW = "#EAB308" 89 | LIME = "#84CC16" 90 | GREEN = "#22C55E" 91 | EMERALD = "#10B981" 92 | TEAL = "#14B8A6" 93 | CYAN = "#06B6D4" 94 | SKY = "#0EA5E9" 95 | BLUE = "#3B82F6" 96 | INDIGO = "#6366F1" 97 | VIOLET = "#8B5CF6" 98 | PURPLE = "#A855F7" 99 | FUCHSIA = "#D946EF" 100 | PINK = "#EC4899" 101 | ROSE = "#F43F5E" 102 | SLATE = "#64748B" 103 | GRAY = "#6B7280" 104 | ZINC = "#71717A" 105 | NEUTRAL = "#737373" 106 | STONE = "#78716C" 107 | 108 | 109 | class JobStatus(str, Enum): 110 | """Status for import/export jobs.""" 111 | PENDING = "pending" 112 | RUNNING = "running" 113 | COMPLETED = "completed" 114 | FAILED = "failed" 115 | CANCELLED = "cancelled" 116 | 117 | 118 | class ImportSourceType(str, Enum): 119 | """Source types for imports.""" 120 | JOURNIV = "journiv" 121 | MARKDOWN = "markdown" 122 | DAYONE = "dayone" 123 | 124 | 125 | class ExportType(str, Enum): 126 | """Types of exports.""" 127 | FULL = "full" # Full user export 128 | JOURNAL = "journal" # Single journal export 129 | 130 | -------------------------------------------------------------------------------- /docker-compose.prod.sqlite.yml: -------------------------------------------------------------------------------- 1 | # Journiv Production Docker Compose (SQLite) 2 | # Recommended for single-user or small deployments. 
3 | # 4 | # Usage: 5 | # docker compose -f docker-compose.prod.sqlite.yml up -d 6 | # 7 | # Required Environment Variables: 8 | # SECRET_KEY - Generate with: python -c "import secrets; print(secrets.token_urlsafe(32))" 9 | # DOMAIN_NAME - Needed when running in same-origin SPA mode (ENABLE_CORS=false) 10 | 11 | services: 12 | redis: 13 | image: redis:7 14 | container_name: journiv-redis-cache 15 | restart: unless-stopped 16 | volumes: 17 | - redis_data:/data 18 | networks: 19 | - backend 20 | healthcheck: 21 | test: ["CMD", "redis-cli", "ping"] 22 | interval: 10s 23 | timeout: 5s 24 | retries: 5 25 | start_period: 10s 26 | 27 | celery-worker: 28 | image: swalabtech/journiv-app:${APP_VERSION:-latest} 29 | container_name: journiv-celery-worker 30 | command: celery -A app.core.celery_app worker --loglevel=info 31 | env_file: 32 | - .env 33 | environment: 34 | # Docker-specific: service names 35 | - SERVICE_ROLE=celery-worker 36 | - ENVIRONMENT=production 37 | - REDIS_URL=redis://redis:6379/0 38 | - CELERY_BROKER_URL=redis://redis:6379/0 39 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 40 | 41 | volumes: 42 | - app_data:/data 43 | depends_on: 44 | redis: 45 | condition: service_healthy 46 | networks: 47 | - backend 48 | restart: unless-stopped 49 | healthcheck: 50 | test: ["CMD-SHELL", "celery -A app.core.celery_app inspect ping --timeout=5 | grep -q pong"] 51 | interval: 30s 52 | timeout: 10s 53 | retries: 5 54 | start_period: 40s 55 | cpus: "1.0" 56 | mem_limit: 1g 57 | mem_reservation: 256m 58 | logging: 59 | driver: "json-file" 60 | options: 61 | max-size: "50m" 62 | max-file: "5" 63 | 64 | app: 65 | image: swalabtech/journiv-app:${APP_VERSION:-latest} 66 | container_name: journiv-sqlite-app 67 | ports: 68 | - "${APP_PORT:-8000}:8000" # Right side must remain 8000 69 | env_file: 70 | - .env 71 | environment: 72 | # Docker-specific: service names 73 | - SERVICE_ROLE=app 74 | - ENVIRONMENT=production 75 | - REDIS_URL=redis://redis:6379/0 76 | - CELERY_BROKER_URL=redis://redis:6379/0 77 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 78 | - RATE_LIMIT_STORAGE_URI=redis://redis:6379/1 79 | 80 | volumes: 81 | - app_data:/data 82 | depends_on: 83 | redis: 84 | condition: service_healthy 85 | networks: 86 | - backend 87 | - frontend 88 | restart: unless-stopped 89 | 90 | healthcheck: 91 | test: ["CMD-SHELL", "curl -f http://localhost:8000/api/v1/health"] 92 | interval: 30s 93 | timeout: 10s 94 | retries: 3 95 | start_period: 40s 96 | 97 | cpus: "2.0" 98 | mem_limit: 2g 99 | mem_reservation: 512m 100 | logging: 101 | driver: "json-file" 102 | options: 103 | max-size: "50m" 104 | max-file: "5" 105 | 106 | volumes: 107 | app_data: 108 | redis_data: 109 | 110 | networks: 111 | backend: 112 | driver: bridge 113 | frontend: 114 | driver: bridge 115 | -------------------------------------------------------------------------------- /app/models/journal.py: -------------------------------------------------------------------------------- 1 | """ 2 | Journal-related models. 
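A hedged sketch of the bookkeeping implied by the denormalized entry_count and total_words fields declared on this model; the real update logic lives in the service layer and may differ.

```python
from sqlmodel import Session

from app.core.time_utils import utc_now
from app.models.journal import Journal


def record_new_entry(db: Session, journal: Journal, word_count: int) -> None:
    # Illustrative counter maintenance when an entry is added.
    journal.entry_count += 1
    journal.total_words += word_count
    journal.last_entry_at = utc_now()
    journal.updated_at = utc_now()
    db.add(journal)
    db.commit()
```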
3 | """ 4 | import uuid 5 | from datetime import datetime 6 | from typing import List, Optional, TYPE_CHECKING 7 | 8 | from pydantic import field_validator 9 | from sqlalchemy import Column, ForeignKey, Enum as SAEnum 10 | from sqlmodel import Field, Relationship, Index, CheckConstraint 11 | 12 | from .base import BaseModel 13 | from .enums import JournalColor 14 | 15 | if TYPE_CHECKING: 16 | from .user import User 17 | from .entry import Entry 18 | 19 | 20 | class Journal(BaseModel, table=True): 21 | """ 22 | Journal model with enhanced features for better organization. 23 | """ 24 | __tablename__ = "journal" 25 | 26 | title: str = Field(..., min_length=1, max_length=200) 27 | description: Optional[str] = Field(None, max_length=1000) 28 | color: Optional[JournalColor] = Field( 29 | default=None, 30 | sa_column=Column( 31 | SAEnum(JournalColor, name="journal_color_enum"), 32 | nullable=True 33 | ) 34 | ) 35 | icon: Optional[str] = Field(None, max_length=50) 36 | user_id: uuid.UUID = Field( 37 | sa_column=Column( 38 | ForeignKey("user.id", ondelete="CASCADE"), 39 | nullable=False 40 | ) 41 | ) 42 | is_favorite: bool = Field(default=False) 43 | is_archived: bool = Field(default=False) 44 | entry_count: int = Field(default=0, ge=0) # Denormalized for performance 45 | total_words: int = Field(default=0, ge=0) # Denormalized for performance 46 | last_entry_at: Optional[datetime] = None 47 | 48 | # Relations 49 | user: "User" = Relationship(back_populates="journals") 50 | entries: List["Entry"] = Relationship( 51 | back_populates="journal", 52 | sa_relationship_kwargs={"cascade": "all, delete-orphan"} 53 | ) 54 | 55 | # Table constraints and indexes 56 | __table_args__ = ( 57 | # Composite indexes for common query patterns. 58 | Index('idx_journal_user_created', 'user_id', 'created_at'), 59 | Index('idx_journal_user_favorite', 'user_id', 'is_favorite'), 60 | Index('idx_journal_user_archived', 'user_id', 'is_archived'), 61 | # Constraints 62 | CheckConstraint('length(title) > 0', name='check_title_not_empty'), 63 | CheckConstraint('entry_count >= 0', name='check_entry_count_positive'), 64 | CheckConstraint('total_words >= 0', name='check_total_words_positive'), 65 | ) 66 | 67 | @field_validator('title') 68 | @classmethod 69 | def validate_title(cls, v): 70 | if not v or len(v.strip()) == 0: 71 | raise ValueError('Title cannot be empty') 72 | return v.strip() 73 | 74 | @field_validator('description') 75 | @classmethod 76 | def validate_description(cls, v): 77 | if v and len(v.strip()) == 0: 78 | return None 79 | return v.strip() if v else v 80 | 81 | @field_validator('color') 82 | @classmethod 83 | def validate_color(cls, v): 84 | if v is None: 85 | return v 86 | if isinstance(v, JournalColor): 87 | return v 88 | value = v.strip().upper() 89 | try: 90 | return JournalColor(value) 91 | except ValueError as exc: 92 | allowed = ", ".join(color.value for color in JournalColor) 93 | raise ValueError(f"Color must be one of predefined palette values: {allowed}") from exc 94 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # =============================================================== 2 | # PYTHON / FASTAPI BACKEND 3 | # =============================================================== 4 | 5 | # Bytecode and caches 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # Virtual environments 11 | .env 12 | .venv 13 | env/ 14 | venv/ 15 | ENV/ 16 | 17 | # Test / coverage 
artifacts 18 | .coverage 19 | .coverage.* 20 | htmlcov/ 21 | .pytest_cache/ 22 | .tox/ 23 | .nox/ 24 | .cache/ 25 | .mypy_cache/ 26 | .pytype/ 27 | .pyre/ 28 | cython_debug/ 29 | 30 | # Logs 31 | logs/ 32 | *.log 33 | app.log 34 | error.log 35 | access.log 36 | 37 | # Local databases 38 | *.db 39 | *.db-shm 40 | *.db-wal 41 | *.sqlite 42 | *.sqlite3 43 | data/ 44 | db.sqlite3 45 | db.sqlite3-journal 46 | 47 | # Alembic 48 | alembic/versions/__pycache__/ 49 | 50 | # =============================================================== 51 | # FLUTTER WEB FRONTEND 52 | # =============================================================== 53 | 54 | # Flutter build caches 55 | .dart_tool/ 56 | .packages 57 | .flutter-plugins 58 | .flutter-plugins-dependencies 59 | .flutter-versions 60 | .flutter_export_environment.sh 61 | .pub/ 62 | 63 | # Ignore intermediate build artifacts 64 | build/ 65 | 66 | # We DO NOT ignore web/ for flutter web app build output 67 | 68 | # For future extra Flutter project structure, ignore others: 69 | android/ 70 | ios/ 71 | linux/ 72 | macos/ 73 | windows/ 74 | test/ 75 | coverage/ 76 | integration_test/ 77 | 78 | # =============================================================== 79 | # MEDIA / USER FILES 80 | # =============================================================== 81 | 82 | media/ 83 | uploads/ 84 | user_media/ 85 | entry_media/ 86 | thumbnails/ 87 | generated/ 88 | backups/ 89 | exports/ 90 | imports/ 91 | 92 | # =============================================================== 93 | # CONFIG / ENVIRONMENT / SECRETS 94 | # =============================================================== 95 | 96 | .env* 97 | config.ini 98 | secrets.json 99 | .secrets 100 | .local/ 101 | local/ 102 | .envrc 103 | 104 | # =============================================================== 105 | # DEV / EDITOR / GIT / SYSTEM FILES 106 | # =============================================================== 107 | 108 | # Git 109 | .git/ 110 | .gitignore 111 | 112 | # IDEs 113 | .vscode/ 114 | .idea/ 115 | *.iml 116 | *.ipr 117 | *.iws 118 | 119 | # Swap / temp files 120 | *.swp 121 | *.swo 122 | *~ 123 | 124 | # macOS / Windows / Linux junk 125 | .DS_Store 126 | .AppleDouble 127 | .LSOverride 128 | ._* 129 | Thumbs.db 130 | Desktop.ini 131 | $RECYCLE.BIN/ 132 | .directory 133 | .fuse_hidden* 134 | .Trash-* 135 | .nfs* 136 | 137 | # =============================================================== 138 | # DOCKER / CI / DOCS / MISC 139 | # =============================================================== 140 | 141 | # Docker / Compose 142 | Dockerfile* 143 | *.dockerignore 144 | docker-compose.override.yml 145 | docker-compose.dev.yml 146 | 147 | # Scripts (exclude Python and shell scripts, but keep entrypoint and JSON data) 148 | scripts/__pycache__/ 149 | scripts/*.py 150 | scripts/*.sh 151 | # Exception: Include docker entrypoint script 152 | !scripts/docker-entrypoint.sh 153 | 154 | # Docs / site builds 155 | docs/_build/ 156 | docs/build/ 157 | site/ 158 | 159 | # AI notes / misc 160 | CLAUDE.md 161 | GEMINI.md 162 | -------------------------------------------------------------------------------- /tests/integration/test_cascade_delete.py: -------------------------------------------------------------------------------- 1 | """ 2 | Behavioural tests that assert cascades through the public API surface. 
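These tests exercise the DB-level ON DELETE CASCADE that the migrations declare (alongside the ORM-level delete-orphan cascade on Journal.entries). A minimal, self-contained demonstration of the database mechanism, with simplified stand-in tables:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = ON")  # SQLite enforces FKs only when enabled
conn.execute("CREATE TABLE journal (id INTEGER PRIMARY KEY)")
conn.execute(
    "CREATE TABLE entry (id INTEGER PRIMARY KEY,"
    " journal_id INTEGER REFERENCES journal(id) ON DELETE CASCADE)"
)
conn.execute("INSERT INTO journal VALUES (1)")
conn.execute("INSERT INTO entry VALUES (1, 1)")
conn.execute("DELETE FROM journal WHERE id = 1")

# The dependent entry row is removed by the database itself.
assert conn.execute("SELECT COUNT(*) FROM entry").fetchone()[0] == 0
```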
3 | """ 4 | from datetime import date 5 | 6 | from tests.integration.helpers import ( 7 | EndpointCase, 8 | UNKNOWN_UUID, 9 | assert_requires_authentication, 10 | sample_jpeg_bytes, 11 | ) 12 | from tests.lib import ApiUser, JournivApiClient 13 | 14 | 15 | def test_deleting_journal_removes_entries_and_media( 16 | api_client: JournivApiClient, 17 | api_user: ApiUser, 18 | journal_factory, 19 | entry_factory, 20 | ): 21 | """Deleting a journal should cascade entries and their media.""" 22 | journal = journal_factory(title="Cascade Journal") 23 | entry_one = entry_factory(journal=journal, title="First entry") 24 | entry_two = entry_factory(journal=journal, title="Second entry") 25 | 26 | api_client.upload_media( 27 | api_user.access_token, 28 | entry_id=entry_one["id"], 29 | filename="photo.jpg", 30 | content=sample_jpeg_bytes(), 31 | content_type="image/jpeg", 32 | ) 33 | 34 | entries_before = api_client.request( 35 | "GET", 36 | f"/entries/journal/{journal['id']}", 37 | token=api_user.access_token, 38 | ).json() 39 | assert len(entries_before) >= 2 40 | 41 | api_client.delete_journal(api_user.access_token, journal["id"]) 42 | 43 | after_delete = api_client.request( 44 | "GET", 45 | f"/entries/journal/{journal['id']}", 46 | token=api_user.access_token, 47 | ) 48 | assert after_delete.status_code in (404, 200) 49 | if after_delete.status_code == 200: 50 | assert after_delete.json() == [] 51 | 52 | # Verify entries are gone 53 | for entry in (entry_one, entry_two): 54 | response = api_client.request( 55 | "GET", f"/entries/{entry['id']}", token=api_user.access_token 56 | ) 57 | assert response.status_code == 404 58 | 59 | 60 | def test_deleting_entry_removes_related_artifacts( 61 | api_client: JournivApiClient, 62 | api_user: ApiUser, 63 | entry_factory, 64 | ): 65 | """Deleting an entry should remove pins, media, and mood logs associated with it.""" 66 | entry = entry_factory(title="Cascade Entry") 67 | moods = api_client.list_moods(api_user.access_token) 68 | if moods: 69 | api_client.create_mood_log( 70 | api_user.access_token, 71 | entry_id=entry["id"], 72 | mood_id=moods[0]["id"], 73 | logged_date=date.today().isoformat(), 74 | notes="Cascade mood", 75 | ) 76 | 77 | api_client.upload_media( 78 | api_user.access_token, 79 | entry_id=entry["id"], 80 | filename="entry-media.jpg", 81 | content=sample_jpeg_bytes(), 82 | content_type="image/jpeg", 83 | ) 84 | 85 | api_client.pin_entry(api_user.access_token, entry["id"]) 86 | api_client.delete_entry(api_user.access_token, entry["id"]) 87 | 88 | mood_logs = api_client.list_mood_logs(api_user.access_token) 89 | assert all(log["entry_id"] != entry["id"] for log in mood_logs) 90 | 91 | entries = api_client.list_entries(api_user.access_token, limit=50) 92 | assert all(item["id"] != entry["id"] for item in entries) 93 | 94 | 95 | def test_cascade_operations_require_auth(api_client: JournivApiClient): 96 | """Requests that mutate cascading resources must require auth.""" 97 | assert_requires_authentication( 98 | api_client, 99 | [ 100 | EndpointCase("DELETE", f"/journals/{UNKNOWN_UUID}"), 101 | EndpointCase("DELETE", f"/entries/{UNKNOWN_UUID}"), 102 | ], 103 | ) 104 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 
2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = alembic 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 10 | 11 | # sys.path path, will be prepended to sys.path if present. 12 | # defaults to the current working directory. 13 | prepend_sys_path = . 14 | 15 | # timezone to use when rendering the date within the migration file 16 | # as well as the filename. 17 | # If specified, requires the python-dateutil library that can be 18 | # installed by adding `alembic[tz]` to the pip requirements 19 | # string value is passed to dateutil.tz.gettz() 20 | # leave blank for localtime 21 | # timezone = 22 | 23 | # max length of characters to apply to the 24 | # "slug" field 25 | # truncate_slug_length = 40 26 | 27 | # set to 'true' to run the environment during 28 | # the 'revision' command, regardless of autogenerate 29 | # revision_environment = false 30 | 31 | # set to 'true' to allow .pyc and .pyo files without 32 | # a source .py file to be detected as revisions in the 33 | # versions/ directory 34 | # sourceless = false 35 | 36 | # version number format 37 | version_num_format = %%04d 38 | 39 | # version path separator; As mentioned above, this is the character used to split 40 | # version_locations. The default within new alembic.ini files is "os", which uses 41 | # os.pathsep. If this key is omitted entirely, it falls back to the legacy 42 | # behavior of splitting on spaces and/or commas. 43 | # Valid values for version_path_separator are: 44 | # 45 | # version_path_separator = : 46 | # version_path_separator = ; 47 | # version_path_separator = space 48 | version_path_separator = os 49 | 50 | # set to 'true' to search source files recursively 51 | # in each "version_locations" directory 52 | # new in Alembic version 1.10 53 | # recursive_version_locations = false 54 | 55 | # the output encoding used when revision files 56 | # are written from script.py.mako 57 | # output_encoding = utf-8 58 | 59 | # sqlalchemy.url = postgresql://journiv:journiv_password@localhost:5432/journiv_dev 60 | # Database URL is now configured in env.py from settings 61 | 62 | 63 | [post_write_hooks] 64 | # post_write_hooks defines scripts or Python functions that are run 65 | # on newly generated revision scripts. 
See the documentation for further 66 | # detail and examples 67 | 68 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 69 | # hooks = black 70 | # black.type = console_scripts 71 | # black.entrypoint = black 72 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 73 | 74 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 75 | # hooks = ruff 76 | # ruff.type = exec 77 | # ruff.executable = %(here)s/.venv/bin/ruff 78 | # ruff.options = --fix REVISION_SCRIPT_FILENAME 79 | 80 | # Logging configuration 81 | [loggers] 82 | keys = root,sqlalchemy,alembic 83 | 84 | [handlers] 85 | keys = console 86 | 87 | [formatters] 88 | keys = generic 89 | 90 | [logger_root] 91 | level = WARN 92 | handlers = console 93 | qualname = 94 | 95 | [logger_sqlalchemy] 96 | level = WARN 97 | handlers = 98 | qualname = sqlalchemy.engine 99 | 100 | [logger_alembic] 101 | level = INFO 102 | handlers = 103 | qualname = alembic 104 | 105 | [handler_console] 106 | class = StreamHandler 107 | args = (sys.stderr,) 108 | level = NOTSET 109 | formatter = generic 110 | 111 | [formatter_generic] 112 | format = %(levelname)-5.5s [%(name)s] %(message)s 113 | datefmt = %H:%M:%S 114 | -------------------------------------------------------------------------------- /app/api/v1/endpoints/security.py: -------------------------------------------------------------------------------- 1 | """ 2 | CSP (Content Security Policy) reporting endpoint. 3 | Handles CSP violation reports for security monitoring. 4 | """ 5 | from datetime import datetime 6 | from typing import Optional 7 | 8 | from fastapi import APIRouter, Request, HTTPException 9 | from pydantic import BaseModel 10 | 11 | from app.core.config import settings 12 | from app.core.csp_config import get_csp_config 13 | from app.core.logging_config import log_error 14 | 15 | router = APIRouter() 16 | 17 | 18 | class CSPViolationReport(BaseModel): 19 | """CSP violation report model.""" 20 | blocked_uri: Optional[str] = None 21 | column_number: Optional[int] = None 22 | document_uri: Optional[str] = None 23 | effective_directive: Optional[str] = None 24 | line_number: Optional[int] = None 25 | original_policy: Optional[str] = None 26 | referrer: Optional[str] = None 27 | source_file: Optional[str] = None 28 | status_code: Optional[int] = None 29 | violated_directive: Optional[str] = None 30 | 31 | 32 | class CSPReport(BaseModel): 33 | """CSP report wrapper.""" 34 | csp_report: CSPViolationReport 35 | 36 | 37 | @router.post( 38 | "/csp-report", 39 | responses={ 40 | 500: {"description": "Failed to process CSP report"}, 41 | } 42 | ) 43 | async def report_csp_violation( 44 | request: Request, 45 | report: CSPReport 46 | ): 47 | """ 48 | Handle CSP violation reports. 49 | 50 | Receives CSP violation reports from browsers when Content Security Policy violations occur. 
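
    Example request body (illustrative): browsers POST a JSON document whose
    top-level key is "csp-report" per the report-uri mechanism; this schema
    expects the snake_cased key instead, e.g.

        {"csp_report": {"violated_directive": "script-src",
                        "blocked_uri": "https://evil.example/x.js"}}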
51 | """ 52 | try: 53 | # Extract client information 54 | client_ip = request.client.host if request.client else "unknown" 55 | user_agent = request.headers.get("user-agent", "unknown") 56 | 57 | # Log the violation 58 | violation_data = { 59 | "timestamp": datetime.utcnow().isoformat(), 60 | "client_ip": client_ip, 61 | "user_agent": user_agent, 62 | "violation": report.csp_report.dict(), 63 | "request_id": getattr(request.state, "request_id", None), 64 | } 65 | 66 | # Log as warning for monitoring 67 | log_error( 68 | Exception(f"CSP Violation: {report.csp_report.violated_directive} blocked URI: {report.csp_report.blocked_uri}"), 69 | request_id=getattr(request.state, "request_id", ""), 70 | user_email="" 71 | ) 72 | 73 | # TODO: Implement the following: 74 | # 1. Store violations in database for analysis 75 | # 2. Send alerts for critical violations 76 | # 3. Aggregate violation statistics 77 | 78 | return {"status": "received", "message": "CSP violation report received"} 79 | 80 | except Exception as e: 81 | log_error(e, request_id="", user_email="") 82 | raise HTTPException(status_code=500, detail="Failed to process CSP report") 83 | 84 | 85 | @router.get( 86 | "/csp-status", 87 | responses={ 88 | 200: {"description": "CSP configuration status retrieved"}, 89 | } 90 | ) 91 | async def get_csp_status(): 92 | """ 93 | Get CSP configuration status. 94 | 95 | Returns current CSP configuration and monitoring status. 96 | """ 97 | csp_config = get_csp_config(settings.environment) 98 | 99 | return { 100 | "csp_enabled": csp_config.is_csp_enabled(), 101 | "environment": settings.environment, 102 | "reporting_enabled": csp_config.is_reporting_enabled(), 103 | "hsts_enabled": csp_config.is_hsts_enabled(), 104 | "report_uri": csp_config.get_report_uri(), 105 | "timestamp": datetime.utcnow().isoformat(), 106 | "message": "CSP is active and monitoring violations" if csp_config.is_csp_enabled() else "CSP is disabled" 107 | } 108 | -------------------------------------------------------------------------------- /app/utils/import_export/date_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Date and time utilities for import/export operations. 3 | 4 | Handles timezone conversion, parsing, and formatting of dates. 5 | """ 6 | from datetime import datetime, timezone 7 | from typing import Optional, Union 8 | from dateutil import parser as date_parser 9 | 10 | 11 | def parse_datetime(date_str: Union[str, datetime, int, float]) -> datetime: 12 | """ 13 | Parse a datetime string or object into a datetime. 
14 | 15 | Accepts: 16 | - ISO 8601 strings (with or without timezone) 17 | - datetime objects 18 | - Unix timestamps (as int, float, or strings) 19 | 20 | Args: 21 | date_str: Date string, datetime object, or unix timestamp (int/float) 22 | 23 | Returns: 24 | Parsed datetime object 25 | 26 | Raises: 27 | ValueError: If the date string cannot be parsed 28 | TypeError: If the input type is unsupported 29 | """ 30 | if isinstance(date_str, datetime): 31 | return date_str 32 | elif isinstance(date_str, (int, float)): 33 | # Unix timestamp 34 | return datetime.fromtimestamp(date_str, tz=timezone.utc) 35 | elif isinstance(date_str, str): 36 | # Try parsing as unix timestamp first 37 | try: 38 | timestamp = float(date_str) 39 | return datetime.fromtimestamp(timestamp, tz=timezone.utc) 40 | except (ValueError, OSError): 41 | pass 42 | 43 | # Parse as date string 44 | try: 45 | dt = date_parser.parse(date_str) 46 | return dt 47 | except (ValueError, TypeError) as e: 48 | raise ValueError(f"Unable to parse date: {date_str}") from e 49 | else: 50 | # Defensive check for unexpected types (e.g., None, list, dict, etc.) 51 | raise TypeError(f"Unsupported date type: {type(date_str)}") 52 | 53 | 54 | def ensure_utc(dt: datetime) -> datetime: 55 | """ 56 | Ensure a datetime is in UTC timezone. 57 | 58 | Args: 59 | dt: Datetime object (may be naive or timezone-aware) 60 | 61 | Returns: 62 | Datetime in UTC timezone 63 | """ 64 | if dt.tzinfo is None: 65 | # Naive datetime - assume it's already UTC 66 | return dt.replace(tzinfo=timezone.utc) 67 | 68 | # Convert to UTC 69 | return dt.astimezone(timezone.utc) 70 | 71 | 72 | def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: 73 | """ 74 | Format a datetime object to string. 75 | 76 | Args: 77 | dt: Datetime object to format 78 | format_str: strftime format string 79 | 80 | Returns: 81 | Formatted datetime string 82 | """ 83 | return dt.strftime(format_str) 84 | 85 | 86 | def normalize_datetime(date_input: Union[str, datetime, int, float]) -> datetime: 87 | """ 88 | Parse and normalize a datetime to UTC. 89 | 90 | Combines parse_datetime and ensure_utc for convenience. 91 | 92 | Args: 93 | date_input: Date string, datetime object, or unix timestamp 94 | 95 | Returns: 96 | Datetime in UTC timezone 97 | 98 | Raises: 99 | ValueError: If the date cannot be parsed 100 | """ 101 | dt = parse_datetime(date_input) 102 | return ensure_utc(dt) 103 | 104 | 105 | def safe_parse_datetime(date_input: Union[str, datetime, int, float, None]) -> Optional[datetime]: 106 | """ 107 | Safely parse a datetime, returning None if parsing fails. 108 | 109 | Args: 110 | date_input: Date to parse (or None) 111 | 112 | Returns: 113 | Parsed datetime in UTC, or None if parsing fails or input is None 114 | """ 115 | if date_input is None: 116 | return None 117 | 118 | try: 119 | return normalize_datetime(date_input) 120 | except (ValueError, TypeError, OSError): 121 | return None 122 | -------------------------------------------------------------------------------- /app/api/v1/endpoints/health.py: -------------------------------------------------------------------------------- 1 | """ 2 | Simple health check endpoint. 
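Exposes /health (service and database status) and /memory (system and
process memory metrics).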
3 | """ 4 | import os 5 | from datetime import datetime, timezone 6 | from typing import Dict, Any, Annotated 7 | 8 | from fastapi import APIRouter, Depends, HTTPException 9 | from sqlmodel import Session, text 10 | 11 | try: 12 | import psutil 13 | PSUTIL_AVAILABLE = True 14 | except ImportError: 15 | PSUTIL_AVAILABLE = False 16 | 17 | from app.core.database import get_session 18 | from app.core.logging_config import log_error 19 | from app.core.config import settings 20 | 21 | router = APIRouter() 22 | 23 | 24 | def _utc_now_iso() -> str: 25 | return datetime.now(timezone.utc).isoformat() 26 | 27 | 28 | @router.get( 29 | "/health", 30 | response_model=Dict[str, Any], 31 | responses={ 32 | 500: {"description": "Internal server error"}, 33 | } 34 | ) 35 | async def health_check(session: Annotated[Session, Depends(get_session)]): 36 | """ 37 | Detailed health check with database status. 38 | 39 | Returns degraded status if database is unreachable but service is running. 40 | """ 41 | try: 42 | # Check database connection 43 | db_status = "connected" 44 | try: 45 | session.exec(text("SELECT 1")).first() 46 | except Exception as e: 47 | db_status = f"disconnected: {str(e)}" 48 | 49 | return { 50 | "status": "healthy" if db_status == "connected" else "degraded", 51 | "timestamp": _utc_now_iso(), 52 | "service": settings.app_name, 53 | "version": settings.app_version, 54 | "database": db_status 55 | } 56 | except Exception as e: 57 | log_error(e, request_id=None) 58 | raise HTTPException(status_code=500, detail="Health check failed") 59 | 60 | 61 | @router.get( 62 | "/memory", 63 | response_model=Dict[str, Any], 64 | responses={ 65 | 500: {"description": "Internal server error"}, 66 | } 67 | ) 68 | async def memory_status(): 69 | """ 70 | Get current memory usage status. 71 | 72 | Returns system and process memory metrics for monitoring. 73 | """ 74 | try: 75 | if not PSUTIL_AVAILABLE: 76 | return { 77 | "status": "unavailable", 78 | "timestamp": _utc_now_iso(), 79 | "message": "psutil not available - memory monitoring disabled" 80 | } 81 | 82 | # Get system memory info 83 | memory = psutil.virtual_memory() 84 | 85 | # Get current process memory info 86 | process = psutil.Process(os.getpid()) 87 | process_memory = process.memory_info() 88 | 89 | # Calculate memory usage percentage 90 | memory_percent = memory.percent 91 | process_memory_mb = process_memory.rss / 1024 / 1024 92 | 93 | # Determine status based on usage 94 | if memory_percent > 90: 95 | status = "critical" 96 | elif memory_percent > 80: 97 | status = "warning" 98 | else: 99 | status = "ok" 100 | 101 | return { 102 | "status": status, 103 | "timestamp": _utc_now_iso(), 104 | "system_memory": { 105 | "total_mb": round(memory.total / 1024 / 1024, 1), 106 | "used_mb": round(memory.used / 1024 / 1024, 1), 107 | "available_mb": round(memory.available / 1024 / 1024, 1), 108 | "percent_used": round(memory_percent, 1) 109 | }, 110 | "process_memory": { 111 | "rss_mb": round(process_memory_mb, 1), 112 | "vms_mb": round(process_memory.vms / 1024 / 1024, 1) 113 | } 114 | } 115 | except Exception as e: 116 | log_error(e, request_id=None) 117 | raise HTTPException(status_code=500, detail="Failed to get memory status") 118 | -------------------------------------------------------------------------------- /app/schemas/entry.py: -------------------------------------------------------------------------------- 1 | """ 2 | Entry schemas. 
3 | """ 4 | import uuid 5 | from datetime import datetime, date 6 | from typing import Optional 7 | 8 | from pydantic import BaseModel, validator 9 | 10 | from app.schemas.base import TimestampMixin 11 | 12 | 13 | class EntryBase(BaseModel): 14 | """Base entry schema.""" 15 | title: str 16 | content: str 17 | entry_date: Optional[date] = None # Allows backdating/future-dating entries 18 | entry_datetime_utc: Optional[datetime] = None 19 | entry_timezone: Optional[str] = None 20 | location: Optional[str] = None 21 | weather: Optional[str] = None 22 | 23 | 24 | class EntryCreate(EntryBase): 25 | """Entry creation schema.""" 26 | journal_id: uuid.UUID 27 | prompt_id: Optional[uuid.UUID] = None 28 | 29 | @validator('title') 30 | def validate_title_not_empty(cls, v): 31 | if not v or len(v.strip()) == 0: 32 | raise ValueError('Title cannot be empty') 33 | return v.strip() 34 | 35 | 36 | class EntryUpdate(BaseModel): 37 | """Entry update schema.""" 38 | title: Optional[str] = None 39 | content: Optional[str] = None 40 | entry_date: Optional[date] = None 41 | entry_datetime_utc: Optional[datetime] = None 42 | entry_timezone: Optional[str] = None 43 | location: Optional[str] = None 44 | weather: Optional[str] = None 45 | is_pinned: Optional[bool] = None 46 | journal_id: Optional[uuid.UUID] = None 47 | 48 | 49 | class EntryResponse(EntryBase, TimestampMixin): 50 | """Entry response schema.""" 51 | id: uuid.UUID 52 | journal_id: uuid.UUID 53 | prompt_id: Optional[uuid.UUID] = None 54 | entry_date: date # Override to make it required in response 55 | entry_datetime_utc: datetime 56 | entry_timezone: str 57 | word_count: int 58 | is_pinned: bool 59 | user_id: uuid.UUID 60 | created_at: datetime 61 | updated_at: datetime 62 | 63 | 64 | class EntryPreviewResponse(TimestampMixin): 65 | """Entry preview schema for listings (truncated content).""" 66 | id: uuid.UUID 67 | title: str 68 | content: str # Truncated by endpoint 69 | journal_id: uuid.UUID 70 | created_at: datetime 71 | entry_date: date 72 | entry_datetime_utc: datetime 73 | entry_timezone: str 74 | 75 | 76 | from app.models.enums import MediaType, UploadStatus 77 | 78 | 79 | class EntryMediaBase(BaseModel): 80 | """Base entry media schema.""" 81 | media_type: MediaType 82 | file_path: str 83 | original_filename: Optional[str] = None 84 | file_size: int 85 | mime_type: str 86 | thumbnail_path: Optional[str] = None 87 | duration: Optional[int] = None 88 | width: Optional[int] = None 89 | height: Optional[int] = None 90 | alt_text: Optional[str] = None 91 | upload_status: UploadStatus = UploadStatus.PENDING 92 | file_metadata: Optional[str] = None 93 | 94 | 95 | class EntryMediaCreate(EntryMediaBase): 96 | """Entry media creation schema.""" 97 | entry_id: uuid.UUID 98 | checksum: Optional[str] = None 99 | 100 | 101 | class EntryMediaResponse(EntryMediaBase, TimestampMixin): 102 | """Entry media response schema.""" 103 | id: uuid.UUID 104 | entry_id: uuid.UUID 105 | created_at: datetime 106 | checksum: Optional[str] = None 107 | processing_error: Optional[str] = None 108 | 109 | def model_dump(self, **kwargs): 110 | """Custom serialization to ensure enums are converted to strings.""" 111 | data = super().model_dump(**kwargs) 112 | # Convert enums to their string values 113 | if 'media_type' in data and hasattr(data['media_type'], 'value'): 114 | data['media_type'] = data['media_type'].value 115 | if 'upload_status' in data and hasattr(data['upload_status'], 'value'): 116 | data['upload_status'] = data['upload_status'].value 117 | return data 118 | 
-------------------------------------------------------------------------------- /tests/integration/helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utility helpers shared across the integration test suite. 3 | """ 4 | from __future__ import annotations 5 | 6 | from dataclasses import dataclass 7 | from typing import Any, Iterable, Sequence 8 | 9 | from tests.lib import JournivApiClient 10 | 11 | 12 | UNKNOWN_UUID = "00000000-0000-0000-0000-000000000000" 13 | 14 | 15 | @dataclass(frozen=True) 16 | class EndpointCase: 17 | """ 18 | Declarative representation of an endpoint invocation used by helpers. 19 | """ 20 | 21 | method: str 22 | path: str 23 | json: dict[str, Any] | None = None 24 | params: dict[str, Any] | None = None 25 | data: dict[str, Any] | None = None 26 | files: dict[str, Any] | None = None 27 | headers: dict[str, str] | None = None 28 | description: str | None = None 29 | 30 | def label(self) -> str: 31 | return self.description or f"{self.method} {self.path}" 32 | 33 | 34 | def _exercise_cases( 35 | api_client: JournivApiClient, 36 | cases: Iterable[EndpointCase], 37 | *, 38 | token: str | None = None, 39 | ): 40 | for case in cases: 41 | request_kwargs: dict[str, Any] = {} 42 | if case.json is not None: 43 | request_kwargs["json"] = case.json 44 | if case.params is not None: 45 | request_kwargs["params"] = case.params 46 | if case.data is not None: 47 | request_kwargs["data"] = case.data 48 | if case.files is not None: 49 | request_kwargs["files"] = case.files 50 | if case.headers is not None: 51 | request_kwargs["headers"] = case.headers 52 | 53 | response = api_client.request( 54 | case.method, 55 | case.path, 56 | token=token, 57 | **request_kwargs, 58 | ) 59 | yield case, response 60 | 61 | 62 | def _format_failure(case: EndpointCase, received: int, expected: Sequence[int]) -> str: 63 | return f"{case.label()} returned {received}, expected one of {tuple(expected)}" 64 | 65 | 66 | def assert_status_codes( 67 | api_client: JournivApiClient, 68 | cases: Iterable[EndpointCase], 69 | *, 70 | token: str | None = None, 71 | expected_status: Sequence[int] = (200,), 72 | ): 73 | """ 74 | Execute a batch of endpoint cases asserting their HTTP status codes. 75 | """ 76 | responses = [] 77 | for case, response in _exercise_cases(api_client, cases, token=token): 78 | assert ( 79 | response.status_code in expected_status 80 | ), _format_failure(case, response.status_code, expected_status) 81 | responses.append(response) 82 | return responses 83 | 84 | 85 | def assert_requires_authentication( 86 | api_client: JournivApiClient, cases: Iterable[EndpointCase] 87 | ) -> None: 88 | """ 89 | Assert that each endpoint rejects anonymous callers with HTTP 401. 90 | """ 91 | assert_status_codes(api_client, cases, expected_status=(401,)) 92 | 93 | 94 | def assert_not_found( 95 | api_client: JournivApiClient, 96 | token: str, 97 | cases: Iterable[EndpointCase], 98 | ) -> None: 99 | """ 100 | Assert that each endpoint returns HTTP 404 for missing identifiers. 
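
    Example (illustrative):

        assert_not_found(
            api_client,
            token,
            [EndpointCase("GET", f"/entries/{UNKNOWN_UUID}")],
        )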
101 | """ 102 | assert_status_codes(api_client, cases, token=token, expected_status=(404,)) 103 | 104 | 105 | def sample_jpeg_bytes() -> bytes: 106 | """Provide a valid, tiny JPEG payload accepted by the media endpoints.""" 107 | return ( 108 | b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x00\x00\x01\x00\x01\x00\x00" 109 | b"\xff\xdb\x00C\x00\x08\x06\x06\x07\x06\x05\x08\x07\x07\x07" 110 | b"\xff\xc0\x00\x11\x08\x00\x01\x00\x01\x03\x01\x11\x00\x02\x11\x01\x03\x11\x01" 111 | b"\xff\xc4\x00\x14\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" 112 | b"\xff\xda\x00\x0c\x03\x01\x00\x02\x11\x03\x11\x00?\x00\xff\xd9" 113 | ) 114 | -------------------------------------------------------------------------------- /app/middleware/csp_middleware.py: -------------------------------------------------------------------------------- 1 | """ 2 | Content Security Policy (CSP) middleware for FastAPI. 3 | Provides comprehensive security headers for the Journiv web application. 4 | """ 5 | import logging 6 | from typing import Optional 7 | 8 | from fastapi import Request, Response 9 | from starlette.middleware.base import BaseHTTPMiddleware 10 | 11 | from app.core.csp_config import get_csp_config 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | class CSPMiddleware(BaseHTTPMiddleware): 17 | """ 18 | Content Security Policy middleware for FastAPI. 19 | 20 | Implements comprehensive security headers including: 21 | - Content Security Policy (CSP) 22 | - Other security headers (HSTS, X-Frame-Options, etc.) 23 | - Environment-specific configurations 24 | """ 25 | 26 | def __init__( 27 | self, 28 | app, 29 | environment: str = "development", 30 | enable_csp: bool = True, 31 | enable_hsts: bool = True, 32 | enable_csp_reporting: bool = False, 33 | csp_report_uri: Optional[str] = None, 34 | ): 35 | super().__init__(app) 36 | self.environment = environment 37 | self.csp_config = get_csp_config(environment) 38 | 39 | # Override config with explicit parameters if provided 40 | if enable_csp is not None: 41 | self.csp_config._config["enable_csp"] = enable_csp 42 | if enable_hsts is not None: 43 | self.csp_config._config["enable_hsts"] = enable_hsts 44 | if enable_csp_reporting is not None: 45 | self.csp_config._config["enable_csp_reporting"] = enable_csp_reporting 46 | if csp_report_uri is not None: 47 | self.csp_config._config["csp_report_uri"] = csp_report_uri 48 | 49 | async def dispatch(self, request: Request, call_next): 50 | """Process request and add security headers.""" 51 | response = await call_next(request) 52 | 53 | # Add security headers 54 | self._add_security_headers(request, response) 55 | 56 | return response 57 | 58 | def _add_security_headers(self, request: Request, response: Response): 59 | """Add comprehensive security headers to the response.""" 60 | 61 | # Get base URL for CSP policy 62 | base_url = f"{request.url.scheme}://{request.url.netloc}" 63 | 64 | # Get security headers from config 65 | security_headers = self.csp_config.get_security_headers(base_url) 66 | 67 | # Add all security headers 68 | for header_name, header_value in security_headers.items(): 69 | response.headers[header_name] = header_value 70 | 71 | logger.debug(f"Added security headers for {request.url.path}") 72 | 73 | 74 | def create_csp_middleware( 75 | environment: str = "development", 76 | enable_csp: bool = True, 77 | enable_hsts: bool = True, 78 | enable_csp_reporting: bool = False, 79 | csp_report_uri: Optional[str] = None, 80 | ): 81 | """ 82 | Factory function to create CSP middleware with 
configuration. 83 | 84 | Args: 85 | environment: Environment (development/production) 86 | enable_csp: Enable Content Security Policy 87 | enable_hsts: Enable HTTP Strict Transport Security 88 | enable_csp_reporting: Enable CSP violation reporting 89 | csp_report_uri: URI for CSP violation reports 90 | 91 | Returns: 92 | CSPMiddleware class configured with parameters 93 | """ 94 | class ConfiguredCSPMiddleware(CSPMiddleware): 95 | def __init__(self, app): 96 | super().__init__( 97 | app=app, 98 | environment=environment, 99 | enable_csp=enable_csp, 100 | enable_hsts=enable_hsts, 101 | enable_csp_reporting=enable_csp_reporting, 102 | csp_report_uri=csp_report_uri, 103 | ) 104 | 105 | return ConfiguredCSPMiddleware 106 | 107 | -------------------------------------------------------------------------------- /docker-compose.dev.postgres.yml: -------------------------------------------------------------------------------- 1 | # Journiv Development Docker Compose (PostgreSQL) 2 | # Recommended for testing PostgreSQL-specific features or multi-user scenarios. 3 | # 4 | # Usage: 5 | # docker compose -f docker-compose.dev.postgres.yml up -d 6 | # Note: dev compose files uses the main tag (docker image from main # branch) for development purposes. Do not run this for production # use prod compose files instead which uses the latest tag. 7 | 8 | services: 9 | postgres: 10 | image: postgres:15 11 | container_name: journiv-dev-postgres 12 | # Expose the port to the host in dev mode for local development 13 | ports: 14 | - "5432:5432" 15 | healthcheck: 16 | test: 17 | [ 18 | "CMD-SHELL", 19 | "pg_isready -U ${POSTGRES_USER:-journiv} -d ${POSTGRES_DB:-journiv_dev}", 20 | ] 21 | interval: 10s 22 | timeout: 5s 23 | retries: 5 24 | start_period: 10s 25 | environment: 26 | - POSTGRES_USER=${POSTGRES_USER:-journiv} 27 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} 28 | - POSTGRES_DB=${POSTGRES_DB:-journiv_dev} 29 | volumes: 30 | - postgres_data:/var/lib/postgresql/data 31 | 32 | redis: 33 | image: redis:7 34 | container_name: journiv-dev-redis 35 | restart: unless-stopped 36 | volumes: 37 | - redis_data:/data 38 | healthcheck: 39 | test: ["CMD", "redis-cli", "ping"] 40 | interval: 10s 41 | timeout: 5s 42 | retries: 5 43 | start_period: 10s 44 | 45 | celery-worker: 46 | build: . 47 | container_name: journiv-dev-celery-worker 48 | command: celery -A app.core.celery_app worker --loglevel=info 49 | depends_on: 50 | postgres: 51 | condition: service_healthy 52 | redis: 53 | condition: service_healthy 54 | env_file: 55 | - .env 56 | environment: 57 | # Docker-specific: service names 58 | - SERVICE_ROLE=celery-worker 59 | - DB_DRIVER=postgres 60 | - POSTGRES_HOST=postgres 61 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} 62 | - ENVIRONMENT=development 63 | - REDIS_URL=redis://redis:6379/0 64 | - CELERY_BROKER_URL=redis://redis:6379/0 65 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 66 | 67 | volumes: 68 | - .:/app 69 | - ./data:/data 70 | 71 | healthcheck: 72 | test: ["CMD-SHELL", "celery -A app.core.celery_app inspect ping --timeout=5 | grep -q pong"] 73 | interval: 30s 74 | timeout: 10s 75 | retries: 5 76 | start_period: 40s 77 | 78 | deploy: 79 | resources: 80 | limits: 81 | memory: 512MB 82 | reservations: 83 | memory: 256MB 84 | 85 | app: 86 | build: . 
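    # Built from the local Dockerfile, like the celery-worker service above;
    # the prod compose files pull a published image instead.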
87 | container_name: journiv-dev-postgres-app 88 | entrypoint: ["/app/scripts/docker-entrypoint-dev.sh"] 89 | ports: 90 | - "${APP_PORT:-8000}:8000" 91 | depends_on: 92 | postgres: 93 | condition: service_healthy 94 | redis: 95 | condition: service_healthy 96 | env_file: 97 | - .env 98 | environment: 99 | # Docker-specific: service names 100 | - SERVICE_ROLE=app 101 | - DB_DRIVER=postgres 102 | - POSTGRES_HOST=postgres 103 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} 104 | - ENVIRONMENT=development 105 | - REDIS_URL=redis://redis:6379/0 106 | - CELERY_BROKER_URL=redis://redis:6379/0 107 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 108 | 109 | volumes: 110 | - .:/app 111 | - ./data:/data 112 | 113 | deploy: 114 | resources: 115 | limits: 116 | memory: 512MB 117 | reservations: 118 | memory: 256MB 119 | 120 | healthcheck: 121 | test: ["CMD-SHELL", "curl -f http://localhost:8000/api/v1/health"] 122 | interval: 30s 123 | timeout: 10s 124 | retries: 3 125 | start_period: 40s 126 | 127 | volumes: 128 | postgres_data: 129 | redis_data: 130 | 131 | networks: 132 | default: 133 | driver: bridge 134 | -------------------------------------------------------------------------------- /alembic/versions/abc123def456_add_user_role_column.py: -------------------------------------------------------------------------------- 1 | """Add user role column for admin/user management 2 | 3 | Revision ID: abc123def456 4 | Revises: 7c52fcc89c83 5 | Create Date: 2025-01-28 00:00:00.000000 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'abc123def456' 14 | down_revision = '7c52fcc89c83' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | """Add role column to user table and promote first user to admin.""" 21 | connection = op.get_bind() 22 | is_sqlite = connection.dialect.name == "sqlite" 23 | 24 | if is_sqlite: 25 | op.execute(''' 26 | CREATE TABLE user_new ( 27 | id TEXT NOT NULL, 28 | created_at DATETIME NOT NULL, 29 | updated_at DATETIME NOT NULL, 30 | email TEXT NOT NULL, 31 | password TEXT NOT NULL, 32 | name TEXT NOT NULL, 33 | role TEXT NOT NULL DEFAULT 'user', 34 | is_active BOOLEAN NOT NULL, 35 | profile_picture_url TEXT, 36 | last_login_at DATETIME, 37 | PRIMARY KEY (id), 38 | UNIQUE (email), 39 | CHECK(length(name) > 0) 40 | ) 41 | ''') 42 | 43 | op.execute(''' 44 | INSERT INTO user_new 45 | SELECT id, created_at, updated_at, email, password, name, 46 | 'user', is_active, profile_picture_url, last_login_at 47 | FROM user 48 | ''') 49 | 50 | op.execute(''' 51 | UPDATE user_new 52 | SET role = 'admin' 53 | WHERE id = ( 54 | SELECT id FROM user_new 55 | ORDER BY created_at ASC 56 | LIMIT 1 57 | ) 58 | ''') 59 | 60 | op.execute('DROP TABLE user') 61 | op.execute('ALTER TABLE user_new RENAME TO user') 62 | 63 | op.execute('CREATE INDEX idx_user_active ON user (is_active)') 64 | op.execute('CREATE UNIQUE INDEX ix_user_email ON user (email)') 65 | op.execute('CREATE INDEX ix_user_id ON user (id)') 66 | else: 67 | op.add_column('user', sa.Column('role', sa.String(length=20), nullable=False, server_default='user')) 68 | 69 | op.execute(sa.text(''' 70 | UPDATE "user" 71 | SET role = 'admin' 72 | WHERE id = ( 73 | SELECT id FROM "user" 74 | ORDER BY created_at ASC 75 | LIMIT 1 76 | ) 77 | ''')) 78 | 79 | 80 | def downgrade() -> None: 81 | """Remove role column from user table.""" 82 | connection = op.get_bind() 83 | is_sqlite = connection.dialect.name == "sqlite" 84 | 85 | if is_sqlite: 86 
| op.execute(''' 87 | CREATE TABLE user_new ( 88 | id TEXT NOT NULL, 89 | created_at DATETIME NOT NULL, 90 | updated_at DATETIME NOT NULL, 91 | email TEXT NOT NULL, 92 | password TEXT NOT NULL, 93 | name TEXT NOT NULL, 94 | is_active BOOLEAN NOT NULL, 95 | profile_picture_url TEXT, 96 | last_login_at DATETIME, 97 | PRIMARY KEY (id), 98 | UNIQUE (email), 99 | CHECK(length(name) > 0) 100 | ) 101 | ''') 102 | 103 | op.execute(''' 104 | INSERT INTO user_new 105 | SELECT id, created_at, updated_at, email, password, name, 106 | is_active, profile_picture_url, last_login_at 107 | FROM user 108 | ''') 109 | 110 | op.execute('DROP TABLE user') 111 | op.execute('ALTER TABLE user_new RENAME TO user') 112 | 113 | op.execute('CREATE INDEX idx_user_active ON user (is_active)') 114 | op.execute('CREATE UNIQUE INDEX ix_user_email ON user (email)') 115 | op.execute('CREATE INDEX ix_user_id ON user (id)') 116 | else: 117 | op.drop_column('user', 'role') 118 | -------------------------------------------------------------------------------- /scripts/fresh_migration.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Regenerates the initial Alembic migration for a clean SQLite database. 4 | # All existing migration files and the SQLite database file are removed. 5 | 6 | set -euo pipefail 7 | 8 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 9 | PROJECT_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" 10 | cd "${PROJECT_ROOT}" 11 | 12 | echo "==========================================" 13 | echo "Fresh Initial Migration Generator (SQLite)" 14 | echo "==========================================" 15 | echo "" 16 | echo "WARNING: This will delete your local SQLite database file and migrations." 17 | echo "Press Ctrl+C within 5 seconds to cancel..." 18 | sleep 5 19 | 20 | # Capture DATABASE_URL passed via environment so it can override .env 21 | CLI_DATABASE_URL="${DATABASE_URL:-}" 22 | 23 | # Load environment variables if .env exists so DATABASE_URL is available 24 | if [ -f ".env" ]; then 25 | set -o allexport 26 | # shellcheck disable=SC1091 27 | source .env 28 | set +o allexport 29 | fi 30 | 31 | # Allow CLI-provided DATABASE_URL to take precedence 32 | if [[ -n "${CLI_DATABASE_URL}" ]]; then 33 | DATABASE_URL="${CLI_DATABASE_URL}" 34 | fi 35 | 36 | DATABASE_URL=${DATABASE_URL:-"sqlite:///./journiv.db"} 37 | 38 | if [[ "${DATABASE_URL}" != sqlite://* ]]; then 39 | echo "ERROR: This script currently only supports SQLite DATABASE_URL values." 40 | echo "Current DATABASE_URL='${DATABASE_URL}'" 41 | exit 1 42 | fi 43 | 44 | # Resolve the SQLite file path using Python for reliability 45 | DB_PATH="$(python3 - "${DATABASE_URL}" "${PROJECT_ROOT}" <<'PY' 46 | import os 47 | import sys 48 | from sqlalchemy.engine import make_url 49 | 50 | raw_url = sys.argv[1] 51 | project_root = sys.argv[2] 52 | 53 | url = make_url(raw_url) 54 | 55 | if url.drivername != "sqlite": 56 | sys.exit(0) 57 | 58 | database = url.database or "" 59 | 60 | if database in ("", ":memory:"): 61 | print(":memory:") 62 | else: 63 | if not os.path.isabs(database): 64 | database = os.path.join(project_root, database) 65 | print(os.path.abspath(database)) 66 | PY 67 | )" 68 | 69 | if [[ -z "${DB_PATH}" ]]; then 70 | echo "ERROR: Unable to resolve SQLite path from DATABASE_URL='${DATABASE_URL}'." 71 | exit 1 72 | fi 73 | 74 | if [[ "${DB_PATH}" == ":memory:" ]]; then 75 | echo "" 76 | echo "ℹ︎ DATABASE_URL points to an in-memory SQLite database; skipping file deletion." 
77 | else 78 | echo "" 79 | echo "Step 1: Removing SQLite database file..." 80 | if [ -f "${DB_PATH}" ]; then 81 | rm -f "${DB_PATH}" 82 | echo "✓ Removed ${DB_PATH}" 83 | else 84 | echo "ℹ︎ No database file found at ${DB_PATH}" 85 | fi 86 | fi 87 | 88 | # Step 2: Clean existing migration files 89 | echo "" 90 | echo "Step 2: Cleaning up old migration files..." 91 | rm -f alembic/versions/*.py 92 | mkdir -p alembic/versions 93 | touch alembic/versions/.gitkeep 94 | echo "✓ Old migrations removed" 95 | 96 | # Step 3: Generate fresh migration 97 | echo "" 98 | echo "Step 3: Generating fresh initial migration..." 99 | DATABASE_URL="${DATABASE_URL}" alembic revision --autogenerate -m "initial schema" 100 | echo "✓ Initial migration generated" 101 | 102 | # Step 3.5: Fix migration imports 103 | echo "" 104 | echo "Step 3.5: Fixing migration imports..." 105 | python3 scripts/fix_migration_imports.py 106 | echo "✓ Migration imports fixed" 107 | 108 | # Step 4: Show summary 109 | MIGRATION_FILE=$(ls -t alembic/versions/*.py 2>/dev/null | head -1) 110 | if [ -n "${MIGRATION_FILE}" ]; then 111 | echo "" 112 | echo "Generated migration: ${MIGRATION_FILE}" 113 | TABLES=$(grep -c "op.create_table" "${MIGRATION_FILE}" 2>/dev/null || echo "0") 114 | INDEXES=$(grep -c "op.create_index" "${MIGRATION_FILE}" 2>/dev/null || echo "0") 115 | echo " - Tables to create: ${TABLES}" 116 | echo " - Indexes to create: ${INDEXES}" 117 | fi 118 | 119 | echo "" 120 | echo "==========================================" 121 | echo "✓ Fresh migration generated successfully!" 122 | echo "==========================================" 123 | echo "Next steps:" 124 | echo "1. Inspect the migration: cat ${MIGRATION_FILE}" 125 | echo "2. Apply it: DATABASE_URL=${DATABASE_URL} alembic upgrade head" 126 | echo "3. Run tests to verify schema" 127 | -------------------------------------------------------------------------------- /alembic/versions/def789abc123_convert_user_role_to_enum.py: -------------------------------------------------------------------------------- 1 | """Convert user role column from VARCHAR to enum type 2 | 3 | Revision ID: def789abc123 4 | Revises: abc123def456 5 | Create Date: 2025-01-29 00:00:00.000000 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = 'def789abc123' 15 | down_revision = 'abc123def456' 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade() -> None: 21 | """ 22 | Convert user.role from VARCHAR to enum type. 
23 | 24 | For PostgreSQL: 25 | - Create enum type 'user_role_enum' with values ('admin', 'user') 26 | - Drop existing server default temporarily 27 | - Convert role column from VARCHAR to enum using USING clause 28 | - Re-add server default as enum value 29 | 30 | For SQLite: 31 | - No changes needed (SQLite doesn't support native enums) 32 | - SQLAlchemy will use CHECK constraint instead 33 | """ 34 | connection = op.get_bind() 35 | is_sqlite = connection.dialect.name == "sqlite" 36 | 37 | if not is_sqlite: 38 | # PostgreSQL: Create enum type and convert column 39 | 40 | # Check if the enum type already exists 41 | result = connection.execute(sa.text(""" 42 | SELECT EXISTS ( 43 | SELECT 1 FROM pg_type WHERE typname = 'user_role_enum' 44 | ) 45 | """)) 46 | enum_exists = result.scalar() 47 | 48 | # Create the enum type if it doesn't exist 49 | # Using native PostgreSQL ENUM for better type safety 50 | if not enum_exists: 51 | op.execute("CREATE TYPE user_role_enum AS ENUM ('admin', 'user')") 52 | 53 | # Drop the server default temporarily (it's a string, incompatible with enum) 54 | op.execute('ALTER TABLE "user" ALTER COLUMN role DROP DEFAULT') 55 | 56 | # Validate that all role values are valid before conversion 57 | result = connection.execute(sa.text(""" 58 | SELECT COUNT(*) FROM "user" 59 | WHERE role NOT IN ('admin', 'user') 60 | """)) 61 | invalid_count = result.scalar() 62 | if invalid_count > 0: 63 | raise ValueError( 64 | f"Found {invalid_count} users with invalid role values. " 65 | "Please fix the data before running this migration." 66 | ) 67 | 68 | # Convert the column type using USING clause to cast existing values 69 | op.execute(""" 70 | ALTER TABLE "user" 71 | ALTER COLUMN role TYPE user_role_enum 72 | USING role::user_role_enum 73 | """) 74 | 75 | # Re-add the server default as an enum value 76 | op.execute("ALTER TABLE \"user\" ALTER COLUMN role SET DEFAULT 'user'::user_role_enum") 77 | 78 | # SQLite: No migration needed 79 | # SQLAlchemy will handle enum validation at the application level 80 | # The column remains VARCHAR with server_default='user' 81 | 82 | 83 | def downgrade() -> None: 84 | """ 85 | Revert user.role from enum back to VARCHAR. 
86 | 87 | For PostgreSQL: 88 | - Drop enum server default 89 | - Convert role column from enum back to VARCHAR(20) 90 | - Re-add string server default 91 | - Drop the user_role_enum type 92 | 93 | For SQLite: 94 | - No changes needed 95 | """ 96 | connection = op.get_bind() 97 | is_sqlite = connection.dialect.name == "sqlite" 98 | 99 | if not is_sqlite: 100 | # PostgreSQL: Convert back to VARCHAR and drop enum type 101 | 102 | # Drop the enum default 103 | op.execute('ALTER TABLE "user" ALTER COLUMN role DROP DEFAULT') 104 | 105 | # Convert column back to VARCHAR(20), casting enum values to text 106 | op.execute(""" 107 | ALTER TABLE "user" 108 | ALTER COLUMN role TYPE VARCHAR(20) 109 | USING role::text 110 | """) 111 | 112 | # Re-add the string server default 113 | op.execute("ALTER TABLE \"user\" ALTER COLUMN role SET DEFAULT 'user'") 114 | 115 | # Drop the enum type 116 | op.execute("DROP TYPE user_role_enum") 117 | 118 | # SQLite: No downgrade needed 119 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # =============================================================== 2 | # PYTHON / FASTAPI BACKEND 3 | # =============================================================== 4 | 5 | # Bytecode and caches 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # Virtual environments 11 | .env 12 | .venv 13 | env/ 14 | venv/ 15 | ENV/ 16 | env.bak/ 17 | venv.bak/ 18 | 19 | # Build artifacts and packaging 20 | build/ 21 | dist/ 22 | *.egg-info/ 23 | .eggs/ 24 | wheels/ 25 | *.egg 26 | MANIFEST 27 | 28 | # Logs 29 | logs/ 30 | *.log 31 | app.log 32 | error.log 33 | access.log 34 | 35 | # Databases 36 | data/ 37 | *.db 38 | *.db-shm 39 | *.db-wal 40 | *.sqlite 41 | *.sqlite3 42 | database.db 43 | dev.db 44 | test.db 45 | local.db 46 | db.sqlite3 47 | db.sqlite3-journal 48 | 49 | # Coverage and test artifacts 50 | coverage.* 51 | htmlcov/ 52 | .coverage 53 | .coverage.* 54 | .cache/ 55 | .pytest_cache/ 56 | .nox/ 57 | .tox/ 58 | 59 | # Type checking and linting caches 60 | .mypy_cache/ 61 | .dmypy.json 62 | .pyre/ 63 | .pytype/ 64 | cython_debug/ 65 | 66 | # Local settings and configs 67 | config.ini 68 | secrets.json 69 | .secrets 70 | .local/ 71 | local/ 72 | 73 | # Celery / Redis 74 | celerybeat-schedule* 75 | dump.rdb 76 | 77 | # Temporary files 78 | tmp/ 79 | temp/ 80 | processing/ 81 | temp_processing/ 82 | *.tmp 83 | *.bak 84 | *.backup 85 | *.old 86 | 87 | # Cache 88 | .cache/ 89 | cache/ 90 | 91 | # =============================================================== 92 | # FLUTTER WEB FRONTEND (BUILT SPA) 93 | # =============================================================== 94 | 95 | # Flutter build outputs 96 | build/ 97 | .flutter-plugins 98 | .flutter-plugins-dependencies 99 | .packages 100 | .dart_tool/ 101 | .flutter-plugins-dependencies 102 | 103 | # Web build artifacts 104 | flutter_web_build/ 105 | web_build/ 106 | frontend_build/ 107 | frontend/dist/ 108 | frontend/build/ 109 | flutter_web_release/ 110 | build/web/ 111 | 112 | # Node/npm dependencies (for future frontend tooling) 113 | node_modules/ 114 | npm-debug.log* 115 | yarn-debug.log* 116 | yarn-error.log* 117 | 118 | # Flutter/Dart environment 119 | .pub/ 120 | .dart_tool/ 121 | .flutter-plugins 122 | flutter_export_environment.sh 123 | 124 | # Flutter web build output 125 | # web/ 126 | 127 | # =============================================================== 128 | # MEDIA / USER FILES 129 | # 
=============================================================== 130 | 131 | media/ 132 | uploads/ 133 | user_media/ 134 | entry_media/ 135 | thumbnails/ 136 | generated/ 137 | public/uploads/ 138 | static/uploads/ 139 | 140 | # Backups / exports / imports 141 | backups/ 142 | exports/ 143 | imports/ 144 | *.export 145 | *.import 146 | 147 | # Test / fixture data 148 | test_data/ 149 | fixtures/ 150 | 151 | # =============================================================== 152 | # DOCS / BUILD / PROJECT FILES 153 | # =============================================================== 154 | 155 | # Documentation builds 156 | docs/_build/ 157 | docs/build/ 158 | 159 | # Site generator outputs 160 | /site 161 | 162 | # =============================================================== 163 | # DOCKER / DEPLOYMENT 164 | # =============================================================== 165 | docker-compose.override.yml 166 | .env* 167 | !.env.template # Exclude the template file from being ignored 168 | 169 | # Ignore local sqlite if mounted in volume 170 | /data/ 171 | 172 | # =============================================================== 173 | # IDE / OS / EDITOR 174 | # =============================================================== 175 | 176 | # VS Code / JetBrains 177 | .vscode/ 178 | .idea/ 179 | *.iml 180 | *.ipr 181 | *.iws 182 | 183 | # Swap / temp files 184 | *.swp 185 | *.swo 186 | *~ 187 | 188 | # macOS 189 | .DS_Store 190 | .AppleDouble 191 | .LSOverride 192 | ._* 193 | 194 | # Windows 195 | Thumbs.db 196 | Desktop.ini 197 | $RECYCLE.BIN/ 198 | 199 | # Linux 200 | *~ 201 | .fuse_hidden* 202 | .directory 203 | .Trash-* 204 | .nfs* 205 | 206 | # =============================================================== 207 | # JUPYTER / NOTEBOOKS 208 | # =============================================================== 209 | 210 | .ipynb_checkpoints/ 211 | 212 | # =============================================================== 213 | # END OF FILE 214 | # =============================================================== 215 | -------------------------------------------------------------------------------- /app/utils/import_export/id_mapper.py: -------------------------------------------------------------------------------- 1 | """ 2 | ID mapping utilities for import operations. 3 | 4 | During import, UUIDs from external systems need to be remapped to new UUIDs 5 | to avoid conflicts with existing data. 6 | """ 7 | import uuid 8 | from typing import Dict, Optional 9 | 10 | 11 | class IDMapper: 12 | """ 13 | Maps old IDs to new UUIDs during import. 14 | 15 | This is critical because: 16 | 1. Imported data may have UUIDs that conflict with existing data 17 | 2. Foreign key relationships need to be preserved 18 | 3. We need to track which external IDs map to which internal IDs 19 | 20 | None-handling semantics: 21 | - None is always treated as a brand-new ID and is never tracked 22 | - map(None) always returns a fresh UUID and never records it 23 | - record(None, new_id) silently ignores the mapping 24 | - This ensures that missing IDs don't interfere with ID tracking 25 | """ 26 | 27 | def __init__(self): 28 | """Initialize the ID mapper with empty mappings.""" 29 | self._mappings: Dict[str, uuid.UUID] = {} 30 | 31 | def map(self, old_id: Optional[str]) -> uuid.UUID: 32 | """ 33 | Map an old ID to a new UUID. 34 | 35 | If the old_id has been seen before, returns the same UUID. 36 | Otherwise, generates a new UUID and stores the mapping. 
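        For example, map("entry-42") returns the same UUID on every call,
        so foreign keys that referenced "entry-42" remain consistent across
        the import.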
37 | 38 | Note: None is always treated as a brand-new ID and is never tracked. 39 | Each call to map(None) returns a fresh UUID. 40 | 41 | Args: 42 | old_id: Original ID from source system (can be UUID string or any unique ID). 43 | If None, returns a fresh UUID without recording it. 44 | 45 | Returns: 46 | New UUID for use in the target system 47 | """ 48 | if old_id is None: 49 | # Generate a new UUID for null IDs (never tracked) 50 | return uuid.uuid4() 51 | 52 | # Convert to string for consistent mapping 53 | old_id_str = str(old_id) 54 | 55 | # Return existing mapping if available 56 | if old_id_str in self._mappings: 57 | return self._mappings[old_id_str] 58 | 59 | # Generate new UUID and store mapping 60 | new_id = uuid.uuid4() 61 | self._mappings[old_id_str] = new_id 62 | return new_id 63 | 64 | def get(self, old_id: str) -> Optional[uuid.UUID]: 65 | """ 66 | Get the mapped UUID for an old ID without creating a new one. 67 | 68 | Args: 69 | old_id: Original ID from source system 70 | 71 | Returns: 72 | Mapped UUID if it exists, None otherwise 73 | """ 74 | return self._mappings.get(str(old_id)) 75 | 76 | def has(self, old_id: str) -> bool: 77 | """ 78 | Check if an old ID has been mapped. 79 | 80 | Args: 81 | old_id: Original ID from source system 82 | 83 | Returns: 84 | True if the ID has been mapped, False otherwise 85 | """ 86 | return str(old_id) in self._mappings 87 | 88 | def record(self, old_id: Optional[str], new_id: uuid.UUID): 89 | """ 90 | Record an explicit mapping for a known new UUID. 91 | 92 | Note: If old_id is None, this method silently ignores the mapping. 93 | None is never tracked to avoid interfering with ID mapping logic. 94 | 95 | Args: 96 | old_id: Original ID from source system. If None, the mapping is ignored. 97 | new_id: Newly created UUID in the target system 98 | """ 99 | if old_id is None: 100 | return 101 | self._mappings[str(old_id)] = new_id 102 | 103 | def clear(self): 104 | """Clear all mappings.""" 105 | self._mappings.clear() 106 | 107 | def size(self) -> int: 108 | """Get the number of mapped IDs.""" 109 | return len(self._mappings) 110 | 111 | def get_all_mappings(self) -> Dict[str, uuid.UUID]: 112 | """ 113 | Get all ID mappings. 114 | 115 | Returns: 116 | Dictionary of old_id -> new_uuid mappings 117 | """ 118 | return self._mappings.copy() 119 | 120 | def as_string_mapping(self) -> Dict[str, str]: 121 | """Get mappings with UUIDs serialized as strings.""" 122 | return {old: str(new_id) for old, new_id in self._mappings.items()} 123 | -------------------------------------------------------------------------------- /alembic/versions/6b2d62d09dd8_add_import_export_job_model.py: -------------------------------------------------------------------------------- 1 | """Add import and export job models 2 | 3 | Revision ID: 6b2d62d09dd8 4 | Revises: c3ea6c0f0a1d 5 | Create Date: 2025-11-13 15:16:06.287377 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '6b2d62d09dd8' 14 | down_revision = 'c3ea6c0f0a1d' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | 22 | # Add total_words column to journal table 23 | op.add_column('journal', sa.Column('total_words', sa.Integer(), nullable=False, server_default='0')) 24 | 25 | # Create export_jobs table 26 | op.create_table('export_jobs', 27 | sa.Column('id', sa.Uuid(), nullable=False), 28 | sa.Column('created_at', sa.DateTime(), nullable=False), 29 | sa.Column('updated_at', sa.DateTime(), nullable=False), 30 | sa.Column('user_id', sa.Uuid(), nullable=False), 31 | sa.Column('status', sa.Enum('pending', 'running', 'completed', 'failed', 'cancelled', name='job_status_enum'), nullable=False), 32 | sa.Column('progress', sa.Integer(), nullable=False), 33 | sa.Column('export_type', sa.Enum('full', 'journal', name='export_type_enum'), nullable=False), 34 | sa.Column('journal_ids', sa.JSON(), nullable=True), 35 | sa.Column('include_media', sa.Boolean(), nullable=False), 36 | sa.Column('total_items', sa.Integer(), nullable=False), 37 | sa.Column('processed_items', sa.Integer(), nullable=False), 38 | sa.Column('file_path', sa.String(), nullable=True), 39 | sa.Column('file_size', sa.Integer(), nullable=True), 40 | sa.Column('result_data', sa.JSON(), nullable=True), 41 | sa.Column('errors', sa.JSON(), nullable=True), 42 | sa.Column('warnings', sa.JSON(), nullable=True), 43 | sa.Column('completed_at', sa.DateTime(), nullable=True), 44 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'), 45 | sa.PrimaryKeyConstraint('id') 46 | ) 47 | op.create_index(op.f('ix_export_jobs_id'), 'export_jobs', ['id'], unique=False) 48 | op.create_index(op.f('ix_export_jobs_status'), 'export_jobs', ['status'], unique=False) 49 | op.create_index(op.f('ix_export_jobs_user_id'), 'export_jobs', ['user_id'], unique=False) 50 | 51 | # Create import_jobs table 52 | op.create_table('import_jobs', 53 | sa.Column('id', sa.Uuid(), nullable=False), 54 | sa.Column('created_at', sa.DateTime(), nullable=False), 55 | sa.Column('updated_at', sa.DateTime(), nullable=False), 56 | sa.Column('user_id', sa.Uuid(), nullable=False), 57 | sa.Column('status', sa.Enum('pending', 'running', 'completed', 'failed', 'cancelled', name='job_status_enum'), nullable=False), 58 | sa.Column('progress', sa.Integer(), nullable=False), 59 | sa.Column('source_type', sa.Enum('journiv', 'markdown', 'dayone', name='import_source_type_enum'), nullable=False), 60 | sa.Column('file_path', sa.String(), nullable=False), 61 | sa.Column('total_items', sa.Integer(), nullable=False), 62 | sa.Column('processed_items', sa.Integer(), nullable=False), 63 | sa.Column('result_data', sa.JSON(), nullable=True), 64 | sa.Column('errors', sa.JSON(), nullable=True), 65 | sa.Column('warnings', sa.JSON(), nullable=True), 66 | sa.Column('completed_at', sa.DateTime(), nullable=True), 67 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'), 68 | sa.PrimaryKeyConstraint('id') 69 | ) 70 | op.create_index(op.f('ix_import_jobs_id'), 'import_jobs', ['id'], unique=False) 71 | op.create_index(op.f('ix_import_jobs_status'), 'import_jobs', ['status'], unique=False) 72 | op.create_index(op.f('ix_import_jobs_user_id'), 'import_jobs', ['user_id'], unique=False) 73 | # ### end Alembic commands ### 74 | 75 | 76 | def downgrade() -> None: 77 | # ### commands auto generated by Alembic - please adjust! 
### 78 | op.drop_index(op.f('ix_import_jobs_user_id'), table_name='import_jobs') 79 | op.drop_index(op.f('ix_import_jobs_status'), table_name='import_jobs') 80 | op.drop_index(op.f('ix_import_jobs_id'), table_name='import_jobs') 81 | op.drop_table('import_jobs') 82 | op.drop_index(op.f('ix_export_jobs_user_id'), table_name='export_jobs') 83 | op.drop_index(op.f('ix_export_jobs_status'), table_name='export_jobs') 84 | op.drop_index(op.f('ix_export_jobs_id'), table_name='export_jobs') 85 | op.drop_table('export_jobs') 86 | op.drop_column('journal', 'total_words') 87 | # ### end Alembic commands ### 88 | -------------------------------------------------------------------------------- /alembic/versions/7c52fcc89c83_make_user_name_required.py: -------------------------------------------------------------------------------- 1 | """Make user name field required 2 | 3 | Revision ID: 7c52fcc89c83 4 | Revises: 6b2d62d09dd8 5 | Create Date: 2025-01-27 12:00:00.000000 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '7c52fcc89c83' 14 | down_revision = '6b2d62d09dd8' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | connection = op.get_bind() 21 | is_sqlite = connection.dialect.name == "sqlite" 22 | 23 | if is_sqlite: 24 | op.execute(sa.text("UPDATE user SET name = 'No Name' WHERE name IS NULL OR name = ''")) 25 | else: 26 | op.execute(sa.text('UPDATE "user" SET name = \'No Name\' WHERE name IS NULL OR name = \'\'')) 27 | 28 | if is_sqlite: 29 | op.execute(''' 30 | CREATE TABLE user_new ( 31 | id TEXT NOT NULL, 32 | created_at DATETIME NOT NULL, 33 | updated_at DATETIME NOT NULL, 34 | email TEXT NOT NULL, 35 | password TEXT NOT NULL, 36 | name TEXT NOT NULL, 37 | is_active BOOLEAN NOT NULL, 38 | profile_picture_url TEXT, 39 | last_login_at DATETIME, 40 | PRIMARY KEY (id), 41 | UNIQUE (email), 42 | CHECK(length(name) > 0) 43 | ) 44 | ''') 45 | 46 | op.execute(''' 47 | INSERT INTO user_new 48 | SELECT id, created_at, updated_at, email, password, 49 | COALESCE(NULLIF(name, ''), 'No Name'), is_active, profile_picture_url, last_login_at 50 | FROM user 51 | ''') 52 | 53 | op.execute('DROP TABLE user') 54 | op.execute('ALTER TABLE user_new RENAME TO user') 55 | 56 | op.execute('CREATE INDEX idx_user_active ON user (is_active)') 57 | op.execute('CREATE UNIQUE INDEX ix_user_email ON user (email)') 58 | op.execute('CREATE INDEX ix_user_id ON user (id)') 59 | else: 60 | op.drop_constraint('check_name_not_empty', 'user', type_='check') 61 | 62 | op.alter_column('user', 'name', 63 | existing_type=sa.String(length=100), 64 | nullable=False, 65 | existing_nullable=True) 66 | 67 | op.create_check_constraint( 68 | 'check_name_not_empty', 69 | 'user', 70 | 'length(name) > 0', 71 | ) 72 | 73 | 74 | def downgrade() -> None: 75 | connection = op.get_bind() 76 | is_sqlite = connection.dialect.name == "sqlite" 77 | 78 | if not is_sqlite: 79 | op.drop_constraint('check_name_not_empty', 'user', type_='check') 80 | 81 | if is_sqlite: 82 | op.execute(''' 83 | CREATE TABLE user_new ( 84 | id TEXT NOT NULL, 85 | created_at DATETIME NOT NULL, 86 | updated_at DATETIME NOT NULL, 87 | email TEXT NOT NULL, 88 | password TEXT NOT NULL, 89 | name TEXT, 90 | is_active BOOLEAN NOT NULL, 91 | profile_picture_url TEXT, 92 | last_login_at DATETIME, 93 | PRIMARY KEY (id), 94 | UNIQUE (email), 95 | CHECK(name IS NULL OR length(name) > 0) 96 | ) 97 | ''') 98 | 99 | op.execute(''' 100 | INSERT INTO user_new 101 | 
SELECT id, created_at, updated_at, email, password, 102 | CASE WHEN name = 'No Name' THEN NULL ELSE name END, 103 | is_active, profile_picture_url, last_login_at 104 | FROM user 105 | ''') 106 | 107 | op.execute('DROP TABLE user') 108 | op.execute('ALTER TABLE user_new RENAME TO user') 109 | 110 | op.execute('CREATE INDEX idx_user_active ON user (is_active)') 111 | op.execute('CREATE UNIQUE INDEX ix_user_email ON user (email)') 112 | op.execute('CREATE INDEX ix_user_id ON user (id)') 113 | else: 114 | op.alter_column('user', 'name', 115 | existing_type=sa.String(length=100), 116 | nullable=True, 117 | existing_nullable=False) 118 | 119 | op.create_check_constraint( 120 | 'check_name_not_empty', 121 | 'user', 122 | 'name IS NULL OR length(name) > 0', 123 | ) 124 | 125 | -------------------------------------------------------------------------------- /app/models/import_job.py: -------------------------------------------------------------------------------- 1 | """ 2 | Import job model for tracking async import operations. 3 | """ 4 | from datetime import datetime 5 | from typing import Optional, Dict, Any, List 6 | import uuid 7 | 8 | from sqlalchemy import Column, ForeignKey, Enum as SAEnum 9 | from sqlmodel import Field, Column as SQLModelColumn, JSON 10 | 11 | from app.models.base import BaseModel 12 | from app.models.enums import JobStatus, ImportSourceType 13 | from app.core.time_utils import utc_now 14 | 15 | 16 | class ImportJob(BaseModel, table=True): 17 | """ 18 | Track import job progress and results. 19 | 20 | Import jobs are created when a user uploads a file for import 21 | and processed asynchronously by Celery workers. 22 | """ 23 | __tablename__ = "import_jobs" 24 | 25 | # Foreign key to user who initiated the import 26 | user_id: uuid.UUID = Field( 27 | sa_column=Column( 28 | ForeignKey("user.id", ondelete="CASCADE"), 29 | nullable=False, 30 | index=True 31 | ) 32 | ) 33 | 34 | # Job status and progress 35 | status: JobStatus = Field( 36 | default=JobStatus.PENDING, 37 | sa_column=Column( 38 | SAEnum(JobStatus, name="job_status_enum", values_callable=lambda x: [e.value for e in x]), 39 | nullable=False, 40 | index=True 41 | ) 42 | ) 43 | progress: int = Field(default=0, ge=0, le=100, description="Progress percentage 0-100") 44 | 45 | # Source information 46 | source_type: ImportSourceType = Field( 47 | sa_column=Column( 48 | SAEnum(ImportSourceType, name="import_source_type_enum", values_callable=lambda x: [e.value for e in x]), 49 | nullable=False 50 | ) 51 | ) 52 | file_path: str = Field(nullable=False, description="Path to uploaded file") 53 | 54 | # Progress tracking 55 | total_items: int = Field(default=0, description="Total number of items to import") 56 | processed_items: int = Field(default=0, description="Number of items processed so far") 57 | 58 | # Results and errors 59 | result_data: Optional[Dict[str, Any]] = Field( 60 | default=None, 61 | sa_column=SQLModelColumn(JSON), 62 | description="Final import statistics (journals, entries, media counts)" 63 | ) 64 | errors: Optional[List[str]] = Field( 65 | default=None, 66 | sa_column=SQLModelColumn(JSON), 67 | description="List of error messages" 68 | ) 69 | warnings: Optional[List[str]] = Field( 70 | default=None, 71 | sa_column=SQLModelColumn(JSON), 72 | description="List of warning messages" 73 | ) 74 | 75 | # Completion timestamp 76 | completed_at: Optional[datetime] = Field(default=None, description="When the job completed or failed") 77 | 78 | def __repr__(self) -> str: 79 | return f"<ImportJob(id={self.id}, status={self.status})>" 80 | 81 | def
mark_running(self): 82 | """Mark job as running.""" 83 | self.status = JobStatus.RUNNING 84 | self.progress = 0 85 | 86 | def update_progress(self, processed: int, total: int): 87 | """Update progress based on processed/total items.""" 88 | self.processed_items = processed 89 | self.total_items = total 90 | if total > 0: 91 | self.progress = min(100, int((processed / total) * 100)) 92 | 93 | def set_progress(self, percent: int): 94 | """Set progress percentage directly.""" 95 | self.progress = max(0, min(100, percent)) 96 | 97 | def mark_completed(self, result_data: Dict[str, Any]): 98 | """Mark job as completed with results.""" 99 | self.status = JobStatus.COMPLETED 100 | self.progress = 100 101 | self.result_data = result_data 102 | self.completed_at = utc_now() 103 | 104 | def mark_failed(self, error_message: str): 105 | """Mark job as failed with error.""" 106 | self.status = JobStatus.FAILED 107 | if self.errors is None: 108 | self.errors = [] 109 | self.errors.append(error_message) 110 | self.completed_at = utc_now() 111 | 112 | def mark_cancelled(self): 113 | """Mark job as cancelled.""" 114 | self.status = JobStatus.CANCELLED 115 | self.completed_at = utc_now() 116 | 117 | def add_warning(self, warning: str): 118 | """Add a warning message.""" 119 | if self.warnings is None: 120 | self.warnings = [] 121 | self.warnings.append(warning) 122 | -------------------------------------------------------------------------------- /app/models/analytics.py: -------------------------------------------------------------------------------- 1 | """ 2 | Analytics and tracking models. 3 | 4 | This module provides models for tracking user writing analytics including 5 | streaks, entry counts, and word counts. 6 | """ 7 | import uuid 8 | from datetime import date 9 | from typing import Optional, TYPE_CHECKING 10 | 11 | from pydantic import field_validator 12 | from sqlalchemy import Column, ForeignKey 13 | from sqlmodel import Field, Relationship, Index, CheckConstraint 14 | 15 | from .base import BaseModel 16 | 17 | if TYPE_CHECKING: 18 | from .user import User 19 | 20 | 21 | class WritingStreak(BaseModel, table=True): 22 | """ 23 | Writing streak tracking for users. 24 | 25 | Tracks daily writing streaks, total statistics, and analytics. 26 | All denormalized fields (total_entries, total_words, average_words_per_entry) 27 | should be recalculated periodically using analytics_service methods. 
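    Denormalization contract (an illustrative invariant, not a method of this
    model): after a recalculation pass, average_words_per_entry should equal
    total_words / total_entries whenever total_entries > 0, and 0.0 otherwise.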
28 | """ 29 | __tablename__ = "writing_streak" 30 | 31 | user_id: uuid.UUID = Field( 32 | sa_column=Column( 33 | ForeignKey("user.id", ondelete="CASCADE"), 34 | unique=True, 35 | nullable=False 36 | ), 37 | description="User this streak record belongs to" 38 | ) 39 | current_streak: int = Field( 40 | default=0, 41 | ge=0, 42 | description="Current consecutive days with entries" 43 | ) 44 | longest_streak: int = Field( 45 | default=0, 46 | ge=0, 47 | description="Longest streak ever achieved by user" 48 | ) 49 | last_entry_date: Optional[date] = Field( 50 | default=None, 51 | description="Date of most recent entry" 52 | ) 53 | streak_start_date: Optional[date] = Field( 54 | default=None, 55 | description="Date when current streak started" 56 | ) 57 | total_entries: int = Field( 58 | default=0, 59 | ge=0, 60 | description="Total number of entries (denormalized, recalculate periodically)" 61 | ) 62 | total_words: int = Field( 63 | default=0, 64 | ge=0, 65 | description="Total word count across all entries (denormalized, recalculate periodically)" 66 | ) 67 | average_words_per_entry: float = Field( 68 | default=0.0, 69 | ge=0.0, 70 | description="Average words per entry (denormalized, recalculate periodically)" 71 | ) 72 | 73 | # Relations 74 | user: "User" = Relationship(back_populates="writing_streak") 75 | 76 | # Table constraints and indexes 77 | __table_args__ = ( 78 | Index('idx_writing_streak_user_date', 'user_id', 'last_entry_date'), 79 | # Index for date-based analytics queries 80 | Index('idx_writing_streak_last_entry', 'last_entry_date'), 81 | # Index for finding active streaks 82 | Index('idx_writing_streak_active', 'last_entry_date', 'current_streak'), 83 | # Constraints 84 | CheckConstraint('current_streak >= 0', name='check_current_streak_positive'), 85 | CheckConstraint('longest_streak >= 0', name='check_longest_streak_positive'), 86 | CheckConstraint('total_entries >= 0', name='check_total_entries_positive'), 87 | CheckConstraint('total_words >= 0', name='check_total_words_positive'), 88 | CheckConstraint('average_words_per_entry >= 0.0', name='check_avg_words_positive'), 89 | CheckConstraint('longest_streak >= current_streak', name='check_longest_gte_current'), 90 | ) 91 | 92 | @field_validator('average_words_per_entry') 93 | @classmethod 94 | def validate_average_words(cls, v: float) -> float: 95 | """ 96 | Validate that average_words_per_entry is non-negative. 97 | 98 | Note: This validator only checks bounds. The actual average is calculated 99 | in the service layer (analytics_service.py) to ensure consistency. 100 | """ 101 | if v < 0: 102 | raise ValueError('average_words_per_entry must be non-negative') 103 | return v 104 | 105 | @field_validator('longest_streak') 106 | @classmethod 107 | def validate_longest_streak(cls, v: int, info) -> int: 108 | """ 109 | Validate that longest_streak is at least as large as current_streak. 110 | 111 | This is also enforced by a database constraint for data integrity. 112 | """ 113 | # Note: info.data contains previously validated fields 114 | current_streak = info.data.get('current_streak', 0) 115 | if v < current_streak: 116 | raise ValueError('longest_streak must be >= current_streak') 117 | return v 118 | -------------------------------------------------------------------------------- /alembic/env.py: -------------------------------------------------------------------------------- 1 | """ 2 | Alembic environment configuration tailored for SQLModel models. 
3 | 4 | The key customization is a renderer that converts SQLModel-specific column 5 | types (e.g., AutoString) into portable SQLAlchemy primitives so generated 6 | migrations run cleanly on both SQLite and PostgreSQL. 7 | Currently this does not work as expected, so generated migrations are patched after the fact to use the correct types; see scripts/fix_migration_imports.py for the patch. 8 | """ 9 | from logging.config import fileConfig 10 | from typing import Any 11 | 12 | from alembic import context 13 | from sqlalchemy import engine_from_config, pool 14 | from sqlalchemy.sql.sqltypes import Uuid 15 | 16 | try: # Alembic ≥1.10 17 | from alembic.autogenerate.api import AutogenContext 18 | except ImportError: # pragma: no cover - fallback for older Alembic 19 | AutogenContext = Any # type: ignore[assignment] 20 | 21 | from alembic.autogenerate import renderers 22 | from app.core.config import settings 23 | from app.models import * # noqa: F401,F403 (needed for metadata discovery) 24 | from sqlmodel import SQLModel 25 | from sqlmodel.sql.sqltypes import AutoString 26 | 27 | # --------------------------------------------------------------------------- 28 | # Global configuration 29 | # --------------------------------------------------------------------------- 30 | config = context.config 31 | 32 | if config.config_file_name is not None: 33 | fileConfig(config.config_file_name) 34 | 35 | target_metadata = SQLModel.metadata 36 | 37 | 38 | # --------------------------------------------------------------------------- 39 | # Autogenerate helpers 40 | # --------------------------------------------------------------------------- 41 | def _ensure_import(autogen_context: AutogenContext, import_stmt: str) -> None: 42 | """Ensure the given import is included in the generated migration.""" 43 | imports = getattr(autogen_context, "imports", None) 44 | if imports is None: 45 | imports = set() 46 | autogen_context.imports = imports # type: ignore[attr-defined] 47 | imports.add(import_stmt) 48 | 49 | 50 | @renderers.dispatch_for(AutoString) 51 | def _render_auto_string(type_: AutoString, autogen_context: AutogenContext) -> str: 52 | """Render SQLModel AutoString columns as sa.String.""" 53 | _ensure_import(autogen_context, "import sqlalchemy as sa") 54 | _ensure_import(autogen_context, "import sqlmodel") 55 | length = getattr(type_, "length", None) 56 | return f"sa.String(length={length})" if length else "sa.String()" 57 | 58 | 59 | @renderers.dispatch_for(Uuid) 60 | def _render_uuid(type_: Uuid, autogen_context: AutogenContext) -> str: 61 | """Render SQLAlchemy's Uuid as sa.String(36) for cross-database support.""" 62 | _ensure_import(autogen_context, "import sqlalchemy as sa") 63 | return "sa.String(length=36)" 64 | 65 | 66 | # --------------------------------------------------------------------------- 67 | # Utility functions 68 | # --------------------------------------------------------------------------- 69 | def get_url() -> str: 70 | """Resolve the database URL Alembic should target.""" 71 | return settings.effective_database_url 72 | 73 | 74 | # --------------------------------------------------------------------------- 75 | # Migration entrypoints 76 | # --------------------------------------------------------------------------- 77 | def run_migrations_offline() -> None: 78 | """Run migrations in 'offline' mode.""" 79 | url = get_url() 80 | context.configure( 81 | url=url, 82 | target_metadata=target_metadata, 83 | literal_binds=True, 84 | compare_type=True, 85 | dialect_opts={"paramstyle": "named"}, 86 | ) 87 |
88 | with context.begin_transaction(): 89 | context.run_migrations() 90 | 91 | 92 | def run_migrations_online() -> None: 93 | """Run migrations in 'online' mode.""" 94 | configuration = config.get_section(config.config_ini_section) 95 | configuration["sqlalchemy.url"] = get_url() 96 | 97 | connectable = engine_from_config( 98 | configuration, 99 | prefix="sqlalchemy.", 100 | poolclass=pool.NullPool, 101 | ) 102 | 103 | with connectable.connect() as connection: 104 | context.configure( 105 | connection=connection, 106 | target_metadata=target_metadata, 107 | compare_type=True, 108 | ) 109 | 110 | with context.begin_transaction(): 111 | context.run_migrations() 112 | 113 | 114 | if context.is_offline_mode(): 115 | run_migrations_offline() 116 | else: 117 | run_migrations_online() 118 | -------------------------------------------------------------------------------- /docker-compose.prod.postgres.yml: -------------------------------------------------------------------------------- 1 | # Journiv Production Docker Compose (PostgreSQL) - Recommended for multi-user deployments. 2 | # 3 | # Usage: 4 | # docker compose -f docker-compose.prod.postgres.yml up -d 5 | # 6 | # Required Environment Variables: 7 | # SECRET_KEY - Generate with: python -c "import secrets; print(secrets.token_urlsafe(32))" 8 | # DOMAIN_NAME - Needed when running in same-origin SPA mode (ENABLE_CORS=false). 9 | # POSTGRES_PASSWORD - Required; set a strong, unique value in your .env file. 10 | services: 11 | postgres: 12 | image: postgres:15 13 | container_name: journiv-postgres-db 14 | environment: 15 | - POSTGRES_USER=${POSTGRES_USER:-journiv} 16 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} # must provide a password in .env file 17 | - POSTGRES_DB=${POSTGRES_DB:-journiv_prod} 18 | volumes: 19 | - postgres_data:/var/lib/postgresql/data 20 | networks: 21 | - backend 22 | restart: unless-stopped 23 | cpus: "1.0" 24 | mem_limit: 1g 25 | mem_reservation: 256m 26 | logging: 27 | driver: "json-file" 28 | options: 29 | max-size: "50m" 30 | max-file: "5" 31 | healthcheck: 32 | test: 33 | [ 34 | "CMD-SHELL", 35 | "pg_isready -U ${POSTGRES_USER:-journiv} -d ${POSTGRES_DB:-journiv_prod}", 36 | ] 37 | interval: 10s 38 | timeout: 5s 39 | retries: 5 40 | start_period: 10s 41 | 42 | redis: 43 | # Redis (Do not remove. Journiv runs with multiple workers which need to share state.)
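    # Shared state includes the Celery broker/result backend and the API's
    # rate-limit counters; see the REDIS_URL, CELERY_BROKER_URL,
    # CELERY_RESULT_BACKEND, and RATE_LIMIT_STORAGE_URI settings below.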
44 | image: redis:7 45 | container_name: journiv-redis-cache 46 | restart: unless-stopped 47 | volumes: 48 | - redis_data:/data 49 | networks: 50 | - backend 51 | healthcheck: 52 | test: ["CMD", "redis-cli", "ping"] 53 | interval: 10s 54 | timeout: 5s 55 | retries: 5 56 | start_period: 10s 57 | 58 | celery-worker: 59 | image: swalabtech/journiv-app:${APP_VERSION:-latest} 60 | container_name: journiv-celery-worker 61 | command: celery -A app.core.celery_app worker --loglevel=info 62 | env_file: 63 | - .env 64 | environment: 65 | # Docker-specific: service names 66 | - SERVICE_ROLE=celery-worker 67 | - DB_DRIVER=postgres 68 | - POSTGRES_HOST=postgres 69 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-} 70 | - REDIS_URL=redis://redis:6379/0 71 | - CELERY_BROKER_URL=redis://redis:6379/0 72 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 73 | 74 | volumes: 75 | - app_data:/data 76 | depends_on: 77 | postgres: 78 | condition: service_healthy 79 | redis: 80 | condition: service_healthy 81 | networks: 82 | - backend 83 | restart: unless-stopped 84 | healthcheck: 85 | test: ["CMD-SHELL", "celery -A app.core.celery_app inspect ping --timeout=5 | grep -q pong"] 86 | interval: 30s 87 | timeout: 10s 88 | retries: 5 89 | start_period: 40s 90 | cpus: "1.0" 91 | mem_limit: 1g 92 | mem_reservation: 256m 93 | logging: 94 | driver: "json-file" 95 | options: 96 | max-size: "50m" 97 | max-file: "5" 98 | 99 | app: 100 | image: swalabtech/journiv-app:${APP_VERSION:-latest} 101 | container_name: journiv-postgres-app 102 | ports: 103 | - "${APP_PORT:-8000}:8000" # Right side must remain 8000 104 | env_file: 105 | - .env 106 | environment: 107 | # Docker-specific: service names 108 | - SERVICE_ROLE=app 109 | - DB_DRIVER=postgres 110 | - POSTGRES_HOST=postgres 111 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} 112 | - ENVIRONMENT=production 113 | - REDIS_URL=redis://redis:6379/0 114 | - CELERY_BROKER_URL=redis://redis:6379/0 115 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 116 | - RATE_LIMIT_STORAGE_URI=redis://redis:6379/1 117 | 118 | volumes: 119 | - app_data:/data 120 | depends_on: 121 | postgres: 122 | condition: service_healthy 123 | redis: 124 | condition: service_healthy 125 | networks: 126 | - backend 127 | - frontend 128 | restart: unless-stopped 129 | healthcheck: 130 | test: ["CMD-SHELL", "curl -f http://localhost:8000/api/v1/health"] 131 | interval: 30s 132 | timeout: 10s 133 | retries: 3 134 | start_period: 40s 135 | cpus: "2.0" 136 | mem_limit: 2g 137 | mem_reservation: 512m 138 | logging: 139 | driver: "json-file" 140 | options: 141 | max-size: "50m" 142 | max-file: "5" 143 | 144 | volumes: 145 | app_data: 146 | postgres_data: 147 | redis_data: 148 | 149 | networks: 150 | backend: 151 | driver: bridge 152 | frontend: 153 | driver: bridge 154 | -------------------------------------------------------------------------------- /app/api/dependencies.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shared API dependencies. 
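
Typical endpoint wiring (illustrative sketch; the route and handler are
hypothetical, the dependency is defined below):

    @router.get("/example")
    async def example(current_user: Annotated[User, Depends(get_current_user)]):
        ...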
3 | """ 4 | import logging 5 | from typing import Annotated, Optional 6 | 7 | from fastapi import Depends, HTTPException, status, Cookie 8 | from fastapi.security import OAuth2PasswordBearer 9 | from jose import JWTError, ExpiredSignatureError 10 | from sqlmodel import Session 11 | 12 | from app.core.database import get_session 13 | from app.core.security import verify_token 14 | from app.middleware.request_logging import request_id_ctx 15 | from app.models.user import User 16 | from app.models.enums import UserRole 17 | from app.services.user_service import UserService 18 | 19 | logger = logging.getLogger(__name__) 20 | 21 | oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/v1/auth/token") 22 | 23 | 24 | def get_request_id() -> str: 25 | """ 26 | Dependency to get the current request ID from context. 27 | 28 | This can be used in endpoints to access the request ID for logging or other purposes. 29 | 30 | Usage: 31 | @router.get("/example") 32 | async def example(request_id: Annotated[str, Depends(get_request_id)]): 33 | logger.info(f"Processing request {request_id}") 34 | 35 | Returns: 36 | The current request ID, or 'unknown' if not in a request context. 37 | """ 38 | return request_id_ctx.get() 39 | 40 | 41 | async def get_current_user( 42 | token: Annotated[Optional[str], Depends(oauth2_scheme)], 43 | cookie_token: Annotated[Optional[str], Cookie(alias="access_token")] = None, 44 | session: Annotated[Session, Depends(get_session)] = None 45 | ) -> User: 46 | """ 47 | Dependency to get the current authenticated user from the token. 48 | Raises HTTPException with status 401 if authentication fails or token is revoked. 49 | """ 50 | credentials_exception = HTTPException( 51 | status_code=status.HTTP_401_UNAUTHORIZED, 52 | detail="Could not validate credentials", 53 | headers={"WWW-Authenticate": "Bearer"}, 54 | ) 55 | 56 | def _unauthorized(detail: str = "Could not validate credentials") -> HTTPException: 57 | return HTTPException( 58 | status_code=status.HTTP_401_UNAUTHORIZED, 59 | detail=detail, 60 | headers={"WWW-Authenticate": "Bearer"}, 61 | ) 62 | 63 | # Use token from Authorization header or cookie (web video streaming) 64 | token_to_use = token or cookie_token 65 | if token_to_use is None: 66 | raise credentials_exception 67 | 68 | try: 69 | # Verify token signature and expiration 70 | payload = verify_token(token_to_use, "access") 71 | user_id: str = payload.get("sub") 72 | 73 | # Validate claim types 74 | if not isinstance(user_id, str) or not user_id: 75 | raise credentials_exception 76 | 77 | except HTTPException: 78 | raise 79 | except ExpiredSignatureError: 80 | logger.info("Expired token presented", extra={"user_id": locals().get('user_id')}) 81 | raise credentials_exception 82 | except JWTError as e: 83 | logger.warning("JWT error during token validation", extra={"error": str(e)}) 84 | raise credentials_exception 85 | except Exception as e: 86 | logger.error("Unexpected token validation error", extra={"error": str(e)}) 87 | raise credentials_exception 88 | 89 | # Get user from database 90 | user = UserService(session).get_user_by_id(user_id) 91 | if user is None: 92 | raise credentials_exception 93 | 94 | # Check if user is active 95 | if not user.is_active: 96 | logger.info("Inactive user access attempt", extra={"user_id": user_id}) 97 | raise HTTPException( 98 | status_code=status.HTTP_403_FORBIDDEN, 99 | detail="User account is inactive" 100 | ) 101 | 102 | return user 103 | 104 | 105 | async def get_current_admin_user( 106 | current_user: Annotated[User, 
Depends(get_current_user)] 107 | ) -> User: 108 | """ 109 | Dependency to verify that the current user is an admin. 110 | Raises HTTPException with status 403 if user is not an admin. 111 | 112 | Usage: 113 | @router.get("/admin/users") 114 | async def list_users(admin: Annotated[User, Depends(get_current_admin_user)]): 115 | # Only admins can access this endpoint 116 | ... 117 | """ 118 | if current_user.role != UserRole.ADMIN: 119 | logger.warning( 120 | "Non-admin user attempted to access admin endpoint", 121 | extra={"user_id": str(current_user.id), "user_email": current_user.email} 122 | ) 123 | raise HTTPException( 124 | status_code=status.HTTP_403_FORBIDDEN, 125 | detail="Admin access required" 126 | ) 127 | 128 | return current_user 129 | -------------------------------------------------------------------------------- /app/models/mood.py: -------------------------------------------------------------------------------- 1 | """ 2 | Mood-related models. 3 | """ 4 | import uuid 5 | from datetime import date, datetime 6 | from typing import List, Optional, TYPE_CHECKING 7 | 8 | from pydantic import field_validator 9 | from sqlalchemy import Column, ForeignKey, Date, DateTime, String 10 | from sqlmodel import Field, Relationship, Index, CheckConstraint 11 | 12 | from app.core.time_utils import utc_now 13 | from .base import BaseModel 14 | from .enums import MoodCategory 15 | 16 | if TYPE_CHECKING: 17 | from .user import User 18 | from .entry import Entry 19 | 20 | 21 | class Mood(BaseModel, table=True): 22 | """ 23 | System mood definitions for mood tracking. 24 | """ 25 | __tablename__ = "mood" 26 | 27 | name: str = Field(..., unique=True, min_length=1, max_length=100) 28 | icon: Optional[str] = Field(None, max_length=50) 29 | category: str = Field(..., max_length=50) # Should be a MoodCategory enum value 30 | 31 | # Relations 32 | mood_logs: List["MoodLog"] = Relationship(back_populates="mood") 33 | 34 | # Table constraints and indexes 35 | __table_args__ = ( 36 | CheckConstraint('length(name) > 0', name='check_mood_name_not_empty'), 37 | CheckConstraint( 38 | "category IN ('positive', 'negative', 'neutral')", 39 | name='check_mood_category' 40 | ), 41 | ) 42 | 43 | @field_validator('name') 44 | @classmethod 45 | def validate_name(cls, v): 46 | """Validate and normalize mood name to lowercase.""" 47 | if not v or len(v.strip()) == 0: 48 | raise ValueError('Mood name cannot be empty') 49 | # Normalize to lowercase for consistency 50 | return v.strip().lower() 51 | 52 | @field_validator('category') 53 | @classmethod 54 | def validate_category(cls, v): 55 | """Validate category against MoodCategory enum.""" 56 | allowed_categories = {cat.value for cat in MoodCategory} 57 | if v not in allowed_categories: 58 | raise ValueError( 59 | f'Invalid category: {v}. Must be one of {sorted(allowed_categories)}' 60 | ) 61 | return v 62 | 63 | 64 | class MoodLog(BaseModel, table=True): 65 | """ 66 | Simple mood logging for tracking user moods. 
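
    Each log references one system Mood and may link to at most one Entry;
    the unique constraint on entry_id enforces the one-to-one relationship.

    Illustrative construction (values are hypothetical):

        MoodLog(user_id=user.id, mood_id=mood.id, logged_date=date(2025, 1, 1))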
67 | """ 68 | __tablename__ = "mood_log" 69 | 70 | user_id: uuid.UUID = Field( 71 | sa_column=Column( 72 | ForeignKey("user.id", ondelete="CASCADE"), 73 | nullable=False 74 | ) 75 | ) 76 | entry_id: Optional[uuid.UUID] = Field( 77 | sa_column=Column( 78 | ForeignKey("entry.id", ondelete="CASCADE"), 79 | nullable=True, 80 | unique=True # Enforce one-to-one relationship at DB level 81 | ) 82 | ) 83 | mood_id: uuid.UUID = Field( 84 | sa_column=Column( 85 | ForeignKey("mood.id", ondelete="CASCADE"), 86 | nullable=False 87 | ) 88 | ) 89 | note: Optional[str] = Field(None, max_length=500) 90 | logged_date: date = Field( 91 | sa_column=Column(Date, nullable=False, index=True), 92 | description="The date this mood represents (from entry or when logged)" 93 | ) 94 | logged_datetime_utc: datetime = Field( 95 | default_factory=utc_now, 96 | sa_column=Column(DateTime(timezone=True), nullable=False, index=True), 97 | description="UTC timestamp when the mood was logged" 98 | ) 99 | logged_timezone: str = Field( 100 | default="UTC", 101 | sa_column=Column(String(100), nullable=False, default="UTC"), 102 | description="IANA timezone for the mood log context" 103 | ) 104 | 105 | # Relations 106 | user: "User" = Relationship(back_populates="mood_logs") 107 | entry: Optional["Entry"] = Relationship(back_populates="mood_log") 108 | mood: "Mood" = Relationship(back_populates="mood_logs") 109 | 110 | # Table constraints and indexes 111 | __table_args__ = ( 112 | # For fetching a user's mood history (e.g., a timeline) 113 | Index('idx_mood_logs_user_id_logged_date', 'user_id', 'logged_date'), 114 | Index('idx_mood_logs_user_datetime', 'user_id', 'logged_datetime_utc'), 115 | # For analytics across all users (e.g., "moods logged on a date") 116 | Index('idx_mood_logs_logged_date', 'logged_date'), 117 | # For analytics on specific moods (e.g., "how many 'happy' logs exist") 118 | Index('idx_mood_logs_mood_id', 'mood_id'), 119 | Index('idx_mood_logs_user_mood', 'user_id', 'mood_id'), # For "how often does user feel this mood" queries 120 | ) 121 | 122 | @field_validator('note') 123 | @classmethod 124 | def validate_note(cls, v): 125 | if v and len(v.strip()) == 0: 126 | return None 127 | return v.strip() if v else v 128 | -------------------------------------------------------------------------------- /tests/integration/test_journal_endpoints.py: -------------------------------------------------------------------------------- 1 | """ 2 | Journal API integration coverage. 
3 | """ 4 | 5 | from tests.integration.helpers import ( 6 | EndpointCase, 7 | UNKNOWN_UUID, 8 | assert_not_found, 9 | assert_requires_authentication, 10 | ) 11 | from tests.lib import ApiUser, JournivApiClient 12 | 13 | 14 | def _create_sample_journal(api_client: JournivApiClient, token: str, title: str) -> str: 15 | journal = api_client.create_journal( 16 | token, 17 | title=title, 18 | description=f"{title} description", 19 | color="#3B82F6", 20 | icon="📘", 21 | ) 22 | return journal["id"] 23 | 24 | 25 | def test_journal_crud_and_favorites( 26 | api_client: JournivApiClient, 27 | api_user: ApiUser, 28 | ): 29 | """Covers create → retrieve → favorite toggle → update → delete.""" 30 | journal_id = _create_sample_journal(api_client, api_user.access_token, "Primary Journal") 31 | 32 | fetched = api_client.get_journal(api_user.access_token, journal_id) 33 | assert fetched["title"] == "Primary Journal" 34 | assert fetched["is_favorite"] is False 35 | 36 | toggled = api_client.request( 37 | "POST", 38 | f"/journals/{journal_id}/favorite", 39 | token=api_user.access_token, 40 | ).json() 41 | assert toggled["is_favorite"] is True 42 | 43 | favorites = api_client.request( 44 | "GET", "/journals/favorites", token=api_user.access_token 45 | ).json() 46 | assert any(journal["id"] == journal_id for journal in favorites) 47 | 48 | updated = api_client.update_journal( 49 | api_user.access_token, 50 | journal_id, 51 | {"title": "Renamed Journal", "description": "Updated description"}, 52 | ) 53 | assert updated["title"] == "Renamed Journal" 54 | assert updated["description"] == "Updated description" 55 | 56 | api_client.delete_journal(api_user.access_token, journal_id) 57 | response = api_client.request( 58 | "GET", f"/journals/{journal_id}", token=api_user.access_token 59 | ) 60 | assert response.status_code == 404 61 | 62 | 63 | def test_archiving_controls_visibility( 64 | api_client: JournivApiClient, api_user: ApiUser 65 | ): 66 | """Archived journals should be hidden unless explicitly requested.""" 67 | active_id = _create_sample_journal(api_client, api_user.access_token, "Active Journal") 68 | archived_id = _create_sample_journal(api_client, api_user.access_token, "Archived Journal") 69 | 70 | api_client.archive_journal(api_user.access_token, archived_id) 71 | 72 | active_only = api_client.list_journals(api_user.access_token) 73 | assert any(journal["id"] == active_id for journal in active_only) 74 | assert all(journal["id"] != archived_id for journal in active_only) 75 | 76 | with_archived = api_client.list_journals( 77 | api_user.access_token, include_archived=True 78 | ) 79 | assert any(journal["id"] == archived_id for journal in with_archived) 80 | 81 | # unarchive restores default visibility 82 | api_client.unarchive_journal(api_user.access_token, archived_id) 83 | refreshed = api_client.list_journals(api_user.access_token) 84 | assert any(journal["id"] == archived_id for journal in refreshed) 85 | 86 | 87 | def test_journal_endpoints_require_auth(api_client: JournivApiClient): 88 | """Requests without a bearer token should fail fast.""" 89 | assert_requires_authentication( 90 | api_client, 91 | [ 92 | EndpointCase("GET", "/journals/"), 93 | EndpointCase("GET", "/journals/favorites"), 94 | EndpointCase( 95 | "POST", 96 | "/journals/", 97 | json={ 98 | "title": "No auth", 99 | "description": "Missing token should fail", 100 | "color": "#F97316", 101 | "icon": "❌", 102 | }, 103 | ), 104 | ], 105 | ) 106 | 107 | 108 | def test_journal_not_found_errors( 109 | api_client: JournivApiClient, 110 | 
api_user: ApiUser, 111 | ): 112 | """Accessing or mutating unknown journals should return 404.""" 113 | assert_not_found( 114 | api_client, 115 | api_user.access_token, 116 | [ 117 | EndpointCase("GET", f"/journals/{UNKNOWN_UUID}"), 118 | EndpointCase( 119 | "PUT", 120 | f"/journals/{UNKNOWN_UUID}", 121 | json={"title": "Missing"}, 122 | ), 123 | EndpointCase("DELETE", f"/journals/{UNKNOWN_UUID}"), 124 | EndpointCase("POST", f"/journals/{UNKNOWN_UUID}/favorite"), 125 | EndpointCase("POST", f"/journals/{UNKNOWN_UUID}/archive"), 126 | EndpointCase("POST", f"/journals/{UNKNOWN_UUID}/unarchive"), 127 | ], 128 | ) 129 | -------------------------------------------------------------------------------- /scripts/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Journiv Backend Deployment Script 4 | # This script helps in deploying the backend in production or development environment. 5 | 6 | set -e 7 | 8 | # Colors for output 9 | RED='\033[0;31m' 10 | GREEN='\033[0;32m' 11 | YELLOW='\033[1;33m' 12 | BLUE='\033[0;34m' 13 | NC='\033[0m' # No Color 14 | 15 | # Function to print colored output 16 | print_status() { 17 | echo -e "${BLUE}[INFO]${NC} $1" 18 | } 19 | 20 | print_success() { 21 | echo -e "${GREEN}[SUCCESS]${NC} $1" 22 | } 23 | 24 | print_warning() { 25 | echo -e "${YELLOW}[WARNING]${NC} $1" 26 | } 27 | 28 | print_error() { 29 | echo -e "${RED}[ERROR]${NC} $1" 30 | } 31 | 32 | # Function to show usage 33 | show_usage() { 34 | echo "Usage: $0 [OPTIONS]" 35 | echo "" 36 | echo "Options:" 37 | echo " -e, --env ENVIRONMENT Set environment (development|production) [default: development]" 38 | echo " -d, --database DATABASE Set database type (sqlite|postgresql) [default: sqlite]" 39 | echo " -b, --build Force rebuild Docker images" 40 | echo " -t, --detach Run in detached mode" 41 | echo " -h, --help Show this help message" 42 | echo "" 43 | echo "Examples:" 44 | echo " $0 # Development with SQLite (default)" 45 | echo " $0 --env development # Development with SQLite (default)" 46 | echo " $0 --env development --database postgresql # Development with PostgreSQL" 47 | echo " $0 --env production # Production with SQLite (default)" 48 | echo " $0 --env production --database postgresql # Production with PostgreSQL" 49 | echo " $0 --build # Deployment with rebuild" 50 | } 51 | 52 | # Default values 53 | ENVIRONMENT="development" 54 | DATABASE="sqlite" 55 | BUILD_FLAG="" 56 | DETACH_FLAG="" 57 | 58 | # Parse command line arguments 59 | while [[ $# -gt 0 ]]; do 60 | case $1 in 61 | -e|--env) 62 | ENVIRONMENT="$2" 63 | shift 2 64 | ;; 65 | -d|--database) 66 | DATABASE="$2" 67 | shift 2 68 | ;; 69 | -b|--build) 70 | BUILD_FLAG="--build" 71 | shift 72 | ;; 73 | -t|--detach) 74 | DETACH_FLAG="-d" 75 | shift 76 | ;; 77 | -h|--help) 78 | show_usage 79 | exit 0 80 | ;; 81 | *) 82 | print_error "Unknown option: $1" 83 | show_usage 84 | exit 1 85 | ;; 86 | esac 87 | done 88 | 89 | # Validate environment 90 | if [[ "$ENVIRONMENT" != "development" && "$ENVIRONMENT" != "production" ]]; then 91 | print_error "Invalid environment: $ENVIRONMENT. Must be 'development' or 'production'" 92 | exit 1 93 | fi 94 | 95 | # Validate database 96 | if [[ "$DATABASE" != "sqlite" && "$DATABASE" != "postgresql" ]]; then 97 | print_error "Invalid database: $DATABASE. 
Must be 'sqlite' or 'postgresql'" 98 | exit 1 99 | fi 100 | 101 | # Determine compose file 102 | if [[ "$ENVIRONMENT" == "production" ]]; then 103 | if [[ "$DATABASE" == "postgresql" ]]; then 104 | COMPOSE_FILE="docker-compose.prod.postgres.yml" 105 | else 106 | COMPOSE_FILE="docker-compose.prod.sqlite.yml" 107 | fi 108 | else 109 | if [[ "$DATABASE" == "postgresql" ]]; then 110 | COMPOSE_FILE="docker-compose.dev.postgres.yml" 111 | else 112 | COMPOSE_FILE="docker-compose.dev.sqlite.yml" 113 | fi 114 | fi 115 | 116 | # Check if compose file exists 117 | if [[ ! -f "$COMPOSE_FILE" ]]; then 118 | print_error "Compose file not found: $COMPOSE_FILE" 119 | exit 1 120 | fi 121 | 122 | print_status "Starting deployment..." 123 | print_status "Environment: $ENVIRONMENT" 124 | print_status "Database: $DATABASE" 125 | print_status "Compose file: $COMPOSE_FILE" 126 | 127 | # Stop existing containers 128 | print_status "Stopping existing containers..." 129 | docker-compose -f "$COMPOSE_FILE" down 130 | 131 | # Build images if requested 132 | if [[ -n "$BUILD_FLAG" ]]; then 133 | print_status "Building Docker images..." 134 | docker-compose -f "$COMPOSE_FILE" build --no-cache 135 | fi 136 | 137 | 138 | print_status "Starting services..." 139 | docker-compose -f "$COMPOSE_FILE" up $DETACH_FLAG 140 | 141 | if [[ -n "$DETACH_FLAG" ]]; then 142 | print_success "Services started in detached mode" 143 | print_status "Use 'docker-compose -f $COMPOSE_FILE logs -f' to view logs" 144 | print_status "Use 'docker-compose -f $COMPOSE_FILE down' to stop services" 145 | else 146 | print_success "Deployment completed!" 147 | print_status "Database: $DATABASE" 148 | print_status "API available at: http://localhost:8000" 149 | print_status "API docs at: http://localhost:8000/docs" 150 | fi 151 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 📘 Journiv - Private Journal 2 | 3 | > ⚠️ **Beta Software** 4 | > 5 | > Journiv is currently in **beta** and under **active development**. 6 | > While the developers aim to keep data **backward-compatible**, breaking changes may still occur. Please **keep regular backups of your data** to avoid loss during updates. 7 | 8 | 9 | Journiv is a self-hosted private journal. It features comprehensive journaling capabilities including mood tracking, prompt-based journaling, media uploads, analytics, and advanced search, all in a clean, minimal UI.
17 | Visit Journiv Website · Docker Pulls · Join Journiv Discord · Join Journiv Reddit 34 | Status: Beta · Active Development · Backups Recommended 52 | [Screenshot: Journiv_Web_Tab_Mobile] 62 | 👉 Watch Demo Videos 63 |
64 | 65 | ## Quick Start 66 | Give Journiv a quick try with a single Docker command. 67 | 68 | ### Docker Run 69 | 70 | ```bash 71 | docker run -d \ 72 | --name journiv \ 73 | -p 8000:8000 \ 74 | -e SECRET_KEY=your-secret-key-here \ 75 | -e DOMAIN_NAME=192.168.1.1 \ 76 | -v journiv_data:/data \ 77 | --restart unless-stopped \ 78 | swalabtech/journiv-app:latest 79 | ``` 80 | 81 | **Access Journiv:** Open `http://192.168.1.1:8000` (replace with your server IP) in your browser to start journaling! 82 | 83 | **For the complete setup walkthrough, see the [installation guide](https://journiv.com/docs/installation).** 84 | 85 | ## Documentation 86 | 87 | Read the [docs](https://journiv.com/docs) to learn more about Journiv and how to configure it. 88 | 89 | 90 | 91 | ## Contributing 92 | 93 | Contributions are welcome! Please see CONTRIBUTING.md and LICENSE for guidelines. 94 | 95 | ## License 96 | 97 | This project is licensed under the terms specified in the LICENSE file. 98 | 99 | ## Support 100 | 101 | Need help or want to report an issue? 102 | 103 | - **GitHub Issues**: Report bugs or request features 104 | - **Discussions**: Ask questions and share ideas 105 | - **Email**: journiv@protonmail.com 106 | - **Discord**: Join our [community server](https://discord.gg/CuEJ8qft46) 107 | 108 | ![Star History Chart](https://api.star-history.com/svg?repos=journiv/journiv-app&type=Date) 109 | 110 | --- 111 | 112 | **Made with care for privacy-conscious journaling** 113 | 114 | Disclaimer: 115 | This repository contains portions of code, documentation, or text generated with the assistance of AI/LLM tools. All outputs have been reviewed and adapted by the author to the best of their ability before inclusion. 116 | -------------------------------------------------------------------------------- /tests/integration/test_auth_endpoints.py: -------------------------------------------------------------------------------- 1 | """ 2 | Integration coverage for authentication endpoints. 3 | """ 4 | import uuid 5 | 6 | from tests.lib import ApiUser, JournivApiClient, make_api_user 7 | 8 | 9 | def _unique_credentials(prefix: str = "auth") -> tuple[str, str]: 10 | suffix = uuid.uuid4().hex[:8] 11 | email = f"{prefix}-{suffix}@example.com" 12 | password = f"Pass-{suffix}-Aa1!"
13 | return email, password 14 | 15 | 16 | def test_user_registration_and_login(api_client: JournivApiClient): 17 | """New users can register, log in, and fetch their profile.""" 18 | email, password = _unique_credentials() 19 | created = api_client.register_user( 20 | email=email, 21 | password=password, 22 | name="Integration Test", 23 | ) 24 | assert created["email"] == email 25 | assert created["is_active"] is True 26 | assert created["time_zone"] 27 | assert created["is_oidc_user"] is False 28 | assert created["name"] == "Integration Test" 29 | 30 | tokens = api_client.login(email, password) 31 | assert tokens["user"]["email"] == email 32 | assert tokens["user"]["is_active"] is True 33 | assert tokens["access_token"] 34 | assert tokens["refresh_token"] 35 | 36 | profile = api_client.current_user(tokens["access_token"]) 37 | assert profile["email"] == email 38 | assert profile["id"] == tokens["user"]["id"] 39 | 40 | 41 | def test_login_rejects_invalid_credentials(api_client: JournivApiClient): 42 | """Invalid credentials should return 401 without leaking detail.""" 43 | response = api_client.request( 44 | "POST", 45 | "/auth/login", 46 | json={"email": "missing@example.com", "password": "nope"}, 47 | ) 48 | assert response.status_code == 401 49 | assert response.json()["detail"] 50 | 51 | 52 | def test_refresh_token_flow(api_client: JournivApiClient): 53 | """Refreshing the token returns a brand new access token.""" 54 | user = make_api_user(api_client) 55 | assert user.refresh_token, "API did not issue a refresh token" 56 | 57 | refreshed = api_client.refresh(user.refresh_token) 58 | assert refreshed["access_token"] != user.access_token 59 | 60 | profile = api_client.current_user(refreshed["access_token"]) 61 | assert profile["id"] == user.user_id 62 | 63 | 64 | def test_refresh_rejects_invalid_token(api_client: JournivApiClient): 65 | """Tampered refresh tokens should be rejected.""" 66 | response = api_client.request( 67 | "POST", 68 | "/auth/refresh", 69 | json={"refresh_token": "not-a-real-token"}, 70 | ) 71 | assert response.status_code == 401 72 | assert response.json()["detail"] 73 | 74 | 75 | def test_oauth_token_endpoint_accepts_form_credentials( 76 | api_client: JournivApiClient, api_user: ApiUser 77 | ): 78 | """OAuth2 password grant endpoint should mirror login behavior.""" 79 | response = api_client.request( 80 | "POST", 81 | "/auth/token", 82 | data={"username": api_user.email, "password": api_user.password}, 83 | ) 84 | assert response.status_code == 200 85 | payload = response.json() 86 | assert payload["access_token"] 87 | assert payload["refresh_token"] 88 | assert payload["token_type"] == "bearer" 89 | 90 | 91 | def test_oauth_token_endpoint_rejects_bad_credentials(api_client: JournivApiClient): 92 | """OAuth2 password grant should return 401 for invalid credentials.""" 93 | response = api_client.request( 94 | "POST", 95 | "/auth/token", 96 | data={"username": "unknown@example.com", "password": "nope"}, 97 | ) 98 | assert response.status_code == 401 99 | 100 | 101 | def test_logout_requires_and_uses_authentication( 102 | api_client: JournivApiClient, api_user: ApiUser 103 | ): 104 | unauthorized = api_client.request("POST", "/auth/logout") 105 | assert unauthorized.status_code == 401 106 | 107 | response = api_client.request( 108 | "POST", "/auth/logout", token=api_user.access_token 109 | ) 110 | assert response.status_code == 200 111 | body = response.json() 112 | assert body["message"] 113 | assert body["detail"] 114 | 115 | 116 | def 
test_protected_endpoint_requires_token(api_client: JournivApiClient): 117 | """Hitting a protected endpoint without auth returns 401.""" 118 | response = api_client.request("GET", "/users/me") 119 | assert response.status_code == 401 120 | 121 | 122 | def test_registering_duplicate_email_is_rejected( 123 | api_client: JournivApiClient, api_user: ApiUser 124 | ): 125 | """Registering the same email twice should raise 400/409.""" 126 | response = api_client.request( 127 | "POST", 128 | "/auth/register", 129 | json={ 130 | "email": api_user.email, 131 | "password": api_user.password, 132 | "name": "Dup User", 133 | "first_name": "Dup", 134 | "last_name": "User", 135 | }, 136 | ) 137 | assert response.status_code in (400, 409) 138 | detail = response.json().get("detail", "") 139 | assert "already" in detail.lower() 140 | --------------------------------------------------------------------------------