├── .dockerignore ├── .env.example ├── .github └── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── backend ├── .coveragerc ├── .gitignore ├── __init__.py ├── alembic.ini ├── alembic │ ├── README │ ├── env.py │ ├── script.py.mako │ └── versions │ │ ├── 0157205d4a2e_updating_users_table_for_totp_setup.py │ │ ├── 09c9fb1450dd_adding_indexes_to_users_table.py │ │ ├── 0f3ae9fc03e3_users_table_migration.py │ │ ├── 103c5bb766a1_converting_last_used_otp_to_string.py │ │ ├── 12285fde0fd3_new_epubmetadata_table_indexes.py │ │ ├── 18d14440c61c_updating_users_table_for_checks_against_.py │ │ └── 1dca544a8766_new_tables.py ├── celery_app.py ├── config │ ├── __init__.py │ ├── config.py │ └── logger.py ├── entrypoint.sh ├── functions │ ├── __init__.py │ ├── auth.py │ ├── blueprints.py │ ├── book_management.py │ ├── db.py │ ├── extensions.py │ ├── init.py │ ├── metadata │ │ ├── __init__.py │ │ └── scan.py │ ├── tasks │ │ └── scan.py │ └── utils.py ├── gunicorn_logging.py ├── main.py ├── migrations.py ├── models │ ├── __init__.py │ ├── base.py │ ├── epub_metadata.py │ ├── progress_mapping.py │ └── users.py ├── requirements.txt ├── routes │ ├── __init__.py │ ├── admin.py │ ├── auth.py │ ├── authors.py │ ├── books.py │ ├── media.py │ ├── opds.py │ ├── react.py │ ├── scan.py │ └── users.py └── tests │ ├── __init__.py │ ├── conftest.py │ ├── epubs │ ├── Pride_and_Prejudice.epub │ ├── Test Book - Author One.epub │ └── Test Book 2 - Author Two.epub │ ├── test.png │ ├── test_celery.py │ ├── test_config_config.py │ ├── test_config_logger.py │ ├── test_functions_book_management.py │ ├── test_functions_db.py │ ├── test_functions_init.py │ ├── test_functions_metadata_scan.py │ ├── test_functions_utils.py │ ├── test_routes_admin.py │ ├── test_routes_auth.py │ ├── test_routes_authors.py │ ├── test_routes_books.py │ ├── test_routes_media.py │ ├── test_routes_react.py │ └── test_routes_users.py ├── bookhaven_home.png ├── 
compose.yml.example └── frontend ├── .gitignore ├── eslint.config.js ├── index.html ├── package-lock.json ├── package.json ├── public ├── icon-180x180.png ├── icon-192x192.png ├── icon-512x512.png ├── icon.svg ├── manifest.json └── webfonts │ ├── fa-brands-400.ttf │ ├── fa-brands-400.woff2 │ ├── fa-regular-400.ttf │ ├── fa-regular-400.woff2 │ ├── fa-solid-900.ttf │ ├── fa-solid-900.woff2 │ ├── fa-v4compatibility.ttf │ └── fa-v4compatibility.woff2 ├── src ├── App.css ├── App.tsx ├── assets │ └── react.svg ├── components │ ├── AccountModal.tsx │ ├── AdminModal.tsx │ ├── All.css │ ├── AuthorGridCell.tsx │ ├── AuthorPage.tsx │ ├── Authors.tsx │ ├── BookCard.tsx │ ├── Books.tsx │ ├── Home.tsx │ ├── Login.tsx │ ├── Otp.tsx │ ├── Reader.tsx │ ├── SearchBar.tsx │ └── Sidebar.tsx ├── context │ └── ConfigProvider.tsx ├── index.css ├── main.tsx ├── styles │ └── custom-bootstrap.scss ├── types.ts ├── utilities │ ├── apiClient.ts │ └── fetchApiConfig.ts └── vite-env.d.ts ├── tsconfig.app.json ├── tsconfig.json ├── tsconfig.node.json └── vite.config.ts /.dockerignore: -------------------------------------------------------------------------------- 1 | backend/venv 2 | backend/.env 3 | backend/apply_env.sh 4 | **/__pycache__ 5 | **/*.pyc 6 | **/*.pyo 7 | backend/tests 8 | frontend/node_modules -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | ################################################### 2 | ## APPLICATION CONFIGURATION: ## 3 | ################################################### 4 | 5 | # BASE DIRECTORY (REQUIRED) 6 | # The directory where your ebooks are mounted inside the container. 7 | # Example: /ebooks 8 | BASE_DIRECTORY=/ebooks 9 | 10 | # BASE URL (REQUIRED) 11 | # The URL where the application will be accessible. Include protocol, hostname, and optional port. 
12 | # Format: https://books.example.com or http://localhost:5000 13 | BASE_URL= 14 | 15 | # SECRET KEY (REQUIRED) 16 | # Used for encrypting JWT tokens. 17 | # Generate a new key using openssl rand -hex 32 18 | SECRET_KEY= 19 | 20 | # ADMIN EMAIL (REQUIRED ON FIRST STARTUP) 21 | # Used to set the initial admin user's email address on first startup. 22 | # Can be removed/unset after initialization 23 | ADMIN_EMAIL= 24 | 25 | # ADMIN PASS (REQUIRED ON FIRST STARTUP) 26 | # Used to set the initial admin user's password on first startup. 27 | # Can be removed/unset after initialization 28 | ADMIN_PASS= 29 | 30 | # ADMIN RESET (OPTIONAL) 31 | # Used to set the admin user's password to ADMIN_PASS, and to remove configured MFA 32 | # Should be used as a last resort if admin credentials or MFA method have been lost 33 | ADMIN_RESET=false 34 | 35 | # UI BASE COLOR (OPTIONAL) 36 | # Used to set the base color of the UI. 37 | # Valid options: green (default), blue, red, yellow, white, black, pink, purple, orange, cyan 38 | UI_BASE_COLOR=green 39 | 40 | # WRITE TO EPUB (OPTIONAL) 41 | # If this is set any metadata changes are written to the ePub file itself as well as the database 42 | # WARNING: Changes are one-way and are irreversible 43 | # NOTE: If your ePub file does not already have a cover image, BookHaven will not be able to add a new one. 44 | # It can only replace an existing cover image in an ePub file at this time. 45 | WRITE_TO_EPUB=false 46 | 47 | # OPDS ENABLED (OPTIONAL) 48 | # If this is set a new /opds endpoint is exposed to use with any device or app that supports the OPDS spec. 49 | # The endpoint uses basic authentication which can be insecure, especially over http. 50 | # It also does not work for OIDC accounts, only local, and fully bypasses MFA. Use at your own risk. 51 | # Default: False - due to above security considerations. 52 | #OPDS_ENABLED=false 53 | 54 | # REDIS OPDS DB (OPTIONAL) 55 | # The Redis database used by OPDS for session management. 
56 | # Default: 8 57 | #REDIS_OPDS_DB=8 58 | 59 | # CF ACCESS AUTH (OPTIONAL) 60 | # Used to set whether or not you're authenticating through a Cloudflare Access application 61 | # Default: False 62 | CF_ACCESS_AUTH=false 63 | 64 | # OIDC ENABLED (OPTIONAL) 65 | # Used to enable OIDC support 66 | # Default: False 67 | OIDC_ENABLED=false 68 | 69 | # OIDC CLIENT ID (REQUIRED IF OIDC_ENABLED) 70 | # Your client-id provided to you by your OIDC provider 71 | # OIDC_CLIENT_ID= 72 | 73 | # OIDC_CLIENT_SECRET (REQUIRED IF OIDC_ENABLED) 74 | # Your client secret provided to you by your OIDC provider 75 | # OIDC_CLIENT_SECRET= 76 | 77 | # OIDC PROVIDER (REQUIRED IF OIDC_ENABLED) 78 | # Your OIDC provider 79 | # e.g. keycloak 80 | # OIDC_PROVIDER= 81 | 82 | # OIDC METADATA ENDPOINT (REQUIRED IF OIDC_ENABLED) 83 | # The openid-configuration metadata endpoint for your provider 84 | # e.g. https://accounts.google.com/.well-known/openid-configuration 85 | # OIDC_METADATA_ENDPOINT= 86 | 87 | # OIDC AUTO REGISTER USER (OPTIONAL) 88 | # Automatically register new users that log in using OIDC 89 | # Default: false 90 | OIDC_AUTO_REGISTER_USER=false 91 | 92 | # OIDC AUTO LINK USER (OPTIONAL) 93 | # Automatically links existing users to OIDC when logging in via OIDC for the first time 94 | # Note: If disabled users can still manually link their accounts to OIDC from their Account Settings 95 | # Default: false 96 | OIDC_AUTO_LINK_USER=false 97 | 98 | # LOG LEVEL (OPTIONAL) 99 | # The logging level for the application. Defaults to 'INFO'. 100 | # Options: DEBUG, INFO, WARNING, ERROR, CRITICAL 101 | LOG_LEVEL=INFO 102 | 103 | # APP PORT (REQUIRED FOR DOCKER COMPOSE) 104 | # The port the app will listen on within the container. This is mapped via Docker Compose. 105 | # Default: 5000 106 | APP_PORT=5000 107 | 108 | # ENABLE HTTPS (OPTIONAL) 109 | # Whether or not your app is HTTPS enabled internally. 110 | # Can be disabled if your reverse proxy does SSL. 
111 | # Default: false 112 | ENABLE_HTTPS=false 113 | 114 | # SSL CERT FILE (REQUIRED IF ENABLE_HTTPS=true) 115 | # Path relative to /ssl/ where your certificate is mounted 116 | # For example, if your certificate is mounted to /ssl/cert.crt, then SSL_CERT_FILE=cert.crt 117 | # SSL_CERT_FILE=cert.crt 118 | 119 | # SSL KEY FILE (REQUIRED IF ENABLE_HTTPS=true) 120 | # Path relative to /ssl/ where your private key is mounted 121 | # For example, if your private key is mounted to /ssl/key.key, then SSL_KEY_FILE=key.key 122 | # SSL_KEY_FILE=key.key 123 | 124 | # RATE LIMITER ENABLED (OPTIONAL) 125 | # Whether or not to enable the IP-based rate limiter 126 | # Default: True 127 | RATE_LIMITER_ENABLED=true 128 | 129 | # SCHEDULER ENABLED (OPTIONAL) 130 | # Whether or not to enable the periodic scanning of your library. 131 | # Manual library scanning is still available whether disabled or enabled. 132 | # Default: True 133 | SCHEDULER_ENABLED=true 134 | 135 | # PERIODIC SCAN INTERVAL (OPTIONAL) 136 | # How frequently the scheduler will scan your library, in minutes 137 | # Default: 10 138 | PERIODIC_SCAN_INTERVAL=10 139 | 140 | ################################################### 141 | ## DATABASE CONFIGURATION FOR APPLICATION: ## 142 | ################################################### 143 | 144 | # DATABASE TYPE (REQUIRED) 145 | # Specify the type of database to use for the application. 146 | # Supported options: mysql, postgres, sqlite (not recommended for production) 147 | DB_TYPE=mysql 148 | 149 | # DATABASE HOST (REQUIRED) 150 | # The hostname or IP address of the database. 151 | # Defaults to 'mysql' when using the provided MySQL container. 152 | DB_HOST=mysql 153 | 154 | # DATABASE PORT (REQUIRED FOR DOCKER COMPOSE OR CUSTOM CONNECTIONS) 155 | # The port used to connect to the database. Required for MySQL and PostgreSQL. 156 | # Default: 3306 for MySQL, 5432 for PostgreSQL, or leave unset for SQLite. 
157 | DB_PORT=3306 158 | 159 | # DATABASE NAME (REQUIRED) 160 | # Name of the database used to store EPUB metadata. 161 | # Default: epub_library 162 | DB_NAME=epub_library 163 | 164 | # DATABASE USER (REQUIRED) 165 | # The username for the database connection. 166 | DB_USER=epub_user 167 | 168 | # DATABASE PASSWORD (REQUIRED) 169 | # The password for the database connection. 170 | # Use a strong and secure password. 171 | DB_PASSWORD=secure_password_here 172 | 173 | ################################################### 174 | ## REDIS CONFIGURATION FOR APPLICATION: ## 175 | ################################################### 176 | 177 | # REDIS HOST (REQUIRED) 178 | # Host running Redis 179 | # Default: localhost 180 | REDIS_HOST=redis 181 | 182 | # REDIS PORT (REQUIRED) 183 | # Redis port 184 | # Default: 6379 185 | REDIS_PORT=6379 186 | 187 | # REDIS PASSWORD (REQUIRED if Redis auth is enabled on your REDIS instance) 188 | # Default: N/A, DOCKER COMPOSE REDIS does not use auth 189 | # REDIS_PASSWORD=password 190 | 191 | # REDIS LIMITER DATABASE 192 | # Redis database for the rate limiter to use to keep track of IPs/Requests 193 | # Default: 0 194 | REDIS_LIMITER_DB=0 195 | 196 | # REDIS SCHEDULER DATABASE 197 | # Redis database for the periodic library scanner to use 198 | # Default: 5 199 | REDIS_SCHEDULER_DB=5 200 | 201 | ################################################### 202 | ## MYSQL CONTAINER CONFIGURATION: ## 203 | ################################################### 204 | 205 | # MYSQL ROOT PASSWORD (REQUIRED FOR DOCKER COMPOSE MYSQL CONTAINER) 206 | # Set the root password for MySQL if using the MySQL container provided in the Docker Compose file. 207 | # Use a strong and secure password. 208 | MYSQL_ROOT_PASSWORD=secure_mysql_root_password 209 | 210 | # MYSQL DATABASE (REQUIRED FOR DOCKER COMPOSE MYSQL CONTAINER) 211 | # Specifies the default database created in the MySQL container. 
212 | MYSQL_DATABASE=epub_library 213 | 214 | # (REQUIRED FOR DOCKER COMPOSE MYSQL CONTAINER) 215 | MYSQL_USER=epub_user 216 | 217 | # (REQUIRED FOR DOCKER COMPOSE MYSQL CONTAINER) 218 | MYSQL_PASSWORD=secure_password_here -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "[BUG]" 5 | labels: bug 6 | assignees: HrBingR 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Log Output** 27 | Add any logs, either docker logs or console logs. 28 | 29 | **Additional context** 30 | Add any other context about the problem here. 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "[FEATURE REQUEST]" 5 | labels: enhancement 6 | assignees: HrBingR 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. 
18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | compose.yml 2 | .env 3 | project_roadmap.md 4 | dbuild.sh -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.13 2 | 3 | WORKDIR /app 4 | 5 | COPY backend/requirements.txt /app/backend/requirements.txt 6 | 7 | RUN pip install --upgrade pip && \ 8 | pip install -r /app/backend/requirements.txt 9 | 10 | RUN apt-get update && apt-get install -y tini && apt-get install openssl 11 | 12 | COPY backend/ /app/backend 13 | COPY frontend/dist/ /app/frontend/dist 14 | 15 | RUN chmod +x /app/backend/entrypoint.sh 16 | 17 | WORKDIR /app/backend 18 | 19 | ENTRYPOINT ["/usr/bin/tini", "--", "/app/backend/entrypoint.sh"] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BookHaven 2 | 3 | ![BookHaven Home](./bookhaven_home.png) 4 | 5 | ## Table of Contents 6 | 1. [What the Application Does]() 7 | 2. [Features]() 8 | 3. [Deployment]() 9 | - [Docker]() 10 | - [Local]() 11 | - [Development]() 12 | 13 | 4. [Building the Application]() 14 | 15 | 16 | ## What the Application Does 17 | BookHaven scans and manages your local library of EPUB ebooks, and allows you to read and download your ebooks on any of your devices, with a sleek, modern, and responsive interface. 18 | 19 | ## Features 20 | - **Read eBooks in the Browser** 21 | Users can access and read their EPUB-formatted eBooks directly without any additional software. 22 | - **Download eBooks** 23 | Easily download a copy of any eBook in the collection to your device. 
24 | - **Non-Destructive Metadata Editing** 25 | Changes to eBook metadata (e.g., title, author, series) are stored in the database, leaving the original EPUB files untouched. 26 | - **Automatic or Manual Library Scanning** 27 | Once deployed the app will periodically, on a configurable interval, scan your library for any changes, while also allowing for manual library scans. 28 | - **Home Page with Alphabetical Sorting** 29 | Books are sorted first alphabetically by their author and then by series, offering a clean and intuitive browsing experience. 30 | - **Powerful Search** 31 | The search feature on the home page allows users to filter their library by author, book title, or series, helping locate specific content quickly. 32 | - **Filters** 33 | Basic filters are made available to allow filtering for books marked as favorite, as finished, or books that haven't been marked as finished. 34 | - **Author Page with Intuitive Navigation** 35 | A dedicated author page organizes authors into a clickable alphabetical grid. Users can click on a letter to expand its list of authors, navigate to an author's page, and view their books sorted alphabetically by series and standalone titles. 36 | - **Supports CloudFlare Access** 37 | Has a flag to bypass the login screen when making use of CloudFlare Access. See `.env.example` for details. 38 | - **OIDC Support** 39 | Allows for the configuration of OIDC for new user registration, and for existing users. 40 | - **OPDS Support** 41 | Use your favorite OPDS-compatible e-reader or app to browse, download, and read books from your library. 42 | 43 | ## Deployment 44 | 45 | ### Requirements 46 | 47 | At a minimum to run the application you require: 48 | 49 | - A database (MySQL, SQLite, PostgreSQL) 50 | - A Redis instance 51 | 52 | For quick and easy deployment the `compose.yml.example` file defines both of these already. 53 | 54 | ### Docker 55 | Follow these steps to deploy the application with Docker Compose: 56 | 1. 
**Download Configuration Files** 57 | Download or clone the repository to get `compose.yml.example` and `.env.example`. 58 | 59 | 2. **Rename the Example Files** 60 | ``` bash 61 | mv compose.yml.example compose.yml 62 | mv .env.example .env 63 | ``` 64 | 3. **Customize the `.env` File** 65 | 66 | Edit `.env` to configure essential settings: 67 | 68 | - **BASE_DIRECTORY**: Path to your eBooks directory. 69 | - **BASE_URL**: URL where your app will be accessible. 70 | - **DB_TYPE**: Database engine (e.g., mysql, sqlite, postgres). 71 | - Other `DB_*` settings for your database configuration. 72 | 73 | 4. **Start the Application** 74 | 75 | Run the following command: 76 | ``` bash 77 | docker compose up -d 78 | ``` 79 | This starts the `BookHaven`, Redis, and MySQL containers. 80 | 5. **Access the Application** 81 | 82 | Open your browser and navigate to the `BASE_URL`:`APP_PORT` you configured (default is `http://localhost:5000`). 83 | 84 | 6. **Stopping the Application** 85 | 86 | ``` bash 87 | docker compose down 88 | ``` 89 | 90 | ### Development 91 | Follow these steps to deploy for development: 92 | 1. **Clone the repository**: 93 | ``` bash 94 | git clone https://github.com/HrBingR/BookHaven.git 95 | cd BookHaven 96 | ``` 97 | 98 | 2. **Rename the example files**: 99 | ```bash 100 | mv compose.yml.example compose.yml 101 | mv .env.example .env 102 | ``` 103 | 104 | 3. **Customize the `.env` file**: 105 | 106 | Edit `.env` to configure essential settings. 107 | 108 | 4. **Modify the `compose.yml` file**: 109 | 110 | Change: 111 | 112 | ```yaml 113 | epub-reader: 114 | image: hrbingr/bookhaven:latest 115 | ``` 116 | 117 | To: 118 | 119 | ```yaml 120 | epub-reader: 121 | build: 122 | context: . 123 | dockerfile: Dockerfile 124 | ``` 125 | 126 | 5. **Build the container**: 127 | ```bash 128 | docker compose up --build -d 129 | ``` 130 | 131 | 6. **Access the app**: 132 | 133 | Access the app on the `BASE_URL` and `APP_PORT` defined in the `.env` file. 
134 | 135 | ## Building the Application 136 | To build the application for production: 137 | 1. **Build the Frontend**: 138 | ``` bash 139 | cd frontend 140 | npm run build:dev 141 | ``` 142 | 2. **Build the Docker Image**: 143 | 144 | In the root project directory (BookHaven), run: 145 | ``` bash 146 | docker build -t tag:version . 147 | ``` 148 | Replace `tag:version` with your preferred image name and version (e.g., `bookhaven:1.0.0`). 149 | 150 | ## Change log: 151 | 152 | - v1.0.x - Initial Release 153 | - v1.1.0 - Added OIDC support 154 | - v1.1.1 - Fixed a bug where OIDC front-end components would still render with OIDC disabled. 155 | - v1.2.0 - Added support for optionally writing metadata to the ePub file, instead of just to the database. 156 | - v1.2.1 - Fixed issue where Celery would detect PERIODIC_SCAN_INTERVAL, if explicitly defined, as a string and throw an exception. 157 | - v1.2.2 - Fixed migration and DB model logic that would prevent Postgres users from successfully initializing the database. 158 | - v1.3.0 - Added rudimentary OPDS support. 159 | - v1.3.1 - Improved OPDS functionality. 
160 | 161 | ## TODO: 162 | 163 | - Fix scan logic to ensure initial library scan on first startup 164 | - Update tests with latest additions 165 | - Explore support for other formats 166 | - Explore adding more metadata fields for editing -------------------------------------------------------------------------------- /backend/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | */__init__.py 4 | */tests/* 5 | */alembic/* 6 | gunicorn_logging.py 7 | migrations.py -------------------------------------------------------------------------------- /backend/.gitignore: -------------------------------------------------------------------------------- 1 | ../.idea 2 | .env 3 | epub_library.db 4 | venv 5 | tests/test.db 6 | /tests/cov.html/ 7 | apply_config.sh -------------------------------------------------------------------------------- /backend/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/__init__.py -------------------------------------------------------------------------------- /backend/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | # Use forward slashes (/) also on windows to provide an os agnostic path 6 | script_location = alembic 7 | 8 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 9 | # Uncomment the line below if you want the files to be prepended with date and time 10 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 11 | # for all available tokens 12 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 13 | 14 | # sys.path path, will be prepended to sys.path if present. 
15 | # defaults to the current working directory. 16 | prepend_sys_path = . 17 | 18 | # timezone to use when rendering the date within the migration file 19 | # as well as the filename. 20 | # If specified, requires the python>=3.9 or backports.zoneinfo library. 21 | # Any required deps can installed by adding `alembic[tz]` to the pip requirements 22 | # string value is passed to ZoneInfo() 23 | # leave blank for localtime 24 | # timezone = 25 | 26 | # max length of characters to apply to the "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to alembic/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | # version_path_separator = newline 53 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
54 | 55 | # set to 'true' to search source files recursively 56 | # in each "version_locations" directory 57 | # new in Alembic version 1.10 58 | # recursive_version_locations = false 59 | 60 | # the output encoding used when revision files 61 | # are written from script.py.mako 62 | # output_encoding = utf-8 63 | 64 | #sqlalchemy.url = sqlite:///epub_library.db 65 | 66 | 67 | [post_write_hooks] 68 | # post_write_hooks defines scripts or Python functions that are run 69 | # on newly generated revision scripts. See the documentation for further 70 | # detail and examples 71 | 72 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 73 | # hooks = black 74 | # black.type = console_scripts 75 | # black.entrypoint = black 76 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 77 | 78 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 79 | # hooks = ruff 80 | # ruff.type = exec 81 | # ruff.executable = %(here)s/.venv/bin/ruff 82 | # ruff.options = --fix REVISION_SCRIPT_FILENAME 83 | 84 | # Logging configuration 85 | [loggers] 86 | keys = root,sqlalchemy,alembic 87 | 88 | [handlers] 89 | keys = console 90 | 91 | [formatters] 92 | keys = generic 93 | 94 | [logger_root] 95 | level = WARNING 96 | handlers = console 97 | qualname = 98 | 99 | [logger_sqlalchemy] 100 | level = WARNING 101 | handlers = 102 | qualname = sqlalchemy.engine 103 | 104 | [logger_alembic] 105 | level = INFO 106 | handlers = 107 | qualname = alembic 108 | 109 | [handler_console] 110 | class = StreamHandler 111 | args = (sys.stderr,) 112 | level = NOTSET 113 | formatter = generic 114 | 115 | [formatter_generic] 116 | format = %(levelname)-5.5s [%(name)s] %(message)s 117 | datefmt = %H:%M:%S 118 | -------------------------------------------------------------------------------- /backend/alembic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. 
-------------------------------------------------------------------------------- /backend/alembic/env.py: -------------------------------------------------------------------------------- 1 | from functions.db import get_database_url 2 | from sqlalchemy import create_engine, pool 3 | from alembic import context 4 | 5 | DATABASE_URL = get_database_url() 6 | 7 | if not DATABASE_URL: 8 | raise ValueError("DATABASE_URL is not configured. Please check your environment variables or config file.") 9 | 10 | # this is the Alembic Config object, which provides 11 | # access to the values within the .ini file in use. 12 | config = context.config 13 | config.set_main_option("sqlalchemy.url", DATABASE_URL) 14 | 15 | # Interpret the config file for Python logging. 16 | # This line sets up loggers basically. 17 | # if config.config_file_name is not None: 18 | # fileConfig(config.config_file_name) 19 | 20 | # add your model's MetaData object here 21 | # for 'autogenerate' support 22 | # from myapp import mymodel 23 | # target_metadata = mymodel.Base.metadata 24 | 25 | from models.base import Base 26 | from models.epub_metadata import EpubMetadata 27 | from models.users import Users 28 | from models.progress_mapping import ProgressMapping 29 | target_metadata = Base.metadata 30 | 31 | # other values from the config, defined by the needs of env.py, 32 | # can be acquired: 33 | # my_important_option = config.get_main_option("my_important_option") 34 | # ... etc. 35 | 36 | 37 | def run_migrations_offline() -> None: 38 | """Run migrations in 'offline' mode. 39 | 40 | This configures the context with just a URL 41 | and not an Engine, though an Engine is acceptable 42 | here as well. By skipping the Engine creation 43 | we don't even need a DBAPI to be available. 44 | 45 | Calls to context.execute() here emit the given string to the 46 | script output. 
47 | 48 | """ 49 | url = config.get_main_option("sqlalchemy.url") 50 | context.configure( 51 | url=url, 52 | target_metadata=target_metadata, 53 | literal_binds=True, 54 | dialect_opts={"paramstyle": "named"}, 55 | ) 56 | 57 | with context.begin_transaction(): 58 | context.run_migrations() 59 | 60 | 61 | def run_migrations_online() -> None: 62 | """Run migrations in 'online' mode. 63 | 64 | In this scenario we need to create an Engine 65 | and associate a connection with the context. 66 | 67 | """ 68 | connectable = create_engine(DATABASE_URL, poolclass=pool.NullPool) 69 | 70 | with connectable.connect() as connection: 71 | context.configure( 72 | connection=connection, target_metadata=target_metadata 73 | ) 74 | 75 | with context.begin_transaction(): 76 | context.run_migrations() 77 | 78 | 79 | if context.is_offline_mode(): 80 | run_migrations_offline() 81 | else: 82 | run_migrations_online() 83 | -------------------------------------------------------------------------------- /backend/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | ${imports if imports else ""} 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision: str = ${repr(up_revision)} 16 | down_revision: Union[str, None] = ${repr(down_revision)} 17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 19 | 20 | 21 | def upgrade() -> None: 22 | ${upgrades if upgrades else "pass"} 23 | 24 | 25 | def downgrade() -> None: 26 | ${downgrades if downgrades else "pass"} 27 | -------------------------------------------------------------------------------- /backend/alembic/versions/0157205d4a2e_updating_users_table_for_totp_setup.py: -------------------------------------------------------------------------------- 1 | """Updating users table for TOTP setup 2 | 3 | Revision ID: 0157205d4a2e 4 | Revises: 0f3ae9fc03e3 5 | Create Date: 2025-01-18 13:53:45.931732 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = '0157205d4a2e' 16 | down_revision: Union[str, None] = '0f3ae9fc03e3' 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.add_column('users', sa.Column('mfa_enabled', sa.Boolean(), nullable=False)) 24 | op.add_column('users', sa.Column('mfa_secret', sa.String(length=255), nullable=True)) 25 | # ### end Alembic commands ### 26 | 27 | 28 | def downgrade() -> None: 29 | # ### commands auto generated by Alembic - please adjust! 
### 30 | op.drop_column('users', 'mfa_secret') 31 | op.drop_column('users', 'mfa_enabled') 32 | # ### end Alembic commands ### 33 | -------------------------------------------------------------------------------- /backend/alembic/versions/09c9fb1450dd_adding_indexes_to_users_table.py: -------------------------------------------------------------------------------- 1 | """Adding indexes to Users table 2 | 3 | Revision ID: 09c9fb1450dd 4 | Revises: 103c5bb766a1 5 | Create Date: 2025-02-06 23:24:06.299990 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = '09c9fb1450dd' 16 | down_revision: Union[str, None] = '103c5bb766a1' 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.create_index('ix_users_email', 'users', ['email'], unique=False) 24 | op.create_index('ix_users_id', 'users', ['id'], unique=False) 25 | op.create_index('ix_users_oidc_user_id', 'users', ['oidc_user_id'], unique=False) 26 | # ### end Alembic commands ### 27 | 28 | 29 | def downgrade() -> None: 30 | # ### commands auto generated by Alembic - please adjust! 
### 31 | op.drop_index('ix_users_oidc_user_id', table_name='users') 32 | op.drop_index('ix_users_id', table_name='users') 33 | op.drop_index('ix_users_email', table_name='users') 34 | # ### end Alembic commands ### 35 | -------------------------------------------------------------------------------- /backend/alembic/versions/0f3ae9fc03e3_users_table_migration.py: -------------------------------------------------------------------------------- 1 | """Users Table Migration 2 | 3 | Revision ID: 0f3ae9fc03e3 4 | Revises: 1dca544a8766 5 | Create Date: 2025-01-08 22:56:10.849442 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = '0f3ae9fc03e3' 16 | down_revision: Union[str, None] = '1dca544a8766' 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | auth_type = sa.Enum('oidc', 'local', name='auth_type') 24 | auth_type.create(op.get_bind(), checkfirst=True) 25 | op.add_column( 26 | 'users', 27 | sa.Column('auth_type', auth_type, nullable=False) 28 | ) 29 | # ### end Alembic commands ### 30 | 31 | 32 | def downgrade() -> None: 33 | # ### commands auto generated by Alembic - please adjust! 
### 34 | op.drop_column('users', 'auth_type') 35 | 36 | auth_type = sa.Enum(name='auth_type') 37 | auth_type.drop(op.get_bind(), checkfirst=True) 38 | # ### end Alembic commands ### 39 | -------------------------------------------------------------------------------- /backend/alembic/versions/103c5bb766a1_converting_last_used_otp_to_string.py: -------------------------------------------------------------------------------- 1 | """Converting last_used_otp to string 2 | 3 | Revision ID: 103c5bb766a1 4 | Revises: 18d14440c61c 5 | Create Date: 2025-01-29 04:02:10.470503 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | from sqlalchemy.dialects import mysql 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = '103c5bb766a1' 16 | down_revision: Union[str, None] = '18d14440c61c' 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.alter_column('users', 'last_used_otp', 24 | existing_type=mysql.INTEGER(), 25 | type_=sa.String(length=8), 26 | existing_nullable=True) 27 | # ### end Alembic commands ### 28 | 29 | 30 | def downgrade() -> None: 31 | # ### commands auto generated by Alembic - please adjust! 
### 32 | op.alter_column('users', 'last_used_otp', 33 | existing_type=sa.String(length=8), 34 | type_=mysql.INTEGER(), 35 | existing_nullable=True) 36 | # ### end Alembic commands ### 37 | -------------------------------------------------------------------------------- /backend/alembic/versions/12285fde0fd3_new_epubmetadata_table_indexes.py: -------------------------------------------------------------------------------- 1 | """New epubmetadata table indexes 2 | 3 | Revision ID: 12285fde0fd3 4 | Revises: 09c9fb1450dd 5 | Create Date: 2025-02-08 05:50:18.768116 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = '12285fde0fd3' 16 | down_revision: Union[str, None] = '09c9fb1450dd' 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.create_index('author_title_index_series_idx', 'epub_metadata', ['authors', 'series', 'seriesindex', 'title'], unique=True) 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade() -> None: 28 | # ### commands auto generated by Alembic - please adjust! 
### 29 | op.drop_index('author_title_index_series_idx', table_name='epub_metadata') 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /backend/alembic/versions/18d14440c61c_updating_users_table_for_checks_against_.py: -------------------------------------------------------------------------------- 1 | """Updating users table for checks against replay attacks 2 | 3 | Revision ID: 18d14440c61c 4 | Revises: 0157205d4a2e 5 | Create Date: 2025-01-19 03:48:48.949840 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = '18d14440c61c' 16 | down_revision: Union[str, None] = '0157205d4a2e' 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.add_column('users', sa.Column('last_used_otp', sa.Integer(), nullable=True)) 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade() -> None: 28 | # ### commands auto generated by Alembic - please adjust! ### 29 | op.drop_column('users', 'last_used_otp') 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /backend/alembic/versions/1dca544a8766_new_tables.py: -------------------------------------------------------------------------------- 1 | """New tables 2 | 3 | Revision ID: 1dca544a8766 4 | Revises: 5 | Create Date: 2025-01-08 02:36:16.624449 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | from sqlalchemy.dialects import mysql 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision: str = '1dca544a8766' 16 | down_revision: Union[str, None] = None 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.create_table('epub_metadata', 24 | sa.Column('id', sa.Integer(), nullable=False), 25 | sa.Column('identifier', sa.String(length=255), nullable=False), 26 | sa.Column('title', sa.String(length=255), nullable=True), 27 | sa.Column('authors', sa.String(length=255), nullable=True), 28 | sa.Column('series', sa.String(length=255), nullable=True), 29 | sa.Column('seriesindex', sa.Float(), nullable=True), 30 | sa.Column('relative_path', sa.String(length=255), nullable=True), 31 | sa.Column('cover_image_data', sa.LargeBinary().with_variant(mysql.LONGBLOB(), 'mysql'), nullable=True), 32 | sa.Column('cover_media_type', sa.String(length=255), nullable=True), 33 | sa.Column('progress', sa.String(length=255), nullable=True), 34 | sa.PrimaryKeyConstraint('id'), 35 | sa.UniqueConstraint('identifier'), 36 | sa.UniqueConstraint('relative_path') 37 | ) 38 | op.create_index('book_identifier', 'epub_metadata', ['identifier'], unique=True) 39 | op.create_table('users', 40 | sa.Column('id', sa.Integer(), nullable=False), 41 | sa.Column('username', sa.String(length=255), nullable=False), 42 | sa.Column('email', sa.String(length=255), nullable=False), 43 | sa.Column('password_hash', sa.String(length=255), nullable=True), 44 | sa.Column('is_admin', sa.Boolean(), nullable=False), 45 | sa.Column('oidc_user_id', sa.String(length=255), nullable=True), 46 | sa.Column('created_at', sa.DateTime(), nullable=True), 47 | sa.Column('updated_at', sa.DateTime(), nullable=True), 48 | sa.Column('last_login', sa.DateTime(), nullable=True), 49 | sa.Column('failed_login_count', sa.Integer(), nullable=False), 50 | sa.PrimaryKeyConstraint('id'), 51 | sa.UniqueConstraint('email'), 52 | 
sa.UniqueConstraint('oidc_user_id'), 53 | sa.UniqueConstraint('username') 54 | ) 55 | op.create_table('progress_mapping', 56 | sa.Column('id', sa.Integer(), nullable=False), 57 | sa.Column('user_id', sa.Integer(), nullable=False), 58 | sa.Column('book_id', sa.Integer(), nullable=False), 59 | sa.Column('progress', sa.String(length=255), nullable=True), 60 | sa.Column('is_finished', sa.Boolean(), nullable=False), 61 | sa.Column('marked_favorite', sa.Boolean(), nullable=False), 62 | sa.Column('created_at', sa.DateTime(), nullable=True), 63 | sa.Column('updated_at', sa.DateTime(), nullable=True), 64 | sa.PrimaryKeyConstraint('id') 65 | ) 66 | op.create_index('ix_user_book', 'progress_mapping', ['user_id', 'book_id'], unique=True) 67 | # ### end Alembic commands ### 68 | 69 | 70 | def downgrade() -> None: 71 | # ### commands auto generated by Alembic - please adjust! ### 72 | op.drop_index('ix_user_book', table_name='progress_mapping') 73 | op.drop_table('progress_mapping') 74 | op.drop_table('users') 75 | op.drop_index('book_identifier', table_name='epub_metadata') 76 | op.drop_table('epub_metadata') 77 | # ### end Alembic commands ### 78 | -------------------------------------------------------------------------------- /backend/celery_app.py: -------------------------------------------------------------------------------- 1 | from celery import Celery 2 | from config.config import config 3 | from datetime import timedelta 4 | 5 | def make_celery(): 6 | celery = Celery( 7 | __name__, 8 | broker=config.CELERY_BROKER_URL, 9 | backend=config.CELERY_RESULT_BACKEND, 10 | include=['functions.tasks.scan'] # Include your task modules 11 | ) 12 | scan_interval = config.PERIODIC_SCAN_INTERVAL 13 | try: 14 | scan_interval = int(scan_interval) 15 | except ValueError: 16 | scan_interval = 10 17 | if config.SCHEDULER_ENABLED: 18 | celery.conf.update({ 19 | 'timezone': 'UTC', 20 | 'enable_utc': True, 21 | 'result_expires': timedelta(hours=24), 22 | 'beat_schedule': { 23 | 
def str_to_bool(value):
    """Coerce an environment-variable style value to a bool.

    Accepts real booleans, the int 1, and common truthy/falsy strings
    ('true'/'yes'/'t'/'y'/'1' and their negatives, case-insensitive,
    surrounding whitespace ignored). Anything unrecognized, empty, or
    falsy maps to False.
    """
    if isinstance(value, bool):
        return value
    if not value:
        # None, 0, '' and other falsy inputs are treated as "off".
        return False
    if isinstance(value, int):
        if value == 1:
            return True
    if isinstance(value, str):
        # Strip padding so values like ' true ' from .env files still parse
        # (previously they silently evaluated to False).
        value = value.strip().lower()
        if value in ('true', 'yes', 't', 'y', '1'):
            return True
        if value in ('false', 'no', 'f', 'n', '0'):
            return False
    return False
self.WRITE_TO_EPUB = str_to_bool(os.getenv('WRITE_TO_EPUB', False)) 37 | 38 | self.OIDC_ENABLED = str_to_bool(os.getenv('OIDC_ENABLED', False)) 39 | self.OIDC_CLIENT_ID = os.getenv('OIDC_CLIENT_ID', None) 40 | self.OIDC_CLIENT_SECRET = os.getenv('OIDC_CLIENT_SECRET', None) 41 | self.OIDC_PROVIDER = os.getenv('OIDC_PROVIDER', None) 42 | self.OIDC_METADATA_ENDPOINT = os.getenv('OIDC_METADATA_ENDPOINT', None) 43 | self.OIDC_AUTO_REGISTER_USER = str_to_bool(os.getenv('OIDC_AUTO_REGISTER_USER', False)) 44 | self.OIDC_AUTO_LINK_USER = str_to_bool(os.getenv('OIDC_AUTO_LINK_USER', False)) 45 | 46 | self.REDIS_HOST = os.getenv('REDIS_HOST', 'localhost') 47 | self.REDIS_PORT = os.getenv('REDIS_PORT', '6379') 48 | self.REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', "").strip() 49 | self.REDIS_LIMITER_DB = os.getenv('REDIS_LIMITER_DB', 0) 50 | self.REDIS_SCHEDULER_DB = os.getenv('REDIS_SCHEDULER_DB', 5) 51 | self.REDIS_OPDS_DB = os.getenv('REDIS_OPDS_DB', 8) 52 | 53 | self.RATE_LIMITER_ENABLED = str_to_bool(os.getenv('RATE_LIMITER_ENABLED', True)) 54 | self.SCHEDULER_ENABLED = str_to_bool(os.getenv('SCHEDULER_ENABLED', True)) 55 | self.OPDS_ENABLED = str_to_bool(os.getenv('OPDS_ENABLED', False)) 56 | 57 | self.PERIODIC_SCAN_INTERVAL = os.getenv('PERIODIC_SCAN_INTERVAL', 10) 58 | 59 | self.DB_TYPE = os.getenv('DB_TYPE', 'sqlite').lower() 60 | self.DB_HOST = os.getenv('DB_HOST', 'localhost') 61 | self.DB_PORT = os.getenv('DB_PORT') 62 | self.DB_NAME = os.getenv('DB_NAME', 'epub_library') 63 | self.DB_USER = os.getenv('DB_USER', 'root') 64 | self.DB_PASSWORD = os.getenv('DB_PASSWORD', None) 65 | 66 | @property 67 | def RATE_LIMITER_URI(self): 68 | if not self.REDIS_PASSWORD: 69 | return f"redis://{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_LIMITER_DB}" 70 | return f"redis://:{self.REDIS_PASSWORD}@{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_LIMITER_DB}" 71 | 72 | @property 73 | def CELERY_BROKER_URL(self): 74 | if not self.REDIS_PASSWORD: 75 | return 
import logging
import os
from dotenv import load_dotenv

load_dotenv()

# Resolve the desired log level from the environment, falling back to INFO
# for anything that is not a recognized logging level name.
LOG_LEVEL = os.getenv('LOG_LEVEL', 'INFO').upper()
valid_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
if LOG_LEVEL not in valid_levels:
    print(f"Invalid log level '{LOG_LEVEL}', defaulting to INFO")
    LOG_LEVEL = "INFO"

class Logger:
    """Thin wrapper around a configured 'epubdl' logging.Logger.

    The handler is attached only once: if the named logger already has
    handlers (e.g. a second Logger() instantiation), setup is skipped.
    """
    def __init__(self):
        self.logger = logging.getLogger('epubdl')
        self.logger.propagate = True
        if self.logger.handlers:
            # Already configured by a previous instantiation.
            return
        level = getattr(logging, LOG_LEVEL)
        self.logger.setLevel(level)
        self.handler = logging.StreamHandler()
        self.log_formatter = logging.Formatter(
            fmt="time=%(asctime)s level=%(levelname)s msg=\"%(message)s\"",
            datefmt="%Y-%m-%dT%H:%M:%S"
        )
        self.handler.setFormatter(self.log_formatter)
        self.handler.setLevel(level)
        self.logger.addHandler(self.handler)

    def info(self, msg, *args):
        self.logger.info(msg, *args)

    def debug(self, msg, *args):
        self.logger.debug(msg, *args)

    def error(self, msg, *args, exc_info=None):
        # Passing an Exception instance as msg logs its message with its
        # traceback attached; otherwise behaves like logging.Logger.error.
        if isinstance(msg, Exception):
            self.logger.error(str(msg), exc_info=msg)
        else:
            self.logger.error(msg, *args, exc_info=exc_info)

    def warning(self, msg, *args):
        self.logger.warning(msg, *args)

    def exception(self, msg, *args):
        self.logger.exception(msg, *args)

logger = Logger()
from config.config import config
import jwt

def verify_token(token):
    """Decode and validate a JWT signed with the app's SECRET_KEY (HS256).

    Returns the decoded claims dict, or None when the token is expired,
    malformed, or otherwise fails validation.
    """
    try:
        return jwt.decode(token, config.SECRET_KEY, algorithms=['HS256'])
    except jwt.InvalidTokenError:
        # InvalidTokenError is the base class for ExpiredSignatureError,
        # DecodeError, InvalidSignatureError, etc. The original code only
        # caught ExpiredSignatureError, so a tampered or malformed token
        # raised an unhandled exception instead of being rejected cleanly.
        return None
from flask import request, jsonify
from models.epub_metadata import EpubMetadata
from models.progress_mapping import ProgressMapping
from functions.db import get_session
from models.users import Users
from config.logger import logger
from functions.auth import verify_token
from functools import wraps
from config.config import config
import inspect

def user_logged_in():
    """Validate the request's Bearer token, if present.

    Returns (ok, message, token_state) where token_state is the decoded
    JWT claims dict for a valid token, or the sentinel string "no_token"
    when the request carried no usable Authorization header.
    """
    session = get_session()
    try:
        auth_header = request.headers.get('Authorization')
        no_token = "no_token"
        if auth_header and auth_header.startswith("Bearer "):
            token = auth_header.split(" ")[1]
            decoded_token = verify_token(token)
            if not decoded_token:
                return False, "Invalid or expired token.", no_token
            user_id = decoded_token.get("user_id")
            user_record = session.query(Users).filter_by(id=user_id).first()
            if not user_record:
                return False, "User not found.", no_token
            return True, "User token validated.", decoded_token
        return True, "Unauthenticated session in progress.", no_token
    finally:
        session.close()

def login_required(func=None, totp=False):
    """
    Decorator to check login status and enforce token type:
    - Allows optional token validation based on config.ALLOW_UNAUTHENTICATED.
    - Updates token_state if token type is invalid or missing.

    Parameters:
        func (function): The wrapped function.
        totp (bool): Set to True to allow TOTP tokens specifically for the route.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            user_login_status, message, token_state = user_logged_in()
            if not user_login_status:
                logger.debug(message)
            if not config.ALLOW_UNAUTHENTICATED and token_state == "no_token":
                logger.debug("Unauthenticated access denied.")
                return jsonify({
                    "error": "Unauthenticated access is not allowed. Please see ALLOW_UNAUTHENTICATED environment variable"
                }), 401
            if totp and token_state == "no_token":
                return jsonify({
                    "error": "TOTP verification requires authentication."
                }), 401
            if token_state != "no_token":
                # A TOTP-stage token only grants access to TOTP routes;
                # downgrade it to anonymous everywhere else.
                token_type = token_state.get("token_type", None)
                if token_type == "totp" and not totp:
                    logger.debug("TOTP token detected, marking as no_token.")
                    token_state = "no_token"
            # Only pass token_state to views that declare it.
            func_params = inspect.signature(func).parameters
            if 'token_state' in func_params:
                return func(*args, token_state=token_state, **kwargs)
            return func(*args, **kwargs)
        return wrapper
    if func:
        return decorator(func)
    return decorator

def generate_session_id():
    """Return a random UUID4 string for use as a session identifier."""
    import uuid
    return str(uuid.uuid4())

def get_book_progress_record(token_user_id, book_identifier, session):
    """Look up the ProgressMapping row for (user, book).

    Returns (True, record) when one exists, (False, None) when the user,
    the book, or the mapping is missing. Previously a missing user or book
    raised AttributeError (user.id / book.id on None); now it reports
    False cleanly, and the mapping is queried once instead of twice.
    """
    user = session.query(Users).filter_by(id=token_user_id).first()
    book = session.query(EpubMetadata).filter_by(identifier=book_identifier).first()
    if user is None or book is None:
        return False, None
    progress_record = session.query(ProgressMapping).filter_by(user_id=user.id, book_id=book.id).first()
    if progress_record is None:
        return False, None
    return True, progress_record

def get_book_progress(token_state, book_identifier, session):
    """Resolve the caller's progress record from their decoded token."""
    token_user_id = token_state.get("user_id")
    return get_book_progress_record(token_user_id, book_identifier, session)

def construct_new_book_progress_record(data):
    """Build ProgressMapping kwargs from a request payload.

    Only keys present in data are included; 'favorite' maps to the
    'marked_favorite' column.
    """
    record = {}
    if 'is_finished' in data:
        record['is_finished'] = bool(data['is_finished'])
    if 'progress' in data:
        record['progress'] = str(data['progress'])
    if 'favorite' in data:
        record['marked_favorite'] = bool(data['favorite'])
    return record


def update_book_progress_state(token_state, book_identifier, data):
    """Create or update the calling user's progress record for a book.

    data may contain 'is_finished', 'progress', and/or 'favorite'.
    Returns (ok, message). All writes are committed once; any failure
    rolls back and returns an error message.
    """
    token_user_id = token_state.get("user_id")
    session = get_session()
    try:
        # Record lookup now sits inside try/finally so the session is
        # always closed, even if the lookup itself raises.
        record_status, record = get_book_progress_record(token_user_id, book_identifier, session)
        if not record_status:
            user = session.query(Users).filter_by(id=token_user_id).first()
            book = session.query(EpubMetadata).filter_by(identifier=book_identifier).first()
            if not (user and book):
                # Previously this path dereferenced `book` in a debug log
                # before checking it, raising AttributeError for unknown
                # identifiers.
                return False, "User or book not found."
            logger.debug(f"Book identifier: {book.identifier}")
            progress_record = construct_new_book_progress_record(data)
            updated_state = ProgressMapping(user_id=user.id, book_id=book.id, **progress_record)
            session.add(updated_state)
            session.commit()
            return True, "Book progress updated successfully"
        if 'is_finished' in data:
            record.is_finished = bool(data['is_finished'])
        if 'progress' in data:
            record.progress = data['progress']
        if 'favorite' in data:
            record.marked_favorite = data['favorite']
        # One commit covers every updated field (was three commits).
        session.commit()
        return True, "Book progress updated successfully"
    except Exception as e:
        session.rollback()
        logger.exception("Error occurred: %s", e)
        return False, f"Error updating finished state: {str(e)}"
    finally:
        session.close()
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from config.config import config

# Engine cache keyed by database URL: repeated get_engine() calls reuse the
# same engine (and its connection pool), while a changed configuration
# (e.g. the test suite flipping DB_TYPE) still gets a fresh engine.
_ENGINES = {}

def get_database_url():
    """
    Construct the SQLAlchemy database URL based on the configuration.

    Raises:
        ValueError: for an unsupported DB_TYPE.
    """
    if config.DB_TYPE == 'test':
        return "sqlite:///tests/test.db"
    elif config.DB_TYPE == 'mysql':
        return f"mysql+pymysql://{config.DB_USER}:{config.DB_PASSWORD}@{config.DB_HOST}:{config.DB_PORT or 3306}/{config.DB_NAME}"
    elif config.DB_TYPE == 'postgres':
        return f"postgresql://{config.DB_USER}:{config.DB_PASSWORD}@{config.DB_HOST}:{config.DB_PORT or 5432}/{config.DB_NAME}"
    elif config.DB_TYPE == 'sqlite':
        return f"sqlite:///{config.DB_NAME}.db"
    else:
        raise ValueError(f"Unsupported DB_TYPE: {config.DB_TYPE}")

def get_engine():
    """Return an engine for the configured database, cached per URL.

    The previous implementation created a brand-new engine — and therefore
    a brand-new connection pool — on every call, which defeated pooling
    entirely since get_session() calls this for each request.
    """
    database_url = get_database_url()
    engine = _ENGINES.get(database_url)
    if engine is None:
        engine = create_engine(database_url)
        _ENGINES[database_url] = engine
    return engine

def get_session():
    """
    Provides a new database session (connection).
    """
    Session = sessionmaker(bind=get_engine())
    return Session()

# --- backend/functions/extensions.py ---
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from flask_cors import CORS

# Shared limiter instance; per-route limits default to 60/min per client IP.
limiter = Limiter(
    key_func=get_remote_address,
    default_limits=["60 per minute"]
)

def setup_limiter(app):
    """Attach the shared rate limiter to the Flask app and return it."""
    limiter.init_app(app)
    return limiter

def setup_cors(app):
    """Restrict CORS on API/media routes to the configured BASE_URL origin."""
    allowed_origin = config.BASE_URL
    CORS(app, resources={
        r"/api/*": {"origins": allowed_origin},
        r"/stream/*": {"origins": allowed_origin},
        r"/files/*": {"origins": allowed_origin},
        r"/download/*": {"origins": allowed_origin},
        r"/login*": {"origins": allowed_origin},
        r"/api/admin/*": {"origins": allowed_origin},
        r"/validate-otp": {"origins": allowed_origin},
        r"/scan-library": {"origins": allowed_origin},
        r"/scan-status/*": {"origins": allowed_origin}
    })
def init_rate_limit(app):
    """Enable the rate limiter from config; always disabled under test."""
    enabled = config.RATE_LIMITER_ENABLED if config.ENVIRONMENT != "test" else False
    app.config["RATELIMIT_ENABLED"] = enabled

def init_env():
    """Verify required environment variables; exit the process on failure."""
    try:
        result, message = check_required_envs(config.SECRET_KEY, config.BASE_URL, config.OIDC_ENABLED)
    except Exception as e:
        logger.exception("Failed to check required environment variables: %s", str(e))
        sys.exit(1)
    if not result:
        logger.error(message)
        sys.exit(1)
    logger.debug("Required environment variables checked successfully.")

def init_admin_user():
    """Ensure the admin account exists; skipped entirely under test."""
    if config.ENVIRONMENT == "test":
        logger.debug("TEST ENVIRONMENT")
        return
    try:
        result, message = check_admin_user(config.ADMIN_PASS, config.ADMIN_EMAIL)
    except Exception as e:
        logger.exception("Failed to initialize admin user: %s", str(e))
        sys.exit(1)
    if not result:
        logger.error("Failed to initialize admin user: %s", message)
        sys.exit(1)
    logger.info("Admin user initialized successfully.")

def init_admin_password_reset():
    """Reset the admin password when ADMIN_RESET is set.

    NOTE(review): unlike the other init helpers, a raised exception here is
    logged but does not exit the process — presumably deliberate best-effort
    behavior; confirm before tightening.
    """
    if not config.ADMIN_RESET:
        return
    try:
        result, message = reset_admin_user_password(config.ADMIN_PASS)
    except Exception as e:
        logger.exception("Failed to reset admin user password: %s", str(e))
        return
    if not result:
        logger.error("Failed to reset admin user password: %s", message)
        sys.exit(1)

def init_encryption(app):
    """Derive a urlsafe-base64 Fernet key from the hex SECRET_KEY."""
    raw_key = binascii.unhexlify(config.SECRET_KEY)
    app.config["FERNET_KEY"] = base64.urlsafe_b64encode(raw_key)
import os
import re
import ebookmeta
from models.epub_metadata import EpubMetadata
from models.progress_mapping import ProgressMapping
from models.users import Users
from functions.db import get_session
from config.logger import logger
from config.config import config
from sqlalchemy import select

def find_epubs(base_directory):
    """Recursively collect absolute paths of all .epub files under base_directory.

    Extension matching is case-insensitive, so files named *.EPUB (common on
    case-preserving filesystems) are found too; the old exact-case check
    silently skipped them.
    """
    epubs = []
    for root, dirs, files in os.walk(base_directory):
        for file in files:
            if file.lower().endswith('.epub'):
                full_path = os.path.join(root, file)
                epubs.append(full_path)
    return epubs

def extract_metadata(epub_path, base_directory):
    """Read ebook metadata and normalize it into a plain dict.

    The identifier falls back to the file path, then the title; URL-style
    identifiers are slugified so they are safe to embed in routes.
    """
    book = ebookmeta.get_metadata(epub_path)
    unique_id = book.identifier or epub_path
    if re.match(r'https?://', unique_id):
        # Slugify URL identifiers and collapse repeated dashes.
        unique_id = re.sub(r'[^a-zA-Z0-9]', '-', unique_id)
        unique_id = re.sub(r'-+', '-', unique_id)
    title = book.title
    if not unique_id.strip():
        unique_id = title
    authors = book.author_list
    series = book.series or ''
    seriesindex = book.series_index if book.series_index is not None else 0.0
    cover_image_data = book.cover_image_data
    cover_media_type = book.cover_media_type

    relative_path = os.path.relpath(epub_path, base_directory)
    return {
        'identifier': unique_id,
        'title': title,
        'authors': authors,
        'series': series,
        'seriesindex': seriesindex,
        'relative_path': relative_path,
        'cover_image_data': cover_image_data,
        'cover_media_type': cover_media_type
    }

def remove_missing_files(session, db_identifiers, filesystem_identifiers):
    """
    Deletes database records corresponding to files missing in the filesystem, and removes the associated DB entry from ProgressMapping
    """
    missing_files = db_identifiers - filesystem_identifiers  # DB IDs not in filesystem
    if missing_files:
        filtered_results = session.query(EpubMetadata).filter(EpubMetadata.identifier.in_(missing_files)).all()
        for result in filtered_results:
            # Delete progress rows first so no mapping outlives its book.
            mapping_records = session.query(ProgressMapping).filter(ProgressMapping.book_id == result.id).all()
            for record in mapping_records:
                session.delete(record)
            session.delete(result)
        logger.debug(f"Removed {len(missing_files)} records from DB as the files are missing in filesystem.")

def remove_missing_user_progress(session):
    """Drop progress rows whose owning user no longer exists."""
    valid_users_subquery = select(Users.id)
    session.query(ProgressMapping).filter(~ProgressMapping.user_id.in_(valid_users_subquery)).delete(synchronize_session=False)

def scan_and_store_metadata(base_directory):
    """Scan the library and reconcile the database with the filesystem.

    New files are inserted, moved files have their relative_path updated,
    and (outside of tests) records for deleted files plus orphaned progress
    rows are purged. All changes commit in one transaction; any error rolls
    the whole scan back.
    """
    session = get_session()

    try:
        epubs = find_epubs(base_directory)
        logger.debug(f"Found {len(epubs)} ePubs in base directory: {base_directory}")
        all_db_records = session.query(EpubMetadata).all()
        db_identifiers = {record.identifier for record in all_db_records}
        filesystem_identifiers = set()
        for epub_path in epubs:
            metadata = extract_metadata(epub_path, base_directory)
            unique_id = metadata['identifier']
            logger.debug(f"Book Title: {metadata['title']}")

            filesystem_identifiers.add(unique_id)

            existing_record = session.query(EpubMetadata).filter_by(identifier=unique_id).first()

            if existing_record:
                if existing_record.relative_path != metadata['relative_path']:
                    existing_record.relative_path = metadata['relative_path']
                    session.add(existing_record)
                    logger.debug(f"Updated relative_path in DB for identifier={unique_id}, Path: {metadata['relative_path']}")
            else:
                new_entry = EpubMetadata(
                    identifier=unique_id,
                    title=metadata['title'],
                    authors=', '.join(metadata['authors']),
                    series=metadata['series'],
                    seriesindex=metadata['seriesindex'],
                    relative_path=metadata['relative_path'],
                    cover_image_data=metadata['cover_image_data'],
                    cover_media_type=metadata['cover_media_type']
                )
                session.add(new_entry)
                if not new_entry.identifier.strip():
                    # Fall back to the title as identifier once the row exists.
                    session.flush()
                    new_entry.identifier = new_entry.title
                logger.debug(f"Stored new metadata in DB for identifier={unique_id}")
        if config.ENVIRONMENT != "test":
            remove_missing_files(session, db_identifiers, filesystem_identifiers)
            remove_missing_user_progress(session)
        session.commit()
        logger.info("Library scan and metadata update completed successfully.")
    except Exception as e:
        logger.error(f"Error during library scan and metadata update: {e}")
        session.rollback()
    finally:
        logger.debug("Closing database session.")
        session.close()
scan_and_store_metadata 3 | from config.config import config 4 | from sqlalchemy.exc import SQLAlchemyError 5 | from celery.exceptions import MaxRetriesExceededError 6 | from config.logger import logger 7 | 8 | @celery.task(bind=True, max_retries=5) 9 | def scan_library_task(self): 10 | try: 11 | scan_and_store_metadata(config.BASE_DIRECTORY) 12 | except SQLAlchemyError as exc: 13 | if self.request.retries >= self.max_retries: 14 | logger.exception( 15 | f"Maximum retries exceeded for task '{self.name}' after {self.request.retries} attempts. " 16 | f"Original DB Error: {str(exc)}" 17 | ) 18 | raise MaxRetriesExceededError() 19 | retry_delay = 3 ** self.request.retries 20 | logger.warning(f"Retrying task due to database error: {str(exc)}. Attempt {self.request.retries + 1}") 21 | raise self.retry(exc=exc, countdown=retry_delay) -------------------------------------------------------------------------------- /backend/functions/utils.py: -------------------------------------------------------------------------------- 1 | from cryptography.fernet import Fernet 2 | from models.users import Users 3 | from functions.db import get_session 4 | from email_validator import validate_email, EmailNotValidError 5 | import bcrypt 6 | import re 7 | from flask import current_app, jsonify, request 8 | from config.config import config 9 | from config.logger import logger 10 | 11 | def check_required_envs(secret_key: str, base_url: str, oidc_enabled: bool) -> tuple[bool, str]: 12 | if not secret_key: 13 | return False, "SECRET_KEY environment variable is not set. Generate one (bash) using: openssl rand -hex 32" 14 | if len(secret_key) != 64: 15 | return False, "SECRET_KEY environment variable is invalid. Generate one (bash) using: openssl rand -hex 32" 16 | if not base_url: 17 | return False, "BASE_URL is not set. 
Please set this to your application's base URL" 18 | if oidc_enabled: 19 | if not config.OIDC_PROVIDER: 20 | return False, "OIDC_ENABLED is True but OIDC_PROVIDER is not configured." 21 | if not config.OIDC_CLIENT_ID: 22 | return False, "OIDC_ENABLED is True but OIDC_CLIENT_ID is not configured." 23 | if not config.OIDC_CLIENT_SECRET: 24 | return False, "OIDC_ENABLED is True but OIDC_CLIENT_SECRET is not configured." 25 | if not config.OIDC_METADATA_ENDPOINT: 26 | return False, "OIDC_ENABLED is True but OIDC_METADATA_ENDPOINT is not configured." 27 | return True, "Required environment variables are set." 28 | 29 | def hash_password(password: str) -> str: 30 | return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt()).decode('utf-8') 31 | 32 | def check_pw_complexity(password: str) -> tuple[bool, str]: 33 | if len(password) < 8: 34 | return False, "Password must be at least 8 characters long." 35 | if not re.search(r"[A-Z]", password): 36 | return False, "Password must contain at least one uppercase letter." 37 | if not re.search(r"[a-z]", password): 38 | return False, "Password must contain at least one lowercase letter." 39 | if not re.search(r"[0-9]", password): 40 | return False, "Password must contain at least one number." 41 | if not re.search(r"[!@#$%^&*(),.?\":{}|<>]", password): 42 | return False, "Password must contain at least one special character." 43 | return True, "Password complexity requirements have been met." 44 | 45 | def check_admin_user(password: str, email: str) -> tuple[bool, str]: 46 | session = get_session() 47 | try: 48 | admin_user = session.query(Users).filter_by(username='admin').first() 49 | if admin_user: 50 | return True, "Admin user already exists. Skipping initial setup." 51 | if not password or not email: 52 | return False, "Missing admin credentials. Please set ADMIN_PASS and ADMIN_EMAIL in environment variables. These variables can be unset after initial setup." 
53 | try: 54 | email_address = validate_email(email, check_deliverability=False) 55 | email = email_address.normalized 56 | except EmailNotValidError as e: 57 | return False, f"Email validation error: {str(e)}" 58 | valid_pw, message = check_pw_complexity(password) 59 | if not valid_pw: 60 | return False, message 61 | hashed_password = hash_password(password) 62 | new_admin_user = Users( 63 | username='admin', 64 | email=email, 65 | password_hash=hashed_password, 66 | is_admin=True, 67 | auth_type='local' 68 | ) 69 | session.add(new_admin_user) 70 | session.commit() 71 | return True, "Admin user created successfully." 72 | except Exception as e: 73 | return False, str(e) 74 | finally: 75 | session.close() 76 | 77 | def reset_admin_user_password(password: str) -> tuple[bool, str]: 78 | if not password: 79 | return False, "Missing password for admin user password reset. Please set ADMIN_PASS in environment variables." 80 | session = get_session() 81 | try: 82 | admin_user = session.query(Users).filter_by(username='admin').first() 83 | if admin_user: 84 | hashed_password = hash_password(password) 85 | admin_user.password_hash = hashed_password 86 | if admin_user.mfa_enabled: 87 | admin_user.mfa_secret = None 88 | admin_user.mfa_enabled = False 89 | session.commit() 90 | return True, "Admin password and MFA reset successfully." 91 | else: 92 | return False, "Admin user not found in the database." 
93 | except Exception as e: 94 | return False, str(e) 95 | finally: 96 | session.close() 97 | 98 | def encrypt_totp_secret(secret): 99 | fernet = Fernet(current_app.config["FERNET_KEY"]) 100 | encrypted_secret = fernet.encrypt(secret.encode('utf-8')).decode('utf-8') 101 | return encrypted_secret 102 | 103 | def decrypt_totp_secret(secret): 104 | fernet = Fernet(current_app.config["FERNET_KEY"]) 105 | decrypted_secret = fernet.decrypt(secret.encode('utf-8')).decode('utf-8') 106 | return decrypted_secret 107 | 108 | def unlink_oidc(user_id): 109 | data = request.get_json(silent=True) 110 | if data is None or "new_password" not in data: 111 | return jsonify({"error": "No password submitted"}), 400 112 | new_password = data.get('new_password') 113 | valid_pw, message = check_pw_complexity(new_password) 114 | if not valid_pw: 115 | return jsonify({"error": message}), 400 116 | hashed_password = hash_password(new_password) 117 | session = get_session() 118 | try: 119 | user = session.query(Users).filter(Users.id == user_id).first() 120 | if not user: 121 | return jsonify({"error": "User not found."}), 404 122 | user.oidc_user_id = None 123 | user.auth_type = "local" 124 | user.password_hash = hashed_password 125 | user.mfa_enabled = False 126 | user.mfa_secret = None 127 | user.last_used_otp = None 128 | session.commit() 129 | return jsonify({"message": "Successfully un-linked OIDC"}), 200 130 | except Exception as e: 131 | logger.exception(f"Exception occurred: {e}") 132 | return jsonify({"error": "Internal server error"}), 500 133 | finally: 134 | session.close() -------------------------------------------------------------------------------- /backend/gunicorn_logging.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from logging.handlers import RotatingFileHandler 3 | 4 | 5 | # Disable traceback in Gunicorn error logs 6 | class NoStacktraceFilter(logging.Filter): 7 | def filter(self, record): 8 | # Filters out 
stacktraces 9 | return not record.exc_info 10 | 11 | 12 | # Configure logging 13 | gunicorn_error_logger = logging.getLogger("gunicorn.error") 14 | gunicorn_error_logger.setLevel(logging.ERROR) 15 | 16 | # Add a handler for errors without stacktraces 17 | handler = RotatingFileHandler("gunicorn.log", maxBytes=100000, backupCount=10) 18 | formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") 19 | handler.setFormatter(formatter) 20 | handler.addFilter(NoStacktraceFilter()) 21 | gunicorn_error_logger.addHandler(handler) -------------------------------------------------------------------------------- /backend/main.py: -------------------------------------------------------------------------------- 1 | from functions.blueprints import register_blueprints 2 | from functions.extensions import setup_cors, setup_limiter 3 | from functions.init import init_env, init_admin_user, init_admin_password_reset, init_rate_limit, init_encryption, init_oauth, CustomFlask, init_redis 4 | from config.config import config 5 | from celery_app import celery 6 | 7 | def create_app() -> CustomFlask: 8 | app = CustomFlask(__name__, static_folder="../frontend/dist", static_url_path="/static") 9 | app.secret_key = config.SECRET_KEY 10 | init_env() 11 | init_encryption(app) 12 | init_rate_limit(app) 13 | app.config["RATELIMIT_STORAGE_URI"] = config.RATE_LIMITER_URI 14 | setup_cors(app) 15 | setup_limiter(app) 16 | register_blueprints(app) 17 | app.celery = celery 18 | init_admin_user() 19 | init_admin_password_reset() 20 | app.oauth = init_oauth(app) 21 | app.redis = init_redis() 22 | return app 23 | 24 | app = create_app() -------------------------------------------------------------------------------- /backend/migrations.py: -------------------------------------------------------------------------------- 1 | from alembic.config import Config 2 | from alembic import command 3 | from sqlalchemy import create_engine, inspect 4 | from sqlalchemy.engine import reflection 5 | from 
functions.db import get_database_url 6 | from sqlalchemy.exc import SQLAlchemyError 7 | from config.logger import logger 8 | import sys 9 | 10 | # Dynamically fetch the database URL 11 | DATABASE_URL = get_database_url() 12 | 13 | 14 | def check_migrations_and_apply(): 15 | try: 16 | # Load Alembic configuration 17 | alembic_cfg = Config("alembic.ini") 18 | alembic_cfg.set_main_option("sqlalchemy.url", DATABASE_URL) 19 | 20 | # Create engine and inspector 21 | engine = create_engine(DATABASE_URL) 22 | inspector = inspect(engine) 23 | 24 | logger.info("Starting migration process...") 25 | 26 | # Get current revision before upgrade 27 | from alembic.script import ScriptDirectory 28 | script = ScriptDirectory.from_config(alembic_cfg) 29 | with engine.begin() as connection: 30 | context = MigrationContext.configure(connection) 31 | current_rev = context.get_current_revision() 32 | 33 | # Get latest available revision 34 | head_rev = script.get_current_head() 35 | 36 | if current_rev == head_rev: 37 | logger.info("Database is up to date, no migrations needed") 38 | return True 39 | 40 | logger.info(f"Current revision: {current_rev}") 41 | logger.info(f"Target revision: {head_rev}") 42 | logger.info("Applying pending migrations...") 43 | 44 | # Run the upgrade 45 | command.upgrade(alembic_cfg, "head") 46 | 47 | # Verify the upgrade 48 | with engine.begin() as connection: 49 | context = MigrationContext.configure(connection) 50 | new_rev = context.get_current_revision() 51 | 52 | if new_rev == head_rev: 53 | logger.info("Migrations completed successfully") 54 | logger.info(f"New revision: {new_rev}") 55 | return True 56 | else: 57 | logger.error("Migration may have failed - revision mismatch") 58 | logger.error(f"Expected: {head_rev}, Got: {new_rev}") 59 | return False 60 | 61 | except SQLAlchemyError as e: 62 | logger.error(f"Database error: {str(e)}") 63 | if hasattr(e, 'orig'): 64 | logger.error(f"Original error: {e.orig}") 65 | return False 66 | except Exception as e: 
67 | logger.error(f"Unexpected error: {str(e)}") 68 | return False 69 | 70 | 71 | if __name__ == "__main__": 72 | from alembic.runtime.migration import MigrationContext 73 | from alembic.script import ScriptDirectory 74 | 75 | success = check_migrations_and_apply() 76 | if not success: 77 | sys.exit(1) 78 | sys.exit(0) -------------------------------------------------------------------------------- /backend/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/models/__init__.py -------------------------------------------------------------------------------- /backend/models/base.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import declarative_base 2 | 3 | Base = declarative_base() -------------------------------------------------------------------------------- /backend/models/epub_metadata.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, Integer, String, Float, LargeBinary, Index 2 | from sqlalchemy.dialects.mysql import LONGBLOB 3 | from models.base import Base 4 | 5 | class EpubMetadata(Base): 6 | __tablename__ = 'epub_metadata' 7 | 8 | id = Column(Integer, primary_key=True) 9 | identifier = Column(String(255), unique=True, nullable=False) # Add 'index=True' 10 | title = Column(String(255)) 11 | authors = Column(String(255)) 12 | series = Column(String(255)) 13 | seriesindex = Column(Float) 14 | relative_path = Column(String(255), unique=True) 15 | cover_image_data = Column(LargeBinary().with_variant(LONGBLOB, 'mysql')) 16 | cover_media_type = Column(String(255)) 17 | progress = Column(String(255), nullable=True) 18 | 19 | __table_args__ = ( 20 | Index('book_identifier', 'identifier', unique=True), 21 | Index('author_title_index_series_idx', 'authors', 'series', 'seriesindex', 'title', 
unique=True) 22 | ) -------------------------------------------------------------------------------- /backend/models/progress_mapping.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Index 3 | from models.base import Base 4 | 5 | class ProgressMapping(Base): 6 | __tablename__ = 'progress_mapping' 7 | 8 | id = Column(Integer, primary_key=True) 9 | user_id = Column(Integer, nullable=False) 10 | book_id = Column(Integer, nullable=False) 11 | progress = Column(String(255), nullable=True) 12 | is_finished = Column(Boolean, default=False, nullable=False) 13 | marked_favorite = Column(Boolean, default=False, nullable=False) 14 | created_at = Column(DateTime, default=datetime.now(timezone.utc)) 15 | updated_at = Column(DateTime, default=datetime.now(timezone.utc), onupdate=datetime.now(timezone.utc)) 16 | 17 | __table_args__ = ( 18 | Index('ix_user_book', 'user_id', 'book_id', unique=True), # unique=True if you want to ensure uniqueness 19 | ) -------------------------------------------------------------------------------- /backend/models/users.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from sqlalchemy import Column, Integer, String, DateTime, Boolean, Enum, Index 3 | from models.base import Base 4 | from typing import Optional 5 | 6 | class Users(Base): 7 | __tablename__ = 'users' 8 | __table_args__ = ( 9 | Index('ix_users_id', 'id'), 10 | Index('ix_users_email', 'email'), 11 | Index('ix_users_oidc_user_id', 'oidc_user_id'), 12 | ) 13 | 14 | id = Column(Integer, primary_key=True) 15 | username = Column(String(255), unique=True, nullable=False) 16 | email = Column(String(255), unique=True, nullable=False) 17 | password_hash = Column(String(255), nullable=True) 18 | is_admin = Column(Boolean, default=False, nullable=False) 19 
| oidc_user_id: Optional[str] = Column(String(255), unique=True, nullable=True) 20 | created_at = Column(DateTime, default=datetime.now(timezone.utc)) 21 | updated_at = Column(DateTime, default=datetime.now(timezone.utc), onupdate=datetime.now(timezone.utc)) 22 | last_login = Column(DateTime, default=datetime.now(timezone.utc)) 23 | failed_login_count = Column(Integer, default=0, nullable=False) 24 | auth_type = Column(Enum('oidc', 'local'), default='local', nullable=False, name="auth_type") 25 | mfa_enabled = Column(Boolean, default=False, nullable=False) 26 | mfa_secret = Column(String(255), nullable=True) 27 | last_used_otp = Column(String(8), nullable=True) 28 | 29 | -------------------------------------------------------------------------------- /backend/requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==1.14.0 2 | amqp==5.3.1 3 | Authlib==1.4.0 4 | bcrypt==4.2.1 5 | billiard==4.2.1 6 | blinker==1.9.0 7 | cachelib==0.9.0 8 | celery==5.4.0 9 | certifi==2025.1.31 10 | cffi==1.17.1 11 | charset-normalizer==3.4.1 12 | click==8.1.8 13 | click-didyoumean==0.3.1 14 | click-plugins==1.1.1 15 | click-repl==0.3.0 16 | coverage==7.6.10 17 | cryptography==44.0.0 18 | Deprecated==1.2.15 19 | dnspython==2.7.0 20 | EbookLib==0.18 21 | ebookmeta==1.2.11 22 | email_validator==2.2.0 23 | filelock==3.16.1 24 | Flask==3.1.0 25 | Flask-Caching==2.3.0 26 | Flask-Cors==5.0.0 27 | Flask-Limiter==3.10.0 28 | Flask-Login==0.6.3 29 | gunicorn==23.0.0 30 | idna==3.10 31 | iniconfig==2.0.0 32 | itsdangerous==2.2.0 33 | Jinja2==3.1.5 34 | kombu==5.4.2 35 | limits==4.0.0 36 | logassert==8.1 37 | lxml==5.3.0 38 | Mako==1.3.8 39 | markdown-it-py==3.0.0 40 | MarkupSafe==3.0.2 41 | mdurl==0.1.2 42 | ordered-set==4.1.0 43 | packaging==24.2 44 | pluggy==1.5.0 45 | prompt_toolkit==3.0.48 46 | psycopg2-binary==2.9.10 47 | pycparser==2.22 48 | Pygments==2.19.1 49 | PyJWT==2.10.1 50 | PyMySQL==1.1.1 51 | pyotp==2.9.0 52 | pytest==8.3.4 53 | 
pytest-cov==6.0.0 54 | python-dateutil==2.9.0.post0 55 | python-dotenv==1.0.1 56 | redis==5.2.1 57 | requests==2.32.3 58 | rich==13.9.4 59 | setuptools==75.8.0 60 | six==1.17.0 61 | SQLAlchemy==2.0.36 62 | typing_extensions==4.12.2 63 | tzdata==2024.2 64 | urllib3==2.3.0 65 | vine==5.1.0 66 | wcwidth==0.2.13 67 | Werkzeug==3.1.3 68 | wrapt==1.17.2 69 | -------------------------------------------------------------------------------- /backend/routes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/routes/__init__.py -------------------------------------------------------------------------------- /backend/routes/authors.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint, jsonify 2 | from models.epub_metadata import EpubMetadata 3 | from functions.db import get_session 4 | from functions.book_management import login_required 5 | 6 | authors_bp = Blueprint('authors', __name__) 7 | 8 | @authors_bp.route('/api/authors', methods=['GET']) 9 | @login_required 10 | def get_authors(): 11 | """ 12 | Returns a list of distinct authors sorted alphabetically. 
13 | """ 14 | session = get_session() 15 | total_books = session.query(EpubMetadata.id).count() 16 | if total_books == 0: 17 | return jsonify({ 18 | "authors": [], 19 | "total_authors": 0 20 | }) 21 | authors_query = session.query(EpubMetadata.authors).all() 22 | authors = set() 23 | for entry in authors_query: 24 | if entry.authors: 25 | authors.update([author.strip() for author in entry.authors.split(",")]) 26 | sorted_authors = sorted(authors) 27 | return jsonify({ 28 | "authors": sorted_authors, 29 | "total_authors": len(sorted_authors) 30 | }) 31 | 32 | @authors_bp.route('/api/authors/', methods=['GET']) 33 | @login_required 34 | def get_author_books(author_name): 35 | """ 36 | Returns all books by a specific author. 37 | """ 38 | session = get_session() 39 | normalized_author_name = author_name.replace('-', ' ').lower() 40 | author_query = session.query(EpubMetadata).filter( 41 | EpubMetadata.authors.ilike(f"%{normalized_author_name}%") 42 | ).all() 43 | if not author_query: 44 | return jsonify({"error": f"No books found for author: {author_name}"}), 404 45 | books = [{ 46 | "id": book.id, 47 | "title": book.title, 48 | "authors": book.authors.split(", "), 49 | "series": book.series, 50 | "seriesindex": book.seriesindex, 51 | "coverUrl": f"/api/covers/{book.identifier}", 52 | "relative_path": book.relative_path, 53 | "identifier": book.identifier, 54 | } for book in author_query] 55 | return jsonify({ 56 | "author": author_name, 57 | "books": books, 58 | "total_books": len(books) 59 | }), 200 -------------------------------------------------------------------------------- /backend/routes/media.py: -------------------------------------------------------------------------------- 1 | import os 2 | from flask import Blueprint, Response, current_app as app, send_from_directory, jsonify, abort, url_for, request 3 | from models.epub_metadata import EpubMetadata 4 | from functions.db import get_session 5 | from config.config import config 6 | from config.logger 
import logger 7 | 8 | media_bp = Blueprint('media', __name__) 9 | 10 | @media_bp.route('/api/covers/', methods=['GET']) 11 | def get_cover(book_identifier): 12 | """ 13 | Serve cover images with browser-side caching enabled to reduce repeated requests. 14 | """ 15 | session = get_session() 16 | book_record = session.query(EpubMetadata).filter_by(identifier=str(book_identifier)).first() 17 | if not book_record or not book_record.cover_image_data: 18 | placeholder_path = os.path.join(app.static_folder, 'placeholder.jpg') 19 | with open(placeholder_path, 'rb') as f: 20 | placeholder_image = f.read() 21 | return Response(placeholder_image, mimetype='image/jpeg', headers={ 22 | "Cache-Control": "public, max-age=259200" 23 | }) 24 | return Response(book_record.cover_image_data, mimetype=book_record.cover_media_type, headers={ 25 | "Cache-Control": "public, max-age=259200" 26 | }) 27 | 28 | @media_bp.route('/download/', methods=['GET']) 29 | def download(book_identifier): 30 | session = get_session() 31 | book_record = session.query(EpubMetadata).filter_by(identifier=str(book_identifier)).first() 32 | if not book_record: 33 | abort(404, description="Resource not found") 34 | relative_path = book_record.relative_path 35 | try: 36 | custom_file_header = request.headers.get("file") 37 | if config.ENVIRONMENT == "test" and custom_file_header == "not_found": 38 | raise FileNotFoundError 39 | return send_from_directory(config.BASE_DIRECTORY, relative_path, as_attachment=True) 40 | except FileNotFoundError: 41 | abort(404, description="File not found") 42 | 43 | @media_bp.route('/stream/', methods=['GET']) 44 | def stream(book_identifier): 45 | session = get_session() 46 | book_record = session.query(EpubMetadata).filter_by(identifier=book_identifier).first() 47 | if not book_record: 48 | abort(404, description="Book not found.") 49 | relative_path = book_record.relative_path 50 | full_path = os.path.join(config.BASE_DIRECTORY, relative_path) 51 | if not 
os.path.exists(full_path): 52 | abort(404, description="ePub file not found.") 53 | epub_file_url = config.BASE_URL.rstrip("/") + url_for('media.serve_book_file', filename=relative_path) 54 | return jsonify({"url": epub_file_url}) 55 | 56 | @media_bp.route('/files/', methods=['GET']) 57 | def serve_book_file(filename): 58 | try: 59 | base_directory = config.BASE_DIRECTORY # Root location of your ePub files 60 | custom_file_header = request.headers.get("file") 61 | if config.ENVIRONMENT == "test" and custom_file_header == "not_found": 62 | raise FileNotFoundError 63 | return send_from_directory(base_directory, filename) 64 | except FileNotFoundError: 65 | abort(404, description="File not found.") -------------------------------------------------------------------------------- /backend/routes/react.py: -------------------------------------------------------------------------------- 1 | import os 2 | from flask import Blueprint, send_from_directory, current_app, jsonify 3 | from config.config import config 4 | 5 | react_bp = Blueprint("react", __name__, static_folder="../frontend/dist") 6 | 7 | @react_bp.route("/", defaults={"path": ""}) 8 | @react_bp.route("/") 9 | def serve_react_app(path): 10 | """ 11 | Serve the React app's index.html for all non-API routes. 12 | React will take over routing for SPA functionality. 
13 | """ 14 | static_folder = os.path.join(current_app.root_path, "../frontend/dist") 15 | if path != "" and os.path.exists(os.path.join(static_folder, path)): 16 | return send_from_directory(static_folder, path) 17 | else: 18 | return send_from_directory(static_folder, "index.html") 19 | 20 | @react_bp.route("/api/react-init", methods=["GET"]) 21 | def react_frontend_config(): 22 | color_variants = { 23 | 'green': 'success', 24 | 'blue': 'primary', 25 | 'red': 'danger', 26 | 'yellow': 'warning', 27 | 'white': 'light', 28 | 'black': 'dark', 29 | 'pink': 'pink', 30 | 'purple': 'purple', 31 | 'orange': 'orange', 32 | 'cyan': 'cyan' 33 | } 34 | color = color_variants.get(config.UI_BASE_COLOR, "success") 35 | cloudflare = config.CF_ACCESS_AUTH 36 | oidc = config.OIDC_ENABLED 37 | react_config = { 38 | "UI_BASE_COLOR": color, 39 | "CF_ACCESS_AUTH": cloudflare, 40 | "OIDC_ENABLED": oidc 41 | } 42 | return jsonify(react_config), 200 -------------------------------------------------------------------------------- /backend/routes/scan.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint, jsonify 2 | from functions.tasks.scan import scan_library_task 3 | from celery_app import celery 4 | 5 | scan_bp = Blueprint('scan_bp', __name__) 6 | 7 | @scan_bp.route('/scan-library', methods=['POST']) 8 | def trigger_scan_manually(): 9 | task = scan_library_task.delay() 10 | return jsonify({"task_id": task.id}), 200 11 | 12 | @scan_bp.route('/scan-status/', methods=['GET']) 13 | def get_scan_status(task_id): 14 | result = celery.AsyncResult(task_id) 15 | # result.state could be 'PENDING', 'STARTED', 'RETRY', 'FAILURE', or 'SUCCESS' 16 | return jsonify({"state": result.state}), 200 17 | -------------------------------------------------------------------------------- /backend/routes/users.py: -------------------------------------------------------------------------------- 1 | import bcrypt 2 | import pyotp 3 | from flask 
import bcrypt
import pyotp
from flask import Blueprint, jsonify, request
from sqlalchemy.exc import SQLAlchemyError
from config.logger import logger
from functions.book_management import login_required
from functions.db import get_session
from functions.utils import hash_password, check_pw_complexity, encrypt_totp_secret, unlink_oidc
from functions.extensions import limiter
from models.users import Users

users_bp = Blueprint('users', __name__, url_prefix='/api')

@users_bp.route('/user/change-password', methods=['PATCH'])
@login_required
@limiter.limit('2 per second')
def change_password(token_state):
    """
    Change the authenticated user's password.

    Expects a JSON body with "new_password" and "old_password". Rejects the
    request when unauthenticated, when fields are missing/empty, when the new
    password fails complexity checks, when the account is OIDC-linked, when
    the old password is wrong, or when the new password equals the old one.

    :param token_state: dict with "user_id" from @login_required, or the
        literal string "no_token" when no valid token was presented.
    :returns: (JSON response, HTTP status) tuple.
    """
    data = request.get_json(silent=True)
    if token_state == "no_token":
        return jsonify({"error": "Unauthenticated access is not allowed"}), 401
    user_id = token_state["user_id"]
    if data is None:
        return jsonify({"error": "No data submitted"}), 400
    required_fields = ["new_password", "old_password"]
    missing_fields = [field for field in required_fields if field not in data]
    if missing_fields:
        # BUG FIX: the original interpolated `.capitalize().split("_")`, which
        # rendered a Python list repr (e.g. "['New password']") into the user
        # message and nested double quotes inside the f-string — a SyntaxError
        # on Python < 3.12. Build a human-readable label instead.
        missing_label = ', '.join(missing_fields).replace('_', ' ').capitalize()
        return jsonify({"error": f"{missing_label} field(s) required."}), 400
    new_password = data.get("new_password")
    old_password = data.get("old_password")
    if not new_password.strip() or not old_password.strip():
        return jsonify({"error": "Password fields cannot be empty."}), 400
    valid_pw, message = check_pw_complexity(new_password)
    if not valid_pw:
        return jsonify({"error": message}), 400
    session = get_session()
    try:
        user_record = session.query(Users).filter_by(id=user_id).first()
        if user_record.auth_type == "oidc":
            return jsonify({"error": "Unable to change your password while connected to OIDC. Please revert to a local account to change your password."}), 400
        current_hashed_password = user_record.password_hash
        valid_old_pw = bcrypt.checkpw(old_password.encode('utf-8'), current_hashed_password.encode('utf-8'))
        if not valid_old_pw:
            return jsonify({"error": "Current password is incorrect."}), 401
        # Disallow a no-op change to the same password.
        if bcrypt.checkpw(new_password.encode('utf-8'), current_hashed_password.encode('utf-8')):
            return jsonify({"error": "The new password cannot be the same as the current password."}), 400
        user_record.password_hash = hash_password(new_password)
        session.commit()
        return jsonify({"message": "Password changed successfully."}), 200
    except SQLAlchemyError as e:
        session.rollback()
        logger.exception(f"Failed to update password: {e}")
        return jsonify({"error": "An unexpected database error occurred. Please try again later."}), 500
    except Exception as e:
        session.rollback()
        logger.exception(f"Failed to update password: {e}")
        return jsonify({"error": "An unexpected error occurred. Please try again later."}), 500
    finally:
        session.close()
@users_bp.route('/user/disable-mfa', methods=['DELETE'])
@login_required
@limiter.limit('2 per second')
def disable_mfa(token_state):
    """
    Turn off TOTP-based MFA for the authenticated user.

    Rejects unauthenticated requests, unknown users, OIDC-linked accounts,
    and accounts that do not have MFA enabled. On success both the flag and
    the stored (encrypted) secret are cleared.

    :param token_state: dict with "user_id" from @login_required, or "no_token".
    :returns: (JSON response, HTTP status) tuple.
    """
    if token_state == "no_token":
        return jsonify({"error": "Unauthenticated access is not allowed"}), 401
    user_id = token_state["user_id"]
    session = get_session()
    try:
        account = session.query(Users).filter_by(id=user_id).first()
        if not account:
            return jsonify({"error": "User not found"}), 404
        if account.auth_type == "oidc":
            return jsonify({"error": "Cannot disable MFA with OIDC auth type"}), 400
        if not account.mfa_enabled:
            return jsonify({"error": "MFA is not enabled"}), 400
        # Drop both the enabled flag and the secret so nothing stale remains.
        account.mfa_enabled = False
        account.mfa_secret = None
        session.commit()
    except Exception as e:
        session.rollback()
        logger.exception(f"Error disabling MFA for user ID {user_id}: {e}")
        return jsonify({"error": "An unexpected error occurred"}), 500
    finally:
        session.close()
    return jsonify({"message": "MFA successfully disabled."}), 200
@users_bp.route('/user/get-mfa-status', methods=['GET'])
@login_required
@limiter.limit('2 per second')
def get_mfa_status(token_state):
    """Report whether TOTP MFA is enabled for the authenticated user."""
    if token_state == "no_token":
        return jsonify({"error": "Unauthenticated access is not allowed"}), 401
    user_id = token_state["user_id"]
    session = get_session()
    try:
        account = session.query(Users).filter_by(id=user_id).first()
        if not account:
            return jsonify({"error": "User not found"}), 404
        # The frontend expects the literal strings "true"/"false".
        if account.mfa_enabled:
            return jsonify({"message": "true"}), 200
        return jsonify({"message": "false"}), 200
    except Exception as e:
        logger.exception(f"Error retrieving MFA status for user ID {user_id}: {e}")
        return jsonify({"error": "An unexpected error occurred"}), 500
    finally:
        session.close()

@users_bp.route('/user/unlink-oidc', methods=['PATCH'])
@login_required
@limiter.limit('2 per second')
def unlink_oidc_user(token_state):
    """Detach the authenticated user's account from OIDC (delegated helper)."""
    if token_state == "no_token":
        return jsonify({"error": "Unauthenticated access is not allowed"}), 401
    user_id = token_state["user_id"]
    unlink_response, unlink_status = unlink_oidc(user_id)
    return unlink_response, unlink_status

# @users_bp.route('/user/link-oidc', methods=['PATCH'])
# @login_required
# @limiter.limit('2 per seconds')
# def link_oidc_user():
#     oidc_session["link_oidc"] = True
#     redirect_uri = url_for('auth.oidc_login', _external=True)
#     return redirect(redirect_uri)

@users_bp.route('/user/get-oidc-status', methods=['GET'])
@login_required
@limiter.limit('2 per second')
def get_oidc_status(token_state):
    """Report whether the authenticated user's account is OIDC-linked."""
    if token_state == "no_token":
        return jsonify({"error": "Unauthenticated access is not allowed"}), 401
    user_id = token_state["user_id"]
    session = get_session()
    try:
        account = session.query(Users).filter_by(id=user_id).first()
        if not account:
            return jsonify({"error": "User not found"}), 404
        # The frontend expects the literal strings "true"/"false".
        if account.auth_type == 'oidc':
            return jsonify({"message": "true"}), 200
        return jsonify({"message": "false"}), 200
    except Exception as e:
        logger.exception(f"Error retrieving OIDC status for user ID {user_id}: {e}")
        return jsonify({"error": "An unexpected error occurred"}), 500
    finally:
        session.close()
allowed"}), 401 194 | # user_id = token_state["user_id"] 195 | # if data is None: 196 | # return jsonify({"error": "No data submitted"}), 400 197 | # required_fields = [""] -------------------------------------------------------------------------------- /backend/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/tests/__init__.py -------------------------------------------------------------------------------- /backend/tests/epubs/Pride_and_Prejudice.epub: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/tests/epubs/Pride_and_Prejudice.epub -------------------------------------------------------------------------------- /backend/tests/epubs/Test Book - Author One.epub: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/tests/epubs/Test Book - Author One.epub -------------------------------------------------------------------------------- /backend/tests/epubs/Test Book 2 - Author Two.epub: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/tests/epubs/Test Book 2 - Author Two.epub -------------------------------------------------------------------------------- /backend/tests/test.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/backend/tests/test.png -------------------------------------------------------------------------------- /backend/tests/test_celery.py: 
def test_celery_scheduler_disabled():
    from unittest.mock import patch
    from celery_app import make_celery
    from config.config import config

    # With the scheduler disabled, no beat_schedule should be configured.
    with patch.object(config, 'SCHEDULER_ENABLED', False):
        celery = make_celery()

    assert 'beat_schedule' not in celery.conf
    # General Celery configuration should still be present.
    assert celery.conf['timezone'] == 'UTC'
    assert celery.conf['enable_utc'] is True
    assert 'result_expires' in celery.conf

import pytest

@pytest.mark.usefixtures("celery_app", "celery_worker")
def test_celery_scan_task_execution():
    from unittest.mock import patch
    from functions.tasks.scan import scan_library_task

    test_base_directory = "/fake/directory"

    # Mock `config.BASE_DIRECTORY` and the scan implementation itself.
    with patch('config.config.config.BASE_DIRECTORY', test_base_directory), \
         patch('functions.tasks.scan.scan_and_store_metadata', return_value=None) as mock_scan:
        result = scan_library_task.apply_async().get()

    # The task returns None because the underlying scan is mocked.
    assert result is None
    mock_scan.assert_called_once_with(test_base_directory)

@pytest.fixture
def client():
    from flask import Flask
    from routes.scan import scan_bp
    app = Flask(__name__)
    app.register_blueprint(scan_bp)

    with app.test_client() as client:
        yield client

def test_scan_library_route(client):
    from unittest.mock import patch
    # Mock the Celery task to avoid triggering a real scan. BUG FIX: give the
    # fake task a concrete id — jsonify cannot serialise a bare MagicMock.
    with patch('functions.tasks.scan.scan_library_task.delay') as mock_delay:
        mock_delay.return_value.id = "fake-task-id"
        response = client.post('/scan-library')

    # BUG FIX: routes/scan.py returns {"task_id": task.id}, not a "message"
    # payload — the previous assertion could never pass against the route.
    assert response.status_code == 200
    assert response.json == {"task_id": "fake-task-id"}

    # Ensure the Celery task was queued exactly once.
    mock_delay.assert_called_once()

from unittest.mock import patch, MagicMock
from celery.exceptions import MaxRetriesExceededError, Retry
from sqlalchemy.exc import SQLAlchemyError
from functions.tasks.scan import scan_library_task


def test_scan_library_task_retry_on_db_error():
    # session.commit raises so the task takes its retry path.
    mock_session = MagicMock()
    mock_session.commit.side_effect = SQLAlchemyError("DB Error")

    with patch('functions.metadata.scan.get_session', return_value=mock_session), \
         patch('config.logger.logger.warning') as mock_logger_warning, \
         patch.object(scan_library_task, 'retry', side_effect=Retry()) as mock_retry:
        scan_library_task.request.retries = 0  # simulate first retry attempt

        with pytest.raises(Retry):
            scan_library_task()  # Celery injects `self` automatically

        mock_logger_warning.assert_called_once_with(
            "Retrying task due to database error: DB Error. Attempt 1"
        )
        mock_retry.assert_called_once()

@patch("celery.app.task.Task.request")
def test_scan_library_task_max_retries_exceeded(mock_request):
    # Simulate a queued (not directly-called) task already at max retries.
    mock_request.called_directly = False
    mock_request.retries = 5

    mock_session = MagicMock()
    mock_session.commit.side_effect = SQLAlchemyError("DB Error")

    with patch("functions.metadata.scan.get_session", return_value=mock_session), \
         patch("config.logger.logger.exception") as mock_logger_exception:
        with pytest.raises(MaxRetriesExceededError):
            scan_library_task()

        mock_logger_exception.assert_called_once_with(
            "Maximum retries exceeded for task 'functions.tasks.scan.scan_library_task' after 5 attempts. Original DB Error: DB Error"
        )

# --- backend/tests/test_config_config.py ------------------------------------

from config.config import str_to_bool

def test_str_to_bool():
    # Falsy strings and unknown words map to False; truthy words and ints to True.
    assert str_to_bool("False") is False
    assert str_to_bool("") is False
    assert str_to_bool(1) is True
    assert str_to_bool(30) is False
    assert str_to_bool("y") is True
    assert str_to_bool("Hello") is False
    assert str_to_bool(True) is True

def test_redis_pw():
    from unittest.mock import patch
    from config.config import Config
    config = Config()

    # A configured Redis password must flow into every derived URI.
    with patch.object(config, "REDIS_PASSWORD", "HELLO"):
        assert "HELLO" in config.RATE_LIMITER_URI
        assert "HELLO" in config.CELERY_BROKER_URL
        assert "HELLO" in config.CELERY_RESULT_BACKEND

# --- backend/tests/test_config_logger.py (head) -----------------------------

from unittest.mock import patch
import importlib
import config.logger

@patch("os.getenv", return_value="HAHAHA")
def test_logger_log_level(mockenv):
    importlib.reload(config.logger)  # re-imports and executes the logger module
    from config.logger import LOG_LEVEL
    assert LOG_LEVEL == "INFO"  # invalid level should fall back to "INFO"

def test_logger_loggin_info(logs):
    config.logger.logger.info("INFO TEST")
    assert "INFO TEST" in logs.info
def test_logger_logging_error(logs):
    config.logger.logger.error("ERROR TEST")
    assert "ERROR TEST" in logs.error

def test_logger_logging_exception(logs):
    config.logger.logger.exception("EXCEPTION TEST")
    assert "EXCEPTION TEST" in logs.error

def test_logger_logging_error_exc_info(logs):
    # Logging an exception object via logger.error must include its message.
    try:
        raise ValueError("REAL EXCEPTION TEST")
    except ValueError as e:
        config.logger.logger.error(e)
    assert "REAL EXCEPTION TEST" in logs.error

# --- backend/tests/test_functions_book_management.py ------------------------

from uuid import UUID
from functions.book_management import generate_session_id

def is_valid_uuid(val):
    """Return True when `val` round-trips through uuid.UUID unchanged."""
    try:
        return str(UUID(str(val))) == str(val)
    except ValueError:
        return False

def test_generate_sesion_id():
    session_id = generate_session_id()

    assert is_valid_uuid(session_id) is True

# --- backend/tests/test_functions_db.py -------------------------------------

from unittest.mock import patch
import pytest

@patch("config.config.config.DB_TYPE", "mysql")
def test_get_database_url_mysql():
    from functions.db import get_database_url
    assert get_database_url().startswith("mysql+pymysql://")  # MySQL URL scheme

@patch("config.config.config.DB_TYPE", "postgres")
def test_get_database_url_postgres():
    from functions.db import get_database_url
    assert get_database_url().startswith("postgresql://")  # PostgreSQL URL scheme

@patch("config.config.config.DB_TYPE", "sqlite")
def test_get_database_url_sqlite():
    from functions.db import get_database_url
    assert get_database_url().startswith("sqlite:///")  # SQLite URL scheme

@patch("config.config.config.DB_TYPE", "unknown")
def test_get_database_url_invalid_db_type():
    from functions.db import get_database_url

    # An unsupported backend name must raise a descriptive ValueError.
    with pytest.raises(ValueError, match="Unsupported DB_TYPE: unknown"):
        get_database_url()

# --- backend/tests/test_functions_init.py (head) ----------------------------

import pytest

@pytest.fixture
def test_app():
    from flask import Flask
    app = Flask(__name__)

    app.config["TESTING"] = True   # enables "testing mode"
    app.config["DEBUG"] = False    # no debugger during tests

    yield app

def test_init_rate_limit(test_app):
    from unittest.mock import patch
    with patch("config.config.config.ENVIRONMENT", "production"):
        from config.config import config
        from functions.init import init_rate_limit
        init_rate_limit(test_app)
        assert test_app.config["RATELIMIT_ENABLED"] == config.RATE_LIMITER_ENABLED

def test_init_env_fail():
    from unittest.mock import patch
    # A missing BASE_URL must abort startup with exit code 1.
    with patch("config.config.config.BASE_URL", ""):
        from functions.init import init_env
        with pytest.raises(SystemExit) as exc:
            init_env()
        assert exc.value.code == 1

def test_init_env_exception():
    from unittest.mock import patch
    # Any unexpected error during env validation must also exit(1).
    with patch("config.config.config.BASE_URL", ""):
        with patch("functions.init.check_required_envs", side_effect=Exception("Simulated exception")):
            from functions.init import init_env
            with pytest.raises(SystemExit) as exc:
                init_env()
            assert exc.value.code == 1
def test_check_admin_user(db_session):
    """Walk check_admin_user through every credential-validation failure."""
    from functions.utils import check_admin_user

    # (password, email, expected exact message) — checked in order.
    failure_cases = [
        ("", "",
         "Missing admin credentials. Please set ADMIN_PASS and ADMIN_EMAIL in environment variables. These variables can be unset after initial setup."),
        ("123456", "testadmin@example.com",
         "Password must be at least 8 characters long."),
        ("12345678", "testadmin@example.com",
         "Password must contain at least one uppercase letter."),
        ("A1234567", "testadmin@example.com",
         "Password must contain at least one lowercase letter."),
        ("AbCdEfGh", "testadmin@example.com",
         "Password must contain at least one number."),
        ("Ab123456", "testadmin@example.com",
         "Password must contain at least one special character."),
    ]
    for password, email, expected in failure_cases:
        check_result, message = check_admin_user(password, email)
        assert check_result is False
        assert message == expected

    # A malformed email fails with a validator-specific message.
    check_result, message = check_admin_user("P@ssw0rd", "test@admine.$xamplecom")
    assert check_result is False
    assert message.startswith("Email validation error:")

def test_init_admin_user_failed():
    from unittest.mock import patch
    from functions.init import init_admin_user
    # ADMIN_PASS fails complexity checks, so startup must abort with exit(1).
    with patch("config.config.config.ADMIN_PASS", "Pssw0rd"), \
         patch("config.config.config.ADMIN_EMAIL", "testadminy@example.com"), \
         patch("config.config.config.ENVIRONMENT", "production"):
        with pytest.raises(SystemExit) as exc:
            init_admin_user()

    assert exc.value.code == 1

def test_init_admin_user_exception():
    from unittest.mock import patch
    from functions.init import init_admin_user
    # Any unexpected error during the admin check must also exit(1).
    with patch("config.config.config.ADMIN_PASS", "Pssw0rd"), \
         patch("config.config.config.ADMIN_EMAIL", "testadminy@example.com"), \
         patch("config.config.config.ENVIRONMENT", "production"):
        with patch("functions.init.check_admin_user", side_effect=Exception("Simulated exception")):
            with pytest.raises(SystemExit) as exc:
                init_admin_user()
            assert exc.value.code == 1

def test_init_admin_user(db_session):
    from unittest.mock import patch
    from functions.init import init_admin_user
    from functions.utils import check_admin_user
    from models.users import Users
    with patch("config.config.config.ADMIN_PASS", "P@ssw0rd"), \
         patch("config.config.config.ADMIN_EMAIL", "testadminy@example.com"), \
         patch("config.config.config.ENVIRONMENT", "production"):
        from config.config import config
        init_admin_user()
        admin_user = db_session.query(Users).filter_by(username="admin").first()
        assert admin_user.email == "testadminy@example.com"

        # A second check should short-circuit because the admin now exists.
        check_result, message = check_admin_user(config.ADMIN_PASS, config.ADMIN_EMAIL)

        assert check_result is True
        assert message == "Admin user already exists. Skipping initial setup."

        admin_user = db_session.query(Users).filter_by(username="admin").first()
        assert admin_user.username == "admin"

def test_reset_admin_user_password_errors(db_session):
    from functions.utils import reset_admin_user_password
    from models.users import Users
    import bcrypt

    # Empty password is rejected outright.
    check_result, message = reset_admin_user_password("")

    assert check_result is False
    assert message == "Missing password for admin user password reset. Please set ADMIN_PASS in environment variables."

    # The stored hash still matches the password from initial setup.
    admin_user = db_session.query(Users).filter_by(username="admin").first()
    stored_hash = admin_user.password_hash
    password = "P@ssw0rd"
    assert bcrypt.checkpw(password.encode('utf-8'), stored_hash.encode('utf-8'))

    # A reset must also clear any MFA configuration.
    admin_user.mfa_secret = 'haha'
    admin_user.mfa_enabled = True
    db_session.commit()

    check_result, message = reset_admin_user_password(password)

    assert check_result is True
    assert message == "Admin password and MFA reset successfully."

    # Without an admin row the reset reports a lookup failure.
    db_session.delete(admin_user)
    db_session.commit()

    check_result, message = reset_admin_user_password(password)

    assert check_result is False
    assert message == "Admin user not found in the database."

def test_init_admin_password_reset_fail():
    from unittest.mock import patch
    # ADMIN_RESET with an empty ADMIN_PASS must abort startup.
    with patch("config.config.config.ADMIN_RESET", True), \
         patch("config.config.config.ADMIN_PASS", ""):
        from functions.init import init_admin_password_reset
        with pytest.raises(SystemExit) as exc:
            init_admin_password_reset()
        assert exc.value.code == 1

def test_init_admin_password_reset_exception(logs):
    from unittest.mock import patch
    # Unexpected reset failures are logged rather than crashing startup.
    with patch("config.config.config.ADMIN_RESET", True), \
         patch("config.config.config.ADMIN_PASS", ""):
        with patch("functions.init.reset_admin_user_password", side_effect=Exception("Simulated exception")):
            from functions.init import init_admin_password_reset
            init_admin_password_reset()
            assert "Failed to reset admin user password" in logs.error
from models.progress_mapping import ProgressMapping


def test_scan_and_store_metadata_moved(db_session):
    """Moving an epub on disk should update its relative_path on rescan."""
    from functions.metadata.scan import scan_and_store_metadata
    from models.epub_metadata import EpubMetadata
    import os
    import shutil

    moved_dir = "tests/epubs/test_dir"
    os.mkdir(moved_dir)
    shutil.move("tests/epubs/Pride_and_Prejudice.epub", moved_dir)

    # Scan from the backend project root (two levels above this test file).
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    scan_and_store_metadata(project_root)

    updated_record = db_session.query(EpubMetadata).filter_by(title="Pride and Prejudice").first()

    assert updated_record.relative_path == "tests/epubs/test_dir/Pride_and_Prejudice.epub"

    # Restore the fixture layout for subsequent tests.
    shutil.move("tests/epubs/test_dir/Pride_and_Prejudice.epub", "tests/epubs")
    os.rmdir(moved_dir)

def test_scan_and_store_metadata_deletes_missing_files(db_session):
    """Records whose files vanished are pruned along with progress mappings."""
    from functions.metadata.scan import scan_and_store_metadata
    from models.epub_metadata import EpubMetadata
    import os
    from unittest.mock import patch

    # Prepopulate the database with books that do not exist on disk, plus
    # progress mappings pointing at a doomed book (2000) and a real one (1).
    records = [
        EpubMetadata(
            identifier="http://test-book-7",
            title="Test Book 7",
            authors="Author Seven",
            series="Test Series 7",
            seriesindex=7.0,
            relative_path="path/to/file7.epub"
        ),
        EpubMetadata(
            id=2000,
            identifier="file8",
            title="Test Book 8",
            authors="Author Eight",
            series="Test Series 8",
            seriesindex=8.0,
            relative_path="path/to/file8.epub"
        ),
        ProgressMapping(
            id=20,
            user_id=8,
            book_id=2000,
        ),
        ProgressMapping(
            id=30,
            user_id=89,
            book_id=1
        )
    ]
    db_session.add_all(records)
    db_session.commit()

    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    # Patch environment so the deletion branch is exercised.
    with patch("config.config.config.ENVIRONMENT", "production"):
        scan_and_store_metadata(project_root)

    # Only the three real epub fixtures should remain.
    remaining_records = db_session.query(EpubMetadata).all()
    assert len(remaining_records) == 3
    p_and_p = db_session.query(EpubMetadata).filter_by(title="Pride and Prejudice").first()
    assert p_and_p.title == "Pride and Prejudice"

    # The mapping for user 89 pointed at book_id=1?  It referenced a pruned
    # book, so that user should no longer have any mappings.
    user_id = 89
    pgmapping_result = db_session.query(ProgressMapping).filter_by(user_id=user_id).all()
    pgmapping_user_ids = [mapping.user_id for mapping in pgmapping_result]
    assert user_id not in pgmapping_user_ids

def test_scan_and_store_metadata_exception_handling():
    """
    Test the exception handling in scan_and_store_metadata.
    """
    from functions.metadata.scan import scan_and_store_metadata
    from unittest.mock import patch, MagicMock

    # A failing commit should trigger rollback, logging, and session close.
    mock_session = MagicMock()
    mock_session.commit.side_effect = Exception("Simulated database failure")
    mock_get_session = MagicMock(return_value=mock_session)

    with patch("functions.metadata.scan.get_session", mock_get_session), \
         patch("functions.metadata.scan.logger.error") as mock_logger_error:

        # NOTE(review): as in the original, a non-raising call passes silently
        # here; only the mock assertions below actually verify the behaviour.
        try:
            scan_and_store_metadata("/dummy/path")
        except Exception as e:
            assert str(e) == "Simulated database failure"

        mock_session.rollback.assert_called_once()
        mock_logger_error.assert_called_once()

        logged_message = mock_logger_error.call_args[0][0]
        assert logged_message == "Error during library scan and metadata update: Simulated database failure"

        # The session must be closed even after a failure.
        mock_session.close.assert_called_once()
def test_check_required_envs_secret_key():
    from functions.utils import check_required_envs
    from unittest.mock import patch
    # An empty SECRET_KEY in production must be reported.
    with patch("config.config.config.ENVIRONMENT", "production"), \
         patch("config.config.config.SECRET_KEY", ""):
        from config.config import config
        test, message = check_required_envs(config.SECRET_KEY, config.BASE_URL)

    assert test is False
    assert message == "SECRET_KEY environment variable is not set. Generate one (bash) using: openssl rand -hex 32"

def test_check_required_envs_invalid_secret_key():
    from functions.utils import check_required_envs
    from unittest.mock import patch
    # A non-hex SECRET_KEY is rejected as invalid.
    with patch("config.config.config.ENVIRONMENT", "production"), \
         patch("config.config.config.SECRET_KEY", "Hello_There"):
        from config.config import config
        test, message = check_required_envs(config.SECRET_KEY, config.BASE_URL)

    assert test is False
    assert message == "SECRET_KEY environment variable is invalid. Generate one (bash) using: openssl rand -hex 32"

def test_check_required_envs_base_url():
    from functions.utils import check_required_envs
    from unittest.mock import patch
    # A missing BASE_URL must be reported.
    with patch("config.config.config.ENVIRONMENT", "production"), \
         patch("config.config.config.BASE_URL", ""):
        from config.config import config
        test, message = check_required_envs(config.SECRET_KEY, config.BASE_URL)

    assert test is False
    assert message == "BASE_URL is not set. Please set this to your application's base URL"

def test_hash_password():
    from functions.utils import hash_password
    import bcrypt

    # Test case 1: Verify hashing results in a valid BCrypt hash
    password = "123456789"
    hashed_pw = hash_password(password)

    # Check the hash starts with the correct BCrypt prefix
    assert hashed_pw.startswith("$2b$"), "Generated hash is not a valid BCrypt hash."

    # Validate the hashed password with bcrypt's `checkpw`
    assert bcrypt.checkpw(password.encode('utf-8'),
                          hashed_pw.encode('utf-8')), "Hash does not correspond to the original password."

    # Test case 2: Ensure unique hashes are generated each time (salting)
    hashed_pw_2 = hash_password(password)
    assert hashed_pw != hashed_pw_2, "Two hashes for the same password should not be identical due to salting."

    # Test case 3: Handle edge cases - empty password.
    # IDIOM FIX: the original built the empty byte string via
    # b"".decode('utf-8').encode('utf-8') — a no-op round-trip; b"" is identical.
    empty_hashed = hash_password("")
    assert bcrypt.checkpw(b"", empty_hashed.encode('utf-8'))

def test_check_pw_complexity():
    from functions.utils import check_pw_complexity

    # A password meeting every rule passes with the success message.
    password = "P@ssw0rd"
    check_result, message = check_pw_complexity(password)

    assert check_result is True
    assert message == "Password complexity requirements have been met."

def test_check_admin_user_handles_exceptions():
    """
    Test the `check_admin_user` function to ensure it handles exceptions and returns
    the correct error responses.
    """
    from functions.utils import check_admin_user
    from unittest.mock import patch, MagicMock

    # Mock the session and force it to raise an exception
    mock_session = MagicMock()
    mock_session.query.side_effect = Exception("Simulated database exception")
    mock_get_session = MagicMock(return_value=mock_session)

    with patch("functions.utils.get_session", mock_get_session):
        result, message = check_admin_user("validpassword123", "admin@example.com")

    assert result is False, "Expected result to be False when an exception occurs."
    assert message == "Simulated database exception", "Expected the exception message to be returned."

    # Ensure the database session was closed even after an exception
    mock_session.close.assert_called_once()

def test_reset_admin_user_password_handles_exceptions():
    """
    Test the `reset_admin_user_password` function to ensure it handles exceptions and returns
    the correct error responses.
    """
    from functions.utils import reset_admin_user_password
    from unittest.mock import patch, MagicMock

    # Mock the session and force it to raise an exception
    mock_session = MagicMock()
    mock_session.query.side_effect = Exception("Simulated database exception")
    mock_get_session = MagicMock(return_value=mock_session)

    with patch("functions.utils.get_session", mock_get_session):
        result, message = reset_admin_user_password("validpassword123")

    assert result is False, "Expected result to be False when an exception occurs."
    assert message == "Simulated database exception", "Expected the exception message to be returned."
    # NOTE(review): unlike the check_admin_user variant above, no
    # session.close() assertion is visible in this chunk — confirm against the
    # original file whether one follows.
112 | 113 | # Ensure the database session was closed even after an exception 114 | mock_session.close.assert_called_once() 115 | 116 | def test_encrypt_totp_secret(app): 117 | from functions.utils import encrypt_totp_secret, decrypt_totp_secret 118 | with app.app_context(): 119 | secret = "hello" 120 | encrypted_secret = encrypt_totp_secret(secret) 121 | decrypted_secret = decrypt_totp_secret(encrypted_secret) 122 | assert decrypted_secret == "hello" -------------------------------------------------------------------------------- /backend/tests/test_routes_auth.py: -------------------------------------------------------------------------------- 1 | def test_login_successful(client, db_session): 2 | """ 3 | Test the /login endpoint for successful login 4 | """ 5 | # Send login request 6 | response = client.post("/login", json={ 7 | "username": "administrator", 8 | "password": "P@ssw0rd" 9 | }) 10 | 11 | assert response.status_code == 200 12 | assert "token" in response.json 13 | 14 | def test_login_successful_email(client, db_session): 15 | """ 16 | Test the /login endpoint for successful login 17 | """ 18 | # Send login request 19 | response = client.post("/login", json={ 20 | "username": "test@example.com", 21 | "password": "P@ssw0rd" 22 | }) 23 | 24 | assert response.status_code == 200 25 | assert "token" in response.json 26 | 27 | def test_login_totp(client, db_session): 28 | """ 29 | Test the /login endpoint for successful login 30 | """ 31 | # Send login request 32 | response = client.post("/login", json={ 33 | "username": "totp@example.com", 34 | "password": "P@ssw0rd" 35 | }) 36 | 37 | assert response.status_code == 200 38 | assert "token" in response.json 39 | 40 | def test_login_invalid_credentials(client, db_session): 41 | """ 42 | Test the /login endpoint for invalid credentials 43 | """ 44 | from models.users import Users 45 | response = client.post("/login", json={ 46 | "username": "administrator", 47 | "password": "wrongpassword" 48 | }) 49 | 50 | 
assert response.status_code == 401 51 | assert response.json["error"] == "Invalid credentials" 52 | 53 | user = db_session.query(Users).filter_by(username="administrator").first() 54 | assert user.failed_login_count == 1 55 | 56 | def test_login_missing_credentials(client, db_session): 57 | """ 58 | Test the /login endpoint for missing credentials 59 | """ 60 | response = client.post("/login", json={ 61 | "username": "testuser" 62 | }) 63 | 64 | assert response.status_code == 400 65 | assert response.json["error"] == "Missing username or password" 66 | 67 | def test_login_exception(client): 68 | """ 69 | Test the `login` endpoint to ensure it handles exceptions and returns 70 | the appropriate error message and status code. 71 | """ 72 | # Mock the session and force it to raise an exception 73 | import pytest 74 | from unittest.mock import patch, MagicMock 75 | from flask import json 76 | 77 | mock_session = MagicMock() 78 | mock_session.query.side_effect = Exception("Simulated database failure") 79 | mock_get_session = MagicMock(return_value=mock_session) 80 | 81 | # Patch `get_session` to use the mocked session 82 | with patch("routes.auth.get_session", mock_get_session): 83 | response = client.post( 84 | "/login", 85 | json={ 86 | "username": "administrator", 87 | "password": "P@ssw0rd", 88 | } 89 | ) 90 | # Ensure the response is a 500 error 91 | assert response.status_code == 500, "Expected a 500 Internal Server Error response." 92 | 93 | # Parse the JSON payload 94 | data = json.loads(response.data) 95 | 96 | # Verify the error message and details in the response 97 | assert data["error"] == "Internal server error", "Unexpected error message in the response." 
98 | 99 | # Ensure session was closed even after failure 100 | mock_session.close.assert_called_once() 101 | 102 | def test_otp_with_invalid_token(client): 103 | response = client.post( 104 | "/login/check-otp", 105 | ) 106 | assert response.status_code == 401 107 | assert response.json["error"] == "TOTP verification requires authentication." 108 | 109 | import pyotp 110 | from datetime import datetime, timezone 111 | 112 | def test_check_otp(app, client, db_session): 113 | # Step 1: Set up the user and MFA secret 114 | from functions.utils import encrypt_totp_secret 115 | from models.users import Users 116 | from bcrypt import hashpw, gensalt 117 | 118 | # Create a test user with an MFA secret 119 | with app.app_context(): 120 | mfa_secret = pyotp.random_base32() 121 | encrypted_mfa_secret = encrypt_totp_secret(mfa_secret) 122 | 123 | user = db_session.query(Users).filter_by(id=49).first() 124 | user.mfa_secret = encrypted_mfa_secret 125 | user.last_used_otp = None 126 | db_session.commit() 127 | 128 | totp = pyotp.TOTP(mfa_secret) 129 | 130 | # Step 3: Simulate the login process to get `token_state` 131 | login_response = client.post('/login', json={ 132 | 'username': user.username, 133 | 'password': 'P@ssw0rd' # Replace with the valid password used to hash above 134 | }) 135 | assert "token" in login_response.json 136 | assert login_response.status_code == 200 137 | 138 | login_token = login_response.json.get('token') 139 | assert login_token is not None 140 | 141 | # Step 4: Simulate the `check_otp` step with the valid TOTP 142 | headers = {"Authorization": f"Bearer {login_token}"} 143 | valid_otp = totp.now() 144 | otp_response = client.post('/login/check-otp', headers=headers, json={'otp': valid_otp}) 145 | 146 | # Step 5: Assert successful OTP verification 147 | assert otp_response.status_code == 200 148 | otp_token = otp_response.json.get('token') 149 | assert otp_token is not None 150 | 151 | # Step 6: Test prevention of OTP reuse 152 | 
db_session.expire_all() 153 | db_session.refresh(user) 154 | second_otp_response = client.post('/login/check-otp', headers=headers, json={'otp': valid_otp}) 155 | assert second_otp_response.status_code == 400 156 | assert "already been used" in second_otp_response.json['error'] 157 | 158 | third_otp_response = client.post('/login/check-otp', headers=headers, json={'otp': '123456'}) 159 | assert third_otp_response.status_code == 403 160 | assert "Incorrect one time pin" in third_otp_response.json["error"] 161 | 162 | def test_check_no_otp(headers, client): 163 | otp_response = client.post('/login/check-otp', headers=headers) 164 | assert otp_response.json["error"] == "No one time pin submitted" 165 | 166 | def test_check_otp_exception_handling(client, headers): 167 | """ 168 | Test the `check_otp` route to ensure it handles exceptions and returns 169 | the appropriate error message and status code. 170 | """ 171 | from unittest.mock import patch, MagicMock 172 | from flask import json 173 | 174 | # Mock the session and force it to raise an exception 175 | mock_session = MagicMock() 176 | mock_session.query.side_effect = Exception("Simulated database failure") 177 | mock_get_session = MagicMock(return_value=mock_session) 178 | 179 | # Build the token state (as required by the check_otp function) 180 | token_state = {"user_id": 50} 181 | 182 | # Patch `get_session` to use the mocked session 183 | with patch("routes.auth.get_session", mock_get_session): 184 | # Send a POST request to the /login/check-otp endpoint 185 | response = client.post( 186 | "/login/check-otp", 187 | headers=headers, 188 | json={"otp": "123456"}, 189 | environ_base={"token_state": token_state} 190 | ) 191 | 192 | # Ensure the response is a 500 Internal Server Error 193 | assert response.status_code == 500, "Expected a 500 Internal Server Error response." 
194 | 195 | # Parse the JSON payload 196 | data = json.loads(response.data) 197 | 198 | # Verify the error message and details in the response 199 | assert data["error"] == "Internal server error.", "Unexpected error message in the response." 200 | 201 | # Ensure session rollback and closure happened 202 | mock_session.rollback.assert_called_once() 203 | mock_session.close.assert_called_once() -------------------------------------------------------------------------------- /backend/tests/test_routes_authors.py: -------------------------------------------------------------------------------- 1 | from email.encoders import encode_7or8bit 2 | from unittest.mock import patch 3 | 4 | 5 | def test_get_authors_no_books(client): 6 | """ 7 | Test /api/authors when there are no books in the database (total_books == 0). 8 | """ 9 | from unittest.mock import patch 10 | with patch("routes.authors.get_session") as mock_get_session: 11 | # Mock the session 12 | mock_session = mock_get_session.return_value 13 | mock_session.query.return_value.count.return_value = 0 14 | 15 | # Make a GET request to the endpoint 16 | response = client.get("/api/authors") 17 | 18 | # Verify the status code and response 19 | assert response.status_code == 200 20 | assert response.json == { 21 | "authors": [], 22 | "total_authors": 0 23 | } 24 | 25 | def test_get_authors(client, headers): 26 | """ 27 | Test the /api/authors endpoint for returning books 28 | """ 29 | # Perform GET request 30 | response = client.get( 31 | "/api/authors", 32 | headers=headers 33 | ) 34 | 35 | assert response.status_code == 200 36 | assert response.json["total_authors"] != 0 37 | 38 | def test_get_authors_no_auth(client): 39 | """ 40 | Test the /api/authors endpoint for returning books 41 | """ 42 | from unittest.mock import patch 43 | with patch("config.config.config.ALLOW_UNAUTHENTICATED", False): 44 | # Perform GET request 45 | response = client.get("/api/authors") 46 | assert response.status_code == 401 47 | assert 
response.json["error"] == "Unauthenticated access is not allowed. Please see ALLOW_UNAUTHENTICATED environment variable" 48 | 49 | def test_get_author_books(client): 50 | from urllib.parse import quote 51 | encoded_author = quote("Jane Austen") 52 | response = client.get(f"/api/authors/{encoded_author}",) 53 | 54 | assert response.status_code == 200 55 | assert response.json["total_books"] == 1 56 | 57 | def test_get_author_books_not_exist(client): 58 | from urllib.parse import quote 59 | encoded_author = quote("Jane Austin") 60 | response = client.get(f"/api/authors/{encoded_author}",) 61 | 62 | assert response.status_code == 404 63 | assert "No books found for author" in response.json["error"] -------------------------------------------------------------------------------- /backend/tests/test_routes_media.py: -------------------------------------------------------------------------------- 1 | from models.epub_metadata import EpubMetadata 2 | 3 | def test_get_cover_with_cover_image(client): 4 | """ 5 | Test that a valid book with a cover image returns the correct image data and MIME type. 6 | """ 7 | # Make request for the cover of an existing book (real endpoint) 8 | response = client.get("/api/covers/http-www-gutenberg-org-1342") 9 | 10 | # Validate response 11 | assert response.status_code == 200 12 | assert response.headers["Content-Type"] == "image/jpeg" # Ensure the MIME type is JPEG 13 | assert len(response.data) > 0 # Ensure image data is returned (non-empty binary data) 14 | 15 | def test_get_cover_without_cover_image(client): 16 | """ 17 | Test that a book without a cover image serves the placeholder image. 
18 | """ 19 | # Simulate an identifier for a book without a cover (non-existent ID in the database) 20 | import os 21 | response = client.get("/api/covers/nonexistent1234") 22 | 23 | # Validate response 24 | assert response.status_code == 200 25 | 26 | # Ensure the placeholder image is served 27 | placeholder_path = os.path.join(client.application.static_folder, "placeholder.jpg") 28 | with open(placeholder_path, "rb") as f: 29 | placeholder_data = f.read() 30 | assert response.data == placeholder_data # Validate it's indeed the placeholder image 31 | assert response.headers["Content-Type"] == "image/jpeg" # Validate the MIME type is still JPEG 32 | 33 | def test_download_valid_book(client): 34 | """ 35 | Test downloading a book with a valid identifier that exists in the database and filesystem. 36 | """ 37 | import os 38 | from unittest.mock import patch 39 | 40 | current_file_path = os.path.abspath(__file__) 41 | project_root = os.path.dirname(current_file_path) 42 | project_root = os.path.dirname(project_root) 43 | 44 | with patch("config.config.config.BASE_DIRECTORY", project_root): 45 | response = client.get("/download/61cee114-a920-4427-809f-50da0678c004") 46 | 47 | # Assert 48 | assert response.status_code == 200 49 | assert response.headers["Content-Disposition"].startswith("attachment;") 50 | assert response.content_type == "application/epub+zip" 51 | 52 | # Reading the file to check content 53 | epub_path = os.path.join("tests", "epubs", "Test Book - Author One.epub") 54 | with open(epub_path, "rb") as f: 55 | expected_file_data = f.read() 56 | 57 | assert response.data == expected_file_data # Confirm file content matches 58 | 59 | def test_download_book_not_in_database(client): 60 | """ 61 | Test downloading a book with an identifier that does not exist in the database. 
62 | """ 63 | # Act 64 | response = client.get("/download/61cee114-a920-4427-809f-50ad7678c004") 65 | 66 | # Assert 67 | assert response.status_code == 404 68 | assert b"Resource not found" in response.data 69 | 70 | def test_download_file_missing(client, headers_media_test): 71 | """ 72 | Test downloading a book that exists in the database but is missing in the filesystem. 73 | """ 74 | import os 75 | import shutil 76 | book_identifier = "61cee114-a920-4427-809f-50da0678c004" 77 | epub_path = os.path.join("tests", "epubs", "Test Book - Author One.epub") 78 | tmp_directory = os.path.join("tests", "epubs", "tmp_test_dir") 79 | 80 | try: 81 | # Move file to simulate a missing file 82 | os.mkdir(tmp_directory) 83 | shutil.move(epub_path, tmp_directory) 84 | 85 | # Act 86 | response = client.get( 87 | f"/download/{book_identifier}", 88 | headers=headers_media_test 89 | ) 90 | 91 | # Assert 92 | assert response.status_code == 404 93 | assert b"Not Found" in response.data 94 | finally: 95 | # Clean up: Move file back and remove temporary directory 96 | shutil.move(os.path.join(tmp_directory, "Test Book - Author One.epub"), "tests/epubs") 97 | os.rmdir(tmp_directory) 98 | 99 | def test_stream_valid_book(client): 100 | """ 101 | Test streaming a book with a valid identifier that exists in the database and filesystem, 102 | and indirectly test `serve_book_file` by following the returned URL. 
103 | """ 104 | import os 105 | from unittest.mock import patch 106 | 107 | # The valid book identifier (as used in `test_download_valid_book`) 108 | book_identifier = "61cee114-a920-4427-809f-50da0678c004" 109 | 110 | # Get the current project root as in other tests 111 | current_file_path = os.path.abspath(__file__) 112 | project_root = os.path.dirname(current_file_path) 113 | project_root = os.path.dirname(project_root) 114 | 115 | # Patch the BASE_DIRECTORY to set up the file system correctly 116 | with patch("config.config.config.BASE_DIRECTORY", project_root): 117 | # Act: Make request to `stream` endpoint 118 | from config.config import config 119 | response = client.get(f"/stream/{book_identifier}") 120 | 121 | # Assert: The `stream` endpoint responds as expected 122 | assert response.status_code == 200 123 | response_json = response.get_json() 124 | assert "url" in response_json # Ensure `url` exists in the JSON 125 | epub_file_url = response_json["url"] 126 | assert epub_file_url.startswith(config.BASE_URL.rstrip("/")) 127 | 128 | # Act: Make a follow-up request to the generated URL (indirectly testing `serve_book_file`) 129 | epub_relative_path = epub_file_url.split("/files/")[-1] # Extract path after `/files/` 130 | serve_response = client.get(f"/files/{epub_relative_path}") 131 | 132 | # Assert: The `serve_book_file` endpoint responds as expected 133 | assert serve_response.status_code == 200 134 | assert serve_response.headers["Content-Disposition"].startswith("inline;") 135 | assert serve_response.content_type == "application/epub+zip" 136 | 137 | # Validate that the correct file is served 138 | epub_path = os.path.join("tests", "epubs", "Test Book - Author One.epub") 139 | with open(epub_path, "rb") as f: 140 | expected_file_data = f.read() 141 | 142 | assert serve_response.data == expected_file_data # Confirm file content matches 143 | 144 | def test_stream_missing_db_record(client): 145 | """ 146 | Test streaming a book with a missing database 
entry, expecting a 404 response. 147 | """ 148 | # Act: Make a request with a non-existent book identifier 149 | response = client.get("/stream/nonexistent-book-id") 150 | 151 | # Assert: Check for 404 response and error message 152 | assert response.status_code == 404 153 | assert b"Book not found." in response.data 154 | 155 | def test_stream_missing_file(client, headers_media_test): 156 | """ 157 | Test streaming a book with a valid database entry but missing file, expecting a 404 response. 158 | """ 159 | import os 160 | import shutil 161 | 162 | book_identifier = "61cee114-a920-4427-809f-50da0678c004" # Valid book ID 163 | epub_path = os.path.join("tests", "epubs", "Test Book - Author One.epub") 164 | tmp_directory = os.path.join("tests", "epubs", "tmp_test_dir") 165 | 166 | try: 167 | # Move the file to simulate it being missing 168 | os.mkdir(tmp_directory) 169 | shutil.move(epub_path, tmp_directory) 170 | 171 | # Act: Make the streaming request 172 | response = client.get( 173 | f"/stream/{book_identifier}", 174 | headers=headers_media_test 175 | ) 176 | 177 | # Assert: Check for 404 response and correct error message 178 | assert response.status_code == 404 179 | assert b"ePub file not found." in response.data 180 | finally: 181 | # Clean up: Move the file back to its original location 182 | shutil.move(os.path.join(tmp_directory, "Test Book - Author One.epub"), epub_path) 183 | os.rmdir(tmp_directory) 184 | 185 | def test_serve_book_file_missing_file(client, headers_media_test): 186 | """ 187 | Test directly calling `serve_book_file` with a non-existent file, expecting a 404 response. 188 | """ 189 | # Act: Make a request to the `serve_book_file` endpoint with a missing file 190 | response = client.get( 191 | "/files/nonexistent.epub", 192 | headers=headers_media_test 193 | ) 194 | 195 | # Assert: Check for 404 response and appropriate error message 196 | assert response.status_code == 404 197 | assert b"File not found." 
in response.data -------------------------------------------------------------------------------- /backend/tests/test_routes_react.py: -------------------------------------------------------------------------------- 1 | def test_serve_react_manifest(client): 2 | """ 3 | Test if the manifest.json file is served correctly when it exists. 4 | """ 5 | # Simulate requesting a valid static file 6 | response = client.get("/manifest.json") 7 | 8 | # If the file actually exists, we expect a successful response 9 | assert response.status_code == 200 10 | assert "application/json" in response.content_type # Ensure correct MIME type 11 | 12 | 13 | def test_serve_react_placeholder_image(client): 14 | """ 15 | Test if a placeholder image is served correctly when it exists. 16 | """ 17 | # Simulate requesting a valid static image 18 | response = client.get("/placeholder.jpg") 19 | 20 | # If the file actually exists, we expect a successful response 21 | assert response.status_code == 200 22 | assert "image/jpeg" in response.content_type # Ensure correct MIME type 23 | 24 | 25 | def test_serve_react_fallback_to_index(client): 26 | """ 27 | Test fallback serving of index.html for unmatched paths. 
28 | """ 29 | # Simulate requesting a non-existent path 30 | response = client.get("/non-existent-route") 31 | 32 | # Ensure the fallback file (index.html) is served 33 | assert response.status_code == 200 34 | assert "text/html" in response.content_type # Ensure MIME type for HTML -------------------------------------------------------------------------------- /bookhaven_home.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/bookhaven_home.png -------------------------------------------------------------------------------- /compose.yml.example: -------------------------------------------------------------------------------- 1 | services: 2 | # The MySQL and Redis containers are optional if you already have your own DB in place. 3 | mysql: 4 | container_name: mysql 5 | image: mysql:latest 6 | env_file: 7 | - .env 8 | networks: 9 | - bookhaven-net 10 | volumes: 11 | - /path/to/mysql/storage:/var/lib/mysql 12 | healthcheck: 13 | test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] 14 | timeout: 20s 15 | retries: 10 16 | 17 | redis: 18 | container_name: redis 19 | image: redis:latest 20 | ports: 21 | - "6379:6379" 22 | networks: 23 | - bookhaven-net 24 | 25 | bookhaven: 26 | container_name: bookhaven 27 | image: hrbingr/bookhaven:latest 28 | env_file: 29 | - .env 30 | ports: 31 | - ${APP_PORT}:${APP_PORT} 32 | volumes: 33 | - /path/to/epub/directory:${BASE_DIRECTORY} 34 | networks: 35 | - bookhaven-net 36 | depends_on: 37 | mysql: 38 | condition: service_healthy 39 | 40 | networks: 41 | bookhaven-net: 42 | driver: bridge 43 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 
10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | -------------------------------------------------------------------------------- /frontend/eslint.config.js: -------------------------------------------------------------------------------- 1 | import js from '@eslint/js' 2 | import globals from 'globals' 3 | import reactHooks from 'eslint-plugin-react-hooks' 4 | import reactRefresh from 'eslint-plugin-react-refresh' 5 | import tseslint from 'typescript-eslint' 6 | 7 | export default tseslint.config( 8 | { ignores: ['dist'] }, 9 | { 10 | extends: [js.configs.recommended, ...tseslint.configs.recommended], 11 | files: ['**/*.{ts,tsx}'], 12 | languageOptions: { 13 | ecmaVersion: 2020, 14 | globals: globals.browser, 15 | }, 16 | plugins: { 17 | 'react-hooks': reactHooks, 18 | 'react-refresh': reactRefresh, 19 | }, 20 | rules: { 21 | ...reactHooks.configs.recommended.rules, 22 | 'react-refresh/only-export-components': [ 23 | 'warn', 24 | { allowConstantExport: true }, 25 | ], 26 | }, 27 | }, 28 | ) 29 | -------------------------------------------------------------------------------- /frontend/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | BookHaven 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 |
24 | 25 | 26 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "frontend", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "tsc -b && vite build", 9 | "build:dev": "tsc -b && vite build --mode development-build", 10 | "lint": "eslint .", 11 | "preview": "vite preview" 12 | }, 13 | "dependencies": { 14 | "@fortawesome/fontawesome-free": "^6.7.2", 15 | "@fortawesome/free-brands-svg-icons": "^6.7.2", 16 | "@fortawesome/free-regular-svg-icons": "^6.7.2", 17 | "@fortawesome/free-solid-svg-icons": "^6.7.2", 18 | "@fortawesome/react-fontawesome": "^0.2.2", 19 | "@react-spring/web": "^9.7.5", 20 | "@types/react-router-dom": "^5.3.3", 21 | "axios": "^1.7.9", 22 | "bootstrap": "^5.3.3", 23 | "epubjs": "^0.3.93", 24 | "jwt-decode": "^4.0.0", 25 | "qrcode.react": "^4.2.0", 26 | "react": "^18.3.1", 27 | "react-bootstrap": "^2.10.7", 28 | "react-dom": "^18.3.1", 29 | "react-reader": "^2.0.12", 30 | "react-router-dom": "^7.1.1", 31 | "react-transition-group": "^4.4.5", 32 | "scss": "^0.2.4" 33 | }, 34 | "devDependencies": { 35 | "@eslint/js": "^9.17.0", 36 | "@types/react": "^18.3.18", 37 | "@types/react-dom": "^18.3.5", 38 | "@vitejs/plugin-react-swc": "^3.5.0", 39 | "eslint": "^9.17.0", 40 | "eslint-plugin-react-hooks": "^5.0.0", 41 | "eslint-plugin-react-refresh": "^0.4.16", 42 | "globals": "^15.14.0", 43 | "sass-embedded": "^1.83.4", 44 | "typescript": "~5.6.2", 45 | "typescript-eslint": "^8.18.2", 46 | "vite": "^6.0.11" 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /frontend/public/icon-180x180.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/icon-180x180.png 
-------------------------------------------------------------------------------- /frontend/public/icon-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/icon-192x192.png -------------------------------------------------------------------------------- /frontend/public/icon-512x512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/icon-512x512.png -------------------------------------------------------------------------------- /frontend/public/icon.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Epub Reader", 3 | "short_name": "Reader", 4 | "start_url": "/", 5 | "display": "standalone", 6 | "background_color": "#ffffff", 7 | "theme_color": "#ffffff", 8 | "icons": [ 9 | { 10 | "src": "/icon-192x192.png", 11 | "sizes": "192x192", 12 | "type": "image/png" 13 | }, 14 | { 15 | "src": "/icon-512x512.png", 16 | "sizes": "512x512", 17 | "type": "image/png" 18 | }, 19 | { 20 | "src": "/icon-180x180.png", 21 | "sizes": "180x180", 22 | "type": "image/png", 23 | "purpose": "any" 24 | } 25 | ] 26 | } -------------------------------------------------------------------------------- /frontend/public/webfonts/fa-brands-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-brands-400.ttf -------------------------------------------------------------------------------- 
/frontend/public/webfonts/fa-brands-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-brands-400.woff2 -------------------------------------------------------------------------------- /frontend/public/webfonts/fa-regular-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-regular-400.ttf -------------------------------------------------------------------------------- /frontend/public/webfonts/fa-regular-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-regular-400.woff2 -------------------------------------------------------------------------------- /frontend/public/webfonts/fa-solid-900.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-solid-900.ttf -------------------------------------------------------------------------------- /frontend/public/webfonts/fa-solid-900.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-solid-900.woff2 -------------------------------------------------------------------------------- /frontend/public/webfonts/fa-v4compatibility.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-v4compatibility.ttf 
-------------------------------------------------------------------------------- /frontend/public/webfonts/fa-v4compatibility.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HrBingR/BookHaven/d7d6138facb8fa7c86ff008842d387dbe6184976/frontend/public/webfonts/fa-v4compatibility.woff2 -------------------------------------------------------------------------------- /frontend/src/App.css: -------------------------------------------------------------------------------- 1 | #root { 2 | max-width: 1280px; 3 | margin: 0 auto; 4 | padding: 2rem; 5 | text-align: center; 6 | } 7 | 8 | .logo { 9 | height: 6em; 10 | padding: 1.5em; 11 | will-change: filter; 12 | transition: filter 300ms; 13 | } 14 | .logo:hover { 15 | filter: drop-shadow(0 0 2em #646cffaa); 16 | } 17 | .logo.react:hover { 18 | filter: drop-shadow(0 0 2em #61dafbaa); 19 | } 20 | 21 | @keyframes logo-spin { 22 | from { 23 | transform: rotate(0deg); 24 | } 25 | to { 26 | transform: rotate(360deg); 27 | } 28 | } 29 | 30 | @media (prefers-reduced-motion: no-preference) { 31 | a:nth-of-type(2) .logo { 32 | animation: logo-spin infinite 20s linear; 33 | } 34 | } 35 | 36 | .card { 37 | padding: 2em; 38 | } 39 | 40 | .read-the-docs { 41 | color: #888; 42 | } 43 | -------------------------------------------------------------------------------- /frontend/src/App.tsx: -------------------------------------------------------------------------------- 1 | // src/App.tsx 2 | import React, { useState, useEffect } from 'react'; 3 | import {BrowserRouter as Router, Route, Routes, Navigate} from 'react-router-dom'; 4 | import { jwtDecode } from 'jwt-decode'; 5 | import Sidebar from './components/Sidebar'; 6 | import Home from './components/Home'; 7 | import Authors from './components/Authors'; 8 | import AuthorPage from './components/AuthorPage'; 9 | import Reader from "./components/Reader.tsx"; 10 | import Login from './components/Login.tsx'; 11 | import OTP 
from './components/Otp.tsx'; 12 | import '@fortawesome/fontawesome-free/css/all.min.css'; 13 | import { useConfig } from './context/ConfigProvider'; 14 | 15 | interface DecodedToken { 16 | token_type: string; 17 | user_is_admin: boolean; 18 | user_id: number; 19 | exp?: number; 20 | } 21 | 22 | interface DecodedCFToken { 23 | token_type: string; 24 | user_is_admin: boolean; 25 | user_id: number; 26 | iss: string; 27 | exp?: number; 28 | } 29 | 30 | const App: React.FC = () => { 31 | const [isLoggedIn, setIsLoggedIn] = useState(false); 32 | const [isAdmin, setIsAdmin] = useState(false); 33 | const { CF_ACCESS_AUTH } = useConfig(); 34 | 35 | useEffect(() => { 36 | const token = localStorage.getItem('token'); 37 | if (token) { 38 | try { 39 | const decoded: DecodedToken = jwtDecode(token); 40 | if (decoded.token_type === 'login') { 41 | setIsLoggedIn(true); 42 | } 43 | if (decoded.user_is_admin) { 44 | setIsAdmin(true) 45 | } 46 | } catch (err) { 47 | console.error('Invalid token or decoding error:', err); 48 | setIsLoggedIn(false); 49 | } 50 | } 51 | }, []); 52 | 53 | const handleLogin = (token: string) => { 54 | try { 55 | const decoded: DecodedToken = jwtDecode(token); 56 | 57 | if (decoded.token_type === 'login') { 58 | localStorage.setItem('token', token); 59 | setIsLoggedIn(true); 60 | } else if (decoded.token_type === 'totp') { 61 | localStorage.setItem('token', token); 62 | setIsLoggedIn(false); 63 | } else { 64 | throw new Error('Unknown token type.'); 65 | } 66 | } catch (error) { 67 | console.error('Failed to process token:', error); 68 | alert('Invalid login attempt. 
Please try again.'); 69 | setIsLoggedIn(false); 70 | } 71 | }; 72 | 73 | const handleLogout = () => { 74 | const token = localStorage.getItem('token'); 75 | if (!token) { 76 | // No token found, just do your standard logout flow 77 | setIsLoggedIn(false); 78 | return; 79 | } 80 | console.log(CF_ACCESS_AUTH) 81 | if (CF_ACCESS_AUTH) { 82 | const cf_auth_token_decoded: DecodedCFToken = jwtDecode(token); 83 | console.log(cf_auth_token_decoded) 84 | const baseUrl = cf_auth_token_decoded.iss.endsWith('/') ? cf_auth_token_decoded.iss.slice(0, -1) : cf_auth_token_decoded.iss; 85 | const logoutUrl = `${baseUrl}/cdn-cgi/access/logout`; 86 | console.log(`redirecting to: ${logoutUrl}`) 87 | window.location.href = logoutUrl 88 | // localStorage.removeItem('token'); 89 | // setIsLoggedIn(false); 90 | } else { 91 | localStorage.removeItem('token'); 92 | setIsLoggedIn(false); 93 | // window.location.reload(); // Optional: reload to reset state 94 | } 95 | }; 96 | 97 | const ProtectedRoute: React.FC<{ children: React.ReactNode }> = ({ children }) => { 98 | const token = localStorage.getItem('token'); 99 | if (!token) { 100 | return ; 101 | } 102 | 103 | try { 104 | const decoded: DecodedToken = jwtDecode(token); 105 | if (decoded.token_type !== 'login') { 106 | return ; 107 | } 108 | return <>{children}; 109 | } catch (err) { 110 | console.error('Token validation failed:', err); 111 | return ; 112 | } 113 | }; 114 | 115 | return ( 116 | 117 |
118 | 119 |
120 | 121 | } /> 122 | } /> 123 | } /> 124 | } /> 125 | } /> 126 | } /> 127 | 128 |
129 |
130 |
131 | ); 132 | }; 133 | 134 | export default App; -------------------------------------------------------------------------------- /frontend/src/assets/react.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/src/components/AuthorGridCell.tsx: -------------------------------------------------------------------------------- 1 | // AuthorGridCell.tsx 2 | import React from 'react'; 3 | import { useNavigate } from 'react-router-dom'; 4 | import { useSpring, animated } from '@react-spring/web'; 5 | import { Button } from 'react-bootstrap'; 6 | import './All.css'; 7 | import { useConfig } from '../context/ConfigProvider'; 8 | 9 | interface AuthorGridCellProps { 10 | letter: string; 11 | hasAuthors: boolean; 12 | isExpanded: boolean; 13 | toggleLetter: (letter: string) => void; 14 | authors: string[]; 15 | } 16 | 17 | const AuthorGridCell: React.FC = ({ 18 | letter, 19 | hasAuthors, 20 | isExpanded, 21 | toggleLetter, 22 | authors, 23 | }) => { 24 | const expandedHeight = Math.max(150, 50 + authors.length * 30); // Base height + 30px per author, minimum 150px 25 | const collapsedHeight = 85; // Fixed height when collapsed 26 | const styles = useSpring({ 27 | height: isExpanded ? `${expandedHeight}px` : `${collapsedHeight}px`, 28 | transform: isExpanded ? 'scale(1.02)' : 'scale(1)', 29 | config: { tension: 1000, friction: 40 }, 30 | }); 31 | 32 | const navigate = useNavigate(); 33 | const { UI_BASE_COLOR } = useConfig(); 34 | 35 | return ( 36 | 37 | 45 | 46 |
47 | {hasAuthors ? ( 48 |
49 | {authors.map((author, index) => ( 50 | 58 | ))} 59 |
60 | ) : ( 61 |
No authors available
62 | )} 63 |
64 |
65 | ); 66 | }; 67 | 68 | export default AuthorGridCell; -------------------------------------------------------------------------------- /frontend/src/components/AuthorPage.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect } from 'react'; 2 | import { useParams, Link, LinkProps } from 'react-router-dom'; 3 | import apiClient from '../utilities/apiClient'; 4 | import { Container, Row, Col, ButtonProps } from 'react-bootstrap'; 5 | import BookCard from './BookCard'; 6 | import { Book } from '../types'; 7 | import './All.css'; 8 | 9 | type ButtonLinkProps = ButtonProps & LinkProps; 10 | 11 | const RouterLink = React.forwardRef(({ to, ...rest }, ref) => ( 12 | 13 | )); 14 | RouterLink.displayName = 'RouterLink'; 15 | 16 | const groupAndSortBySeries = (books: Book[]) => { 17 | const grouped: { [key: string]: Book[] } = {}; 18 | 19 | books.forEach((book) => { 20 | const seriesName = book.series || "Standalone"; 21 | if (!grouped[seriesName]) { 22 | grouped[seriesName] = []; 23 | } 24 | grouped[seriesName].push(book); 25 | }); 26 | 27 | for (const series in grouped) { 28 | grouped[series].sort((a, b) => (a.seriesindex || 0) - (b.seriesindex || 0)); 29 | } 30 | 31 | return grouped; 32 | }; 33 | 34 | const kebabToTitleCase = (str: string | undefined): string => { 35 | if (!str) return ""; 36 | return str 37 | .split('-') 38 | .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) 39 | .join(' '); 40 | }; 41 | 42 | const AuthorPage: React.FC<{ isLoggedIn: boolean }> = ({isLoggedIn}) => { 43 | const { authorName } = useParams<{ authorName: string }>(); 44 | const [booksBySeries, setBooksBySeries] = useState<{ [key: string]: Book[] }>({}); 45 | const [loading, setLoading] = useState(true); 46 | const [error, setError] = useState(null); 47 | 48 | useEffect(() => { 49 | const fetchAuthorBooks = async () => { 50 | setLoading(true); 51 | setError(null); 52 | 53 | try { 54 | const response = await 
apiClient.get(`/api/authors/${authorName}`); 55 | const groupedBooks = groupAndSortBySeries(response.data.books); 56 | setBooksBySeries(groupedBooks); 57 | } catch (err) { 58 | console.error('Error fetching author books:', err); 59 | setError('Failed to load books for this author.'); 60 | } finally { 61 | setLoading(false); 62 | } 63 | }; 64 | 65 | fetchAuthorBooks(); 66 | }, [authorName]); 67 | 68 | if (loading) { 69 | return

Loading books...

; 70 | } 71 | 72 | if (error) { 73 | return

{error}

; 74 | } 75 | 76 | return ( 77 | 78 |
79 |
80 |

{kebabToTitleCase(authorName)}

81 | 82 | Back to Authors 83 | 84 |
85 |
86 | 87 | {/* Books by Series */} 88 | {Object.entries(booksBySeries).map(([seriesName, books]) => ( 89 |
90 |
91 | {seriesName !== "Standalone" ? seriesName : "Standalone Titles"} 92 |
93 | 94 | 95 | {books.map((book) => ( 96 | 105 | {}} isLoggedIn={isLoggedIn} /> 106 | 107 | ))} 108 | {books.length < 4 && 109 | Array.from({ length: 7 - books.length }, (_, index) => ( 110 | 119 |
120 | 121 | ))} 122 |
123 |
124 | ))} 125 |
126 | ); 127 | }; 128 | 129 | export default AuthorPage; -------------------------------------------------------------------------------- /frontend/src/components/Authors.tsx: -------------------------------------------------------------------------------- 1 | // Authors.tsx 2 | import React, { useState, useEffect } from 'react'; 3 | import apiClient from '../utilities/apiClient'; 4 | import { Button } from 'react-bootstrap'; 5 | import './All.css'; 6 | import AuthorGridCell from './AuthorGridCell'; 7 | import { useConfig } from '../context/ConfigProvider'; 8 | 9 | const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'.split(''); 10 | 11 | interface AuthorsData { 12 | [key: string]: string[]; 13 | } 14 | 15 | const Authors: React.FC = () => { 16 | const [authorsData, setAuthorsData] = useState( 17 | alphabet.reduce((acc, letter) => { 18 | acc[letter] = []; 19 | return acc; 20 | }, {} as AuthorsData) 21 | ); 22 | const [expandedLetters, setExpandedLetters] = useState>(new Set()); 23 | const [loading, setLoading] = useState(true); 24 | const [error, setError] = useState(null); 25 | const { UI_BASE_COLOR } = useConfig(); 26 | 27 | useEffect(() => { 28 | const fetchAuthors = async () => { 29 | try { 30 | setLoading(true); 31 | setError(null); 32 | 33 | const response = await apiClient.get('/api/authors'); 34 | const authors: string[] = response.data.authors; 35 | 36 | const groupedAuthors: AuthorsData = alphabet.reduce((acc, letter) => { 37 | acc[letter] = []; 38 | return acc; 39 | }, {} as AuthorsData); 40 | 41 | authors.forEach((author) => { 42 | const firstLetter = author[0].toUpperCase(); 43 | if (groupedAuthors[firstLetter]) { 44 | groupedAuthors[firstLetter].push(author); 45 | } 46 | }); 47 | 48 | setAuthorsData(groupedAuthors); 49 | } catch (err) { 50 | console.error('Error fetching authors:', err); 51 | setError('Failed to load authors list. 
Please try again.'); 52 | } finally { 53 | setLoading(false); 54 | } 55 | }; 56 | 57 | fetchAuthors(); 58 | }, []); 59 | 60 | const toggleLetter = (letter: string) => { 61 | setExpandedLetters((prev) => { 62 | const updated = new Set(prev); 63 | if (updated.has(letter)) { 64 | updated.delete(letter); 65 | } else { 66 | updated.add(letter); 67 | } 68 | return updated; 69 | }); 70 | }; 71 | 72 | const expandAll = () => { 73 | const lettersWithAuthors = new Set( 74 | alphabet.filter((letter) => authorsData[letter]?.length > 0) 75 | ); 76 | setExpandedLetters(lettersWithAuthors); 77 | }; 78 | 79 | const collapseAll = () => { 80 | setExpandedLetters(new Set()); 81 | }; 82 | 83 | console.group(`Authors render: ${new Date().toISOString()}`); 84 | console.log('Alphabet:', alphabet); 85 | console.log('Authors Data:', authorsData); 86 | console.log('Expanded Letters:', [...expandedLetters]); 87 | console.groupEnd(); 88 | 89 | if (loading) { 90 | return
Loading authors...
; 91 | } 92 | 93 | if (error) { 94 | return
{error}
; 95 | } 96 | 97 | return ( 98 |
99 |

Books by Author

100 |
{/* Separator line */} 101 |
102 | 105 | 108 |
109 | 110 |
111 | {alphabet.map((letter) => { 112 | const hasAuthors = authorsData[letter]?.length > 0; 113 | const isExpanded = expandedLetters.has(letter); 114 | const authors = authorsData[letter] || []; 115 | 116 | return ( 117 | 125 | ); 126 | })} 127 |
128 |
129 | ); 130 | }; 131 | 132 | export default Authors; -------------------------------------------------------------------------------- /frontend/src/components/Books.tsx: -------------------------------------------------------------------------------- 1 | // src/components/Books.tsx 2 | import React from 'react'; 3 | import BookCard from './BookCard'; 4 | import { Row, Col } from 'react-bootstrap'; 5 | import { Book } from '../types'; 6 | 7 | interface BooksProps { 8 | books: Book[]; 9 | refreshBooks: () => void; 10 | isLoggedIn: boolean; 11 | } 12 | 13 | const Books: React.FC = ({ books, refreshBooks, isLoggedIn }) => { 14 | return ( 15 | 16 | {books.map((book) => ( 17 | 26 | 27 | 28 | ))} 29 | {/* Add placeholders if the number of books is less than the grid capacity */} 30 | {books.length < 3 && 31 | Array.from({ length: 7 - books.length }, (_, index) => ( 32 | 41 |
42 | 43 | ))} 44 |
45 | ); 46 | }; 47 | 48 | export default Books; -------------------------------------------------------------------------------- /frontend/src/components/Home.tsx: -------------------------------------------------------------------------------- 1 | // src/components/Home.tsx 2 | import React, { useState, useEffect, useRef, useCallback } from 'react'; 3 | import apiClient from '../utilities/apiClient'; 4 | import SearchBar from './SearchBar'; 5 | import Books from './Books'; 6 | import { Container } from 'react-bootstrap'; 7 | import { Book } from '../types'; 8 | 9 | const CHUNK_SIZE = 18; 10 | const MAX_WINDOW_SIZE = 54; 11 | 12 | const Home: React.FC<{ isLoggedIn: boolean }> = ({ isLoggedIn }) => { 13 | const [books, setBooks] = useState([]); 14 | const [offset, setOffset] = useState(0); 15 | const [hasMore, setHasMore] = useState(true); 16 | const [loading, setLoading] = useState(false); 17 | const [searchTerm, setSearchTerm] = useState(''); 18 | const [favoritesQueried, setFavoritesQueried] = useState(false); 19 | const [finishedQueried, setFinishedQueried] = useState(false); 20 | const [unfinishedQueried, setUnfinishedQueried] = useState(false); 21 | 22 | const observerRef = useRef(null); 23 | const triggerRef = useRef(null); 24 | 25 | const fetchBooks = async (offset: number, limit: number): Promise => { 26 | try { 27 | const response = await apiClient.get('/api/books', { 28 | params: { 29 | query: searchTerm, 30 | offset, 31 | limit, 32 | favorites: favoritesQueried, 33 | finished: finishedQueried, 34 | unfinished: unfinishedQueried, 35 | }, 36 | }); 37 | console.log('API Response:', response); 38 | if (!response.data || !Array.isArray(response.data.books)) { 39 | console.error('Invalid API response:', response.data); 40 | return []; 41 | } 42 | const fetchedBooks = response.data.books; 43 | return fetchedBooks || []; 44 | } catch (error) { 45 | console.error('Error fetching books:', error); 46 | return []; 47 | } 48 | }; 49 | 50 | const fetchAndAppendBooks = 
useCallback(async () => { 51 | if (loading || !hasMore) return; 52 | 53 | setLoading(true); 54 | 55 | try { 56 | const newBooks = await fetchBooks(offset, CHUNK_SIZE); 57 | 58 | if (newBooks.length === 0) { 59 | setHasMore(false); 60 | return; 61 | } 62 | 63 | setBooks((prevBooks) => { 64 | const existingBookIds = new Set(prevBooks.map((book) => book.id)); 65 | const uniqueNewBooks = newBooks.filter((book) => !existingBookIds.has(book.id)); 66 | 67 | const updatedBooks = [...prevBooks, ...uniqueNewBooks]; 68 | return updatedBooks.length > MAX_WINDOW_SIZE 69 | ? updatedBooks.slice(CHUNK_SIZE) 70 | : updatedBooks; 71 | }); 72 | 73 | setOffset((prevOffset) => prevOffset + CHUNK_SIZE); 74 | } finally { 75 | setLoading(false); 76 | } 77 | }, [loading, hasMore, offset, searchTerm]); 78 | const refreshBooks = () => { 79 | setOffset(0); 80 | setHasMore(true); 81 | setBooks([]); 82 | }; 83 | 84 | useEffect(() => { 85 | setOffset(0); 86 | setHasMore(true); 87 | setBooks([]); 88 | }, [searchTerm]); 89 | 90 | useEffect(() => { 91 | const observerCallback: IntersectionObserverCallback = (entries) => { 92 | if (entries[0].isIntersecting && hasMore) { 93 | fetchAndAppendBooks(); 94 | } 95 | }; 96 | 97 | observerRef.current = new IntersectionObserver(observerCallback, { 98 | root: null, 99 | rootMargin: '20px', 100 | threshold: 0, 101 | }); 102 | 103 | if (triggerRef.current) { 104 | observerRef.current.observe(triggerRef.current); 105 | } 106 | 107 | return () => { 108 | if (observerRef.current) observerRef.current.disconnect(); 109 | }; 110 | }, [fetchAndAppendBooks, hasMore]); 111 | 112 | const handleSearch = (term: string) => { 113 | setSearchTerm(term); 114 | }; 115 | 116 | return ( 117 | 118 |
119 | { 125 | setFavoritesQueried((prev) => !prev); 126 | refreshBooks(); 127 | }} 128 | onFinishedToggle={() => { 129 | setFinishedQueried((prev) => !prev); 130 | setUnfinishedQueried(false) 131 | refreshBooks(); 132 | }} 133 | onUnfinishedToggle={() => { 134 | setUnfinishedQueried((prev) => !prev); 135 | setFinishedQueried(false) 136 | refreshBooks(); 137 | }} 138 | isLoggedIn={isLoggedIn} 139 | /> 140 | 141 | {loading && ( 142 |
143 |

Loading...

144 |
145 | )} 146 |
147 | {!hasMore && !loading && ( 148 |
149 | {finishedQueried || favoritesQueried ? ( 150 |

No books found with the selected filter.

151 | ) : ( 152 |

No more books to load.

153 | )} 154 |
155 | )} 156 |
157 | 158 | ); 159 | }; 160 | 161 | export default Home; -------------------------------------------------------------------------------- /frontend/src/components/Login.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect } from 'react'; 2 | import { Button, Form, Modal } from 'react-bootstrap'; 3 | import { useNavigate } from 'react-router-dom'; 4 | import apiClient from '../utilities/apiClient'; 5 | import { jwtDecode } from 'jwt-decode'; 6 | import { useConfig } from '../context/ConfigProvider'; 7 | 8 | interface DecodedToken { 9 | token_type: string; 10 | exp: number; 11 | } 12 | 13 | const Login: React.FC<{ onLogin: (token: string) => void }> = ({ onLogin }) => { 14 | const [username, setUsername] = useState(''); 15 | const [password, setPassword] = useState(''); 16 | const [error, setError] = useState(null); 17 | const navigate = useNavigate(); 18 | const { UI_BASE_COLOR, CF_ACCESS_AUTH, OIDC_ENABLED } = useConfig(); 19 | 20 | useEffect(() => { 21 | const params = new URLSearchParams(window.location.search); 22 | 23 | // If there's a token param, handle it 24 | const tokenFromUrl = params.get('token'); 25 | if (tokenFromUrl) { 26 | onLogin(tokenFromUrl); 27 | // Possibly navigate away if you want to hide the Login modal 28 | navigate('/'); 29 | } 30 | 31 | // If there's an error param, display it 32 | const errorFromUrl = params.get('error'); 33 | if (errorFromUrl) { 34 | setError(errorFromUrl); 35 | } 36 | }, [onLogin, navigate, setError]); 37 | 38 | 39 | const autoLogin = async () => { 40 | try { 41 | setError(null); 42 | // For example, call an endpoint that returns a Cloudflare-style token 43 | // or do any logic required to skip manual login 44 | const response = await apiClient.post('/login', {}); 45 | const token = response.data.token; 46 | onLogin(token); 47 | // Then navigate just like normal 48 | navigate('/'); 49 | } catch (err: any) { 50 | setError('Auto-login failed'); 51 | 
} 52 | }; 53 | 54 | useEffect(() => { 55 | if (CF_ACCESS_AUTH) { 56 | autoLogin(); 57 | } 58 | }, [CF_ACCESS_AUTH]); 59 | 60 | const handleOidc = async () => { 61 | try { 62 | setError(null); 63 | window.location.replace('/login/oidc') 64 | } catch (err: any) { 65 | setError('OIDC-login failed'); 66 | } 67 | }; 68 | 69 | const handleSubmit = async (e: React.FormEvent) => { 70 | e.preventDefault(); 71 | try { 72 | setError(null); // Reset errors 73 | const response = await apiClient.post('/login', { username, password }); 74 | 75 | const token = response.data.token; 76 | onLogin(token); 77 | const decoded: DecodedToken = jwtDecode(token); 78 | 79 | localStorage.setItem('token', token); 80 | 81 | if (decoded.token_type === 'login') { 82 | const redirectTo = localStorage.getItem('redirect') || '/'; 83 | localStorage.removeItem('redirect'); 84 | navigate(redirectTo); 85 | } else if (decoded.token_type === 'totp') { 86 | navigate('/otp'); 87 | } 88 | } catch (err: any) { 89 | const errorMessage = err.message || 'Unhandled error occurred. Please try again.'; 90 | setError(errorMessage); 91 | 92 | setTimeout(() => { 93 | setError(null); 94 | setUsername(''); 95 | setPassword(''); 96 | }, 3000); 97 | } 98 | }; 99 | 100 | return ( 101 | <> 102 | { !CF_ACCESS_AUTH && ( 103 | 104 | 105 | Login 106 | 107 | 108 |
109 | 110 | Username 111 | setUsername(e.target.value)} 116 | /> 117 | 118 | 119 | Password 120 | setPassword(e.target.value)} 125 | /> 126 | 127 | {error &&
{error}
} 128 | 131 | {OIDC_ENABLED && ( 132 | 135 | )} 136 |
137 |
138 |
139 | )} 140 | 141 | ); 142 | }; 143 | 144 | export default Login; -------------------------------------------------------------------------------- /frontend/src/components/Otp.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { Button, Form, Modal } from 'react-bootstrap'; 3 | import { useNavigate } from 'react-router-dom'; 4 | import apiClient from '../utilities/apiClient'; 5 | import { jwtDecode } from 'jwt-decode'; 6 | import { useConfig } from '../context/ConfigProvider'; 7 | 8 | interface DecodedToken { 9 | token_type: string; 10 | exp: number; 11 | } 12 | 13 | const OTP: React.FC = () => { 14 | const [otp, setOtp] = useState(''); 15 | const [error, setError] = useState(null); 16 | const navigate = useNavigate(); 17 | const { UI_BASE_COLOR } = useConfig(); 18 | 19 | const handleSubmit = async (e: React.FormEvent) => { 20 | e.preventDefault(); 21 | try { 22 | setError(null); 23 | const response = await apiClient.post('/login/check-otp', { otp }); 24 | 25 | const token = response.data.token; 26 | const decoded: DecodedToken = jwtDecode(token); 27 | 28 | if (decoded.token_type === 'login') { 29 | localStorage.setItem('token', token); 30 | const redirectTo = localStorage.getItem('redirect') || '/'; 31 | localStorage.removeItem('redirect'); 32 | navigate(redirectTo); 33 | window.location.reload(); 34 | } else { 35 | throw new Error('Unexpected token type.'); 36 | } 37 | } catch (err: any) { 38 | setError(err.response?.data?.message || 'Invalid OTP provided.'); 39 | } 40 | }; 41 | 42 | return ( 43 | 44 | 45 | Verify OTP 46 | 47 | 48 |
49 | 50 | OTP 51 | setOtp(e.target.value)} 56 | /> 57 | 58 | {error &&
{error}
} 59 | 62 |
63 |
64 |
65 | ); 66 | }; 67 | 68 | export default OTP; -------------------------------------------------------------------------------- /frontend/src/components/Reader.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useRef, useState } from "react"; 2 | import { useParams } from "react-router-dom"; 3 | import { ReactReader } from "react-reader"; 4 | import apiClient from '../utilities/apiClient'; 5 | import "./All.css"; 6 | 7 | const Reader: React.FC = () => { 8 | const { identifier } = useParams<{ identifier: string }>(); 9 | const [epubUrl, setEpubUrl] = useState(""); 10 | const [location, setLocation] = useState(null); 11 | const [fontSize, setFontSize] = useState(16); 12 | const renditionRef = useRef(null); 13 | 14 | useEffect(() => { 15 | const fetchBookDetails = async () => { 16 | try { 17 | const bookResponse = await apiClient.get(`/api/books/${identifier}`); 18 | const streamResponse = await apiClient.get(`/stream/${identifier}`); 19 | const { progress } = bookResponse.data; 20 | const { url } = streamResponse.data; 21 | console.log(url); 22 | 23 | setEpubUrl(url); 24 | if (progress) setLocation(progress); 25 | } catch (err) { 26 | console.error("Error occurred while fetching book details:", err); 27 | } 28 | }; 29 | 30 | fetchBookDetails(); 31 | }, [identifier]); 32 | 33 | const saveProgress = async (cfi: string) => { 34 | try { 35 | await apiClient.put(`/api/books/${identifier}/progress_state`, { progress: cfi }); 36 | } catch (err) { 37 | console.error("Error saving reading progress:", err); 38 | } 39 | }; 40 | 41 | const onLocationChange = (cfi: string) => { 42 | setLocation(cfi); 43 | saveProgress(cfi); 44 | }; 45 | 46 | const increaseFontSize = () => { 47 | setFontSize((prevFontSize) => { 48 | const newFontSize = prevFontSize + 2; 49 | updateFontSize(newFontSize); 50 | return newFontSize; 51 | }); 52 | }; 53 | 54 | const decreaseFontSize = () => { 55 | setFontSize((prevFontSize) => { 56 | 
const newFontSize = prevFontSize > 10 ? prevFontSize - 2 : prevFontSize; 57 | updateFontSize(newFontSize); 58 | return newFontSize; 59 | }); 60 | }; 61 | 62 | const updateFontSize = (size: number) => { 63 | if (renditionRef.current) { 64 | renditionRef.current.themes.fontSize(`${size}px`); 65 | } 66 | }; 67 | 68 | return ( 69 |
70 |
71 |
72 | 73 | 74 |
75 | {epubUrl && ( 76 | { 81 | renditionRef.current = rendition; 82 | renditionRef.current.themes.default({ 83 | body: { 84 | overflow: "hidden", 85 | }, 86 | }); 87 | updateFontSize(fontSize); 88 | renditionRef.current.flow("scrolled"); 89 | }} 90 | /> 91 | )} 92 |
93 |
94 | ); 95 | }; 96 | 97 | export default Reader; -------------------------------------------------------------------------------- /frontend/src/components/SearchBar.tsx: -------------------------------------------------------------------------------- 1 | // src/components/SearchBar.tsx 2 | import React, { useState } from 'react'; 3 | import { Form, FormControl, Button, ButtonGroup, InputGroup, Alert } from 'react-bootstrap'; 4 | import './All.css'; // Import the CSS file 5 | import { useConfig } from '../context/ConfigProvider'; 6 | import apiClient from "../utilities/apiClient.ts"; 7 | 8 | interface SearchBarProps { 9 | onSearch: (term: string) => void; 10 | favoritesActive: boolean; 11 | finishedActive: boolean; 12 | unFinishedActive: boolean; 13 | onFavoritesToggle: () => void; 14 | onFinishedToggle: () => void; 15 | onUnfinishedToggle: () => void; 16 | isLoggedIn: boolean; 17 | } 18 | 19 | const SearchBar: React.FC = ({ 20 | onSearch, 21 | favoritesActive, 22 | finishedActive, 23 | unFinishedActive, 24 | onFavoritesToggle, 25 | onFinishedToggle, 26 | onUnfinishedToggle, 27 | isLoggedIn }) => { 28 | const [searchTerm, setSearchTerm] = useState(''); 29 | const [showAlert, setShowAlert] = useState(false); 30 | const [alertMessage, setAlertMessage] = useState(''); 31 | const { UI_BASE_COLOR } = useConfig(); 32 | 33 | const handleSubmit = (e: React.FormEvent) => { 34 | e.preventDefault(); 35 | onSearch(searchTerm); 36 | }; 37 | const handleScan = async () => { 38 | try { 39 | // 1) Trigger the scan and get the task ID 40 | const response = await apiClient.post('/scan-library', {}); 41 | const taskId = response.data.task_id; 42 | 43 | // 2) Show a "scanning" message 44 | setAlertMessage("Scanning Library..."); 45 | setShowAlert(true); 46 | 47 | // 3) Poll for completion 48 | let isCompleted = false; 49 | while (!isCompleted) { 50 | const statusResp = await apiClient.get(`/scan-status/${taskId}`); 51 | const taskState = statusResp.data.state; 52 | if (taskState === 
"SUCCESS" || taskState === "FAILURE") { 53 | isCompleted = true; 54 | } else { 55 | // Wait a bit before checking again 56 | await new Promise(resolve => setTimeout(resolve, 1000)); 57 | } 58 | } 59 | 60 | // 4) Show success message (or handle failure) 61 | setAlertMessage("Library scan complete! Please refresh to see results."); 62 | // At this point you can reload or just let users stay on the page 63 | } catch (err: any) { 64 | alert(err.message); 65 | } 66 | }; 67 | 68 | 69 | return ( 70 |
71 |
72 | 73 | setSearchTerm(e.target.value)} 78 | className="search-bar" 79 | /> 80 | 83 | 92 | 93 |
94 | 95 | {isLoggedIn && ( 96 |
97 | 98 | 104 | 110 | 116 | {showAlert && setShowAlert(false)} dismissible>{alertMessage}} 117 | 118 |
119 | )} 120 |
121 | ); 122 | }; 123 | 124 | export default SearchBar; -------------------------------------------------------------------------------- /frontend/src/components/Sidebar.tsx: -------------------------------------------------------------------------------- 1 | // src/components/Sidebar.tsx 2 | import React, { useState, useEffect } from 'react'; 3 | import { Link } from 'react-router-dom'; 4 | import { Button } from 'react-bootstrap'; 5 | import AccountModal from './AccountModal'; 6 | import AdminModal from './AdminModal'; 7 | import {useConfig} from "../context/ConfigProvider.tsx"; 8 | 9 | const Sidebar: React.FC<{ isLoggedIn: boolean, isAdmin: boolean, onLogout: () => void }> = ({ isLoggedIn, isAdmin, onLogout }) => { 10 | const [isOpen, setIsOpen] = useState(true); 11 | const [isMobileView, setIsMobileView] = useState(false); 12 | const [showAccountModal, setShowAccountModal] = useState(false); 13 | const [showAdminModal, setShowAdminModal] = useState(false); 14 | const { UI_BASE_COLOR } = useConfig(); 15 | 16 | useEffect(() => { 17 | const handleResize = () => { 18 | if (window.innerWidth < 780) { 19 | setIsMobileView(true); 20 | setIsOpen(false); 21 | } else { 22 | setIsMobileView(false); 23 | setIsOpen(true); 24 | } 25 | }; 26 | handleResize(); 27 | window.addEventListener('resize', handleResize); 28 | return () => window.removeEventListener('resize', handleResize); 29 | }, []) 30 | 31 | const toggleSidebar = () => { 32 | setIsOpen(!isOpen); 33 | }; 34 | 35 | return ( 36 | <> 37 | {/* Sidebar */} 38 |
47 |
BookHaven
48 |
49 | 51 | Home 52 | 53 | 55 | Authors 56 | 57 | {isLoggedIn && ( 58 | <> 59 | 71 | {isAdmin && ( 72 | 84 | )} 85 | 86 | )} 87 |
88 | {isLoggedIn && ( 89 |
97 | 104 | 105 |
)} 106 |
107 | {isMobileView && ( 108 | 126 | )} 127 | {/* Account Modal */} 128 | {isLoggedIn && ( 129 | setShowAccountModal(false)} show={showAccountModal} /> 130 | )} 131 | {isLoggedIn && isAdmin && ( 132 | setShowAdminModal(false)} show={showAdminModal} /> 133 | )} 134 | 135 | ); 136 | }; 137 | 138 | export default Sidebar; -------------------------------------------------------------------------------- /frontend/src/context/ConfigProvider.tsx: -------------------------------------------------------------------------------- 1 | // src/context/ConfigProvider.tsx 2 | import React, { createContext, useContext, useEffect, useState } from 'react'; 3 | import { fetchApiConfig } from '../utilities/fetchApiConfig'; // Import your helper 4 | 5 | // Define the Config interface 6 | export interface ConfigContextType { 7 | UI_BASE_COLOR: string; 8 | CF_ACCESS_AUTH: boolean; 9 | OIDC_ENABLED: boolean; 10 | } 11 | 12 | // Create Context 13 | const ConfigContext = createContext(null); 14 | 15 | // Provider Component 16 | export const ConfigProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => { 17 | const [config, setConfig] = useState(null); 18 | 19 | // Fetch configuration during initialization 20 | useEffect(() => { 21 | const loadConfig = async () => { 22 | try { 23 | const runtimeConfig = await fetchApiConfig(); 24 | setConfig(runtimeConfig); // Cache the config in state 25 | } catch (error) { 26 | console.error('Failed to load configuration:', error); 27 | } 28 | }; 29 | loadConfig(); 30 | }, []); 31 | 32 | // Render loading state until config is loaded 33 | if (!config) { 34 | return
Loading configuration...
; 35 | } 36 | 37 | return ( 38 | {children} 39 | ); 40 | }; 41 | 42 | // Hook to access the configuration 43 | export const useConfig = () => { 44 | const context = useContext(ConfigContext); 45 | if (!context) { 46 | throw new Error('useConfig must be used within a ConfigProvider'); 47 | } 48 | return context; 49 | }; -------------------------------------------------------------------------------- /frontend/src/index.css: -------------------------------------------------------------------------------- 1 | :root { 2 | font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif; 3 | line-height: 1.5; 4 | font-weight: 400; 5 | 6 | color-scheme: light dark; 7 | color: rgba(255, 255, 255, 0.87); 8 | background-color: #242424; 9 | 10 | font-synthesis: none; 11 | text-rendering: optimizeLegibility; 12 | -webkit-font-smoothing: antialiased; 13 | -moz-osx-font-smoothing: grayscale; 14 | } 15 | 16 | a { 17 | font-weight: 500; 18 | color: #646cff; 19 | text-decoration: inherit; 20 | } 21 | a:hover { 22 | color: #535bf2; 23 | } 24 | 25 | body { 26 | margin: 0; 27 | display: flex; 28 | place-items: center; 29 | min-width: 320px; 30 | min-height: 100vh; 31 | } 32 | 33 | h1 { 34 | font-size: 3.2em; 35 | line-height: 1.1; 36 | } 37 | 38 | button { 39 | border-radius: 8px; 40 | border: 1px solid transparent; 41 | padding: 0.6em 1.2em; 42 | font-size: 1em; 43 | font-weight: 500; 44 | font-family: inherit; 45 | background-color: #1a1a1a; 46 | cursor: pointer; 47 | transition: border-color 0.25s; 48 | } 49 | button:hover { 50 | border-color: #646cff; 51 | } 52 | button:focus, 53 | button:focus-visible { 54 | outline: 4px auto -webkit-focus-ring-color; 55 | } 56 | 57 | @media (prefers-color-scheme: light) { 58 | :root { 59 | color: #213547; 60 | background-color: #ffffff; 61 | } 62 | a:hover { 63 | color: #747bff; 64 | } 65 | button { 66 | background-color: #f9f9f9; 67 | } 68 | } 69 | -------------------------------------------------------------------------------- 
/frontend/src/main.tsx: -------------------------------------------------------------------------------- 1 | import { StrictMode } from 'react' 2 | import { createRoot } from 'react-dom/client' 3 | import './index.css' 4 | import "./styles/custom-bootstrap.scss" 5 | import App from './App.tsx' 6 | import { ConfigProvider } from './context/ConfigProvider'; 7 | // Entry point: mounts the React tree at #root (the '!' assumes index.html always provides that element). 8 | createRoot(document.getElementById('root')!).render( 9 | 10 | 11 | 12 | 13 | , 14 | ) 15 | -------------------------------------------------------------------------------- /frontend/src/styles/custom-bootstrap.scss: -------------------------------------------------------------------------------- 1 | @import "bootstrap/scss/functions"; 2 | @import "bootstrap/scss/variables"; 3 | @import "bootstrap/scss/mixins"; 4 | 5 | $theme-colors: map-merge($theme-colors, ( 6 | "pink": $pink, 7 | "purple": $purple, 8 | "orange": $orange, 9 | "cyan": $cyan, 10 | )); 11 | @import "bootstrap/scss/bootstrap"; 12 | 13 | .btn-orange, .btn-pink, .btn-purple, .btn-cyan { 14 | color: #fff; 15 | &:hover { 16 | color: #fff; 17 | } 18 | } 19 | 20 | .btn-outline-orange, .btn-outline-pink, .btn-outline-purple, .btn-outline-cyan { 21 | &:hover { 22 | color: #fff; 23 | } 24 | } 25 | 26 | .alert { 27 | --bs-alert-padding-x: 0.75rem; 28 | --bs-alert-padding-y: 0.375rem; 29 | margin-bottom: 0; 30 | margin-left: 10px; 31 | } 32 | 33 | .alert-dismissible .btn-close { 34 | padding: 0.6rem 1rem; 35 | } -------------------------------------------------------------------------------- /frontend/src/types.ts: -------------------------------------------------------------------------------- 1 | // src/types.ts — shared data shapes used by the frontend components. 2 | export interface Book { // NOTE(review): field names mix camelCase and snake_case, presumably mirroring the API payload — confirm before renaming 3 | id: number; 4 | title: string; 5 | authors: string[]; 6 | series: string; 7 | seriesindex: number; // position of the book within its series — assumed; confirm against backend 8 | coverUrl: string; 9 | relative_path: string; // path of the epub relative to the library root — assumed; confirm against backend 10 | identifier: string; 11 | is_finished: boolean; 12 | marked_favorite: boolean; 13 | }
-------------------------------------------------------------------------------- /frontend/src/utilities/apiClient.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | // Shared Axios client for the backend API: same-origin base URL, cookies sent with every request. 3 | const apiClient = axios.create({ 4 | baseURL: `${window.location.origin}`, 5 | withCredentials: true, 6 | }); 7 | // Request interceptor: attach the stored bearer token, when present, to every outgoing request. 8 | apiClient.interceptors.request.use( 9 | (config) => { 10 | const token = localStorage.getItem('token'); 11 | if (token) { 12 | config.headers.Authorization = `Bearer ${token}`; 13 | } 14 | return config; 15 | }, 16 | (error) => { 17 | return Promise.reject(error); 18 | } 19 | ); 20 | // Response interceptor: normalize errors into { status, data, message } and handle expired sessions. 21 | apiClient.interceptors.response.use( 22 | (response) => response, 23 | (error) => { 24 | if (error.response) { 25 | const status = error.response.status; 26 | const data = error.response.data; 27 | // 401: the token is stale — clear it and send the user to login, remembering where they were. 28 | if (status === 401) { 29 | localStorage.removeItem('token'); 30 | const redirectUrl = window.location.pathname + window.location.search; // keep the query string so login can restore the exact page 31 | window.location.href = `/login?redirect=${encodeURIComponent(redirectUrl)}`; 32 | } 33 | 34 | return Promise.reject({ 35 | status, 36 | data, 37 | message: data?.error || error.message, 38 | }); 39 | } 40 | // No response at all (network failure, CORS, timeout): surface a generic error shape. 41 | return Promise.reject({ 42 | status: null, 43 | data: null, 44 | message: error.message || 'An unexpected error occurred', 45 | }); 46 | } 47 | ); 48 | 49 | export default apiClient; -------------------------------------------------------------------------------- /frontend/src/utilities/fetchApiConfig.ts: -------------------------------------------------------------------------------- 1 | import { ConfigContextType } from '../context/ConfigProvider'; 2 | // Fetch the runtime UI configuration from the backend; throws on HTTP failure or a missing required key. 3 | export const fetchApiConfig = async (): Promise<ConfigContextType> => { 4 | const response = await fetch('/api/react-init'); 5 | if (!response.ok) { 6 | throw new Error(`Failed to fetch configuration: ${response.statusText}`); 7 | } 8 | const data = await response.json(); 9 | 10 | if (!data.UI_BASE_COLOR) { 11 | throw new Error('Missing required configuration property:
UI_BASE_COLOR'); 12 | } 13 | 14 | return data; 15 | }; -------------------------------------------------------------------------------- /frontend/src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// <reference types="vite/client" /> 2 | -------------------------------------------------------------------------------- /frontend/tsconfig.app.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", 4 | "target": "ES2020", 5 | "useDefineForClassFields": true, 6 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 7 | "module": "ESNext", 8 | "skipLibCheck": true, 9 | 10 | /* Bundler mode */ 11 | "moduleResolution": "bundler", 12 | "allowImportingTsExtensions": true, 13 | "isolatedModules": true, 14 | "moduleDetection": "force", 15 | "noEmit": true, 16 | "jsx": "react-jsx", 17 | 18 | /* Linting */ 19 | "strict": true, 20 | "noUnusedLocals": true, 21 | "noUnusedParameters": true, 22 | "noFallthroughCasesInSwitch": true, 23 | "noUncheckedSideEffectImports": true 24 | }, 25 | "include": ["src"] 26 | } 27 | -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [], 3 | "references": [ 4 | { "path": "./tsconfig.app.json" }, 5 | { "path": "./tsconfig.node.json" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /frontend/tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", 4 | "target": "ES2022", 5 | "lib": ["ES2023"], 6 | "module": "ESNext", 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "bundler", 11 | "allowImportingTsExtensions": true, 12 | "isolatedModules":
true, 13 | "moduleDetection": "force", 14 | "noEmit": true, 15 | 16 | /* Linting */ 17 | "strict": true, 18 | "noUnusedLocals": true, 19 | "noUnusedParameters": true, 20 | "noFallthroughCasesInSwitch": true, 21 | "noUncheckedSideEffectImports": true 22 | }, 23 | "include": ["vite.config.ts"] 24 | } 25 | -------------------------------------------------------------------------------- /frontend/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite' 2 | import react from '@vitejs/plugin-react-swc' 3 | 4 | // https://vite.dev/config/ 5 | export default defineConfig(({ mode }) => ({ 6 | plugins: [react()], 7 | resolve: { 8 | alias: { 9 | '/webfonts': '/public/webfonts', // serve /webfonts requests from the bundled public/webfonts directory 10 | }, 11 | }, 12 | server: { 13 | proxy: { 14 | // Proxy /api, /download, /stream, and /files requests to the Flask backend during development. NOTE(review): the 10.0.0.35 target is a hard-coded LAN IP — consider reading it from an env var. 15 | '/api': { 16 | target: 'http://10.0.0.35:5000', // Flask dev server 17 | changeOrigin: true, 18 | }, 19 | '/download': { 20 | target: 'http://10.0.0.35:5000', // Flask dev server 21 | changeOrigin: true, 22 | }, 23 | '/stream': { 24 | target: 'http://10.0.0.35:5000', 25 | changeOrigin: true, 26 | }, 27 | '/files': { 28 | target: 'http://127.0.0.1:5000', // NOTE(review): differs from the 10.0.0.35 target used above — confirm this split is intentional 29 | changeOrigin: true, 30 | }, 31 | }, 32 | host: true, 33 | strictPort: true, 34 | port: 5173, 35 | }, 36 | build: { 37 | minify: mode === 'production', // Disable minification for dev builds 38 | sourcemap: mode !== 'production', // Enable source maps for easier debugging 39 | }, 40 | css: { 41 | preprocessorOptions: { 42 | scss: {} 43 | } 44 | } 45 | })); --------------------------------------------------------------------------------