├── .dockerignore
├── .env.example
├── .gitattributes
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── workflows
│       ├── apidocs.yml
│       ├── ci.yml
│       └── trigger-release.yml
├── .gitignore
├── .pre-commit-config.yaml
├── Dockerfile
├── LICENSE
├── README.md
├── alembic.ini
├── client
│   ├── .eslintrc.cjs
│   ├── .gitignore
│   ├── .npmrc
│   ├── .prettierrc
│   ├── README.MD
│   ├── icons
│   │   ├── spool.svg
│   │   ├── spoolman.svg
│   │   └── spoolman_monochrome.svg
│   ├── index.html
│   ├── package-lock.json
│   ├── package.json
│   ├── public
│   │   ├── apple-touch-icon.png
│   │   ├── favicon.ico
│   │   ├── favicon.svg
│   │   ├── icon512_maskable.png
│   │   ├── icon512_rounded.png
│   │   ├── kofi_s_logo_nolabel.png
│   │   ├── locales
│   │   │   ├── cs
│   │   │   │   └── common.json
│   │   │   ├── da
│   │   │   │   └── common.json
│   │   │   ├── de
│   │   │   │   └── common.json
│   │   │   ├── el
│   │   │   │   └── common.json
│   │   │   ├── en
│   │   │   │   └── common.json
│   │   │   ├── es
│   │   │   │   └── common.json
│   │   │   ├── et
│   │   │   │   └── common.json
│   │   │   ├── fa
│   │   │   │   └── common.json
│   │   │   ├── fr
│   │   │   │   └── common.json
│   │   │   ├── hu
│   │   │   │   └── common.json
│   │   │   ├── it
│   │   │   │   └── common.json
│   │   │   ├── ja
│   │   │   │   └── common.json
│   │   │   ├── lt
│   │   │   │   └── common.json
│   │   │   ├── nb-NO
│   │   │   │   └── common.json
│   │   │   ├── nl
│   │   │   │   └── common.json
│   │   │   ├── pl
│   │   │   │   └── common.json
│   │   │   ├── pt-BR
│   │   │   │   └── common.json
│   │   │   ├── pt
│   │   │   │   └── common.json
│   │   │   ├── ro
│   │   │   │   └── common.json
│   │   │   ├── ru
│   │   │   │   └── common.json
│   │   │   ├── sv
│   │   │   │   └── common.json
│   │   │   ├── ta
│   │   │   │   └── common.json
│   │   │   ├── th
│   │   │   │   └── common.json
│   │   │   ├── uk
│   │   │   │   └── common.json
│   │   │   ├── zh-Hant
│   │   │   │   └── common.json
│   │   │   └── zh
│   │   │       └── common.json
│   │   └── manifest.json
│   ├── scripts
│   │   └── check-i18n.js
│   ├── src
│   │   ├── App.tsx
│   │   ├── components
│   │   │   ├── column.tsx
│   │   │   ├── dataProvider.ts
│   │   │   ├── dateTimePicker.tsx
│   │   │   ├── extraFields.tsx
│   │   │   ├── favicon.tsx
│   │   │   ├── filamentImportModal.tsx
│   │   │   ├── header
│   │   │   │   └── index.tsx
│   │   │   ├── index.ts
│   │   │   ├── inputNumberRange.tsx
│   │   │   ├── layout.tsx
│   │   │   ├── liveProvider.ts
│   │   │   ├── liveify.ts
│   │   │   ├── multiColorPicker.tsx
│   │   │   ├── notificationProvider.tsx
│   │   │   ├── numberField.tsx
│   │   │   ├── otherModels.tsx
│   │   │   ├── qrCodeScanner.tsx
│   │   │   ├── spoolIcon.css
│   │   │   ├── spoolIcon.tsx
│   │   │   └── version.tsx
│   │   ├── contexts
│   │   │   └── color-mode
│   │   │       └── index.tsx
│   │   ├── i18n.ts
│   │   ├── icon.svg
│   │   ├── index.tsx
│   │   ├── pages
│   │   │   ├── filaments
│   │   │   │   ├── create.tsx
│   │   │   │   ├── edit.tsx
│   │   │   │   ├── functions.ts
│   │   │   │   ├── index.ts
│   │   │   │   ├── list.tsx
│   │   │   │   ├── model.tsx
│   │   │   │   └── show.tsx
│   │   │   ├── help
│   │   │   │   └── index.tsx
│   │   │   ├── home
│   │   │   │   └── index.tsx
│   │   │   ├── locations
│   │   │   │   ├── components
│   │   │   │   │   ├── location.tsx
│   │   │   │   │   ├── locationContainer.tsx
│   │   │   │   │   ├── spoolCard.tsx
│   │   │   │   │   └── spoolList.tsx
│   │   │   │   ├── dnd.ts
│   │   │   │   ├── functions.ts
│   │   │   │   ├── index.tsx
│   │   │   │   └── locations.css
│   │   │   ├── printing
│   │   │   │   ├── index.tsx
│   │   │   │   ├── printing.tsx
│   │   │   │   ├── printingDialog.tsx
│   │   │   │   ├── qrCodePrintingDialog.tsx
│   │   │   │   ├── spoolQrCodePrintingDialog.tsx
│   │   │   │   └── spoolSelectModal.tsx
│   │   │   ├── settings
│   │   │   │   ├── extraFieldsSettings.tsx
│   │   │   │   ├── generalSettings.tsx
│   │   │   │   └── index.tsx
│   │   │   ├── spools
│   │   │   │   ├── create.tsx
│   │   │   │   ├── edit.tsx
│   │   │   │   ├── functions.tsx
│   │   │   │   ├── index.ts
│   │   │   │   ├── list.tsx
│   │   │   │   ├── model.tsx
│   │   │   │   └── show.tsx
│   │   │   └── vendors
│   │   │       ├── create.tsx
│   │   │       ├── edit.tsx
│   │   │       ├── functions.ts
│   │   │       ├── index.ts
│   │   │       ├── list.tsx
│   │   │       ├── model.tsx
│   │   │       └── show.tsx
│   │   ├── utils
│   │   │   ├── filtering.ts
│   │   │   ├── overrides.css
│   │   │   ├── parsing.tsx
│   │   │   ├── queryExternalDB.ts
│   │   │   ├── queryFields.ts
│   │   │   ├── querySettings.ts
│   │   │   ├── saveload.ts
│   │   │   ├── settings.ts
│   │   │   ├── sorting.ts
│   │   │   ├── support.ts
│   │   │   └── url.ts
│   │   └── vite-env.d.ts
│   ├── tsconfig.json
│   ├── tsconfig.node.json
│   └── vite.config.ts
├── entrypoint.sh
├── migrations
│   ├── README.md
│   ├── __init__.py
│   ├── env.py
│   ├── script.py.mako
│   └── versions
│       ├── 2023_05_27_2146-684d32cf7e4d_initial.py
│       ├── 2023_05_28_2136-b47376d60c6d_add_extruder_and_bed_temperature_.py
│       ├── 2023_06_01_1953-db385b808a20_add_filament_color_code.py
│       ├── 2023_07_14_1217-92186a5f7b0f_add_spool_archived_field.py
│       ├── 2023_08_12_2121-92793c8a937c_color_hex_alpha.py
│       ├── 2023_12_30_0839-b82cd9e2aa6f_added_spool_prices.py
│       ├── 2024_01_03_1346-ccbb17aeda7c_added_settings_table.py
│       ├── 2024_01_04_2209-b8881bdb716c_added_extra_fields.py
│       ├── 2024_03_26_0948-aafcd7fb0e84_spool_weights.py
│       ├── 2024_03_26_1349-304a32906234_spool_weight_population.py
│       ├── 2024_03_26_1507-5f069e51bd89_vendor_empty_spool_weight.py
│       ├── 2024_05_12_1930-395d560284b3_added_external_id.py
│       ├── 2024_05_28_1846-415a8f855e14_multi_colors.py
│       └── __init__.py
├── pdm.lock
├── pyproject.toml
├── scripts
│   ├── install.sh
│   └── start.sh
├── spoolman
│   ├── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   └── v1
│   │       ├── __init__.py
│   │       ├── export.py
│   │       ├── externaldb.py
│   │       ├── field.py
│   │       ├── filament.py
│   │       ├── models.py
│   │       ├── other.py
│   │       ├── router.py
│   │       ├── setting.py
│   │       ├── spool.py
│   │       └── vendor.py
│   ├── bump.py
│   ├── client.py
│   ├── database
│   │   ├── __init__.py
│   │   ├── database.py
│   │   ├── filament.py
│   │   ├── models.py
│   │   ├── setting.py
│   │   ├── spool.py
│   │   ├── utils.py
│   │   └── vendor.py
│   ├── docs.py
│   ├── env.py
│   ├── exceptions.py
│   ├── export.py
│   ├── externaldb.py
│   ├── extra_fields.py
│   ├── filecache.py
│   ├── main.py
│   ├── math.py
│   ├── prometheus
│   │   ├── __init__.py
│   │   └── metrics.py
│   ├── settings.py
│   └── ws.py
└── tests_integration
    ├── Dockerfile
    ├── __init__.py
    ├── docker-compose-cockroachdb.yml
    ├── docker-compose-mariadb.yml
    ├── docker-compose-postgres.yml
    ├── docker-compose-sqlite.yml
    ├── requirements.txt
    ├── run.py
    └── tests
        ├── __init__.py
        ├── conftest.py
        ├── fields
        │   ├── __init__.py
        │   ├── test_create.py
        │   ├── test_delete.py
        │   ├── test_get.py
        │   └── test_utilize.py
        ├── filament
        │   ├── __init__.py
        │   ├── test_add.py
        │   ├── test_delete.py
        │   ├── test_find.py
        │   ├── test_get.py
        │   └── test_update.py
        ├── setting
        │   ├── __init__.py
        │   ├── test_get.py
        │   └── test_set.py
        ├── spool
        │   ├── __init__.py
        │   ├── test_add.py
        │   ├── test_delete.py
        │   ├── test_find.py
        │   ├── test_get.py
        │   ├── test_measure.py
        │   ├── test_update.py
        │   └── test_use.py
        ├── test_backup.py
        └── vendor
            ├── __init__.py
            ├── test_add.py
            ├── test_delete.py
            ├── test_find.py
            ├── test_get.py
            └── test_update.py

--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
client/node_modules

*.egg-info
*.egg/
*.pyc
*.swp
__pycache__

--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
# DB Type: sqlite, mysql, postgres, cockroachdb
# Default if not set: sqlite
#SPOOLMAN_DB_TYPE=sqlite

# DB Setup, if not using sqlite
# Database hostname
#SPOOLMAN_DB_HOST=
# Database port
#SPOOLMAN_DB_PORT=
# Database name
#SPOOLMAN_DB_NAME=
# Database login username
#SPOOLMAN_DB_USERNAME=
# Path of a file which contains the database password. Can be used instead of SPOOLMAN_DB_PASSWORD if desired.
#SPOOLMAN_DB_PASSWORD_FILE=
# Database login password
#SPOOLMAN_DB_PASSWORD=
# Query parameters for the database connection, e.g. set to `unix_socket=/path/to/mysql.sock` to connect using a MySQL socket.
#SPOOLMAN_DB_QUERY=
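# Example (hypothetical values, shown for illustration only): a complete
# PostgreSQL setup using the variables above could look like:
#SPOOLMAN_DB_TYPE=postgres
#SPOOLMAN_DB_HOST=192.168.1.50
#SPOOLMAN_DB_PORT=5432
#SPOOLMAN_DB_NAME=spoolman
#SPOOLMAN_DB_USERNAME=spoolman
#SPOOLMAN_DB_PASSWORD=changeme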

# Logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL
# Only logs at or above the level set here will be reported
# Default if not set: INFO
#SPOOLMAN_LOGGING_LEVEL=INFO

# Automatic nightly backup for SQLite databases
# Default if not set: TRUE
#SPOOLMAN_AUTOMATIC_BACKUP=TRUE

# Data directory, where the SQLite database is stored
# Default if not set: /home/<user>/.local/share/spoolman
#SPOOLMAN_DIR_DATA=/home/pi/spoolman_data

# Backup directory, where the SQLite database backups are stored
# Default if not set: /home/<user>/.local/share/spoolman/backups
#SPOOLMAN_DIR_BACKUPS=/home/pi/spoolman_data/backups

# Log directory
# Default if not set: /home/<user>/.local/share/spoolman
#SPOOLMAN_DIR_LOGS=/home/pi/spoolman_data

# Host and port to listen on
SPOOLMAN_HOST=0.0.0.0
SPOOLMAN_PORT=7912

# Change base path
# Set this if you want to host Spoolman at a sub-path
# If you want the root to be e.g. myhost.com/spoolman,
# then set this to /spoolman
#SPOOLMAN_BASE_PATH=

# Enable collection of Prometheus metrics from the database
# Default: FALSE
#SPOOLMAN_METRICS_ENABLED=TRUE

# Collect items (filaments, materials, etc.) from an external database
# Set this to the URL of an external database. Set to an empty string to disable
# Default: https://donkie.github.io/SpoolmanDB/
#EXTERNAL_DB_URL=https://myhost.com/spoolmandb/
# Sync interval in seconds; set to 0 to disable automatic sync, in which case it only syncs on start-up
# Default: 3600
#EXTERNAL_DB_SYNC_INTERVAL=3600

# Enable debug mode
# If enabled, the client will accept requests from any host
# This can be useful when developing, but it is also a security risk
# Default: FALSE
#SPOOLMAN_DEBUG_MODE=TRUE

# UID and GID of the user in the docker container
# These only make sense if you are running Spoolman in a docker container
# Default if not set: 1000
#PUID=1000
#PGID=1000
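# Example (hypothetical invocation): when running the Docker image, PUID/PGID
# and the port mapping are typically supplied at run time, e.g.:
#   docker run -d -e PUID=1000 -e PGID=1000 -p 7912:8000 ghcr.io/donkie/spoolman:latest
# (image name assumed from the repository source label; the container listens
# on port 8000 per the Dockerfile)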
* text=auto

# Never modify line endings of our bash scripts
*.sh -crlf
.env.example text eol=lf

--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
# These are supported funding model platforms

github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: donkie
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: Bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Spoolman Host (please complete the following information):**
- Installation type: [Docker or Standalone]
- Installed version: [e.g. 0.17.0; if you don't know, it can be found at the bottom of your Spoolman website]
- OS and Distro: [e.g. Ubuntu, Raspbian]
- Database type: [SQLite, PostgreSQL, etc.]

**Desktop (please complete the following information):**
- OS: [e.g. Windows, Mac, iOS, Android, ...]
- Browser: [e.g. Chrome, Firefox, Safari, ...]

**Additional context**
Add any other context about the problem here.

--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: Feature Request
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
--------------------------------------------------------------------------------
/.github/workflows/apidocs.yml:
--------------------------------------------------------------------------------
name: Generate and deploy API documentation

on:
  release:
    types: [published]
  workflow_dispatch:

permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4.1.7
      - name: Set up PDM
        uses: pdm-project/setup-pdm@v4.1
        with:
          python-version: "3.9"
      - run: pdm sync
      - run: pdm run docs
      - name: Setup Pages
        uses: actions/configure-pages@v5.0.0
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3.0.1
        with:
          path: "docs/"
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4.0.5

--------------------------------------------------------------------------------
/.github/workflows/trigger-release.yml:
--------------------------------------------------------------------------------
name: Release Trigger

on:
  workflow_dispatch:
    inputs:
      bump:
        type: choice
        description: Version to bump
        required: true
        default: minor
        options:
          - micro
          - minor
          - major

jobs:
  bump-version:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.PAT }}

      - name: Set up Python
        uses: actions/setup-python@v5.2.0
        with:
          python-version: "3.9"

      - name: Install node
        uses: actions/setup-node@v4.0.3
        with:
          node-version: '20'

      - name: Setup PDM
        run: |
          pip install pdm pdm-bump

      - name: Bump version
        run: |
          git config --global user.name "${GITHUB_ACTOR}"
          git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com"
          pdm run bump ${{ github.event.inputs.bump }}

      - name: Push
        run: |
          latest_tag=$(git describe --tags --abbrev=0)
          echo "Pushing branch master and tag $latest_tag"
          git push --atomic origin master $latest_tag

--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
default_language_version:
  python: python3.9
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-yaml
      - id: check-added-large-files
      - id: check-case-conflict
      - id: check-merge-conflict
  - repo: https://github.com/psf/black
    rev: 25.1.0
    hooks:
      - id: black
        args: ["--check"]
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.0
    hooks:
      - id: ruff
        args: ["--target-version", "py39"]
  - repo: https://github.com/pdm-project/pdm
    rev: 2.22.4
    hooks:
      - id: pdm-lock-check
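# Usage sketch (standard pre-commit workflow, not specific to this repo):
#   pre-commit install          # register the hooks in .git/hooks once per clone
#   pre-commit run --all-files  # run every configured hook against the whole tree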
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.12-bookworm AS python-builder

# Install dependencies
RUN apt-get update && apt-get install -y \
    g++ \
    python3-dev \
    libpq-dev \
    libffi-dev \
    python3-pip \
    python3-setuptools \
    python3-wheel \
    python3-pdm \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Add local user so we don't run as root
RUN groupmod -g 1000 users \
    && useradd -u 911 -U app \
    && usermod -G users app

ENV PATH="/home/app/.local/bin:${PATH}"

# Copy and install dependencies
COPY --chown=app:app pyproject.toml /home/app/spoolman/
COPY --chown=app:app pdm.lock /home/app/spoolman/
WORKDIR /home/app/spoolman
RUN pdm sync --prod --no-editable

# Copy and install app
COPY --chown=app:app migrations /home/app/spoolman/migrations
COPY --chown=app:app spoolman /home/app/spoolman/spoolman
COPY --chown=app:app alembic.ini /home/app/spoolman/
COPY --chown=app:app README.md /home/app/spoolman/

FROM python:3.12-bookworm AS python-runner

LABEL org.opencontainers.image.source=https://github.com/Donkie/Spoolman
LABEL org.opencontainers.image.description="Keep track of your inventory of 3D-printer filament spools."
LABEL org.opencontainers.image.licenses=MIT

# Install latest su-exec
RUN set -ex; \
    \
    curl -o /usr/local/bin/su-exec.c https://raw.githubusercontent.com/ncopa/su-exec/master/su-exec.c; \
    \
    fetch_deps='gcc libc-dev'; \
    apt-get update; \
    apt-get install -y --no-install-recommends $fetch_deps; \
    rm -rf /var/lib/apt/lists/*; \
    gcc -Wall \
        /usr/local/bin/su-exec.c -o/usr/local/bin/su-exec; \
    chown root:root /usr/local/bin/su-exec; \
    chmod 0755 /usr/local/bin/su-exec; \
    rm /usr/local/bin/su-exec.c; \
    \
    apt-get purge -y --auto-remove $fetch_deps

# Add local user so we don't run as root
RUN groupmod -g 1000 users \
    && useradd -u 1000 -U app \
    && usermod -G users app \
    && mkdir -p /home/app/.local/share/spoolman \
    && chown -R app:app /home/app/.local/share/spoolman

# Copy built client
COPY --chown=app:app ./client/dist /home/app/spoolman/client/dist

# Copy built app
COPY --chown=app:app --from=python-builder /home/app/spoolman /home/app/spoolman

COPY entrypoint.sh /home/app/spoolman/entrypoint.sh
RUN chmod +x /home/app/spoolman/entrypoint.sh

WORKDIR /home/app/spoolman

ENV PATH="/home/app/spoolman/.venv/bin:${PATH}"

ARG GIT_COMMIT=unknown
ARG BUILD_DATE=unknown
ENV GIT_COMMIT=${GIT_COMMIT}
ENV BUILD_DATE=${BUILD_DATE}

# Write GIT_COMMIT and BUILD_DATE to a build.txt file
RUN echo "GIT_COMMIT=${GIT_COMMIT}" > build.txt \
    && echo "BUILD_DATE=${BUILD_DATE}" >> build.txt

# Run command
EXPOSE 8000
ENTRYPOINT ["/home/app/spoolman/entrypoint.sh"]

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2023 Daniel Hultgren

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

Icon of a filament spool

_Keep track of your inventory of 3D-printer filament spools._

Spoolman is a self-hosted web service designed to help you efficiently manage your 3D printer filament spools and monitor their usage. It acts as a centralized database that seamlessly integrates with popular 3D printing software like [OctoPrint](https://octoprint.org/) and [Klipper](https://www.klipper3d.org/)/[Moonraker](https://moonraker.readthedocs.io/en/latest/). When connected, it automatically updates spool weights as printing progresses, giving you real-time insights into filament usage.

[![Static Badge](https://img.shields.io/badge/Spoolman%20Wiki-blue?link=https%3A%2F%2Fgithub.com%2FDonkie%2FSpoolman%2Fwiki)](https://github.com/Donkie/Spoolman/wiki)
[![GitHub Release](https://img.shields.io/github/v/release/Donkie/Spoolman)](https://github.com/Donkie/Spoolman/releases)

### Features
* **Filament Management**: Keep comprehensive records of filament types, manufacturers, and individual spools.
* **API Integration**: The [REST API](https://donkie.github.io/Spoolman/) allows easy integration with other software, facilitating automated workflows and data exchange (a minimal example is shown at the end of this README).
* **Real-Time Updates**: Stay informed with live spool updates through Websockets, providing immediate feedback during printing operations.
* **Central Filament Database**: A community-supported database of manufacturers and filaments simplifies adding new spools to your inventory. Contribute by heading to [SpoolmanDB](https://github.com/Donkie/SpoolmanDB).
* **Web-Based Client**: Spoolman includes a built-in web client that lets you manage data effortlessly:
  * View, create, edit, and delete filament data.
  * Add custom fields to tailor information to your specific needs.
  * Print labels with QR codes for easy spool identification and tracking.
  * Contribute to its translation into 18 languages via [Weblate](https://hosted.weblate.org/projects/spoolman/).
* **Database Support**: SQLite, PostgreSQL, MySQL, and CockroachDB.
* **Multi-Printer Management**: Handles spool updates from several printers simultaneously.
* **Advanced Monitoring**: Integrate with [Prometheus](https://prometheus.io/) for detailed historical analysis of filament usage, helping you track and optimize your printing processes. See the [Wiki](https://github.com/Donkie/Spoolman/wiki/Filament-Usage-History) for instructions on how to set it up.

**Spoolman integrates with:**
* [Moonraker](https://moonraker.readthedocs.io/en/latest/configuration/#spoolman) and most front-ends (Fluidd, KlipperScreen, Mainsail, ...)
* [OctoPrint](https://github.com/mdziekon/octoprint-spoolman)
* [OctoEverywhere](https://octoeverywhere.com/spoolman?source=github_spoolman)
* [Homeassistant](https://github.com/Disane87/spoolman-homeassistant)

**Web client preview:**
![image](https://github.com/Donkie/Spoolman/assets/2332094/33928d5e-440f-4445-aca9-456c4370ad0d)

## Installation
Please see the [Installation page on the Wiki](https://github.com/Donkie/Spoolman/wiki/Installation) for details on how to install Spoolman.
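## API example
A minimal sketch of what the REST API integration mentioned above looks like, querying the spool list of a local instance (hypothetical host; the default port and the `/api/v1` base path are assumed):

```typescript
// List all spools and print their remaining filament weight.
const response = await fetch("http://localhost:7912/api/v1/spool");
const spools: { id: number; remaining_weight?: number }[] = await response.json();
for (const spool of spools) {
  console.log(`Spool #${spool.id}: ${spool.remaining_weight ?? "unknown"} g remaining`);
}
```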
--------------------------------------------------------------------------------
/alembic.ini:
--------------------------------------------------------------------------------
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(name)-26s %(levelname)-8s %(message)s
datefmt = %H:%M:%S

--------------------------------------------------------------------------------
/client/.eslintrc.cjs:
--------------------------------------------------------------------------------
/* eslint-env node */

module.exports = {
  env: { browser: true, es2020: true },
  extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:react-hooks/recommended"],
  parser: "@typescript-eslint/parser",
  parserOptions: { ecmaVersion: "latest", sourceType: "module" },
  plugins: ["react-refresh"],
  rules: {
    "react-refresh/only-export-components": "warn",
  },
};

--------------------------------------------------------------------------------
/client/.gitignore:
--------------------------------------------------------------------------------
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# production
/build

# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local

npm-debug.log*
yarn-debug.log*
yarn-error.log*
.vite

--------------------------------------------------------------------------------
/client/.npmrc:
--------------------------------------------------------------------------------
legacy-peer-deps=true
strict-peer-dependencies=false

--------------------------------------------------------------------------------
/client/.prettierrc:
--------------------------------------------------------------------------------
{
  "printWidth": 120,
  "overrides": [
    {
      "files": ["public/locales/**/*.json"],
      "options": {
        "tabWidth": 4
      }
    }
  ]
}

--------------------------------------------------------------------------------
/client/README.MD:
--------------------------------------------------------------------------------
# Spoolman UI

A simple [refine](https://github.com/refinedev/refine) based UI for manipulating the data in the Spoolman database.
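To work on the client locally, the usual flow is `npm install` followed by `npm run dev`, which starts the refine dev server (see the `scripts` section of `package.json` for the other available commands).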
--------------------------------------------------------------------------------
/client/icons/spool.svg:
--------------------------------------------------------------------------------
Created with Fabric.js 5.3.0

--------------------------------------------------------------------------------
/client/icons/spoolman_monochrome.svg:
--------------------------------------------------------------------------------
Created with Fabric.js 5.3.0

--------------------------------------------------------------------------------
/client/index.html:
--------------------------------------------------------------------------------

Spoolman

--------------------------------------------------------------------------------
/client/package.json:
--------------------------------------------------------------------------------
{
  "name": "spoolman-ui",
  "version": "0.22.1",
  "engines": {
    "node": "20.x"
  },
  "private": true,
  "type": "module",
  "dependencies": {
    "@ant-design/icons": "^5.6.1",
    "@ant-design/v5-patch-for-react-19": "^1.0.3",
    "@loadable/component": "^5.16.4",
    "@refinedev/antd": "^5.46.1",
    "@refinedev/core": "^4.57.9",
    "@refinedev/kbar": "^1.3.16",
    "@refinedev/react-router": "^1.0.1",
    "@refinedev/simple-rest": "^5.0.10",
    "@tanstack/react-query": "^4.36.1",
    "@tanstack/react-query-devtools": "^4.36.1",
    "@types/loadable__component": "^5.13.9",
    "@types/lodash": "^4.17.16",
    "@types/uuid": "^10.0.0",
    "@yudiel/react-qr-scanner": "^1.2.10",
    "antd": "^5.24.6",
    "axios": "^1.8.4",
    "html-to-image": "^1.11.13",
    "i18next": "^24.2.3",
    "i18next-browser-languagedetector": "^8.0.4",
    "i18next-http-backend": "^3.0.2",
    "lodash": "^4.17.21",
    "react": "^19.1.0",
    "react-dnd": "^16.0.1",
    "react-dnd-html5-backend": "^16.0.1",
    "react-dom": "^19.1.0",
    "react-i18next": "^15.4.1",
    "react-router": "^7.5.0",
    "react-to-print": "^3.0.6",
    "uuid": "^11.1.0",
    "vite-plugin-svgr": "^4.3.0",
    "zustand": "^5.0.3"
  },
  "devDependencies": {
    "@refinedev/cli": "^2.16.46",
    "@simbathesailor/use-what-changed": "^2.0.0",
    "@types/node": "^22.14.1",
    "@types/react": "^19.1.1",
    "@types/react-dom": "^19.1.2",
    "@typescript-eslint/eslint-plugin": "^8.29.1",
    "@typescript-eslint/parser": "^8.29.1",
    "@vitejs/plugin-react": "^4.3.4",
    "eslint": "^9.24.0",
    "eslint-plugin-react-hooks": "^5.2.0",
    "eslint-plugin-react-refresh": "^0.4.19",
    "typescript": "^5.8.3",
    "vite": "^6.2.6",
    "vite-plugin-mkcert": "^1.17.8"
  },
  "scripts": {
    "dev": "refine dev",
    "build": "tsc && refine build",
    "build.zip": "cd ./dist && zip -r spoolman-client.zip ./ ./ && cd ..",
    "preview": "refine start",
    "refine": "refine",
    "check-i18n": "node scripts/check-i18n.js"
  },
  "browserslist": {
    "production": [
      ">0.2%",
      "not dead",
      "not op_mini all"
    ],
    "development": [
      "last 1 chrome version",
      "last 1 firefox version",
      "last 1 safari version"
    ]
  }
}

--------------------------------------------------------------------------------
/client/public/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/client/public/apple-touch-icon.png

--------------------------------------------------------------------------------
/client/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/client/public/favicon.ico

--------------------------------------------------------------------------------
/client/public/icon512_maskable.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/client/public/icon512_maskable.png
--------------------------------------------------------------------------------
/client/public/icon512_rounded.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/client/public/icon512_rounded.png

--------------------------------------------------------------------------------
/client/public/kofi_s_logo_nolabel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/client/public/kofi_s_logo_nolabel.png

--------------------------------------------------------------------------------
/client/public/locales/et/common.json:
--------------------------------------------------------------------------------
{
    "actions": {
        "list": "Kuva",
        "create": "Loo",
        "edit": "Muuda",
        "show": "Näita",
        "clone": "Klooni"
    },
    "buttons": {
        "create": "Loo",
        "save": "Salvesta",
        "confirm": "Oled kindel?",
        "clear": "Tühjenda",
        "refresh": "Uuenda",
        "show": "Näita",
        "hideColumns": "Peida tulbad",
        "clearFilters": "Tühjenda filtrid",
        "edit": "Muuda",
        "undo": "Võta tagasi",
        "clone": "Klooni",
        "archive": "Arhiveeri",
        "notAccessTitle": "Sul pole õigusi sisenemiseks",
        "saveAndAdd": "Salvesta ja Lisa",
        "logout": "Logi välja",
        "delete": "Kustuta",
        "cancel": "Tühista",
        "import": "Impordi",
        "filter": "Filtreeri",
        "unArchive": "Võta arhiivist tagasi",
        "hideArchived": "Peida arhiveeritud",
        "showArchived": "Näita arhiveeritud",
        "continue": "Jätka"
    },
    "notifications": {
        "error": "Viga (kood: {{statusCode}})",
        "undoable": "Sul on {{seconds}} sekundit, et muudatus tagasi võtta.",
        "success": "Õnnestunud",
        "createSuccess": "Loodi {{resource}}",
        "createError": "Tekkis viga {{resource}} loomisel. (staatuse kood: {{statusCode}})",
        "deleteSuccess": "Edukalt kustutatud {{resource}}",
        "deleteError": "Viga {{resource}} kustutamisel. (staatuse kood: {{statusCode}})",
        "editSuccess": "Edukalt muudetud {{resource}}",
        "editError": "Viga {{resource}} muutmisel (staatuse kood: {{statusCode}})",
        "importProgress": "Importimine: {{processed}}/{{total}}",
        "saveSuccessful": "Edukalt salvestatud!",
        "validationError": "Viga valideerimisel: {{error}}"
    },
    "warnWhenUnsavedChanges": "Oled sa kindel, et soovid lahkuda? Sul on salvestamata muudatusi.",
    "kofi": "Anna mulle tippi Ko-fi's",
    "loading": "Laadimine",
    "version": "Versioon",
    "unknown": "Tundmatu",
    "yes": "Jah",
    "no": "Ei",
    "tags": {
        "clone": "Klooni"
    },
    "dashboard": {
        "title": "Juhtpaneel"
    },
    "printing": {
        "generic": {
            "title": "Trükkib"
        }
    }
}

--------------------------------------------------------------------------------
/client/public/locales/lt/common.json:
--------------------------------------------------------------------------------
{}

--------------------------------------------------------------------------------
/client/public/manifest.json:
--------------------------------------------------------------------------------
{
  "name": "Spoolman",
  "short_name": "Spoolman",
  "description": "Keep track of your inventory of 3D-printer filament spools.",
  "icons": [
    {
      "purpose": "maskable",
      "sizes": "512x512",
      "src": "icon512_maskable.png",
      "type": "image/png"
    },
    {
      "purpose": "any",
      "sizes": "512x512",
      "src": "icon512_rounded.png",
      "type": "image/png"
    }
  ],
  "background_color": "#1F1F1F",
  "theme_color": "#DC7734",
  "display": "standalone"
}

--------------------------------------------------------------------------------
/client/scripts/check-i18n.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

import { readdirSync, readFileSync, statSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

const __dirname = dirname(fileURLToPath(import.meta.url));

const LOCALES_DIR = join(__dirname, "../public/locales");
const I18N_FILE = join(__dirname, "../src/i18n.ts");

const minLocaleFileSize = 1024 * 10; // Minimum 10 kB for a locale file to be considered
function getLocaleFolders() {
  return readdirSync(LOCALES_DIR).filter((folder) => {
    const folderPath = join(LOCALES_DIR, folder);
    const commonFilePath = join(folderPath, "common.json");
    return (
      statSync(folderPath).isDirectory() &&
      statSync(commonFilePath).isFile() &&
      statSync(commonFilePath).size >= minLocaleFileSize
    );
  });
}

function getDeclaredLanguages() {
  const i18nContent = readFileSync(I18N_FILE, "utf8");
  const languageMatches = [...i18nContent.matchAll(/\["(.*?)"\]:/g)];
  return languageMatches.map((match) => match[1]);
}

function main() {
  const foundLocales = new Set(getLocaleFolders());
  const declaredLocales = new Set(getDeclaredLanguages());

  const missingLocales = [...foundLocales].filter((locale) => !declaredLocales.has(locale));

  if (missingLocales.length > 0) {
    console.error("❌ The following locales are missing from src/i18n.ts:");
    missingLocales.forEach((locale) => console.error(`  - ${locale}`));
    console.error("⚠️ Please add them to the `languages` object in i18n.ts.");
    console.log("Template:");
    for (const locale of missingLocales) {
      console.log(`["${locale}"]: {
  name: "",
  fullCode: "",
  djs: () => import("dayjs/locale/${locale.toLowerCase()}"),
},`);
    }
    process.exit(1);
  }

  console.log("✅ All locales are properly declared in i18n.ts.");
  process.exit(0);
}

main();
--------------------------------------------------------------------------------
/client/src/components/dateTimePicker.tsx:
--------------------------------------------------------------------------------
import { DatePicker } from "antd";
import dayjs from "dayjs";
import advancedFormat from "dayjs/plugin/advancedFormat";
import timezone from "dayjs/plugin/timezone";
import utc from "dayjs/plugin/utc";

dayjs.extend(utc);
dayjs.extend(timezone);
dayjs.extend(advancedFormat);

// Localized date time format with timezone
const dateTimeFormat = "YYYY-MM-DD HH:mm:ss";

export function DateTimePicker<T extends string | dayjs.Dayjs>(props: { value?: T; onChange?: (value?: T) => void }) {
  return (
    {
        if (value) {
          if (typeof props.value === "string") {
            props.onChange?.(value.toISOString() as T);
          } else {
            props.onChange?.(value as T);
          }
        } else {
          props.onChange?.(undefined);
        }
      }}
    />
  );
}

--------------------------------------------------------------------------------
/client/src/components/favicon.tsx:
--------------------------------------------------------------------------------
import { useEffect } from "react";

/**
 * Renders a favicon element in the head of the HTML document with the specified URL.
 *
 * @param {string} props.url - The URL of the favicon image.
 * @return {JSX.Element} - An empty JSX element.
 */
export function Favicon(props: { url: string }) {
  useEffect(() => {
    let link = document.querySelector("link[rel~='icon']") as HTMLLinkElement;
    if (!link) {
      link = document.createElement("link") as HTMLLinkElement;
      link.rel = "icon";
      document.getElementsByTagName("head")[0].appendChild(link);
    }
    link.href = props.url;
  }, [props.url]);
  return <>;
}

--------------------------------------------------------------------------------
/client/src/components/filamentImportModal.tsx:
--------------------------------------------------------------------------------
import { useTranslate } from "@refinedev/core";
import { Form, Modal, Select } from "antd";
import { Trans } from "react-i18next";
import { formatFilamentLabel } from "../pages/spools/functions";
import { searchMatches } from "../utils/filtering";
import { ExternalFilament, useGetExternalDBFilaments } from "../utils/queryExternalDB";

export function FilamentImportModal(props: {
  isOpen: boolean;
  onImport: (filament: ExternalFilament) => void;
  onClose: () => void;
}) {
  const [form] = Form.useForm();
  const t = useTranslate();

  const externalFilaments = useGetExternalDBFilaments();
  const filamentOptions =
    externalFilaments.data?.map((item) => {
      return {
        label: formatFilamentLabel(
          item.name,
          item.diameter,
          item.manufacturer,
          item.material,
          item.weight,
          item.spool_type
        ),
        value: item.id,
        item: item,
      };
    }) ?? [];
  filamentOptions.sort((a, b) => a.label.localeCompare(b.label, undefined, { sensitivity: "base" }));

  return (
    form.submit()}
      onCancel={() => props.onClose()}
    >
      {
        const filament = filamentOptions.find((item) => item.value === values.filament)?.item;
        if (!filament) {
          throw new Error("Filament not found");
        }
        props.onImport(filament);
        props.onClose();
        form.resetFields();
      }}
    >

      ,
      }}
      />

      {contextHolder}
  );
}

--------------------------------------------------------------------------------
/client/src/pages/settings/index.tsx:
--------------------------------------------------------------------------------
import { FileOutlined, HighlightOutlined, SolutionOutlined, ToolOutlined, UserOutlined } from "@ant-design/icons";
import { IResourceComponentsProps, useTranslate } from "@refinedev/core";
import { Menu, theme } from "antd";
import { Content } from "antd/es/layout/layout";
import dayjs from "dayjs";
import utc from "dayjs/plugin/utc";
import React from "react";
import { Route, Routes, useNavigate } from "react-router";
import { ExtraFieldsSettings } from "./extraFieldsSettings";
import { GeneralSettings } from "./generalSettings";

dayjs.extend(utc);

const { useToken } = theme;

export const Settings: React.FC<IResourceComponentsProps> = () => {
  const { token } = useToken();
  const t = useTranslate();
  const navigate = useNavigate();

  const getCurrentKey = () => {
    const path = window.location.pathname.replace("/settings", "");
    // Remove starting slash and ending slash if exists and return
    return path.replace(/^\/|\/$/g, "");
  };

  return (
    <>

      {t("settings.header")}

      {
          if (e.key === "") {
            return navigate("/settings");
          } else {
            return navigate(`/settings/${e.key}`);
          }
        }}
        items={[
          { key: "", label: t("settings.general.tab"), icon: },
          {
            key: "extra",
            label: t("settings.extra_fields.tab"),
            icon: ,
            children: [
              {
                label: t("spool.spool"),
                key: "extra/spool",
                icon: ,
              },
              {
                label: t("filament.filament"),
                key: "extra/filament",
                icon: ,
              },
              {
                label: t("vendor.vendor"),
                key: "extra/vendor",
                icon: ,
              },
            ],
          },
        ]}
        style={{
          marginBottom: "1em",
        }}
      />

      } />
      } />

  );
};

export default Settings;

--------------------------------------------------------------------------------
/client/src/pages/spools/index.ts:
--------------------------------------------------------------------------------
export { SpoolCreate } from "./create";
export { SpoolEdit } from "./edit";
export { SpoolList } from "./list";
export { SpoolShow } from "./show";

--------------------------------------------------------------------------------
/client/src/pages/spools/model.tsx:
--------------------------------------------------------------------------------
import { IFilament } from "../filaments/model";

export enum WeightToEnter {
  used_weight = 1,
  remaining_weight = 2,
  measured_weight = 3,
}

export interface ISpool {
  id: number;
  registered: string;
  first_used?: string;
  last_used?: string;
  filament: IFilament;
  price?: number;
  initial_weight?: number;
  spool_weight?: number;
  remaining_weight?: number;
  used_weight: number;
  remaining_length?: number;
  used_length: number;
  location?: string;
  lot_nr?: string;
  comment?: string;
  archived: boolean;
  extra: { [key: string]: string };
}

// ISpoolParsedExtras is the same as ISpool, but with the extra field parsed into its real types
export type ISpoolParsedExtras = Omit<ISpool, "extra"> & { extra?: { [key: string]: unknown } };

--------------------------------------------------------------------------------
/client/src/pages/vendors/functions.ts:
--------------------------------------------------------------------------------
import { getAPIURL } from "../../utils/url";
import { IVendor } from "./model";

/**
 * Get a vendor given its external ID.
 */
export async function getVendorByExternalID(external_id: string): Promise<IVendor | null> {
  // Make a search using GET and query params
  const response = await fetch(`${getAPIURL()}/vendor?${new URLSearchParams({ external_id })}`);
  if (!response.ok) {
    return null;
  }

  const data: IVendor[] = await response.json();
  if (data.length === 0) {
    return null;
  }

  return data[0];
}
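// Usage sketch (hypothetical ID): resolves to the matching vendor, or null when
// the API returns no match or a non-OK response:
//   const vendor = await getVendorByExternalID("some-external-id");
//   if (vendor) console.log(vendor.name);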

/**
 * Get or create a vendor for the given external vendor ID.
 * Returns the existing or newly created vendor.
 */
export async function getOrCreateVendorFromExternal(vendor_external_id: string): Promise<IVendor> {
  const existingVendor = await getVendorByExternalID(vendor_external_id);
  if (existingVendor) {
    return existingVendor;
  }

  const body: Omit<IVendor, "id" | "registered" | "extra"> = {
    name: vendor_external_id,
    external_id: vendor_external_id,
  };

  const response = await fetch(getAPIURL() + "/vendor", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
  });
  if (!response.ok) {
    throw new Error("Network response was not ok");
  }
  return response.json();
}

--------------------------------------------------------------------------------
/client/src/pages/vendors/index.ts:
--------------------------------------------------------------------------------
export { VendorCreate } from "./create";
export { VendorEdit } from "./edit";
export { VendorList } from "./list";
export { VendorShow } from "./show";

--------------------------------------------------------------------------------
/client/src/pages/vendors/model.tsx:
--------------------------------------------------------------------------------
export interface IVendor {
  id: number;
  registered: string;
  name: string;
  comment?: string;
  empty_spool_weight?: number;
  external_id?: string;
  extra: { [key: string]: string };
}

// IVendorParsedExtras is the same as IVendor, but with the extra field parsed into its real types
export type IVendorParsedExtras = Omit<IVendor, "extra"> & { extra?: { [key: string]: unknown } };

--------------------------------------------------------------------------------
/client/src/pages/vendors/show.tsx:
--------------------------------------------------------------------------------
import { DateField, NumberField, Show, TextField } from "@refinedev/antd";
import { IResourceComponentsProps, useShow, useTranslate } from "@refinedev/core";
import { Typography } from "antd";
import dayjs from "dayjs";
import utc from "dayjs/plugin/utc";
import React from "react";
import { ExtraFieldDisplay } from "../../components/extraFields";
import { enrichText } from "../../utils/parsing";
import { EntityType, useGetFields } from "../../utils/queryFields";
import { IVendor } from "./model";

dayjs.extend(utc);

const { Title } = Typography;

export const VendorShow: React.FC<IResourceComponentsProps> = () => {
  const t = useTranslate();
  const extraFields = useGetFields(EntityType.vendor);

  const { queryResult } = useShow<IVendor>({
    liveMode: "auto",
  });
  const { data, isLoading } = queryResult;

  const record = data?.data;

  const formatTitle = (item: IVendor) => {
    return t("vendor.titles.show_title", { id: item.id, name: item.name, interpolation: { escapeValue: false } });
  };

  return (
      {t("vendor.fields.id")}

      {t("vendor.fields.registered")}

      {t("vendor.fields.name")}

      {t("vendor.fields.comment")}

      {t("vendor.fields.empty_spool_weight")}

      {t("vendor.fields.external_id")}

      {t("settings.extra_fields.tab")}
      {extraFields?.data?.map((field, index) => (
      ))}
  );
};

export default VendorShow;

--------------------------------------------------------------------------------
/client/src/utils/filtering.ts:
--------------------------------------------------------------------------------
import { CrudFilter, CrudOperators } from "@refinedev/core";

interface TypedCrudFilter<Obj> {
  field: keyof Obj;
  operator: Exclude<CrudOperators, "or" | "and">;
  value: string[];
}

export function typeFilters<Obj>(filters: CrudFilter[]): TypedCrudFilter<Obj>[] {
  return filters as TypedCrudFilter<Obj>[]; // <-- Unsafe cast
}

/**
 * Returns an array of filter values for a given field based on the provided filters.
 * @param filters An array of `CrudFilter` objects that define the filtering criteria.
 * @param field The field to get the filter values for.
 * @returns An array of filter values for the given field.
 */
export function getFiltersForField<Obj, Field extends keyof Obj>(
  filters: TypedCrudFilter<Obj>[],
  field: Field
): string[] {
  const filterValues: string[] = [];
  filters.forEach((filter) => {
    if (filter.field === field) {
      filterValues.push(...(filter.value as string[]));
    }
  });
  return filterValues;
}

/**
 * Function that returns an array with all undefined values removed.
 */
export function removeUndefined<T>(array: (T | undefined)[]): T[] {
  return array.filter((value) => value !== undefined) as T[];
}

/**
 * Performs a case-insensitive search for the given query in the given string.
 * The query is broken down into words and the search is performed on each word.
 */
export function searchMatches(query: string, test: string): boolean {
  const words = query.toLowerCase().split(" ");
  return words.every((word) => test.toLowerCase().includes(word));
}
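// Worked example: searchMatches("prusa petg", "Prusament PETG Galaxy Black") returns true,
// since every word of the query occurs (case-insensitively) in the tested string, while
// searchMatches("prusa abs", "Prusament PETG") returns false.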

--------------------------------------------------------------------------------
/client/src/utils/overrides.css:
--------------------------------------------------------------------------------
#qty-input {
  text-align: center !important;
}

--------------------------------------------------------------------------------
/client/src/utils/queryExternalDB.ts:
--------------------------------------------------------------------------------
import { useQuery } from "@tanstack/react-query";
import { getAPIURL } from "./url";

export enum SpoolType {
  PLASTIC = "plastic",
  CARDBOARD = "cardboard",
  METAL = "metal",
}

export enum Finish {
  MATTE = "matte",
  GLOSSY = "glossy",
}

export enum MultiColorDirection {
  COAXIAL = "coaxial",
  LONGITUDINAL = "longitudinal",
}

export enum Pattern {
  MARBLE = "marble",
  SPARKLE = "sparkle",
}

export interface ExternalFilament {
  id: string;
  manufacturer: string;
  name: string;
  material: string;
  density: number;
  weight: number;
  spool_weight?: number;
  spool_type?: SpoolType;
  diameter: number;
  color_hex?: string;
  color_hexes?: string[];
  extruder_temp?: number;
  bed_temp?: number;
  finish?: Finish;
  multi_color_direction?: MultiColorDirection;
  pattern?: Pattern;
  translucent: boolean;
  glow: boolean;
}

export interface ExternalMaterial {
  material: string;
  density: number;
  extruder_temp: number | null;
  bed_temp: number | null;
}

export function useGetExternalDBFilaments() {
  return useQuery<ExternalFilament[]>({
    queryKey: ["external", "filaments"],
    staleTime: 60,
    queryFn: async () => {
      const response = await fetch(`${getAPIURL()}/external/filament`);
      return response.json();
    },
  });
}

export function useGetExternalDBMaterials() {
  return useQuery<ExternalMaterial[]>({
    queryKey: ["external", "materials"],
    staleTime: 60,
    queryFn: async () => {
      const response = await fetch(`${getAPIURL()}/external/material`);
      return response.json();
    },
  });
}

--------------------------------------------------------------------------------
/client/src/utils/queryFields.ts:
--------------------------------------------------------------------------------
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import dayjs from "dayjs";
import { getAPIURL } from "./url";

export enum FieldType {
  text = "text",
  integer = "integer",
  integer_range = "integer_range",
  float = "float",
  float_range = "float_range",
  datetime = "datetime",
  boolean = "boolean",
  choice = "choice",
}

export enum EntityType {
  vendor = "vendor",
  filament = "filament",
  spool = "spool",
}

export interface FieldParameters {
  name: string;
  order: number;
  unit?: string;
  field_type: FieldType;
  default_value?: string | (number | null)[] | boolean | dayjs.Dayjs;
  choices?: string[];
  multi_choice?: boolean;
}

export interface Field extends FieldParameters {
  key: string;
  entity_type: EntityType;
}

export function useGetFields(entity_type: EntityType) {
  return useQuery<Field[]>({
    queryKey: ["fields", entity_type],
    queryFn: async () => {
      const response = await fetch(`${getAPIURL()}/field/${entity_type}`);
      return response.json();
    },
  });
}
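// Usage sketch, mirroring how vendors/show.tsx consumes this hook
// (EntityType.spool picked arbitrarily for illustration):
//   const fields = useGetFields(EntityType.spool);
//   const fieldNames = fields.data?.map((field) => field.name) ?? [];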
queryClient = useQueryClient(); 111 | 112 | return useMutation<Field[], Error, string>({ 113 | mutationFn: async (key) => { 114 | const response = await fetch(`${getAPIURL()}/field/${entity_type}/${key}`, { 115 | method: "DELETE", 116 | }); 117 | 118 | // Throw error if response is not ok 119 | if (!response.ok) { 120 | throw new Error((await response.json()).message); 121 | } 122 | 123 | return response.json(); 124 | }, 125 | onSuccess: () => { 126 | // Invalidate and refetch 127 | queryClient.invalidateQueries(["fields", entity_type]); 128 | }, 129 | }); 130 | } 131 | -------------------------------------------------------------------------------- /client/src/utils/querySettings.ts: -------------------------------------------------------------------------------- 1 | import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; 2 | import { getAPIURL } from "./url"; 3 | 4 | interface SettingResponseValue { 5 | value: string; 6 | is_set: boolean; 7 | type: string; 8 | } 9 | 10 | interface SettingsResponse { 11 | [key: string]: SettingResponseValue; 12 | } 13 | 14 | export function useGetSettings() { 15 | return useQuery<SettingsResponse>({ 16 | queryKey: ["settings"], 17 | queryFn: async () => { 18 | const response = await fetch(`${getAPIURL()}/setting/`); 19 | return response.json(); 20 | }, 21 | }); 22 | } 23 | 24 | export function useGetSetting(key: string) { 25 | return useQuery<SettingResponseValue>({ 26 | queryKey: ["settings", key], 27 | queryFn: async () => { 28 | const response = await fetch(`${getAPIURL()}/setting/${key}`); 29 | return response.json(); 30 | }, 31 | }); 32 | } 33 | 34 | export function useSetSetting(key: string) { 35 | const queryClient = useQueryClient(); 36 | 37 | return useMutation<SettingResponseValue, Error, unknown, SettingResponseValue | undefined>({ 38 | mutationFn: async (value) => { 39 | const response = await fetch(`${getAPIURL()}/setting/${key}`, { 40 | method: "POST", 41 | headers: { 42 | "Content-Type": "application/json", 43 | }, 44 | body: JSON.stringify(JSON.stringify(value)), 45 | }); 46 | 47 | // Throw error if response is not ok 48 | if (!response.ok) { 49 | throw new Error((await response.json()).message); 50 | } 51 | 52 | return response.json(); 53 | }, 54 | onMutate: async (value) => { 55 | await queryClient.cancelQueries(["settings", key]); 56 | const previousValue = queryClient.getQueryData<SettingResponseValue>(["settings", key]); 57 | queryClient.setQueryData<SettingResponseValue>(["settings", key], (old) => 58 | old ? { ...old, value: JSON.stringify(value) } : undefined 59 | ); 60 | return previousValue; 61 | }, 62 | onError: (_error, _value, context) => { 63 | queryClient.setQueryData(["settings", key], context); 64 | }, 65 | onSuccess: (_data, _value) => { 66 | // Invalidate and refetch 67 | queryClient.invalidateQueries(["settings", key]); 68 | }, 69 | }); 70 | } 71 | -------------------------------------------------------------------------------- /client/src/utils/settings.ts: -------------------------------------------------------------------------------- 1 | import { useGetSetting } from "./querySettings"; 2 | 3 | export function useCurrency() { 4 | const { data: currency } = useGetSetting("currency"); 5 | return JSON.parse(currency?.value ??
'"EUR"'); 6 | } 7 | 8 | export function getCurrencySymbol(locale: string | undefined, currency: string) { 9 | return (0) 10 | .toLocaleString(locale, { 11 | style: "currency", 12 | currency, 13 | currencyDisplay: "narrowSymbol", 14 | minimumFractionDigits: 0, 15 | maximumFractionDigits: 0, 16 | }) 17 | .replace(/\d/g, "") 18 | .trim(); 19 | } 20 | 21 | export function useCurrencyFormatter() { 22 | const currency = useCurrency(); 23 | const roundPrices = JSON.parse(useGetSetting("round_prices").data?.value ?? "false"); 24 | 25 | return new Intl.NumberFormat(undefined, { 26 | style: "currency", 27 | currency: currency, 28 | currencyDisplay: "narrowSymbol", 29 | notation: roundPrices ? "compact" : "standard", 30 | }); 31 | } 32 | -------------------------------------------------------------------------------- /client/src/utils/sorting.ts: -------------------------------------------------------------------------------- 1 | import { CrudSort } from "@refinedev/core"; 2 | import { SortOrder } from "antd/es/table/interface"; 3 | 4 | interface TypedCrudSort { 5 | field: keyof Obj; 6 | order: "asc" | "desc"; 7 | } 8 | 9 | /** 10 | * Returns the sort order for a given field based on the provided sorters. 11 | * @param sorters An array of `CrudSort` objects that define the sorting criteria. 12 | * @param field The field to get the sort order for. 13 | * @returns The sort order for the given field, or undefined if the field is not being sorted. 14 | */ 15 | export function getSortOrderForField( 16 | sorters: TypedCrudSort[], 17 | field: Field 18 | ): SortOrder | undefined { 19 | const sorter = sorters.find((s) => s.field === field); 20 | if (sorter) { 21 | return sorter.order === "asc" ? "ascend" : "descend"; 22 | } 23 | return undefined; 24 | } 25 | 26 | export function typeSorters(sorters: CrudSort[]): TypedCrudSort[] { 27 | return sorters as TypedCrudSort[]; // <-- Unsafe cast 28 | } 29 | -------------------------------------------------------------------------------- /client/src/utils/support.ts: -------------------------------------------------------------------------------- 1 | function _isLocalStorageAvailable(): boolean { 2 | try { 3 | localStorage.setItem("test", "test"); 4 | localStorage.removeItem("test"); 5 | return true; 6 | } catch (e) { 7 | return false; 8 | } 9 | } 10 | 11 | export const isLocalStorageAvailable = _isLocalStorageAvailable(); 12 | -------------------------------------------------------------------------------- /client/src/utils/url.ts: -------------------------------------------------------------------------------- 1 | declare global { 2 | interface Window { 3 | SPOOLMAN_BASE_PATH: string; 4 | } 5 | } 6 | 7 | /** 8 | * Returns the base path of the application. 9 | * 10 | * If a base path is set, this returns e.g. "/spoolman". If none is set, it returns "". 11 | * 12 | * @return {string} The base path of the application. If the `SPOOLMAN_BASE_PATH` 13 | * window variable is set and not empty, it is returned. Otherwise, the 14 | * default base path "" is returned. 15 | */ 16 | export function getBasePath(): string { 17 | if (window.SPOOLMAN_BASE_PATH && window.SPOOLMAN_BASE_PATH.length > 0) { 18 | return window.SPOOLMAN_BASE_PATH; 19 | } else { 20 | return ""; 21 | } 22 | } 23 | 24 | /** 25 | * A function that returns the Spoolman API URL 26 | * This returns e.g. 
"/spoolman/api/v1" if the base path is "/spoolman" 27 | * 28 | * @return {string} The API URL 29 | */ 30 | export function getAPIURL(): string { 31 | if (!import.meta.env.VITE_APIURL) { 32 | throw new Error("VITE_APIURL is not set"); 33 | } 34 | return getBasePath() + import.meta.env.VITE_APIURL; 35 | } 36 | -------------------------------------------------------------------------------- /client/src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /// 3 | -------------------------------------------------------------------------------- /client/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ESNext", 4 | "useDefineForClassFields": true, 5 | "lib": ["DOM", "DOM.Iterable", "ESNext"], 6 | "allowJs": false, 7 | "skipLibCheck": true, 8 | "esModuleInterop": false, 9 | "allowSyntheticDefaultImports": true, 10 | "strict": true, 11 | "forceConsistentCasingInFileNames": true, 12 | "module": "ESNext", 13 | "moduleResolution": "Node", 14 | "resolveJsonModule": true, 15 | "isolatedModules": true, 16 | "noEmit": true, 17 | "jsx": "react-jsx" 18 | }, 19 | "include": ["src"], 20 | "references": [ 21 | { 22 | "path": "./tsconfig.node.json" 23 | } 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /client/tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "composite": true, 4 | "module": "ESNext", 5 | "moduleResolution": "node" 6 | }, 7 | "include": ["vite.config.ts"] 8 | } 9 | -------------------------------------------------------------------------------- /client/vite.config.ts: -------------------------------------------------------------------------------- 1 | import react from "@vitejs/plugin-react"; 2 | import { defineConfig } from "vite"; 3 | import svgr from "vite-plugin-svgr"; 4 | 5 | export default defineConfig({ 6 | base: "", 7 | plugins: [react(), svgr()], 8 | }); 9 | -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | PUID=${PUID:-1000} 4 | PGID=${PGID:-1000} 5 | SPOOLMAN_PORT=${SPOOLMAN_PORT:-8000} 6 | SPOOLMAN_HOST=${SPOOLMAN_HOST:-0.0.0.0} 7 | 8 | groupmod -o -g "$PGID" app 9 | usermod -o -u "$PUID" app 10 | 11 | echo User UID: $(id -u app) 12 | echo User GID: $(id -g app) 13 | 14 | echo "Starting uvicorn..." 15 | 16 | # Execute the uvicorn command with any additional arguments 17 | exec su-exec "app" uvicorn spoolman.main:app --host $SPOOLMAN_HOST --port $SPOOLMAN_PORT "$@" 18 | -------------------------------------------------------------------------------- /migrations/README.md: -------------------------------------------------------------------------------- 1 | # Migrations 2 | 3 | Migrations are used to create and update the database schema. They are run automatically every time Spoolman starts. 4 | 5 | To create a new migration, edit the tables as desired in `spoolman/database/models.py`, then start the Spoolman server to update your local sqlite database. 6 | 7 | ```bash 8 | pdm run python -m spoolman.main 9 | ``` 10 | 11 | Stop the server once it's up. 
34 | 35 | Then, let Alembic automatically create a new migration file: 36 | ```bash 37 | pdm run alembic revision -m "some title" --autogenerate 38 | ``` 39 | 40 | Review the generated migration and verify that the column changes and other operations are as intended. Format it with Black and Ruff. Commit. 41 | -------------------------------------------------------------------------------- /migrations/__init__.py: -------------------------------------------------------------------------------- 1 | """Database migrations system.""" 2 | -------------------------------------------------------------------------------- /migrations/env.py: -------------------------------------------------------------------------------- 1 | """Alembic environment file.""" 2 | 3 | import asyncio 4 | from logging.config import fileConfig 5 | 6 | from alembic import context 7 | from sqlalchemy.engine import Connection 8 | 9 | from spoolman.database.database import Database, get_connection_url 10 | from spoolman.database.models import Base 11 | 12 | config = context.config 13 | 14 | if config.config_file_name is not None: 15 | fileConfig(config.config_file_name) 16 | 17 | target_metadata = Base.metadata 18 | 19 | 20 | def run_migrations_offline() -> None: 21 | """Run migrations in 'offline' mode. 22 | 23 | This configures the context with just a URL 24 | and not an Engine, though an Engine is acceptable 25 | here as well. By skipping the Engine creation 26 | we don't even need a DBAPI to be available. 27 | 28 | Calls to context.execute() here emit the given string to the 29 | script output. 30 | 31 | """ 32 | context.configure( 33 | url=get_connection_url(), 34 | target_metadata=target_metadata, 35 | literal_binds=True, 36 | dialect_opts={"paramstyle": "named"}, 37 | render_as_batch=True, 38 | ) 39 | 40 | with context.begin_transaction(): 41 | context.run_migrations() 42 | 43 | 44 | def do_run_migrations(connection: Connection) -> None: 45 | """Run migrations in 'online' mode.""" 46 | context.configure(connection=connection, target_metadata=target_metadata) 47 | 48 | with context.begin_transaction(): 49 | context.run_migrations() 50 | 51 | 52 | async def run_async_migrations() -> None: 53 | """In this scenario we need to create an Engine and associate a connection with the context.""" 54 | db = Database(get_connection_url()) 55 | db.connect() 56 | 57 | if db.engine is None: 58 | raise RuntimeError("Engine not created.") 59 | 60 | async with db.engine.connect() as connection: 61 | await connection.run_sync(do_run_migrations) 62 | 63 | await db.engine.dispose() 64 | 65 | 66 | if context.is_offline_mode(): 67 | run_migrations_offline() 68 | else: 69 | asyncio.run(run_async_migrations()) 70 | -------------------------------------------------------------------------------- /migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message}. 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | ${imports if imports else ""} 11 | # revision identifiers, used by Alembic.
12 | revision = ${repr(up_revision)} 13 | down_revision = ${repr(down_revision)} 14 | branch_labels = ${repr(branch_labels)} 15 | depends_on = ${repr(depends_on)} 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | """Perform the downgrade.""" 25 | ${downgrades if downgrades else "pass"} 26 | -------------------------------------------------------------------------------- /migrations/versions/2023_05_27_2146-684d32cf7e4d_initial.py: -------------------------------------------------------------------------------- 1 | """initial. 2 | 3 | Revision ID: 684d32cf7e4d 4 | Create Date: 2023-05-27 21:46:24.361353 5 | """ 6 | 7 | import sqlalchemy as sa 8 | from alembic import op 9 | from sqlalchemy.engine.reflection import Inspector 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "684d32cf7e4d" 13 | down_revision = None 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | conn = op.get_bind() 21 | inspector = Inspector.from_engine(conn) # type: ignore[arg-type] 22 | tables = inspector.get_table_names() 23 | if "vendor" in tables: 24 | # If the vendor table exists, we assume that the initial migration has already been performed. 25 | return 26 | 27 | op.create_table( 28 | "vendor", 29 | sa.Column("id", sa.Integer(), nullable=False), 30 | sa.Column("registered", sa.DateTime(), nullable=False, server_default=sa.func.now()), 31 | sa.Column("name", sa.String(length=64), nullable=False), 32 | sa.Column("comment", sa.String(length=1024), nullable=True), 33 | sa.PrimaryKeyConstraint("id"), 34 | ) 35 | op.create_index(op.f("ix_vendor_id"), "vendor", ["id"], unique=False) 36 | op.create_table( 37 | "filament", 38 | sa.Column("id", sa.Integer(), nullable=False), 39 | sa.Column("registered", sa.DateTime(), nullable=False, server_default=sa.func.now()), 40 | sa.Column("name", sa.String(length=64), nullable=True), 41 | sa.Column("vendor_id", sa.Integer(), nullable=True), 42 | sa.Column("material", sa.String(length=64), nullable=True), 43 | sa.Column("price", sa.Float(), nullable=True), 44 | sa.Column("density", sa.Float(), nullable=False), 45 | sa.Column("diameter", sa.Float(), nullable=False), 46 | sa.Column("weight", sa.Float(), nullable=True, comment="The filament weight of a full spool (net weight)."), 47 | sa.Column("spool_weight", sa.Float(), nullable=True, comment="The weight of an empty spool."), 48 | sa.Column("article_number", sa.String(length=64), nullable=True), 49 | sa.Column("comment", sa.String(length=1024), nullable=True), 50 | sa.ForeignKeyConstraint( 51 | ["vendor_id"], 52 | ["vendor.id"], 53 | ), 54 | sa.PrimaryKeyConstraint("id"), 55 | ) 56 | op.create_index(op.f("ix_filament_id"), "filament", ["id"], unique=False) 57 | op.create_table( 58 | "spool", 59 | sa.Column("id", sa.Integer(), nullable=False), 60 | sa.Column("registered", sa.DateTime(), nullable=False, server_default=sa.func.now()), 61 | sa.Column("first_used", sa.DateTime(), nullable=True), 62 | sa.Column("last_used", sa.DateTime(), nullable=True), 63 | sa.Column("filament_id", sa.Integer(), nullable=False), 64 | sa.Column("used_weight", sa.Float(), nullable=False), 65 | sa.Column("location", sa.String(length=64), nullable=True), 66 | sa.Column("lot_nr", sa.String(length=64), nullable=True), 67 | sa.Column("comment", sa.String(length=1024), nullable=True), 68 | sa.ForeignKeyConstraint( 69 | ["filament_id"], 70 | ["filament.id"], 71 | ), 72 | 
sa.PrimaryKeyConstraint("id"), 73 | ) 74 | op.create_index(op.f("ix_spool_id"), "spool", ["id"], unique=False) 75 | 76 | 77 | def downgrade() -> None: 78 | """Perform the downgrade.""" 79 | op.drop_index(op.f("ix_spool_id"), table_name="spool") 80 | op.drop_table("spool") 81 | op.drop_index(op.f("ix_filament_id"), table_name="filament") 82 | op.drop_table("filament") 83 | op.drop_index(op.f("ix_vendor_id"), table_name="vendor") 84 | op.drop_table("vendor") 85 | -------------------------------------------------------------------------------- /migrations/versions/2023_05_28_2136-b47376d60c6d_add_extruder_and_bed_temperature_.py: -------------------------------------------------------------------------------- 1 | """add extruder and bed temperature override. 2 | 3 | Revision ID: b47376d60c6d 4 | Revises: 684d32cf7e4d 5 | Create Date: 2023-05-28 21:36:53.452067 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "b47376d60c6d" 13 | down_revision = "684d32cf7e4d" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | op.add_column( 21 | "filament", 22 | sa.Column("settings_extruder_temp", sa.Integer(), nullable=True, comment="Overridden extruder temperature."), 23 | ) 24 | op.add_column( 25 | "filament", 26 | sa.Column("settings_bed_temp", sa.Integer(), nullable=True, comment="Overridden bed temperature."), 27 | ) 28 | 29 | 30 | def downgrade() -> None: 31 | """Perform the downgrade.""" 32 | op.drop_column("filament", "settings_bed_temp") 33 | op.drop_column("filament", "settings_extruder_temp") 34 | -------------------------------------------------------------------------------- /migrations/versions/2023_06_01_1953-db385b808a20_add_filament_color_code.py: -------------------------------------------------------------------------------- 1 | """add filament color code. 2 | 3 | Revision ID: db385b808a20 4 | Revises: b47376d60c6d 5 | Create Date: 2023-06-01 19:53:44.440616 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "db385b808a20" 13 | down_revision = "b47376d60c6d" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | op.add_column("filament", sa.Column("color_hex", sa.String(length=6), nullable=True)) 21 | 22 | 23 | def downgrade() -> None: 24 | """Perform the downgrade.""" 25 | op.drop_column("filament", "color_hex") 26 | -------------------------------------------------------------------------------- /migrations/versions/2023_07_14_1217-92186a5f7b0f_add_spool_archived_field.py: -------------------------------------------------------------------------------- 1 | """add spool archived field. 2 | 3 | Revision ID: 92186a5f7b0f 4 | Revises: db385b808a20 5 | Create Date: 2023-07-14 12:17:13.162618 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 
12 | revision = "92186a5f7b0f" 13 | down_revision = "db385b808a20" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | op.add_column("spool", sa.Column("archived", sa.Boolean(), nullable=True)) 21 | 22 | 23 | def downgrade() -> None: 24 | """Perform the downgrade.""" 25 | op.drop_column("spool", "archived") 26 | -------------------------------------------------------------------------------- /migrations/versions/2023_08_12_2121-92793c8a937c_color_hex_alpha.py: -------------------------------------------------------------------------------- 1 | """color_hex alpha. 2 | 3 | Revision ID: 92793c8a937c 4 | Revises: 92186a5f7b0f 5 | Create Date: 2023-08-12 21:21:08.536216 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "92793c8a937c" 13 | down_revision = "92186a5f7b0f" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | with op.batch_alter_table("filament") as batch_op: 21 | batch_op.alter_column("color_hex", type_=sa.String(length=8), existing_nullable=True) 22 | 23 | 24 | def downgrade() -> None: 25 | """Perform the downgrade.""" 26 | with op.batch_alter_table("filament") as batch_op: 27 | batch_op.alter_column("color_hex", type_=sa.String(length=6), existing_nullable=True) 28 | -------------------------------------------------------------------------------- /migrations/versions/2023_12_30_0839-b82cd9e2aa6f_added_spool_prices.py: -------------------------------------------------------------------------------- 1 | """Added spool prices. 2 | 3 | Revision ID: b82cd9e2aa6f 4 | Revises: 92793c8a937c 5 | Create Date: 2023-12-30 08:39:48.430846 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "b82cd9e2aa6f" 13 | down_revision = "92793c8a937c" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column("spool", sa.Column("price", sa.Float(), nullable=True)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade() -> None: 26 | """Perform the downgrade.""" 27 | # ### commands auto generated by Alembic - please adjust! ### 28 | op.drop_column("spool", "price") 29 | # ### end Alembic commands ### 30 | -------------------------------------------------------------------------------- /migrations/versions/2024_01_03_1346-ccbb17aeda7c_added_settings_table.py: -------------------------------------------------------------------------------- 1 | """Added Settings table. 2 | 3 | Revision ID: ccbb17aeda7c 4 | Revises: b82cd9e2aa6f 5 | Create Date: 2024-01-03 13:46:41.362341 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 
12 | revision = "ccbb17aeda7c" 13 | down_revision = "b82cd9e2aa6f" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | op.create_table( 21 | "setting", 22 | sa.Column("key", sa.String(length=64), nullable=False), 23 | sa.Column("value", sa.Text(), nullable=False), 24 | sa.Column("last_updated", sa.DateTime(), nullable=False), 25 | sa.PrimaryKeyConstraint("key"), 26 | ) 27 | op.create_index(op.f("ix_setting_key"), "setting", ["key"], unique=False) 28 | 29 | 30 | def downgrade() -> None: 31 | """Perform the downgrade.""" 32 | op.drop_index(op.f("ix_setting_key"), table_name="setting") 33 | op.drop_table("setting") 34 | -------------------------------------------------------------------------------- /migrations/versions/2024_01_04_2209-b8881bdb716c_added_extra_fields.py: -------------------------------------------------------------------------------- 1 | """Added extra fields. 2 | 3 | Revision ID: b8881bdb716c 4 | Revises: ccbb17aeda7c 5 | Create Date: 2024-01-04 22:09:34.417527 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "b8881bdb716c" 13 | down_revision = "ccbb17aeda7c" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | op.create_table( 21 | "vendor_field", 22 | sa.Column("vendor_id", sa.Integer(), nullable=False), 23 | sa.Column("key", sa.String(length=64), nullable=False), 24 | sa.Column("value", sa.Text(), nullable=False), 25 | sa.ForeignKeyConstraint( 26 | ["vendor_id"], 27 | ["vendor.id"], 28 | ), 29 | sa.PrimaryKeyConstraint("vendor_id", "key"), 30 | ) 31 | op.create_index(op.f("ix_vendor_field_key"), "vendor_field", ["key"], unique=False) 32 | op.create_index(op.f("ix_vendor_field_vendor_id"), "vendor_field", ["vendor_id"], unique=False) 33 | 34 | op.create_table( 35 | "filament_field", 36 | sa.Column("filament_id", sa.Integer(), nullable=False), 37 | sa.Column("key", sa.String(length=64), nullable=False), 38 | sa.Column("value", sa.Text(), nullable=False), 39 | sa.ForeignKeyConstraint( 40 | ["filament_id"], 41 | ["filament.id"], 42 | ), 43 | sa.PrimaryKeyConstraint("filament_id", "key"), 44 | ) 45 | op.create_index(op.f("ix_filament_field_filament_id"), "filament_field", ["filament_id"], unique=False) 46 | op.create_index(op.f("ix_filament_field_key"), "filament_field", ["key"], unique=False) 47 | 48 | op.create_table( 49 | "spool_field", 50 | sa.Column("spool_id", sa.Integer(), nullable=False), 51 | sa.Column("key", sa.String(length=64), nullable=False), 52 | sa.Column("value", sa.Text(), nullable=False), 53 | sa.ForeignKeyConstraint( 54 | ["spool_id"], 55 | ["spool.id"], 56 | ), 57 | sa.PrimaryKeyConstraint("spool_id", "key"), 58 | ) 59 | op.create_index(op.f("ix_spool_field_key"), "spool_field", ["key"], unique=False) 60 | op.create_index(op.f("ix_spool_field_spool_id"), "spool_field", ["spool_id"], unique=False) 61 | 62 | 63 | def downgrade() -> None: 64 | """Perform the downgrade.""" 65 | op.drop_index(op.f("ix_spool_field_spool_id"), table_name="spool_field") 66 | op.drop_index(op.f("ix_spool_field_key"), table_name="spool_field") 67 | op.drop_table("spool_field") 68 | 69 | op.drop_index(op.f("ix_filament_field_key"), table_name="filament_field") 70 | op.drop_index(op.f("ix_filament_field_filament_id"), table_name="filament_field") 71 | op.drop_table("filament_field") 72 | 73 | op.drop_index(op.f("ix_vendor_field_vendor_id"), 
table_name="vendor_field") 74 | op.drop_index(op.f("ix_vendor_field_key"), table_name="vendor_field") 75 | op.drop_table("vendor_field") 76 | -------------------------------------------------------------------------------- /migrations/versions/2024_03_26_0948-aafcd7fb0e84_spool_weights.py: -------------------------------------------------------------------------------- 1 | """spool weights. 2 | 3 | Revision ID: aafcd7fb0e84 4 | Revises: b8881bdb716c 5 | Create Date: 2024-03-26 09:48:09.930022 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "aafcd7fb0e84" 13 | down_revision = "b8881bdb716c" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column( 22 | "spool", 23 | sa.Column( 24 | "initial_weight", 25 | sa.Float(), 26 | nullable=True, 27 | comment="The initial weight of the filament on the spool (net weight).", 28 | ), 29 | ) 30 | op.add_column( 31 | "spool", 32 | sa.Column("spool_weight", sa.Float(), nullable=True, comment="The weight of the empty spool (tare weight)."), 33 | ) 34 | # ### end Alembic commands ### 35 | 36 | 37 | def downgrade() -> None: 38 | """Perform the downgrade.""" 39 | # ### commands auto generated by Alembic - please adjust! ### 40 | op.drop_column("spool", "spool_weight") 41 | op.drop_column("spool", "initial_weight") 42 | # ### end Alembic commands ### 43 | -------------------------------------------------------------------------------- /migrations/versions/2024_03_26_1349-304a32906234_spool_weight_population.py: -------------------------------------------------------------------------------- 1 | """spool weight population. 2 | 3 | Revision ID: 304a32906234 4 | Revises: aafcd7fb0e84 5 | Create Date: 2024-03-26 13:49:26.594399 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 
12 | revision = "304a32906234" 13 | down_revision = "aafcd7fb0e84" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Pre-populate the spool weights.""" 20 | """This must be done in a separate migration because""" 21 | """of cockroachdb's execution of alembic migrations""" 22 | filament = sa.Table( 23 | "filament", 24 | sa.MetaData(), 25 | sa.Column("id", sa.Integer, primary_key=True), 26 | sa.Column("weight", sa.Float(), nullable=True), 27 | sa.Column("spool_weight", sa.Float(), nullable=True), 28 | ) 29 | 30 | spool = sa.Table( 31 | "spool", 32 | sa.MetaData(), 33 | sa.Column("id", sa.Integer, primary_key=True), 34 | sa.Column("filament_id", sa.Integer), 35 | sa.Column( 36 | "initial_weight", 37 | sa.Float(), 38 | nullable=False, 39 | ), 40 | sa.Column( 41 | "spool_weight", 42 | sa.Float(), 43 | nullable=False, 44 | ), 45 | ) 46 | 47 | initial_weight = ( 48 | sa.select((filament.c.weight).label("initial_weight")) 49 | .where(filament.c.id == spool.c.filament_id) 50 | .scalar_subquery() 51 | ) 52 | spool_weight = sa.select(filament.c.spool_weight).where(filament.c.id == spool.c.filament_id).scalar_subquery() 53 | 54 | set_initial_weight = sa.update(spool).values(initial_weight=initial_weight) 55 | op.execute(set_initial_weight) 56 | 57 | set_spool_weight = sa.update(spool).values(spool_weight=spool_weight) 58 | op.execute(set_spool_weight) 59 | 60 | 61 | def downgrade() -> None: 62 | """Perform the downgrade.""" 63 | -------------------------------------------------------------------------------- /migrations/versions/2024_03_26_1507-5f069e51bd89_vendor_empty_spool_weight.py: -------------------------------------------------------------------------------- 1 | """vendor empty spool weight. 2 | 3 | Revision ID: 5f069e51bd89 4 | Revises: 304a32906234 5 | Create Date: 2024-03-26 15:07:18.366290 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "5f069e51bd89" 13 | down_revision = "304a32906234" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column( 22 | "vendor", 23 | sa.Column("empty_spool_weight", sa.Float(), nullable=True, comment="The weight of an empty spool."), 24 | ) 25 | # ### end Alembic commands ### 26 | 27 | 28 | def downgrade() -> None: 29 | """Perform the downgrade.""" 30 | # ### commands auto generated by Alembic - please adjust! ### 31 | op.drop_column("vendor", "empty_spool_weight") 32 | # ### end Alembic commands ### 33 | -------------------------------------------------------------------------------- /migrations/versions/2024_05_12_1930-395d560284b3_added_external_id.py: -------------------------------------------------------------------------------- 1 | """Added external ID. 2 | 3 | Revision ID: 395d560284b3 4 | Revises: 5f069e51bd89 5 | Create Date: 2024-05-12 19:30:17.261396 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "395d560284b3" 13 | down_revision = "5f069e51bd89" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.add_column("filament", sa.Column("external_id", sa.String(length=256), nullable=True)) 22 | op.add_column("vendor", sa.Column("external_id", sa.String(length=256), nullable=True)) 23 | # ### end Alembic commands ### 24 | 25 | 26 | def downgrade() -> None: 27 | """Perform the downgrade.""" 28 | # ### commands auto generated by Alembic - please adjust! ### 29 | op.drop_column("vendor", "external_id") 30 | op.drop_column("filament", "external_id") 31 | # ### end Alembic commands ### 32 | -------------------------------------------------------------------------------- /migrations/versions/2024_05_28_1846-415a8f855e14_multi_colors.py: -------------------------------------------------------------------------------- 1 | """multi_colors. 2 | 3 | Revision ID: 415a8f855e14 4 | Revises: 395d560284b3 5 | Create Date: 2024-05-28 18:46:12.935449 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "415a8f855e14" 13 | down_revision = "395d560284b3" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Perform the upgrade.""" 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column("filament", sa.Column("multi_color_hexes", sa.String(length=128), nullable=True)) 22 | op.add_column("filament", sa.Column("multi_color_direction", sa.String(length=16), nullable=True)) 23 | # ### end Alembic commands ### 24 | 25 | 26 | def downgrade() -> None: 27 | """Perform the downgrade.""" 28 | # ### commands auto generated by Alembic - please adjust! ### 29 | op.drop_column("filament", "multi_color_direction") 30 | op.drop_column("filament", "multi_color_hexes") 31 | # ### end Alembic commands ### 32 | -------------------------------------------------------------------------------- /migrations/versions/__init__.py: -------------------------------------------------------------------------------- 1 | """Database migration versions.""" 2 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "spoolman" 3 | version = "0.22.1" 4 | description = "A web service that keeps track of 3D printing spools." 
5 | authors = [ 6 | { name = "Donkie", email = "daniel.cf.hultgren@gmail.com" }, 7 | ] 8 | dependencies = [ 9 | "uvicorn~=0.34", 10 | "httptools>=0.6.4; platform_machine != \"armv7l\"", 11 | "uvloop!=0.15.0,!=0.15.1,>=0.21.0; platform_machine != \"armv7l\" and sys_platform != \"win32\" and (sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\")", 12 | "fastapi~=0.115", 13 | "SQLAlchemy[aiomysql,aiosqlite,asyncio,postgresql_asyncpg]~=2.0", 14 | "pydantic~=2.10", 15 | "platformdirs~=4.3", 16 | "alembic~=1.15", 17 | "scheduler~=0.8", 18 | "sqlalchemy-cockroachdb~=2.0", 19 | "asyncpg~=0.30", 20 | "psycopg2-binary~=2.9", 21 | "setuptools~=76.0", 22 | "WebSockets~=15.0", 23 | "prometheus-client~=0.21", 24 | "httpx~=0.28", 25 | "hishel~=0.1", 26 | ] 27 | requires-python = ">=3.9,<=3.12" 28 | 29 | [project.license] 30 | text = "MIT" 31 | 32 | [tool.pdm.dev-dependencies] 33 | dev = [ 34 | "ruff==0.11.0", 35 | "black~=25.1.0", 36 | "pre-commit~=4.1.0", 37 | "pytest~=8.3.5", 38 | "pytest-asyncio~=0.25.3", 39 | "httpx~=0.28.1", 40 | ] 41 | 42 | [tool.pdm.scripts.docs] 43 | call = "spoolman.docs:generate_docs" 44 | 45 | [tool.pdm.scripts.bump] 46 | call = "spoolman.bump:bump" 47 | 48 | [tool.pdm.scripts.app] 49 | cmd = "uvicorn spoolman.main:app" 50 | 51 | [tool.pdm.scripts.itest] 52 | cmd = "python tests_integration/run.py" 53 | 54 | [tool.ruff] 55 | line-length = 120 56 | target-version = "py39" 57 | 58 | [tool.ruff.lint] 59 | select = [ 60 | "ALL", 61 | ] 62 | ignore = [ 63 | "A003", 64 | "D101", 65 | "D104", 66 | "D203", 67 | "D213", 68 | "D406", 69 | "D407", 70 | "FA100", 71 | "FIX002", 72 | "S104", 73 | "TRY201", 74 | "TRY003", 75 | "EM101", 76 | "EM102", 77 | "DTZ003", 78 | "PLR0913", 79 | "SIM108", 80 | "TD002", 81 | "TD003", 82 | ] 83 | 84 | [tool.ruff.lint.per-file-ignores] 85 | "tests*/*" = [ 86 | "ANN201", 87 | "ASYNC210", 88 | "S101", 89 | "PLR2004", 90 | "D103", 91 | "TID252", 92 | ] 93 | "migrations/versions/*" = [ 94 | "N999", 95 | ] 96 | 97 | [tool.black] 98 | line-length = 120 99 | target-version = [ 100 | "py39", 101 | ] 102 | 103 | [build-system] 104 | requires = [ 105 | "pdm-backend", 106 | ] 107 | build-backend = "pdm.backend" 108 | -------------------------------------------------------------------------------- /scripts/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ANSI color codes 4 | GREEN='\033[0;32m' 5 | ORANGE='\033[0;33m' 6 | NC='\033[0m' # No Color 7 | 8 | # CD to project root if we're in the scripts dir 9 | current_dir=$(pwd) 10 | if [ "$(basename "$current_dir")" = "scripts" ]; then 11 | cd .. 12 | fi 13 | 14 | # 15 | # Verify that the installation has been done, by checking for the existence of the venv folder 16 | # 17 | if [ ! -d ".venv" ]; then 18 | echo -e "${ORANGE}.venv folder not found. Please run the install script first.${NC}" 19 | exit 1 20 | fi 21 | 22 | # 23 | # Add python bin dir to PATH if needed 24 | # 25 | user_python_bin_dir=$(python3 -m site --user-base)/bin 26 | if [[ ! "$PATH" =~ "$user_python_bin_dir" ]]; then 27 | echo -e "${ORANGE}WARNING: $user_python_bin_dir is not in PATH, this will make it difficult to run PDM commands.
Temporarily adding $user_python_bin_dir to PATH...${NC}" 28 | echo -e "${ORANGE}To make this permanent, add the following line to your .bashrc or .zshrc file:${NC}" 29 | echo -e "${ORANGE}export PATH=$user_python_bin_dir:\$PATH${NC}" 30 | export PATH=$user_python_bin_dir:$PATH 31 | fi 32 | 33 | # 34 | # Activate .venv 35 | # 36 | echo -e "${GREEN}Activating .venv...${NC}" 37 | source .venv/bin/activate 38 | 39 | # 40 | # Load envvars from .env file 41 | # 42 | set -o allexport 43 | source .env 44 | set +o allexport 45 | 46 | # 47 | # Start Spoolman using pdm run 48 | # 49 | echo -e "${GREEN}Starting Spoolman...${NC}" 50 | uvicorn spoolman.main:app --host $SPOOLMAN_HOST --port $SPOOLMAN_PORT 51 | -------------------------------------------------------------------------------- /spoolman/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/spoolman/__init__.py -------------------------------------------------------------------------------- /spoolman/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/spoolman/api/__init__.py -------------------------------------------------------------------------------- /spoolman/api/v1/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/spoolman/api/v1/__init__.py -------------------------------------------------------------------------------- /spoolman/api/v1/export.py: -------------------------------------------------------------------------------- 1 | """Functions for exporting data.""" 2 | 3 | import io 4 | from collections.abc import Iterable 5 | from enum import Enum 6 | from typing import Annotated 7 | 8 | from fastapi import APIRouter, Depends, Response 9 | from sqlalchemy.ext.asyncio import AsyncSession 10 | 11 | from spoolman.database import filament, spool, vendor 12 | from spoolman.database.database import get_db_session 13 | from spoolman.database.models import Base 14 | from spoolman.export import dump_as_csv, dump_as_json 15 | 16 | # ruff: noqa: D103,B008 17 | router = APIRouter( 18 | prefix="/export", 19 | tags=["export"], 20 | ) 21 | 22 | 23 | class ExportFormat(Enum): 24 | CSV = "csv" 25 | JSON = "json" 26 | 27 | 28 | @router.get( 29 | "/spools", 30 | name="Export spools", 31 | description="Export the list of spools in various formats. Filament and vendor data is included.", 32 | ) 33 | async def export_spools( 34 | *, 35 | db: Annotated[AsyncSession, Depends(get_db_session)], 36 | fmt: ExportFormat, 37 | ) -> Response: 38 | 39 | all_spools, _ = await spool.find(db=db) 40 | return await _export(all_spools, fmt) 41 | 42 | 43 | @router.get( 44 | "/filaments", 45 | name="Export filaments", 46 | description="Export the list of filaments in various formats. 
Vendor data is included.", 47 | ) 48 | async def export_filaments( 49 | *, 50 | db: Annotated[AsyncSession, Depends(get_db_session)], 51 | fmt: ExportFormat, 52 | ) -> Response: 53 | all_filaments, _ = await filament.find(db=db) 54 | return await _export(all_filaments, fmt) 55 | 56 | 57 | @router.get( 58 | "/vendors", 59 | name="Export vendors", 60 | description="Export the list of vendors in various formats.", 61 | ) 62 | async def export_vendors( 63 | *, 64 | db: Annotated[AsyncSession, Depends(get_db_session)], 65 | fmt: ExportFormat, 66 | ) -> Response: 67 | all_vendors, _ = await vendor.find(db=db) 68 | return await _export(all_vendors, fmt) 69 | 70 | 71 | async def _export(objects: Iterable[Base], fmt: ExportFormat) -> Response: 72 | """Export the objects in various formats.""" 73 | buffer = io.StringIO() 74 | media_type = "" 75 | 76 | if fmt == ExportFormat.CSV: 77 | media_type = "text/csv" 78 | await dump_as_csv(objects, buffer) 79 | elif fmt == ExportFormat.JSON: 80 | media_type = "application/json" 81 | await dump_as_json(objects, buffer) 82 | else: 83 | raise ValueError(f"Unknown export format: {fmt}") 84 | 85 | return Response(content=buffer.getvalue(), media_type=media_type) 86 | -------------------------------------------------------------------------------- /spoolman/api/v1/externaldb.py: -------------------------------------------------------------------------------- 1 | """External database API.""" 2 | 3 | import logging 4 | 5 | from fastapi import APIRouter 6 | from fastapi.responses import FileResponse 7 | 8 | from spoolman.externaldb import ExternalFilament, ExternalMaterial, get_filaments_file, get_materials_file 9 | 10 | router = APIRouter( 11 | prefix="/external", 12 | tags=["external"], 13 | ) 14 | 15 | # ruff: noqa: D103,B008 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | @router.get( 21 | "/filament", 22 | name="Get all external filaments", 23 | response_model_exclude_none=True, 24 | response_model=list[ExternalFilament], 25 | ) 26 | async def filaments() -> FileResponse: 27 | """Get all external filaments.""" 28 | return FileResponse(path=get_filaments_file(), media_type="application/json") 29 | 30 | 31 | @router.get( 32 | "/material", 33 | name="Get all external materials", 34 | response_model_exclude_none=True, 35 | response_model=list[ExternalMaterial], 36 | ) 37 | async def materials() -> FileResponse: 38 | """Get all external materials.""" 39 | return FileResponse(path=get_materials_file(), media_type="application/json") 40 | -------------------------------------------------------------------------------- /spoolman/api/v1/field.py: -------------------------------------------------------------------------------- 1 | """Vendor related endpoints.""" 2 | 3 | import logging 4 | from typing import Annotated, Union 5 | 6 | from fastapi import APIRouter, Depends, Path 7 | from fastapi.responses import JSONResponse 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | 10 | from spoolman.api.v1.models import Message 11 | from spoolman.database.database import get_db_session 12 | from spoolman.exceptions import ItemNotFoundError 13 | from spoolman.extra_fields import ( 14 | EntityType, 15 | ExtraField, 16 | ExtraFieldParameters, 17 | add_or_update_extra_field, 18 | delete_extra_field, 19 | get_extra_fields, 20 | ) 21 | 22 | router = APIRouter( 23 | prefix="/field", 24 | tags=["field"], 25 | ) 26 | 27 | # ruff: noqa: D103,B008 28 | 29 | logger = logging.getLogger(__name__) 30 | 31 | 32 | @router.get( 33 | "/{entity_type}", 34 | name="Get extra fields", 
35 | description="Get all extra fields for a specific entity type.", 36 | response_model_exclude_none=True, 37 | ) 38 | async def get( 39 | db: Annotated[AsyncSession, Depends(get_db_session)], 40 | entity_type: Annotated[EntityType, Path(description="Entity type this field is for")], 41 | ) -> list[ExtraField]: 42 | return await get_extra_fields(db, entity_type) 43 | 44 | 45 | @router.post( 46 | "/{entity_type}/{key}", 47 | name="Add or update extra field", 48 | description=( 49 | "Add or update an extra field for a specific entity type. " 50 | "Returns the full list of extra fields for the entity type." 51 | ), 52 | response_model_exclude_none=True, 53 | response_model=list[ExtraField], 54 | responses={400: {"model": Message}}, 55 | ) 56 | async def update( 57 | db: Annotated[AsyncSession, Depends(get_db_session)], 58 | entity_type: Annotated[EntityType, Path(description="Entity type this field is for")], 59 | key: Annotated[str, Path(min_length=1, max_length=64, regex="^[a-z0-9_]+$")], 60 | body: ExtraFieldParameters, 61 | ) -> Union[list[ExtraField], JSONResponse]: 62 | dict_body = body.model_dump() 63 | dict_body["key"] = key 64 | dict_body["entity_type"] = entity_type 65 | body_with_key = ExtraField.model_validate(dict_body) 66 | 67 | try: 68 | await add_or_update_extra_field(db, entity_type, body_with_key) 69 | except ValueError as e: 70 | return JSONResponse(status_code=400, content=Message(message=str(e)).dict()) 71 | 72 | return await get_extra_fields(db, entity_type) 73 | 74 | 75 | @router.delete( 76 | "/{entity_type}/{key}", 77 | name="Delete extra field", 78 | description=( 79 | "Delete an extra field for a specific entity type. " 80 | "Returns the full list of extra fields for the entity type." 81 | ), 82 | response_model_exclude_none=True, 83 | response_model=list[ExtraField], 84 | responses={404: {"model": Message}}, 85 | ) 86 | async def delete( 87 | db: Annotated[AsyncSession, Depends(get_db_session)], 88 | entity_type: Annotated[EntityType, Path(description="Entity type this field is for")], 89 | key: Annotated[str, Path(min_length=1, max_length=64, regex="^[a-z0-9_]+$")], 90 | ) -> Union[list[ExtraField], JSONResponse]: 91 | try: 92 | await delete_extra_field(db, entity_type, key) 93 | except ItemNotFoundError: 94 | return JSONResponse( 95 | status_code=404, 96 | content=Message( 97 | message=f"Extra field with key {key} does not exist for entity type {entity_type.name}", 98 | ).dict(), 99 | ) 100 | 101 | return await get_extra_fields(db, entity_type) 102 | -------------------------------------------------------------------------------- /spoolman/api/v1/router.py: -------------------------------------------------------------------------------- 1 | """Router setup for the v1 version of the API.""" 2 | 3 | # ruff: noqa: D103 4 | 5 | import asyncio 6 | import logging 7 | 8 | from fastapi import FastAPI, WebSocket, WebSocketDisconnect 9 | from fastapi.responses import JSONResponse 10 | from starlette.requests import Request 11 | from starlette.responses import Response 12 | 13 | from spoolman import env 14 | from spoolman.database.database import backup_global_db 15 | from spoolman.exceptions import ItemNotFoundError 16 | from spoolman.ws import websocket_manager 17 | 18 | from . import export, externaldb, field, filament, models, other, setting, spool, vendor 19 | 20 | logger = logging.getLogger(__name__) 21 | 22 | app = FastAPI( 23 | title="Spoolman REST API v1", 24 | version="1.0.0", 25 | description=""" 26 | REST API for Spoolman. 
27 | 28 | The API is served on the path `/api/v1/`. 29 | 30 | Some endpoints also serve a websocket on the same path. The websocket is used to listen for changes to the data 31 | that the endpoint serves. The websocket messages are JSON objects. Additionally, there is a root-level websocket 32 | endpoint that listens for changes to any data in the database. 33 | """, 34 | ) 35 | 36 | 37 | @app.exception_handler(ItemNotFoundError) 38 | async def itemnotfounderror_exception_handler(_request: Request, exc: ItemNotFoundError) -> Response: 39 | logger.debug(exc, exc_info=True) 40 | return JSONResponse( 41 | status_code=404, 42 | content={"message": exc.args[0]}, 43 | ) 44 | 45 | 46 | # Add a general info endpoint 47 | @app.get("/info") 48 | async def info() -> models.Info: 49 | """Return general info about the API.""" 50 | return models.Info( 51 | version=env.get_version(), 52 | debug_mode=env.is_debug_mode(), 53 | automatic_backups=env.is_automatic_backup_enabled(), 54 | data_dir=str(env.get_data_dir().resolve()), 55 | logs_dir=str(env.get_logs_dir().resolve()), 56 | backups_dir=str(env.get_backups_dir().resolve()), 57 | db_type=str(env.get_database_type() or "sqlite"), 58 | git_commit=env.get_commit_hash(), 59 | build_date=env.get_build_date(), 60 | ) 61 | 62 | 63 | # Add health check endpoint 64 | @app.get("/health") 65 | async def health() -> models.HealthCheck: 66 | """Return a health check.""" 67 | return models.HealthCheck(status="healthy") 68 | 69 | 70 | # Add endpoint for triggering a db backup 71 | @app.post( 72 | "/backup", 73 | description="Trigger a database backup. Only applicable for SQLite databases.", 74 | response_model=models.BackupResponse, 75 | responses={500: {"model": models.Message}}, 76 | ) 77 | async def backup(): # noqa: ANN201 78 | """Trigger a database backup.""" 79 | path = await backup_global_db() 80 | if path is None: 81 | return JSONResponse( 82 | status_code=500, 83 | content={"message": "Backup failed. See server logs for more information."}, 84 | ) 85 | return models.BackupResponse(path=str(path)) 86 | 87 | 88 | @app.websocket( 89 | "/", 90 | name="Listen to any changes", 91 | ) 92 | async def notify( 93 | websocket: WebSocket, 94 | ) -> None: 95 | await websocket.accept() 96 | websocket_manager.connect((), websocket) 97 | try: 98 | while True: 99 | await asyncio.sleep(0.5) 100 | if await websocket.receive_text(): 101 | await websocket.send_json({"status": "healthy"}) 102 | except WebSocketDisconnect: 103 | websocket_manager.disconnect((), websocket) 104 | 105 | 106 | # Add routers 107 | app.include_router(filament.router) 108 | app.include_router(spool.router) 109 | app.include_router(vendor.router) 110 | app.include_router(setting.router) 111 | app.include_router(field.router) 112 | app.include_router(other.router) 113 | app.include_router(externaldb.router) 114 | app.include_router(export.router) 115 | -------------------------------------------------------------------------------- /spoolman/bump.py: -------------------------------------------------------------------------------- 1 | """A python script that bumps the version number of a project.""" 2 | 3 | # ruff: noqa: PLR2004, T201, S603, S607 4 | 5 | import json 6 | import os 7 | import re 8 | import subprocess 9 | import sys 10 | from pathlib import Path 11 | 12 | 13 | def bump() -> None: 14 | """Bump the version number of the project.""" 15 | project_root = Path(__file__).parent.parent 16 | 17 | if len(sys.argv) < 2: 18 | print("Please specify a bump type, e.g. 
major, minor, micro.") 19 | sys.exit(1) 20 | 21 | if subprocess.run(["git", "diff", "--quiet", "pyproject.toml"], cwd=project_root, check=False).returncode != 0: 22 | print("The pyproject.toml file is dirty, please commit your changes before bumping the version number.") 23 | sys.exit(1) 24 | 25 | if subprocess.run(["git", "diff", "--cached", "--quiet"], cwd=project_root, check=False).returncode != 0: 26 | print("There are staged changes, please commit them before bumping the version number.") 27 | sys.exit(1) 28 | 29 | if subprocess.run(["pip", "show", "pdm-bump"], cwd=project_root, capture_output=True, check=False).returncode != 0: 30 | print("Please install pdm-bump using pip.") 31 | sys.exit(1) 32 | 33 | # Bump the version number, read the pdm bump output to determine the new version number 34 | bump_type = sys.argv[1] 35 | bump_output = subprocess.run(["pdm", "bump", bump_type], cwd=project_root, capture_output=True, check=True) 36 | # Example output: "Performing increment of version: 0.7.0 -> 0.8.0\nSome more text" 37 | # Parse using regex 38 | new_version_match = re.search(r"-> ([A-Za-z0-9\.\-]+)", bump_output.stdout.decode()) 39 | if new_version_match is None: 40 | print("Failed to parse pdm bump output, did it fail?") 41 | sys.exit(1) 42 | new_version = new_version_match.group(1) 43 | 44 | # Update the version number in the node project 45 | with Path("client", "package.json").open("r") as f: 46 | node_package = json.load(f) 47 | node_package["version"] = new_version 48 | with Path("client", "package.json").open("w") as f: 49 | json.dump(node_package, f, indent=2) 50 | 51 | # Run npm install to update the lock file with new version 52 | # On windows, shell=True is required for npm to be found 53 | if os.name == "nt": 54 | subprocess.run(["npm", "install"], cwd=project_root.joinpath("client"), check=True, shell=True) # noqa: S602 55 | else: 56 | subprocess.run(["npm", "install"], cwd=project_root.joinpath("client"), check=True) 57 | 58 | # Stage the changed files 59 | subprocess.run( 60 | ["git", "add", "pyproject.toml", "client/package.json", "client/package-lock.json"], 61 | cwd=project_root, 62 | check=True, 63 | ) 64 | 65 | # Commit the changes 66 | subprocess.run(["git", "commit", "-m", f"Bump version to {new_version}"], cwd=project_root, check=True) 67 | 68 | # Tag the commit, prefix with "v" 69 | subprocess.run(["git", "tag", f"v{new_version}"], cwd=project_root, check=True) 70 | 71 | # Notify user that the process is complete 72 | print(f"Bumped version to {new_version}.") 73 | -------------------------------------------------------------------------------- /spoolman/client.py: -------------------------------------------------------------------------------- 1 | """Functions for providing the client interface.""" 2 | 3 | # ruff: noqa: PTH118 4 | 5 | import logging 6 | import os 7 | from collections.abc import MutableMapping 8 | from pathlib import Path 9 | from typing import Any, Union 10 | 11 | from fastapi.staticfiles import StaticFiles 12 | from starlette.datastructures import Headers 13 | from starlette.responses import FileResponse, Response 14 | from starlette.staticfiles import NotModifiedResponse 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | PathLike = Union[str, "os.PathLike[str]"] 19 | Scope = MutableMapping[str, Any] 20 | 21 | 22 | class SinglePageApplication(StaticFiles): 23 | """Serve a single page application.""" 24 | 25 | def __init__(self, directory: str, base_path: str) -> None: 26 | """Construct.""" 27 | super().__init__(directory=directory, 
packages=None, html=True, check_dir=True) 28 | self.base_path = base_path.removeprefix("/") 29 | 30 | self.load_and_tweak_index_file() 31 | 32 | def load_and_tweak_index_file(self) -> None: 33 | """Load index.html and tweak it by replacing all asset paths.""" 34 | # Open index.html located in self.directory/index.html 35 | if not self.directory: 36 | return 37 | 38 | with (Path(self.directory) / "index.html").open() as f: 39 | html = f.read() 40 | 41 | # Replace all paths that start with "./" with f"/{self.base_path}" 42 | base_path = "/" if len(self.base_path.strip()) == 0 else f"/{self.base_path}/" 43 | self.html = html.replace('"./', f'"{base_path}') 44 | 45 | def file_response( 46 | self, 47 | full_path: PathLike, 48 | stat_result: os.stat_result, 49 | scope: Scope, 50 | status_code: int = 200, 51 | ) -> Response: 52 | """Overridden default file_response. 53 | 54 | Works the same way, but if the client requests any index.html, we will return our tweaked index.html. 55 | The tweaked index.html has all asset paths updated with the base path. 56 | """ 57 | method = scope["method"] 58 | request_headers = Headers(scope=scope) 59 | 60 | # If full_path points to an index.html, return our tweaked index.html 61 | if Path(full_path).name == "index.html": 62 | return Response(self.html, status_code=status_code, media_type="text/html") 63 | 64 | response = FileResponse(full_path, status_code=status_code, stat_result=stat_result, method=method) 65 | if self.is_not_modified(response.headers, request_headers): 66 | return NotModifiedResponse(response.headers) 67 | return response 68 | 69 | def lookup_path(self, path: str) -> tuple[str, Union[os.stat_result, None]]: 70 | """Return index.html if the requested file cannot be found.""" 71 | path = path.removeprefix(self.base_path).removeprefix("/") 72 | 73 | full_path, stat_result = super().lookup_path(path) 74 | 75 | if stat_result is None: 76 | ext = Path(path).suffix 77 | # Check if user is looking for some specific non-document file 78 | if len(ext) > 1 and ext != ".html": 79 | # If so, return 404 80 | return ("", None) 81 | # Otherwise, they did look for a document, lead them to index.html 82 | return super().lookup_path("index.html") 83 | 84 | return (full_path, stat_result) 85 | -------------------------------------------------------------------------------- /spoolman/database/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/spoolman/database/__init__.py -------------------------------------------------------------------------------- /spoolman/database/setting.py: -------------------------------------------------------------------------------- 1 | """Helper functions for interacting with setting database objects.""" 2 | 3 | from datetime import datetime 4 | from typing import Optional 5 | 6 | from sqlalchemy import select 7 | from sqlalchemy.ext.asyncio import AsyncSession 8 | 9 | from spoolman.api.v1.models import EventType, SettingEvent, SettingKV 10 | from spoolman.database import models 11 | from spoolman.exceptions import ItemNotFoundError 12 | from spoolman.settings import SettingDefinition 13 | from spoolman.ws import websocket_manager 14 | 15 | SETTING_MAX_LENGTH = 2**16 - 1 16 | 17 | 18 | async def update( 19 | *, 20 | db: AsyncSession, 21 | definition: SettingDefinition, 22 | value: str, 23 | ) -> None: 24 | """Set a setting in the database.""" 25 | if len(value) > SETTING_MAX_LENGTH: 26 | raise
ValueError(f"Setting value is too big, max size is {SETTING_MAX_LENGTH} characters.") 27 | 28 | setting = models.Setting( 29 | key=definition.key, 30 | value=value, 31 | last_updated=datetime.utcnow().replace(microsecond=0), 32 | ) 33 | await db.merge(setting) 34 | await setting_changed(definition, value, EventType.UPDATED) 35 | 36 | 37 | async def get(db: AsyncSession, definition: SettingDefinition) -> models.Setting: 38 | """Get a specific setting from the database.""" 39 | setting = await db.get(models.Setting, definition.key) 40 | if setting is None: 41 | raise ItemNotFoundError(f"Setting with key {definition.key} has not been set.") 42 | return setting 43 | 44 | 45 | async def get_all(db: AsyncSession) -> list[models.Setting]: 46 | """Get all set settings in the database.""" 47 | stmt = select(models.Setting) 48 | rows = await db.execute(stmt) 49 | return list(rows.scalars().all()) 50 | 51 | 52 | async def delete(db: AsyncSession, definition: SettingDefinition) -> None: 53 | """Delete a setting from the database.""" 54 | setting = await get(db, definition) 55 | await db.delete(setting) 56 | await setting_changed(definition, None, EventType.DELETED) 57 | 58 | 59 | async def setting_changed(definition: SettingDefinition, set_value: Optional[str], typ: EventType) -> None: 60 | """Notify websocket clients that a setting has changed.""" 61 | await websocket_manager.send( 62 | ("setting", str(definition.key)), 63 | SettingEvent( 64 | type=typ, 65 | resource="setting", 66 | date=datetime.utcnow(), 67 | payload=SettingKV.from_db(definition, set_value), 68 | ), 69 | ) 70 | -------------------------------------------------------------------------------- /spoolman/docs.py: -------------------------------------------------------------------------------- 1 | """Functions for generating documentation.""" 2 | 3 | import json 4 | import logging 5 | from pathlib import Path 6 | from typing import Any 7 | 8 | from fastapi import FastAPI 9 | from fastapi.openapi.utils import get_openapi 10 | 11 | from spoolman.api.v1.router import app as v1_app 12 | 13 | logger = logging.getLogger(__name__) 14 | logger.setLevel(logging.INFO) 15 | logger.addHandler(logging.StreamHandler()) # Print all log messages to stdout 16 | 17 | 18 | def generate_openapi(app: FastAPI) -> dict[str, Any]: 19 | """Generate the OpenAPI document for a specific FastAPI app. 20 | 21 | Args: 22 | app (FastAPI): The FastAPI app. 23 | 24 | Returns: 25 | dict[str, Any]: The OpenAPI document. 26 | 27 | """ 28 | return get_openapi( 29 | title=app.title, 30 | version=app.version, 31 | openapi_version=app.openapi_version, 32 | description=app.description, 33 | routes=app.routes, 34 | contact=app.contact, 35 | license_info=app.license_info, 36 | servers=app.servers, 37 | tags=app.openapi_tags, 38 | terms_of_service=app.terms_of_service, 39 | ) 40 | 41 | 42 | def generate_docs() -> None: 43 | """Generate documentation for this service in the docs/ directory.""" 44 | target_dir = Path("docs") 45 | 46 | logger.info('Generating documentation to "%s"...', target_dir.resolve()) 47 | 48 | target_dir.mkdir(parents=True, exist_ok=True) 49 | 50 | spec = json.dumps(generate_openapi(v1_app)) 51 | 52 | with target_dir.joinpath("index.html").open("w") as f: 53 | f.write( 54 | f""" 55 | 56 | 57 | 58 | Spoolman REST API v1 - ReDoc 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 |
-------------------------------------------------------------------------------- /spoolman/exceptions.py: -------------------------------------------------------------------------------- 1 | """Various exceptions used.""" 2 | 3 | 4 | class ItemNotFoundError(Exception): 5 | pass 6 | 7 | 8 | class ItemDeleteError(Exception): 9 | pass 10 | 11 | 12 | class ItemCreateError(Exception): 13 | pass 14 | 15 | 16 | class SpoolMeasureError(Exception): 17 | pass 18 |
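These exception classes carry no HTTP semantics on their own, yet the integration tests later in this dump assert on a 404 JSON body with a "message" key. A sketch of how such a mapping could look in FastAPI, assuming a handler along these lines exists elsewhere in the app (the project's actual handler is not shown here):

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

from spoolman.exceptions import ItemNotFoundError

app = FastAPI()


@app.exception_handler(ItemNotFoundError)
async def item_not_found_handler(request: Request, exc: ItemNotFoundError) -> JSONResponse:
    # Shape matches what the integration tests read: result.json()["message"]
    return JSONResponse(status_code=404, content={"message": str(exc)})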
-------------------------------------------------------------------------------- /spoolman/export.py: -------------------------------------------------------------------------------- 1 | """Functionality for exporting data in various formats.""" 2 | 3 | import asyncio 4 | import csv 5 | import json 6 | from collections.abc import Iterable 7 | from typing import TYPE_CHECKING, Any 8 | 9 | from spoolman.database import models 10 | 11 | if TYPE_CHECKING: 12 | from _typeshed import SupportsWrite 13 | 14 | banned_attrs = {"awaitable_attrs", "metadata", "registry", "spools", "filaments"} 15 | 16 | 17 | async def flatten_sqlalchemy_object(obj: models.Base, parent_key: str = "", sep: str = ".") -> dict[str, Any]: 18 | """Recursively flattens a SQLAlchemy object into a dictionary with dot-separated keys.""" 19 | fields = {} 20 | for attr in dir(obj): 21 | # Check if the attribute is a column or a relationship 22 | if not attr.startswith("_") and attr not in banned_attrs: 23 | value = await getattr(obj.awaitable_attrs, attr) 24 | 25 | if attr == "extra": 26 | # Handle extra fields 27 | for v in value: 28 | fields[f"{parent_key}extra.{v.key}"] = v.value 29 | continue 30 | 31 | # Handle nested SQLAlchemy objects 32 | if isinstance(value, models.Base): 33 | nested_fields = await flatten_sqlalchemy_object(value, f"{parent_key}{attr}{sep}", sep=sep) 34 | fields.update(nested_fields) 35 | else: 36 | # Use only columns and simple data types 37 | fields[f"{parent_key}{attr}"] = value 38 | return fields 39 | 40 | 41 | async def dump_as_csv(sqlalchemy_objects: Iterable[models.Base], writer: "SupportsWrite[str]") -> None: 42 | """Export a list of objects as CSV to a writer. Nested objects are flattened with dot-separated keys.""" 43 | # Flatten each object and get all column names 44 | all_flattened = await asyncio.gather(*[flatten_sqlalchemy_object(obj) for obj in sqlalchemy_objects]) 45 | 46 | # Collect all unique headers across flattened objects 47 | headers = set() 48 | for flattened_obj in all_flattened: 49 | headers.update(flattened_obj.keys()) 50 | 51 | headers = sorted(headers) # Sort headers for consistent column ordering 52 | 53 | # Write to CSV 54 | csv_writer = csv.DictWriter(writer, fieldnames=headers) 55 | csv_writer.writeheader() 56 | for flattened_obj in all_flattened: 57 | csv_writer.writerow(flattened_obj) 58 | 59 | 60 | async def dump_as_json(sqlalchemy_objects: Iterable[models.Base], writer: "SupportsWrite[str]") -> None: 61 | """Export a list of objects as JSON to a writer. Nested objects are flattened with dot-separated keys.""" 62 | # Flatten each object and get all column names 63 | all_flattened = await asyncio.gather(*[flatten_sqlalchemy_object(obj) for obj in sqlalchemy_objects]) 64 | 65 | # Write to JSON 66 | json.dump(all_flattened, writer, default=str) 67 | -------------------------------------------------------------------------------- /spoolman/filecache.py: -------------------------------------------------------------------------------- 1 | """A file-based cache system for reading/writing files.""" 2 | 3 | from pathlib import Path 4 | 5 | from spoolman.env import get_cache_dir 6 | 7 | 8 | def get_file(name: str) -> Path: 9 | """Get the path to a file in the cache dir.""" 10 | return get_cache_dir() / name 11 | 12 | 13 | def update_file(name: str, data: bytes) -> None: 14 | """Update a file if it differs from the given data.""" 15 | path = get_file(name) 16 | if path.exists() and path.read_bytes() == data: 17 | return 18 | path.parent.mkdir(parents=True, exist_ok=True) 19 | path.write_bytes(data) 20 | 21 | 22 | def get_file_contents(name: str) -> bytes: 23 | """Get the contents of a file.""" 24 | path = get_file(name) 25 | return path.read_bytes() 26 | -------------------------------------------------------------------------------- /spoolman/math.py: -------------------------------------------------------------------------------- 1 | """Various math-related functions.""" 2 | 3 | # ruff: noqa: PLR2004 4 | 5 | import math 6 | 7 | 8 | def weight_from_length(*, length: float, diameter: float, density: float) -> float: 9 | """Calculate the weight of a piece of filament. 10 | 11 | Args: 12 | length (float): Filament length in mm 13 | diameter (float): Filament diameter in mm 14 | density (float): Density of filament material in g/cm3 15 | 16 | Returns: 17 | float: Weight in g 18 | 19 | """ 20 | volume_mm3 = length * math.pi * (diameter / 2) ** 2 21 | volume_cm3 = volume_mm3 / 1000 22 | return density * volume_cm3 23 | 24 | 25 | def length_from_weight(*, weight: float, diameter: float, density: float) -> float: 26 | """Calculate the length of a piece of filament. 27 | 28 | Args: 29 | weight (float): Filament weight in g 30 | diameter (float): Filament diameter in mm 31 | density (float): Density of filament material in g/cm3 32 | 33 | Returns: 34 | float: Length in mm 35 | 36 | """ 37 | volume_cm3 = weight / density 38 | volume_mm3 = volume_cm3 * 1000 39 | return volume_mm3 / (math.pi * (diameter / 2) ** 2) 40 | 41 | 42 | def rgb_to_lab(rgb: list[int]) -> list[float]: 43 | """Convert an RGB color to CIELAB. 44 | 45 | Input is of form [r, g, b] where r, g, and b are integers between 0 and 255. 46 | Output is of form [l, a, b] where l, a, and b are floats. 
47 | """ 48 | r, g, b = rgb[0] / 255, rgb[1] / 255, rgb[2] / 255 49 | 50 | r = (r / 12.92) if (r <= 0.04045) else math.pow((r + 0.055) / 1.055, 2.4) 51 | g = (g / 12.92) if (g <= 0.04045) else math.pow((g + 0.055) / 1.055, 2.4) 52 | b = (b / 12.92) if (b <= 0.04045) else math.pow((b + 0.055) / 1.055, 2.4) 53 | 54 | x = (r * 0.4124 + g * 0.3576 + b * 0.1805) / 0.95047 55 | y = (r * 0.2126 + g * 0.7152 + b * 0.0722) / 1.00000 56 | z = (r * 0.0193 + g * 0.1192 + b * 0.9505) / 1.08883 57 | 58 | x = math.pow(x, 1 / 3) if (x > 0.008856) else (7.787 * x) + 16 / 116 59 | y = math.pow(y, 1 / 3) if (y > 0.008856) else (7.787 * y) + 16 / 116 60 | z = math.pow(z, 1 / 3) if (z > 0.008856) else (7.787 * z) + 16 / 116 61 | 62 | return [(116 * y) - 16, 500 * (x - y), 200 * (y - z)] 63 | 64 | 65 | def delta_e(lab_a: list[float], lab_b: list[float]) -> float: 66 | """Calculate the color difference between two CIELAB colors.""" 67 | delta_l = lab_a[0] - lab_b[0] 68 | delta_a = lab_a[1] - lab_b[1] 69 | delta_b = lab_a[2] - lab_b[2] 70 | c1 = math.sqrt(lab_a[1] * lab_a[1] + lab_a[2] * lab_a[2]) 71 | c2 = math.sqrt(lab_b[1] * lab_b[1] + lab_b[2] * lab_b[2]) 72 | delta_c = c1 - c2 73 | delta_h = delta_a * delta_a + delta_b * delta_b - delta_c * delta_c 74 | delta_h = math.sqrt(delta_h) if delta_h > 0 else 0 75 | sc = 1.0 + 0.045 * c1 76 | sh = 1.0 + 0.015 * c1 77 | delta_l_kl_sl = delta_l / 1.0 78 | delta_c_kc_sc = delta_c / sc 79 | delta_h_kh_sh = delta_h / sh 80 | i = delta_l_kl_sl * delta_l_kl_sl + delta_c_kc_sc * delta_c_kc_sc + delta_h_kh_sh * delta_h_kh_sh 81 | return math.sqrt(i) if i > 0 else 0 82 | 83 | 84 | def hex_to_rgb(hex_code: str) -> list[int]: 85 | """Convert a hex color code to RGB. 86 | 87 | Input is of form #RRGGBB where RR, GG, and BB are hexadecimal numbers. 88 | Output is of form [r, g, b] where r, g, and b are integers between 0 and 255. 
89 | """ 90 | hex_code = hex_code.lstrip("#") 91 | 92 | r = int(hex_code[0:2], 16) 93 | g = int(hex_code[2:4], 16) 94 | b = int(hex_code[4:6], 16) 95 | 96 | return [r, g, b] 97 | -------------------------------------------------------------------------------- /spoolman/prometheus/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Donkie/Spoolman/488b37c5a061c611c127f562a161de161aa68a3f/spoolman/prometheus/__init__.py -------------------------------------------------------------------------------- /spoolman/prometheus/metrics.py: -------------------------------------------------------------------------------- 1 | """Prometheus metrics collectors.""" 2 | 3 | import logging 4 | from typing import Callable 5 | 6 | import sqlalchemy 7 | from prometheus_client import REGISTRY, Gauge, make_asgi_app 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | from sqlalchemy.orm import contains_eager 10 | 11 | from spoolman.database import models 12 | 13 | registry = REGISTRY 14 | 15 | PREFIX = "spoolman" 16 | 17 | SPOOL_PRICE = Gauge(f"{PREFIX}_spool_price", "Total Spool price", ["spool_id", "filament_id"]) 18 | SPOOL_USED_WEIGHT = Gauge(f"{PREFIX}_spool_weight_used", "Spool Used Weight in grams", ["spool_id", "filament_id"]) 19 | SPOOL_INITIAL_WEIGHT = Gauge( 20 | f"{PREFIX}_spool_initial_weight", 21 | "Spool Net weight in grams", 22 | ["spool_id", "filament_id"], 23 | ) 24 | FILAMENT_INFO = Gauge( 25 | f"{PREFIX}_filament_info", 26 | "Filament information", 27 | ["filament_id", "vendor", "name", "material", "color"], 28 | ) 29 | FILAMENT_DENSITY = Gauge(f"{PREFIX}_filament_density", "Density of filament gram/cm3", ["filament_id"]) 30 | FILAMENT_DIAMETER = Gauge(f"{PREFIX}_filament_diameter", "Diameter of filament", ["filament_id"]) 31 | FILAMENT_WEIGHT = Gauge(f"{PREFIX}_filament_weight", "Net weight of filament", ["filament_id"]) 32 | 33 | logger = logging.getLogger(__name__) 34 | 35 | 36 | def make_metrics_app() -> Callable: 37 | """Start ASGI prometheus app with global registry.""" 38 | logger.info("Start metrics app") 39 | return make_asgi_app(registry=registry) 40 | 41 | 42 | metrics_app = make_asgi_app() 43 | 44 | 45 | async def spool_metrics(db: AsyncSession) -> None: 46 | """Get metrics for spools from the DB and write them to Prometheus. 47 | 48 | Args: 49 | db: async db session 50 | 51 | """ 52 | stmt = sqlalchemy.select(models.Spool).where( 53 | sqlalchemy.or_( 54 | models.Spool.archived.is_(False), 55 | models.Spool.archived.is_(None), 56 | ), 57 | ) 58 | rows = await db.execute(stmt) 59 | result = list(rows.unique().scalars().all()) 60 | for row in result: 61 | if row.price is not None: 62 | SPOOL_PRICE.labels(str(row.id), str(row.filament_id)).set(row.price) 63 | if row.initial_weight is not None: 64 | SPOOL_INITIAL_WEIGHT.labels(str(row.id), str(row.filament_id)).set(row.initial_weight) 65 | SPOOL_USED_WEIGHT.labels(str(row.id), str(row.filament_id)).set(row.used_weight) 66 | 67 | 68 | async def filament_metrics(db: AsyncSession) -> None: 69 | """Get metrics and info for filaments from the DB and write them to Prometheus. 
70 | 71 | Args: 72 | db: async db session 73 | 74 | """ 75 | stmt = ( 76 | sqlalchemy.select(models.Filament) 77 | .options(contains_eager(models.Filament.vendor)) 78 | .join(models.Filament.vendor, isouter=True) 79 | ) 80 | rows = await db.execute(stmt) 81 | result = list(rows.unique().scalars().all()) 82 | for row in result: 83 | vendor_name = "-" 84 | if row.vendor is not None: 85 | vendor_name = row.vendor.name 86 | FILAMENT_INFO.labels( 87 | str(row.id), 88 | vendor_name, 89 | row.name, 90 | row.material, 91 | row.color_hex, 92 | ).set(1) 93 | FILAMENT_DENSITY.labels(str(row.id)).set(row.density) 94 | FILAMENT_DIAMETER.labels(str(row.id)).set(row.diameter) 95 | if row.weight is not None: 96 | FILAMENT_WEIGHT.labels(str(row.id)).set(row.weight) 97 | -------------------------------------------------------------------------------- /spoolman/settings.py: -------------------------------------------------------------------------------- 1 | """Settings that can be changed by the user. 2 | 3 | All settings are JSON encoded and stored in the database. 4 | """ 5 | 6 | import json 7 | from dataclasses import dataclass 8 | from enum import Enum 9 | 10 | 11 | class SettingType(Enum): 12 | """The type of a setting.""" 13 | 14 | BOOLEAN = "boolean" 15 | NUMBER = "number" 16 | STRING = "string" 17 | ARRAY = "array" 18 | OBJECT = "object" 19 | 20 | 21 | @dataclass 22 | class SettingDefinition: 23 | """A setting that can be changed by the user.""" 24 | 25 | key: str 26 | type: SettingType 27 | default: str 28 | 29 | def validate_type(self, value: str) -> None: # noqa: C901 30 | """Validate that the value has the correct type.""" 31 | obj = json.loads(value) 32 | if self.type == SettingType.BOOLEAN: 33 | if not isinstance(obj, bool): 34 | raise ValueError(f"Setting {self.key} must be a boolean.") 35 | elif self.type == SettingType.NUMBER: 36 | if isinstance(obj, bool) or not isinstance(obj, (int, float)): # bool is a subclass of int, reject it here 37 | raise ValueError(f"Setting {self.key} must be a number.") 38 | elif self.type == SettingType.STRING: 39 | if not isinstance(obj, str): 40 | raise ValueError(f"Setting {self.key} must be a string.") 41 | elif self.type == SettingType.ARRAY: 42 | if not isinstance(obj, list): 43 | raise ValueError(f"Setting {self.key} must be an array.") 44 | elif self.type == SettingType.OBJECT: # noqa: SIM102 45 | if not isinstance(obj, dict): 46 | raise ValueError(f"Setting {self.key} must be an object.") 47 | 48 | 49 | SETTINGS: dict[str, SettingDefinition] = {} 50 | 51 | 52 | def register_setting(key: str, typ: SettingType, default: str) -> None: 53 | """Register a setting.""" 54 | SETTINGS[key] = SettingDefinition(key, typ, default) 55 | 56 | 57 | def parse_setting(key: str) -> SettingDefinition: 58 | """Parse a setting key.""" 59 | if key not in SETTINGS: 60 | raise ValueError(f"Setting {key} does not exist.") 61 | return SETTINGS[key] 62 | 63 | 64 | register_setting("currency", SettingType.STRING, json.dumps("EUR")) 65 | register_setting("round_prices", SettingType.BOOLEAN, json.dumps(obj=False)) 66 | register_setting("print_presets", SettingType.ARRAY, json.dumps([])) 67 | 68 | register_setting("extra_fields_vendor", SettingType.ARRAY, json.dumps([])) 69 | register_setting("extra_fields_filament", SettingType.ARRAY, json.dumps([])) 70 | register_setting("extra_fields_spool", SettingType.ARRAY, json.dumps([])) 71 | register_setting("base_url", SettingType.STRING, json.dumps("")) 72 | 73 | register_setting("locations", SettingType.ARRAY, json.dumps([])) 74 | register_setting("locations_spoolorders", SettingType.OBJECT, 
json.dumps({})) 75 | -------------------------------------------------------------------------------- /spoolman/ws.py: -------------------------------------------------------------------------------- 1 | """Websocket functionality.""" 2 | 3 | import logging 4 | 5 | from fastapi import WebSocket 6 | from starlette.websockets import WebSocketState 7 | 8 | from spoolman.api.v1.models import Event 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | class SubscriptionTree: 14 | """Subscription tree. 15 | 16 | This is a tree structure that allows us to efficiently send messages to 17 | all websockets that are subscribed to a certain pool of events. 18 | 19 | You can subscribe to different levels of the tree, for example: 20 | - ("vendor", "1") will subscribe to events for vendor 1 21 | - ("vendor",) will subscribe to events for all vendors 22 | - () will subscribe to events for all vendors, filaments and spools 23 | """ 24 | 25 | def __init__(self) -> None: 26 | """Initialize.""" 27 | self.children: dict[str, SubscriptionTree] = {} 28 | self.subscribers: set[WebSocket] = set() 29 | 30 | def add(self, path: tuple[str, ...], websocket: WebSocket) -> None: 31 | """Add a websocket to the subscription tree.""" 32 | if len(path) == 0: 33 | self.subscribers.add(websocket) 34 | else: 35 | if path[0] not in self.children: 36 | self.children[path[0]] = SubscriptionTree() 37 | self.children[path[0]].add(path[1:], websocket) 38 | 39 | def remove(self, path: tuple[str, ...], websocket: WebSocket) -> None: 40 | """Remove a websocket from the subscription tree.""" 41 | if len(path) == 0: 42 | self.subscribers.remove(websocket) 43 | elif path[0] in self.children: 44 | self.children[path[0]].remove(path[1:], websocket) 45 | 46 | async def send(self, path: tuple[str, ...], evt: Event) -> None: 47 | """Send a message to all websockets in this branch of the tree.""" 48 | # Broadcast to all subscribers on this level 49 | for websocket in list(self.subscribers): # iterate over a copy, subscribers may be dropped mid-loop 50 | if ( 51 | websocket.client_state == WebSocketState.DISCONNECTED # noqa: PLR1714 52 | or websocket.application_state == WebSocketState.DISCONNECTED 53 | ): 54 | # A bad disconnection may have occurred, drop the dead socket from this node 55 | self.subscribers.discard(websocket) 56 | logger.info( 57 | "Forcing disconnection of client %s on pool %s", 58 | websocket.client.host if websocket.client else "?", 59 | ",".join(path), 60 | ) 61 | elif ( 62 | websocket.client_state == WebSocketState.CONNECTED 63 | and websocket.application_state == WebSocketState.CONNECTED 64 | ): 65 | await websocket.send_text(evt.json()) 66 | 67 | # Send the message further down the tree 68 | if len(path) > 0 and path[0] in self.children: 69 | await self.children[path[0]].send(path[1:], evt) 70 | 71 | 72 | class WebsocketManager: 73 | """Websocket manager.""" 74 | 75 | def __init__(self) -> None: 76 | """Initialize.""" 77 | self.tree = SubscriptionTree() 78 | 79 | def connect(self, pool: tuple[str, ...], websocket: WebSocket) -> None: 80 | """Connect a websocket.""" 81 | self.tree.add(pool, websocket) 82 | logger.info( 83 | "Client %s is now listening on pool %s", 84 | websocket.client.host if websocket.client else "?", 85 | ",".join(pool), 86 | ) 87 | 88 | def disconnect(self, pool: tuple[str, ...], websocket: WebSocket) -> None: 89 | """Disconnect a websocket.""" 90 | self.tree.remove(pool, websocket) 91 | logger.info( 92 | "Client %s has stopped listening on pool %s", 93 | websocket.client.host if websocket.client else "?", 94 | ",".join(pool), 95 | ) 96 | 97 | async def send(self, pool: tuple[str, ...], evt: Event) -> 
None: 98 | """Send a message to all websockets in a pool.""" 99 | await self.tree.send(pool, evt) 100 | 101 | 102 | websocket_manager = WebsocketManager() 103 | -------------------------------------------------------------------------------- /tests_integration/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-alpine 2 | 3 | COPY requirements.txt /tester/ 4 | 5 | WORKDIR /tester 6 | 7 | RUN pip install -r requirements.txt 8 | 9 | ENTRYPOINT [ "pytest", "--exitfirst", "tests" ] 10 | -------------------------------------------------------------------------------- /tests_integration/__init__.py: -------------------------------------------------------------------------------- 1 | """Integration tests root.""" 2 | -------------------------------------------------------------------------------- /tests_integration/docker-compose-cockroachdb.yml: -------------------------------------------------------------------------------- 1 | version: '3.9' 2 | services: 3 | db: 4 | image: cockroachdb/cockroach:v23.1.2 5 | command: start-single-node --insecure 6 | environment: 7 | - COCKROACH_USER=john 8 | - COCKROACH_DATABASE=spoolman 9 | - TZ=Asia/Seoul 10 | healthcheck: 11 | test: 12 | [ 13 | "CMD", 14 | "curl", 15 | "-f", 16 | "http://localhost:8080/health?ready=1" 17 | ] 18 | interval: 5s 19 | timeout: 10s 20 | retries: 5 21 | spoolman: 22 | image: donkie/spoolman:test 23 | environment: 24 | - SPOOLMAN_DB_TYPE=cockroachdb 25 | - SPOOLMAN_DB_HOST=db 26 | - SPOOLMAN_DB_PORT=26257 27 | - SPOOLMAN_DB_NAME=spoolman 28 | - SPOOLMAN_DB_USERNAME=john 29 | - SPOOLMAN_LOGGING_LEVEL=INFO 30 | - TZ=Europe/Stockholm 31 | depends_on: 32 | db: 33 | condition: service_healthy 34 | tester: 35 | image: donkie/spoolman-tester:latest 36 | volumes: 37 | - ./tests:/tester/tests 38 | environment: 39 | - DB_TYPE=cockroachdb 40 | depends_on: 41 | - spoolman 42 | -------------------------------------------------------------------------------- /tests_integration/docker-compose-mariadb.yml: -------------------------------------------------------------------------------- 1 | version: '3.9' 2 | services: 3 | db: 4 | image: mariadb:latest 5 | environment: 6 | - MARIADB_USER=john 7 | - MARIADB_RANDOM_ROOT_PASSWORD=yes 8 | - MARIADB_PASSWORD=abc 9 | - MARIADB_DATABASE=spoolman 10 | - MARIADB_MYSQL_LOCALHOST_USER=true 11 | - TZ=Asia/Seoul 12 | healthcheck: 13 | test: 14 | [ 15 | "CMD", 16 | "/usr/local/bin/healthcheck.sh", 17 | "--su-mysql", 18 | "--connect", 19 | "--innodb_initialized" 20 | ] 21 | interval: 1s 22 | timeout: 5s 23 | retries: 30 24 | spoolman: 25 | image: donkie/spoolman:test 26 | environment: 27 | - SPOOLMAN_DB_TYPE=mysql 28 | - SPOOLMAN_DB_HOST=db 29 | - SPOOLMAN_DB_PORT=3306 30 | - SPOOLMAN_DB_NAME=spoolman 31 | - SPOOLMAN_DB_USERNAME=john 32 | - SPOOLMAN_DB_PASSWORD=abc 33 | - SPOOLMAN_LOGGING_LEVEL=INFO 34 | - TZ=Europe/Stockholm 35 | depends_on: 36 | db: 37 | condition: service_healthy 38 | tester: 39 | image: donkie/spoolman-tester:latest 40 | volumes: 41 | - ./tests:/tester/tests 42 | environment: 43 | - DB_TYPE=mysql 44 | depends_on: 45 | - spoolman 46 | -------------------------------------------------------------------------------- /tests_integration/docker-compose-postgres.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | db: 4 | image: postgres:11-alpine 5 | environment: 6 | - POSTGRES_PASSWORD=abc 7 | - TZ='GMT+4' 8 | - PGTZ='GMT+4' 9 | spoolman: 10 | image: 
donkie/spoolman:test 11 | environment: 12 | - SPOOLMAN_DB_TYPE=postgres 13 | - SPOOLMAN_DB_HOST=db 14 | - SPOOLMAN_DB_PORT=5432 15 | - SPOOLMAN_DB_NAME=postgres 16 | - SPOOLMAN_DB_USERNAME=postgres 17 | - SPOOLMAN_DB_PASSWORD=abc 18 | - SPOOLMAN_LOGGING_LEVEL=INFO 19 | - TZ=Europe/Stockholm 20 | depends_on: 21 | - db 22 | tester: 23 | image: donkie/spoolman-tester:latest 24 | volumes: 25 | - ./tests:/tester/tests 26 | environment: 27 | - DB_TYPE=postgres 28 | depends_on: 29 | - spoolman 30 | -------------------------------------------------------------------------------- /tests_integration/docker-compose-sqlite.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | spoolman: 4 | image: donkie/spoolman:test 5 | environment: 6 | - SPOOLMAN_LOGGING_LEVEL=INFO 7 | - TZ=Europe/Stockholm 8 | tester: 9 | image: donkie/spoolman-tester:latest 10 | volumes: 11 | - ./tests:/tester/tests 12 | environment: 13 | - DB_TYPE=sqlite 14 | depends_on: 15 | - spoolman 16 | -------------------------------------------------------------------------------- /tests_integration/requirements.txt: -------------------------------------------------------------------------------- 1 | pytest==8.3.2 2 | pytest-asyncio==0.23.8 3 | httpx==0.27.0 4 | -------------------------------------------------------------------------------- /tests_integration/run.py: -------------------------------------------------------------------------------- 1 | """Build and run the integration tests.""" 2 | 3 | # ruff: noqa: S605, S607, T201 4 | 5 | import os 6 | import sys 7 | 8 | if __name__ == "__main__": 9 | print("Building and running integration tests...") 10 | print("Building Spoolman...") 11 | if os.system("docker build -t donkie/spoolman:test .") > 0: 12 | print("Failed to build Spoolman!") 13 | sys.exit(1) 14 | print("Building Spoolman tester...") 15 | if os.system("docker build -t donkie/spoolman-tester:latest tests_integration") > 0: 16 | print("Failed to build Spoolman tester!") 17 | sys.exit(1) 18 | 19 | # Support input arguments for running only specific tests 20 | if len(sys.argv) > 1: 21 | targets = sys.argv[1:] 22 | # Check that all targets are valid 23 | for target in targets: 24 | if target not in ["postgres", "sqlite", "mariadb", "cockroachdb"]: 25 | print(f"Unknown target: {target}") 26 | sys.exit(1) 27 | else: 28 | print("No targets specified, running all tests...") 29 | targets = [ 30 | "postgres", 31 | "sqlite", 32 | "mariadb", 33 | "cockroachdb", 34 | ] 35 | 36 | for target in targets: 37 | print(f"Running integration tests against {target}...") 38 | os.system(f"docker compose -f tests_integration/docker-compose-{target}.yml down -v") 39 | if ( 40 | os.system(f"docker compose -f tests_integration/docker-compose-{target}.yml up --abort-on-container-exit") 41 | > 0 42 | ): 43 | print(f"Integration tests against {target} failed!") 44 | sys.exit(1) 45 | 46 | print("Integration tests passed!") 47 | -------------------------------------------------------------------------------- /tests_integration/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for the integration of the application.""" 2 | -------------------------------------------------------------------------------- /tests_integration/tests/fields/__init__.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the custom extra fields system.""" 2 | 
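The test modules that follow import URL and several assert_* helpers from a shared conftest.py whose contents are not included in this dump. Purely to make those tests readable, here is a hypothetical reconstruction of the two simplest pieces of that surface; the names match the imports, but the bodies and the env-var default are assumptions:

import os

import httpx

# Base URL of the Spoolman service under test (assumed env override and default)
URL = os.environ.get("SPOOLMAN_URL", "http://spoolman:8000")


def assert_httpx_success(result: httpx.Response) -> None:
    """Fail with status and body in the message if the response is not 2xx."""
    assert result.is_success, f"{result.status_code}: {result.text}"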
-------------------------------------------------------------------------------- /tests_integration/tests/fields/test_delete.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the custom extra fields system.""" 2 | 3 | import json 4 | 5 | import httpx 6 | 7 | from ..conftest import URL, assert_httpx_success 8 | 9 | 10 | def test_delete_field(): 11 | """Test adding a field, deleting it, and making sure it's gone.""" 12 | result = httpx.post( 13 | f"{URL}/api/v1/field/spool/mytextfield", 14 | json={ 15 | "name": "My text field", 16 | "field_type": "text", 17 | "default_value": json.dumps("Hello World"), 18 | }, 19 | ) 20 | assert_httpx_success(result) 21 | 22 | # Delete 23 | result = httpx.delete(f"{URL}/api/v1/field/spool/mytextfield") 24 | assert_httpx_success(result) 25 | 26 | # Verify 27 | result = httpx.get(f"{URL}/api/v1/field/spool") 28 | assert_httpx_success(result) 29 | assert result.json() == [] 30 | -------------------------------------------------------------------------------- /tests_integration/tests/fields/test_get.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the custom extra fields system.""" 2 | 3 | import json 4 | 5 | import httpx 6 | 7 | from ..conftest import URL, assert_httpx_success, assert_lists_compatible 8 | 9 | 10 | def test_get_field(): 11 | """Test adding a couple of fields to the spool and then getting them.""" 12 | result = httpx.post( 13 | f"{URL}/api/v1/field/spool/mytextfield", 14 | json={ 15 | "name": "My text field", 16 | "field_type": "text", 17 | "default_value": json.dumps("Hello World"), 18 | }, 19 | ) 20 | assert_httpx_success(result) 21 | 22 | result = httpx.post( 23 | f"{URL}/api/v1/field/spool/myintfield", 24 | json={ 25 | "name": "My int field", 26 | "field_type": "integer", 27 | "default_value": json.dumps(42), 28 | }, 29 | ) 30 | assert_httpx_success(result) 31 | 32 | result = httpx.get(f"{URL}/api/v1/field/spool") 33 | assert_httpx_success(result) 34 | assert_lists_compatible( 35 | result.json(), 36 | [ 37 | { 38 | "name": "My text field", 39 | "key": "mytextfield", 40 | "field_type": "text", 41 | "default_value": json.dumps("Hello World"), 42 | }, 43 | { 44 | "name": "My int field", 45 | "key": "myintfield", 46 | "field_type": "integer", 47 | "default_value": json.dumps(42), 48 | }, 49 | ], 50 | sort_key="key", 51 | ) 52 | 53 | # Clean up 54 | result = httpx.delete(f"{URL}/api/v1/field/spool/mytextfield") 55 | assert_httpx_success(result) 56 | 57 | result = httpx.delete(f"{URL}/api/v1/field/spool/myintfield") 58 | assert_httpx_success(result) 59 | -------------------------------------------------------------------------------- /tests_integration/tests/filament/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for the filament API.""" 2 | -------------------------------------------------------------------------------- /tests_integration/tests/filament/test_delete.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Filament API endpoint.""" 2 | 3 | from typing import Any 4 | 5 | import httpx 6 | 7 | from ..conftest import URL 8 | 9 | 10 | def test_delete_filament(random_vendor: dict[str, Any]): 11 | """Test deleting a filament from the database.""" 12 | # Setup 13 | result = httpx.post( 14 | f"{URL}/api/v1/filament", 15 | json={ 16 | "name": "Filament X", 17 | "vendor_id": random_vendor["id"], 18 
| "material": "PLA", 19 | "price": 100, 20 | "density": 1.25, 21 | "diameter": 1.75, 22 | "weight": 1000, 23 | "spool_weight": 250, 24 | "article_number": "123456789", 25 | "comment": "abcdefghåäö", 26 | }, 27 | ) 28 | result.raise_for_status() 29 | added_filament = result.json() 30 | 31 | # Execute 32 | httpx.delete( 33 | f"{URL}/api/v1/filament/{added_filament['id']}", 34 | ).raise_for_status() 35 | 36 | # Verify 37 | result = httpx.get( 38 | f"{URL}/api/v1/filament/{added_filament['id']}", 39 | ) 40 | assert result.status_code == 404 41 | 42 | 43 | def test_delete_filament_not_found(): 44 | """Test deleting a filament that does not exist.""" 45 | # Execute 46 | result = httpx.delete(f"{URL}/api/v1/filament/123456789") 47 | 48 | # Verify 49 | assert result.status_code == 404 50 | message = result.json()["message"].lower() 51 | assert "filament" in message 52 | assert "id" in message 53 | assert "123456789" in message 54 | -------------------------------------------------------------------------------- /tests_integration/tests/filament/test_get.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Filament API endpoint.""" 2 | 3 | from typing import Any 4 | 5 | import httpx 6 | 7 | from ..conftest import URL, assert_dicts_compatible 8 | 9 | 10 | def test_get_filament(random_vendor: dict[str, Any]): 11 | """Test getting a filament from the database.""" 12 | # Setup 13 | name = "Filament X" 14 | material = "PLA" 15 | price = 100 16 | density = 1.25 17 | diameter = 1.75 18 | weight = 1000 19 | spool_weight = 250 20 | article_number = "123456789" 21 | comment = "abcdefghåäö" 22 | settings_extruder_temp = 200 23 | settings_bed_temp = 60 24 | color_hex = "FF0000" 25 | result = httpx.post( 26 | f"{URL}/api/v1/filament", 27 | json={ 28 | "name": name, 29 | "vendor_id": random_vendor["id"], 30 | "material": material, 31 | "price": price, 32 | "density": density, 33 | "diameter": diameter, 34 | "weight": weight, 35 | "spool_weight": spool_weight, 36 | "article_number": article_number, 37 | "comment": comment, 38 | "settings_extruder_temp": settings_extruder_temp, 39 | "settings_bed_temp": settings_bed_temp, 40 | "color_hex": color_hex, 41 | }, 42 | ) 43 | result.raise_for_status() 44 | added_filament = result.json() 45 | 46 | # Execute 47 | result = httpx.get( 48 | f"{URL}/api/v1/filament/{added_filament['id']}", 49 | ) 50 | result.raise_for_status() 51 | 52 | # Verify 53 | filament = result.json() 54 | assert_dicts_compatible( 55 | filament, 56 | { 57 | "id": added_filament["id"], 58 | "registered": added_filament["registered"], 59 | "name": name, 60 | "vendor": random_vendor, 61 | "material": material, 62 | "price": price, 63 | "density": density, 64 | "diameter": diameter, 65 | "weight": weight, 66 | "spool_weight": spool_weight, 67 | "article_number": article_number, 68 | "comment": comment, 69 | "settings_extruder_temp": settings_extruder_temp, 70 | "settings_bed_temp": settings_bed_temp, 71 | "color_hex": color_hex, 72 | }, 73 | ) 74 | 75 | # Clean up 76 | httpx.delete(f"{URL}/api/v1/filament/{filament['id']}").raise_for_status() 77 | 78 | 79 | def test_get_filament_not_found(): 80 | """Test getting a filament that does not exist.""" 81 | # Execute 82 | result = httpx.get( 83 | f"{URL}/api/v1/filament/123456789", 84 | ) 85 | 86 | # Verify 87 | assert result.status_code == 404 88 | message = result.json()["message"].lower() 89 | assert "filament" in message 90 | assert "id" in message 91 | assert "123456789" in message 92 | 
-------------------------------------------------------------------------------- /tests_integration/tests/setting/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for the setting API.""" 2 | -------------------------------------------------------------------------------- /tests_integration/tests/setting/test_get.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Setting API endpoint.""" 2 | 3 | import httpx 4 | 5 | from ..conftest import URL 6 | 7 | 8 | def test_get_currency(): 9 | """Test getting the currency setting.""" 10 | # Execute 11 | result = httpx.get(f"{URL}/api/v1/setting/currency") 12 | result.raise_for_status() 13 | 14 | # Verify 15 | setting = result.json() 16 | assert setting == { 17 | "value": '"EUR"', 18 | "is_set": False, 19 | "type": "string", 20 | } 21 | 22 | 23 | def test_get_unknown(): 24 | """Test getting an unknown setting.""" 25 | # Execute 26 | result = httpx.get(f"{URL}/api/v1/setting/unknown") 27 | assert result.status_code == 404 28 | 29 | 30 | def test_get_all(): 31 | """Test getting all settings.""" 32 | # Execute 33 | result = httpx.get(f"{URL}/api/v1/setting/") 34 | result.raise_for_status() 35 | 36 | # Verify 37 | settings = result.json() 38 | assert "currency" in settings 39 | assert settings["currency"] == { 40 | "value": '"EUR"', 41 | "is_set": False, 42 | "type": "string", 43 | } 44 | -------------------------------------------------------------------------------- /tests_integration/tests/setting/test_set.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Setting API endpoint.""" 2 | 3 | import json 4 | 5 | import httpx 6 | 7 | from ..conftest import URL 8 | 9 | 10 | def test_set_currency(): 11 | """Test setting the currency setting.""" 12 | # Execute 13 | result = httpx.post( 14 | f"{URL}/api/v1/setting/currency", 15 | json='"SEK"', 16 | ) 17 | result.raise_for_status() 18 | 19 | # Verify 20 | setting = result.json() 21 | assert setting == { 22 | "value": '"SEK"', 23 | "is_set": True, 24 | "type": "string", 25 | } 26 | 27 | # Cleanup 28 | result = httpx.post( 29 | f"{URL}/api/v1/setting/currency", 30 | json="", 31 | ) 32 | result.raise_for_status() 33 | 34 | 35 | def test_unset_currency(): 36 | """Test un-setting the currency setting.""" 37 | # Execute set 38 | result = httpx.post( 39 | f"{URL}/api/v1/setting/currency", 40 | json='"SEK"', 41 | ) 42 | result.raise_for_status() 43 | 44 | # Verify set 45 | setting = result.json() 46 | assert setting == { 47 | "value": '"SEK"', 48 | "is_set": True, 49 | "type": "string", 50 | } 51 | 52 | # Execute unset 53 | result = httpx.post( 54 | f"{URL}/api/v1/setting/currency", 55 | json="", 56 | ) 57 | result.raise_for_status() 58 | 59 | # Verify unset 60 | setting = result.json() 61 | assert setting == { 62 | "value": '"EUR"', 63 | "is_set": False, 64 | "type": "string", 65 | } 66 | 67 | 68 | def test_set_unknown(): 69 | """Test setting an invalid setting.""" 70 | # Execute 71 | result = httpx.post( 72 | f"{URL}/api/v1/setting/not-a-setting", 73 | json='"SEK"', 74 | ) 75 | assert result.status_code == 404 76 | 77 | 78 | def test_set_currency_wrong_type(): 79 | """Test setting the currency setting with the wrong type.""" 80 | # Execute 81 | result = httpx.post( 82 | f"{URL}/api/v1/setting/currency", 83 | json=123, 84 | ) 85 | assert result.status_code == 422 86 | 87 | 88 | def test_set_big_value(): 89 | """Test setting a setting to a long string which should be saved correctly.""" 90 | long_string = "a" * (2**16 - 1 - 2) # Backend guarantees that it can handle strings up to 65535 characters long. 91 | # Remove 2 characters to account for the quotes. 92 | 93 | # Execute 94 | result = httpx.post( 95 | f"{URL}/api/v1/setting/currency", 96 | json=json.dumps(long_string), 97 | ) 98 | result.raise_for_status() 99 | 100 | # Verify 101 | result = httpx.get(f"{URL}/api/v1/setting/currency") 102 | result.raise_for_status() 103 | setting = result.json() 104 | assert setting == { 105 | "value": json.dumps(long_string), 106 | "is_set": True, 107 | "type": "string", 108 | } 109 | 110 | # Cleanup 111 | result = httpx.post( 112 | f"{URL}/api/v1/setting/currency", 113 | json="", 114 | ) 115 | result.raise_for_status() 116 |
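The 2**16 - 1 - 2 arithmetic in test_set_big_value above mirrors SETTING_MAX_LENGTH in spoolman/database/setting.py: what gets stored is the JSON-encoded value, and json.dumps wraps a string in two quote characters, so the raw payload must be two characters shorter than the limit. The accounting in isolation:

import json

SETTING_MAX_LENGTH = 2**16 - 1  # same limit as spoolman/database/setting.py

long_string = "a" * (SETTING_MAX_LENGTH - 2)
encoded = json.dumps(long_string)

# The two JSON quotes consume exactly the remaining budget
assert len(encoded) == SETTING_MAX_LENGTH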
-------------------------------------------------------------------------------- /tests_integration/tests/spool/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for the spool API.""" 2 | -------------------------------------------------------------------------------- /tests_integration/tests/spool/test_delete.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Spool API endpoint.""" 2 | 3 | from typing import Any 4 | 5 | import httpx 6 | 7 | from ..conftest import URL 8 | 9 | 10 | def test_delete_spool(random_filament: dict[str, Any]): 11 | """Test deleting a spool from the database.""" 12 | # Setup 13 | result = httpx.post( 14 | f"{URL}/api/v1/spool", 15 | json={ 16 | "filament_id": random_filament["id"], 17 | "remaining_weight": 1000, 18 | "location": "The Pantry", 19 | "lot_nr": "123456789", 20 | }, 21 | ) 22 | result.raise_for_status() 23 | spool = result.json() 24 | 25 | # Execute 26 | httpx.delete(f"{URL}/api/v1/spool/{spool['id']}").raise_for_status() 27 | 28 | # Verify 29 | result = httpx.get(f"{URL}/api/v1/spool/{spool['id']}") 30 | assert result.status_code == 404 31 | 32 | 33 | def test_delete_spool_not_found(): 34 | """Test deleting a spool that does not exist.""" 35 | # Execute 36 | result = httpx.delete(f"{URL}/api/v1/spool/123456789") 37 | 38 | # Verify 39 | assert result.status_code == 404 40 | message = result.json()["message"].lower() 41 | assert "spool" in message 42 | assert "id" in message 43 | assert "123456789" in message 44 | -------------------------------------------------------------------------------- /tests_integration/tests/test_backup.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the backup API endpoint.""" 2 | 3 | import httpx 4 | 5 | from .conftest import URL, DbType, get_db_type 6 | 7 | 8 | def test_backup(): 9 | """Test triggering an automatic database backup.""" 10 | if get_db_type() != DbType.SQLITE: 11 | return 12 | 13 | # Trigger backup 14 | result = httpx.post(f"{URL}/api/v1/backup") 15 | result.raise_for_status() 16 | -------------------------------------------------------------------------------- /tests_integration/tests/vendor/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for the vendor API.""" 2 | -------------------------------------------------------------------------------- /tests_integration/tests/vendor/test_add.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Vendor API endpoint.""" 2 | 3 | from datetime import datetime, timezone 4 | 5 | import 
httpx 6 | 7 | from ..conftest import URL, assert_dicts_compatible 8 | 9 | 10 | def test_add_vendor(): 11 | """Test adding a vendor to the database.""" 12 | # Execute 13 | name = "John" 14 | comment = "abcdefghåäö" 15 | external_id = "external_id1" 16 | result = httpx.post( 17 | f"{URL}/api/v1/vendor", 18 | json={ 19 | "name": name, 20 | "external_id": external_id, 21 | "comment": comment, 22 | }, 23 | ) 24 | result.raise_for_status() 25 | 26 | # Verify 27 | vendor = result.json() 28 | assert_dicts_compatible( 29 | vendor, 30 | { 31 | "id": vendor["id"], 32 | "registered": vendor["registered"], 33 | "name": name, 34 | "comment": comment, 35 | "external_id": external_id, 36 | }, 37 | ) 38 | 39 | # Verify that registered happened almost now (within 1 minute) 40 | diff = abs((datetime.now(tz=timezone.utc) - datetime.fromisoformat(vendor["registered"])).total_seconds()) 41 | assert diff < 60 42 | 43 | # Clean up 44 | httpx.delete(f"{URL}/api/v1/vendor/{vendor['id']}").raise_for_status() 45 | 46 | 47 | def test_add_vendor_required(): 48 | """Test adding a vendor with only the required fields to the database.""" 49 | # Execute 50 | name = "John" 51 | result = httpx.post( 52 | f"{URL}/api/v1/vendor", 53 | json={"name": name}, 54 | ) 55 | result.raise_for_status() 56 | 57 | # Verify 58 | vendor = result.json() 59 | assert_dicts_compatible( 60 | vendor, 61 | { 62 | "id": vendor["id"], 63 | "registered": vendor["registered"], 64 | "name": name, 65 | }, 66 | ) 67 | 68 | # Clean up 69 | httpx.delete(f"{URL}/api/v1/vendor/{vendor['id']}").raise_for_status() 70 | -------------------------------------------------------------------------------- /tests_integration/tests/vendor/test_delete.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Vendor API endpoint.""" 2 | 3 | import httpx 4 | 5 | from ..conftest import URL 6 | 7 | 8 | def test_delete_vendor(): 9 | """Test deleting a vendor from the database.""" 10 | # Setup 11 | name = "John" 12 | comment = "abcdefghåäö" 13 | result = httpx.post( 14 | f"{URL}/api/v1/vendor", 15 | json={"name": name, "comment": comment}, 16 | ) 17 | result.raise_for_status() 18 | added_vendor = result.json() 19 | 20 | # Execute 21 | httpx.delete( 22 | f"{URL}/api/v1/vendor/{added_vendor['id']}", 23 | ).raise_for_status() 24 | 25 | # Verify 26 | result = httpx.get( 27 | f"{URL}/api/v1/vendor/{added_vendor['id']}", 28 | ) 29 | assert result.status_code == 404 30 | 31 | 32 | def test_delete_vendor_not_found(): 33 | """Test deleting a vendor that does not exist.""" 34 | # Execute 35 | result = httpx.delete(f"{URL}/api/v1/vendor/123456789") 36 | 37 | # Verify 38 | assert result.status_code == 404 39 | message = result.json()["message"].lower() 40 | assert "vendor" in message 41 | assert "id" in message 42 | assert "123456789" in message 43 | -------------------------------------------------------------------------------- /tests_integration/tests/vendor/test_get.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Vendor API endpoint.""" 2 | 3 | import httpx 4 | 5 | from ..conftest import URL 6 | 7 | 8 | def test_get_vendor(): 9 | """Test getting a vendor from the database.""" 10 | # Setup 11 | name = "John" 12 | comment = "abcdefghåäö" 13 | result = httpx.post( 14 | f"{URL}/api/v1/vendor", 15 | json={"name": name, "comment": comment}, 16 | ) 17 | result.raise_for_status() 18 | added_vendor = result.json() 19 | 20 | # Execute 21 | result = httpx.get( 22 | 
f"{URL}/api/v1/vendor/{added_vendor['id']}", 23 | ) 24 | result.raise_for_status() 25 | 26 | # Verify 27 | vendor = result.json() 28 | assert vendor["name"] == name 29 | assert vendor["comment"] == comment 30 | assert vendor["id"] == added_vendor["id"] 31 | assert vendor["registered"] == added_vendor["registered"] 32 | 33 | # Clean up 34 | httpx.delete(f"{URL}/api/v1/vendor/{vendor['id']}").raise_for_status() 35 | 36 | 37 | def test_get_vendor_not_found(): 38 | """Test getting a vendor that does not exist.""" 39 | # Execute 40 | result = httpx.get(f"{URL}/api/v1/vendor/123456789") 41 | 42 | # Verify 43 | assert result.status_code == 404 44 | message = result.json()["message"].lower() 45 | assert "vendor" in message 46 | assert "id" in message 47 | assert "123456789" in message 48 | -------------------------------------------------------------------------------- /tests_integration/tests/vendor/test_update.py: -------------------------------------------------------------------------------- 1 | """Integration tests for the Vendor API endpoint.""" 2 | 3 | import httpx 4 | 5 | from ..conftest import URL 6 | 7 | 8 | def test_update_vendor(): 9 | """Test update a vendor in the database.""" 10 | # Setup 11 | name = "John" 12 | comment = "abcdefghåäö" 13 | result = httpx.post( 14 | f"{URL}/api/v1/vendor", 15 | json={"name": name, "comment": comment}, 16 | ) 17 | result.raise_for_status() 18 | added_vendor = result.json() 19 | 20 | # Execute 21 | new_name = "Stan" 22 | new_comment = "gfdadfg" 23 | result = httpx.patch( 24 | f"{URL}/api/v1/vendor/{added_vendor['id']}", 25 | json={"name": new_name, "comment": new_comment}, 26 | ) 27 | result.raise_for_status() 28 | 29 | # Verify 30 | vendor = result.json() 31 | assert vendor["name"] == new_name 32 | assert vendor["comment"] == new_comment 33 | assert vendor["id"] == added_vendor["id"] 34 | assert vendor["registered"] == added_vendor["registered"] 35 | 36 | # Clean up 37 | httpx.delete(f"{URL}/api/v1/vendor/{vendor['id']}").raise_for_status() 38 | 39 | 40 | def test_update_vendor_not_found(): 41 | """Test updating a vendor that does not exist.""" 42 | # Execute 43 | result = httpx.patch(f"{URL}/api/v1/vendor/123456789", json={"name": "John"}) 44 | 45 | # Verify 46 | assert result.status_code == 404 47 | message = result.json()["message"].lower() 48 | assert "vendor" in message 49 | assert "id" in message 50 | assert "123456789" in message 51 | --------------------------------------------------------------------------------