├── .env ├── .gitattributes ├── .github └── workflows │ └── docker-build-test-deploy.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode ├── launch.json └── settings.json ├── LICENSE.txt ├── README.md ├── backend ├── .dockerignore ├── .gitignore ├── Dockerfile ├── alembic.ini ├── app │ ├── __init__.py │ ├── alembic │ │ ├── README │ │ ├── env.py │ │ ├── script.py.mako │ │ └── versions │ │ │ ├── .keep │ │ │ └── 0f58b1b64e6f_.py │ ├── api │ │ ├── __init__.py │ │ ├── deps.py │ │ ├── main.py │ │ └── routes │ │ │ ├── __init__.py │ │ │ ├── assets.py │ │ │ ├── login.py │ │ │ ├── pipelines.py │ │ │ ├── users.py │ │ │ └── utils.py │ ├── backend_pre_start.py │ ├── core │ │ ├── __init__.py │ │ ├── config.py │ │ ├── db.py │ │ └── security.py │ ├── crud.py │ ├── email-templates │ │ ├── build │ │ │ ├── new_account.html │ │ │ ├── reset_password.html │ │ │ └── test_email.html │ │ └── src │ │ │ ├── new_account.mjml │ │ │ ├── reset_password.mjml │ │ │ └── test_email.mjml │ ├── initial_data.py │ ├── main.py │ ├── models.py │ ├── tests │ │ ├── __init__.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ └── routes │ │ │ │ ├── __init__.py │ │ │ │ ├── test_login.py │ │ │ │ └── test_users.py │ │ ├── conftest.py │ │ ├── crud │ │ │ ├── __init__.py │ │ │ └── test_user.py │ │ ├── scripts │ │ │ ├── __init__.py │ │ │ ├── test_backend_pre_start.py │ │ │ └── test_test_pre_start.py │ │ └── utils │ │ │ ├── __init__.py │ │ │ ├── user.py │ │ │ └── utils.py │ ├── tests_pre_start.py │ ├── utils.py │ └── worker │ │ ├── __init__.py │ │ ├── cartesian.py │ │ ├── expression.py │ │ ├── main.py │ │ ├── polyhedron.py │ │ ├── processes.py │ │ ├── tasks.py │ │ ├── types.py │ │ └── utils.py ├── poetry.lock ├── prestart.sh ├── pyproject.toml ├── samples │ ├── cone.glb │ ├── model.glb │ └── tree.glb ├── scripts │ ├── celery-reload.sh │ ├── celery.sh │ ├── format.sh │ ├── lint.sh │ └── test.sh └── tests-start.sh ├── docker-compose.build.yml ├── docker-compose.dev.yml ├── docker-compose.override.yml ├── 
docker-compose.traefik.yml ├── docker-compose.yml ├── frontend ├── .dockerignore ├── .env ├── .env.production ├── .gitignore ├── .nvmrc ├── Dockerfile ├── biome.json ├── env.sh ├── index.html ├── mapstore │ └── overrides │ │ ├── configs │ │ └── localConfig.json │ │ └── map.html ├── modify-openapi-operationids.js ├── nginx-backend-not-found.conf ├── nginx.conf ├── package-lock.json ├── package.json ├── playwright.config.ts ├── public │ ├── assets │ │ └── images │ │ │ ├── favicon.png │ │ │ └── logo.svg │ └── preview.html ├── src │ ├── client │ │ ├── core │ │ │ ├── ApiError.ts │ │ │ ├── ApiRequestOptions.ts │ │ │ ├── ApiResult.ts │ │ │ ├── CancelablePromise.ts │ │ │ ├── OpenAPI.ts │ │ │ ├── request.ts │ │ │ └── types.ts │ │ ├── index.ts │ │ ├── models.ts │ │ ├── schemas.ts │ │ └── services.ts │ ├── components │ │ ├── Admin │ │ │ ├── AddUser.tsx │ │ │ └── EditUser.tsx │ │ ├── Common │ │ │ ├── ActionsMenu.tsx │ │ │ ├── DeleteAlert.tsx │ │ │ ├── Navbar.tsx │ │ │ ├── NotFound.tsx │ │ │ ├── Sidebar.tsx │ │ │ ├── SidebarItems.tsx │ │ │ └── UserMenu.tsx │ │ ├── UserSettings │ │ │ ├── Appearance.tsx │ │ │ ├── ChangePassword.tsx │ │ │ ├── DeleteAccount.tsx │ │ │ ├── DeleteConfirmation.tsx │ │ │ └── UserInformation.tsx │ │ └── Viewer │ │ │ ├── InputExpression.tsx │ │ │ ├── PointCloudCanvas.tsx │ │ │ ├── PointGeometryCanvas.tsx │ │ │ ├── PolygonGeometryCanvas.tsx │ │ │ └── ThreeCanvas.tsx │ ├── hooks │ │ ├── useAuth.ts │ │ └── useCustomToast.ts │ ├── main.css │ ├── main.tsx │ ├── routeTree.gen.ts │ ├── routes │ │ ├── __root.tsx │ │ ├── _layout.tsx │ │ ├── _layout │ │ │ ├── admin.tsx │ │ │ ├── assets.tsx │ │ │ ├── index.tsx │ │ │ ├── map.tsx │ │ │ ├── pipeline │ │ │ │ └── $pipelineId.tsx │ │ │ ├── pipelines.tsx │ │ │ └── settings.tsx │ │ ├── login.tsx │ │ ├── recover-password.tsx │ │ ├── reset-password.tsx │ │ └── signup.tsx │ ├── theme.tsx │ ├── utils.ts │ ├── utils │ │ ├── cartesian.ts │ │ ├── expression.ts │ │ └── polyhedron.ts │ └── vite-env.d.ts ├── tests │ ├── auth.setup.ts 
│ ├── config.ts │ ├── login.spec.ts │ ├── reset-password.spec.ts │ ├── sign-up.spec.ts │ ├── user-settings.spec.ts │ └── utils │ │ ├── mailcatcher.ts │ │ ├── random.ts │ │ └── user.ts ├── tsconfig.json ├── tsconfig.node.json ├── vite.config.d.ts ├── vite.config.js └── vite.config.ts ├── hooks └── post_gen_project.py ├── img └── digital-twin-toolbox.png └── scripts ├── build-push.sh ├── build.sh ├── deploy.sh ├── test-local.sh └── test.sh /.env: -------------------------------------------------------------------------------- 1 | # Domain 2 | # This would be set to the production domain with an env var on deployment 3 | DOMAIN=localhost 4 | 5 | # Environment: local, staging, production 6 | ENVIRONMENT=local 7 | 8 | PROJECT_NAME='Digital Twin Toolbox' 9 | STACK_NAME=digital-twin-toolbox 10 | 11 | # Backend 12 | BACKEND_CORS_ORIGINS="http://localhost,http://localhost:5173,http://localhost:8081,https://localhost,https://localhost:5173,http://localhost.dtt-project.com" 13 | SECRET_KEY=changethis 14 | FIRST_SUPERUSER=admin@example.com 15 | FIRST_SUPERUSER_PASSWORD=changethis 16 | 17 | # Emails 18 | SMTP_HOST= 19 | SMTP_USER= 20 | SMTP_PASSWORD= 21 | EMAILS_FROM_EMAIL=info@example.com 22 | SMTP_TLS=True 23 | SMTP_SSL=False 24 | SMTP_PORT=587 25 | 26 | # Postgres 27 | POSTGRES_SERVER=localhost 28 | POSTGRES_PORT=5432 29 | POSTGRES_DB=app 30 | POSTGRES_USER=postgres 31 | POSTGRES_PASSWORD=changethis 32 | POSTGRES_TASKS_DB=tasks 33 | 34 | SENTRY_DSN= 35 | 36 | # Configure these with your own Docker registry images 37 | DOCKER_IMAGE_BACKEND=geosolutionsit/digital-twin-toolbox-backend 38 | DOCKER_IMAGE_FRONTEND=geosolutionsit/digital-twin-toolbox-frontend 39 | 40 | # changing DTT_ENABLE_USERS_MANAGEMENT to False removes the user management and the upload will be available to everybody 41 | # all actions on the UI will be performed as the default super user 42 | # set this to False only in a controlled environment e.g.
for local testing or to use this app as a standalone app on local machine 43 | DTT_ENABLE_USERS_MANAGEMENT=True 44 | DTT_API_URL=http://${DOMAIN} 45 | # base path of the frontend routing 46 | DTT_ROUTER_BASE_PATH=/ 47 | # location of all frontend assets files 48 | DTT_PUBLIC_BASE_PATH=/ 49 | # if true enable the hash routing 50 | DTT_ENABLE_ROUTER_HASH_HISTORY=False 51 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | *.sh text eol=lf 3 | -------------------------------------------------------------------------------- /.github/workflows/docker-build-test-deploy.yml: -------------------------------------------------------------------------------- 1 | name: Build frontend and backend, test and deploy 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - gha 8 | pull_request: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | build-and-test: 14 | runs-on: self-hosted 15 | steps: 16 | - name: Checkout code 17 | uses: actions/checkout@v4 18 | 19 | - name: Set up Docker Buildx 20 | uses: docker/setup-buildx-action@v3 21 | 22 | - name: Cache Docker layers 23 | uses: actions/cache@v4 24 | with: 25 | path: /tmp/.buildx-cache 26 | key: ${{ runner.os }}-buildx-${{ github.ref_name }} 27 | restore-keys: | 28 | ${{ runner.os }}-buildx- 29 | 30 | - name: Build the docker images 31 | run: | 32 | DOCKER_BUILDKIT=1 docker compose -f docker-compose.yml -f docker-compose.build.yml build \ 33 | --build-arg BUILDKIT_INLINE_CACHE=1 \ 34 | backend frontend 35 | 36 | - name: Update the buildx cache with new one 37 | run: | 38 | rm -rf /tmp/.buildx-cache 39 | mv /tmp/.buildx-cache-new /tmp/.buildx-cache 40 | 41 | - name: Run tests 42 | run: sh scripts/test-local.sh 43 | 44 | login-and-deploy: 45 | runs-on: self-hosted 46 | needs: build-and-test 47 | if: ${{ github.event_name != 'pull_request' && github.ref_name == 'main' }} 48 | steps: 49 | - 
name: Login to DockerHub 50 | uses: docker/login-action@v3 51 | with: 52 | username: ${{ secrets.DOCKERHUB_USERNAME }} 53 | password: ${{ secrets.DOCKERHUB_TOKEN }} 54 | 55 | - name: Deploy the images 56 | run: | 57 | source .env 58 | docker push $DOCKER_IMAGE_BACKEND:latest 59 | docker push $DOCKER_IMAGE_FRONTEND:latest 60 | 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | /test-results/ 3 | /playwright-report/ 4 | /blob-report/ 5 | /playwright/.cache/ 6 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v4.4.0 6 | hooks: 7 | - id: check-added-large-files 8 | - id: check-toml 9 | - id: check-yaml 10 | args: 11 | - --unsafe 12 | - id: end-of-file-fixer 13 | exclude: ^frontend/src/client/.* 14 | - id: trailing-whitespace 15 | exclude: ^frontend/src/client/.* 16 | - repo: https://github.com/charliermarsh/ruff-pre-commit 17 | rev: v0.2.2 18 | hooks: 19 | - id: ruff 20 | args: 21 | - --fix 22 | - id: ruff-format 23 | 24 | ci: 25 | autofix_commit_msg: 🎨 [pre-commit.ci] Auto format from pre-commit.com hooks 26 | autoupdate_commit_msg: ⬆ [pre-commit.ci] pre-commit autoupdate 27 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Debug Backend: Python Debugger", 9 | "type": "debugpy", 10 | "request": "launch", 11 | "module": "uvicorn", 12 | "args": [ 13 | "app.main:app", 14 | "--reload" 15 | ], 16 | "cwd": "${workspaceFolder}/backend", 17 | "jinja": true, 18 | "envFile": "${workspaceFolder}/.env", 19 | }, 20 | { 21 | "type": "chrome", 22 | "request": "launch", 23 | "name": "Debug Frontend: Launch Chrome against http://localhost:5173", 24 | "url": "http://localhost:5173", 25 | "webRoot": "${workspaceFolder}/frontend" 26 | }, 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.eol": "\n" 3 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Digital Twin Toolbox 2 | 3 | Introduction 4 | ============ 5 | This repository collects different tools/libraries and workflows inside a docker environment to generate 3D Tiles from common data sources such as Shapefiles and LAS files. 6 | The short term goal is to evaluate the various open source tools that are available to generate 3D Tiles from various data sources typically used when modeling an urban environment when creating a 3D Model like building and Lidar data. The long term goal is to transform this experiment into an engine that can be used to create 3D Tiles for urban environments. 7 | 8 | This project is still a work in progress and this application **is not** production ready. Extensive documentation about this project can be found in the [wiki](https://github.com/geosolutions-it/digital-twin-toolbox/wiki) page (see the Table of Contents). 
9 | 10 | At the moment we have draft pipelines for: 11 | - converting shapefile data (polygons, lines, points) into 3DTiles 12 | - converting lidar data to point 3DTiles dataset, including lidar processing to fix/manage CRS, resample and color it 13 | 14 | ![Application viewer with extruded polygons](img/digital-twin-toolbox.png) 15 | 16 | License 17 | ============ 18 | This work is licensed using [GPL v3.0 license](https://github.com/geosolutions-it/digital-twin-toolbox/blob/main/LICENSE.txt). 19 | 20 | Credits 21 | ============ 22 | We would like to thank the **City of Florence** and **Polytechnic University of Turin** for providing funding to bootstrap this work. The evolution of this project is right now an effort funded by GeoSolutions. 23 | If you are interested in participating or funding this work, please drop an email to info@geosolutionsgroup.com or engage with us through GitHub discussions and issues. 24 | 25 | -------------------------------------------------------------------------------- /backend/.dockerignore: -------------------------------------------------------------------------------- 1 | # Python 2 | __pycache__ 3 | app.egg-info 4 | *.pyc 5 | .mypy_cache 6 | .coverage 7 | htmlcov 8 | .venv 9 | node_modules -------------------------------------------------------------------------------- /backend/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | app.egg-info 3 | *.pyc 4 | .mypy_cache 5 | .coverage 6 | htmlcov 7 | .cache 8 | .venv 9 | node_modules -------------------------------------------------------------------------------- /backend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tiangolo/uvicorn-gunicorn-fastapi:python3.10 2 | 3 | WORKDIR /app/ 4 | 5 | SHELL ["/bin/bash", "-c"] 6 | 7 | ARG DDT_ENV=ddt 8 | 9 | # Install PDAL 10 | RUN cd / && \ 11 | curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/1.5.10 | tar -xvj
bin/micromamba && \ 12 | ./bin/micromamba shell init -s bash -p ~/micromamba && \ 13 | source ~/.bashrc && \ 14 | micromamba config append channels conda-forge && \ 15 | micromamba create -n $DDT_ENV python=3.10.13 -y && \ 16 | micromamba install -n $DDT_ENV -c conda-forge pdal=2.6.3 -y 17 | 18 | RUN micromamba install -n $DDT_ENV -c conda-forge poetry 19 | 20 | # Copy poetry.lock* in case it doesn't exist in the repo 21 | COPY ./pyproject.toml ./poetry.lock* /app/ 22 | 23 | # Allow installing dev dependencies to run tests 24 | ARG INSTALL_DEV=false 25 | RUN bash -c "if [ $INSTALL_DEV == 'true' ] ; then micromamba run -n $DDT_ENV poetry install --no-root ; else micromamba run -n $DDT_ENV poetry install --no-root --only main ; fi" 26 | 27 | # Install gdal python 28 | RUN micromamba run -n $DDT_ENV pip install gdal=="$(micromamba run -n ${DDT_ENV} gdal-config --version).*" --no-build-isolation 29 | 30 | ENV PYTHONPATH=/app 31 | 32 | COPY ./scripts/ /app/ 33 | 34 | COPY ../scripts/ /app/scripts/ 35 | 36 | COPY ./alembic.ini /app/ 37 | 38 | COPY ./prestart.sh /app/ 39 | 40 | COPY ./tests-start.sh /app/ 41 | 42 | COPY ./app /app/app 43 | 44 | RUN mv /app/celery-reload.sh /celery-reload.sh && \ 45 | mv /app/celery.sh /celery.sh 46 | 47 | # Install node 48 | ARG NODE_VERSION=20.11.1 49 | 50 | RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash 51 | 52 | ENV NVM_DIR=/root/.nvm 53 | 54 | RUN . "$NVM_DIR/nvm.sh" && nvm install ${NODE_VERSION} && \ 55 | . "$NVM_DIR/nvm.sh" && nvm use v${NODE_VERSION} && \ 56 | . 
"$NVM_DIR/nvm.sh" && nvm alias default v${NODE_VERSION} 57 | 58 | ENV PATH="/root/.nvm/versions/node/v${NODE_VERSION}/bin/:${PATH}" 59 | 60 | RUN npm install -g nodemon 61 | 62 | # Install pg2b3dm binaries 63 | RUN cd /bin && \ 64 | wget https://github.com/Geodan/pg2b3dm/releases/download/v2.19.0/pg2b3dm-linux-x64.zip && \ 65 | unzip ./pg2b3dm-linux-x64.zip && \ 66 | rm -rf ./pg2b3dm-linux-x64.zip 67 | 68 | # Install i3dm.export binaries 69 | RUN cd /bin && \ 70 | wget https://github.com/Geodan/i3dm.export/releases/download/v2.11.0/i3dm.export-linux-x64.zip && \ 71 | unzip ./i3dm.export-linux-x64.zip && \ 72 | rm -rf ./i3dm.export-linux-x64.zip 73 | 74 | # Activate micromamba environment as the default one 75 | RUN echo "micromamba activate ${DDT_ENV}" >> ~/.bashrc 76 | 77 | ENV PATH="/root/micromamba/envs/${DDT_ENV}/bin:${PATH}" 78 | 79 | ENTRYPOINT ["bash", "-l", "-c"] 80 | -------------------------------------------------------------------------------- /backend/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = app/alembic 6 | 7 | # template used to generate migration files 8 | # file_template = %%(rev)s_%%(slug)s 9 | 10 | # timezone to use when rendering the date 11 | # within the migration file as well as the filename.
12 | # string value is passed to dateutil.tz.gettz() 13 | # leave blank for localtime 14 | # timezone = 15 | 16 | # max length of characters to apply to the 17 | # "slug" field 18 | #truncate_slug_length = 40 19 | 20 | # set to 'true' to run the environment during 21 | # the 'revision' command, regardless of autogenerate 22 | # revision_environment = false 23 | 24 | # set to 'true' to allow .pyc and .pyo files without 25 | # a source .py file to be detected as revisions in the 26 | # versions/ directory 27 | # sourceless = false 28 | 29 | # version location specification; this defaults 30 | # to alembic/versions. When using multiple version 31 | # directories, initial revisions must be specified with --version-path 32 | # version_locations = %(here)s/bar %(here)s/bat alembic/versions 33 | 34 | # the output encoding used when revision files 35 | # are written from script.py.mako 36 | # output_encoding = utf-8 37 | 38 | # Logging configuration 39 | [loggers] 40 | keys = root,sqlalchemy,alembic 41 | 42 | [handlers] 43 | keys = console 44 | 45 | [formatters] 46 | keys = generic 47 | 48 | [logger_root] 49 | level = WARN 50 | handlers = console 51 | qualname = 52 | 53 | [logger_sqlalchemy] 54 | level = WARN 55 | handlers = 56 | qualname = sqlalchemy.engine 57 | 58 | [logger_alembic] 59 | level = INFO 60 | handlers = 61 | qualname = alembic 62 | 63 | [handler_console] 64 | class = StreamHandler 65 | args = (sys.stderr,) 66 | level = NOTSET 67 | formatter = generic 68 | 69 | [formatter_generic] 70 | format = %(levelname)-5.5s [%(name)s] %(message)s 71 | datefmt = %H:%M:%S 72 | -------------------------------------------------------------------------------- /backend/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/__init__.py 
-------------------------------------------------------------------------------- /backend/app/alembic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. 2 | -------------------------------------------------------------------------------- /backend/app/alembic/env.py: -------------------------------------------------------------------------------- 1 | import os 2 | from logging.config import fileConfig 3 | 4 | from alembic import context 5 | from sqlalchemy import engine_from_config, pool 6 | 7 | # this is the Alembic Config object, which provides 8 | # access to the values within the .ini file in use. 9 | config = context.config 10 | 11 | # Interpret the config file for Python logging. 12 | # This line sets up loggers basically. 13 | fileConfig(config.config_file_name) 14 | 15 | # add your model's MetaData object here 16 | # for 'autogenerate' support 17 | # from myapp import mymodel 18 | # target_metadata = mymodel.Base.metadata 19 | # target_metadata = None 20 | 21 | from app.models import SQLModel # noqa 22 | from app.core.config import settings # noqa 23 | 24 | target_metadata = SQLModel.metadata 25 | 26 | # other values from the config, defined by the needs of env.py, 27 | # can be acquired: 28 | # my_important_option = config.get_main_option("my_important_option") 29 | # ... etc. 30 | 31 | # https://github.com/sqlalchemy/alembic/discussions/1282 32 | # https://gist.github.com/nathancahill/aeec99f6a3423c5ada77 33 | 34 | exclude_tables = ['spatial_ref_sys'] 35 | 36 | def include_object(object, name, type_, reflected, compare_to): 37 | if type_ == "table" and name in exclude_tables: 38 | return False 39 | else: 40 | return True 41 | 42 | 43 | def get_url(): 44 | return str(settings.SQLALCHEMY_DATABASE_URI) 45 | 46 | 47 | def run_migrations_offline(): 48 | """Run migrations in 'offline' mode. 
49 | 50 | This configures the context with just a URL 51 | and not an Engine, though an Engine is acceptable 52 | here as well. By skipping the Engine creation 53 | we don't even need a DBAPI to be available. 54 | 55 | Calls to context.execute() here emit the given string to the 56 | script output. 57 | 58 | """ 59 | url = get_url() 60 | context.configure( 61 | url=url, target_metadata=target_metadata, literal_binds=True, compare_type=True 62 | ) 63 | 64 | with context.begin_transaction(): 65 | context.run_migrations() 66 | 67 | 68 | def run_migrations_online(): 69 | """Run migrations in 'online' mode. 70 | 71 | In this scenario we need to create an Engine 72 | and associate a connection with the context. 73 | 74 | """ 75 | configuration = config.get_section(config.config_ini_section) 76 | configuration["sqlalchemy.url"] = get_url() 77 | connectable = engine_from_config( 78 | configuration, 79 | prefix="sqlalchemy.", 80 | poolclass=pool.NullPool, 81 | ) 82 | 83 | with connectable.connect() as connection: 84 | context.configure( 85 | connection=connection, 86 | target_metadata=target_metadata, 87 | compare_type=True, 88 | include_object=include_object, 89 | ) 90 | 91 | with context.begin_transaction(): 92 | context.run_migrations() 93 | 94 | if context.is_offline_mode(): 95 | run_migrations_offline() 96 | else: 97 | run_migrations_online() 98 | -------------------------------------------------------------------------------- /backend/app/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | import sqlmodel.sql.sqltypes 11 | ${imports if imports else ""} 12 | 13 | # revision identifiers, used by Alembic. 
14 | revision = ${repr(up_revision)} 15 | down_revision = ${repr(down_revision)} 16 | branch_labels = ${repr(branch_labels)} 17 | depends_on = ${repr(depends_on)} 18 | 19 | 20 | def upgrade(): 21 | ${upgrades if upgrades else "pass"} 22 | 23 | 24 | def downgrade(): 25 | ${downgrades if downgrades else "pass"} 26 | -------------------------------------------------------------------------------- /backend/app/alembic/versions/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/alembic/versions/.keep -------------------------------------------------------------------------------- /backend/app/alembic/versions/0f58b1b64e6f_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 0f58b1b64e6f 4 | Revises: 5 | Create Date: 2024-08-28 09:55:47.229904 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | import sqlmodel.sql.sqltypes 11 | from sqlalchemy.dialects import postgresql 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = '0f58b1b64e6f' 15 | down_revision = None 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | # ### commands auto generated by Alembic - please adjust! 
### 22 | op.create_table('user', 23 | sa.Column('email', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False), 24 | sa.Column('is_active', sa.Boolean(), nullable=False), 25 | sa.Column('is_superuser', sa.Boolean(), nullable=False), 26 | sa.Column('full_name', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=True), 27 | sa.Column('id', sa.Uuid(), nullable=False), 28 | sa.Column('hashed_password', sqlmodel.sql.sqltypes.AutoString(), nullable=False), 29 | sa.PrimaryKeyConstraint('id') 30 | ) 31 | op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True) 32 | op.create_table('asset', 33 | sa.Column('filename', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False), 34 | sa.Column('content_type', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=True), 35 | sa.Column('content_size', sa.Integer(), nullable=True), 36 | sa.Column('asset_type', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 37 | sa.Column('extension', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 38 | sa.Column('geometry_type', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 39 | sa.Column('upload_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 40 | sa.Column('upload_status', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 41 | sa.Column('upload_result', postgresql.JSONB(astext_type=sa.Text()), nullable=True), 42 | sa.Column('id', sa.Uuid(), nullable=False), 43 | sa.Column('owner_id', sa.Uuid(), nullable=False), 44 | sa.ForeignKeyConstraint(['owner_id'], ['user.id'], ondelete='CASCADE'), 45 | sa.PrimaryKeyConstraint('id') 46 | ) 47 | op.create_table('pipeline', 48 | sa.Column('title', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False), 49 | sa.Column('asset_id', sa.Uuid(), nullable=True), 50 | sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True), 51 | sa.Column('task_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 52 | sa.Column('task_status', sqlmodel.sql.sqltypes.AutoString(), nullable=True), 53 | 
sa.Column('task_result', postgresql.JSONB(astext_type=sa.Text()), nullable=True), 54 | sa.Column('id', sa.Uuid(), nullable=False), 55 | sa.Column('owner_id', sa.Uuid(), nullable=False), 56 | sa.ForeignKeyConstraint(['owner_id'], ['user.id'], ondelete='CASCADE'), 57 | sa.PrimaryKeyConstraint('id') 58 | ) 59 | # ### end Alembic commands ### 60 | 61 | 62 | def downgrade(): 63 | # ### commands auto generated by Alembic - please adjust! ### 64 | op.drop_table('pipeline') 65 | op.drop_table('asset') 66 | op.drop_index(op.f('ix_user_email'), table_name='user') 67 | op.drop_table('user') 68 | # ### end Alembic commands ### 69 | -------------------------------------------------------------------------------- /backend/app/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/api/__init__.py -------------------------------------------------------------------------------- /backend/app/api/deps.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Generator 2 | from typing import Annotated 3 | 4 | import jwt 5 | from fastapi import Depends, HTTPException, status 6 | from fastapi.security import OAuth2PasswordBearer 7 | from jwt.exceptions import InvalidTokenError 8 | from pydantic import ValidationError 9 | from sqlmodel import Session, select 10 | 11 | from app.core import security 12 | from app.core.config import settings 13 | from app.core.db import engine 14 | from app.models import TokenPayload, User 15 | 16 | reusable_oauth2 = OAuth2PasswordBearer( 17 | tokenUrl=f"{settings.API_V1_STR}/login/access-token" 18 | ) 19 | 20 | 21 | def get_db() -> Generator[Session, None, None]: 22 | with Session(engine) as session: 23 | yield session 24 | 25 | 26 | SessionDep = Annotated[Session, Depends(get_db)] 27 | TokenDep = Annotated[str, 
Depends(reusable_oauth2)] 28 | 29 | 30 | 31 | def get_current_user(session: SessionDep, token: TokenDep) -> User: 32 | 33 | 34 | try: 35 | payload = jwt.decode( 36 | token, settings.SECRET_KEY, algorithms=[security.ALGORITHM] 37 | ) 38 | token_data = TokenPayload(**payload) 39 | except (InvalidTokenError, ValidationError): 40 | raise HTTPException( 41 | status_code=status.HTTP_403_FORBIDDEN, 42 | detail="Could not validate credentials", 43 | ) 44 | user = session.get(User, token_data.sub) 45 | if not user: 46 | raise HTTPException(status_code=404, detail="User not found") 47 | if not user.is_active: 48 | raise HTTPException(status_code=400, detail="Inactive user") 49 | return user 50 | 51 | def get_no_security_user(session: SessionDep) -> User: 52 | user = session.exec( 53 | select(User).where(User.email == settings.FIRST_SUPERUSER) 54 | ).first() 55 | return user 56 | 57 | def get_annotated_current_user(): 58 | if not settings.DTT_ENABLE_USERS_MANAGEMENT: 59 | return Annotated[User, Depends(get_no_security_user)] 60 | return Annotated[User, Depends(get_current_user)] 61 | 62 | CurrentUser = get_annotated_current_user() 63 | 64 | def get_current_active_superuser(current_user: CurrentUser) -> User: 65 | if not current_user.is_superuser: 66 | raise HTTPException( 67 | status_code=403, detail="The user doesn't have enough privileges" 68 | ) 69 | return current_user 70 | -------------------------------------------------------------------------------- /backend/app/api/main.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from app.api.routes import login, users, utils, assets, pipelines 4 | 5 | api_router = APIRouter() 6 | api_router.include_router(login.router, tags=["login"]) 7 | api_router.include_router(users.router, prefix="/users", tags=["users"]) 8 | api_router.include_router(utils.router, prefix="/utils", tags=["utils"]) 9 | api_router.include_router(assets.router, prefix="/assets", 
tags=["assets"]) 10 | api_router.include_router(pipelines.router, prefix="/pipelines", tags=["pipelines"]) 11 | -------------------------------------------------------------------------------- /backend/app/api/routes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/api/routes/__init__.py -------------------------------------------------------------------------------- /backend/app/api/routes/login.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import Annotated, Any 3 | 4 | from fastapi import APIRouter, Depends, HTTPException 5 | from fastapi.responses import HTMLResponse 6 | from fastapi.security import OAuth2PasswordRequestForm 7 | 8 | from app import crud 9 | from app.api.deps import CurrentUser, SessionDep, get_current_active_superuser 10 | from app.core import security 11 | from app.core.config import settings 12 | from app.core.security import get_password_hash 13 | from app.models import Message, NewPassword, Token, UserPublic 14 | from app.utils import ( 15 | generate_password_reset_token, 16 | generate_reset_password_email, 17 | send_email, 18 | verify_password_reset_token, 19 | ) 20 | 21 | router = APIRouter() 22 | 23 | 24 | @router.post("/login/access-token") 25 | def login_access_token( 26 | session: SessionDep, form_data: Annotated[OAuth2PasswordRequestForm, Depends()] 27 | ) -> Token: 28 | """ 29 | OAuth2 compatible token login, get an access token for future requests 30 | """ 31 | user = crud.authenticate( 32 | session=session, email=form_data.username, password=form_data.password 33 | ) 34 | if not user: 35 | raise HTTPException(status_code=400, detail="Incorrect email or password") 36 | elif not user.is_active: 37 | raise HTTPException(status_code=400, detail="Inactive user") 38 | 
access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) 39 | return Token( 40 | access_token=security.create_access_token( 41 | user.id, expires_delta=access_token_expires 42 | ) 43 | ) 44 | 45 | 46 | @router.post("/login/test-token", response_model=UserPublic) 47 | def test_token(current_user: CurrentUser) -> Any: 48 | """ 49 | Test access token 50 | """ 51 | return current_user 52 | 53 | 54 | @router.post("/password-recovery/{email}") 55 | def recover_password(email: str, session: SessionDep) -> Message: 56 | """ 57 | Password Recovery 58 | """ 59 | user = crud.get_user_by_email(session=session, email=email) 60 | 61 | if not user: 62 | raise HTTPException( 63 | status_code=404, 64 | detail="The user with this email does not exist in the system.", 65 | ) 66 | password_reset_token = generate_password_reset_token(email=email) 67 | email_data = generate_reset_password_email( 68 | email_to=user.email, email=email, token=password_reset_token 69 | ) 70 | send_email( 71 | email_to=user.email, 72 | subject=email_data.subject, 73 | html_content=email_data.html_content, 74 | ) 75 | return Message(message="Password recovery email sent") 76 | 77 | 78 | @router.post("/reset-password/") 79 | def reset_password(session: SessionDep, body: NewPassword) -> Message: 80 | """ 81 | Reset password 82 | """ 83 | email = verify_password_reset_token(token=body.token) 84 | if not email: 85 | raise HTTPException(status_code=400, detail="Invalid token") 86 | user = crud.get_user_by_email(session=session, email=email) 87 | if not user: 88 | raise HTTPException( 89 | status_code=404, 90 | detail="The user with this email does not exist in the system.", 91 | ) 92 | elif not user.is_active: 93 | raise HTTPException(status_code=400, detail="Inactive user") 94 | hashed_password = get_password_hash(password=body.new_password) 95 | user.hashed_password = hashed_password 96 | session.add(user) 97 | session.commit() 98 | return Message(message="Password updated successfully") 99 
@router.post(
    "/password-recovery-html-content/{email}",
    dependencies=[Depends(get_current_active_superuser)],
    response_class=HTMLResponse,
)
def recover_password_html_content(email: str, session: SessionDep) -> Any:
    """
    HTML Content for Password Recovery

    Superuser-only preview endpoint: generates the same reset token and
    rendered template as the real recovery flow, but returns the HTML
    instead of sending an email.

    Raises:
        HTTPException: 404 if no user exists for the given email.
    """
    user = crud.get_user_by_email(session=session, email=email)

    if not user:
        raise HTTPException(
            status_code=404,
            detail="The user with this username does not exist in the system.",
        )
    password_reset_token = generate_password_reset_token(email=email)
    email_data = generate_reset_password_email(
        email_to=user.email, email=email, token=password_reset_token
    )

    # BUG FIX: the header name was "subject:" (trailing colon). A colon is
    # not a legal character inside an HTTP header field name, so servers,
    # clients or proxies may reject or drop the header entirely.
    return HTMLResponse(
        content=email_data.html_content, headers={"subject": email_data.subject}
    )
finished initializing") 36 | 37 | 38 | if __name__ == "__main__": 39 | main() 40 | -------------------------------------------------------------------------------- /backend/app/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/core/__init__.py -------------------------------------------------------------------------------- /backend/app/core/config.py: -------------------------------------------------------------------------------- 1 | import secrets 2 | import warnings 3 | from typing import Annotated, Any, Literal 4 | 5 | from pydantic import ( 6 | AnyUrl, 7 | BeforeValidator, 8 | HttpUrl, 9 | PostgresDsn, 10 | computed_field, 11 | model_validator, 12 | ) 13 | from pydantic_core import MultiHostUrl 14 | from pydantic_settings import BaseSettings, SettingsConfigDict 15 | from typing_extensions import Self 16 | 17 | 18 | def parse_cors(v: Any) -> list[str] | str: 19 | if isinstance(v, str) and not v.startswith("["): 20 | return [i.strip() for i in v.split(",")] 21 | elif isinstance(v, list | str): 22 | return v 23 | raise ValueError(v) 24 | 25 | 26 | class Settings(BaseSettings): 27 | model_config = SettingsConfigDict( 28 | env_file=".env", env_ignore_empty=True, extra="ignore" 29 | ) 30 | API_V1_STR: str = "/api/v1" 31 | SECRET_KEY: str = secrets.token_urlsafe(32) 32 | # 60 minutes * 24 hours * 8 days = 8 days 33 | ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8 34 | DOMAIN: str = "localhost" 35 | ENVIRONMENT: Literal["local", "staging", "production"] = "local" 36 | 37 | @computed_field # type: ignore[prop-decorator] 38 | @property 39 | def server_host(self) -> str: 40 | # Use HTTPS for anything other than local development 41 | if self.ENVIRONMENT == "local": 42 | return f"http://{self.DOMAIN}" 43 | return f"https://{self.DOMAIN}" 44 | 45 | BACKEND_CORS_ORIGINS: Annotated[ 46 | list[AnyUrl] | 
    @computed_field  # type: ignore[prop-decorator]
    @property
    def SQLALCHEMY_DATABASE_URI(self) -> PostgresDsn:
        # Assemble the application database DSN from the individual
        # POSTGRES_* settings; "postgresql+psycopg" selects the psycopg v3
        # driver under SQLAlchemy.
        return MultiHostUrl.build(
            scheme="postgresql+psycopg",
            username=self.POSTGRES_USER,
            password=self.POSTGRES_PASSWORD,
            host=self.POSTGRES_SERVER,
            port=self.POSTGRES_PORT,
            path=self.POSTGRES_DB,
            # Pin the search_path so queries resolve against the "public"
            # schema regardless of the role's default. Note the tasks DSN
            # below intentionally omits this option.
            query="options=-csearch_path=public"
        )
"test@example.com" 110 | # TODO: update type to EmailStr when sqlmodel supports it 111 | FIRST_SUPERUSER: str 112 | FIRST_SUPERUSER_PASSWORD: str 113 | 114 | def _check_default_secret(self, var_name: str, value: str | None) -> None: 115 | if value == "changethis": 116 | message = ( 117 | f'The value of {var_name} is "changethis", ' 118 | "for security, please change it, at least for deployments." 119 | ) 120 | if self.ENVIRONMENT == "local": 121 | warnings.warn(message, stacklevel=1) 122 | else: 123 | raise ValueError(message) 124 | 125 | @model_validator(mode="after") 126 | def _enforce_non_default_secrets(self) -> Self: 127 | self._check_default_secret("SECRET_KEY", self.SECRET_KEY) 128 | self._check_default_secret("POSTGRES_PASSWORD", self.POSTGRES_PASSWORD) 129 | self._check_default_secret( 130 | "FIRST_SUPERUSER_PASSWORD", self.FIRST_SUPERUSER_PASSWORD 131 | ) 132 | 133 | return self 134 | 135 | 136 | settings = Settings() # type: ignore 137 | -------------------------------------------------------------------------------- /backend/app/core/db.py: -------------------------------------------------------------------------------- 1 | from sqlmodel import Session, create_engine, select 2 | 3 | from app import crud 4 | from app.core.config import settings 5 | from app.models import User, UserCreate 6 | import subprocess 7 | 8 | engine = create_engine(str(settings.SQLALCHEMY_DATABASE_URI)) 9 | engine_tasks = create_engine(str(settings.SQLALCHEMY_TASKS_DATABASE_URI)) 10 | 11 | # make sure all SQLModel models are imported (app.models) before initializing DB 12 | # otherwise, SQLModel might fail to initialize relationships properly 13 | # for more details: https://github.com/fastapi/full-stack-fastapi-template/issues/28 14 | 15 | 16 | def init_db(session: Session) -> None: 17 | # Tables should be created with Alembic migrations 18 | # But if you don't want to use migrations, create 19 | # the tables un-commenting the next lines 20 | # from sqlmodel import SQLModel 21 | 
def init_tasks_db() -> None:
    # Create the tasks database and enable the PostGIS extension stack in
    # it by shelling out to the psql CLI.
    # NOTE(review): subprocess return codes are deliberately not checked —
    # e.g. CREATE DATABASE fails harmlessly when the database already
    # exists, making this function idempotent in practice. Confirm this is
    # intended before adding check=True.
    # NOTE(review): the password is embedded in the connection URL, so it
    # can leak into process listings; consider the PGPASSWORD env var.
    psql_connection = f"postgresql://{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}@{settings.POSTGRES_SERVER}:{settings.POSTGRES_PORT}"

    subprocess.run([ 'psql', psql_connection, '-c', f'CREATE DATABASE {settings.POSTGRES_TASKS_DB};' ])
    psql_connection_db = f"{psql_connection}/{settings.POSTGRES_TASKS_DB}"
    subprocess.run([ 'psql', psql_connection_db, '-c', 'CREATE EXTENSION IF NOT EXISTS postgis;' ])
    subprocess.run([ 'psql', psql_connection_db, '-c', 'CREATE EXTENSION IF NOT EXISTS postgis_topology;' ])
    # Reconnect to update pg_setting.resetval
    # See https://github.com/postgis/docker-postgis/issues/288
    subprocess.run([ 'psql', psql_connection_db, '-c', '\c' ])
    subprocess.run([ 'psql', psql_connection_db, '-c', 'CREATE EXTENSION IF NOT EXISTS fuzzystrmatch;' ])
    subprocess.run([ 'psql', psql_connection_db, '-c', 'CREATE EXTENSION IF NOT EXISTS postgis_tiger_geocoder;' ])
    return None
def create_access_token(subject: str | Any, expires_delta: timedelta) -> str:
    """Create a signed JWT whose "sub" claim is the given subject.

    Args:
        subject: Identifier stored (stringified) in the token's "sub" claim.
        expires_delta: How long from now the token remains valid.

    Returns:
        The encoded JWT string, signed with the application secret.
    """
    expires_at = datetime.now(timezone.utc) + expires_delta
    claims = {"exp": expires_at, "sub": str(subject)}
    return jwt.encode(claims, settings.SECRET_KEY, algorithm=ALGORITHM)
session_user 38 | 39 | 40 | def authenticate(*, session: Session, email: str, password: str) -> User | None: 41 | db_user = get_user_by_email(session=session, email=email) 42 | if not db_user: 43 | return None 44 | if not verify_password(password, db_user.hashed_password): 45 | return None 46 | return db_user 47 | -------------------------------------------------------------------------------- /backend/app/email-templates/build/test_email.html: -------------------------------------------------------------------------------- 1 |
{{ project_name }}
Test email for: {{ email }}

-------------------------------------------------------------------------------- /backend/app/email-templates/src/new_account.mjml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {{ project_name }} - New Account 6 | Welcome to your new account! 7 | Here are your account details: 8 | Username: {{ username }} 9 | Password: {{ password }} 10 | Go to Dashboard 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /backend/app/email-templates/src/reset_password.mjml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {{ project_name }} - Password Recovery 6 | Hello {{ username }} 7 | We've received a request to reset your password. You can do it by clicking the button below: 8 | Reset password 9 | Or copy and paste the following link into your browser: 10 | {{ link }} 11 | This password will expire in {{ valid_hours }} hours. 12 | 13 | If you didn't request a password recovery you can disregard this email. 
14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /backend/app/email-templates/src/test_email.mjml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {{ project_name }} 6 | Test email for: {{ email }} 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /backend/app/initial_data.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from sqlmodel import Session 4 | 5 | from app.core.db import engine, init_db, init_tasks_db 6 | 7 | logging.basicConfig(level=logging.INFO) 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | def init() -> None: 12 | with Session(engine) as session: 13 | init_db(session) 14 | 15 | init_tasks_db() 16 | 17 | 18 | def main() -> None: 19 | logger.info("Creating initial data") 20 | init() 21 | logger.info("Initial data created") 22 | 23 | 24 | if __name__ == "__main__": 25 | main() 26 | -------------------------------------------------------------------------------- /backend/app/main.py: -------------------------------------------------------------------------------- 1 | import sentry_sdk 2 | from fastapi import FastAPI 3 | from fastapi.routing import APIRoute 4 | from starlette.middleware.cors import CORSMiddleware 5 | from fastapi.staticfiles import StaticFiles 6 | import os 7 | 8 | from app.api.main import api_router 9 | from app.core.config import settings 10 | 11 | 12 | def custom_generate_unique_id(route: APIRoute) -> str: 13 | return f"{route.tags[0]}-{route.name}" 14 | 15 | 16 | if settings.SENTRY_DSN and settings.ENVIRONMENT != "local": 17 | sentry_sdk.init(dsn=str(settings.SENTRY_DSN), enable_tracing=True) 18 | 19 | app = FastAPI( 20 | title=settings.PROJECT_NAME, 21 | openapi_url=f"{settings.API_V1_STR}/openapi.json", 22 | generate_unique_id_function=custom_generate_unique_id, 23 | ) 24 | 25 | # Set all CORS 
enabled origins 26 | if settings.BACKEND_CORS_ORIGINS: 27 | app.add_middleware( 28 | CORSMiddleware, 29 | allow_origins=[ 30 | str(origin).strip("/") for origin in settings.BACKEND_CORS_ORIGINS 31 | ], 32 | allow_credentials=True, 33 | allow_methods=["*"], 34 | allow_headers=["*"], 35 | ) 36 | 37 | app.include_router(api_router, prefix=settings.API_V1_STR) 38 | 39 | static_output = os.path.join(settings.ASSETS_DATA, "output") 40 | 41 | os.makedirs(static_output, exist_ok=True) 42 | 43 | app.mount( 44 | settings.API_V1_STR + '/output', 45 | StaticFiles(directory=static_output), 46 | name="output" 47 | ) 48 | -------------------------------------------------------------------------------- /backend/app/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/tests/__init__.py -------------------------------------------------------------------------------- /backend/app/tests/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/tests/api/__init__.py -------------------------------------------------------------------------------- /backend/app/tests/api/routes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/tests/api/routes/__init__.py -------------------------------------------------------------------------------- /backend/app/tests/api/routes/test_login.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | from fastapi.testclient import TestClient 4 | from sqlmodel import Session, select 5 | 6 | from 
app.core.config import settings 7 | from app.core.security import verify_password 8 | from app.models import User 9 | from app.utils import generate_password_reset_token 10 | 11 | 12 | def test_get_access_token(client: TestClient) -> None: 13 | login_data = { 14 | "username": settings.FIRST_SUPERUSER, 15 | "password": settings.FIRST_SUPERUSER_PASSWORD, 16 | } 17 | r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data) 18 | tokens = r.json() 19 | assert r.status_code == 200 20 | assert "access_token" in tokens 21 | assert tokens["access_token"] 22 | 23 | 24 | def test_get_access_token_incorrect_password(client: TestClient) -> None: 25 | login_data = { 26 | "username": settings.FIRST_SUPERUSER, 27 | "password": "incorrect", 28 | } 29 | r = client.post(f"{settings.API_V1_STR}/login/access-token", data=login_data) 30 | assert r.status_code == 400 31 | 32 | 33 | def test_use_access_token( 34 | client: TestClient, superuser_token_headers: dict[str, str] 35 | ) -> None: 36 | r = client.post( 37 | f"{settings.API_V1_STR}/login/test-token", 38 | headers=superuser_token_headers, 39 | ) 40 | result = r.json() 41 | assert r.status_code == 200 42 | assert "email" in result 43 | 44 | 45 | def test_recovery_password( 46 | client: TestClient, normal_user_token_headers: dict[str, str] 47 | ) -> None: 48 | with ( 49 | patch("app.core.config.settings.SMTP_HOST", "smtp.example.com"), 50 | patch("app.core.config.settings.SMTP_USER", "admin@example.com"), 51 | ): 52 | email = "test@example.com" 53 | r = client.post( 54 | f"{settings.API_V1_STR}/password-recovery/{email}", 55 | headers=normal_user_token_headers, 56 | ) 57 | assert r.status_code == 200 58 | assert r.json() == {"message": "Password recovery email sent"} 59 | 60 | 61 | def test_recovery_password_user_not_exits( 62 | client: TestClient, normal_user_token_headers: dict[str, str] 63 | ) -> None: 64 | email = "jVgQr@example.com" 65 | r = client.post( 66 | 
f"{settings.API_V1_STR}/password-recovery/{email}", 67 | headers=normal_user_token_headers, 68 | ) 69 | assert r.status_code == 404 70 | 71 | 72 | def test_reset_password( 73 | client: TestClient, superuser_token_headers: dict[str, str], db: Session 74 | ) -> None: 75 | token = generate_password_reset_token(email=settings.FIRST_SUPERUSER) 76 | data = {"new_password": "changethis", "token": token} 77 | r = client.post( 78 | f"{settings.API_V1_STR}/reset-password/", 79 | headers=superuser_token_headers, 80 | json=data, 81 | ) 82 | assert r.status_code == 200 83 | assert r.json() == {"message": "Password updated successfully"} 84 | 85 | user_query = select(User).where(User.email == settings.FIRST_SUPERUSER) 86 | user = db.exec(user_query).first() 87 | assert user 88 | assert verify_password(data["new_password"], user.hashed_password) 89 | 90 | 91 | def test_reset_password_invalid_token( 92 | client: TestClient, superuser_token_headers: dict[str, str] 93 | ) -> None: 94 | data = {"new_password": "changethis", "token": "invalid"} 95 | r = client.post( 96 | f"{settings.API_V1_STR}/reset-password/", 97 | headers=superuser_token_headers, 98 | json=data, 99 | ) 100 | response = r.json() 101 | 102 | assert "detail" in response 103 | assert r.status_code == 400 104 | assert response["detail"] == "Invalid token" 105 | -------------------------------------------------------------------------------- /backend/app/tests/conftest.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Generator 2 | 3 | import pytest 4 | from fastapi.testclient import TestClient 5 | from sqlmodel import Session, delete 6 | 7 | from app.core.config import settings 8 | from app.core.db import engine, init_db 9 | from app.main import app 10 | from app.models import User 11 | from app.tests.utils.user import authentication_token_from_email 12 | from app.tests.utils.utils import get_superuser_token_headers 13 | 14 | 15 | 
@pytest.fixture(scope="session", autouse=True) 16 | def db() -> Generator[Session, None, None]: 17 | with Session(engine) as session: 18 | init_db(session) 19 | yield session 20 | statement = delete(User) 21 | session.execute(statement) 22 | session.commit() 23 | 24 | 25 | @pytest.fixture(scope="module") 26 | def client() -> Generator[TestClient, None, None]: 27 | with TestClient(app) as c: 28 | yield c 29 | 30 | 31 | @pytest.fixture(scope="module") 32 | def superuser_token_headers(client: TestClient) -> dict[str, str]: 33 | return get_superuser_token_headers(client) 34 | 35 | 36 | @pytest.fixture(scope="module") 37 | def normal_user_token_headers(client: TestClient, db: Session) -> dict[str, str]: 38 | return authentication_token_from_email( 39 | client=client, email=settings.EMAIL_TEST_USER, db=db 40 | ) 41 | -------------------------------------------------------------------------------- /backend/app/tests/crud/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/tests/crud/__init__.py -------------------------------------------------------------------------------- /backend/app/tests/crud/test_user.py: -------------------------------------------------------------------------------- 1 | from fastapi.encoders import jsonable_encoder 2 | from sqlmodel import Session 3 | 4 | from app import crud 5 | from app.core.security import verify_password 6 | from app.models import User, UserCreate, UserUpdate 7 | from app.tests.utils.utils import random_email, random_lower_string 8 | 9 | 10 | def test_create_user(db: Session) -> None: 11 | email = random_email() 12 | password = random_lower_string() 13 | user_in = UserCreate(email=email, password=password) 14 | user = crud.create_user(session=db, user_create=user_in) 15 | assert user.email == email 16 | assert hasattr(user, "hashed_password") 17 | 18 | 19 | def 
def test_check_if_user_is_active_inactive(db: Session) -> None:
    """A user created with is_active=False must not be reported as active."""
    email = random_email()
    password = random_lower_string()
    # BUG FIX: the original passed a non-existent "disabled=True" field
    # (silently ignored by the model) and then asserted that the user IS
    # active — so the inactive path was never exercised. Create the user
    # explicitly inactive and assert that state.
    user_in = UserCreate(email=email, password=password, is_active=False)
    user = crud.create_user(session=db, user_create=user_in)
    assert user.is_active is False
user_in = UserCreate(email=username, password=password, is_superuser=True) 72 | user = crud.create_user(session=db, user_create=user_in) 73 | user_2 = db.get(User, user.id) 74 | assert user_2 75 | assert user.email == user_2.email 76 | assert jsonable_encoder(user) == jsonable_encoder(user_2) 77 | 78 | 79 | def test_update_user(db: Session) -> None: 80 | password = random_lower_string() 81 | email = random_email() 82 | user_in = UserCreate(email=email, password=password, is_superuser=True) 83 | user = crud.create_user(session=db, user_create=user_in) 84 | new_password = random_lower_string() 85 | user_in_update = UserUpdate(password=new_password, is_superuser=True) 86 | if user.id is not None: 87 | crud.update_user(session=db, db_user=user, user_in=user_in_update) 88 | user_2 = db.get(User, user.id) 89 | assert user_2 90 | assert user.email == user_2.email 91 | assert verify_password(new_password, user_2.hashed_password) 92 | -------------------------------------------------------------------------------- /backend/app/tests/scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/tests/scripts/__init__.py -------------------------------------------------------------------------------- /backend/app/tests/scripts/test_backend_pre_start.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | from sqlmodel import select 4 | 5 | from app.backend_pre_start import init, logger 6 | 7 | 8 | def test_init_successful_connection() -> None: 9 | engine_mock = MagicMock() 10 | 11 | session_mock = MagicMock() 12 | exec_mock = MagicMock(return_value=True) 13 | session_mock.configure_mock(**{"exec.return_value": exec_mock}) 14 | 15 | with ( 16 | patch("sqlmodel.Session", return_value=session_mock), 17 | patch.object(logger, "info"), 
def test_init_successful_connection() -> None:
    """init() should succeed against a mocked engine and query the session."""
    engine_mock = MagicMock()

    session_mock = MagicMock()
    exec_mock = MagicMock(return_value=True)
    session_mock.configure_mock(**{"exec.return_value": exec_mock})
    # Make `with Session(...) as session:` yield the mock itself so the
    # exec() call below can be asserted on session_mock.
    session_mock.__enter__.return_value = session_mock

    with (
        # BUG FIX: patch the name where it is *used*. app.tests_pre_start
        # does `from sqlmodel import Session`, so patching
        # "sqlmodel.Session" never intercepted the call and session_mock
        # was never exercised.
        patch("app.tests_pre_start.Session", return_value=session_mock),
        patch.object(logger, "info"),
        patch.object(logger, "error"),
        patch.object(logger, "warn"),
    ):
        try:
            init(engine_mock)
            connection_successful = True
        except Exception:
            connection_successful = False

    assert (
        connection_successful
    ), "The database connection should be successful and not raise an exception."

    # BUG FIX: `called_once_with(...)` is not a Mock assertion method — it
    # just creates a new (truthy) child mock, so the original assert could
    # never fail. Assert the call count for real; arguments are not
    # compared because select(1) builds a fresh statement object without
    # value equality.
    session_mock.exec.assert_called_once()
34 | -------------------------------------------------------------------------------- /backend/app/tests/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/tests/utils/__init__.py -------------------------------------------------------------------------------- /backend/app/tests/utils/user.py: -------------------------------------------------------------------------------- 1 | from fastapi.testclient import TestClient 2 | from sqlmodel import Session 3 | 4 | from app import crud 5 | from app.core.config import settings 6 | from app.models import User, UserCreate, UserUpdate 7 | from app.tests.utils.utils import random_email, random_lower_string 8 | 9 | 10 | def user_authentication_headers( 11 | *, client: TestClient, email: str, password: str 12 | ) -> dict[str, str]: 13 | data = {"username": email, "password": password} 14 | 15 | r = client.post(f"{settings.API_V1_STR}/login/access-token", data=data) 16 | response = r.json() 17 | auth_token = response["access_token"] 18 | headers = {"Authorization": f"Bearer {auth_token}"} 19 | return headers 20 | 21 | 22 | def create_random_user(db: Session) -> User: 23 | email = random_email() 24 | password = random_lower_string() 25 | user_in = UserCreate(email=email, password=password) 26 | user = crud.create_user(session=db, user_create=user_in) 27 | return user 28 | 29 | 30 | def authentication_token_from_email( 31 | *, client: TestClient, email: str, db: Session 32 | ) -> dict[str, str]: 33 | """ 34 | Return a valid token for the user with given email. 35 | 36 | If the user doesn't exist it is created first. 
def random_lower_string() -> str:
    """Return a 32-character string of random ASCII lowercase letters."""
    return "".join(random.choice(string.ascii_lowercase) for _ in range(32))


def random_email() -> str:
    """Return a random, syntactically email-shaped address ending in .com."""
    local_part = random_lower_string()
    domain_part = random_lower_string()
    return f"{local_part}@{domain_part}.com"
logging.basicConfig(level=logging.INFO) 10 | logger = logging.getLogger(__name__) 11 | 12 | max_tries = 60 * 5 # 5 minutes 13 | wait_seconds = 1 14 | 15 | 16 | @retry( 17 | stop=stop_after_attempt(max_tries), 18 | wait=wait_fixed(wait_seconds), 19 | before=before_log(logger, logging.INFO), 20 | after=after_log(logger, logging.WARN), 21 | ) 22 | def init(db_engine: Engine) -> None: 23 | try: 24 | # Try to create session to check if DB is awake 25 | with Session(db_engine) as session: 26 | session.exec(select(1)) 27 | except Exception as e: 28 | logger.error(e) 29 | raise e 30 | 31 | 32 | def main() -> None: 33 | logger.info("Initializing service") 34 | init(engine) 35 | logger.info("Service finished initializing") 36 | 37 | 38 | if __name__ == "__main__": 39 | main() 40 | -------------------------------------------------------------------------------- /backend/app/utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from dataclasses import dataclass 3 | from datetime import datetime, timedelta, timezone 4 | from pathlib import Path 5 | from typing import Any 6 | 7 | import emails # type: ignore 8 | import jwt 9 | from jinja2 import Template 10 | from jwt.exceptions import InvalidTokenError 11 | 12 | from app.core.config import settings 13 | 14 | 15 | @dataclass 16 | class EmailData: 17 | html_content: str 18 | subject: str 19 | 20 | 21 | def render_email_template(*, template_name: str, context: dict[str, Any]) -> str: 22 | template_str = ( 23 | Path(__file__).parent / "email-templates" / "build" / template_name 24 | ).read_text() 25 | html_content = Template(template_str).render(context) 26 | return html_content 27 | 28 | 29 | def send_email( 30 | *, 31 | email_to: str, 32 | subject: str = "", 33 | html_content: str = "", 34 | ) -> None: 35 | assert settings.emails_enabled, "no provided configuration for email variables" 36 | message = emails.Message( 37 | subject=subject, 38 | html=html_content, 39 | 
mail_from=(settings.EMAILS_FROM_NAME, settings.EMAILS_FROM_EMAIL), 40 | ) 41 | smtp_options = {"host": settings.SMTP_HOST, "port": settings.SMTP_PORT} 42 | if settings.SMTP_TLS: 43 | smtp_options["tls"] = True 44 | elif settings.SMTP_SSL: 45 | smtp_options["ssl"] = True 46 | if settings.SMTP_USER: 47 | smtp_options["user"] = settings.SMTP_USER 48 | if settings.SMTP_PASSWORD: 49 | smtp_options["password"] = settings.SMTP_PASSWORD 50 | response = message.send(to=email_to, smtp=smtp_options) 51 | logging.info(f"send email result: {response}") 52 | 53 | 54 | def generate_test_email(email_to: str) -> EmailData: 55 | project_name = settings.PROJECT_NAME 56 | subject = f"{project_name} - Test email" 57 | html_content = render_email_template( 58 | template_name="test_email.html", 59 | context={"project_name": settings.PROJECT_NAME, "email": email_to}, 60 | ) 61 | return EmailData(html_content=html_content, subject=subject) 62 | 63 | 64 | def generate_reset_password_email(email_to: str, email: str, token: str) -> EmailData: 65 | project_name = settings.PROJECT_NAME 66 | subject = f"{project_name} - Password recovery for user {email}" 67 | link = f"{settings.server_host}/reset-password?token={token}" 68 | html_content = render_email_template( 69 | template_name="reset_password.html", 70 | context={ 71 | "project_name": settings.PROJECT_NAME, 72 | "username": email, 73 | "email": email_to, 74 | "valid_hours": settings.EMAIL_RESET_TOKEN_EXPIRE_HOURS, 75 | "link": link, 76 | }, 77 | ) 78 | return EmailData(html_content=html_content, subject=subject) 79 | 80 | 81 | def generate_new_account_email( 82 | email_to: str, username: str, password: str 83 | ) -> EmailData: 84 | project_name = settings.PROJECT_NAME 85 | subject = f"{project_name} - New account for user {username}" 86 | html_content = render_email_template( 87 | template_name="new_account.html", 88 | context={ 89 | "project_name": settings.PROJECT_NAME, 90 | "username": username, 91 | "password": password, 92 | "email": 
email_to, 93 | "link": settings.server_host, 94 | }, 95 | ) 96 | return EmailData(html_content=html_content, subject=subject) 97 | 98 | 99 | def generate_password_reset_token(email: str) -> str: 100 | delta = timedelta(hours=settings.EMAIL_RESET_TOKEN_EXPIRE_HOURS) 101 | now = datetime.now(timezone.utc) 102 | expires = now + delta 103 | exp = expires.timestamp() 104 | encoded_jwt = jwt.encode( 105 | {"exp": exp, "nbf": now, "sub": email}, 106 | settings.SECRET_KEY, 107 | algorithm="HS256", 108 | ) 109 | return encoded_jwt 110 | 111 | 112 | def verify_password_reset_token(token: str) -> str | None: 113 | try: 114 | decoded_token = jwt.decode(token, settings.SECRET_KEY, algorithms=["HS256"]) 115 | return str(decoded_token["sub"]) 116 | except InvalidTokenError: 117 | return None 118 | -------------------------------------------------------------------------------- /backend/app/worker/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/app/worker/__init__.py -------------------------------------------------------------------------------- /backend/app/worker/cartesian.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | RADIANS_PER_DEGREE = math.pi / 180.0 4 | 5 | RADII_SQUARED = [ 6 | 6378137.0 * 6378137.0, 7 | 6378137.0 * 6378137.0, 8 | 6356752.3142451793 * 6356752.3142451793, 9 | ] 10 | 11 | def to_radians(degrees): 12 | return degrees * RADIANS_PER_DEGREE 13 | 14 | def vector_length(vector): 15 | x = vector[0] 16 | y = vector[1] 17 | z = vector[2] 18 | return x * x + y * y + z * z 19 | 20 | def vector_multiply_scalar(vector, scalar): 21 | x = vector[0] 22 | y = vector[1] 23 | z = vector[2] 24 | return [x * scalar, y * scalar, z * scalar] 25 | 26 | def vector_divide_scalar(vector, scalar): 27 | return vector_multiply_scalar(vector, 1 / scalar) 28 | 29 
| def vector_normalize(vector): 30 | return vector_divide_scalar(vector, vector_length(vector)) 31 | 32 | def vector_add(a, b): 33 | return [a[0] + b[0], a[1] + b[1], a[2] + b[2]] 34 | 35 | def vector_multiply(a, b): 36 | return [a[0] * b[0], a[1] * b[1], a[2] * b[2]] 37 | 38 | def vector_dot(a, b): 39 | return a[0] * b[0] + a[1] * b[1] + a[2] * b[2] 40 | 41 | def from_radians(longitude, latitude, height): 42 | cos_latitude = math.cos(latitude) 43 | x = cos_latitude * math.cos(longitude) 44 | y = cos_latitude * math.sin(longitude) 45 | z = math.sin(latitude) 46 | 47 | normalized = vector_normalize([x, y, z]) 48 | k = vector_multiply(RADII_SQUARED, normalized) 49 | gamma = math.sqrt(vector_dot(normalized, k)) 50 | 51 | output = vector_add( 52 | vector_divide_scalar(k, gamma), 53 | vector_multiply_scalar(normalized, height) 54 | ) 55 | return output 56 | 57 | def convert_to_cartesian(coords): 58 | longitude = coords[0] 59 | latitude = coords[1] 60 | height = coords[2] 61 | if not height: 62 | height = 0 63 | return from_radians(to_radians(longitude), to_radians(latitude), height) 64 | -------------------------------------------------------------------------------- /backend/app/worker/expression.py: -------------------------------------------------------------------------------- 1 | 2 | import math 3 | 4 | def get_geometry(feature): 5 | if 'geometry' in feature: 6 | return feature['geometry'] 7 | return { 'type': '' } 8 | 9 | def get_z(coordinates): 10 | if len(coordinates) == 3: 11 | return coordinates[2] 12 | return 0 13 | 14 | def get_func(name, feature): 15 | if name in ['$maxZ', '$minZ']: 16 | geometry = get_geometry(feature) 17 | if geometry['type'] == 'Point': 18 | return get_z(geometry['coordinates']) 19 | if geometry['type'] == 'Polygon': 20 | min_z = math.inf 21 | max_z = -math.inf 22 | for ring in geometry['coordinates']: 23 | for coords in ring: 24 | z = get_z(coords) 25 | if z < min_z: 26 | min_z = z 27 | if z > max_z: 28 | max_z = z 29 | if name == 
'$maxZ': 30 | return max_z 31 | return min_z 32 | return None 33 | 34 | def parse_expression(type, value, feature, default_value = None): 35 | try: 36 | if not isinstance(value, list): 37 | if value == None: 38 | return None 39 | if type == 'string': 40 | return f"{value}" 41 | if type == 'number': 42 | return float(value) 43 | return value 44 | operator = value[0] 45 | 46 | _type = type 47 | if operator in ['property', 'func']: 48 | _type = '' 49 | 50 | value_length = len(value) 51 | 52 | a = None 53 | if value_length > 1: 54 | value_a = value[1] 55 | a = parse_expression(_type, value_a, feature) 56 | 57 | b = None 58 | if value_length > 2: 59 | value_b = value[2] 60 | b = parse_expression(_type, value_b, feature) 61 | 62 | if operator == '+': 63 | return float(a) + float(b) 64 | if operator == "-": 65 | return float(a) - float(b) 66 | if operator == "*": 67 | return float(a) * float(b) 68 | if operator == "/": 69 | return float(a) / float(b) 70 | if operator == "concat": 71 | return f"{a}{b}" 72 | if operator == "lowercase": 73 | return f"{a}".lower() 74 | if operator == "uppercase": 75 | return f"{a}".upper() 76 | if operator == "property": 77 | if a in feature['properties']: 78 | return feature['properties'][a] 79 | if operator == "func": 80 | return get_func(a, feature) 81 | return None 82 | except Exception as e: 83 | return default_value 84 | -------------------------------------------------------------------------------- /backend/app/worker/main.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | from sqlmodel import Session 4 | from app.core.db import engine 5 | 6 | from celery import Celery, Task 7 | from app.models import Pipeline, Asset 8 | from celery.states import SUCCESS 9 | import app.worker.tasks as tasks 10 | 11 | celery = Celery(__name__) 12 | celery.conf.broker_url = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379") 13 | celery.conf.result_backend = 
os.environ.get("CELERY_RESULT_BACKEND", "redis://localhost:6379") 14 | 15 | class PipelineDatabaseTask(Task): 16 | abstract = True 17 | def after_return(self, status, retval, task_id, args, kwargs, einfo): 18 | with Session(engine) as session: 19 | pipeline_extended = kwargs.get('pipeline_extended') 20 | pipeline = session.get(Pipeline, pipeline_extended['id']) 21 | pipeline_in = { 22 | "task_id": task_id, 23 | "task_status": status 24 | } 25 | if status == SUCCESS: 26 | pipeline_in['task_result'] = retval 27 | 28 | pipeline.sqlmodel_update(pipeline_in) 29 | session.add(pipeline) 30 | session.commit() 31 | session.refresh(pipeline) 32 | 33 | class AssetDatabaseTask(Task): 34 | abstract = True 35 | def after_return(self, status, retval, task_id, args, kwargs, einfo): 36 | with Session(engine) as session: 37 | options = kwargs.get('options') 38 | asset_obj = options['asset'] 39 | asset = session.get(Asset, asset_obj['id']) 40 | asset_in = { 41 | "upload_id": task_id, 42 | "upload_status": status 43 | } 44 | if status == SUCCESS: 45 | asset_in['asset_type'] = retval['asset_type'] 46 | asset_in['geometry_type'] = retval['geometry_type'] 47 | asset_in['upload_result'] = retval['payload'] 48 | 49 | asset.sqlmodel_update(asset_in) 50 | session.add(asset) 51 | session.commit() 52 | session.refresh(asset) 53 | 54 | @celery.task(name="create_point_instance_3dtiles", base=PipelineDatabaseTask) 55 | def create_point_instance_3dtiles(pipeline_extended): 56 | return tasks.create_point_instance_3dtiles(pipeline_extended) 57 | 58 | @celery.task(name="create_mesh_3dtiles", base=PipelineDatabaseTask) 59 | def create_mesh_3dtiles(pipeline_extended): 60 | return tasks.create_mesh_3dtiles(pipeline_extended) 61 | 62 | @celery.task(name="create_point_cloud_3dtiles", base=PipelineDatabaseTask) 63 | def create_point_cloud_3dtiles(pipeline_extended): 64 | return tasks.create_point_cloud_3dtiles(pipeline_extended) 65 | 66 | @celery.task(name="complete_upload_process", base=AssetDatabaseTask) 
67 | def complete_upload_process(options): 68 | return tasks.complete_upload_process(options) 69 | 70 | @celery.task(name="complete_asset_remove_process") 71 | def complete_asset_remove_process(options): 72 | return tasks.complete_asset_remove_process(options) 73 | 74 | @celery.task(name="complete_pipeline_remove_process") 75 | def complete_pipeline_remove_process(options): 76 | return tasks.complete_pipeline_remove_process(options) 77 | -------------------------------------------------------------------------------- /backend/app/worker/polyhedron.py: -------------------------------------------------------------------------------- 1 | from app.worker.processes import earcut 2 | 3 | def parse_coords(coords, z, translate_z): 4 | height = coords[2] 5 | if not height: 6 | height = 0 7 | if z: 8 | height = z 9 | 10 | _translate_z = translate_z 11 | if not _translate_z: 12 | _translate_z = 0 13 | return [coords[0], coords[1], height + _translate_z] 14 | 15 | def parse_ring(ring, z, translate_z): 16 | parsed_ring = [] 17 | for coords in ring: 18 | parsed_ring.append(parse_coords(coords, z, translate_z)) 19 | return parsed_ring 20 | 21 | def sum_until_index(arr, index): 22 | filtered_arr = [] 23 | for idx, value in enumerate(arr): 24 | if idx < index: 25 | filtered_arr.append(value) 26 | sum = 0 27 | for value in filtered_arr: 28 | sum += value 29 | return sum 30 | 31 | def triangulate(coordinates, reverse): 32 | 33 | indices = earcut(coordinates) 34 | 35 | vertices = [] 36 | 37 | for rings in coordinates: 38 | for ring in rings: 39 | vertices.append(ring) 40 | 41 | polyhedron = [] 42 | 43 | for i in range(len(indices)): 44 | if i % 3 == 0: 45 | if reverse: 46 | polyhedron.append([ 47 | vertices[indices[i]], 48 | vertices[indices[i + 2]], 49 | vertices[indices[i + 1]], 50 | vertices[indices[i]] 51 | ]) 52 | else: 53 | polyhedron.append([ 54 | vertices[indices[i]], 55 | vertices[indices[i + 1]], 56 | vertices[indices[i + 2]], 57 | vertices[indices[i]] 58 | ]) 59 | return 
polyhedron 60 | 61 | def plane_to_wall(lower_ring, upper_ring): 62 | polyhedron = [] 63 | 64 | for i in range(len(lower_ring) - 1): 65 | if lower_ring[i + 1]: 66 | bl = lower_ring[i] 67 | br = lower_ring[i + 1] 68 | tl = upper_ring[i] 69 | tr = upper_ring[i + 1] 70 | polyhedron.append([bl, tl, br, bl]) 71 | polyhedron.append([br, tl, tr, br]) 72 | 73 | return polyhedron 74 | 75 | def generate_walls(lower, upper): 76 | walls = [] 77 | for idx in range(len(lower)): 78 | walls += plane_to_wall(lower[idx], upper[idx]) 79 | return walls 80 | 81 | def to_polyhedral_surface(lower, upper, remove_bottom_surface): 82 | if not upper: 83 | return triangulate(lower, False) 84 | 85 | polyhedral_surface = [] 86 | polyhedral_surface += triangulate(upper, False) 87 | polyhedral_surface += generate_walls(lower, upper) 88 | 89 | if not remove_bottom_surface: 90 | polyhedral_surface += triangulate(lower, True) 91 | 92 | return polyhedral_surface 93 | 94 | def polygon_to_polyhedral_surface(geometry, options): 95 | lower_limit = options['lower_limit'] 96 | upper_limit = options['upper_limit'] 97 | translate_z = options['translate_z'] 98 | remove_bottom_surface = options['remove_bottom_surface'] 99 | lower = [] 100 | for ring in geometry['coordinates']: 101 | lower.append(parse_ring(ring, lower_limit, translate_z)) 102 | 103 | if lower_limit == None and upper_limit == None: 104 | return to_polyhedral_surface(lower, None, remove_bottom_surface) 105 | 106 | upper = [] 107 | for ring in geometry['coordinates']: 108 | upper.append(parse_ring(ring, upper_limit, translate_z)) 109 | 110 | average_z_lower = 0 111 | for coords in lower[0]: 112 | average_z_lower += coords[2] 113 | 114 | average_z_lower /= len(lower[0]) 115 | 116 | average_z_upper = 0 117 | for coords in upper[0]: 118 | average_z_upper += coords[2] 119 | 120 | average_z_upper /= len(upper[0]) 121 | 122 | if average_z_lower > average_z_upper: 123 | return to_polyhedral_surface(upper, lower, remove_bottom_surface) 124 | 125 | return 
to_polyhedral_surface(lower, upper, remove_bottom_surface) 126 | 127 | def geometry_to_polyhedral_surface(geometry, options): 128 | if geometry['type'] == 'Polygon': 129 | return polygon_to_polyhedral_surface(geometry, options) 130 | return [] 131 | 132 | def polyhedral_to_wkt(polyhedron): 133 | triangles = [] 134 | for triangle in polyhedron: 135 | vertices = [] 136 | for vertex in triangle: 137 | vertex_string = " ".join(map(str, vertex)) 138 | vertices.append(vertex_string) 139 | vertices_string = ", ".join(vertices) 140 | triangles.append(f"(({vertices_string}))") 141 | wkt = ",".join(triangles) 142 | return f"POLYHEDRALSURFACE Z({wkt})" 143 | -------------------------------------------------------------------------------- /backend/app/worker/types.py: -------------------------------------------------------------------------------- 1 | 2 | import sqlalchemy.types as types 3 | from sqlalchemy import func 4 | import json 5 | from decimal import Decimal 6 | from datetime import date, datetime 7 | 8 | class GeometryType(types.UserDefinedType): 9 | cache_ok = True 10 | 11 | def __init__(self, geometry_type = "POLYHEDRALSURFACEZ", epsg_code = 4326): 12 | self.geometry_type = geometry_type 13 | self.epsg_code = epsg_code 14 | 15 | def get_col_spec(self, **kw): 16 | return f"geometry({self.geometry_type}, {self.epsg_code})" 17 | 18 | def bind_expression(self, bindvalue): 19 | return func.ST_GeomFromText(bindvalue, type_=self) 20 | 21 | def column_expression(self, col): 22 | return func.ST_AsText(col, type_=self) 23 | 24 | def bind_processor(self, dialect): 25 | def process(value): 26 | return value 27 | return process 28 | 29 | def result_processor(self, dialect, coltype): 30 | def process(value): 31 | return value 32 | return process 33 | 34 | class JSONEncoder(json.JSONEncoder): 35 | def default(self, obj): 36 | if isinstance(obj, Decimal): 37 | return float(obj) 38 | if isinstance(obj, (datetime, date)): 39 | return obj.isoformat() 40 | return 
json.JSONEncoder.default(self, obj) -------------------------------------------------------------------------------- /backend/app/worker/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | from app.core.config import settings 3 | 4 | def get_asset_upload_path(asset_id, extension, name = 'index'): 5 | return os.path.join(settings.ASSETS_DATA, "upload", f"{asset_id}", f"{name}{extension}") 6 | 7 | def get_asset_table_name(asset_id): 8 | a_id = f"{asset_id}".replace('-', '_') 9 | table_name = f'asset_{a_id}' 10 | return table_name 11 | 12 | def get_pipeline_table_name(pipeline_id): 13 | p_id = f"{pipeline_id}".replace('-', '_') 14 | table_name = f'pipeline_{p_id}' 15 | return table_name 16 | -------------------------------------------------------------------------------- /backend/prestart.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | 3 | # Let the DB start 4 | python /app/app/backend_pre_start.py 5 | 6 | # Run migrations 7 | alembic upgrade head 8 | 9 | # Create initial data in DB 10 | python /app/app/initial_data.py 11 | -------------------------------------------------------------------------------- /backend/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "app" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Admin "] 6 | 7 | [tool.poetry.dependencies] 8 | python = ">=3.10,<3.12" 9 | uvicorn = {extras = ["standard"], version = "^0.24.0.post1"} 10 | fastapi = "^0.109.1" 11 | python-multipart = "^0.0.7" 12 | email-validator = "^2.1.0.post1" 13 | passlib = {extras = ["bcrypt"], version = "^1.7.4"} 14 | tenacity = "^8.2.3" 15 | pydantic = ">2.0" 16 | emails = "^0.6" 17 | 18 | gunicorn = "^22.0.0" 19 | jinja2 = "^3.1.4" 20 | alembic = "^1.12.1" 21 | httpx = "^0.25.1" 22 | psycopg = {extras = ["binary"], version = "^3.1.13"} 23 | sqlmodel = "^0.0.21" 24 | # Pin 
bcrypt until passlib supports the latest 25 | bcrypt = "4.0.1" 26 | pydantic-settings = "^2.2.1" 27 | sentry-sdk = {extras = ["fastapi"], version = "^1.40.6"} 28 | pyjwt = "^2.8.0" 29 | celery = "^5.4.0" 30 | redis = "^5.0.8" 31 | flower = "^2.0.1" 32 | earcut = "^1.1.5" 33 | py3dtiles = {extras = ["las", "ply", "postgres"], version = "^8.0.2"} 34 | laspy = {extras = ["laszip"], version = "^2.5.4"} 35 | mapbox-earcut = "^1.0.3" 36 | 37 | [tool.poetry.group.dev.dependencies] 38 | pytest = "^7.4.3" 39 | mypy = "^1.8.0" 40 | ruff = "^0.2.2" 41 | pre-commit = "^3.6.2" 42 | types-passlib = "^1.7.7.20240106" 43 | coverage = "^7.4.3" 44 | 45 | [build-system] 46 | requires = ["poetry>=0.12"] 47 | build-backend = "poetry.masonry.api" 48 | 49 | [tool.mypy] 50 | strict = true 51 | exclude = ["venv", ".venv", "alembic"] 52 | 53 | [tool.ruff] 54 | target-version = "py310" 55 | exclude = ["alembic"] 56 | 57 | [tool.ruff.lint] 58 | select = [ 59 | "E", # pycodestyle errors 60 | "W", # pycodestyle warnings 61 | "F", # pyflakes 62 | "I", # isort 63 | "B", # flake8-bugbear 64 | "C4", # flake8-comprehensions 65 | "UP", # pyupgrade 66 | "ARG001", # unused arguments in functions 67 | ] 68 | ignore = [ 69 | "E501", # line too long, handled by black 70 | "B008", # do not perform function calls in argument defaults 71 | "W191", # indentation contains tabs 72 | "B904", # Allow raising exceptions without from e, for HTTPException 73 | ] 74 | 75 | [tool.ruff.lint.pyupgrade] 76 | # Preserve types, even if a file imports `from __future__ import annotations`. 
77 | keep-runtime-typing = true 78 | -------------------------------------------------------------------------------- /backend/samples/cone.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/samples/cone.glb -------------------------------------------------------------------------------- /backend/samples/model.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/samples/model.glb -------------------------------------------------------------------------------- /backend/samples/tree.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/backend/samples/tree.glb -------------------------------------------------------------------------------- /backend/scripts/celery-reload.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # using nodemon to be able to detect changes in the mounted volumes 4 | # the --legacy-watch is needed 5 | # other python solutions were not working properly 6 | nodemon -e py --legacy-watch --exec /celery.sh 7 | -------------------------------------------------------------------------------- /backend/scripts/celery.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | celery -A app.worker.main.celery worker --loglevel=info 4 | -------------------------------------------------------------------------------- /backend/scripts/format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | set -x 3 | 4 | ruff check app scripts --fix 5 | 
ruff format app scripts 6 | -------------------------------------------------------------------------------- /backend/scripts/lint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -x 5 | 6 | mypy app 7 | ruff check app 8 | ruff format app --check 9 | -------------------------------------------------------------------------------- /backend/scripts/test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -x 5 | 6 | coverage run --source=app -m pytest 7 | coverage report --show-missing 8 | coverage html --title "${@-coverage}" 9 | -------------------------------------------------------------------------------- /backend/tests-start.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | set -e 3 | set -x 4 | 5 | python /app/app/tests_pre_start.py 6 | 7 | bash ./scripts/test.sh "$@" 8 | -------------------------------------------------------------------------------- /docker-compose.build.yml: -------------------------------------------------------------------------------- 1 | x-common-cache: &common-cache 2 | cache_from: 3 | - type=local,src=/tmp/.buildx-cache 4 | cache_to: 5 | - type=local,dest=/tmp/.buildx-cache-new,mode=max 6 | 7 | services: 8 | backend: 9 | build: 10 | context: ./backend 11 | args: 12 | INSTALL_DEV: ${INSTALL_DEV-false} 13 | <<: *common-cache 14 | 15 | frontend: 16 | build: 17 | context: ./frontend 18 | args: 19 | - NODE_ENV=production 20 | <<: *common-cache 21 | -------------------------------------------------------------------------------- /docker-compose.dev.yml: -------------------------------------------------------------------------------- 1 | services: 2 | backend: 3 | volumes: 4 | - ./backend/:/app 5 | command: /start-reload.sh 6 | 7 | worker: 8 | restart: "no" 9 | volumes: 10 | - ./backend/:/app 11 | command: 
/celery-reload.sh 12 | -------------------------------------------------------------------------------- /docker-compose.override.yml: -------------------------------------------------------------------------------- 1 | services: 2 | 3 | proxy: 4 | image: traefik:3.0 5 | volumes: 6 | - /var/run/docker.sock:/var/run/docker.sock 7 | ports: 8 | - "80:80" 9 | - "8090:8080" 10 | # Duplicate the command from docker-compose.yml to add --api.insecure=true 11 | command: 12 | # Enable Docker in Traefik, so that it reads labels from Docker services 13 | - --providers.docker 14 | # Add a constraint to only use services with the label for this stack 15 | - --providers.docker.constraints=Label(`traefik.constraint-label`, `traefik-public`) 16 | # Do not expose all Docker services, only the ones explicitly exposed 17 | - --providers.docker.exposedbydefault=false 18 | # Create an entrypoint "http" listening on port 80 19 | - --entrypoints.http.address=:80 20 | # Create an entrypoint "https" listening on port 443 21 | - --entrypoints.https.address=:443 22 | # Enable the access log, with HTTP requests 23 | - --accesslog 24 | # Enable the Traefik log, for configurations and errors 25 | - --log 26 | # Enable debug logging for local development 27 | - --log.level=DEBUG 28 | # Enable the Dashboard and API 29 | - --api 30 | # Enable the Dashboard and API in insecure mode for local development 31 | - --api.insecure=true 32 | labels: 33 | # Enable Traefik for this service, to make it available in the public network 34 | - traefik.enable=true 35 | - traefik.constraint-label=traefik-public 36 | # Dummy https-redirect middleware that doesn't really redirect, only to 37 | # allow running it locally 38 | - traefik.http.middlewares.https-redirect.contenttype.autodetect=false 39 | networks: 40 | - traefik-public 41 | - default 42 | 43 | db: 44 | restart: "no" 45 | ports: 46 | - "5432:5432" 47 | 48 | adminer: 49 | restart: "no" 50 | ports: 51 | - "8080:8080" 52 | 53 | backend: 54 | restart: "no" 55 
| ports: 56 | - "8888:8888" 57 | build: 58 | context: ./backend 59 | args: 60 | INSTALL_DEV: ${INSTALL_DEV-true} 61 | 62 | environment: 63 | SMTP_HOST: "mailcatcher" 64 | SMTP_PORT: "1025" 65 | SMTP_TLS: "false" 66 | EMAILS_FROM_EMAIL: "noreply@example.com" 67 | 68 | worker: 69 | restart: "no" 70 | environment: 71 | SMTP_HOST: "mailcatcher" 72 | SMTP_PORT: "1025" 73 | SMTP_TLS: "false" 74 | EMAILS_FROM_EMAIL: "noreply@example.com" 75 | 76 | mailcatcher: 77 | image: schickling/mailcatcher 78 | ports: 79 | - "1080:1080" 80 | - "1025:1025" 81 | 82 | frontend: 83 | restart: "no" 84 | env_file: 85 | - .env 86 | 87 | networks: 88 | traefik-public: 89 | # For local dev, don't expect an external Traefik network 90 | external: false 91 | -------------------------------------------------------------------------------- /docker-compose.traefik.yml: -------------------------------------------------------------------------------- 1 | services: 2 | traefik: 3 | image: traefik:3.0 4 | ports: 5 | # Listen on port 80, default for HTTP, necessary to redirect to HTTPS 6 | - 80:80 7 | # Listen on port 443, default for HTTPS 8 | - 443:443 9 | restart: always 10 | labels: 11 | # Enable Traefik for this service, to make it available in the public network 12 | - traefik.enable=true 13 | # Use the traefik-public network (declared below) 14 | - traefik.docker.network=traefik-public 15 | # Define the port inside of the Docker service to use 16 | - traefik.http.services.traefik-dashboard.loadbalancer.server.port=8080 17 | # Make Traefik use this domain (from an environment variable) in HTTP 18 | - traefik.http.routers.traefik-dashboard-http.entrypoints=http 19 | - traefik.http.routers.traefik-dashboard-http.rule=Host(`traefik.${DOMAIN?Variable not set}`) 20 | # traefik-https the actual router using HTTPS 21 | - traefik.http.routers.traefik-dashboard-https.entrypoints=https 22 | - traefik.http.routers.traefik-dashboard-https.rule=Host(`traefik.${DOMAIN?Variable not set}`) 23 | - 
traefik.http.routers.traefik-dashboard-https.tls=true 24 | # Use the "le" (Let's Encrypt) resolver created below 25 | - traefik.http.routers.traefik-dashboard-https.tls.certresolver=le 26 | # Use the special Traefik service api@internal with the web UI/Dashboard 27 | - traefik.http.routers.traefik-dashboard-https.service=api@internal 28 | # https-redirect middleware to redirect HTTP to HTTPS 29 | - traefik.http.middlewares.https-redirect.redirectscheme.scheme=https 30 | - traefik.http.middlewares.https-redirect.redirectscheme.permanent=true 31 | # traefik-http set up only to use the middleware to redirect to https 32 | - traefik.http.routers.traefik-dashboard-http.middlewares=https-redirect 33 | # admin-auth middleware with HTTP Basic auth 34 | # Using the environment variables USERNAME and HASHED_PASSWORD 35 | - traefik.http.middlewares.admin-auth.basicauth.users=${USERNAME?Variable not set}:${HASHED_PASSWORD?Variable not set} 36 | # Enable HTTP Basic auth, using the middleware created above 37 | - traefik.http.routers.traefik-dashboard-https.middlewares=admin-auth 38 | volumes: 39 | # Add Docker as a mounted volume, so that Traefik can read the labels of other services 40 | - /var/run/docker.sock:/var/run/docker.sock:ro 41 | # Mount the volume to store the certificates 42 | - traefik-public-certificates:/certificates 43 | command: 44 | # Enable Docker in Traefik, so that it reads labels from Docker services 45 | - --providers.docker 46 | # Do not expose all Docker services, only the ones explicitly exposed 47 | - --providers.docker.exposedbydefault=false 48 | # Create an entrypoint "http" listening on port 80 49 | - --entrypoints.http.address=:80 50 | # Create an entrypoint "https" listening on port 443 51 | - --entrypoints.https.address=:443 52 | # Create the certificate resolver "le" for Let's Encrypt, uses the environment variable EMAIL 53 | - --certificatesresolvers.le.acme.email=${EMAIL?Variable not set} 54 | # Store the Let's Encrypt certificates in the 
mounted volume 55 | - --certificatesresolvers.le.acme.storage=/certificates/acme.json 56 | # Use the TLS Challenge for Let's Encrypt 57 | - --certificatesresolvers.le.acme.tlschallenge=true 58 | # Enable the access log, with HTTP requests 59 | - --accesslog 60 | # Enable the Traefik log, for configurations and errors 61 | - --log 62 | # Enable the Dashboard and API 63 | - --api 64 | networks: 65 | # Use the public network created to be shared between Traefik and 66 | # any other service that needs to be publicly available with HTTPS 67 | - traefik-public 68 | 69 | volumes: 70 | # Create a volume to store the certificates, even if the container is recreated 71 | traefik-public-certificates: 72 | 73 | networks: 74 | # Use the previously created public network "traefik-public", shared with other 75 | # services that need to be publicly available via this Traefik 76 | traefik-public: 77 | external: true 78 | -------------------------------------------------------------------------------- /frontend/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist 3 | mapstore/node_modules 4 | mapstore/dist 5 | mapstore/package-lock.json 6 | .env 7 | -------------------------------------------------------------------------------- /frontend/.env: -------------------------------------------------------------------------------- 1 | VITE_API_URL=http://localhost 2 | VITE_ROUTER_BASE_PATH=/ 3 | VITE_ENABLE_USERS_MANAGEMENT=True 4 | VITE_PUBLIC_BASE_PATH=/ 5 | VITE_ENABLE_ROUTER_HASH_HISTORY=False 6 | -------------------------------------------------------------------------------- /frontend/.env.production: -------------------------------------------------------------------------------- 1 | VITE_API_URL=DTT_API_URL 2 | VITE_ROUTER_BASE_PATH=DTT_ROUTER_BASE_PATH 3 | VITE_ENABLE_USERS_MANAGEMENT=DTT_ENABLE_USERS_MANAGEMENT 4 | VITE_PUBLIC_BASE_PATH=DTT_PUBLIC_BASE_PATH 5 | 
VITE_ENABLE_ROUTER_HASH_HISTORY=DTT_ENABLE_ROUTER_HASH_HISTORY 6 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | openapi.json 15 | public/mapstore 16 | 17 | # Editor directories and files 18 | .vscode/* 19 | !.vscode/extensions.json 20 | .idea 21 | .DS_Store 22 | *.suo 23 | *.ntvs* 24 | *.njsproj 25 | *.sln 26 | *.sw? 27 | /test-results/ 28 | /playwright-report/ 29 | /blob-report/ 30 | /playwright/.cache/ 31 | -------------------------------------------------------------------------------- /frontend/.nvmrc: -------------------------------------------------------------------------------- 1 | 20 2 | -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | # Stage 0, "build-stage", based on Node.js, to build and compile the frontend 2 | FROM node:20 AS build-stage 3 | 4 | WORKDIR /app 5 | 6 | COPY package*.json /app/ 7 | 8 | RUN npm install 9 | 10 | COPY ./ /app/ 11 | 12 | RUN npm run build 13 | 14 | RUN cd /app/mapstore/ && \ 15 | wget https://geosolutions-it.github.io/mapstore-static-examples/mapstore-api/dist.zip && \ 16 | mkdir ./mapstore && \ 17 | unzip ./dist.zip && \ 18 | rm -rf ./dist.zip && \ 19 | cp -r ./overrides/. 
./ 20 | 21 | # RUN cd /app/mapstore/ && npm install 22 | # RUN cd /app/mapstore/ && npm run compile 23 | 24 | # Stage 1, based on Nginx, to have only the compiled app, ready for production with Nginx 25 | FROM nginx:1 26 | 27 | COPY --from=build-stage /app/dist/ /usr/share/nginx/html 28 | RUN mkdir /usr/share/nginx/html/mapstore/ 29 | COPY --from=build-stage /app/mapstore/ /usr/share/nginx/html/mapstore 30 | 31 | COPY ./nginx.conf /etc/nginx/conf.d/default.conf 32 | COPY ./nginx-backend-not-found.conf /etc/nginx/extra-conf.d/backend-not-found.conf 33 | COPY env.sh /docker-entrypoint.d/env.sh 34 | RUN chmod +x /docker-entrypoint.d/env.sh 35 | -------------------------------------------------------------------------------- /frontend/biome.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://biomejs.dev/schemas/1.6.1/schema.json", 3 | "organizeImports": { 4 | "enabled": true 5 | }, 6 | "files": { 7 | "ignore": [ 8 | "node_modules", 9 | "mapstore/", 10 | "src/client/", 11 | "src/routeTree.gen.ts", 12 | "playwright.config.ts", 13 | "playwright-report" 14 | ] 15 | }, 16 | "linter": { 17 | "enabled": true, 18 | "rules": { 19 | "recommended": true, 20 | "suspicious": { 21 | "noExplicitAny": "off", 22 | "noArrayIndexKey": "off" 23 | }, 24 | "style": { 25 | "noNonNullAssertion": "off" 26 | } 27 | } 28 | }, 29 | "formatter": { 30 | "indentStyle": "space" 31 | }, 32 | "javascript": { 33 | "formatter": { 34 | "quoteStyle": "double", 35 | "semicolons": "asNeeded" 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /frontend/env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # see https://stackoverflow.com/a/77454537 4 | for i in $(env | grep DTT_) 5 | do 6 | key=$(echo $i | cut -d '=' -f 1) 7 | value=$(echo $i | cut -d '=' -f 2-) 8 | echo $key=$value 9 | # sed All files 10 | # find /usr/share/nginx/html 
-type f -exec sed -i "s|${key}|${value}|g" '{}' + 11 | 12 | # sed JS and CSS only 13 | find /usr/share/nginx/html -type f \( -name '*.js' -o -name '*.css' \) -exec sed -i "s|${key}|${value}|g" '{}' + 14 | done 15 | -------------------------------------------------------------------------------- /frontend/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Digital Twin Toolbox 7 | 8 | 9 | 10 |
11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /frontend/mapstore/overrides/map.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Map Viewer 9 | 10 | 11 | 112 | 113 | 114 | 115 |
116 |
117 |
118 |
119 |
Loading MapStore
120 |
121 | 147 | 148 | 149 | 150 | -------------------------------------------------------------------------------- /frontend/modify-openapi-operationids.js: -------------------------------------------------------------------------------- 1 | import * as fs from "node:fs" 2 | import * as http from "node:http" 3 | 4 | async function modifyOpenAPIFile(filePath) { 5 | try { 6 | const data = await fs.promises.readFile(filePath) 7 | const openapiContent = JSON.parse(data) 8 | 9 | const paths = openapiContent.paths 10 | for (const pathKey of Object.keys(paths)) { 11 | const pathData = paths[pathKey] 12 | for (const method of Object.keys(pathData)) { 13 | const operation = pathData[method] 14 | if (operation.tags && operation.tags.length > 0) { 15 | const tag = operation.tags[0] 16 | const operationId = operation.operationId 17 | const toRemove = `${tag}-` 18 | if (operationId.startsWith(toRemove)) { 19 | const newOperationId = operationId.substring(toRemove.length) 20 | operation.operationId = newOperationId 21 | } 22 | } 23 | } 24 | } 25 | 26 | await fs.promises.writeFile( 27 | filePath, 28 | JSON.stringify(openapiContent, null, 2), 29 | ) 30 | console.log("File successfully modified") 31 | } catch (err) { 32 | console.error("Error:", err) 33 | } 34 | } 35 | 36 | const filePath = "./openapi.json" 37 | 38 | fs.rmSync(filePath, { force: true }) 39 | 40 | http.get('http://localhost/api/v1/openapi.json', resp => { 41 | const file = fs.createWriteStream(filePath); 42 | file.on('finish', () => { modifyOpenAPIFile(filePath); }); 43 | resp.pipe(file); 44 | }); 45 | 46 | -------------------------------------------------------------------------------- /frontend/nginx-backend-not-found.conf: -------------------------------------------------------------------------------- 1 | location /api { 2 | return 404; 3 | } 4 | location /docs { 5 | return 404; 6 | } 7 | location /redoc { 8 | return 404; 9 | } 10 | -------------------------------------------------------------------------------- 
/frontend/nginx.conf: -------------------------------------------------------------------------------- 1 | server { 2 | listen 80; 3 | 4 | location / { 5 | root /usr/share/nginx/html; 6 | index index.html index.htm; 7 | try_files $uri /index.html =404; 8 | } 9 | 10 | include /etc/nginx/extra-conf.d/*.conf; 11 | } 12 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "frontend", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "tsc && vite build", 9 | "lint": "biome check --apply-unsafe --no-errors-on-unmatched --files-ignore-unknown=true ./", 10 | "preview": "vite preview", 11 | "generate-client": "node ./modify-openapi-operationids.js && openapi-ts --input ./openapi.json --output ./src/client --client axios --exportSchemas true && biome format --write ./src/client" 12 | }, 13 | "dependencies": { 14 | "@chakra-ui/icons": "2.1.1", 15 | "@chakra-ui/react": "2.8.2", 16 | "@emotion/react": "11.11.3", 17 | "@emotion/styled": "11.11.0", 18 | "@tanstack/react-query": "^5.28.14", 19 | "@tanstack/react-query-devtools": "^5.28.14", 20 | "@tanstack/react-router": "1.19.1", 21 | "@turf/turf": "^7.1.0", 22 | "axios": "1.7.5", 23 | "earcut": "^3.0.0", 24 | "form-data": "4.0.0", 25 | "framer-motion": "10.16.16", 26 | "react": "^18.2.0", 27 | "react-dom": "^18.2.0", 28 | "react-error-boundary": "^4.0.13", 29 | "react-hook-form": "7.49.3", 30 | "react-icons": "5.0.1", 31 | "three": "^0.167.1" 32 | }, 33 | "devDependencies": { 34 | "@biomejs/biome": "1.6.1", 35 | "@hey-api/openapi-ts": "^0.34.1", 36 | "@playwright/test": "^1.45.2", 37 | "@tanstack/router-devtools": "1.19.1", 38 | "@tanstack/router-vite-plugin": "1.19.0", 39 | "@types/earcut": "^2.1.4", 40 | "@types/node": "^20.10.5", 41 | "@types/react": "^18.2.37", 42 | "@types/react-dom": "^18.2.15", 43 | 
"@types/three": "^0.167.1", 44 | "@vitejs/plugin-react-swc": "^3.5.0", 45 | "dotenv": "^16.4.5", 46 | "typescript": "^5.2.2", 47 | "vite": "^5.0.13" 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /frontend/playwright.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig, devices } from '@playwright/test'; 2 | 3 | 4 | /** 5 | * Read environment variables from file. 6 | * https://github.com/motdotla/dotenv 7 | */ 8 | // require('dotenv').config(); 9 | 10 | /** 11 | * See https://playwright.dev/docs/test-configuration. 12 | */ 13 | export default defineConfig({ 14 | testDir: './tests', 15 | /* Run tests in files in parallel */ 16 | fullyParallel: true, 17 | /* Fail the build on CI if you accidentally left test.only in the source code. */ 18 | forbidOnly: !!process.env.CI, 19 | /* Retry on CI only */ 20 | retries: process.env.CI ? 2 : 0, 21 | /* Opt out of parallel tests on CI. */ 22 | workers: process.env.CI ? 1 : undefined, 23 | /* Reporter to use. See https://playwright.dev/docs/test-reporters */ 24 | reporter: 'html', 25 | /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ 26 | use: { 27 | /* Base URL to use in actions like `await page.goto('/')`. */ 28 | baseURL: 'http://localhost:5173', 29 | 30 | /* Collect trace when retrying the failed test. 
See https://playwright.dev/docs/trace-viewer */ 31 | trace: 'on-first-retry', 32 | }, 33 | 34 | /* Configure projects for major browsers */ 35 | projects: [ 36 | { name: 'setup', testMatch: /.*\.setup\.ts/ }, 37 | 38 | { 39 | name: 'chromium', 40 | use: { 41 | ...devices['Desktop Chrome'], 42 | storageState: 'playwright/.auth/user.json', 43 | }, 44 | dependencies: ['setup'], 45 | }, 46 | 47 | // { 48 | // name: 'firefox', 49 | // use: { 50 | // ...devices['Desktop Firefox'], 51 | // storageState: 'playwright/.auth/user.json', 52 | // }, 53 | // dependencies: ['setup'], 54 | // }, 55 | 56 | // { 57 | // name: 'webkit', 58 | // use: { 59 | // ...devices['Desktop Safari'], 60 | // storageState: 'playwright/.auth/user.json', 61 | // }, 62 | // dependencies: ['setup'], 63 | // }, 64 | 65 | /* Test against mobile viewports. */ 66 | // { 67 | // name: 'Mobile Chrome', 68 | // use: { ...devices['Pixel 5'] }, 69 | // }, 70 | // { 71 | // name: 'Mobile Safari', 72 | // use: { ...devices['iPhone 12'] }, 73 | // }, 74 | 75 | /* Test against branded browsers. 
*/ 76 | // { 77 | // name: 'Microsoft Edge', 78 | // use: { ...devices['Desktop Edge'], channel: 'msedge' }, 79 | // }, 80 | // { 81 | // name: 'Google Chrome', 82 | // use: { ...devices['Desktop Chrome'], channel: 'chrome' }, 83 | // }, 84 | ], 85 | 86 | /* Run your local dev server before starting the tests */ 87 | webServer: { 88 | command: 'npm run dev', 89 | url: 'http://localhost:5173', 90 | reuseExistingServer: !process.env.CI, 91 | }, 92 | }); 93 | -------------------------------------------------------------------------------- /frontend/public/assets/images/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geosolutions-it/digital-twin-toolbox/db92c088e2939efe6e7a5e459ac01828867088a7/frontend/public/assets/images/favicon.png -------------------------------------------------------------------------------- /frontend/public/assets/images/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 16 | 36 | 38 | 42 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /frontend/src/client/core/ApiError.ts: -------------------------------------------------------------------------------- 1 | import type { ApiRequestOptions } from "./ApiRequestOptions" 2 | import type { ApiResult } from "./ApiResult" 3 | 4 | export class ApiError extends Error { 5 | public readonly url: string 6 | public readonly status: number 7 | public readonly statusText: string 8 | public readonly body: unknown 9 | public readonly request: ApiRequestOptions 10 | 11 | constructor( 12 | request: ApiRequestOptions, 13 | response: ApiResult, 14 | message: string, 15 | ) { 16 | super(message) 17 | 18 | this.name = "ApiError" 19 | this.url = response.url 20 | this.status = response.status 21 | this.statusText = response.statusText 22 | this.body = response.body 23 | this.request = request 24 | } 25 | } 26 | 
-------------------------------------------------------------------------------- /frontend/src/client/core/ApiRequestOptions.ts: -------------------------------------------------------------------------------- 1 | export type ApiRequestOptions = { 2 | readonly method: 3 | | "GET" 4 | | "PUT" 5 | | "POST" 6 | | "DELETE" 7 | | "OPTIONS" 8 | | "HEAD" 9 | | "PATCH" 10 | readonly url: string 11 | readonly path?: Record 12 | readonly cookies?: Record 13 | readonly headers?: Record 14 | readonly query?: Record 15 | readonly formData?: Record 16 | readonly body?: any 17 | readonly mediaType?: string 18 | readonly responseHeader?: string 19 | readonly errors?: Record 20 | } 21 | -------------------------------------------------------------------------------- /frontend/src/client/core/ApiResult.ts: -------------------------------------------------------------------------------- 1 | export type ApiResult = { 2 | readonly body: TData 3 | readonly ok: boolean 4 | readonly status: number 5 | readonly statusText: string 6 | readonly url: string 7 | } 8 | -------------------------------------------------------------------------------- /frontend/src/client/core/CancelablePromise.ts: -------------------------------------------------------------------------------- 1 | export class CancelError extends Error { 2 | constructor(message: string) { 3 | super(message) 4 | this.name = "CancelError" 5 | } 6 | 7 | public get isCancelled(): boolean { 8 | return true 9 | } 10 | } 11 | 12 | export interface OnCancel { 13 | readonly isResolved: boolean 14 | readonly isRejected: boolean 15 | readonly isCancelled: boolean 16 | 17 | (cancelHandler: () => void): void 18 | } 19 | 20 | export class CancelablePromise implements Promise { 21 | private _isResolved: boolean 22 | private _isRejected: boolean 23 | private _isCancelled: boolean 24 | readonly cancelHandlers: (() => void)[] 25 | readonly promise: Promise 26 | private _resolve?: (value: T | PromiseLike) => void 27 | private _reject?: (reason?: 
unknown) => void 28 | 29 | constructor( 30 | executor: ( 31 | resolve: (value: T | PromiseLike) => void, 32 | reject: (reason?: unknown) => void, 33 | onCancel: OnCancel, 34 | ) => void, 35 | ) { 36 | this._isResolved = false 37 | this._isRejected = false 38 | this._isCancelled = false 39 | this.cancelHandlers = [] 40 | this.promise = new Promise((resolve, reject) => { 41 | this._resolve = resolve 42 | this._reject = reject 43 | 44 | const onResolve = (value: T | PromiseLike): void => { 45 | if (this._isResolved || this._isRejected || this._isCancelled) { 46 | return 47 | } 48 | this._isResolved = true 49 | if (this._resolve) this._resolve(value) 50 | } 51 | 52 | const onReject = (reason?: unknown): void => { 53 | if (this._isResolved || this._isRejected || this._isCancelled) { 54 | return 55 | } 56 | this._isRejected = true 57 | if (this._reject) this._reject(reason) 58 | } 59 | 60 | const onCancel = (cancelHandler: () => void): void => { 61 | if (this._isResolved || this._isRejected || this._isCancelled) { 62 | return 63 | } 64 | this.cancelHandlers.push(cancelHandler) 65 | } 66 | 67 | Object.defineProperty(onCancel, "isResolved", { 68 | get: (): boolean => this._isResolved, 69 | }) 70 | 71 | Object.defineProperty(onCancel, "isRejected", { 72 | get: (): boolean => this._isRejected, 73 | }) 74 | 75 | Object.defineProperty(onCancel, "isCancelled", { 76 | get: (): boolean => this._isCancelled, 77 | }) 78 | 79 | return executor(onResolve, onReject, onCancel as OnCancel) 80 | }) 81 | } 82 | 83 | get [Symbol.toStringTag]() { 84 | return "Cancellable Promise" 85 | } 86 | 87 | public then( 88 | onFulfilled?: ((value: T) => TResult1 | PromiseLike) | null, 89 | onRejected?: ((reason: unknown) => TResult2 | PromiseLike) | null, 90 | ): Promise { 91 | return this.promise.then(onFulfilled, onRejected) 92 | } 93 | 94 | public catch( 95 | onRejected?: ((reason: unknown) => TResult | PromiseLike) | null, 96 | ): Promise { 97 | return this.promise.catch(onRejected) 98 | } 99 | 
100 | public finally(onFinally?: (() => void) | null): Promise { 101 | return this.promise.finally(onFinally) 102 | } 103 | 104 | public cancel(): void { 105 | if (this._isResolved || this._isRejected || this._isCancelled) { 106 | return 107 | } 108 | this._isCancelled = true 109 | if (this.cancelHandlers.length) { 110 | try { 111 | for (const cancelHandler of this.cancelHandlers) { 112 | cancelHandler() 113 | } 114 | } catch (error) { 115 | console.warn("Cancellation threw an error", error) 116 | return 117 | } 118 | } 119 | this.cancelHandlers.length = 0 120 | if (this._reject) this._reject(new CancelError("Request aborted")) 121 | } 122 | 123 | public get isCancelled(): boolean { 124 | return this._isCancelled 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /frontend/src/client/core/OpenAPI.ts: -------------------------------------------------------------------------------- 1 | import type { AxiosRequestConfig, AxiosResponse } from "axios" 2 | import type { ApiRequestOptions } from "./ApiRequestOptions" 3 | import type { TResult } from "./types" 4 | 5 | type Headers = Record 6 | type Middleware = (value: T) => T | Promise 7 | type Resolver = (options: ApiRequestOptions) => Promise 8 | 9 | export class Interceptors { 10 | _fns: Middleware[] 11 | 12 | constructor() { 13 | this._fns = [] 14 | } 15 | 16 | eject(fn: Middleware) { 17 | const index = this._fns.indexOf(fn) 18 | if (index !== -1) { 19 | this._fns = [...this._fns.slice(0, index), ...this._fns.slice(index + 1)] 20 | } 21 | } 22 | 23 | use(fn: Middleware) { 24 | this._fns = [...this._fns, fn] 25 | } 26 | } 27 | 28 | export type OpenAPIConfig = { 29 | BASE: string 30 | CREDENTIALS: "include" | "omit" | "same-origin" 31 | ENCODE_PATH?: ((path: string) => string) | undefined 32 | HEADERS?: Headers | Resolver | undefined 33 | PASSWORD?: string | Resolver | undefined 34 | RESULT?: TResult 35 | TOKEN?: string | Resolver | undefined 36 | USERNAME?: string | 
Resolver | undefined 37 | VERSION: string 38 | WITH_CREDENTIALS: boolean 39 | interceptors: { 40 | request: Interceptors 41 | response: Interceptors 42 | } 43 | } 44 | 45 | export const OpenAPI: OpenAPIConfig = { 46 | BASE: "", 47 | CREDENTIALS: "include", 48 | ENCODE_PATH: undefined, 49 | HEADERS: undefined, 50 | PASSWORD: undefined, 51 | RESULT: "body", 52 | TOKEN: undefined, 53 | USERNAME: undefined, 54 | VERSION: "0.1.0", 55 | WITH_CREDENTIALS: false, 56 | interceptors: { request: new Interceptors(), response: new Interceptors() }, 57 | } 58 | -------------------------------------------------------------------------------- /frontend/src/client/core/types.ts: -------------------------------------------------------------------------------- 1 | import type { ApiResult } from "./ApiResult" 2 | 3 | export type TResult = "body" | "raw" 4 | 5 | export type TApiResponse = Exclude< 6 | T, 7 | "raw" 8 | > extends never 9 | ? ApiResult 10 | : ApiResult["body"] 11 | 12 | export type TConfig = { 13 | _result?: T 14 | } 15 | -------------------------------------------------------------------------------- /frontend/src/client/index.ts: -------------------------------------------------------------------------------- 1 | export { ApiError } from "./core/ApiError" 2 | export { CancelablePromise, CancelError } from "./core/CancelablePromise" 3 | export { OpenAPI } from "./core/OpenAPI" 4 | export type { OpenAPIConfig } from "./core/OpenAPI" 5 | 6 | export * from "./models" 7 | export * from "./schemas" 8 | export * from "./services" 9 | -------------------------------------------------------------------------------- /frontend/src/client/models.ts: -------------------------------------------------------------------------------- 1 | export type Asset = { 2 | filename: string 3 | content_type?: string | null 4 | content_size?: number | null 5 | asset_type?: string | null 6 | extension?: string | null 7 | geometry_type?: string | null 8 | upload_id?: string | null 9 | upload_status?: 
string | null 10 | upload_result: Record | null 11 | id?: string 12 | owner_id: string 13 | } 14 | 15 | export type AssetPublic = { 16 | filename: string 17 | content_type?: string | null 18 | content_size?: number | null 19 | asset_type?: string | null 20 | extension?: string | null 21 | geometry_type?: string | null 22 | upload_id?: string | null 23 | upload_status?: string | null 24 | upload_result: Record | null 25 | id: string 26 | owner_id: string 27 | } 28 | 29 | export type AssetsPublic = { 30 | data: Array 31 | count: number 32 | } 33 | 34 | export type Body_assets_create_asset = { 35 | file: Blob | File 36 | } 37 | 38 | export type Body_login_login_access_token = { 39 | grant_type?: string | null 40 | username: string 41 | password: string 42 | scope?: string 43 | client_id?: string | null 44 | client_secret?: string | null 45 | } 46 | 47 | export type HTTPValidationError = { 48 | detail?: Array 49 | } 50 | 51 | export type Message = { 52 | message: string 53 | } 54 | 55 | export type NewPassword = { 56 | token: string 57 | new_password: string 58 | } 59 | 60 | export type PipelineCreate = { 61 | title: string 62 | asset_id: string | null 63 | data: Record | null 64 | task_id: string | null 65 | task_status: string | null 66 | task_result: Record | null 67 | } 68 | 69 | export type PipelinePublic = { 70 | title: string 71 | asset_id: string | null 72 | data: Record | null 73 | task_id: string | null 74 | task_status: string | null 75 | task_result: Record | null 76 | id: string 77 | owner_id: string 78 | } 79 | 80 | export type PipelinePublicExtended = { 81 | title: string 82 | asset_id: string | null 83 | data: Record | null 84 | task_id: string | null 85 | task_status: string | null 86 | task_result: Record | null 87 | id: string 88 | owner_id: string 89 | asset: Asset | null 90 | } 91 | 92 | export type PipelineUpdate = { 93 | data: Record | null 94 | } 95 | 96 | export type PipelinesActionTypes = "run" | "cancel" 97 | 98 | export type PipelinesPublic 
= { 99 | data: Array 100 | count: number 101 | } 102 | 103 | export type Token = { 104 | access_token: string 105 | token_type?: string 106 | } 107 | 108 | export type UpdatePassword = { 109 | current_password: string 110 | new_password: string 111 | } 112 | 113 | export type UserCreate = { 114 | email: string 115 | is_active?: boolean 116 | is_superuser?: boolean 117 | full_name?: string | null 118 | password: string 119 | } 120 | 121 | export type UserPublic = { 122 | email: string 123 | is_active?: boolean 124 | is_superuser?: boolean 125 | full_name?: string | null 126 | id: string 127 | } 128 | 129 | export type UserRegister = { 130 | email: string 131 | password: string 132 | full_name?: string | null 133 | } 134 | 135 | export type UserUpdate = { 136 | email?: string | null 137 | is_active?: boolean 138 | is_superuser?: boolean 139 | full_name?: string | null 140 | password?: string | null 141 | } 142 | 143 | export type UserUpdateMe = { 144 | full_name?: string | null 145 | email?: string | null 146 | } 147 | 148 | export type UsersPublic = { 149 | data: Array 150 | count: number 151 | } 152 | 153 | export type ValidationError = { 154 | loc: Array 155 | msg: string 156 | type: string 157 | } 158 | -------------------------------------------------------------------------------- /frontend/src/components/Common/ActionsMenu.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | Button, 3 | Menu, 4 | MenuButton, 5 | MenuItem, 6 | MenuList, 7 | useDisclosure, 8 | } from "@chakra-ui/react" 9 | import { BsThreeDotsVertical } from "react-icons/bs" 10 | import { FiEdit, FiTrash } from "react-icons/fi" 11 | 12 | import type { UserPublic } from "../../client" 13 | import EditUser from "../Admin/EditUser" 14 | import Delete from "./DeleteAlert" 15 | 16 | interface ActionsMenuProps { 17 | type: string 18 | value: UserPublic 19 | disabled?: boolean 20 | } 21 | 22 | const ActionsMenu = ({ type, value, disabled }: 
ActionsMenuProps) => { 23 | const editUserModal = useDisclosure() 24 | const deleteModal = useDisclosure() 25 | 26 | return ( 27 | <> 28 | 29 | } 33 | variant="unstyled" 34 | /> 35 | 36 | } 39 | > 40 | Edit {type} 41 | 42 | } 45 | color="ui.danger" 46 | > 47 | Delete {type} 48 | 49 | 50 | {type === "User" ? ( 51 | 56 | ) : null} 57 | 63 | 64 | 65 | ) 66 | } 67 | 68 | export default ActionsMenu 69 | -------------------------------------------------------------------------------- /frontend/src/components/Common/DeleteAlert.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | AlertDialog, 3 | AlertDialogBody, 4 | AlertDialogContent, 5 | AlertDialogFooter, 6 | AlertDialogHeader, 7 | AlertDialogOverlay, 8 | Button, 9 | } from "@chakra-ui/react" 10 | import { useMutation, useQueryClient } from "@tanstack/react-query" 11 | import React from "react" 12 | import { useForm } from "react-hook-form" 13 | 14 | import { UsersService } from "../../client" 15 | import useCustomToast from "../../hooks/useCustomToast" 16 | 17 | interface DeleteProps { 18 | type: string 19 | id: string 20 | isOpen: boolean 21 | onClose: () => void 22 | } 23 | 24 | const Delete = ({ type, id, isOpen, onClose }: DeleteProps) => { 25 | const queryClient = useQueryClient() 26 | const showToast = useCustomToast() 27 | const cancelRef = React.useRef(null) 28 | const { 29 | handleSubmit, 30 | formState: { isSubmitting }, 31 | } = useForm() 32 | 33 | const deleteEntity = async (id: string) => { 34 | if (type === "User") { 35 | await UsersService.deleteUser({ userId: id }) 36 | } else { 37 | throw new Error(`Unexpected type: ${type}`) 38 | } 39 | } 40 | 41 | const mutation = useMutation({ 42 | mutationFn: deleteEntity, 43 | onSuccess: () => { 44 | showToast( 45 | "Success", 46 | `The ${type.toLowerCase()} was deleted successfully.`, 47 | "success", 48 | ) 49 | onClose() 50 | }, 51 | onError: () => { 52 | showToast( 53 | "An error occurred.", 54 | `An error 
occurred while deleting the ${type.toLowerCase()}.`, 55 | "error", 56 | ) 57 | }, 58 | onSettled: () => { 59 | queryClient.invalidateQueries({ 60 | queryKey: [type === "Item" ? "items" : "users"], 61 | }) 62 | }, 63 | }) 64 | 65 | const onSubmit = async () => { 66 | mutation.mutate(id) 67 | } 68 | 69 | return ( 70 | <> 71 | 78 | 79 | 80 | Delete {type} 81 | 82 | 83 | {type === "User" && ( 84 | 85 | All items associated with this user will also be{" "} 86 | permanently deleted. 87 | 88 | )} 89 | Are you sure? You will not be able to undo this action. 90 | 91 | 92 | 93 | 96 | 103 | 104 | 105 | 106 | 107 | 108 | ) 109 | } 110 | 111 | export default Delete 112 | -------------------------------------------------------------------------------- /frontend/src/components/Common/Navbar.tsx: -------------------------------------------------------------------------------- 1 | import type { ComponentType, ElementType } from "react" 2 | 3 | import { Button, Flex, Icon, useDisclosure } from "@chakra-ui/react" 4 | import { FaPlus } from "react-icons/fa" 5 | 6 | interface NavbarProps { 7 | type: string 8 | addModalAs: ComponentType | ElementType 9 | } 10 | 11 | const Navbar = ({ type, addModalAs }: NavbarProps) => { 12 | const addModal = useDisclosure() 13 | 14 | const AddModal = addModalAs 15 | return ( 16 | <> 17 | 18 | 21 | 22 | 23 | 24 | ) 25 | } 26 | 27 | export default Navbar 28 | -------------------------------------------------------------------------------- /frontend/src/components/Common/NotFound.tsx: -------------------------------------------------------------------------------- 1 | import { Button, Container, Text } from "@chakra-ui/react" 2 | import { Link } from "@tanstack/react-router" 3 | 4 | const NotFound = () => { 5 | return ( 6 | <> 7 | 12 | 19 | 26 | 404 27 | 28 | Oops! 29 | Page not found.
30 | 40 | 41 | 42 | 43 | ) 44 | } 45 | 46 | export default NotFound 47 | -------------------------------------------------------------------------------- /frontend/src/components/Common/Sidebar.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | Box, 3 | Drawer, 4 | DrawerBody, 5 | DrawerCloseButton, 6 | DrawerContent, 7 | DrawerOverlay, 8 | Flex, 9 | IconButton, 10 | Text, 11 | useColorModeValue, 12 | useDisclosure, 13 | } from "@chakra-ui/react" 14 | import { useQueryClient } from "@tanstack/react-query" 15 | import { FiLogOut, FiMenu } from "react-icons/fi" 16 | 17 | import type { UserPublic } from "../../client" 18 | import useAuth from "../../hooks/useAuth" 19 | import SidebarItems from "./SidebarItems" 20 | import { hideUserSections } from '../../utils' 21 | 22 | const Sidebar = () => { 23 | const queryClient = useQueryClient() 24 | const bgColor = useColorModeValue("ui.light", "ui.dark") 25 | const textColor = useColorModeValue("ui.dark", "ui.light") 26 | const secBgColor = useColorModeValue("ui.secondary", "ui.darkSlate") 27 | const currentUser = queryClient.getQueryData(["currentUser"]) 28 | const { isOpen, onOpen, onClose } = useDisclosure() 29 | const { logout } = useAuth() 30 | const shouldHideUserSections = hideUserSections(); 31 | 32 | const handleLogout = async () => { 33 | logout() 34 | } 35 | 36 | return ( 37 | <> 38 | {/* Mobile */} 39 | } 47 | /> 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | {!shouldHideUserSections && 64 | 65 | Log out 66 | } 67 | 68 | {!shouldHideUserSections && currentUser?.email && ( 69 | 70 | Logged in as: {currentUser.email} 71 | 72 | )} 73 | 74 | 75 | 76 | 77 | 78 | {/* Desktop */} 79 | 87 | 94 | 95 | 96 | 97 | {!shouldHideUserSections && currentUser?.email && ( 98 | 105 | Logged in as: {currentUser.email} 106 | 107 | )} 108 | 109 | 110 | 111 | ) 112 | } 113 | 114 | export default Sidebar 115 | -------------------------------------------------------------------------------- 
/frontend/src/components/Common/SidebarItems.tsx: -------------------------------------------------------------------------------- 1 | import { Box, Flex, Icon, Text, useColorModeValue } from "@chakra-ui/react" 2 | import { useQueryClient } from "@tanstack/react-query" 3 | import { Link } from "@tanstack/react-router" 4 | import { FiBox, FiFile, FiMap, FiSettings, FiUsers } from "react-icons/fi" 5 | 6 | import type { UserPublic } from "../../client" 7 | import { hideUserSections } from '../../utils' 8 | 9 | interface SidebarItemsProps { 10 | onClose?: () => void 11 | } 12 | 13 | const SidebarItems = ({ onClose }: SidebarItemsProps) => { 14 | const queryClient = useQueryClient() 15 | const textColor = useColorModeValue("ui.main", "ui.light") 16 | const bgActive = useColorModeValue("#E2E8F0", "#4A5568") 17 | const currentUser = queryClient.getQueryData(["currentUser"]) 18 | 19 | const shouldHideUserSections = hideUserSections(); 20 | 21 | const items = [ 22 | { icon: FiFile, title: "Assets", path: "/assets" }, 23 | { icon: FiBox, title: "Pipelines", path: "/pipelines" }, 24 | { icon: FiMap, title: "Map", path: "/map" }, 25 | ...(shouldHideUserSections ? [] : [{ icon: FiSettings, title: "User Settings", path: "/settings" }]), 26 | ] 27 | 28 | const finalItems = !shouldHideUserSections && currentUser?.is_superuser 29 | ? 
[...items, { icon: FiUsers, title: "Admin", path: "/admin" }] 30 | : items 31 | 32 | const listItems = finalItems.map(({ icon, title, path }) => ( 33 | 48 | 49 | {title} 50 | 51 | )) 52 | 53 | return ( 54 | <> 55 | {listItems} 56 | 57 | ) 58 | } 59 | 60 | export default SidebarItems 61 | -------------------------------------------------------------------------------- /frontend/src/components/Common/UserMenu.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | Box, 3 | IconButton, 4 | Menu, 5 | MenuButton, 6 | MenuItem, 7 | MenuList, 8 | } from "@chakra-ui/react" 9 | import { Link } from "@tanstack/react-router" 10 | import { FiLogOut, FiUser } from "react-icons/fi" 11 | 12 | import useAuth from "../../hooks/useAuth" 13 | 14 | const UserMenu = () => { 15 | const { logout } = useAuth() 16 | 17 | const handleLogout = async () => { 18 | logout() 19 | } 20 | 21 | return ( 22 | <> 23 | {/* Desktop */} 24 | 30 | 31 | } 35 | bg="ui.main" 36 | isRound 37 | data-testid="user-menu" 38 | /> 39 | 40 | } as={Link} to="settings"> 41 | My profile 42 | 43 | } 45 | onClick={handleLogout} 46 | color="ui.danger" 47 | fontWeight="bold" 48 | > 49 | Log out 50 | 51 | 52 | 53 | 54 | 55 | ) 56 | } 57 | 58 | export default UserMenu 59 | -------------------------------------------------------------------------------- /frontend/src/components/UserSettings/Appearance.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | Badge, 3 | Container, 4 | Heading, 5 | Radio, 6 | RadioGroup, 7 | Stack, 8 | useColorMode, 9 | } from "@chakra-ui/react" 10 | 11 | const Appearance = () => { 12 | const { colorMode, toggleColorMode } = useColorMode() 13 | 14 | return ( 15 | <> 16 | 17 | 18 | Appearance 19 | 20 | 21 | 22 | {/* TODO: Add system default option */} 23 | 24 | Light Mode 25 | 26 | Default 27 | 28 | 29 | 30 | Dark Mode 31 | 32 | 33 | 34 | 35 | 36 | ) 37 | } 38 | export default Appearance 39 | 
-------------------------------------------------------------------------------- /frontend/src/components/UserSettings/ChangePassword.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | Box, 3 | Button, 4 | Container, 5 | FormControl, 6 | FormErrorMessage, 7 | FormLabel, 8 | Heading, 9 | Input, 10 | useColorModeValue, 11 | } from "@chakra-ui/react" 12 | import { useMutation } from "@tanstack/react-query" 13 | import { type SubmitHandler, useForm } from "react-hook-form" 14 | 15 | import { type ApiError, type UpdatePassword, UsersService } from "../../client" 16 | import useCustomToast from "../../hooks/useCustomToast" 17 | import { confirmPasswordRules, handleError, passwordRules } from "../../utils" 18 | 19 | interface UpdatePasswordForm extends UpdatePassword { 20 | confirm_password: string 21 | } 22 | 23 | const ChangePassword = () => { 24 | const color = useColorModeValue("inherit", "ui.light") 25 | const showToast = useCustomToast() 26 | const { 27 | register, 28 | handleSubmit, 29 | reset, 30 | getValues, 31 | formState: { errors, isSubmitting }, 32 | } = useForm({ 33 | mode: "onBlur", 34 | criteriaMode: "all", 35 | }) 36 | 37 | const mutation = useMutation({ 38 | mutationFn: (data: UpdatePassword) => 39 | UsersService.updatePasswordMe({ requestBody: data }), 40 | onSuccess: () => { 41 | showToast("Success!", "Password updated successfully.", "success") 42 | reset() 43 | }, 44 | onError: (err: ApiError) => { 45 | handleError(err, showToast) 46 | }, 47 | }) 48 | 49 | const onSubmit: SubmitHandler = async (data) => { 50 | mutation.mutate(data) 51 | } 52 | 53 | return ( 54 | <> 55 | 56 | 57 | Change Password 58 | 59 | 64 | 65 | 66 | Current Password 67 | 68 | 75 | {errors.current_password && ( 76 | 77 | {errors.current_password.message} 78 | 79 | )} 80 | 81 | 82 | Set Password 83 | 90 | {errors.new_password && ( 91 | {errors.new_password.message} 92 | )} 93 | 94 | 95 | Confirm Password 96 | 103 | 
{errors.confirm_password && ( 104 | 105 | {errors.confirm_password.message} 106 | 107 | )} 108 | 109 | 117 | 118 | 119 | 120 | ) 121 | } 122 | export default ChangePassword 123 | -------------------------------------------------------------------------------- /frontend/src/components/UserSettings/DeleteAccount.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | Button, 3 | Container, 4 | Heading, 5 | Text, 6 | useDisclosure, 7 | } from "@chakra-ui/react" 8 | 9 | import DeleteConfirmation from "./DeleteConfirmation" 10 | 11 | const DeleteAccount = () => { 12 | const confirmationModal = useDisclosure() 13 | 14 | return ( 15 | <> 16 | 17 | 18 | Delete Account 19 | 20 | 21 | Permanently delete your data and everything associated with your 22 | account. 23 | 24 | 27 | 31 | 32 | 33 | ) 34 | } 35 | export default DeleteAccount 36 | -------------------------------------------------------------------------------- /frontend/src/components/UserSettings/DeleteConfirmation.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | AlertDialog, 3 | AlertDialogBody, 4 | AlertDialogContent, 5 | AlertDialogFooter, 6 | AlertDialogHeader, 7 | AlertDialogOverlay, 8 | Button, 9 | } from "@chakra-ui/react" 10 | import { useMutation, useQueryClient } from "@tanstack/react-query" 11 | import React from "react" 12 | import { useForm } from "react-hook-form" 13 | 14 | import { type ApiError, UsersService } from "../../client" 15 | import useAuth from "../../hooks/useAuth" 16 | import useCustomToast from "../../hooks/useCustomToast" 17 | import { handleError } from "../../utils" 18 | 19 | interface DeleteProps { 20 | isOpen: boolean 21 | onClose: () => void 22 | } 23 | 24 | const DeleteConfirmation = ({ isOpen, onClose }: DeleteProps) => { 25 | const queryClient = useQueryClient() 26 | const showToast = useCustomToast() 27 | const cancelRef = React.useRef(null) 28 | const { 29 | handleSubmit, 30 | 
formState: { isSubmitting }, 31 | } = useForm() 32 | const { logout } = useAuth() 33 | 34 | const mutation = useMutation({ 35 | mutationFn: () => UsersService.deleteUserMe(), 36 | onSuccess: () => { 37 | showToast( 38 | "Success", 39 | "Your account has been successfully deleted.", 40 | "success", 41 | ) 42 | logout() 43 | onClose() 44 | }, 45 | onError: (err: ApiError) => { 46 | handleError(err, showToast) 47 | }, 48 | onSettled: () => { 49 | queryClient.invalidateQueries({ queryKey: ["currentUser"] }) 50 | }, 51 | }) 52 | 53 | const onSubmit = async () => { 54 | mutation.mutate() 55 | } 56 | 57 | return ( 58 | <> 59 | 66 | 67 | 68 | Confirmation Required 69 | 70 | 71 | All your account data will be{" "} 72 | permanently deleted. If you are sure, please 73 | click "Confirm" to proceed. This action cannot be 74 | undone. 75 | 76 | 77 | 78 | 81 | 88 | 89 | 90 | 91 | 92 | 93 | ) 94 | } 95 | 96 | export default DeleteConfirmation 97 | -------------------------------------------------------------------------------- /frontend/src/components/UserSettings/UserInformation.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | Box, 3 | Button, 4 | Container, 5 | Flex, 6 | FormControl, 7 | FormErrorMessage, 8 | FormLabel, 9 | Heading, 10 | Input, 11 | Text, 12 | useColorModeValue, 13 | } from "@chakra-ui/react" 14 | import { useMutation, useQueryClient } from "@tanstack/react-query" 15 | import { useState } from "react" 16 | import { type SubmitHandler, useForm } from "react-hook-form" 17 | 18 | import { 19 | type ApiError, 20 | type UserPublic, 21 | type UserUpdateMe, 22 | UsersService, 23 | } from "../../client" 24 | import useAuth from "../../hooks/useAuth" 25 | import useCustomToast from "../../hooks/useCustomToast" 26 | import { emailPattern, handleError } from "../../utils" 27 | 28 | const UserInformation = () => { 29 | const queryClient = useQueryClient() 30 | const color = useColorModeValue("inherit", "ui.light") 31 | 
const showToast = useCustomToast() 32 | const [editMode, setEditMode] = useState(false) 33 | const { user: currentUser } = useAuth() 34 | const { 35 | register, 36 | handleSubmit, 37 | reset, 38 | getValues, 39 | formState: { isSubmitting, errors, isDirty }, 40 | } = useForm({ 41 | mode: "onBlur", 42 | criteriaMode: "all", 43 | defaultValues: { 44 | full_name: currentUser?.full_name, 45 | email: currentUser?.email, 46 | }, 47 | }) 48 | 49 | const toggleEditMode = () => { 50 | setEditMode(!editMode) 51 | } 52 | 53 | const mutation = useMutation({ 54 | mutationFn: (data: UserUpdateMe) => 55 | UsersService.updateUserMe({ requestBody: data }), 56 | onSuccess: () => { 57 | showToast("Success!", "User updated successfully.", "success") 58 | }, 59 | onError: (err: ApiError) => { 60 | handleError(err, showToast) 61 | }, 62 | onSettled: () => { 63 | queryClient.invalidateQueries() 64 | }, 65 | }) 66 | 67 | const onSubmit: SubmitHandler = async (data) => { 68 | mutation.mutate(data) 69 | } 70 | 71 | const onCancel = () => { 72 | reset() 73 | toggleEditMode() 74 | } 75 | 76 | return ( 77 | <> 78 | 79 | 80 | User Information 81 | 82 | 87 | 88 | 89 | Full name 90 | 91 | {editMode ? ( 92 | 99 | ) : ( 100 | 107 | {currentUser?.full_name || "N/A"} 108 | 109 | )} 110 | 111 | 112 | 113 | Email 114 | 115 | {editMode ? 
( 116 | 126 | ) : ( 127 | 128 | {currentUser?.email} 129 | 130 | )} 131 | {errors.email && ( 132 | {errors.email.message} 133 | )} 134 | 135 | 136 | 145 | {editMode && ( 146 | 149 | )} 150 | 151 | 152 | 153 | 154 | ) 155 | } 156 | 157 | export default UserInformation 158 | -------------------------------------------------------------------------------- /frontend/src/components/Viewer/ThreeCanvas.tsx: -------------------------------------------------------------------------------- 1 | import React from "react" 2 | import * as THREE from "three" 3 | import { MapControls } from "three/examples/jsm/controls/MapControls.js" 4 | 5 | const setDefaultCameraLocation = (camera: any, controls: any) => { 6 | camera.position.set(0, 1500, 1500) 7 | controls.target.set(0, 0, 0) 8 | } 9 | 10 | const material = new THREE.MeshNormalMaterial() 11 | const pointMaterial = new THREE.PointsMaterial({ size: 4, vertexColors: true }) 12 | 13 | interface ThreeCanvasProps { 14 | onMount: (options: any) => any 15 | } 16 | 17 | function ThreeCanvas({ onMount }: ThreeCanvasProps) { 18 | const canvas = React.useRef(null) 19 | const options = React.useRef({}) 20 | React.useEffect(() => { 21 | const canvasNode = canvas?.current || { clientWidth: 0, clientHeight: 0 } 22 | 23 | const scene = new THREE.Scene() 24 | const camera = new THREE.PerspectiveCamera( 25 | 75, 26 | canvasNode.clientWidth / canvasNode.clientHeight, 27 | 0.1, 28 | 1000000000, 29 | ) 30 | 31 | const renderer = new THREE.WebGLRenderer({ 32 | canvas: canvas?.current || undefined, 33 | }) 34 | renderer.setSize(canvasNode.clientWidth, canvasNode.clientHeight) 35 | 36 | const controls = new MapControls(camera, renderer.domElement) 37 | controls.enableDamping = false 38 | controls.dampingFactor = 0.05 39 | 40 | setDefaultCameraLocation(camera, controls) 41 | 42 | const axesHelper = new THREE.AxesHelper(500) 43 | scene.add(axesHelper) 44 | 45 | const gridGround = new THREE.GridHelper(3000, 150, 0x3f3f3f, 0x3f3f3f) 46 | 
scene.add(gridGround) 47 | let removed = false 48 | let requestAnimation: any 49 | function animate() { 50 | if (!removed) { 51 | requestAnimation = requestAnimationFrame(animate) 52 | controls.update() 53 | renderer.render(scene, camera) 54 | } 55 | } 56 | animate() 57 | const group = new THREE.Group() 58 | scene.add(group) 59 | options.current = { group, material, pointMaterial } 60 | window.addEventListener("resize", onWindowResize, false) 61 | function onWindowResize() { 62 | camera.aspect = canvasNode.clientWidth / canvasNode.clientHeight 63 | camera.updateProjectionMatrix() 64 | renderer.setSize(canvasNode.clientWidth, canvasNode.clientHeight) 65 | } 66 | return () => { 67 | removed = true 68 | if (requestAnimation) { 69 | cancelAnimationFrame(requestAnimation) 70 | } 71 | for (let i = 0; i < group.children.length; i++) { 72 | const mesh: any = group.children[i] 73 | mesh.geometry.dispose() 74 | } 75 | group.children.forEach((child: any) => group.remove(child)) 76 | group.children = [] 77 | renderer.dispose() 78 | } 79 | }, []) 80 | 81 | const _onMount = React.useRef(() => {}); 82 | _onMount.current = () => onMount(options.current); 83 | React.useEffect(() => { 84 | _onMount.current(); 85 | }, []) 86 | return ( 87 | 91 | ) 92 | } 93 | 94 | export default ThreeCanvas 95 | -------------------------------------------------------------------------------- /frontend/src/hooks/useAuth.ts: -------------------------------------------------------------------------------- 1 | import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query" 2 | import { useNavigate } from "@tanstack/react-router" 3 | import { useState } from "react" 4 | 5 | import { AxiosError } from "axios" 6 | import { 7 | type Body_login_login_access_token as AccessToken, 8 | type ApiError, 9 | LoginService, 10 | type UserPublic, 11 | type UserRegister, 12 | UsersService, 13 | } from "../client" 14 | import useCustomToast from "./useCustomToast" 15 | import { hideUserSections } from 
'../utils' 16 | 17 | const isLoggedIn = () => { 18 | return hideUserSections() ? true : localStorage.getItem("access_token") !== null 19 | } 20 | 21 | const useAuth = () => { 22 | const [error, setError] = useState(null) 23 | const navigate = useNavigate() 24 | const showToast = useCustomToast() 25 | const queryClient = useQueryClient() 26 | const { data: user, isLoading } = useQuery({ 27 | queryKey: ["currentUser"], 28 | queryFn: UsersService.readUserMe, 29 | enabled: hideUserSections() ? false : isLoggedIn(), 30 | }) 31 | 32 | const signUpMutation = useMutation({ 33 | mutationFn: (data: UserRegister) => 34 | UsersService.registerUser({ requestBody: data }), 35 | 36 | onSuccess: () => { 37 | navigate({ to: "/login" }) 38 | showToast( 39 | "Account created.", 40 | "Your account has been created successfully.", 41 | "success", 42 | ) 43 | }, 44 | onError: (err: ApiError) => { 45 | let errDetail = (err.body as any)?.detail 46 | 47 | if (err instanceof AxiosError) { 48 | errDetail = err.message 49 | } 50 | 51 | showToast("Something went wrong.", errDetail, "error") 52 | }, 53 | onSettled: () => { 54 | queryClient.invalidateQueries({ queryKey: ["users"] }) 55 | }, 56 | }) 57 | 58 | const login = async (data: AccessToken) => { 59 | const response = await LoginService.loginAccessToken({ 60 | formData: data, 61 | }) 62 | localStorage.setItem("access_token", response.access_token) 63 | } 64 | 65 | const loginMutation = useMutation({ 66 | mutationFn: login, 67 | onSuccess: () => { 68 | navigate({ to: "/" }) 69 | }, 70 | onError: (err: ApiError) => { 71 | let errDetail = (err.body as any)?.detail 72 | 73 | if (err instanceof AxiosError) { 74 | errDetail = err.message 75 | } 76 | 77 | if (Array.isArray(errDetail)) { 78 | errDetail = "Something went wrong" 79 | } 80 | 81 | setError(errDetail) 82 | }, 83 | }) 84 | 85 | const logout = () => { 86 | localStorage.removeItem("access_token") 87 | navigate({ to: "/login" }) 88 | } 89 | 90 | return { 91 | signUpMutation, 92 | 
loginMutation, 93 | logout, 94 | user, 95 | isLoading, 96 | error, 97 | resetError: () => setError(null), 98 | } 99 | } 100 | 101 | export { isLoggedIn } 102 | export default useAuth 103 | -------------------------------------------------------------------------------- /frontend/src/hooks/useCustomToast.ts: -------------------------------------------------------------------------------- 1 | import { useToast } from "@chakra-ui/react" 2 | import { useCallback } from "react" 3 | 4 | const useCustomToast = () => { 5 | const toast = useToast() 6 | 7 | const showToast = useCallback( 8 | (title: string, description: string, status: "success" | "error") => { 9 | toast({ 10 | title, 11 | description, 12 | status, 13 | isClosable: true, 14 | position: "bottom-right", 15 | }) 16 | }, 17 | [toast], 18 | ) 19 | 20 | return showToast 21 | } 22 | 23 | export default useCustomToast 24 | -------------------------------------------------------------------------------- /frontend/src/main.css: -------------------------------------------------------------------------------- 1 | 2 | html, 3 | body { 4 | position: relative; 5 | width: 100%; 6 | height: 100%; 7 | margin: 0; 8 | padding: 0; 9 | top: 0; 10 | left: 0; 11 | } 12 | 13 | :root { 14 | --dtt-vh: 100vh; 15 | } 16 | 17 | body { 18 | display: flex; 19 | flex-direction: column; 20 | } 21 | 22 | #root { 23 | position: relative; 24 | flex: 1; 25 | width: 100%; 26 | overflow: auto; 27 | } 28 | 29 | #root > * { 30 | position: absolute; 31 | width: 100%; 32 | min-width: 100%; 33 | } -------------------------------------------------------------------------------- /frontend/src/main.tsx: -------------------------------------------------------------------------------- 1 | import { ChakraProvider } from "@chakra-ui/react" 2 | import { QueryClient, QueryClientProvider } from "@tanstack/react-query" 3 | import { RouterProvider, createRouter, createHashHistory } from "@tanstack/react-router" 4 | import ReactDOM from "react-dom/client" 5 | 
import { routeTree } from "./routeTree.gen" 6 | 7 | import { StrictMode } from "react" 8 | import { OpenAPI } from "./client" 9 | import './main.css' 10 | import theme from "./theme" 11 | import { enableHashHistory, getRouterBasePath, getViteApiUrl } from './utils' 12 | 13 | OpenAPI.BASE = getViteApiUrl() 14 | 15 | OpenAPI.TOKEN = async () => { 16 | return localStorage.getItem("access_token") || "" 17 | } 18 | 19 | const queryClient = new QueryClient() 20 | 21 | const hashHistory = createHashHistory() 22 | const router = createRouter({ 23 | routeTree, 24 | basepath: getRouterBasePath(), 25 | ...(enableHashHistory() ? { history: hashHistory } : {}), 26 | }) 27 | 28 | declare module "@tanstack/react-router" { 29 | interface Register { 30 | router: typeof router 31 | } 32 | } 33 | 34 | ReactDOM.createRoot(document.getElementById("root")!).render( 35 | 36 | 37 | 38 | 39 | 40 | 41 | , 42 | ) 43 | -------------------------------------------------------------------------------- /frontend/src/routes/__root.tsx: -------------------------------------------------------------------------------- 1 | import { Outlet, createRootRoute } from "@tanstack/react-router" 2 | import React, { Suspense } from "react" 3 | 4 | import NotFound from "../components/Common/NotFound" 5 | 6 | const loadDevtools = () => 7 | Promise.all([ 8 | import("@tanstack/router-devtools"), 9 | import("@tanstack/react-query-devtools"), 10 | ]).then(([routerDevtools, reactQueryDevtools]) => { 11 | return { 12 | default: () => ( 13 | <> 14 | 15 | 16 | 17 | ), 18 | } 19 | }) 20 | 21 | const TanStackDevtools = 22 | process.env.NODE_ENV === "production" ? 
() => null : React.lazy(loadDevtools) 23 | 24 | export const Route = createRootRoute({ 25 | component: () => ( 26 | <> 27 | 28 | 29 | 30 | 31 | 32 | ), 33 | notFoundComponent: () => , 34 | }) 35 | -------------------------------------------------------------------------------- /frontend/src/routes/_layout.tsx: -------------------------------------------------------------------------------- 1 | import { Flex, Spinner } from "@chakra-ui/react" 2 | import { Outlet, createFileRoute, redirect } from "@tanstack/react-router" 3 | 4 | import Sidebar from "../components/Common/Sidebar" 5 | import UserMenu from "../components/Common/UserMenu" 6 | import useAuth, { isLoggedIn } from "../hooks/useAuth" 7 | import { hideUserSections } from '../utils' 8 | 9 | export const Route = createFileRoute("/_layout")({ 10 | component: Layout, 11 | beforeLoad: async () => { 12 | if (!hideUserSections() && !isLoggedIn()) { 13 | throw redirect({ 14 | to: "/login", 15 | }) 16 | } 17 | }, 18 | }) 19 | 20 | function Layout() { 21 | const { isLoading } = useAuth() 22 | 23 | return ( 24 | 25 | 26 | {isLoading ? ( 27 | 28 | 29 | 30 | ) : ( 31 | 32 | )} 33 | {hideUserSections() ? 
null : } 34 | 35 | ) 36 | } 37 | -------------------------------------------------------------------------------- /frontend/src/routes/_layout/index.tsx: -------------------------------------------------------------------------------- 1 | import { Container, Flex, Heading, Image, Link, Text } from "@chakra-ui/react" 2 | import { createFileRoute } from "@tanstack/react-router" 3 | import Logo from "/assets/images/logo.svg" 4 | 5 | export const Route = createFileRoute("/_layout/")({ 6 | component: Dashboard, 7 | }) 8 | 9 | function Dashboard() { 10 | return ( 11 | <> 12 | 13 | 14 | 15 | Digital Twin Toolbox logo 23 | Digital Twin Toolbox 24 | 25 | 26 | 27 | This project collects different tools/libraries and workflows inside a 28 | docker environment to generate 3D Tiles from common data sources such 29 | as Shapefiles and LAS files. 30 | 31 | 32 | Extensive documentation about this project can be found in the{" "} 33 | 37 | wiki 38 | {" "} 39 | page (see the Table of Contents). 40 | 41 | 42 | 43 | ) 44 | } 45 | -------------------------------------------------------------------------------- /frontend/src/routes/_layout/map.tsx: -------------------------------------------------------------------------------- 1 | import { Box, Divider, Flex } from "@chakra-ui/react" 2 | import { createFileRoute } from "@tanstack/react-router" 3 | import { OpenAPI } from "../../client/core/OpenAPI" 4 | import { getPublicBasePath, hideUserSections } from '../../utils' 5 | 6 | export const Route = createFileRoute("/_layout/map")({ 7 | component: MapStore, 8 | }) 9 | 10 | function MapStore() { 11 | return ( 12 | 13 | 14 | 15 |