├── .dockerignore ├── .github └── workflows │ ├── code-quality.yml │ └── python-app.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── Dockerfile ├── README.md ├── alembic.ini ├── app ├── .env.example ├── __init__.py ├── api │ ├── __init__.py │ ├── auth.py │ ├── aws_s3.py │ ├── cc.py │ ├── files.py │ ├── guides.py │ ├── issues.py │ ├── items.py │ ├── parts.py │ ├── settings.py │ ├── statistics.py │ ├── tags.py │ ├── users.py │ ├── users_groups.py │ └── users_permissions.py ├── config.py ├── crud │ ├── __init__.py │ ├── cc_crud.py │ ├── crud_auth.py │ ├── crud_events.py │ ├── crud_files.py │ ├── crud_groups.py │ ├── crud_guides.py │ ├── crud_issues.py │ ├── crud_items.py │ ├── crud_parts.py │ ├── crud_permission.py │ ├── crud_qr.py │ ├── crud_settings.py │ ├── crud_statistics.py │ ├── crud_tags.py │ └── crud_users.py ├── db.py ├── example.env ├── main.py ├── models │ ├── __init__.py │ ├── models.py │ └── shared_models.py ├── schemas │ ├── __init__.py │ ├── requests.py │ ├── responses.py │ └── schemas.py ├── service │ ├── __init__.py │ ├── auth.py │ ├── bearer_auth.py │ ├── company_details.py │ ├── default_settings.py │ ├── event.py │ ├── health_check.py │ ├── helpers.py │ ├── mentions.py │ ├── notification_email.py │ ├── notification_sms.py │ ├── notifications.py │ ├── password.py │ ├── scheduler.py │ └── tenants.py ├── storage │ ├── __init__.py │ ├── aws_s3.py │ ├── base.py │ └── s3.py └── utils │ ├── __init__.py │ └── decorators.py ├── commands ├── __init__.py └── db_backup │ ├── __init__.py │ ├── backups │ └── .gitkeep │ ├── manage_postgres_db.py │ └── readme.md ├── compose.yaml ├── dev.Dockerfile ├── docs ├── docs.md └── img │ └── FK_Tasks_Users.png ├── migrations ├── __init__.py ├── env.py ├── script.py.mako └── versions │ ├── 2022_07_18_1509-d6ba8c13303e_initial_shared.py │ ├── 2022_08_29_1620-80726328353e_add_tables_users_roles_permissions.py │ ├── 2022_08_29_1621-338496320c4d_add_permissions_entries.py │ ├── 2023_04_26_1750-055684700394_add_users_group.py │ ├── 2023_04_26_1752-a1b0cf6b2fbb_add_settings_table.py │ ├── 2023_04_26_1753-b2e42964ad3f_add_files_table.py │ ├── 2023_04_26_1754-38e5957fa66f_add_items_table.py │ ├── 2023_04_26_1755-40bde431a56f_add_guides_table.py │ ├── 2023_04_26_1757-7283939d25ad_add_qr_code_table.py │ ├── 2023_04_26_1759-249aba91b072_add_issues_table.py │ ├── 2023_04_26_1800-8899525de86a_add_events_table.py │ ├── 2023_04_26_1801-3e3981bb512d_add_tags_table.py │ ├── 2023_04_26_1814-debb10a33f57_add_videos_table.py │ ├── 2023_04_27_1418-cec65e1bd0de_add_parts_table.py │ └── 2023_07_10_1708-13be30248d7d_add_qr_code_scans.py ├── poetry.lock ├── prepush.sh ├── pyproject.toml ├── pytest.ini ├── requirements-dev.txt ├── requirements.txt └── tests ├── __init__.py ├── api_responses ├── GUS │ ├── gus_get_by_nip.json │ └── gus_get_by_nip_no_data_found.json └── rejestr_io_get_by_nip.json ├── conftest.py ├── csv_import_files └── import_users.csv ├── feature ├── test_auth.py ├── test_files.py ├── test_ideas.py ├── test_main.py └── test_user.py ├── files └── postbox.png └── unit ├── test_password.py └── test_s3.py /.dockerignore: -------------------------------------------------------------------------------- 1 | *.log 2 | **/.git 3 | **/.gitignore 4 | **/.vscode 5 | **/coverage 6 | **/.env 7 | **/.aws 8 | **/.ssh 9 | Dockerfile 10 | README.md 11 | docker-compose.yml 12 | compose.yaml 13 | **/.DS_Store 14 | **/venv 15 | **/env 16 | -------------------------------------------------------------------------------- /.github/workflows/code-quality.yml: 
-------------------------------------------------------------------------------- 1 | name: Qodana 2 | on: 3 | workflow_dispatch: 4 | # pull_request: 5 | # push: 6 | # branches: 7 | # - main 8 | # - 'releases/*' 9 | 10 | jobs: 11 | qodana: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v3 15 | with: 16 | fetch-depth: 0 17 | - name: 'Qodana Scan' 18 | uses: JetBrains/qodana-action@v2023.2.1 19 | env: 20 | QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }} 21 | -------------------------------------------------------------------------------- /.github/workflows/python-app.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python app CI 5 | 6 | env: 7 | COLUMNS: 120 8 | 9 | on: 10 | push: 11 | branches: [ main ] 12 | # pull_request: 13 | # branches: [ main ] 14 | workflow_dispatch: 15 | 16 | jobs: 17 | test: 18 | runs-on: ubuntu-22.04 19 | env: 20 | DB_USERNAME : ${{ secrets.DB_USERNAME }} 21 | DB_PASSWORD : ${{ secrets.DB_PASSWORD }} 22 | DB_HOST : ${{ secrets.DB_HOST }} 23 | DB_PORT : ${{ secrets.DB_PORT }} 24 | DB_DATABASE : ${{ secrets.DB_DATABASE }} 25 | AWS_ACCESS_KEY_ID : ${{ secrets.AWS_ACCESS_KEY_ID }} 26 | AWS_SECRET_ACCESS_KEY : ${{ secrets.AWS_SECRET_ACCESS_KEY }} 27 | AWS_DEFAULT_REGION : ${{ secrets.AWS_DEFAULT_REGION }} 28 | AWS_S3_BUCKET : ${{ secrets.AWS_S3_BUCKET }} 29 | AWS_S3_DEFAULT_REGION : ${{ secrets.AWS_S3_DEFAULT_REGION }} 30 | AWS_S3_ACCESS_KEY_ID : ${{ secrets.AWS_S3_ACCESS_KEY_ID }} 31 | AWS_S3_SECRET_ACCESS_KEY : ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} 32 | 33 | steps: 34 | - uses: actions/checkout@v3 35 | - name: Set up Python 3.10 36 | uses: actions/setup-python@v4 37 | with: 38 | python-version: "3.10" 39 | cache: "pip" 40 | cache-dependency-path: requirements.txt 41 | - name: Install dependencies 42 | run: | 43 | python -m pip install --upgrade pip 44 | pip install flake8 pytest 45 | if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi 46 | # if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 47 | - name: Lint with ruff 48 | run: | 49 | # stop the build if there are Python syntax errors or undefined names 50 | # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 51 | ruff . --select=E9,F63,F7,F82 --show-source 52 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 53 | # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 54 | ruff . 
--exit-zero --statistics 55 | - name: Test with pytest 56 | run: | 57 | # calling via python will also add the current directory to sys.path 58 | if [ -d tests ] || [ -d test ]; then python -m pytest -s; fi 59 | # env: 60 | # DB_USERNAME : ${{ secrets.DB_USERNAME }} 61 | # DB_PASSWORD : ${{ secrets.DB_PASSWORD }} 62 | # DB_HOST : ${{ secrets.DB_HOST }} 63 | # DB_PORT : ${{ secrets.DB_PORT }} 64 | # DB_DATABASE : ${{ secrets.DB_DATABASE }} 65 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/* 2 | .vscode/* 3 | *.db 4 | .coverage 5 | docs/coverage/ 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | pip-wheel-metadata/ 30 | share/python-wheels/ 31 | *.egg-info/ 32 | .installed.cfg 33 | *.egg 34 | MANIFEST 35 | 36 | # PyInstaller 37 | # Usually these files are written by a python script from a template 38 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 39 | *.manifest 40 | *.spec 41 | 42 | # Installer logs 43 | pip-log.txt 44 | pip-delete-this-directory.txt 45 | 46 | # Unit test / coverage reports 47 | htmlcov/ 48 | .tox/ 49 | .nox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *.cover 56 | *.py,cover 57 | .hypothesis/ 58 | .pytest_cache/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .env.testing 113 | .venv 114 | env/ 115 | venv/ 116 | ENV/ 117 | env.bak/ 118 | venv.bak/ 119 | 120 | # Spyder project settings 121 | .spyderproject 122 | .spyproject 123 | 124 | # Rope project settings 125 | .ropeproject 126 | 127 | # mkdocs documentation 128 | /site 129 | 130 | # mypy 131 | .mypy_cache/ 132 | .dmypy.json 133 | dmypy.json 134 | 135 | # Pyre type checker 136 | .pyre/ 137 | 138 | # config 139 | *.config 140 | 141 | # backups 142 | *.gz 143 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v4.4.0 6 | hooks: 7 | - id: check-added-large-files 8 | - id: check-toml 9 | - id: check-yaml 10 | args: 11 | - --unsafe 12 | - id: end-of-file-fixer 13 | exclude: ^frontend/src/client/.* 14 | - id: trailing-whitespace 15 | exclude: ^frontend/src/client/.* 16 | ci: 17 | autofix_commit_msg: 🎨 [pre-commit.ci] Auto format from pre-commit.com hooks 18 | autoupdate_commit_msg: ⬆ [pre-commit.ci] pre-commit autoupdate 19 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [Unreleased] 9 | 10 | ### Added 11 | 12 | - Events 13 | 14 | ### Changed 15 | 16 | ### Fixed 17 | 18 | ### Removed 19 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # pull official base image 2 | FROM python:3.10.14-slim-bookworm 3 | 4 | RUN apt-get update && apt-get install -y --no-install-recommends \ 5 | curl \ 6 | postgresql-client \ 7 | && rm -rf /var/lib/apt/lists/* 8 | 9 | # Prevents Python from writing pyc files. 10 | ENV PYTHONDONTWRITEBYTECODE=1 11 | 12 | # Keeps Python from buffering stdout and stderr to avoid situations where 13 | # the application crashes without emitting any logs due to buffering. 14 | ENV PYTHONUNBUFFERED=1 15 | 16 | # Enable python stacktraces on segfaults 17 | ENV PYTHONFAULTHANDLER=1 18 | 19 | WORKDIR /src 20 | 21 | # Create a non-privileged user that the app will run under. 22 | # See https://docs.docker.com/go/dockerfile-user-best-practices/ 23 | ARG UID=10001 24 | RUN adduser \ 25 | --disabled-password \ 26 | --gecos "" \ 27 | --home "/nonexistent" \ 28 | --shell "/sbin/nologin" \ 29 | --no-create-home \ 30 | --uid "${UID}" \ 31 | appuser 32 | 33 | 34 | # Download dependencies as a separate step to take advantage of Docker's caching. 35 | # Leverage a cache mount to /root/.cache/pip to speed up subsequent builds. 36 | # Leverage a bind mount to requirements.txt to avoid having to copy them into 37 | # into this layer. 
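# Note: the cache/bind --mount flags below require BuildKit; with older Docker
# versions, enable it via DOCKER_BUILDKIT=1 (or build with `docker buildx build`).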
38 | RUN --mount=type=cache,target=/root/.cache/pip \ 39 | --mount=type=bind,source=requirements.txt,target=requirements.txt \ 40 | python -m pip install -r requirements.txt 41 | 42 | # TODO: UV - https://github.com/djangopackages/djangopackages/blob/main/dockerfiles/django/Dockerfile-dev 43 | 44 | # set argument vars in docker-run command 45 | ARG AWS_ACCESS_KEY_ID 46 | ARG AWS_SECRET_ACCESS_KEY 47 | ARG AWS_DEFAULT_REGION 48 | 49 | ARG AWS_S3_BUCKET 50 | ARG AWS_S3_DEFAULT_REGION 51 | ARG AWS_S3_ACCESS_KEY_ID 52 | ARG AWS_S3_SECRET_ACCESS_KEY 53 | 54 | #APP 55 | ARG APP_ENV 56 | ARG APP_HOST 57 | 58 | # SENTRY DSN 59 | ARG SENTRY_DSN 60 | 61 | # GUS 62 | ARG GUS_API_DEV 63 | 64 | # API_VIDEO 65 | ARG API_VIDEO 66 | ARG API_VIDEO_UPLOAD 67 | 68 | # AWS RDS vars 69 | ARG DB_USERNAME 70 | ARG DB_PASSWORD 71 | ARG DB_HOST 72 | ARG DB_PORT 73 | ARG DB_DATABASE 74 | 75 | # EMAIL LABS 76 | ARG EMAIL_LABS_APP_KEY 77 | ARG EMAIL_LABS_SECRET_KEY 78 | ARG EMAIL_LABS_SMTP 79 | ARG EMAIL_LABS_SENDER 80 | ARG EMAIL_DEV 81 | 82 | # MAILJET 83 | ARG MAILJET_EMAIL_API_KEY 84 | ARG MAILJET_EMAIL_SECRET 85 | ARG MAILJET_EMAIL_SENDER 86 | ARG MAILJET_SMS_API_KEY 87 | ARG MAILJET_SMS_SENDER 88 | 89 | ENV APP_ENV $APP_ENV 90 | ENV APP_HOST $APP_HOST 91 | 92 | ENV AWS_ACCESS_KEY_ID $AWS_ACCESS_KEY_ID 93 | ENV AWS_SECRET_ACCESS_KEY $AWS_SECRET_ACCESS_KEY 94 | ENV AWS_DEFAULT_REGION $AWS_DEFAULT_REGION 95 | 96 | ENV AWS_S3_BUCKET $AWS_S3_BUCKET 97 | ENV AWS_S3_DEFAULT_REGION $AWS_S3_DEFAULT_REGION 98 | ENV AWS_S3_ACCESS_KEY_ID $AWS_S3_ACCESS_KEY_ID 99 | ENV AWS_S3_SECRET_ACCESS_KEY $AWS_S3_SECRET_ACCESS_KEY 100 | 101 | ENV SENTRY_DSN $SENTRY_DSN 102 | ENV GUS_API_DEV $GUS_API_DEV 103 | ENV API_VIDEO $API_VIDEO 104 | ENV API_VIDEO_UPLOAD $API_VIDEO_UPLOAD 105 | 106 | ENV EMAIL_LABS_APP_KEY $EMAIL_LABS_APP_KEY 107 | ENV EMAIL_LABS_SECRET_KEY $EMAIL_LABS_SECRET_KEY 108 | ENV EMAIL_LABS_SMTP $EMAIL_LABS_SMTP 109 | ENV EMAIL_LABS_SENDER $EMAIL_LABS_SENDER 110 | ENV EMAIL_DEV $EMAIL_DEV 111 | 112 | ENV MAILJET_EMAIL_API_KEY $MAILJET_EMAIL_API_KEY 113 | ENV MAILJET_EMAIL_SECRET $MAILJET_EMAIL_SECRET 114 | ENV MAILJET_EMAIL_SENDER $MAILJET_EMAIL_SENDER 115 | ENV MAILJET_SMS_API_KEY $MAILJET_SMS_API_KEY 116 | ENV MAILJET_SMS_SENDER $MAILJET_SMS_SENDER 117 | 118 | ENV DB_USERNAME $DB_USERNAME 119 | ENV DB_PASSWORD $DB_PASSWORD 120 | ENV DB_HOST $DB_HOST 121 | ENV DB_PORT $DB_PORT 122 | ENV DB_DATABASE $DB_DATABASE 123 | 124 | # Switch to the non-privileged user to run the application. 125 | USER appuser 126 | 127 | #USER alex 128 | COPY --chown=appuser:appuser ./commands /src/commands 129 | COPY --chown=appuser:appuser ./migrations /src/migrations 130 | COPY --chown=appuser:appuser ./alembic.ini /src/alembic.ini 131 | COPY --chown=appuser:appuser ./app /src/app 132 | COPY --chown=appuser:appuser ./tests/api_responses /src/tests/api_responses 133 | 134 | # Expose the port that the application listens on. 135 | EXPOSE 5000 136 | 137 | CMD ["uvicorn", "app.main:app","--no-server-header","--no-proxy-headers", "--host", "0.0.0.0", "--port", "5000" ] 138 | 139 | HEALTHCHECK --interval=21s --timeout=3s --start-period=10s CMD curl --fail http://localhost:5000/health || exit 1 140 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 
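# Note: the %(DB_USER)s-style tokens in sqlalchemy.url below are configparser
# interpolation placeholders; they are expected to be supplied at runtime,
# e.g. via config.set_section_option() in migrations/env.py.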
2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = migrations 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 19 | # If specified, requires the python-dateutil library that can be 20 | # installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to dateutil.tz.gettz() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to migrations/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 53 | 54 | # the output encoding used when revision files 55 | # are written from script.py.mako 56 | # output_encoding = utf-8 57 | 58 | sqlalchemy.url = postgresql://%(DB_USER)s:%(DB_PASS)s@%(DB_HOST)s:5438/%(DB_DATABASE)s 59 | 60 | 61 | [post_write_hooks] 62 | # post_write_hooks defines scripts or Python functions that are run 63 | # on newly generated revision scripts. 
See the documentation for further 64 | # detail and examples 65 | 66 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 67 | # hooks = black 68 | # black.type = console_scripts 69 | # black.entrypoint = black 70 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 71 | 72 | # Logging configuration 73 | [loggers] 74 | keys = root,sqlalchemy,alembic 75 | 76 | [handlers] 77 | keys = console 78 | 79 | [formatters] 80 | keys = generic 81 | 82 | [logger_root] 83 | level = WARN 84 | handlers = console 85 | qualname = 86 | 87 | [logger_sqlalchemy] 88 | level = WARN 89 | handlers = 90 | qualname = sqlalchemy.engine 91 | 92 | [logger_alembic] 93 | level = INFO 94 | handlers = 95 | qualname = alembic 96 | 97 | [handler_console] 98 | class = StreamHandler 99 | args = (sys.stderr,) 100 | level = NOTSET 101 | formatter = generic 102 | 103 | [formatter_generic] 104 | format = %(levelname)-5.5s [%(name)s] %(message)s 105 | datefmt = %H:%M:%S 106 | -------------------------------------------------------------------------------- /app/.env.example: -------------------------------------------------------------------------------- 1 | #DEV 2 | APP_ENV=develop 3 | APP_DEBUG=true 4 | APP_KEY=random_string_Z;b)}NrY@.r6H)rA*_{=CiU[t6Kk;W 5 | APP_HOST=http://frontend-host.com 6 | APP_OPEN_API="/openapi.json" 7 | 8 | # AWS 9 | AWS_ACCESS_KEY_ID= 10 | AWS_SECRET_ACCESS_KEY= 11 | AWS_DEFAULT_REGION= 12 | 13 | AWS_S3_ACCESS_KEY_ID=A 14 | AWS_S3_SECRET_ACCESS_KEY= 15 | 16 | AWS_S3_DEFAULT_REGION= 17 | AWS_S3_BUCKET= 18 | 19 | #DB 20 | DB_HOST=localhost 21 | DB_PORT=5432 22 | DB_DATABASE=pg_db 23 | DB_USERNAME=postgres 24 | DB_PASSWORD=postgres 25 | 26 | # SENTRY 27 | SENTRY_DSN= 28 | 29 | # EMAIL_LABS 30 | EMAIL_LABS_APP_KEY= 31 | EMAIL_LABS_SECRET_KEY= 32 | EMAIL_LABS_SMTP= 33 | EMAIL_LABS_SENDER= 34 | EMAIL_DEV= 35 | 36 | #MAILJET 37 | MAILJET_EMAIL_API_KEY= 38 | MAILJET_EMAIL_SECRET= 39 | MAILJET_EMAIL_SENDER= 40 | MAILJET_SMS_API_KEY= 41 | MAILJET_SMS_SENDER= 42 | 43 | # API VIDEO 44 | API_VIDEO= 45 | API_VIDEO_UPLOAD= 46 | 47 | # EXTERNAL_API 48 | REJESTR_IO_KEY= 49 | GUS_API= 50 | GUS_API_DEV= 51 | -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/__init__.py -------------------------------------------------------------------------------- /app/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/api/__init__.py -------------------------------------------------------------------------------- /app/api/aws_s3.py: -------------------------------------------------------------------------------- 1 | import io 2 | from typing import Annotated 3 | from uuid import uuid4 4 | 5 | from fastapi import APIRouter, Depends, Request, UploadFile 6 | from loguru import logger 7 | from sqlalchemy import func, select 8 | from sqlalchemy.orm import Session 9 | from starlette.responses import StreamingResponse 10 | 11 | from app.config import get_settings 12 | from app.db import get_db 13 | from app.models.models import File 14 | from app.storage.aws_s3 import s3_client, s3_resource 15 | 16 | settings = get_settings() 17 | s3_router = APIRouter() 18 | 19 | 20 | @s3_router.post("/create_bucket") 21 | def post_create_bucket(): 22 | 
logger.info("👋 from S3 route") 23 | prefix = "mgu" 24 | bucket_name = "".join([prefix, "-", "dc5b9aefbee54953824d9fc327df7faf"]) # str(uuid.uuid4().hex) 25 | # mgu-dc5b9aefbee54953824d9fc327df7faf 26 | location = {"LocationConstraint": settings.s3_region} 27 | 28 | try: 29 | s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=location) 30 | except BaseException as error: 31 | print(error) 32 | 33 | return bucket_name 34 | 35 | 36 | @s3_router.get("/list_buckets") 37 | @logger.catch() 38 | def get_buckets_list(): 39 | s3_buckets = [] 40 | response = s3_client.list_buckets() 41 | 42 | print("Listing Amazon S3 Buckets:") 43 | 44 | for bucket in response["Buckets"]: 45 | s3_buckets.append(bucket["Name"]) 46 | print(f"-- {bucket['Name']}") 47 | return s3_buckets 48 | 49 | 50 | # @s3_router.get("/list_files") 51 | # @logger.catch() 52 | # def get_files_list(*, session: Session = Depends(get_db)): 53 | # # https://realpython.com/python-boto3-aws-s3/#object-traversal 54 | # # bucket = s3_resource.Bucket(name=settings.s3_bucket_name) 55 | # 56 | # files = [] 57 | # 58 | # # for obj in bucket.objects.all(): 59 | # # files.append({"name": obj.key}) 60 | # # print(obj.key) 61 | # # subsrc = obj.Object() 62 | # # print(obj.key, obj.storage_class, obj.last_modified, subsrc.version_id, subsrc.metadata) 63 | # 64 | # return files 65 | 66 | 67 | # @s3_router.delete("/dlete_bucket/{bucket_name}") 68 | # def remove_bucket(bucket_name: str): 69 | 70 | # for s3_object in s3_resource.Bucket(bucket_name).objects.all(): 71 | # s3_object.delete() 72 | # # Deleting objects versions if S3 versioning enabled 73 | # for s3_object_ver in s3_resource.Bucket(bucket_name).object_versions.all(): 74 | # s3_object_ver.delete() 75 | 76 | # response = s3_client.delete_bucket(Bucket=bucket_name) 77 | # return response 78 | 79 | 80 | @s3_router.delete("/delete_file/") 81 | def remove_bucket(*, session: Annotated[Session, Depends(get_db)], object_name: str): 82 | a = s3_resource.Object(settings.s3_bucket_name, object_name).delete() 83 | print(a) 84 | 85 | db_task = session.execute(select(File).where(File.file_name == object_name)).one_or_none() 86 | session.delete(db_task) 87 | session.commit() 88 | 89 | return {"msg": "ok"} 90 | 91 | 92 | @s3_router.get("/get_s3_obj/") 93 | def get_s3(s3_obj: str): 94 | """ 95 | Retreives an s3 jpg image and streams it back. 
96 |     ### Request Body
97 |     - `s3_obj`: str
98 |     #### The S3 Object's string
99 | 
100 |     ### Response
101 |     Streamed image
102 |     """
103 |     f = io.BytesIO()
104 |     s3_resource.Bucket(settings.s3_bucket_name).download_fileobj(s3_obj, f)
105 | 
106 |     # f.seek(0)
107 |     # mime_type = magic.from_buffer(f.read(2048), mime=True)
108 | 
109 |     f.seek(0)
110 |     header = {"Content-Disposition": f'inline; filename="{s3_obj}"'}
111 |     return StreamingResponse(f, headers=header)  # media_type=mime_type,
112 | 
113 | 
114 | @s3_router.post("/upload/")
115 | @logger.catch()
116 | def upload_aws_s3(*, session: Annotated[Session, Depends(get_db)], request: Request, file: UploadFile | None = None):
117 |     if not file:
118 |         return {"message": "No file sent"}
119 | 
120 |     # https://www.youtube.com/watch?v=JKlOlDFwsao
121 |     # https://github.com/search?q=upload_fileobj+fastapi&type=code
122 | 
123 |     # s3_resource.Bucket(settings.s3_bucket_name).upload_fileobj(
124 |     #     Fileobj=file.file,
125 |     #     Key=f"folder/{objectName}",
126 |     #     ExtraArgs={
127 |     #         "ContentType": "image/png",
128 |     #         "ACL": "public-read",
129 |     #     },
130 |     # )
131 | 
132 |     quota = session.execute(select(func.sum(File.size)).where(File.account_id == 2)).scalar_one()
133 |     print("quota", quota)
134 | 
135 |     # if quota > 300000:
136 |     #     raise HTTPException(status_code=413, detail="Quota exceeded")
137 | 
138 |     s3_resource.Bucket(settings.s3_bucket_name).upload_fileobj(Fileobj=file.file, Key=file.filename)
139 | 
140 |     new_file = File(
141 |         uuid=str(uuid4()),
142 |         account_id=2,
143 |         owner_id=2,
144 |         file_name=file.filename,
145 |         file_id=1,
146 |         extension="jpg",
147 |         mimetype=file.content_type,
148 |         size=request.headers["content-length"],
149 |     )
150 | 
151 |     session.add(new_file)
152 |     session.commit()
153 |     session.refresh(new_file)
154 | 
155 |     return {"mime": file.content_type, "filename": f"{file.filename}", "uuid": new_file.uuid}
156 | 
157 | 
158 | @s3_router.get("/upload_signed_url")
159 | def sign_s3_upload(object_name: str):
160 |     try:
161 |         url = s3_client.generate_presigned_url(
162 |             "put_object",
163 |             Params={"Bucket": settings.s3_bucket_name, "Key": object_name},
164 |             ExpiresIn=3600,
165 |             HttpMethod="PUT",
166 |         )
167 |     except BaseException:
168 |         return None
169 | 
170 |     return url
171 | 
172 | 
173 | @s3_router.get("/download_signed_url")
174 | def sign_s3_download(tenant: str, file: str) -> str:
175 |     url = s3_client.generate_presigned_url(
176 |         ClientMethod="get_object", Params={"Bucket": settings.s3_bucket_name, "Key": f"folder/{file}"}, ExpiresIn=3600
177 |     )
178 | 
179 |     return url
180 | 
--------------------------------------------------------------------------------
/app/api/cc.py:
--------------------------------------------------------------------------------
1 | import traceback
2 | from typing import Annotated
3 | 
4 | from fastapi import APIRouter, Depends
5 | from sqlalchemy import text
6 | from sqlalchemy.orm import Session
7 | 
8 | from app.config import get_settings
9 | from app.crud import cc_crud, crud_files
10 | from app.db import engine, get_public_db
11 | from app.schemas.responses import StandardResponse
12 | from app.service.bearer_auth import is_app_owner
13 | from app.service.scheduler import scheduler
14 | from app.service.tenants import alembic_upgrade_head
15 | 
16 | settings = get_settings()
17 | 
18 | cc_router = APIRouter()
19 | 
20 | PublicDB = Annotated[Session, Depends(get_public_db)]
21 | 
22 | 
23 | @cc_router.get("/create")
24 | def read_item(schema: str):
25 |     # tenant_create(schema)
26 |     # alembic_upgrade_head(schema)
27
| return {"schema": schema} 28 | 29 | 30 | @cc_router.get("/check_revision") 31 | def check_revision(schema: str): 32 | # with with_db(schema) as db: 33 | # context = MigrationContext.configure(db.connection()) 34 | # script = alembic.script.ScriptDirectory.from_config(alembic_config) 35 | # if context.get_current_revision() != script.get_current_head(): 36 | return {"ok": True} 37 | 38 | 39 | @cc_router.post("/mark_orphan_files", name="files:MarkOrphans") 40 | def cc_mark_orphan_files(*, public_db: PublicDB, auth=Depends(is_app_owner)): 41 | db_companies = cc_crud.get_public_companies(public_db) 42 | 43 | processed = [] 44 | for company in db_companies: 45 | connectable = engine.execution_options(schema_translate_map={"tenant": company.tenant_id}) 46 | with Session(autocommit=False, autoflush=False, bind=connectable, future=True) as db: 47 | orphaned_files_uuid = crud_files.get_orphaned_files(db) 48 | processed.append({company.tenant_id: orphaned_files_uuid}) 49 | # # TODO: one by one 50 | # scheduler.run_job(alembic_upgrade_head, args=[company.tenant_id]) # id=company.tenant_id 51 | # processed.append(company.tenant_id) 52 | 53 | return processed 54 | 55 | 56 | @cc_router.get("/", name="companies:List") 57 | def cc_get_all(*, public_db: PublicDB, auth=Depends(is_app_owner)): 58 | db_companies = cc_crud.get_public_companies(public_db) 59 | 60 | return db_companies 61 | 62 | 63 | @cc_router.post("/", name="migrate:All") 64 | def cc_migrate_all(*, public_db: PublicDB, auth=Depends(is_app_owner)): 65 | db_companies = cc_crud.get_public_companies(public_db) 66 | 67 | processed = [] 68 | for company in db_companies: 69 | # TODO: one by one 70 | scheduler.run_job(alembic_upgrade_head, args=[company.tenant_id]) # id=company.tenant_id 71 | processed.append(company.tenant_id) 72 | 73 | return processed 74 | 75 | 76 | @cc_router.post("/{tenant_id}", response_model=StandardResponse, name="migrate:One") 77 | def cc_migrate_one(*, public_db: PublicDB, tenant_id: str, auth=Depends(is_app_owner)): 78 | scheduler.add_job(alembic_upgrade_head, args=[tenant_id]) # , id="tenant_id" 79 | 80 | return {"ok": True} 81 | 82 | 83 | @cc_router.delete("/{tenant_id}", response_model=StandardResponse, name="migrate:One") 84 | def cc_delete_one(*, public_db: PublicDB, tenant_id: str, auth=Depends(is_app_owner)): 85 | print("Cleaning DB 🧹") 86 | 87 | connection = engine.connect() 88 | trans = connection.begin() 89 | try: 90 | connection.execute(text(f"DELETE FROM public.public_users WHERE tenant_id = '{tenant_id}';")) 91 | connection.execute(text(f"DELETE FROM public.public_companies WHERE tenant_id = '{tenant_id}';")) 92 | connection.execute(text('DROP SCHEMA IF EXISTS "' + tenant_id + '" CASCADE;')) 93 | trans.commit() 94 | except Exception: 95 | traceback.print_exc() 96 | trans.rollback() 97 | print("Bye! 
🫡") 98 | 99 | return {"ok": True} 100 | -------------------------------------------------------------------------------- /app/api/parts.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from typing import Annotated 3 | from uuid import UUID, uuid4 4 | 5 | from fastapi import APIRouter, Depends, HTTPException 6 | from sqlalchemy.orm import Session 7 | 8 | from app.crud import crud_issues, crud_parts 9 | from app.db import get_db 10 | from app.models.models import User 11 | from app.schemas.requests import PartCreateIn, PartEditIn 12 | from app.schemas.responses import PartResponse, StandardResponse 13 | from app.service.bearer_auth import has_token 14 | 15 | part_router = APIRouter() 16 | CurrentUser = Annotated[User, Depends(has_token)] 17 | UserDB = Annotated[Session, Depends(get_db)] 18 | 19 | 20 | @part_router.get("/issue/{issue_uuid}", response_model=list[PartResponse]) 21 | def parts_get_all( 22 | *, 23 | db: UserDB, 24 | issue_uuid: UUID, 25 | auth_user: CurrentUser, 26 | field: str = "name", 27 | order: str = "asc", 28 | is_hidden: bool | None = None, 29 | ): 30 | if field not in ["name"]: 31 | field = "name" 32 | 33 | issue = crud_issues.get_issue_by_uuid(db, issue_uuid) 34 | 35 | parts = crud_parts.get_parts(db, issue.id) 36 | return parts 37 | 38 | 39 | @part_router.post("/", response_model=PartResponse) 40 | def parts_add_one(*, db: UserDB, part: PartCreateIn, auth_user: CurrentUser): 41 | db_issue = crud_issues.get_issue_by_uuid(db, part.issue_uuid) 42 | 43 | if not db_issue: 44 | raise HTTPException(status_code=400, detail="Issue not found") 45 | 46 | value = part.value 47 | if not value: 48 | value = part.price * part.quantity 49 | 50 | part_data = { 51 | "uuid": str(uuid4()), 52 | "issue_id": db_issue.id, 53 | "author_id": auth_user.id, 54 | "name": part.name, 55 | "description": part.description, 56 | "price": part.price, 57 | "quantity": part.quantity, 58 | "unit": part.unit, 59 | "value": value, 60 | "created_at": datetime.now(timezone.utc), 61 | } 62 | 63 | if db_issue.item_id: 64 | part_data["item_id"] = db_issue.item_id 65 | 66 | # pprint(part_data) 67 | # return db_issue 68 | 69 | new_part = crud_parts.create_part(db, part_data) 70 | 71 | return new_part 72 | 73 | 74 | @part_router.patch("/{part_uuid}", response_model=PartResponse) 75 | def parts_edit_one(*, db: UserDB, part_uuid: UUID, part: PartEditIn, auth_user: CurrentUser): 76 | db_part = crud_parts.get_part_by_uuid(db, part_uuid) 77 | 78 | if not db_part: 79 | raise HTTPException(status_code=400, detail="Part not found") 80 | 81 | price = part.price 82 | quantity = part.quantity 83 | value = part.value 84 | 85 | if not price: 86 | price = db_part.price 87 | 88 | if not quantity: 89 | quantity = db_part.quantity 90 | 91 | if not value: 92 | value = price * quantity 93 | 94 | part_data = part.model_dump(exclude_unset=True) 95 | part_data["price"] = price 96 | part_data["quantity"] = quantity 97 | part_data["value"] = value 98 | 99 | # pprint(part_data) 100 | 101 | updated_part = crud_parts.update_part(db, db_part, part_data) 102 | return updated_part 103 | 104 | 105 | @part_router.delete("/{part_uuid}", response_model=StandardResponse) 106 | def parts_delete_one(*, db: UserDB, part_uuid: UUID, auth_user: CurrentUser, force_delete: bool = False): 107 | db_part = crud_parts.get_part_by_uuid(db, part_uuid) 108 | 109 | if not db_part: 110 | raise HTTPException(status_code=404, detail="part not found") 111 | 112 | db.delete(db_part) 113 
| db.commit() 114 | 115 | return {"ok": True} 116 | -------------------------------------------------------------------------------- /app/api/settings.py: -------------------------------------------------------------------------------- 1 | # from typing import list 2 | from datetime import datetime, timezone 3 | from typing import Annotated 4 | 5 | from fastapi import APIRouter, Depends, HTTPException, Query 6 | from langcodes import standardize_tag 7 | from pydantic import parse_obj_as 8 | from sqlalchemy.orm import Session 9 | 10 | from app.crud import crud_settings, crud_users 11 | from app.db import get_db 12 | from app.models.models import User 13 | from app.schemas.requests import SettingGeneralIn, SettingNotificationIn, SettingUserLanguage 14 | from app.schemas.responses import SettingNotificationResponse, StandardResponse 15 | from app.service.bearer_auth import has_token 16 | from app.service.default_settings import allowed_settings 17 | 18 | setting_router = APIRouter() 19 | 20 | CurrentUser = Annotated[User, Depends(has_token)] 21 | UserDB = Annotated[Session, Depends(get_db)] 22 | 23 | 24 | # GENERAL 25 | @setting_router.get("/", name="setting:read") 26 | def setting_get_all(*, db: UserDB, auth_user: CurrentUser, settings: Annotated[list[str], Query()] = None): 27 | user_id = auth_user.id 28 | 29 | if user_id == 0: 30 | raise HTTPException(status_code=404, detail="Setting for anonymous user not exists!") 31 | 32 | if settings is None or not set(settings).issubset(allowed_settings.keys()): 33 | raise HTTPException(status_code=404, detail="Setting not allowed") 34 | 35 | db_settings = crud_settings.get_general_settings_by_names(db, user_id, settings) 36 | 37 | result = {} 38 | for elt in db_settings: 39 | result[elt.name] = parse_obj_as(elt.value_type, elt.value) 40 | 41 | if settings is not None: 42 | for status in settings: 43 | result.setdefault(status, allowed_settings[status]) 44 | 45 | return result 46 | 47 | 48 | @setting_router.post("/", name="setting:add") 49 | def setting_set_one(*, db: UserDB, setting: SettingGeneralIn, auth_user: CurrentUser): 50 | user_id = auth_user.id 51 | 52 | db_setting = crud_settings.get_user_general_setting_by_name(db, user_id, setting.name) 53 | 54 | if db_setting is None: 55 | data = { 56 | "user_id": user_id, 57 | "name": setting.name, 58 | "value": setting.value, 59 | "value_type": setting.type, 60 | "created_at": datetime.now(timezone.utc), 61 | } 62 | 63 | new_setting = crud_settings.create_user_setting(db, data) 64 | 65 | return new_setting 66 | 67 | data = { 68 | "value": setting.value, 69 | "value_type": setting.type, 70 | "prev_value": db_setting.value, 71 | "updated_at": datetime.now(timezone.utc), 72 | } 73 | 74 | updated_setting = crud_settings.update_user_setting(db, db_setting, data) 75 | return updated_setting 76 | 77 | # return "OK" 78 | 79 | 80 | # Notifications 81 | @setting_router.get("/notifications/", response_model=SettingNotificationResponse, name="settings:notifications") 82 | def setting_notification_get(*, db: UserDB, auth_user: CurrentUser): 83 | user_id = auth_user.id 84 | 85 | db_settings = crud_settings.get_notification_settings_by_user_id(db, user_id) 86 | 87 | if db_settings is None: 88 | raise HTTPException(status_code=404, detail="Notification setting not found") 89 | 90 | return db_settings 91 | 92 | 93 | @setting_router.post("/notifications/", response_model=SettingNotificationResponse, name="settings:notifications") 94 | def setting_notification_set(*, db: UserDB, setting: SettingNotificationIn, auth_user: 
CurrentUser): 95 | user_id = auth_user.id 96 | 97 | db_settings = crud_settings.get_notification_settings_by_user_id(db, user_id) 98 | 99 | if db_settings is None: 100 | setting_data = { 101 | "user_id": user_id, 102 | "sms_notification_level": setting.sms_notification_level, 103 | "email_notification_level": setting.email_notification_level, 104 | "created_at": datetime.now(timezone.utc), 105 | } 106 | 107 | db_settings = crud_settings.create_notification_setting(db, setting_data) 108 | return db_settings 109 | 110 | crud_settings.update_notification_setting(db, db_settings, setting.model_dump(exclude_unset=False)) 111 | 112 | return db_settings 113 | 114 | 115 | # LANG 116 | @setting_router.post("/user_lang/", response_model=StandardResponse, name="settings:notifications") 117 | def setting_user_lang(*, db: UserDB, lang: SettingUserLanguage, auth_user: CurrentUser): 118 | if lang.code not in ["de", "en-US", "fr", "pl"]: 119 | raise HTTPException(status_code=404, detail="Language code setting invalid") 120 | 121 | db_user = crud_users.get_user_by_id(db, auth_user.id) 122 | if not db_user: 123 | raise HTTPException(status_code=404, detail="User not found") 124 | 125 | crud_users.update_user(db, db_user, {"lang": standardize_tag(lang.code)}) 126 | 127 | return {"ok": True} 128 | -------------------------------------------------------------------------------- /app/api/statistics.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated 2 | 3 | import pandas as pd 4 | from fastapi import APIRouter, Depends 5 | from fastapi.responses import StreamingResponse 6 | from sqlalchemy.orm import Session 7 | 8 | from app.crud import crud_statistics, crud_users 9 | from app.db import get_db 10 | from app.models.models import User 11 | from app.schemas.responses import StatsIssuesCounterResponse 12 | 13 | # from app.schemas.schemas import IdeaIndexResponse 14 | from app.service.bearer_auth import has_token 15 | 16 | statistics_router = APIRouter() 17 | 18 | CurrentUser = Annotated[User, Depends(has_token)] 19 | UserDB = Annotated[Session, Depends(get_db)] 20 | 21 | 22 | @statistics_router.get("/issues_counter", response_model=StatsIssuesCounterResponse) 23 | def stats_issues_counter(*, db: UserDB, auth_user: CurrentUser): 24 | issues_counter_summary = crud_statistics.get_issues_counter_summary(db) 25 | if not issues_counter_summary: 26 | return {"new": 0, "accepted": 0, "rejected": 0, "assigned": 0, "in_progress": 0, "paused": 0, "done": 0} 27 | 28 | issues_counter = dict(issues_counter_summary) 29 | 30 | for status in ["new", "accepted", "rejected", "assigned", "in_progress", "paused", "done"]: 31 | issues_counter.setdefault(status, 0) 32 | 33 | return issues_counter 34 | 35 | 36 | @statistics_router.get("/first_steps") 37 | def stats_first_steps(*, db: UserDB, auth_user: CurrentUser): 38 | user_id = auth_user.id 39 | response: dict = {} 40 | 41 | items = crud_statistics.get_items_counter_summary(db) 42 | items = dict(items) 43 | 44 | active = ["new", "accepted", "assigned", "in_progress", "paused"] 45 | inactive = ["rejected", "done"] 46 | 47 | issues_active = crud_statistics.get_issues_counter_by_status(db, active) 48 | issues_active = dict(issues_active) 49 | 50 | issues_inactive = crud_statistics.get_issues_counter_by_status(db, inactive) 51 | issues_inactive = dict(issues_inactive) 52 | 53 | response["items"] = {"total": sum(items.values()), "me": items.setdefault(user_id, 0)} 54 | response["users"] = crud_users.get_user_count(db, 
user_id) 55 | response["issues_active"] = {"total": sum(issues_active.values()), "me": issues_active.setdefault(user_id, 0)} 56 | response["issues_inactive"] = {"total": sum(issues_inactive.values()), "me": issues_inactive.setdefault(user_id, 0)} 57 | response["favourites"] = crud_statistics.get_favourites_counter_summary(db, user_id) 58 | 59 | return response 60 | 61 | 62 | @statistics_router.get("/all_items_failures") 63 | def stats_all_items_failures(*, db: UserDB, auth_user: CurrentUser): 64 | pass 65 | 66 | 67 | @statistics_router.get("/events") 68 | def stats_events_to_pd(*, db: UserDB, auth_user: CurrentUser): 69 | events = crud_statistics.get_events(db) 70 | 71 | columns = ["id", "action", "author_id"] 72 | df_from_records = pd.DataFrame.from_records(events, index="id", columns=columns) 73 | 74 | print("########") 75 | print(df_from_records.head(5)) 76 | print("########") 77 | 78 | df_from_records.info() 79 | 80 | output = df_from_records.to_csv(index=False) 81 | 82 | # https://stackoverflow.com/questions/61140398/fastapi-return-a-file-response-with-the-output-of-a-sql-query 83 | 84 | return StreamingResponse( 85 | iter([output]), media_type="text/csv", headers={"Content-Disposition": "attachment;filename=.csv"} 86 | ) 87 | -------------------------------------------------------------------------------- /app/api/tags.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from typing import Annotated 3 | from uuid import UUID, uuid4 4 | 5 | from fastapi import APIRouter, Depends, HTTPException 6 | from sqlalchemy.orm import Session 7 | 8 | from app.crud import crud_issues, crud_tags 9 | from app.db import get_db 10 | from app.models.models import User 11 | from app.schemas.requests import TagCreateIn, TagEditIn 12 | from app.schemas.responses import StandardResponse, TagResponse 13 | from app.service.bearer_auth import has_token 14 | 15 | tag_router = APIRouter() 16 | 17 | CurrentUser = Annotated[User, Depends(has_token)] 18 | UserDB = Annotated[Session, Depends(get_db)] 19 | 20 | 21 | @tag_router.get("/", response_model=list[TagResponse]) 22 | def tags_get_all( 23 | *, db: UserDB, auth_user: CurrentUser, field: str = "name", order: str = "asc", is_hidden: bool | None = None 24 | ): 25 | if field not in ["name"]: 26 | field = "name" 27 | 28 | tags = crud_tags.get_tags(db, field, order, is_hidden) 29 | return tags 30 | 31 | 32 | @tag_router.post("/", response_model=TagResponse) 33 | def tags_add_one(*, db: UserDB, tag: TagCreateIn, auth_user: CurrentUser): 34 | db_tag = crud_tags.get_tag_by_name(db, tag.name) 35 | 36 | if db_tag: 37 | raise HTTPException(status_code=400, detail="Tag name already exists") 38 | 39 | tag_data = { 40 | "uuid": str(uuid4()), 41 | "name": tag.name, 42 | "color": tag.color, 43 | "icon": tag.icon, 44 | "author_id": auth_user.id, 45 | "created_at": datetime.now(timezone.utc), 46 | } 47 | 48 | tag = crud_tags.create_tag(db, tag_data) 49 | 50 | return tag 51 | 52 | 53 | @tag_router.patch("/{tag_uuid}", response_model=TagResponse) 54 | def tags_edit_one(*, db: UserDB, tag_uuid: UUID, tag: TagEditIn, auth_user: CurrentUser): 55 | db_tag = crud_tags.get_tag_by_uuid(db, tag_uuid) 56 | 57 | tag_data = {"is_hidden": tag.is_hidden} 58 | if tag.color is not None: 59 | tag_data["color"] = tag.color.as_hex() 60 | 61 | crud_tags.update_tag(db, db_tag, tag_data) 62 | return db_tag 63 | 64 | 65 | @tag_router.delete("/{tag_uuid}", response_model=StandardResponse) 66 | def tags_delete_one(*, db: 
UserDB, tag_uuid: UUID, auth_user: CurrentUser, force_delete: bool = False):
67 |     db_tag = crud_tags.get_tag_by_uuid(db, tag_uuid)
68 | 
69 |     if not db_tag:
70 |         raise HTTPException(status_code=404, detail="Tag not found")
71 | 
72 |     tag_usage = crud_issues.count_issues_by_tag(db, db_tag.id)
73 | 
74 |     if tag_usage > 0:
75 |         raise HTTPException(status_code=400, detail="Tag in use")
76 | 
77 |     # print(tag_usage)
78 |     db.delete(db_tag)
79 |     db.commit()
80 | 
81 |     return {"ok": True}
82 | 
--------------------------------------------------------------------------------
/app/api/users_groups.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timezone
2 | from typing import Annotated
3 | from uuid import UUID, uuid4
4 | 
5 | from fastapi import APIRouter, Depends, HTTPException
6 | from fastapi_pagination import Page, Params, paginate
7 | from sqlalchemy.orm import Session
8 | 
9 | from app.crud import crud_groups, crud_users
10 | from app.db import get_db
11 | from app.models.models import User
12 | from app.schemas.requests import GroupAddIn, GroupEditIn
13 | from app.schemas.responses import GroupResponse, GroupSummaryResponse, StandardResponse
14 | from app.service.bearer_auth import has_token
15 | 
16 | group_router = APIRouter()
17 | 
18 | CurrentUser = Annotated[User, Depends(has_token)]
19 | UserDB = Annotated[Session, Depends(get_db)]
20 | 
21 | 
22 | @group_router.get("/", response_model=Page[GroupSummaryResponse])
23 | def group_get_all(
24 |     *,
25 |     db: UserDB,
26 |     params: Annotated[Params, Depends()],
27 |     auth_user: CurrentUser,
28 |     search: str | None = None,
29 |     field: str = "name",
30 |     order: str = "asc",
31 | ):
32 |     if field not in ["name", "created_at"]:
33 |         field = "name"
34 | 
35 |     db_user_groups = crud_groups.get_user_groups(db, search, field, order)
36 |     return paginate(db_user_groups, params)
37 | 
38 | 
39 | @group_router.get("/{group_uuid}", response_model=GroupResponse)
40 | def group_get_one(*, db: UserDB, group_uuid: UUID, auth_user: CurrentUser):
41 |     db_user_group = crud_groups.get_user_group_by_uuid(db, group_uuid)
42 | 
43 |     return db_user_group
44 | 
45 | 
46 | @group_router.post("/", response_model=GroupResponse)
47 | def group_add(*, db: UserDB, group: GroupAddIn, auth_user: CurrentUser):
48 |     db_user_group = crud_groups.get_user_group_by_name(db, group.name)
49 |     if db_user_group:
50 |         raise HTTPException(status_code=400, detail="Group already exists!")
51 | 
52 |     users = []
53 |     if group.users is not None:
54 |         for user_uuid in group.users:
55 |             db_user = crud_users.get_user_by_uuid(db, user_uuid)
56 |             if db_user:
57 |                 users.append(db_user)
58 | 
59 |     group_data = {
60 |         "uuid": str(uuid4()),
61 |         "name": group.name,
62 |         "description": group.description,
63 |         "symbol": group.symbol,
64 |         "users": users,
65 |         "created_at": datetime.now(timezone.utc),
66 |     }
67 | 
68 |     new_group = crud_groups.create_group_with_users(db, group_data)
69 | 
70 |     return new_group
71 | 
72 | 
73 | @group_router.patch("/{group_uuid}", response_model=GroupResponse)
74 | def group_edit(*, db: UserDB, group_uuid: UUID, group: GroupEditIn, auth_user: CurrentUser):
75 |     db_user_group = crud_groups.get_user_group_by_uuid(db, group_uuid)
76 |     if not db_user_group:
77 |         raise HTTPException(status_code=404, detail="Group not found!")
78 | 
79 |     group_data = group.model_dump(exclude_unset=True)
80 | 
81 |     users = []
82 |     if ("users" in group_data) and (group_data["users"] is not None):
83 |         for user in list(db_user_group.users):
84 |
db_user_group.users.remove(user)
85 |         for user in group_data["users"]:
86 |             db_user = crud_users.get_user_by_uuid(db, user)
87 |             if db_user:
88 |                 users.append(db_user)
89 | 
90 |         del group_data["users"]
91 |         group_data["users"] = users
92 | 
93 |     new_group = crud_groups.update_user_group(db, db_user_group, group_data)
94 | 
95 |     return new_group
96 | 
97 | 
98 | @group_router.delete("/{group_uuid}", response_model=StandardResponse)
99 | def group_delete(*, db: UserDB, group_uuid: UUID, auth_user: CurrentUser):
100 |     db_user_group = crud_groups.get_user_group_by_uuid(db, group_uuid)
101 | 
102 |     if not db_user_group:
103 |         raise HTTPException(status_code=404, detail="Group not found")
104 | 
105 |     # TODO rel?
106 |     db.delete(db_user_group)
107 |     db.commit()
108 | 
109 |     return {"ok": True}
110 | 
--------------------------------------------------------------------------------
/app/api/users_permissions.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timezone
2 | from typing import Annotated
3 | from uuid import UUID, uuid4
4 | 
5 | from fastapi import APIRouter, Depends, HTTPException
6 | from fastapi_pagination import Page, Params, paginate
7 | from sqlalchemy.orm import Session
8 | 
9 | from app.crud import crud_permission, crud_users
10 | from app.db import get_db
11 | from app.models.models import User
12 | from app.schemas.requests import RoleAddIn, RoleEditIn
13 | from app.schemas.responses import PermissionResponse, RolePermissionFull, RoleSummaryResponse, StandardResponse
14 | from app.service.bearer_auth import has_token
15 | from app.service.helpers import to_snake_case
16 | 
17 | permission_router = APIRouter()
18 | 
19 | CurrentUser = Annotated[User, Depends(has_token)]
20 | UserDB = Annotated[Session, Depends(get_db)]
21 | 
22 | 
23 | @permission_router.get("/", response_model=Page[RoleSummaryResponse])
24 | def role_get_all(
25 |     *,
26 |     db: UserDB,
27 |     params: Annotated[Params, Depends()],
28 |     auth_user: CurrentUser,
29 |     search: str | None = None,
30 |     all: bool = True,
31 |     sortOrder: str = "asc",
32 |     sortColumn: str = "name",
33 | ):
34 |     sortTable = {"name": "role_title"}
35 | 
36 |     db_roles = crud_permission.get_roles_summary(db, search, all, sortTable.get(sortColumn, "role_title"), sortOrder)
37 |     return paginate(db_roles, params)
38 | 
39 | 
40 | @permission_router.get("/all", response_model=list[PermissionResponse])
41 | def permissions_get_all(*, db: UserDB, auth_user: CurrentUser):
42 |     db_permissions = crud_permission.get_permissions(db)
43 |     return db_permissions
44 | 
45 | 
46 | @permission_router.get("/{role_uuid}", response_model=RolePermissionFull)
47 | def role_get_one(*, db: UserDB, role_uuid: UUID, auth_user: CurrentUser):
48 |     db_role = crud_permission.get_role_by_uuid(db, role_uuid)
49 |     if not db_role:
50 |         raise HTTPException(status_code=404, detail="Role not found!")
51 |     return db_role
52 | 
53 | 
54 | @permission_router.post("/", response_model=RolePermissionFull)
55 | def role_add(*, db: UserDB, role: RoleAddIn, auth_user: CurrentUser):
56 |     db_role = crud_permission.get_role_by_name(db, role.title)
57 |     if db_role:
58 |         raise HTTPException(status_code=400, detail="Role already exists!")
59 | 
60 |     permissions = []
61 |     if role.permissions is not None:
62 |         for permissions_uuid in role.permissions:
63 |             db_permission = crud_permission.get_permission_by_uuid(db, permissions_uuid)
64 |             if db_permission:
65 |
permissions.append(db_permission)
66 | 
67 |     role_data = {
68 |         "uuid": str(uuid4()),
69 |         "is_custom": True,
70 |         "is_visible": True,
71 |         "is_system": False,
72 |         "role_name": to_snake_case(role.title),
73 |         "role_title": role.title,
74 |         "role_description": role.description,
75 |         "permission": permissions,
76 |     }
77 | 
78 |     new_role = crud_permission.create_role_with_permissions(db, role_data)
79 |     return new_role
80 | 
81 | 
82 | @permission_router.patch("/{role_uuid}", response_model=RolePermissionFull)
83 | def role_edit(*, db: UserDB, role_uuid: UUID, role: RoleEditIn, auth_user: CurrentUser):
84 |     db_role = crud_permission.get_role_by_uuid(db, role_uuid)
85 |     if not db_role:
86 |         raise HTTPException(status_code=404, detail="Role not found!")
87 | 
88 |     role_data = role.model_dump(exclude_unset=True)
89 | 
90 |     permissions = []
91 |     if ("permissions" in role_data) and (role_data["permissions"] is not None):
92 |         for permission in list(db_role.permission):
93 |             db_role.permission.remove(permission)
94 |         for permission in role_data["permissions"]:
95 |             db_permission = crud_permission.get_permission_by_uuid(db, permission)
96 |             if db_permission:
97 |                 permissions.append(db_permission)
98 | 
99 |         role_data["permission"] = permissions
100 |         del role_data["permissions"]
101 | 
102 |     role_data["role_name"] = to_snake_case(role.title)
103 |     role_data["role_title"] = role.title
104 |     role_data["role_description"] = role.description
105 | 
106 |     del role_data["title"]
107 |     del role_data["description"]
108 | 
109 |     new_role = crud_permission.update_role(db, db_role, role_data)
110 | 
111 |     return new_role
112 | 
113 | 
114 | @permission_router.delete("/{role_uuid}", response_model=StandardResponse)
115 | def role_delete(*, db: UserDB, role_uuid: UUID, auth_user: CurrentUser, force: bool = False):
116 |     db_role = crud_permission.get_role_by_uuid(db, role_uuid)
117 | 
118 |     if not db_role:
119 |         raise HTTPException(status_code=404, detail="Role not found")
120 | 
121 |     db_users = crud_users.get_users_by_role_id(db, db_role.id)
122 |     # fields = ['uuid', 'first_name', 'last_name']
123 |     # db_users_list = [dict(zip(fields, d)) for d in db_users]
124 | 
125 |     if db_users:
126 |         error_message = {"message": "Role is assigned to one or more users", "count": len(db_users)}
127 |         raise HTTPException(status_code=400, detail=error_message)
128 | 
129 |     if force is True:
130 |         db.delete(db_role)
131 |         db.commit()
132 |         return {"ok": True}
133 | 
134 |     crud_permission.update_role(db, db_role, {"deleted_at": datetime.now(timezone.utc)})
135 | 
136 |     return {"ok": True}
137 | 
--------------------------------------------------------------------------------
/app/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from functools import lru_cache
3 | from pathlib import Path
4 | from typing import Literal
5 | 
6 | from pydantic_settings import BaseSettings, SettingsConfigDict
7 | 
8 | APP_DIR = Path(__file__).parent.parent / "app"
9 | 
10 | 
11 | class Settings(BaseSettings):
12 |     PROJECT_DIR: os.PathLike[str] = Path(__file__).parent.parent
13 |     ENVIRONMENT: Literal["DEV", "PYTEST", "STG", "PRD"] | None = os.getenv("APP_ENV", "PYTEST")
14 |     OPEN_API: str | None = os.getenv("APP_OPEN_API")
15 |     base_app_url: str | None = os.getenv("APP_HOST", "https://frontend-host.com")
16 | 
17 |     s3_region: str | None = os.getenv("AWS_DEFAULT_REGION")
18 |     s3_access_key: str | None = os.getenv("AWS_S3_ACCESS_KEY_ID")
19 |     s3_secret_access_key: str | None = os.getenv("AWS_S3_SECRET_ACCESS_KEY")
20 |
s3_bucket_name: str | None = os.getenv("AWS_S3_BUCKET")
21 |     s3_bucket_region: str | None = os.getenv("AWS_S3_DEFAULT_REGION")
22 | 
23 |     sentry_dsn: str | None = os.getenv("SENTRY_DSN")
24 | 
25 |     email_labs_app_key: str | None = os.getenv("EMAIL_LABS_APP_KEY")
26 |     email_labs_secret_key: str | None = os.getenv("EMAIL_LABS_SECRET_KEY")
27 |     email_smtp: str | None = os.getenv("EMAIL_LABS_SMTP")
28 |     email_sender: str | None = os.getenv("EMAIL_LABS_SENDER")
29 |     email_dev: str | None = os.getenv("EMAIL_DEV")
30 | 
31 |     email_mailjet_app_key: str | None = os.getenv("MAILJET_EMAIL_API_KEY")
32 |     email_mailjet_secret_key: str | None = os.getenv("MAILJET_EMAIL_SECRET")
33 |     email_mailjet_sender: str | None = os.getenv("MAILJET_EMAIL_SENDER")
34 |     sms_mailjet_api_key: str | None = os.getenv("MAILJET_SMS_API_KEY")
35 |     sms_mailjet_sender: str | None = os.getenv("MAILJET_SMS_SENDER")
36 | 
37 |     # API
38 |     REJESTR_IO_KEY: str | None = os.getenv("REJESTR_IO_KEY")
39 |     GUS_KEY: str | None = os.getenv("GUS_API")
40 |     GUS_API_DEV: str | None = os.getenv("GUS_API_DEV")
41 |     API_VIDEO: str | None = os.getenv("API_VIDEO")
42 |     API_VIDEO_UPLOAD: str | None = os.getenv("API_VIDEO_UPLOAD")
43 | 
44 |     # POSTGRESQL DEFAULT DATABASE
45 |     DEFAULT_DATABASE_HOSTNAME: str | None = os.getenv("DB_HOST")
46 |     DEFAULT_DATABASE_PORT: str | None = os.getenv("DB_PORT")
47 |     DEFAULT_DATABASE_DB: str | None = os.getenv("DB_DATABASE")
48 |     DEFAULT_DATABASE_USER: str | None = os.getenv("DB_USERNAME")
49 |     DEFAULT_DATABASE_PASSWORD: str | None = os.getenv("DB_PASSWORD")
50 | 
51 |     # DEFAULT_SQLALCHEMY_DATABASE_URI: str | None = os.getenv("DEFAULT_SQLALCHEMY_DATABASE_URI")
52 | 
53 |     # POSTGRESQL TEST DATABASE
54 |     TEST_DATABASE_HOSTNAME: str | None = "postgres"
55 |     TEST_DATABASE_USER: str | None = "postgres"
56 |     TEST_DATABASE_PASSWORD: str | None = "postgres"
57 |     TEST_DATABASE_PORT: str | None = "5432"
58 |     TEST_DATABASE_DB: str | None = "postgres"
59 |     # TEST_SQLALCHEMY_DATABASE_URI: str | None = ""
60 |     # TEST_SQLALCHEMY_DATABASE_URI: str | None = os.getenv("TEST_SQLALCHEMY_DATABASE_URI")
61 | 
62 |     model_config = SettingsConfigDict(
63 |         env_prefix="", env_file_encoding="utf-8", env_file=f"{APP_DIR}/.env", extra="allow"
64 |     )
65 | 
66 | 
67 | @lru_cache
68 | def get_settings() -> Settings:
69 |     # path = Path(__file__).parent.parent / "app" / ".env.testing"
70 |     # return Settings(_env_file=path.as_posix(), _env_file_encoding="utf-8")
71 |     return Settings()
72 | 
--------------------------------------------------------------------------------
/app/crud/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/crud/__init__.py
--------------------------------------------------------------------------------
/app/crud/cc_crud.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Sequence
2 | 
3 | from sqlalchemy import select
4 | from sqlalchemy.orm import Session
5 | 
6 | from app.models.shared_models import PublicCompany
7 | 
8 | 
9 | def get_public_companies(db: Session) -> Sequence[PublicCompany]:
10 |     return db.execute(select(PublicCompany)).scalars().all()
11 | 
--------------------------------------------------------------------------------
/app/crud/crud_auth.py:
--------------------------------------------------------------------------------
1 | import base64
2 | from datetime import datetime, timezone
3 | 
4 | from fastapi
import HTTPException 5 | from sqlalchemy import distinct, func, select 6 | from sqlalchemy.orm import Session 7 | 8 | from app.db import engine 9 | from app.models.models import User 10 | from app.models.shared_models import PublicCompany, PublicUser 11 | 12 | 13 | def get_public_user_by_email(db: Session, email: str) -> PublicUser | None: 14 | query = select(PublicUser).where(PublicUser.email == email) 15 | 16 | result = db.execute(query) # await db.execute(query) 17 | 18 | return result.scalar_one_or_none() 19 | 20 | 21 | def get_public_user_by_service_token(db: Session, token: str) -> PublicUser | None: 22 | query = ( 23 | select(PublicUser) 24 | .where(PublicUser.service_token == token) 25 | .where(PublicUser.is_active == False) # noqa: E712 26 | .where(PublicUser.service_token_valid_to > datetime.now(timezone.utc)) 27 | ) 28 | 29 | result = db.execute(query) # await db.execute(query) 30 | return result.scalar_one_or_none() 31 | 32 | 33 | def get_public_active_user_by_service_token(db: Session, token: str) -> PublicUser | None: 34 | query = ( 35 | select(PublicUser) 36 | .where(PublicUser.service_token == token) 37 | .where(PublicUser.is_active == True) # noqa: E712 38 | .where(PublicUser.service_token_valid_to > datetime.now(timezone.utc)) 39 | ) 40 | 41 | result = db.execute(query) # await db.execute(query) 42 | return result.scalar_one_or_none() 43 | 44 | 45 | def get_public_company_count(db: Session) -> int | None: 46 | query = select(func.count(PublicCompany.id)) 47 | 48 | result = db.execute(query) # await db.execute(query) 49 | return result.scalar_one_or_none() 50 | 51 | 52 | def get_public_company_by_nip(db: Session, nip: str) -> PublicCompany | None: 53 | query = select(PublicCompany).where(PublicCompany.nip == nip) 54 | result = db.execute(query) # await db.execute(query) 55 | return result.scalar_one_or_none() 56 | 57 | 58 | def get_public_company_by_qr_id(db: Session, qr_id: str) -> PublicCompany | None: 59 | query = select(PublicCompany).where(PublicCompany.qr_id == qr_id) 60 | result = db.execute(query) # await db.execute(query) 61 | return result.scalar_one_or_none() 62 | 63 | 64 | def get_public_company_by_tenant_id(db: Session, tenant_id: str) -> PublicCompany | None: 65 | query = select(PublicCompany).where(PublicCompany.tenant_id == tenant_id) 66 | result = db.execute(query) # await db.execute(query) 67 | return result.scalar_one_or_none() 68 | 69 | 70 | def get_schemas_from_public_company(db: Session): 71 | query = select(distinct(PublicCompany.tenant_id)) 72 | result = db.execute(query) # await db.execute(query) 73 | return result.scalars().all() 74 | 75 | 76 | def create_public_user(db: Session, public_user: dict) -> PublicUser: 77 | new_user = PublicUser(**public_user) 78 | db.add(new_user) 79 | db.commit() 80 | db.refresh(new_user) 81 | 82 | return new_user 83 | 84 | 85 | def create_public_company(db: Session, company: dict) -> PublicCompany: 86 | new_company = PublicCompany(**company) 87 | db.add(new_company) 88 | db.commit() 89 | db.refresh(new_company) 90 | 91 | return new_company 92 | 93 | 94 | def update_public_user(db: Session, db_user: PublicUser, update_data: dict) -> PublicUser: 95 | for key, value in update_data.items(): 96 | setattr(db_user, key, value) 97 | 98 | db.add(db_user) 99 | db.commit() 100 | db.refresh(db_user) 101 | 102 | return db_user 103 | 104 | 105 | # def update_tenant_user(db: Session, db_user: User, update_data: dict) -> User: 106 | # try: 107 | # for key, value in update_data.items(): 108 | # setattr(db_user, key, value) 109 | 110 
| # db.add(db_user) 111 | # db.commit() 112 | # db.refresh(db_user) 113 | # except Exception as e: 114 | # print("#####", e) 115 | # return db_user 116 | 117 | 118 | def create_tenant_user(db: Session, tenant_data) -> User: 119 | # try: 120 | new_user = User(**tenant_data) 121 | 122 | db.add(new_user) 123 | db.commit() 124 | db.refresh(new_user) 125 | # except Exception as e: 126 | # print(e) 127 | return new_user 128 | 129 | 130 | def get_tenant_user_by_auth_token(db: Session, token: str) -> User | None: 131 | try: 132 | query = ( 133 | select(User) 134 | .where(User.auth_token == token) 135 | .where(User.is_active == True) # noqa: E712 136 | .where(User.auth_token_valid_to > datetime.now(timezone.utc)) 137 | ) 138 | 139 | result = db.execute(query) # await db.execute(query) 140 | db_tenant_user = result.scalar_one_or_none() 141 | 142 | return db_tenant_user 143 | except Exception as e: 144 | print(e) 145 | 146 | 147 | def get_anonymous_user(db: Session) -> User: 148 | query = select(User).where(User.email == "anonymous@example.com").where(User.is_visible == False) # noqa: E712 149 | 150 | result = db.execute(query) # await db.execute(query) 151 | return result.scalar_one_or_none() 152 | 153 | 154 | def generate_base64_token(token: str) -> str: 155 | message_bytes = token.encode("ascii") 156 | base64_bytes = base64.b64encode(message_bytes) 157 | return base64_bytes.decode("ascii") 158 | 159 | 160 | def get_public_company_from_tenant(tenant_id: str) -> PublicCompany | None: 161 | company = None 162 | schema_translate_map = {"tenant": "public"} 163 | connectable = engine.execution_options(schema_translate_map=schema_translate_map) 164 | with Session(autocommit=False, autoflush=False, bind=connectable) as public_db: 165 | company = get_public_company_by_tenant_id(public_db, tenant_id) 166 | if not company: 167 | raise HTTPException(status_code=400, detail="Unknown Company!") 168 | 169 | return company 170 | -------------------------------------------------------------------------------- /app/crud/crud_events.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Sequence 2 | from uuid import UUID 3 | 4 | from sqlalchemy import distinct, func, select 5 | from sqlalchemy.orm import Session 6 | 7 | from app.models.models import Event, EventSummary 8 | 9 | 10 | def get_event_time_statistics_by_item(db: Session, item_uuid: UUID): 11 | return db.execute( 12 | select(EventSummary.action, func.sum(EventSummary.duration).label("time_duration")) 13 | .where(EventSummary.resource == "item") 14 | .where(EventSummary.resource_uuid == item_uuid) 15 | .group_by(EventSummary.action) 16 | ).all() 17 | 18 | 19 | def get_event_time_statistics_by_issue(db: Session, issue_uuid: UUID): 20 | return db.execute( 21 | select(EventSummary.action, func.sum(EventSummary.duration).label("time_duration")) 22 | .where(EventSummary.issue_uuid == issue_uuid) 23 | .group_by(EventSummary.action) 24 | ).all() 25 | 26 | 27 | def get_statistics_by_issue_uuid_and_status(db: Session, issue_uuid: UUID, status: str) -> EventSummary | None: 28 | query = ( 29 | select(EventSummary) 30 | .where(EventSummary.issue_uuid == issue_uuid) 31 | .where(EventSummary.action == status) 32 | .where(EventSummary.date_to.is_(None)) 33 | ) 34 | return db.execute(query).scalar_one_or_none() 35 | 36 | 37 | def get_event_summary_by_resource_uuid_and_status( 38 | db: Session, resource: str, resource_uuid: UUID, status: str, internal_value: str | None = None 39 | ) -> EventSummary | None: 40 | 
query = ( 41 | select(EventSummary) 42 | .where(EventSummary.resource == resource) 43 | .where(EventSummary.resource_uuid == resource_uuid) 44 | .where(EventSummary.action == status) 45 | .where(EventSummary.date_to.is_(None)) 46 | ) 47 | 48 | if internal_value is not None: 49 | query = query.where(EventSummary.internal_value == internal_value) 50 | 51 | result = db.execute(query) # await db.execute(query) 52 | 53 | return result.scalar_one_or_none() 54 | 55 | 56 | def get_basic_summary_users_uuids(db: Session, resource: str, resource_uuid: UUID, action: str) -> list[UUID]: 57 | query = ( 58 | select(distinct(EventSummary.internal_value)) 59 | .where(EventSummary.resource == resource) 60 | .where(EventSummary.resource_uuid == resource_uuid) 61 | .where(EventSummary.action == action) 62 | ) 63 | 64 | result = db.execute(query) 65 | return result.scalars().all() 66 | 67 | 68 | def get_events_by_uuid_and_resource( 69 | db: Session, resource_uuid: UUID, action: str = None, date_from=None, date_to=None 70 | ) -> Sequence[Event]: 71 | # .where(Event.created_at > date_from) 72 | # .where(Event.created_at < date_to) 73 | 74 | query = select(Event).where(Event.resource_uuid == resource_uuid).where(Event.resource == "item") 75 | 76 | if action is not None: 77 | query = query.where(Event.action == action) 78 | 79 | result = db.execute(query) # await db.execute(query) 80 | events_with_date = result.scalars().all() 81 | 82 | return events_with_date 83 | 84 | 85 | def get_event_status_list(db: Session, resource: str, resource_uuid: UUID): 86 | query = select(Event.action).where(Event.resource_uuid == resource_uuid).where(Event.resource == resource) 87 | 88 | result = db.execute(query) # await db.execute(query) 89 | event_actions = result.scalars().all() 90 | 91 | return event_actions 92 | 93 | 94 | def get_events_for_issue_summary(db: Session, resource: str, resource_uuid: UUID): 95 | query = ( 96 | select( 97 | EventSummary.action, 98 | func.sum(EventSummary.duration).label("time_duration"), 99 | func.count(EventSummary.action).label("total"), 100 | ) 101 | .where(EventSummary.resource == resource) 102 | .where(EventSummary.resource_uuid == resource_uuid) 103 | .group_by(EventSummary.action) 104 | ) 105 | 106 | result = db.execute(query) # await db.execute(query) 107 | events_with_date = result.all() 108 | 109 | return events_with_date 110 | 111 | 112 | def get_events_user_issue_summary(db: Session, resource: str, resource_uuid: UUID, user_uuid: list[UUID]): 113 | query = ( 114 | select( 115 | EventSummary.internal_value, 116 | func.sum(EventSummary.duration).label("time_duration"), 117 | func.count(EventSummary.internal_value).label("total"), 118 | ) 119 | .where(EventSummary.action == "issueUserActivity") 120 | .where(EventSummary.resource == resource) 121 | .where(EventSummary.resource_uuid == resource_uuid) 122 | .where(EventSummary.internal_value.in_(user_uuid)) 123 | .group_by(EventSummary.internal_value) 124 | ) 125 | 126 | result = db.execute(query) # await db.execute(query) 127 | events_with_date = result.all() 128 | 129 | return events_with_date 130 | 131 | 132 | def get_events_by_thread(db: Session, resource_uuid: UUID, resource: str = None, date_from=None, date_to=None) -> Event: 133 | # .where(Event.created_at > date_from) 134 | # .where(Event.created_at < date_to) 135 | 136 | query = select(Event).where(Event.resource == resource) 137 | query = query.where(Event.resource_uuid == resource_uuid) 138 | 139 | events_with_date = db.execute(query).scalars().all() 140 | 141 | return 
events_with_date 142 | 143 | 144 | def create_event(db: Session, data: dict) -> Event: 145 | new_event = Event(**data) 146 | db.add(new_event) 147 | db.commit() 148 | db.refresh(new_event) 149 | 150 | return new_event 151 | 152 | 153 | def create_event_statistic(db: Session, data: dict) -> EventSummary: 154 | new_event_statistics = EventSummary(**data) 155 | db.add(new_event_statistics) 156 | db.commit() 157 | db.refresh(new_event_statistics) 158 | 159 | return new_event_statistics 160 | 161 | 162 | def update_event(db: Session, db_event: EventSummary, update_data: dict) -> EventSummary: 163 | for key, value in update_data.items(): 164 | setattr(db_event, key, value) 165 | 166 | db.add(db_event) 167 | db.commit() 168 | db.refresh(db_event) 169 | 170 | return db_event 171 | -------------------------------------------------------------------------------- /app/crud/crud_files.py: -------------------------------------------------------------------------------- 1 | from uuid import UUID 2 | 3 | from sqlalchemy import select 4 | from sqlalchemy.orm import Session 5 | from sqlalchemy.sql import func 6 | 7 | from app.models.models import File 8 | 9 | 10 | def get_files(db: Session) -> File: 11 | return db.execute(select(File).where(File.deleted_at.is_(None))).scalars().all() 12 | 13 | 14 | def get_file_by_uuid(db: Session, uuid: UUID) -> File | None: 15 | query = select(File).where(File.uuid == uuid).where(File.deleted_at.is_(None)) 16 | 17 | result = db.execute(query) 18 | 19 | return result.scalar_one_or_none() 20 | 21 | 22 | def get_file_by_id(db: Session, id: int) -> File: 23 | return db.execute(select(File).where(File.id == id).where(File.deleted_at.is_(None))).scalar_one() 24 | 25 | 26 | def get_orphaned_files(db: Session) -> list[UUID]: 27 | files_guides = db.execute(select(File.id).filter(File.guide.any())).scalars().all() 28 | files_items = db.execute(select(File.id).filter(File.item.any())).scalars().all() 29 | files_ideas = db.execute(select(File.id).filter(File.idea.any())).scalars().all() 30 | 31 | files_with_relations = list(set(files_guides + files_items + files_ideas)) 32 | 33 | files_without_relations = db.execute(select(File.uuid).where(File.id.not_in(files_with_relations))).scalars().all() 34 | # for f in files_guides: 35 | # print(f.id) 36 | # files_guides = db.execute(select(file_guide_rel)).scalars().all() 37 | # files_items = db.execute(select(file_item_rel)).scalars().all() 38 | # files_ideas = db.execute(select(file_idea_rel)).scalars().all() 39 | # 40 | return files_without_relations 41 | 42 | 43 | def get_files_size_in_db(db: Session) -> int: 44 | db_size = db.execute(select(func.sum(File.size))).scalar_one_or_none() 45 | if not db_size: 46 | return 0 47 | return db_size 48 | 49 | 50 | def create_file(db: Session, data: dict) -> File: 51 | new_file = File(**data) 52 | db.add(new_file) 53 | db.commit() 54 | db.refresh(new_file) 55 | 56 | return new_file 57 | -------------------------------------------------------------------------------- /app/crud/crud_groups.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Sequence 2 | from uuid import UUID 3 | 4 | from sqlalchemy import func, select, text 5 | from sqlalchemy.orm import Session, selectinload 6 | 7 | from app.models.models import UserGroup 8 | 9 | 10 | def get_user_groups(db: Session, search: str, sort_column: str, sort_order: str) -> Sequence[UserGroup]: 11 | query = select(UserGroup) 12 | 13 | search_filters = [] 14 | if search is not None: 15 | 
search_filters.append(UserGroup.name.ilike(f"%{search}%")) 16 | 17 | query = query.filter(*search_filters) 18 | 19 | query = query.order_by(text(f"{sort_column} {sort_order}")) 20 | 21 | result = db.execute(query) # await db.execute(query) 22 | 23 | return result.scalars().all() 24 | 25 | 26 | def get_user_group_by_uuid(db: Session, uuid: UUID) -> UserGroup: 27 | # return db.execute(select(UserGroup).where(UserGroup.uuid == uuid).options(selectinload("*"))).scalar_one_or_none() 28 | return db.execute(select(UserGroup).where(UserGroup.uuid == uuid).options(selectinload("*"))).scalar_one_or_none() 29 | 30 | 31 | def get_user_group_by_name(db: Session, name: str) -> UserGroup: 32 | return db.execute(select(UserGroup).where(func.lower(UserGroup.name) == name.lower())).scalar_one_or_none() 33 | 34 | 35 | def create_group_with_users(db: Session, data: dict) -> UserGroup: 36 | new_group = UserGroup(**data) 37 | db.add(new_group) 38 | db.commit() 39 | db.refresh(new_group) 40 | 41 | return new_group 42 | 43 | 44 | # def get_roles(db: Session): 45 | # return db.execute( 46 | # select(Role.uuid, Role.role_title, Role.role_description, Role.is_custom, func.count(User.id).label("count")) 47 | # .outerjoin(User, User.user_role_id == Role.id) 48 | # .group_by(Role.uuid, Role.role_title, Role.role_description, Role.is_custom) 49 | # .order_by(Role.is_custom) 50 | # ).all() 51 | 52 | 53 | # def get_permission_by_uuid(db: Session, uuid: UUID) -> Permission: 54 | # return db.execute(select(Permission).where(Permission.uuid == uuid)).scalar_one_or_none() 55 | 56 | 57 | def update_user_group(db: Session, db_group: UserGroup, update_data: dict) -> UserGroup: 58 | for key, value in update_data.items(): 59 | setattr(db_group, key, value) 60 | 61 | db.add(db_group) 62 | db.commit() 63 | db.refresh(db_group) 64 | 65 | return db_group 66 | -------------------------------------------------------------------------------- /app/crud/crud_guides.py: -------------------------------------------------------------------------------- 1 | from uuid import UUID 2 | 3 | from sqlalchemy import Select, or_, select, text 4 | from sqlalchemy.orm import Session 5 | 6 | from app.models.models import Guide, Item 7 | 8 | 9 | def get_guides(search: str, item_id: int, sort_column: str, sort_order: str) -> Select[tuple[Guide]]: 10 | query = select(Guide) 11 | 12 | search_filters = [] 13 | if search is not None: 14 | search_filters.append(Guide.name.ilike(f"%{search}%")) 15 | search_filters.append(Guide.text.ilike(f"%{search}%")) 16 | 17 | query = query.filter(or_(False, *search_filters)) 18 | 19 | if item_id is not None: 20 | query = query.filter(Guide.item.any(Item.id == item_id)) 21 | 22 | query = query.order_by(text(f"{sort_column} {sort_order}")) 23 | 24 | return query 25 | 26 | # result = db.execute(query) # await db.execute(query) 27 | # 28 | # return result.scalars().all() 29 | 30 | 31 | def get_guide_by_uuid(db: Session, uuid: UUID) -> Guide: 32 | return db.execute(select(Guide).where(Guide.uuid == uuid)).scalar_one_or_none() 33 | 34 | 35 | def create_guide(db: Session, data: dict) -> Guide: 36 | new_guide = Guide(**data) 37 | db.add(new_guide) 38 | db.commit() 39 | db.refresh(new_guide) 40 | 41 | return new_guide 42 | 43 | 44 | def update_guide(db: Session, db_guide: Guide, update_data: dict) -> Guide: 45 | for key, value in update_data.items(): 46 | setattr(db_guide, key, value) 47 | 48 | db.add(db_guide) 49 | db.commit() 50 | db.refresh(db_guide) 51 | 52 | return db_guide 53 | 
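Usage note: `get_guides` above returns an unexecuted SQLAlchemy `Select` rather than rows (the same pattern as `get_items` and `get_users` later in this listing), so the caller decides when to run it, typically after composing pagination onto it. A minimal sketch of such a caller, assuming a page/size convention that is not part of the original module (`list_guides_page` is an illustrative name); note that interpolating `sort_column`/`sort_order` through `text()` is only safe when both values are validated against a whitelist first:

    from sqlalchemy.orm import Session
    from app.crud.crud_guides import get_guides

    def list_guides_page(db: Session, page: int = 1, size: int = 20):
        # compose LIMIT/OFFSET onto the Select, then execute it
        query = get_guides(search="pump", item_id=None, sort_column="name", sort_order="ASC")
        return db.execute(query.limit(size).offset((page - 1) * size)).scalars().all()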
-------------------------------------------------------------------------------- /app/crud/crud_items.py: -------------------------------------------------------------------------------- 1 | from uuid import UUID 2 | 3 | from sqlalchemy import Select, or_, select, text 4 | from sqlalchemy.orm import Session 5 | 6 | from app.models.models import Item, User 7 | 8 | 9 | def get_items( 10 | sort_column: str, sort_order: str, search: str | None = None, user_id: int | None = None 11 | ) -> Select[tuple[Item]]: 12 | query = select(Item).where(Item.deleted_at.is_(None)) 13 | 14 | search_filters = [] 15 | if search is not None: 16 | search_filters.append(Item.name.ilike(f"%{search}%")) 17 | search_filters.append(Item.text.ilike(f"%{search}%")) 18 | 19 | query = query.filter(or_(False, *search_filters)) 20 | 21 | if user_id is not None: 22 | query = query.filter(Item.users_item.any(User.id == user_id)) 23 | 24 | query = query.order_by(text(f"{sort_column} {sort_order}")) 25 | return query 26 | # result = db.execute(query) # await db.execute(query) 27 | # 28 | # return result.scalars().all() 29 | 30 | 31 | def get_item_by_uuid(db: Session, uuid: UUID) -> Item | None: 32 | query = select(Item).where(Item.uuid == uuid) 33 | 34 | result = db.execute(query) # await db.execute(query) 35 | return result.scalar_one_or_none() 36 | 37 | 38 | def get_item_by_id(db: Session, id: int) -> Item: 39 | query = select(Item).where(Item.id == id) 40 | 41 | result = db.execute(query) # await db.execute(query) 42 | return result.scalar_one_or_none() 43 | 44 | 45 | def create_item(db: Session, data: dict) -> Item: 46 | new_item = Item(**data) 47 | db.add(new_item) 48 | db.commit() 49 | db.refresh(new_item) 50 | 51 | return new_item 52 | 53 | 54 | def update_item(db: Session, db_item: Item, update_data: dict) -> Item: 55 | for key, value in update_data.items(): 56 | setattr(db_item, key, value) 57 | 58 | db.add(db_item) 59 | db.commit() 60 | db.refresh(db_item) 61 | 62 | return db_item 63 | -------------------------------------------------------------------------------- /app/crud/crud_parts.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Sequence 2 | from uuid import UUID 3 | 4 | from sqlalchemy import not_, select 5 | from sqlalchemy.orm import Session 6 | 7 | from app.models.models import PartUsed 8 | 9 | 10 | def get_parts(db: Session, issue_id: UUID | None = None, is_hidden: bool | None = None) -> Sequence[PartUsed]: 11 | query = select(PartUsed).where(PartUsed.deleted_at.is_(None)) 12 | 13 | if is_hidden is True: 14 | query = query.where(not_(PartUsed.is_hidden.is_(True))) 15 | 16 | if issue_id is not None: 17 | query = query.filter(PartUsed.issue_id == issue_id) 18 | 19 | # query = query.order_by(text(f"{sort_column} {sort_order}")) 20 | 21 | result = db.execute(query) 22 | 23 | return result.scalars().all() 24 | 25 | 26 | def get_part_by_uuid(db: Session, uuid: UUID) -> PartUsed | None: 27 | query = select(PartUsed).where(PartUsed.uuid == uuid) 28 | 29 | result = db.execute(query) 30 | 31 | return result.scalar_one_or_none() 32 | 33 | 34 | # def get_part_by_name(db: Session, name: str) -> Tag: 35 | # query = select(Tag).where(Tag.name == name).where(Tag.deleted_at.is_(None)) 36 | 37 | # result = db.execute(query) 38 | 39 | # return result.scalar_one_or_none() 40 | 41 | 42 | # def get_parts_id_by_uuid(db: Session, uuid: list[UUID]) -> Tag: 43 | # query = select(Tag.id).filter(Tag.uuid.in_(uuid)) 44 | 45 | # result = db.execute(query) 46 | 47 | # 
return result.scalars().all() 48 | 49 | 50 | def create_part(db: Session, data: dict) -> PartUsed: 51 | new_part = PartUsed(**data) 52 | db.add(new_part) 53 | db.commit() 54 | db.refresh(new_part) 55 | 56 | return new_part 57 | 58 | 59 | def update_part(db: Session, db_part: PartUsed, update_data: dict) -> PartUsed: 60 | for key, value in update_data.items(): 61 | setattr(db_part, key, value) 62 | 63 | db.add(db_part) 64 | db.commit() 65 | db.refresh(db_part) 66 | 67 | return db_part 68 | -------------------------------------------------------------------------------- /app/crud/crud_permission.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Sequence 2 | from uuid import UUID 3 | 4 | from sqlalchemy import func, select, text 5 | from sqlalchemy.orm import Session, selectinload 6 | 7 | from app.models.models import Permission, Role, User 8 | 9 | 10 | def get_roles_summary(db: Session, search: str, all: bool, sortColumn: str, sortOrder: str): 11 | query = ( 12 | select( 13 | Role.uuid, 14 | Role.role_title, 15 | Role.role_description, 16 | Role.is_custom, 17 | func.count(User.id).label("count"), 18 | func.count(User.deleted_at).label("uncounted"), 19 | ) 20 | .outerjoin(User, User.user_role_id == Role.id) 21 | .where(Role.deleted_at.is_(None)) 22 | .group_by(Role.uuid, Role.role_title, Role.role_description, Role.is_custom) 23 | .order_by(text(f"{sortColumn} {sortOrder}")) 24 | ) 25 | 26 | all_filters = [] 27 | 28 | if search is not None: 29 | all_filters.append(Role.role_title.ilike(f"%{search}%")) 30 | query = query.filter(*all_filters) 31 | 32 | if (all is not None) and (all is False): 33 | query = query.where(Role.is_system == False) # noqa: E712 34 | 35 | result = db.execute(query) # await db.execute(query) 36 | 37 | return result.all() 38 | 39 | 40 | def get_role_by_uuid(db: Session, uuid: UUID) -> Role | None: 41 | query = select(Role).where(Role.uuid == uuid).options(selectinload("*")) 42 | 43 | result = db.execute(query) 44 | 45 | return result.scalar_one_or_none() 46 | 47 | 48 | def get_permission_by_uuid(db: Session, uuid: UUID) -> Permission | None: 49 | query = select(Permission).where(Permission.uuid == uuid) 50 | 51 | result = db.execute(query) 52 | 53 | return result.scalar_one_or_none() 54 | 55 | 56 | def get_role_by_name(db: Session, name: str) -> Role | None: 57 | query = select(Role).where(func.lower(Role.role_title) == name.lower()) 58 | 59 | result = db.execute(query) 60 | 61 | return result.scalar_one_or_none() 62 | 63 | 64 | def get_permissions(db: Session) -> Sequence[Permission]: 65 | query = select(Permission).order_by(Permission.group.asc(), Permission.id.asc()) 66 | result = db.execute(query) 67 | return result.scalars().all() 68 | 69 | 70 | def create_role_with_permissions(db: Session, data: dict) -> Role: 71 | new_role = Role(**data) 72 | db.add(new_role) 73 | db.commit() 74 | db.refresh(new_role) 75 | 76 | return new_role 77 | 78 | 79 | def update_role(db: Session, db_role: Role, update_data: dict) -> Role: 80 | for key, value in update_data.items(): 81 | setattr(db_role, key, value) 82 | 83 | db.add(db_role) 84 | db.commit() 85 | db.refresh(db_role) 86 | 87 | return db_role 88 | -------------------------------------------------------------------------------- /app/crud/crud_qr.py: -------------------------------------------------------------------------------- 1 | import random 2 | from uuid import UUID 3 | 4 | from sqlalchemy import select 5 | from sqlalchemy.orm import Session 6 | 7 | from 
app.models.models import QrCode 8 | from app.models.shared_models import PublicCompany 9 | 10 | 11 | def get_entity_by_qr_code(db: Session, qr_code_id: str) -> QrCode: 12 | return db.execute(select(QrCode).where(QrCode.qr_code_id == qr_code_id)).scalar_one_or_none() 13 | 14 | 15 | def get_qr_code_by_resource_uuid(db: Session, resource_uuid: UUID) -> QrCode: 16 | return db.execute(select(QrCode).where(QrCode.resource_uuid == resource_uuid)).scalar_one_or_none() 17 | 18 | 19 | def create_qr_code(db: Session, data: dict) -> QrCode: 20 | new_qr_code = QrCode(**data) 21 | db.add(new_qr_code) 22 | db.commit() 23 | db.refresh(new_qr_code) 24 | 25 | return new_qr_code 26 | 27 | 28 | def generate_custom_unique_id(allowed_chars: str, company_ids): 29 | proposed_id = "".join(random.choice(allowed_chars) for _x in range(3)) 30 | while proposed_id in company_ids: 31 | proposed_id = "".join(random.choice(allowed_chars) for _x in range(3)) 32 | return proposed_id 33 | 34 | 35 | def add_noise_to_qr(qr_code: str) -> str: 36 | noise = ["2", "3", "4", "5", "6", "7", "8", "9"] 37 | return "".join(f"{x}{random.choice(noise) if random.randint(0, 1) else ''}" for x in qr_code) 38 | 39 | 40 | def generate_company_qr_id(db: Session) -> str: 41 | company_ids = db.execute(select(PublicCompany.qr_id)).scalars().all() 42 | allowed_chars = "abcdefghijkmnopqrstuvwxyz" # ABCDEFGHJKLMNPRSTUVWXYZ23456789 43 | return generate_custom_unique_id(allowed_chars, company_ids) 44 | 45 | 46 | def generate_item_qr_id(db: Session) -> str: 47 | items_ids = db.execute(select(QrCode.qr_code_id)).scalars().all() 48 | allowed_chars = "abcdefghijkmnopqrstuvwxyz23456789" # ABCDEFGHJKLMNPRSTUVWXYZ23456789 49 | return generate_custom_unique_id(allowed_chars, items_ids) 50 | -------------------------------------------------------------------------------- /app/crud/crud_settings.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Sequence 2 | 3 | from sqlalchemy import select 4 | from sqlalchemy.orm import Session 5 | 6 | from app.models.models import SettingNotification, SettingUser, User 7 | 8 | 9 | # GENERAL 10 | def get_general_settings_by_names(db: Session, user_id: int, names: list[str]) -> Sequence[SettingUser]: 11 | query = select(SettingUser).where(SettingUser.user_id == user_id).where(SettingUser.name.in_(names)) 12 | 13 | result = db.execute(query) 14 | 15 | return result.scalars().all() 16 | 17 | 18 | def get_user_general_setting_by_name(db: Session, user_id: int, name: str) -> SettingUser: 19 | query = select(SettingUser).where(SettingUser.user_id == user_id).where(SettingUser.name == name) 20 | 21 | result = db.execute(query) 22 | 23 | return result.scalar_one_or_none() 24 | 25 | 26 | def create_user_setting(db: Session, data: dict) -> SettingUser: 27 | new_setting = SettingUser(**data) 28 | db.add(new_setting) 29 | db.commit() 30 | db.refresh(new_setting) 31 | 32 | return new_setting 33 | 34 | 35 | def update_user_setting(db: Session, db_setting: SettingUser, update_data: dict) -> SettingUser: 36 | for key, value in update_data.items(): 37 | setattr(db_setting, key, value) 38 | 39 | db.add(db_setting) 40 | db.commit() 41 | db.refresh(db_setting) 42 | 43 | return db_setting 44 | 45 | 46 | # NOTIFICATIONS 47 | def get_notification_settings_by_user_id(db: Session, user_id: int) -> SettingNotification: 48 | query = select(SettingNotification).where(SettingNotification.user_id == user_id) 49 | 50 | result = db.execute(query) 51 | 52 | return result.scalar_one_or_none() 
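Usage note: the general-settings helpers above are read/create/update primitives; a caller typically combines them into a get-or-create upsert. A minimal sketch, assuming a setting row is keyed by `(user_id, name)`; `set_user_setting` is an illustrative name, not part of the original module:

    def set_user_setting(db: Session, user_id: int, name: str, value: str) -> SettingUser:
        # update the existing row if present, otherwise create one
        db_setting = get_user_general_setting_by_name(db, user_id, name)
        if db_setting is None:
            return create_user_setting(db, {"user_id": user_id, "name": name, "value": value})
        return update_user_setting(db, db_setting, {"value": value})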
53 | 
54 | 
55 | def get_users_for_sms_notification(db: Session, notification_level: str):
56 |     query = (
57 |         select(User.phone, SettingNotification.sms_notification_level)
58 |         .where(SettingNotification.sms_notification_level == notification_level)
59 |         .outerjoin(User, User.id == SettingNotification.user_id)
60 |     )
61 | 
62 |     result = db.execute(query)
63 | 
64 |     return result.all()
65 | 
66 | 
67 | # def get_users_for_email_notification(db: Session, notification_level: str):
68 | #     query = (
69 | #         select(User.email, SettingNotification.email_notification_level)
70 | #         .where(SettingNotification.email_notification_level == notification_level)
71 | #         .outerjoin(User, User.id == SettingNotification.user_id)
72 | #     )
73 | #
74 | #     result = db.execute(query)
75 | #
76 | #     return result.all()
77 | 
78 | 
79 | def get_users_list_for_email_notification(
80 |     db: Session, notification_level: str, user_id: int | None = None
81 | ) -> Sequence:  # returns Rows of (id, email, first_name, last_name), not ORM User objects
82 |     query = (
83 |         select(User.id, User.email, User.first_name, User.last_name)
84 |         .select_from(SettingNotification)
85 |         .where(SettingNotification.email_notification_level == notification_level)
86 |         .outerjoin(User, User.id == SettingNotification.user_id)
87 |     )
88 | 
89 |     if user_id:
90 |         query = query.where(User.id == user_id)
91 | 
92 |     result = db.execute(query)
93 | 
94 |     return result.all()
95 | 
96 | 
97 | def create_notification_setting(db: Session, data: dict) -> SettingNotification:
98 |     new_setting_notification = SettingNotification(**data)
99 |     db.add(new_setting_notification)
100 |     db.commit()
101 |     db.refresh(new_setting_notification)
102 | 
103 |     return new_setting_notification
104 | 
105 | 
106 | def update_notification_setting(db: Session, db_setting: SettingNotification, update_data: dict) -> SettingNotification:
107 |     for key, value in update_data.items():
108 |         setattr(db_setting, key, value)
109 | 
110 |     db.add(db_setting)
111 |     db.commit()
112 |     db.refresh(db_setting)
113 | 
114 |     return db_setting
115 | 
--------------------------------------------------------------------------------
/app/crud/crud_statistics.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, time, timezone
2 | 
3 | from sqlalchemy import func, select
4 | from sqlalchemy.orm import Session
5 | 
6 | from app.models.models import Event, Issue, Item, User
7 | 
8 | 
9 | def get_issues_counter_summary(db: Session):
10 |     # TODO: for today only
11 | 
12 |     date_from = datetime.combine(datetime.now(timezone.utc), time.min)
13 | 
14 |     query = select(Issue.status, func.count(Issue.status)).group_by(Issue.status)
15 |     query = query.filter(func.DATE(Issue.created_at) >= date_from)
16 | 
17 |     result = db.execute(query)  # await db.execute(query)
18 |     return result.all()
19 | 
20 | 
21 | def get_issues_counter_by_status(db: Session, status: list):
22 |     query = (
23 |         select(Issue.author_id, func.count(Issue.author_id)).where(Issue.status.in_(status)).group_by(Issue.author_id)
24 |     )
25 | 
26 |     result = db.execute(query)  # await db.execute(query)
27 |     return result.all()
28 | 
29 | 
30 | def get_items_counter_summary(db: Session):
31 |     query = select(Item.author_id, func.count(Item.author_id)).group_by(Item.author_id)
32 |     result = db.execute(query)  # await db.execute(query)
33 |     return result.all()
34 | 
35 | 
36 | def get_favourites_counter_summary(db: Session, user_id: int):
37 |     query = select(func.count(Item.id)).filter(Item.users_item.any(User.id == user_id))
38 | 
39 |     result = db.execute(query)  # await db.execute(query)
40 | 
return result.scalar_one_or_none() 41 | 42 | 43 | def get_events(db: Session): 44 | query = select(Event.id, Event.action, Event.author_id) 45 | 46 | result = db.execute(query) # await db.execute(query) 47 | 48 | return result.all() 49 | -------------------------------------------------------------------------------- /app/crud/crud_tags.py: -------------------------------------------------------------------------------- 1 | from uuid import UUID 2 | 3 | from sqlalchemy import not_, select, text 4 | from sqlalchemy.orm import Session 5 | 6 | from app.models.models import Tag 7 | 8 | 9 | def get_tags(db: Session, sort_column: str, sort_order: str, is_hidden: bool | None = None) -> Tag: 10 | query = select(Tag).where(Tag.deleted_at.is_(None)) 11 | 12 | if is_hidden is True: 13 | query = query.where(not_(Tag.is_hidden.is_(True))) 14 | 15 | query = query.order_by(text(f"{sort_column} {sort_order}")) 16 | 17 | result = db.execute(query) 18 | 19 | return result.scalars().all() 20 | 21 | 22 | def get_tag_by_uuid(db: Session, uuid: UUID) -> Tag | None: 23 | query = select(Tag).where(Tag.uuid == uuid) 24 | 25 | result = db.execute(query) 26 | 27 | return result.scalar_one_or_none() 28 | 29 | 30 | def get_tag_by_name(db: Session, name: str) -> Tag | None: 31 | query = select(Tag).where(Tag.name == name).where(Tag.deleted_at.is_(None)) 32 | 33 | result = db.execute(query) 34 | 35 | return result.scalar_one_or_none() 36 | 37 | 38 | def get_tags_id_by_uuid(db: Session, uuid: list[UUID]) -> Tag: 39 | query = select(Tag.id).filter(Tag.uuid.in_(uuid)) 40 | 41 | result = db.execute(query) 42 | 43 | return result.scalars().all() 44 | 45 | 46 | def create_tag(db: Session, data: dict) -> Tag: 47 | new_tag = Tag(**data) 48 | db.add(new_tag) 49 | db.commit() 50 | db.refresh(new_tag) 51 | 52 | return new_tag 53 | 54 | 55 | def update_tag(db: Session, db_tag: Tag, update_data: dict) -> Tag: 56 | for key, value in update_data.items(): 57 | setattr(db_tag, key, value) 58 | 59 | db.add(db_tag) 60 | db.commit() 61 | db.refresh(db_tag) 62 | 63 | return db_tag 64 | -------------------------------------------------------------------------------- /app/crud/crud_users.py: -------------------------------------------------------------------------------- 1 | from uuid import UUID 2 | 3 | from pydantic import EmailStr 4 | from sqlalchemy import Select, func, select, text 5 | from sqlalchemy.orm import Session 6 | 7 | from app.models.models import User 8 | 9 | 10 | def get_users(sort_column: str, sort_order: str, search: str | None = None) -> Select[tuple[User]]: 11 | query = ( 12 | select(User) 13 | .where(User.deleted_at.is_(None)) 14 | .where(User.is_visible.is_(True)) 15 | .order_by(text(f"{sort_column} {sort_order}")) 16 | ) 17 | 18 | all_filters = [] 19 | if search is not None: 20 | all_filters.append(func.concat(User.first_name, " ", User.last_name).ilike(f"%{search}%")) 21 | 22 | query = query.filter(*all_filters) 23 | 24 | return query 25 | # result = db.execute(query) # await db.execute(query) 26 | # 27 | # return result.scalars().all() 28 | 29 | 30 | def get_user_by_uuid(db: Session, uuid: UUID) -> User: 31 | query = select(User).where(User.uuid == uuid) 32 | 33 | result = db.execute(query) 34 | 35 | return result.scalar_one_or_none() 36 | 37 | 38 | def get_users_by_role_id(db: Session, id: int): 39 | query = ( 40 | select(User.uuid, User.first_name, User.last_name) 41 | .where(User.user_role_id == id) 42 | .where(User.deleted_at.is_(None)) 43 | ) 44 | 45 | result = db.execute(query) 46 | 47 | return result.all() 48 
| 49 | 50 | def get_user_by_id(db: Session, id: int) -> User: 51 | query = select(User).where(User.id == id) 52 | 53 | result = db.execute(query) 54 | 55 | return result.scalar_one_or_none() 56 | 57 | 58 | def get_user_by_email(db: Session, email: EmailStr) -> User: 59 | query = select(User).where(User.email == email).where(User.deleted_at.is_(None)) 60 | 61 | result = db.execute(query) 62 | 63 | return result.scalar_one_or_none() 64 | 65 | 66 | def get_user_count(db: Session, user_id: int | None = None) -> int: 67 | query = ( 68 | select(func.count(User.id)) 69 | .where(User.deleted_at.is_(None)) 70 | .where(User.is_verified.is_(True)) 71 | .where(User.is_visible.is_(True)) 72 | ) 73 | 74 | if user_id: 75 | query = query.where(User.id != user_id) 76 | 77 | result = db.execute(query) # await db.execute(query) 78 | return result.scalar_one_or_none() 79 | 80 | 81 | def create_user(db: Session, data: dict) -> User: 82 | new_user = User(**data) 83 | db.add(new_user) 84 | db.commit() 85 | db.refresh(new_user) 86 | 87 | return new_user 88 | 89 | 90 | def bulk_insert(db: Session, data: dict) -> bool: 91 | db.bulk_insert_mappings(User, data) 92 | db.commit() 93 | 94 | return True 95 | 96 | 97 | def update_user(db: Session, db_user: User, update_data: dict) -> User: 98 | for key, value in update_data.items(): 99 | setattr(db_user, key, value) 100 | 101 | db.add(db_user) 102 | db.commit() 103 | db.refresh(db_user) 104 | 105 | return db_user 106 | -------------------------------------------------------------------------------- /app/db.py: -------------------------------------------------------------------------------- 1 | import time 2 | from contextlib import contextmanager 3 | from functools import lru_cache 4 | from typing import Annotated 5 | 6 | import sqlalchemy as sa 7 | from fastapi import Depends, Request 8 | from loguru import logger 9 | from sqlalchemy import create_engine, event, select 10 | from sqlalchemy.engine import Engine 11 | from sqlalchemy.orm import Session, declarative_base 12 | 13 | from app.config import get_settings 14 | from app.models.shared_models import PublicCompany 15 | 16 | settings = get_settings() 17 | 18 | DEFAULT_DB_USER = settings.DEFAULT_DATABASE_USER 19 | DEFAULT_DB_PASS = settings.DEFAULT_DATABASE_PASSWORD 20 | DEFAULT_DB_HOST = settings.DEFAULT_DATABASE_HOSTNAME 21 | DEFAULT_DB_PORT = settings.DEFAULT_DATABASE_PORT 22 | DEFAULT_DB = settings.DEFAULT_DATABASE_DB 23 | # SQLALCHEMY_DB_URL = settings.DEFAULT_SQLALCHEMY_DATABASE_URI 24 | 25 | 26 | # SQLALCHEMY_DB_URL = PostgresDsn.build( 27 | # scheme="postgresql", 28 | # user=settings.DEFAULT_DATABASE_USER, 29 | # password=settings.DEFAULT_DATABASE_PASSWORD, 30 | # host=settings.DEFAULT_DATABASE_HOSTNAME, 31 | # port=5432, 32 | # path=settings.DEFAULT_DATABASE_DB, 33 | # ) 34 | 35 | sql_performance_monitoring = False 36 | if sql_performance_monitoring is True: 37 | 38 | @event.listens_for(Engine, "before_cursor_execute") 39 | def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): 40 | conn.info.setdefault("query_start_time", []).append(time.time()) 41 | logger.debug("Start Query:") 42 | logger.debug("%s" % statement) 43 | 44 | @event.listens_for(Engine, "after_cursor_execute") 45 | def after_cursor_execute(conn, cursor, statement, parameters, context, executemany): 46 | total = time.time() - conn.info["query_start_time"].pop(-1) 47 | logger.debug("Query Complete!") 48 | logger.debug("Total Time: %f" % total) 49 | 50 | 51 | SQLALCHEMY_DB_URL = 
f"postgresql+psycopg://{DEFAULT_DB_USER}:{DEFAULT_DB_PASS}@{DEFAULT_DB_HOST}:{DEFAULT_DB_PORT}/{DEFAULT_DB}"
52 | echo = False
53 | 
54 | if settings.ENVIRONMENT != "PRD":
55 |     print(SQLALCHEMY_DB_URL)
56 |     echo = False
57 | 
58 | # TODO: https://bitestreams.com/nl/blog/fastapi_sqlalchemy/
59 | engine = create_engine(SQLALCHEMY_DB_URL, echo=echo, pool_pre_ping=True, pool_recycle=280)
60 | 
61 | # print(SQLALCHEMY_DB_URL)
62 | 
63 | metadata = sa.MetaData(schema="tenant")
64 | Base = declarative_base(metadata=metadata)
65 | 
66 | 
67 | class TenantNotFoundError(Exception):
68 |     def __init__(self, tenant_name):
69 |         self.message = "Tenant %s not found!" % str(tenant_name)
70 |         super().__init__(self.message)
71 | 
72 | 
73 | @lru_cache  # NOTE: keyed by the Request object, so this never hits across requests
74 | def get_tenant(request: Request) -> PublicCompany | None:
75 |     try:
76 |         # host_without_port = request.headers["host"].split(":", 1)[0]  # based on domain: __abc__.domain.com
77 |         host_without_port = request.headers.get("tenant")  # based on tenant header: abc
78 | 
79 |         if host_without_port is None:
80 |             return None
81 | 
82 |         with with_db(None) as db:
83 |             query = select(PublicCompany).where(PublicCompany.tenant_id == host_without_port)
84 | 
85 |             result = db.execute(query)
86 |             tenant = result.scalar_one_or_none()
87 | 
88 |             if tenant is None:
89 |                 # raise TenantNotFoundError(host_without_port)
90 |                 return None
91 |         return tenant
92 |     except Exception as e:
93 |         print(e)
94 |         return None
95 | 
96 | 
97 | def get_db(tenant: Annotated[PublicCompany, Depends(get_tenant)]):
98 |     if tenant is None:
99 |         yield None
100 |         return
101 | 
102 |     with with_db(tenant.tenant_id) as db:
103 |         yield db
104 | 
105 | 
106 | def get_public_db():
107 |     with with_db("public") as db:
108 |         yield db
109 | # --------------------
110 | 
111 | 
112 | @contextmanager
113 | def with_db(tenant_schema: str | None):
114 |     if tenant_schema:
115 |         schema_translate_map = {"tenant": tenant_schema}
116 |     else:
117 |         schema_translate_map = None
118 | 
119 |     connectable = engine.execution_options(schema_translate_map=schema_translate_map)
120 |     db = Session(autocommit=False, autoflush=False, bind=connectable)
121 |     try:
122 |         yield db
123 |     except Exception as e:
124 |         logger.error(e)
125 |         print(f"ERROR: {tenant_schema}")
126 |         raise
127 |     finally:
128 |         db.close()
--------------------------------------------------------------------------------
/app/example.env:
--------------------------------------------------------------------------------
1 | #APP
2 | APP_ENV=local
3 | APP_DEBUG=true
4 | 
5 | # AWS
6 | AWS_ACCESS_KEY_ID=
7 | AWS_SECRET_ACCESS_KEY=
8 | AWS_DEFAULT_REGION=
9 | 
10 | 
11 | AWS_S3_ACCESS_KEY_ID=
12 | AWS_S3_SECRET_ACCESS_KEY=
13 | 
14 | AWS_S3_DEFAULT_REGION=
15 | AWS_S3_BUCKET=
16 | 
17 | 
18 | #DB
19 | DB_HOST=
20 | DB_PORT=5432
21 | DB_DATABASE=
22 | DB_USERNAME=
23 | DB_PASSWORD=
24 | 
25 | #SENTRY
26 | SENTRY_DSN=
27 | 
28 | #EMAIL_LABS
29 | EMAIL_LABS_APP_KEY=
30 | EMAIL_LABS_SECRET_KEY=
31 | EMAIL_LABS_SENDER=sender@gmail.com
32 | EMAIL_DEV=receiver_dev86@gmail.com
--------------------------------------------------------------------------------
/app/models/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/models/__init__.py
--------------------------------------------------------------------------------
/app/models/shared_models.py:
--------------------------------------------------------------------------------
1 | from uuid import uuid4
2 | 
3 | import sqlalchemy as sa
4 | from sqlalchemy.dialects.postgresql import UUID
5 | from sqlalchemy.orm import declarative_base
6 | 
7 | metadata = sa.MetaData(schema="shared")
8 | Base = declarative_base(metadata=metadata)
9 | 
10 | 
11 | class Tenant(Base):
12 |     __tablename__ = "tenants"
13 |     id = sa.Column(sa.Integer(), sa.Identity(), primary_key=True, autoincrement=True, nullable=False)
14 |     uuid = sa.Column(UUID(as_uuid=True), default=uuid4, unique=True)  # callable default, so each row gets a fresh UUID
15 |     name = sa.Column("name", sa.String(128), nullable=True)
16 |     schema = sa.Column(sa.String(128), nullable=True)
17 |     schema_header_id = sa.Column("schema_header_id", sa.String(128), nullable=True)
18 | 
19 |     __table_args__ = {"schema": "public"}
20 | 
21 | 
22 | class PublicUser(Base):
23 |     __tablename__ = "public_users"
24 |     id = sa.Column(sa.INTEGER(), sa.Identity(), primary_key=True, autoincrement=True, nullable=False)
25 |     uuid = sa.Column(UUID(as_uuid=True), autoincrement=False, nullable=True)
26 |     first_name = sa.Column(sa.VARCHAR(length=100), autoincrement=False, nullable=True)
27 |     last_name = sa.Column(sa.VARCHAR(length=100), autoincrement=False, nullable=True)
28 |     email = sa.Column(sa.VARCHAR(length=256), autoincrement=False, nullable=True, unique=True)
29 |     password = sa.Column(sa.VARCHAR(length=256), autoincrement=False, nullable=True, unique=True)
30 |     service_token = sa.Column(sa.VARCHAR(length=100), autoincrement=False, nullable=True, unique=True)
31 |     service_token_valid_to = sa.Column(sa.TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
32 |     is_active = sa.Column(sa.BOOLEAN(), autoincrement=False, nullable=True)
33 |     is_verified = sa.Column(sa.BOOLEAN(), autoincrement=False, nullable=True)
34 |     tos = sa.Column(sa.BOOLEAN(), autoincrement=False, nullable=True)
35 |     tenant_id = sa.Column(sa.VARCHAR(length=256), autoincrement=False, nullable=True)
36 |     tz = sa.Column(sa.VARCHAR(length=64), autoincrement=False, nullable=True, unique=True)  # NOTE: unique=True on tz/lang (and password above) looks unintended
37 |     lang = sa.Column(sa.VARCHAR(length=8), autoincrement=False, nullable=True, unique=True)
38 |     created_at = sa.Column(sa.TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
39 |     updated_at = sa.Column(sa.TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
40 |     __table_args__ = {"schema": "public"}
41 | 
42 | 
43 | class PublicCompany(Base):
44 |     __tablename__ = "public_companies"
45 |     id = sa.Column(sa.INTEGER(), sa.Identity(), primary_key=True, autoincrement=True, nullable=False)
46 |     uuid = sa.Column(UUID(as_uuid=True), autoincrement=False, nullable=True)
47 |     name = sa.Column(sa.VARCHAR(length=256), autoincrement=False, nullable=True, unique=True)
48 |     short_name = sa.Column(sa.VARCHAR(length=256), autoincrement=False, nullable=True, unique=True)
49 |     nip = sa.Column(sa.VARCHAR(length=16), autoincrement=False, nullable=True, unique=True)
50 |     country = sa.Column(sa.VARCHAR(length=128), autoincrement=False, nullable=True, unique=True)
51 |     city = sa.Column(sa.VARCHAR(length=128), autoincrement=False, nullable=True, unique=True)
52 |     tenant_id = sa.Column(sa.VARCHAR(length=64), autoincrement=False, nullable=True)
53 |     qr_id = sa.Column(sa.VARCHAR(length=32), autoincrement=False, nullable=True, unique=True)
54 |     created_at = sa.Column(sa.TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
55 |     updated_at = sa.Column(sa.TIMESTAMP(timezone=True), autoincrement=False, nullable=True)
56 |     __table_args__ = {"schema": "public"}
57 | 
--------------------------------------------------------------------------------
/app/schemas/__init__.py:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/schemas/__init__.py -------------------------------------------------------------------------------- /app/schemas/schemas.py: -------------------------------------------------------------------------------- 1 | # class StandardResponse(BaseModel): # OK 2 | # ok: bool 3 | 4 | 5 | # class PubliCompanyAdd(BaseModel): # OK 6 | # name: str 7 | # short_name: str 8 | # nip: str 9 | # country: str 10 | # city: str 11 | 12 | 13 | # class RoleBase(BaseModel): 14 | # __tablename__ = "roles" 15 | # id: int | None 16 | # uuid: UUID 17 | # account_id: int 18 | # role_name: str 19 | # role_description: str 20 | # hidden: bool 21 | # created_at: datetime | None 22 | # updated_at: datetime | None 23 | # deleted_at: datetime | None 24 | # 25 | # # permission: List["Permissions"] = Relationship(back_populates="role", link_model=RolePermissionLink) # hasMany 26 | # # users_FK: List["Users"] = Relationship(back_populates="role_FK") # hasOne 27 | # users_FK: List["UserBase"] 28 | # 29 | # class Config: 30 | # orm_mode = True 31 | # 32 | # 33 | # class PermissionsMini(BaseModel): 34 | # name: str 35 | # 36 | # class Config: 37 | # orm_mode = True 38 | # 39 | # 40 | # class PermissionsFull(BaseModel): 41 | # uuid: UUID 42 | # name: str 43 | # title: str 44 | # description: str 45 | # group: str 46 | 47 | # class Config: 48 | # orm_mode = True 49 | 50 | 51 | # class RoleBasic(BaseModel): 52 | # role_name: str 53 | # permission: List[PermissionsMini] 54 | 55 | # class Config: 56 | # orm_mode = True 57 | 58 | # 59 | # class RolePermissionFull(BaseModel): 60 | # role_name: str 61 | # role_description: str 62 | # role_title: str 63 | # is_custom: bool 64 | # permission: List[PermissionsFull] 65 | # 66 | # class Config: 67 | # orm_mode = True 68 | # 69 | # 70 | # class UserBase(BaseModel): 71 | # __tablename__ = "users" 72 | # id: int 73 | # account_id: int 74 | # password: str 75 | # email: EmailStr | None 76 | # phone: str | None 77 | # first_name: str | None 78 | # last_name: str | None 79 | # auth_token: str | None 80 | # auth_token_valid_to: datetime | None 81 | # is_active: bool 82 | # is_verified: bool 83 | # service_token: str | None 84 | # service_token_valid_to: datetime | None 85 | # tos: bool 86 | # # user_role_id: int = Field(default=None, foreign_key="roles.id") 87 | # # user_info_id:int | None = Field(default=None, foreign_key="users_info.id") 88 | # tz: str 89 | # lang: str 90 | # deleted_at: datetime | None 91 | # created_at: datetime 92 | # updated_at: datetime | None 93 | # uuid: UUID 94 | # 95 | # role_FK: RoleBase 96 | # 97 | # # usr_FK: List["Tasks"] = Relationship(back_populates="assignee") 98 | # # role_FK: Optional["Roles"] = Relationship(back_populates="users_FK") # hasOne 99 | # class Config: 100 | # orm_mode = True 101 | # 102 | # 103 | # class GroupAdd(BaseModel): 104 | # name: str 105 | # description: str 106 | # # users: List[UserBase] 107 | # 108 | # class Config: 109 | # orm_mode = True 110 | # 111 | # 112 | # class UserLoginIn(BaseModel): # OK 113 | # email: EmailStr 114 | # password: str | None 115 | # permanent: bool 116 | # 117 | # class Config: 118 | # orm_mode = True 119 | 120 | 121 | # class UserLoginOut(BaseModel): # OK 122 | # auth_token: str 123 | # auth_token_valid_to: datetime 124 | # first_name: str 125 | # last_name: str 126 | # tz: str 127 | # lang: str 128 | # uuid: UUID 129 | # role_FK: 
RoleBasic
130 | #     tenant_id: str | None
131 | 
132 | #     class Config:
133 | #         orm_mode = True
134 | 
135 | 
136 | # class UserVerifyToken(BaseModel):  # OK
137 | #     auth_token_valid_to: datetime
138 | #     first_name: str
139 | #     last_name: str
140 | #     tz: str
141 | #     lang: str
142 | #     uuid: UUID
143 | #     role_FK: RoleBasic
144 | 
145 | #     class Config:
146 | #         orm_mode = True
147 | 
148 | 
149 | # class FileBasicInfo(BaseModel):
150 | #     uuid: UUID
151 | #     file_name: str
152 | #     extension: str
153 | #     mimetype: str
154 | #     size: int
155 | #     url: str | None
156 | 
157 | #     class Config:
158 | #         orm_mode = True
159 | 
160 | 
161 | # class IdeaIndexResponse(BaseModel):
162 | #     uuid: UUID
163 | #     color: str
164 | #     name: str
165 | #     text: str
166 | #     text_json: Json | None
167 | #     upvotes: int | None
168 | #     downvotes: int | None
169 | #     status: str | None
170 | #     created_at: datetime
171 | #     files_idea: List[FileBasicInfo] | None
172 | 
173 | #     class Config:
174 | #         orm_mode = True
175 | 
176 | 
177 | # class BasicItems(BaseModel):
178 | #     uuid: UUID
179 | #     name: str | None
180 | 
181 | #     class Config:
182 | #         orm_mode = True
183 | 
184 | 
185 | # class GuideIndexResponse(BaseModel):
186 | #     uuid: UUID
187 | #     name: str | None
188 | #     text: str | None
189 | #     text_json: dict | None
190 | #     video_id: str | None
191 | #     video_json: dict | None
192 | #     files_guide: List[FileBasicInfo] | None
193 | #     item: List[BasicItems] | None
194 | 
195 | #     class Config:
196 | #         orm_mode = True
197 | 
198 | 
199 | # class SettingBase(BaseModel):
200 | #     __tablename__ = "users"
201 | #     id: int
202 | #     name: str
203 | #     value: str
204 | #     value_type: str
205 | #     prev_value: str
206 | #     description: str
207 | #     updated_by: datetime
208 | #     created_at: datetime
209 | #     updated_at: datetime
210 | 
211 | #     class Config:
212 | #         orm_mode = True
213 | 
--------------------------------------------------------------------------------
/app/service/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/service/__init__.py
--------------------------------------------------------------------------------
/app/service/auth.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | from uuid import UUID
4 | 
5 | import pytz
6 | from disposable_email_domains import blocklist
7 | from requests import request
8 | from stdnum.pl import nip
9 | 
10 | from app.config import get_settings
11 | 
12 | settings = get_settings()
13 | 
14 | 
15 | def is_email_temporary(email):
16 |     return email.strip().split("@")[1] in blocklist
17 | 
18 | 
19 | def is_timezone_correct(tz):
20 |     return tz in pytz.all_timezones_set
21 | 
22 | 
23 | def is_nip_correct(nip_id: str):
24 |     nip_id = re.sub("[^0-9]", "", nip_id)  # keep digits only before validating
25 |     return nip.is_valid(nip_id)
26 | 
27 | 
28 | def get_ip_info(ip: str):
29 |     if ip is None:
30 |         return None
31 |     url = f"http://ipinfo.io/{ip}?token={settings.api_ipinfo}"
32 |     response = request("GET", url, headers={}, data={})
33 | 
34 |     if response.status_code != 200:
35 |         return "Error code: " + str(response.status_code)
36 |     return json.dumps(response.json())
37 | 
38 | 
39 | def uuid_convert(o: UUID) -> str:
40 |     """Custom UUID converter for json.dumps(), because a UUID object is not JSON-serializable by default"""
41 |     if isinstance(o, UUID):
42 |         return o.hex
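Usage note: `uuid_convert` is meant to be passed as the `default=` hook of `json.dumps`, which the encoder calls only for objects it cannot serialize on its own. A minimal usage sketch (the payload values are illustrative):

    import json
    from uuid import uuid4

    payload = {"user_uuid": uuid4(), "action": "issueOpen"}
    print(json.dumps(payload, default=uuid_convert))  # the UUID is emitted as its 32-char hex string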
-------------------------------------------------------------------------------- /app/service/bearer_auth.py: -------------------------------------------------------------------------------- 1 | import base64 2 | from typing import Annotated 3 | 4 | import pendulum 5 | from fastapi import Depends, HTTPException 6 | from fastapi.security import HTTPBasicCredentials, HTTPBearer 7 | from sqlalchemy.orm import Session 8 | 9 | from app.config import get_settings 10 | from app.crud import crud_auth 11 | from app.db import get_db 12 | from app.models.models import User 13 | 14 | settings = get_settings() 15 | security = HTTPBearer() 16 | 17 | UserDB = Annotated[Session, Depends(get_db)] 18 | 19 | 20 | def is_base64(sb: str) -> bool: 21 | try: 22 | if isinstance(sb, str): 23 | # If there's any unicode here, an exception will be thrown and the function will return false 24 | decoded_token = base64.b64decode(sb).decode("utf-8") 25 | base64_token = crud_auth.generate_base64_token(decoded_token) 26 | # message_bytes = .encode("ascii") 27 | # base64_bytes = base64.b64encode(message_bytes) 28 | # base64_message = base64_bytes.decode("ascii") 29 | return base64_token == sb 30 | else: 31 | raise ValueError("Argument must be string") 32 | 33 | except Exception: 34 | return False 35 | 36 | 37 | def has_token(*, db: UserDB, credentials: Annotated[HTTPBasicCredentials, Depends(security)]) -> User: 38 | """ 39 | Function that is used to validate the token in the case that it requires it 40 | """ 41 | 42 | if db is None: 43 | raise HTTPException(status_code=401, detail="General DB Error, missing tenant?") 44 | 45 | token = credentials.credentials 46 | if token is None: 47 | raise HTTPException(status_code=401, detail="Missing auth token") 48 | 49 | db_user_data = crud_auth.get_tenant_user_by_auth_token(db, token) 50 | 51 | if db_user_data is not None: 52 | # user_id, account_id = db_user_data 53 | # return {"user_id": db_user_data.id} 54 | return db_user_data 55 | 56 | if is_base64(token) and (db_user_data is None): 57 | base64_message = token 58 | base64_bytes = base64_message.encode("ascii") 59 | message_bytes = base64.b64decode(base64_bytes) 60 | message = message_bytes.decode("ascii") 61 | 62 | tenant_id, date = message.split(".") # TODO: tenant_id 63 | 64 | dt = pendulum.from_format(date, "YYYY-MM-DD HH:mm:ss", tz="UTC") 65 | if dt.diff(pendulum.now("UTC")).in_seconds() < 1: 66 | raise HTTPException(status_code=401, detail="Anonymous token expired") 67 | 68 | return crud_auth.get_anonymous_user(db) 69 | 70 | raise HTTPException(status_code=401, detail="Incorrect auth token") 71 | 72 | 73 | def is_app_owner(credentials: Annotated[HTTPBasicCredentials, Depends(security)]): 74 | token = credentials.credentials 75 | if token is None: 76 | raise HTTPException(status_code=401, detail="Missing auth token") 77 | 78 | if token == "123": 79 | return True 80 | 81 | return False 82 | 83 | 84 | # def has_permission(*, session: Session = Depends(get_session), user_id, permission): 85 | # fields = [Users.id, Users.user_role_id, Users.account_id] 86 | # user_data = session.exec( 87 | # select(*fields).where(Users.id == user_id).where(Users.is_active == 1).where(Users.deleted_at == None) 88 | # ).one_or_none() 89 | # raise HTTPException(status_code=403, detail="Insufficient privileges") 90 | -------------------------------------------------------------------------------- /app/service/default_settings.py: -------------------------------------------------------------------------------- 1 | allowed_settings: dict = { 2 | 
"idea_registration_mode": None, 3 | "issue_registration_email": None, 4 | "dashboard_show_intro": True, 5 | } 6 | -------------------------------------------------------------------------------- /app/service/event.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from uuid import UUID, uuid4 3 | 4 | import pendulum 5 | from sqlalchemy.orm import Session 6 | 7 | from app.crud import crud_events 8 | from app.models.models import Event, EventSummary, Issue, User 9 | 10 | # OPEN (start) 11 | # REJECT (?) 12 | # IN-PROGRESS (resolve) 13 | # PAUSED (?) 14 | # RESOLVED (close) 15 | # UNDER REVIEW (?) 16 | # CLOSED (reopen) 17 | # REOPEN (start) 18 | 19 | # Status – where the issue is (for example, “In Progress” or “Under Review”) 20 | # Resolution – why the issue is no longer in flight (for example, because it’s completed) 21 | 22 | 23 | def create_new_basic_event( 24 | db: Session, 25 | author: User, 26 | issue: Issue, 27 | action: str, 28 | description: str | None = None, 29 | internal_value: str | None = None, 30 | ) -> Event: 31 | resource_id = None 32 | resource_uuid = None 33 | if issue is not None: 34 | resource_id = issue.id 35 | resource_uuid = issue.uuid 36 | 37 | event_data = { 38 | "uuid": str(uuid4()), 39 | "author_id": author.id, 40 | "resource": "issue", 41 | "resource_id": resource_id, 42 | "resource_uuid": resource_uuid, 43 | "action": action, 44 | "description": description, 45 | "internal_value": internal_value, 46 | "created_at": datetime.now(timezone.utc), 47 | } 48 | 49 | new_event = crud_events.create_event(db, event_data) 50 | return new_event 51 | 52 | 53 | def open_new_basic_summary( 54 | db: Session, resource: str, resource_uuid: UUID, action: str, internal_value: str | None = None 55 | ) -> EventSummary: 56 | event_statistic = { 57 | "uuid": str(uuid4()), 58 | "resource": resource, 59 | "resource_uuid": resource_uuid, 60 | "action": action, 61 | "internal_value": internal_value, 62 | "date_from": datetime.now(timezone.utc), 63 | "date_to": None, 64 | "duration": None, 65 | "created_at": datetime.now(timezone.utc), 66 | } 67 | new_event_statistics = crud_events.create_event_statistic(db, event_statistic) 68 | return new_event_statistics 69 | 70 | 71 | def close_new_basic_summary( 72 | db: Session, resource: str, resource_uuid: UUID, previous_event: str, internal_value: str | UUID | None = None 73 | ): 74 | event = crud_events.get_event_summary_by_resource_uuid_and_status( 75 | db, resource, resource_uuid, previous_event, internal_value 76 | ) 77 | 78 | if event is not None: 79 | dt = pendulum.parse(str(event.date_from)) 80 | time_diff = dt.diff(pendulum.now("UTC")).in_seconds() 81 | 82 | event_statistic_update = {"date_to": datetime.now(timezone.utc), "duration": time_diff} 83 | event = crud_events.update_event(db, event, event_statistic_update) 84 | 85 | return event 86 | -------------------------------------------------------------------------------- /app/service/health_check.py: -------------------------------------------------------------------------------- 1 | from loguru import logger 2 | 3 | from app.config import get_settings 4 | from app.db import engine 5 | from app.storage.aws_s3 import s3_client 6 | 7 | settings = get_settings() 8 | 9 | 10 | def test_db(): 11 | try: 12 | with engine.connect(): 13 | return {"db": "healthy"} 14 | except Exception as err: 15 | logger.exception(err) 16 | raise err 17 | 18 | 19 | def test_storage(): 20 | try: 21 | 
/app/service/health_check.py: -------------------------------------------------------------------------------- 1 | from loguru import logger 2 | 3 | from app.config import get_settings 4 | from app.db import engine 5 | from app.storage.aws_s3 import s3_client 6 | 7 | settings = get_settings() 8 | 9 | 10 | def test_db(): 11 | try: 12 | with engine.connect(): 13 | return {"db": "healthy"} 14 | except Exception as err: 15 | logger.exception(err) 16 | raise err 17 | 18 | 19 | def test_storage(): 20 | try: 21 | s3_client.head_bucket(Bucket=settings.s3_bucket_name) # response = 22 | # print("@@@@@@@@@@@@@", response) 23 | except Exception as err: 24 | logger.exception(err) # , exc_info=True 25 | raise err 26 | 27 | return {"storage": "healthy"} 28 | 29 | 30 | def run_healthcheck() -> dict[str, str]: 31 | test_db() 32 | test_storage() 33 | return {"status": "ALIVE"} 34 | -------------------------------------------------------------------------------- /app/service/helpers.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | 4 | def to_snake_case(value: str): 5 | return "_".join(value.lower().split()) 6 | 7 | 8 | def is_valid_uuid(value: str): # returns the parsed UUID (truthy) or None 9 | try: 10 | return uuid.UUID(str(value)) 11 | except ValueError: 12 | return None 13 | -------------------------------------------------------------------------------- /app/service/mentions.py: -------------------------------------------------------------------------------- 1 | class Mention: 2 | def __init__(self, obj: dict, keyword: str): 3 | self.arr: list = [] 4 | self.sub_arr: list = [] 5 | self.obj: dict = obj 6 | self.keyword: str = keyword 7 | 8 | def extract(self, obj, sub_arr, val) -> list: # record the path to every value equal to val 9 | if isinstance(obj, dict): 10 | for k, v in obj.items(): 11 | found_arr = [*sub_arr, k] 12 | if isinstance(v, dict | list): 13 | self.extract(v, found_arr, val) 14 | elif v == val: 15 | self.arr.append(found_arr) 16 | elif isinstance(obj, list): 17 | for item in obj: 18 | found_arr = [*sub_arr, obj.index(item)] # index() returns the first match, so duplicate items share a path 19 | if isinstance(item, dict | list): 20 | self.extract(item, found_arr, val) 21 | elif item == val: 22 | self.arr.append(found_arr) 23 | return self.arr 24 | 25 | def traverse_dict_by_path(self, dictionary: dict, paths: list) -> list: # resolve each recorded path to its parent node 26 | self.extract(self.obj, [], self.keyword) 27 | 28 | res = [] 29 | 30 | for path in paths: 31 | _dictionary = dictionary.copy() 32 | for item in path[:-1]: 33 | _dictionary = _dictionary[item] 34 | res.append(_dictionary) 35 | return res 36 | 37 | def process(self) -> list: # collect the ids stored at attrs.id of every matched node 38 | mention_uuids = [] 39 | mentions = self.traverse_dict_by_path(self.obj, self.arr) 40 | 41 | for mention in mentions: 42 | mention_uuids.append(mention["attrs"]["id"]) 43 | return mention_uuids 44 | -------------------------------------------------------------------------------- /app/service/notification_email.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import os 3 | from uuid import UUID 4 | 5 | from loguru import logger 6 | from requests import request 7 | 8 | from app.config import get_settings 9 | from app.models.models import User 10 | from app.models.shared_models import PublicUser 11 | 12 | settings = get_settings() 13 | 14 | 15 | class EmailNotification: 16 | def __init__(self): 17 | self.app_key = settings.email_mailjet_app_key 18 | self.secret_key = settings.email_mailjet_secret_key 19 | self.auth_header = self.generate_basic_auth(self.app_key, self.secret_key) 20 | self.debug = False 21 | 22 | self.sender = settings.email_sender 23 | self.base_url = settings.base_app_url 24 | self.product_name = "Malgori" 25 | 26 | def generate_basic_auth(self, username: str, password: str): 27 | return base64.b64encode(f"{username}:{password}".encode()) 28 | 29 | # def send_by_email_labs(self, receiver: str, subject: str, template: str, vars: dict): 30 | # url = "https://api.emaillabs.net.pl/api/sendmail_templates" 31 | # smtp = settings.email_smtp 32 | # 33 | # receiver_data = {f"to[{receiver}]": ""} 34 | # 35 | # for key, value in
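`Mention` above records the path to every node whose value equals the keyword, then resolves each path to its parent node and collects `attrs.id`. A small usage sketch (the document shape and id are illustrative, and the import assumes the repo root is on PYTHONPATH):

```python
from app.service.mentions import Mention

doc = {
    "type": "doc",
    "content": [
        {
            "type": "paragraph",
            "content": [
                {"type": "mention", "attrs": {"id": "user-1"}},
                {"type": "text", "text": "please take a look"},
            ],
        }
    ],
}

print(Mention(doc, "mention").process())  # ['user-1']
```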
vars.items(): 36 | # receiver_data[f"to[{receiver}][vars][{key}]"] = value 37 | # 38 | # headers = {"Authorization": f"Basic {self.auth_header.decode()}"} 39 | # template_data = {"from": self.sender, "smtp_account": smtp, "subject": subject, "template_id": template} 40 | # 41 | # payload = receiver_data | template_data 42 | # files = {} 43 | # 44 | # response = request("POST", url, headers=headers, data=payload, files=files) 45 | # return response.text 46 | 47 | def send_by_mailjet(self, payload: dict): 48 | if (os.getenv("TESTING") is not None) and (os.getenv("TESTING") == "1"): 49 | logger.info("Email test") 50 | return "TEST_EMAIL_NOTIFICATION" 51 | 52 | url = "https://api.mailjet.com/v3.1/send" 53 | headers = {"Content-Type": "application/json", "Authorization": f"Basic {self.auth_header.decode()}"} 54 | 55 | # pprint(payload) 56 | response = request("POST", url, headers=headers, json=payload) 57 | print("======") 58 | print(response.text) 59 | return response.text 60 | 61 | # return "OK" 62 | 63 | # MAILJET TEMPLATES COMMON 64 | 65 | def message_from_field(self): 66 | from_field = {"Email": "awaria@remontmaszyn.pl", "Name": "remontmaszyn.pl"} 67 | return from_field 68 | 69 | def message_to_field(self, user: User | PublicUser): 70 | if settings.ENVIRONMENT == "PRD": 71 | return [{"Email": user.email, "Name": f"{user.first_name} {user.last_name}"}] 72 | 73 | first_name = "Groovy" 74 | last_name = "Gorilla" 75 | return [{"Email": settings.email_dev, "Name": f"{first_name} {last_name}"}] 76 | 77 | def get_template_admin_registration(self, user: User, activation_url: str): 78 | message_dict = { 79 | "From": self.message_from_field(), 80 | "To": self.message_to_field(user), 81 | "TemplateID": 4561351, 82 | "TemplateLanguage": True, 83 | "Subject": "[Malgori] Dziękuję za rejestrację", 84 | "Variables": { 85 | "product_name": self.product_name, 86 | "activation_url": f"{self.base_url}{activation_url}", 87 | "login_url": f"{self.base_url}/login", 88 | "user_name": user.email, 89 | }, 90 | } 91 | 92 | if self.debug: 93 | self.add_template_debugging(message_dict) 94 | 95 | return {"Messages": [message_dict]} 96 | 97 | def get_template_reset_password_request(self, user: User, reset_token: str, browser: str, user_os: str): 98 | message_dict = { 99 | "From": self.message_from_field(), 100 | "To": self.message_to_field(user), 101 | "TemplateID": 4561364, 102 | "TemplateLanguage": True, 103 | "Subject": "[Malgori] Reset hasła", 104 | "Variables": { 105 | "product_name": self.product_name, 106 | "reset_password_url": f"{self.base_url}/set_password/{reset_token}", 107 | "operating_system": user_os, 108 | "browser_name": browser, 109 | }, 110 | } 111 | 112 | if self.debug: 113 | self.add_template_debugging(message_dict) 114 | 115 | return {"Messages": [message_dict]} 116 | 117 | def get_template_failure(self, users: list[User], name: str, description: str, uuid: UUID): 118 | messages_list = [] 119 | for user in users: 120 | message_dict = { 121 | "From": self.message_from_field(), 122 | "To": self.message_to_field(user), 123 | "TemplateID": 4534065, 124 | "TemplateLanguage": True, 125 | "Subject": "[Malgori] Nowa awaria", 126 | "Variables": { 127 | "issue_name": name, 128 | "issue_description": description, 129 | "issue_url": f"{self.base_url}/issues/{uuid}", 130 | }, 131 | } 132 | 133 | messages_list.append(message_dict) 134 | 135 | if self.debug: 136 | self.add_template_debugging(messages_list) 137 | 138 | return {"Messages": messages_list} 139 | 140 | def add_template_debugging(self, message_dict): 
141 | message_dict["TemplateErrorReporting"] = {"Email": "m@m.pl", "Name": "Mailjet Template Errors"} 142 | 143 | def send_admin_registration(self, user: User | PublicUser, activation_url: str) -> None: 144 | data = self.get_template_admin_registration(user, activation_url) 145 | self.send_by_mailjet(data) 146 | 147 | def send_password_reset_request( 148 | self, user: User | PublicUser, reset_token: str, browser: str, user_os: str 149 | ) -> None: 150 | data = self.get_template_reset_password_request(user, reset_token, browser, user_os) 151 | 152 | self.send_by_mailjet(data) 153 | 154 | def send_failure_notification(self, users: list[User], name: str, description: str, uuid: UUID): 155 | data = self.get_template_failure(users, name, description, uuid) 156 | print(data) 157 | self.send_by_mailjet(data) 158 | -------------------------------------------------------------------------------- /app/service/notification_sms.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import os 3 | 4 | from requests import request 5 | 6 | from app.config import get_settings 7 | 8 | settings = get_settings() 9 | 10 | 11 | class SMSNotification: 12 | def __init__(self): 13 | self.app_key = settings.email_labs_app_key 14 | self.secret_key = settings.email_labs_secret_key 15 | self.smtp = "" 16 | self.auth_header = base64.b64encode(f"{self.app_key}:{self.secret_key}".encode()) 17 | self.url = "https://api.emaillabs.net.pl/api/sendmail_templates" 18 | self.smtp = settings.email_smtp 19 | 20 | def send(self, sender: str, receiver: list, message: str, template: str, template_vars: dict | None = None): 21 | if settings.ENVIRONMENT != "PRD": 22 | receiver = settings.email_dev 23 | 24 | receiver_data = {f"to[{receiver}]": ""} 25 | 26 | for key, value in (template_vars or {"message": message}).items(): # was vars.items(), which hit the vars() builtin; the message-body fallback is an assumption 27 | receiver_data[f"to[{receiver}][vars][{key}]"] = value 28 | 29 | headers = {"Authorization": f"Basic {self.auth_header.decode()}"} 30 | template_data = {"from": sender, "smtp_account": self.smtp, "subject": "subject", "template_id": template} 31 | 32 | payload = receiver_data | template_data 33 | files = {} 34 | 35 | if (os.getenv("TESTING") is not None) and (os.getenv("TESTING") == "1"): 36 | return "TEST_EMAIL_NOTIFICATION" 37 | 38 | # print(response.text) 39 | # pprint(payload) 40 | 41 | response = request("POST", self.url, headers=headers, data=payload, files=files) 42 | return response.text 43 | -------------------------------------------------------------------------------- /app/service/notifications.py: -------------------------------------------------------------------------------- 1 | from app.config import get_settings 2 | from app.models.models import Issue, User 3 | from app.service.notification_email import EmailNotification 4 | 5 | settings = get_settings() 6 | 7 | 8 | def notify_users(sms_list: list[User], email_list: list[User], issue: Issue | None = None): 9 | print(">>> bulk_email_notification") 10 | email = EmailNotification() 11 | email.send_failure_notification(email_list, issue.name, issue.text, issue.uuid) 12 | 13 | 14 | def chunks(lst, n): 15 | """Yield successive n-sized chunks from lst.""" 16 | for i in range(0, len(lst), n): 17 | yield lst[i : i + n] 18 | --------------------------------------------------------------------------------
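`notify_users` above fans out to `EmailNotification.send_failure_notification`, which posts a Mailjet v3.1 `Messages` payload. A sketch that only builds and inspects that payload without sending anything, assuming a configured `.env` so `get_settings()` resolves (the stand-in user object is illustrative):

```python
from types import SimpleNamespace
from uuid import uuid4

from app.service.notification_email import EmailNotification

user = SimpleNamespace(email="ops@example.com", first_name="Groovy", last_name="Gorilla")
payload = EmailNotification().get_template_failure([user], "Pump 3 down", "Bearing failure", uuid4())
print(payload["Messages"][0]["Variables"]["issue_url"])  # {base_app_url}/issues/{uuid}
```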
/app/service/password.py: -------------------------------------------------------------------------------- 1 | from passlib.hash import argon2 2 | 3 | 4 | class Password: 5 | def __init__(self, password: str): 6 | self.password = password 7 | 8 | def validate(self) -> str | bool: 9 | vals = { 10 | # "Password must contain an uppercase letter.": lambda s: any(x.isupper() for x in s), 11 | "Password must contain a lowercase letter.": lambda s: any(x.islower() for x in s), 12 | # "Password must contain a digit.": lambda s: any(x.isdigit() for x in s), 13 | # "Password must be at least 8 characters.": lambda s: len(s) >= 8, 14 | # "Password cannot contain white spaces.": lambda s: not any(x.isspace() for x in s), 15 | } 16 | valid = True 17 | for n, val in vals.items(): 18 | if not val(self.password): 19 | valid = False 20 | return n 21 | return valid 22 | 23 | def compare(self, password_confirmation: str) -> str | bool: 24 | is_valid = self.validate() 25 | 26 | if is_valid is not True: 27 | return is_valid 28 | 29 | if self.password != password_confirmation: 30 | return "Password and password confirmation do not match" 31 | 32 | return True 33 | 34 | def hash(self): 35 | return argon2.hash(self.password) 36 | --------------------------------------------------------------------------------
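A quick usage sketch of the `Password` helper above: `validate()` returns `True` or the first failing rule's message, and `hash()` produces an argon2 digest that passlib can verify:

```python
from passlib.hash import argon2

from app.service.password import Password

pwd = Password("correct-horse")
assert pwd.validate() is True  # only the lowercase rule is active above
assert pwd.compare("correct-horse") is True

digest = pwd.hash()
print(argon2.verify("correct-horse", digest))  # True
```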
/app/service/scheduler.py: -------------------------------------------------------------------------------- 1 | from apscheduler.schedulers.sync import Scheduler 2 | from fastapi import FastAPI 3 | 4 | scheduler = Scheduler() 5 | 6 | 7 | def start_scheduler(app: FastAPI): 8 | @app.on_event("startup") 9 | def init_scheduler(): 10 | # print("start scheduler...") 11 | scheduler.start_in_background() 12 | # print("start scheduler... DONE") 13 | -------------------------------------------------------------------------------- /app/service/tenants.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import re 3 | from uuid import UUID 4 | 5 | import sqlalchemy as sa 6 | from alembic import command 7 | from alembic.config import Config 8 | from loguru import logger 9 | from sentry_sdk import capture_exception 10 | from unidecode import unidecode 11 | 12 | from app.config import get_settings 13 | from app.db import SQLALCHEMY_DB_URL, with_db 14 | from app.utils.decorators import timer 15 | 16 | settings = get_settings() 17 | 18 | 19 | @timer 20 | def alembic_upgrade_head(tenant_name: str, revision="head", url: str | None = None): 21 | logger.info("🔺 [Schema upgrade] " + tenant_name + " to version: " + revision) 22 | print("🔺[Schema upgrade] " + tenant_name + " to version: " + revision) 23 | # set the paths values 24 | 25 | if url is None: 26 | url = SQLALCHEMY_DB_URL 27 | try: 28 | # create Alembic config and feed it with paths 29 | config = Config(str(settings.PROJECT_DIR / "alembic.ini")) 30 | config.set_main_option("script_location", str(settings.PROJECT_DIR / "migrations")) # replace("%", "%%") 31 | config.set_main_option("sqlalchemy.url", url) 32 | config.cmd_opts = argparse.Namespace() # arguments stub 33 | 34 | # If it is required to pass -x parameters to alembic 35 | x_arg = "".join(["tenant=", tenant_name]) # "dry_run=" + "True" 36 | if not hasattr(config.cmd_opts, "x"): 37 | if x_arg is not None: 38 | config.cmd_opts.x = [] 39 | if isinstance(x_arg, list) or isinstance(x_arg, tuple): 40 | for x in x_arg: 41 | config.cmd_opts.x.append(x) 42 | else: 43 | config.cmd_opts.x.append(x_arg) 44 | else: 45 | config.cmd_opts.x = None 46 | 47 | # prepare and run the command 48 | 49 | sql = False 50 | tag = None 51 | # command.stamp(config, revision, sql=sql, tag=tag) 52 | 53 | # upgrade command 54 | command.upgrade(config, revision, sql=sql, tag=tag) 55 | except Exception as e: 56 | logger.error(e) 57 | # capture_exception(e) 58 | # print(traceback.format_exc()) 59 | 60 | logger.info("✅ Schema upgraded for: " + tenant_name + " to version: " + revision) 61 | print("✅ Schema upgraded for: " + tenant_name + " to version: " + revision) 62 | 63 | 64 | def tenant_create(schema: str) -> None: 65 | logger.info("START create schema: " + schema) 66 | 67 | try: 68 | with with_db("public") as db: 69 | db.execute(sa.schema.CreateSchema(schema)) 70 | db.commit() 71 | except Exception as e: 72 | logger.error(e) 73 | capture_exception(e) 74 | logger.info("Done create schema: " + schema) 75 | 76 | 77 | def tenant_remove(schema: str) -> None: 78 | logger.info("START DROP schema: " + schema) 79 | try: 80 | with with_db("public") as db: 81 | db.execute(sa.schema.DropSchema(schema, cascade=True)) 82 | db.commit() 83 | except Exception as e: 84 | capture_exception(e) 85 | logger.error(e) 86 | print(e) 87 | logger.info("Done DROP schema: " + schema) 88 | 89 | 90 | def generate_tenant_id(name: str, uuid: UUID) -> str: 91 | company = re.sub("[^A-Za-z0-9 _]", "", unidecode(name)) 92 | uuid = str(uuid).replace("-", "") # accept UUID objects as well as strings 93 | 94 | return "".join([company[:28], "_", uuid]).lower().replace(" ", "_") 95 | -------------------------------------------------------------------------------- /app/storage/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/storage/__init__.py -------------------------------------------------------------------------------- /app/storage/aws_s3.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | 3 | from app.config import get_settings 4 | 5 | settings = get_settings() 6 | 7 | s3_resource = boto3.resource( 8 | service_name="s3", 9 | region_name=settings.s3_region, 10 | aws_access_key_id=settings.s3_access_key, 11 | aws_secret_access_key=settings.s3_secret_access_key, 12 | ) 13 | 14 | s3_client = boto3.client( 15 | "s3", 16 | region_name=settings.s3_region, 17 | aws_access_key_id=settings.s3_access_key, 18 | aws_secret_access_key=settings.s3_secret_access_key, 19 | ) 20 | 21 | 22 | def generate_presigned_url(tenant: str, file: str) -> str: 23 | presigned_url = s3_client.generate_presigned_url( 24 | ClientMethod="get_object", Params={"Bucket": settings.s3_bucket_name, "Key": f"{tenant}/{file}"}, ExpiresIn=3600 25 | ) 26 | return presigned_url 27 | -------------------------------------------------------------------------------- /app/storage/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import BinaryIO 3 | 4 | 5 | class BaseStorage(ABC): # pragma: no cover 6 | OVERWRITE_EXISTING_FILES = True 7 | """Whether to overwrite existing files 8 | if the name is the same or add a suffix to the filename.""" 9 | 10 | @abstractmethod 11 | def get_name(self, name: str) -> str: ... 12 | 13 | # def get_path(self, name: str) -> str: 14 | # ... 15 | 16 | @abstractmethod 17 | def get_size(self, name: str) -> int: ... 18 | 19 | # def open(self, name: str) -> BinaryIO: 20 | # ... 21 | 22 | @abstractmethod 23 | def write(self, file: BinaryIO, name: str) -> str: ... 24 | 25 | # def generate_new_filename(self, filename: str) -> str: 26 | # ... 27 | --------------------------------------------------------------------------------
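`BaseStorage` above pins down the minimal contract (`get_name`, `get_size`, `write`). A local-disk sketch of that contract, handy as a test double; the class name and directory are illustrative:

```python
from pathlib import Path
from typing import BinaryIO

from app.storage.base import BaseStorage


class LocalDiskStorage(BaseStorage):
    def __init__(self, root: str = "./uploads") -> None:
        self.root = Path(root)
        self.root.mkdir(parents=True, exist_ok=True)

    def get_name(self, name: str) -> str:
        # keep only the final path component so callers cannot escape root
        return Path(name).name

    def get_size(self, name: str) -> int:
        return (self.root / self.get_name(name)).stat().st_size

    def write(self, file: BinaryIO, name: str) -> str:
        key = self.get_name(name)
        file.seek(0)
        (self.root / key).write_bytes(file.read())
        return key
```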
/app/storage/s3.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import unicodedata 4 | from pathlib import Path 5 | from typing import BinaryIO 6 | 7 | from app.config import get_settings 8 | from app.storage.base import BaseStorage 9 | 10 | try: 11 | import boto3 12 | except ImportError: # pragma: no cover 13 | boto3 = None 14 | 15 | settings = get_settings() 16 | 17 | 18 | class S3Storage(BaseStorage): 19 | """ 20 | Amazon S3 or any S3 compatible storage backend. 21 | You might want to use this with the `FileType` type. 22 | Requires `boto3` to be installed. 23 | """ 24 | 25 | AWS_ACCESS_KEY_ID = settings.s3_access_key 26 | """AWS access key ID. Either set here or as an environment variable.""" 27 | 28 | AWS_SECRET_ACCESS_KEY = settings.s3_secret_access_key 29 | """AWS secret access key. Either set here or as an environment variable.""" 30 | 31 | AWS_S3_REGION = settings.s3_region 32 | """AWS S3 region to use.""" 33 | 34 | AWS_S3_BUCKET_NAME = settings.s3_bucket_name 35 | """AWS S3 bucket name to use.""" 36 | 37 | AWS_S3_ENDPOINT_URL = "" 38 | """AWS S3 endpoint URL.""" 39 | 40 | AWS_S3_USE_SSL = True 41 | """Indicate if SSL should be used.""" 42 | 43 | AWS_DEFAULT_ACL = "" 44 | """Optional ACL set on the object like `public-read`. 45 | By default file will be private.""" 46 | 47 | AWS_QUERYSTRING_AUTH = False 48 | """Indicate if query parameter authentication should be used in URLs.""" 49 | 50 | AWS_S3_CUSTOM_DOMAIN = "" 51 | """Custom domain to use for serving object URLs.""" 52 | 53 | def __init__(self) -> None: 54 | print("REGION", self.AWS_S3_REGION) 55 | assert boto3 is not None, "'boto3' is not installed" 56 | assert not self.AWS_S3_ENDPOINT_URL.startswith("http"), "URL should not contain protocol" 57 | 58 | self._http_scheme = "https" if self.AWS_S3_USE_SSL else "http" 59 | self._url = f"{self._http_scheme}://{self.AWS_S3_ENDPOINT_URL}" 60 | self._s3 = boto3.resource( 61 | "s3", 62 | region_name=self.AWS_S3_REGION, 63 | # endpoint_url=self._url, 64 | use_ssl=self.AWS_S3_USE_SSL, 65 | aws_access_key_id=self.AWS_ACCESS_KEY_ID, 66 | aws_secret_access_key=self.AWS_SECRET_ACCESS_KEY, 67 | ) 68 | self._bucket = self._s3.Bucket(name=self.AWS_S3_BUCKET_NAME) 69 | 70 | def secure_filename(self, filename: str) -> str: 71 | r"""Pass it a filename and it will return a secure version of it. This 72 | filename can then safely be stored on a regular file system and passed 73 | to :func:`os.path.join`. The filename returned is an ASCII only string 74 | for maximum portability. 75 | 76 | On Windows systems the function also makes sure that the file is not 77 | named after one of the special device files. 78 | 79 | > secure_filename("My cool movie.mov") 80 | 'My_cool_movie.mov' 81 | > secure_filename("../../../etc/passwd") 82 | 'etc_passwd' 83 | > secure_filename('i contain cool \xfcml\xe4uts.txt') 84 | 'i_contain_cool_umlauts.txt' 85 | 86 | The function might return an empty filename. It's your responsibility 87 | to ensure that the filename is unique and that you abort or 88 | generate a random filename if the function returned an empty one.
89 | 90 | :param filename: the filename to secure 91 | """ 92 | 93 | _entity_re = re.compile(r"&([^;]+);") 94 | _filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]") 95 | _windows_device_files = { 96 | "CON", 97 | "PRN", 98 | "AUX", 99 | "NUL", 100 | *(f"COM{i}" for i in range(10)), 101 | *(f"LPT{i}" for i in range(10)), 102 | } 103 | 104 | filename = unicodedata.normalize("NFKD", filename) 105 | filename = filename.encode("ascii", "ignore").decode("ascii") 106 | 107 | for sep in os.sep, os.path.altsep: 108 | if sep: 109 | filename = filename.replace(sep, " ") 110 | filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip("._") 111 | 112 | # on nt a couple of special files are present in each folder. We 113 | # have to ensure that the target file is not such a filename. In 114 | # this case we prepend an underline 115 | if os.name == "nt" and filename and filename.split(".")[0].upper() in _windows_device_files: 116 | filename = f"_{filename}" 117 | 118 | return filename 119 | 120 | def get_name(self, name: str) -> str: 121 | """ 122 | Get the normalized name of the file. 123 | """ 124 | 125 | filename = self.secure_filename(Path(name).name) 126 | return str(Path(name).with_name(filename)) 127 | 128 | def get_size(self, name: str) -> int: 129 | """ 130 | Get file size in bytes. 131 | """ 132 | 133 | key = self.get_name(name) 134 | return self._bucket.Object(key).content_length 135 | 136 | def get_path(self, name: str) -> str: 137 | """ 138 | Get full URL to the file. 139 | """ 140 | 141 | key = self.get_name(name) 142 | 143 | if self.AWS_S3_CUSTOM_DOMAIN: 144 | return f"{self._http_scheme}://{self.AWS_S3_CUSTOM_DOMAIN}/{key}" 145 | 146 | if self.AWS_QUERYSTRING_AUTH: 147 | params = {"Bucket": self._bucket.name, "Key": key} 148 | return self._s3.meta.client.generate_presigned_url("get_object", Params=params) 149 | 150 | return f"{self._http_scheme}://{self.AWS_S3_ENDPOINT_URL}/{self.AWS_S3_BUCKET_NAME}/{key}" 151 | 152 | def write(self, file: BinaryIO, name: str) -> str: 153 | """ 154 | Write input file which is opened in binary mode to destination. 155 | """ 156 | 157 | file.seek(0, 0) 158 | key = self.get_name(name) 159 | 160 | self._bucket.upload_fileobj(Fileobj=file, Key=key) # upload under the sanitized key # , ExtraArgs={"ACL": self.AWS_DEFAULT_ACL} 161 | return key 162 | 163 | def read(self): ... 164 | 165 | def remove_file(self): ... 166 | --------------------------------------------------------------------------------
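Putting `S3Storage` together: `write()` sanitizes the name before uploading, and `get_path()` returns either a plain or a presigned URL depending on `AWS_QUERYSTRING_AUTH`. A usage sketch, assuming valid AWS credentials and bucket settings in the environment:

```python
import io

from app.storage.s3 import S3Storage

storage = S3Storage()
key = storage.write(io.BytesIO(b"hello"), "My report (final).pdf")
print(key)                    # My_report_final.pdf
print(storage.get_size(key))  # 5
print(storage.get_path(key))  # presigned URL when AWS_QUERYSTRING_AUTH is enabled
```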
/app/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/app/utils/__init__.py -------------------------------------------------------------------------------- /app/utils/decorators.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import time 3 | import tracemalloc 4 | from functools import wraps 5 | from time import perf_counter, strftime 6 | 7 | 8 | def trycatch(func): 9 | """Wraps the decorated function in a try-catch. If the function fails, print the exception.""" 10 | 11 | @wraps(func) 12 | def wrapper(*args, **kwargs): 13 | try: 14 | res = func(*args, **kwargs) 15 | return res 16 | except Exception as e: 17 | print(f"Exception in {func.__name__}: {e}") 18 | 19 | return wrapper 20 | 21 | 22 | def performance_check(func): 23 | """Measure performance of a function""" 24 | 25 | @wraps(func) 26 | def wrapper(*args, **kwargs): 27 | tracemalloc.start() 28 | start_time = time.perf_counter() 29 | res = func(*args, **kwargs) 30 | duration = time.perf_counter() - start_time 31 | current, peak = tracemalloc.get_traced_memory() 32 | tracemalloc.stop() 33 | 34 | print( 35 | f"\nFunction: {func.__name__} ({func.__doc__})" 36 | f"\nMemory usage: {current / 10**6:.6f} MB" 37 | f"\nPeak memory usage: {peak / 10**6:.6f} MB" 38 | f"\nDuration: {duration:.6f} sec" 39 | f"\n{'-'*40}" 40 | ) 41 | return res 42 | 43 | return wrapper 44 | 45 | 46 | def timer(func): 47 | @functools.wraps(func) 48 | def wrapper_timer(*args, **kwargs): 49 | tic = perf_counter() 50 | value = func(*args, **kwargs) 51 | toc = perf_counter() 52 | elapsed_time = toc - tic 53 | print( 54 | f"\n{func.__name__!r} finished at {strftime('%l:%M%p %Z on %b %d, %Y')} in {elapsed_time:0.4f} seconds\n" 55 | ) 56 | return value 57 | 58 | return wrapper_timer 59 | --------------------------------------------------------------------------------
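The decorators above compose like any others. A tiny runnable demo of `@timer` and `@performance_check` on a throwaway function:

```python
from app.utils.decorators import performance_check, timer


@timer
@performance_check
def build_squares(n: int) -> list[int]:
    """Square the first n integers."""
    return [i * i for i in range(n)]


build_squares(100_000)  # prints memory usage, peak, duration, then total wall time
```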
/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/commands/__init__.py -------------------------------------------------------------------------------- /commands/db_backup/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/commands/db_backup/__init__.py -------------------------------------------------------------------------------- /commands/db_backup/backups/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/commands/db_backup/backups/.gitkeep -------------------------------------------------------------------------------- /commands/db_backup/readme.md: -------------------------------------------------------------------------------- 1 | # postgres_manage_python 2 | 3 | Utility to backup, restore and list Postgresql databases from/to AWS S3 (or local storage) using python 4 | 5 | ## Getting Started 6 | 7 | ### Setup 8 | 9 | - Activate virtualenv 10 | 11 | ```bash 12 | source .venv/bin/activate 13 | ``` 14 | - Create configuration file (i.e. sample.config) 15 | 16 | ``` 17 | [setup] 18 | # define if LOCAL or S3 storage will be used when storing/restoring the backup 19 | storage_engine='S3' 20 | 21 | [S3] 22 | bucket_name="db_backups.s3.my.domain.com" # S3 bucket name (no need for s3:// prefix) 23 | bucket_backup_path="postgres/" # PATH in the bucket to store your backups 24 | 25 | [local_storage] 26 | path=./backups/ 27 | 28 | [postgresql] 29 | host= 30 | port= 31 | db= 32 | user= 33 | password= 34 | ``` 35 | 36 | ### Usage 37 | 38 | - List databases on a postgresql server 39 | 40 | ```bash 41 | python3 manage_postgres_db.py --configfile sample.config --action list_dbs --verbose true 42 | ``` 43 | 44 | - Create database backup and store it (based on config file details) 45 | 46 | ```bash 47 | python3 manage_postgres_db.py --configfile sample.config --action backup --verbose true 48 | ``` 49 | 50 | - List previously created database backups available on storage engine 51 | 52 | ```bash 53 | python3 manage_postgres_db.py --configfile sample.config --action list --verbose true 54 | ``` 55 | - Restore previously created database backups available on storage engine (check available dates with _list_ action) 56 | 57 | ```bash 58 | python3 manage_postgres_db.py --configfile sample.config --action restore --date "YYYY-MM-dd" --verbose true 59 | ``` 60 | 61 | - Restore previously created database backups into a new destination database 62 | 63 | ```bash 64 | python3 manage_postgres_db.py --configfile sample.config --action restore --date "YYYY-MM-dd" --dest-db new_DB_name 65 | ``` 66 | 67 | ## Authors 68 | 69 | - **Val Feron** - _Initial work_ - [GitHub](https://github.com/valferon) 70 | 71 | ## License 72 | MIT License 73 | Copyright (c) valferon 74 | 75 | Permission is hereby granted, free of charge, to any person obtaining a copy 76 | of this software and associated documentation files (the "Software"), to deal 77 | in the Software without restriction, including without limitation the rights 78 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 79 | copies of the Software, and to permit persons to whom the Software is 80 | furnished to do so, subject to the following conditions: 81 | 82 | The above copyright notice and this permission notice shall be included in all 83 | copies or substantial portions of the Software. 84 | 85 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 86 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 87 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 88 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 89 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 90 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 91 | SOFTWARE. 92 | --------------------------------------------------------------------------------
/compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | web: 3 | build: 4 | context: . 5 | # dockerfile: ./Dockerfile 6 | dockerfile: dev.Dockerfile 7 | args: 8 | - "UID=${UID:-1000}" 9 | volumes: 10 | - ./app:/src/app 11 | ports: 12 | - "5000:5000" 13 | - "5678:5678" 14 | env_file: ./app/.env 15 | depends_on: 16 | db: 17 | condition: service_healthy # Disable this if you are using an external Postgres database 18 | db: 19 | image: postgres:15 20 | healthcheck: 21 | test: pg_isready -U postgres -h localhost 22 | interval: 5s 23 | timeout: 5s 24 | retries: 10 25 | expose: 26 | - 5432 27 | ports: 28 | - "5432:5432" 29 | environment: 30 | - POSTGRES_USER=postgres 31 | - POSTGRES_PASSWORD=postgres 32 | - POSTGRES_DB=pg_db 33 | -------------------------------------------------------------------------------- /dev.Dockerfile: -------------------------------------------------------------------------------- 1 | # pull official base image 2 | FROM python:3.10.14-slim-bookworm 3 | 4 | RUN apt-get update && apt-get install -y --no-install-recommends \ 5 | curl \ 6 | postgresql-client \ 7 | && rm -rf /var/lib/apt/lists/* 8 | 9 | # Prevents Python from writing pyc files. 10 | ENV PYTHONDONTWRITEBYTECODE=1 11 | 12 | # Keeps Python from buffering stdout and stderr to avoid situations where 13 | # the application crashes without emitting any logs due to buffering. 14 | ENV PYTHONUNBUFFERED=1 15 | 16 | # Enable python stacktraces on segfaults 17 | ENV PYTHONFAULTHANDLER=1 18 | 19 | WORKDIR /src 20 | 21 | # Create a non-privileged user that the app will run under. 22 | # See https://docs.docker.com/go/dockerfile-user-best-practices/ 23 | ARG UID=10001 24 | RUN adduser \ 25 | --disabled-password \ 26 | --gecos "" \ 27 | --home "/nonexistent" \ 28 | --shell "/sbin/nologin" \ 29 | --no-create-home \ 30 | --uid "${UID}" \ 31 | appuser 32 | 33 | 34 | # Download dependencies as a separate step to take advantage of Docker's caching. 35 | # Leverage a cache mount to /root/.cache/pip to speed up subsequent builds. 36 | # Leverage a bind mount to requirements.txt to avoid having to copy it 37 | # into this layer.
38 | RUN --mount=type=cache,target=/root/.cache/pip \ 39 | --mount=type=bind,source=requirements.txt,target=requirements.txt \ 40 | python -m pip install -r requirements.txt 41 | 42 | # TODO: UV - https://github.com/djangopackages/djangopackages/blob/main/dockerfiles/django/Dockerfile-dev 43 | 44 | # set argument vars in docker-run command 45 | ARG AWS_ACCESS_KEY_ID 46 | ARG AWS_SECRET_ACCESS_KEY 47 | ARG AWS_DEFAULT_REGION 48 | 49 | ARG AWS_S3_BUCKET 50 | ARG AWS_S3_DEFAULT_REGION 51 | ARG AWS_S3_ACCESS_KEY_ID 52 | ARG AWS_S3_SECRET_ACCESS_KEY 53 | 54 | #APP 55 | ARG APP_ENV 56 | ARG APP_HOST 57 | 58 | # SENTRY DSN 59 | ARG SENTRY_DSN 60 | 61 | # GUS 62 | ARG GUS_API_DEV 63 | 64 | # API_VIDEO 65 | ARG API_VIDEO 66 | ARG API_VIDEO_UPLOAD 67 | 68 | # AWS RDS vars 69 | ARG DB_USERNAME 70 | ARG DB_PASSWORD 71 | ARG DB_HOST 72 | ARG DB_PORT 73 | ARG DB_DATABASE 74 | 75 | # EMAIL LABS 76 | ARG EMAIL_LABS_APP_KEY 77 | ARG EMAIL_LABS_SECRET_KEY 78 | ARG EMAIL_LABS_SMTP 79 | ARG EMAIL_LABS_SENDER 80 | ARG EMAIL_DEV 81 | 82 | # MAILJET 83 | ARG MAILJET_EMAIL_API_KEY 84 | ARG MAILJET_EMAIL_SECRET 85 | ARG MAILJET_EMAIL_SENDER 86 | ARG MAILJET_SMS_API_KEY 87 | ARG MAILJET_SMS_SENDER 88 | 89 | ENV APP_ENV $APP_ENV 90 | ENV APP_HOST $APP_HOST 91 | 92 | ENV AWS_ACCESS_KEY_ID $AWS_ACCESS_KEY_ID 93 | ENV AWS_SECRET_ACCESS_KEY $AWS_SECRET_ACCESS_KEY 94 | ENV AWS_DEFAULT_REGION $AWS_DEFAULT_REGION 95 | 96 | ENV AWS_S3_BUCKET $AWS_S3_BUCKET 97 | ENV AWS_S3_DEFAULT_REGION $AWS_S3_DEFAULT_REGION 98 | ENV AWS_S3_ACCESS_KEY_ID $AWS_S3_ACCESS_KEY_ID 99 | ENV AWS_S3_SECRET_ACCESS_KEY $AWS_S3_SECRET_ACCESS_KEY 100 | 101 | ENV SENTRY_DSN $SENTRY_DSN 102 | ENV GUS_API_DEV $GUS_API_DEV 103 | ENV API_VIDEO $API_VIDEO 104 | ENV API_VIDEO_UPLOAD $API_VIDEO_UPLOAD 105 | 106 | ENV EMAIL_LABS_APP_KEY $EMAIL_LABS_APP_KEY 107 | ENV EMAIL_LABS_SECRET_KEY $EMAIL_LABS_SECRET_KEY 108 | ENV EMAIL_LABS_SMTP $EMAIL_LABS_SMTP 109 | ENV EMAIL_LABS_SENDER $EMAIL_LABS_SENDER 110 | ENV EMAIL_DEV $EMAIL_DEV 111 | 112 | ENV MAILJET_EMAIL_API_KEY $MAILJET_EMAIL_API_KEY 113 | ENV MAILJET_EMAIL_SECRET $MAILJET_EMAIL_SECRET 114 | ENV MAILJET_EMAIL_SENDER $MAILJET_EMAIL_SENDER 115 | ENV MAILJET_SMS_API_KEY $MAILJET_SMS_API_KEY 116 | ENV MAILJET_SMS_SENDER $MAILJET_SMS_SENDER 117 | 118 | ENV DB_USERNAME $DB_USERNAME 119 | ENV DB_PASSWORD $DB_PASSWORD 120 | ENV DB_HOST $DB_HOST 121 | ENV DB_PORT $DB_PORT 122 | ENV DB_DATABASE $DB_DATABASE 123 | 124 | # Switch to the non-privileged user to run the application. 125 | USER appuser 126 | 127 | #USER alex 128 | COPY --chown=appuser:appuser ./commands /src/commands 129 | COPY --chown=appuser:appuser ./migrations /src/migrations 130 | COPY --chown=appuser:appuser ./alembic.ini /src/alembic.ini 131 | COPY --chown=appuser:appuser ./app /src/app 132 | COPY --chown=appuser:appuser ./tests/api_responses /src/tests/api_responses 133 | 134 | # Expose the port that the application listens on. 
135 | EXPOSE 5000 136 | 137 | CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "5000", "--reload", "--reload-dir", "/src/app"] 138 | 139 | HEALTHCHECK --interval=21s --timeout=3s --start-period=10s CMD curl --fail http://localhost:5000/health || exit 1 140 | -------------------------------------------------------------------------------- /docs/img/FK_Tasks_Users.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/docs/img/FK_Tasks_Users.png -------------------------------------------------------------------------------- /migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/migrations/__init__.py -------------------------------------------------------------------------------- /migrations/env.py: -------------------------------------------------------------------------------- 1 | import os 2 | from logging.config import fileConfig 3 | 4 | from alembic import context 5 | from dotenv import load_dotenv 6 | from sqlalchemy import MetaData, engine_from_config, pool, text 7 | 8 | from app.db import Base, metadata 9 | 10 | # this is the Alembic Config object, which provides 11 | # access to the values within the .ini file in use. 12 | config = context.config 13 | 14 | # Interpret the config file for Python logging. 15 | # This line sets up loggers basically. 16 | load_dotenv("./app/.env") 17 | section = config.config_ini_section 18 | config.set_section_option(section, "DB_USER", os.environ.get("DB_USERNAME")) 19 | config.set_section_option(section, "DB_PASS", os.environ.get("DB_PASSWORD")) 20 | 21 | if os.environ.get("APP_ENV") == "local": 22 | config.set_section_option(section, "DB_HOST", "localhost") 23 | else: 24 | config.set_section_option(section, "DB_HOST", os.environ.get("DB_HOST")) 25 | 26 | config.set_section_option(section, "DB_DATABASE", os.environ.get("DB_DATABASE")) 27 | 28 | 29 | if config.config_file_name is not None: 30 | fileConfig(config.config_file_name) 31 | 32 | # add your model's MetaData object here 33 | # for 'autogenerate' support 34 | # from myapp import mymodel 35 | # target_metadata = mymodel.Base.metadata 36 | target_metadata = None 37 | 38 | # other values from the config, defined by the needs of env.py, 39 | # can be acquired: 40 | # my_important_option = config.get_main_option("my_important_option") 41 | # ... etc. 42 | 43 | 44 | def run_migrations_offline() -> None: 45 | """Run migrations in 'offline' mode. 46 | 47 | This configures the context with just a URL 48 | and not an Engine, though an Engine is acceptable 49 | here as well. By skipping the Engine creation 50 | we don't even need a DBAPI to be available. 51 | 52 | Calls to context.execute() here emit the given string to the 53 | script output. 54 | 55 | """ 56 | url = config.get_main_option("sqlalchemy.url") 57 | context.configure( 58 | url=url, 59 | target_metadata=target_metadata, 60 | literal_binds=True, 61 | dialect_opts={"paramstyle": "named"}, 62 | ) 63 | 64 | with context.begin_transaction(): 65 | context.run_migrations() 66 | 67 | 68 | def run_migrations_online() -> None: 69 | """Run migrations in 'online' mode. 70 | 71 | In this scenario we need to create an Engine 72 | and associate a connection with the context. 
73 | 74 | """ 75 | connectable = engine_from_config( 76 | config.get_section(config.config_ini_section), 77 | prefix="sqlalchemy.", 78 | poolclass=pool.NullPool, 79 | ) 80 | current_tenant = context.get_x_argument(as_dictionary=True).get("tenant") 81 | dry_run = context.get_x_argument(as_dictionary=True).get("dry_run") 82 | 83 | with connectable.connect() as connection: 84 | connection.execute(text('set search_path to "%s"' % current_tenant)) 85 | connection.dialect.default_schema_name = current_tenant 86 | 87 | # commit so the search_path change sticks before migrations run 88 | connection.commit() 89 | 90 | context.configure( 91 | connection=connection, 92 | target_metadata=target_metadata, 93 | version_table_schema=current_tenant, 94 | ) 95 | 96 | with context.begin_transaction(): 97 | context.run_migrations() 98 | # if bool(dry_run) == True: 99 | # print("Dry-run succeeded; now rolling back transaction...") 100 | # transaction.rollback() 101 | 102 | 103 | if context.is_offline_mode(): 104 | run_migrations_offline() 105 | else: 106 | run_migrations_online() 107 | -------------------------------------------------------------------------------- /migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade() -> None: 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /migrations/versions/2022_07_18_1509-d6ba8c13303e_initial_shared.py: -------------------------------------------------------------------------------- 1 | """Initial public 2 | 3 | Revision ID: d6ba8c13303e 4 | Revises: 5 | Create Date: 2022-07-18 15:09:21.528588 6 | 7 | """ 8 | from alembic import op 9 | 10 | # revision identifiers, used by Alembic.
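`env.py` above pulls the target schema from alembic's `-x` arguments. A sketch of driving one tenant's upgrade programmatically, mirroring `alembic_upgrade_head` in app/service/tenants.py (the tenant name and database URL are illustrative):

```python
import argparse

from alembic import command
from alembic.config import Config

config = Config("alembic.ini")
config.set_main_option("script_location", "migrations")
config.set_main_option("sqlalchemy.url", "postgresql://postgres:postgres@localhost:5432/pg_db")
# env.py reads this via context.get_x_argument(as_dictionary=True)
config.cmd_opts = argparse.Namespace(x=["tenant=fake_tenant_1"])

command.upgrade(config, "head")
# CLI equivalent: alembic -x tenant=fake_tenant_1 upgrade head
```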
11 | revision = "d6ba8c13303e" 12 | down_revision = None 13 | branch_labels = None 14 | depends_on = None 15 | 16 | # op.execute("DROP SCHEMA IF EXISTS test_00000000000000000000000000000000;") 17 | # op.execute("CREATE SCHEMA IF NOT EXISTS test_00000000000000000000000000000000;") 18 | 19 | 20 | def upgrade() -> None: 21 | # tenants_statement = """ 22 | # CREATE TABLE IF NOT EXISTS public.tenants ( 23 | # id int GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, 24 | # uuid uuid UNIQUE, 25 | # name varchar(256) UNIQUE, 26 | # schema varchar(256) UNIQUE, 27 | # schema_header_id varchar(256) UNIQUE 28 | # ); 29 | 30 | # """ 31 | # op.execute(tenants_statement) 32 | 33 | public_users = """ 34 | CREATE TABLE IF NOT EXISTS public.public_users ( 35 | id int GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, 36 | uuid uuid UNIQUE NOT NULL, 37 | first_name varchar(100), 38 | last_name varchar(100), 39 | email varchar(256), 40 | phone varchar(16), 41 | password varchar(256), 42 | service_token varchar(256), 43 | service_token_valid_to TIMESTAMPTZ, 44 | is_active BOOLEAN NOT NULL, 45 | is_verified BOOLEAN NOT NULL, 46 | tos BOOLEAN NOT NULL, 47 | tenant_id varchar(64), 48 | tz varchar(64), 49 | lang varchar(8), 50 | created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), 51 | updated_at TIMESTAMPTZ, 52 | deleted_at TIMESTAMPTZ 53 | ); 54 | """ 55 | op.execute(public_users) 56 | 57 | public_users_index = """ 58 | CREATE INDEX IF NOT EXISTS public_users_tenant_id_idx ON public.public_users (tenant_id); 59 | """ 60 | op.execute(public_users_index) 61 | 62 | public_companies = """ 63 | CREATE TABLE IF NOT EXISTS public.public_companies ( 64 | id int GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, 65 | uuid uuid UNIQUE, 66 | name varchar(256), 67 | short_name varchar(256), 68 | nip varchar(16), 69 | city varchar(128), 70 | street varchar(256), 71 | country varchar(128), 72 | tenant_id varchar(64) UNIQUE, 73 | qr_id varchar(32) UNIQUE, 74 | created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), 75 | updated_at TIMESTAMPTZ, 76 | deleted_at TIMESTAMPTZ 77 | ); 78 | """ 79 | op.execute(public_companies) 80 | 81 | # query = sa.text("INSERT INTO public.tenants (name, schema, host) " "VALUES (:name, :schema, :host)").bindparams( 82 | # name="default", schema="tenant_default", schema_header_id="127.0.0.1" 83 | # ) 84 | # op.execute(query) 85 | 86 | 87 | def downgrade() -> None: 88 | # op.execute("DROP TABLE IF EXISTS public.public_users CASCADE;") 89 | # op.execute("DROP TABLE IF EXISTS public.tenants CASCADE;") 90 | # op.execute("DROP SCHEMA IF EXISTS public;") 91 | pass 92 | -------------------------------------------------------------------------------- /migrations/versions/2022_08_29_1620-80726328353e_add_tables_users_roles_permissions.py: -------------------------------------------------------------------------------- 1 | """Add Tables Users Roles Permissions 2 | 3 | Revision ID: 80726328353e 4 | Revises: d6ba8c13303e 5 | Create Date: 2022-08-29 16:20:28.084722 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "80726328353e" 14 | down_revision = "d6ba8c13303e" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "roles", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("is_custom", sa.BOOLEAN(), autoincrement=False, nullable=True), 25 | sa.Column("is_visible", sa.BOOLEAN(), autoincrement=False, nullable=True, default=True), 26 | sa.Column("is_system", sa.BOOLEAN(), autoincrement=False, nullable=True, default=False), 27 | sa.Column("role_name", sa.VARCHAR(length=100), autoincrement=False, nullable=False), 28 | sa.Column("role_title", sa.VARCHAR(length=100), autoincrement=False, nullable=False), 29 | sa.Column("role_description", sa.VARCHAR(length=100), autoincrement=False, nullable=True), 30 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 31 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 32 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 33 | sa.UniqueConstraint("role_title", "deleted_at", name="role_title_key", postgresql_nulls_not_distinct=True), 34 | sa.PrimaryKeyConstraint("id", name="roles_pkey"), 35 | schema=None, 36 | ) 37 | 38 | op.create_table( 39 | "users", 40 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 41 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 42 | sa.Column("email", sa.VARCHAR(length=256), autoincrement=False, nullable=False), 43 | sa.Column("phone", sa.VARCHAR(length=16), autoincrement=False, nullable=True), 44 | sa.Column("password", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 45 | sa.Column("tos", sa.BOOLEAN(), autoincrement=False, nullable=True), 46 | sa.Column("first_name", sa.VARCHAR(length=100), autoincrement=False, nullable=True), 47 | sa.Column("last_name", sa.VARCHAR(length=100), autoincrement=False, nullable=True), 48 | sa.Column("user_role_id", sa.INTEGER(), autoincrement=False, nullable=True), 49 | sa.Column("auth_token", sa.VARCHAR(length=128), autoincrement=False, nullable=True, unique=True), 50 | sa.Column("auth_token_valid_to", sa.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 51 | sa.Column("is_active", sa.BOOLEAN(), autoincrement=False, nullable=False), 52 | sa.Column("is_verified", sa.BOOLEAN(), autoincrement=False, nullable=False), 53 | sa.Column("is_visible", sa.BOOLEAN(), autoincrement=False, nullable=True, default=True), 54 | sa.Column("service_token", sa.VARCHAR(length=100), autoincrement=False, nullable=True, unique=True), 55 | sa.Column("service_token_valid_to", sa.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 56 | sa.Column("tz", sa.VARCHAR(length=64), autoincrement=False, nullable=False), 57 | sa.Column("lang", sa.VARCHAR(length=8), autoincrement=False, nullable=False), 58 | sa.Column("tenant_id", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 59 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 60 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 61 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 62 | sa.ForeignKeyConstraint(["user_role_id"], ["roles.id"], 
name="role_fk"), 63 | sa.PrimaryKeyConstraint("id", name="users_pkey"), 64 | schema=None, 65 | ) 66 | op.create_table( 67 | "permissions", 68 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 69 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 70 | sa.Column("name", sa.VARCHAR(length=100), autoincrement=False, nullable=True), 71 | sa.Column("title", sa.VARCHAR(length=100), autoincrement=False, nullable=True), 72 | sa.Column("description", sa.VARCHAR(length=100), autoincrement=False, nullable=True), 73 | sa.Column("is_visible", sa.BOOLEAN(), autoincrement=False, nullable=True, default=True), 74 | sa.Column("group", sa.VARCHAR(length=100), autoincrement=False, nullable=True), 75 | sa.PrimaryKeyConstraint("id", name="permissions_pkey"), 76 | sa.UniqueConstraint("uuid", name="permissions_uuid_key"), 77 | schema=None, 78 | ) 79 | 80 | op.create_table( 81 | "roles_permissions_link", 82 | sa.Column("role_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 83 | sa.Column("permission_id", sa.INTEGER(), autoincrement=False, nullable=False), 84 | sa.ForeignKeyConstraint(["permission_id"], ["permissions.id"], name="roles_permissions_link_fk"), 85 | sa.ForeignKeyConstraint(["role_id"], ["roles.id"], name="roles_permissions_link_fk_1"), 86 | sa.PrimaryKeyConstraint("role_id", "permission_id", name="roles_permissions_link_pkey"), 87 | schema=None, 88 | ) 89 | 90 | 91 | def downgrade() -> None: 92 | op.drop_constraint("roles_permissions_link_fk", "roles_permissions_link") 93 | op.drop_constraint("roles_permissions_link_fk_1", "roles_permissions_link") 94 | op.drop_constraint("roles_permissions_link_pkey", "roles_permissions_link") 95 | op.drop_table("permissions", schema=None) 96 | op.drop_table("users", schema=None) 97 | op.drop_table("roles", schema=None) 98 | op.drop_table("roles_permissions_link", schema=None) 99 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1750-055684700394_add_users_group.py: -------------------------------------------------------------------------------- 1 | """add_users_group 2 | 3 | Revision ID: 055684700394 4 | Revises: 338496320c4d 5 | Create Date: 2023-04-26 17:50:19.323100 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "055684700394" 14 | down_revision = "338496320c4d" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "users_groups", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("name", sa.VARCHAR(length=256), unique=True, autoincrement=False, nullable=False), 25 | sa.Column("description", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 26 | sa.Column("symbol", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 27 | sa.Column("supervisor_uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=True), 28 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 29 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 30 | sa.PrimaryKeyConstraint("id", name="users_groups_pkey"), 31 | schema=None, 32 | ) 33 | 34 | op.create_table( 35 | "users_groups_link", 36 | sa.Column("user_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 37 | sa.Column("user_group_id", sa.INTEGER(), autoincrement=False, nullable=False), 38 | # sa.Column("user_is_supervisor", sa.BOOLEAN(), autoincrement=False, nullable=True), 39 | sa.ForeignKeyConstraint(["user_group_id"], ["users_groups.id"], name="users_groups_link_fk"), 40 | sa.ForeignKeyConstraint(["user_id"], ["users.id"], name="users_groups_link_fk_1"), 41 | sa.PrimaryKeyConstraint("user_id", "user_group_id", name="users_groups_link_pkey"), 42 | schema=None, 43 | ) 44 | 45 | 46 | def downgrade() -> None: 47 | op.drop_constraint("users_groups_link_fk", "users_groups_link") 48 | op.drop_constraint("users_groups_link_fk_1", "users_groups_link") 49 | op.drop_constraint("users_groups_link_pkey", "users_groups_link") 50 | op.drop_table("users_groups", schema=None) 51 | op.drop_table("users_groups_link", schema=None) 52 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1752-a1b0cf6b2fbb_add_settings_table.py: -------------------------------------------------------------------------------- 1 | """add_settings_table 2 | 3 | Revision ID: a1b0cf6b2fbb 4 | Revises: 055684700394 5 | Create Date: 2023-04-26 17:52:02.820335 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "a1b0cf6b2fbb" 14 | down_revision = "055684700394" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "settings", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("name", sa.VARCHAR(length=256), unique=True, autoincrement=False, nullable=True, index=True), 24 | sa.Column("value", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 25 | sa.Column("value_type", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 26 | sa.Column("prev_value", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 27 | sa.Column("description", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 28 | sa.Column("updated_by", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 29 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 30 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 31 | sa.PrimaryKeyConstraint("id", name="settings_pkey"), 32 | schema=None, 33 | ) 34 | 35 | op.create_table( 36 | "settings_users", 37 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 38 | sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=True), 39 | sa.Column("name", sa.VARCHAR(length=256), unique=True, autoincrement=False, nullable=True, index=True), 40 | sa.Column("value", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 41 | sa.Column("value_type", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 42 | sa.Column("prev_value", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 43 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 44 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 45 | sa.PrimaryKeyConstraint("id", name="settings_users_pkey"), 46 | schema=None, 47 | ) 48 | 49 | op.create_table( 50 | "settings_notifications", 51 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 52 | sa.Column("user_uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=True), 53 | sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=True), 54 | sa.Column("sms_notification_level", sa.VARCHAR(length=128), autoincrement=False, nullable=True), 55 | sa.Column("email_notification_level", sa.VARCHAR(length=128), autoincrement=False, nullable=True), 56 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 57 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 58 | sa.ForeignKeyConstraint(["user_id"], ["users.id"], name="user_notification_fk"), 59 | sa.PrimaryKeyConstraint("id", name="settings_notifications_pkey"), 60 | schema=None, 61 | ) 62 | 63 | # StringValue 64 | # IntValue 65 | # DateValue 66 | # DecimalValue 67 | 68 | 69 | def downgrade() -> None: 70 | op.drop_table("settings", schema=None) 71 | op.drop_table("settings_users", schema=None) 72 | op.drop_table("settings_notifications", schema=None) 73 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1753-b2e42964ad3f_add_files_table.py: -------------------------------------------------------------------------------- 1 | """add_files_table 2 | 3 | Revision ID: b2e42964ad3f 4 | Revises: a1b0cf6b2fbb 5 | Create Date: 2023-04-26 17:53:19.260665 6 |
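The settings tables above store every value as VARCHAR next to a `value_type` discriminator (the trailing comment hints at StringValue/IntValue/DateValue/DecimalValue). A sketch of casting a row back to a native type on read; the mapping is illustrative:

```python
from datetime import date
from decimal import Decimal

CASTS = {
    "StringValue": str,
    "IntValue": int,
    "DecimalValue": Decimal,
    "DateValue": date.fromisoformat,
}


def read_setting(value: str, value_type: str):
    # fall back to the raw string for unknown discriminators
    return CASTS.get(value_type, str)(value)


assert read_setting("42", "IntValue") == 42
assert read_setting("2023-04-26", "DateValue") == date(2023, 4, 26)
```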
7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "b2e42964ad3f" 14 | down_revision = "a1b0cf6b2fbb" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "files", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=True), 25 | sa.Column("file_name", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 26 | sa.Column("file_description", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 27 | sa.Column("extension", sa.VARCHAR(length=8), autoincrement=False, nullable=True), 28 | sa.Column("mimetype", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 29 | sa.Column("size", sa.INTEGER(), autoincrement=False, nullable=True), 30 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 31 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 32 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 33 | sa.PrimaryKeyConstraint("id", name="files_pkey"), 34 | schema=None, 35 | ) 36 | 37 | op.create_foreign_key("files_users_fk", "files", "users", ["owner_id"], ["id"]) 38 | 39 | 40 | def downgrade() -> None: 41 | op.drop_table("files", schema=None) 42 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1754-38e5957fa66f_add_items_table.py: -------------------------------------------------------------------------------- 1 | """add_items_table 2 | 3 | Revision ID: 38e5957fa66f 4 | Revises: b2e42964ad3f 5 | Create Date: 2023-04-26 17:54:59.069712 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "38e5957fa66f" 14 | down_revision = "b2e42964ad3f" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "items", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=True), 25 | sa.Column("name", sa.VARCHAR(length=512), unique=False, nullable=False), 26 | sa.Column("symbol", sa.VARCHAR(length=64), unique=False, nullable=True), 27 | sa.Column("summary", sa.VARCHAR(length=1024), unique=False, nullable=False), 28 | sa.Column("text", postgresql.TEXT, autoincrement=False, nullable=True), 29 | sa.Column("text_json", postgresql.JSONB, autoincrement=False, nullable=True), 30 | sa.Column("qr_code_id", sa.INTEGER(), autoincrement=False, nullable=True), 31 | sa.Column("public_access", sa.BOOLEAN(), autoincrement=False, nullable=True), 32 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 33 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 34 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 35 | sa.ForeignKeyConstraint(["author_id"], ["users.id"], name="item_user_link_fk"), 36 | # sa.ForeignKeyConstraint(["qr_code_id"], ["qr_codes.id"], name="qr_code_fk"), 37 | sa.PrimaryKeyConstraint("id", name="items_pkey"), 38 | schema=None, 39 | ) 40 | 41 | op.create_foreign_key("items_users_fk", "items", "users", ["author_id"], ["id"]) 42 | 43 | op.create_table( 44 | "files_items_link", 45 | sa.Column("item_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 46 | sa.Column("file_id", sa.INTEGER(), autoincrement=False, nullable=False), 47 | sa.ForeignKeyConstraint(["file_id"], ["files.id"], name="files_items_link_fk"), 48 | sa.ForeignKeyConstraint(["item_id"], ["items.id"], name="files_items_link_fk_1"), 49 | sa.PrimaryKeyConstraint("item_id", "file_id", name="files_items_link_pkey"), 50 | schema=None, 51 | ) 52 | 53 | op.create_table( 54 | "users_items_link", 55 | sa.Column("item_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 56 | sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=False), 57 | sa.ForeignKeyConstraint(["user_id"], ["users.id"], name="users_items_link_fk"), 58 | sa.ForeignKeyConstraint(["item_id"], ["items.id"], name="users_items_link_fk_1"), 59 | sa.PrimaryKeyConstraint("item_id", "user_id", name="users_items_link_pkey"), 60 | schema=None, 61 | ) 62 | 63 | 64 | def downgrade() -> None: 65 | op.drop_constraint("author_id", "items") 66 | # op.drop_constraint("qr_code_id", "items") 67 | 68 | op.drop_constraint("files_items_link_fk", "files_items_link") 69 | op.drop_constraint("files_items_link_fk_1", "files_items_link") 70 | op.drop_constraint("files_items_link_pkey", "files_items_link") 71 | 72 | op.drop_constraint("users_items_link_fk", "users_items_link") 73 | op.drop_constraint("users_items_link_fk_1", "users_items_link") 74 | op.drop_constraint("users_items_link_pkey", "users_items_link") 75 | 76 | op.drop_table("items", schema=None) 77 | op.drop_table("files_items_link", schema=None) 78 | op.drop_table("users_items_link", schema=None) 79 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1755-40bde431a56f_add_guides_table.py: 
-------------------------------------------------------------------------------- 1 | """add_guides_table 2 | 3 | Revision ID: 40bde431a56f 4 | Revises: 38e5957fa66f 5 | Create Date: 2023-04-26 17:55:47.920796 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "40bde431a56f" 14 | down_revision = "38e5957fa66f" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "guides", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=True), 25 | sa.Column("name", sa.VARCHAR(length=512), unique=False, autoincrement=False, nullable=False), 26 | sa.Column("text", sa.TEXT, autoincrement=False, nullable=True), 27 | sa.Column("text_json", postgresql.JSONB, autoincrement=False, nullable=True), 28 | sa.Column("qr_code_id", sa.INTEGER(), autoincrement=False, nullable=True), 29 | sa.Column("type", sa.VARCHAR(length=32), unique=False, autoincrement=False, nullable=True), 30 | sa.Column("is_public", sa.BOOLEAN(), autoincrement=False, nullable=True), 31 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 32 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 33 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 34 | sa.ForeignKeyConstraint(["author_id"], ["users.id"], name="guide_user_link_fk"), 35 | sa.PrimaryKeyConstraint("id", name="guides_pkey"), 36 | schema=None, 37 | ) 38 | 39 | op.create_foreign_key("guides_users_fk", "guides", "users", ["author_id"], ["id"]) 40 | 41 | op.create_table( 42 | "files_guides_link", 43 | sa.Column("guide_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 44 | sa.Column("file_id", sa.INTEGER(), autoincrement=False, nullable=False), 45 | sa.ForeignKeyConstraint(["file_id"], ["files.id"], name="files_guides_link_fk"), 46 | sa.ForeignKeyConstraint(["guide_id"], ["guides.id"], name="files_guides_link_fk_1"), 47 | sa.PrimaryKeyConstraint("guide_id", "file_id", name="files_guides_link_pkey"), 48 | schema=None, 49 | ) 50 | 51 | op.create_table( 52 | "items_guides_link", 53 | sa.Column("guide_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 54 | sa.Column("item_id", sa.INTEGER(), autoincrement=False, nullable=False), 55 | sa.ForeignKeyConstraint(["item_id"], ["items.id"], name="items_guides_link_fk"), 56 | sa.ForeignKeyConstraint(["guide_id"], ["guides.id"], name="items_guides_link_fk_1"), 57 | sa.PrimaryKeyConstraint("guide_id", "item_id", name="items_guides_link_pkey"), 58 | schema=None, 59 | ) 60 | 61 | 62 | def downgrade() -> None: 63 | op.drop_constraint("guide_user_link_fk", "guides") 64 | 65 | op.drop_constraint("files_guides_link_fk", "files_guides_link") 66 | op.drop_constraint("files_guides_link_fk_1", "files_guides_link") 67 | op.drop_constraint("files_guides_link_pkey", "files_guides_link") 68 | 69 | op.drop_constraint("items_guides_link_fk", "items_guides_link") 70 | op.drop_constraint("items_guides_link_fk_1", "items_guides_link") 71 | op.drop_constraint("items_guides_link_pkey", "items_guides_link") 72 | op.drop_table("items_guides_link", schema=None) 73 | 74 | op.drop_table("guides", 
schema=None) 75 | op.drop_table("files_guides_link", schema=None) 76 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1757-7283939d25ad_add_qr_code_table.py: -------------------------------------------------------------------------------- 1 | """add_qr_code_table 2 | 3 | Revision ID: 7283939d25ad 4 | Revises: 40bde431a56f 5 | Create Date: 2023-04-26 17:57:32.574715 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "7283939d25ad" 14 | down_revision = "40bde431a56f" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "qr_codes", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("resource", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 25 | sa.Column("resource_uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=True), 26 | sa.Column("qr_code_id", sa.VARCHAR(length=32), autoincrement=False, nullable=True), 27 | sa.Column("qr_code_full_id", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 28 | sa.Column("public_access", sa.BOOLEAN(), autoincrement=False, nullable=True), 29 | sa.Column("ecc", sa.CHAR(length=1), autoincrement=False, nullable=True), 30 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 31 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 32 | sa.PrimaryKeyConstraint("id", name="qr_codes_pkey"), 33 | schema=None, 34 | ) 35 | 36 | op.create_foreign_key("qr_code_items_fk", "items", "qr_codes", ["qr_code_id"], ["id"]) 37 | op.create_foreign_key("qr_code_guides_fk", "guides", "qr_codes", ["qr_code_id"], ["id"]) 38 | 39 | 40 | def downgrade() -> None: 41 | op.drop_constraint("qr_code_items_fk", "items") 42 | op.drop_constraint("qr_code_guides_fk", "guides") 43 | op.drop_table("qr_codes", schema=None) 44 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1759-249aba91b072_add_issues_table.py: -------------------------------------------------------------------------------- 1 | """add_issues_table 2 | 3 | Revision ID: 249aba91b072 4 | Revises: 7283939d25ad 5 | Create Date: 2023-04-26 17:59:32.188568 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "249aba91b072" 14 | down_revision = "7283939d25ad" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "issues", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=True), 25 | sa.Column("author_name", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 26 | sa.Column("item_id", sa.INTEGER(), autoincrement=False, nullable=True), 27 | sa.Column("name", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 28 | sa.Column("symbol", sa.VARCHAR(length=32), unique=True, nullable=False), 29 | sa.Column("summary", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 30 | sa.Column("text", sa.TEXT, autoincrement=False, nullable=True), 31 | sa.Column("text_json", postgresql.JSONB, autoincrement=False, nullable=True), 32 | sa.Column("color", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 33 | sa.Column("priority", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 34 | sa.Column("status", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 35 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 36 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 37 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 38 | sa.ForeignKeyConstraint(["item_id"], ["items.id"], name="issue_item_link_fk"), 39 | sa.ForeignKeyConstraint(["author_id"], ["users.id"], name="issue_user_link_fk_1"), 40 | sa.PrimaryKeyConstraint("id", name="issues_pkey"), 41 | schema=None, 42 | ) 43 | 44 | op.create_table( 45 | "files_issues_link", 46 | sa.Column("issue_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 47 | sa.Column("file_id", sa.INTEGER(), autoincrement=False, nullable=False), 48 | sa.ForeignKeyConstraint(["file_id"], ["files.id"], name="files_issues_link_fk"), 49 | sa.ForeignKeyConstraint(["issue_id"], ["issues.id"], name="files_issues_link_fk_1"), 50 | sa.PrimaryKeyConstraint("issue_id", "file_id", name="files_issues_link_pkey"), 51 | schema=None, 52 | ) 53 | 54 | op.create_table( 55 | "users_issues_link", 56 | sa.Column("issue_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 57 | sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=False), 58 | sa.ForeignKeyConstraint(["user_id"], ["users.id"], name="users_issues_link_fk"), 59 | sa.ForeignKeyConstraint(["issue_id"], ["issues.id"], name="users_issues_link_fk_1"), 60 | sa.PrimaryKeyConstraint("issue_id", "user_id", name="users_issues_link_pkey"), 61 | schema=None, 62 | ) 63 | 64 | 65 | def downgrade() -> None: 66 | op.drop_constraint("issue_item_link_fk", "issues") 67 | op.drop_constraint("issue_user_link_fk_1", "issues") 68 | 69 | op.drop_constraint("users_issues_link_fk", "users_issues_link") 70 | op.drop_constraint("users_issues_link_fk_1", "users_issues_link") 71 | op.drop_constraint("users_issues_link_pkey", "users_issues_link") 72 | 73 | op.drop_constraint("files_issues_link_fk", "files_issues_link") 74 | op.drop_constraint("files_issues_link_fk_1", "files_issues_link") 75 | op.drop_constraint("files_issues_link_pkey", "files_issues_link") 76 | 77 | op.drop_table("issues", schema=None) 78 | 79 | op.drop_table("files_issues_link", 
schema=None) 80 | op.drop_table("users_issues_link", schema=None) 81 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1800-8899525de86a_add_events_table.py: -------------------------------------------------------------------------------- 1 | """add_events_table 2 | 3 | Revision ID: 8899525de86a 4 | Revises: 249aba91b072 5 | Create Date: 2023-04-26 18:00:43.460927 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "8899525de86a" 14 | down_revision = "249aba91b072" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "events", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("action", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 25 | sa.Column("action_from", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 26 | sa.Column("action_to", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 27 | sa.Column("description", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 28 | sa.Column("internal_value", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 29 | sa.Column("resource", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 30 | sa.Column("resource_id", sa.INTEGER(), autoincrement=False, nullable=True), 31 | sa.Column("resource_uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=True), 32 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=True), 33 | # sa.Column("author_uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=True), 34 | # sa.Column("author_name", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 35 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 36 | sa.ForeignKeyConstraint(["author_id"], ["users.id"], name="event_user_link_fk"), 37 | sa.PrimaryKeyConstraint("id", name="events_pkey"), 38 | schema=None, 39 | ) 40 | 41 | op.create_table( 42 | "events_summary", 43 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 44 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False), 45 | sa.Column("resource", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 46 | sa.Column("resource_uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=True), 47 | sa.Column("action", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 48 | sa.Column("date_from", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 49 | sa.Column("date_to", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 50 | sa.Column("duration", sa.INTEGER(), autoincrement=False, nullable=True), 51 | sa.Column("internal_value", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 52 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 53 | sa.PrimaryKeyConstraint("id", name="events_summary_pkey"), 54 | schema=None, 55 | ) 56 | 57 | 58 | def downgrade() -> None: 59 | op.drop_constraint("event_user_link_fk", "events") 60 | op.drop_table("events", schema=None) 61 | op.drop_table("events_summary", schema=None) 62 | 
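A note on the downgrade ordering these revisions rely on: PostgreSQL refuses to drop a table while a surviving table still holds a foreign key pointing at it, so inbound constraints must be dropped first; constraints defined on the dropped table itself disappear together with it (which is why the table drops above succeed after the named drop_constraint calls). A minimal sketch of the pattern, using hypothetical "parent"/"child" names rather than tables from this project:

def downgrade() -> None:
    # "child" survives this downgrade; its FK into "parent" must be
    # removed first, or DROP TABLE "parent" fails with a dependency error.
    op.drop_constraint("child_parent_fk", "child")
    op.drop_table("parent", schema=None)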
-------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1801-3e3981bb512d_add_tags_table.py: -------------------------------------------------------------------------------- 1 | """add_tags_table 2 | 3 | Revision ID: 3e3981bb512d 4 | Revises: 8899525de86a 5 | Create Date: 2023-04-26 18:01:25.632291 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "3e3981bb512d" 14 | down_revision = "8899525de86a" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "tags", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("name", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 25 | sa.Column("color", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 26 | sa.Column("icon", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 27 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=False), 28 | sa.Column("is_hidden", sa.BOOLEAN(), autoincrement=False, nullable=True), 29 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 30 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 31 | sa.PrimaryKeyConstraint("id", name="tags_pkey"), 32 | sa.UniqueConstraint("name", "deleted_at", name="tag_name_key"), 33 | schema=None, 34 | ) 35 | 36 | op.create_table( 37 | "tags_issues_link", 38 | sa.Column("issue_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 39 | sa.Column("tag_id", sa.INTEGER(), autoincrement=False, nullable=False), 40 | sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=True), 41 | sa.ForeignKeyConstraint(["tag_id"], ["tags.id"], name="tags_issues_link_fk"), 42 | sa.ForeignKeyConstraint(["issue_id"], ["issues.id"], name="tags_issues_link_fk_1"), 43 | sa.PrimaryKeyConstraint("issue_id", "tag_id", name="tags_issues_link_pkey"), 44 | schema=None, 45 | ) 46 | 47 | 48 | def downgrade() -> None: 49 | op.drop_constraint("tags_issues_link_fk", "tags_issues_link") 50 | op.drop_constraint("tags_issues_link_fk_1", "tags_issues_link") 51 | op.drop_constraint("tags_issues_link_pkey", "tags_issues_link") 52 | 53 | op.drop_table("tags", schema=None) 54 | op.drop_table("tags_issues_link", schema=None) 55 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_26_1814-debb10a33f57_add_videos_table.py: -------------------------------------------------------------------------------- 1 | """add_videos_table 2 | 3 | Revision ID: debb10a33f57 4 | Revises: 3e3981bb512d 5 | Create Date: 2023-04-26 18:14:19.648514 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "debb10a33f57" 14 | down_revision = "3e3981bb512d" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "videos", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=False), 25 | sa.Column("video_id", sa.VARCHAR(length=256), autoincrement=False, nullable=True), 26 | sa.Column("video_json", postgresql.JSONB, autoincrement=False, nullable=True), 27 | sa.Column("name", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 28 | sa.Column("duration", sa.INTEGER(), autoincrement=False, nullable=True), 29 | sa.Column("size", sa.INTEGER(), autoincrement=False, nullable=True), 30 | sa.Column("source", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 31 | sa.Column("url", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 32 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 33 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 34 | sa.PrimaryKeyConstraint("id", name="videos_pkey"), 35 | sa.UniqueConstraint("name", "deleted_at", name="video_name_key"), 36 | schema=None, 37 | ) 38 | 39 | op.create_table( 40 | "videos_guides_link", 41 | sa.Column("guide_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 42 | sa.Column("video_id", sa.INTEGER(), autoincrement=False, nullable=False), 43 | sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=True), 44 | sa.ForeignKeyConstraint(["video_id"], ["videos.id"], name="videos_guides_link_fk"), 45 | sa.ForeignKeyConstraint(["guide_id"], ["guides.id"], name="videos_guides_link_fk_1"), 46 | sa.PrimaryKeyConstraint("guide_id", "video_id", name="videos_guides_link_pkey"), 47 | schema=None, 48 | ) 49 | 50 | 51 | def downgrade() -> None: 52 | op.drop_constraint("videos_guides_link_fk", "videos_guides_link") 53 | op.drop_constraint("videos_guides_link_fk_1", "videos_guides_link") 54 | op.drop_constraint("videos_guides_link_pkey", "videos_guides_link") 55 | 56 | op.drop_table("videos", schema=None) 57 | op.drop_table("videos_guides_link", schema=None) 58 | -------------------------------------------------------------------------------- /migrations/versions/2023_04_27_1418-cec65e1bd0de_add_parts_table.py: -------------------------------------------------------------------------------- 1 | """Add parts table 2 | 3 | Revision ID: cec65e1bd0de 4 | Revises: debb10a33f57 5 | Create Date: 2023-04-27 14:18:03.643717 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "cec65e1bd0de" 14 | down_revision = "debb10a33f57" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "parts_used", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=False, index=True), 24 | sa.Column("item_id", sa.INTEGER(), autoincrement=False, nullable=True), 25 | sa.Column("issue_id", sa.INTEGER(), autoincrement=False, nullable=False), 26 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=False), 27 | sa.Column("name", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 28 | sa.Column("symbol", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 29 | sa.Column("description", sa.VARCHAR(length=512), autoincrement=False, nullable=True), 30 | sa.Column("price", sa.DECIMAL(10, 2), autoincrement=False, nullable=True), 31 | sa.Column("quantity", sa.DECIMAL(4, 2), autoincrement=False, nullable=True), 32 | sa.Column("unit", sa.VARCHAR(length=32), autoincrement=False, nullable=True), 33 | sa.Column("value", sa.DECIMAL(10, 2), autoincrement=False, nullable=True), 34 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 35 | sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 36 | sa.Column("deleted_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 37 | sa.PrimaryKeyConstraint("id", name="parts_used_pkey"), 38 | sa.UniqueConstraint("name", "deleted_at", name="part_used_name_key"), 39 | schema=None, 40 | ) 41 | 42 | op.create_foreign_key("parts_used_items_fk", "parts_used", "items", ["item_id"], ["id"]) 43 | op.create_foreign_key("parts_used_issues_fk", "parts_used", "issues", ["issue_id"], ["id"]) 44 | op.create_foreign_key("parts_used_users_fk", "parts_used", "users", ["author_id"], ["id"]) 45 | 46 | # op.create_table( 47 | # "parts_used_issues_link", 48 | # sa.Column("issue_id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 49 | # sa.Column("part_used_id", sa.INTEGER(), autoincrement=False, nullable=False), 50 | # # sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=True), 51 | # sa.ForeignKeyConstraint(["part_used_id"], ["parts_used.id"], name="parts_used_issues_link_fk"), 52 | # sa.ForeignKeyConstraint(["issue_id"], ["issues.id"], name="parts_used_issues_link_fk_1"), 53 | # sa.PrimaryKeyConstraint("issue_id", "part_used_id", name="parts_used_issues_link_pkey"), 54 | # schema=None, 55 | # ) 56 | 57 | 58 | def downgrade() -> None: 59 | # op.drop_constraint("parts_used_issues_link_fk", "parts_used_issues_link") 60 | # op.drop_constraint("parts_used_issues_link_fk_1", "parts_used_issues_link") 61 | # op.drop_constraint("parts_used_issues_link_pkey", "parts_used_issues_link") 62 | 63 | op.drop_table("parts_used", schema=None) 64 | # op.drop_table("parts_used_issues_link", schema=None) 65 | -------------------------------------------------------------------------------- /migrations/versions/2023_07_10_1708-13be30248d7d_add_qr_code_scans.py: -------------------------------------------------------------------------------- 1 | """add QR Code Scans 2 | 3 | Revision ID: 13be30248d7d 4 | Revises: cec65e1bd0de 5 | Create Date: 2023-07-10 17:08:58.348097 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "13be30248d7d" 14 | down_revision = "cec65e1bd0de" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.create_table( 21 | "qr_code_scans", 22 | sa.Column("id", sa.INTEGER(), sa.Identity(), autoincrement=True, nullable=False), 23 | sa.Column("qr_code_id", sa.VARCHAR(length=32), autoincrement=False, nullable=True), 24 | sa.Column("resource", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 25 | sa.Column("resource_uuid", postgresql.UUID(as_uuid=True), autoincrement=False, nullable=True), 26 | sa.Column("author_id", sa.INTEGER(), autoincrement=False, nullable=True), 27 | sa.Column("ua", sa.TEXT(), autoincrement=False, nullable=True), 28 | sa.Column("browser", sa.VARCHAR(length=128), autoincrement=False, nullable=True), 29 | sa.Column("system", sa.VARCHAR(length=128), autoincrement=False, nullable=True), 30 | sa.Column("device", sa.VARCHAR(length=128), autoincrement=False, nullable=True), 31 | sa.Column("ip", sa.VARCHAR(length=64), autoincrement=False, nullable=True), 32 | sa.Column("lang", sa.VARCHAR(length=8), autoincrement=False, nullable=True), 33 | sa.Column("country", sa.VARCHAR(length=128), autoincrement=False, nullable=True), 34 | sa.Column("city", sa.VARCHAR(length=128), autoincrement=False, nullable=True), 35 | sa.Column("lat", sa.DECIMAL(10, 8), autoincrement=False, nullable=True), 36 | sa.Column("lon", sa.DECIMAL(10, 8), autoincrement=False, nullable=True), 37 | sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), 38 | sa.PrimaryKeyConstraint("id", name="qr_code_scans_pkey"), 39 | schema=None, 40 | ) 41 | 42 | 43 | def downgrade() -> None: 44 | pass 45 | -------------------------------------------------------------------------------- /prepush.sh: -------------------------------------------------------------------------------- 1 | DIR="$( cd "$( dirname "$0" )" && pwd )" 2 | 3 | echo "export requirements.txt" 4 | poetry export -o requirements.txt --without-hashes 5 | poetry export -o requirements-dev.txt --with dev --without-hashes 6 | 7 | echo "ruff" 8 | ruff check app --fix 9 | ruff format app 10 | echo "truncate log file" 11 | : > $DIR/app/logs/logs.log 12 | echo "OK" 13 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "fastapi_docker" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Your Name "] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.10" 9 | fastapi = { extras = ["all"], version = "^0.110" } 10 | SQLAlchemy = "^2.0" 11 | alembic = "^1.12.0" 12 | uvicorn = ">=0.23.1" 13 | gunicorn = "^21.2.0" 14 | python-dotenv = "^1.0.0" 15 | loguru = "^0.7.0" 16 | typeguard = "^4.1.0" 17 | Faker = "^16" 18 | email-validator = ">=2.0.0" 19 | requests = "^2.28.1" 20 | typer = { extras = ["all"], version = "^0.12.0" } 21 | disposable-email-domains = ">=0.0.94" 22 | passlib = { extras = ["argon2"], version = "^1.7.4" } 23 | pytz = "^2024.1" 24 | langcodes = "^3.3.0" 25 | python-stdnum = "^1.18" 26 | Unidecode = "^1.3" 27 | pendulum = "^2.1.2" 28 | sentry-sdk = { extras = ["fastapi"], version = "^1.31.0" } 29 | user-agents = "^2.2.0" 30 | boto3 = "^1.28.00" 31 | fastapi-pagination = { extras = ["sqlalchemy"], version = "^0.12.0" } 32 | pydantic-factories = "^1.4.1" 33 | RegonAPI = "^1.3.1" 34 | APScheduler = "4.0.0a2" 35 | sqlalchemy-easy-profile = "^1.3" 36 | pyvieser = "^0.0.4" 37 | pyotp = "^2.9.0" 38 | 
httpx = "^0.27" 39 | psycopg = { extras = ["binary"], version = "^3.1" } 40 | pandas = "^2.1.0" 41 | fastapi-babel = "^0.0.9" 42 | 43 | [tool.poetry.group.dev.dependencies] 44 | pre-commit = "^3.6.2" 45 | pytest = "^7.2" 46 | pytest-cov = "^4.0.0" 47 | pytest-pretty = "^1.2" 48 | ruff = ">=0.0.249" 49 | bump-pydantic = "^0.6.1" 50 | pymender = "^0.2.0" 51 | setuptools = "^68.0.0" 52 | moto = "^4.1.14" 53 | 54 | [build-system] 55 | requires = ["poetry-core>=1.0.0"] 56 | build-backend = "poetry.core.masonry.api" 57 | 58 | [tool.coverage.run] 59 | parallel = false 60 | source = [ 61 | "app", 62 | ] 63 | omit = [ 64 | # omit anything in a .local directory anywhere 65 | '*/.local/*', 66 | '__init__.py', 67 | 'tests/*', 68 | '*/tests/*', 69 | # omit anything in a .venv directory anywhere 70 | '.venv/*' 71 | ] 72 | 73 | [tool.coverage.report] 74 | skip_empty = true 75 | precision = 2 76 | exclude_lines = [ 77 | "pragma: no cover", 78 | "@overload", 79 | 'if __name__ == "__main__":', 80 | "if TYPE_CHECKING:", 81 | ] 82 | 83 | 84 | [tool.ruff.lint] 85 | select = [ 86 | "ASYNC", 87 | "E", # pycodestyle errors 88 | "W", # pycodestyle warnings 89 | "F", # pyflakes 90 | "I", # isort 91 | "C", # flake8-comprehensions 92 | "B", # flake8-bugbear 93 | 'Q', 94 | 'RUF100', 95 | 'C90', # is too complex ({complexity} > {max_complexity}) 96 | 'UP', # upgrade syntax for newer versions of the language. 97 | ] 98 | ignore = [ 99 | # "E501", # line too long, handled by black 100 | "B008", # do not perform function calls in argument defaults 101 | # "C901", # too complex 102 | "W191", # indentation contains tabs 103 | ] 104 | #flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'} 105 | 106 | [tool.ruff] 107 | line-length = 120 108 | target-version = "py310" 109 | exclude = ["migrations"] 110 | #flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'} 111 | 112 | [tool.ruff.format] 113 | #quote-style = "single" 114 | #indent-style = "tab" 115 | 116 | [tool.ruff.lint.isort] 117 | combine-as-imports = true 118 | known-third-party = ["fastapi", "pydantic", "starlette"] 119 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | log_cli = True 3 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/tests/__init__.py -------------------------------------------------------------------------------- /tests/api_responses/GUS/gus_get_by_nip.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "DataZakonczeniaDzialalnosci":"", 4 | "Gmina":"Praga-Północ", 5 | "KodPocztowy":"03-301", 6 | "Miejscowosc":"Warszawa", 7 | "MiejscowoscPoczty":"Warszawa", 8 | "Nazwa":"CD PROJEKT SPÓŁKA AKCYJNA", 9 | "Nip":"7342867148", 10 | "NrLokalu":"", 11 | "NrNieruchomosci":"74", 12 | "Powiat":"m. st. Warszawa", 13 | "Regon":"492707333", 14 | "SilosID":"6", 15 | "StatusNip":"", 16 | "Typ":"P", 17 | "Ulica":"ul. 
Test-Wilcza", 18 | "Wojewodztwo":"MAZOWIECKIE" 19 | } 20 | ] 21 | -------------------------------------------------------------------------------- /tests/api_responses/GUS/gus_get_by_nip_no_data_found.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ErrorCode":"4", 4 | "ErrorMessageEn":"No data found for the specified search criteria.", 5 | "ErrorMessagePl":"Nie znaleziono podmiotu dla podanych kryteriów wyszukiwania.", 6 | "Nip":"9542752600" 7 | } 8 | ] 9 | -------------------------------------------------------------------------------- /tests/api_responses/rejestr_io_get_by_nip.json: -------------------------------------------------------------------------------- 1 | { 2 | "liczba_wszystkich_wynikow": 1, 3 | "wyniki": [ 4 | { 5 | "adres": { 6 | "kod": "87-100", 7 | "miejscowosc": "Toruń", 8 | "nr_domu": "123B", 9 | "panstwo": "Polska", 10 | "poczta": "Toruń", 11 | "teryt": { 12 | "gmina": "046301", 13 | "powiat": "0463", 14 | "wojewodztwo": "04" 15 | }, 16 | "ulica": "Łódzka" 17 | }, 18 | "glowna_osoba": { 19 | "id": "2823589", 20 | "imiona_i_nazwisko": "Daria Maria Garnicz Garnicka" 21 | }, 22 | "id": 876518, 23 | "krs_powiazania_liczby": { 24 | "aktualne": 3, 25 | "przeszle": 4 26 | }, 27 | "krs_rejestry": { 28 | "rejestr_przedsiebiorcow_data_wpisu": "2020-12-30" 29 | }, 30 | "krs_wpisy": { 31 | "najnowszy_data": "2022-06-23", 32 | "najnowszy_numer": 5, 33 | "pierwszy_data": "2020-12-30" 34 | }, 35 | "metadane": { 36 | "krs_odpis_synchronizacja_data_czas": "2022-06-23 10:10:14" 37 | }, 38 | "nazwy": { 39 | "pelna": "V2 SPÓŁKA Z OGRANICZONĄ ODPOWIEDZIALNOŚCIĄ", 40 | "skrocona": "V2" 41 | }, 42 | "numery": { 43 | "krs": "0000876518", 44 | "nip": "9562365881", 45 | "regon": "387816523" 46 | }, 47 | "stan": { 48 | "czy_dofinansowana_przez_ue": false, 49 | "czy_jest_na_gpw": false, 50 | "czy_otrzymala_pomoc_publiczna": false, 51 | "czy_pozytku_publicznego": false, 52 | "czy_spolka_skarbu_panstwa": false, 53 | "czy_wykreslona": false, 54 | "forma_prawna": "SPÓŁKA Z OGRANICZONĄ ODPOWIEDZIALNOŚCIĄ", 55 | "pkd_przewazajace_dzial": "Działalność usługowa w zakresie informacji", 56 | "w_likwidacji": false, 57 | "w_upadlosci": false, 58 | "w_zawieszeniu": false 59 | }, 60 | "typ": "organizacja" 61 | } 62 | ] 63 | } 64 | -------------------------------------------------------------------------------- /tests/csv_import_files/import_users.csv: -------------------------------------------------------------------------------- 1 | first_name;last_name;email;password;phone 2 | Adam;Nowak;adam.nowak@firma.pl;; 3 | Joanna;Kowalska;joanna.kowalska@firma.pl;Haslo123; 4 | -------------------------------------------------------------------------------- /tests/feature/test_files.py: -------------------------------------------------------------------------------- 1 | from fastapi.testclient import TestClient 2 | from sqlalchemy.orm import Session 3 | 4 | from app.config import get_settings 5 | 6 | settings = get_settings() 7 | 8 | 9 | def test_get_files(session: Session, client: TestClient): 10 | response = client.request( 11 | "GET", "/files/", headers={"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000"} 12 | ) 13 | response.json() 14 | assert response.status_code == 200 15 | 16 | 17 | # def test_add_file(session: Session, client: TestClient): 18 | 19 | # logger.info(settings.PROJECT_DIR) 20 | # file = "postbox.png" 21 | # p = Path(settings.PROJECT_DIR / "tests" / file) 22 | # data = {"image": (p.open(mode="rb"), file)} 23 | # 
logger.info(p.is_file()) 24 | # # assert 200 == 200 25 | # # file_name = "fake-text-stream.txt" 26 | # # data = {"file": (io.BytesIO(b"some initial text data"), file_name)} 27 | 28 | # # headers = {"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000", "Content-Type": "multipart/form-data"} 29 | # headers = {"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000", "Content-Type": "application/json"} 30 | # response = client.request("POST","/files/", data=data, headers=headers) 31 | # data = response.json() 32 | # logger.info(data) 33 | # assert response.status_code == 400 34 | -------------------------------------------------------------------------------- /tests/feature/test_ideas.py: -------------------------------------------------------------------------------- 1 | # import json 2 | 3 | # from faker import Faker 4 | # from fastapi.testclient import TestClient 5 | # from loguru import logger 6 | # from sqlalchemy import func, select 7 | # from sqlalchemy.orm import Session 8 | 9 | # from app.models.models import Idea 10 | 11 | 12 | # def test_get_ideas(session: Session, client: TestClient): 13 | # response = client.request( 14 | # "GET", "/ideas", headers={"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000"} 15 | # ) 16 | # response.json() 17 | # assert response.status_code == 200 18 | 19 | 20 | # def test_add_ideas(session: Session, client: TestClient): 21 | # fake = Faker() 22 | 23 | # data = { 24 | # "name": fake.text(max_nb_chars=20), 25 | # "summary": fake.paragraph(nb_sentences=1), 26 | # "color": fake.safe_color_name(), 27 | # "text_html": "
<h1>asd</h1><p>asasd</p>
", 28 | # "text_json": { 29 | # "type": "doc", 30 | # "content": [ 31 | # {"type": "heading", "attrs": {"level": 1}, "content": [{"type": "text", "text": "asd"}]}, 32 | # {"type": "paragraph", "content": [{"type": "text", "text": "asasd"}]}, 33 | # ], 34 | # }, 35 | # } 36 | # headers = { 37 | # "tenant": "fake_tenant_company_for_test_00000000000000000000000000000000", 38 | # "Content-Type": "application/json", 39 | # } 40 | # response = client.request("POST", "/ideas/", content=json.dumps(data), headers=headers) 41 | # data = response.json() 42 | # logger.info(data) 43 | # assert response.status_code == 200 44 | 45 | 46 | # def test_get_idea(session: Session, client: TestClient): 47 | # idea = session.execute(select(Idea).order_by(func.random()).limit(1)).scalar_one() 48 | # response = client.request( 49 | # "GET", 50 | # "/ideas/" + str(idea.uuid), 51 | # headers={"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000"}, 52 | # ) 53 | # data = response.json() 54 | # assert response.status_code == 200 55 | # assert data["color"] == idea.color 56 | # assert data["name"] == idea.name 57 | # assert data["text"] == idea.text 58 | # assert data["uuid"] == str(idea.uuid) 59 | 60 | 61 | # def test_delete_idea(session: Session, client: TestClient): 62 | # idea = session.execute(select(Idea).order_by(func.random()).limit(1)).scalar_one() 63 | # logger.info(idea.uuid) 64 | # response = client.request("DELETE","/ideas/" + str(idea.uuid), headers={"tenant": "a"}) 65 | # data = response.json() 66 | # logger.info(data) 67 | # # {'ok': True} 68 | # assert response.status_code == 200 69 | -------------------------------------------------------------------------------- /tests/feature/test_main.py: -------------------------------------------------------------------------------- 1 | from fastapi.testclient import TestClient 2 | 3 | from app.main import app 4 | 5 | client = TestClient(app) 6 | 7 | 8 | def test_main(): 9 | response = client.request("GET", "/") 10 | assert response.status_code == 200 11 | # assert response.json() == {"Hello": "World"} 12 | -------------------------------------------------------------------------------- /tests/feature/test_user.py: -------------------------------------------------------------------------------- 1 | from fastapi.testclient import TestClient 2 | from loguru import logger 3 | from sqlalchemy.orm import Session 4 | 5 | 6 | def test_get_users(session: Session, client: TestClient): 7 | response = client.request( 8 | "GET", "/users", headers={"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000"} 9 | ) 10 | r = { 11 | "items": [ 12 | { 13 | "first_name": "faker_000_Thomas", 14 | "last_name": "faker_000_Franklin", 15 | "email": "faker_000_@email.com", 16 | "phone": None, 17 | "uuid": "ef37fb58-98aa-4a85-8901-5b63a0c3563b", 18 | "is_active": True, 19 | "is_verified": True, 20 | "role_FK": { 21 | "uuid": "e255f78e-704f-4046-b1d7-89bb83786fef", 22 | "role_name": "ADMIN_MASTER", 23 | "role_title": "Main admin", 24 | }, 25 | } 26 | ], 27 | "total": 1, 28 | "page": 1, 29 | "size": 50, 30 | } 31 | data = response.json() 32 | logger.error(data) 33 | assert response.status_code == 200 34 | assert data["items"] 35 | assert data["total"] 36 | assert data["page"] 37 | assert data["size"] 38 | assert data["items"][0]["first_name"] 39 | assert data["items"][0]["last_name"] 40 | assert data["items"][0]["email"] 41 | # assert data["items"][0]["phone"] 42 | assert data["items"][0]["uuid"] 43 | assert data["items"][0]["is_active"] 44 | assert 
data["items"][0]["is_verified"] 45 | assert data["items"][0]["role_FK"] 46 | assert data["items"][0]["role_FK"]["uuid"] 47 | assert data["items"][0]["role_FK"]["role_name"] 48 | assert data["items"][0]["role_FK"]["role_title"] 49 | 50 | 51 | # TODO role_uuid 52 | # def test_add_users(session: Session, client: TestClient): 53 | 54 | # fake = Faker() 55 | 56 | # password = fake.password() 57 | 58 | # data = { 59 | # "first_name": fake.first_name(), 60 | # "last_name": fake.last_name(), 61 | # "email": fake.email(), 62 | # "password": password, 63 | # "password_confirmation": password 64 | # # "is_verified": True, 65 | # } 66 | # headers = {"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000", "Content-Type": "application/json"} 67 | # response = client.request("POST","/users/", content=json.dumps(data), headers=headers) 68 | # data = response.json() 69 | # logger.info(data) 70 | # assert response.status_code == 200 71 | 72 | 73 | # def test_edit_user(session: Session, client: TestClient): 74 | 75 | # fake = Faker() 76 | 77 | # password = fake.password() 78 | 79 | # data = { 80 | # "first_name": fake.first_name(), 81 | # "last_name": fake.last_name(), 82 | # "email": fake.email(), 83 | # "password": password, 84 | # "password_confirmation": password 85 | # # "is_verified": True, 86 | # } 87 | 88 | # user = session.execute(select(User).order_by(func.random()).limit(1)).scalar_one() 89 | # headers = {"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000", "Content-Type": "application/json"} 90 | 91 | # response = client.request("PATCH","/users/" + str(user.uuid), content=json.dumps(data), headers=headers) 92 | # data = response.json() 93 | # assert response.status_code == 200 94 | 95 | 96 | # def test_get_user(session: Session, client: TestClient): 97 | # user = session.execute(select(User).order_by(func.random()).limit(1)).scalar_one() 98 | # response = client.request("GET","/users/" + str(user.uuid), headers={"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000"}) 99 | # data = response.json() 100 | # assert response.status_code == 200 101 | # assert data["first_name"] == user.first_name 102 | # assert data["last_name"] == user.last_name 103 | # assert data["email"] == user.email 104 | # assert data["uuid"] == str(user.uuid) 105 | 106 | 107 | # def test_delete_user(session: Session, client: TestClient): 108 | # user = session.execute(select(User).order_by(func.random()).limit(1)).scalar_one() 109 | # logger.info(user.uuid) 110 | # response = client.request("DELETE","/users/" + str(user.uuid), headers={"tenant": "fake_tenant_company_for_test_00000000000000000000000000000000"}) 111 | # data = response.json() 112 | # logger.info(data) 113 | # # {'ok': True} 114 | # assert response.status_code == 200 115 | -------------------------------------------------------------------------------- /tests/files/postbox.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mgurg/fastapi_docker/46f90e0551ae4c4068e2dd1965801234f6f11f72/tests/files/postbox.png -------------------------------------------------------------------------------- /tests/unit/test_password.py: -------------------------------------------------------------------------------- 1 | from app.service.password import Password 2 | 3 | 4 | def test_password_not_match(): 5 | password = Password("a") 6 | is_password_ok = password.compare("B") 7 | assert is_password_ok == "Password and password confirmation not match" 8 | 9 | 
10 | def test_password_without_lowercase(): 11 | password = Password("A") 12 | is_password_ok = password.compare("A") 13 | assert is_password_ok == "Password must contain a lowercase letter." 14 | -------------------------------------------------------------------------------- /tests/unit/test_s3.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | import boto3 5 | from moto import mock_s3 6 | 7 | from app.storage.s3 import S3Storage 8 | 9 | os.environ["MOTO_S3_CUSTOM_ENDPOINTS"] = "http://custom.s3.endpoint" 10 | 11 | 12 | class PrivateS3Storage(S3Storage): 13 | AWS_ACCESS_KEY_ID = "access" 14 | AWS_SECRET_ACCESS_KEY = "secret" 15 | AWS_S3_BUCKET_NAME = "bucket" 16 | AWS_S3_ENDPOINT_URL = "custom.s3.endpoint" 17 | AWS_S3_REGION = "us-east-1" 18 | AWS_S3_USE_SSL = False 19 | 20 | 21 | @mock_s3 22 | def test_s3_storage_methods(tmp_path: Path) -> None: 23 | s3 = boto3.client("s3") 24 | s3.create_bucket(Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"}) 25 | 26 | tmp_file = tmp_path / "example.txt" 27 | tmp_file.write_bytes(b"123") 28 | 29 | storage = PrivateS3Storage() 30 | 31 | assert storage.get_name("test (1).txt") == "test_1.txt" 32 | --------------------------------------------------------------------------------
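A brief usage note on the migration files above: Alembic chains them through their revision/down_revision ids, so the whole schema can be applied or rolled back from the CLI ("alembic upgrade head", "alembic downgrade <revision>") or programmatically through alembic.command. A minimal sketch, assuming the standard alembic.ini at the project root:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # path to the project's Alembic configuration
command.upgrade(cfg, "head")  # apply every revision up to 13be30248d7d
command.downgrade(cfg, "cec65e1bd0de")  # or roll back to a specific revision id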