├── .docker ├── Dockerfile ├── dev.Dockerfile ├── krb.Dockerfile ├── lint.Dockerfile └── test.Dockerfile ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── dependabot.yml └── workflows │ ├── build-beta.yml │ ├── build-docker-image.yml │ ├── build-linters.yml │ └── checks.yml ├── .gitignore ├── .gitmodules ├── .kerberos └── config_server.py ├── .package ├── docker-compose.yml ├── setup.bat ├── setup.sh ├── setup_keepalived.sh ├── swarm │ ├── README-swarm.md │ ├── clean-swarm.sh │ ├── docker-compose-swarm.yml │ ├── setup-swarm.sh │ └── traefik.yaml └── traefik.yml ├── LICENSE ├── Makefile ├── README.md ├── app ├── alembic.ini ├── alembic │ ├── README │ ├── env.py │ ├── script.py.mako │ └── versions │ │ ├── 196f0d327c6a_.py │ │ ├── 275222846605_initial_ldap_schema.py │ │ ├── 4334e2e871a4_add_sessions_ttl.py │ │ ├── 4442d1d982a4_remove_krb_policy.py │ │ ├── 59e98bbd8ad8_.py │ │ ├── 692ae64e0cc5_.py │ │ ├── 6f8fe2548893_fix_read_only.py │ │ ├── 8c2bd40dd809_add_protocols_attr.py │ │ ├── ba78cef9700a_initial_entity_type.py │ │ ├── bf435bbd95ff_add_rdn_attr_name.py │ │ ├── bv546ccd35fa_fix_krbadmin_attrs.py │ │ ├── dafg3a4b22ab_add_preauth_princ.py │ │ ├── f68a134a3685_add_bypass.py │ │ └── fafc3d0b11ec_.py ├── api │ ├── __init__.py │ ├── auth │ │ ├── __init__.py │ │ ├── oauth2.py │ │ ├── router.py │ │ ├── router_mfa.py │ │ ├── router_pwd_policy.py │ │ ├── schema.py │ │ ├── session_router.py │ │ └── utils.py │ ├── exception_handlers.py │ ├── ldap_schema │ │ ├── __init__.py │ │ ├── attribute_type_router.py │ │ ├── entity_type_router.py │ │ └── object_class_router.py │ ├── main │ │ ├── __init__.py │ │ ├── ap_router.py │ │ ├── dns_router.py │ │ ├── krb5_router.py │ │ ├── router.py │ │ ├── schema.py │ │ └── utils.py │ ├── network │ │ ├── __init__.py │ │ ├── router.py │ │ ├── schema.py │ │ └── utils.py │ └── shadow │ │ ├── __init__.py │ │ └── router.py ├── config.py ├── extra │ ├── __init__.py │ ├── alembic_utils.py │ ├── common_pwds.txt │ ├── dev_data.py │ ├── dump_acme_certs.py │ ├── generate_cert.sh │ ├── scripts │ │ ├── __init__.py │ │ ├── check_ldap_principal.py │ │ ├── principal_block_user_sync.py │ │ ├── uac_sync.py │ │ └── update_krb5_config.py │ ├── setup_dev.py │ └── templates │ │ ├── kdc.conf │ │ ├── krb5.conf │ │ ├── named_conf_local_zone_part.template │ │ └── zone.template ├── ioc.py ├── ldap_protocol │ ├── __init__.py │ ├── asn1parser.py │ ├── dependency.py │ ├── dialogue.py │ ├── dns.py │ ├── exceptions.py │ ├── filter_interpreter.py │ ├── kerberos │ │ ├── __init__.py │ │ ├── base.py │ │ ├── client.py │ │ ├── stub.py │ │ └── utils.py │ ├── ldap_codes.py │ ├── ldap_requests │ │ ├── __init__.py │ │ ├── abandon.py │ │ ├── add.py │ │ ├── base.py │ │ ├── bind.py │ │ ├── bind_methods │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── sasl_gssapi.py │ │ │ ├── sasl_plain.py │ │ │ └── simple.py │ │ ├── compare.py │ │ ├── delete.py │ │ ├── extended.py │ │ ├── modify.py │ │ ├── modify_dn.py │ │ └── search.py │ ├── ldap_responses.py │ ├── ldap_schema │ │ ├── __init__.py │ │ ├── attribute_type_dao.py │ │ ├── entity_type_dao.py │ │ └── object_class_dao.py │ ├── messages.py │ ├── multifactor.py │ ├── objects.py │ ├── policies │ │ ├── __init__.py │ │ ├── access_policy.py │ │ ├── network_policy.py │ │ └── password_policy.py │ ├── server.py │ ├── session_storage.py │ ├── user_account_control.py │ └── utils │ │ ├── __init__.py │ │ ├── const.py │ │ ├── cte.py │ │ ├── helpers.py │ │ ├── pagination.py │ │ ├── queries.py │ │ └── raw_definition_parser.py ├── logs │ └── .gitignore ├── models.py ├── multidirectory.py ├── schedule.py └── security.py ├── certs └── .gitignore ├── docker-compose.dev.yml ├── docker-compose.remote.test.yml ├── docker-compose.test.yml ├── docker-compose.yml ├── integration_tests ├── kerberos │ └── Dockerfile └── ssh │ ├── Dockerfile │ ├── docker-compose.yml │ ├── entrypoint.sh │ ├── init.sh │ └── run.sh ├── local.env ├── poetry.lock ├── pyproject.toml ├── tests ├── __init__.py ├── conftest.py ├── test_api │ ├── __init__.py │ ├── test_auth │ │ ├── __init__.py │ │ ├── test_pwd_policy.py │ │ ├── test_router.py │ │ ├── test_router_mfa.py │ │ └── test_sessions.py │ ├── test_ldap_schema │ │ ├── __init__.py │ │ ├── test_attribute_type_router.py │ │ ├── test_attribute_type_router_datasets.py │ │ ├── test_entity_type_router.py │ │ ├── test_entity_type_router_datasets.py │ │ ├── test_object_class_router.py │ │ └── test_object_class_router_datasets.py │ ├── test_main │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── test_dns.py │ │ ├── test_kadmin.py │ │ ├── test_multifactor.py │ │ ├── test_openapi.py │ │ └── test_router │ │ │ ├── test_add.py │ │ │ ├── test_delete.py │ │ │ ├── test_login.py │ │ │ ├── test_modify.py │ │ │ ├── test_modify_dn.py │ │ │ └── test_search.py │ ├── test_network │ │ ├── __init__.py │ │ └── test_router.py │ └── test_shadow │ │ ├── __init__.py │ │ ├── conftest.py │ │ └── test_router.py ├── test_ldap │ ├── __init__.py │ ├── test_bind.py │ ├── test_ldap3_definition_parse.py │ ├── test_ldap3_lib.py │ ├── test_ldap3_whoami.py │ ├── test_passwd_change.py │ ├── test_pool_client_handler.py │ └── test_util │ │ ├── __init__.py │ │ ├── test_add.py │ │ ├── test_delete.py │ │ ├── test_modify.py │ │ ├── test_search.py │ │ └── test_whoami.py └── test_shedule.py └── traefik.yml /.docker/Dockerfile: -------------------------------------------------------------------------------- 1 | # The builder image, used to build the virtual environment 2 | FROM python:3.12.6-bookworm AS builder 3 | 4 | RUN pip install poetry 5 | 6 | ENV POETRY_NO_INTERACTION=1 \ 7 | POETRY_VIRTUALENVS_IN_PROJECT=1 \ 8 | POETRY_VIRTUALENVS_CREATE=1 \ 9 | POETRY_VIRTUALENVS_OPTIONS_NO_PIP=1 \ 10 | POETRY_CACHE_DIR=/tmp/poetry_cache \ 11 | POETRY_VIRTUALENVS_PATH=/venvs \ 12 | VIRTUAL_ENV=/venvs/.venv \ 13 | PATH="/venvs/.venv/bin:$PATH" 14 | 15 | WORKDIR /venvs 16 | 17 | COPY pyproject.toml poetry.lock ./ 18 | 19 | RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --without test,linters,dev --no-root 20 | 21 | # The runtime image, used to just run the code provided its virtual environment 22 | FROM python:3.12.6-slim-bookworm AS runtime 23 | 24 | WORKDIR /app 25 | ARG VERSION 26 | 27 | ENV VIRTUAL_ENV=/venvs/.venv \ 28 | PATH="/venvs/.venv/bin:$PATH" \ 29 | VERSION=${VERSION:-beta} 30 | 31 | RUN set -eux; apt-get update -y && apt-get install netcat-traditional --no-install-recommends -y 32 | COPY app /app 33 | COPY pyproject.toml / 34 | 35 | COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} 36 | -------------------------------------------------------------------------------- /.docker/dev.Dockerfile: -------------------------------------------------------------------------------- 1 | # The builder image, used to build the virtual environment 2 | FROM python:3.12.6-bookworm AS builder 3 | 4 | RUN pip install poetry 5 | 6 | ENV POETRY_NO_INTERACTION=1 \ 7 | POETRY_VIRTUALENVS_IN_PROJECT=1 \ 8 | POETRY_VIRTUALENVS_CREATE=1 \ 9 | POETRY_VIRTUALENVS_OPTIONS_NO_PIP=1 \ 10 | POETRY_CACHE_DIR=/tmp/poetry_cache \ 11 | POETRY_VIRTUALENVS_PATH=/venvs \ 12 | VIRTUAL_ENV=/venvs/.venv \ 13 | PATH="/venvs/.venv/bin:$PATH" 14 | 15 | WORKDIR /venvs 16 | 17 | COPY pyproject.toml poetry.lock ./ 18 | 19 | RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --without test,linters --no-root 20 | 21 | # The runtime image, used to just run the code provided its virtual environment 22 | FROM python:3.12.6-slim-bookworm AS runtime 23 | 24 | WORKDIR /app 25 | ARG VERSION 26 | 27 | ENV VIRTUAL_ENV=/venvs/.venv \ 28 | PATH="/venvs/.venv/bin:$PATH" \ 29 | VERSION=${VERSION:-beta} 30 | 31 | RUN set -eux; apt-get update -y && apt-get install netcat-traditional --no-install-recommends -y 32 | COPY app /app 33 | COPY pyproject.toml / 34 | 35 | COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} 36 | -------------------------------------------------------------------------------- /.docker/krb.Dockerfile: -------------------------------------------------------------------------------- 1 | # The builder image, used to build the virtual environment 2 | ARG VERSION 3 | 4 | FROM python:3.12.6-bookworm AS builder 5 | 6 | ENV VIRTUAL_ENV=/venvs/.venv \ 7 | PATH="/venvs/.venv/bin:$PATH" 8 | 9 | WORKDIR /venvs 10 | 11 | RUN python -m venv .venv 12 | RUN pip install \ 13 | fastapi \ 14 | uvicorn \ 15 | https://github.com/xianglei/python-kadmv/releases/download/0.1.7/python-kadmV-0.1.7.tar.gz 16 | 17 | 18 | FROM ghcr.io/multidirectorylab/krb5_base:${VERSION} AS runtime 19 | 20 | ENV LANG=C.UTF-8 \ 21 | DEBIAN_FRONTEND=noninteractive \ 22 | VIRTUAL_ENV=/venvs/.venv \ 23 | PATH="/venvs/.venv/bin:$PATH" \ 24 | PYTHONDONTWRITEBYTECODE=1 \ 25 | PYTHONUNBUFFERED=1 26 | 27 | COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} 28 | 29 | COPY .kerberos/config_server.py /server/ 30 | EXPOSE 8000 31 | -------------------------------------------------------------------------------- /.docker/lint.Dockerfile: -------------------------------------------------------------------------------- 1 | # The builder image, used to build the virtual environment 2 | FROM python:3.12.6-bookworm AS builder 3 | 4 | RUN pip install poetry 5 | 6 | ENV POETRY_NO_INTERACTION=1 \ 7 | POETRY_VIRTUALENVS_IN_PROJECT=1 \ 8 | POETRY_VIRTUALENVS_CREATE=1 \ 9 | POETRY_VIRTUALENVS_OPTIONS_NO_PIP=1 \ 10 | POETRY_CACHE_DIR=/tmp/poetry_cache \ 11 | POETRY_VIRTUALENVS_PATH=/venvs \ 12 | VIRTUAL_ENV=/venvs/.venv \ 13 | PATH="/venvs/.venv/bin:$PATH" 14 | 15 | WORKDIR /venvs 16 | 17 | COPY pyproject.toml poetry.lock ./ 18 | 19 | RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --with linters --no-root 20 | 21 | # The runtime image, used to just run the code provided its virtual environment 22 | FROM python:3.12.6-slim-bookworm AS runtime 23 | 24 | WORKDIR /app 25 | RUN set -eux; 26 | 27 | ENV VIRTUAL_ENV=/venvs/.venv \ 28 | PATH="/venvs/.venv/bin:$PATH" \ 29 | PYTHONDONTWRITEBYTECODE=1 \ 30 | PYTHONUNBUFFERED=1 31 | 32 | COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} 33 | 34 | COPY app /app 35 | COPY pyproject.toml ./ -------------------------------------------------------------------------------- /.docker/test.Dockerfile: -------------------------------------------------------------------------------- 1 | # The builder image, used to build the virtual environment 2 | FROM python:3.12.6-bookworm AS builder 3 | 4 | RUN pip install poetry 5 | 6 | ENV POETRY_NO_INTERACTION=1 \ 7 | POETRY_VIRTUALENVS_IN_PROJECT=1 \ 8 | POETRY_VIRTUALENVS_CREATE=1 \ 9 | POETRY_VIRTUALENVS_OPTIONS_NO_PIP=1 \ 10 | POETRY_CACHE_DIR=/tmp/poetry_cache \ 11 | POETRY_VIRTUALENVS_PATH=/venvs \ 12 | VIRTUAL_ENV=/venvs/.venv \ 13 | PATH="/venvs/.venv/bin:$PATH" 14 | 15 | WORKDIR /venvs 16 | 17 | COPY pyproject.toml poetry.lock ./ 18 | 19 | RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --with test --no-root 20 | 21 | # The runtime image, used to just run the code provided its virtual environment 22 | FROM python:3.12.6-slim-bookworm AS runtime 23 | 24 | WORKDIR /app 25 | RUN set -eux; apt-get update -y && apt-get install ldap-utils curl --no-install-recommends -y 26 | 27 | ENV VIRTUAL_ENV=/venvs/.venv \ 28 | PATH="/venvs/.venv/bin:$PATH" \ 29 | PYTHONDONTWRITEBYTECODE=1 \ 30 | PYTHONUNBUFFERED=1 31 | 32 | COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} 33 | 34 | COPY app /app 35 | COPY tests /app/tests 36 | COPY pyproject.toml / 37 | 38 | 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | 15 | MultiDirectory version: '....' 16 | Enviroment: docker compose v2 17 | Server OS: '....' 18 | Kerberos: MIT Integrated 19 | DNS: Bind9 Integrated 20 | 21 | Steps to reproduce the behavior: 22 | 1. Go to '...' 23 | 2. Click on '....' 24 | 3. Scroll down to '....' 25 | 4. See error 26 | 27 | **Expected behavior** 28 | A clear and concise description of what you expected to happen. 29 | 30 | **Screenshots** 31 | If applicable, add screenshots to help explain your problem. 32 | 33 | **Desktop (please complete the following information):** 34 | - OS: [e.g. iOS] 35 | - Browser [e.g. chrome, safari] 36 | - Version [e.g. 22] 37 | 38 | **Smartphone (please complete the following information):** 39 | - Device: [e.g. iPhone6] 40 | - OS: [e.g. iOS8.1] 41 | - Browser [e.g. stock browser, safari] 42 | - Version [e.g. 22] 43 | 44 | **Additional context** 45 | Add any other context about the problem here. 46 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | open-pull-requests-limit: 10 8 | target-branch: main 9 | - package-ecosystem: "github-actions" 10 | directory: "/" 11 | schedule: 12 | interval: "weekly" 13 | target-branch: main 14 | -------------------------------------------------------------------------------- /.github/workflows/build-linters.yml: -------------------------------------------------------------------------------- 1 | name: build-push linters cache 2 | on: 3 | push: 4 | branches: [main] 5 | 6 | 7 | env: 8 | REPO: ${{ github.repository }} 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: downcase REPO 15 | run: | 16 | echo "REPO=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV} 17 | - uses: actions/checkout@v4 18 | - name: Login to GitHub Container Registry 19 | uses: docker/login-action@v3 20 | with: 21 | registry: ghcr.io 22 | username: ${{ github.actor }} 23 | password: ${{ secrets.GITHUB_TOKEN }} 24 | 25 | - name: Build docker image 26 | env: 27 | TAG: ghcr.io/${{ env.REPO }}_linters:beta 28 | DOCKER_BUILDKIT: '1' 29 | run: | 30 | echo $TAG 31 | docker build --push --target=runtime -f .docker/lint.Dockerfile . -t $TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 -------------------------------------------------------------------------------- /.github/workflows/checks.yml: -------------------------------------------------------------------------------- 1 | name: process_pull_request 2 | on: 3 | push: 4 | branches: [main] 5 | pull_request: null 6 | 7 | env: 8 | REPO: ${{ github.repository }} 9 | 10 | jobs: 11 | ruff_linter: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | - name: Login to GitHub Container Registry 16 | uses: docker/login-action@v3 17 | with: 18 | registry: ghcr.io 19 | username: ${{ github.actor }} 20 | password: ${{ secrets.GITHUB_TOKEN }} 21 | - name: build linters 22 | env: 23 | TAG: ghcr.io/${{ env.REPO }}_linters:latest 24 | NEW_TAG: linter 25 | run: docker build --target=runtime -f .docker/lint.Dockerfile . -t $NEW_TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 26 | - name: Run linters 27 | env: 28 | NEW_TAG: linter 29 | run: docker run $NEW_TAG ruff check --output-format=github . 30 | 31 | ruff_format: 32 | runs-on: ubuntu-latest 33 | steps: 34 | - uses: actions/checkout@v4 35 | - name: Login to GitHub Container Registry 36 | uses: docker/login-action@v3 37 | with: 38 | registry: ghcr.io 39 | username: ${{ github.actor }} 40 | password: ${{ secrets.GITHUB_TOKEN }} 41 | - name: build linters 42 | env: 43 | TAG: ghcr.io/${{ env.REPO }}_linters:latest 44 | NEW_TAG: linter 45 | run: docker build --target=runtime -f .docker/lint.Dockerfile . -t $NEW_TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 46 | - name: Run linters 47 | env: 48 | NEW_TAG: linter 49 | run: docker run $NEW_TAG ruff format --check 50 | 51 | mypy: 52 | runs-on: ubuntu-latest 53 | steps: 54 | - uses: actions/checkout@v4 55 | - name: Login to GitHub Container Registry 56 | uses: docker/login-action@v3 57 | with: 58 | registry: ghcr.io 59 | username: ${{ github.actor }} 60 | password: ${{ secrets.GITHUB_TOKEN }} 61 | - name: build linters 62 | env: 63 | TAG: ghcr.io/${{ env.REPO }}_linters:latest 64 | NEW_TAG: linter 65 | run: docker build --target=runtime -f .docker/lint.Dockerfile . -t $NEW_TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 66 | - name: Run linters 67 | env: 68 | NEW_TAG: linter 69 | run: docker run $NEW_TAG mypy . 70 | 71 | tests: 72 | runs-on: ubuntu-latest 73 | steps: 74 | - uses: actions/checkout@v4 75 | - name: Login to GitHub Container Registry 76 | uses: docker/login-action@v3 77 | with: 78 | registry: ghcr.io 79 | username: ${{ github.actor }} 80 | password: ${{ secrets.GITHUB_TOKEN }} 81 | - name: build tests 82 | env: 83 | CACHE: ghcr.io/${{ env.REPO }}_test:latest 84 | TAG: tests 85 | run: docker build --target=runtime -f .docker/test.Dockerfile . -t $TAG --cache-to type=gha,mode=max --cache-from $CACHE --build-arg BUILDKIT_INLINE_CACHE=1 86 | - name: Run tests 87 | env: 88 | TAG: tests 89 | run: docker compose -f docker-compose.remote.test.yml up --no-log-prefix --attach md-test --exit-code-from md-test -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | .idea/ 16 | .DS_Store 17 | .vscode/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | pip-wheel-metadata/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # IPython 84 | profile_default/ 85 | ipython_config.py 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # ldap 135 | *.ldif 136 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "interface"] 2 | path = interface 3 | url = https://github.com/MultifactorLab/MultiDirectory-Web-Admin.git 4 | ignore = all 5 | -------------------------------------------------------------------------------- /.package/setup.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | copy NUL ./.env 3 | type nul > ./.env 4 | setlocal EnableDelayedExpansion 5 | 6 | set char=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 7 | set count=0 8 | 9 | set length=16 10 | 11 | :Loop 12 | set /a count+=1 13 | set /a rand=%Random%%%61 14 | set password=!password!!char:~%rand%,1! 15 | if !count! leq !length! goto Loop 16 | 17 | set postgres_password=%password% 18 | 19 | set count=0 20 | set length=128 21 | 22 | :Loop2 23 | set /a count+=1 24 | set /a rand=%Random%%%61 25 | set secret_key=!secret_key!!char:~%rand%,1! 26 | if !count! leq !length! goto Loop2 27 | 28 | 29 | set "postgres_user=user" 30 | set /p "postgres_user=Enter postgres user or just ENTER for default [%postgres_user%]: " 31 | 32 | set "postgres_host=postgres" 33 | set /p "postgres_host=Enter postgres host or just ENTER for default [%postgres_host%]: " 34 | 35 | set "postgres_db=postgres" 36 | set /p "postgres_db=Enter postgres database name or just ENTER for default [%postgres_db%]: " 37 | 38 | set /p "postgres_password=Enter postgres password or just ENTER for default [autogenerated]: " 39 | 40 | :domain 41 | set /p "domain=Enter domain name: " 42 | IF "%domain%"=="" echo domain required && goto domain 43 | 44 | 45 | echo POSTGRES_HOST=%postgres_host% >> .env 46 | echo POSTGRES_USER=%postgres_user% >> .env 47 | echo POSTGRES_DB=%postgres_db% >> .env 48 | echo POSTGRES_PASSWORD=%postgres_password% >> .env 49 | echo DOMAIN=%domain% >> .env 50 | echo SECRET_KEY=%secret_key% >> .env 51 | -------------------------------------------------------------------------------- /.package/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | touch .env 3 | > .env 4 | 5 | read -p "Enter postgres user [default: user]: " postgres_user 6 | postgres_user=${postgres_user:-user} 7 | 8 | read -p "Enter postgres database name [default: postgres]: " postgres_db 9 | postgres_db=${postgres_db:-postgres} 10 | 11 | read -p "Enter postgres host (leave it default if you are using default database) [default: postgres]: " postgres_host 12 | postgres_host=${postgres_host:-postgres} 13 | 14 | 15 | read -p "Enter postgres password [default: autogenerate]: " postgres_password 16 | postgres_password=${postgres_password:-$(openssl rand -hex 16)} 17 | 18 | 19 | read -p "Enter interface domain [required]: " domain 20 | if [ -z "$domain" ]; then echo "interface domain required" && exit 1; fi 21 | 22 | secret_key=$(openssl rand -hex 32) 23 | 24 | echo "POSTGRES_HOST="$postgres_host >> .env 25 | echo "POSTGRES_USER="$postgres_user >> .env 26 | echo "POSTGRES_DB="$postgres_db >> .env 27 | echo "POSTGRES_PASSWORD="$postgres_password >> .env 28 | echo "DOMAIN="$domain >> .env 29 | echo "SECRET_KEY="$secret_key >> .env 30 | -------------------------------------------------------------------------------- /.package/setup_keepalived.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | unset -v IP 4 | unset -v STATE 5 | unset -v DEVICE 6 | 7 | while getopts i:s:d: opt; do 8 | case $opt in 9 | i) IP=$OPTARG ;; 10 | s) STATE=$OPTARG ;; 11 | d) DEVICE=$OPTARG ;; 12 | *) 13 | echo 'Error in command line parsing' >&2 14 | exit 1 15 | esac 16 | done 17 | 18 | shift "$(( OPTIND - 1 ))" 19 | 20 | if [ -z "$IP" ] || [ -z "$STATE" ] || [ -z "$DEVICE" ]; then 21 | echo 'Missing -i or -s or -d. Example: -i 192.168.1.200 -s MASTER -d eth0 ' >&2 22 | exit 1 23 | fi 24 | 25 | # Determine OS platform 26 | UNAME=$(uname | tr "[:upper:]" "[:lower:]") 27 | # If Linux, try to determine specific distribution 28 | if [ "$UNAME" == "linux" ]; then 29 | # If available, use LSB to identify distribution 30 | if [ -f /etc/lsb-release -o -d /etc/lsb-release.d ]; then 31 | export DISTRO=$(lsb_release -i | cut -d: -f2 | sed s/'^\t'//) 32 | # Otherwise, use release info file 33 | else 34 | export DISTRO=$(ls -d /etc/[A-Za-z]*[_-][rv]e[lr]* | grep -v "lsb" | cut -d'/' -f3 | cut -d'-' -f1 | cut -d'_' -f1) 35 | fi 36 | fi 37 | # For everything else (or if above failed), just use generic identifier 38 | [ "$DISTRO" == "" ] && export DISTRO=$UNAME 39 | unset UNAME 40 | 41 | if [ $DISTRO == "Ubuntu" ]; then 42 | apt-get update 43 | apt install -y keepalived 44 | else 45 | yum update 46 | yum install -y keepalived ipvsadm 47 | fi 48 | 49 | echo "Adding keepalived user for running checks" 50 | adduser --disabled-password --gecos "" keepalived 51 | 52 | 53 | echo " 54 | global_defs { 55 | enable_script_security 56 | script_user keepalived 57 | } 58 | vrrp_script chk_docker { 59 | script \"pgrep dockerd\" #Had to use this on debian distros 60 | #script "pidof dockerd" 61 | interval 1 62 | weight 20 63 | } 64 | vrrp_script chk_traefik { 65 | script \"pgrep traefik\" #Had to use this on debian distros 66 | #script "pidof traefik" 67 | interval 30 68 | weight 10 69 | } 70 | vrrp_script keepalived_check { 71 | script \"nc -zvw1 localhost 443\" 72 | interval 5 73 | timeout 5 74 | rise 3 75 | fall 3 76 | } 77 | vrrp_instance SWARM { 78 | state $STATE 79 | interface $DEVICE 80 | virtual_router_id 51 81 | priority 100 82 | advert_int 1 83 | authentication { 84 | auth_type PASS 85 | auth_pass qwerty 86 | } 87 | virtual_ipaddress { 88 | $IP/24 89 | } 90 | track_script { 91 | chk_docker 92 | chk_traefik 93 | keepalived_check 94 | } 95 | } 96 | " > /etc/keepalived/keepalived.conf 97 | 98 | lsmod | grep -P '^ip_vs\s' || (echo "modprobe ip_vs" >> /etc/modules && modprobe ip_vs) 99 | systemctl enable keepalived 100 | systemctl start keepalived 101 | -------------------------------------------------------------------------------- /.package/swarm/README-swarm.md: -------------------------------------------------------------------------------- 1 | # MultiDirectory-CI 2 | Ready to deploy ldap and http server. 3 | 4 | Configuration repository for 5 | 1. [MultiDirecory](https://github.com/MultifactorLab/MultiDirectory) - ldap and JSON web API server 6 | 7 | 2. [MultiDirectory-Web-Admin](https://github.com/MultifactorLab/MultiDirectory-Web-Admin) - web interface for API 8 | 9 | All services are running through [traefik](https://doc.traefik.io/traefik/providers/docker/), using [postgres](https://www.postgresql.org/) as database, other DBMS are incompatible. 10 | 11 | ## Installation HA mode docker swarm (linux only) 12 | 13 | 1. Install [docker](https://docs.docker.com/engine/install/) and [docker compose](https://docs.docker.com/compose/install/) 14 | 15 | 2. Register and assosiate domain with your server IP, for e.g. `multidirectory.example.com` -> `255.255.255.255` 16 | 17 | 3. 18 | ``` sh 19 | git clone https://github.com/MultiDirectoryLab/MultiDirectory-CI.git 20 | cd MultiDirecory-CI 21 | ``` 22 | 23 | 4. Generate config `.env` file with: 24 | 25 | On primary postgres node run `./setup-swarm.sh` for Unix systems then follow instructions. 26 | Script will generate `.env` file, services will be deployed automatically. 27 | To check services running: 28 | ``` docker service ls ``` 29 | 30 | 5. Login https:// 31 | 32 | ## Update services: 33 | 34 | Run following command: 35 | 36 | Compose v2: 37 | ```sh 38 | env $(cat .env | grep ^[A-Z] | xargs) docker stack deploy --compose-file docker-compose-swarm.yml md1 39 | ``` 40 | 41 | To update config files (docker-compose.yml and setup.*), please, redownload repository, using step 3. 42 | 43 | 44 | ## Development 45 | 46 | To access docs and redoc of API, request `/api/redoc` and `/api/docs` url from your API domain. 47 | 48 | ## Custom database 49 | 50 | To deploy MultiDirectory with custom postgres database, you can setup following variables in `.env` file: 51 | 52 | POSTGRES_HOST 53 | POSTGRES_USER 54 | POSTGRES_PASSWORD 55 | POSTGRES_DB 56 | 57 | Please, note, other DBMS, rather than PostgreSQL, are incompatiple with MultiDirectory app. 58 | 59 | In that case you may need to remove `postgres` service from `docker-compose.yml` file. 60 | -------------------------------------------------------------------------------- /.package/swarm/clean-swarm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # clean volumes after "docker stack rm md1" for clean initial setup 3 | docker volume ls --format={{.Name}} | grep md1 | xargs docker volume rm 4 | -------------------------------------------------------------------------------- /.package/swarm/setup-swarm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Цветовые коды 4 | RED='\033[0;31m' 5 | GREEN='\033[0;32m' 6 | YELLOW='\033[0;33m' 7 | NC='\033[0m' # No Color 8 | 9 | TIME=0 10 | TIMEOUT=60 11 | EXITSTATUS=1 12 | 13 | touch .env 14 | > .env 15 | 16 | read -p "Enter postgres user [default: user]: " postgres_user 17 | postgres_user=${postgres_user:-user} 18 | 19 | read -p "Enter postgres database name [default: md]: " postgres_db 20 | postgres_db=${postgres_db:-md} 21 | 22 | read -p "Enter postgres host (leave it default if you are using default database) [default: postgres]: " postgres_host 23 | postgres_host=${postgres_host:-postgres} 24 | 25 | 26 | read -p "Enter postgres password [default: autogenerate]: " postgres_password 27 | postgres_password=${postgres_password:-$(openssl rand -hex 16)} 28 | 29 | 30 | read -p "Enter interface domain [required]: " domain 31 | if [ -z "$domain" ]; then echo "interface domain required" && exit 1; fi 32 | 33 | secret_key=$(openssl rand -hex 32) 34 | 35 | echo "POSTGRES_HOST="$postgres_host >> .env 36 | echo "POSTGRES_USER="$postgres_user >> .env 37 | echo "POSTGRES_DB="$postgres_db >> .env 38 | echo "POSTGRES_PASSWORD="$postgres_password >> .env 39 | echo "DOMAIN="$domain >> .env 40 | echo "SECRET_KEY="$secret_key >> .env 41 | echo "MD_WEB_ADMIN_IMAGE=ghcr.io/multidirectorylab/multidirectory-web-admin:latest" >> .env 42 | echo "MD_IMAGE=ghcr.io/multidirectorylab/multidirectory:latest" >> .env 43 | 44 | # set docker node add label = primary 45 | docker node update --label-add type=primary $HOSTNAME 46 | 47 | # generate certs 48 | openssl req -nodes -new -x509 -keyout certs/privkey-md.pem -out certs/cert-md.pem -subj '/C=RU/ST=Moscow/L=Moscow/O=Multidirectory/OU=IT/CN='$domain 49 | 50 | # Проверка кода завершения команды 51 | if [ $? -eq 0 ]; then 52 | echo -e "${GREEN}Сертификат успешно создан${NC}" 53 | else 54 | echo -e "${RED}Ошибка при создании сертификата${NC}" 55 | exit 1 56 | fi 57 | 58 | sleep 1 59 | 60 | #deploy stack md1 61 | 62 | echo "Запущен процесс установки контейнеров" 63 | 64 | COMMAND_OUTPUT=$(env $(cat .env | grep ^[A-Z] | xargs) docker stack deploy --compose-file docker-compose-swarm.yml md1 2>&1) 65 | 66 | # Проверка кода завершения команды 67 | if [ $? -eq 0 ]; then 68 | echo -e "${GREEN}Установка выполнена успешно${NC}" 69 | else 70 | echo -e "${RED}Ошибка при выполнении команды${NC}" 71 | echo -e "${YELLOW}Сообщение об ошибке: $COMMAND_OUTPUT${NC}" 72 | exit 1 73 | fi 74 | 75 | 76 | sleep 1 77 | 78 | until [[ $TIME -eq $TIMEOUT ]] || [[ $EXITSTATUS -eq 0 ]]; do 79 | echo $TIME 'check running postgres ..' 80 | if ( docker stack ps md1 --format "{{.Image}} {{.CurrentState}}" --filter "desired-state=running" --no-trunc | grep Running | grep pgpool ); then 81 | EXITSTATUS=0 82 | else 83 | EXITSTATUS=1 84 | fi 85 | sleep 3 86 | ((TIME++)) 87 | done 88 | 89 | # Проверка результата выполнения цикла 90 | if [[ $EXITSTATUS -eq 0 ]]; then 91 | echo -e "${GREEN}Проверка успешно выполнена, postgres запущен${NC}" 92 | else 93 | echo -e "${RED}Ошибка: превышен тайм-аут ожидания запуска postgres${NC}" 94 | exit 1 95 | fi 96 | 97 | sleep 1 98 | 99 | #RUN migration 100 | docker run --rm -it --network md --env-file .env ghcr.io/multidirectorylab/multidirectory:latest sh -c 'alembic upgrade head' 101 | 102 | echo "done ... visit https://$domain " 103 | -------------------------------------------------------------------------------- /.package/swarm/traefik.yaml: -------------------------------------------------------------------------------- 1 | tls: 2 | certificates: 3 | - certFile: /run/secrets/cert.pem 4 | keyFile: /run/secrets/privkey.pem 5 | -------------------------------------------------------------------------------- /.package/traefik.yml: -------------------------------------------------------------------------------- 1 | log: 2 | level: ERROR 3 | 4 | api: 5 | insecure: true 6 | 7 | ping: 8 | entryPoint: "ping" 9 | 10 | entryPoints: 11 | ping: 12 | address: ":8800" 13 | web: 14 | address: ":80" 15 | http: 16 | redirections: 17 | entryPoint: 18 | to: websecure 19 | scheme: https 20 | permanent: true 21 | ldap: 22 | address: ":389" 23 | proxyProtocol: 24 | insecure: true 25 | ldaps: 26 | address: ":636" 27 | proxyProtocol: 28 | insecure: true 29 | kadmind: 30 | address: ":749" 31 | kpasswd: 32 | address: ":464" 33 | bind_dns: 34 | address: ":53" 35 | websecure: 36 | address: ":443" 37 | http: 38 | tls: 39 | certResolver: md-resolver 40 | 41 | accesslog: 42 | format: json 43 | 44 | tls: 45 | stores: 46 | default: 47 | defaultCertificate: 48 | certFile: /letsencrypt/cert.pem 49 | keyFile: /letsencrypt/privkey.pem 50 | 51 | options: 52 | default: 53 | maxVersion: VersionTLS12 54 | maxtls12: 55 | maxVersion: VersionTLS12 56 | 57 | certificatesResolvers: 58 | md-resolver: 59 | acme: 60 | tlschallenge: true 61 | email: support@multifactor.com 62 | storage: /letsencrypt/acme.json 63 | 64 | providers: 65 | file: 66 | filename: /traefik.yml 67 | watch: true 68 | docker: 69 | endpoint: "unix:///var/run/docker.sock" 70 | exposedByDefault: false 71 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # local development commands 2 | help: ## show help message 3 | @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[$$()% a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) 4 | 5 | before_pr: 6 | ruff format . 7 | ruff check . --fix --unsafe-fixes 8 | mypy . 9 | 10 | build: ## build app and manually generate self-signed cert 11 | make down 12 | docker compose build 13 | 14 | up: ## run tty container with related services, use with run command 15 | make down; docker compose up 16 | 17 | test: ## run tests 18 | docker compose -f docker-compose.test.yml down --remove-orphans 19 | make down; 20 | docker compose -f docker-compose.test.yml up --no-log-prefix --attach test --exit-code-from test 21 | 22 | run: ## runs server 386/636 port 23 | clear;docker exec -it multidirectory bash -c "python ." 24 | 25 | launch: ## run standalone app without tty container 26 | docker compose down; 27 | docker compose run bash -c "alembic upgrade head && python ." 28 | 29 | downgrade: ## re-run migration 30 | docker exec -it multidirectory_api bash -c\ 31 | "alembic downgrade -1; alembic upgrade head;" 32 | 33 | down: ## shutdown services 34 | docker compose -f docker-compose.test.yml down --remove-orphans 35 | docker compose down --remove-orphans 36 | 37 | # server stage/development commands 38 | 39 | stage_gen_cert: ## generate self-signed cert 40 | docker compose -f docker-compose.dev.yml run server bash -c "cd /certs; openssl req -nodes -new -x509 -keyout privkey.pem -out cert.pem" 41 | 42 | stage_build: ## build stage server 43 | docker compose -f docker-compose.dev.yml down 44 | docker compose -f docker-compose.dev.yml build 45 | 46 | stage_up: ## run app and detach 47 | make stage_down; 48 | docker compose -f docker-compose.dev.yml up -d 49 | 50 | stage_down: ## stop all services 51 | docker compose -f docker-compose.dev.yml down --remove-orphans 52 | 53 | stage_update: ## update service 54 | make stage_down; 55 | make stage_build; 56 | docker compose -f docker-compose.dev.yml pull; 57 | make stage_up; 58 | docker exec -it multidirectory-ldap bash -c\ 59 | "alembic downgrade -1; alembic upgrade head; python -m extra.setup_dev" 60 | 61 | krb_client_build: ## build krb client service 62 | docker build -f integration_tests/kerberos/Dockerfile . -t krbclient:runtime 63 | 64 | krb_client: ## run krb client bash 65 | docker run --rm --init -it --name krbclient --network multidirectory_default krbclient:runtime bash 66 | 67 | migrations: ## generate migration file 68 | docker compose run ldap_server alembic revision --autogenerate 69 | 70 | migrate: ## upgrade db 71 | docker compose run ldap_server alembic upgrade head 72 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # MultiDirectory 2 | Ready to deploy ldap and http server. 3 | 4 | Configuration repository for 5 | 1. [MultiDirecory](https://github.com/MultiDirectoryLab/MultiDirectory) - ldap and JSON web API server 6 | 7 | 2. [MultiDirectory-Web-Admin](https://github.com/MultiDirectoryLab/MultiDirectory-Web-Admin) - web interface for API 8 | 9 | All services are running through [traefik](https://doc.traefik.io/traefik/providers/docker/), using [postgres](https://www.postgresql.org/) v15+ as database, other DBMS are incompatible. 10 | 11 | ## Installation 12 | 13 | 1. Install [docker](https://docs.docker.com/engine/install/) and [docker compose](https://docs.docker.com/compose/install/) 14 | 15 | 2. Register and associate domain with your server IP, for e.g. `multidirectory.example.com` -> `255.255.255.255` 16 | 17 | 3. Create multidirectory folder: 18 | ```sh 19 | mkdir MultiDirectory; cd MultiDirectory; 20 | ``` 21 | 22 | 4. Generate config `.env` file with: 23 | 24 | ### For Linux: 25 | ```sh 26 | bash <(curl https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/.package/setup.sh); 27 | curl -O https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/.package/docker-compose.yml; 28 | curl -O https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/.package/traefik.yml; 29 | curl https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/LICENSE 30 | ``` 31 | 32 | Then follow .env file fill instructions. 33 | After generating `.env` file, services are ready to deploy 34 | 35 | ### For Windows: 36 | ```sh 37 | curl -O https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/.package/setup.bat; 38 | curl -O https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/.package/docker-compose.yml; 39 | curl -O https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/.package/traefik.yml; 40 | curl https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/main/LICENSE 41 | ``` 42 | 43 | Run `./setup.bat`, then follow instructions. 44 | After generating `.env` file, services are ready to deploy 45 | 46 | 5. Start services with command: 47 | 48 | Compose v2: 49 | ```sh 50 | docker compose pull; docker compose up -d 51 | ``` 52 | 53 | ## Update services: 54 | 55 | Run following command: 56 | 57 | Compose v2: 58 | ```sh 59 | docker compose down; docker compose pull; docker compose up -d 60 | ``` 61 | 62 | To update config files (docker-compose.yml and setup.*), please, redownload repository, using step 4. 63 | 64 | 65 | ## Development 66 | 67 | To access docs and redoc of API, request `/api/redoc` and `/api/docs` url from your API domain. 68 | 69 | ## Custom database 70 | 71 | To deploy MultiDirectory with custom postgres database, you can setup following variables in `.env` file: 72 | 73 | POSTGRES_HOST 74 | POSTGRES_USER 75 | POSTGRES_PASSWORD 76 | POSTGRES_DB 77 | 78 | In that case you may need to remove `postgres` service from `docker-compose.yml` file. 79 | 80 | Please note that only PostgreSQL DBMS version 15 or later is compatible with the MultiDirectory app. 81 | -------------------------------------------------------------------------------- /app/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = alembic 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 19 | # If specified, requires the python-dateutil library that can be 20 | # installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to dateutil.tz.gettz() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to alembic/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 53 | 54 | # the output encoding used when revision files 55 | # are written from script.py.mako 56 | # output_encoding = utf-8 57 | 58 | sqlalchemy.url = driver://user:pass@localhost/dbname 59 | 60 | 61 | [post_write_hooks] 62 | # post_write_hooks defines scripts or Python functions that are run 63 | # on newly generated revision scripts. See the documentation for further 64 | # detail and examples 65 | 66 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 67 | # hooks = black 68 | # black.type = console_scripts 69 | # black.entrypoint = black 70 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 71 | 72 | # Logging configuration 73 | [loggers] 74 | keys = root,sqlalchemy,alembic 75 | 76 | [handlers] 77 | keys = console 78 | 79 | [formatters] 80 | keys = generic 81 | 82 | [logger_root] 83 | level = WARN 84 | handlers = console 85 | qualname = 86 | 87 | [logger_sqlalchemy] 88 | level = WARN 89 | handlers = 90 | qualname = sqlalchemy.engine 91 | 92 | [logger_alembic] 93 | level = INFO 94 | handlers = 95 | qualname = alembic 96 | 97 | [handler_console] 98 | class = StreamHandler 99 | args = (sys.stderr,) 100 | level = NOTSET 101 | formatter = generic 102 | 103 | [formatter_generic] 104 | format = %(levelname)-5.5s [%(name)s] %(message)s 105 | datefmt = %H:%M:%S 106 | -------------------------------------------------------------------------------- /app/alembic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /app/alembic/env.py: -------------------------------------------------------------------------------- 1 | """Alembic migrations file.""" 2 | 3 | import asyncio 4 | from logging.config import fileConfig 5 | 6 | from alembic import context 7 | from sqlalchemy.ext.asyncio import create_async_engine 8 | 9 | from config import Settings 10 | from models import Base 11 | 12 | # this is the Alembic Config object, which provides 13 | # access to the values within the .ini file in use. 14 | config = context.config 15 | 16 | # Interpret the config file for Python logging. 17 | # This line sets up loggers basically. 18 | if config.config_file_name is not None: 19 | fileConfig(config.config_file_name) 20 | 21 | target_metadata = Base.metadata 22 | 23 | 24 | def run_sync_migrations(connection): 25 | """Run sync migrations.""" 26 | context.configure( 27 | connection=connection, 28 | target_metadata=target_metadata, 29 | include_schemas=True, 30 | version_table_schema=target_metadata.schema, 31 | ) 32 | 33 | with context.begin_transaction(): 34 | context.run_migrations() 35 | 36 | 37 | async def run_async_migrations(settings): 38 | """Run async migrations.""" 39 | engine = create_async_engine(str(settings.POSTGRES_URI)) 40 | 41 | async with engine.connect() as connection: 42 | await connection.run_sync(run_sync_migrations) 43 | 44 | 45 | def run_migrations_online(): 46 | """Run migrations in 'online' mode. 47 | 48 | In this scenario we need to create an Engine 49 | and associate a connection with the context. 50 | """ 51 | conn = context.config.attributes.get("connection", None) 52 | settings = context.config.attributes.get( 53 | "app_settings", 54 | Settings.from_os(), 55 | ) 56 | 57 | if conn is None: 58 | asyncio.run(run_async_migrations(settings)) 59 | else: 60 | run_sync_migrations(conn) 61 | 62 | 63 | run_migrations_online() 64 | -------------------------------------------------------------------------------- /app/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade() -> None: 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /app/alembic/versions/4334e2e871a4_add_sessions_ttl.py: -------------------------------------------------------------------------------- 1 | """add sessions ttl. 2 | 3 | Revision ID: 4334e2e871a4 4 | Revises: dafg3a4b22ab 5 | Create Date: 2025-02-20 13:01:56.736774 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "4334e2e871a4" 14 | down_revision = "dafg3a4b22ab" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | """Upgrade.""" 21 | op.add_column( 22 | "Policies", 23 | sa.Column( 24 | "ldap_session_ttl", 25 | sa.Integer(), 26 | server_default="-1", 27 | nullable=False, 28 | ), 29 | ) 30 | op.add_column( 31 | "Policies", 32 | sa.Column( 33 | "http_session_ttl", 34 | sa.Integer(), 35 | server_default="28800", 36 | nullable=False, 37 | ), 38 | ) 39 | 40 | 41 | def downgrade() -> None: 42 | """Downgrade.""" 43 | op.drop_column("Policies", "http_session_ttl") 44 | op.drop_column("Policies", "ldap_session_ttl") 45 | -------------------------------------------------------------------------------- /app/alembic/versions/4442d1d982a4_remove_krb_policy.py: -------------------------------------------------------------------------------- 1 | """Remove default_policy. 2 | 3 | Revision ID: 4442d1d982a4 4 | Revises: 692ae64e0cc5 5 | Create Date: 2025-02-28 12:01:56.745334 6 | 7 | """ 8 | 9 | from alembic import op 10 | from sqlalchemy import delete 11 | from sqlalchemy.orm import Session 12 | 13 | from extra.alembic_utils import temporary_stub_entity_type_id 14 | from models import Attribute, Directory 15 | 16 | # revision identifiers, used by Alembic. 17 | revision = "4442d1d982a4" 18 | down_revision = "692ae64e0cc5" 19 | branch_labels = None 20 | depends_on = None 21 | 22 | 23 | @temporary_stub_entity_type_id 24 | def upgrade() -> None: 25 | """Upgrade.""" 26 | bind = op.get_bind() 27 | session = Session(bind=bind) 28 | session.execute(delete(Directory).filter_by(name="default_policy")) 29 | session.execute(delete(Attribute).filter_by(name="krbpwdpolicyreference")) 30 | 31 | 32 | def downgrade() -> None: 33 | """Downgrade.""" 34 | -------------------------------------------------------------------------------- /app/alembic/versions/692ae64e0cc5_.py: -------------------------------------------------------------------------------- 1 | """Merge migrations alembic head. 2 | 3 | Revision ID: 692ae64e0cc5 4 | Revises: 4334e2e871a4 5 | Create Date: 2025-02-24 10:46:08.830692 6 | 7 | """ 8 | 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 12 | revision = "692ae64e0cc5" 13 | down_revision = "4334e2e871a4" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | """Upgrade.""" 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_unique_constraint( 22 | "group_policy_uc", 23 | "GroupAccessPolicyMemberships", 24 | ["group_id", "policy_id"], 25 | ) 26 | # ### end Alembic commands ### 27 | 28 | 29 | def downgrade() -> None: 30 | """Downgrade.""" 31 | # ### commands auto generated by Alembic - please adjust! ### 32 | op.drop_constraint( 33 | "group_policy_uc", "GroupAccessPolicyMemberships", type_="unique" 34 | ) 35 | # ### end Alembic commands ### 36 | -------------------------------------------------------------------------------- /app/alembic/versions/6f8fe2548893_fix_read_only.py: -------------------------------------------------------------------------------- 1 | """Fix Read-Only. 2 | 3 | Revision ID: 6f8fe2548893 4 | Revises: fafc3d0b11ec 5 | Create Date: 2024-11-14 13:02:33.899640 6 | 7 | """ 8 | 9 | from alembic import op 10 | from sqlalchemy import delete, select, update 11 | from sqlalchemy.orm import Session 12 | 13 | from extra.alembic_utils import temporary_stub_entity_type_id 14 | from ldap_protocol.utils.helpers import create_integer_hash 15 | from models import Attribute, Directory 16 | 17 | # revision identifiers, used by Alembic. 18 | revision = "6f8fe2548893" 19 | down_revision = "fafc3d0b11ec" 20 | branch_labels = None 21 | depends_on = None 22 | 23 | 24 | @temporary_stub_entity_type_id 25 | def upgrade() -> None: 26 | """Upgrade.""" 27 | bind = op.get_bind() 28 | session = Session(bind=bind) 29 | 30 | ro_dir = session.scalar( 31 | select(Directory) 32 | .where(Directory.name == "readonly domain controllers") 33 | ) # fmt: skip 34 | 35 | if ro_dir: 36 | session.execute( 37 | delete(Attribute) 38 | .where( 39 | Attribute.name == "objectSid", 40 | Attribute.directory == ro_dir, 41 | ) 42 | ) # fmt: skip 43 | session.execute( 44 | update(Attribute) 45 | .where( 46 | Attribute.name == "sAMAccountName", 47 | Attribute.directory == ro_dir, 48 | Attribute.value == "domain users", 49 | ) 50 | .values({"value": ro_dir.name}), 51 | ) 52 | 53 | attr_object_class = session.scalar( 54 | select(Attribute) 55 | .where( 56 | Attribute.name == "objectClass", 57 | Attribute.directory == ro_dir, 58 | Attribute.value == "group", 59 | ), 60 | ) # fmt: skip 61 | if not attr_object_class: 62 | session.add( 63 | Attribute( 64 | name="objectClass", 65 | value="group", 66 | directory=ro_dir, 67 | ), 68 | ) 69 | session.add( 70 | Attribute( 71 | name=ro_dir.rdname, 72 | value=ro_dir.name, 73 | directory=ro_dir, 74 | ), 75 | ) 76 | session.add( 77 | Attribute( 78 | name="gidNumber", 79 | value=str(create_integer_hash(ro_dir.name)), 80 | directory=ro_dir, 81 | ), 82 | ) 83 | 84 | domain_sid = "-".join(ro_dir.object_sid.split("-")[:-1]) 85 | ro_dir.object_sid = domain_sid + "-521" 86 | 87 | session.commit() 88 | 89 | 90 | def downgrade() -> None: 91 | """Downgrade.""" 92 | -------------------------------------------------------------------------------- /app/alembic/versions/8c2bd40dd809_add_protocols_attr.py: -------------------------------------------------------------------------------- 1 | """Add protocols attrs. 2 | 3 | Revision ID: 8c2bd40dd809 4 | Revises: 6f8fe2548893 5 | Create Date: 2024-12-04 16:24:35.521868 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "8c2bd40dd809" 14 | down_revision = "6f8fe2548893" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | """Upgrade.""" 21 | for protocol_field in ("is_http", "is_ldap", "is_kerberos"): 22 | op.add_column( 23 | "Policies", 24 | sa.Column( 25 | protocol_field, 26 | sa.Boolean(), 27 | server_default=sa.text("true"), 28 | nullable=False, 29 | ), 30 | ) 31 | 32 | 33 | def downgrade() -> None: 34 | """Downgrade.""" 35 | for protocol_field in ("is_http", "is_ldap", "is_kerberos"): 36 | op.drop_column("Policies", protocol_field) 37 | -------------------------------------------------------------------------------- /app/alembic/versions/bf435bbd95ff_add_rdn_attr_name.py: -------------------------------------------------------------------------------- 1 | """Add RDN Attribute Naming and Resolve Duplication in KrbAdmin. 2 | 3 | Revision ID: bf435bbd95ff 4 | Revises: 196f0d327c6a 5 | Create Date: 2024-10-23 10:46:24.419163 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | from sqlalchemy.orm import Session 12 | 13 | from extra.alembic_utils import temporary_stub_entity_type_id 14 | from models import Attribute, Directory 15 | 16 | # revision identifiers, used by Alembic. 17 | revision = "bf435bbd95ff" 18 | down_revision = "196f0d327c6a" 19 | branch_labels = None 20 | depends_on = None 21 | 22 | 23 | @temporary_stub_entity_type_id 24 | def upgrade() -> None: 25 | """Upgrade.""" 26 | op.add_column("Directory", sa.Column("rdname", sa.String(length=64))) 27 | 28 | bind = op.get_bind() 29 | session = Session(bind=bind) 30 | 31 | attrs = [] 32 | 33 | for directory in session.query(Directory): 34 | if directory.is_domain: 35 | directory.rdname = "" 36 | continue 37 | 38 | rdname = directory.path[-1].split("=")[0] 39 | directory.rdname = rdname 40 | 41 | if rdname == "krbprincipalname": 42 | continue # already exists 43 | 44 | attrs.append( 45 | Attribute( 46 | name=rdname, 47 | value=directory.name, 48 | directory_id=directory.id, 49 | ) 50 | ) 51 | 52 | session.add_all(attrs) 53 | session.commit() 54 | 55 | op.alter_column("Directory", "rdname", nullable=False) 56 | 57 | 58 | @temporary_stub_entity_type_id 59 | def downgrade() -> None: 60 | """Downgrade.""" 61 | bind = op.get_bind() 62 | session = Session(bind=bind) 63 | 64 | for directory in session.query(Directory): 65 | if directory.is_domain: 66 | directory.rdname = "" 67 | continue 68 | 69 | session.execute( 70 | sa.delete(Attribute) 71 | .where( 72 | Attribute.name == directory.rdname, 73 | Attribute.name != "krbprincipalname", 74 | Attribute.directory_id == directory.id, 75 | ), 76 | ) # fmt: skip 77 | 78 | op.drop_column("Directory", "rdname") 79 | -------------------------------------------------------------------------------- /app/alembic/versions/bv546ccd35fa_fix_krbadmin_attrs.py: -------------------------------------------------------------------------------- 1 | """Resolve Duplication in KrbAdmin. 2 | 3 | Revision ID: bv546ccd35fa 4 | Revises: 8c2bd40dd809 5 | Create Date: 2024-12-10 10:46:24.419163 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | from sqlalchemy.orm import Session 12 | 13 | from extra.alembic_utils import temporary_stub_entity_type_id 14 | from models import Attribute, Directory 15 | 16 | # revision identifiers, used by Alembic. 17 | revision = "bv546ccd35fa" 18 | down_revision = "8c2bd40dd809" 19 | branch_labels = None 20 | depends_on = None 21 | 22 | 23 | @temporary_stub_entity_type_id 24 | def upgrade() -> None: 25 | """Upgrade.""" 26 | bind = op.get_bind() 27 | session = Session(bind=bind) 28 | 29 | krb_admin_user = session.scalar( 30 | sa.select(Directory) 31 | .join(Directory.user) 32 | .filter(Directory.name == "krbadmin"), 33 | ) 34 | 35 | if krb_admin_user: 36 | for attr, new_value in { 37 | "loginShell": "/bin/false", 38 | "uidNumber": "800", 39 | "gidNumber": "800", 40 | "homeDirectory": "/home/krbadmin", 41 | }.items(): 42 | session.execute( 43 | sa.delete(Attribute) 44 | .where( 45 | Attribute.name == attr, 46 | Attribute.directory_id == krb_admin_user.id, 47 | ), 48 | ) # fmt: skip 49 | session.add( 50 | Attribute( 51 | name=attr, 52 | value=new_value, 53 | directory_id=krb_admin_user.id, 54 | ) 55 | ) 56 | 57 | krb_admin_group = session.scalar( 58 | sa.select(Directory) 59 | .join(Directory.group) 60 | .filter(Directory.name == "krbadmin"), 61 | ) 62 | 63 | session.execute( 64 | sa.delete(Attribute) 65 | .where( 66 | Attribute.name == "gidNumber", 67 | Attribute.directory_id == krb_admin_group.id, 68 | ), 69 | ) # fmt: skip 70 | session.add( 71 | Attribute( 72 | name="gidNumber", 73 | value="800", 74 | directory_id=krb_admin_group.id, 75 | ) 76 | ) 77 | 78 | session.commit() 79 | 80 | 81 | def downgrade() -> None: 82 | """Downgrade.""" 83 | -------------------------------------------------------------------------------- /app/alembic/versions/dafg3a4b22ab_add_preauth_princ.py: -------------------------------------------------------------------------------- 1 | """Add preauth principals. 2 | 3 | Revision ID: dafg3a4b22ab 4 | Revises: f68a134a3685 5 | Create Date: 2024-12-20 16:28:24.419163 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | from sqlalchemy.orm import Session 12 | 13 | from extra.alembic_utils import temporary_stub_entity_type_id 14 | from ldap_protocol.kerberos import KERBEROS_STATE_NAME 15 | from models import Attribute, CatalogueSetting, User 16 | 17 | # revision identifiers, used by Alembic. 18 | revision = "dafg3a4b22ab" 19 | down_revision = "f68a134a3685" 20 | branch_labels = None 21 | depends_on = None 22 | 23 | 24 | @temporary_stub_entity_type_id 25 | def upgrade() -> None: 26 | """Upgrade.""" 27 | bind = op.get_bind() 28 | session = Session(bind=bind) 29 | 30 | for user in session.query(User): 31 | if user.sam_accout_name == "krbadmin": 32 | continue 33 | 34 | username, domain = user.user_principal_name.split("@") 35 | principal = f"{username}@{domain.upper()}" 36 | 37 | attr_principal = session.scalar( 38 | sa.select(Attribute) 39 | .filter( 40 | Attribute.name == "krbprincipalname", 41 | Attribute.value == principal, 42 | ), 43 | ) # fmt: skip 44 | if attr_principal: 45 | session.add( 46 | Attribute( 47 | name="krbticketflags", 48 | value="128", 49 | directory_id=attr_principal.directory_id, 50 | ) 51 | ) 52 | 53 | # NOTE: Remove duplicate Kerberos state settings and keep the latest one 54 | settings = session.scalar( 55 | sa.select(CatalogueSetting) 56 | .where(CatalogueSetting.name == KERBEROS_STATE_NAME), 57 | ) # fmt: skip 58 | 59 | if settings: 60 | session.execute( 61 | sa.delete(CatalogueSetting) 62 | .where( 63 | CatalogueSetting.name == KERBEROS_STATE_NAME, 64 | CatalogueSetting.id != settings.id, 65 | ), 66 | ) # fmt: skip 67 | 68 | session.commit() 69 | 70 | # NOTE: Set unique constraint on Settings.name 71 | op.drop_index(op.f("ix_Settings_name"), table_name="Settings") 72 | 73 | op.create_index( 74 | op.f("ix_Settings_name"), 75 | "Settings", 76 | ["name"], 77 | unique=True, 78 | ) 79 | 80 | 81 | def downgrade() -> None: 82 | """Downgrade.""" 83 | op.drop_index(op.f("ix_Settings_name"), table_name="Settings") 84 | op.create_index( 85 | op.f("ix_Settings_name"), 86 | "Settings", 87 | ["name"], 88 | unique=False, 89 | ) 90 | -------------------------------------------------------------------------------- /app/alembic/versions/f68a134a3685_add_bypass.py: -------------------------------------------------------------------------------- 1 | """Add bypass. 2 | 3 | Revision ID: f68a134a3685 4 | Revises: bv546ccd35fa 5 | Create Date: 2024-12-18 14:52:13.992686 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "f68a134a3685" 14 | down_revision = "bv546ccd35fa" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | """Upgrade.""" 21 | op.add_column( 22 | "Policies", 23 | sa.Column( 24 | "bypass_no_connection", 25 | sa.Boolean(), 26 | server_default=sa.text("false"), 27 | nullable=False, 28 | ), 29 | ) 30 | op.add_column( 31 | "Policies", 32 | sa.Column( 33 | "bypass_service_failure", 34 | sa.Boolean(), 35 | server_default=sa.text("false"), 36 | nullable=False, 37 | ), 38 | ) 39 | 40 | 41 | def downgrade() -> None: 42 | """Downgrade.""" 43 | op.drop_column("Policies", "bypass_service_failure") 44 | op.drop_column("Policies", "bypass_no_connection") 45 | -------------------------------------------------------------------------------- /app/alembic/versions/fafc3d0b11ec_.py: -------------------------------------------------------------------------------- 1 | """Add ReadOnly group and access policy for it. 2 | 3 | Revision ID: fafc3d0b11ec 4 | Revises: bf435bbd95ff 5 | Create Date: 2024-11-11 15:21:23.568233 6 | 7 | """ 8 | 9 | from alembic import op 10 | from sqlalchemy import delete, exists, select 11 | from sqlalchemy.exc import DBAPIError, IntegrityError 12 | from sqlalchemy.ext.asyncio import AsyncSession 13 | 14 | from extra.alembic_utils import temporary_stub_entity_type_id 15 | from ldap_protocol.policies.access_policy import create_access_policy 16 | from ldap_protocol.utils.queries import ( 17 | create_group, 18 | get_base_directories, 19 | get_search_path, 20 | ) 21 | from models import AccessPolicy, Directory 22 | 23 | # revision identifiers, used by Alembic. 24 | revision = "fafc3d0b11ec" 25 | down_revision = "bf435bbd95ff" 26 | branch_labels = None 27 | depends_on = None 28 | 29 | 30 | @temporary_stub_entity_type_id 31 | def upgrade() -> None: 32 | """Upgrade.""" 33 | 34 | async def _create_readonly_grp_and_plcy(connection) -> None: 35 | session = AsyncSession(bind=connection) 36 | await session.begin() 37 | base_dn_list = await get_base_directories(session) 38 | if not base_dn_list: 39 | return 40 | 41 | try: 42 | group_dir_query = select( 43 | exists(Directory) 44 | .where(Directory.name == "readonly domain controllers") 45 | ) # fmt: skip 46 | group_dir = (await session.scalars(group_dir_query)).one() 47 | 48 | if not group_dir: 49 | dir_, _ = await create_group( 50 | name="readonly domain controllers", 51 | sid=521, 52 | session=session, 53 | ) 54 | 55 | await session.flush() 56 | except (IntegrityError, DBAPIError): 57 | pass 58 | 59 | ro_access_policy_q = select( 60 | exists(AccessPolicy) 61 | .where(AccessPolicy.name == "ReadOnly Access Policy") 62 | ) # fmt: skip 63 | ro_access_policy = (await session.scalars(ro_access_policy_q)).one() 64 | 65 | if not ro_access_policy: 66 | await create_access_policy( 67 | name="ReadOnly Access Policy", 68 | can_add=False, 69 | can_modify=False, 70 | can_read=True, 71 | can_delete=False, 72 | grant_dn=base_dn_list[0].path_dn, 73 | groups=[dir_.path_dn], 74 | session=session, 75 | ) 76 | 77 | await session.commit() 78 | await session.close() 79 | 80 | op.run_async(_create_readonly_grp_and_plcy) 81 | 82 | 83 | @temporary_stub_entity_type_id 84 | def downgrade() -> None: 85 | """Downgrade.""" 86 | 87 | async def _delete_readonly_grp_and_plcy(connection) -> None: 88 | session = AsyncSession(bind=connection) 89 | await session.begin() 90 | base_dn_list = await get_base_directories(session) 91 | if not base_dn_list: 92 | return 93 | 94 | group_dn = ( 95 | "cn=readonly domain controllers,cn=groups," 96 | + base_dn_list[0].path_dn 97 | ) 98 | 99 | await session.execute( 100 | delete(AccessPolicy) 101 | .where(AccessPolicy.name == "ReadOnly Access Policy") 102 | ) # fmt: skip 103 | 104 | await session.execute( 105 | delete(Directory) 106 | .where(Directory.path == get_search_path(group_dn)) 107 | ) # fmt: skip 108 | 109 | await session.commit() 110 | 111 | op.run_async(_delete_readonly_grp_and_plcy) 112 | -------------------------------------------------------------------------------- /app/api/__init__.py: -------------------------------------------------------------------------------- 1 | """API module. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from .auth.router import auth_router 8 | from .auth.router_mfa import mfa_router 9 | from .auth.router_pwd_policy import pwd_router 10 | from .auth.session_router import session_router 11 | from .ldap_schema.entity_type_router import ldap_schema_router 12 | from .main.ap_router import access_policy_router 13 | from .main.dns_router import dns_router 14 | from .main.krb5_router import krb5_router 15 | from .main.router import entry_router 16 | from .network.router import network_router 17 | from .shadow.router import shadow_router 18 | 19 | __all__ = [ 20 | "auth_router", 21 | "session_router", 22 | "network_router", 23 | "mfa_router", 24 | "pwd_router", 25 | "access_policy_router", 26 | "ldap_schema_router", 27 | "dns_router", 28 | "krb5_router", 29 | "entry_router", 30 | "network_router", 31 | "shadow_router", 32 | ] 33 | -------------------------------------------------------------------------------- /app/api/auth/__init__.py: -------------------------------------------------------------------------------- 1 | """Auth api/module imports. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from .oauth2 import get_current_user 8 | 9 | __all__ = ["get_current_user"] 10 | -------------------------------------------------------------------------------- /app/api/auth/oauth2.py: -------------------------------------------------------------------------------- 1 | """OAuth modules. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from ipaddress import IPv4Address, IPv6Address 8 | from typing import Annotated 9 | 10 | from dishka import FromDishka 11 | from dishka.integrations.fastapi import inject 12 | from fastapi import Depends, HTTPException, Request, Response, status 13 | from sqlalchemy import select 14 | from sqlalchemy.ext.asyncio import AsyncSession 15 | from sqlalchemy.orm import defaultload 16 | 17 | from api.auth.utils import get_ip_from_request, get_user_agent_from_request 18 | from config import Settings 19 | from ldap_protocol.dialogue import UserSchema 20 | from ldap_protocol.session_storage import SessionStorage 21 | from ldap_protocol.utils.queries import get_user 22 | from models import Group, User 23 | from security import verify_password 24 | 25 | ALGORITHM = "HS256" 26 | 27 | _CREDENTIALS_EXCEPTION = HTTPException( 28 | status_code=status.HTTP_401_UNAUTHORIZED, 29 | detail="Could not validate credentials", 30 | headers={"WWW-Authenticate": "Bearer"}, 31 | ) 32 | 33 | 34 | async def authenticate_user( 35 | session: AsyncSession, 36 | username: str, 37 | password: str, 38 | ) -> User | None: 39 | """Get user and verify password. 40 | 41 | :param AsyncSession session: sa session 42 | :param str username: any str 43 | :param str password: any str 44 | :return User | None: User model (pydantic) 45 | """ 46 | user = await get_user(session, username) 47 | 48 | if not user or not user.password or not password: 49 | return None 50 | if not verify_password(password, user.password): 51 | return None 52 | return user 53 | 54 | 55 | @inject 56 | async def get_current_user( 57 | settings: FromDishka[Settings], 58 | session: FromDishka[AsyncSession], 59 | session_storage: FromDishka[SessionStorage], 60 | request: Request, 61 | response: Response, 62 | ip: Annotated[IPv4Address | IPv6Address, Depends(get_ip_from_request)], 63 | user_agent: Annotated[str, Depends(get_user_agent_from_request)], 64 | ) -> UserSchema: 65 | """Get current user. 66 | 67 | Fetches the user id associated with the session stored in the 68 | request's cookies, verifies the session, and returns the user schema. 69 | Makes a rekey of the session if necessary. 70 | 71 | :param FromDishka[Settings] settings: settings 72 | :param FromDishka[AsyncSession] session: db session 73 | :param FromDishka[SessionStorage] session_storage: session storage 74 | :param Request request: request 75 | :param Response response: response 76 | :param Annotated[IPv4Address | IPv6Address] ip: ip address 77 | :param Annotated[str] user_agent: user agent 78 | :return UserSchema: user schema 79 | """ 80 | session_key = request.cookies.get("id", "") 81 | try: 82 | user_id = await session_storage.get_user_id( 83 | settings, 84 | session_key, 85 | user_agent, 86 | str(ip), 87 | ) 88 | except KeyError as err: 89 | raise _CREDENTIALS_EXCEPTION from err 90 | 91 | user = await session.scalar( 92 | select(User) 93 | .options(defaultload(User.groups).selectinload(Group.access_policies)) 94 | .where(User.id == user_id) 95 | ) 96 | 97 | if user is None: 98 | raise _CREDENTIALS_EXCEPTION 99 | 100 | session_id, _ = session_key.split(".") 101 | try: 102 | if await session_storage.check_rekey( 103 | session_id, 104 | settings.SESSION_REKEY_INTERVAL, 105 | ): 106 | key = await session_storage.rekey_session(session_id, settings) 107 | response.set_cookie( 108 | key="id", 109 | value=key, 110 | httponly=True, 111 | expires=session_storage.key_ttl, 112 | ) 113 | except KeyError as err: 114 | raise _CREDENTIALS_EXCEPTION from err 115 | 116 | return await UserSchema.from_db(user, session_id) 117 | -------------------------------------------------------------------------------- /app/api/auth/router_pwd_policy.py: -------------------------------------------------------------------------------- 1 | """Password policy views. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from dishka import FromDishka 8 | from dishka.integrations.fastapi import DishkaRoute 9 | from fastapi import APIRouter, Depends, status 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | 12 | from api.auth import get_current_user 13 | from ldap_protocol.policies.password_policy import PasswordPolicySchema 14 | 15 | pwd_router = APIRouter( 16 | prefix="/password-policy", 17 | dependencies=[Depends(get_current_user)], 18 | tags=["Password policy"], 19 | route_class=DishkaRoute, 20 | ) 21 | 22 | 23 | @pwd_router.post("", status_code=status.HTTP_201_CREATED) 24 | async def create_policy( 25 | policy: PasswordPolicySchema, 26 | session: FromDishka[AsyncSession], 27 | ) -> PasswordPolicySchema: 28 | """Create current policy setting.""" 29 | return await policy.create_policy_settings(session) 30 | 31 | 32 | @pwd_router.get("") 33 | async def get_policy( 34 | session: FromDishka[AsyncSession], 35 | ) -> PasswordPolicySchema: 36 | """Get current policy setting.""" 37 | return await PasswordPolicySchema.get_policy_settings(session) 38 | 39 | 40 | @pwd_router.put("") 41 | async def update_policy( 42 | policy: PasswordPolicySchema, 43 | session: FromDishka[AsyncSession], 44 | ) -> PasswordPolicySchema: 45 | """Update current policy setting.""" 46 | await policy.update_policy_settings(session) 47 | return policy 48 | 49 | 50 | @pwd_router.delete("") 51 | async def reset_policy( 52 | session: FromDishka[AsyncSession], 53 | ) -> PasswordPolicySchema: 54 | """Reset current policy setting.""" 55 | return await PasswordPolicySchema.delete_policy_settings(session) 56 | -------------------------------------------------------------------------------- /app/api/auth/schema.py: -------------------------------------------------------------------------------- 1 | """Schemas for auth module. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import re 8 | from datetime import datetime 9 | from ipaddress import IPv4Address, IPv6Address 10 | from typing import Literal 11 | 12 | from fastapi.param_functions import Form 13 | from fastapi.security import OAuth2PasswordRequestForm 14 | from pydantic import ( 15 | BaseModel, 16 | ConfigDict, 17 | Field, 18 | SecretStr, 19 | computed_field, 20 | field_validator, 21 | ) 22 | 23 | from ldap_protocol.utils.const import EmailStr 24 | 25 | _domain_re = re.compile( 26 | "^((?!-)[A-Za-z0-9-]" + "{1,63}(? str: # noqa 70 | if re.match(_domain_re, v) is None: 71 | raise ValueError("Invalid domain value") 72 | return v.lower() 73 | 74 | 75 | class MFACreateRequest(BaseModel): 76 | """Create MFA creds request.""" 77 | 78 | mfa_key: str 79 | mfa_secret: str 80 | is_ldap_scope: bool 81 | 82 | @computed_field # type: ignore 83 | @property 84 | def key_name(self) -> str: 85 | if self.is_ldap_scope: 86 | return "mfa_key_ldap" 87 | 88 | return "mfa_key" 89 | 90 | @computed_field # type: ignore 91 | @property 92 | def secret_name(self) -> str: 93 | if self.is_ldap_scope: 94 | return "mfa_secret_ldap" 95 | 96 | return "mfa_secret" 97 | 98 | 99 | class MFAGetResponse(BaseModel): 100 | """Secret creds of api.""" 101 | 102 | mfa_key: str | None 103 | mfa_secret: SecretStr | None 104 | mfa_key_ldap: str | None 105 | mfa_secret_ldap: SecretStr | None 106 | 107 | 108 | class MFAChallengeResponse(BaseModel): 109 | """MFA Challenge state.""" 110 | 111 | status: str 112 | message: str 113 | 114 | 115 | class SessionContentSchema(BaseModel): 116 | """Session content schema.""" 117 | 118 | model_config = ConfigDict(extra="allow") 119 | 120 | id: int 121 | sign: str = Field("", description="Session signature") 122 | issued: datetime 123 | ip: IPv4Address | IPv6Address 124 | protocol: Literal["ldap", "http"] = "http" 125 | -------------------------------------------------------------------------------- /app/api/auth/session_router.py: -------------------------------------------------------------------------------- 1 | """Session router for handling user sessions.""" 2 | 3 | from dishka import FromDishka 4 | from dishka.integrations.fastapi import DishkaRoute 5 | from fastapi import Depends, HTTPException, status 6 | from fastapi.routing import APIRouter 7 | from sqlalchemy.ext.asyncio import AsyncSession 8 | 9 | from ldap_protocol.session_storage import SessionStorage 10 | from ldap_protocol.utils.queries import get_user 11 | 12 | from .oauth2 import get_current_user 13 | from .schema import SessionContentSchema 14 | 15 | session_router = APIRouter( 16 | prefix="/sessions", 17 | tags=["Session"], 18 | route_class=DishkaRoute, 19 | dependencies=[Depends(get_current_user)], 20 | ) 21 | 22 | 23 | @session_router.get("/{upn}") 24 | async def get_user_session( 25 | upn: str, 26 | storage: FromDishka[SessionStorage], 27 | session: FromDishka[AsyncSession], 28 | ) -> dict[str, SessionContentSchema]: 29 | """Get user (upn, san or dn) data.""" 30 | user = await get_user(session, upn) 31 | if not user: 32 | raise HTTPException(status.HTTP_404_NOT_FOUND, "User not found.") 33 | return await storage.get_user_sessions(user.id) 34 | 35 | 36 | @session_router.delete("/{upn}", status_code=status.HTTP_204_NO_CONTENT) 37 | async def delete_user_sessions( 38 | upn: str, 39 | storage: FromDishka[SessionStorage], 40 | session: FromDishka[AsyncSession], 41 | ) -> None: 42 | """Delete user (upn, san or dn) data.""" 43 | user = await get_user(session, upn) 44 | if not user: 45 | raise HTTPException(status.HTTP_404_NOT_FOUND, "User not found.") 46 | await storage.clear_user_sessions(user.id) 47 | 48 | 49 | @session_router.delete( 50 | "/session/{session_id}", 51 | status_code=status.HTTP_204_NO_CONTENT, 52 | ) 53 | async def delete_session( 54 | session_id: str, 55 | storage: FromDishka[SessionStorage], 56 | ) -> None: 57 | """Delete current logged in user data.""" 58 | await storage.delete_user_session(session_id) 59 | -------------------------------------------------------------------------------- /app/api/auth/utils.py: -------------------------------------------------------------------------------- 1 | """Auth utils. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from ipaddress import IPv4Address, IPv6Address, ip_address 8 | 9 | from fastapi import HTTPException, Request, Response, status 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | 12 | from config import Settings 13 | from ldap_protocol.session_storage import SessionStorage 14 | from ldap_protocol.utils.queries import set_last_logon_user 15 | from models import User 16 | 17 | 18 | def get_ip_from_request(request: Request) -> IPv4Address | IPv6Address: 19 | """Get IP address from request. 20 | 21 | :param Request request: The incoming request object. 22 | :return IPv4Address | None: The IP address or None. 23 | """ 24 | forwarded_for = request.headers.get("X-Forwarded-For") 25 | if forwarded_for: 26 | client_ip = forwarded_for.split(",")[0] 27 | else: 28 | if request.client is None: 29 | raise HTTPException(status.HTTP_403_FORBIDDEN) 30 | client_ip = request.client.host 31 | 32 | return ip_address(client_ip) 33 | 34 | 35 | def get_user_agent_from_request(request: Request) -> str: 36 | """Get user agent from request. 37 | 38 | :param Request request: The incoming request object. 39 | :return str: The user agent header. 40 | """ 41 | user_agent_header = request.headers.get("User-Agent") 42 | return user_agent_header if user_agent_header else "" 43 | 44 | 45 | async def create_and_set_session_key( 46 | user: User, 47 | session: AsyncSession, 48 | settings: Settings, 49 | response: Response, 50 | storage: SessionStorage, 51 | ip: IPv4Address | IPv6Address, 52 | user_agent: str, 53 | ) -> None: 54 | """Create and set access and refresh tokens. 55 | 56 | Update the user's last logon time and set the appropriate cookies 57 | in the response. 58 | 59 | :param User user: db user 60 | :param AsyncSession session: db session 61 | :param Settings settings: app settings 62 | :param Response response: fastapi response object 63 | """ 64 | await set_last_logon_user(user, session, settings.TIMEZONE) 65 | 66 | key = await storage.create_session( 67 | user.id, 68 | settings, 69 | extra_data={ 70 | "ip": str(ip), 71 | "user_agent": storage.get_user_agent_hash(user_agent), 72 | }, 73 | ) 74 | 75 | response.set_cookie( 76 | key="id", 77 | value=key, 78 | httponly=True, 79 | expires=storage.key_ttl, 80 | ) 81 | -------------------------------------------------------------------------------- /app/api/exception_handlers.py: -------------------------------------------------------------------------------- 1 | """Exception handlers.""" 2 | 3 | from typing import NoReturn 4 | 5 | from fastapi import HTTPException, Request, status 6 | from loguru import logger 7 | 8 | 9 | def handle_db_connect_error( 10 | request: Request, # noqa: ARG001 11 | exc: Exception, 12 | ) -> NoReturn: 13 | """Handle duplicate.""" 14 | if "QueuePool limit of size" in str(exc): 15 | logger.critical("POOL EXCEEDED {}", exc) 16 | 17 | raise HTTPException( 18 | status.HTTP_429_TOO_MANY_REQUESTS, 19 | detail="Connection Pool Exceeded", 20 | ) 21 | 22 | logger.critical("DB BACKEND ERR {}", exc) 23 | 24 | raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) 25 | 26 | 27 | async def handle_dns_error( 28 | request: Request, # noqa: ARG001 29 | exc: Exception, 30 | ) -> NoReturn: 31 | """Handle EmptyLabel exception.""" 32 | logger.critical("DNS manager error: {}", exc) 33 | raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) 34 | 35 | 36 | async def handle_instance_not_found_error( 37 | request: Request, # noqa: ARG001 38 | exc: Exception, # noqa: ARG001 39 | ) -> NoReturn: 40 | """Handle Instance Not Found error.""" 41 | raise HTTPException( 42 | status_code=status.HTTP_404_NOT_FOUND, 43 | detail="Instance not found.", 44 | ) 45 | 46 | 47 | async def handle_instance_cant_modify_error( 48 | request: Request, # noqa: ARG001 49 | exc: Exception, # noqa: ARG001 50 | ) -> NoReturn: 51 | """Handle Instance Cant Modify error.""" 52 | raise HTTPException( 53 | status_code=status.HTTP_400_BAD_REQUEST, 54 | detail="System Instance cannot be modified.", 55 | ) 56 | -------------------------------------------------------------------------------- /app/api/ldap_schema/__init__.py: -------------------------------------------------------------------------------- 1 | """LDAP Schema routers. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from typing import Annotated 8 | 9 | from annotated_types import Len 10 | from dishka.integrations.fastapi import DishkaRoute 11 | from fastapi import APIRouter, Body, Depends 12 | 13 | from api.auth import get_current_user 14 | 15 | LimitedListType = Annotated[ 16 | list[str], 17 | Len(min_length=1, max_length=10000), 18 | Body(embed=True), 19 | ] 20 | 21 | ldap_schema_router = APIRouter( 22 | prefix="/schema", 23 | tags=["Schema"], 24 | dependencies=[Depends(get_current_user)], 25 | route_class=DishkaRoute, 26 | ) 27 | -------------------------------------------------------------------------------- /app/api/main/__init__.py: -------------------------------------------------------------------------------- 1 | """Main web routers.""" 2 | -------------------------------------------------------------------------------- /app/api/main/ap_router.py: -------------------------------------------------------------------------------- 1 | """Access policy management router. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from dishka.integrations.fastapi import FromDishka, inject 8 | from fastapi import APIRouter, Depends 9 | from sqlalchemy.ext.asyncio import AsyncSession 10 | 11 | from api.auth import get_current_user 12 | from ldap_protocol.policies.access_policy import get_policies 13 | 14 | from .schema import MaterialAccessPolicySchema 15 | 16 | access_policy_router = APIRouter( 17 | prefix="/access_policy", 18 | tags=["Access Policy"], 19 | ) 20 | 21 | 22 | @access_policy_router.get("", dependencies=[Depends(get_current_user)]) 23 | @inject 24 | async def get_access_policies( 25 | session: FromDishka[AsyncSession], 26 | ) -> list[MaterialAccessPolicySchema]: 27 | """Get APs. 28 | 29 | \f 30 | :param AccessPolicySchema policy: ap 31 | :param FromDishka[AsyncSession] session: db. 32 | """ 33 | return [ 34 | MaterialAccessPolicySchema( 35 | id=policy.id, 36 | name=policy.name, 37 | can_read=policy.can_read, 38 | can_add=policy.can_add, 39 | can_modify=policy.can_modify, 40 | directories=(d.path_dn for d in policy.directories), 41 | groups=(g.directory.path_dn for g in policy.groups), 42 | ) 43 | for policy in await get_policies(session) 44 | ] 45 | -------------------------------------------------------------------------------- /app/api/main/dns_router.py: -------------------------------------------------------------------------------- 1 | """DNS service router. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from dishka import FromDishka 8 | from dishka.integrations.fastapi import DishkaRoute 9 | from fastapi import Depends, HTTPException 10 | from fastapi.routing import APIRouter 11 | from sqlalchemy.ext.asyncio import AsyncSession 12 | from starlette import status 13 | 14 | from api.auth import get_current_user 15 | from api.main.schema import ( 16 | DNSServiceRecordCreateRequest, 17 | DNSServiceRecordDeleteRequest, 18 | DNSServiceRecordUpdateRequest, 19 | DNSServiceSetupRequest, 20 | ) 21 | from config import Settings 22 | from ldap_protocol.dns import ( 23 | AbstractDNSManager, 24 | DNSManagerSettings, 25 | DNSManagerState, 26 | DNSRecords, 27 | get_dns_state, 28 | set_dns_manager_state, 29 | ) 30 | 31 | dns_router = APIRouter( 32 | prefix="/dns", 33 | tags=["DNS_SERVICE"], 34 | dependencies=[Depends(get_current_user)], 35 | route_class=DishkaRoute, 36 | ) 37 | 38 | 39 | @dns_router.post("/record") 40 | async def create_record( 41 | data: DNSServiceRecordCreateRequest, 42 | dns_manager: FromDishka[AbstractDNSManager], 43 | ) -> None: 44 | """Create DNS record with given params.""" 45 | await dns_manager.create_record( 46 | data.record_name, 47 | data.record_value, 48 | data.record_type, 49 | data.ttl, 50 | ) 51 | 52 | 53 | @dns_router.delete("/record") 54 | async def delete_single_record( 55 | data: DNSServiceRecordDeleteRequest, 56 | dns_manager: FromDishka[AbstractDNSManager], 57 | ) -> None: 58 | """Delete DNS record with given params.""" 59 | await dns_manager.delete_record( 60 | data.record_name, 61 | data.record_value, 62 | data.record_type, 63 | ) 64 | 65 | 66 | @dns_router.patch("/record") 67 | async def update_record( 68 | data: DNSServiceRecordUpdateRequest, 69 | dns_manager: FromDishka[AbstractDNSManager], 70 | ) -> None: 71 | """Update DNS record with given params.""" 72 | await dns_manager.update_record( 73 | data.record_name, 74 | data.record_value, 75 | data.record_type, 76 | data.ttl, 77 | ) 78 | 79 | 80 | @dns_router.get("/record") 81 | async def get_all_records( 82 | dns_manager: FromDishka[AbstractDNSManager], 83 | ) -> list[DNSRecords]: 84 | """Get all DNS records of current zone.""" 85 | return await dns_manager.get_all_records() 86 | 87 | 88 | @dns_router.get("/status") 89 | async def get_dns_status( 90 | session: FromDishka[AsyncSession], 91 | dns_settings: FromDishka[DNSManagerSettings], 92 | ) -> dict[str, str | None]: 93 | """Get DNS service status.""" 94 | state = await get_dns_state(session) 95 | return { 96 | "dns_status": state, 97 | "zone_name": dns_settings.zone_name, 98 | "dns_server_ip": dns_settings.dns_server_ip, 99 | } 100 | 101 | 102 | @dns_router.post("/setup") 103 | async def setup_dns( 104 | data: DNSServiceSetupRequest, 105 | dns_manager: FromDishka[AbstractDNSManager], 106 | session: FromDishka[AsyncSession], 107 | settings: FromDishka[Settings], 108 | ) -> None: 109 | """Set up DNS service. 110 | 111 | Create zone file, get TSIG key, reload DNS server if selfhosted. 112 | """ 113 | zone_file = None 114 | conf_part = None 115 | dns_ip_address = data.dns_ip_address 116 | tsig_key = data.tsig_key 117 | 118 | if data.dns_status == DNSManagerState.SELFHOSTED: 119 | zone_file_template = settings.TEMPLATES.get_template("zone.template") 120 | zone_file = await zone_file_template.render_async(domain=data.domain) 121 | 122 | tmpl = settings.TEMPLATES.get_template( 123 | "named_conf_local_zone_part.template", 124 | ) 125 | conf_part = await tmpl.render_async(domain=data.domain) 126 | 127 | try: 128 | await dns_manager.setup( 129 | session=session, 130 | settings=settings, 131 | domain=data.domain, 132 | dns_ip_address=dns_ip_address, 133 | zone_file=zone_file, 134 | tsig_key=tsig_key, 135 | named_conf_local_part=conf_part, 136 | ) 137 | except Exception as e: 138 | raise HTTPException(status.HTTP_424_FAILED_DEPENDENCY, e) 139 | 140 | await set_dns_manager_state(session, data.dns_status) 141 | await session.commit() 142 | -------------------------------------------------------------------------------- /app/api/main/router.py: -------------------------------------------------------------------------------- 1 | """Main API module, mirrors ldap schema. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from dishka.integrations.fastapi import DishkaRoute 8 | from fastapi import Depends, Request 9 | from fastapi.routing import APIRouter 10 | 11 | from ldap_protocol.ldap_requests import ( 12 | AddRequest, 13 | DeleteRequest, 14 | ModifyDNRequest, 15 | ModifyRequest, 16 | ) 17 | from ldap_protocol.ldap_responses import LDAPResult 18 | 19 | from .schema import SearchRequest, SearchResponse, SearchResultDone 20 | from .utils import get_ldap_session 21 | 22 | entry_router = APIRouter( 23 | prefix="/entry", 24 | tags=["LDAP API"], 25 | route_class=DishkaRoute, 26 | dependencies=[Depends(get_ldap_session)], 27 | ) 28 | 29 | 30 | @entry_router.post("/search") 31 | async def search( 32 | request: SearchRequest, 33 | req: Request, 34 | ) -> SearchResponse: 35 | """LDAP SEARCH entry request.""" 36 | responses = await request.handle_api(req.state.dishka_container) 37 | metadata: SearchResultDone = responses.pop(-1) # type: ignore 38 | 39 | return SearchResponse( 40 | result_code=metadata.result_code, 41 | matchedDN=metadata.matched_dn, 42 | errorMessage=metadata.error_message, 43 | search_result=responses, 44 | total_objects=metadata.total_objects, 45 | total_pages=metadata.total_pages, 46 | ) 47 | 48 | 49 | @entry_router.post("/add") 50 | async def add( 51 | request: AddRequest, 52 | req: Request, 53 | ) -> LDAPResult: 54 | """LDAP ADD entry request.""" 55 | return await request.handle_api(req.state.dishka_container) 56 | 57 | 58 | @entry_router.patch("/update") 59 | async def modify( 60 | request: ModifyRequest, 61 | req: Request, 62 | ) -> LDAPResult: 63 | """LDAP MODIFY entry request.""" 64 | return await request.handle_api(req.state.dishka_container) 65 | 66 | 67 | @entry_router.patch("/update_many") 68 | async def modify_many( 69 | requests: list[ModifyRequest], 70 | req: Request, 71 | ) -> list[LDAPResult]: 72 | """Bulk LDAP MODIFY entry request.""" 73 | results = [] 74 | for request in requests: 75 | results.append(await request.handle_api(req.state.dishka_container)) 76 | return results 77 | 78 | 79 | @entry_router.put("/update/dn") 80 | async def modify_dn( 81 | request: ModifyDNRequest, 82 | req: Request, 83 | ) -> LDAPResult: 84 | """LDAP MODIFY entry DN request.""" 85 | return await request.handle_api(req.state.dishka_container) 86 | 87 | 88 | @entry_router.delete("/delete") 89 | async def delete( 90 | request: DeleteRequest, 91 | req: Request, 92 | ) -> LDAPResult: 93 | """LDAP DELETE entry request.""" 94 | return await request.handle_api(req.state.dishka_container) 95 | -------------------------------------------------------------------------------- /app/api/main/schema.py: -------------------------------------------------------------------------------- 1 | """Schemas for main router. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from typing import final 8 | 9 | from dishka import AsyncContainer 10 | from pydantic import BaseModel, Field, SecretStr 11 | from sqlalchemy.sql.elements import ColumnElement, UnaryExpression 12 | 13 | from ldap_protocol.dns import DNSManagerState 14 | from ldap_protocol.filter_interpreter import Filter, cast_str_filter2sql 15 | from ldap_protocol.ldap_requests import SearchRequest as LDAPSearchRequest 16 | from ldap_protocol.ldap_responses import SearchResultDone, SearchResultEntry 17 | 18 | 19 | class SearchRequest(LDAPSearchRequest): 20 | """Search request for web api.""" 21 | 22 | filter: str = Field(..., examples=["(objectClass=*)"]) # type: ignore 23 | 24 | def cast_filter(self) -> UnaryExpression | ColumnElement: 25 | """Cast str filter to sa sql.""" 26 | filter_ = self.filter.lower().replace("objectcategory", "objectclass") 27 | return cast_str_filter2sql(Filter.parse(filter_).simplify()) 28 | 29 | @final 30 | async def handle_api( # type: ignore 31 | self, 32 | container: AsyncContainer, 33 | ) -> list[SearchResultEntry | SearchResultDone]: 34 | """Get all responses.""" 35 | return await self._handle_api(container) # type: ignore 36 | 37 | 38 | class SearchResponse(SearchResultDone): 39 | """Search response for web api.""" 40 | 41 | search_result: list[SearchResultEntry] 42 | 43 | 44 | class KerberosSetupRequest(BaseModel): 45 | """Kerberos setup data.""" 46 | 47 | krbadmin_password: SecretStr 48 | admin_password: SecretStr 49 | stash_password: SecretStr 50 | 51 | 52 | class _PolicyFields: 53 | name: str 54 | can_read: bool 55 | can_add: bool 56 | can_modify: bool 57 | directories: list[str] 58 | groups: list[str] 59 | 60 | 61 | class _MaterialFields: 62 | id: int 63 | 64 | 65 | class AccessPolicySchema(_PolicyFields, BaseModel): 66 | """AP Schema w/o id.""" 67 | 68 | 69 | class MaterialAccessPolicySchema(_PolicyFields, _MaterialFields, BaseModel): 70 | """AP Schema with id.""" 71 | 72 | 73 | class DNSServiceSetupRequest(BaseModel): 74 | """DNS setup request schema.""" 75 | 76 | dns_status: DNSManagerState 77 | domain: str 78 | dns_ip_address: str | None = Field(None) 79 | tsig_key: str | None = Field(None) 80 | 81 | 82 | class DNSServiceRecordBaseRequest(BaseModel): 83 | """DNS setup base schema.""" 84 | 85 | record_name: str 86 | record_type: str 87 | 88 | 89 | class DNSServiceRecordCreateRequest(DNSServiceRecordBaseRequest): 90 | """DNS create request schema.""" 91 | 92 | record_value: str 93 | ttl: int | None = Field(None) 94 | 95 | 96 | class DNSServiceRecordDeleteRequest(DNSServiceRecordBaseRequest): 97 | """DNS delete request schema.""" 98 | 99 | record_value: str 100 | 101 | 102 | class DNSServiceRecordUpdateRequest(DNSServiceRecordBaseRequest): 103 | """DNS update request schema.""" 104 | 105 | record_value: str | None = Field(None) 106 | ttl: int | None = Field(None) 107 | -------------------------------------------------------------------------------- /app/api/main/utils.py: -------------------------------------------------------------------------------- 1 | """Common utils for ldap api.""" 2 | 3 | from typing import Annotated 4 | 5 | from dishka import FromDishka 6 | from dishka.integrations.fastapi import inject 7 | from fastapi import Depends 8 | 9 | from api.auth import get_current_user 10 | from ldap_protocol.dialogue import LDAPSession, UserSchema 11 | 12 | 13 | @inject 14 | async def get_ldap_session( 15 | ldap_session: FromDishka[LDAPSession], 16 | user: Annotated[UserSchema, Depends(get_current_user)], 17 | ) -> LDAPSession: 18 | """Create LDAP session.""" 19 | await ldap_session.set_user(user) 20 | return ldap_session 21 | -------------------------------------------------------------------------------- /app/api/network/__init__.py: -------------------------------------------------------------------------------- 1 | """Network routers.""" 2 | -------------------------------------------------------------------------------- /app/api/network/utils.py: -------------------------------------------------------------------------------- 1 | """Network utils. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from fastapi import HTTPException, status 8 | from sqlalchemy import func, select 9 | from sqlalchemy.ext.asyncio import AsyncSession 10 | 11 | from models import NetworkPolicy 12 | 13 | 14 | async def check_policy_count(session: AsyncSession) -> None: 15 | """Check if policy count euqals 1. 16 | 17 | :param AsyncSession session: db 18 | :raises HTTPException: 422 19 | """ 20 | count = await session.scalars( 21 | ( 22 | select(func.count()) 23 | .select_from(NetworkPolicy) 24 | .filter_by(enabled=True) 25 | ), 26 | ) 27 | 28 | if count.one() == 1: 29 | raise HTTPException( 30 | status.HTTP_422_UNPROCESSABLE_ENTITY, 31 | "At least one policy should be active", 32 | ) 33 | -------------------------------------------------------------------------------- /app/api/shadow/__init__.py: -------------------------------------------------------------------------------- 1 | """Shadow router.""" 2 | -------------------------------------------------------------------------------- /app/api/shadow/router.py: -------------------------------------------------------------------------------- 1 | """Shadow api. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from ipaddress import IPv4Address 8 | from typing import Annotated 9 | 10 | from dishka import FromDishka 11 | from dishka.integrations.fastapi import DishkaRoute 12 | from fastapi import APIRouter, Body, HTTPException, status 13 | from loguru import logger 14 | from sqlalchemy.ext.asyncio import AsyncSession 15 | 16 | from ldap_protocol.multifactor import LDAPMultiFactorAPI, MultifactorAPI 17 | from ldap_protocol.policies.network_policy import get_user_network_policy 18 | from ldap_protocol.policies.password_policy import ( 19 | PasswordPolicySchema, 20 | post_save_password_actions, 21 | ) 22 | from ldap_protocol.utils.queries import get_user 23 | from models import MFAFlags 24 | from security import get_password_hash 25 | 26 | shadow_router = APIRouter(route_class=DishkaRoute) 27 | 28 | 29 | @shadow_router.post("/mfa/push") 30 | async def proxy_request( 31 | principal: Annotated[str, Body(embed=True)], 32 | ip: Annotated[IPv4Address, Body(embed=True)], 33 | mfa: FromDishka[LDAPMultiFactorAPI], 34 | session: FromDishka[AsyncSession], 35 | ) -> None: 36 | """Proxy request to mfa.""" 37 | user = await get_user(session, principal) 38 | 39 | if not user: 40 | raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY) 41 | 42 | network_policy = await get_user_network_policy( 43 | ip, 44 | user, 45 | session, 46 | ) 47 | 48 | if network_policy is None or not network_policy.is_kerberos: 49 | raise HTTPException(status.HTTP_403_FORBIDDEN) 50 | 51 | if not mfa or network_policy.mfa_status == MFAFlags.DISABLED: 52 | return 53 | elif network_policy.mfa_status in (MFAFlags.ENABLED, MFAFlags.WHITELIST): 54 | if ( 55 | network_policy.mfa_status == MFAFlags.WHITELIST 56 | and not network_policy.mfa_groups 57 | ): 58 | return 59 | 60 | try: 61 | if await mfa.ldap_validate_mfa(user.user_principal_name, None): 62 | return 63 | 64 | except MultifactorAPI.MFAConnectError: 65 | logger.error("MFA connect error") 66 | if network_policy.bypass_no_connection: 67 | return 68 | except MultifactorAPI.MFAMissconfiguredError: 69 | logger.error("MFA missconfigured error") 70 | return 71 | except MultifactorAPI.MultifactorError: 72 | logger.error("MFA service failure") 73 | if network_policy.bypass_service_failure: 74 | return 75 | 76 | raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) 77 | 78 | 79 | @shadow_router.post("/sync/password") 80 | async def sync_password( 81 | principal: Annotated[str, Body(embed=True)], 82 | new_password: Annotated[str, Body(embed=True)], 83 | session: FromDishka[AsyncSession], 84 | ) -> None: 85 | """Reset user's (entry) password. 86 | 87 | - **principal**: user upn 88 | - **new_password**: password to set 89 | \f 90 | :param FromDishka[AsyncSession] session: db 91 | :param FromDishka[AbstractKadmin] kadmin: kadmin api 92 | :param Annotated[str, Body principal: reset target user 93 | :param Annotated[str, Body new_password: new password for user 94 | :raises HTTPException: 404 if user not found 95 | :raises HTTPException: 422 if password not valid 96 | :return None: None 97 | """ 98 | user = await get_user(session, principal) 99 | 100 | if not user: 101 | raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) 102 | 103 | policy = await PasswordPolicySchema.get_policy_settings(session) 104 | errors = await policy.validate_password_with_policy(new_password, user) 105 | 106 | if errors: 107 | raise HTTPException( 108 | status.HTTP_422_UNPROCESSABLE_ENTITY, 109 | detail=errors, 110 | ) 111 | 112 | user.password = get_password_hash(new_password) 113 | await post_save_password_actions(user, session) 114 | -------------------------------------------------------------------------------- /app/extra/__init__.py: -------------------------------------------------------------------------------- 1 | """Extra data actions. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from .dev_data import DATA, TEST_DATA 8 | from .setup_dev import setup_enviroment 9 | 10 | __all__ = ["DATA", "TEST_DATA", "setup_enviroment"] 11 | -------------------------------------------------------------------------------- /app/extra/alembic_utils.py: -------------------------------------------------------------------------------- 1 | """Alembic utils.""" 2 | 3 | from typing import Callable 4 | 5 | import sqlalchemy as sa 6 | from alembic import op 7 | 8 | 9 | def temporary_stub_entity_type_id(func: Callable) -> Callable: 10 | """Add and drop the 'entity_type_id' column in the 'Directory' table. 11 | 12 | State of the database at the time of migration 13 | doesn`t contains 'entity_type_id' column into 'Directory' table, 14 | but 'Directory' model has the column. 15 | 16 | Before starting the migration, add 'entity_type_id' column. 17 | Then migration complited, delete 'entity_type_id' column. 18 | 19 | Don`t like excluding columns with Deferred(), 20 | because you will need to refactor SQL queries 21 | that precede the 'ba78cef9700a_initial_entity_type.py' migration 22 | and include working with the Directory. 23 | 24 | :param Callable func: any function 25 | :return Callable: any function 26 | """ 27 | 28 | def wrapper(*args, **kwargs): 29 | op.add_column( 30 | "Directory", 31 | sa.Column("entity_type_id", sa.Integer(), nullable=True), 32 | ) 33 | func(*args, **kwargs) 34 | op.drop_column("Directory", "entity_type_id") 35 | return None 36 | 37 | return wrapper 38 | -------------------------------------------------------------------------------- /app/extra/dump_acme_certs.py: -------------------------------------------------------------------------------- 1 | """Dumps Let's Encrypt certificate from the `acme.json` file. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import base64 8 | import json 9 | import os 10 | import time 11 | 12 | from loguru import logger 13 | 14 | 15 | def dump_acme_cert(resolver: str = "md-resolver") -> None: 16 | """Dump Let's Encrypt certificate from the `acme.json` file. 17 | 18 | acme file can be generated long enough to exit the script, 19 | try read until file contents is generated. 20 | """ 21 | if os.path.exists("/certs/cert.pem") and os.path.exists( 22 | "/certs/privkey.pem" 23 | ): 24 | logger.info("Certeficate and key already exists, exiting...") 25 | return 26 | 27 | if not os.path.exists("/certs/acme.json"): 28 | logger.warning("Cannot load ACME file, exiting...") 29 | return 30 | 31 | try: 32 | with open("/certs/acme.json") as certfile: 33 | data = json.load(certfile) 34 | 35 | domain = data[resolver]["Certificates"][0]["domain"]["main"] 36 | cert: str = data[resolver]["Certificates"][0]["certificate"] 37 | key: str = data[resolver]["Certificates"][0]["key"] 38 | except (KeyError, IndexError, TypeError, json.JSONDecodeError) as err: 39 | logger.error("Error loading TLS certeficate, exiting...") 40 | time.sleep(5) 41 | raise SystemExit(1) from err 42 | 43 | logger.info(f"Loaded certeficate for {domain}") 44 | 45 | with ( 46 | open("/certs/cert.pem", "w") as cert_f, 47 | open("/certs/privkey.pem", "w") as key_f, 48 | ): 49 | cert_f.write(base64.b64decode(cert.encode("ascii")).decode()) 50 | key_f.write(base64.b64decode(key.encode("ascii")).decode()) 51 | 52 | # The config time needs to be updated in order 53 | # for the certificates to be updated. 54 | os.utime("/traefik.yml", (time.time(), time.time())) 55 | 56 | logger.info("Certeficate and key dumped") 57 | return 58 | -------------------------------------------------------------------------------- /app/extra/generate_cert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CERT_PATH="/certs/krbcert.pem" 4 | EXPIRATION_THRESHOLD=30 5 | LIFESPAN_IN_DAYS=1095 # 3 years 6 | 7 | generate_certificate() { 8 | openssl req -nodes -new -x509 \ 9 | -days $LIFESPAN_IN_DAYS \ 10 | -keyout /certs/krbkey.pem \ 11 | -out /certs/krbcert.pem \ 12 | -addext "subjectAltName=DNS:kadmin_api" \ 13 | -subj '/C=RU/ST=Moscow/L=Moscow/O=Global Security/OU=Multifactor/CN=kadmin_api' > /dev/null 2>&1 14 | } 15 | 16 | if [[ ! -f "$CERT_PATH" ]]; then 17 | echo "Certificate not found. Generating a new certificate..." 18 | generate_certificate 19 | exit 0 20 | fi 21 | 22 | end_date=$(openssl x509 -enddate -noout -in "$CERT_PATH" | cut -d= -f2) 23 | end_date_epoch=$(date -d "$end_date" +%s) 24 | current_date_epoch=$(date +%s) 25 | days_left=$(( (end_date_epoch - current_date_epoch) / 86400 )) 26 | 27 | if [[ $days_left -le $EXPIRATION_THRESHOLD ]]; then 28 | echo "Certificate expires in $days_left days or has already expired. Generating a new certificate..." 29 | generate_certificate 30 | exit 0 31 | fi 32 | 33 | echo "Certificate is valid for another $days_left days." 34 | -------------------------------------------------------------------------------- /app/extra/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | """Periodic or runtime scripts.""" 2 | -------------------------------------------------------------------------------- /app/extra/scripts/check_ldap_principal.py: -------------------------------------------------------------------------------- 1 | """Check ldap principal and keytab existence. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import os 8 | 9 | from loguru import logger 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | 12 | from config import Settings 13 | from ldap_protocol.kerberos import ( 14 | AbstractKadmin, 15 | KerberosState, 16 | get_krb_server_state, 17 | ) 18 | from ldap_protocol.utils.queries import get_base_directories 19 | 20 | 21 | async def check_ldap_principal( 22 | kadmin: AbstractKadmin, 23 | session: AsyncSession, 24 | settings: Settings, 25 | ) -> None: 26 | """Check ldap principal and keytab existence. 27 | 28 | :param AbstractKadmin kadmin: kadmin 29 | :param AsyncSession session: db 30 | :param Settings settings: settings 31 | """ 32 | logger.info("Checking ldap principal and keytab existence.") 33 | 34 | if os.path.exists(settings.KRB5_LDAP_KEYTAB): 35 | return 36 | 37 | domains = await get_base_directories(session) 38 | if not domains: 39 | logger.info("Cannot get base directory") 40 | return 41 | 42 | domain = domains[0].name 43 | ldap_principal_name = f"ldap/{domain}" 44 | 45 | kerberos_state = await get_krb_server_state(session) 46 | 47 | if kerberos_state != KerberosState.READY: 48 | logger.info("Kerberos server is not ready") 49 | return 50 | 51 | status = await kadmin.get_status(wait_for_positive=True) 52 | 53 | if status: 54 | await kadmin.ldap_principal_setup( 55 | ldap_principal_name, 56 | settings.KRB5_LDAP_KEYTAB, 57 | ) 58 | -------------------------------------------------------------------------------- /app/extra/scripts/principal_block_user_sync.py: -------------------------------------------------------------------------------- 1 | """Principal and user account block sync. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from datetime import datetime 8 | 9 | from sqlalchemy import Integer, String, cast, update 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | from sqlalchemy.sql import select 12 | 13 | from config import Settings 14 | from ldap_protocol.user_account_control import ( 15 | UserAccountControlFlag, 16 | get_check_uac, 17 | ) 18 | from ldap_protocol.utils.queries import ( 19 | add_lock_and_expire_attributes, 20 | get_principal_directory, 21 | ) 22 | from models import Attribute, Directory, User 23 | 24 | 25 | async def principal_block_sync( 26 | session: AsyncSession, 27 | settings: Settings, 28 | ) -> None: 29 | """Synchronize principal and user account blocking.""" 30 | for user in await session.scalars(select(User)): 31 | uac_check = await get_check_uac(session, user.directory_id) 32 | if uac_check(UserAccountControlFlag.ACCOUNTDISABLE): 33 | continue 34 | 35 | if "@" in user.user_principal_name: 36 | principal_postfix = user.user_principal_name.split("@")[1].upper() 37 | principal_name = f"{user.get_upn_prefix()}@{principal_postfix}" 38 | else: 39 | continue 40 | 41 | principal_directory = await get_principal_directory( 42 | session=session, 43 | principal_name=principal_name, 44 | ) 45 | if not principal_directory: 46 | continue 47 | 48 | krb_exp_attr = _find_krb_exp_attr(principal_directory) 49 | if (not krb_exp_attr) or (not krb_exp_attr.value): 50 | continue 51 | 52 | expiration_time = datetime.strptime( 53 | krb_exp_attr.value, 54 | "%Y%m%d%H%M%SZ", 55 | ).replace( 56 | tzinfo=settings.TIMEZONE, 57 | ) 58 | 59 | now = datetime.now(tz=settings.TIMEZONE) 60 | if expiration_time > now: 61 | continue 62 | 63 | new_value = cast( 64 | cast(Attribute.value, Integer).op("|")( 65 | UserAccountControlFlag.ACCOUNTDISABLE, 66 | ), 67 | String, 68 | ) 69 | 70 | conditions = [ 71 | Attribute.directory_id == user.directory_id, 72 | Attribute.name == "userAccountControl", 73 | ] 74 | 75 | await session.execute( 76 | update(Attribute) 77 | .values(value=new_value) 78 | .where(*conditions) 79 | .execution_options(synchronize_session=False), 80 | ) 81 | 82 | await add_lock_and_expire_attributes( 83 | session=session, 84 | directory=user.directory, 85 | tz=settings.TIMEZONE, 86 | ) 87 | 88 | await session.commit() 89 | 90 | 91 | def _find_krb_exp_attr(directory: Directory) -> Attribute | None: 92 | """Find krbprincipalexpiration attribute in directory. 93 | 94 | :param Directory directory: the directory object 95 | :return Atrribute | None: the attribute with 96 | the name 'krbprincipalexpiration', or None if not found. 97 | """ 98 | for attr in directory.attributes: 99 | if attr.name == "krbprincipalexpiration": 100 | return attr 101 | return None 102 | -------------------------------------------------------------------------------- /app/extra/scripts/uac_sync.py: -------------------------------------------------------------------------------- 1 | """userAccountControl sync. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from sqlalchemy import Integer, String, cast, update 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | from sqlalchemy.sql import func, select 10 | 11 | from config import Settings 12 | from ldap_protocol.kerberos import AbstractKadmin 13 | from ldap_protocol.user_account_control import UserAccountControlFlag 14 | from ldap_protocol.utils.queries import add_lock_and_expire_attributes 15 | from models import Attribute, User 16 | 17 | 18 | async def disable_accounts( 19 | session: AsyncSession, 20 | kadmin: AbstractKadmin, 21 | settings: Settings, 22 | ) -> None: 23 | """Update userAccountControl attr. 24 | 25 | :param AsyncSession session: db 26 | 27 | Original query: 28 | update "Attributes" a 29 | set value = (CAST(a.value AS INTEGER) | 2)::text 30 | from "Users" u 31 | where (CAST(a.value AS INTEGER) & 2) = 0 and 32 | u."accountExpires" < NOW() and 33 | a."directoryId" = u."directoryId" and 34 | a."name" = 'userAccountControl' 35 | """ 36 | subquery = ( 37 | select(User.directory_id) 38 | .where( 39 | User.account_exp < func.now(), 40 | User.directory_id == Attribute.directory_id, 41 | ) 42 | .scalar_subquery() 43 | ) 44 | new_value = cast( 45 | cast(Attribute.value, Integer).op("|")( 46 | UserAccountControlFlag.ACCOUNTDISABLE, 47 | ), 48 | String, 49 | ) 50 | conditions = [ 51 | ( 52 | cast(Attribute.value, Integer).op("&")( 53 | UserAccountControlFlag.ACCOUNTDISABLE, 54 | ) 55 | == 0 56 | ), 57 | Attribute.directory_id.in_(subquery), 58 | Attribute.name == "userAccountControl", 59 | ] 60 | 61 | ids = await session.scalars( 62 | update(Attribute) 63 | .values(value=new_value) 64 | .where(*conditions) 65 | .returning(Attribute.directory_id) 66 | .execution_options(synchronize_session=False), 67 | ) 68 | 69 | users = await session.stream_scalars( 70 | select(User) 71 | .where(User.directory_id.in_(ids)) 72 | ) # fmt: skip 73 | 74 | async for user in users: 75 | await kadmin.lock_principal(user.get_upn_prefix()) 76 | 77 | await add_lock_and_expire_attributes( 78 | session, 79 | user.directory, 80 | settings.TIMEZONE, 81 | ) 82 | 83 | await session.commit() 84 | -------------------------------------------------------------------------------- /app/extra/scripts/update_krb5_config.py: -------------------------------------------------------------------------------- 1 | """Kerberos update config. 2 | 3 | Copyright (c) 2025 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from loguru import logger 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | 10 | from config import Settings 11 | from ldap_protocol.kerberos import AbstractKadmin 12 | from ldap_protocol.utils.queries import get_base_directories 13 | 14 | 15 | async def update_krb5_config( 16 | kadmin: AbstractKadmin, 17 | session: AsyncSession, 18 | settings: Settings, 19 | ) -> None: 20 | """Update kerberos config.""" 21 | if not (await kadmin.get_status(wait_for_positive=True)): 22 | logger.error("kadmin_api is not running") 23 | return 24 | 25 | base_dn_list = await get_base_directories(session) 26 | base_dn = base_dn_list[0].path_dn 27 | domain: str = base_dn_list[0].name 28 | 29 | krbadmin = "cn=krbadmin,ou=users," + base_dn 30 | services_container = "ou=services," + base_dn 31 | 32 | krb5_template = settings.TEMPLATES.get_template("krb5.conf") 33 | kdc_template = settings.TEMPLATES.get_template("kdc.conf") 34 | 35 | kdc_config = await kdc_template.render_async(domain=domain) 36 | 37 | krb5_config = await krb5_template.render_async( 38 | domain=domain, 39 | krbadmin=krbadmin, 40 | services_container=services_container, 41 | ldap_uri=settings.KRB5_LDAP_URI, 42 | ) 43 | 44 | await kadmin.setup_configs(krb5_config, kdc_config) 45 | -------------------------------------------------------------------------------- /app/extra/templates/kdc.conf: -------------------------------------------------------------------------------- 1 | [kdcdefaults] 2 | kdc_ports = 88 3 | kdc_tcp_ports = 88 4 | 5 | [realms] 6 | {{ domain.upper() }} = { 7 | #master_key_type = aes256-cts 8 | acl_file = /etc/krb5kdc/kadm5.acl 9 | dict_file = /usr/share/dict/words 10 | key_stash_file = /etc/krb5kdc/.k5.{{ domain.upper() }} 11 | admin_keytab = /etc/krb5kdc/kadm5.keytab 12 | supported_enctypes = aes128-cts-hmac-sha1-96:normal aes256-cts-hmac-sha1-96:normal aes128-cts-hmac-sha256-128:normal aes256-cts-hmac-sha384-192:normal 13 | } 14 | -------------------------------------------------------------------------------- /app/extra/templates/krb5.conf: -------------------------------------------------------------------------------- 1 | [logging] 2 | default = SYSLOG:INFO:LOCAL1 3 | kdc = SYSLOG:NOTICE:LOCAL1 4 | admin_server = SYSLOG:WARNING:LOCAL1 5 | 6 | [libdefaults] 7 | default_realm = {{ domain.upper() }} 8 | dns_lookup_realm = false 9 | dns_lookup_kdc = false 10 | realm_try_domains = 1 11 | ticket_lifetime = 24h 12 | renew_lifetime = 7d 13 | forwardable = true 14 | 15 | [realms] 16 | {{ domain.upper() }} = { 17 | kdc = {{ domain }} 18 | admin_server = {{ domain }} 19 | default_domain = {{ domain }} 20 | database_module = openldap_ldapconf 21 | } 22 | 23 | [domain_realm] 24 | .{{ domain }} = {{ domain.upper() }} 25 | {{ domain }} = {{ domain.upper() }} 26 | 27 | [appdefaults] 28 | pam = { 29 | debug = false 30 | ticket_lifetime = 36000 31 | renew_lifetime = 36000 32 | forwardable = true 33 | krb4_convert = false 34 | } 35 | krb5-sync = { 36 | ad_keytab = /etc/krb5.keytab 37 | ad_principal = admin@{{ domain.upper() }} 38 | ad_realm = {{ domain.upper() }} 39 | ad_admin_server = {{ ldap_uri.replace('ldap://', '') }} 40 | ad_ldap_base = {% for d in domain.split('.') %}dc={{ d }},{% endfor %} 41 | ad_instances = root ipass 42 | ad_base_instance = windows 43 | ad_queue_only = false 44 | 45 | queue_dir = /var/spool/krb5-sync 46 | syslog = true 47 | } 48 | 49 | [dbmodules] 50 | openldap_ldapconf = { 51 | db_library = kldap 52 | ldap_kerberos_container_dn = cn=kerberos,{{ services_container }} 53 | ldap_kdc_dn = {{ krbadmin }} 54 | ldap_kadmind_dn = {{ krbadmin }} 55 | ldap_service_password_file = /etc/krb5.d/stash.keyfile 56 | ldap_servers = {{ ldap_uri }} 57 | ldap_conns_per_server = 5 58 | } 59 | 60 | [plugins] 61 | kadm5_hook = { 62 | module = mdk5sync:/plugins/mdk5sync.so 63 | } 64 | 65 | [MD] 66 | config_md = { 67 | push_url = http://shadow_api:8000/mfa/push 68 | sync_url = http://shadow_api:8000/sync/password 69 | } 70 | -------------------------------------------------------------------------------- /app/extra/templates/named_conf_local_zone_part.template: -------------------------------------------------------------------------------- 1 | zone "{{ domain }}" { 2 | type master; 3 | file "/opt/db.zone"; 4 | notify no; 5 | allow-query { any; }; 6 | allow-update { key zone. ; }; 7 | }; 8 | -------------------------------------------------------------------------------- /app/extra/templates/zone.template: -------------------------------------------------------------------------------- 1 | $ORIGIN . 2 | $TTL 604800 ; 1 week 3 | {{ domain }} IN SOA ns1.{{ domain }}. info.{{ domain }}. ( 4 | 20240725 ; serial 5 | 10800 ; refresh (3 hours) 6 | 3600 ; retry (1 hour) 7 | 604800 ; expire (1 week) 8 | 21600 ; negative (6 hours) 9 | ) 10 | NS ns1.{{ domain }}. 11 | ns1.{{ domain }} IN A 127.0.0.1 12 | {{ domain }} IN A 192.0.0.1 13 | {{ domain }} IN A 192.0.2.1 14 | {{ domain }}. IN A 192.0.0.1 15 | _ldap._tcp.{{ domain }}. IN SRV 0 0 389 {{ domain }}. 16 | _ldaps._tcp.{{ domain }}. IN SRV 0 0 636 {{ domain }}. 17 | _kerberos._tcp.{{ domain }}. IN SRV 0 0 88 {{ domain }}. 18 | _kerberos._udp.{{ domain }}. IN SRV 0 0 88 {{ domain }}. 19 | _kdc._tcp.{{ domain }}. IN SRV 0 0 88 {{ domain }}. 20 | _kdc._udp.{{ domain }}. IN SRV 0 0 88 {{ domain }}. 21 | _kpasswd._tcp.{{ domain }}. IN SRV 0 0 464 {{ domain }}. 22 | _kpasswd._udp.{{ domain }}. IN SRV 0 0 464 {{ domain }}. 23 | 1.2.0.192.in-addr.arpa. IN PTR {{ domain }}. 24 | 1.0.0.192.in-addr.arpa. IN PTR {{ domain }}. 25 | -------------------------------------------------------------------------------- /app/ldap_protocol/__init__.py: -------------------------------------------------------------------------------- 1 | """Multidirectory ldap module. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from .dialogue import LDAPSession 8 | from .messages import LDAPRequestMessage 9 | 10 | __all__ = [ 11 | "LDAPRequestMessage", 12 | "LDAPSession", 13 | ] 14 | -------------------------------------------------------------------------------- /app/ldap_protocol/dependency.py: -------------------------------------------------------------------------------- 1 | """DI Resolver MiltiDirecory module. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from functools import partial, wraps 8 | from typing import Callable, TypeVar, get_type_hints 9 | 10 | from dishka import AsyncContainer 11 | 12 | T = TypeVar("T", bound=Callable) 13 | 14 | 15 | async def resolve_deps(func: T, container: AsyncContainer) -> T: 16 | """Provide async dependencies. 17 | 18 | :param T func: Awaitable 19 | :param AsyncContainer container: IoC container 20 | :return T: Awaitable 21 | """ 22 | hints = get_type_hints(func) 23 | del hints["return"] 24 | kwargs = {} 25 | 26 | for arg_name, hint in hints.items(): 27 | kwargs[arg_name] = await container.get(hint) 28 | 29 | return wraps(func)(partial(func, **kwargs)) # type: ignore 30 | -------------------------------------------------------------------------------- /app/ldap_protocol/exceptions.py: -------------------------------------------------------------------------------- 1 | """Exceptions for LDAP Protocol operations. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE. 5 | """ 6 | 7 | 8 | class InstanceNotFoundError(Exception): 9 | """Raised when an instance is not found.""" 10 | 11 | 12 | class InstanceCantModifyError(Exception): 13 | """Raised when an instance cannot be modified.""" 14 | -------------------------------------------------------------------------------- /app/ldap_protocol/kerberos/__init__.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.ext.asyncio import AsyncSession 2 | 3 | from .base import ( 4 | KERBEROS_STATE_NAME, 5 | AbstractKadmin, 6 | KerberosState, 7 | KRBAPIError, 8 | ) 9 | from .client import KerberosMDAPIClient 10 | from .stub import StubKadminMDADPIClient 11 | from .utils import get_krb_server_state, set_state, unlock_principal 12 | 13 | 14 | async def get_kerberos_class(session: AsyncSession) -> type[AbstractKadmin]: 15 | """Get kerberos server state. 16 | 17 | :param AsyncSession session: db 18 | :return type[KerberosMDAPIClient] | type[StubKadminMDADPIClient]: api 19 | """ 20 | if await get_krb_server_state(session) == KerberosState.READY: 21 | return KerberosMDAPIClient 22 | return StubKadminMDADPIClient 23 | 24 | 25 | __all__ = [ 26 | "get_kerberos_class", 27 | "KerberosMDAPIClient", 28 | "StubKadminMDADPIClient", 29 | "AbstractKadmin", 30 | "KerberosState", 31 | "KRBAPIError", 32 | "unlock_principal", 33 | "KERBEROS_STATE_NAME", 34 | "set_state", 35 | ] 36 | -------------------------------------------------------------------------------- /app/ldap_protocol/kerberos/client.py: -------------------------------------------------------------------------------- 1 | """Ready to work kadmin client.""" 2 | 3 | import httpx 4 | 5 | from .base import AbstractKadmin, KRBAPIError 6 | from .utils import logger_wraps 7 | 8 | 9 | class KerberosMDAPIClient(AbstractKadmin): 10 | """KRB server integration.""" 11 | 12 | @logger_wraps(is_stub=True) 13 | async def setup(*_, **__) -> None: # type: ignore 14 | """Stub method, setup is not needed.""" 15 | 16 | @logger_wraps() 17 | async def add_principal( 18 | self, 19 | name: str, 20 | password: str | None, 21 | timeout: int = 1, 22 | ) -> None: 23 | """Add request.""" 24 | response = await self.client.post( 25 | "principal", 26 | json={"name": name, "password": password}, 27 | timeout=timeout, 28 | ) 29 | 30 | if response.status_code != 201: 31 | raise KRBAPIError(response.text) 32 | 33 | @logger_wraps() 34 | async def get_principal(self, name: str) -> dict: 35 | """Get request.""" 36 | response = await self.client.get("principal", params={"name": name}) 37 | if response.status_code != 200: 38 | raise KRBAPIError(response.text) 39 | 40 | return response.json() 41 | 42 | @logger_wraps() 43 | async def del_principal(self, name: str) -> None: 44 | """Delete principal.""" 45 | response = await self.client.delete("principal", params={"name": name}) 46 | if response.status_code != 200: 47 | raise KRBAPIError(response.text) 48 | 49 | @logger_wraps() 50 | async def change_principal_password( 51 | self, 52 | name: str, 53 | password: str, 54 | ) -> None: 55 | """Change password request.""" 56 | response = await self.client.patch( 57 | "principal", 58 | json={"name": name, "password": password}, 59 | ) 60 | if response.status_code != 201: 61 | raise KRBAPIError(response.text) 62 | 63 | @logger_wraps() 64 | async def create_or_update_principal_pw( 65 | self, 66 | name: str, 67 | password: str, 68 | ) -> None: 69 | """Change password request.""" 70 | response = await self.client.post( 71 | "/principal/create_or_update", 72 | json={"name": name, "password": password}, 73 | ) 74 | if response.status_code != 201: 75 | raise KRBAPIError(response.text) 76 | 77 | @logger_wraps() 78 | async def rename_princ(self, name: str, new_name: str) -> None: 79 | """Rename request.""" 80 | response = await self.client.put( 81 | "principal", 82 | json={"name": name, "new_name": new_name}, 83 | ) 84 | if response.status_code != 202: 85 | raise KRBAPIError(response.text) 86 | 87 | async def ktadd(self, names: list[str]) -> httpx.Response: 88 | """Ktadd build request for stream and return response. 89 | 90 | :param list[str] names: principals 91 | :return httpx.Response: stream 92 | """ 93 | request = self.client.build_request( 94 | "POST", 95 | "/principal/ktadd", 96 | json=names, 97 | ) 98 | 99 | response = await self.client.send(request, stream=True) 100 | if response.status_code == 404: 101 | raise KRBAPIError("Principal not found") 102 | 103 | return response 104 | 105 | @logger_wraps() 106 | async def lock_principal(self, name: str) -> None: 107 | """Lock princ. 108 | 109 | :param str name: upn 110 | :raises KRBAPIError: on error 111 | """ 112 | response = await self.client.post( 113 | "principal/lock", 114 | json={"name": name}, 115 | ) 116 | 117 | if response.status_code != 200: 118 | raise KRBAPIError(response.text) 119 | 120 | async def force_princ_pw_change(self, name: str) -> None: 121 | """Force mark password change for principal. 122 | 123 | :param str name: pw 124 | :raises KRBAPIError: err 125 | """ 126 | response = await self.client.post( 127 | "principal/force_reset", 128 | json={"name": name}, 129 | ) 130 | 131 | if response.status_code != 200: 132 | raise KRBAPIError(response.text) 133 | -------------------------------------------------------------------------------- /app/ldap_protocol/kerberos/stub.py: -------------------------------------------------------------------------------- 1 | """Stub calls for kadmin client.""" 2 | 3 | from typing import NoReturn 4 | 5 | from .base import AbstractKadmin, KRBAPIError 6 | from .utils import logger_wraps 7 | 8 | 9 | class StubKadminMDADPIClient(AbstractKadmin): 10 | """Stub client for non set up dirs.""" 11 | 12 | @logger_wraps() 13 | async def setup(self, *args, **kwargs) -> None: # type: ignore 14 | """Call setup.""" 15 | await super().setup(*args, **kwargs) 16 | 17 | @logger_wraps(is_stub=True) 18 | async def add_principal( 19 | self, 20 | name: str, 21 | password: str | None, 22 | timeout: int = 1, 23 | ) -> None: ... 24 | 25 | @logger_wraps(is_stub=True) 26 | async def get_principal(self, name: str) -> None: ... 27 | 28 | @logger_wraps(is_stub=True) 29 | async def del_principal(self, name: str) -> None: ... 30 | 31 | @logger_wraps(is_stub=True) 32 | async def change_principal_password( 33 | self, 34 | name: str, 35 | password: str, 36 | ) -> None: ... 37 | 38 | @logger_wraps(is_stub=True) 39 | async def create_or_update_principal_pw( 40 | self, 41 | name: str, 42 | password: str, 43 | ) -> None: ... 44 | 45 | @logger_wraps(is_stub=True) 46 | async def rename_princ(self, name: str, new_name: str) -> None: ... 47 | 48 | @logger_wraps(is_stub=True) 49 | async def ktadd(self, names: list[str]) -> NoReturn: # noqa: ARG002 50 | raise KRBAPIError 51 | 52 | @logger_wraps(is_stub=True) 53 | async def lock_principal(self, name: str) -> None: ... 54 | 55 | @logger_wraps(is_stub=True) 56 | async def force_princ_pw_change(self, name: str) -> None: ... 57 | -------------------------------------------------------------------------------- /app/ldap_protocol/kerberos/utils.py: -------------------------------------------------------------------------------- 1 | """Utils for kadmin.""" 2 | 3 | from functools import wraps 4 | from typing import Any, Callable 5 | 6 | import httpx 7 | from sqlalchemy import delete, select, update 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | 10 | from models import Attribute, CatalogueSetting, Directory 11 | 12 | from .base import KERBEROS_STATE_NAME, KerberosState, KRBAPIError, log 13 | 14 | 15 | def logger_wraps(is_stub: bool = False) -> Callable: 16 | """Log kadmin calls. 17 | 18 | :param bool is_stub: flag to change logs, defaults to False 19 | :return Callable: any method 20 | """ 21 | 22 | def wrapper(func: Callable) -> Callable: 23 | name = func.__name__ 24 | bus_type = " stub " if is_stub else " " 25 | 26 | @wraps(func) 27 | async def wrapped(*args: str, **kwargs: str) -> Any: 28 | logger = log.opt(depth=1) 29 | try: 30 | principal = args[1] 31 | except IndexError: 32 | principal = kwargs.get("name", "") 33 | 34 | logger.info(f"Calling{bus_type}'{name}' for {principal}") 35 | try: 36 | result = await func(*args, **kwargs) 37 | except (httpx.ConnectError, httpx.ConnectTimeout): 38 | logger.critical("Can not access kadmin server!") 39 | raise KRBAPIError 40 | 41 | except KRBAPIError as err: 42 | logger.error(f"{name} call raised: {err}") 43 | raise 44 | 45 | else: 46 | if not is_stub: 47 | logger.success(f"Executed {name}") 48 | return result 49 | 50 | return wrapped 51 | 52 | return wrapper 53 | 54 | 55 | async def set_state(session: AsyncSession, state: "KerberosState") -> None: 56 | """Set the server state in the database. 57 | 58 | This function updates the server state in the database by either adding 59 | a new entry, updating an existing entry, or deleting and re-adding the 60 | entry if there are multiple entries found. 61 | """ 62 | results = await session.execute( 63 | select(CatalogueSetting) 64 | .where(CatalogueSetting.name == KERBEROS_STATE_NAME) 65 | ) # fmt: skip 66 | kerberos_state = results.scalar_one_or_none() 67 | 68 | if not kerberos_state: 69 | session.add(CatalogueSetting(name=KERBEROS_STATE_NAME, value=state)) 70 | return 71 | 72 | await session.execute( 73 | update(CatalogueSetting) 74 | .where(CatalogueSetting.name == KERBEROS_STATE_NAME) 75 | .values(value=state), 76 | ) 77 | 78 | 79 | async def get_krb_server_state(session: AsyncSession) -> "KerberosState": 80 | """Get kerberos server state.""" 81 | state = await session.scalar( 82 | select(CatalogueSetting) 83 | .filter(CatalogueSetting.name == KERBEROS_STATE_NAME) 84 | ) # fmt: skip 85 | 86 | if state is None: 87 | return KerberosState.NOT_CONFIGURED 88 | return KerberosState(state.value) 89 | 90 | 91 | async def unlock_principal(name: str, session: AsyncSession) -> None: 92 | """Unlock principal. 93 | 94 | :param str name: upn 95 | :param AsyncSession session: db 96 | """ 97 | subquery = ( 98 | select(Directory.id) 99 | .where(Directory.name.ilike(name)) 100 | .scalar_subquery() 101 | ) 102 | await session.execute( 103 | delete(Attribute) 104 | .where( 105 | Attribute.directory_id == subquery, 106 | Attribute.name == "krbprincipalexpiration", 107 | ) 108 | .execution_options(synchronize_session=False), 109 | ) 110 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_codes.py: -------------------------------------------------------------------------------- 1 | """Ldap codes class. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from enum import IntEnum 8 | 9 | 10 | class LDAPCodes(IntEnum): 11 | """LDAP protocol codes mapping. 12 | 13 | SUCCESS = 0 14 | OPERATIONS_ERROR = 1 15 | PROTOCOL_ERROR = 2 16 | TIME_LIMIT_EXCEEDED = 3 17 | SIZE_LIMIT_EXCEEDED = 4 18 | COMPARE_FALSE = 5 19 | COMPARE_TRUE = 6 20 | AUTH_METHOD_NOT_SUPPORTED = 7 21 | STRONGER_AUTH_REQUIRED = 8 22 | # -- 9 reserved -- 23 | REFERRAL = 10 24 | ADMIN_LIMIT_EXCEEDED = 11 25 | UNAVAILABLE_CRITICAL_EXTENSION = 12 26 | CONFIDENTIALITY_REQUIRED = 13 27 | SASL_BIND_IN_PROGRESS = 14 28 | NO_SUCH_ATTRIBUTE = 16 29 | UNDEFINED_ATTRIBUTE_TYPE = 17 30 | INAPPROPRIATE_MATCHING = 18 31 | CONSTRAINT_VIOLATION = 19 32 | ATTRIBUTE_OR_VALUE_EXISTS = 20 33 | INVALID_ATTRIBUTE_SYNTAX = 21 34 | # -- 22-31 unused -- 35 | NO_SUCH_OBJECT = 32 36 | ALIAS_PROBLEM = 33 37 | INVALID_DN_SYNTAX = 34 38 | # -- 35 reserved for undefined isLeaf -- 39 | ALIAS_DEREFERENCING_PROBLEM = 36 40 | # -- 37-47 unused -- 41 | INAPPROPRIATE_AUTHENTICATION = 48 42 | INVALID_CREDENTIALS = 49 43 | INSUFFICIENT_ACCESS_RIGHTS = 50 44 | BUSY = 51 45 | UNAVAILABLE = 52 46 | UNWILLING_TO_PERFORM = 53 47 | LOOP_DETECT = 54 48 | # -- 55-63 unused -- 49 | NAMING_VIOLATION = 64 50 | OBJECT_CLASS_VIOLATION = 65 51 | NOT_ALLOWED_ON_NON_LEAF = 66 52 | NOT_ALLOWED_ON_RDN = 67 53 | ENTRY_ALREADY_EXISTS = 68 54 | OBJECT_CLASS_MODS_PROHIBITED = 69 55 | # -- 70 reserved for CLDAP -- 56 | AFFECTS_MULTIPLE_DS_AS = 71 57 | # -- 72-79 unused -- 58 | OTHER = 80 59 | """ 60 | 61 | SUCCESS = 0 62 | OPERATIONS_ERROR = 1 63 | PROTOCOL_ERROR = 2 64 | TIME_LIMIT_EXCEEDED = 3 65 | SIZE_LIMIT_EXCEEDED = 4 66 | COMPARE_FALSE = 5 67 | COMPARE_TRUE = 6 68 | AUTH_METHOD_NOT_SUPPORTED = 7 69 | STRONGER_AUTH_REQUIRED = 8 70 | # -- 9 reserved -- 71 | REFERRAL = 10 72 | ADMIN_LIMIT_EXCEEDED = 11 73 | UNAVAILABLE_CRITICAL_EXTENSION = 12 74 | CONFIDENTIALITY_REQUIRED = 13 75 | SASL_BIND_IN_PROGRESS = 14 76 | NO_SUCH_ATTRIBUTE = 16 77 | UNDEFINED_ATTRIBUTE_TYPE = 17 78 | INAPPROPRIATE_MATCHING = 18 79 | CONSTRAINT_VIOLATION = 19 80 | ATTRIBUTE_OR_VALUE_EXISTS = 20 81 | INVALID_ATTRIBUTE_SYNTAX = 21 82 | # -- 22-31 unused -- 83 | NO_SUCH_OBJECT = 32 84 | ALIAS_PROBLEM = 33 85 | INVALID_DN_SYNTAX = 34 86 | # -- 35 reserved for undefined isLeaf -- 87 | ALIAS_DEREFERENCING_PROBLEM = 36 88 | # -- 37-47 unused -- 89 | INAPPROPRIATE_AUTHENTICATION = 48 90 | INVALID_CREDENTIALS = 49 91 | INSUFFICIENT_ACCESS_RIGHTS = 50 92 | BUSY = 51 93 | UNAVAILABLE = 52 94 | UNWILLING_TO_PERFORM = 53 95 | LOOP_DETECT = 54 96 | # -- 55-63 unused -- 97 | NAMING_VIOLATION = 64 98 | OBJECT_CLASS_VIOLATION = 65 99 | NOT_ALLOWED_ON_NON_LEAF = 66 100 | NOT_ALLOWED_ON_RDN = 67 101 | ENTRY_ALREADY_EXISTS = 68 102 | OBJECT_CLASS_MODS_PROHIBITED = 69 103 | # -- 70 reserved for CLDAP -- 104 | AFFECTS_MULTIPLE_DS_AS = 71 105 | # -- 72-79 unused -- 106 | OTHER = 80 107 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/__init__.py: -------------------------------------------------------------------------------- 1 | """LDAP protocol map. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from .abandon import AbandonRequest 8 | from .add import AddRequest 9 | from .base import BaseRequest 10 | from .bind import BindRequest, UnbindRequest 11 | from .delete import DeleteRequest 12 | from .extended import ExtendedRequest 13 | from .modify import ModifyRequest 14 | from .modify_dn import ModifyDNRequest 15 | from .search import SearchRequest 16 | 17 | requests: list[type[BaseRequest]] = [ 18 | AbandonRequest, 19 | AddRequest, 20 | BindRequest, 21 | UnbindRequest, 22 | DeleteRequest, 23 | ExtendedRequest, 24 | ModifyRequest, 25 | ModifyDNRequest, 26 | SearchRequest, 27 | ] 28 | 29 | protocol_id_map: dict[int, type[BaseRequest]] = { 30 | request.PROTOCOL_OP: request # type: ignore 31 | for request in requests 32 | } 33 | 34 | 35 | __all__ = ["protocol_id_map", "BaseRequest"] 36 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/abandon.py: -------------------------------------------------------------------------------- 1 | """Abandon request. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import asyncio 8 | from typing import AsyncGenerator, ClassVar 9 | 10 | from ldap_protocol.asn1parser import ASN1Row 11 | 12 | from .base import BaseRequest 13 | 14 | 15 | class AbandonRequest(BaseRequest): 16 | """Abandon protocol.""" 17 | 18 | PROTOCOL_OP: ClassVar[int] = 16 19 | message_id: int 20 | 21 | @classmethod 22 | def from_data(cls, data: dict[str, list[ASN1Row]]) -> "AbandonRequest": # noqa: ARG003 23 | """Create structure from ASN1Row dataclass list.""" 24 | return cls(message_id=1) 25 | 26 | async def handle(self) -> AsyncGenerator: 27 | """Handle message with current user.""" 28 | await asyncio.sleep(0) 29 | return 30 | yield # type: ignore 31 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/base.py: -------------------------------------------------------------------------------- 1 | """LDAP message abstract structure. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from abc import ABC, abstractmethod 8 | from typing import TYPE_CHECKING, AsyncGenerator, Callable, ClassVar, Protocol 9 | 10 | from dishka import AsyncContainer 11 | from loguru import logger 12 | from pydantic import BaseModel 13 | 14 | from config import Settings 15 | from ldap_protocol.dependency import resolve_deps 16 | from ldap_protocol.dialogue import LDAPSession 17 | from ldap_protocol.ldap_responses import BaseResponse, LDAPResult 18 | from ldap_protocol.utils.helpers import get_class_name 19 | 20 | log_api = logger.bind(name="admin") 21 | 22 | log_api.add( 23 | "logs/admin_{time:DD-MM-YYYY}.log", 24 | filter=lambda rec: rec["extra"].get("name") == "admin", 25 | retention="10 days", 26 | rotation="1d", 27 | colorize=False, 28 | ) 29 | 30 | type handler = Callable[..., AsyncGenerator[BaseResponse, None]] 31 | type serializer = Callable[..., "BaseRequest"] 32 | 33 | 34 | if TYPE_CHECKING: 35 | 36 | class _APIProtocol(Protocol): 37 | """Protocol for API handling.""" 38 | 39 | async def _handle_api( 40 | self, 41 | container: AsyncContainer, 42 | ) -> list[BaseResponse] | BaseResponse: ... 43 | else: 44 | 45 | class _APIProtocol: ... 46 | 47 | 48 | class BaseRequest(ABC, _APIProtocol, BaseModel): 49 | """Base request builder.""" 50 | 51 | handle: ClassVar[handler] 52 | from_data: ClassVar[serializer] 53 | 54 | @property 55 | @abstractmethod 56 | def PROTOCOL_OP(self) -> int: # noqa: N802 57 | """Protocol OP response code.""" 58 | 59 | async def _handle_api( 60 | self, 61 | container: AsyncContainer, 62 | ) -> list[BaseResponse]: 63 | """Hanlde response with api user. 64 | 65 | :param DBUser user: user from db 66 | :param AsyncSession session: db session 67 | :return list[BaseResponse]: list of handled responses 68 | """ 69 | handler = await resolve_deps(func=self.handle, container=container) 70 | ldap_session = await container.get(LDAPSession) 71 | settings = await container.get(Settings) 72 | 73 | un = getattr(ldap_session.user, "user_principal_name", "ANONYMOUS") 74 | 75 | if settings.DEBUG: 76 | log_api.info(f"{get_class_name(self)}: {self.model_dump_json()}") 77 | else: 78 | log_api.info(f"{get_class_name(self)}[{un}]") 79 | 80 | responses = [response async for response in handler()] 81 | 82 | if settings.DEBUG: 83 | for response in responses: 84 | log_api.info( 85 | "{}: {}", 86 | get_class_name(response), 87 | response.model_dump_json(), 88 | ) 89 | else: 90 | for response in responses: 91 | log_api.info(f"{get_class_name(response)}[{un}]") 92 | 93 | return responses 94 | 95 | async def handle_api(self, container: AsyncContainer) -> LDAPResult: 96 | """Get single response.""" 97 | return (await self._handle_api(container))[0] # type: ignore 98 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/bind_methods/__init__.py: -------------------------------------------------------------------------------- 1 | """Bind methods. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from .base import ( 8 | AbstractLDAPAuth, 9 | LDAPBindErrors, 10 | SaslAuthentication, 11 | SASLMethod, 12 | get_bad_response, 13 | ) 14 | from .sasl_gssapi import GSSAPISL, GSSAPIAuthStatus, SaslGSSAPIAuthentication 15 | from .sasl_plain import SaslPLAINAuthentication 16 | from .simple import SimpleAuthentication 17 | 18 | sasl_mechanism: list[type[SaslAuthentication]] = [ 19 | SaslPLAINAuthentication, 20 | SaslGSSAPIAuthentication, 21 | ] 22 | 23 | sasl_mechanism_map: dict[SASLMethod, type[SaslAuthentication]] = { 24 | request.mechanism: request for request in sasl_mechanism 25 | } 26 | 27 | __all__ = [ 28 | "get_bad_response", 29 | "sasl_mechanism_map", 30 | "AbstractLDAPAuth", 31 | "SASLMethod", 32 | "SaslAuthentication", 33 | "SaslGSSAPIAuthentication", 34 | "SaslPLAINAuthentication", 35 | "SimpleAuthentication", 36 | "GSSAPIAuthStatus", 37 | "GSSAPISL", 38 | "LDAPBindErrors", 39 | ] 40 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/bind_methods/base.py: -------------------------------------------------------------------------------- 1 | """LDAP bind auth methods structure. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from abc import ABC, abstractmethod 8 | from enum import StrEnum 9 | from typing import ClassVar 10 | 11 | from pydantic import BaseModel, Field, SecretStr 12 | from sqlalchemy.ext.asyncio import AsyncSession 13 | 14 | from ldap_protocol.asn1parser import ASN1Row 15 | from ldap_protocol.ldap_codes import LDAPCodes 16 | from ldap_protocol.ldap_responses import BindResponse 17 | from models import User 18 | 19 | 20 | class SASLMethod(StrEnum): 21 | """SASL choices.""" 22 | 23 | PLAIN = "PLAIN" 24 | EXTERNAL = "EXTERNAL" 25 | GSSAPI = "GSSAPI" 26 | CRAM_MD5 = "CRAM-MD5" 27 | DIGEST_MD5 = "DIGEST-MD5" 28 | SCRAM_SHA_1 = "SCRAM-SHA-1" 29 | SCRAM_SHA_256 = "SCRAM-SHA-256" 30 | OAUTHBEARER = "OAUTHBEARER" 31 | UNBOUNDID_CERTIFICATE_PLUS_PASSWORD = "UNBOUNDID-CERTIFICATE-PLUS-PASSWORD" # noqa 32 | UNBOUNDID_TOTP = "UNBOUNDID-TOTP" 33 | UNBOUNDID_DELIVERED_OTP = "UNBOUNDID-DELIVERED-OTP" 34 | UNBOUNDID_YUBIKEY_OTP = "UNBOUNDID-YUBIKEY-OTP" 35 | 36 | 37 | class LDAPBindErrors(StrEnum): 38 | """LDAP Bind errors.""" 39 | 40 | NO_SUCH_USER = "525" 41 | LOGON_FAILURE = "52e" 42 | INVALID_LOGON_HOURS = "530" 43 | INVALID_WORKSTATION = "531" 44 | PASSWORD_EXPIRED = "532" # noqa 45 | ACCOUNT_DISABLED = "533" 46 | ACCOUNT_EXPIRED = "701" 47 | PASSWORD_MUST_CHANGE = "773" # noqa 48 | ACCOUNT_LOCKED_OUT = "775" 49 | 50 | def __str__(self) -> str: 51 | """Return the error message as a string.""" 52 | return ( 53 | "80090308: LdapErr: DSID-0C09030B, " 54 | "comment: AcceptSecurityContext error, " 55 | f"data {self.value}, v893" 56 | ) 57 | 58 | 59 | def get_bad_response(error_message: LDAPBindErrors) -> BindResponse: 60 | """Generate BindResponse object with an invalid credentials error. 61 | 62 | :param LDAPBindErrors error_message: Error message to include in the 63 | response 64 | :return BindResponse: A response object with the result code set to 65 | INVALID_CREDENTIALS, an empty matchedDN, and the 66 | provided error message 67 | """ 68 | return BindResponse( 69 | result_code=LDAPCodes.INVALID_CREDENTIALS, 70 | matchedDN="", 71 | errorMessage=str(error_message), 72 | ) 73 | 74 | 75 | class AbstractLDAPAuth(ABC, BaseModel): 76 | """Auth base class.""" 77 | 78 | otpassword: str | None = Field(None, max_length=6, min_length=6) 79 | password: SecretStr 80 | 81 | @property 82 | @abstractmethod 83 | def METHOD_ID(self) -> int: # noqa: N802 84 | """Abstract method id.""" 85 | 86 | @abstractmethod 87 | def is_valid(self, user: User) -> bool: 88 | """Validate state.""" 89 | 90 | @abstractmethod 91 | def is_anonymous(self) -> bool: 92 | """Return true if anonymous.""" 93 | 94 | @abstractmethod 95 | async def get_user(self, session: AsyncSession, username: str) -> User: 96 | """Get user.""" 97 | 98 | 99 | class SaslAuthentication(AbstractLDAPAuth): 100 | """Sasl auth form.""" 101 | 102 | METHOD_ID: ClassVar[int] = 3 103 | mechanism: ClassVar[SASLMethod] 104 | 105 | @classmethod 106 | @abstractmethod 107 | def from_data(cls, data: list[ASN1Row]) -> "SaslAuthentication": 108 | """Get auth from data.""" 109 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py: -------------------------------------------------------------------------------- 1 | """Sasl plain auth method. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from typing import ClassVar 8 | 9 | from sqlalchemy.ext.asyncio import AsyncSession 10 | 11 | from ldap_protocol.asn1parser import ASN1Row 12 | from ldap_protocol.utils.queries import get_user 13 | from models import User 14 | from security import verify_password 15 | 16 | from .base import SaslAuthentication, SASLMethod 17 | 18 | 19 | class SaslPLAINAuthentication(SaslAuthentication): 20 | """Sasl plain auth form.""" 21 | 22 | mechanism: ClassVar[SASLMethod] = SASLMethod.PLAIN 23 | credentials: bytes 24 | username: str | None = None 25 | 26 | def is_valid(self, user: User | None) -> bool: 27 | """Check if pwd is valid for user. 28 | 29 | :param User | None user: indb user 30 | :return bool: status 31 | """ 32 | password = getattr(user, "password", None) 33 | if password is not None: 34 | return verify_password( 35 | self.password.get_secret_value(), 36 | password, 37 | ) 38 | return False 39 | 40 | def is_anonymous(self) -> bool: 41 | """Check if auth is anonymous. 42 | 43 | :return bool: status 44 | """ 45 | return False 46 | 47 | @classmethod 48 | def from_data(cls, data: list[ASN1Row]) -> "SaslPLAINAuthentication": 49 | """Get auth from data.""" 50 | _, username, password = data[1].value.split("\\x00") 51 | return cls( 52 | credentials=data[1].value, 53 | username=username, 54 | password=password, 55 | ) 56 | 57 | async def get_user(self, session: AsyncSession, _: str) -> User: 58 | """Get user.""" 59 | return await get_user(session, self.username) # type: ignore 60 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/bind_methods/simple.py: -------------------------------------------------------------------------------- 1 | """Simple auth method. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from typing import ClassVar 8 | 9 | from sqlalchemy.ext.asyncio import AsyncSession 10 | 11 | from ldap_protocol.utils.queries import get_user 12 | from models import User 13 | from security import verify_password 14 | 15 | from .base import AbstractLDAPAuth 16 | 17 | 18 | class SimpleAuthentication(AbstractLDAPAuth): 19 | """Simple auth form.""" 20 | 21 | METHOD_ID: ClassVar[int] = 0 22 | 23 | def is_valid(self, user: User | None) -> bool: 24 | """Check if pwd is valid for user. 25 | 26 | :param User | None user: indb user 27 | :return bool: status 28 | """ 29 | password = getattr(user, "password", None) 30 | if password is not None: 31 | return verify_password(self.password.get_secret_value(), password) 32 | return False 33 | 34 | def is_anonymous(self) -> bool: 35 | """Check if auth is anonymous. 36 | 37 | :return bool: status 38 | """ 39 | return not self.password 40 | 41 | async def get_user(self, session: AsyncSession, username: str) -> User: 42 | """Get user.""" 43 | return await get_user(session, username) # type: ignore 44 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/compare.py: -------------------------------------------------------------------------------- 1 | """Compare protocol. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from typing import ClassVar 8 | 9 | from .base import BaseRequest 10 | 11 | 12 | class CompareRequest(BaseRequest): 13 | """Compare protocol.""" 14 | 15 | PROTOCOL_OP: ClassVar[int] = 14 16 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_requests/delete.py: -------------------------------------------------------------------------------- 1 | """Delete protocol. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from typing import AsyncGenerator, ClassVar 8 | 9 | from sqlalchemy import delete, select 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | from sqlalchemy.orm import defaultload 12 | 13 | from ldap_protocol.asn1parser import ASN1Row 14 | from ldap_protocol.dialogue import LDAPSession 15 | from ldap_protocol.kerberos import AbstractKadmin, KRBAPIError 16 | from ldap_protocol.ldap_codes import LDAPCodes 17 | from ldap_protocol.ldap_responses import ( 18 | INVALID_ACCESS_RESPONSE, 19 | DeleteResponse, 20 | ) 21 | from ldap_protocol.policies.access_policy import mutate_ap 22 | from ldap_protocol.session_storage import SessionStorage 23 | from ldap_protocol.utils.helpers import is_dn_in_base_directory 24 | from ldap_protocol.utils.queries import ( 25 | get_base_directories, 26 | get_filter_from_path, 27 | is_computer, 28 | validate_entry, 29 | ) 30 | from models import Directory 31 | 32 | from .base import BaseRequest 33 | 34 | 35 | class DeleteRequest(BaseRequest): 36 | """Delete request. 37 | 38 | DelRequest ::= [APPLICATION 10] LDAPDN 39 | """ 40 | 41 | PROTOCOL_OP: ClassVar[int] = 10 42 | 43 | entry: str 44 | 45 | @classmethod 46 | def from_data(cls, data: ASN1Row) -> "DeleteRequest": 47 | return cls(entry=data) 48 | 49 | async def handle( 50 | self, 51 | session: AsyncSession, 52 | ldap_session: LDAPSession, 53 | kadmin: AbstractKadmin, 54 | session_storage: SessionStorage, 55 | ) -> AsyncGenerator[DeleteResponse, None]: 56 | """Delete request handler.""" 57 | if not ldap_session.user: 58 | yield DeleteResponse(**INVALID_ACCESS_RESPONSE) 59 | return 60 | 61 | if not validate_entry(self.entry.lower()): 62 | yield DeleteResponse(result_code=LDAPCodes.INVALID_DN_SYNTAX) 63 | return 64 | 65 | query = ( 66 | select(Directory) 67 | .options( 68 | defaultload(Directory.user), 69 | defaultload(Directory.attributes), 70 | ) 71 | .filter(get_filter_from_path(self.entry)) 72 | ) 73 | 74 | directory = await session.scalar(mutate_ap(query, ldap_session.user)) 75 | 76 | if not directory: 77 | yield DeleteResponse(result_code=LDAPCodes.NO_SUCH_OBJECT) 78 | return 79 | 80 | if not await session.scalar( 81 | mutate_ap(query, ldap_session.user, "del"), 82 | ): 83 | yield DeleteResponse( 84 | result_code=LDAPCodes.INSUFFICIENT_ACCESS_RIGHTS, 85 | ) 86 | return 87 | 88 | if directory.is_domain: 89 | yield DeleteResponse(result_code=LDAPCodes.UNWILLING_TO_PERFORM) 90 | return 91 | 92 | for base_directory in await get_base_directories(session): 93 | if is_dn_in_base_directory(base_directory, self.entry): 94 | base_dn = base_directory 95 | break 96 | 97 | try: 98 | if directory.user: 99 | await kadmin.del_principal(directory.user.get_upn_prefix()) 100 | await session_storage.clear_user_sessions(directory.user.id) 101 | 102 | if await is_computer(directory.id, session): 103 | await kadmin.del_principal(directory.host_principal) 104 | await kadmin.del_principal( 105 | f"{directory.host_principal}.{base_dn.name}", 106 | ) 107 | except KRBAPIError: 108 | yield DeleteResponse( 109 | result_code=LDAPCodes.UNAVAILABLE, 110 | errorMessage="KerberosError", 111 | ) 112 | return 113 | 114 | await session.execute(delete(Directory).filter_by(id=directory.id)) 115 | await session.commit() 116 | 117 | yield DeleteResponse(result_code=LDAPCodes.SUCCESS) 118 | -------------------------------------------------------------------------------- /app/ldap_protocol/ldap_schema/__init__.py: -------------------------------------------------------------------------------- 1 | """Data Access Objects for LDAP Schema. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | -------------------------------------------------------------------------------- /app/ldap_protocol/objects.py: -------------------------------------------------------------------------------- 1 | """Subcontainers for requests/responses. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from enum import IntEnum, StrEnum 8 | 9 | 10 | class Scope(IntEnum): 11 | """Enum for search request. 12 | 13 | ``` 14 | BASE_OBJECT = 0 15 | SINGLE_LEVEL = 1 16 | WHOLE_SUBTREE = 2 17 | SUBORDINATE_SUBTREE = 3 18 | ``` 19 | """ 20 | 21 | BASE_OBJECT = 0 22 | SINGLE_LEVEL = 1 23 | WHOLE_SUBTREE = 2 24 | SUBORDINATE_SUBTREE = 3 25 | 26 | 27 | class DerefAliases(IntEnum): 28 | """Enum for search request. 29 | 30 | ``` 31 | NEVER_DEREF_ALIASES = 0 32 | DEREF_IN_SEARCHING = 1 33 | DEREF_FINDING_BASE_OBJ = 2 34 | DEREF_ALWAYS = 3 35 | ``` 36 | """ 37 | 38 | NEVER_DEREF_ALIASES = 0 39 | DEREF_IN_SEARCHING = 1 40 | DEREF_FINDING_BASE_OBJ = 2 41 | DEREF_ALWAYS = 3 42 | 43 | 44 | class LDAPMatchingRule(StrEnum): 45 | """Enum for LDAP Matching Rules (extensibleMatch).""" 46 | 47 | LDAP_MATCHING_RULE_BIT_AND = "1.2.840.113556.1.4.803" 48 | LDAP_MATCHING_RULE_BIT_OR = "1.2.840.113556.1.4.804" 49 | LDAP_MATCHING_RULE_TRANSITIVE_EVAL = "1.2.840.113556.1.4.1941" 50 | LDAP_MATCHING_RULE_DN_WITH_DATA = "1.2.840.113556.1.4.2253" 51 | -------------------------------------------------------------------------------- /app/ldap_protocol/policies/__init__.py: -------------------------------------------------------------------------------- 1 | """Policies module.""" 2 | -------------------------------------------------------------------------------- /app/ldap_protocol/policies/access_policy.py: -------------------------------------------------------------------------------- 1 | """Access policy manager. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from typing import Literal, TypeVar 8 | 9 | from sqlalchemy import ARRAY, String, bindparam, select, text 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | from sqlalchemy.orm import selectinload 12 | from sqlalchemy.sql.expression import Select, and_, or_ 13 | 14 | from ldap_protocol.dialogue import UserSchema 15 | from ldap_protocol.utils.const import GRANT_DN_STRING 16 | from ldap_protocol.utils.queries import ( 17 | get_groups, 18 | get_path_filter, 19 | get_search_path, 20 | ) 21 | from models import AccessPolicy, Directory, Group 22 | 23 | T = TypeVar("T", bound=Select) 24 | __all__ = ["get_policies", "create_access_policy", "mutate_ap"] 25 | 26 | 27 | async def get_policies(session: AsyncSession) -> list[AccessPolicy]: 28 | """Get policies. 29 | 30 | :param AsyncSession session: db 31 | :return list[AccessPolicy]: result 32 | """ 33 | query = select(AccessPolicy).options( 34 | selectinload(AccessPolicy.groups).selectinload(Group.directory), 35 | selectinload(AccessPolicy.directories), 36 | ) 37 | 38 | return list((await session.scalars(query)).all()) 39 | 40 | 41 | async def create_access_policy( 42 | name: str, 43 | can_read: bool, 44 | can_add: bool, 45 | can_modify: bool, 46 | can_delete: bool, 47 | grant_dn: GRANT_DN_STRING, 48 | groups: list[GRANT_DN_STRING], 49 | session: AsyncSession, 50 | ) -> None: 51 | """Get policies.""" 52 | path = get_search_path(grant_dn) 53 | dir_filter = get_path_filter( 54 | column=Directory.path[1 : len(path)], 55 | path=path, 56 | ) 57 | 58 | directories = await session.scalars(select(Directory).where(dir_filter)) 59 | groups_dirs = await get_groups(groups, session) 60 | 61 | policy = AccessPolicy( 62 | name=name, 63 | can_read=can_read, 64 | can_add=can_add, 65 | can_modify=can_modify, 66 | can_delete=can_delete, 67 | directories=directories.all(), 68 | groups=groups_dirs, 69 | ) 70 | session.add(policy) 71 | await session.flush() 72 | 73 | 74 | def mutate_ap( 75 | query: T, 76 | user: UserSchema, 77 | action: Literal["add", "read", "modify", "del"] = "read", 78 | ) -> T: 79 | """Modify query with read rule filter, joins acess policies. 80 | 81 | :param T query: select(Directory) 82 | :param UserSchema user: user data 83 | :return T: select(Directory).join(Directory.access_policies) 84 | """ 85 | whitelist = AccessPolicy.id.in_(user.access_policies_ids) 86 | 87 | if action == "read": 88 | user_path = get_search_path(user.dn) 89 | get_upper_tree_elem = text( 90 | '(:path)[1:"Directory"."depth"]', 91 | ).bindparams(bindparam("path", value=user_path, type_=ARRAY(String))) 92 | 93 | ap_filter = or_( 94 | and_(AccessPolicy.can_read.is_(True), whitelist), 95 | Directory.id == user.directory_id, 96 | Directory.path == get_upper_tree_elem, 97 | ) 98 | 99 | elif action == "add": 100 | ap_filter = AccessPolicy.can_add.is_(True) & whitelist 101 | 102 | elif action == "modify": 103 | ap_filter = AccessPolicy.can_modify.is_(True) & whitelist 104 | 105 | elif action == "del": 106 | ap_filter = AccessPolicy.can_delete.is_(True) & whitelist 107 | 108 | return query.join(Directory.access_policies, isouter=True).where(ap_filter) 109 | -------------------------------------------------------------------------------- /app/ldap_protocol/policies/network_policy.py: -------------------------------------------------------------------------------- 1 | """Network policy manager. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from ipaddress import IPv4Address, IPv6Address 8 | from typing import Literal 9 | 10 | from sqlalchemy import exists, or_, select, text 11 | from sqlalchemy.ext.asyncio import AsyncSession 12 | from sqlalchemy.orm import selectinload 13 | from sqlalchemy.sql.expression import Select, true 14 | 15 | from models import Group, NetworkPolicy, User 16 | 17 | 18 | def build_policy_query( 19 | ip: IPv4Address | IPv6Address, 20 | protocol_field_name: Literal["is_http", "is_ldap", "is_kerberos"], 21 | user_group_ids: list[int] | None = None, 22 | ) -> Select: 23 | """Build a base query for network policies with optional group filtering. 24 | 25 | :param IPv4Address ip: IP address to filter 26 | :param Literal["is_http", "is_ldap", "is_kerberos"] protocol_field_name 27 | protocol: Protocol to filter 28 | :param list[int] | None user_group_ids: List of user group IDs, optional 29 | :return: Select query 30 | """ 31 | protocol_field = getattr(NetworkPolicy, protocol_field_name) 32 | query = ( 33 | select(NetworkPolicy) 34 | .filter_by(enabled=True) 35 | .options( 36 | selectinload(NetworkPolicy.groups), 37 | selectinload(NetworkPolicy.mfa_groups), 38 | ) 39 | .filter( 40 | text(':ip <<= ANY("Policies".netmasks)').bindparams(ip=ip), 41 | protocol_field == true(), 42 | ) 43 | .order_by(NetworkPolicy.priority.asc()) 44 | .limit(1) 45 | ) 46 | 47 | if user_group_ids is not None: 48 | return query.filter( 49 | or_( 50 | NetworkPolicy.groups == None, # noqa 51 | NetworkPolicy.groups.any(Group.id.in_(user_group_ids)), 52 | ), 53 | ) 54 | 55 | return query 56 | 57 | 58 | async def check_mfa_group( 59 | policy: NetworkPolicy, 60 | user: User, 61 | session: AsyncSession, 62 | ) -> bool: 63 | """Check if user is in a group with MFA policy. 64 | 65 | :param NetworkPolicy policy: policy object 66 | :param User user: user object 67 | :param AsyncSession session: db session 68 | :return bool: status 69 | """ 70 | return await session.scalar( 71 | select( 72 | exists().where( # type: ignore 73 | Group.mfa_policies.contains(policy), 74 | Group.users.contains(user), 75 | ), 76 | ), 77 | ) 78 | 79 | 80 | async def get_user_network_policy( 81 | ip: IPv4Address | IPv6Address, 82 | user: User, 83 | session: AsyncSession, 84 | ) -> NetworkPolicy | None: 85 | """Get the highest priority network policy for user, ip and protocol. 86 | 87 | :param User user: user object 88 | :param AsyncSession session: db session 89 | :return NetworkPolicy | None: a NetworkPolicy object 90 | """ 91 | user_group_ids = [group.id for group in user.groups] 92 | 93 | query = build_policy_query(ip, "is_http", user_group_ids) 94 | 95 | return await session.scalar(query) 96 | 97 | 98 | async def is_user_group_valid( 99 | user: User | None, 100 | policy: NetworkPolicy | None, 101 | session: AsyncSession, 102 | ) -> bool: 103 | """Validate user groups, is it including to policy. 104 | 105 | :param User user: db user 106 | :param NetworkPolicy policy: db policy 107 | :param AsyncSession session: db 108 | :return bool: status 109 | """ 110 | if user is None or policy is None: 111 | return False 112 | 113 | if not policy.groups: 114 | return True 115 | 116 | query = ( 117 | select(Group) 118 | .join(Group.users) 119 | .join(Group.policies, isouter=True) 120 | .filter(Group.users.contains(user) & Group.policies.contains(policy)) 121 | .limit(1) 122 | ) 123 | 124 | group = await session.scalar(query) 125 | return bool(group) 126 | -------------------------------------------------------------------------------- /app/ldap_protocol/utils/__init__.py: -------------------------------------------------------------------------------- 1 | """App utils module.""" 2 | -------------------------------------------------------------------------------- /app/ldap_protocol/utils/const.py: -------------------------------------------------------------------------------- 1 | """Functions for SQL. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import re 8 | from typing import Annotated 9 | 10 | from pydantic import AfterValidator 11 | 12 | from .helpers import validate_entry 13 | 14 | 15 | def _type_validate_entry(entry: str) -> str: 16 | if validate_entry(entry): 17 | return entry 18 | raise ValueError(f"Invalid entry name {entry}") 19 | 20 | 21 | EMAIL_RE = re.compile( 22 | r"([A-Za-z0-9]+[.-_])*[A-Za-z0-9]+@[A-Za-z0-9-]+(\.[A-Z|a-z0-9-]{2,})+", 23 | ) 24 | 25 | 26 | def _type_validate_email(email: str) -> str: 27 | if EMAIL_RE.fullmatch(email): 28 | return email 29 | raise ValueError(f"Invalid entry name {email}") 30 | 31 | 32 | GRANT_DN_STRING = Annotated[str, AfterValidator(_type_validate_entry)] 33 | EmailStr = Annotated[str, AfterValidator(_type_validate_email)] 34 | -------------------------------------------------------------------------------- /app/ldap_protocol/utils/pagination.py: -------------------------------------------------------------------------------- 1 | """Pagination util. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE. 5 | """ 6 | 7 | import sys 8 | from abc import abstractmethod 9 | from dataclasses import dataclass 10 | from math import ceil 11 | from typing import Sequence, TypeVar 12 | 13 | from pydantic import BaseModel, Field 14 | from sqlalchemy import func, select 15 | from sqlalchemy.ext.asyncio import AsyncSession 16 | from sqlalchemy.sql.expression import Select 17 | 18 | from models import Base 19 | 20 | P = TypeVar("P", contravariant=True, bound=BaseModel) 21 | S = TypeVar("S", contravariant=True, bound=Base) 22 | 23 | 24 | class PaginationParams(BaseModel): 25 | """Pagination parameters.""" 26 | 27 | page_number: int = Field( 28 | ..., 29 | ge=1, 30 | le=sys.maxsize, 31 | ) 32 | page_size: int = Field( 33 | default=25, 34 | ge=1, 35 | le=100, 36 | ) 37 | 38 | 39 | @dataclass 40 | class PaginationMetadata: 41 | """Pagination metadata.""" 42 | 43 | page_number: int 44 | page_size: int 45 | total_count: int | None = None 46 | total_pages: int | None = None 47 | 48 | 49 | class BasePaginationSchema[P: BaseModel](BaseModel): 50 | """Paginator Schema.""" 51 | 52 | metadata: PaginationMetadata 53 | items: list[P] 54 | 55 | class Config: 56 | """Config for Paginator.""" 57 | 58 | arbitrary_types_allowed = True 59 | 60 | 61 | class BaseSchemaModel[S: Base](BaseModel): 62 | """Model for Schema. 63 | 64 | Schema is used for serialization and deserialization. 65 | """ 66 | 67 | @classmethod 68 | @abstractmethod 69 | def from_db(cls, sqla_instance: S) -> "BaseSchemaModel[S]": 70 | """Create an instance of Schema from instance of SQLA model.""" 71 | 72 | 73 | @dataclass 74 | class PaginationResult[S: Base]: 75 | """Paginator. 76 | 77 | Paginator contains metadata about pagination and chunk of items. 78 | """ 79 | 80 | metadata: PaginationMetadata 81 | items: Sequence[S] 82 | 83 | @classmethod 84 | async def get( 85 | cls, 86 | query: Select[tuple[S]], 87 | params: PaginationParams, 88 | sqla_model: type[S], 89 | session: AsyncSession, 90 | ) -> "PaginationResult[S]": 91 | """Get paginator.""" 92 | if query._order_by_clause is None or len(query._order_by_clause) == 0: 93 | raise ValueError("Select query must have an order_by clause.") 94 | 95 | metadata = PaginationMetadata( 96 | page_number=params.page_number, 97 | page_size=params.page_size, 98 | ) 99 | 100 | total_count_query = select(func.count()).select_from(sqla_model) 101 | metadata.total_count = (await session.scalars(total_count_query)).one() 102 | metadata.total_pages = ceil(metadata.total_count / params.page_size) 103 | 104 | offset = (params.page_number - 1) * params.page_size 105 | query = query.offset(offset).limit(params.page_size) 106 | result = await session.scalars(query) 107 | items = result.all() 108 | 109 | return cls(metadata=metadata, items=items) 110 | -------------------------------------------------------------------------------- /app/ldap_protocol/utils/raw_definition_parser.py: -------------------------------------------------------------------------------- 1 | """Raw definition parser. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from ldap3.protocol.rfc4512 import AttributeTypeInfo, ObjectClassInfo 8 | from sqlalchemy import select 9 | from sqlalchemy.ext.asyncio import AsyncSession 10 | 11 | from models import AttributeType, ObjectClass 12 | 13 | 14 | class RawDefinitionParser: 15 | """Parser for ObjectClass and AttributeType raw definition.""" 16 | 17 | @staticmethod 18 | def _list_to_string(data: list[str]) -> str | None: 19 | if not data: 20 | return None 21 | if len(data) == 1: 22 | return data[0] 23 | raise ValueError("Data is not a single element list") 24 | 25 | @staticmethod 26 | def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: 27 | tmp = AttributeTypeInfo.from_definition(definitions=[raw_definition]) 28 | return list(tmp.values())[0] 29 | 30 | @staticmethod 31 | def get_object_class_info(raw_definition: str) -> ObjectClassInfo: 32 | tmp = ObjectClassInfo.from_definition(definitions=[raw_definition]) 33 | return list(tmp.values())[0] 34 | 35 | @staticmethod 36 | async def _get_attribute_types_by_names( 37 | session: AsyncSession, 38 | names: list[str], 39 | ) -> list[AttributeType]: 40 | query = await session.execute( 41 | select(AttributeType) 42 | .where(AttributeType.name.in_(names)) 43 | ) # fmt: skip 44 | return list(query.scalars().all()) 45 | 46 | @staticmethod 47 | def create_attribute_type_by_raw( 48 | raw_definition: str, 49 | ) -> AttributeType: 50 | attribute_type_info = RawDefinitionParser._get_attribute_type_info( 51 | raw_definition=raw_definition 52 | ) 53 | 54 | return AttributeType( 55 | oid=attribute_type_info.oid, 56 | name=RawDefinitionParser._list_to_string(attribute_type_info.name), 57 | syntax=attribute_type_info.syntax, 58 | single_value=attribute_type_info.single_value, 59 | no_user_modification=attribute_type_info.no_user_modification, 60 | is_system=True, 61 | ) 62 | 63 | @staticmethod 64 | async def _get_object_class_by_name( 65 | object_class_name: str | None, 66 | session: AsyncSession, 67 | ) -> ObjectClass | None: 68 | if not object_class_name: 69 | return None 70 | 71 | return await session.scalar( 72 | select(ObjectClass) 73 | .where(ObjectClass.name == object_class_name) 74 | ) # fmt: skip 75 | 76 | @staticmethod 77 | async def create_object_class_by_info( 78 | session: AsyncSession, 79 | object_class_info: ObjectClassInfo, 80 | ) -> ObjectClass: 81 | """Create Object Class by ObjectClassInfo.""" 82 | superior_name = RawDefinitionParser._list_to_string( 83 | object_class_info.superior 84 | ) 85 | 86 | superior_object_class = ( 87 | await RawDefinitionParser._get_object_class_by_name( 88 | superior_name, 89 | session, 90 | ) 91 | ) 92 | 93 | object_class = ObjectClass( 94 | oid=object_class_info.oid, 95 | name=RawDefinitionParser._list_to_string(object_class_info.name), 96 | superior=superior_object_class, 97 | kind=object_class_info.kind, 98 | is_system=True, 99 | ) 100 | if object_class_info.must_contain: 101 | object_class.attribute_types_must.extend( 102 | await RawDefinitionParser._get_attribute_types_by_names( 103 | session, 104 | object_class_info.must_contain, 105 | ) 106 | ) 107 | if object_class_info.may_contain: 108 | object_class.attribute_types_may.extend( 109 | await RawDefinitionParser._get_attribute_types_by_names( 110 | session, 111 | object_class_info.may_contain, 112 | ) 113 | ) 114 | 115 | return object_class 116 | -------------------------------------------------------------------------------- /app/logs/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore -------------------------------------------------------------------------------- /app/schedule.py: -------------------------------------------------------------------------------- 1 | """Simple scheduler for tasks.""" 2 | 3 | import asyncio 4 | from typing import Callable, Coroutine 5 | 6 | import uvloop 7 | from dishka import AsyncContainer, Scope, make_async_container 8 | from loguru import logger 9 | 10 | from config import Settings 11 | from extra.scripts.check_ldap_principal import check_ldap_principal 12 | from extra.scripts.principal_block_user_sync import principal_block_sync 13 | from extra.scripts.uac_sync import disable_accounts 14 | from extra.scripts.update_krb5_config import update_krb5_config 15 | from ioc import MainProvider 16 | from ldap_protocol.dependency import resolve_deps 17 | 18 | type task_type = Callable[..., Coroutine] 19 | 20 | _TASKS: set[tuple[task_type, float]] = { 21 | (disable_accounts, 600.0), 22 | (principal_block_sync, 60.0), 23 | (check_ldap_principal, -1.0), 24 | (update_krb5_config, -1.0), 25 | } 26 | 27 | 28 | async def _schedule( 29 | task: task_type, 30 | wait: float, 31 | container: AsyncContainer, 32 | ) -> None: 33 | """Run task periodically. 34 | 35 | :param Awaitable task: any task 36 | :param AsyncContainer container: container 37 | :param float wait: time to wait after execution 38 | """ 39 | logger.info("Registered: {}", task.__name__) 40 | while True: 41 | async with container(scope=Scope.REQUEST) as ctnr: 42 | handler = await resolve_deps(func=task, container=ctnr) 43 | await handler() 44 | 45 | # NOTE: one-time tasks 46 | if wait < 0.0: 47 | break 48 | 49 | await asyncio.sleep(wait) 50 | 51 | 52 | def scheduler(settings: Settings) -> None: 53 | """Sript entrypoint.""" 54 | 55 | async def runner(settings: Settings) -> None: 56 | container = make_async_container( 57 | MainProvider(), 58 | context={Settings: settings}, 59 | ) 60 | 61 | async with asyncio.TaskGroup() as tg: 62 | for task, timeout in _TASKS: 63 | tg.create_task(_schedule(task, timeout, container)) 64 | 65 | def _run() -> None: 66 | uvloop.run(runner(settings)) 67 | 68 | try: 69 | import py_hot_reload 70 | except ImportError: 71 | _run() 72 | else: 73 | if settings.DEBUG: 74 | py_hot_reload.run_with_reloader(_run) 75 | else: 76 | _run() 77 | 78 | 79 | __all__ = ["scheduler"] 80 | -------------------------------------------------------------------------------- /app/security.py: -------------------------------------------------------------------------------- 1 | """Security base module. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from passlib.context import CryptContext 8 | 9 | pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") 10 | 11 | 12 | def verify_password(plain_password: str, hashed_password: str) -> bool: 13 | """Validate password. 14 | 15 | :param str plain_password: raw password 16 | :param str hashed_password: pwd hash from db 17 | :return bool: is password valid 18 | """ 19 | return pwd_context.verify(plain_password, hashed_password) 20 | 21 | 22 | def get_password_hash(password: str) -> str: 23 | """Hash password. 24 | 25 | :param str password: raw pwd 26 | :return str: hash 27 | """ 28 | return pwd_context.hash(password, max_rounds=9) 29 | -------------------------------------------------------------------------------- /certs/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore -------------------------------------------------------------------------------- /docker-compose.remote.test.yml: -------------------------------------------------------------------------------- 1 | services: 2 | md-test: 3 | image: ${TAG} 4 | restart: "no" 5 | environment: 6 | DEBUG: 1 7 | DOMAIN: md.test 8 | POSTGRES_USER: user1 9 | POSTGRES_PASSWORD: password123 10 | SECRET_KEY: 6a0452ae20cab4e21b6e9d18fa4b7bf397dd66ec3968b2d7407694278fd84cce 11 | command: bash -c "python -m pytest -W ignore::DeprecationWarning -vv" 12 | 13 | postgres: 14 | image: postgres:16 15 | restart: "no" 16 | environment: 17 | POSTGRES_USER: user1 18 | POSTGRES_PASSWORD: password123 19 | expose: 20 | - "5432" 21 | logging: 22 | driver: "none" 23 | 24 | dragonfly: 25 | image: 'docker.dragonflydb.io/dragonflydb/dragonfly' 26 | expose: 27 | - "6379" 28 | deploy: 29 | resources: 30 | limits: 31 | cpus: '0.5' 32 | memory: 0.5GiB 33 | -------------------------------------------------------------------------------- /docker-compose.test.yml: -------------------------------------------------------------------------------- 1 | services: 2 | test: 3 | build: 4 | context: . 5 | dockerfile: ./.docker/test.Dockerfile 6 | args: 7 | DOCKER_BUILDKIT: 1 8 | target: runtime 9 | restart: "no" 10 | container_name: multidirectory-test 11 | volumes: 12 | - ./app:/app 13 | - ./tests:/app/tests 14 | environment: 15 | DEBUG: 1 16 | DOMAIN: md.test 17 | POSTGRES_USER: user1 18 | POSTGRES_PASSWORD: password123 19 | SECRET_KEY: 6a0452ae20cab4e21b6e9d18fa4b7bf397dd66ec3968b2d7407694278fd84cce 20 | # PYTHONTRACEMALLOC: 1 21 | PYTHONDONTWRITEBYTECODE: 1 22 | command: bash -c "python -B -m pytest -x -W ignore::DeprecationWarning -vv" 23 | tty: true 24 | 25 | postgres: 26 | container_name: MD-test-postgres 27 | image: postgres:16 28 | restart: "no" 29 | environment: 30 | POSTGRES_USER: user1 31 | POSTGRES_PASSWORD: password123 32 | expose: 33 | - "5432" 34 | logging: 35 | driver: "none" 36 | 37 | dragonfly: 38 | image: 'docker.dragonflydb.io/dragonflydb/dragonfly' 39 | container_name: dragonfly-test 40 | expose: 41 | - "6379" 42 | deploy: 43 | resources: 44 | limits: 45 | cpus: '0.5' 46 | memory: 0.5GiB 47 | -------------------------------------------------------------------------------- /integration_tests/kerberos/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bookworm 2 | 3 | RUN set -eux; \ 4 | apt-get update -y; \ 5 | apt-get install \ 6 | krb5-kdc-ldap \ 7 | krb5-pkinit \ 8 | wamerican \ 9 | libsasl2-modules-gssapi-mit \ 10 | --no-install-recommends -y 11 | 12 | RUN touch /etc/krb5.conf && echo """\ 13 | [libdefaults]\n\ 14 | default_realm = MD.LOCALHOST\n\ 15 | noaddresses = TRUE\n\ 16 | [realms]\n\ 17 | MD.LOCALHOST = {\n\ 18 | kdc = kerberos:88\n\ 19 | admin_server = kerberos:749\n\ 20 | default_domain = kerberos\n\ 21 | }\n\ 22 | """ > /etc/krb5.conf; 23 | -------------------------------------------------------------------------------- /integration_tests/ssh/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:latest 2 | 3 | ENV DEBIAN_FRONTEND noninteractive 4 | RUN apt-get update && apt-get -y install libnss-ldapd libpam-ldapd openssh-sftp-server openssh-server 5 | RUN apt-get update && apt-get -y install rsyslog 6 | RUN mkdir -p /var/run/sshd 7 | ADD init.sh /init.sh 8 | RUN chmod u+x /init.sh 9 | 10 | EXPOSE 22 11 | 12 | CMD /init.sh 13 | -------------------------------------------------------------------------------- /integration_tests/ssh/docker-compose.yml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 MultiFactor 2 | # License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 3 | 4 | services: 5 | migrations: 6 | image: ghcr.io/multidirectorylab/multidirectory_test:beta 7 | container_name: multidirectory_migrations 8 | restart: "no" 9 | environment: 10 | DOMAIN: md.multifactor.dev 11 | DEBUG: 1 12 | POSTGRES_USER: user 13 | POSTGRES_PASSWORD: test_pwd 14 | SECRET_KEY: 6a0452ae20cab4e21b6e9d18fa4b7bf397dd66ec3968b2d74 15 | command: alembic upgrade head 16 | depends_on: 17 | postgres: 18 | condition: service_healthy 19 | 20 | cert_check: 21 | image: ghcr.io/multidirectorylab/multidirectory_test:beta 22 | container_name: multidirectory_certs_check 23 | restart: "no" 24 | command: ./extra/generate_cert.sh 25 | volumes: 26 | - certs:/certs 27 | 28 | api_server: 29 | image: ghcr.io/multidirectorylab/multidirectory_test:beta 30 | container_name: multidirectory_api 31 | restart: unless-stopped 32 | command: python multidirectory.py --http 33 | environment: 34 | USE_CORE_TLS: 1 35 | DOMAIN: md.multifactor.dev 36 | DEBUG: 1 37 | POSTGRES_USER: user 38 | POSTGRES_PASSWORD: test_pwd 39 | SECRET_KEY: 6a0452ae20cab4e21b6e9d18fa4b7bf397dd66ec3968b2d74 40 | MFA_API_SOURCE: dev 41 | hostname: api_server 42 | depends_on: 43 | migrations: 44 | condition: service_completed_successfully 45 | cert_check: 46 | condition: service_completed_successfully 47 | volumes: 48 | - certs:/certs 49 | ports: 50 | - 8000:8000 51 | healthcheck: 52 | test: curl --fail http://localhost:8000/auth/setup || exit 1 53 | interval: 5s 54 | timeout: 5s 55 | retries: 10 56 | start_period: 3s 57 | 58 | multidirectory_ldap: 59 | image: ghcr.io/multidirectorylab/multidirectory_test:beta 60 | container_name: multidirectory_ldap 61 | restart: unless-stopped 62 | hostname: multidirectory_ldap 63 | entrypoint: ./entrypoint.sh 64 | volumes: 65 | - certs:/certs 66 | - ./entrypoint.sh:/app/entrypoint.sh 67 | environment: 68 | DOCKER_DEFAULT_PLATFORM: linux/arm64 69 | DOMAIN: md.multifactor.dev 70 | DEBUG: 1 71 | POSTGRES_USER: user 72 | POSTGRES_PASSWORD: test_pwd 73 | SECRET_KEY: 6a0452ae20cab4e21b6e9d18fa4b7bf397dd66ec3968b2d74 74 | MFA_API_SOURCE: dev 75 | expose: 76 | - 389 77 | depends_on: 78 | dragonfly: 79 | condition: service_started 80 | migrations: 81 | condition: service_completed_successfully 82 | cert_check: 83 | condition: service_completed_successfully 84 | 85 | postgres: 86 | container_name: MD-ssh-postgres 87 | image: postgres:16 88 | restart: unless-stopped 89 | environment: 90 | POSTGRES_USER: user 91 | POSTGRES_PASSWORD: test_pwd 92 | PGDATA: /data/postgres 93 | expose: 94 | - 5432 95 | healthcheck: 96 | test: ["CMD-SHELL", "pg_isready -d postgres -U user"] 97 | interval: 1s 98 | timeout: 15s 99 | retries: 10 100 | start_period: 3s 101 | 102 | ssh: 103 | container_name: md-ssh 104 | image: ghcr.io/multidirectorylab/multidirectory_ssh_test:beta 105 | environment: 106 | SERVER: "ldap://multidirectory_ldap" 107 | BASE: "dc=md,dc=multifactor,dc=dev" 108 | BIND_DN: "admin" 109 | BASE_PASSWORD: "Password123" 110 | ports: 111 | - "222:22" 112 | healthcheck: 113 | test: service ssh status | grep 'sshd is running.' 114 | interval: 1s 115 | timeout: 15s 116 | retries: 10 117 | start_period: 3s 118 | 119 | dragonfly: 120 | image: 'docker.dragonflydb.io/dragonflydb/dragonfly' 121 | container_name: dragonfly 122 | expose: 123 | - 6379 124 | deploy: 125 | resources: 126 | limits: 127 | cpus: '1.0' 128 | memory: 2GiB 129 | reservations: 130 | cpus: '0.75' 131 | memory: 0.5GiB 132 | 133 | connect: 134 | image: ghcr.io/multidirectorylab/multidirectory:beta 135 | depends_on: 136 | ssh: 137 | condition: service_healthy 138 | multidirectory_ldap: 139 | condition: service_started 140 | api_server: 141 | condition: service_healthy 142 | 143 | volumes: 144 | certs: 145 | -------------------------------------------------------------------------------- /integration_tests/ssh/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir /certs; 3 | openssl req -nodes -new -x509 -keyout /certs/privkey.pem -out /certs/cert.pem -subj \ 4 | '/C=RU/ST=Moscow/L=Moscow/O=Global Security/OU=Multifactor/CN=md.multifactor.dev'; 5 | python multidirectory.py --ldap; 6 | -------------------------------------------------------------------------------- /integration_tests/ssh/init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo -e "base ${BASE}\nuri ${SERVER}\nbinddn ${BIND_DN}\nbindpw ${BASE_PASSWORD}" > /etc/nslcd.conf 3 | 4 | for item in passwd shadow group; do 5 | sed -i "s/^${item}:.*/${item}: files ldap/g" /etc/nsswitch.conf 6 | done 7 | 8 | /usr/sbin/nslcd 9 | /usr/sbin/rsyslogd 10 | /usr/sbin/sshd 11 | 12 | tail -F /var/log/auth.log 13 | -------------------------------------------------------------------------------- /integration_tests/ssh/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -X 'POST' \ 4 | 'http://localhost:8000/auth/setup' \ 5 | -H 'accept: application/json' \ 6 | -H 'Content-Type: application/json' \ 7 | -d '{ 8 | "domain": "md.multifactor.dev", 9 | "username": "admin", 10 | "user_principal_name": "admin", 11 | "display_name": "admin", 12 | "mail": "admin@example.com", 13 | "password": "Password123" 14 | }' -m 30; 15 | echo -e "performing ssh conn"; 16 | sshpass -p Password123 \ 17 | ssh \ 18 | -o UserKnownHostsFile=/dev/null \ 19 | -o StrictHostKeyChecking=no \ 20 | admin@localhost -p 222 "exit 0"; 21 | exit $?; 22 | -------------------------------------------------------------------------------- /local.env: -------------------------------------------------------------------------------- 1 | DEBUG=1 2 | AUTO_RELOAD=1 3 | DOMAIN=md.localhost 4 | POSTGRES_USER=user1 5 | POSTGRES_PASSWORD=password123 6 | SECRET_KEY=6a0452ae20cab4e21b6e9d18fa4b7bf397dd66ec3968b2d7407694278fd84cce 7 | MFA_API_SOURCE=dev 8 | ACCESS_TOKEN_EXPIRE_MINUTES=180 9 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_api/__init__.py -------------------------------------------------------------------------------- /tests/test_api/test_auth/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_api/test_auth/__init__.py -------------------------------------------------------------------------------- /tests/test_api/test_auth/test_pwd_policy.py: -------------------------------------------------------------------------------- 1 | """Test policy API. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from copy import copy 8 | 9 | import pytest 10 | from fastapi import status 11 | from httpx import AsyncClient 12 | 13 | 14 | @pytest.mark.asyncio 15 | @pytest.mark.usefixtures("session") 16 | async def test_policy_password(http_client: AsyncClient) -> None: 17 | """Test create policy.""" 18 | policy_data = { 19 | "name": "Default domain password policy", 20 | "password_history_length": 4, 21 | "maximum_password_age_days": 0, 22 | "minimum_password_age_days": 0, 23 | "minimum_password_length": 7, 24 | "password_must_meet_complexity_requirements": True, 25 | } 26 | 27 | response = await http_client.post("/password-policy", json=policy_data) 28 | 29 | assert response.status_code == 201 30 | assert response.json() == policy_data 31 | 32 | response = await http_client.get("/password-policy") 33 | assert response.status_code == status.HTTP_200_OK 34 | assert response.json() == policy_data 35 | 36 | changed_data = copy(policy_data) 37 | changed_data["maximum_password_age_days"] = 80 38 | changed_data["minimum_password_age_days"] = 30 39 | 40 | response = await http_client.put( 41 | "/password-policy", 42 | json=changed_data, 43 | ) 44 | 45 | assert response.status_code == status.HTTP_200_OK 46 | assert response.json() == changed_data 47 | 48 | response = await http_client.get("/password-policy") 49 | assert response.status_code == status.HTTP_200_OK 50 | assert response.json() == changed_data 51 | 52 | response = await http_client.delete("/password-policy") 53 | 54 | assert response.status_code == status.HTTP_200_OK 55 | assert response.json() == policy_data 56 | 57 | response = await http_client.get("/password-policy") 58 | assert response.status_code == status.HTTP_200_OK 59 | assert response.json() == policy_data 60 | -------------------------------------------------------------------------------- /tests/test_api/test_auth/test_router_mfa.py: -------------------------------------------------------------------------------- 1 | """MFA methods. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from datetime import datetime, timedelta 8 | 9 | import httpx 10 | import pytest 11 | from jose import jwt 12 | from sqlalchemy import select 13 | from sqlalchemy.ext.asyncio import AsyncSession 14 | 15 | from api.auth.oauth2 import authenticate_user 16 | from models import CatalogueSetting 17 | from tests.conftest import TestCreds 18 | 19 | 20 | @pytest.mark.asyncio 21 | async def test_set_and_remove_mfa( 22 | http_client: httpx.AsyncClient, 23 | session: AsyncSession, 24 | ) -> None: 25 | """Set mfa.""" 26 | response = await http_client.post( 27 | "/multifactor/setup", 28 | json={ 29 | "mfa_key": "123", 30 | "mfa_secret": "123", 31 | "is_ldap_scope": False, 32 | }, 33 | ) 34 | 35 | assert response.json() is True 36 | assert response.status_code == 201 37 | 38 | assert await session.scalar( 39 | select(CatalogueSetting) 40 | .filter_by(name="mfa_key", value="123") 41 | ) # fmt: skip 42 | assert await session.scalar( 43 | select(CatalogueSetting) 44 | .filter_by(name="mfa_secret", value="123") 45 | ) # fmt: skip 46 | 47 | response = await http_client.delete("/multifactor/keys?scope=http") 48 | 49 | assert response.status_code == 200 50 | 51 | assert not await session.scalar( 52 | select(CatalogueSetting) 53 | .filter_by(name="mfa_key", value="123") 54 | ) # fmt: skip 55 | assert not await session.scalar( 56 | select(CatalogueSetting) 57 | .filter_by(name="mfa_secret", value="123") 58 | ) # fmt: skip 59 | 60 | 61 | @pytest.mark.asyncio 62 | @pytest.mark.usefixtures("setup_session") 63 | async def test_connect_mfa( 64 | http_client: httpx.AsyncClient, 65 | session: AsyncSession, 66 | creds: TestCreds, 67 | ) -> None: 68 | """Test websocket mfa.""" 69 | session.add( 70 | CatalogueSetting(name="mfa_secret", value="123"), 71 | ) 72 | session.add(CatalogueSetting(name="mfa_key", value="123")) 73 | await session.commit() 74 | 75 | redirect_url = "example.com" 76 | 77 | response = await http_client.post( 78 | "/multifactor/connect", 79 | data={"username": creds.un, "password": creds.pw}, 80 | ) 81 | 82 | assert response.json() == {"status": "pending", "message": redirect_url} 83 | 84 | user = await authenticate_user(session, creds.un, creds.pw) 85 | 86 | assert user 87 | 88 | exp = datetime.now() + timedelta(minutes=5) 89 | 90 | token = jwt.encode({"aud": "123", "uid": user.id, "exp": exp}, "123") 91 | 92 | response = await http_client.post( 93 | "/multifactor/create", 94 | data={"accessToken": token}, 95 | follow_redirects=False, 96 | ) 97 | 98 | assert response.status_code == 302 99 | assert response.cookies.get("id") 100 | -------------------------------------------------------------------------------- /tests/test_api/test_ldap_schema/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_api/test_ldap_schema/__init__.py -------------------------------------------------------------------------------- /tests/test_api/test_ldap_schema/test_attribute_type_router_datasets.py: -------------------------------------------------------------------------------- 1 | """Datasets for test attribute type router.""" 2 | 3 | from fastapi import status 4 | 5 | test_modify_one_attribute_type_dataset = [ 6 | { 7 | "attribute_type_name": "testAttributeType0", 8 | "attribute_type_data": { 9 | "oid": "1.2.3.4", 10 | "name": "testAttributeType0", 11 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 12 | "single_value": False, 13 | "no_user_modification": False, 14 | "is_system": False, 15 | }, 16 | "attribute_type_changes": { 17 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 18 | "single_value": True, 19 | "no_user_modification": False, 20 | }, 21 | "status_code": status.HTTP_200_OK, 22 | }, 23 | { 24 | "attribute_type_name": "testAttributeType1_notvalidname", 25 | "attribute_type_data": { 26 | "oid": "1.2.3.4", 27 | "name": "testAttributeType1", 28 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 29 | "single_value": True, 30 | "no_user_modification": False, 31 | "is_system": False, 32 | }, 33 | "attribute_type_changes": { 34 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 35 | "single_value": True, 36 | "no_user_modification": False, 37 | }, 38 | "status_code": status.HTTP_404_NOT_FOUND, 39 | }, 40 | { 41 | "attribute_type_name": "testAttributeType2", 42 | "attribute_type_data": { 43 | "oid": "1.2.3.4", 44 | "name": "testAttributeType2", 45 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 46 | "single_value": False, 47 | "no_user_modification": False, 48 | "is_system": True, 49 | }, 50 | "attribute_type_changes": { 51 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 52 | "single_value": True, 53 | "no_user_modification": False, 54 | }, 55 | "status_code": status.HTTP_200_OK, 56 | }, 57 | ] 58 | 59 | test_delete_bulk_attribute_types_dataset = [ 60 | { 61 | "attribute_type_datas": [ 62 | { 63 | "oid": "1.2.3.4", 64 | "name": "testAttributeType1", 65 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 66 | "single_value": True, 67 | "no_user_modification": False, 68 | "is_system": False, 69 | }, 70 | { 71 | "oid": "1.2.3.4.5", 72 | "name": "testAttributeType2", 73 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 74 | "single_value": True, 75 | "no_user_modification": False, 76 | "is_system": False, 77 | }, 78 | ], 79 | "attribute_types_deleted": [ 80 | "testAttributeType1", 81 | "testAttributeType2", 82 | ], 83 | "status_code": status.HTTP_200_OK, 84 | }, 85 | { 86 | "attribute_type_datas": [ 87 | { 88 | "oid": "1.2.3.4", 89 | "name": "testAttributeType1", 90 | "syntax": "1.3.6.1.4.1.1466.115.121.1.15", 91 | "single_value": True, 92 | "no_user_modification": False, 93 | "is_system": False, 94 | }, 95 | ], 96 | "attribute_types_deleted": [ 97 | "testAttributeType1", 98 | "testAttributeType2", 99 | "testAttributeType3", 100 | "testAttributeType4", 101 | ], 102 | "status_code": status.HTTP_200_OK, 103 | }, 104 | { 105 | "attribute_type_datas": [], 106 | "attribute_types_deleted": [], 107 | "status_code": status.HTTP_422_UNPROCESSABLE_ENTITY, 108 | }, 109 | ] 110 | -------------------------------------------------------------------------------- /tests/test_api/test_main/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_api/test_main/__init__.py -------------------------------------------------------------------------------- /tests/test_api/test_main/test_multifactor.py: -------------------------------------------------------------------------------- 1 | """Test MultifactorAPI.""" 2 | 3 | from unittest.mock import AsyncMock, Mock 4 | 5 | import httpx 6 | import pytest 7 | 8 | from config import Settings 9 | from ldap_protocol.multifactor import MultifactorAPI 10 | 11 | 12 | @pytest.mark.asyncio 13 | @pytest.mark.usefixtures("session") 14 | @pytest.mark.parametrize( 15 | ( 16 | "mock_post_side_effect", 17 | "expected_result", 18 | "expected_exception", 19 | ), 20 | [ 21 | # 1. httpx.ConnectTimeout => raise MFAConnectError 22 | ( 23 | httpx.ConnectTimeout("Connection timed out"), 24 | True, 25 | MultifactorAPI.MFAConnectError, 26 | ), 27 | # 2. httpx.ReadTimeout => False 28 | ( 29 | httpx.ReadTimeout("Read timed out"), 30 | False, 31 | None, 32 | ), 33 | # 3. status_code=401 => raise MFAMissconfiguredError 34 | ( 35 | httpx.Response(status_code=401), 36 | True, 37 | MultifactorAPI.MFAMissconfiguredError, 38 | ), 39 | # 4. status_code=500 => raise MultifactorError 40 | ( 41 | httpx.Response(status_code=500), 42 | True, 43 | MultifactorAPI.MultifactorError, 44 | ), 45 | # 5. status_code=200, 'model.status' != "Granted" => False 46 | ( 47 | httpx.Response( 48 | status_code=200, 49 | json={"model": {"status": "Denied"}}, 50 | request=httpx.Request("POST", ""), 51 | ), 52 | False, 53 | None, 54 | ), 55 | # 6. status_code=200, 'model.status' == "Granted" => True 56 | ( 57 | httpx.Response( 58 | status_code=200, 59 | json={"model": {"status": "Granted"}}, 60 | request=httpx.Request("POST", ""), 61 | ), 62 | True, 63 | None, 64 | ), 65 | ], 66 | ) 67 | async def test_ldap_validate_mfa( 68 | mock_post_side_effect: httpx.Response, 69 | expected_result: bool, 70 | expected_exception: Exception | None, 71 | settings: Settings, 72 | ) -> None: 73 | """Test the LDAP validate MFA function with various scenarios.""" 74 | async_client = Mock() 75 | if isinstance(mock_post_side_effect, Exception): 76 | async_client.post = AsyncMock(side_effect=mock_post_side_effect) 77 | else: 78 | async_client.post = AsyncMock(return_value=mock_post_side_effect) 79 | 80 | mfa_api = MultifactorAPI("test", "test", async_client, settings) 81 | 82 | if expected_exception: 83 | with pytest.raises(expected_exception): # type: ignore 84 | await mfa_api.ldap_validate_mfa("user", "password") 85 | else: 86 | result = await mfa_api.ldap_validate_mfa( 87 | "user", 88 | "password", 89 | ) 90 | assert result == expected_result 91 | -------------------------------------------------------------------------------- /tests/test_api/test_main/test_openapi.py: -------------------------------------------------------------------------------- 1 | """Test the OpenAPI router.""" 2 | 3 | import pytest 4 | from fastapi import status 5 | from httpx import AsyncClient 6 | 7 | 8 | @pytest.mark.asyncio 9 | async def test_swagger_ui( 10 | http_client: AsyncClient, 11 | ) -> None: 12 | """Test the Swagger UI.""" 13 | response = await http_client.get("/api/docs") 14 | assert response.status_code == status.HTTP_200_OK 15 | 16 | 17 | @pytest.mark.asyncio 18 | async def test_redoc( 19 | http_client: AsyncClient, 20 | ) -> None: 21 | """Test the ReDoc.""" 22 | response = await http_client.get("/api/redoc") 23 | assert response.status_code == status.HTTP_200_OK 24 | -------------------------------------------------------------------------------- /tests/test_api/test_main/test_router/test_delete.py: -------------------------------------------------------------------------------- 1 | """Test API Delete. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import pytest 8 | from httpx import AsyncClient 9 | 10 | from ldap_protocol.ldap_codes import LDAPCodes 11 | 12 | 13 | @pytest.mark.asyncio 14 | @pytest.mark.usefixtures("adding_test_user") 15 | @pytest.mark.usefixtures("setup_session") 16 | @pytest.mark.usefixtures("session") 17 | async def test_api_correct_delete(http_client: AsyncClient) -> None: 18 | """Test API for delete object.""" 19 | response = await http_client.request( 20 | "delete", 21 | "/entry/delete", 22 | json={"entry": "cn=test,dc=md,dc=test"}, 23 | ) 24 | 25 | data = response.json() 26 | 27 | assert isinstance(data, dict) 28 | assert data.get("resultCode") == LDAPCodes.SUCCESS 29 | 30 | 31 | @pytest.mark.asyncio 32 | @pytest.mark.usefixtures("adding_test_user") 33 | @pytest.mark.usefixtures("setup_session") 34 | @pytest.mark.usefixtures("session") 35 | async def test_api_delete_with_incorrect_dn(http_client: AsyncClient) -> None: 36 | """Test API for delete object with incorrect DN.""" 37 | response = await http_client.request( 38 | "delete", 39 | "/entry/delete", 40 | json={ 41 | "entry": "cn!=test,dc=md,dc=test", 42 | }, 43 | ) 44 | 45 | data = response.json() 46 | 47 | assert isinstance(data, dict) 48 | assert data.get("resultCode") == LDAPCodes.INVALID_DN_SYNTAX 49 | 50 | 51 | @pytest.mark.asyncio 52 | @pytest.mark.usefixtures("adding_test_user") 53 | @pytest.mark.usefixtures("setup_session") 54 | @pytest.mark.usefixtures("session") 55 | async def test_api_delete_non_exist_object(http_client: AsyncClient) -> None: 56 | """Test API for delete non-existen object.""" 57 | response = await http_client.request( 58 | "delete", 59 | "/entry/delete", 60 | json={ 61 | "entry": "cn=non-exist-object,dc=md,dc=test", 62 | }, 63 | ) 64 | 65 | data = response.json() 66 | 67 | assert isinstance(data, dict) 68 | assert data.get("resultCode") == LDAPCodes.NO_SUCH_OBJECT 69 | -------------------------------------------------------------------------------- /tests/test_api/test_main/test_router/test_login.py: -------------------------------------------------------------------------------- 1 | """Test API Modify DN. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import httpx 8 | import pytest 9 | from fastapi import status 10 | 11 | from ldap_protocol.ldap_requests.modify import Operation 12 | from tests.conftest import TestCreds 13 | 14 | 15 | @pytest.mark.asyncio 16 | @pytest.mark.usefixtures("session") 17 | async def test_api_before_setup( 18 | unbound_http_client: httpx.AsyncClient, 19 | ) -> None: 20 | """Test api before setup.""" 21 | response = await unbound_http_client.get("auth/me") 22 | 23 | assert response.status_code == status.HTTP_401_UNAUTHORIZED 24 | 25 | 26 | @pytest.mark.asyncio 27 | @pytest.mark.usefixtures("adding_test_user") 28 | @pytest.mark.usefixtures("session") 29 | async def test_api_auth_after_change_account_exp( 30 | http_client: httpx.AsyncClient, 31 | ) -> None: 32 | """Test api auth.""" 33 | await http_client.patch( 34 | "/entry/update", 35 | json={ 36 | "object": "cn=test,dc=md,dc=test", 37 | "changes": [ 38 | { 39 | "operation": Operation.ADD, 40 | "modification": { 41 | "type": "accountExpires", 42 | "vals": ["133075840000000000"], 43 | }, 44 | }, 45 | ], 46 | }, 47 | ) 48 | auth = await http_client.post( 49 | "auth/", 50 | data={ 51 | "username": "new_user@md.test", 52 | "password": "P@ssw0rd", 53 | }, 54 | ) 55 | 56 | assert auth.status_code == status.HTTP_403_FORBIDDEN 57 | 58 | await http_client.patch( 59 | "/entry/update", 60 | json={ 61 | "object": "cn=test,dc=md,dc=test", 62 | "changes": [ 63 | { 64 | "operation": Operation.REPLACE, 65 | "modification": { 66 | "type": "accountExpires", 67 | "vals": ["0"], 68 | }, 69 | }, 70 | ], 71 | }, 72 | ) 73 | auth = await http_client.post( 74 | "auth/", 75 | data={ 76 | "username": "new_user@md.test", 77 | "password": "P@ssw0rd", 78 | }, 79 | ) 80 | 81 | assert auth.cookies.get("id") 82 | 83 | 84 | @pytest.mark.usefixtures("setup_session") 85 | async def test_refresh_and_logout_flow( 86 | unbound_http_client: httpx.AsyncClient, 87 | creds: TestCreds, 88 | ) -> None: 89 | """Test login, refresh and logout cookie flow.""" 90 | await unbound_http_client.post( 91 | "auth/", 92 | data={"username": creds.un, "password": creds.pw}, 93 | ) 94 | 95 | old_token = unbound_http_client.cookies.get("id") 96 | 97 | assert old_token 98 | 99 | await unbound_http_client.delete("auth/") 100 | 101 | assert not unbound_http_client.cookies 102 | -------------------------------------------------------------------------------- /tests/test_api/test_network/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_api/test_network/__init__.py -------------------------------------------------------------------------------- /tests/test_api/test_shadow/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_api/test_shadow/__init__.py -------------------------------------------------------------------------------- /tests/test_api/test_shadow/test_router.py: -------------------------------------------------------------------------------- 1 | """Test shadow api. 2 | 3 | Copyright (c) 2025 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import pytest 8 | from fastapi import status 9 | from httpx import AsyncClient 10 | from sqlalchemy import delete, update 11 | from sqlalchemy.ext.asyncio import AsyncSession 12 | 13 | from models import MFAFlags, NetworkPolicy 14 | 15 | from .conftest import ProxyRequestModel 16 | 17 | 18 | @pytest.mark.asyncio 19 | @pytest.mark.usefixtures("setup_session") 20 | async def test_shadow_api_non_existent_user(http_client: AsyncClient) -> None: 21 | """Test shadow api with non-existent user.""" 22 | response = await http_client.post( 23 | "/shadow/mfa/push", 24 | json=ProxyRequestModel( 25 | principal="non-existent_user", 26 | ip="127.0.0.1", 27 | ).model_dump(), 28 | ) 29 | 30 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 31 | 32 | 33 | @pytest.mark.asyncio 34 | @pytest.mark.usefixtures("setup_session") 35 | async def test_shadow_api_without_network_policies( 36 | http_client: AsyncClient, 37 | adding_mfa_user_and_group: dict, 38 | session: AsyncSession, 39 | ) -> None: 40 | """Test shadow api without network policy.""" 41 | await session.execute(delete(NetworkPolicy)) 42 | 43 | response = await http_client.post( 44 | "/shadow/mfa/push", 45 | json=adding_mfa_user_and_group, 46 | ) 47 | 48 | assert response.status_code == status.HTTP_403_FORBIDDEN 49 | 50 | 51 | @pytest.mark.asyncio 52 | @pytest.mark.usefixtures("setup_session") 53 | async def test_shadow_api_without_kerberos_protocol( 54 | http_client: AsyncClient, 55 | adding_mfa_user_and_group: dict, 56 | session: AsyncSession, 57 | ) -> None: 58 | """Test shadow api without network policy with kerberos protocol.""" 59 | await session.execute( 60 | update(NetworkPolicy).values({NetworkPolicy.is_kerberos: False}), 61 | ) 62 | 63 | response = await http_client.post( 64 | "/shadow/mfa/push", 65 | json=adding_mfa_user_and_group, 66 | ) 67 | 68 | assert response.status_code == status.HTTP_403_FORBIDDEN 69 | 70 | 71 | @pytest.mark.asyncio 72 | @pytest.mark.usefixtures("setup_session") 73 | async def test_shadow_api_with_disable_mfa( 74 | http_client: AsyncClient, 75 | adding_mfa_user_and_group: dict, 76 | ) -> None: 77 | """Test shadow api with disable mfa.""" 78 | response = await http_client.post( 79 | "/shadow/mfa/push", 80 | json=adding_mfa_user_and_group, 81 | ) 82 | 83 | assert response.status_code == status.HTTP_200_OK 84 | 85 | 86 | @pytest.mark.asyncio 87 | @pytest.mark.usefixtures("setup_session") 88 | async def test_shadow_api_whitelist_without_user_group( 89 | http_client: AsyncClient, 90 | adding_mfa_user_and_group: dict, 91 | session: AsyncSession, 92 | ) -> None: 93 | """Test shadow api whitelist without user group.""" 94 | await session.execute( 95 | update(NetworkPolicy).values( 96 | {NetworkPolicy.mfa_status: MFAFlags.WHITELIST} 97 | ), 98 | ) 99 | 100 | response = await http_client.post( 101 | "/shadow/mfa/push", 102 | json=adding_mfa_user_and_group, 103 | ) 104 | 105 | assert response.status_code == status.HTTP_200_OK 106 | 107 | 108 | @pytest.mark.asyncio 109 | @pytest.mark.usefixtures("setup_session") 110 | async def test_shadow_api_enable_mfa( 111 | http_client: AsyncClient, 112 | adding_mfa_user_and_group: dict, 113 | session: AsyncSession, 114 | ) -> None: 115 | """Test shadow api enable mfa.""" 116 | await session.execute( 117 | update(NetworkPolicy).values( 118 | {NetworkPolicy.mfa_status: MFAFlags.ENABLED} 119 | ), 120 | ) 121 | 122 | response = await http_client.post( 123 | "/shadow/mfa/push", 124 | json=adding_mfa_user_and_group, 125 | ) 126 | 127 | assert response.status_code == status.HTTP_200_OK 128 | -------------------------------------------------------------------------------- /tests/test_ldap/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_ldap/__init__.py -------------------------------------------------------------------------------- /tests/test_ldap/test_ldap3_definition_parse.py: -------------------------------------------------------------------------------- 1 | """Test parse ldap3 definition. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import pytest 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | 10 | from ldap_protocol.utils.raw_definition_parser import ( 11 | RawDefinitionParser as RDParser, 12 | ) 13 | from models import AttributeType, ObjectClass 14 | 15 | test_ldap3_parse_attribute_types_dataset = [ 16 | [ 17 | "( 1.2.840.113556.1.4.149 NAME 'attributeSecurityGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )", # noqa: E501 18 | "( 1.2.840.113556.1.4.1703 NAME 'msDS-FilterContainers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )", # noqa: E501 19 | "( 1.2.840.113556.1.4.655 NAME 'legacyExchangeDN' SYNTAX '1.2.840.113556.1.4.905' SINGLE-VALUE )", # noqa: E501 20 | "( 1.2.840.113556.1.4.21 NAME 'cOMProgID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )", # noqa: E501 21 | "( 1.2.840.113556.1.4.2147 NAME 'msDNS-PropagationTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )", # noqa: E501 22 | "( 1.2.840.113556.1.6.18.1.301 NAME 'msSFU30KeyAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )", # noqa: E501 23 | "( 1.2.840.113556.1.4.686 NAME 'domainID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )", # noqa: E501 24 | "( 1.2.840.113556.1.6.13.3.23 NAME 'msDFSR-ReplicationGroupGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )", # noqa: E501 25 | "( 1.2.840.113556.1.4.818 NAME 'productCode' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )", # noqa: E501 26 | "( 1.3.6.1.1.1.1.18 NAME 'oncRpcNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )", # noqa: E501 27 | "( 1.2.840.113556.1.4.221 NAME 'sAMAccountName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )", # noqa: E501 28 | "( 1.2.840.113556.1.4.375 NAME 'systemFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )", # noqa: E501 29 | ], 30 | ] 31 | 32 | 33 | @pytest.mark.parametrize( 34 | "test_dataset", 35 | test_ldap3_parse_attribute_types_dataset, 36 | ) 37 | @pytest.mark.asyncio 38 | async def test_ldap3_parse_attribute_types(test_dataset: list[str]) -> None: 39 | """Test parse ldap3 attribute types.""" 40 | for raw_definition in test_dataset: 41 | attribute_type: AttributeType = RDParser.create_attribute_type_by_raw( 42 | raw_definition 43 | ) 44 | 45 | assert raw_definition == attribute_type.get_raw_definition() 46 | 47 | 48 | test_ldap3_parse_object_classes_dataset = [ 49 | [ 50 | "( 1.2.840.113556.1.5.152 NAME 'intellimirrorGroup' STRUCTURAL )", 51 | "( 1.2.840.113556.1.5.262 NAME 'msImaging-PSPs' STRUCTURAL )", 52 | "( 1.2.840.113556.1.5.27 NAME 'rpcEntry' ABSTRACT )", 53 | ], 54 | ] 55 | 56 | 57 | @pytest.mark.parametrize( 58 | "test_dataset", 59 | test_ldap3_parse_object_classes_dataset, 60 | ) 61 | @pytest.mark.asyncio 62 | async def test_ldap3_parse_object_classes( 63 | session: AsyncSession, 64 | test_dataset: list[str], 65 | ) -> None: 66 | """Test parse ldap3 object classes.""" 67 | for raw_definition in test_dataset: 68 | object_class_info = RDParser.get_object_class_info( 69 | raw_definition=raw_definition 70 | ) 71 | object_class: ObjectClass = await RDParser.create_object_class_by_info( 72 | session=session, 73 | object_class_info=object_class_info, 74 | ) 75 | 76 | assert raw_definition == object_class.get_raw_definition() 77 | -------------------------------------------------------------------------------- /tests/test_ldap/test_ldap3_lib.py: -------------------------------------------------------------------------------- 1 | """Test ldap3 lib call. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from asyncio import BaseEventLoop 8 | from functools import partial 9 | 10 | import pytest 11 | from ldap3 import Connection 12 | 13 | from tests.conftest import TestCreds 14 | 15 | 16 | @pytest.mark.asyncio 17 | @pytest.mark.usefixtures("setup_session") 18 | @pytest.mark.usefixtures("session") 19 | async def test_ldap3_search( 20 | ldap_client: Connection, 21 | event_loop: BaseEventLoop, 22 | creds: TestCreds, 23 | ) -> None: 24 | """Test ldap3 search.""" 25 | await event_loop.run_in_executor( 26 | None, 27 | partial(ldap_client.rebind, user=creds.un, password=creds.pw), 28 | ) 29 | 30 | result = await event_loop.run_in_executor( 31 | None, 32 | partial( 33 | ldap_client.search, 34 | "dc=md,dc=test", 35 | "(objectclass=*)", 36 | ), 37 | ) 38 | 39 | assert result 40 | assert ldap_client.entries 41 | 42 | 43 | @pytest.mark.asyncio 44 | @pytest.mark.usefixtures("setup_session") 45 | @pytest.mark.usefixtures("session") 46 | async def test_ldap3_search_memberof( 47 | ldap_client: Connection, 48 | event_loop: BaseEventLoop, 49 | creds: TestCreds, 50 | ) -> None: 51 | """Test ldap3 search memberof.""" 52 | member = "cn=user1,ou=moscow,ou=russia,ou=users,dc=md,dc=test" 53 | await event_loop.run_in_executor( 54 | None, 55 | partial(ldap_client.rebind, user=creds.un, password=creds.pw), 56 | ) 57 | 58 | result = await event_loop.run_in_executor( 59 | None, 60 | partial( 61 | ldap_client.search, 62 | "dc=md,dc=test", 63 | "(memberOf=cn=developers,cn=groups,dc=md,dc=test)", 64 | ), 65 | ) 66 | 67 | assert result 68 | assert ldap_client.entries 69 | assert ldap_client.entries[0].entry_dn == member 70 | -------------------------------------------------------------------------------- /tests/test_ldap/test_ldap3_whoami.py: -------------------------------------------------------------------------------- 1 | """Test whoami. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import asyncio 8 | from functools import partial 9 | 10 | import pytest 11 | from ldap3 import Connection 12 | 13 | from tests.conftest import TestCreds 14 | 15 | 16 | @pytest.mark.asyncio 17 | @pytest.mark.usefixtures("setup_session") 18 | async def test_anonymous_whoami( 19 | event_loop: asyncio.BaseEventLoop, 20 | ldap_client: Connection, 21 | ) -> None: 22 | """Test anonymous pwd change.""" 23 | await event_loop.run_in_executor(None, partial(ldap_client.rebind)) 24 | 25 | result = await event_loop.run_in_executor( 26 | None, 27 | ldap_client.extend.standard.who_am_i, 28 | ) 29 | 30 | assert result is None 31 | 32 | 33 | @pytest.mark.asyncio 34 | @pytest.mark.usefixtures("setup_session") 35 | async def test_bind_whoami( 36 | event_loop: asyncio.BaseEventLoop, 37 | ldap_client: Connection, 38 | creds: TestCreds, 39 | ) -> None: 40 | """Test anonymous pwd change.""" 41 | await event_loop.run_in_executor( 42 | None, 43 | partial(ldap_client.rebind, user=creds.un, password=creds.pw), 44 | ) 45 | 46 | result = await event_loop.run_in_executor( 47 | None, 48 | ldap_client.extend.standard.who_am_i, 49 | ) 50 | 51 | assert result == "u:user0" 52 | -------------------------------------------------------------------------------- /tests/test_ldap/test_passwd_change.py: -------------------------------------------------------------------------------- 1 | """Test password change. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import asyncio 8 | from functools import partial 9 | 10 | import pytest 11 | from ldap3 import Connection 12 | from sqlalchemy.ext.asyncio import AsyncSession 13 | 14 | from ldap_protocol.utils.queries import get_user 15 | from security import verify_password 16 | from tests.conftest import TestCreds 17 | 18 | 19 | @pytest.mark.asyncio 20 | @pytest.mark.usefixtures("setup_session") 21 | @pytest.mark.usefixtures("_force_override_tls") 22 | async def test_anonymous_pwd_change( 23 | session: AsyncSession, 24 | event_loop: asyncio.BaseEventLoop, 25 | ldap_client: Connection, 26 | creds: TestCreds, 27 | ) -> None: 28 | """Test anonymous pwd change.""" 29 | user_dn = "cn=user0,ou=users,dc=md,dc=test" 30 | password = creds.pw 31 | new_test_password = "Password123" # noqa 32 | await event_loop.run_in_executor(None, ldap_client.bind) 33 | 34 | result = await event_loop.run_in_executor( 35 | None, 36 | partial( 37 | ldap_client.extend.standard.modify_password, 38 | user_dn, 39 | old_password=password, 40 | new_password=new_test_password, 41 | ), 42 | ) 43 | 44 | assert result 45 | 46 | user = await get_user(session, user_dn) 47 | assert user 48 | assert user.password 49 | 50 | assert verify_password(new_test_password, user.password) 51 | 52 | await event_loop.run_in_executor(None, ldap_client.unbind) 53 | 54 | 55 | @pytest.mark.asyncio 56 | @pytest.mark.usefixtures("setup_session") 57 | @pytest.mark.usefixtures("_force_override_tls") 58 | async def test_bind_pwd_change( 59 | session: AsyncSession, 60 | event_loop: asyncio.BaseEventLoop, 61 | ldap_client: Connection, 62 | creds: TestCreds, 63 | ) -> None: 64 | """Test anonymous pwd change.""" 65 | user_dn = "cn=user0,ou=users,dc=md,dc=test" 66 | password = creds.pw 67 | new_test_password = "Password123" # noqa 68 | await event_loop.run_in_executor( 69 | None, 70 | partial(ldap_client.rebind, user=user_dn, password=password), 71 | ) 72 | 73 | result = await event_loop.run_in_executor( 74 | None, 75 | partial( 76 | ldap_client.extend.standard.modify_password, 77 | old_password=password, 78 | new_password=new_test_password, 79 | ), 80 | ) 81 | 82 | assert result 83 | 84 | user = await get_user(session, user_dn) 85 | 86 | assert user 87 | assert user.password 88 | 89 | assert verify_password(new_test_password, user.password) 90 | 91 | await event_loop.run_in_executor(None, ldap_client.unbind) 92 | -------------------------------------------------------------------------------- /tests/test_ldap/test_pool_client_handler.py: -------------------------------------------------------------------------------- 1 | """Test policy api. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | from ipaddress import IPv4Address, IPv4Network 8 | 9 | import pytest 10 | from sqlalchemy.ext.asyncio import AsyncSession 11 | 12 | from ldap_protocol.dialogue import LDAPSession 13 | from ldap_protocol.policies.network_policy import is_user_group_valid 14 | from ldap_protocol.utils.queries import get_group, get_user 15 | from models import NetworkPolicy 16 | 17 | 18 | @pytest.mark.asyncio 19 | @pytest.mark.usefixtures("setup_session") 20 | @pytest.mark.usefixtures("session") 21 | async def test_check_policy( 22 | ldap_session: LDAPSession, 23 | session: AsyncSession, 24 | ) -> None: 25 | """Check policy.""" 26 | policy = await ldap_session._get_policy(IPv4Address("127.0.0.1"), session) 27 | assert policy 28 | assert policy.netmasks == [IPv4Network("0.0.0.0/0")] 29 | 30 | 31 | @pytest.mark.asyncio 32 | async def test_specific_policy_ok( 33 | ldap_session: LDAPSession, 34 | session: AsyncSession, 35 | ) -> None: 36 | """Test specific ip.""" 37 | session.add( 38 | NetworkPolicy( 39 | name="Local policy", 40 | netmasks=[IPv4Network("127.100.10.5/32")], 41 | raw=["127.100.10.5/32"], 42 | enabled=True, 43 | priority=1, 44 | ) 45 | ) 46 | await session.commit() 47 | policy = await ldap_session._get_policy( 48 | ip=IPv4Address("127.100.10.5"), 49 | session=session, 50 | ) 51 | assert policy 52 | assert policy.netmasks == [IPv4Network("127.100.10.5/32")] 53 | assert not await ldap_session._get_policy( 54 | ip=IPv4Address("127.100.10.4"), 55 | session=session, 56 | ) 57 | 58 | 59 | @pytest.mark.asyncio 60 | @pytest.mark.usefixtures("setup_session") 61 | @pytest.mark.usefixtures("settings") 62 | async def test_check_policy_group( 63 | ldap_session: LDAPSession, 64 | session: AsyncSession, 65 | ) -> None: 66 | """Check policy.""" 67 | user = await get_user(session, "user0") 68 | assert user 69 | 70 | policy = await ldap_session._get_policy(IPv4Address("127.0.0.1"), session) 71 | assert policy 72 | 73 | assert await is_user_group_valid(user, policy, session) 74 | 75 | group_dir = await get_group( 76 | dn="cn=domain admins,cn=groups,dc=md,dc=test", 77 | session=session, 78 | ) 79 | 80 | policy.groups.append(group_dir.group) 81 | await session.commit() 82 | 83 | assert await is_user_group_valid(user, policy, session) 84 | -------------------------------------------------------------------------------- /tests/test_ldap/test_util/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MultiDirectoryLab/MultiDirectory/6dd5f5ad251cdfaea9080442c8f20b0d397834d4/tests/test_ldap/test_util/__init__.py -------------------------------------------------------------------------------- /tests/test_ldap/test_util/test_whoami.py: -------------------------------------------------------------------------------- 1 | """Test whoami with ldaputil. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import asyncio 8 | 9 | import pytest 10 | 11 | from config import Settings 12 | from tests.conftest import TestCreds 13 | 14 | 15 | @pytest.mark.asyncio 16 | @pytest.mark.usefixtures("setup_session") 17 | async def test_anonymous_whoami(settings: Settings) -> None: 18 | """Test anonymous whoami.""" 19 | proc = await asyncio.create_subprocess_exec( 20 | "ldapwhoami", 21 | "-x", 22 | "-H", 23 | f"ldap://{settings.HOST}:{settings.PORT}", 24 | ) 25 | 26 | assert await proc.wait() == 0 27 | 28 | 29 | @pytest.mark.asyncio 30 | @pytest.mark.usefixtures("setup_session") 31 | async def test_binded_whoami(settings: Settings, creds: TestCreds) -> None: 32 | """Test anonymous whoami.""" 33 | proc = await asyncio.create_subprocess_exec( 34 | "ldapwhoami", 35 | "-x", 36 | "-H", 37 | f"ldap://{settings.HOST}:{settings.PORT}", 38 | "-D", 39 | creds.un, 40 | "-w", 41 | creds.pw, 42 | ) 43 | 44 | assert await proc.wait() == 0 45 | -------------------------------------------------------------------------------- /tests/test_shedule.py: -------------------------------------------------------------------------------- 1 | """Test shedule functions. 2 | 3 | Copyright (c) 2024 MultiFactor 4 | License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE 5 | """ 6 | 7 | import pytest 8 | from sqlalchemy.ext.asyncio import AsyncSession 9 | 10 | from config import Settings 11 | from extra.scripts.check_ldap_principal import check_ldap_principal 12 | from extra.scripts.principal_block_user_sync import principal_block_sync 13 | from extra.scripts.uac_sync import disable_accounts 14 | from extra.scripts.update_krb5_config import update_krb5_config 15 | from ldap_protocol.kerberos import AbstractKadmin 16 | 17 | 18 | @pytest.mark.asyncio 19 | @pytest.mark.usefixtures("session") 20 | @pytest.mark.usefixtures("setup_session") 21 | async def test_disable_accounts( 22 | session: AsyncSession, 23 | settings: Settings, 24 | kadmin: AbstractKadmin, 25 | ) -> None: 26 | """Test disable_accounts.""" 27 | await disable_accounts( 28 | session=session, 29 | kadmin=kadmin, 30 | settings=settings, 31 | ) 32 | 33 | 34 | @pytest.mark.asyncio 35 | @pytest.mark.usefixtures("session") 36 | @pytest.mark.usefixtures("setup_session") 37 | async def test_principal_block_sync( 38 | session: AsyncSession, 39 | settings: Settings, 40 | ) -> None: 41 | """Test principal_block_sync.""" 42 | await principal_block_sync( 43 | session=session, 44 | settings=settings, 45 | ) 46 | 47 | 48 | @pytest.mark.asyncio 49 | @pytest.mark.usefixtures("session") 50 | @pytest.mark.usefixtures("setup_session") 51 | async def test_check_ldap_principal( 52 | session: AsyncSession, 53 | settings: Settings, 54 | kadmin: AbstractKadmin, 55 | ) -> None: 56 | """Test check_ldap_principal.""" 57 | await check_ldap_principal( 58 | session=session, 59 | kadmin=kadmin, 60 | settings=settings, 61 | ) 62 | 63 | 64 | @pytest.mark.asyncio 65 | @pytest.mark.usefixtures("session") 66 | @pytest.mark.usefixtures("setup_session") 67 | async def test_update_krb5_config( 68 | session: AsyncSession, 69 | settings: Settings, 70 | kadmin: AbstractKadmin, 71 | ) -> None: 72 | """Test update_krb5_config.""" 73 | await update_krb5_config( 74 | session=session, 75 | kadmin=kadmin, 76 | settings=settings, 77 | ) 78 | -------------------------------------------------------------------------------- /traefik.yml: -------------------------------------------------------------------------------- 1 | log: 2 | level: INFO 3 | 4 | api: 5 | insecure: true 6 | 7 | ping: 8 | entryPoint: "ping" 9 | 10 | entryPoints: 11 | ping: 12 | address: ":8800" 13 | web: 14 | address: ":80" 15 | ldap: 16 | address: ":389" 17 | proxyProtocol: 18 | insecure: true 19 | ldaps: 20 | address: ":636" 21 | proxyProtocol: 22 | insecure: true 23 | kadmind: 24 | address: ":749" 25 | kpasswd: 26 | address: ":464" 27 | bind_dns: 28 | address: ":53" 29 | 30 | tls: 31 | stores: 32 | default: 33 | defaultCertificate: 34 | certFile: /certs/cert.pem 35 | keyFile: /certs/privkey.pem 36 | 37 | options: 38 | default: 39 | maxVersion: VersionTLS12 40 | maxtls12: 41 | maxVersion: VersionTLS12 42 | 43 | providers: 44 | file: 45 | filename: /traefik.yml 46 | watch: true 47 | docker: 48 | endpoint: "unix:///var/run/docker.sock" 49 | exposedByDefault: false 50 | --------------------------------------------------------------------------------