├── .github ├── actions │ └── setup-project │ │ └── action.yml └── workflows │ ├── ci.yml │ └── docker-publish.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.rst ├── Dockerfile ├── LICENSE ├── README.rst ├── compose.yml ├── config ├── dmarc-metrics-exporter.grafana.sample.json ├── dmarc-metrics-exporter.grafana.sample.png ├── dmarc-metrics-exporter.sample.json └── dmarc-metrics-exporter.service ├── dmarc-aggregate-report.xsd ├── dmarc_metrics_exporter ├── __init__.py ├── __main__.py ├── app.py ├── deserialization.py ├── dmarc_event.py ├── dmarc_metrics.py ├── expiring_set.py ├── imap_client.py ├── imap_parser.py ├── imap_queue.py ├── logging.py ├── metrics_persister.py ├── model │ ├── __init__.py │ ├── dmarc_aggregate_report.py │ └── tests │ │ ├── __init__.py │ │ ├── sample_data.py │ │ └── test_deserialization.py ├── prometheus_exporter.py └── tests │ ├── __init__.py │ ├── conftest.py │ ├── sample_emails.py │ ├── test_app.py │ ├── test_deserialization.py │ ├── test_dmarc_metrics.py │ ├── test_e2e.py │ ├── test_expiring_set.py │ ├── test_imap_client.py │ ├── test_imap_parser.py │ ├── test_imap_queue.py │ ├── test_logging.py │ ├── test_metrics_persister.py │ └── test_prometheus_exporter.py ├── generate-dataclasses-from-xsd.sh ├── mypy.ini ├── poetry.lock ├── pyproject.toml └── roles ├── README.rst └── dmarc_metrics_exporter ├── defaults └── main.yml ├── handlers └── main.yml ├── meta └── main.yml ├── tasks └── main.yml └── templates ├── dmarc-metrics-exporter.json.j2 └── dmarc-metrics-exporter.service.j2 /.github/actions/setup-project/action.yml: -------------------------------------------------------------------------------- 1 | name: Setup project 2 | description: Setup project with poetry 3 | runs: 4 | using: composite 5 | steps: 6 | - name: Install poetry 7 | run: pip install poetry 8 | shell: bash 9 | 10 | - name: Install dependencies 11 | run: poetry install 12 | shell: bash 13 | env: 14 | POETRY_VIRTUALENVS_IN_PROJECT: true 15 | 
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI and release pipeline 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: ["main", "develop"] 7 | tags: ["*"] 8 | 9 | jobs: 10 | static-analysis: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | check: 15 | - name: Check code-formatting 16 | run: poetry run ruff format --check . 17 | - name: Lint 18 | run: poetry run ruff check 19 | - name: Check static typing 20 | run: poetry run mypy dmarc_metrics_exporter 21 | steps: 22 | - uses: actions/checkout@v4 23 | - run: git fetch --no-tags --prune --depth=1 origin +refs/heads/main:refs/remotes/origin/main 24 | 25 | - name: Set up Python 26 | uses: actions/setup-python@v5.0.0 27 | with: 28 | python-version: '3.13' 29 | 30 | - uses: actions/cache@v4.0.0 31 | with: 32 | path: ~/.cache/pip 33 | key: ${{ runner.os }}-pip 34 | - uses: actions/cache@v4.0.0 35 | with: 36 | path: .venv 37 | key: ${{ runner.os }}-py3.13-venv-${{ hashFiles('pyproject.toml') }} 38 | restore-keys: | 39 | ${{ runner.os }}-py3.13-venv- 40 | - uses: ./.github/actions/setup-project 41 | 42 | - name: ${{ matrix.check.name }} 43 | run: ${{ matrix.check.run }} 44 | 45 | test: 46 | runs-on: ubuntu-latest 47 | strategy: 48 | matrix: 49 | python-version: ['3.9', '3.10', '3.11', '3.12', '3.13'] 50 | 51 | steps: 52 | - uses: actions/checkout@v4 53 | 54 | - name: Set up Python ${{ matrix.python-version }} 55 | uses: actions/setup-python@v5.0.0 56 | with: 57 | python-version: ${{ matrix.python-version }} 58 | 59 | - uses: actions/cache@v4.0.0 60 | with: 61 | path: ~/.cache/pip 62 | key: ${{ runner.os }}-pip 63 | - uses: actions/cache@v4.0.0 64 | with: 65 | path: .venv 66 | key: ${{ runner.os }}-py${{ matrix.python-version }}-venv-${{ hashFiles('pyproject.toml') }} 67 | restore-keys: | 68 | ${{ runner.os }}-py${{ matrix.python-version }}-venv- 69 | - 
uses: ./.github/actions/setup-project 70 | 71 | - name: Start required services (Greenmail) 72 | run: docker compose up -d 73 | - name: Test and measure coverage with pytest 74 | run: poetry run pytest --verbose --cov=dmarc_metrics_exporter --cov-report=xml 75 | - uses: codecov/codecov-action@v4 76 | env: 77 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 78 | 79 | release: 80 | runs-on: ubuntu-latest 81 | needs: [static-analysis, test] 82 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') 83 | steps: 84 | - uses: actions/checkout@v4 85 | 86 | - name: Set up Python 87 | uses: actions/setup-python@v5.0.0 88 | with: 89 | python-version: '3.13' 90 | 91 | - uses: actions/cache@v4.0.0 92 | with: 93 | path: ~/.cache/pip 94 | key: ${{ runner.os }}-pip 95 | - uses: actions/cache@v4.0.0 96 | with: 97 | path: .venv 98 | key: ${{ runner.os }}-py3.13-venv-${{ hashFiles('pyproject.toml') }} 99 | restore-keys: | 100 | ${{ runner.os }}-py3.13-venv- 101 | - uses: ./.github/actions/setup-project 102 | 103 | - name: Publish to PyPI 104 | run: poetry publish --build 105 | env: 106 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} 107 | - name: Wait for release to become available 108 | run: sleep 10 109 | 110 | - name: Set version 111 | id: version 112 | run: echo "version=${GITHUB_REF#refs/*/v}" >> $GITHUB_OUTPUT 113 | 114 | - name: Extract changelog 115 | id: changelog 116 | run: sed -E -n '/^\[${{ steps.version.outputs.version }}\]/,/^\[[0-9\.]+\]/{/^\[[0-9\.]+\]|^-+$/!p;}' CHANGELOG.rst > release-body.rst 117 | 118 | - uses: docker://pandoc/core:3 119 | with: 120 | args: --wrap none -f rst -t gfm --output=release-body.md release-body.rst 121 | 122 | - name: Check if prerelease 123 | id: check-prerelease 124 | uses: ./.github/actions/check-prerelease 125 | with: 126 | version: ${{ steps.version.outputs.version }} 127 | 128 | - name: Create GitHub release 129 | uses: softprops/action-gh-release@v2 130 | with: 131 | body_path: release-body.md 132 | tag_name: 
v${{ steps.version.outputs.version }} 133 | prerelease: ${{ steps.check-prerelease.outputs.prerelease }} 134 | env: 135 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 136 | 137 | docker-image: 138 | needs: [release] 139 | uses: "jgosmann/dmarc-metrics-exporter/.github/workflows/docker-publish.yml@main" 140 | with: 141 | version: ${{ github.ref }} 142 | secrets: 143 | DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} 144 | DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} 145 | -------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: Docker 2 | 3 | # This workflow uses actions that are not certified by GitHub. 4 | # They are provided by a third-party and are governed by 5 | # separate terms of service, privacy policy, and support 6 | # documentation. 7 | 8 | on: 9 | workflow_call: 10 | inputs: 11 | version: 12 | description: 'Git reference to extract version from to build a Docker image for.' 13 | required: true 14 | type: string 15 | secrets: 16 | DOCKERHUB_USERNAME: 17 | required: true 18 | DOCKERHUB_TOKEN: 19 | required: true 20 | workflow_dispatch: 21 | inputs: 22 | version: 23 | description: 'Published version on PyPI to build a Docker image for.' 
24 | required: true 25 | 26 | env: 27 | IMAGE_NAME: jgosmann/dmarc-metrics-exporter 28 | 29 | 30 | jobs: 31 | build: 32 | 33 | runs-on: ubuntu-latest 34 | permissions: 35 | contents: read 36 | packages: write 37 | 38 | steps: 39 | - name: Set version from ref 40 | id: version 41 | run: REF=${{ inputs.version }}; TAG=${REF#refs/*/}; echo "version=${TAG#v}" >> $GITHUB_OUTPUT 42 | 43 | - name: Checkout repository 44 | uses: actions/checkout@v4 45 | 46 | - name: Set up QEMU 47 | uses: docker/setup-qemu-action@v3 48 | 49 | - name: Set up Docker Buildx 50 | uses: docker/setup-buildx-action@v3 51 | 52 | # https://github.com/docker/login-action 53 | - name: Log into registry ${{ env.REGISTRY }} 54 | if: github.event_name != 'pull_request' 55 | uses: docker/login-action@v3 56 | with: 57 | username: ${{ secrets.DOCKERHUB_USERNAME }} 58 | password: ${{ secrets.DOCKERHUB_TOKEN }} 59 | 60 | # Extract metadata (tags, labels) for Docker 61 | # https://github.com/docker/metadata-action 62 | - name: Extract Docker metadata 63 | id: meta 64 | uses: docker/metadata-action@v5 65 | with: 66 | images: ${{ env.IMAGE_NAME }} 67 | 68 | - name: Get tags for Docker image 69 | id: image-tags 70 | shell: bash 71 | run: | 72 | echo 'TAGS<> "$GITHUB_OUTPUT" 73 | curl --header "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" --header "X-GitHub-Api-Version: 2022-11-28" \ 74 | https://api.github.com/repos/jgosmann/dmarc-metrics-exporter/releases \ 75 | | jq --raw-output '.[]["tag_name"] | sub("^v"; "")' \ 76 | | docker run --rm jgosmann/gen-semver-tags:0.1 ${{ steps.version.outputs.version }} \ 77 | | awk '{print "${{ env.IMAGE_NAME }}:"$0}' \ 78 | >> "$GITHUB_OUTPUT" 79 | echo 'EOF' >> "$GITHUB_OUTPUT" 80 | 81 | # Build and push Docker image with Buildx (don't push on PR) 82 | # https://github.com/docker/build-push-action 83 | - name: Build and push Docker image 84 | uses: docker/build-push-action@v5 85 | with: 86 | context: . 
87 | push: ${{ github.event_name != 'pull_request' }} 88 | tags: ${{ steps.image-tags.outputs.TAGS }} 89 | platforms: linux/amd64,linux/arm64 90 | labels: ${{ steps.meta.outputs.labels }} 91 | build-args: "version=${{ steps.version.outputs.version }}" 92 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .pytest_cache 3 | .vscode 4 | .*.swp 5 | .*.swo 6 | *.egg-info 7 | dist 8 | /.talismanrc 9 | /.idea 10 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v5.0.0 6 | hooks: 7 | - id: trailing-whitespace 8 | - id: end-of-file-fixer 9 | - id: check-yaml 10 | - id: check-added-large-files 11 | - repo: https://github.com/astral-sh/ruff-pre-commit 12 | # Ruff version. 13 | rev: v0.6.9 14 | hooks: 15 | # Run the linter. 16 | - id: ruff 17 | # Run the formatter. 18 | - id: ruff-format 19 | - repo: https://github.com/pre-commit/mirrors-mypy 20 | rev: v1.12.0 21 | hooks: 22 | - id: mypy 23 | additional_dependencies: ["types-requests==2.32.0.20240914"] 24 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | All notable changes to this project will be documented in this file. 5 | 6 | The format is based on `Keep a Changelog `_, 7 | and this project adheres to `Semantic Versioning `_. 
8 | 9 | [1.2.0] - 2024-10-15 10 | -------------------- 11 | 12 | Added 13 | ^^^^^ 14 | 15 | * Official support for Python 3.13 16 | 17 | Changed 18 | ^^^^^^^ 19 | 20 | * Drop support for Python 3.8. 21 | 22 | 23 | [1.1.1] - 2024-10-10 24 | -------------------- 25 | 26 | Fixed 27 | ^^^^^ 28 | 29 | * Fix misspelled metric name (use ``dmarc_invalid_reports_total`` instead of 30 | ``dmaric_invalid_reports_total``). 31 | (`#50 `_) 32 | 33 | 34 | [1.1.0] - 2024-08-20 35 | -------------------- 36 | 37 | Added 38 | ^^^^^ 39 | 40 | * Handle reports with MIME type ``application/octet-stream``. 41 | (`#48 `_) 42 | 43 | 44 | [1.0.0] - 2024-02-17 45 | -------------------- 46 | 47 | Added 48 | ^^^^^ 49 | 50 | * Support for structured JSON logging. 51 | (`#35 `_) 52 | * Docker images will be published for ``linux/amd64`` and ``linux/arm64`` 53 | platforms. 54 | (`#42 `_) 55 | * Latest stable release will be tagged with ``latest`` tag on Docker Hub. 56 | 57 | Changed 58 | ^^^^^^^ 59 | 60 | * Logs are colored by default. 61 | * It is no longer possible to override the following keys in the logging 62 | configuration: ``version``, ``incremental``, ``formatters``. 63 | * ``root.handlers`` will be set to ``['default']`` in the logging configuration 64 | if not provided explicitly. 65 | * The ``--debug`` argument will always take precedence for the root logger, no 66 | matter what might be configured in the logging configuration. 67 | 68 | 69 | [0.11.0] - 2024-02-11 70 | --------------------- 71 | 72 | Added 73 | ^^^^^ 74 | 75 | * `dmarc_invalid_reports_total` metric with a count of emails from which no 76 | DMARC report could be parsed. 77 | 78 | Fixed 79 | ^^^^^ 80 | 81 | * Correctly calculate compliant number and percentage in sample dashboard 82 | (use last non null value instead of mean over range). 83 | (`#37 `_) 84 | 85 | 86 | [0.10.1] - 2024-01-07 87 | --------------------- 88 | 89 | Fixed 90 | ^^^^^ 91 | 92 | * Add missing Python 3.12 classifier to package. 
93 | 94 | 95 | [0.10.0] - 2024-01-07 96 | --------------------- 97 | 98 | Added 99 | ^^^^^ 100 | 101 | * Official support for Python 3.12. 102 | 103 | Fixed 104 | ^^^^^ 105 | 106 | * Prevent deadlock if IMAP connection terminates abnormally. 107 | 108 | 109 | [0.9.4] - 2023-08-01 110 | -------------------- 111 | 112 | This release exclusively updates dependencies in use. 113 | 114 | 115 | [0.9.3] - 2023-07-25 116 | -------------------- 117 | 118 | Fixed 119 | ^^^^^ 120 | 121 | * Gracefully handle unknown properties within report XML. In particular, this 122 | should allow to process reports sent by Google again, which was not working 123 | anymore starting June 2023. 124 | 125 | 126 | [0.9.2] - 2023-06-30 127 | -------------------- 128 | 129 | Fixed 130 | ^^^^^ 131 | 132 | * Fix issue with Microsoft Exchange not handling string length being split 133 | across multiple packages. 134 | (`#36 `_) 135 | 136 | 137 | [0.9.1] - 2023-02-09 138 | -------------------- 139 | 140 | Fixed 141 | ^^^^^ 142 | 143 | * Fix problems with large emails that would either cause the whole program to 144 | crash or prevent the processing of any new emails. This was solved by 145 | updating the bite-parser dependency to at least version 0.2.2. 146 | (`#34 `_, 147 | `bite-parser v0.2.2 `_) 148 | 149 | 150 | [0.9.0] - 2023-01-12 151 | -------------------- 152 | 153 | Changed 154 | ^^^^^^^ 155 | 156 | * Update bite-parser dependency to version 0.2.1 to get better error output. 157 | * Drop support for Python 3.7. 158 | 159 | 160 | [0.8.1] - 2023-01-05 161 | -------------------- 162 | 163 | Fixed 164 | ^^^^^ 165 | 166 | * With some IMAP servers emails were not correctly processed if the UID and 167 | RFC822 were returned in reverse order in the response. 168 | (`#33 `_) 169 | 170 | 171 | [0.8.0] - 2022-12-11 172 | -------------------- 173 | 174 | Added 175 | ^^^^^ 176 | 177 | * More logging when emails are not processed and more debug logging on closing 178 | the IMAP connection. 
179 | 180 | Changed 181 | ^^^^^^^ 182 | 183 | * The systemd unit provided as part of the Ansible role has been hardened. 184 | 185 | Fixed 186 | ^^^^^ 187 | 188 | * Correctly handle logout timeout when closing IMAP connection. Previously, 189 | a timeout during logout would have aborted the process of closing the 190 | connection. 191 | 192 | 193 | [0.7.0] - 2022-11-06 194 | -------------------- 195 | 196 | Added 197 | ^^^^^ 198 | 199 | * Officially declare Python 3.11 support. 200 | * Added the ``dmarc_metrics_exporter_build_info`` metric which contains version 201 | information in its labels. 202 | 203 | Changed 204 | ^^^^^^^ 205 | 206 | * Renamed Ansible role ``dmarc-metrics-exporter`` to ``dmarc_metrics_exporter`` 207 | [as ``-`` is no longer allowed in role 208 | names](https://galaxy.ansible.com/docs/contributing/creating_role.html#role-names). 209 | * ``dmarc_metrics_exporter_virtualenv_path`` variable has been added 210 | * The Ansible role no longer creates a system user. Instead the systemd 211 | "DynamicUser" feature is used. 212 | * Add the ``imap_use_ssl`` and ``imap_verify_certificate`` variables to the 213 | Ansible role. 214 | 215 | 216 | [0.6.2] - 2022-09-24 217 | -------------------- 218 | 219 | Fixed 220 | ^^^^^ 221 | 222 | * More robust handling of IMAP fetch responses including unknown fields. 223 | (`#29 `_) 224 | 225 | 226 | [0.6.1] - 2022-07-17 227 | -------------------- 228 | 229 | Fixed 230 | ^^^^^ 231 | 232 | * The polling of the IMAP mailbox was broken since presumably version 0.5 and 233 | should be fixed now. 234 | 235 | 236 | [0.6.0] - 2022-06-11 237 | -------------------- 238 | 239 | Added 240 | ^^^^^ 241 | 242 | * ``dmarc-metrics-exporter`` script to allow invocation without the `python -m` 243 | syntax. 244 | 245 | Fixed 246 | ^^^^^ 247 | 248 | * Allow installation with Python 3.10. 249 | * Use ``poetry-core`` as build system to allow installations with fetching fewer 250 | Poetry dependencies. 
251 | 252 | 253 | [0.5.1] - 2022-02-09 254 | -------------------- 255 | 256 | Added 257 | ^^^^^ 258 | 259 | * Officially declare Python 3.10 support. 260 | 261 | 262 | [0.5.0] - 2022-02-09 263 | -------------------- 264 | 265 | Added 266 | ^^^^^ 267 | 268 | * Possibility to configure log output with ``logging`` key in the configuration 269 | file. `See logging.config documentation for details. 270 | `_ 271 | * ``--debug`` flag to quickly enable debug log output. 272 | 273 | 274 | Fixed 275 | ^^^^^ 276 | 277 | * Parse IMAP fetch responses properly to support a wider range of IMAP servers. 278 | (`#17 `_) 279 | 280 | 281 | [0.4.3] - 2022-01-25 282 | -------------------- 283 | 284 | Fixed 285 | ^^^^^ 286 | 287 | * Improve compatibility with different IMAP servers. 288 | (`#17 `_) 289 | 290 | 291 | [0.4.2] - 2022-01-01 292 | -------------------- 293 | 294 | Fixed 295 | ^^^^^ 296 | 297 | * Make detection of required folders more robust. Fixes issues with Office365. 298 | (`#15 `_, 299 | `#16 `_) 300 | 301 | 302 | [0.4.1] - 2021-11-13 303 | -------------------- 304 | 305 | Fixed 306 | ^^^^^ 307 | 308 | * Fixed the changelog markup so that it renders correctly. 309 | * Fixes of the deployment pipelines. 310 | 311 | 312 | [0.4.0] - 2021-11-13 313 | -------------------- 314 | 315 | Changed 316 | ^^^^^^^ 317 | 318 | * The ``metrics_db`` configuration option has been replaced with the 319 | ``storage_path`` configuration option. To migrate your existing setup: 320 | 321 | 1. Ensure that your ``metrics_db`` file is called ``metrics.db``. 322 | 2. Ensure that the directory containing the `metrics.db` file is writable by 323 | the dmarc-metrics-exporter. 324 | 3. Remove the ``metrics_db`` setting from the configuration file. 325 | 4. Add a new ``storage_path`` setting pointing to the directory containing the 326 | ``metrics.db`` file. 327 | 328 | * Disabled the access log. 
It clutters the log output with barely relevant 329 | messages (there is only a single page being served and it will be polled 330 | regularly by Prometheus). 331 | 332 | Added 333 | ^^^^^ 334 | 335 | * Support for reports sent in gzip format. 336 | * A log message will be produced for emails from which no report could be 337 | extracted. 338 | * Duplicate reports will now only be counted once. The duration for which report 339 | IDs are stored to detect duplicates can be configured with the 340 | ``deduplication_max_seconds`` configuration setting. The default is one week. 341 | * Added a Dockerfile to the repository to build a Docker image with 342 | dmarc-metrics-exporter. `Images for official releases will be published on 343 | Docker Hub. `_ 344 | * Support for Python 3.9. 345 | 346 | 347 | [0.3.0] - 2021-03-01 348 | -------------------- 349 | 350 | Changed 351 | ^^^^^^^ 352 | 353 | * Change default port to 9797 which does not collide with other Prometheus 354 | exporters. 355 | 356 | 357 | [0.2.3] - 2021-01-11 358 | -------------------- 359 | 360 | Fixed 361 | ^^^^^ 362 | 363 | * Change the repository link to the correct repository (e.g. on PyPI) 364 | 365 | 366 | [0.2.2] - 2020-12-31 367 | -------------------- 368 | 369 | Added 370 | ^^^^^ 371 | 372 | * Ansible role for deployment. 373 | 374 | 375 | [0.2.1] - 2020-12-31 376 | -------------------- 377 | 378 | Initial release. 
379 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM --platform=linux/amd64 python:3.13-slim AS builder 2 | ARG version 3 | 4 | # install python package in venv 5 | RUN python3 -m venv venv && \ 6 | venv/bin/pip3 --disable-pip-version-check install dmarc-metrics-exporter==${version} 7 | 8 | FROM python:3.13-alpine AS runner 9 | 10 | # adduser and directories 11 | RUN addgroup --system --gid 1000 dmarc-metrics && \ 12 | adduser --system --uid 1000 dmarc-metrics 13 | USER dmarc-metrics 14 | 15 | # copy pre-installed venv 16 | COPY --from=builder /venv /venv 17 | 18 | # configuration file will be linked into container on runtime 19 | # -v {your path}/dmarc-metrics-exporter.json:/etc/dmarc-metrics-exporter.json 20 | 21 | EXPOSE 9797 22 | 23 | ENTRYPOINT ["/venv/bin/python3", "-m", "dmarc_metrics_exporter"] 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020-2021 Jan Gosmann 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://github.com/jgosmann/dmarc-metrics-exporter/actions/workflows/ci.yml/badge.svg 2 | :target: https://github.com/jgosmann/dmarc-metrics-exporter/actions/workflows/ci.yml 3 | :alt: CI and release pipeline 4 | .. image:: https://codecov.io/gh/jgosmann/dmarc-metrics-exporter/branch/main/graph/badge.svg?token=O4M05YWNQK 5 | :target: https://codecov.io/gh/jgosmann/dmarc-metrics-exporter 6 | :alt: Codecov coverage 7 | .. image:: https://img.shields.io/pypi/v/dmarc-metrics-exporter 8 | :target: https://pypi.org/project/dmarc-metrics-exporter/ 9 | :alt: PyPI 10 | .. image:: https://img.shields.io/pypi/pyversions/dmarc-metrics-exporter 11 | :target: https://pypi.org/project/dmarc-metrics-exporter/ 12 | :alt: PyPI - Python Version 13 | .. image:: https://img.shields.io/pypi/l/dmarc-metrics-exporter 14 | :target: https://pypi.org/project/dmarc-metrics-exporter/ 15 | :alt: PyPI - License 16 | 17 | dmarc-metrics-exporter 18 | ======================= 19 | 20 | Export metrics derived from DMARC aggregate reports to Prometheus. 21 | This exporter regularly polls 22 | for new aggregate report emails 23 | via IMAP. 24 | The following metrics will be collected 25 | and exposed at an HTTP endpoint 26 | for Prometheus: 27 | 28 | * ``dmarc_total``: Total number of reported messages. 
29 | * ``dmarc_compliant_total``: Total number of DMARC compliant messages. 30 | * ``dmarc_quarantine_total``: Total number of quarantined messages. 31 | * ``dmarc_reject_total``: Total number of rejected messages. 32 | * ``dmarc_spf_aligned_total``: Total number of SPF aligned messages. 33 | * ``dmarc_spf_pass_total``: Total number of messages with raw SPF pass. 34 | * ``dmarc_dkim_aligned_total``: Total number of DKIM aligned messages. 35 | * ``dmarc_dkim_pass_total``: Total number of messages with raw DKIM pass. 36 | 37 | Each of these metrics is subdivided by the following labels: 38 | 39 | * ``reporter``: Domain from which a DMARC aggregate report originated. 40 | * ``from_domain``: Domain from which the evaluated email originated. 41 | * ``dkim_domain``: Domain the DKIM signature is for. 42 | * ``spf_domain``: Domain used for the SPF check. 43 | 44 | In addition, there is a ``dmarc_invalid_reports_total`` metric with a count of 45 | DMARC report emails from which no report could be parsed. It is subdivided by 46 | a single ``from_email`` label. 47 | 48 | 49 | Installation 50 | ------------ 51 | 52 | This describes the manual setup of dmarc-metrics-exporter. 53 | An Ansible role for automated deployment is provided in ``roles``. 54 | Further instructions for Ansible are given in the readme file 55 | provided in that directory. 56 | 57 | It is best to run dmarc-metrics-exporter under a separate system user account. 58 | Create one for example with 59 | 60 | .. code-block:: bash 61 | 62 | adduser --system --group dmarc-metrics 63 | 64 | Then you can install dmarc-metrics-exporter with ``pip`` from PyPI for that 65 | user: 66 | 67 | .. code-block:: bash 68 | 69 | sudo -u dmarc-metrics pip3 install dmarc-metrics-exporter 70 | 71 | You will need a location to store the ``metrics.db`` that is writable by that 72 | user, for example: 73 | 74 | .. 
code-block:: bash 75 | 76 | mkdir /var/lib/dmarc-metrics-exporter 77 | chown dmarc-metrics:dmarc-metrics /var/lib/dmarc-metrics-exporter 78 | 79 | 80 | Configuration 81 | ------------- 82 | 83 | To run dmarc-metrics-exporter a configuration file in JSON format is required. 84 | The default location is ``/etc/dmarc-metrics-exporter.json``. 85 | 86 | Because the configuration file will contain the IMAP password, 87 | make sure to ensure proper permissions on it, 88 | for example: 89 | 90 | .. code-block:: bash 91 | 92 | chown root:dmarc-metrics /etc/dmarc-metrics-exporter.json 93 | chmod 640 /etc/dmarc-metrics-exporter.json 94 | 95 | An example configuration file is provided in this repository in 96 | ``config/dmarc-metrics-exporter.sample.json``. 97 | 98 | The following configuration options are available: 99 | 100 | * ``listen_addr`` (string, default ``"127.0.0.1"``): Listen address for the HTTP endpoint. Use ``"0.0.0.0"`` if running in a dockerized environment. 101 | * ``port`` (number, default ``9797``): Port to listen on for the HTTP endpoint. 102 | * ``imap`` (object, required): IMAP configuration to check for aggregate reports. 103 | 104 | * ``host`` (string, default ``"localhost"``): Hostname of IMAP server to connect to. 105 | * ``port`` (number, default ``993``): Port of the IMAP server to connect to. 106 | * ``username`` (string, required): Login username for the IMAP connection. 107 | * ``password``: (string, required): Login password for the IMAP connection. 108 | * ``use_ssl``: (boolean, default ``true``): Whether to use SSL encryption for the connection. Disabling this will transmit the password in clear text! Currently, there is no support for STARTTLS. 109 | * ``verify_certificate``: (boolean, default ``true``): Whether to verify the server's SSL certificate. You might have to set this to ``false`` if you are using a self-signed certificate. If this is disabled, someone else could impersonate the server and obtain the login data. 
110 | 111 | * ``folders`` (object): 112 | 113 | * ``inbox`` (string, default ``"INBOX"``): IMAP mailbox that is checked for incoming DMARC aggregate reports. 114 | * ``done`` (string, default ``"Archive"``): IMAP mailbox that successfully processed reports are moved to. 115 | * ``error``: (string, default ``"Invalid"``): IMAP mailbox that emails are moved to that could not be processed. 116 | 117 | * ``storage_path`` (string, default ``"/var/lib/dmarc-metrics-exporter"``): 118 | Directory to persist data in that has to persisted between restarts. 119 | * ``poll_interval_seconds`` (number, default ``60``): How often to poll the IMAP server in seconds. 120 | * ``deduplication_max_seconds`` (number, default ``604800`` which is 7 days): How long individual report IDs will be remembered to avoid counting double delivered reports twice. 121 | * ``logging`` (object, default ``{}``): Logging configuration, see the "Logging configuration" section below. 122 | 123 | Logging configuration 124 | ^^^^^^^^^^^^^^^^^^^^^ 125 | 126 | When providing a custom logging configuration, it must follow the dictionary 127 | schema (version 1) described in the `logging.config documentation `_. 128 | In general, a provided top-level key will replace the default configuration, 129 | but there are some exceptions. The following keys are always fixed: 130 | 131 | * ``version`` will always be ``1``. 132 | * ``incremental`` will always be ``false``. 133 | * ``formatters`` is fixed and provides the following formatters: 134 | 135 | * ``plain`` renders human-readable log messages without colors. 136 | * ``colored`` renders human-readable log messages with colors. 137 | * ``json`` renders structured JSON log messages. 138 | 139 | In addition, the ``root`` key has some special handling. If it is overridden, 140 | but not ``handlers`` key is provided, ``handlers: ['default']`` will be inserted 141 | automatically. 
Also, the ``level`` key will be set to ``'DEBUG'`` if the 142 | application is started with the ``--debug`` flag. 143 | 144 | Configuring log level 145 | """"""""""""""""""""" 146 | 147 | To change the log level globally: 148 | 149 | .. code-block:: json 150 | 151 | { 152 | "logging": { 153 | "root": { 154 | "level": "WARNING" 155 | } 156 | } 157 | } 158 | 159 | Configuring logging format 160 | """""""""""""""""""""""""" 161 | 162 | To change the logging format: 163 | 164 | .. code-block:: json 165 | 166 | { 167 | "logging": { 168 | "handlers": { 169 | "default": { 170 | "class": "logging.StreamHandler", 171 | "formatter": "json" 172 | } 173 | } 174 | } 175 | } 176 | 177 | Valid formats are ``plain``, ``colored``, and ``json``. 178 | 179 | Disabling Uvicorn access logs 180 | """"""""""""""""""""""""""""" 181 | 182 | To disable the Uvicorn access logs: 183 | 184 | .. code-block:: json 185 | 186 | { 187 | "logging": { 188 | "loggers": { 189 | "uvicorn.access": { 190 | "propagate": false 191 | } 192 | } 193 | } 194 | } 195 | 196 | 197 | Usage 198 | ----- 199 | 200 | To run dmarc-metrics-exporter with the default configuration in 201 | ``/etc/dmarc-metrics-exporter.json``: 202 | 203 | .. code-block:: bash 204 | 205 | sudo -u dmarc-metrics python3 -m dmarc_metrics_exporter 206 | 207 | To use a different configuration file: 208 | 209 | .. code-block:: bash 210 | 211 | sudo -u dmarc-metrics python3 -m dmarc_metrics_exporter --configuration 212 | 213 | You can enable debug logging with the `--debug` 214 | if you do not want to provide your own logging configuration: 215 | 216 | .. code-block:: bash 217 | 218 | sudo -u dmarc-metrics python3 -m dmarc_metrics_exporter --debug 219 | 220 | 221 | systemd 222 | ^^^^^^^ 223 | 224 | Instead of manually starting the dmarc-metrics-exporter, 225 | you likely want to have it run as a system service. 226 | An example systemd service file is provided in this repository in 227 | ``config/dmarc-metrics-exporter.service``. 
228 | Make sure that the paths and user/group names match your configuration 229 | and copy it to ``/etc/systemd/system`` to use it. 230 | To have systemd pick it up a ``systemctl daemon-reload`` might be necessary. 231 | 232 | You can then start/stop dmarc-metrics-exporter with: 233 | 234 | .. code-block:: bash 235 | 236 | systemctl start dmarc-metrics-exporter 237 | systemctl stop dmarc-metrics-exporter 238 | 239 | To have dmarc-metrics-exporter start on system boot: 240 | 241 | .. code-block:: bash 242 | 243 | systemctl enable dmarc-metrics-exporter 244 | 245 | Docker 246 | ^^^^^^ 247 | 248 | A new docker image is built for each release 249 | with GitHub Actions as described in this yaml-file: 250 | ``.github/workflows/docker-publish.yml``. 251 | 252 | Note that you should configure the `listen_addr` to `0.0.0.0` to be able to 253 | access the metrics exporter from outside the container. 254 | 255 | Example docker-compose file: 256 | 257 | .. code-block:: yml 258 | 259 | version: "3" 260 | 261 | services: 262 | 263 | dmarc-metrics-exporter: 264 | # source: https://github.com/jamborjan/dmarc-metrics-exporter/pkgs/container/dmarc-metrics-exporter 265 | container_name: dmarc-metrics-exporter 266 | hostname: dmarc-metrics-exporter 267 | image: jgosmann/dmarc-metrics-exporter:1.2.0 268 | restart: unless-stopped 269 | user: 1000:1000 #PUID=1000:PGID=1000 270 | expose: 271 | - 9797 272 | volumes: 273 | - '/host/folder/dmarc-metrics-exporter.json:/etc/dmarc-metrics-exporter.json' 274 | - '/host/folder/dmarc-metrics-exporter/metrics:/var/lib/dmarc-metrics-exporter:rw' 275 | logging: 276 | driver: "json-file" 277 | options: 278 | tag: "{{.ImageName}}|{{.Name}}|{{.ImageFullID}}|{{.FullID}}" 279 | networks: 280 | - YourDockerLan 281 | 282 | # $ docker network create -d bridge --attachable YourDockerLan 283 | networks: 284 | YourDockerLan: 285 | external: 286 | name: YourDockerLan 287 | 288 | Prometheus 289 | ^^^^^^^^^^ 290 | 291 | Example prometheus config file: 292 | 293 | 
.. code-block:: yaml 294 | 295 | global: 296 | scrape_interval: 15s 297 | evaluation_interval: 15s 298 | 299 | rule_files: 300 | 301 | scrape_configs: 302 | 303 | - job_name: 'dmarc-metrics-exporter' 304 | static_configs: 305 | - targets: ['dmarc-metrics-exporter:9797'] 306 | 307 | Grafana 308 | ^^^^^^^ 309 | 310 | An example configuration file is provided in this repository in 311 | ``config/dmarc-metrics-exporter.grafana.sample.json``. This example dashboard displays the collected metrics as shown in the screenshot below. 312 | 313 | .. figure:: config/dmarc-metrics-exporter.grafana.sample.png 314 | 315 | Example grafana dashboard 316 | 317 | Hints 318 | ^^^^^ 319 | 320 | You should not use your normal email and password credentials for the dmarc-metrics-exporter. 321 | If you are not able to create a dedicated service account email account, you should use an app password. 322 | 323 | Microsoft Exchange Online 324 | """"""""""""""""""""""""" 325 | 326 | * App passwords are available when you are using Multi Factor Authentication (MFA). 327 | `Manage app passwords for two-step verification `_ 328 | * If you don't see the app passwords option or get an error, 329 | `check if MFA is enabled `_ 330 | for the user. 331 | * If you still don't see the app passwords option, 332 | `check if app passwords are allowed in your organization `_ 333 | * Finally, `ensure that IMAP is enabled for the user `_. 334 | 335 | 336 | Development 337 | ----------- 338 | 339 | Prerequisites 340 | ^^^^^^^^^^^^^ 341 | 342 | * `Python `_ 343 | * `pre-commit `_ 344 | * `Poetry `_ 345 | * `Docker `_ 346 | 347 | Setup development environment 348 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 349 | 350 | .. code-block:: bash 351 | 352 | pre-commit install 353 | poetry install 354 | 355 | 356 | Run tests 357 | ^^^^^^^^^ 358 | 359 | .. 
code-block:: bash 360 | 361 | docker-compose up -d 362 | poetry run pytest 363 | -------------------------------------------------------------------------------- /compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.9" 2 | services: 3 | greenmail: 4 | image: "greenmail/standalone:2.0.1" 5 | ports: 6 | - "3025:3025" 7 | - "3993:3993" 8 | - "127.0.0.1:8080:8080" 9 | -------------------------------------------------------------------------------- /config/dmarc-metrics-exporter.grafana.sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "annotations": { 3 | "list": [ 4 | { 5 | "builtIn": 1, 6 | "datasource": "-- Grafana --", 7 | "enable": true, 8 | "hide": true, 9 | "iconColor": "rgba(0, 211, 255, 1)", 10 | "name": "Annotations & Alerts", 11 | "type": "dashboard" 12 | } 13 | ] 14 | }, 15 | "editable": true, 16 | "gnetId": null, 17 | "graphTooltip": 0, 18 | "id": 8, 19 | "links": [], 20 | "panels": [ 21 | { 22 | "datasource": null, 23 | "fieldConfig": { 24 | "defaults": { 25 | "color": { 26 | "mode": "thresholds" 27 | }, 28 | "decimals": 0, 29 | "mappings": [], 30 | "thresholds": { 31 | "mode": "absolute", 32 | "steps": [ 33 | { 34 | "color": "green", 35 | "value": null 36 | } 37 | ] 38 | }, 39 | "unit": "none" 40 | }, 41 | "overrides": [] 42 | }, 43 | "gridPos": { 44 | "h": 8, 45 | "w": 24, 46 | "x": 0, 47 | "y": 0 48 | }, 49 | "id": 8, 50 | "interval": "1d", 51 | "options": { 52 | "colorMode": "value", 53 | "graphMode": "none", 54 | "justifyMode": "auto", 55 | "orientation": "auto", 56 | "reduceOptions": { 57 | "calcs": [ 58 | "lastNotNull" 59 | ], 60 | "fields": "", 61 | "values": false 62 | }, 63 | "text": {}, 64 | "textMode": "auto" 65 | }, 66 | "pluginVersion": "7.5.6", 67 | "targets": [ 68 | { 69 | "exemplar": true, 70 | "expr": "sum(increase(dmarc_total[$__range]))", 71 | "interval": "", 72 | "legendFormat": "", 73 | "refId": "A" 74 | } 75 | ], 76 | 
"title": "Total", 77 | "type": "stat" 78 | }, 79 | { 80 | "datasource": null, 81 | "fieldConfig": { 82 | "defaults": { 83 | "color": { 84 | "mode": "thresholds" 85 | }, 86 | "decimals": 0, 87 | "mappings": [], 88 | "thresholds": { 89 | "mode": "percentage", 90 | "steps": [ 91 | { 92 | "color": "green", 93 | "value": null 94 | } 95 | ] 96 | }, 97 | "unit": "none" 98 | }, 99 | "overrides": [] 100 | }, 101 | "gridPos": { 102 | "h": 8, 103 | "w": 8, 104 | "x": 0, 105 | "y": 8 106 | }, 107 | "id": 11, 108 | "interval": "1d", 109 | "options": { 110 | "colorMode": "value", 111 | "graphMode": "none", 112 | "justifyMode": "auto", 113 | "orientation": "auto", 114 | "reduceOptions": { 115 | "calcs": [ 116 | "lastNotNull" 117 | ], 118 | "fields": "", 119 | "values": false 120 | }, 121 | "text": {}, 122 | "textMode": "value" 123 | }, 124 | "pluginVersion": "7.5.6", 125 | "targets": [ 126 | { 127 | "exemplar": true, 128 | "expr": "sum(increase(dmarc_compliant_total[$__range]))", 129 | "interval": "", 130 | "legendFormat": "", 131 | "refId": "A" 132 | } 133 | ], 134 | "title": "# compliant", 135 | "type": "stat" 136 | }, 137 | { 138 | "datasource": null, 139 | "fieldConfig": { 140 | "defaults": { 141 | "color": { 142 | "mode": "thresholds" 143 | }, 144 | "decimals": 0, 145 | "mappings": [], 146 | "thresholds": { 147 | "mode": "absolute", 148 | "steps": [ 149 | { 150 | "color": "green", 151 | "value": null 152 | }, 153 | { 154 | "color": "red", 155 | "value": 0.0001 156 | } 157 | ] 158 | }, 159 | "unit": "none" 160 | }, 161 | "overrides": [] 162 | }, 163 | "gridPos": { 164 | "h": 8, 165 | "w": 8, 166 | "x": 8, 167 | "y": 8 168 | }, 169 | "id": 13, 170 | "interval": "1d", 171 | "options": { 172 | "colorMode": "value", 173 | "graphMode": "none", 174 | "justifyMode": "auto", 175 | "orientation": "auto", 176 | "reduceOptions": { 177 | "calcs": [ 178 | "lastNotNull" 179 | ], 180 | "fields": "", 181 | "values": false 182 | }, 183 | "text": {}, 184 | "textMode": "auto" 185 | }, 186 | 
"pluginVersion": "7.5.6", 187 | "targets": [ 188 | { 189 | "exemplar": true, 190 | "expr": "sum(increase(dmarc_reject_total[$__range]))", 191 | "interval": "", 192 | "legendFormat": "", 193 | "refId": "A" 194 | } 195 | ], 196 | "timeFrom": null, 197 | "title": "# reject", 198 | "type": "stat" 199 | }, 200 | { 201 | "datasource": null, 202 | "fieldConfig": { 203 | "defaults": { 204 | "color": { 205 | "mode": "thresholds" 206 | }, 207 | "decimals": 0, 208 | "mappings": [], 209 | "thresholds": { 210 | "mode": "absolute", 211 | "steps": [ 212 | { 213 | "color": "green", 214 | "value": null 215 | }, 216 | { 217 | "color": "red", 218 | "value": 0.0001 219 | } 220 | ] 221 | }, 222 | "unit": "none" 223 | }, 224 | "overrides": [] 225 | }, 226 | "gridPos": { 227 | "h": 8, 228 | "w": 8, 229 | "x": 16, 230 | "y": 8 231 | }, 232 | "id": 10, 233 | "interval": "1d", 234 | "options": { 235 | "colorMode": "value", 236 | "graphMode": "none", 237 | "justifyMode": "auto", 238 | "orientation": "auto", 239 | "reduceOptions": { 240 | "calcs": [ 241 | "lastNotNull" 242 | ], 243 | "fields": "", 244 | "values": false 245 | }, 246 | "text": {}, 247 | "textMode": "auto" 248 | }, 249 | "pluginVersion": "7.5.6", 250 | "targets": [ 251 | { 252 | "exemplar": true, 253 | "expr": "sum(increase(dmarc_quarantine_total[$__range]))", 254 | "interval": "", 255 | "legendFormat": "", 256 | "refId": "A" 257 | } 258 | ], 259 | "timeFrom": null, 260 | "title": "# quarantine", 261 | "type": "stat" 262 | }, 263 | { 264 | "datasource": null, 265 | "fieldConfig": { 266 | "defaults": { 267 | "color": { 268 | "mode": "thresholds" 269 | }, 270 | "mappings": [], 271 | "thresholds": { 272 | "mode": "percentage", 273 | "steps": [ 274 | { 275 | "color": "red", 276 | "value": null 277 | }, 278 | { 279 | "color": "green", 280 | "value": 100 281 | } 282 | ] 283 | }, 284 | "unit": "percentunit" 285 | }, 286 | "overrides": [] 287 | }, 288 | "gridPos": { 289 | "h": 8, 290 | "w": 8, 291 | "x": 0, 292 | "y": 16 293 | }, 294 | 
"id": 6, 295 | "interval": "1d", 296 | "options": { 297 | "colorMode": "value", 298 | "graphMode": "none", 299 | "justifyMode": "auto", 300 | "orientation": "auto", 301 | "reduceOptions": { 302 | "calcs": [ 303 | "lastNotNull" 304 | ], 305 | "fields": "", 306 | "values": false 307 | }, 308 | "text": {}, 309 | "textMode": "value" 310 | }, 311 | "pluginVersion": "7.5.6", 312 | "targets": [ 313 | { 314 | "exemplar": true, 315 | "expr": "sum(increase(dmarc_compliant_total[$__range]))/sum(increase(dmarc_total[$__range]))", 316 | "interval": "", 317 | "legendFormat": "", 318 | "refId": "A" 319 | } 320 | ], 321 | "title": "% compliant", 322 | "type": "stat" 323 | }, 324 | { 325 | "datasource": null, 326 | "fieldConfig": { 327 | "defaults": { 328 | "color": { 329 | "mode": "thresholds" 330 | }, 331 | "mappings": [], 332 | "thresholds": { 333 | "mode": "absolute", 334 | "steps": [ 335 | { 336 | "color": "green", 337 | "value": null 338 | }, 339 | { 340 | "color": "red", 341 | "value": 0.0001 342 | } 343 | ] 344 | }, 345 | "unit": "percentunit" 346 | }, 347 | "overrides": [] 348 | }, 349 | "gridPos": { 350 | "h": 8, 351 | "w": 8, 352 | "x": 8, 353 | "y": 16 354 | }, 355 | "id": 12, 356 | "interval": "1d", 357 | "options": { 358 | "colorMode": "value", 359 | "graphMode": "none", 360 | "justifyMode": "auto", 361 | "orientation": "auto", 362 | "reduceOptions": { 363 | "calcs": [ 364 | "lastNotNull" 365 | ], 366 | "fields": "", 367 | "values": false 368 | }, 369 | "text": {}, 370 | "textMode": "auto" 371 | }, 372 | "pluginVersion": "7.5.6", 373 | "targets": [ 374 | { 375 | "exemplar": true, 376 | "expr": "sum(increase(dmarc_reject_total[$__range]))/sum(increase(dmarc_total[$__range]))", 377 | "interval": "", 378 | "legendFormat": "", 379 | "refId": "A" 380 | } 381 | ], 382 | "timeFrom": null, 383 | "title": "% reject", 384 | "type": "stat" 385 | }, 386 | { 387 | "datasource": null, 388 | "fieldConfig": { 389 | "defaults": { 390 | "color": { 391 | "mode": "thresholds" 392 | }, 
393 | "mappings": [], 394 | "thresholds": { 395 | "mode": "absolute", 396 | "steps": [ 397 | { 398 | "color": "green", 399 | "value": null 400 | }, 401 | { 402 | "color": "red", 403 | "value": 0.0001 404 | } 405 | ] 406 | }, 407 | "unit": "percentunit" 408 | }, 409 | "overrides": [] 410 | }, 411 | "gridPos": { 412 | "h": 8, 413 | "w": 8, 414 | "x": 16, 415 | "y": 16 416 | }, 417 | "id": 14, 418 | "interval": "1d", 419 | "options": { 420 | "colorMode": "value", 421 | "graphMode": "none", 422 | "justifyMode": "auto", 423 | "orientation": "auto", 424 | "reduceOptions": { 425 | "calcs": [ 426 | "lastNotNull" 427 | ], 428 | "fields": "", 429 | "values": false 430 | }, 431 | "text": {}, 432 | "textMode": "auto" 433 | }, 434 | "pluginVersion": "7.5.6", 435 | "targets": [ 436 | { 437 | "exemplar": true, 438 | "expr": "sum(increase(dmarc_quarantine_total[$__range]))/sum(increase(dmarc_total[$__range]))", 439 | "interval": "", 440 | "legendFormat": "", 441 | "refId": "A" 442 | } 443 | ], 444 | "timeFrom": null, 445 | "title": "% quarantine", 446 | "type": "stat" 447 | }, 448 | { 449 | "aliasColors": {}, 450 | "bars": false, 451 | "dashLength": 10, 452 | "dashes": false, 453 | "datasource": null, 454 | "description": "", 455 | "fieldConfig": { 456 | "defaults": {}, 457 | "overrides": [] 458 | }, 459 | "fill": 1, 460 | "fillGradient": 0, 461 | "gridPos": { 462 | "h": 9, 463 | "w": 24, 464 | "x": 0, 465 | "y": 24 466 | }, 467 | "hiddenSeries": false, 468 | "id": 2, 469 | "interval": "1d", 470 | "legend": { 471 | "avg": false, 472 | "current": false, 473 | "max": false, 474 | "min": false, 475 | "show": true, 476 | "total": false, 477 | "values": false 478 | }, 479 | "lines": true, 480 | "linewidth": 1, 481 | "nullPointMode": "null", 482 | "options": { 483 | "alertThreshold": true 484 | }, 485 | "percentage": false, 486 | "pluginVersion": "7.5.6", 487 | "pointradius": 2, 488 | "points": false, 489 | "renderer": "flot", 490 | "seriesOverrides": [], 491 | "spaceLength": 10, 492 | 
"stack": false, 493 | "steppedLine": false, 494 | "targets": [ 495 | { 496 | "exemplar": true, 497 | "expr": "sum(increase(dmarc_compliant_total[$__rate_interval])) by (reporter)", 498 | "interval": "", 499 | "legendFormat": "Compliant by {{reporter}}", 500 | "refId": "A" 501 | }, 502 | { 503 | "exemplar": true, 504 | "expr": "sum(increase(dmarc_reject_total[$__rate_interval])) by (reporter)", 505 | "hide": false, 506 | "interval": "", 507 | "legendFormat": "Reject by {{reporter}}", 508 | "refId": "B" 509 | } 510 | ], 511 | "thresholds": [], 512 | "timeFrom": null, 513 | "timeRegions": [], 514 | "timeShift": null, 515 | "title": "# by reporter", 516 | "tooltip": { 517 | "shared": true, 518 | "sort": 0, 519 | "value_type": "individual" 520 | }, 521 | "type": "graph", 522 | "xaxis": { 523 | "buckets": null, 524 | "mode": "time", 525 | "name": null, 526 | "show": true, 527 | "values": [] 528 | }, 529 | "yaxes": [ 530 | { 531 | "format": "short", 532 | "label": null, 533 | "logBase": 1, 534 | "max": null, 535 | "min": null, 536 | "show": true 537 | }, 538 | { 539 | "format": "short", 540 | "label": null, 541 | "logBase": 1, 542 | "max": null, 543 | "min": null, 544 | "show": true 545 | } 546 | ], 547 | "yaxis": { 548 | "align": false, 549 | "alignLevel": null 550 | } 551 | }, 552 | { 553 | "aliasColors": {}, 554 | "bars": false, 555 | "dashLength": 10, 556 | "dashes": false, 557 | "datasource": null, 558 | "fieldConfig": { 559 | "defaults": {}, 560 | "overrides": [] 561 | }, 562 | "fill": 1, 563 | "fillGradient": 0, 564 | "gridPos": { 565 | "h": 9, 566 | "w": 24, 567 | "x": 0, 568 | "y": 33 569 | }, 570 | "hiddenSeries": false, 571 | "id": 4, 572 | "interval": "1d", 573 | "legend": { 574 | "avg": false, 575 | "current": false, 576 | "max": false, 577 | "min": false, 578 | "show": true, 579 | "total": false, 580 | "values": false 581 | }, 582 | "lines": true, 583 | "linewidth": 1, 584 | "nullPointMode": "null", 585 | "options": { 586 | "alertThreshold": true 587 | }, 
588 | "percentage": false, 589 | "pluginVersion": "7.5.6", 590 | "pointradius": 2, 591 | "points": false, 592 | "renderer": "flot", 593 | "seriesOverrides": [], 594 | "spaceLength": 10, 595 | "stack": false, 596 | "steppedLine": false, 597 | "targets": [ 598 | { 599 | "exemplar": true, 600 | "expr": "sum(increase(dmarc_compliant_total[$__rate_interval])) by (dkim_domain)", 601 | "interval": "", 602 | "legendFormat": "Compliant from {{dkim_domain}}", 603 | "refId": "A" 604 | }, 605 | { 606 | "exemplar": true, 607 | "expr": "sum(increase(dmarc_reject_total[$__rate_interval])) by (dkim_domain)", 608 | "hide": false, 609 | "interval": "", 610 | "legendFormat": "Rejected from {{dkim_domain}}", 611 | "refId": "B" 612 | } 613 | ], 614 | "thresholds": [], 615 | "timeFrom": null, 616 | "timeRegions": [], 617 | "timeShift": null, 618 | "title": "# by sender domain", 619 | "tooltip": { 620 | "shared": true, 621 | "sort": 0, 622 | "value_type": "individual" 623 | }, 624 | "type": "graph", 625 | "xaxis": { 626 | "buckets": null, 627 | "mode": "time", 628 | "name": null, 629 | "show": true, 630 | "values": [] 631 | }, 632 | "yaxes": [ 633 | { 634 | "format": "short", 635 | "label": null, 636 | "logBase": 1, 637 | "max": null, 638 | "min": null, 639 | "show": true 640 | }, 641 | { 642 | "format": "short", 643 | "label": null, 644 | "logBase": 1, 645 | "max": null, 646 | "min": null, 647 | "show": true 648 | } 649 | ], 650 | "yaxis": { 651 | "align": false, 652 | "alignLevel": null 653 | } 654 | } 655 | ], 656 | "schemaVersion": 27, 657 | "style": "dark", 658 | "tags": [], 659 | "templating": { 660 | "list": [] 661 | }, 662 | "time": { 663 | "from": "now-30d", 664 | "to": "now" 665 | }, 666 | "timepicker": {}, 667 | "timezone": "", 668 | "title": "dmarc dashboard", 669 | "uid": "dWAaq-cnk", 670 | "version": 11 671 | } 672 | -------------------------------------------------------------------------------- /config/dmarc-metrics-exporter.grafana.sample.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/jgosmann/dmarc-metrics-exporter/d1d3912e9aed14be28c75467b1726d5a237e1777/config/dmarc-metrics-exporter.grafana.sample.png -------------------------------------------------------------------------------- /config/dmarc-metrics-exporter.sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "listen_addr": "127.0.0.1", 3 | "port": 9797, 4 | "imap": { 5 | "host": "localhost", 6 | "port": 993, 7 | "username": "dmarc@your-server.com", 8 | "password": "supersecret", 9 | "use_ssl": true, 10 | "verify_certificate": true 11 | }, 12 | "folders": { 13 | "inbox": "INBOX", 14 | "done": "Archive", 15 | "error": "Invalid" 16 | }, 17 | "storage_path": "/var/lib/dmarc-metrics-exporter", 18 | "poll_interval_seconds": 60, 19 | "deduplication_max_seconds": 604800, 20 | "logging": { 21 | "handlers": { 22 | "default": { 23 | "class": "logging.StreamHandler", 24 | "formatter": "json" 25 | } 26 | }, 27 | "loggers": { 28 | "uvicorn.access": { 29 | "propagate": false 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /config/dmarc-metrics-exporter.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Export Prometheus metrics from DMARC reports. 
3 | 4 | [Service] 5 | ExecStart=/usr/bin/python3 -m dmarc_metrics_exporter 6 | Environment=PYTHONUNBUFFERED=1 7 | Restart=on-failure 8 | User=dmarc-metrics 9 | PrivateTmp=yes 10 | PrivateDevices=yes 11 | ProtectSystem=full 12 | ProtectHome=read-only 13 | NoNewPrivileges=yes 14 | 15 | [Install] 16 | WantedBy=default.target 17 | -------------------------------------------------------------------------------- /dmarc-aggregate-report.xsd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 22 | 23 | 24 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 49 | 50 | 51 | 52 | 53 | 54 | 56 | 57 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 93 | 94 | 95 | 96 | 98 | 99 | 100 | 101 | 103 | 104 | 105 | 106 | 107 | 108 | 110 | 111 | 112 | 115 | 118 | 119 | 120 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 134 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 145 | 146 | 148 | 149 | 151 | 152 | 153 | 154 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 173 | 174 | 176 | 177 | 179 | 181 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 217 | 218 | 219 | 220 | 222 | 223 | 224 | 226 | 228 | 229 | 231 | 232 | 233 | 234 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 251 | 253 | 255 | 256 | 257 | 259 | 260 | 261 | 262 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.2.0" 2 | 
-------------------------------------------------------------------------------- /dmarc_metrics_exporter/__main__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from .app import main 4 | 5 | 6 | def run(): 7 | main(sys.argv[1:]) 8 | 9 | 10 | if __name__ == "__main__": 11 | run() 12 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/app.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import asyncio 3 | import json 4 | from asyncio import CancelledError 5 | from email.message import EmailMessage 6 | from pathlib import Path 7 | from typing import Any, Callable, Optional, Sequence, Tuple 8 | 9 | import structlog 10 | 11 | from dmarc_metrics_exporter.deserialization import ( 12 | ReportExtractionError, 13 | convert_to_events, 14 | get_aggregate_report_from_email, 15 | ) 16 | from dmarc_metrics_exporter.dmarc_metrics import DmarcMetricsCollection, InvalidMeta 17 | from dmarc_metrics_exporter.expiring_set import ExpiringSet 18 | from dmarc_metrics_exporter.imap_queue import ConnectionConfig, ImapQueue, QueueFolders 19 | from dmarc_metrics_exporter.logging import configure_logging 20 | from dmarc_metrics_exporter.metrics_persister import MetricsPersister 21 | from dmarc_metrics_exporter.prometheus_exporter import PrometheusExporter 22 | 23 | logger = structlog.get_logger() 24 | 25 | 26 | def main(argv: Sequence[str]): 27 | parser = argparse.ArgumentParser( 28 | description="Monitor an IMAP account for DMARC aggregate reports and " 29 | "provide a Prometheus endpoint for metrics derived from incoming " 30 | "reports." 
31 | ) 32 | parser.add_argument( 33 | "--configuration", 34 | type=argparse.FileType("r"), 35 | default="/etc/dmarc-metrics-exporter.json", 36 | help="Configuration file", 37 | ) 38 | parser.add_argument( 39 | "--debug", 40 | default=False, 41 | action="store_true", 42 | help="Enable debug logging", 43 | ) 44 | args = parser.parse_args(argv) 45 | 46 | configuration = json.load(args.configuration) 47 | args.configuration.close() 48 | 49 | configure_logging(configuration.get("logging", {}), debug=args.debug) 50 | 51 | storage_path = Path( 52 | configuration.get("storage_path", "/var/lib/dmarc-metrics-exporter") 53 | ) 54 | app = App( 55 | prometheus_addr=( 56 | configuration.get("listen_addr", "127.0.0.1"), 57 | configuration.get("port", 9797), 58 | ), 59 | imap_queue=ImapQueue( 60 | connection=ConnectionConfig(**configuration["imap"]), 61 | folders=QueueFolders(**configuration.get("folders", {})), 62 | poll_interval_seconds=configuration.get("poll_interval_seconds", 60), 63 | ), 64 | metrics_persister=MetricsPersister(storage_path / "metrics.db"), 65 | deduplication_max_seconds=configuration.get( 66 | "deduplication_max_seconds", 7 * 24 * 60 * 60 67 | ), 68 | seen_reports_db=storage_path / "seen-reports.db", 69 | ) 70 | 71 | asyncio.run(app.run()) 72 | 73 | 74 | class App: 75 | # pylint: disable=too-many-instance-attributes 76 | _seen_reports: ExpiringSet[Tuple[str, str]] 77 | 78 | # pylint: disable=too-many-arguments 79 | def __init__( 80 | self, 81 | *, 82 | prometheus_addr: Tuple[str, int], 83 | imap_queue: ImapQueue, 84 | metrics_persister: MetricsPersister, 85 | exporter_cls: Callable[[DmarcMetricsCollection], Any] = PrometheusExporter, 86 | autosave_interval_seconds: float = 60, 87 | deduplication_max_seconds: float = 7 * 24 * 60 * 60, 88 | seen_reports_db: Optional[Path] = None, 89 | ): 90 | self.prometheus_addr = prometheus_addr 91 | self.exporter = exporter_cls(DmarcMetricsCollection()) 92 | self.imap_queue = imap_queue 93 | self.exporter_cls = exporter_cls 
94 | self.metrics_persister = metrics_persister 95 | self.autosave_interval_seconds = autosave_interval_seconds 96 | self.seen_reports_db = seen_reports_db 97 | if seen_reports_db and seen_reports_db.exists(): 98 | self._seen_reports = ExpiringSet.load( 99 | seen_reports_db, deduplication_max_seconds 100 | ) 101 | else: 102 | self._seen_reports = ExpiringSet(deduplication_max_seconds) 103 | 104 | async def run(self): 105 | self.exporter = self.exporter_cls(self.metrics_persister.load()) 106 | try: 107 | self.imap_queue.consume(self.process_email) 108 | async with self.exporter.start_server(*self.prometheus_addr): 109 | while True: 110 | await asyncio.sleep(self.autosave_interval_seconds or 60) 111 | if self.autosave_interval_seconds: 112 | self._save_metrics() 113 | except CancelledError: 114 | pass 115 | finally: 116 | self._save_metrics() 117 | await self.imap_queue.stop_consumer() 118 | 119 | def _save_metrics(self): 120 | with self.exporter.get_metrics() as metrics: 121 | self.metrics_persister.save(metrics) 122 | if self.seen_reports_db: 123 | self._seen_reports.persist(self.seen_reports_db) 124 | 125 | async def process_email(self, msg: EmailMessage): 126 | try: 127 | for report in get_aggregate_report_from_email(msg): 128 | org_name = report.report_metadata and report.report_metadata.org_name 129 | report_id = report.report_metadata and report.report_metadata.report_id 130 | if org_name and report_id: 131 | if (org_name, report_id) in self._seen_reports: 132 | continue 133 | self._seen_reports.add((org_name, report_id)) 134 | 135 | for event in convert_to_events(report): 136 | with self.exporter.get_metrics() as metrics: 137 | metrics.update(event) 138 | except ReportExtractionError as err: 139 | with self.exporter.get_metrics() as metrics: 140 | metrics.inc_invalid(InvalidMeta(err.msg.get("from", None))) 141 | logger.warning(str(err), exc_info=err, msg=err.msg) 142 | -------------------------------------------------------------------------------- 
/dmarc_metrics_exporter/deserialization.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import io 3 | import os.path 4 | from email.contentmanager import raw_data_manager 5 | from email.message import EmailMessage 6 | from typing import Callable, Generator, Mapping, Optional 7 | from zipfile import ZipFile 8 | 9 | from xsdata.formats.dataclass.context import XmlContext 10 | from xsdata.formats.dataclass.parsers.config import ParserConfig 11 | from xsdata.formats.dataclass.parsers.xml import XmlParser 12 | 13 | from dmarc_metrics_exporter.dmarc_event import ( 14 | Disposition, 15 | DmarcEvent, 16 | DmarcResult, 17 | Meta, 18 | ) 19 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 20 | DispositionType, 21 | DkimresultType, 22 | DmarcresultType, 23 | Feedback, 24 | SpfresultType, 25 | ) 26 | 27 | 28 | def handle_octet_stream(filename: str, buffer: bytes) -> Generator[str, None, None]: 29 | _, file_extension = os.path.splitext(filename) 30 | return file_extension_handlers[file_extension](filename, buffer) 31 | 32 | 33 | def handle_application_gzip( 34 | _filename: str, gzip_bytes: bytes 35 | ) -> Generator[str, None, None]: 36 | yield gzip.decompress(gzip_bytes).decode("utf-8") 37 | 38 | 39 | def handle_application_zip( 40 | _filename: str, zip_bytes: bytes 41 | ) -> Generator[str, None, None]: 42 | with ZipFile(io.BytesIO(zip_bytes), "r") as zip_file: 43 | for name in zip_file.namelist(): 44 | with zip_file.open(name, "r") as f: 45 | yield f.read().decode("utf-8") 46 | 47 | 48 | def handle_text_xml(_filename: str, content: str) -> Generator[str, None, None]: 49 | yield content 50 | 51 | 52 | content_type_handlers: Mapping[str, Callable[..., Generator[str, None, None]]] = { 53 | "application/octet-stream": handle_octet_stream, 54 | "application/gzip": handle_application_gzip, 55 | "application/zip": handle_application_zip, 56 | "text/xml": handle_text_xml, 57 | } 58 | 59 | 
file_extension_handlers: Mapping[str, Callable[..., Generator[str, None, None]]] = { 60 | ".gz": handle_application_gzip, 61 | ".zip": handle_application_zip, 62 | } 63 | 64 | 65 | class ReportExtractionError(Exception): 66 | def __init__(self, msg): 67 | self.msg = msg 68 | 69 | def __str__(self): 70 | from_email = self.msg.get("from", "") 71 | subject = self.msg.get("subject", "") 72 | return f"Failed to extract report from email by {from_email} with subject '{subject}'." 73 | 74 | 75 | def get_aggregate_report_from_email( 76 | msg: EmailMessage, 77 | ) -> Generator[Feedback, None, None]: 78 | parser = XmlParser( 79 | context=XmlContext(), config=ParserConfig(fail_on_unknown_properties=False) 80 | ) 81 | has_found_a_report = False 82 | for part in msg.walk(): 83 | if part.get_content_type() in content_type_handlers: 84 | handler = content_type_handlers[part.get_content_type()] 85 | content = raw_data_manager.get_content(part) 86 | has_found_a_report = True 87 | for payload in handler(part.get_filename(), content): 88 | yield parser.from_string(payload, Feedback) 89 | if not has_found_a_report: 90 | raise ReportExtractionError(msg) 91 | 92 | 93 | def _map_disposition(disposition: Optional[DispositionType]) -> Disposition: 94 | if disposition is None: 95 | return Disposition.NONE_VALUE 96 | return { 97 | DispositionType.NONE_VALUE: Disposition.NONE_VALUE, 98 | DispositionType.QUARANTINE: Disposition.QUARANTINE, 99 | DispositionType.REJECT: Disposition.REJECT, 100 | }[disposition] 101 | 102 | 103 | def convert_to_events(feedback: Feedback) -> Generator[DmarcEvent, None, None]: 104 | for record in feedback.record: 105 | if record.row is None: 106 | continue 107 | 108 | if feedback.report_metadata: 109 | reporter = feedback.report_metadata.org_name or "" 110 | else: 111 | reporter = "" 112 | 113 | if record.identifiers: 114 | from_domain = record.identifiers.header_from or "" 115 | else: 116 | from_domain = "" 117 | 118 | if record.auth_results and 
len(record.auth_results.dkim) > 0: 119 | dkim = record.auth_results.dkim[0] 120 | dkim_domain = dkim.domain or "" 121 | dkim_pass = dkim.result == DkimresultType.PASS_VALUE 122 | else: 123 | dkim_domain = "" 124 | dkim_pass = False 125 | 126 | if record.auth_results and len(record.auth_results.spf) > 0: 127 | spf = record.auth_results.spf[0] 128 | spf_domain = spf.domain or "" 129 | spf_pass = spf.result == SpfresultType.PASS_VALUE 130 | else: 131 | spf_domain = "" 132 | spf_pass = False 133 | 134 | if record.row.policy_evaluated: 135 | disposition = _map_disposition(record.row.policy_evaluated.disposition) 136 | dkim_aligned = ( 137 | record.row.policy_evaluated.dkim == DmarcresultType.PASS_VALUE 138 | ) 139 | spf_aligned = record.row.policy_evaluated.spf == DmarcresultType.PASS_VALUE 140 | else: 141 | disposition = Disposition.NONE_VALUE 142 | dkim_aligned = False 143 | spf_aligned = False 144 | 145 | yield DmarcEvent( 146 | count=record.row.count or 1, 147 | meta=Meta( 148 | reporter=reporter, 149 | from_domain=from_domain, 150 | dkim_domain=dkim_domain, 151 | spf_domain=spf_domain, 152 | ), 153 | result=DmarcResult( 154 | disposition=disposition, 155 | dkim_aligned=dkim_aligned, 156 | spf_aligned=spf_aligned, 157 | dkim_pass=dkim_pass, 158 | spf_pass=spf_pass, 159 | ), 160 | ) 161 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/dmarc_event.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from enum import Enum 3 | 4 | 5 | @dataclass(frozen=True) 6 | class Meta: 7 | reporter: str 8 | from_domain: str 9 | dkim_domain: str 10 | spf_domain: str 11 | 12 | 13 | class Disposition(Enum): 14 | NONE_VALUE = "none" 15 | QUARANTINE = "quarantine" 16 | REJECT = "reject" 17 | 18 | 19 | @dataclass(frozen=True) 20 | class DmarcResult: 21 | disposition: Disposition 22 | dkim_pass: bool 23 | spf_pass: bool 24 | dkim_aligned: bool 25 | 
spf_aligned: bool 26 | 27 | @property 28 | def dmarc_compliant(self) -> bool: 29 | return (self.dkim_aligned and self.dkim_pass) or ( 30 | self.spf_aligned and self.spf_pass 31 | ) 32 | 33 | 34 | @dataclass(frozen=True) 35 | class DmarcEvent: 36 | count: int 37 | meta: Meta 38 | result: DmarcResult 39 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/dmarc_metrics.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Mapping 2 | from dataclasses import dataclass, field 3 | from typing import Dict, Iterator, Optional 4 | 5 | from dmarc_metrics_exporter.dmarc_event import ( 6 | Disposition, 7 | DmarcEvent, 8 | DmarcResult, 9 | Meta, 10 | ) 11 | 12 | 13 | @dataclass 14 | class DmarcMetrics: 15 | total_count: int = 0 16 | disposition_counts: Dict[Disposition, int] = field(default_factory=dict) 17 | dmarc_compliant_count: int = 0 18 | dkim_pass_count: int = 0 19 | spf_pass_count: int = 0 20 | dkim_aligned_count: int = 0 21 | spf_aligned_count: int = 0 22 | 23 | def update(self, count: int, result: DmarcResult): 24 | self.total_count += count 25 | if result.disposition not in self.disposition_counts: 26 | self.disposition_counts[result.disposition] = 0 27 | self.disposition_counts[result.disposition] += count 28 | if result.dmarc_compliant: 29 | self.dmarc_compliant_count += count 30 | if result.dkim_pass: 31 | self.dkim_pass_count += count 32 | if result.spf_pass: 33 | self.spf_pass_count += count 34 | if result.dkim_aligned: 35 | self.dkim_aligned_count += count 36 | if result.spf_aligned: 37 | self.spf_aligned_count += count 38 | 39 | 40 | @dataclass(frozen=True) 41 | class InvalidMeta: 42 | from_email: Optional[str] 43 | 44 | 45 | @dataclass 46 | class DmarcMetricsCollection(Mapping): 47 | metrics: Dict[Meta, DmarcMetrics] = field(default_factory=dict) 48 | invalid_reports: Dict[InvalidMeta, int] = field(default_factory=dict) 49 | 50 | def 
T = TypeVar("T")


class ExpiringSet(Generic[T], Container):
    """Set whose members automatically expire ``ttl`` seconds after insertion.

    Re-adding an existing item refreshes its time-to-live.  The set can be
    persisted to disk and re-loaded; only entries still within their TTL are
    restored.
    """

    __PICKLE_PROTOCOL = 4
    __VERSION = 0

    # item -> timestamp of its most recent add; lets _expire ignore stale
    # queue entries for items that were re-added later.
    _items: dict[T, float]
    _expiry_queue: Deque[Tuple[float, T]]

    def __init__(self, ttl: float, time_fn: Callable[[], float] = time.time):
        """``ttl``: seconds an item stays alive; ``time_fn``: clock source
        (injectable for testing)."""
        self.ttl = ttl
        self._time = time_fn
        self._items = {}
        self._expiry_queue = deque()

    def add(self, item: T):
        """Add ``item``; if it is already present its TTL is refreshed."""
        self._expire()
        timestamp = self._time()
        self._items[item] = timestamp
        self._expiry_queue.append((timestamp, item))

    def __contains__(self, item: object) -> bool:
        self._expire()
        return item in self._items

    def _expire(self):
        """Drop every queue entry older than ``ttl``.

        Fixes two defects of the previous set-based implementation when the
        same item was added twice: expiring the first entry no longer removes
        a re-added (still fresh) item, and expiring the second entry no longer
        raises ``KeyError``.
        """
        while (
            len(self._expiry_queue) > 0
            and self._time() - self._expiry_queue[0][0] >= self.ttl
        ):
            timestamp, item = self._expiry_queue.popleft()
            # Only remove the item if this entry is its most recent add;
            # otherwise a newer queue entry is still keeping it alive.
            if self._items.get(item) == timestamp:
                del self._items[item]

    def persist(self, path: Union[Path, str]):
        """Write the non-expired entries to ``path``.

        The on-disk pickle layout (a dict with "version" and "expiry_queue")
        is unchanged, so previously persisted files stay loadable.
        """
        self._expire()
        with open(path, "wb") as f:
            pickle.dump(
                {"version": self.__VERSION, "expiry_queue": self._expiry_queue},
                f,
                self.__PICKLE_PROTOCOL,
            )

    @classmethod
    def load(
        cls,
        path: Union[Path, str],
        ttl: float,
        time_fn: Callable[[], float] = time.time,
    ) -> "ExpiringSet[T]":
        """Restore a set persisted with :meth:`persist`, keeping only entries
        still within ``ttl``.

        Raises:
            RuntimeError: if the file was written by an incompatible version.
        """
        # pylint: disable=protected-access
        reconstructed = ExpiringSet[T](ttl, time_fn)
        with open(path, "rb") as f:
            data = pickle.load(f)
        if data["version"] != cls.__VERSION:
            raise RuntimeError("Unsupported version.")
        reconstructed._expiry_queue.extend(
            (timestamp, item)
            for timestamp, item in data["expiry_queue"]
            if time_fn() - timestamp < ttl
        )
        for timestamp, item in reconstructed._expiry_queue:
            # Later entries overwrite earlier ones, keeping the freshest add.
            reconstructed._items[item] = timestamp
        return reconstructed
ssl.create_default_context() 42 | if not self.verify_certificate: 43 | ssl_context.check_hostname = False 44 | ssl_context.verify_mode = ssl.CERT_NONE 45 | ssl_context.maximum_version = self.tls_maximum_version 46 | return ssl_context 47 | else: 48 | return False 49 | 50 | 51 | class ImapError(Exception): 52 | pass 53 | 54 | 55 | class IncompleteResponse(ImapError): 56 | pass 57 | 58 | 59 | class ResponseType(Enum): 60 | CONTINUE_REQ = "+" 61 | UNTAGGED = "*" 62 | TAGGED = "tagged" 63 | 64 | 65 | class _ImapTag: 66 | state: Optional[bytes] 67 | text: Optional[bytes] 68 | 69 | def __init__(self, name: bytes): 70 | self.name = name 71 | self._response_received = Event() 72 | self.state = None 73 | self.text = None 74 | 75 | async def wait_response(self): 76 | await self._response_received.wait() 77 | 78 | def has_response(self) -> bool: 79 | return self._response_received.is_set() 80 | 81 | def set_response(self, state: bytes, text: bytes): 82 | logger.debug( 83 | "IMAP command completed with ", 84 | command=self.name, 85 | state=state, 86 | state_description=text, 87 | logger=ImapClient.__name__, 88 | ) 89 | self.state = state 90 | self.text = text 91 | self._response_received.set() 92 | 93 | 94 | class _ImapCommandWriter: 95 | def __init__(self, writer: StreamWriter, server_ready: Event, timeout_seconds: int): 96 | self.writer = writer 97 | self._server_ready = server_ready 98 | self.timeout_seconds = timeout_seconds 99 | 100 | async def _drain(self): 101 | await asyncio.wait_for(self.writer.drain(), timeout=self.timeout_seconds) 102 | 103 | async def write_raw(self, buf: bytes): 104 | self.writer.write(buf) 105 | await self._drain() 106 | 107 | async def write_int(self, num: int): 108 | self.writer.write(str(num).encode("ascii")) 109 | await self._drain() 110 | 111 | async def write_string_literal(self, string: str): 112 | encoded = string.encode("utf-8") 113 | self._server_ready.clear() 114 | await self.write_raw(b"{" + str(len(encoded)).encode("ascii") + 
b"}\r\n") 115 | await self._server_ready.wait() 116 | 117 | await self.write_raw(encoded) 118 | 119 | 120 | class _CommandsInUse: 121 | def __init__(self): 122 | self._in_use = set() 123 | self._change_condition = asyncio.Condition() 124 | 125 | async def acquire(self, name: str): 126 | async with self._change_condition: 127 | await self._change_condition.wait_for(lambda: name not in self._in_use) 128 | self._in_use.add(name) 129 | 130 | async def release(self, name: str): 131 | async with self._change_condition: 132 | self._in_use.remove(name) 133 | self._change_condition.notify_all() 134 | 135 | 136 | # pylint: disable=too-many-instance-attributes 137 | class ImapClient: 138 | num_exists: Optional[int] 139 | fetched_queue: Queue 140 | _capabilities: FrozenSet[str] 141 | _tag_completions: Dict[bytes, _ImapTag] 142 | 143 | def __init__(self, connection: ConnectionConfig, timeout_seconds: int = 10): 144 | self.connection = connection 145 | self.timeout_seconds = timeout_seconds 146 | self.num_exists = None 147 | self.fetched_queue = Queue() 148 | self._last_response = time.time() 149 | self._capabilities = frozenset() 150 | self._ongoing_commands = _CommandsInUse() 151 | self._command_lock = Lock() 152 | self._server_ready = Event() 153 | self._process_responses_task = None 154 | self._writer = None 155 | self._tag_gen = (f"a{i}".encode("ascii") for i in itertools.count()) 156 | self._tag_completions = {} 157 | self._log = logger.bind(logger=self.__class__.__name__) 158 | 159 | async def __aenter__(self): 160 | reader, self._writer = await open_connection( 161 | self.connection.host, 162 | self.connection.port, 163 | ssl=self.connection.create_ssl_context(), 164 | ) 165 | self._process_responses_task = create_task(self._process_responses(reader)) 166 | 167 | try: 168 | await wait_for(self._server_ready.wait(), self.timeout_seconds) 169 | await self._log.adebug("IMAP server ready.") 170 | await self._capability() 171 | await self._login(self.connection.username, 
self.connection.password) 172 | except: 173 | self._process_responses_task.cancel() 174 | raise 175 | return self 176 | 177 | async def __aexit__(self, exc_type, exc, traceback): 178 | if not self._writer.is_closing(): 179 | await self._log.adebug("Logging out.", timeout=self.timeout_seconds) 180 | with contextlib.suppress(asyncio.TimeoutError): 181 | await wait_for(self._logout(), self.timeout_seconds) 182 | self._writer.close() 183 | await asyncio.gather( 184 | self._log.adebug("Waiting for writer to be closed."), 185 | self._writer.wait_closed(), 186 | ) 187 | await asyncio.gather( 188 | self._log.adebug("Processing remaining responses after connection close."), 189 | self._process_responses_task, 190 | ) 191 | self._server_ready.clear() 192 | await self._log.adebug("Connection closed.") 193 | 194 | async def _process_responses(self, reader: StreamReader): 195 | try: 196 | async for parse_tree in parse_incremental(response_grammar, reader): 197 | response = parse_tree.values 198 | await self._log.adebug("IMAP response.", response=response) 199 | 200 | self._last_response = time.time() 201 | 202 | if response[0] == b"+": 203 | self._server_ready.set() 204 | elif response[0] == b"*": 205 | await self._process_untagged_response(response) 206 | else: 207 | tag_name, state, text = response[0:3] 208 | self._tag_completions[tag_name].set_response(state, text) 209 | await self._log.adebug("End of response stream.") 210 | except Exception: # pylint: disable=broad-except 211 | await self._log.aexception("Error while processing server responses.") 212 | for tag_completion in self._tag_completions.values(): 213 | if not tag_completion.has_response(): 214 | tag_completion.set_response(b"NO", b"") 215 | 216 | async def _process_untagged_response(self, response: ParsedNode): 217 | if response[1] == b"OK": 218 | self._server_ready.set() 219 | elif response[1] == b"CAPABILITY": 220 | await self._log.adebug( 221 | "IMAP server reported capabilities.", capabilities=response[2] 222 
| ) 223 | self._capabilities = frozenset( 224 | c.strip().upper() for c in response[2].decode("utf-8").split(" ") 225 | ) 226 | elif len(response) >= 3 and response[2] == b"EXISTS": 227 | self.num_exists = response[1] 228 | elif len(response) >= 3 and response[2] == b"EXPUNGE": 229 | if self.num_exists is not None: 230 | self.num_exists -= 1 231 | elif len(response) >= 3 and response[2] == b"FETCH": 232 | await self.fetched_queue.put(response[1:]) 233 | else: 234 | await self._log.adebug( 235 | "Ignored untagged IMAP response.", response=response[1] 236 | ) 237 | 238 | async def _command( 239 | self, name: str, write_command: Callable[[_ImapCommandWriter], Coroutine] 240 | ): 241 | assert self._writer 242 | tag = _ImapTag(next(self._tag_gen)) 243 | self._tag_completions[tag.name] = tag 244 | wait_response = asyncio.ensure_future(tag.wait_response()) 245 | try: 246 | await self._ongoing_commands.acquire(name) 247 | 248 | async with self._command_lock: 249 | self._writer.write(tag.name) 250 | self._writer.write(b" ") 251 | cmd_writer = _ImapCommandWriter( 252 | self._writer, self._server_ready, self.timeout_seconds 253 | ) 254 | _, pending = await asyncio.wait( 255 | [asyncio.ensure_future(write_command(cmd_writer)), wait_response], 256 | return_when=asyncio.FIRST_COMPLETED, 257 | ) 258 | 259 | while not wait_response.done(): 260 | _, pending = await asyncio.wait( 261 | [wait_response], timeout=self.timeout_seconds 262 | ) 263 | if ( 264 | not wait_response.done() 265 | and self.timeout_seconds < time.time() - self._last_response 266 | ): 267 | for future in pending: 268 | future.cancel() 269 | raise asyncio.TimeoutError("Waiting for response timed out.") 270 | 271 | if tag.state and not tag.state.upper() == b"OK": 272 | raise ImapServerError(name, tag.state, tag.text) 273 | finally: 274 | del self._tag_completions[tag.name] 275 | if not wait_response.done(): 276 | wait_response.cancel() 277 | await self._ongoing_commands.release(name) 278 | 279 | async def 
_login(self, username: str, password: str): 280 | async def login_writer(cmd_writer: _ImapCommandWriter): 281 | await cmd_writer.write_raw(b"LOGIN ") 282 | await cmd_writer.write_string_literal(username) 283 | await cmd_writer.write_raw(b" ") 284 | await cmd_writer.write_string_literal(password) 285 | await cmd_writer.write_raw(b"\r\n") 286 | 287 | await self._command("LOGIN", login_writer) 288 | 289 | async def _logout(self): 290 | async def logout_writer(cmd_writer: _ImapCommandWriter): 291 | await cmd_writer.write_raw(b"LOGOUT\r\n") 292 | 293 | await self._command("LOGOUT", logout_writer) 294 | 295 | async def _capability(self): 296 | async def capability_writer(cmd_writer: _ImapCommandWriter): 297 | await cmd_writer.write_raw(b"CAPABILITY\r\n") 298 | 299 | await self._command("CAPABILITY", capability_writer) 300 | 301 | def has_capability(self, capability: str) -> bool: 302 | return capability.upper() in self._capabilities 303 | 304 | async def select(self, mailbox: str = "INBOX") -> Optional[int]: 305 | async def select_writer(cmd_writer: _ImapCommandWriter): 306 | await cmd_writer.write_raw(b"SELECT ") 307 | await cmd_writer.write_string_literal(mailbox) 308 | await cmd_writer.write_raw(b"\r\n") 309 | 310 | await self._command("SELECT", select_writer) 311 | return self.num_exists 312 | 313 | async def fetch(self, sequence_set: bytes, attrs: bytes): 314 | async def fetch_writer(cmd_writer: _ImapCommandWriter): 315 | await cmd_writer.write_raw(b"FETCH ") 316 | await cmd_writer.write_raw(sequence_set) 317 | await cmd_writer.write_raw(b" ") 318 | await cmd_writer.write_raw(attrs) 319 | await cmd_writer.write_raw(b"\r\n") 320 | 321 | await self._command("FETCH", fetch_writer) 322 | 323 | async def create(self, name: str): 324 | async def create_writer(cmd_writer: _ImapCommandWriter): 325 | await cmd_writer.write_raw(b"CREATE ") 326 | await cmd_writer.write_string_literal(name) 327 | await cmd_writer.write_raw(b"\r\n") 328 | 329 | await self._command("CREATE", 
create_writer) 330 | 331 | async def create_if_not_exists(self, name: str): 332 | try: 333 | await self.select(name) 334 | except ImapServerError: 335 | await self.create(name) 336 | 337 | async def delete(self, name: str): 338 | async def create_writer(cmd_writer: _ImapCommandWriter): 339 | await cmd_writer.write_raw(b"DELETE ") 340 | await cmd_writer.write_string_literal(name) 341 | await cmd_writer.write_raw(b"\r\n") 342 | 343 | await self._command("DELETE", create_writer) 344 | 345 | async def uid_copy(self, uid: int, destination: str): 346 | async def uid_copy_writer(cmd_writer: _ImapCommandWriter): 347 | await cmd_writer.write_raw(b"UID COPY ") 348 | await cmd_writer.write_int(uid) 349 | await cmd_writer.write_raw(b" ") 350 | await cmd_writer.write_string_literal(destination) 351 | await cmd_writer.write_raw(b"\r\n") 352 | 353 | await self._command("UID COPY", uid_copy_writer) 354 | 355 | async def uid_move(self, uid: int, destination: str): 356 | async def uid_move_writer(cmd_writer: _ImapCommandWriter): 357 | await cmd_writer.write_raw(b"UID MOVE ") 358 | await cmd_writer.write_int(uid) 359 | await cmd_writer.write_raw(b" ") 360 | await cmd_writer.write_string_literal(destination) 361 | await cmd_writer.write_raw(b"\r\n") 362 | 363 | await self._command("UID MOVE", uid_move_writer) 364 | 365 | async def uid_move_graceful(self, uid: int, destination: str): 366 | if self.has_capability("MOVE"): 367 | await self.uid_move(uid, destination) 368 | else: 369 | await self.uid_copy(uid, destination) 370 | await self.uid_store(uid, rb"+FLAGS.SILENT (\Deleted)") 371 | await self.expunge() 372 | 373 | async def uid_store(self, uid: int, flags: bytes): 374 | async def uid_store_writer(cmd_writer: _ImapCommandWriter): 375 | await cmd_writer.write_raw(b"UID STORE ") 376 | await cmd_writer.write_int(uid) 377 | await cmd_writer.write_raw(b" ") 378 | await cmd_writer.write_raw(flags) 379 | await cmd_writer.write_raw(b"\r\n") 380 | 381 | await self._command("STORE", 
uid_store_writer) 382 | 383 | async def expunge(self): 384 | async def expunge_writer(cmd_writer: _ImapCommandWriter): 385 | await cmd_writer.write_raw(b"EXPUNGE\r\n") 386 | 387 | await self._command("EXPUNGE", expunge_writer) 388 | 389 | 390 | class ImapServerError(ImapError): 391 | """Error class for errors reported from the server.""" 392 | 393 | def __init__(self, command, result, server_response): 394 | self.command = command 395 | self.result = result 396 | self.server_response = server_response 397 | super().__init__(command, result, server_response) 398 | 399 | def __str__(self): 400 | return ( 401 | f"IMAP error: Command {self.command} returned {self.result} " 402 | f"with response data: {self.server_response}" 403 | ) 404 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/imap_parser.py: -------------------------------------------------------------------------------- 1 | from bite import ( 2 | And, 3 | CaselessLiteral, 4 | CharacterSet, 5 | Combine, 6 | Counted, 7 | FixedByteCount, 8 | Forward, 9 | Literal, 10 | Opt, 11 | Parser, 12 | Suppress, 13 | TransformValues, 14 | ) 15 | from bite.transformers import Group 16 | 17 | nil = CaselessLiteral(b"NIL") 18 | atom_char = CharacterSet( 19 | rb'(){ %*"\]' + bytes(range(0x1F + 1)) + bytes(range(0x7F, 0x9F + 1)), invert=True 20 | ) 21 | atom = Combine(atom_char[1, ...]) 22 | resp_specials = Literal(b"]") 23 | astring_char = atom_char | resp_specials 24 | 25 | sp = Suppress(CharacterSet(b" \t")[1, ...]) 26 | crlf = Suppress(Literal(b"\r\n")) 27 | 28 | integer = TransformValues( 29 | Combine(CharacterSet(b"0123456789")[1, ...]), 30 | lambda values: tuple(int(v) for v in values), 31 | ) 32 | literal_string = Counted( 33 | Suppress(Literal(b"{")) + integer + Suppress(Literal(b"}") + crlf), 34 | FixedByteCount, 35 | ) 36 | dbl_quoted_string = ( 37 | Suppress(Literal(b'"')) 38 | + Combine(CharacterSet(b'"', invert=True)[0, ...]) 39 | + Suppress(Literal(b'"')) 40 
| ) 41 | string = dbl_quoted_string | literal_string 42 | 43 | astring = Combine(astring_char[1, ...]) | string 44 | nstring = string | nil 45 | 46 | 47 | def parenthesized_list(items_expr: Parser) -> Parser: 48 | return Group( 49 | Suppress(Literal(b"(")) 50 | + Opt(sp) 51 | + Opt(items_expr + (sp + items_expr)[0, ...]) 52 | + Opt(sp) 53 | + Suppress(Literal(b")")) 54 | ) 55 | 56 | 57 | def pair(keyword_expr: Parser, expr: Parser) -> Parser: 58 | return Group(And([keyword_expr, sp, expr])) 59 | 60 | 61 | address = parenthesized_list(nstring) 62 | 63 | header_field_name = astring 64 | header_list = parenthesized_list(header_field_name) 65 | 66 | section_msgtext = ( 67 | ( 68 | CaselessLiteral(b"HEADER.FIELDS") 69 | + Opt(CaselessLiteral(b".NOT")) 70 | + Opt(sp) 71 | + header_list 72 | ) 73 | | CaselessLiteral(b"HEADER") 74 | | CaselessLiteral(b"TEXT") 75 | ) 76 | section_text = section_msgtext | CaselessLiteral(b"MIME") 77 | section_part = integer + (Suppress(Literal(b".")) + integer)[0, ...] 
78 | section_spec = section_msgtext | ( 79 | section_part + Opt(Suppress(Literal(b".")) + section_text) 80 | ) 81 | section = Suppress(Literal(b"[")) + Group(Opt(section_spec)) + Suppress(Literal(b"]")) 82 | 83 | nested_lists = Forward() 84 | nested_lists.assign( 85 | parenthesized_list( 86 | nstring | nested_lists | Combine(CharacterSet(b" )", invert=True)[1, ...]) 87 | ) 88 | ) 89 | 90 | 91 | body_structure = Group( 92 | (CaselessLiteral(b"BODYSTRUCTURE") | CaselessLiteral(b"BODY")) 93 | + sp 94 | + Group(Suppress(nested_lists)) 95 | ) 96 | length = Suppress(Literal(b"<")) + integer + Suppress(Literal(b">")) 97 | body_section = pair( 98 | CaselessLiteral(b"BODY") + section + Opt(length), 99 | nstring, 100 | ) 101 | envelope = pair( 102 | CaselessLiteral(b"ENVELOPE"), 103 | parenthesized_list(parenthesized_list(address) | nstring), 104 | ) 105 | flag = Combine(Literal(b"\\") + atom) 106 | flags = pair(CaselessLiteral(b"FLAGS"), parenthesized_list(flag)) 107 | internaldate = pair(CaselessLiteral(b"INTERNALDATE"), dbl_quoted_string) 108 | rfc822 = pair( 109 | CaselessLiteral(b"RFC822"), 110 | nstring, 111 | ) 112 | rfc822_header = pair(CaselessLiteral(b"RFC822.HEADER"), nstring) 113 | rfc822_text = pair(CaselessLiteral(b"RFC822.TEXT"), nstring) 114 | rfc822_size = pair(CaselessLiteral(b"RFC822.SIZE"), integer) 115 | uid = pair(CaselessLiteral(b"UID"), integer) 116 | unknown_fetch_response_pair = pair( 117 | Combine(CharacterSet(b" \t\r\n[<", invert=True)[1, ...]) 118 | + Opt( 119 | Combine(Literal(b"[") + CharacterSet(b"]", invert=True)[0, ...] 
+ Literal(b"]")) 120 | ) 121 | + Opt(length), 122 | nil | integer | nstring | nested_lists, 123 | ) 124 | fetch_response_pair = ( 125 | body_section 126 | | body_structure 127 | | envelope 128 | | flags 129 | | internaldate 130 | | rfc822 131 | | rfc822_header 132 | | rfc822_text 133 | | rfc822_size 134 | | uid 135 | | unknown_fetch_response_pair 136 | ) 137 | 138 | fetch_response_line = ( 139 | integer 140 | + sp 141 | + CaselessLiteral(b"FETCH") 142 | + Opt(sp) 143 | + parenthesized_list(fetch_response_pair) 144 | ) 145 | 146 | text = Combine(CharacterSet(b"\r\n", invert=True)[0, ...]) 147 | flag_perm = flag | Literal(rb"\*") 148 | auth_type = atom 149 | capability = (CaselessLiteral(b"AUTH=") + auth_type) | atom 150 | capability_data = ( 151 | CaselessLiteral(b"CAPABILITY") 152 | + capability[0, ...] 153 | + CaselessLiteral(b"IMAP4rev1") 154 | + capability[0, ...] 155 | ) 156 | response_text_code = ( 157 | CaselessLiteral(b"ALERT") 158 | | Group(CaselessLiteral(b"BADCHARSET") + Opt(sp) + Opt(parenthesized_list(astring))) 159 | | capability_data 160 | | CaselessLiteral(b"PARSE") 161 | | Group( 162 | CaselessLiteral(b"PERMANENTFLAGS") + Opt(sp) + parenthesized_list(flag_perm) 163 | ) 164 | | CaselessLiteral(b"READ-ONLY") 165 | | CaselessLiteral(b"READ-WRITE") 166 | | CaselessLiteral(b"TRYCREATE") 167 | | Group(CaselessLiteral(b"UIDNEXT") + sp + integer) 168 | | Group(CaselessLiteral(b"UIDVALIDITY") + sp + integer) 169 | | Group(CaselessLiteral(b"UNSEEN") + sp + integer) 170 | | Group(atom + Opt(sp + Combine(CharacterSet(b"\r\n]", invert=True)[1, ...]))) 171 | ) 172 | resp_text = Opt( 173 | Suppress(Literal(b"[")) 174 | + response_text_code 175 | + Suppress(Literal(b"]")) 176 | + Suppress(Opt(sp)) 177 | ) + (~Literal(b"[") + text) 178 | resp_cond_state = ( 179 | CaselessLiteral(b"OK") | CaselessLiteral(b"NO") | CaselessLiteral(b"BAD") 180 | ) 181 | tag = ~Literal(b"+") + Combine(astring_char[1, ...]) 182 | response_tagged = tag + sp + resp_cond_state + sp + 
resp_text 183 | 184 | server_greeting = Literal(b"OK") + sp + text 185 | server_goodbye = Literal(b"BYE") + sp + text 186 | 187 | capability = CaselessLiteral(b"CAPABILITY") + sp + text 188 | response_untagged = ( 189 | Literal(b"*") 190 | + sp 191 | + ( 192 | server_goodbye 193 | | capability 194 | | (integer + sp + Literal(b"EXISTS")) 195 | | (integer + sp + Literal(b"EXPUNGE")) 196 | | fetch_response_line 197 | | server_greeting 198 | | ( 199 | ( 200 | Combine(CharacterSet(b"{\r\n", invert=True)[1, ...]) 201 | + Opt(literal_string) 202 | )[0, ...] 203 | ) 204 | ) 205 | ) 206 | response_continue = Literal(b"+") + sp + text 207 | response = (response_continue | response_untagged | response_tagged) + crlf 208 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/imap_queue.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import contextlib 3 | import email.policy 4 | from asyncio.tasks import Task 5 | from dataclasses import astuple, dataclass 6 | from email.message import EmailMessage 7 | from email.parser import BytesParser 8 | from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, cast 9 | from urllib.parse import ParseResult 10 | 11 | import structlog 12 | 13 | from dmarc_metrics_exporter.imap_client import ConnectionConfig, ImapClient 14 | 15 | logger = structlog.get_logger() 16 | 17 | 18 | @dataclass 19 | class QueueFolders: 20 | inbox: str = "INBOX" 21 | done: str = "Archive" 22 | error: str = "Invalid" 23 | 24 | 25 | class ImapQueue: 26 | def __init__( 27 | self, 28 | *, 29 | connection: ConnectionConfig, 30 | folders: QueueFolders = QueueFolders(), 31 | poll_interval_seconds: int = 60, 32 | timeout_seconds: int = 60, 33 | ): 34 | self.connection = connection 35 | self.folders = folders 36 | self.poll_interval_seconds = poll_interval_seconds 37 | self.timeout_seconds = timeout_seconds 38 | self._client = ImapClient(connection, 
timeout_seconds) 39 | self._stop: Optional[asyncio.Event] = None 40 | self._poll_task: Optional[Task[Any]] = None 41 | 42 | def consume(self, handler: Callable[[Any], Awaitable[None]]): 43 | self._stop = asyncio.Event() 44 | self._poll_task = asyncio.create_task(self._poll_imap(handler)) 45 | 46 | async def _poll_imap(self, handler: Callable[[Any], Awaitable[None]]): 47 | log = logger.bind(logger=self.__class__.__name__) 48 | try: 49 | while self._stop is not None and not self._stop.is_set(): 50 | await log.adebug("Polling IMAP ...") 51 | try: 52 | await self._process_new_messages(handler) 53 | except ( # pylint: disable=broad-except 54 | asyncio.TimeoutError, 55 | Exception, 56 | ): 57 | await log.aexception("Error during message processing.") 58 | await log.adebug( 59 | "Going to sleep for until next poll.", 60 | poll_interval_seconds=self.poll_interval_seconds, 61 | ) 62 | with contextlib.suppress(asyncio.TimeoutError): 63 | await asyncio.wait_for( 64 | self._stop.wait(), self.poll_interval_seconds 65 | ) 66 | except Exception: # pylint: disable=broad-except 67 | await log.aexception("Error in IMAP queue polling function.") 68 | return 69 | 70 | async def _process_new_messages(self, handler: Callable[[Any], Awaitable[None]]): 71 | log = logger.bind() 72 | async with ImapClient(self.connection, self.timeout_seconds) as client: 73 | for folder in astuple(self.folders): 74 | await client.create_if_not_exists(folder) 75 | 76 | msg_count = await client.select(self.folders.inbox) 77 | await log.adebug("Messages to fetch.", msg_count=msg_count) 78 | if msg_count > 0: 79 | fetch_task = asyncio.create_task( 80 | client.fetch( 81 | b"1:" + str(msg_count).encode("ascii"), b"(UID RFC822)" 82 | ) 83 | ) 84 | while not fetch_task.done() or not client.fetched_queue.empty(): 85 | fetched = await client.fetched_queue.get() 86 | uid, msg = self._extract_uid_and_msg(fetched) 87 | if uid is None: 88 | await log.awarning("Failed to extract UID.", message=fetched[0]) 89 | elif msg is 
None: 90 | await log.awarning( 91 | "Failed to extract RFC822 message for message.", 92 | message=fetched[0], 93 | uid=uid, 94 | ) 95 | else: 96 | try: 97 | await asyncio.gather( 98 | log.adebug("Processing message.", uid=uid), 99 | handler(msg), 100 | ) 101 | except Exception: # pylint: disable=broad-except 102 | await log.aexception( 103 | "Handler for message in IMAP queue failed." 104 | ) 105 | await client.uid_move_graceful(uid, self.folders.error) 106 | else: 107 | await client.uid_move_graceful(uid, self.folders.done) 108 | await log.adebug("Processed all messages.") 109 | await fetch_task 110 | 111 | @classmethod 112 | def _extract_uid_and_msg( 113 | cls, parsed_response: ParseResult 114 | ) -> Tuple[Optional[int], Optional[EmailMessage]]: 115 | uid, msg = None, None 116 | if parsed_response[1] == b"FETCH": 117 | mail_body = None 118 | for key, value in cast(Iterable[Tuple[Any, Any]], parsed_response[2]): 119 | if key == b"UID": 120 | uid = cast(int, value) 121 | elif key == b"RFC822": 122 | mail_body = value 123 | if uid and mail_body: 124 | msg = cast( 125 | EmailMessage, 126 | BytesParser(policy=email.policy.default).parsebytes(mail_body), 127 | ) 128 | return uid, msg 129 | 130 | async def stop_consumer(self): 131 | if self._stop is not None: 132 | self._stop.set() 133 | await self._poll_task 134 | self._stop = None 135 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/logging.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import logging.config 3 | from typing import Any, Dict, Union, cast 4 | 5 | import structlog 6 | 7 | 8 | def configure_logging(overrides: dict, *, debug: bool): 9 | log_level = ( 10 | logging.DEBUG 11 | if debug 12 | else parse_log_level(overrides.get("root", {}).get("level", logging.INFO)) 13 | ) 14 | 15 | structlog.configure( 16 | processors=[ 17 | structlog.contextvars.merge_contextvars, 18 | 
structlog.processors.add_log_level, 19 | structlog.dev.set_exc_info, 20 | structlog.processors.format_exc_info, 21 | structlog.stdlib.ProcessorFormatter.wrap_for_formatter, 22 | ], 23 | wrapper_class=structlog.make_filtering_bound_logger(log_level), 24 | logger_factory=structlog.stdlib.LoggerFactory(), 25 | cache_logger_on_first_use=True, 26 | ) 27 | 28 | foreign_pre_chain = [ 29 | structlog.stdlib.add_log_level, 30 | structlog.stdlib.add_logger_name, 31 | structlog.stdlib.ExtraAdder(), 32 | structlog.processors.format_exc_info, 33 | ] 34 | console_processors = [ 35 | structlog.stdlib.ProcessorFormatter.remove_processors_meta, 36 | structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False), 37 | ] 38 | json_processors = [ 39 | structlog.stdlib.ProcessorFormatter.remove_processors_meta, 40 | structlog.processors.TimeStamper(fmt="iso"), 41 | structlog.processors.dict_tracebacks, 42 | structlog.processors.JSONRenderer(), 43 | ] 44 | 45 | logging_config: Dict[str, Any] = { 46 | "handlers": { 47 | "default": { 48 | "class": "logging.StreamHandler", 49 | "formatter": "colored", 50 | }, 51 | }, 52 | "root": {}, 53 | } 54 | logging_config.update(overrides) 55 | logging_config.update( 56 | { 57 | "version": 1, 58 | "incremental": False, 59 | "formatters": { 60 | "plain": { 61 | "()": structlog.stdlib.ProcessorFormatter, 62 | "processors": console_processors 63 | + [ 64 | structlog.dev.ConsoleRenderer(colors=False), 65 | ], 66 | "foreign_pre_chain": foreign_pre_chain, 67 | }, 68 | "colored": { 69 | "()": structlog.stdlib.ProcessorFormatter, 70 | "processors": console_processors 71 | + [structlog.dev.ConsoleRenderer(colors=True)], 72 | "foreign_pre_chain": foreign_pre_chain, 73 | }, 74 | "json": { 75 | "()": structlog.stdlib.ProcessorFormatter, 76 | "processors": json_processors, 77 | "foreign_pre_chain": foreign_pre_chain, 78 | }, 79 | }, 80 | } 81 | ) 82 | root = cast(dict, logging_config["root"]) 83 | if "handlers" not in root: 84 | root.update({"handlers": 
["default"]}) 85 | root.update({"level": log_level}) 86 | logging.config.dictConfig(logging_config) 87 | 88 | 89 | def parse_log_level(level: Union[str, int]) -> int: 90 | if isinstance(level, str): 91 | level = level.lower() 92 | if level == "debug": 93 | return logging.DEBUG 94 | if level == "info": 95 | return logging.INFO 96 | if level == "warning": 97 | return logging.WARNING 98 | if level == "error": 99 | return logging.ERROR 100 | if level == "critical": 101 | return logging.CRITICAL 102 | raise ValueError("invalid log level") 103 | return level 104 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/metrics_persister.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | from typing import Any, List, Tuple 4 | 5 | from dataclasses_serialization.json import JSONSerializer 6 | 7 | from dmarc_metrics_exporter.dmarc_event import Disposition, Meta 8 | 9 | from .dmarc_metrics import DmarcMetrics, DmarcMetricsCollection, InvalidMeta 10 | 11 | 12 | # false positive, pylint: disable=no-value-for-parameter 13 | @JSONSerializer.register_serializer(Disposition) 14 | def disposition_serializer(disposition: Disposition) -> str: 15 | return disposition.value 16 | 17 | 18 | @JSONSerializer.register_serializer(DmarcMetricsCollection) 19 | def dmarc_metrics_collection_serializer( 20 | metrics: DmarcMetricsCollection, 21 | ) -> List[Tuple[Any, Any]]: 22 | return JSONSerializer.serialize( 23 | { 24 | "metrics": [list(item) for item in metrics.items()], 25 | "invalid_reports": [list(item) for item in metrics.invalid_reports.items()], 26 | } 27 | ) 28 | 29 | 30 | @JSONSerializer.register_deserializer(Disposition) 31 | def disposition_deserializer(_cls, obj: str) -> Disposition: 32 | return Disposition(obj) 33 | 34 | 35 | @JSONSerializer.register_deserializer(DmarcMetricsCollection) 36 | def dmarc_metrics_collection_deserializer(_cls, obj) -> 
DmarcMetricsCollection: 37 | is_old_format = isinstance(obj, list) 38 | if is_old_format: 39 | obj = {"metrics": obj} 40 | return DmarcMetricsCollection( 41 | dict( 42 | ( 43 | JSONSerializer.deserialize(Meta, meta), 44 | JSONSerializer.deserialize(DmarcMetrics, metrics), 45 | ) 46 | for meta, metrics in obj.get("metrics", tuple()) 47 | ), 48 | dict( 49 | (JSONSerializer.deserialize(InvalidMeta, meta), count) 50 | for meta, count in obj.get("invalid_reports", tuple()) 51 | ), 52 | ) 53 | 54 | 55 | class MetricsPersister: 56 | def __init__(self, path: Path): 57 | self.path = path 58 | 59 | def load(self) -> DmarcMetricsCollection: 60 | try: 61 | with open(self.path, "r", encoding="utf-8") as f: 62 | return JSONSerializer.deserialize(DmarcMetricsCollection, json.load(f)) 63 | except FileNotFoundError: 64 | return DmarcMetricsCollection() 65 | 66 | def save(self, metrics: DmarcMetricsCollection): 67 | with open(self.path, "w", encoding="utf-8") as f: 68 | json.dump(JSONSerializer.serialize(metrics), f) 69 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/model/__init__.py: -------------------------------------------------------------------------------- 1 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 2 | AlignmentType as AlignmentType, 3 | ) 4 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 5 | AuthResultType as AuthResultType, 6 | ) 7 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 8 | DateRangeType as DateRangeType, 9 | ) 10 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 11 | DispositionType as DispositionType, 12 | ) 13 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 14 | DkimauthResultType as DkimauthResultType, 15 | ) 16 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 17 | DkimresultType as DkimresultType, 18 | ) 19 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 20 | 
DmarcresultType as DmarcresultType, 21 | ) 22 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 23 | Feedback as Feedback, 24 | ) 25 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 26 | IdentifierType as IdentifierType, 27 | ) 28 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 29 | PolicyEvaluatedType as PolicyEvaluatedType, 30 | ) 31 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 32 | PolicyOverrideReason as PolicyOverrideReason, 33 | ) 34 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 35 | PolicyOverrideType as PolicyOverrideType, 36 | ) 37 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 38 | PolicyPublishedType as PolicyPublishedType, 39 | ) 40 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 41 | RecordType as RecordType, 42 | ) 43 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 44 | ReportMetadataType as ReportMetadataType, 45 | ) 46 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 47 | RowType as RowType, 48 | ) 49 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 50 | SpfauthResultType as SpfauthResultType, 51 | ) 52 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 53 | SpfdomainScope as SpfdomainScope, 54 | ) 55 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import ( 56 | SpfresultType as SpfresultType, 57 | ) 58 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/model/dmarc_aggregate_report.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from decimal import Decimal 3 | from enum import Enum 4 | from typing import List, Optional 5 | 6 | __NAMESPACE__ = "http://dmarc.org/dmarc-xml/0.1" 7 | 8 | 9 | class AlignmentType(Enum): 10 | R = "r" 11 | S = "s" 12 | 13 | 14 | class DkimresultType(Enum): 15 | 
NONE_VALUE = "none" 16 | PASS_VALUE = "pass" 17 | FAIL = "fail" 18 | POLICY = "policy" 19 | NEUTRAL = "neutral" 20 | TEMPERROR = "temperror" 21 | PERMERROR = "permerror" 22 | 23 | 24 | class DmarcresultType(Enum): 25 | PASS_VALUE = "pass" 26 | FAIL = "fail" 27 | 28 | 29 | @dataclass 30 | class DateRangeType: 31 | begin: Optional[int] = field( 32 | default=None, 33 | metadata={ 34 | "type": "Element", 35 | "namespace": "", 36 | "required": True, 37 | }, 38 | ) 39 | end: Optional[int] = field( 40 | default=None, 41 | metadata={ 42 | "type": "Element", 43 | "namespace": "", 44 | "required": True, 45 | }, 46 | ) 47 | 48 | 49 | class DispositionType(Enum): 50 | NONE_VALUE = "none" 51 | QUARANTINE = "quarantine" 52 | REJECT = "reject" 53 | 54 | 55 | @dataclass 56 | class IdentifierType: 57 | envelope_to: Optional[str] = field( 58 | default=None, 59 | metadata={ 60 | "type": "Element", 61 | "namespace": "", 62 | }, 63 | ) 64 | envelope_from: Optional[str] = field( 65 | default=None, 66 | metadata={ 67 | "type": "Element", 68 | "namespace": "", 69 | "required": True, 70 | }, 71 | ) 72 | header_from: Optional[str] = field( 73 | default=None, 74 | metadata={ 75 | "type": "Element", 76 | "namespace": "", 77 | "required": True, 78 | }, 79 | ) 80 | 81 | 82 | class PolicyOverrideType(Enum): 83 | FORWARDED = "forwarded" 84 | SAMPLED_OUT = "sampled_out" 85 | TRUSTED_FORWARDER = "trusted_forwarder" 86 | MAILING_LIST = "mailing_list" 87 | LOCAL_POLICY = "local_policy" 88 | OTHER = "other" 89 | 90 | 91 | class SpfdomainScope(Enum): 92 | HELO = "helo" 93 | MFROM = "mfrom" 94 | 95 | 96 | class SpfresultType(Enum): 97 | NONE_VALUE = "none" 98 | NEUTRAL = "neutral" 99 | PASS_VALUE = "pass" 100 | FAIL = "fail" 101 | SOFTFAIL = "softfail" 102 | TEMPERROR = "temperror" 103 | PERMERROR = "permerror" 104 | 105 | 106 | @dataclass 107 | class DkimauthResultType: 108 | class Meta: 109 | name = "DKIMAuthResultType" 110 | 111 | domain: Optional[str] = field( 112 | default=None, 113 | metadata={ 
114 | "type": "Element", 115 | "namespace": "", 116 | "required": True, 117 | }, 118 | ) 119 | selector: Optional[str] = field( 120 | default=None, 121 | metadata={ 122 | "type": "Element", 123 | "namespace": "", 124 | }, 125 | ) 126 | result: Optional[DkimresultType] = field( 127 | default=None, 128 | metadata={ 129 | "type": "Element", 130 | "namespace": "", 131 | "required": True, 132 | }, 133 | ) 134 | human_result: Optional[str] = field( 135 | default=None, 136 | metadata={ 137 | "type": "Element", 138 | "namespace": "", 139 | }, 140 | ) 141 | 142 | 143 | @dataclass 144 | class PolicyOverrideReason: 145 | type: Optional[PolicyOverrideType] = field( 146 | default=None, 147 | metadata={ 148 | "type": "Element", 149 | "namespace": "", 150 | "required": True, 151 | }, 152 | ) 153 | comment: Optional[str] = field( 154 | default=None, 155 | metadata={ 156 | "type": "Element", 157 | "namespace": "", 158 | }, 159 | ) 160 | 161 | 162 | @dataclass 163 | class PolicyPublishedType: 164 | domain: Optional[str] = field( 165 | default=None, 166 | metadata={ 167 | "type": "Element", 168 | "namespace": "", 169 | "required": True, 170 | }, 171 | ) 172 | adkim: Optional[AlignmentType] = field( 173 | default=None, 174 | metadata={ 175 | "type": "Element", 176 | "namespace": "", 177 | }, 178 | ) 179 | aspf: Optional[AlignmentType] = field( 180 | default=None, 181 | metadata={ 182 | "type": "Element", 183 | "namespace": "", 184 | }, 185 | ) 186 | p: Optional[DispositionType] = field( 187 | default=None, 188 | metadata={ 189 | "type": "Element", 190 | "namespace": "", 191 | "required": True, 192 | }, 193 | ) 194 | sp: Optional[DispositionType] = field( 195 | default=None, 196 | metadata={ 197 | "type": "Element", 198 | "namespace": "", 199 | "required": True, 200 | }, 201 | ) 202 | pct: Optional[int] = field( 203 | default=None, 204 | metadata={ 205 | "type": "Element", 206 | "namespace": "", 207 | "required": True, 208 | }, 209 | ) 210 | fo: Optional[str] = field( 211 | 
default=None, 212 | metadata={ 213 | "type": "Element", 214 | "namespace": "", 215 | "required": True, 216 | }, 217 | ) 218 | 219 | 220 | @dataclass 221 | class ReportMetadataType: 222 | org_name: Optional[str] = field( 223 | default=None, 224 | metadata={ 225 | "type": "Element", 226 | "namespace": "", 227 | "required": True, 228 | }, 229 | ) 230 | email: Optional[str] = field( 231 | default=None, 232 | metadata={ 233 | "type": "Element", 234 | "namespace": "", 235 | "required": True, 236 | }, 237 | ) 238 | extra_contact_info: Optional[str] = field( 239 | default=None, 240 | metadata={ 241 | "type": "Element", 242 | "namespace": "", 243 | }, 244 | ) 245 | report_id: Optional[str] = field( 246 | default=None, 247 | metadata={ 248 | "type": "Element", 249 | "namespace": "", 250 | "required": True, 251 | }, 252 | ) 253 | date_range: Optional[DateRangeType] = field( 254 | default=None, 255 | metadata={ 256 | "type": "Element", 257 | "namespace": "", 258 | "required": True, 259 | }, 260 | ) 261 | error: List[str] = field( 262 | default_factory=list, 263 | metadata={ 264 | "type": "Element", 265 | "namespace": "", 266 | }, 267 | ) 268 | 269 | 270 | @dataclass 271 | class SpfauthResultType: 272 | class Meta: 273 | name = "SPFAuthResultType" 274 | 275 | domain: Optional[str] = field( 276 | default=None, 277 | metadata={ 278 | "type": "Element", 279 | "namespace": "", 280 | "required": True, 281 | }, 282 | ) 283 | scope: Optional[SpfdomainScope] = field( 284 | default=None, 285 | metadata={ 286 | "type": "Element", 287 | "namespace": "", 288 | "required": True, 289 | }, 290 | ) 291 | result: Optional[SpfresultType] = field( 292 | default=None, 293 | metadata={ 294 | "type": "Element", 295 | "namespace": "", 296 | "required": True, 297 | }, 298 | ) 299 | 300 | 301 | @dataclass 302 | class AuthResultType: 303 | dkim: List[DkimauthResultType] = field( 304 | default_factory=list, 305 | metadata={ 306 | "type": "Element", 307 | "namespace": "", 308 | }, 309 | ) 310 | spf: 
List[SpfauthResultType] = field( 311 | default_factory=list, 312 | metadata={ 313 | "type": "Element", 314 | "namespace": "", 315 | "min_occurs": 1, 316 | }, 317 | ) 318 | 319 | 320 | @dataclass 321 | class PolicyEvaluatedType: 322 | disposition: Optional[DispositionType] = field( 323 | default=None, 324 | metadata={ 325 | "type": "Element", 326 | "namespace": "", 327 | "required": True, 328 | }, 329 | ) 330 | dkim: Optional[DmarcresultType] = field( 331 | default=None, 332 | metadata={ 333 | "type": "Element", 334 | "namespace": "", 335 | "required": True, 336 | }, 337 | ) 338 | spf: Optional[DmarcresultType] = field( 339 | default=None, 340 | metadata={ 341 | "type": "Element", 342 | "namespace": "", 343 | "required": True, 344 | }, 345 | ) 346 | reason: List[PolicyOverrideReason] = field( 347 | default_factory=list, 348 | metadata={ 349 | "type": "Element", 350 | "namespace": "", 351 | }, 352 | ) 353 | 354 | 355 | @dataclass 356 | class RowType: 357 | source_ip: Optional[str] = field( 358 | default=None, 359 | metadata={ 360 | "type": "Element", 361 | "namespace": "", 362 | "required": True, 363 | "pattern": r"((1?[0-9]?[0-9]|2[0-4][0-9]|25[0-5]).){3} (1?[0-9]?[0-9]|2[0-4][0-9]|25[0-5])| ([A-Fa-f0-9]{1,4}:){7}[A-Fa-f0-9]{1,4}", 364 | }, 365 | ) 366 | count: Optional[int] = field( 367 | default=None, 368 | metadata={ 369 | "type": "Element", 370 | "namespace": "", 371 | "required": True, 372 | }, 373 | ) 374 | policy_evaluated: Optional[PolicyEvaluatedType] = field( 375 | default=None, 376 | metadata={ 377 | "type": "Element", 378 | "namespace": "", 379 | "required": True, 380 | }, 381 | ) 382 | 383 | 384 | @dataclass 385 | class RecordType: 386 | row: Optional[RowType] = field( 387 | default=None, 388 | metadata={ 389 | "type": "Element", 390 | "namespace": "", 391 | "required": True, 392 | }, 393 | ) 394 | identifiers: Optional[IdentifierType] = field( 395 | default=None, 396 | metadata={ 397 | "type": "Element", 398 | "namespace": "", 399 | "required": True, 
400 | }, 401 | ) 402 | auth_results: Optional[AuthResultType] = field( 403 | default=None, 404 | metadata={ 405 | "type": "Element", 406 | "namespace": "", 407 | "required": True, 408 | }, 409 | ) 410 | 411 | 412 | @dataclass 413 | class Feedback: 414 | class Meta: 415 | name = "feedback" 416 | namespace = "http://dmarc.org/dmarc-xml/0.1" 417 | 418 | version: Optional[Decimal] = field( 419 | default=None, 420 | metadata={ 421 | "type": "Element", 422 | "namespace": "", 423 | "required": True, 424 | }, 425 | ) 426 | report_metadata: Optional[ReportMetadataType] = field( 427 | default=None, 428 | metadata={ 429 | "type": "Element", 430 | "namespace": "", 431 | "required": True, 432 | }, 433 | ) 434 | policy_published: Optional[PolicyPublishedType] = field( 435 | default=None, 436 | metadata={ 437 | "type": "Element", 438 | "namespace": "", 439 | "required": True, 440 | }, 441 | ) 442 | record: List[RecordType] = field( 443 | default_factory=list, 444 | metadata={ 445 | "type": "Element", 446 | "namespace": "", 447 | "min_occurs": 1, 448 | }, 449 | ) 450 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/model/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jgosmann/dmarc-metrics-exporter/d1d3912e9aed14be28c75467b1726d5a237e1777/dmarc_metrics_exporter/model/tests/__init__.py -------------------------------------------------------------------------------- /dmarc_metrics_exporter/model/tests/sample_data.py: -------------------------------------------------------------------------------- 1 | import dmarc_metrics_exporter.model as m 2 | 3 | 4 | def create_sample_xml(*, report_id: str = "12598866915817748661") -> str: 5 | return f""" 6 | 7 | 8 | 9 | google.com 10 | noreply-dmarc-support@google.com 11 | https://support.google.com/a/answer/2466580 12 | {report_id} 13 | 14 | 1607299200 15 | 1607385599 16 | 17 | 18 | 19 | mydomain.de 20 
| r 21 | r 22 |

none

23 | none 24 | 100 25 | none 26 |
27 | 28 | 29 | dead:beef:1:abc:: 30 | 1 31 | 32 | none 33 | pass 34 | fail 35 | 36 | 37 | 38 | mydomain.de 39 | 40 | 41 | 42 | mydomain.de 43 | pass 44 | default 45 | 46 | 47 | my-spf-domain.de 48 | pass 49 | 50 | 51 | 52 |
53 | """.strip() 54 | 55 | 56 | SAMPLE_DATACLASS = m.Feedback( 57 | report_metadata=m.ReportMetadataType( 58 | org_name="google.com", 59 | email="noreply-dmarc-support@google.com", 60 | extra_contact_info="https://support.google.com/a/answer/2466580", 61 | report_id="12598866915817748661", 62 | date_range=m.DateRangeType( 63 | begin=1607299200, 64 | end=1607385599, 65 | ), 66 | ), 67 | policy_published=m.PolicyPublishedType( 68 | domain="mydomain.de", 69 | adkim=m.AlignmentType.R, 70 | aspf=m.AlignmentType.R, 71 | p=m.DispositionType.NONE_VALUE, 72 | sp=m.DispositionType.NONE_VALUE, 73 | pct=100, 74 | ), 75 | record=[ 76 | m.RecordType( 77 | row=m.RowType( 78 | source_ip="dead:beef:1:abc::", 79 | count=1, 80 | policy_evaluated=m.PolicyEvaluatedType( 81 | disposition=m.DispositionType.NONE_VALUE, 82 | dkim=m.DmarcresultType.PASS_VALUE, 83 | spf=m.DmarcresultType.FAIL, 84 | ), 85 | ), 86 | identifiers=m.IdentifierType( 87 | header_from="mydomain.de", 88 | ), 89 | auth_results=m.AuthResultType( 90 | dkim=[ 91 | m.DkimauthResultType( 92 | domain="mydomain.de", 93 | result=m.DkimresultType.PASS_VALUE, 94 | selector="default", 95 | ) 96 | ], 97 | spf=[ 98 | m.SpfauthResultType( 99 | domain="my-spf-domain.de", result=m.SpfresultType.PASS_VALUE 100 | ) 101 | ], 102 | ), 103 | ) 104 | ], 105 | ) 106 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/model/tests/test_deserialization.py: -------------------------------------------------------------------------------- 1 | from xsdata.formats.dataclass.context import XmlContext 2 | from xsdata.formats.dataclass.parsers import XmlParser 3 | from xsdata.formats.dataclass.parsers.config import ParserConfig 4 | 5 | from dmarc_metrics_exporter.model.dmarc_aggregate_report import Feedback 6 | 7 | from .sample_data import SAMPLE_DATACLASS, create_sample_xml 8 | 9 | 10 | def test_deserialization(): 11 | parser = XmlParser( 12 | context=XmlContext(), 
config=ParserConfig(fail_on_unknown_properties=False) 13 | ) 14 | assert parser.from_string(create_sample_xml(), Feedback) == SAMPLE_DATACLASS 15 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/prometheus_exporter.py: -------------------------------------------------------------------------------- 1 | import threading 2 | from contextlib import contextmanager 3 | from typing import Any, Generator, Iterable, Tuple 4 | 5 | import uvicorn 6 | from prometheus_client.core import REGISTRY, CounterMetricFamily, GaugeMetricFamily 7 | from prometheus_client.exposition import make_asgi_app 8 | 9 | import dmarc_metrics_exporter 10 | from dmarc_metrics_exporter.dmarc_event import Disposition 11 | from dmarc_metrics_exporter.dmarc_metrics import DmarcMetricsCollection 12 | 13 | 14 | class Server: 15 | def __init__(self, exporter: "PrometheusExporter", listen_addr: str, port: int): 16 | self.exporter = exporter 17 | config = uvicorn.Config( 18 | make_asgi_app(), host=listen_addr, port=port, log_config=None 19 | ) 20 | self.server = uvicorn.Server(config) 21 | self.host = config.host 22 | self.port = port 23 | self._main_loop = None 24 | 25 | async def __aenter__(self): 26 | REGISTRY.register(self.exporter) 27 | config = self.server.config 28 | if not config.loaded: 29 | config.load() 30 | self.server.lifespan = config.lifespan_class(config) 31 | await self.server.startup() 32 | self._main_loop = self.server.main_loop() 33 | return self 34 | 35 | async def __aexit__(self, exc_type, exc, traceback): 36 | self.server.should_exit = True 37 | await self._main_loop 38 | self._main_loop = None 39 | await self.server.shutdown() 40 | REGISTRY.unregister(self.exporter) 41 | 42 | 43 | class PrometheusExporter: 44 | LABELS = ("reporter", "from_domain", "dkim_domain", "spf_domain") 45 | INVALID_LABELS = ("from_email",) 46 | 47 | def __init__(self, metrics: DmarcMetricsCollection): 48 | self._metrics_lock = threading.Lock() 49 | 
self._metrics = metrics 50 | 51 | def start_server(self, listen_addr="127.0.0.1", port=9797) -> Server: 52 | return Server(self, listen_addr, port) 53 | 54 | @contextmanager 55 | def get_metrics(self) -> Generator[DmarcMetricsCollection, None, None]: 56 | with self._metrics_lock: 57 | yield self._metrics 58 | 59 | def collect(self) -> Tuple[Any, ...]: 60 | build_info = GaugeMetricFamily( 61 | "dmarc_metrics_exporter_build_info", 62 | "A metric with a constant '1' value labeled by version of the dmarc-metrics-exporter.", 63 | labels=("version",), 64 | ) 65 | build_info.add_metric((dmarc_metrics_exporter.__version__,), 1.0) 66 | 67 | dmarc_total = CounterMetricFamily( 68 | "dmarc_total", "Total number of reported messages.", labels=self.LABELS 69 | ) 70 | dmarc_compliant_total = CounterMetricFamily( 71 | "dmarc_compliant_total", 72 | "Total number of DMARC compliant messages.", 73 | labels=self.LABELS, 74 | ) 75 | dmarc_quarantine_total = CounterMetricFamily( 76 | "dmarc_quarantine_total", 77 | "Total number of quarantined messages.", 78 | labels=self.LABELS, 79 | ) 80 | dmarc_reject_total = CounterMetricFamily( 81 | "dmarc_reject_total", 82 | "Total number of rejected messages.", 83 | labels=self.LABELS, 84 | ) 85 | dmarc_spf_aligned_total = CounterMetricFamily( 86 | "dmarc_spf_aligned_total", 87 | "Total number of SPF aligned messages.", 88 | labels=self.LABELS, 89 | ) 90 | dmarc_spf_pass_total = CounterMetricFamily( 91 | "dmarc_spf_pass_total", 92 | "Total number of messages with raw SPF pass.", 93 | labels=self.LABELS, 94 | ) 95 | dmarc_dkim_aligned_total = CounterMetricFamily( 96 | "dmarc_dkim_aligned_total", 97 | "Total number of DKIM aligned messages.", 98 | labels=self.LABELS, 99 | ) 100 | dmarc_dkim_pass_total = CounterMetricFamily( 101 | "dmarc_dkim_pass_total", 102 | "Total number of messages with raw DKIM pass.", 103 | labels=self.LABELS, 104 | ) 105 | dmarc_invalid_reports_total = CounterMetricFamily( 106 | "dmarc_invalid_reports_total", 107 | "Total
number of report emails from which no report could be parsed.", 108 | labels=self.INVALID_LABELS, 109 | ) 110 | 111 | with self._metrics_lock: 112 | for meta, metrics in self._metrics.items(): 113 | labels = self._meta2labels(meta, self.LABELS) 114 | dmarc_total.add_metric(labels, metrics.total_count) 115 | dmarc_compliant_total.add_metric(labels, metrics.dmarc_compliant_count) 116 | dmarc_quarantine_total.add_metric( 117 | labels, metrics.disposition_counts.get(Disposition.QUARANTINE, 0) 118 | ) 119 | dmarc_reject_total.add_metric( 120 | labels, metrics.disposition_counts.get(Disposition.REJECT, 0) 121 | ) 122 | dmarc_spf_aligned_total.add_metric(labels, metrics.spf_aligned_count) 123 | dmarc_spf_pass_total.add_metric(labels, metrics.spf_pass_count) 124 | dmarc_dkim_aligned_total.add_metric(labels, metrics.dkim_aligned_count) 125 | dmarc_dkim_pass_total.add_metric(labels, metrics.dkim_pass_count) 126 | for meta, count in self._metrics.invalid_reports.items(): 127 | labels = self._meta2labels(meta, self.INVALID_LABELS) 128 | dmarc_invalid_reports_total.add_metric(labels, count) 129 | 130 | return ( 131 | build_info, 132 | dmarc_total, 133 | dmarc_compliant_total, 134 | dmarc_quarantine_total, 135 | dmarc_reject_total, 136 | dmarc_spf_aligned_total, 137 | dmarc_spf_pass_total, 138 | dmarc_dkim_aligned_total, 139 | dmarc_dkim_pass_total, 140 | dmarc_invalid_reports_total, 141 | ) 142 | 143 | @staticmethod 144 | def _meta2labels(meta: object, labels: Iterable[str]) -> Tuple[str, ...]: 145 | return tuple(getattr(meta, label) for label in labels) 146 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jgosmann/dmarc-metrics-exporter/d1d3912e9aed14be28c75467b1726d5a237e1777/dmarc_metrics_exporter/tests/__init__.py
-------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import smtplib 3 | import ssl 4 | import time 5 | from dataclasses import astuple, dataclass 6 | from email.message import EmailMessage 7 | from typing import Any, Awaitable, Callable, Union 8 | 9 | import pytest 10 | import requests 11 | 12 | from dmarc_metrics_exporter.imap_client import ImapClient 13 | from dmarc_metrics_exporter.imap_queue import ConnectionConfig 14 | 15 | 16 | @dataclass 17 | class NetworkAddress: 18 | host: str 19 | port: int 20 | 21 | 22 | @dataclass 23 | class Greenmail: 24 | smtp: NetworkAddress 25 | imap: ConnectionConfig 26 | api: NetworkAddress 27 | 28 | @property 29 | def api_url(self) -> str: 30 | return f"http://{self.api.host}:{self.api.port}/api" 31 | 32 | def is_ready(self) -> bool: 33 | return ( 34 | requests.get(f"{self.api_url}/service/readiness", timeout=1).status_code 35 | == requests.codes.ok 36 | ) 37 | 38 | def purge_mails(self): 39 | requests.post(f"{self.api_url}/mail/purge", timeout=5).raise_for_status() 40 | 41 | async def restart(self): 42 | requests.post(f"{self.api_url}/service/reset", timeout=5) 43 | await try_until_success(self.is_ready) 44 | 45 | 46 | @pytest.fixture(name="greenmail") 47 | def fixture_greenmail() -> Greenmail: 48 | greenmail = Greenmail( 49 | smtp=NetworkAddress("localhost", 3025), 50 | imap=ConnectionConfig( 51 | host="localhost", 52 | port=3993, 53 | username="queue@localhost", 54 | password="password", 55 | use_ssl=True, 56 | verify_certificate=False, 57 | # The Java SSL/TLS implementation that Greenmail is relying on 58 | # is sending a "user_cancelled" alert before the "close_notify" 59 | # when logging out. This is in violation of the TLS spec and 60 | # OpenSSL (used by Python) will give an error for this, when 61 | # using TLS 1.3, starting with OpenSSL 3.2.
62 | # This has been previously reported as bug JDK-8282600, but the 63 | # fix only omits the "user_cancelled" alert if the client already 64 | # closed its end of the TLS connection. This we cannot easily do 65 | # as we first want to receive the IMAP server's confirmation of the 66 | # logout. It would also require to handle the SSL/TLS connection 67 | # on a lower level where we can actually control sending the 68 | # "close_notify" alert. 69 | # 70 | # JDK-8282600: https://bugs.openjdk.org/browse/JDK-8282600 71 | tls_maximum_version=ssl.TLSVersion.TLSv1_2, 72 | ), 73 | api=NetworkAddress("localhost", 8080), 74 | ) 75 | greenmail.purge_mails() 76 | return greenmail 77 | 78 | 79 | async def try_until_success( 80 | function: Union[Callable[[], Awaitable], Callable[[], Any]], 81 | timeout_seconds: int = 10, 82 | max_fn_duration_seconds: int = 1, 83 | poll_interval_seconds: float = 0.1, 84 | ): 85 | timeout = time.time() + timeout_seconds 86 | last_err = None 87 | while time.time() < timeout: 88 | try: 89 | result = function() 90 | if hasattr(result, "__await__"): 91 | return await asyncio.wait_for(result, max_fn_duration_seconds) 92 | else: 93 | return result 94 | except asyncio.TimeoutError as err: 95 | raise TimeoutError( 96 | f"Function execution duration exceeded {max_fn_duration_seconds} seconds." 97 | ) from err 98 | except Exception as err: # pylint: disable=broad-except 99 | last_err = err 100 | await asyncio.sleep(poll_interval_seconds) 101 | raise TimeoutError( 102 | f"Call to {function} not successful within {timeout_seconds} seconds." 
103 | ) from last_err 104 | 105 | 106 | async def send_email(msg: EmailMessage, network_address: NetworkAddress): 107 | smtp = smtplib.SMTP(*astuple(network_address)) 108 | smtp.send_message(msg) 109 | smtp.quit() 110 | 111 | 112 | async def verify_email_delivered(connection: ConnectionConfig, mailboxes=("INBOX",)): 113 | async with ImapClient(connection) as client: 114 | msg_counts = await asyncio.gather( 115 | *(client.select(mailbox) for mailbox in mailboxes) 116 | ) 117 | assert any(count > 0 for count in msg_counts) 118 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/sample_emails.py: -------------------------------------------------------------------------------- 1 | import io 2 | from email.message import EmailMessage 3 | from email.mime.application import MIMEApplication 4 | from email.mime.base import MIMEBase 5 | from email.mime.text import MIMEText 6 | from gzip import GzipFile 7 | from zipfile import ZipFile 8 | 9 | from dmarc_metrics_exporter.model.tests.sample_data import create_sample_xml 10 | 11 | 12 | def create_minimal_email(to="dmarc-feedback@mydomain.de", content=None): 13 | msg = EmailMessage() 14 | msg["Subject"] = "Minimal email" 15 | msg["From"] = "noreply-dmarc-support@google.com" 16 | msg["To"] = to 17 | if content: 18 | msg.set_content(content) 19 | return msg 20 | 21 | 22 | def create_xml_report(*, report_id="12598866915817748661") -> MIMEText: 23 | xml = MIMEText(create_sample_xml(report_id=report_id), "xml") 24 | xml.add_header( 25 | "Content-Disposition", 26 | "attachment", 27 | filename="reporter.com!localhost!1601510400!1601596799.xml", 28 | ) 29 | return xml 30 | 31 | 32 | def create_zip_report( 33 | *, report_id="12598866915817748661", subtype="zip" 34 | ) -> MIMEApplication: 35 | compressed = io.BytesIO() 36 | with ZipFile(compressed, "w") as zip_file: 37 | zip_file.writestr( 38 | "reporter.com!localhost!1601510400!1601596799.xml", 39 | 
create_sample_xml(report_id=report_id), 40 | ) 41 | 42 | zip_mime = MIMEApplication(compressed.getvalue(), subtype) 43 | zip_mime.add_header( 44 | "Content-Disposition", 45 | "attachment", 46 | filename="reporter.com!localhost!1601510400!1601596799.zip", 47 | ) 48 | return zip_mime 49 | 50 | 51 | def create_gzip_report( 52 | *, report_id="12598866915817748661", subtype="gzip" 53 | ) -> MIMEApplication: 54 | compressed = io.BytesIO() 55 | filename = "reporter.com!localhost!1601510400!1601596799.xml.gz" 56 | with GzipFile(filename, mode="wb", fileobj=compressed) as gzip_file: 57 | gzip_file.write(create_sample_xml(report_id=report_id).encode("utf-8")) 58 | 59 | gzip_mime = MIMEApplication(compressed.getvalue(), subtype) 60 | gzip_mime.add_header( 61 | "Content-Disposition", 62 | "attachment", 63 | filename="reporter.com!localhost!1601510400!1601596799.xml.gz", 64 | ) 65 | return gzip_mime 66 | 67 | 68 | def create_email_with_attachment( 69 | attachment: MIMEBase, *, to="dmarc-feedback@mydomain.de" 70 | ): 71 | msg = EmailMessage() 72 | msg.add_attachment(attachment) 73 | msg["Subject"] = "DMARC Aggregate Report" 74 | msg["From"] = "noreply-dmarc-support@google.com" 75 | msg["To"] = to 76 | return msg 77 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/test_app.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from dataclasses import dataclass, field, fields 3 | from typing import Tuple 4 | from unittest.mock import MagicMock 5 | 6 | import pytest 7 | 8 | from dmarc_metrics_exporter.app import App 9 | from dmarc_metrics_exporter.dmarc_metrics import DmarcMetricsCollection, InvalidMeta 10 | from dmarc_metrics_exporter.tests.sample_emails import ( 11 | create_email_with_attachment, 12 | create_minimal_email, 13 | create_zip_report, 14 | ) 15 | 16 | from .conftest import try_until_success 17 | 18 | 19 | class ServerMock: 20 | async def 
__aenter__(self): 21 | pass 22 | 23 | async def __aexit__(self, exc_type, exc, traceback): 24 | pass 25 | 26 | 27 | async def async_noop(): 28 | pass 29 | 30 | 31 | @dataclass 32 | class AppDependencies: 33 | prometheus_addr: Tuple[str, int] = ("127.0.0.1", 9797) 34 | exporter_cls: MagicMock = field(default_factory=MagicMock) 35 | metrics_persister: MagicMock = field(default_factory=MagicMock) 36 | imap_queue: MagicMock = field(default_factory=MagicMock) 37 | 38 | def as_flat_dict(self): 39 | return {field.name: getattr(self, field.name) for field in fields(self)} 40 | 41 | 42 | @dataclass 43 | class AppMocks: 44 | dependencies: AppDependencies = field(default_factory=AppDependencies) 45 | metrics: DmarcMetricsCollection = field(default_factory=DmarcMetricsCollection) 46 | metrics_provider: MagicMock = field(default_factory=MagicMock) 47 | exporter: MagicMock = field(default_factory=MagicMock) 48 | 49 | def __post_init__(self): 50 | self.metrics_provider.__enter__.return_value = self.metrics 51 | self.exporter.start_server.return_value = ServerMock() 52 | self.exporter.get_metrics.return_value = self.metrics_provider 53 | self.dependencies.exporter_cls.return_value = self.exporter 54 | self.dependencies.metrics_persister.load.return_value = self.metrics 55 | self.dependencies.imap_queue.stop_consumer.side_effect = async_noop 56 | 57 | 58 | @pytest.mark.asyncio 59 | async def test_loads_persisted_metrics_and_stores_them_on_shutdown(): 60 | mocks = AppMocks() 61 | app = App(autosave_interval_seconds=None, **mocks.dependencies.as_flat_dict()) 62 | main = asyncio.create_task(app.run()) 63 | 64 | try: 65 | await try_until_success( 66 | app.metrics_persister.load.assert_called_once, timeout_seconds=2 67 | ) 68 | finally: 69 | main.cancel() 70 | await main 71 | mocks.dependencies.metrics_persister.save.assert_called_once_with(mocks.metrics) 72 | 73 | 74 | @pytest.mark.asyncio 75 | async def test_metrics_autosave(): 76 | mocks = AppMocks() 77 | app = 
App(autosave_interval_seconds=0.5, **mocks.dependencies.as_flat_dict()) 78 | main = asyncio.create_task(app.run()) 79 | 80 | try: 81 | await asyncio.sleep(1) 82 | mocks.dependencies.metrics_persister.save.assert_called_with(mocks.metrics) 83 | finally: 84 | main.cancel() 85 | await main 86 | 87 | 88 | @pytest.mark.asyncio 89 | async def test_processes_duplicate_report_only_once(): 90 | mocks = AppMocks() 91 | app = App(autosave_interval_seconds=0.5, **mocks.dependencies.as_flat_dict()) 92 | email = create_email_with_attachment(create_zip_report()) 93 | 94 | await app.process_email(email) 95 | await app.process_email(email) 96 | 97 | assert sum(m.total_count for m in mocks.metrics.values()) == 1 98 | 99 | 100 | @pytest.mark.asyncio 101 | async def test_counts_failed_extractions(): 102 | mocks = AppMocks() 103 | app = App(autosave_interval_seconds=0.5, **mocks.dependencies.as_flat_dict()) 104 | email = create_minimal_email() 105 | 106 | await app.process_email(email) 107 | 108 | assert mocks.metrics.invalid_reports == {InvalidMeta(email["From"]): 1} 109 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/test_deserialization.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dmarc_metrics_exporter.deserialization import ( 4 | ReportExtractionError, 5 | convert_to_events, 6 | get_aggregate_report_from_email, 7 | ) 8 | from dmarc_metrics_exporter.dmarc_event import ( 9 | Disposition, 10 | DmarcEvent, 11 | DmarcResult, 12 | Meta, 13 | ) 14 | from dmarc_metrics_exporter.model.tests.sample_data import SAMPLE_DATACLASS 15 | from dmarc_metrics_exporter.tests.sample_emails import ( 16 | create_email_with_attachment, 17 | create_gzip_report, 18 | create_minimal_email, 19 | create_xml_report, 20 | create_zip_report, 21 | ) 22 | 23 | 24 | def test_extracts_plain_xml_from_email(): 25 | msg = create_email_with_attachment(create_xml_report()) 26 | assert 
list(get_aggregate_report_from_email(msg)) == [SAMPLE_DATACLASS] 27 | 28 | 29 | def test_extracts_zipped_xml_from_email(): 30 | msg = create_email_with_attachment(create_zip_report()) 31 | assert list(get_aggregate_report_from_email(msg)) == [SAMPLE_DATACLASS] 32 | 33 | 34 | def test_extracts_gzipped_xml_from_email(): 35 | msg = create_email_with_attachment(create_gzip_report()) 36 | assert list(get_aggregate_report_from_email(msg)) == [SAMPLE_DATACLASS] 37 | 38 | 39 | def test_extracts_zipped_xml_from_email_with_octet_stream_content_type(): 40 | msg = create_email_with_attachment(create_zip_report(subtype="octet-stream")) 41 | assert list(get_aggregate_report_from_email(msg)) == [SAMPLE_DATACLASS] 42 | 43 | 44 | def test_extracts_gzipped_xml_from_email_with_octet_stream_content_type(): 45 | msg = create_email_with_attachment(create_gzip_report(subtype="octet-stream")) 46 | assert list(get_aggregate_report_from_email(msg)) == [SAMPLE_DATACLASS] 47 | 48 | 49 | def test_returns_err_if_no_report_can_be_extracted(): 50 | msg = create_minimal_email() 51 | with pytest.raises(ReportExtractionError) as err: 52 | list(get_aggregate_report_from_email(msg)) 53 | assert err.value.msg is msg 54 | 55 | 56 | def test_convert_to_events(): 57 | assert list(convert_to_events(SAMPLE_DATACLASS)) == [ 58 | DmarcEvent( 59 | count=1, 60 | meta=Meta( 61 | reporter="google.com", 62 | from_domain="mydomain.de", 63 | dkim_domain="mydomain.de", 64 | spf_domain="my-spf-domain.de", 65 | ), 66 | result=DmarcResult( 67 | disposition=Disposition.NONE_VALUE, 68 | dkim_pass=True, 69 | dkim_aligned=True, 70 | spf_pass=True, 71 | spf_aligned=False, 72 | ), 73 | ) 74 | ] 75 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/test_dmarc_metrics.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dmarc_metrics_exporter.dmarc_event import ( 4 | Disposition, 5 | DmarcEvent, 6 | 
import pytest

from dmarc_metrics_exporter.dmarc_event import (
    Disposition,
    DmarcEvent,
    DmarcResult,
    Meta,
)
from dmarc_metrics_exporter.dmarc_metrics import (
    DmarcMetrics,
    DmarcMetricsCollection,
    InvalidMeta,
)


def test_dmarc_metrics_update():
    # Renamed from "test_dmarc_metrics_upate" (typo). pytest discovers tests
    # by the test_ prefix, so the rename is safe for the suite.
    """A single update() call increments every affected counter at once."""
    metrics = DmarcMetrics()
    metrics.update(
        2,
        DmarcResult(
            disposition=Disposition.QUARANTINE,
            dkim_pass=True,
            dkim_aligned=False,
            spf_pass=True,
            spf_aligned=True,
        ),
    )
    assert metrics == DmarcMetrics(
        total_count=2,
        disposition_counts={
            Disposition.QUARANTINE: 2,
        },
        dmarc_compliant_count=2,
        dkim_pass_count=2,
        spf_aligned_count=2,
        spf_pass_count=2,
    )


def test_dmarc_metrics_collection_update():
    """An event creates a per-Meta metrics bucket keyed by its metadata."""
    metrics_collector = DmarcMetricsCollection({})
    meta = Meta(
        reporter="google.com",
        from_domain="mydomain.de",
        dkim_domain="sub.mydomain.de",
        spf_domain="mydomain.de",
    )
    result = DmarcResult(
        disposition=Disposition.NONE_VALUE,
        dkim_pass=False,
        dkim_aligned=False,
        spf_pass=False,
        spf_aligned=False,
    )
    metrics_collector.update(DmarcEvent(count=1, meta=meta, result=result))
    assert metrics_collector.metrics == {
        meta: DmarcMetrics(
            total_count=1, disposition_counts={Disposition.NONE_VALUE: 1}
        )
    }


@pytest.mark.parametrize("from_email", [None, "someone@example.com"])
def test_dmarc_metrics_inc_invalid(from_email):
    """Invalid-report counts accumulate per InvalidMeta key (None allowed)."""
    metrics_collector = DmarcMetricsCollection({})
    metrics_collector.inc_invalid(InvalidMeta(from_email))
    metrics_collector.inc_invalid(InvalidMeta(from_email))
    assert metrics_collector.invalid_reports == {InvalidMeta(from_email): 2}


# ------------------------------------------------------------------------------
# dmarc_metrics_exporter/tests/test_e2e.py
# ------------------------------------------------------------------------------
import json
import subprocess
import sys
from contextlib import contextmanager
from dataclasses import asdict

import aiohttp
import pytest
from prometheus_client.parser import text_string_to_metric_families
from prometheus_client.samples import Sample

from .conftest import send_email, try_until_success
from .sample_emails import create_email_with_attachment, create_zip_report


@contextmanager
def dmarc_metrics_exporter(config_path):
    """Run the exporter as a subprocess for the duration of the with-block.

    BUG FIX: the yield is now wrapped in try/finally so the subprocess is
    terminated even when the with-block body raises; previously a failing
    test would leak the exporter process.
    """
    proc = subprocess.Popen(
        [
            sys.executable,
            "-m",
            "dmarc_metrics_exporter",
            "--configuration",
            str(config_path),
        ],
        stdout=sys.stdout,
        stderr=sys.stderr,
        encoding="utf-8",
    )
    try:
        yield proc
    finally:
        proc.terminate()
        try:
            proc.wait(20)
        except subprocess.TimeoutExpired:
            proc.kill()


@pytest.mark.asyncio
async def test_successful_processing_of_incoming_queue_message(greenmail, tmp_path):
    # Given
    msg = create_email_with_attachment(
        create_zip_report(report_id="1"), to=greenmail.imap.username
    )
    await try_until_success(lambda: send_email(msg, greenmail.smtp))

    config = {
        "listen_addr": "127.0.0.1",
        "port": 9797,
        "imap": asdict(greenmail.imap),
        "poll_interval_seconds": 1,
        "storage_path": str(tmp_path),
    }
    config_path = tmp_path / "dmarc-metrics-exporter.conf"
    with open(config_path, "w", encoding="utf-8") as f:
        json.dump(config, f)

    # When
    expected_meta = {
        "reporter": "google.com",
        "from_domain": "mydomain.de",
        "dkim_domain": "mydomain.de",
        "spf_domain": "my-spf-domain.de",
    }

    def expected_metrics(processed_email_count):
        return {
            "dmarc_total": processed_email_count,
            "dmarc_compliant_total": processed_email_count,
            "dmarc_quarantine_total": 0,
            "dmarc_reject_total": 0,
            "dmarc_dkim_aligned_total": processed_email_count,
            "dmarc_dkim_pass_total": processed_email_count,
            "dmarc_spf_aligned_total": 0,
            "dmarc_spf_pass_total": processed_email_count,
        }

    with dmarc_metrics_exporter(config_path):
        url = f"http://{config['listen_addr']}:{config['port']}/metrics"
        await try_until_success(
            lambda: assert_exported_metrics(
                url,
                expected_meta,
                expected_metrics(1),
            ),
            timeout_seconds=20,
        )
        msg = create_email_with_attachment(
            create_zip_report(report_id="2"), to=greenmail.imap.username
        )
        await send_email(msg, greenmail.smtp)
        await try_until_success(
            lambda: assert_exported_metrics(
                url,
                expected_meta,
                expected_metrics(2),
            ),
            timeout_seconds=20,
        )


async def assert_exported_metrics(url, expected_meta, expected_metrics):
    """Fetch /metrics and assert every expected sample is present."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            served_metrics = text_string_to_metric_families(await response.text())

    samples = [
        sample for served_metric in served_metrics for sample in served_metric.samples
    ]
    for prometheus_name, value in expected_metrics.items():
        assert (
            Sample(
                prometheus_name,
                labels=expected_meta,
                value=value,
                timestamp=None,
                exemplar=None,
            )
            in samples
        )


# ------------------------------------------------------------------------------
# dmarc_metrics_exporter/tests/test_expiring_set.py
# ------------------------------------------------------------------------------
from dmarc_metrics_exporter.expiring_set import ExpiringSet


def test_containment_with_ttl():
    current_time = 0

    # The lambda reads the enclosing local, so bumping current_time below
    # advances the set's notion of "now".
    expiring_set = ExpiringSet(1, lambda: current_time)
    assert "a" not in expiring_set
    expiring_set.add("a")
    assert "a" in expiring_set
    current_time += 1
    assert "a" not in expiring_set
def test_roundtrip_persistence(tmp_path):
    """Entries survive persist()/load(), and expiry keeps working afterwards."""
    current_time = 0
    filepath = tmp_path / "seen_reports.db"

    expiring_set = ExpiringSet(3, lambda: current_time)
    expiring_set.add("t0")
    current_time += 1
    expiring_set.add("t1")
    current_time += 1
    expiring_set.add("t2")
    current_time += 1
    expiring_set.persist(filepath)

    expiring_set = ExpiringSet.load(filepath, 3, lambda: current_time)
    # "t0" was added 3 ticks ago and has expired by load time.
    assert "t0" not in expiring_set
    assert "t1" in expiring_set
    assert "t2" in expiring_set
    current_time += 1
    assert "t1" not in expiring_set
    assert "t2" in expiring_set


# ------------------------------------------------------------------------------
# dmarc_metrics_exporter/tests/test_imap_client.py
# ------------------------------------------------------------------------------
import asyncio
import io
import re
from asyncio import (
    Condition,
    Event,
    IncompleteReadError,
    StreamReader,
    StreamWriter,
    create_task,
    start_server,
    wait_for,
)
from typing import Callable, Coroutine, Dict, List, Optional

import pytest
import structlog

from dmarc_metrics_exporter.imap_client import (
    ConnectionConfig,
    ImapClient,
    ImapServerError,
)
from dmarc_metrics_exporter.tests.conftest import send_email, try_until_success
from dmarc_metrics_exporter.tests.sample_emails import create_minimal_email

logger = structlog.get_logger()


class MockReader:
    """In-memory stand-in for an asyncio StreamReader backed by a bytes buffer."""

    def __init__(self, input_buf: bytes):
        self.reader = io.BytesIO(input_buf)
        # seek() returns the new absolute position, i.e. the buffer length.
        self.eol_pos = self.reader.seek(0, io.SEEK_END)
        self.reader.seek(0, io.SEEK_SET)

    async def read(self, n=-1) -> bytes:
        return self.reader.read(n)

    async def readline(self) -> bytes:
        return self.reader.readline()

    async def readexactly(self, n) -> bytes:
        buf = await self.read(n)
        if len(buf) < n:
            raise IncompleteReadError(buf, n)
        return buf

    async def readuntil(self, separator=b"\n") -> bytes:
        # Not needed by the tests that use this mock.
        raise NotImplementedError()

    def at_eof(self) -> bool:
        return self.reader.tell() >= self.eol_pos


@pytest.mark.asyncio
async def test_basic_connection(greenmail):
    async with ImapClient(greenmail.imap) as client:
        assert client.has_capability("IMAP4rev1")


@pytest.mark.asyncio
async def test_fetch(greenmail):
    await send_email(create_minimal_email(greenmail.imap.username), greenmail.smtp)
    async with ImapClient(greenmail.imap) as client:
        assert await client.select("INBOX") == 1
        await client.fetch(b"1:1", b"(BODY[HEADER.FIELDS (SUBJECT)])")
        fetched_email = await wait_for(client.fetched_queue.get(), 5)
        assert fetched_email[:2] == (1, b"FETCH")
        assert [
            item[2]
            for item in fetched_email[2]
            if item[:2] == (b"BODY", (b"HEADER.FIELDS", (b"SUBJECT",)))
        ] == [b"Subject: Minimal email\r\n"]


@pytest.mark.asyncio
async def test_fetch_non_ascii_chars(greenmail):
    await send_email(
        create_minimal_email(greenmail.imap.username, "üüüü"),
        greenmail.smtp,
    )
    async with ImapClient(greenmail.imap) as client:
        assert await client.select("INBOX") == 1
        await client.fetch(b"1:1", b"(RFC822)")
        fetched_email = await wait_for(client.fetched_queue.get(), 5)
        assert fetched_email[:2] == (1, b"FETCH")
        body = next(value for key, value in fetched_email[2] if key == b"RFC822")
        assert body.endswith("üüüü".encode("utf-8"))


@pytest.mark.asyncio
async def test_create_delete(greenmail):
    async with ImapClient(greenmail.imap) as client:
        # Best-effort cleanup in case a previous run left the mailbox behind.
        try:
            await client.delete("new mailbox")
        except ImapServerError:
            pass

        try:
            await client.create("new mailbox")
            assert await client.select("new mailbox") == 0
        finally:
            await client.select("INBOX")
            await client.delete("new mailbox")

        with pytest.raises(ImapServerError):
            await client.select("new mailbox")


@pytest.mark.asyncio
async def test_create_if_not_exists(greenmail):
    async with ImapClient(greenmail.imap) as client:
        try:
            # Second call must be a no-op rather than an error.
            await client.create_if_not_exists("new mailbox")
            await client.create_if_not_exists("new mailbox")
            assert await client.select("new mailbox") == 0
        finally:
            await client.select("INBOX")
            await client.delete("new mailbox")


@pytest.mark.asyncio
async def test_uid_copy(greenmail):
    await send_email(create_minimal_email(greenmail.imap.username), greenmail.smtp)
    async with ImapClient(greenmail.imap) as client:
        try:
            await client.create_if_not_exists("destination")
            assert await client.select("INBOX") == 1
            await client.fetch(b"1:1", b"(UID)")
            fetched_email = await wait_for(client.fetched_queue.get(), 5)
            assert fetched_email[:2] == (1, b"FETCH")
            uid = [value for key, value in fetched_email[2] if key == b"UID"][0]
            await client.uid_copy(uid, "destination")
            assert await client.select("destination") == 1
        finally:
            await client.select("INBOX")
            await client.delete("destination")


@pytest.mark.asyncio
async def test_uid_move(greenmail):
    await send_email(create_minimal_email(greenmail.imap.username), greenmail.smtp)
    async with ImapClient(greenmail.imap) as client:
        try:
            await client.create_if_not_exists("destination")
            assert await client.select("INBOX") == 1
            await client.fetch(b"1:1", b"(UID)")
            fetched_email = await wait_for(client.fetched_queue.get(), 5)
            assert fetched_email[:2] == (1, b"FETCH")
            uid = [value for key, value in fetched_email[2] if key == b"UID"][0]
            await client.uid_move(uid, "destination")
            # MOVE removes the message from the source mailbox.
            assert client.num_exists == 0
            assert await client.select("destination") == 1
        finally:
            await client.select("INBOX")
            await client.delete("destination")
@pytest.mark.asyncio
async def test_uid_move_graceful(greenmail):
    await send_email(create_minimal_email(greenmail.imap.username), greenmail.smtp)
    async with ImapClient(greenmail.imap) as client:
        try:
            await client.create_if_not_exists("destination")
            assert await client.select("INBOX") == 1
            await client.fetch(b"1:1", b"(UID)")
            fetched_email = await wait_for(client.fetched_queue.get(), 5)
            assert fetched_email[:2] == (1, b"FETCH")
            uid = [value for key, value in fetched_email[2] if key == b"UID"][0]
            await client.uid_move_graceful(uid, "destination")
            assert client.num_exists == 0
            assert await client.select("destination") == 1
        finally:
            await client.select("INBOX")
            await client.delete("destination")


@pytest.mark.asyncio
async def test_uid_store(greenmail):
    await send_email(create_minimal_email(greenmail.imap.username), greenmail.smtp)
    async with ImapClient(greenmail.imap) as client:
        assert await client.select("INBOX") == 1
        await client.fetch(b"1:1", b"(UID)")
        fetched_email = await wait_for(client.fetched_queue.get(), 5)
        assert fetched_email[:2] == (1, b"FETCH")
        uid = [value for key, value in fetched_email[2] if key == b"UID"][0]
        await client.uid_store(uid, rb"+FLAGS (\Deleted)")

        # The server reports the flag change as an unsolicited FETCH response.
        fetched_email = await wait_for(client.fetched_queue.get(), 5)
        assert fetched_email[:2] == (1, b"FETCH")
        assert [value for key, value in fetched_email[2] if key == b"FLAGS"][0] == (
            b"\\Deleted",
        )


@pytest.mark.asyncio
async def test_uid_expunge(greenmail):
    await send_email(create_minimal_email(greenmail.imap.username), greenmail.smtp)
    async with ImapClient(greenmail.imap) as client:
        assert await client.select("INBOX") == 1
        await client.fetch(b"1:1", b"(UID)")
        fetched_email = await wait_for(client.fetched_queue.get(), 5)
        assert fetched_email[:2] == (1, b"FETCH")
        uid = [value for key, value in fetched_email[2] if key == b"UID"][0]
        await client.uid_store(uid, rb"+FLAGS (\Deleted)")
        await client.expunge()
        assert client.num_exists == 0


@pytest.mark.asyncio
async def test_executes_same_command_type_sequentially():
    continue_triggers_change = Condition()
    continue_triggers = []

    async def select_handler(_: StreamWriter):
        # Each SELECT blocks until its per-command event is set, letting the
        # test observe how many SELECTs the server has received so far.
        continue_event = Event()
        async with continue_triggers_change:
            continue_triggers.append(continue_event)
            continue_triggers_change.notify_all()
        await continue_event.wait()

    async with MockImapServer(
        host="localhost", port=4143, command_handlers={b"SELECT": select_handler}
    ) as mock_server:
        async with ImapClient(mock_server.connection_config) as client:
            select_tasks = [
                create_task(client.select("INBOX")),
                create_task(client.select("foo")),
            ]
            async with continue_triggers_change:
                await asyncio.wait_for(
                    continue_triggers_change.wait_for(
                        lambda: len(continue_triggers) >= 1
                    ),
                    timeout=5,
                )
            # Only one SELECT may be in flight at a time.
            assert len(continue_triggers) == 1
            continue_triggers[0].set()
            # Lambda required because list access must be revaluated each time
            # pylint: disable=unnecessary-lambda
            await try_until_success(lambda: continue_triggers[1].set())
            await asyncio.gather(*select_tasks)


@pytest.mark.asyncio
async def test_executes_different_commands_in_parallel():
    continue_fetch = Event()
    continue_store = Event()
    num_commands_received_condition = Condition()
    num_commands_received = 0
    log = logger.bind(logger="test_executes_different_commands_in_parallel")

    async def fetch_handler(_: StreamWriter):
        nonlocal num_commands_received
        await log.adebug("fetch handle")
        async with num_commands_received_condition:
            num_commands_received += 1
            num_commands_received_condition.notify_all()
        await continue_fetch.wait()

    async def store_handler(_: StreamWriter):
        nonlocal num_commands_received
        await log.adebug("store handle")
        async with num_commands_received_condition:
            num_commands_received += 1
            num_commands_received_condition.notify_all()
        await continue_store.wait()

    async with MockImapServer(
        host="localhost",
        port=4143,
        command_handlers={b"FETCH": fetch_handler, b"UID": store_handler},
    ) as mock_server:
        async with ImapClient(mock_server.connection_config) as client:
            await client.select("INBOX")
            tasks = [
                create_task(client.fetch(b"1", b"(UID)")),
                create_task(client.uid_store(123, rb"+FLAGS (\Seen)")),
            ]
            # Both commands must reach the server while both handlers block.
            async with num_commands_received_condition:
                await asyncio.wait_for(
                    num_commands_received_condition.wait_for(
                        lambda: num_commands_received >= 2
                    ),
                    timeout=5,
                )
            continue_store.set()
            continue_fetch.set()
            await asyncio.gather(*tasks)


@pytest.mark.asyncio
async def test_timeout_behavior_waiting_for_server_ready():
    event = Event()

    async def client_connected_cb(reader: StreamReader, writer: StreamWriter):
        # Never send the greeting; the client must give up on its own.
        await event.wait()
        writer.close()

    server = await start_server(client_connected_cb, host="localhost", port=4143)

    async def connect():
        try:
            async with ImapClient(
                ConnectionConfig(
                    "username",
                    "password",
                    host="localhost",
                    port=4143,
                    use_ssl=False,
                ),
                timeout_seconds=0.2,
            ):
                pass
        except asyncio.TimeoutError:
            pass

    async with server:
        await asyncio.wait_for(connect(), timeout=1)
        event.set()


@pytest.mark.asyncio
async def test_command_timeout_no_response_at_all():
    # BUG FIX: the handler previously took no arguments, but MockImapServer
    # invokes handlers as command_handlers[command](writer) (signature
    # Callable[[StreamWriter], Coroutine]), so the call raised TypeError
    # inside the server task instead of cleanly suppressing the response.
    async def select_handler(_: StreamWriter):
        # Send nothing; returning True suppresses the tagged completion so
        # the client's command timeout is exercised.
        return True

    async with MockImapServer(
        host="localhost",
        port=4143,
        command_handlers={b"SELECT": select_handler},
    ) as mock_server:
        async with ImapClient(
            mock_server.connection_config,
            timeout_seconds=0.2,
        ) as client:

            async def run_command():
                try:
                    await client.select()
                except asyncio.TimeoutError:
                    pass

            await asyncio.wait_for(run_command(), timeout=1)


@pytest.mark.asyncio
async def test_command_timeout_single_untagged_response_only():
    async def select_handler(writer: StreamWriter):
        writer.write(b"* 42 EXISTS\r\n")
        await writer.drain()
        # Suppress the tagged completion; the client saw one untagged line
        # but must still time out waiting for the command to finish.
        return True

    async with MockImapServer(
        host="localhost",
        port=4143,
        command_handlers={b"SELECT": select_handler},
    ) as mock_server:
        async with ImapClient(
            mock_server.connection_config,
            timeout_seconds=0.2,
        ) as client:

            async def run_command():
                try:
                    await client.select()
                except asyncio.TimeoutError:
                    pass

            await asyncio.wait_for(run_command(), timeout=1)


@pytest.mark.asyncio
async def test_command_not_timing_out_if_interresponse_time_stays_below_threshold():
    async def select_handler(writer: StreamWriter):
        # Each gap (0.1 s) is below the 0.2 s timeout, so the command as a
        # whole must succeed even though it takes longer than 0.2 s overall.
        await asyncio.sleep(0.1)
        writer.write(b"* 42 EXISTS\r\n")
        await writer.drain()
        await asyncio.sleep(0.1)
        writer.write(b"* 42 RECENT\r\n")
        await writer.drain()
        await asyncio.sleep(0.1)
        writer.write(b"* OK UNSEEN 23\r\n")
        await writer.drain()
        await asyncio.sleep(0.1)

    async with MockImapServer(
        host="localhost",
        port=4143,
        command_handlers={b"SELECT": select_handler},
    ) as mock_server:
        async with ImapClient(
            mock_server.connection_config,
            timeout_seconds=0.2,
        ) as client:
            assert await client.select() == 42
class MockImapServer:
    """Minimal scriptable IMAP server for driving ImapClient in tests.

    Per-command behavior is injected via ``command_handlers``: a mapping from
    command name (e.g. ``b"SELECT"``) to an async callable receiving the
    client's StreamWriter. A handler returning a truthy value suppresses the
    default tagged ``OK`` completion.
    """

    def __init__(
        self,
        host: str = "localhost",
        port: int = 4143,
        command_handlers: Optional[
            Dict[bytes, Callable[[StreamWriter], Coroutine]]
        ] = None,
    ):
        self.host = host
        self.port = port
        self.command_handlers = command_handlers or {}
        self._server = None
        self._write_lock = asyncio.Lock()
        self._tasks: List[asyncio.Task] = []
        self._log = logger.bind(logger=self.__class__.__name__)

    @property
    def connection_config(self) -> ConnectionConfig:
        return ConnectionConfig(
            "username", "password", self.host, self.port, use_ssl=False
        )

    async def __aenter__(self):
        # BUG FIX: previously hard-coded host="localhost", port=4143 here,
        # silently ignoring the host/port passed to the constructor.
        self._server = await start_server(
            self._client_connected_cb, host=self.host, port=self.port
        )
        await self._server.__aenter__()
        return self

    async def __aexit__(self, exc_type, exc, traceback):
        return await self._server.__aexit__(exc_type, exc, traceback)

    async def _client_connected_cb(self, reader: StreamReader, writer: StreamWriter):
        # Greet the client, then dispatch each tagged command line to a task
        # so independent commands can be answered concurrently.
        writer.write(b"* OK hello\r\n")
        await writer.drain()

        while not reader.at_eof():
            line = await reader.readline()
            await self._log.adebug("MockImapServer received line.", line=line)
            # Named groups reconstructed; the dump had stripped the <...>
            # parts, leaving an invalid pattern. The names are grounded by
            # the parsed.group("tag"/"command"/"remainder") calls below.
            parsed = re.match(
                rb"^(?P<tag>\w+)\s+(?P<command>\w+)(?P<remainder>.*)$", line
            )
            if not parsed:
                continue
            tag, command, remainder = (
                parsed.group("tag"),
                parsed.group("command"),
                parsed.group("remainder") + b"\n",
            )
            async with self._write_lock:
                # A trailing "{n}" literal announcement requires a
                # continuation prompt before the client sends more data.
                while remainder.endswith(b"}\r\n"):
                    writer.write(b"+ OK continue\r\n")
                    await writer.drain()
                    remainder += await reader.readline()

            self._tasks.append(
                asyncio.create_task(self._finish_command_handling(tag, command, writer))
            )

        await asyncio.gather(*self._tasks)
        writer.close()

    async def _finish_command_handling(
        self, tag: bytes, command: bytes, writer: StreamWriter
    ):
        handled = False
        suppress_tagged_response = False
        if command in self.command_handlers:
            handled = True
            suppress_tagged_response = await self.command_handlers[command](writer)

        async with self._write_lock:
            if handled:
                pass
            elif command == b"CAPABILITY":
                writer.write(b"* CAPABILITY IMAP4rev1\r\n")
            elif command == b"LOGOUT":
                writer.write(b"* BYE see you soon\r\n")

            if not suppress_tagged_response:
                writer.write(b" ".join((tag, b"OK", command, b"completed\r\n")))
            if command == b"LOGOUT":
                writer.write_eof()
            await writer.drain()


# ------------------------------------------------------------------------------
# dmarc_metrics_exporter/tests/test_imap_parser.py
# ------------------------------------------------------------------------------
import bite
import pytest
from bite.parse_functions import parse_bytes

from dmarc_metrics_exporter.imap_parser import (
    fetch_response_line,
    response,
    response_tagged,
    string,
)


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "given_input, expected",
    [
        (b'"quoted string"', b"quoted string"),
        (b"{14}\r\nliteral string", b"literal string"),
        (b"{13}\r\nwith\r\nnewline", b"with\r\nnewline"),
    ],
)
async def test_parses_strings(given_input, expected):
    parse_tree = await parse_bytes(string, given_input, parse_all=True)
    assert parse_tree.values == (expected,)
# Shared RFC 822 fixtures; sizes are always computed via len(), so the tests
# stay consistent with the literal content.
# NOTE(review): "Return-Path: " has no address — the dump this was recovered
# from stripped <...> sequences, so the original may have been
# "Return-Path: <sender@some-domain.org>"; confirm against upstream.
RFC822_HEADER = (
    b"Return-Path: \r\n"
    b"Received: from 172.10.0.1 "
    b"(HELO xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx); "
    b"Wed Jan 05 10:26:45 UTC 2022\r\n"
    b'Content-Type: text/plain; charset="utf-8"\r\n'
    b"Content-Transfer-Encoding: 7bit\r\n"
    b"MIME-Version: 1.0\r\n"
    b"Subject: Message subject\r\n"
    b"From: sender@some-domain.org\r\n"
    b"To: queue@localhost\r\n\r\n"
)
RFC822_BODY = b"message content\r\n"
RFC822_MESSAGE = RFC822_HEADER + RFC822_BODY


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "given_input, expected",
    [
        # RFC822 body at end
        (
            f"1 FETCH (FLAGS (\\Seen) UID 6 RFC822 {{{len(RFC822_MESSAGE)}}}\r\n".encode(
                "ascii"
            )
            + RFC822_MESSAGE
            + b")",
            (
                1,
                b"FETCH",
                (
                    (b"FLAGS", (b"\\Seen",)),
                    (b"UID", 6),
                    (b"RFC822", RFC822_MESSAGE),
                ),
            ),
        ),
        # RFC822 body followed by flags
        (
            f"2 FETCH (UID 7 RFC822 {{{len(RFC822_MESSAGE)}}}\r\n".encode("ascii")
            + RFC822_MESSAGE
            + b" FLAGS (\\Seen))",
            (
                2,
                b"FETCH",
                (
                    (b"UID", 7),
                    (b"RFC822", RFC822_MESSAGE),
                    (b"FLAGS", (b"\\Seen",)),
                ),
            ),
        ),
        # Test each allowed pair
        # (body is not actually reproduced in the parsed value)
        (
            b'4 FETCH (BODY ("MESSAGE" "text/html" '
            b'("a" "b(") "body-fld-id" "body-fld-desc" "8BIT" 123))',
            (
                4,
                b"FETCH",
                ((b"BODY", ()),),
            ),
        ),
        (
            f"5 FETCH (BODY[] {{{len(RFC822_MESSAGE)}}}\r\n".encode("ascii")
            + RFC822_MESSAGE
            + b")",
            (5, b"FETCH", ((b"BODY", (), RFC822_MESSAGE),)),
        ),
        (
            f"6 FETCH (BODY[]<42> {{{len(RFC822_MESSAGE) - 42}}}\r\n".encode("ascii")
            + RFC822_MESSAGE[42:]
            + b")",
            (6, b"FETCH", (((b"BODY", (), 42, RFC822_MESSAGE[42:])),)),
        ),
        (
            f"7 FETCH (BODY[HEADER] {{{len(RFC822_HEADER)}}}\r\n".encode("ascii")
            + RFC822_HEADER
            + b")",
            (7, b"FETCH", ((b"BODY", (b"HEADER",), RFC822_HEADER),)),
        ),
        (
            b'8 FETCH (BODYSTRUCTURE ("MESSAGE" "text/html" '
            b'("a" "b(") "body-fld-id" "body-fld-desc" "8BIT" 123))',
            (
                8,
                b"FETCH",
                ((b"BODYSTRUCTURE", ()),),
            ),
        ),
        (
            b'9 FETCH (ENVELOPE ("date" "subject" '
            b'(("from" NIL "from" "example.com")) '
            b'(("sender" NIL "sender" "example.com")) NIL '
            b'(("to" NIL "to" "example.com")) NIL NIL NIL "message-id"'
            b"))",
            (
                9,
                b"FETCH",
                (
                    (
                        b"ENVELOPE",
                        (
                            b"date",
                            b"subject",
                            ((b"from", b"NIL", b"from", b"example.com"),),
                            ((b"sender", b"NIL", b"sender", b"example.com"),),
                            b"NIL",
                            ((b"to", b"NIL", b"to", b"example.com"),),
                            b"NIL",
                            b"NIL",
                            b"NIL",
                            b"message-id",
                        ),
                    ),
                ),
            ),
        ),
        (
            b"10 FETCH (FLAGS (\\Seen \\Foo))",
            (10, b"FETCH", ((b"FLAGS", (b"\\Seen", b"\\Foo")),)),
        ),
        (
            b'11 FETCH (INTERNALDATE " 2-Mar-2022 12:34:56 +0200")',
            (11, b"FETCH", ((b"INTERNALDATE", b" 2-Mar-2022 12:34:56 +0200"),)),
        ),
        (
            f"12 FETCH (RFC822 {{{len(RFC822_MESSAGE)}}}\r\n".encode("ascii")
            + RFC822_MESSAGE
            + b")",
            (12, b"FETCH", ((b"RFC822", RFC822_MESSAGE),)),
        ),
        (
            f"13 FETCH (RFC822.HEADER {{{len(RFC822_HEADER)}}}\r\n".encode("ascii")
            + RFC822_HEADER
            + b")",
            (13, b"FETCH", ((b"RFC822.HEADER", RFC822_HEADER),)),
        ),
        (
            b"14 FETCH (RFC822.SIZE 12345)",
            (14, b"FETCH", ((b"RFC822.SIZE", 12345),)),
        ),
        (
            f"15 FETCH (RFC822.TEXT {{{len(RFC822_BODY)}}}\r\n".encode("ascii")
            + RFC822_BODY
            + b")",
            (15, b"FETCH", ((b"RFC822.TEXT", RFC822_BODY),)),
        ),
        (
            b"16 FETCH (UID 42)",
            (
                16,
                b"FETCH",
                ((b"UID", 42),),
            ),
        ),
        (
            b"1 FETCH (FLAGS (\\Seen) BODY[HEADER.FIELDS (SUBJECT)] {24}\r\n"
            b"Subject: Minimal email\r\n)",
            (
                1,
                b"FETCH",
                (
                    (b"FLAGS", (b"\\Seen",)),
                    (
                        b"BODY",
                        (b"HEADER.FIELDS", (b"SUBJECT",)),
                        b"Subject: Minimal email\r\n",
                    ),
                ),
            ),
        ),
        # Fallback parsing of unknown data items
        (
            b"1 FETCH (UNKNOWN NIL)",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN", b"NIL"),),
            ),
        ),
        (
            b"1 FETCH (UNKNOWN 123)",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN", 123),),
            ),
        ),
        (
            b'1 FETCH (UNKNOWN "foo")',
            (
                1,
                b"FETCH",
                ((b"UNKNOWN", b"foo"),),
            ),
        ),
        (
            b"1 FETCH (UNKNOWN {3}\r\nfoo)",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN", b"foo"),),
            ),
        ),
        (
            b"1 FETCH (UNKNOWN (foo 123 ({6}\r\nfoobar)))",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN", (b"foo", b"123", (b"foobar",))),),
            ),
        ),
        (
            b"1 FETCH (UNKNOWN.FOO NIL)",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN.FOO", b"NIL"),),
            ),
        ),
        (
            b"1 FETCH (UNKNOWN.FOO[FIELD1 FIELD2 (ITEM1 ITEM2)] NIL)",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN.FOO", b"[FIELD1 FIELD2 (ITEM1 ITEM2)]", b"NIL"),),
            ),
        ),
        (
            b"1 FETCH (UNKNOWN.FOO<42> NIL)",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN.FOO", 42, b"NIL"),),
            ),
        ),
        (
            b"1 FETCH (UNKNOWN.FOO[FIELD1 FIELD2 (ITEM1 ITEM2)]<42> NIL)",
            (
                1,
                b"FETCH",
                ((b"UNKNOWN.FOO", b"[FIELD1 FIELD2 (ITEM1 ITEM2)]", 42, b"NIL"),),
            ),
        ),
    ],
)
async def test_parses_fetch_response_line(given_input, expected):
    parse_tree = await parse_bytes(fetch_response_line, given_input, parse_all=True)
    assert parse_tree.values == expected


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "given_input, expected",
    [
        # NOTE(review): (b"some text") is parenthesized bytes, not a 1-tuple;
        # presumably intentional given the parser flattens values — confirm.
        (b"tag123 OK some text", (b"tag123", b"OK", (b"some text"))),
        (
            b"tag123 OK [UIDNEXT 456] some text",
            (
                b"tag123",
                b"OK",
                (b"UIDNEXT", 456),
                b"some text",
            ),
        ),
        (
            b"tag123 OK [BADCHARSET ({8}\r\nfoo\r\nbar)] some text",
            (
                b"tag123",
                b"OK",
                (
                    b"BADCHARSET",
                    (b"foo\r\nbar",),
                ),
                b"some text",
            ),
        ),
        (
            b"tag123 OK [FOO 123] some text",
            (b"tag123", b"OK", (b"FOO", b"123"), b"some text"),
        ),
    ],
)
async def test_parses_tagged_response_line(given_input, expected):
    parse_tree = await parse_bytes(response_tagged, given_input, parse_all=True)
    assert parse_tree.values == expected


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "given_input",
    [
        # Truncated literals must raise instead of hanging or mis-parsing.
        b"tag123 OK [BADCHARSET ({8}\r\n",
        b"tag123 OK [BADCHARSET ({8}\r\nfoo\r\n",
    ],
)
async def test_parses_tagged_response_line_exceptions(given_input):
    with pytest.raises(bite.ParseError):
        await parse_bytes(response_tagged, given_input)


@pytest.mark.asyncio
async def test_continue_response():
    given_input = b"+ foobar\r\n"
    parse_tree = await parse_bytes(response, given_input, parse_all=True)
    assert parse_tree.values == (b"+", b"foobar")


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "given_input,expected",
    [
        (
            b"* OK IMAP4rev1 Server GreenMail v1.6.5 ready\r\n",
            (
                b"*",
                b"OK",
                b"IMAP4rev1 Server GreenMail v1.6.5 ready",
            ),
        ),
        (b"* BYE cu later alligator\r\n", (b"*", b"BYE", b"cu later alligator")),
        (
            b"* CAPABILITY IMAP4rev1 LITERAL+ SORT UIDPLUS IDLE QUOTA\r\n",
            (
                b"*",
                b"CAPABILITY",
                b"IMAP4rev1 LITERAL+ SORT UIDPLUS IDLE QUOTA",
            ),
        ),
        (b"* 1 EXISTS\r\n", (b"*", 1, b"EXISTS")),
        (b"* 2 EXPUNGE\r\n", (b"*", 2, b"EXPUNGE")),
        (b"* 3 FETCH (UID 42)\r\n", (b"*", 3, b"FETCH", ((b"UID", 42),))),
        (
            b'* foo {10}\r\n0123456789 "xyz" (A B C)\r\n',
            (b"*", b"foo ", b"0123456789", b' "xyz" (A B C)'),
        ),
    ],
)
async def test_untagged_response(given_input, expected):
    parse_tree = await parse_bytes(response, given_input, parse_all=True)
    assert parse_tree.values == expected
assert parse_tree.values == expected 368 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/test_imap_queue.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from email.message import EmailMessage 3 | 4 | import pytest 5 | 6 | from dmarc_metrics_exporter.imap_queue import ImapClient, ImapQueue 7 | 8 | from .conftest import send_email, try_until_success, verify_email_delivered 9 | 10 | 11 | def create_dummy_email(to: str): 12 | msg = EmailMessage() 13 | msg.set_content("message content") 14 | msg["Subject"] = "Message subject" 15 | msg["From"] = "sender@some-domain.org" 16 | msg["To"] = to 17 | return msg 18 | 19 | 20 | def assert_emails_equal(a: EmailMessage, b: EmailMessage): 21 | assert all(a[header] == b[header] for header in ("Subject", "From", "To")) 22 | assert a.get_content().strip() == b.get_content().strip() 23 | 24 | 25 | @pytest.mark.asyncio 26 | async def test_successful_processing_of_existing_queue_message(greenmail): 27 | # Given 28 | msg = create_dummy_email(greenmail.imap.username) 29 | await try_until_success(lambda: send_email(msg, greenmail.smtp)) 30 | await try_until_success(lambda: verify_email_delivered(greenmail.imap)) 31 | 32 | is_done = asyncio.Event() 33 | 34 | async def handler(queue_msg: EmailMessage, is_done=is_done): 35 | is_done.set() 36 | assert_emails_equal(queue_msg, msg) 37 | 38 | # When 39 | queue = ImapQueue(connection=greenmail.imap) 40 | queue.consume(handler) 41 | try: 42 | await asyncio.wait_for(is_done.wait(), 10) 43 | finally: 44 | await queue.stop_consumer() 45 | 46 | # Then 47 | async with ImapClient(greenmail.imap) as client: 48 | assert await client.select() == 0 49 | assert await client.select(queue.folders.done) == 1 50 | 51 | 52 | @pytest.mark.asyncio 53 | async def test_successful_processing_of_incoming_queue_message(greenmail): 54 | # Given 55 | msg = create_dummy_email(greenmail.imap.username) 
56 | 57 | is_done = asyncio.Event() 58 | 59 | async def handler(queue_msg: EmailMessage, is_done=is_done): 60 | is_done.set() 61 | assert_emails_equal(queue_msg, msg) 62 | 63 | # When 64 | queue = ImapQueue(connection=greenmail.imap, poll_interval_seconds=0.1) 65 | queue.consume(handler) 66 | 67 | await asyncio.sleep(0.5) 68 | await try_until_success(lambda: send_email(msg, greenmail.smtp)) 69 | await try_until_success( 70 | lambda: verify_email_delivered( 71 | greenmail.imap, mailboxes=("INBOX", queue.folders.done) 72 | ) 73 | ) 74 | 75 | try: 76 | await asyncio.wait_for(is_done.wait(), 10) 77 | finally: 78 | await queue.stop_consumer() 79 | 80 | # Then 81 | async with ImapClient(greenmail.imap) as client: 82 | assert await client.select() == 0 83 | assert await client.select(queue.folders.done) == 1 84 | 85 | 86 | @pytest.mark.asyncio 87 | async def test_error_handling_when_processing_queue_message(greenmail): 88 | # Given 89 | msg = create_dummy_email(greenmail.imap.username) 90 | await try_until_success(lambda: send_email(msg, greenmail.smtp)) 91 | await try_until_success(lambda: verify_email_delivered(greenmail.imap)) 92 | 93 | is_done = asyncio.Event() 94 | 95 | async def handler(_queue_msg: EmailMessage, is_done=is_done): 96 | is_done.set() 97 | # pylint: disable=broad-exception-raised 98 | raise Exception("Error raised on purpose.") 99 | 100 | # When 101 | queue = ImapQueue(connection=greenmail.imap) 102 | queue.consume(handler) 103 | try: 104 | await asyncio.wait_for(is_done.wait(), 10) 105 | finally: 106 | await queue.stop_consumer() 107 | 108 | # Then 109 | async with ImapClient(greenmail.imap) as client: 110 | assert await client.select() == 0 111 | assert await client.select(queue.folders.error) == 1 112 | 113 | 114 | @pytest.mark.asyncio 115 | async def test_reconnects_if_imap_connection_is_lost(greenmail): 116 | is_done = asyncio.Event() 117 | 118 | async def handler(queue_msg: EmailMessage, is_done=is_done): 119 | is_done.set() 120 | 
assert_emails_equal(queue_msg, msg) 121 | 122 | queue = None 123 | try: 124 | queue = ImapQueue( 125 | connection=greenmail.imap, 126 | poll_interval_seconds=0.1, 127 | timeout_seconds=0.5, 128 | ) 129 | queue.consume(handler) 130 | msg = create_dummy_email(greenmail.imap.username) 131 | await try_until_success(lambda: send_email(msg, greenmail.smtp)) 132 | await asyncio.wait_for(is_done.wait(), 10) 133 | 134 | is_done.clear() 135 | await greenmail.restart() 136 | 137 | msg = create_dummy_email(greenmail.imap.username) 138 | await try_until_success(lambda: send_email(msg, greenmail.smtp)) 139 | await asyncio.wait_for(is_done.wait(), 10) 140 | finally: 141 | if queue is not None: 142 | await queue.stop_consumer() 143 | 144 | 145 | @pytest.mark.parametrize( 146 | "parsed_response", 147 | [ 148 | (1, b"FETCH", ((b"UID", 42), (b"RFC822", bytearray(b"mail body")))), 149 | (1, b"FETCH", ((b"RFC822", bytearray(b"mail body")), (b"UID", 42))), 150 | ], 151 | ) 152 | def test_regression_extract_uid_and_msg_works_with_any_order(parsed_response): 153 | # pylint: disable=protected-access 154 | uid, msg = ImapQueue._extract_uid_and_msg(parsed_response) 155 | assert uid == 42 156 | assert isinstance(msg, EmailMessage) 157 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/test_logging.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import re 4 | 5 | import pytest 6 | import structlog 7 | 8 | from dmarc_metrics_exporter.logging import configure_logging, parse_log_level 9 | 10 | 11 | @pytest.fixture(autouse=True) 12 | def reset_logging_config_after_test(): 13 | yield None 14 | logging.Logger.manager.loggerDict.clear() 15 | configure_logging({}, debug=True) 16 | 17 | 18 | def test_parse_log_level_returns_int_for_int_arg(): 19 | assert parse_log_level(20) == 20 20 | assert parse_log_level(40) == 40 21 | 22 | 23 | @pytest.mark.parametrize( 24 | 
"input_level,output", 25 | [ 26 | ("debug", logging.DEBUG), 27 | ("info", logging.INFO), 28 | ("warning", logging.WARNING), 29 | ("error", logging.ERROR), 30 | ("critical", logging.CRITICAL), 31 | ], 32 | ) 33 | def test_parse_log_level_parses_string_levels(input_level, output): 34 | assert parse_log_level(input_level) == output 35 | 36 | 37 | @pytest.mark.parametrize( 38 | "input_level,output", 39 | [ 40 | ("info", logging.INFO), 41 | ("INFO", logging.INFO), 42 | ("iNfO", logging.INFO), 43 | ("ERRor", logging.ERROR), 44 | ], 45 | ) 46 | def test_parse_log_level_is_case_insensitive(input_level, output): 47 | assert parse_log_level(input_level) == output 48 | 49 | 50 | def test_configure_logging_setting_log_level(caplog): 51 | configure_logging( 52 | { 53 | "root": {"level": "WARNING"}, 54 | }, 55 | debug=False, 56 | ) 57 | structlog_logger = structlog.get_logger("test-logger") 58 | stdlib_logger = logging.getLogger("test-logger") 59 | logging.getLogger().addHandler(caplog.handler) 60 | 61 | for logger in (structlog_logger, stdlib_logger): 62 | logger.debug("not_visible") 63 | logger.warning("visible") 64 | 65 | assert caplog.record_tuples == [ 66 | ("test-logger", logging.WARNING, "{'event': 'visible', 'level': 'warning'}"), 67 | ("test-logger", logging.WARNING, "visible"), 68 | ] 69 | 70 | 71 | def test_configure_logging_debug_overrides_log_level(caplog): 72 | configure_logging( 73 | { 74 | "root": {"level": "WARNING"}, 75 | }, 76 | debug=True, 77 | ) 78 | structlog_logger = structlog.get_logger("test-logger") 79 | stdlib_logger = logging.getLogger("test-logger") 80 | logging.getLogger().addHandler(caplog.handler) 81 | 82 | for logger in (structlog_logger, stdlib_logger): 83 | logger.debug("visible") 84 | 85 | assert caplog.record_tuples == [ 86 | ("test-logger", logging.DEBUG, "{'event': 'visible', 'level': 'debug'}"), 87 | ("test-logger", logging.DEBUG, "visible"), 88 | ] 89 | 90 | 91 | def test_configure_logging_default_log_message_format(caplog, capsys): 92 | 
configure_logging( 93 | {}, 94 | debug=False, 95 | ) 96 | structlog_logger = structlog.get_logger("test-logger").bind(logger="test-logger") 97 | stdlib_logger = logging.getLogger("test-logger") 98 | logging.getLogger().addHandler(caplog.handler) 99 | 100 | structlog_logger.warning("event", some_key="some_value") 101 | stdlib_logger.warning("event", extra={"some_key": "some_value"}) 102 | 103 | captured = capsys.readouterr() 104 | without_color = re.sub("\x1b\\[\\d+m", "", captured.err) 105 | timestamp = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}" 106 | spacing = " " * 24 107 | assert re.match( 108 | f"^{timestamp} \\[warning \\] event {spacing} \\[test-logger\\] some_key=some_value\n" 109 | f"{timestamp} \\[warning \\] event {spacing} \\[test-logger\\] some_key=some_value\n$", 110 | without_color, 111 | ) 112 | 113 | 114 | def test_configure_logging_to_log_json(caplog, capsys): 115 | configure_logging( 116 | { 117 | "handlers": { 118 | "default": {"class": "logging.StreamHandler", "formatter": "json"} 119 | }, 120 | }, 121 | debug=False, 122 | ) 123 | structlog_logger = structlog.get_logger("test-logger").bind(logger="test-logger") 124 | stdlib_logger = logging.getLogger("test-logger") 125 | logging.getLogger().addHandler(caplog.handler) 126 | 127 | structlog_logger.warning("event", some_key="some_value") 128 | stdlib_logger.warning("event", extra={"some_key": "some_value"}) 129 | 130 | captured = capsys.readouterr() 131 | for line in captured.err.splitlines(): 132 | doc = json.loads(line) 133 | del doc["timestamp"] 134 | assert doc == { 135 | "level": "warning", 136 | "logger": "test-logger", 137 | "event": "event", 138 | "some_key": "some_value", 139 | } 140 | 141 | 142 | def test_configure_logging_disable_stdlib_sublogger(caplog): 143 | configure_logging( 144 | { 145 | "loggers": {"sublogger": {"propagate": False}}, 146 | }, 147 | debug=False, 148 | ) 149 | stdlib_logger = logging.getLogger("sublogger") 150 | logging.getLogger().addHandler(caplog.handler) 151 | 152 | 
stdlib_logger.warning("not_visible") 153 | 154 | assert caplog.record_tuples == [] 155 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/test_metrics_persister.py: -------------------------------------------------------------------------------- 1 | from dmarc_metrics_exporter.dmarc_metrics import ( 2 | Disposition, 3 | DmarcMetrics, 4 | DmarcMetricsCollection, 5 | InvalidMeta, 6 | Meta, 7 | ) 8 | from dmarc_metrics_exporter.metrics_persister import MetricsPersister 9 | 10 | 11 | def test_roundtrip_metrics(tmp_path): 12 | metrics_db = tmp_path / "metrics.db" 13 | metrics = DmarcMetricsCollection( 14 | { 15 | Meta( 16 | reporter="google.com", 17 | from_domain="mydomain.de", 18 | dkim_domain="dkim-domain.org", 19 | spf_domain="spf-domain.org", 20 | ): DmarcMetrics( 21 | total_count=42, 22 | disposition_counts={Disposition.QUARANTINE: 4}, 23 | dmarc_compliant_count=24, 24 | dkim_aligned_count=5, 25 | dkim_pass_count=10, 26 | spf_aligned_count=4, 27 | spf_pass_count=8, 28 | ) 29 | }, 30 | {InvalidMeta("someone@example.com"): 42}, 31 | ) 32 | 33 | persister = MetricsPersister(metrics_db) 34 | persister.save(metrics) 35 | assert persister.load() == metrics 36 | 37 | 38 | def test_loads_old_format(tmp_path): 39 | metrics_db = tmp_path / "metrics.db" 40 | metrics_db.write_text( 41 | "[[{" 42 | '"reporter":"google.com",' 43 | '"from_domain":"mydomain.de",' 44 | '"dkim_domain":"dkim-domain.org",' 45 | '"spf_domain":"spf-domain.org"' 46 | "},{" 47 | '"total_count":42,' 48 | '"disposition_counts":{"quarantine":4},' 49 | '"dmarc_compliant_count":24,' 50 | '"dkim_aligned_count":5,' 51 | '"dkim_pass_count":10,' 52 | '"spf_aligned_count":4,' 53 | '"spf_pass_count":8' 54 | "}]]" 55 | ) 56 | 57 | persister = MetricsPersister(metrics_db) 58 | assert persister.load() == DmarcMetricsCollection( 59 | { 60 | Meta( 61 | reporter="google.com", 62 | from_domain="mydomain.de", 63 | dkim_domain="dkim-domain.org", 64 | 
spf_domain="spf-domain.org", 65 | ): DmarcMetrics( 66 | total_count=42, 67 | disposition_counts={Disposition.QUARANTINE: 4}, 68 | dmarc_compliant_count=24, 69 | dkim_aligned_count=5, 70 | dkim_pass_count=10, 71 | spf_aligned_count=4, 72 | spf_pass_count=8, 73 | ) 74 | } 75 | ) 76 | 77 | 78 | def test_returns_newly_initialized_metrics_if_db_is_non_existent(tmp_path): 79 | metrics_db = tmp_path / "metrics.db" 80 | persister = MetricsPersister(metrics_db) 81 | assert persister.load() == DmarcMetricsCollection() 82 | -------------------------------------------------------------------------------- /dmarc_metrics_exporter/tests/test_prometheus_exporter.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | 3 | import aiohttp 4 | import pytest 5 | from prometheus_client.parser import text_string_to_metric_families 6 | from prometheus_client.samples import Sample 7 | 8 | import dmarc_metrics_exporter 9 | from dmarc_metrics_exporter.dmarc_event import Disposition, Meta 10 | from dmarc_metrics_exporter.dmarc_metrics import ( 11 | DmarcMetrics, 12 | DmarcMetricsCollection, 13 | InvalidMeta, 14 | ) 15 | from dmarc_metrics_exporter.prometheus_exporter import PrometheusExporter 16 | 17 | 18 | @pytest.mark.asyncio 19 | async def test_prometheus_exporter(): 20 | metrics = DmarcMetricsCollection( 21 | metrics={ 22 | Meta( 23 | reporter="google.com", 24 | from_domain="mydomain.de", 25 | dkim_domain="sub.mydomain.de", 26 | spf_domain="mydomain.de", 27 | ): DmarcMetrics( 28 | total_count=42, 29 | disposition_counts={ 30 | Disposition.QUARANTINE: 3, 31 | Disposition.NONE_VALUE: 39, 32 | }, 33 | dmarc_compliant_count=39, 34 | dkim_aligned_count=39, 35 | dkim_pass_count=39, 36 | spf_pass_count=42, 37 | spf_aligned_count=42, 38 | ), 39 | Meta( 40 | reporter="yahoo.com", 41 | from_domain="mydomain.de", 42 | dkim_domain="sub.mydomain.de", 43 | spf_domain="mydomain.de", 44 | ): DmarcMetrics( 45 | total_count=1, 46 | 
disposition_counts={Disposition.NONE_VALUE: 1}, 47 | dmarc_compliant_count=1, 48 | dkim_aligned_count=1, 49 | dkim_pass_count=1, 50 | spf_pass_count=1, 51 | spf_aligned_count=1, 52 | ), 53 | }, 54 | invalid_reports={InvalidMeta("someone@example.org"): 42}, 55 | ) 56 | 57 | exporter = PrometheusExporter(metrics) 58 | async with exporter.start_server() as server: 59 | async with aiohttp.ClientSession() as session: 60 | async with session.get( 61 | f"http://{server.host}:{server.port}/metrics" 62 | ) as response: 63 | served_metrics = text_string_to_metric_families(await response.text()) 64 | 65 | samples = [ 66 | sample for served_metric in served_metrics for sample in served_metric.samples 67 | ] 68 | expected_metrics = { 69 | "dmarc_total": lambda m: m.total_count, 70 | "dmarc_compliant_total": lambda m: m.dmarc_compliant_count, 71 | "dmarc_quarantine_total": lambda m: m.disposition_counts.get( 72 | Disposition.QUARANTINE, 0 73 | ), 74 | "dmarc_reject_total": lambda m: m.disposition_counts.get(Disposition.REJECT, 0), 75 | "dmarc_dkim_aligned_total": lambda m: m.dkim_aligned_count, 76 | "dmarc_dkim_pass_total": lambda m: m.dkim_pass_count, 77 | "dmarc_spf_aligned_total": lambda m: m.spf_aligned_count, 78 | "dmarc_spf_pass_total": lambda m: m.spf_pass_count, 79 | } 80 | for meta, metric in metrics.items(): 81 | for prometheus_name, getter in expected_metrics.items(): 82 | assert ( 83 | Sample( 84 | prometheus_name, 85 | labels=dataclasses.asdict(meta), 86 | value=getter(metric), 87 | timestamp=None, 88 | exemplar=None, 89 | ) 90 | in samples 91 | ) 92 | assert Sample( 93 | "dmarc_invalid_reports_total", 94 | labels={"from_email": "someone@example.org"}, 95 | value=42, 96 | timestamp=None, 97 | exemplar=None, 98 | ) 99 | 100 | 101 | @pytest.mark.asyncio 102 | async def test_build_info(): 103 | exporter = PrometheusExporter(DmarcMetricsCollection()) 104 | async with exporter.start_server() as server: 105 | async with aiohttp.ClientSession() as session: 106 | async with 
session.get( 107 | f"http://{server.host}:{server.port}/metrics" 108 | ) as response: 109 | served_metrics = text_string_to_metric_families(await response.text()) 110 | 111 | samples = [ 112 | sample for served_metric in served_metrics for sample in served_metric.samples 113 | ] 114 | assert ( 115 | Sample( 116 | "dmarc_metrics_exporter_build_info", 117 | labels={"version": dmarc_metrics_exporter.__version__}, 118 | value=1, 119 | timestamp=None, 120 | exemplar=None, 121 | ) 122 | in samples 123 | ) 124 | -------------------------------------------------------------------------------- /generate-dataclasses-from-xsd.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit -o nounset -o pipefail 4 | 5 | poetry run xsdata generate dmarc-aggregate-report.xsd --package dmarc_metrics_exporter.model -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | authors = ["Jan Gosmann "] 3 | classifiers = [ 4 | "Development Status :: 5 - Production/Stable", 5 | "Framework :: AsyncIO", 6 | "Intended Audience :: System Administrators", 7 | "License :: OSI Approved :: MIT License", 8 | "Programming Language :: Python :: 3 :: Only", 9 | "Programming Language :: Python :: 3.9", 10 | "Programming Language :: Python :: 3.10", 11 | "Programming Language :: Python :: 3.11", 12 | "Programming Language :: Python :: 3.12", 13 | "Programming Language :: Python :: 3.13", 14 | "Topic :: Communications :: Email", 15 | "Topic :: System :: Monitoring", 16 | ] 17 | description = "Export Prometheus metrics from DMARC reports." 
18 | keywords = ["DMARC", "DKIM", "SPF", "Prometheus"] 19 | license = "MIT" 20 | name = "dmarc-metrics-exporter" 21 | readme = "README.rst" 22 | repository = "https://github.com/jgosmann/dmarc-metrics-exporter/" 23 | version = "1.2.0" # Update also in __init__.py and Docker compose example in readme! 24 | 25 | [tool.poetry.scripts] 26 | dmarc-metrics-exporter = "dmarc_metrics_exporter.__main__:run" 27 | 28 | [tool.poetry.dependencies] 29 | bite-parser = "^0.2.4" 30 | dataclasses-serialization = "^1.3.1" 31 | prometheus_client = "^0.21.0" 32 | python = "^3.9" 33 | structlog = ">=24.1.0" 34 | uvicorn = {extras = ["standard"], version = "^0.32.0"} 35 | xsdata = ">=23.7" 36 | 37 | [tool.poetry.group.dev.dependencies] 38 | aiohttp = "^3.7.3,!=3.10.5" 39 | mypy = "^1.4.1" 40 | pytest = "^8.3.2" 41 | pytest-asyncio = "^0.24.0" 42 | pytest-cov = "^5.0.0" 43 | requests = "^2.26.0" 44 | ruff = "^0.6.9" 45 | types-requests = "^2.25.11" 46 | 47 | [tool.ruff.lint] 48 | extend-select = ["I"] 49 | 50 | [build-system] 51 | build-backend = "poetry.core.masonry.api" 52 | requires = ["poetry-core"] 53 | -------------------------------------------------------------------------------- /roles/README.rst: -------------------------------------------------------------------------------- 1 | dmarc-metrics-exporter Ansible role 2 | =================================== 3 | 4 | This Ansible role allows an automated deployment of dmarc-metrics-exporter. 5 | 6 | To use the role add 7 | 8 | .. code-block:: yaml 9 | 10 | roles: 11 | - name: dmarc_metrics_exporter 12 | src: https://github.com/jgosmann/dmarc-metrics-exporter.git 13 | 14 | to your ``requirements.yml`` 15 | and then install the role with: 16 | 17 | .. code-block:: bash 18 | 19 | ansible-galaxy install -r requirements.yml 20 | 21 | You can then use the role in your playbooks like so: 22 | 23 | .. 
code-block:: yaml 24 | 25 | - hosts: all 26 | roles: 27 | - role: dmarc-metrics-exporter 28 | vars: 29 | imap_username: dmarc@your-domain.com 30 | imap_password: !vault | 31 | $ANSIBLE_VAULT;1.1;AES256 32 | 62663862643861313432633433373264663362313362353865313362396666356230653630633135 33 | 6265623537383536363639613034643162396230376163610a363564306334326234386630646265 34 | 38626566663965633931366364613663626539623938633133303830613263383831363532326530 35 | 3062303461343065650a313935376235313466616233376639613437353230626561653534643537 36 | 6166 37 | 38 | Use ``ansible-vault encrypt_string`` to obtain an encrypted password. 39 | 40 | Available role variables 41 | ------------------------ 42 | 43 | * ``dmarc_metrics_exporter_version`` (string, default: ``""``): dmarc-metrics-exporter version to install. 44 | * ``dmarc_metrics_exporter_virtualenv_path`` (string, default: ``"/opt/dmarc_metrics_exporter"``): Path to create Python virtualenv in for the dmarc-metrics-exporter. 45 | * ``listen_addr`` (string, default ``"127.0.0.1"``): Listen address for the HTTP endpoint. 46 | * ``listen_port`` (number, default ``9797``): Port to listen on for the HTTP endpoint. 47 | * ``imap_host`` (string, default ``"localhost"``): Hostname of IMAP server to connect to. 48 | * ``imap_port`` (number, default ``993``): Port of the IMAP server to connect to. 49 | * ``imap_username`` (string, required): Login username for the IMAP connection. 50 | * ``imap_password``: (string, required): Login password for the IMAP connection. 51 | * ``imap_use_ssl``: (boolean, default ``true``): Whether to use SSL encryption on the IMAP connection. 52 | * ``imap_verify_certificate``: (boolean, default ``true``): Whether to verify the SSL certificate. 53 | * ``folder_inbox`` (string, default ``"INBOX"``): IMAP mailbox that is checked for incoming DMARC aggregate reports. 54 | * ``folder_done`` (string, default ``"Archive"``): IMAP mailbox that successfully processed reports are moved to. 
55 | * ``folder_error``: (string, default ``"Invalid"``): IMAP mailbox that emails are moved to that could not be processed. 56 | * ``poll_interval_seconds`` (number, default ``60``): How often to poll the IMAP server in seconds. 57 | * ``deduplication_max_seconds`` (number, default ``604800`` which is 7 days): How long individual report IDs will be remembered to avoid counting double delivered reports twice. 58 | * ``logging_config`` (map, default ``{}``): Logging configuration, see main documentation for details. 59 | -------------------------------------------------------------------------------- /roles/dmarc_metrics_exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dmarc_metrics_exporter_version: "" 3 | dmarc_metrics_exporter_virtualenv_path: /opt/dmarc-metrics-exporter 4 | 5 | listen_addr: "127.0.0.1" 6 | listen_port: 9797 7 | 8 | imap_host: "localhost" 9 | imap_port: 993 10 | imap_use_ssl: yes 11 | imap_verify_certificate: yes 12 | 13 | poll_interval_seconds: 60 14 | deduplication_max_seconds: 604800 15 | 16 | logging_config: {} 17 | 18 | folder_inbox: "INBOX" 19 | folder_done: "Archive" 20 | folder_error: "Invalid" 21 | -------------------------------------------------------------------------------- /roles/dmarc_metrics_exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Restart dmarc-metrics-exporter 4 | ansible.builtin.systemd: 5 | name: dmarc-metrics-exporter.service 6 | state: restarted 7 | daemon_reload: yes 8 | -------------------------------------------------------------------------------- /roles/dmarc_metrics_exporter/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | galaxy_info: 3 | role_name: dmarc_metrics_exporter 4 | author: Jan Gosmann 5 | description: Export Prometheus metrics from DMARC reports. 
6 | license: MIT 7 | min_ansible_version: 2.9 8 | 9 | dependencies: [] 10 | -------------------------------------------------------------------------------- /roles/dmarc_metrics_exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Provide configuration 4 | ansible.builtin.template: 5 | src: dmarc-metrics-exporter.json.j2 6 | dest: /etc/dmarc-metrics-exporter.json 7 | owner: root 8 | group: root 9 | mode: 0640 10 | notify: Restart dmarc-metrics-exporter 11 | 12 | - name: Install dmarc-metrics-exporter 13 | ansible.builtin.pip: 14 | name: dmarc-metrics-exporter 15 | version: "{{ dmarc_metrics_exporter_version }}" 16 | virtualenv_command: python3 -m venv 17 | virtualenv: "{{ dmarc_metrics_exporter_virtualenv_path }}" 18 | notify: Restart dmarc-metrics-exporter 19 | 20 | - name: Install systemd service 21 | ansible.builtin.template: 22 | src: dmarc-metrics-exporter.service.j2 23 | dest: /etc/systemd/system/dmarc-metrics-exporter.service 24 | owner: root 25 | group: root 26 | mode: 0644 27 | notify: Restart dmarc-metrics-exporter 28 | 29 | - name: Activate systemd service 30 | ansible.builtin.systemd: 31 | name: dmarc-metrics-exporter.service 32 | state: started 33 | enabled: yes 34 | -------------------------------------------------------------------------------- /roles/dmarc_metrics_exporter/templates/dmarc-metrics-exporter.json.j2: -------------------------------------------------------------------------------- 1 | { 2 | "listen_addr": "{{ listen_addr }}", 3 | "port": {{ listen_port }}, 4 | "imap": { 5 | "host": "{{ imap_host }}", 6 | "port": {{ imap_port }}, 7 | "username": "{{ imap_username }}", 8 | "password": "{{ imap_password }}", 9 | "use_ssl": {{ imap_use_ssl | to_json }}, 10 | "verify_certificate": {{ imap_verify_certificate | to_json }} 11 | }, 12 | "folders": { 13 | "inbox": "{{ folder_inbox }}", 14 | "done": "{{ folder_done }}", 15 | "error": "{{ folder_error }}" 16 | }, 17 | 
"storage_path": "/var/lib/dmarc-metrics-exporter", 18 | "poll_interval_seconds": {{ poll_interval_seconds }}, 19 | "deduplication_max_seconds": {{ deduplication_max_seconds }}, 20 | "logging": {{ logging_config | to_json }} 21 | } 22 | -------------------------------------------------------------------------------- /roles/dmarc_metrics_exporter/templates/dmarc-metrics-exporter.service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Export Prometheus metrics from DMARC reports. 3 | 4 | [Service] 5 | ExecStart={{ dmarc_metrics_exporter_virtualenv_path }}/bin/python3 -m dmarc_metrics_exporter --configuration "${CREDENTIALS_DIRECTORY}/dmarc-metrics-exporter.json" 6 | Environment=PYTHONUNBUFFERED=1 7 | Restart=on-failure 8 | 9 | DynamicUser=yes 10 | User=dmarc-metrics 11 | 12 | LoadCredential=dmarc-metrics-exporter.json:/etc/dmarc-metrics-exporter.json 13 | StateDirectory=dmarc-metrics-exporter 14 | 15 | CapabilityBoundingSet= 16 | LockPersonality=yes 17 | MemoryDenyWriteExecute=yes 18 | NoNewPrivileges=yes 19 | PrivateTmp=yes 20 | PrivateDevices=yes 21 | ProcSubset=pid 22 | ProtectClock=yes 23 | ProtectControlGroups=yes 24 | ProtectHome=yes 25 | ProtectHostname=yes 26 | ProtectKernelLogs=yes 27 | ProtectKernelModules=yes 28 | ProtectKernelTunables=yes 29 | ProtectProc=invisible 30 | ProtectSystem=full 31 | PrivateDevices=yes 32 | PrivateUsers=yes 33 | RestrictAddressFamilies=AF_INET AF_INET6 34 | RestrictNamespaces=yes 35 | RestrictRealtime=yes 36 | SystemCallArchitectures=native 37 | SystemCallFilter=@system-service 38 | SystemCallFilter=~@privileged @resources 39 | UMask=0077 40 | 41 | [Install] 42 | WantedBy=default.target 43 | --------------------------------------------------------------------------------