├── .dockerignore ├── .github ├── docker │ └── Dockerfile └── workflows │ └── ci.yaml ├── .gitignore ├── CHANGELOG.md ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── LICENSE ├── README.md ├── screenshots ├── about.png ├── dashboard.png ├── mail.png ├── mails.png ├── report.png ├── reports.png └── screenshots.md ├── src ├── background.rs ├── cache_map.rs ├── config.rs ├── dmarc.rs ├── geolocate.rs ├── hasher.rs ├── http.rs ├── http │ ├── dmarc_reports.rs │ ├── ips.rs │ ├── mails.rs │ ├── static_files.rs │ └── summary.rs ├── imap.rs ├── mail.rs ├── main.rs ├── state.rs ├── unpack.rs └── whois.rs ├── testdata └── dmarc-reports │ ├── acme.xml │ ├── aol.xml │ ├── gmxnet.xml │ ├── google.xml │ ├── hardfail.xml │ ├── mailru.xml │ ├── outlook.xml │ ├── solamora.xml │ ├── webde.xml │ └── yahoo.xml └── ui ├── chart.umd.4.4.2.min.js ├── components ├── about.js ├── app.js ├── dashboard.js ├── dmarc-report-table.js ├── dmarc-report.js ├── dmarc-reports.js ├── mail-table.js ├── mail.js ├── mails.js └── style.js ├── index.html └── lit-core.3.1.4.min.js /.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | .gitignore 3 | .github 4 | target 5 | testdata 6 | screenshots 7 | -------------------------------------------------------------------------------- /.github/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM scratch 2 | ARG TARGETOS TARGETARCH 3 | COPY $TARGETOS-$TARGETARCH/dmarc-report-viewer / 4 | EXPOSE 8080 5 | CMD ["./dmarc-report-viewer"] -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [push, pull_request] 3 | jobs: 4 | linux_x86_64: 5 | name: Linux (x86-64) 6 | runs-on: ubuntu-24.04 7 | steps: 8 | - name: Checkout 9 | uses: actions/checkout@v4 10 | - name: Update Rust 11 | run: rustup toolchain install stable --profile minimal --no-self-update 12 | - name: Install MUSL Toolchain 13 | run: rustup target add x86_64-unknown-linux-musl 14 | - name: Install MUSL dependencies 15 | run: sudo apt-get install musl-tools 16 | - name: Enable Rust Caching 17 | uses: Swatinem/rust-cache@v2 18 | - name: Release Build 19 | run: cargo build --release --all --target x86_64-unknown-linux-musl 20 | - name: Execute Tests 21 | run: cargo test --release --all --target x86_64-unknown-linux-musl 22 | - name: Run Clippy 23 | run: cargo clippy --release --all --target x86_64-unknown-linux-musl --all-targets --all-features --locked -- -D warnings 24 | - name: Check Formatting 25 | run: cargo fmt --all -- --check 26 | - uses: actions/upload-artifact@v4 27 | with: 28 | name: linux-x86_64 29 | path: target/x86_64-unknown-linux-musl/release/dmarc-report-viewer 30 | linux_aarch64: 31 | name: Linux (aarch64) 32 | runs-on: ubuntu-24.04 33 | steps: 34 | - name: Checkout 35 | uses: actions/checkout@v4 36 | - name: Update Rust 37 | run: rustup toolchain install stable --profile minimal --no-self-update 38 | - name: Install Cargo Binary Install 39 | run: curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash 40 | - name: Install Cargo Cross 41 | run: cargo binstall cross --no-confirm 42 | - name: Enable Rust Caching 43 | uses: Swatinem/rust-cache@v2 44 | - name: Release Build 45 | run: cross build --release --all --target aarch64-unknown-linux-musl 46 | - name: Execute 
Tests 47 | run: cross test --release --all --target aarch64-unknown-linux-musl 48 | - uses: actions/upload-artifact@v4 49 | with: 50 | name: linux-aarch64 51 | path: target/aarch64-unknown-linux-musl/release/dmarc-report-viewer 52 | windows_x86_64: 53 | name: Windows (x86-64) 54 | runs-on: windows-latest 55 | steps: 56 | - name: Checkout 57 | uses: actions/checkout@v4 58 | - name: Install NASM 59 | uses: ilammy/setup-nasm@v1 60 | - name: Update Rust 61 | run: rustup toolchain install stable --profile minimal --no-self-update 62 | - name: Enable Rust Caching 63 | uses: Swatinem/rust-cache@v2 64 | - name: Release Build 65 | run: cargo build --release --all 66 | - name: Execute Tests 67 | run: cargo test --release --all 68 | - uses: actions/upload-artifact@v4 69 | with: 70 | name: windows-x86_64 71 | path: target/release/dmarc-report-viewer.exe 72 | mac_x86_64: 73 | name: Mac (x86-64) 74 | runs-on: macos-latest 75 | steps: 76 | - name: Checkout 77 | uses: actions/checkout@v4 78 | - name: Update Rust 79 | run: rustup toolchain install stable --profile minimal --no-self-update 80 | - name: Install x64 target 81 | run: rustup target add x86_64-apple-darwin 82 | - name: Enable Rust Caching 83 | uses: Swatinem/rust-cache@v2 84 | - name: Release Build 85 | run: cargo build --release --all --target x86_64-apple-darwin 86 | - name: Execute Tests 87 | run: cargo test --release --all --target x86_64-apple-darwin 88 | - uses: actions/upload-artifact@v4 89 | with: 90 | name: mac-x86_64 91 | path: target/x86_64-apple-darwin/release/dmarc-report-viewer 92 | mac_aarch64: 93 | name: Mac (aarch64) 94 | runs-on: macos-latest 95 | steps: 96 | - name: Checkout 97 | uses: actions/checkout@v4 98 | - name: Update Rust 99 | run: rustup toolchain install stable --profile minimal --no-self-update 100 | - name: Enable Rust Caching 101 | uses: Swatinem/rust-cache@v2 102 | - name: Release Build 103 | run: cargo build --release --all --target aarch64-apple-darwin 104 | - name: Execute Tests 105 | run: cargo test --release --all --target aarch64-apple-darwin 106 | - uses: actions/upload-artifact@v4 107 | with: 108 | name: mac-aarch64 109 | path: target/aarch64-apple-darwin/release/dmarc-report-viewer 110 | docker_linux: 111 | name: Docker (Linux, x86-64, aarch64) 112 | needs: [linux_x86_64, linux_aarch64] 113 | runs-on: ubuntu-24.04 114 | permissions: 115 | contents: read 116 | packages: write 117 | steps: 118 | - name: Checkout 119 | uses: actions/checkout@v4 120 | - name: Download Artifacts 121 | uses: actions/download-artifact@v4 122 | - name: Prepare Binary Artifacts 123 | run: | 124 | mv linux-x86_64 .github/docker/linux-amd64 125 | mv linux-aarch64 .github/docker/linux-arm64 126 | chmod +x .github/docker/linux-amd64/dmarc-report-viewer 127 | chmod +x .github/docker/linux-arm64/dmarc-report-viewer 128 | - name: Build Docker Images 129 | run: | 130 | cd .github/docker/ 131 | docker builder create --name builder 132 | docker buildx build --builder builder --platform=linux/amd64,linux/arm64 --pull . 133 | - name: Build Develop Docker Images 134 | if: ${{github.ref == 'refs/heads/master'}} 135 | run: | 136 | cd .github/docker/ 137 | echo ${{secrets.GITHUB_TOKEN}} | docker login ghcr.io -u ${{github.actor}} --password-stdin 138 | docker buildx build --builder builder --platform=linux/amd64,linux/arm64 --pull --push --tag ghcr.io/${{github.actor}}/dmarc-report-viewer:develop . 
139 | docker run --rm ghcr.io/${{github.actor}}/dmarc-report-viewer:develop ./dmarc-report-viewer --version 140 | - name: Build Release Docker Images 141 | if: startsWith(github.ref, 'refs/tags/') 142 | run: | 143 | cd .github/docker/ 144 | echo ${{secrets.GITHUB_TOKEN}} | docker login ghcr.io -u ${{github.actor}} --password-stdin 145 | docker buildx build --builder builder --platform=linux/amd64,linux/arm64 --pull --push --tag ghcr.io/${{github.actor}}/dmarc-report-viewer:latest --tag ghcr.io/${{github.actor}}/dmarc-report-viewer:${{github.ref_name}} . 146 | docker run --rm ghcr.io/${{github.actor}}/dmarc-report-viewer:latest ./dmarc-report-viewer --version 147 | release: 148 | name: Release 149 | if: startsWith(github.ref, 'refs/tags/') 150 | needs: [linux_x86_64, linux_aarch64, windows_x86_64, mac_x86_64, mac_aarch64, docker_linux] 151 | runs-on: ubuntu-24.04 152 | permissions: 153 | contents: write 154 | steps: 155 | - name: Checkout 156 | uses: actions/checkout@v4 157 | - name: Download Artifacts 158 | uses: actions/download-artifact@v4 159 | - name: Pack Artifacts for Release 160 | run: | 161 | zip -r windows-x86_64.zip windows-x86_64 162 | zip -r linux-x86_64.zip linux-x86_64 163 | zip -r linux-aarch64.zip linux-aarch64 164 | zip -r mac-x86_64.zip mac-x86_64 165 | zip -r mac-aarch64.zip mac-aarch64 166 | - name: Publish Release 167 | uses: softprops/action-gh-release@v2 168 | with: 169 | body: Release created automatically from git tag ${{github.ref_name}}, see CHANGELOG.md for more details. 170 | files: | 171 | CHANGELOG.md 172 | LICENSE 173 | windows-x86_64.zip 174 | linux-x86_64.zip 175 | linux-aarch64.zip 176 | mac-x86_64.zip 177 | mac-aarch64.zip 178 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /todo.txt 3 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 
4 | 5 | ## [1.8.0] - 2025-05-20 6 | * Dashboard UI: Add filtering for charts by time span 7 | * Dashboard UI: Add filtering for charts by domain 8 | * Add support for attachments with uncompressed XML files 9 | * Allow scheduling IMAP updates using cron expressions instead of intervals 10 | * Allow SPF result "hardfail" as alias for "fail" 11 | * Improved visualization of dynamically queried source IP properties 12 | * Updated default IMAP chunk size to make MS Exchange servers happy 13 | * Fix to sum up results on dashboard correctly with row count 14 | * Fix to treat same XML file from different mails as separate XML files 15 | * Fix to deal with file names in headers that are split into multiple parts 16 | * Updated Cargo dependencies 17 | 18 | ## [1.7.0] - 2025-04-12 19 | * Dashboard UI: Use fixed colors for some well known big organizations 20 | * Dashboard UI: Limit size of legends in charts 21 | * Dashboard UI: Made order of values in charts stable 22 | * Extended low level logging for mail fetching and XML extraction 23 | * Fixed embedded documentation for certificate input file 24 | * Convert non-fatal IMAP error when closing connection into warning 25 | * Updated Cargo dependencies 26 | 27 | ## [1.6.0] - 2025-03-20 28 | * Improved active state of navbar links to include child pages 29 | * Introduced separate problem flags for DKIM and SPF 30 | * Detect more ZIP attachments correctly 31 | * Updated Cargo dependencies, including `zip` to fix CVE-2025-29787 and `ring` to fix GHSA-4p46-pwfr-66x6 32 | 33 | ## [1.5.0] - 2025-03-02 34 | * Fixed detection of (G)ZIP XML attachments with content type `application/octet-stream` 35 | * Added feature to look up DNS name of Source IP 36 | * Added feature to look up location of Source IP 37 | (uses free IP Geolocation API by ip-api.com, limited to 45 req/min) 38 | * Added feature to look up Whois record of Source IP 39 | * Updated Cargo dependencies 40 | 41 | ## [1.4.0] - 2025-02-15 42 | * Added option to inject additional custom CA certificates 43 | * Added option to disable TLS encryption for IMAP client 44 | * Updated Cargo dependencies 45 | 46 | ## [1.3.0] - 2025-01-21 47 | * Increased default IMAP check interval to 30 minutes 48 | * More robust mail fetching (RFC822.SIZE property is now optional) 49 | * Updated Cargo dependencies 50 | * Allow empty `sp` field in reports instead of failing to parse whole report 51 | * Docker images now expose port 8080 for improved auto-discovery 52 | * Made Web UI responsive to also work on smaller screens 53 | 54 | ## [1.2.0] - 2025-01-04 55 | * Fixed bugs and improved E-Mail subject decoding 56 | * Added Linux ARM 64bit binary artifacts and restructured builds 57 | * Added support for ARM 64bit Linux Docker images and publish them to Github registry 58 | * Updated Cargo dependencies 59 | 60 | ## [1.1.2] - 2025-01-01 61 | * Fix issue with iCloud Mail server not returning the mail body 62 | * Improved log messages for mails without XML report data 63 | * Extended log messages with time needed for background updates 64 | 65 | ## [1.1.1] - 2024-12-31 66 | * Some minor UI styling improvements and fixes 67 | * Fixed XML count bug in mails table 68 | * Better subject shortening for mails table 69 | * Updated Cargo dependencies 70 | * Added embedded help for some of the harder to understand policy fields in reports 71 | 72 | ## [1.1.0] - 2024-12-23 73 | * Restyled the whole application to look a bit nicer 74 | * Fixed missing git info (commit hash and ref name) in Docker builds 75 | * Added Mac OS builds for CI 
and Releases 76 | * Updated Cargo dependencies 77 | 78 | ## [1.0.0] - 2024-12-20 79 | First stable release. 80 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dmarc-report-viewer" 3 | description = "Standalone DMARC report viewer that fetches input data periodically from an IMAP mailbox" 4 | keywords = ["dmarc", "email"] 5 | categories = ["email"] 6 | license = "MIT" 7 | readme = "README.md" 8 | repository = "https://github.com/cry-inc/dmarc-report-viewer" 9 | version = "1.8.0" 10 | edition = "2021" 11 | 12 | [dependencies] 13 | axum = "0.8" 14 | anyhow = "1" 15 | flate2 = "1" 16 | sha2 = "0.10" 17 | cron = "0.15" 18 | regex = "1.11" 19 | futures = "0.3" 20 | tracing = "0.1" 21 | base64 = "0.22" 22 | serde_json = "1" 23 | dns-lookup = "2" 24 | urlencoding = "2" 25 | chrono = "0.4.20" 26 | mailparse = "0.16" 27 | tokio-util = "0.7" 28 | webpki-roots = "1" 29 | axum-server = "0.7" 30 | tokio-rustls = "0.26" 31 | http-body-util = "0.1" 32 | tracing-subscriber = "0.3" 33 | hyper = { version = "1", features = ["full"] } 34 | serde = {version = "1", features = ["derive"] } 35 | hyper-util = { version = "0.1", features = ["full"] } 36 | clap = { version = "4", features = ["derive", "env"] } 37 | rustls-acme = { version = "0.13", features = ["axum"] } 38 | quick-xml = {version = "0.37", features = ["serialize"] } 39 | tower-http = { version = "0.6", features = ["compression-gzip"] } 40 | zip = { version = "4", default-features = false, features = ["deflate"] } 41 | tokio = { version = "1.36", features = ["macros", "rt-multi-thread", "signal"] } 42 | async-imap = {version = "0.10", default-features = false, features = ["runtime-tokio"] } 43 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Temporary build container 2 | FROM rust:1-alpine AS builder 3 | 4 | # Get ENV variables for build info from build args 5 | ARG GITHUB_SHA="n/a" 6 | ARG GITHUB_REF_NAME="n/a" 7 | ENV GITHUB_SHA=$GITHUB_SHA 8 | ENV GITHUB_REF_NAME=$GITHUB_REF_NAME 9 | 10 | # Install build dependencies 11 | RUN apk add --no-cache musl-dev make cmake g++ 12 | 13 | # Copy source code into container 14 | WORKDIR /usr/src 15 | COPY . . 
16 | 17 | # Build Rust binary 18 | ENV CARGO_TARGET_DIR=/usr/src/target 19 | RUN cargo build --release 20 | 21 | # Remove debug symbols 22 | RUN strip /usr/src/target/release/dmarc-report-viewer 23 | 24 | # Build final minimal image with only the binary 25 | FROM scratch 26 | COPY --from=builder /usr/src/target/release/dmarc-report-viewer / 27 | EXPOSE 8080 28 | CMD ["./dmarc-report-viewer"] 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 cry-inc 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DMARC Report Viewer 2 | [![Build Status](https://github.com/cry-inc/dmarc-report-viewer/workflows/CI/badge.svg)](https://github.com/cry-inc/dmarc-report-viewer/actions) 3 | [![No Unsafe](https://img.shields.io/badge/unsafe-forbidden-brightgreen.svg)](https://doc.rust-lang.org/nomicon/meet-safe-and-unsafe.html) 4 | [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) 5 | [![Dependencies](https://deps.rs/repo/github/cry-inc/dmarc-report-viewer/status.svg)](https://deps.rs/repo/github/cry-inc/dmarc-report-viewer) 6 | 7 | A lightweight selfhosted standalone DMARC report viewer that automatically fetches input data periodically from an IMAP mailbox. 8 | Ideal for smaller selfhosted mailservers to browse, visualize and analyze the DMARC reports. 9 | 10 | The application is a single fully statically linked executable written in Rust. 11 | It combines a DMARC report parser with an IMAP client and an HTTP server. 12 | The embedded HTTP server offers a web UI for easy access and filtering of the reports. 13 | 14 | You can run the precompiled executable directly on any Linux, Windows or MacOS system. 15 | Alternatively, you can use the tiny 10 MB Docker image to deploy the application. 16 | It is also easy to [build the application](#build-from-source) directly from source. 17 | 18 | ![Screenshot of Dashboard](screenshots/dashboard.png "Screenshot of Dashboard") 19 | You can find more screenshots [here](screenshots/screenshots.md). 
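The three building blocks mentioned above (DMARC report parser, IMAP client and embedded HTTP server) run inside a single Tokio runtime. The following is a simplified sketch of how they are wired together, based on the function signatures in `src/background.rs` and `src/http.rs`; the real `src/main.rs` is not part of this excerpt, and `AppState::default()` as well as the stop-channel wiring are assumptions for illustration only:

    use std::sync::Arc;
    use tokio::sync::{mpsc, Mutex};

    // Crate-internal `mod` declarations and `use` statements omitted for brevity
    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        // Configuration is parsed from CLI arguments and ENV variables (src/config.rs)
        let config = Configuration::new();

        // Shared state holding mails, parsed DMARC reports and per-IP lookup caches
        let state = Arc::new(Mutex::new(AppState::default())); // assumed constructor

        // Channel used to ask the background task to stop on shutdown
        let (_stop_tx, stop_rx) = mpsc::channel(1);

        // Background task: fetch mails via IMAP and parse DMARC reports periodically
        let bg_task = start_bg_task(config.clone(), state.clone(), stop_rx);

        // Embedded HTTP(S) server: serves the web UI and JSON API from the shared state
        run_http_server(&config, state).await?;

        bg_task.await?;
        Ok(())
    }

The IMAP fetching and the web UI communicate only through the state behind the `Arc<Mutex<...>>`, so the UI keeps serving the previously fetched data while a mailbox update is running.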
20 | 21 | ## Features 22 | - [x] Lightweight Docker image for easy deployment 23 | - [x] Prebuilt binaries and Docker images 24 | - [x] Runs out of the box on a Raspberry Pi 25 | - [x] Secure IMAP client (TLS & STARTTLS) 26 | - [x] Robust parsing of XML DMARC reports 27 | - [x] Embedded HTTP server for Web UI 28 | - [x] Responsive Web UI that also works on small screens 29 | - [x] Automatic fetching of reports from IMAP inbox 30 | - [x] Updates are scheduled via a simple update interval or a cron expression 31 | - [x] Automatic HTTPS via ACME/Let's Encrypt 32 | - [x] Basic Auth password protection for HTTP server 33 | - [x] Easy configuration via command line arguments or ENV variables 34 | - [x] Configurable maximum size of mails (to skip oversized mails) 35 | - [x] Summary with charts for domains, organizations and passed/failed checks 36 | - [x] Viewing filtered lists of reports 37 | - [x] Viewing of individual DMARC reports 38 | - [x] Export DMARC reports as XML or JSON documents 39 | - [x] List all mails in the IMAP inbox 40 | - [x] Viewing of individual mail metadata with a list of extracted reports 41 | - [x] Show parsing errors for DMARC reports 42 | - [x] Lookup of DNS, location, whois and other source IP properties 43 | 44 | ## Changelog 45 | Read the [CHANGELOG.md](CHANGELOG.md) file for a list of all released versions and their corresponding changes. 46 | 47 | ## Run with Docker 48 | The latest versions are automatically published as Docker images in the GitHub container registry. 49 | You can download the latest release using the command `sudo docker pull ghcr.io/cry-inc/dmarc-report-viewer`. 50 | 51 | ### Available Docker Tags 52 | The following tags are available (aside from the versioned tag for all individual releases): 53 | * `latest` (Latest stable release) 54 | * `develop` (Last development build from master branch) 55 | 56 | ### Configuration 57 | List all available configuration parameters with the corresponding environment variables and default values by running this command: 58 | `sudo docker run --rm ghcr.io/cry-inc/dmarc-report-viewer ./dmarc-report-viewer --help`. 59 | 60 | You can configure the application with command line arguments or environment variables. 61 | For the Docker use case, environment variables are recommended. 62 | Do not forget to forward the port for the HTTP server! 63 | By default the HTTP server will use port 8080. 64 | 65 | Here is an example: 66 | 67 | sudo docker run --rm \ 68 | -e IMAP_HOST=imap.mymailserver.com \ 69 | -e IMAP_USER=dmarc@mymailserver.com \ 70 | -e IMAP_PASSWORD=mysecurepassword \ 71 | -e HTTP_SERVER_USER=webui-user \ 72 | -e HTTP_SERVER_PASSWORD=webui-password \ 73 | -p 8080:8080 \ 74 | ghcr.io/cry-inc/dmarc-report-viewer 75 | 76 | ### IMAP with STARTTLS 77 | By default the IMAP client will attempt to use a TLS encrypted connection on port 993. 78 | For STARTTLS set the ENV variables `IMAP_STARTTLS=TRUE` and `IMAP_PORT=143`. 79 | 80 | ### HTTPS for UI 81 | By default, the application will start an unencrypted and insecure HTTP server. 82 | It is *strongly* recommended to use the automatic HTTPS feature, which automatically fetches and renews a certificate from Let's Encrypt. 83 | This feature uses the TLS-ALPN-01 challenge, which also uses the HTTPS port 443 for the challenge. No port 80 required! 84 | Alternatively, you can use a separate HTTPS reverse proxy like [Caddy](https://caddyserver.com/) to secure the application.
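For the curious: under the hood the automatic HTTPS feature is built on the `rustls-acme` and `axum-server` crates. The following condensed sketch follows the setup in `src/http.rs`; the domain, contact address and certificate cache directory are placeholder values:

    use futures::StreamExt;
    use rustls_acme::{caches::DirCache, AcmeConfig};
    use std::{net::SocketAddr, path::PathBuf};

    async fn serve_https(
        addr: SocketAddr,
        make_service: axum::routing::IntoMakeService<axum::Router>,
    ) -> anyhow::Result<()> {
        let mut acme_state = AcmeConfig::new(["dmarc.mymailserver.com"])
            .contact([String::from("mailto:admin@mymailserver.com")])
            .cache_option(Some(DirCache::new(PathBuf::from("/certs"))))
            .directory_lets_encrypt(true)
            .state();
        let rustls_config = acme_state.default_rustls_config();
        let acceptor = acme_state.axum_acceptor(rustls_config);

        // Drive certificate orders and renewals in the background
        tokio::spawn(async move {
            while let Some(event) = acme_state.next().await {
                println!("ACME event: {event:?}");
            }
        });

        // Serve the same router as for plain HTTP, but behind the ACME TLS acceptor
        axum_server::bind(addr)
            .acceptor(acceptor)
            .serve(make_service)
            .await?;
        Ok(())
    }

Because TLS-ALPN-01 completes the challenge on the TLS listener itself, a single public port (443) is sufficient and no extra HTTP listener on port 80 is needed.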
85 | 86 | To use the automatic HTTPS feature you need to make sure that the public port exposed to the internet is 443. 87 | You should also persist the certificate caching directory on your host file system: 88 | 89 | sudo docker run --rm \ 90 | -e IMAP_HOST=imap.mymailserver.com \ 91 | -e IMAP_USER=dmarc@mymailserver.com \ 92 | -e IMAP_PASSWORD=mysecurepassword \ 93 | -e HTTP_SERVER_PORT=8443 \ 94 | -e HTTP_SERVER_USER=webui-user \ 95 | -e HTTP_SERVER_PASSWORD=webui-password \ 96 | -e HTTPS_AUTO_CERT=true \ 97 | -e HTTPS_AUTO_CERT_CACHE=/certs \ 98 | -e HTTPS_AUTO_CERT_MAIL=admin@mymailserver.com \ 99 | -e HTTPS_AUTO_CERT_DOMAIN=dmarc.mymailserver.com \ 100 | -v /host/cert/folder:/certs \ 101 | -p 443:8443 \ 102 | ghcr.io/cry-inc/dmarc-report-viewer 103 | 104 | ## Build from Source 105 | 1. Install the Rust toolchain (see https://rustup.rs/) 106 | 2. Check out this repository (or download and extract the ZIP file) 107 | 3. Run the command `cargo build --release` in the folder with this README file 108 | 4. Find the compiled executable in the folder `target/release` 109 | 5. Use the help argument to list all possible configuration parameters: `dmarc-report-viewer --help` 110 | 111 | ### Docker Builds (Linux only) 112 | The Dockerfile works for `amd64` and `arm64` architectures. 113 | 1. Install Docker 114 | 2. Check out this repository (or download and extract the ZIP file) 115 | 3. Run the command `sudo docker build . --pull --tag dmarc-report-viewer` in the folder with this README file 116 | 4. You should now be able to see the new Docker image using the command `sudo docker images` 117 | 118 | ## Acknowledgments 119 | - https://github.com/bbustin/dmarc_aggregate_parser was used as the foundation for the slightly modified DMARC report parser 120 | - [Chart.js](https://github.com/chartjs/Chart.js) and [Lit](https://lit.dev/) are embedded as JavaScript libraries for the UI 121 | - All the other Rust dependencies in [Cargo.toml](Cargo.toml) that make this application possible!
122 | -------------------------------------------------------------------------------- /screenshots/about.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/dmarc-report-viewer/7a0962ea2040c09109493662f401d5a5863bc515/screenshots/about.png -------------------------------------------------------------------------------- /screenshots/dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/dmarc-report-viewer/7a0962ea2040c09109493662f401d5a5863bc515/screenshots/dashboard.png -------------------------------------------------------------------------------- /screenshots/mail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/dmarc-report-viewer/7a0962ea2040c09109493662f401d5a5863bc515/screenshots/mail.png -------------------------------------------------------------------------------- /screenshots/mails.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/dmarc-report-viewer/7a0962ea2040c09109493662f401d5a5863bc515/screenshots/mails.png -------------------------------------------------------------------------------- /screenshots/report.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/dmarc-report-viewer/7a0962ea2040c09109493662f401d5a5863bc515/screenshots/report.png -------------------------------------------------------------------------------- /screenshots/reports.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/dmarc-report-viewer/7a0962ea2040c09109493662f401d5a5863bc515/screenshots/reports.png -------------------------------------------------------------------------------- /screenshots/screenshots.md: -------------------------------------------------------------------------------- 1 | ## Dashboard 2 | ![Screenshot of Dashboard](dashboard.png "Screenshot of Dashboard") 3 | 4 | ## List of Mails 5 | ![Screenshot of Mails](mails.png "Screenshot of Mails") 6 | 7 | ## Mail Details 8 | ![Screenshot of Mail Details](mail.png "Screenshot of Mail Details") 9 | 10 | ## List of Reports 11 | ![Screenshot of Reports](reports.png "Screenshot of Reports") 12 | 13 | ## Report Details 14 | ![Screenshot of Report Details](report.png "Screenshot of Report Details") 15 | 16 | ## About Page 17 | ![Screenshot of About Page](about.png "Screenshot of About Page") 18 | -------------------------------------------------------------------------------- /src/background.rs: -------------------------------------------------------------------------------- 1 | use crate::config::Configuration; 2 | use crate::dmarc::{DmarcParsingError, Report}; 3 | use crate::hasher::create_hash; 4 | use crate::imap::get_mails; 5 | use crate::state::{AppState, DmarcReportWithUid}; 6 | use crate::unpack::extract_xml_files; 7 | use anyhow::{Context, Result}; 8 | use chrono::Local; 9 | use std::collections::HashMap; 10 | use std::sync::Arc; 11 | use std::time::{Duration, Instant, SystemTime}; 12 | use tokio::sync::mpsc::Receiver; 13 | use tokio::sync::Mutex; 14 | use tokio::task::JoinHandle; 15 | use tracing::{error, info, trace, warn}; 16 | 17 | pub fn start_bg_task( 18 | config: Configuration, 19 | state: Arc>, 20 | mut stop_signal: Receiver<()>, 21 | ) -> JoinHandle<()> { 22 | 
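    // The spawned task runs an endless loop: it fetches mails via IMAP, extracts
    // the XML attachments, parses them into DMARC reports and publishes the
    // results into the shared application state. Between runs it sleeps either
    // for the configured `imap_check_interval` or until the next occurrence of
    // `imap_check_schedule`, and it ends the loop when `stop_signal` fires
    // while waiting.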
tokio::spawn(async move { 23 | info!( 24 | "Started background task with check interval of {} secs", 25 | config.imap_check_interval 26 | ); 27 | loop { 28 | let start = Instant::now(); 29 | info!("Starting background update..."); 30 | match bg_update(&config, &state).await { 31 | Ok(..) => info!( 32 | "Finished background update after {:.3}s", 33 | start.elapsed().as_secs_f64() 34 | ), 35 | Err(err) => error!("Failed background update: {err:#}"), 36 | }; 37 | 38 | // Check how many seconds we need to sleep 39 | let mut duration = Duration::from_secs(config.imap_check_interval); 40 | if let Some(schedule) = &config.imap_check_schedule { 41 | if let Some(next_update) = schedule.upcoming(Local).next() { 42 | let delta = next_update - Local::now(); 43 | duration = Duration::from_millis(delta.num_milliseconds().max(0) as u64) 44 | } else { 45 | warn!("Unable to find next scheduled check, falling back to interval...") 46 | } 47 | } 48 | 49 | // Print next update time 50 | let next = Local::now() + duration; 51 | info!("Next update is planned for {next}"); 52 | 53 | tokio::select! { 54 | _ = tokio::time::sleep(duration) => {}, 55 | _ = stop_signal.recv() => { break; }, 56 | } 57 | } 58 | }) 59 | } 60 | 61 | async fn bg_update(config: &Configuration, state: &Arc>) -> Result<()> { 62 | let mut mails = get_mails(config).await.context("Failed to get mails")?; 63 | 64 | let mut xml_files = HashMap::new(); 65 | let mut mails_without_xml = 0; 66 | for mail in &mut mails.values_mut() { 67 | if mail.body.is_none() { 68 | trace!( 69 | "Skipping data extraction for mail with UID {} because of empty body", 70 | mail.uid 71 | ); 72 | continue; 73 | } 74 | match extract_xml_files(mail) { 75 | Ok(files) => { 76 | if files.is_empty() { 77 | mails_without_xml += 1; 78 | } 79 | for xml_file in files { 80 | xml_files.insert(xml_file.hash.clone(), xml_file); 81 | mail.xml_files += 1; 82 | } 83 | } 84 | Err(err) => warn!("Failed to extract XML files from mail: {err:#}"), 85 | } 86 | } 87 | if mails_without_xml > 0 { 88 | warn!("Found {mails_without_xml} mail(s) without XML files"); 89 | } 90 | info!("Extracted {} XML file(s)", xml_files.len()); 91 | 92 | let mut dmarc_parsing_errors = HashMap::new(); 93 | let mut dmarc_reports = HashMap::new(); 94 | for xml_file in xml_files.values() { 95 | match Report::from_slice(&xml_file.data) { 96 | Ok(report) => { 97 | let rwu = DmarcReportWithUid { 98 | report, 99 | uid: xml_file.mail_uid, 100 | }; 101 | let binary = 102 | serde_json::to_vec(&rwu).context("Failed to serialize report with UID")?; 103 | let hash = create_hash(&binary, None); 104 | dmarc_reports.insert(hash, rwu); 105 | } 106 | Err(err) => { 107 | // Prepare error information 108 | let error_str = format!("{err:#}"); 109 | let error = DmarcParsingError { 110 | error: error_str, 111 | xml: String::from_utf8_lossy(&xml_file.data).to_string(), 112 | }; 113 | 114 | // Store in error hashmap for fast lookup 115 | let entry: &mut Vec = 116 | dmarc_parsing_errors.entry(xml_file.mail_uid).or_default(); 117 | entry.push(error); 118 | 119 | // Increase error counter for mail 120 | let mail = mails 121 | .get_mut(&xml_file.mail_uid) 122 | .context("Failed to find mail")?; 123 | mail.parsing_errors += 1; 124 | } 125 | } 126 | } 127 | info!("Parsed {} DMARC reports successfully", dmarc_reports.len()); 128 | if !dmarc_parsing_errors.is_empty() { 129 | warn!( 130 | "Failed to parse {} XML file as DMARC reports", 131 | dmarc_parsing_errors.len() 132 | ); 133 | } 134 | 135 | let timestamp = SystemTime::now() 136 | 
.duration_since(SystemTime::UNIX_EPOCH) 137 | .context("Failed to get Unix time stamp")? 138 | .as_secs(); 139 | 140 | { 141 | let mut locked_state = state.lock().await; 142 | locked_state.mails = mails; 143 | locked_state.dmarc_reports = dmarc_reports; 144 | locked_state.last_update = timestamp; 145 | locked_state.xml_files = xml_files.len(); 146 | locked_state.dmarc_parsing_errors = dmarc_parsing_errors; 147 | } 148 | 149 | Ok(()) 150 | } 151 | -------------------------------------------------------------------------------- /src/cache_map.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{ensure, Result}; 2 | use std::collections::HashMap; 3 | use std::hash::Hash; 4 | 5 | /// Very simple map for caching data. 6 | /// Cached values are identified by a unique key. 7 | /// The cache only keeps up to `max_size` entries. 8 | /// When inserting new entries, the oldest entry 9 | /// is deleted if `max_size` was already reached. 10 | pub struct CacheMap { 11 | map: HashMap>, 12 | max_size: usize, 13 | counter: usize, 14 | } 15 | 16 | struct Entry { 17 | pub inserted: usize, 18 | pub value: T, 19 | } 20 | 21 | impl CacheMap 22 | where 23 | K: Eq + Hash + Clone, 24 | { 25 | pub fn new(max_size: usize) -> Result { 26 | ensure!(max_size >= 1, "max_size needs to be one or bigger"); 27 | Ok(Self { 28 | map: HashMap::new(), 29 | max_size, 30 | counter: 0, 31 | }) 32 | } 33 | 34 | pub fn get(&self, key: &K) -> Option<&V> { 35 | self.map.get(key).map(|e| &e.value) 36 | } 37 | 38 | pub fn insert(&mut self, key: K, value: V) { 39 | if self.map.len() >= self.max_size { 40 | self.prune(); 41 | } 42 | let entry = Entry { 43 | inserted: self.counter, 44 | value, 45 | }; 46 | self.map.insert(key, entry); 47 | self.counter += 1; 48 | } 49 | 50 | fn prune(&mut self) { 51 | let oldest = self 52 | .map 53 | .iter() 54 | .min_by(|a, b| a.1.inserted.cmp(&b.1.inserted)) 55 | .map(|m| m.0) 56 | .cloned(); 57 | if let Some(oldest) = &oldest { 58 | self.map.remove(oldest); 59 | } 60 | } 61 | } 62 | 63 | #[cfg(test)] 64 | mod tests { 65 | use super::CacheMap; 66 | 67 | #[test] 68 | fn basic() { 69 | let mut cache = CacheMap::new(1).unwrap(); 70 | 71 | assert!(cache.get(&1).is_none()); 72 | cache.insert(1, 23); 73 | assert_eq!(cache.get(&1), Some(&23)); 74 | 75 | cache.insert(1, 42); 76 | assert_eq!(cache.get(&1), Some(&42)); 77 | 78 | cache.insert(2, 666); 79 | assert_eq!(cache.get(&2), Some(&666)); 80 | assert!(cache.get(&1).is_none()); 81 | } 82 | 83 | #[test] 84 | fn invalid_size() { 85 | assert!(CacheMap::::new(0).is_err()); 86 | } 87 | 88 | #[test] 89 | fn pruning() { 90 | let mut cache = CacheMap::new(3).unwrap(); 91 | 92 | cache.insert(1, 1); 93 | cache.insert(2, 2); 94 | cache.insert(3, 3); 95 | assert_eq!(cache.get(&1), Some(&1)); 96 | assert_eq!(cache.get(&2), Some(&2)); 97 | assert_eq!(cache.get(&3), Some(&3)); 98 | 99 | cache.insert(4, 4); 100 | cache.insert(5, 5); 101 | assert!(cache.get(&1).is_none()); 102 | assert!(cache.get(&2).is_none()); 103 | assert_eq!(cache.get(&3), Some(&3)); 104 | assert_eq!(cache.get(&4), Some(&4)); 105 | assert_eq!(cache.get(&5), Some(&5)); 106 | } 107 | 108 | #[test] 109 | fn replacing() { 110 | let mut cache = CacheMap::new(1).unwrap(); 111 | 112 | cache.insert(1, 1); 113 | assert_eq!(cache.get(&1), Some(&1)); 114 | cache.insert(1, 2); 115 | assert_eq!(cache.get(&1), Some(&2)); 116 | cache.insert(1, 3); 117 | assert_eq!(cache.get(&1), Some(&3)); 118 | } 119 | } 120 | 
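// Illustrative usage sketch (not part of the original file): the HTTP handlers in
// src/http/ips.rs use a CacheMap just like this to memoize per-IP lookup results.
// The capacity of 128 and the looked-up host name are example values.
#[cfg(test)]
mod usage_sketch {
    use super::CacheMap;
    use std::net::IpAddr;

    #[test]
    fn dns_cache_sketch() {
        let mut dns_cache: CacheMap<IpAddr, String> = CacheMap::new(128).unwrap();
        let ip: IpAddr = "203.0.113.7".parse().unwrap();
        if dns_cache.get(&ip).is_none() {
            // Only perform the expensive DNS lookup on a cache miss
            dns_cache.insert(ip, String::from("mail.example.org"));
        }
        assert_eq!(
            dns_cache.get(&ip).map(String::as_str),
            Some("mail.example.org")
        );
    }
}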
-------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, ValueEnum}; 2 | use cron::Schedule; 3 | use std::path::PathBuf; 4 | use tracing::{info, Level}; 5 | 6 | #[derive(Parser, Clone)] 7 | #[command(version, about, long_about = None)] 8 | pub struct Configuration { 9 | /// Host name or domain of the IMAP server with the DMARC reports inbox 10 | #[arg(long, env)] 11 | pub imap_host: String, 12 | 13 | /// User name of the IMAP inbox with the DMARC reports 14 | #[arg(long, env)] 15 | pub imap_user: String, 16 | 17 | /// Password of the IMAP inbox with the DMARC reports 18 | #[arg(long, env)] 19 | pub imap_password: String, 20 | 21 | /// TLS encrypted port of the IMAP server 22 | #[arg(long, env, default_value_t = 993)] 23 | pub imap_port: u16, 24 | 25 | /// Enable STARTTLS mode for IMAP client (IMAP port should be set to 143) 26 | #[arg(long, env, conflicts_with = "imap_disable_tls")] 27 | pub imap_starttls: bool, 28 | 29 | /// Optional path to additional TLS root certificates used for creating the IMAP TLS connections. 30 | /// The default set is a compiled-in copy of the root certificates trusted by Mozilla. 31 | /// The path should point to a PEM file with one or more X.509 certificates. 32 | #[arg(long, env)] 33 | pub imap_tls_ca_certs: Option<PathBuf>, 34 | 35 | /// Will disable TLS encryption for the IMAP connection (IMAP port should be set to 143). 36 | /// Not recommended. NEVER use this for a remote IMAP server over a network! 37 | /// This is ONLY intended for connecting to IMAP servers or proxies on the same machine! 38 | #[arg(long, env, conflicts_with = "imap_starttls")] 39 | pub imap_disable_tls: bool, 40 | 41 | /// IMAP folder with the DMARC reports 42 | #[arg(long, env, default_value = "INBOX")] 43 | pub imap_folder: String, 44 | 45 | /// Method of requesting the mail body from the IMAP server. 46 | /// The default should work for most IMAP servers. 47 | /// Only try other values if you have issues with missing mail bodies. 48 | #[arg(long, env, default_value = "default")] 49 | pub imap_body_request: ImapBodyRequest, 50 | 51 | /// TCP connection timeout for IMAP server in seconds 52 | #[arg(long, env, default_value_t = 10)] 53 | pub imap_timeout: u64, 54 | 55 | /// Number of mails downloaded in one chunk, must be bigger than 0. 56 | /// The default value should work for most IMAP servers. 57 | /// Try lower values in case of warnings like "Unable to fetch some mails from chunk"! 58 | #[arg(long, env, default_value_t = 2000)] 59 | pub imap_chunk_size: usize, 60 | 61 | /// Interval between checking for new reports in IMAP inbox in seconds 62 | #[arg(long, env, default_value_t = 1800)] 63 | pub imap_check_interval: u64, 64 | 65 | /// Schedule for checking the IMAP inbox. 66 | /// Specified as a cron expression string (in Local time). 67 | /// Will replace and override the IMAP check interval if specified. 68 | /// Columns: sec, min, hour, day of month, month, day of week, year. 69 | /// When running the official Docker image the local time zone will be UTC. 70 | /// To change this, set the `TZ` ENV var. Since the image comes without 71 | /// time zone data, you also need to mount the host folder 72 | /// `/usr/share/zoneinfo` into the container. 73 | #[arg(long, env)] 74 | pub imap_check_schedule: Option<Schedule>, 75 | 76 | /// Embedded HTTP server port for web UI. 77 | /// Needs to be bigger than 0 because for 0 a random port will be used!
78 | #[arg(long, env, default_value_t = 8080)] 79 | pub http_server_port: u16, 80 | 81 | /// Embedded HTTP server binding for web UI. 82 | /// Needs to be an IP address. 83 | /// The default will use all IPs of the host. 84 | /// Use `127.0.0.1` to make the server only available on localhost! 85 | #[arg(long, env, default_value = "0.0.0.0")] 86 | pub http_server_binding: String, 87 | 88 | /// Username for the HTTP server basic auth login 89 | #[arg(long, env, default_value = "dmarc")] 90 | pub http_server_user: String, 91 | 92 | /// Password for the HTTP server basic auth login. 93 | /// Use empty string to disable (not recommended). 94 | #[arg(long, env)] 95 | pub http_server_password: String, 96 | 97 | /// Enable automatic HTTPS encryption using Let's Encrypt certificates. 98 | /// This will replace the HTTP protocol on the configured HTTP port with HTTPS. 99 | /// There is no second separate port for HTTPS! 100 | /// This uses the TLS-ALPN-01 challenge and therefore the public HTTPS port MUST be 443! 101 | #[arg( 102 | long, 103 | env, 104 | requires = "https_auto_cert_domain", 105 | requires = "https_auto_cert_mail", 106 | requires = "https_auto_cert_cache" 107 | )] 108 | pub https_auto_cert: bool, 109 | 110 | /// Contact E-Mail address, required for automatic HTTPS 111 | #[arg(long, env)] 112 | pub https_auto_cert_mail: Option, 113 | 114 | /// Certificate caching directory, required for automatic HTTPS 115 | #[arg(long, env)] 116 | pub https_auto_cert_cache: Option, 117 | 118 | /// HTTPS server domain, required for automatic HTTPS 119 | #[arg(long, env)] 120 | pub https_auto_cert_domain: Option, 121 | 122 | /// Log level (trace, debug, info, warn, error) 123 | #[arg(long, env, default_value_t = Level::INFO)] 124 | pub log_level: Level, 125 | 126 | /// Maximum mail size in bytes, anything bigger will be ignored and not parsed 127 | #[arg(long, env, default_value_t = 1024 * 1024 * 1)] 128 | pub max_mail_size: u32, 129 | } 130 | 131 | impl Configuration { 132 | pub fn new() -> Self { 133 | Configuration::parse() 134 | } 135 | 136 | pub fn log(&self) { 137 | info!("Log Level: {}", self.log_level); 138 | 139 | info!("IMAP Host: {}", self.imap_host); 140 | info!("IMAP Port: {}", self.imap_port); 141 | info!("IMAP STARTTLS: {}", self.imap_starttls); 142 | info!("IMAP TLS CA Certificate File: {:?}", self.imap_tls_ca_certs); 143 | info!("IMAP TLS Disabled: {}", self.imap_disable_tls); 144 | info!("IMAP User: {}", self.imap_user); 145 | info!("IMAP Check Interval: {} seconds", self.imap_check_interval); 146 | info!( 147 | "IMAP Schedule: {}", 148 | self.imap_check_schedule 149 | .as_ref() 150 | .map(|s| s.source().to_string()) 151 | .unwrap_or(String::from("None")) 152 | ); 153 | info!("IMAP Body Request: {:?}", self.imap_body_request); 154 | info!("IMAP Chunk Size: {}", self.imap_chunk_size); 155 | info!("IMAP Timeout: {}", self.imap_timeout); 156 | 157 | info!("HTTP Binding: {}", self.http_server_binding); 158 | info!("HTTP Port: {}", self.http_server_port); 159 | info!("HTTP User: {}", self.http_server_user); 160 | 161 | info!("HTTPS Enabled: {}", self.https_auto_cert); 162 | info!("HTTPS Domain: {:?}", self.https_auto_cert_domain); 163 | info!("HTTPS Mail: {:?}", self.https_auto_cert_mail); 164 | info!("HTTPS Cache Dir: {:?}", self.https_auto_cert_cache); 165 | 166 | info!("Maximum Mail Body Size: {} bytes", self.max_mail_size); 167 | } 168 | } 169 | 170 | #[derive(Clone, ValueEnum, Debug, Default)] 171 | pub enum ImapBodyRequest { 172 | /// RFC822 and BODY[] 173 | #[default] 174 | Default, 
175 | /// RFC822 176 | Rfc822, 177 | /// BODY[] 178 | Body, 179 | } 180 | 181 | impl ImapBodyRequest { 182 | pub fn to_request_string(&self) -> String { 183 | match &self { 184 | ImapBodyRequest::Default => String::from("RFC822 BODY[]"), 185 | ImapBodyRequest::Rfc822 => String::from("RFC822"), 186 | ImapBodyRequest::Body => String::from("BODY[]"), 187 | } 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /src/geolocate.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, ensure, Context, Result}; 2 | use axum::http::uri::Scheme; 3 | use http_body_util::{BodyExt, Empty}; 4 | use hyper::body::Bytes; 5 | use hyper::client::conn::http1; 6 | use hyper::{Request, Uri}; 7 | use hyper_util::rt::TokioIo; 8 | use serde::{Deserialize, Serialize}; 9 | use std::net::IpAddr; 10 | use tokio::net::TcpStream; 11 | use tracing::error; 12 | 13 | #[derive(Debug, Clone, Serialize, Deserialize)] 14 | #[serde(rename_all = "camelCase")] 15 | pub struct Location { 16 | #[serde(rename = "as")] 17 | pub autonomous_system: String, 18 | pub country: String, 19 | pub city: String, 20 | pub country_code: String, 21 | pub hosting: bool, 22 | pub isp: String, 23 | pub lat: f64, 24 | pub lon: f64, 25 | pub org: String, 26 | pub proxy: bool, 27 | pub region_name: String, 28 | pub timezone: String, 29 | } 30 | 31 | impl Location { 32 | /// Current backend allows 45 requests per minute 33 | pub async fn from_ip(ip: &IpAddr) -> Result> { 34 | // Create and parse URI 35 | let uri = format!("http://ip-api.com/json/{ip}?fields=country,countryCode,regionName,city,lat,lon,timezone,isp,org,as,proxy,hosting,query") 36 | .parse::() 37 | .context("Failed to parse URI")?; 38 | ensure!( 39 | uri.scheme().context("URI has no scheme")? 
== &Scheme::HTTP, 40 | "Only HTTP is supported" 41 | ); 42 | 43 | // Get the host and the port 44 | let host = uri.host().context("URI has no host")?.to_string(); 45 | let port = uri.port_u16().unwrap_or(80); 46 | 47 | // Open a TCP connection to the remote host 48 | let address = format!("{host}:{port}"); 49 | let stream = TcpStream::connect(address) 50 | .await 51 | .context("Failed to connect TCP stream")?; 52 | 53 | // Create the Hyper client 54 | let io = TokioIo::new(stream); 55 | let (mut sender, conn) = http1::handshake(io) 56 | .await 57 | .context("Failed to create HTTP handshake")?; 58 | 59 | // Spawn a task to drive the HTTP state 60 | tokio::task::spawn(async move { 61 | if let Err(err) = conn.await { 62 | error!("Connection failed: {err:?}"); 63 | } 64 | }); 65 | 66 | // Create and send HTTP request 67 | let req = Request::builder() 68 | .uri(uri) 69 | .header(hyper::header::HOST, host) 70 | .body(Empty::::new()) 71 | .context("Failed to create HTTP request")?; 72 | let mut res = sender 73 | .send_request(req) 74 | .await 75 | .context("Failed to send HTTP request")?; 76 | ensure!(res.status().is_success(), "HTTP request did not succeed"); 77 | 78 | // Get response body piece by piece 79 | let mut body = Vec::new(); 80 | while let Some(next) = res.frame().await { 81 | let frame = next.context("Failed to receive next HTTP response chunk")?; 82 | if let Some(chunk) = frame.data_ref() { 83 | body.extend_from_slice(chunk); 84 | } 85 | if body.len() > 1024 * 1024 { 86 | bail!("HTTP response too big"); 87 | } 88 | } 89 | 90 | // Parse response JSON 91 | let parsed: Self = 92 | serde_json::from_slice(&body).context("Failed to parse HTTP response as JSON")?; 93 | 94 | Ok(Some(parsed)) 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/hasher.rs: -------------------------------------------------------------------------------- 1 | use sha2::{Digest, Sha256}; 2 | 3 | pub fn create_hash(data: &[u8], uid: Option) -> String { 4 | let mut hasher = Sha256::new(); 5 | hasher.update(data); 6 | if let Some(uid) = uid { 7 | hasher.update(uid.to_le_bytes()); 8 | } 9 | let hash = hasher.finalize(); 10 | format!("{:x}", hash) 11 | } 12 | -------------------------------------------------------------------------------- /src/http.rs: -------------------------------------------------------------------------------- 1 | mod dmarc_reports; 2 | mod ips; 3 | mod mails; 4 | mod static_files; 5 | mod summary; 6 | 7 | use crate::config::Configuration; 8 | use crate::state::AppState; 9 | use anyhow::{Context, Result}; 10 | use axum::body::Body; 11 | use axum::extract::Request; 12 | use axum::http::header::{AUTHORIZATION, WWW_AUTHENTICATE}; 13 | use axum::http::StatusCode; 14 | use axum::middleware::{self, Next}; 15 | use axum::response::{IntoResponse, Response}; 16 | use axum::routing::IntoMakeService; 17 | use axum::Json; 18 | use axum::{extract::State, routing::get, Router}; 19 | use axum_server::Handle; 20 | use base64::{engine::general_purpose::STANDARD, Engine}; 21 | use futures::StreamExt; 22 | use rustls_acme::caches::DirCache; 23 | use rustls_acme::AcmeConfig; 24 | use serde_json::json; 25 | use std::net::SocketAddr; 26 | use std::sync::Arc; 27 | use tokio::signal; 28 | use tokio::sync::Mutex; 29 | use tracing::{error, info, warn}; 30 | 31 | pub async fn run_http_server(config: &Configuration, state: Arc>) -> Result<()> { 32 | if config.http_server_password.is_empty() { 33 | warn!("Detected empty password: Basic Authentication will be disabled") 34 | 
} 35 | let make_service = Router::new() 36 | .route("/summary", get(summary::handler)) 37 | .route("/mails", get(mails::list_handler)) 38 | .route("/mails/{id}", get(mails::single_handler)) 39 | .route("/mails/{id}/errors", get(mails::errors_handler)) 40 | .route("/dmarc-reports", get(dmarc_reports::list_handler)) 41 | .route("/dmarc-reports/{id}", get(dmarc_reports::single_handler)) 42 | .route("/dmarc-reports/{id}/json", get(dmarc_reports::json_handler)) 43 | .route("/dmarc-reports/{id}/xml", get(dmarc_reports::xml_handler)) 44 | .route("/ips/{ip}/dns", get(ips::to_dns_handler)) 45 | .route("/ips/{ip}/location", get(ips::to_location_handler)) 46 | .route("/ips/{ip}/whois", get(ips::to_whois_handler)) 47 | .route("/build", get(build)) 48 | .route("/", get(static_files::handler)) // index.html 49 | .route("/{*filepath}", get(static_files::handler)) // all other files 50 | .route_layer(middleware::from_fn_with_state( 51 | config.clone(), 52 | basic_auth_middleware, 53 | )) 54 | .with_state(state.clone()) 55 | .into_make_service(); 56 | 57 | let binding = format!("{}:{}", config.http_server_binding, config.http_server_port); 58 | let addr: SocketAddr = binding.parse().context("Failed to parse binding address")?; 59 | info!("Binding HTTP server to {addr}..."); 60 | 61 | if config.https_auto_cert { 62 | start_https_server(config, addr, make_service) 63 | .await 64 | .context("Failed to start HTTPS server") 65 | } else { 66 | start_http_server(addr, make_service) 67 | .await 68 | .context("Failed to start HTTP server") 69 | } 70 | } 71 | 72 | async fn start_http_server( 73 | addr: SocketAddr, 74 | make_service: IntoMakeService, 75 | ) -> anyhow::Result<()> { 76 | let handle = Handle::new(); 77 | let handle_clone = handle.clone(); 78 | tokio::spawn(async move { 79 | shutdown_signal().await; 80 | handle_clone.shutdown(); 81 | }); 82 | 83 | axum_server::bind(addr) 84 | .handle(handle) 85 | .serve(make_service) 86 | .await 87 | .context("Failed to create axum HTTP server") 88 | } 89 | 90 | async fn start_https_server( 91 | config: &Configuration, 92 | addr: SocketAddr, 93 | make_service: IntoMakeService, 94 | ) -> anyhow::Result<()> { 95 | let handle = Handle::new(); 96 | let handle_clone = handle.clone(); 97 | tokio::spawn(async move { 98 | shutdown_signal().await; 99 | handle_clone.shutdown(); 100 | }); 101 | 102 | let acme_domain = config 103 | .https_auto_cert_domain 104 | .as_deref() 105 | .context("HTTPS automatic certificate domain is missing in configuration")?; 106 | 107 | let acme_contact = format!( 108 | "mailto:{}", 109 | config 110 | .https_auto_cert_mail 111 | .as_deref() 112 | .context("HTTPS automatic certificate mail is missing in configuration")? 113 | ); 114 | 115 | let acme_cache = DirCache::new( 116 | config 117 | .https_auto_cert_cache 118 | .as_deref() 119 | .context("HTTPS automatic certificate cache directory is missing in configuration")? 
120 | .to_owned(), 121 | ); 122 | 123 | let mut acme_state = AcmeConfig::new([acme_domain]) 124 | .contact([acme_contact]) 125 | .cache_option(Some(acme_cache)) 126 | .directory_lets_encrypt(true) 127 | .state(); 128 | let rustls_config = acme_state.default_rustls_config(); 129 | let acceptor = acme_state.axum_acceptor(rustls_config); 130 | 131 | tokio::spawn(async move { 132 | loop { 133 | match acme_state 134 | .next() 135 | .await 136 | .expect("Failed to get next ACME event") 137 | { 138 | Ok(ok) => info!("ACME Event: {:?}", ok), 139 | Err(err) => error!("ACME Error: {:?}", err), 140 | } 141 | } 142 | }); 143 | 144 | axum_server::bind(addr) 145 | .handle(handle) 146 | .acceptor(acceptor) 147 | .serve(make_service) 148 | .await 149 | .context("Failed to create axum HTTPS server") 150 | } 151 | 152 | /// Promise will be fulfilled when a shutdown signal is received 153 | async fn shutdown_signal() { 154 | let ctrlc = async { 155 | signal::ctrl_c() 156 | .await 157 | .expect("Failed to install Ctrl + C handler"); 158 | }; 159 | 160 | #[cfg(unix)] 161 | let terminate = async { 162 | signal::unix::signal(signal::unix::SignalKind::terminate()) 163 | .expect("Failed to install signal handler") 164 | .recv() 165 | .await; 166 | }; 167 | 168 | #[cfg(not(unix))] 169 | let terminate = std::future::pending::<()>(); 170 | 171 | tokio::select! { 172 | _ = ctrlc => {}, 173 | _ = terminate => {}, 174 | } 175 | } 176 | 177 | /// Middleware to add basic auth password protection 178 | async fn basic_auth_middleware( 179 | State(config): State, 180 | request: Request, 181 | next: Next, 182 | ) -> Response { 183 | // Password empty means basic auth is disabled 184 | if config.http_server_password.is_empty() { 185 | return next.run(request).await; 186 | } 187 | 188 | // Prepare error responses 189 | let unauthorized = Response::builder() 190 | .status(StatusCode::UNAUTHORIZED) 191 | .header(WWW_AUTHENTICATE, "Basic realm=\"Access\"") 192 | .body(Body::empty()) 193 | .expect("Failed to create response"); 194 | let bad_request = Response::builder() 195 | .status(StatusCode::BAD_REQUEST) 196 | .body(Body::empty()) 197 | .expect("Failed to create response"); 198 | 199 | let Some(header) = request.headers().get(AUTHORIZATION) else { 200 | return unauthorized; 201 | }; 202 | let Ok(header) = header.to_str() else { 203 | return bad_request; 204 | }; 205 | let Some(base64) = header.strip_prefix("Basic ") else { 206 | return bad_request; 207 | }; 208 | let Ok(decoded) = STANDARD.decode(base64) else { 209 | return bad_request; 210 | }; 211 | let Ok(string) = String::from_utf8(decoded) else { 212 | return bad_request; 213 | }; 214 | let Some((user, password)) = string.split_once(':') else { 215 | return bad_request; 216 | }; 217 | if user == config.http_server_user && password == config.http_server_password { 218 | next.run(request).await 219 | } else { 220 | unauthorized 221 | } 222 | } 223 | 224 | async fn build() -> impl IntoResponse { 225 | Json(json!({ 226 | "version": env!("CARGO_PKG_VERSION"), 227 | "hash": option_env!("GITHUB_SHA").unwrap_or("n/a"), 228 | "ref": option_env!("GITHUB_REF_NAME").unwrap_or("n/a"), 229 | })) 230 | } 231 | -------------------------------------------------------------------------------- /src/http/dmarc_reports.rs: -------------------------------------------------------------------------------- 1 | use crate::dmarc::DkimResultType; 2 | use crate::dmarc::DmarcResultType; 3 | use crate::dmarc::Report; 4 | use crate::dmarc::SpfResultType; 5 | use crate::state::AppState; 6 | use 
axum::extract::Path; 7 | use axum::extract::Query; 8 | use axum::extract::State; 9 | use axum::http::header; 10 | use axum::http::StatusCode; 11 | use axum::response::IntoResponse; 12 | use axum::Json; 13 | use serde::Deserialize; 14 | use serde::Serialize; 15 | use std::sync::Arc; 16 | use tokio::sync::Mutex; 17 | 18 | #[derive(Serialize)] 19 | struct ReportHeader { 20 | hash: String, 21 | id: String, 22 | org: String, 23 | domain: String, 24 | date_begin: u64, 25 | date_end: u64, 26 | records: usize, 27 | flagged_dkim: bool, 28 | flagged_spf: bool, 29 | flagged: bool, 30 | } 31 | 32 | impl ReportHeader { 33 | pub fn from_report(hash: &str, report: &Report) -> Self { 34 | let (flagged_dkim, flagged_spf) = Self::report_is_flagged(report); 35 | Self { 36 | hash: hash.to_string(), 37 | id: report.report_metadata.report_id.clone(), 38 | org: report.report_metadata.org_name.clone(), 39 | domain: report.policy_published.domain.clone(), 40 | date_begin: report.report_metadata.date_range.begin, 41 | date_end: report.report_metadata.date_range.end, 42 | records: report.record.len(), 43 | flagged: flagged_dkim | flagged_spf, 44 | flagged_dkim, 45 | flagged_spf, 46 | } 47 | } 48 | 49 | /// Checks if the report has DKIM or SPF issues 50 | fn report_is_flagged(report: &Report) -> (bool, bool) { 51 | let mut dkim_flagged = false; 52 | let mut spf_flagged = false; 53 | for record in &report.record { 54 | if let Some(dkim) = &record.row.policy_evaluated.dkim { 55 | if *dkim != DmarcResultType::Pass { 56 | dkim_flagged = true; 57 | } 58 | } 59 | if let Some(spf) = &record.row.policy_evaluated.spf { 60 | if *spf != DmarcResultType::Pass { 61 | spf_flagged = true; 62 | } 63 | } 64 | if let Some(dkim) = &record.auth_results.dkim { 65 | if dkim.iter().any(|x| x.result != DkimResultType::Pass) { 66 | dkim_flagged = true; 67 | } 68 | } 69 | if record 70 | .auth_results 71 | .spf 72 | .iter() 73 | .any(|x| x.result != SpfResultType::Pass) 74 | { 75 | spf_flagged = true; 76 | } 77 | } 78 | (dkim_flagged, spf_flagged) 79 | } 80 | } 81 | 82 | #[derive(Deserialize)] 83 | pub struct ReportFilters { 84 | uid: Option, 85 | flagged: Option, 86 | flagged_dkim: Option, 87 | flagged_spf: Option, 88 | domain: Option, 89 | org: Option, 90 | } 91 | 92 | impl ReportFilters { 93 | fn url_decode(&self) -> Self { 94 | Self { 95 | uid: self.uid, 96 | flagged: self.flagged, 97 | flagged_dkim: self.flagged_dkim, 98 | flagged_spf: self.flagged_spf, 99 | domain: self 100 | .domain 101 | .as_ref() 102 | .and_then(|d| urlencoding::decode(d).ok()) 103 | .map(|d| d.to_string()), 104 | org: self 105 | .org 106 | .as_ref() 107 | .and_then(|o| urlencoding::decode(o).ok()) 108 | .map(|o| o.to_string()), 109 | } 110 | } 111 | } 112 | 113 | pub async fn list_handler( 114 | State(state): State>>, 115 | filters: Query, 116 | ) -> impl IntoResponse { 117 | // Remove URL encoding from strings in filters 118 | let filters = filters.url_decode(); 119 | 120 | let reports: Vec = state 121 | .lock() 122 | .await 123 | .dmarc_reports 124 | .iter() 125 | .filter(|(_, rwu)| { 126 | if let Some(queried_uid) = filters.uid { 127 | rwu.uid == queried_uid 128 | } else { 129 | true 130 | } 131 | }) 132 | .filter(|(_, rwu)| { 133 | if let Some(org) = &filters.org { 134 | rwu.report.report_metadata.org_name == *org 135 | } else { 136 | true 137 | } 138 | }) 139 | .filter(|(_, rwu)| { 140 | if let Some(domain) = &filters.domain { 141 | rwu.report.policy_published.domain == *domain 142 | } else { 143 | true 144 | } 145 | }) 146 | .map(|(hash, rwu)| 
ReportHeader::from_report(hash, &rwu.report)) 147 | .filter(|rh| { 148 | if let Some(flagged) = &filters.flagged { 149 | rh.flagged == *flagged 150 | } else { 151 | true 152 | } 153 | }) 154 | .filter(|rh| { 155 | if let Some(dkim) = &filters.flagged_dkim { 156 | rh.flagged_dkim == *dkim 157 | } else { 158 | true 159 | } 160 | }) 161 | .filter(|rh| { 162 | if let Some(spf) = &filters.flagged_spf { 163 | rh.flagged_spf == *spf 164 | } else { 165 | true 166 | } 167 | }) 168 | .collect(); 169 | Json(reports) 170 | } 171 | 172 | pub async fn single_handler( 173 | State(state): State>>, 174 | Path(id): Path, 175 | ) -> impl IntoResponse { 176 | let lock = state.lock().await; 177 | if let Some(rwu) = lock.dmarc_reports.get(&id) { 178 | let report_json = serde_json::to_string(rwu).expect("Failed to serialize JSON"); 179 | ( 180 | StatusCode::OK, 181 | [(header::CONTENT_TYPE, "application/json")], 182 | report_json, 183 | ) 184 | } else { 185 | ( 186 | StatusCode::NOT_FOUND, 187 | [(header::CONTENT_TYPE, "text/plain")], 188 | format!("Cannot find report with ID {id}"), 189 | ) 190 | } 191 | } 192 | 193 | pub async fn json_handler( 194 | State(state): State>>, 195 | Path(id): Path, 196 | ) -> impl IntoResponse { 197 | let lock = state.lock().await; 198 | if let Some(rwu) = lock.dmarc_reports.get(&id) { 199 | let report_json = serde_json::to_string(&rwu.report).expect("Failed to serialize JSON"); 200 | ( 201 | StatusCode::OK, 202 | [(header::CONTENT_TYPE, "application/json")], 203 | report_json, 204 | ) 205 | } else { 206 | ( 207 | StatusCode::NOT_FOUND, 208 | [(header::CONTENT_TYPE, "text/plain")], 209 | format!("Cannot find report with ID {id}"), 210 | ) 211 | } 212 | } 213 | 214 | pub async fn xml_handler( 215 | State(state): State>>, 216 | Path(id): Path, 217 | ) -> impl IntoResponse { 218 | let lock = state.lock().await; 219 | if let Some(rwu) = lock.dmarc_reports.get(&id) { 220 | let mut report_xml = String::new(); 221 | let mut serializer = quick_xml::se::Serializer::new(&mut report_xml); 222 | serializer.indent(' ', 2); 223 | rwu.report 224 | .serialize(serializer) 225 | .expect("Failed to serialize XML"); 226 | report_xml = String::from("\n") + &report_xml; 227 | ( 228 | StatusCode::OK, 229 | [(header::CONTENT_TYPE, "text/xml")], 230 | report_xml, 231 | ) 232 | } else { 233 | ( 234 | StatusCode::NOT_FOUND, 235 | [(header::CONTENT_TYPE, "text/plain")], 236 | format!("Cannot find report with ID {id}"), 237 | ) 238 | } 239 | } 240 | -------------------------------------------------------------------------------- /src/http/ips.rs: -------------------------------------------------------------------------------- 1 | use crate::geolocate::Location; 2 | use crate::state::AppState; 3 | use crate::whois::WhoIsIp; 4 | use axum::extract::Path; 5 | use axum::extract::State; 6 | use axum::http::header; 7 | use axum::http::StatusCode; 8 | use axum::response::IntoResponse; 9 | use dns_lookup::lookup_addr; 10 | use std::net::IpAddr; 11 | use std::sync::Arc; 12 | use tokio::sync::Mutex; 13 | use tokio::task::spawn_blocking; 14 | 15 | pub async fn to_dns_handler( 16 | State(state): State>>, 17 | Path(ip): Path, 18 | ) -> impl IntoResponse { 19 | let Ok(parsed_ip) = ip.parse::() else { 20 | return ( 21 | StatusCode::BAD_REQUEST, 22 | [(header::CONTENT_TYPE, "text/plain")], 23 | format!("Invalid IP {ip}"), 24 | ); 25 | }; 26 | 27 | // Check cache 28 | let cached = { 29 | let app = state.lock().await; 30 | app.ip_dns_cache.get(&parsed_ip).map(|dns| dns.to_string()) 31 | }; 32 | 33 | let result = if let 
Some(host_name) = cached { 34 | // Found result in cache! 35 | Some(host_name) 36 | } else { 37 | // Nothing in cache, send new DNS request! 38 | // Do not block here and use a special async task 39 | // where blocking calls are acceptable 40 | let handle = spawn_blocking(move || lookup_addr(&parsed_ip)); 41 | 42 | // Join async task 43 | let Ok(result) = handle.await else { 44 | return ( 45 | StatusCode::INTERNAL_SERVER_ERROR, 46 | [(header::CONTENT_TYPE, "text/plain")], 47 | String::from("DNS lookup failed"), 48 | ); 49 | }; 50 | 51 | // Cache any positive result 52 | if let Ok(host_name) = result { 53 | let mut app = state.lock().await; 54 | app.ip_dns_cache.insert(parsed_ip, host_name.clone()); 55 | Some(host_name) 56 | } else { 57 | None 58 | } 59 | }; 60 | 61 | if let Some(host_name) = result { 62 | ( 63 | StatusCode::OK, 64 | [(header::CONTENT_TYPE, "text/plain")], 65 | host_name, 66 | ) 67 | } else { 68 | ( 69 | StatusCode::NOT_FOUND, 70 | [(header::CONTENT_TYPE, "text/plain")], 71 | String::from("n/a"), 72 | ) 73 | } 74 | } 75 | 76 | pub async fn to_location_handler( 77 | State(state): State>>, 78 | Path(ip): Path, 79 | ) -> impl IntoResponse { 80 | let Ok(parsed_ip) = ip.parse::() else { 81 | return ( 82 | StatusCode::BAD_REQUEST, 83 | [(header::CONTENT_TYPE, "text/plain")], 84 | format!("Invalid IP {ip}"), 85 | ); 86 | }; 87 | 88 | // Check cache 89 | let cached = { 90 | let app = state.lock().await; 91 | app.ip_location_cache.get(&parsed_ip).cloned() 92 | }; 93 | 94 | let result = if let Some(location) = cached { 95 | // Found result in cache! 96 | Some(location) 97 | } else { 98 | // Nothing in cache, send new request! 99 | let Ok(result) = Location::from_ip(&parsed_ip).await else { 100 | return ( 101 | StatusCode::INTERNAL_SERVER_ERROR, 102 | [(header::CONTENT_TYPE, "text/plain")], 103 | String::from("Failed to locate IP"), 104 | ); 105 | }; 106 | 107 | // Cache any positive result 108 | if let Some(location) = result { 109 | let mut app = state.lock().await; 110 | app.ip_location_cache.insert(parsed_ip, location.clone()); 111 | Some(location) 112 | } else { 113 | None 114 | } 115 | }; 116 | 117 | let Some(location) = result else { 118 | return ( 119 | StatusCode::NOT_FOUND, 120 | [(header::CONTENT_TYPE, "text/plain")], 121 | String::from("No info found"), 122 | ); 123 | }; 124 | 125 | ( 126 | StatusCode::OK, 127 | [(header::CONTENT_TYPE, "application/json")], 128 | serde_json::to_string_pretty(&location).expect("Failed to serialize JSON"), 129 | ) 130 | } 131 | 132 | pub async fn to_whois_handler(Path(ip): Path) -> impl IntoResponse { 133 | let Ok(parsed_ip) = ip.parse::() else { 134 | return ( 135 | StatusCode::BAD_REQUEST, 136 | [(header::CONTENT_TYPE, "text/plain")], 137 | format!("Invalid IP {ip}"), 138 | ); 139 | }; 140 | 141 | let whois = WhoIsIp::default(); 142 | let Ok(whois) = whois.lookup(&parsed_ip).await else { 143 | return ( 144 | StatusCode::NOT_FOUND, 145 | [(header::CONTENT_TYPE, "text/plain")], 146 | String::from("Failed to look up IP"), 147 | ); 148 | }; 149 | 150 | ( 151 | StatusCode::OK, 152 | [(header::CONTENT_TYPE, "text/plain")], 153 | whois, 154 | ) 155 | } 156 | -------------------------------------------------------------------------------- /src/http/mails.rs: -------------------------------------------------------------------------------- 1 | use crate::mail::Mail; 2 | use crate::state::AppState; 3 | use axum::extract::State; 4 | use axum::extract::{Path, Query}; 5 | use axum::http::header; 6 | use axum::http::StatusCode; 7 | use 
axum::response::IntoResponse; 8 | use serde::Deserialize; 9 | use std::sync::Arc; 10 | use tokio::sync::Mutex; 11 | 12 | pub async fn single_handler( 13 | State(state): State>>, 14 | Path(id): Path, 15 | ) -> impl IntoResponse { 16 | let Ok(parsed_uid) = id.parse::() else { 17 | return ( 18 | StatusCode::BAD_REQUEST, 19 | [(header::CONTENT_TYPE, "text/plain")], 20 | format!("Invalid ID {id}"), 21 | ); 22 | }; 23 | let lock = state.lock().await; 24 | if let Some((_, mail)) = lock.mails.iter().find(|(uid, _)| **uid == parsed_uid) { 25 | let mail_json = serde_json::to_string(mail).expect("Failed to serialize JSON"); 26 | ( 27 | StatusCode::OK, 28 | [(header::CONTENT_TYPE, "application/json")], 29 | mail_json, 30 | ) 31 | } else { 32 | ( 33 | StatusCode::NOT_FOUND, 34 | [(header::CONTENT_TYPE, "text/plain")], 35 | format!("Cannot find mail with ID {id}"), 36 | ) 37 | } 38 | } 39 | 40 | pub async fn errors_handler( 41 | State(state): State>>, 42 | Path(id): Path, 43 | ) -> impl IntoResponse { 44 | let Ok(parsed_uid) = id.parse::() else { 45 | return ( 46 | StatusCode::BAD_REQUEST, 47 | [(header::CONTENT_TYPE, "text/plain")], 48 | format!("Invalid ID {id}"), 49 | ); 50 | }; 51 | let lock = state.lock().await; 52 | if !lock.mails.contains_key(&parsed_uid) { 53 | return ( 54 | StatusCode::NOT_FOUND, 55 | [(header::CONTENT_TYPE, "text/plain")], 56 | format!("Cannot find mail with ID {id}"), 57 | ); 58 | } 59 | if let Some(errors) = lock.dmarc_parsing_errors.get(&parsed_uid) { 60 | let errors_json = serde_json::to_string(errors).expect("Failed to serialize JSON"); 61 | ( 62 | StatusCode::OK, 63 | [(header::CONTENT_TYPE, "application/json")], 64 | errors_json, 65 | ) 66 | } else { 67 | ( 68 | StatusCode::OK, 69 | [(header::CONTENT_TYPE, "application/json")], 70 | String::from("[]"), 71 | ) 72 | } 73 | } 74 | 75 | #[derive(Deserialize, Debug)] 76 | pub struct MailFilters { 77 | sender: Option, 78 | count: Option, 79 | oversized: Option, 80 | errors: Option, 81 | } 82 | 83 | impl MailFilters { 84 | fn url_decode(&self) -> Self { 85 | Self { 86 | oversized: self.oversized, 87 | count: self.count, 88 | errors: self.errors, 89 | sender: self 90 | .sender 91 | .as_ref() 92 | .and_then(|s| urlencoding::decode(s).ok()) 93 | .map(|s| s.to_string()), 94 | } 95 | } 96 | } 97 | 98 | pub async fn list_handler( 99 | State(state): State>>, 100 | filters: Query, 101 | ) -> impl IntoResponse { 102 | // Remove URL encoding from strings in filters 103 | let filters = filters.url_decode(); 104 | 105 | let lock = state.lock().await; 106 | let mails: Vec<&Mail> = lock 107 | .mails 108 | .values() 109 | .filter(|m| { 110 | if let Some(queried_sender) = &filters.sender { 111 | m.sender == *queried_sender 112 | } else { 113 | true 114 | } 115 | }) 116 | .filter(|m| { 117 | if let Some(queried_oversized) = &filters.oversized { 118 | m.oversized == *queried_oversized 119 | } else { 120 | true 121 | } 122 | }) 123 | .filter(|m| { 124 | if let Some(queried_count) = &filters.count { 125 | m.xml_files == *queried_count 126 | } else { 127 | true 128 | } 129 | }) 130 | .filter(|m| { 131 | if let Some(queried_errors) = &filters.errors { 132 | (m.parsing_errors > 0) == *queried_errors 133 | } else { 134 | true 135 | } 136 | }) 137 | .collect(); 138 | let mails_json = serde_json::to_string(&mails).expect("Failed to serialize JSON"); 139 | ( 140 | StatusCode::OK, 141 | [(header::CONTENT_TYPE, "application/json")], 142 | mails_json, 143 | ) 144 | } 145 | -------------------------------------------------------------------------------- 
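The mail and DMARC report list endpoints above accept their filters as plain query parameters that match the field names of `MailFilters` and `ReportFilters`. A minimal client-side sketch of such queries follows; the host, port, route paths and the use of `reqwest`/`serde_json` are assumptions for illustration only and are not part of this project:

use serde_json::Value;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Mails that produced at least one DMARC parsing error
    let mails: Value = reqwest::get("http://localhost:8080/mails?errors=true")
        .await?
        .json()
        .await?;

    // Reports flagged because of SPF problems, restricted to one domain
    let reports: Value =
        reqwest::get("http://localhost:8080/dmarc-reports?flagged_spf=true&domain=example.org")
            .await?
            .json()
            .await?;

    println!(
        "{} mails with parsing errors, {} flagged reports",
        mails.as_array().map_or(0, |a| a.len()),
        reports.as_array().map_or(0, |a| a.len())
    );
    Ok(())
}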
/src/http/static_files.rs: -------------------------------------------------------------------------------- 1 | use axum::extract::Request; 2 | use axum::http::header; 3 | use axum::http::StatusCode; 4 | use axum::response::IntoResponse; 5 | 6 | pub async fn handler(req: Request) -> impl IntoResponse { 7 | let path = req.uri().path(); 8 | for sf in STATIC_FILES { 9 | if sf.http_path == path { 10 | let mut mime_type = "application/octet-stream"; 11 | for mt in MIME_TYPES { 12 | if sf.file_path.ends_with(mt.ext) { 13 | mime_type = mt.mime_type; 14 | break; 15 | } 16 | } 17 | return ( 18 | StatusCode::OK, 19 | [(header::CONTENT_TYPE, mime_type)], 20 | #[cfg(debug_assertions)] 21 | std::fs::read(sf.file_path).expect("Failed to read file"), 22 | #[cfg(not(debug_assertions))] 23 | sf._data, 24 | ); 25 | } 26 | } 27 | ( 28 | StatusCode::NOT_FOUND, 29 | [(header::CONTENT_TYPE, "text/plain")], 30 | #[cfg(debug_assertions)] 31 | b"File not found".to_vec(), 32 | #[cfg(not(debug_assertions))] 33 | b"File not found", 34 | ) 35 | } 36 | 37 | const STATIC_FILES: &[StaticFile] = &[ 38 | StaticFile { 39 | http_path: "/", 40 | file_path: "ui/index.html", 41 | _data: include_bytes!("../../ui/index.html"), 42 | }, 43 | StaticFile { 44 | http_path: "/chart.js", 45 | file_path: "ui/chart.umd.4.4.2.min.js", 46 | _data: include_bytes!("../../ui/chart.umd.4.4.2.min.js"), 47 | }, 48 | StaticFile { 49 | http_path: "/lit.js", 50 | file_path: "ui/lit-core.3.1.4.min.js", 51 | _data: include_bytes!("../../ui/lit-core.3.1.4.min.js"), 52 | }, 53 | StaticFile { 54 | http_path: "/components/style.js", 55 | file_path: "ui/components/style.js", 56 | _data: include_bytes!("../../ui/components/style.js"), 57 | }, 58 | StaticFile { 59 | http_path: "/components/app.js", 60 | file_path: "ui/components/app.js", 61 | _data: include_bytes!("../../ui/components/app.js"), 62 | }, 63 | StaticFile { 64 | http_path: "/components/dashboard.js", 65 | file_path: "ui/components/dashboard.js", 66 | _data: include_bytes!("../../ui/components/dashboard.js"), 67 | }, 68 | StaticFile { 69 | http_path: "/components/mail-table.js", 70 | file_path: "ui/components/mail-table.js", 71 | _data: include_bytes!("../../ui/components/mail-table.js"), 72 | }, 73 | StaticFile { 74 | http_path: "/components/dmarc-report.js", 75 | file_path: "ui/components/dmarc-report.js", 76 | _data: include_bytes!("../../ui/components/dmarc-report.js"), 77 | }, 78 | StaticFile { 79 | http_path: "/components/dmarc-reports.js", 80 | file_path: "ui/components/dmarc-reports.js", 81 | _data: include_bytes!("../../ui/components/dmarc-reports.js"), 82 | }, 83 | StaticFile { 84 | http_path: "/components/mails.js", 85 | file_path: "ui/components/mails.js", 86 | _data: include_bytes!("../../ui/components/mails.js"), 87 | }, 88 | StaticFile { 89 | http_path: "/components/mail.js", 90 | file_path: "ui/components/mail.js", 91 | _data: include_bytes!("../../ui/components/mail.js"), 92 | }, 93 | StaticFile { 94 | http_path: "/components/about.js", 95 | file_path: "ui/components/about.js", 96 | _data: include_bytes!("../../ui/components/about.js"), 97 | }, 98 | StaticFile { 99 | http_path: "/components/dmarc-report-table.js", 100 | file_path: "ui/components/dmarc-report-table.js", 101 | _data: include_bytes!("../../ui/components/dmarc-report-table.js"), 102 | }, 103 | ]; 104 | 105 | const MIME_TYPES: &[MimeType] = &[ 106 | MimeType { 107 | ext: ".html", 108 | mime_type: "text/html", 109 | }, 110 | MimeType { 111 | ext: ".js", 112 | mime_type: "text/javascript", 113 | }, 114 | MimeType { 115 
| ext: ".css", 116 | mime_type: "text/css", 117 | }, 118 | ]; 119 | 120 | struct MimeType { 121 | ext: &'static str, 122 | mime_type: &'static str, 123 | } 124 | 125 | struct StaticFile { 126 | http_path: &'static str, 127 | file_path: &'static str, 128 | _data: &'static [u8], 129 | } 130 | -------------------------------------------------------------------------------- /src/http/summary.rs: -------------------------------------------------------------------------------- 1 | use crate::dmarc::{DkimResultType, DmarcResultType, SpfResultType}; 2 | use crate::state::{AppState, DmarcReportWithUid}; 3 | use axum::extract::{Query, State}; 4 | use axum::response::IntoResponse; 5 | use axum::Json; 6 | use chrono::{Duration, Utc}; 7 | use serde::{Deserialize, Serialize}; 8 | use std::collections::HashMap; 9 | use std::sync::Arc; 10 | use tokio::sync::Mutex; 11 | 12 | #[derive(Deserialize, Debug)] 13 | pub struct SummaryFilters { 14 | /// Number of hours from current time backwards to include. 15 | /// Everything older will be excluded. 16 | /// None or a value of zero means the filter is disabled! 17 | time_span: Option, 18 | 19 | /// Domain to be filtered. Other domains will be ignored. 20 | /// None means the filter is disabled! 21 | domain: Option, 22 | } 23 | 24 | impl SummaryFilters { 25 | fn url_decode(&self) -> Self { 26 | Self { 27 | time_span: self.time_span, 28 | domain: self 29 | .domain 30 | .as_ref() 31 | .and_then(|s| urlencoding::decode(s).ok()) 32 | .map(|s| s.to_string()), 33 | } 34 | } 35 | } 36 | 37 | pub async fn handler( 38 | State(state): State>>, 39 | filters: Query, 40 | ) -> impl IntoResponse { 41 | let filters = filters.url_decode(); 42 | let guard = state.lock().await; 43 | let mut time_span = None; 44 | if let Some(hours) = filters.time_span { 45 | if hours > 0 { 46 | time_span = Some(Duration::hours(hours as i64)); 47 | } 48 | } 49 | let summary = Summary::new( 50 | guard.mails.len(), 51 | guard.xml_files, 52 | &guard.dmarc_reports, 53 | guard.last_update, 54 | time_span, 55 | filters.domain, 56 | ); 57 | Json(summary) 58 | } 59 | 60 | #[derive(Serialize, Default, Clone)] 61 | pub struct Summary { 62 | /// Number of mails from IMAP inbox 63 | pub mails: usize, 64 | 65 | /// Number of XML files found in mails from IMAPinbox 66 | pub xml_files: usize, 67 | 68 | /// Number of successfully parsed DMARC reports XML files found in IMAP inbox 69 | pub dmarc_reports: usize, 70 | 71 | /// Unix timestamp with time of last update 72 | pub last_update: u64, 73 | 74 | /// Map of organizations with number of corresponding reports 75 | pub dmarc_orgs: HashMap, 76 | 77 | /// Map of domains with number of corresponding reports 78 | pub dmarc_domains: HashMap, 79 | 80 | /// Map of DMARC SPF policy evaluation results 81 | pub spf_policy_results: HashMap, 82 | 83 | /// Map of DMARC DKIM policy evaluation results 84 | pub dkim_policy_results: HashMap, 85 | 86 | /// Map of DMARC SPF auth results 87 | pub spf_auth_results: HashMap, 88 | 89 | /// Map of DMARC DKIM auth results 90 | pub dkim_auth_results: HashMap, 91 | } 92 | 93 | impl Summary { 94 | pub fn new( 95 | mails: usize, 96 | xml_files: usize, 97 | dmarc_reports: &HashMap, 98 | last_update: u64, 99 | time_span: Option, 100 | domain: Option, 101 | ) -> Self { 102 | let mut dmarc_orgs: HashMap = HashMap::new(); 103 | let mut dmarc_domains = HashMap::new(); 104 | let mut spf_policy_results: HashMap = HashMap::new(); 105 | let mut dkim_policy_results: HashMap = HashMap::new(); 106 | let mut spf_auth_results: HashMap = HashMap::new(); 107 
| let mut dkim_auth_results: HashMap = HashMap::new(); 108 | let threshold = time_span.map(|d| (Utc::now() - d).timestamp() as u64); 109 | for DmarcReportWithUid { report, .. } in dmarc_reports.values() { 110 | if let Some(threshold) = threshold { 111 | if report.report_metadata.date_range.end < threshold { 112 | continue; 113 | } 114 | } 115 | if let Some(domain) = &domain { 116 | if report.policy_published.domain != *domain { 117 | continue; 118 | } 119 | } 120 | let domain = report.policy_published.domain.clone(); 121 | if let Some(entry) = dmarc_domains.get_mut(&domain) { 122 | *entry += 1; 123 | } else { 124 | dmarc_domains.insert(domain, 1); 125 | } 126 | let org = report.report_metadata.org_name.clone(); 127 | if let Some(entry) = dmarc_orgs.get_mut(&org) { 128 | *entry += 1; 129 | } else { 130 | dmarc_orgs.insert(org, 1); 131 | } 132 | for record in &report.record { 133 | for r in &record.auth_results.spf { 134 | if let Some(entry) = spf_auth_results.get_mut(&r.result) { 135 | *entry += record.row.count; 136 | } else { 137 | spf_auth_results.insert(r.result.clone(), record.row.count); 138 | } 139 | } 140 | if let Some(vec) = &record.auth_results.dkim { 141 | for r in vec { 142 | if let Some(entry) = dkim_auth_results.get_mut(&r.result) { 143 | *entry += record.row.count; 144 | } else { 145 | dkim_auth_results.insert(r.result.clone(), record.row.count); 146 | } 147 | } 148 | } 149 | if let Some(result) = &record.row.policy_evaluated.spf { 150 | if let Some(entry) = spf_policy_results.get_mut(result) { 151 | *entry += record.row.count; 152 | } else { 153 | spf_policy_results.insert(result.clone(), record.row.count); 154 | } 155 | } 156 | if let Some(result) = &record.row.policy_evaluated.dkim { 157 | if let Some(entry) = dkim_policy_results.get_mut(result) { 158 | *entry += record.row.count; 159 | } else { 160 | dkim_policy_results.insert(result.clone(), record.row.count); 161 | } 162 | } 163 | } 164 | } 165 | Self { 166 | mails, 167 | xml_files, 168 | last_update, 169 | dmarc_reports: dmarc_reports.len(), 170 | dmarc_orgs, 171 | dmarc_domains, 172 | spf_policy_results, 173 | dkim_policy_results, 174 | spf_auth_results, 175 | dkim_auth_results, 176 | } 177 | } 178 | } 179 | -------------------------------------------------------------------------------- /src/imap.rs: -------------------------------------------------------------------------------- 1 | use crate::config::Configuration; 2 | use crate::mail::{decode_subject, Mail}; 3 | use anyhow::{anyhow, Context, Result}; 4 | use async_imap::imap_proto::Address; 5 | use async_imap::types::Fetch; 6 | use async_imap::Client; 7 | use futures::StreamExt; 8 | use std::collections::HashMap; 9 | use std::net::TcpStream as StdTcpStream; 10 | use std::net::{SocketAddr, ToSocketAddrs}; 11 | use std::sync::Arc; 12 | use std::time::Duration; 13 | use tokio::net::TcpStream; 14 | use tokio_rustls::client::TlsStream; 15 | use tokio_rustls::rustls::pki_types::pem::PemObject; 16 | use tokio_rustls::rustls::pki_types::{CertificateDer, ServerName}; 17 | use tokio_rustls::rustls::{ClientConfig, RootCertStore}; 18 | use tokio_rustls::TlsConnector; 19 | use tokio_util::either::Either; 20 | use tracing::{debug, info, trace, warn}; 21 | 22 | pub async fn get_mails(config: &Configuration) -> Result> { 23 | let client = create_client(config) 24 | .await 25 | .context("Failed to create IMAP client")?; 26 | 27 | let mut session = client 28 | .login(&config.imap_user, &config.imap_password) 29 | .await 30 | .map_err(|e| e.0) 31 | .context("Failed to log in and 
create IMAP session")?; 32 | debug!("IMAP login successful"); 33 | 34 | let imap_folder = &config.imap_folder; 35 | let mailbox = session 36 | .select(imap_folder) 37 | .await 38 | .context(format!("Failed to select {imap_folder} folder"))?; 39 | debug!("Selected {imap_folder} folder successfully"); 40 | 41 | // Get metadata for all all mails and filter by size 42 | let mut mails = HashMap::new(); 43 | debug!( 44 | "Number of mails in {imap_folder} folder: {}", 45 | mailbox.exists 46 | ); 47 | if mailbox.exists > 0 { 48 | // Get metadata for all mails 49 | let sequence = format!("1:{}", mailbox.exists); 50 | let mut stream = session 51 | .fetch(sequence, "(RFC822.SIZE UID ENVELOPE INTERNALDATE)") 52 | .await 53 | .context("Failed to fetch message stream from IMAP inbox")?; 54 | while let Some(fetch_result) = stream.next().await { 55 | let fetched = 56 | fetch_result.context("Failed to get next mail header from IMAP fetch response")?; 57 | let mail = extract_metadata(&fetched, config.max_mail_size as usize) 58 | .context("Unable to extract mail metadata")?; 59 | mails.insert(mail.uid, mail); 60 | } 61 | info!("Downloaded metadata of {} mails", mails.len()); 62 | 63 | let no_size_mails = mails.values().filter(|m| m.size == 0).count(); 64 | if no_size_mails > 0 { 65 | warn!("Found {no_size_mails} without size property, this will make upfront oversize filtering impossible!") 66 | } 67 | 68 | let oversized_mails = mails.values().filter(|m| m.oversized).count(); 69 | if oversized_mails > 0 { 70 | warn!( 71 | "Found {} mails over size limit of {} bytes", 72 | oversized_mails, config.max_mail_size 73 | ) 74 | } 75 | } 76 | 77 | // Get full mail body for all non-oversized mails 78 | let uids: Vec = mails 79 | .values() 80 | .filter(|m| !m.oversized) 81 | .map(|m| m.uid.to_string()) 82 | .collect(); 83 | if !uids.is_empty() { 84 | // We need to get the mails in chunks. 85 | // It will fail silently if the requested sequences become too big! 86 | for chunk in uids.chunks(config.imap_chunk_size) { 87 | debug!("Downloading chunk with {} mails...", chunk.len()); 88 | let sequence: String = chunk.join(","); 89 | let body_request = config.imap_body_request.to_request_string(); 90 | let fetch_query = format!("({body_request} RFC822.SIZE UID ENVELOPE INTERNALDATE)"); 91 | let mut stream = session 92 | .uid_fetch( 93 | sequence, 94 | // Some servers (like iCloud Mail) seem to require BODY[] instead of just RFC822... 
95 | &fetch_query, 96 | ) 97 | .await 98 | .context("Failed to fetch message stream from IMAP inbox")?; 99 | let mut fetched = 0; 100 | while let Some(fetch_result) = stream.next().await { 101 | fetched += 1; 102 | let fetched = fetch_result 103 | .context("Failed to get next mail header from IMAP fetch response")?; 104 | let uid = fetched 105 | .uid 106 | .context("Failed to get UID from IMAP fetch result")?; 107 | let Some(mail) = mails.get_mut(&uid) else { 108 | warn!("Cannot find mail metadata for UID {uid}"); 109 | continue; 110 | }; 111 | let Some(body) = fetched.body() else { 112 | warn!("Mail with UID {} has no body!", mail.uid); 113 | continue; 114 | }; 115 | mail.body = Some(body.to_vec()); 116 | mail.size = body.len(); 117 | mail.oversized = body.len() > config.max_mail_size as usize; 118 | if mail.oversized { 119 | // Do not keep oversized mails in memory 120 | mail.body = None; 121 | warn!("Mail with UID {uid} was bigger than expected and is oversized"); 122 | } 123 | trace!( 124 | "Fetched mail with UID {uid} and size {} from {}", 125 | mail.size, 126 | mail.sender 127 | ); 128 | } 129 | if fetched != chunk.len() { 130 | warn!( 131 | "Unable to fetch some mails from chunk, expected {} mails but got {fetched}", 132 | chunk.len() 133 | ); 134 | } 135 | } 136 | 137 | info!("Downloaded {} mails", uids.len()); 138 | } 139 | 140 | // We have everything we need, an error is no longer preventing an update. 141 | if let Err(err) = session.logout().await { 142 | let anyhow_err = anyhow!(err); 143 | warn!("Failed to log off from IMAP server: {anyhow_err:#}"); 144 | } 145 | 146 | Ok(mails) 147 | } 148 | 149 | /// Creates an unecrypted or encrypted IMAP client 150 | async fn create_client( 151 | config: &Configuration, 152 | ) -> Result>>> { 153 | let host_port = format!("{}:{}", config.imap_host.as_str(), config.imap_port); 154 | let addrs = host_port 155 | .to_socket_addrs() 156 | .context("Failed to convert host name and port to socket address")? 157 | .collect::>(); 158 | let addr = addrs.first().context("Unable get first resolved address")?; 159 | debug!("Got {addr} as as socket address for IMAP endpoint {host_port}"); 160 | 161 | let timeout = Duration::from_secs(config.imap_timeout); 162 | let std_tcp_stream = 163 | StdTcpStream::connect_timeout(addr, timeout).context("Failed to connect to IMAP server")?; 164 | debug!("Created TCP stream"); 165 | 166 | std_tcp_stream 167 | .set_nonblocking(true) 168 | .context("Failed to set TCP stream to non-blocking")?; 169 | let tcp_stream = TcpStream::from_std(std_tcp_stream) 170 | .context("Failed to create TCP stream to IMAP server")?; 171 | debug!("Created async TCP stream"); 172 | 173 | let stream = if config.imap_starttls { 174 | debug!("Sending STARTTLS command over plain connection..."); 175 | let mut plain_client = Client::new(tcp_stream); 176 | plain_client 177 | .read_response() 178 | .await 179 | .context("Failed to read greeting")? 
180 | .context("Failed parse greeting response")?; 181 | debug!("Received greeting"); 182 | plain_client 183 | .run_command_and_check_ok("STARTTLS", None) 184 | .await 185 | .context("Failed to run STARTTLS command")?; 186 | debug!("Requested STARTTLS, upgrading..."); 187 | let tls_stream = create_tls_stream(config, plain_client.into_inner()) 188 | .await 189 | .context("Failed to upgrade to TLS stream")?; 190 | Either::Right(tls_stream) 191 | } else if config.imap_disable_tls { 192 | warn!("Using unecrypted TCP connection for IMAP client"); 193 | Either::Left(tcp_stream) 194 | } else { 195 | debug!("Directly creating TLS stream..."); 196 | let tls_stream = create_tls_stream(config, tcp_stream) 197 | .await 198 | .context("Failed to create TLS stream")?; 199 | Either::Right(tls_stream) 200 | }; 201 | 202 | let client = Client::new(stream); 203 | debug!("Created IMAP client"); 204 | Ok(client) 205 | } 206 | 207 | async fn create_tls_stream( 208 | config: &Configuration, 209 | tcp_stream: TcpStream, 210 | ) -> Result> { 211 | let mut root_cert_store = RootCertStore::empty(); 212 | let certs = webpki_roots::TLS_SERVER_ROOTS.iter().cloned(); 213 | root_cert_store.extend(certs); 214 | debug!("Created Root CA cert store"); 215 | 216 | if let Some(ca_certs) = &config.imap_tls_ca_certs { 217 | info!( 218 | "Loading file with custom TLS CA certificates for IMAP client from {}...", 219 | ca_certs.display() 220 | ); 221 | let mut custom_certs = Vec::new(); 222 | for res in CertificateDer::pem_file_iter(ca_certs) 223 | .context("Failed to parse custom CA certificate file")? 224 | { 225 | let cert = res.context("Failed to parse certificate")?; 226 | custom_certs.push(cert); 227 | } 228 | info!( 229 | "Loaded {} custom certificate(s) from input file", 230 | custom_certs.len() 231 | ); 232 | let (added, ignored) = root_cert_store.add_parsable_certificates(custom_certs); 233 | info!("{added} certificate(s) were added to the root store and {ignored} were ignored"); 234 | } 235 | 236 | let client_config = ClientConfig::builder() 237 | .with_root_certificates(root_cert_store) 238 | .with_no_client_auth(); 239 | debug!("Created TLS client config"); 240 | 241 | let connector = TlsConnector::from(Arc::new(client_config)); 242 | debug!("Created TLS connector"); 243 | 244 | let dns_name = ServerName::try_from(config.imap_host.clone()) 245 | .context("Failed to get DNS name from host")?; 246 | debug!("Got DNS name: {dns_name:?}"); 247 | 248 | let tls_stream = connector 249 | .connect(dns_name, tcp_stream) 250 | .await 251 | .context("Failed to create TLS stream")?; 252 | debug!("Created TLS stream"); 253 | 254 | Ok(tls_stream) 255 | } 256 | 257 | fn extract_metadata(mail: &Fetch, max_size: usize) -> Result { 258 | let uid = mail.uid.context("Mail server did not provide UID")?; 259 | let size = mail.size.unwrap_or(0) as usize; // In case the mail server ignored our request for the size 260 | let env = mail 261 | .envelope() 262 | .context("Mail server did not provide envelope")?; 263 | let sender = addrs_to_string(env.sender.as_deref()); 264 | let to = addrs_to_string(env.to.as_deref()); 265 | let date = mail 266 | .internal_date() 267 | .context("Mail server did not provide date")? 
268 | .timestamp(); 269 | let env = mail 270 | .envelope() 271 | .context("Mail server did not provide envelope")?; 272 | let subject = decode_subject( 273 | env.subject 274 | .as_deref() 275 | .map(|s| String::from_utf8_lossy(s)) 276 | .unwrap_or("n/a".into()) 277 | .as_ref(), 278 | ); 279 | Ok(Mail { 280 | body: None, 281 | uid, 282 | sender, 283 | to, 284 | subject, 285 | date, 286 | size, 287 | oversized: size > max_size, 288 | xml_files: 0, 289 | parsing_errors: 0, 290 | }) 291 | } 292 | 293 | fn addrs_to_string(addrs: Option<&[Address]>) -> String { 294 | if let Some(addrs) = addrs { 295 | addrs 296 | .iter() 297 | .map(|addr| { 298 | let mailbox = addr 299 | .mailbox 300 | .as_deref() 301 | .map(|s| String::from_utf8_lossy(s)) 302 | .unwrap_or("n/a".into()) 303 | .to_string(); 304 | let host = addr 305 | .host 306 | .as_deref() 307 | .map(|s| String::from_utf8_lossy(s)) 308 | .unwrap_or("n/a".into()) 309 | .to_string(); 310 | format!("{mailbox}@{host}") 311 | }) 312 | .collect::>() 313 | .join("; ") 314 | } else { 315 | String::from("n/a") 316 | } 317 | } 318 | -------------------------------------------------------------------------------- /src/mail.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, Context, Result}; 2 | use base64::engine::general_purpose::STANDARD; 3 | use base64::Engine; 4 | use regex::Regex; 5 | use serde::Serialize; 6 | 7 | #[derive(Serialize)] 8 | pub struct Mail { 9 | pub uid: u32, 10 | pub size: usize, 11 | pub oversized: bool, 12 | pub date: i64, 13 | pub subject: String, 14 | pub sender: String, 15 | pub to: String, 16 | 17 | // Body is removed after parsing to save memory 18 | #[serde(skip)] 19 | pub body: Option>, 20 | 21 | // Set at later stage when extracting the XML files from the body 22 | pub xml_files: usize, 23 | 24 | // Set at later stage during parsing 25 | pub parsing_errors: usize, 26 | } 27 | 28 | /// Decoding of Q-encoded data as described in RFC2047 29 | fn q_decode(mut data: &str) -> Result> { 30 | let mut result = Vec::new(); 31 | while !data.is_empty() { 32 | if data.starts_with('_') { 33 | // This is always ASCII space (0x20) 34 | result.push(0x20); 35 | data = &data[1..]; 36 | } else if data.starts_with('=') { 37 | // This is followed by two hex digits encoding a byte 38 | if data.len() >= 3 { 39 | let hex = &data[1..3]; 40 | let value = u8::from_str_radix(hex, 16) 41 | .context("Expected valid hex string but found something else")?; 42 | result.push(value); 43 | data = &data[3..]; 44 | } else { 45 | bail!("The equal character must be followed by two hex characters"); 46 | } 47 | } else { 48 | // Keep everything else as is... 49 | let byte = &data.as_bytes()[0..1]; 50 | result.extend_from_slice(byte); 51 | data = &data[1..]; 52 | } 53 | } 54 | Ok(result) 55 | } 56 | 57 | /// Decoding of MIME encoded words as described in RFC2047 58 | /// This implementation currently only supports UTF-8! 59 | fn decode_word(charset: &str, encoding: &str, data: &str) -> Result { 60 | let charset = charset.to_lowercase(); 61 | let encoding = encoding.to_lowercase(); 62 | let decoded = if encoding == "b" { 63 | STANDARD 64 | .decode(data) 65 | .context("Failed to decode Base64 data")? 66 | } else if encoding == "q" { 67 | q_decode(data).context("Failed to decode Q data")? 
68 | } else { 69 | bail!("Unsupported encoding: {encoding}") 70 | }; 71 | if charset == "utf-8" { 72 | String::from_utf8(decoded).context("Failed to parse UTF-8 string") 73 | } else { 74 | // Unsupported charset 75 | bail!("Unsupported charset: {charset}") 76 | } 77 | } 78 | 79 | /// Basic decoder for subjects containing MIME encoded words. 80 | /// Supported charsets: Only UTF-8 81 | /// Supported encodings: Base64 and Q 82 | pub fn decode_subject(value: &str) -> String { 83 | let re = Regex::new(r"=\?(.+?)\?(.)\?(.+?)\?=").unwrap(); 84 | let mut result = value.to_owned(); 85 | for capture in re.captures_iter(value) { 86 | let (matched, [charset, encoding, encoded]) = capture.extract(); 87 | let decoded = match decode_word(charset, encoding, encoded) { 88 | Ok(word) => word, 89 | Err(_) => continue, 90 | }; 91 | result = result.replace(matched, &decoded); 92 | } 93 | result 94 | } 95 | 96 | #[cfg(test)] 97 | mod tests { 98 | use super::*; 99 | 100 | #[test] 101 | fn q_decode_test() { 102 | assert_eq!(q_decode("").unwrap(), Vec::::new()); 103 | assert_eq!(q_decode("abc").unwrap(), vec![b'a', b'b', b'c']); 104 | assert_eq!(q_decode("_").unwrap(), vec![0x20]); 105 | assert_eq!( 106 | q_decode("=00=ff=AA_abc").unwrap(), 107 | vec![0x00, 0xff, 0xaa, 0x20, b'a', b'b', b'c'] 108 | ); 109 | assert_eq!( 110 | q_decode("Best=C3=A4tigen").unwrap(), 111 | vec![66, 101, 115, 116, 195, 164, 116, 105, 103, 101, 110] 112 | ); 113 | } 114 | 115 | #[test] 116 | fn decode_word_test() { 117 | assert_eq!(decode_word("utf-8", "b", "YWJj").unwrap(), "abc"); 118 | assert_eq!(decode_word("UtF-8", "B", "YWJj").unwrap(), "abc"); 119 | assert_eq!(decode_word("utf-8", "q", "=C3=A4").unwrap(), "ä"); 120 | assert_eq!(decode_word("utf-8", "b", "dGV4dA==").unwrap(), "text"); 121 | 122 | assert!(decode_word("unknown", "B", "YWJj").is_err()); 123 | assert!(decode_word("utf-8", "unknown", "YWJj").is_err()); 124 | assert!(decode_word("utf-8", "b", "not_valid_b64").is_err()); 125 | } 126 | 127 | #[test] 128 | fn decode_subject_test() { 129 | // Can handle empty strings 130 | assert_eq!(decode_subject(""), ""); 131 | 132 | // Can handle strings without encoded words 133 | assert_eq!(decode_subject("foobar 42"), "foobar 42"); 134 | 135 | // Ignores invalid words that cannot be decoded 136 | assert_eq!(decode_subject("=?foo?z?a?="), "=?foo?z?a?="); 137 | 138 | // Can decode words in the middle 139 | assert_eq!(decode_subject(" =?UTF-8?b?YWJj?= "), " abc "); 140 | 141 | // Can decode multiple words in one string 142 | assert_eq!( 143 | decode_subject(" =?UTF-8?B?YWJj?= =?UTF-8?Q?=C3=A4?= "), 144 | " abc ä " 145 | ); 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | 3 | mod background; 4 | mod cache_map; 5 | mod config; 6 | mod dmarc; 7 | mod geolocate; 8 | mod hasher; 9 | mod http; 10 | mod imap; 11 | mod mail; 12 | mod state; 13 | mod unpack; 14 | mod whois; 15 | 16 | use crate::background::start_bg_task; 17 | use crate::http::run_http_server; 18 | use crate::state::AppState; 19 | use anyhow::{Context, Result}; 20 | use config::Configuration; 21 | use std::sync::Arc; 22 | use tokio::sync::{mpsc::channel, Mutex}; 23 | use tracing::info; 24 | 25 | #[tokio::main] 26 | async fn main() -> Result<()> { 27 | // Create config from args and ENV variables. 28 | // Will exit early in case of error or help and version command. 
29 | let config = Configuration::new(); 30 | 31 | // Set up basic logging to stdout 32 | let subscriber = tracing_subscriber::fmt() 33 | .compact() 34 | .with_max_level(config.log_level) 35 | .with_target(false) 36 | .with_ansi(false) 37 | .finish(); 38 | tracing::subscriber::set_global_default(subscriber) 39 | .expect("Failed to set up default tracing subscriber"); 40 | 41 | // Log app name and version 42 | let version = env!("CARGO_PKG_VERSION"); 43 | info!("DMARC Report Analyzer {version}"); 44 | 45 | // Inject git hash for logging during Github builds. 46 | // Other builds, like normal local dev builds do not support this. 47 | let git_hash = option_env!("GITHUB_SHA").unwrap_or("n/a"); 48 | let git_ref = option_env!("GITHUB_REF_NAME").unwrap_or("n/a"); 49 | info!("Git-Hash: {git_hash} ({git_ref})"); 50 | 51 | // Make configuration visible in logs 52 | config.log(); 53 | 54 | // Prepare shared application state 55 | let state = Arc::new(Mutex::new(AppState::new())); 56 | 57 | // Start background task 58 | let (stop_sender, stop_receiver) = channel(1); 59 | let bg_handle = start_bg_task(config.clone(), state.clone(), stop_receiver); 60 | 61 | // Starting HTTP server 62 | run_http_server(&config, state.clone()) 63 | .await 64 | .context("Failed to start HTTP server")?; 65 | 66 | // Shutdown rest of app after HTTP server stopped 67 | info!("HTTP server stopped"); 68 | info!("Shutting down background task..."); 69 | stop_sender 70 | .send(()) 71 | .await 72 | .expect("Failed to send background task shutdown signal"); 73 | bg_handle.await.expect("Failed to join background task"); 74 | info!("Background task stopped, application shutdown completed!"); 75 | 76 | Ok(()) 77 | } 78 | -------------------------------------------------------------------------------- /src/state.rs: -------------------------------------------------------------------------------- 1 | use crate::dmarc::{DmarcParsingError, Report}; 2 | use crate::geolocate::Location; 3 | use crate::{cache_map::CacheMap, mail::Mail}; 4 | use serde::{Deserialize, Serialize}; 5 | use std::collections::HashMap; 6 | use std::net::IpAddr; 7 | 8 | const CACHE_SIZE: usize = 10000; 9 | 10 | /// Report with UID of the mail that contained the report 11 | #[derive(Debug, Serialize, Deserialize)] 12 | pub struct DmarcReportWithUid { 13 | pub uid: u32, 14 | pub report: Report, 15 | } 16 | 17 | /// Shared state between the different parts of the application. 18 | /// Connects the background task that collects mails via IMAP, 19 | /// parses them, analyzes DMARC reports and makes them available for 20 | /// the web frontend running on to the embedded HTTP server. 
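///
/// The state is shared as an `Arc<Mutex<AppState>>` (see `main.rs` and the HTTP
/// handlers). A minimal usage sketch:
///
/// ```ignore
/// let state = Arc::new(Mutex::new(AppState::new()));
/// let mail_count = state.lock().await.mails.len();
/// ```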
21 | pub struct AppState { 22 | /// Mails from IMAP inbox with mail UID as key 23 | pub mails: HashMap, 24 | 25 | /// Parsed DMARC reports with mail UID and corresponding hash as key 26 | pub dmarc_reports: HashMap, 27 | 28 | /// Number of XML files extracted from mails 29 | pub xml_files: usize, 30 | 31 | /// Time of last update from IMAP inbox as Unix timestamp 32 | pub last_update: u64, 33 | 34 | /// XML DMARC parsing errors keyed by mail UID 35 | pub dmarc_parsing_errors: HashMap>, 36 | 37 | /// IP to DNS cache 38 | pub ip_dns_cache: CacheMap, 39 | 40 | /// IP to location cache 41 | pub ip_location_cache: CacheMap, 42 | } 43 | 44 | impl AppState { 45 | pub fn new() -> Self { 46 | Self { 47 | mails: HashMap::new(), 48 | dmarc_reports: HashMap::new(), 49 | last_update: 0, 50 | xml_files: 0, 51 | dmarc_parsing_errors: HashMap::new(), 52 | ip_dns_cache: CacheMap::new(CACHE_SIZE).expect("Failed to create DNS cache"), 53 | ip_location_cache: CacheMap::new(CACHE_SIZE).expect("Failed to create location cache"), 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /src/unpack.rs: -------------------------------------------------------------------------------- 1 | use crate::hasher::create_hash; 2 | use crate::mail::Mail; 3 | use anyhow::{Context, Result}; 4 | use flate2::read::GzDecoder; 5 | use mailparse::{MailHeaderMap, ParsedMail}; 6 | use std::io::{Cursor, Read}; 7 | use tracing::{trace, warn}; 8 | use zip::ZipArchive; 9 | 10 | /// Get zero or more XML files from a ZIP archive 11 | fn get_xml_from_zip(zip_bytes: &[u8]) -> Result>> { 12 | let cursor = Cursor::new(zip_bytes); 13 | let mut archive = ZipArchive::new(cursor).context("Failed to binary data as ZIP")?; 14 | 15 | let file_count = archive.len(); 16 | if file_count == 0 { 17 | warn!("ZIP file is empty"); 18 | } 19 | 20 | let mut xml_files = Vec::new(); 21 | for i in 0..file_count { 22 | let mut file = archive.by_index(i).context("Unable to get file from ZIP")?; 23 | let file_name = file.name(); 24 | 25 | if !file_name.ends_with(".xml") { 26 | warn!("File {file_name} in ZIP is not an XML file, skipping...",); 27 | continue; 28 | } 29 | 30 | let mut xml_file = Vec::new(); 31 | file.read_to_end(&mut xml_file) 32 | .context("Failed to read XML from ZIP")?; 33 | xml_files.push(xml_file); 34 | } 35 | 36 | Ok(xml_files) 37 | } 38 | 39 | /// Merge name value of content type header in case its split like described 40 | /// here: https://datatracker.ietf.org/doc/html/rfc2231#section-3 41 | /// Please note that `mailparse` already removes the line breaks before this function is used! 
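/// For example, `application/octet-stream; name*0=foo; name*1=bar; name*2=.jpeg`
/// is merged into `application/octet-stream; name="foobar.jpeg"` (see the tests below).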
42 | fn merge_name_parts(value: &str) -> String { 43 | let mut out_buffer = String::new(); 44 | let mut name_buffer = String::new(); 45 | let mut next = 0; 46 | 47 | for segment in value.trim().split("; ") { 48 | let next_prefix = format!("name*{next}="); 49 | if let Some(mut candidate) = segment.trim().strip_prefix(&next_prefix).map(String::from) { 50 | next += 1; 51 | if candidate.ends_with(';') { 52 | candidate.pop(); 53 | } 54 | if candidate.len() > 2 && candidate.starts_with('"') && candidate.ends_with('"') { 55 | candidate.remove(0); 56 | candidate.pop(); 57 | } 58 | name_buffer += &candidate; 59 | } else if out_buffer.is_empty() { 60 | out_buffer += segment; 61 | } else { 62 | out_buffer += &format!("; {segment}"); 63 | } 64 | } 65 | 66 | if !name_buffer.is_empty() { 67 | out_buffer += &format!("; name=\"{name_buffer}\""); 68 | } 69 | 70 | out_buffer 71 | } 72 | 73 | /// Get a single XML file from a GZ archive 74 | fn get_xml_from_gz(gz_bytes: &[u8]) -> Result> { 75 | let mut gz = GzDecoder::new(gz_bytes); 76 | let mut xml_file = Vec::new(); 77 | gz.read_to_end(&mut xml_file) 78 | .context("Failed to read file from GZ archive")?; 79 | Ok(xml_file) 80 | } 81 | 82 | pub fn extract_xml_files(mail: &mut Mail) -> Result> { 83 | // Consume mail body to avoid keeping the longer needed data in memory 84 | let body = mail.body.take().context("Missing mail body")?; 85 | 86 | let mut xml_files = Vec::new(); 87 | let parsed = mailparse::parse_mail(&body).context("Failed to parse mail body")?; 88 | let parts: Vec<&ParsedMail> = parsed.parts().collect(); 89 | let uid = mail.uid; 90 | trace!("Parsed mail with UID {uid} and found {} parts", parts.len()); 91 | for (index, part) in parts.iter().enumerate() { 92 | let Some(content_type) = part.get_headers().get_first_value("Content-Type") else { 93 | trace!("Skipping part {index} of mail with UID {uid} because of missing content type",); 94 | continue; 95 | }; 96 | trace!("Part {index} of mail with UID {uid} has content type '{content_type}'"); 97 | 98 | // Some long names in content type headers names are split into multiple lines and parts: 99 | // application/octet-stream; 100 | // name*0=amazonses.com!xxxxxxxxxxxxxxxxxxxxxx!1745884800!1745971200.xm; 101 | // name*1=l.gz 102 | let content_type = merge_name_parts(&content_type); 103 | 104 | // Detect compression based on content type header. 105 | // In most cases is directly a ZIP or GZIP type, but in some cases its generic 106 | // and we need to check for a file name ending with a certain extension. 
107 | // For example AWS uses such values: 108 | // Content-Type: application/octet-stream;name=amazonses.com!example.com!1722384000!1722470400.xml.gz 109 | if content_type.contains("application/zip") 110 | || content_type.contains("application/octet-stream") && content_type.contains(".zip") 111 | || content_type.contains("application/x-zip-compressed") 112 | && content_type.contains(".zip") 113 | { 114 | trace!("Detected ZIP attachment for mail with UID {uid} in part {index}"); 115 | let body = part 116 | .get_body_raw() 117 | .context("Failed to get raw body of attachment part")?; 118 | let xml_files_zip = 119 | get_xml_from_zip(&body).context("Failed to extract XML from ZIP attachment")?; 120 | trace!( 121 | "Extracted {} XML files from ZIP in part {index} of mail with UID {uid}", 122 | xml_files_zip.len() 123 | ); 124 | for xml in xml_files_zip { 125 | let hash = create_hash(&xml, Some(mail.uid)); 126 | xml_files.push(XmlFile { 127 | data: xml, 128 | mail_uid: mail.uid, 129 | hash, 130 | }); 131 | } 132 | } else if content_type.contains("application/gzip") 133 | || content_type.contains("application/octet-stream") && content_type.contains(".xml.gz") 134 | { 135 | trace!("Detected GZ attachment for mail with UID {uid} in part {index}"); 136 | let body = part 137 | .get_body_raw() 138 | .context("Failed to get raw body of attachment part")?; 139 | let xml = get_xml_from_gz(&body).context("Failed to extract XML from GZ attachment")?; 140 | let hash = create_hash(&xml, Some(mail.uid)); 141 | xml_files.push(XmlFile { 142 | data: xml, 143 | mail_uid: mail.uid, 144 | hash, 145 | }); 146 | } else if content_type.contains("text/xml") 147 | || content_type.contains("application/octet-stream") && content_type.contains(".xml") 148 | { 149 | trace!("Detected uncompressed XML attachment for mail with UID {uid} in part {index}"); 150 | let xml = part 151 | .get_body_raw() 152 | .context("Failed to get raw body of attachment part")?; 153 | let hash = create_hash(&xml, Some(mail.uid)); 154 | xml_files.push(XmlFile { 155 | data: xml, 156 | mail_uid: mail.uid, 157 | hash, 158 | }); 159 | } 160 | } 161 | 162 | Ok(xml_files) 163 | } 164 | 165 | /// In-memory representation of an unparsed XML file with mail UID and hash 166 | pub struct XmlFile { 167 | /// UID of the mail that contained this XML file 168 | pub mail_uid: u32, 169 | /// Binary data of the XML file 170 | pub data: Vec, 171 | /// Hash of the XML data AND mail UID. 172 | /// UID needs to be included to avoid the same XML file from multiple mails being treated as the same file! 
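/// The value comes from `create_hash(&xml, Some(mail.uid))` in `extract_xml_files` above.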
173 | pub hash: String, 174 | } 175 | 176 | #[cfg(test)] 177 | mod tests { 178 | use super::*; 179 | 180 | #[test] 181 | fn test_merge_name_parts() { 182 | let input = "application/octet-stream; name*0=amazonses.com!xxxxxxxxxxxxxxxxxxxxxx!1745884800!1745971200.xm; name*1=l.gz"; 183 | let output = merge_name_parts(input); 184 | assert!(output.contains( 185 | "name=\"amazonses.com!xxxxxxxxxxxxxxxxxxxxxx!1745884800!1745971200.xml.gz\"" 186 | )); 187 | 188 | let input = "application/octet-stream; name*0=foo; name*1=bar; name*2=.jpeg"; 189 | let output = merge_name_parts(input); 190 | assert!(output.contains("name=\"foobar.jpeg\"")); 191 | 192 | let input = "application/octet-stream; name*0=\"foo\"; name*1=\"bar\"; name*2=\".jpeg\""; 193 | let output = merge_name_parts(input); 194 | assert!(output.contains("name=\"foobar.jpeg\"")); 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /src/whois.rs: -------------------------------------------------------------------------------- 1 | // This file contains a minimal whois client for IPs. 2 | // It will not work for domains. 3 | // Its a heavily modified and simplified version of an existing library. 4 | // See https://github.com/magiclen/whois-rust for the original code! 5 | 6 | use anyhow::{bail, Context, Result}; 7 | use regex::Regex; 8 | use std::net::IpAddr; 9 | use std::time::Duration; 10 | use tokio::io::{AsyncReadExt, AsyncWriteExt}; 11 | use tokio::net::TcpStream; 12 | use tokio::time::timeout; 13 | 14 | pub struct WhoIsIp { 15 | regex: Regex, 16 | server: Server, 17 | timeout: Duration, 18 | max_follows: u8, 19 | } 20 | 21 | impl Default for WhoIsIp { 22 | fn default() -> Self { 23 | Self { 24 | server: Server::default(), 25 | regex: Regex::new(r"(ReferralServer|Registrar Whois|Whois Server|WHOIS Server|Registrar WHOIS Server):[^\S\n]*(r?whois://)?(.*)").expect("Failed to cosntruct RegEx"), 26 | timeout: Duration::from_secs(10), 27 | max_follows: 3, 28 | } 29 | } 30 | } 31 | 32 | impl WhoIsIp { 33 | async fn get_tcp_stream(&self, addr: &str) -> Result { 34 | timeout(self.timeout, TcpStream::connect(addr)) 35 | .await 36 | .context("TCP connect timed out")? 37 | .context("TCP connect failed") 38 | } 39 | 40 | async fn lookup_once(&self, ip: &IpAddr, server: &Server) -> Result { 41 | let server_addr = format!("{}:{}", server.host, server.port); 42 | let mut client = self 43 | .get_tcp_stream(&server_addr) 44 | .await 45 | .context("Failed to get TCP stream")?; 46 | let query = server.query.replace("$addr", &ip.to_string()); 47 | timeout(self.timeout, client.write_all(query.as_bytes())) 48 | .await 49 | .context("Sending query timed out")? 50 | .context("Failed to send query")?; 51 | timeout(self.timeout, client.flush()) 52 | .await 53 | .context("Flushing query timed out")? 54 | .context("Failed to flush query")?; 55 | let mut result = String::new(); 56 | timeout(self.timeout, client.read_to_string(&mut result)) 57 | .await 58 | .context("Reading response timed out")? 
59 | .context("Failed to read response")?; 60 | Ok(AddrTextPair { 61 | server_addr, 62 | text: result, 63 | }) 64 | } 65 | 66 | async fn lookup_iterative( 67 | &self, 68 | ip: &IpAddr, 69 | server: &Server, 70 | mut follow: u8, 71 | ) -> Result { 72 | let mut result = self 73 | .lookup_once(ip, server) 74 | .await 75 | .context("Initial whois query failed")?; 76 | while follow > 0 { 77 | if let Some(captures) = self.regex.captures(&result.text) { 78 | if let Some(addr) = captures.get(3) { 79 | let addr = addr.as_str(); 80 | if addr.ne(&result.server_addr) { 81 | let server = 82 | Server::from_str(addr).context("Failed to parse server address")?; 83 | result = self 84 | .lookup_once(ip, &server) 85 | .await 86 | .context("Secondary whois query failed")?; 87 | follow -= 1; 88 | continue; 89 | } 90 | } 91 | } 92 | break; 93 | } 94 | Ok(result.text) 95 | } 96 | 97 | pub async fn lookup(&self, ip: &IpAddr) -> Result { 98 | self.lookup_iterative(ip, &self.server, self.max_follows) 99 | .await 100 | } 101 | } 102 | 103 | struct AddrTextPair { 104 | pub server_addr: String, 105 | pub text: String, 106 | } 107 | 108 | struct Server { 109 | pub host: String, 110 | pub port: u16, 111 | pub query: String, 112 | } 113 | 114 | impl Default for Server { 115 | fn default() -> Self { 116 | Self { 117 | host: String::from("whois.arin.net"), 118 | port: 43, 119 | query: String::from("n + $addr\r\n"), 120 | } 121 | } 122 | } 123 | 124 | impl Server { 125 | fn from_str(value: &str) -> Result { 126 | let query = String::from("$addr\r\n"); 127 | let parts: Vec<&str> = value.split(':').collect(); 128 | if parts.len() == 1 { 129 | let host = parts[0].to_string(); 130 | let port = 43; 131 | Ok(Server { host, query, port }) 132 | } else if parts.len() >= 2 { 133 | let host = parts[0].to_string(); 134 | let port: u16 = parts[1].parse().context("Failed to parse port")?; 135 | Ok(Server { host, query, port }) 136 | } else { 137 | bail!("Cannot parse address, expected host[:port]") 138 | } 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /testdata/dmarc-reports/acme.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | acme.com 5 | noreply-dmarc-support@acme.com 6 | http://acme.com/dmarc/support 7 | 9391651994964116463 8 | There was a sample error. 9 | 10 | 1335571200 11 | 1335657599 12 | 13 | 14 | 15 | example.com 16 | r 17 | r 18 |

none

19 | none 20 | 100 21 | 1 22 |
23 | 24 | 25 | 72.150.241.94 26 | 2 27 | 28 | none 29 | fail 30 | pass 31 | 32 | other 33 | DMARC Policy overridden for incoherent example. 34 | 35 | 36 | 37 | 38 | example.com 39 | example.com 40 | acme.com 41 | 42 | 43 | 44 | example.com 45 | ExamplesSelector 46 | fail 47 | Incoherent example 48 | 49 | 50 | example.com 51 | helo 52 | pass 53 | 54 | 55 | 56 |
-------------------------------------------------------------------------------- /testdata/dmarc-reports/aol.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | AOL 6 | postmaster@aol.com 7 | website.com_1504828800 8 | 9 | 1504742400 10 | 1504828800 11 | 12 | 13 | 14 | website.com 15 | r 16 | r 17 |

reject

18 | reject 19 | 100 20 |
21 | 22 | 23 | 125.125.125.125 24 | 1 25 | 26 | none 27 | pass 28 | pass 29 | 30 | 31 | 32 | website.com 33 | 34 | 35 | 36 | website.com 37 | pass 38 | 39 | 40 | website.com 41 | mfrom 42 | pass 43 | 44 | 45 | 46 |
-------------------------------------------------------------------------------- /testdata/dmarc-reports/gmxnet.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 1.0 4 | 5 | GMX 6 | noreply-dmarc@sicher.gmx.net 7 | https://postmaster.gmx.net/en/case?c=r2002 8 | 6d2be94cbabf4e838a3cf58fb4a42ab5 9 | 10 | 1733184000 11 | 1733270399 12 | 13 | 14 | 15 | myserver.com 16 | psl 17 | r 18 | r 19 |

reject

20 | reject 21 | n 22 |
23 | 24 | 25 | 11.222.33.44 26 | 1 27 | 28 | none 29 | pass 30 | pass 31 | 32 | 33 | 34 | myserver.com 35 | myserver.com 36 | 37 | 38 | 39 | myserver.com 40 | abc123 41 | pass 42 | 43 | 44 | myserver.com 45 | mfrom 46 | pass 47 | 48 | 49 | 50 |
51 | -------------------------------------------------------------------------------- /testdata/dmarc-reports/google.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | google.com 5 | noreply-dmarc-support@google.com 6 | https://support.google.com/a/answer/2466580 7 | 3166094538684628578 8 | 9 | 1709683200 10 | 1709769599 11 | 12 | 13 | 14 | foo-bar.io 15 | r 16 | r 17 |

reject

18 | reject 19 | 100 20 | reject 21 |
22 | 23 | 24 | 1.2.3.4 25 | 1 26 | 27 | none 28 | pass 29 | pass 30 | 31 | 32 | 33 | foo-bar.io 34 | 35 | 36 | 37 | foo-bar.io 38 | pass 39 | krs 40 | 41 | 42 | foo-bar.io 43 | pass 44 | 45 | 46 | 47 |
48 | -------------------------------------------------------------------------------- /testdata/dmarc-reports/hardfail.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | reporting org 5 | reporting@reporting.org 6 | abcdef 7 | 8 | 1727049600 9 | 1727135999 10 | 11 | 12 | 13 | r 14 | mydomain.org 15 | r 16 |

quarantine

17 | 100 18 | quarantine 19 |
20 | 21 | 22 | 42.42.42.42 23 | 1 24 | 25 | quarantine 26 | fail 27 | fail 28 | 29 | 30 | 31 | mydomain.org 32 | 33 | 34 | 35 | fail 36 | 37 | 38 | 39 | 40 | mydomain.org 41 | hardfail 42 | 43 | 44 | 45 |
-------------------------------------------------------------------------------- /testdata/dmarc-reports/mailru.xml: -------------------------------------------------------------------------------- 1 | Mail.Rudmarc_support@corp.mail.ruhttp://help.mail.ru/mail-help2832732119368115491172136080017212608001721347200foobar.derr

reject

reject100
118.41.204.21rejectfailfailfoobar.defoobar.demfromsoftfail
-------------------------------------------------------------------------------- /testdata/dmarc-reports/outlook.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 1.0 4 | 5 | Outlook.com 6 | dmarcreport@microsoft.com 7 | a4f4ef0654474d3faa5dca167a34a86a 8 | 9 | 1709683200 10 | 1709769600 11 | 12 | 13 | 14 | random.net 15 | r 16 | r 17 |

reject

18 | reject 19 | 100 20 | 0 21 |
22 | 23 | 24 | 1.2.3.4 25 | 1 26 | 27 | none 28 | pass 29 | pass 30 | 31 | 32 | 33 | live.de 34 | random.net 35 | random.net 36 | 37 | 38 | 39 | random.net 40 | def 41 | pass 42 | 43 | 44 | random.net 45 | mfrom 46 | pass 47 | 48 | 49 | 50 | 51 | 52 | 1.2.3.4 53 | 2 54 | 55 | none 56 | pass 57 | pass 58 | 59 | 60 | 61 | outlook.de 62 | random.net 63 | random.net 64 | 65 | 66 | 67 | random.net 68 | def 69 | pass 70 | 71 | 72 | random.net 73 | mfrom 74 | pass 75 | 76 | 77 | 78 |
-------------------------------------------------------------------------------- /testdata/dmarc-reports/solamora.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | solarmora.com 5 | noreply-dmarc-support@solarmora.com 6 | http://solarmora.com/dmarc/support 7 | 9391651994964116463 8 | 9 | 1335571200 10 | 1335657599 11 | 12 | 13 | 14 | bix-business.com 15 | r 16 | r 17 |

none

18 | none 19 | 100 20 |
21 | 22 | 23 | 203.0.113.209 24 | 2 25 | 26 | none 27 | fail 28 | pass 29 | 30 | 31 | 32 | bix-business.com 33 | 34 | 35 | 36 | bix-business.com 37 | fail 38 | 39 | 40 | 41 | bix-business.com 42 | pass 43 | 44 | 45 | 46 |
-------------------------------------------------------------------------------- /testdata/dmarc-reports/webde.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 1.0 4 | 5 | WEB.DE 6 | noreply-dmarc@sicher.web.de 7 | https://postmaster.web.de/en/case?c=r2002 8 | a3345c7cb5fd4f26aa62144bf449a54b 9 | 10 | 1722816000 11 | 1722902399 12 | 13 | 14 | 15 | foobar.com 16 | psl 17 | r 18 | r 19 |

reject

20 | none 21 | n 22 |
23 | 24 | 25 | 111.69.13.71 26 | 1 27 | 28 | none 29 | pass 30 | pass 31 | 32 | 33 | 34 | foobar.com 35 | foobar.com 36 | 37 | 38 | 39 | foobar.com 40 | sel123 41 | pass 42 | 43 | 44 | foobar.com 45 | mfrom 46 | pass 47 | 48 | 49 | 50 |
-------------------------------------------------------------------------------- /testdata/dmarc-reports/yahoo.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Yahoo 5 | dmarchelp@yahooinc.com 6 | 1709600619.487850 7 | 8 | 1709510400 9 | 1709596799 10 | 11 | 12 | 13 | random.org 14 | r 15 | r 16 |

reject

17 | 100 18 |
19 | 20 | 21 | 1.2.3.4 22 | 1 23 | 24 | none 25 | pass 26 | pass 27 | 28 | 29 | 30 | random.org 31 | 32 | 33 | 34 | random.org 35 | abc 36 | pass 37 | 38 | 39 | random.org 40 | pass 41 | 42 | 43 | 44 |
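The remaining reports above (gmxnet, google, hardfail, mailru, outlook, solamora, webde, yahoo) follow the same overall layout. Below is a small browser-side sketch for pulling a few fields out of such a file, assuming a complete feedback document in the shape outlined after acme.xml. DOMParser is a standard browser API, the selectors rely only on the RFC 7489 element names, and summarizeReport is an illustrative helper, not part of this project:

```js
// Hedged sketch: parse one DMARC aggregate report and read a few fields.
// `reportXml` stands for the text of one of the testdata files above.
function summarizeReport(reportXml) {
  const doc = new DOMParser().parseFromString(reportXml, "application/xml");
  if (doc.querySelector("parsererror")) {
    throw new Error("report is not well-formed XML");
  }
  const text = (selector) => doc.querySelector(selector)?.textContent ?? null;
  return {
    org: text("report_metadata > org_name"),
    reportId: text("report_metadata > report_id"),
    domain: text("policy_published > domain"),
    policy: text("policy_published > p"),
    records: [...doc.querySelectorAll("record")].map((record) => ({
      sourceIp: record.querySelector("row > source_ip")?.textContent ?? null,
      count: Number(record.querySelector("row > count")?.textContent ?? 0),
      dkim: record.querySelector("policy_evaluated > dkim")?.textContent ?? null,
      spf: record.querySelector("policy_evaluated > spf")?.textContent ?? null,
    })),
  };
}

// Example (values visible in yahoo.xml above): summarizeReport(yahooXml).records[0]
// would yield { sourceIp: "1.2.3.4", count: 1, dkim: "pass", spf: "pass" }.
```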
-------------------------------------------------------------------------------- /ui/components/about.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class About extends LitElement { 5 | static styles = [globalStyle]; 6 | 7 | static properties = { 8 | version: { type: Object }, 9 | hash: { type: String }, 10 | ref: { type: String } 11 | }; 12 | 13 | constructor() { 14 | super(); 15 | this.version = "n/a"; 16 | this.hash = "n/a"; 17 | this.ref = "n/a"; 18 | this.updateBuild(); 19 | } 20 | 21 | async updateBuild() { 22 | const versionResponse = await fetch("build"); 23 | const json = await versionResponse.json(); 24 | this.version = json.version; 25 | this.ref = json.ref; 26 | this.hash = json.hash; 27 | } 28 | 29 | render() { 30 | return html` 31 |

About

32 |
33 | This DMARC Report Viewer is an open source application written in Rust and JavaScript.
34 | You can find the source code, license and issue tracker on GitHub: 35 | github.com/cry-inc/dmarc-report-viewer 36 |
37 |

38 | Version: ${this.version}
39 | Git Hash: ${this.hash}
40 | Git Ref: ${this.ref} 41 |

42 | `; 43 | } 44 | } 45 | 46 | customElements.define("drv-about", About); 47 | -------------------------------------------------------------------------------- /ui/components/app.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html, css } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class App extends LitElement { 5 | static styles = [globalStyle, css` 6 | a, a:visited { 7 | padding-right: 15px; 8 | color: rgba(255, 255, 255, 0.5); 9 | text-decoration: none; 10 | } 11 | 12 | a:hover { 13 | color: rgba(255, 255, 255, 0.75); 14 | } 15 | 16 | a.active { 17 | color: white; 18 | } 19 | 20 | a.right { 21 | position: relative; 22 | float: right; 23 | } 24 | 25 | nav { 26 | background-color: #343a40; 27 | padding: 15px; 28 | } 29 | 30 | main { 31 | position: fixed; 32 | top: 52px; 33 | bottom: 0px; 34 | left: 0px; 35 | right: 0px; 36 | padding: 15px; 37 | overflow-y: auto; 38 | } 39 | `]; 40 | 41 | static get properties() { 42 | return { 43 | component: { type: String }, 44 | params: { type: Object }, 45 | reportHash: { type: String }, 46 | mailUid: { type: String }, 47 | }; 48 | } 49 | 50 | constructor() { 51 | super(); 52 | this.component = "dashboard"; 53 | this.params = {}; 54 | this.reportHash = null; 55 | this.mailUid = null; 56 | window.onhashchange = () => this.onHashChange(); 57 | this.onHashChange(); 58 | } 59 | 60 | async onHashChange() { 61 | let hash = document.location.hash; 62 | 63 | // Split off and parse query params behind route 64 | const sep = hash.indexOf("?"); 65 | this.params = {}; 66 | if (sep != -1) { 67 | const param = hash.substring(sep + 1).split("&"); 68 | param.forEach((param) => { 69 | const keyValue = param.split("="); 70 | if (keyValue.length === 2) { 71 | this.params[keyValue[0]] = keyValue[1]; 72 | } 73 | }); 74 | hash = hash.substring(0, sep); 75 | } 76 | 77 | // Parse routes and route parameters 78 | if (hash == "#/dmarc-reports") { 79 | this.component = "dmarc-reports"; 80 | } else if (hash.startsWith("#/dmarc-reports/")) { 81 | this.component = "dmarc-report"; 82 | this.reportHash = hash.substring(16); 83 | } else if (hash == "#/mails") { 84 | this.component = "mails"; 85 | } else if (hash.startsWith("#/mails/")) { 86 | this.component = "mail"; 87 | this.mailUid = hash.substring(8); 88 | } else if (hash == "#/about") { 89 | this.component = "about"; 90 | } else { 91 | this.component = "dashboard"; 92 | } 93 | } 94 | 95 | render() { 96 | let component; 97 | if (this.component == "dmarc-reports") { 98 | component = html``; 99 | } else if (this.component == "dmarc-report") { 100 | component = html``; 101 | } else if (this.component == "mails") { 102 | component = html``; 103 | } else if (this.component == "mail") { 104 | component = html``; 105 | } else if (this.component == "about") { 106 | component = html``; 107 | } else { 108 | component = html``; 109 | } 110 | 111 | return html` 112 | 118 |
${component}
119 | `; 120 | } 121 | } 122 | 123 | customElements.define("drv-app", App); 124 | -------------------------------------------------------------------------------- /ui/components/dashboard.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html, css } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class Dashboard extends LitElement { 5 | static styles = [globalStyle, css` 6 | .grid { 7 | display: grid; 8 | gap: 10px; 9 | grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); 10 | } 11 | 12 | .module { 13 | border: 1px solid #e0e0e0; 14 | border-radius: 3px; 15 | background-color: #efefef; 16 | padding: 5px; 17 | text-align: center; 18 | } 19 | 20 | .module canvas { 21 | margin: auto; 22 | } 23 | 24 | .stats { 25 | margin-bottom: 10px; 26 | } 27 | 28 | .stats span { 29 | margin-left: 15px; 30 | margin-right: 15px; 31 | } 32 | `]; 33 | 34 | static properties = { 35 | params: { type: Object }, 36 | mails: { type: Number }, 37 | xmlFiles: { type: Number }, 38 | dmarcReports: { type: Number }, 39 | lastUpdate: { type: Number }, 40 | domains: { type: Array }, 41 | }; 42 | 43 | constructor() { 44 | super(); 45 | 46 | this.params = {}; 47 | this.mails = 0; 48 | this.xmlFiles = 0; 49 | this.dmarcReports = 0; 50 | this.lastUpdate = 0; 51 | this.domains = []; 52 | 53 | this.getDomains(); 54 | } 55 | 56 | async getDomains() { 57 | const response = await fetch("summary"); 58 | const summary = await response.json(); 59 | this.domains = Object.keys(summary.dmarc_domains); 60 | this.domains.sort(); 61 | } 62 | 63 | updated(changedProperties) { 64 | if (changedProperties.has("params")) { 65 | this.updateCharts(); 66 | } 67 | } 68 | 69 | onTimeSpanChange(event) { 70 | const value = event.target.value; 71 | if (value && value !== "0") { 72 | this.params.ts = value; 73 | } else { 74 | delete this.params.ts; 75 | } 76 | this.updateByParams(); 77 | } 78 | 79 | onDomainChange(event) { 80 | const value = event.target.value; 81 | if (value && value !== "all") { 82 | this.params.domain = value; 83 | } else { 84 | delete this.params.domain; 85 | } 86 | this.updateByParams(); 87 | } 88 | 89 | updateByParams() { 90 | let params = Object. 91 | keys(this.params). 92 | map(k => k + "=" + this.params[k]). 93 | join("&"); 94 | if (params.length > 0) { 95 | document.location.href = "#/dashboard?" + params; 96 | } else { 97 | document.location.href = "#/dashboard"; 98 | } 99 | } 100 | 101 | async updateCharts() { 102 | const queryParams = []; 103 | if (this.params.ts && this.params.ts !== "0") { 104 | queryParams.push("time_span=" + this.params.ts); 105 | } 106 | if (this.params.domain && this.params.domain !== "all") { 107 | queryParams.push("domain=" + this.params.domain); 108 | } 109 | let url = "summary"; 110 | if (queryParams.length > 0) { 111 | url += "?" 
+ queryParams.join("&"); 112 | } 113 | const response = await fetch(url); 114 | const summary = await response.json(); 115 | 116 | const resultColorMap = { 117 | "none": "rgb(108, 117, 125)", 118 | "fail": "rgb(220, 53, 69)", 119 | "pass": "rgb(25, 135, 84)", 120 | "softfail": "rgb(255, 193, 7)", 121 | "policy": "rgb(13, 110, 253)", 122 | "neutral": "rgb(13, 202, 240)", 123 | "temperror": "rgb(253, 126, 20)", 124 | "permerror": "rgb(132, 32, 41)", 125 | }; 126 | 127 | const orgColorMap = { 128 | "google.com": "#ea4335", 129 | "Yahoo": "#6001d2", 130 | "WEB.DE": "#ffd800", 131 | "Mail.Ru": "#0078ff", 132 | "GMX": "#1c449b", 133 | "Outlook.com": "#0078d4", 134 | "Enterprise Outlook": "#0078d4", 135 | "Fastmail Pty Ltd": "#0067b9", 136 | "AMAZON-SES": "#ff9900", 137 | }; 138 | 139 | this.mails = summary.mails; 140 | this.xmlFiles = summary.xml_files; 141 | this.dmarcReports = summary.dmarc_reports; 142 | this.lastUpdate = summary.last_update; 143 | 144 | if (this.orgs_chart) this.orgs_chart.destroy(); 145 | this.orgs_chart = await this.createPieChart("orgs_chart", this.sortedMap(summary.dmarc_orgs), orgColorMap, function (label) { 146 | window.location.hash = "#/dmarc-reports?org=" + encodeURIComponent(label); 147 | }); 148 | 149 | if (this.domains_chart) this.domains_chart.destroy(); 150 | this.domains_chart = await this.createPieChart("domains_chart", this.sortedMap(summary.dmarc_domains), null, function (label) { 151 | window.location.hash = "#/dmarc-reports?domain=" + encodeURIComponent(label); 152 | }); 153 | 154 | if (this.spf_policy_chart) this.spf_policy_chart.destroy(); 155 | this.spf_policy_chart = await this.createPieChart("spf_policy_chart", this.sortedMap(summary.spf_policy_results), resultColorMap); 156 | 157 | if (this.dkim_policy_chart) this.dkim_policy_chart.destroy(); 158 | this.dkim_policy_chart = await this.createPieChart("dkim_policy_chart", this.sortedMap(summary.dkim_policy_results), resultColorMap); 159 | 160 | if (this.spf_auth_chart) this.spf_auth_chart.destroy(); 161 | this.spf_auth_chart = await this.createPieChart("spf_auth_chart", this.sortedMap(summary.spf_auth_results), resultColorMap); 162 | 163 | if (this.dkim_auth_chart) this.dkim_auth_chart.destroy(); 164 | this.dkim_auth_chart = await this.createPieChart("dkim_auth_chart", this.sortedMap(summary.dkim_auth_results), resultColorMap); 165 | } 166 | 167 | sortedMap(map) { 168 | const keys = Object.keys(map); 169 | keys.sort((a, b) => { 170 | if (map[a] < map[b]) 171 | return 1; 172 | if (map[a] > map[b]) 173 | return -1; 174 | else 175 | return b; 176 | }); 177 | const newMap = {}; 178 | keys.forEach(k => newMap[k] = map[k]); 179 | return newMap; 180 | } 181 | 182 | async createPieChart(canvasId, dataMap, colorMap, onLabelClick) { 183 | const defaultColors = [ 184 | "rgb(13, 202, 240)", 185 | "rgb(253, 126, 20)", 186 | "rgb(25, 135, 84)", 187 | "rgb(220, 53, 69)", 188 | "rgb(13, 110, 253)", 189 | "rgb(255, 193, 7)", 190 | "rgb(108, 117, 125)", 191 | "rgb(132, 32, 41)" 192 | ]; 193 | 194 | const element = this.renderRoot.querySelector("." 
+ canvasId); 195 | 196 | const labels = Object.keys(dataMap); 197 | const data = labels.map(k => dataMap[k]); 198 | 199 | let colors = undefined; 200 | if (colorMap !== undefined && colorMap !== null) { 201 | colors = labels.map(l => colorMap[l]); 202 | 203 | // Use default color set to colorize labels without explicit color 204 | let nextColor = 0; 205 | for (let i = 0; i < colors.length; i++) { 206 | if (!colors[i]) { 207 | colors[i] = defaultColors[nextColor % defaultColors.length]; 208 | nextColor++; 209 | } 210 | } 211 | } else { 212 | colors = defaultColors; 213 | } 214 | 215 | return new Chart(element, { 216 | type: "pie", 217 | data: { 218 | labels, 219 | datasets: [{ 220 | data: data, 221 | backgroundColor: colors 222 | }], 223 | }, 224 | options: { 225 | onClick: function (event, element, chart) { 226 | if (onLabelClick) { 227 | const label = labels[element[0].index]; 228 | onLabelClick(label); 229 | } 230 | }, 231 | plugins: { 232 | legend: { 233 | maxHeight: 70 234 | } 235 | } 236 | } 237 | }); 238 | } 239 | 240 | render() { 241 | return html` 242 |

Dashboard

243 | 244 |
245 | Mails: ${this.mails} 246 | XML Files: ${this.xmlFiles} 247 | DMARC Reports: ${this.dmarcReports} 248 | Last Update: ${new Date(this.lastUpdate * 1000).toLocaleString()} 249 |
250 | 251 |
252 | 253 | Time Span for Summary Charts: 254 | 262 | 263 | 264 | 265 | Domain: 266 | 274 | 275 |
276 | 277 |
278 |
279 |

DMARC Organizations

280 | 281 |
282 | 283 |
284 |

DMARC Domains

285 | 286 |
287 | 288 |
289 |

SPF Policy Results

290 | 291 |
292 | 293 |
294 |

DKIM Policy Results

295 | 296 |
297 | 298 |
299 |

SPF Auth Results

300 | 301 |
302 | 303 |
304 |

DKIM Auth Results

305 | 306 |
307 |
308 | `; 309 | } 310 | } 311 | 312 | customElements.define("drv-dashboard", Dashboard); 313 | -------------------------------------------------------------------------------- /ui/components/dmarc-report-table.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html, css } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class ReportTable extends LitElement { 5 | static styles = [globalStyle]; 6 | 7 | static properties = { 8 | reports: { type: Array }, 9 | }; 10 | 11 | constructor() { 12 | super(); 13 | this.reports = []; 14 | } 15 | 16 | prepareId(id) { 17 | const limit = 25; 18 | if (id.length <= limit) { 19 | return id; 20 | } else { 21 | return id.substring(0, limit) + "..."; 22 | } 23 | } 24 | 25 | renderProblemBadges(dkim, spf) { 26 | const badges = []; 27 | if (dkim) { 28 | badges.push(html`DKIM`); 29 | } 30 | if (spf) { 31 | badges.push(html` SPF`); 32 | } 33 | return badges; 34 | } 35 | 36 | render() { 37 | return html` 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | ${this.reports.length !== 0 ? this.reports.map((report) => 49 | html` 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | ` 58 | 59 | ) : html` 60 | 61 | ` 62 | } 63 |
ID Organization Domain Problems Records Begin End
${this.prepareId(report.id)}${report.org}${report.domain}${this.renderProblemBadges(report.flagged_dkim, report.flagged_spf)}${report.records}${new Date(report.date_begin * 1000).toLocaleString()}${new Date(report.date_end * 1000).toLocaleString()}
No reports found.
64 | `; 65 | } 66 | } 67 | 68 | customElements.define("drv-dmarc-report-table", ReportTable); 69 | -------------------------------------------------------------------------------- /ui/components/dmarc-report.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html, css } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class Report extends LitElement { 5 | static styles = [globalStyle]; 6 | 7 | static get properties() { 8 | return { 9 | hash: { type: String }, 10 | uid: { type: String, attribute: false }, 11 | }; 12 | } 13 | 14 | constructor() { 15 | super(); 16 | this.hash = null; 17 | this.uid = null; 18 | this.report = null; 19 | this.ip2dns = {}; 20 | this.ip2location = {}; 21 | this.ipDetails = {}; 22 | } 23 | 24 | async updated(changedProperties) { 25 | if (changedProperties.has("hash") && changedProperties.hash !== this.hash && this.hash) { 26 | const response = await fetch("dmarc-reports/" + this.hash); 27 | const rwu = await response.json(); 28 | this.report = rwu.report; 29 | this.uid = rwu.uid; 30 | } 31 | } 32 | 33 | async lookupIp(ip) { 34 | if (this.ipDetails[ip]) { 35 | this.ipDetails[ip] = false; 36 | } else { 37 | this.ipDetails[ip] = true; 38 | this.getDnsForIp(ip); 39 | this.getLocationForIp(ip); 40 | } 41 | this.requestUpdate(); 42 | } 43 | 44 | async getDnsForIp(ip) { 45 | const response = await fetch("ips/" + ip + "/dns"); 46 | if (response.status === 200) { 47 | const result = await response.text(); 48 | this.ip2dns[ip] = result; 49 | } else { 50 | this.ip2dns[ip] = null; 51 | } 52 | this.requestUpdate(); 53 | } 54 | 55 | async getLocationForIp(ip) { 56 | const response = await fetch("ips/" + ip + "/location"); 57 | if (response.status === 200) { 58 | const result = await response.json(); 59 | this.ip2location[ip] = result; 60 | } else { 61 | this.ip2location[ip] = null; 62 | } 63 | this.requestUpdate(); 64 | } 65 | 66 | renderOptional(value) { 67 | if (value !== null && value !== undefined) { 68 | return html`${value}`; 69 | } else { 70 | return html`n/a`; 71 | } 72 | } 73 | 74 | renderResultBadge(result) { 75 | if (result === "fail" || result === "temperror" || 76 | result === "permerror" || result === "softfail" || 77 | result === "quarantine" || result === "reject" 78 | ) { 79 | return html`${result}`; 80 | } else if (result === "pass") { 81 | return html`${result}`; 82 | } else if (result !== null || result !== undefined) { 83 | return html`n/a`; 84 | } else { 85 | return html`${result}`; 86 | } 87 | } 88 | 89 | renderLocation(lat, lon) { 90 | return html`${lat}, ${lon}`; 91 | } 92 | 93 | renderPropIfObjDefined(obj, prop) { 94 | if (obj === undefined) { 95 | return html`loading...`; 96 | } else if (obj) { 97 | return obj[prop] 98 | } else { 99 | return html`n/a`; 100 | } 101 | } 102 | 103 | renderIfDefined(obj) { 104 | if (obj === undefined) { 105 | return html`loading...`; 106 | } else if (obj) { 107 | return obj; 108 | } else { 109 | return html`n/a`; 110 | } 111 | } 112 | 113 | render() { 114 | if (!this.report) { 115 | return html`No report loaded`; 116 | } 117 | 118 | let errors = null; 119 | if (this.report.report_metadata.error) { 120 | errors = this.report.report_metadata.error.join(", "); 121 | } 122 | 123 | return html` 124 |

Report Details

125 |

126 | Show Mail 127 | Open XML 128 | Open JSON 129 |

130 | 131 | 132 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 |
Report Header 133 |
Id${this.report.report_metadata.report_id}
Org${this.report.report_metadata.org_name}
Records${this.report.record.length}
Date Range Begin${new Date(this.report.report_metadata.date_range.begin * 1000).toLocaleString()}
Date Range End${new Date(this.report.report_metadata.date_range.end * 1000).toLocaleString()}
E-Mail${this.report.report_metadata.email}
Extra Contact Info${this.renderOptional(this.report.report_metadata.extra_contact_info)}
Errors${this.renderOptional(errors)}
Version${this.renderOptional(this.report.version)}
Published Policy
Domain${this.report.policy_published.domain}
adkim${this.renderOptional(this.report.policy_published.adkim)}
aspf${this.renderOptional(this.report.policy_published.aspf)}
p${this.report.policy_published.p}
sp${this.renderOptional(this.report.policy_published.sp)}
pct${this.renderOptional(this.report.policy_published.pct)}
fo${this.renderOptional(this.report.policy_published.fo)}
202 | ${this.report.record.map((record) => html` 203 |

Record

204 | 205 | 206 | 208 | 209 | 210 | 215 | 216 | 217 | 218 | 219 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | 262 | 263 | 264 | 265 | 266 | 267 | 268 | 269 | 270 | 271 | 272 | 273 | 280 | 281 | 282 | 283 | 284 | 285 | 286 | 287 | 288 | 289 | 290 | 291 | 292 | 293 | ${record.auth_results.spf.map((result) => html` 294 | 295 | 297 | 298 | 299 | 300 | 301 | 302 | 303 | 304 | 305 | 306 | 307 | 308 | 309 | `)} 310 | ${(record.auth_results.dkim ? 311 | record.auth_results.dkim : []).map((result) => html` 312 | 313 | 315 | 316 | 317 | 318 | 319 | 320 | 321 | 322 | 323 | 324 | 325 | 326 | 327 | 328 | 329 | 330 | 331 | `)} 332 |
Record Header 207 |
Source IP 211 | ${record.row.source_ip} 212 | 213 | Whois 214 |
Source IP DNS${this.renderIfDefined(this.ip2dns[record.row.source_ip])} 220 |
Source IP Country${this.renderPropIfObjDefined(this.ip2location[record.row.source_ip], "country")}
Source IP City${this.renderPropIfObjDefined(this.ip2location[record.row.source_ip], "city")}
Source IP ISP${this.renderPropIfObjDefined(this.ip2location[record.row.source_ip], "isp")}
Source IP AS${this.renderPropIfObjDefined(this.ip2location[record.row.source_ip], "as")}
Source IP Proxy${this.renderPropIfObjDefined(this.ip2location[record.row.source_ip], "proxy")}
Source IP Data Center${this.renderPropIfObjDefined(this.ip2location[record.row.source_ip], "hosting")}
Source IP Location${this.ip2location[record.row.source_ip] === undefined ? 249 | html`loading` : 250 | this.renderLocation(this.ip2location[record.row.source_ip].lat, this.ip2location[record.row.source_ip].lon) 251 | } 252 |
Count${record.row.count}
Policy Disposition${this.renderResultBadge(record.row.policy_evaluated.disposition)}
Policy DKIM${this.renderResultBadge(record.row.policy_evaluated.dkim)}
Policy SPF${this.renderResultBadge(record.row.policy_evaluated.spf)}
Policy Reason 274 | ${record.row.policy_evaluated.reason ? 275 | record.row.policy_evaluated.reason.map( 276 | (reason) => html`${reason.kind} ${reason.comment}` 277 | ) : html`n/a` 278 | } 279 |
Header From${record.identifiers.header_from}
Envelope From${this.renderOptional(record.identifiers.envelope_from)}
Envelope To${this.renderOptional(record.identifiers.envelope_to)}
SPF Auth Result 296 |
Domain${result.domain}
Scope${this.renderOptional(result.scope)}
Result${this.renderResultBadge(result.result)}
DKIM Auth Result 314 |
Domain${result.domain}
Selector${this.renderOptional(result.selector)}
Result${this.renderResultBadge(result.result)}
Human Result${this.renderOptional(result.human_result)}
333 | `)} 334 | `; 335 | } 336 | } 337 | 338 | customElements.define("drv-dmarc-report", Report); 339 | -------------------------------------------------------------------------------- /ui/components/dmarc-reports.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class Reports extends LitElement { 5 | static styles = [globalStyle]; 6 | 7 | static properties = { 8 | params: { type: Object }, 9 | reports: { type: Array }, 10 | }; 11 | 12 | constructor() { 13 | super(); 14 | this.params = {}; 15 | this.reports = []; 16 | this.filtered = false; 17 | } 18 | 19 | updated(changedProperties) { 20 | if (changedProperties.has("params")) { 21 | this.updateReports(); 22 | } 23 | } 24 | 25 | async updateReports() { 26 | const urlParams = []; 27 | if (this.params.flagged === "true" || this.params.flagged === "false") { 28 | urlParams.push("flagged=" + this.params.flagged); 29 | } 30 | if (this.params.flagged_dkim === "true" || this.params.flagged_dkim === "false") { 31 | urlParams.push("flagged_dkim=" + this.params.flagged_dkim); 32 | } 33 | if (this.params.flagged_spf === "true" || this.params.flagged_spf === "false") { 34 | urlParams.push("flagged_spf=" + this.params.flagged_spf); 35 | } 36 | if (this.params.domain) { 37 | urlParams.push("domain=" + encodeURIComponent(this.params.domain)); 38 | } 39 | if (this.params.org) { 40 | urlParams.push("org=" + encodeURIComponent(this.params.org)); 41 | } 42 | let url = "dmarc-reports"; 43 | if (urlParams.length > 0) { 44 | url += "?" + urlParams.join("&"); 45 | } 46 | const response = await fetch(url); 47 | this.reports = await response.json(); 48 | this.reports.sort((a, b) => b.date_begin - a.date_begin); 49 | this.filtered = this.filtered = urlParams.length > 0; 50 | } 51 | 52 | render() { 53 | return html` 54 |

Reports

55 |
56 | ${this.filtered ? 57 | html`Filter active! Show all Reports` : 58 | html`Filters: 59 | Reports with Problems 60 | Reports with DKIM Problems 61 | Reports with SPF Problems 62 | ` 63 | } 64 |
65 | 66 | `; 67 | } 68 | } 69 | 70 | customElements.define("drv-dmarc-reports", Reports); 71 | -------------------------------------------------------------------------------- /ui/components/mail-table.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html, css } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class MailTable extends LitElement { 5 | static styles = [globalStyle]; 6 | 7 | static properties = { 8 | mails: { type: Array }, 9 | }; 10 | 11 | constructor() { 12 | super(); 13 | this.mails = []; 14 | } 15 | 16 | prepareSubject(subject) { 17 | subject = subject.replace(/Report Domain: |Report domain: /, "D: "); 18 | subject = subject.replace(/Submitter: /, "S: "); 19 | subject = subject.replace(/Report-ID: /, "ID: "); 20 | 21 | const limit = 70; 22 | if (subject.length <= limit) { 23 | return subject; 24 | } else { 25 | return subject.substring(0, limit) + "..."; 26 | } 27 | } 28 | 29 | prepareSize(mail) { 30 | if (mail.oversized) { 31 | return html`${mail.size}`; 32 | } else { 33 | return mail.size; 34 | } 35 | } 36 | 37 | prepareXmlFileCount(mail) { 38 | if (mail.oversized) { 39 | return html`n/a`; 40 | } else if (mail.xml_files < 1) { 41 | return html`${mail.xml_files}`; 42 | } else { 43 | return html`${mail.xml_files}`; 44 | } 45 | } 46 | 47 | prepareParsingError(mail) { 48 | if (mail.oversized) { 49 | return html`n/a`; 50 | } else if (mail.parsing_errors > 0) { 51 | return html`Yes`; 52 | } else { 53 | return html`No`; 54 | } 55 | } 56 | 57 | render() { 58 | return html` 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | ${this.mails.length !== 0 ? this.mails.map((mail) => 69 | html` 70 | 71 | 72 | 73 | 74 | 75 | 76 | ` 77 | ) : html` 78 | 79 | ` 80 | } 81 |
Subject Sender Date Size XMLs Errors
${this.prepareSubject(mail.subject)}${mail.sender}${new Date(mail.date * 1000).toLocaleString()}${this.prepareSize(mail)}${this.prepareXmlFileCount(mail)}${this.prepareParsingError(mail)}
No mails found.
82 | `; 83 | } 84 | } 85 | 86 | customElements.define("drv-mail-table", MailTable); 87 | -------------------------------------------------------------------------------- /ui/components/mail.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html, css } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class Mail extends LitElement { 5 | static styles = [globalStyle, css` 6 | .error pre { 7 | border: 1px solid #e0e0e0; 8 | border-radius: 3px; 9 | background-color: #efefef; 10 | padding: 5px; 11 | } 12 | `]; 13 | 14 | static get properties() { 15 | return { 16 | uid: { type: String }, 17 | mail: { type: Object, attribute: false }, 18 | reports: { type: Array, attribute: false }, 19 | errors: { type: Array, attribute: false } 20 | }; 21 | } 22 | 23 | constructor() { 24 | super(); 25 | this.uid = null; 26 | this.mail = null; 27 | this.reports = []; 28 | this.errors = []; 29 | } 30 | 31 | async updated(changedProperties) { 32 | if (changedProperties.has("uid") && changedProperties.uid !== this.uid && this.uid) { 33 | const mailsResponse = await fetch("mails/" + this.uid); 34 | this.mail = await mailsResponse.json(); 35 | const reportsResponse = await fetch("dmarc-reports?uid=" + this.uid); 36 | this.reports = await reportsResponse.json(); 37 | const errorsResponse = await fetch("mails/" + this.uid + "/errors"); 38 | this.errors = await errorsResponse.json(); 39 | } 40 | } 41 | 42 | renderOversized(oversized) { 43 | if (oversized) { 44 | return html`Yes`; 45 | } else { 46 | return html`No`; 47 | } 48 | } 49 | 50 | render() { 51 | if (!this.mail) { 52 | return html`No mail loaded`; 53 | } 54 | 55 | return html` 56 |

Mail Details

57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 85 | 86 | 87 | 88 | 89 | 90 |
UID${this.mail.uid}
Size${this.mail.size} Bytes
Oversized${this.renderOversized(this.mail.oversized)}
Date${new Date(this.mail.date * 1000).toLocaleString()}
Subject${this.mail.subject}
Sender 81 | 82 | ${this.mail.sender} 83 | 84 |
Recipient${this.mail.to}
91 | 92 |

DMARC Reports

93 | 94 | 95 | ${this.errors.length > 0 ? 96 | html` 97 |

XML Parsing Errors

98 | ${this.errors.map((e) => 99 | html` 100 |
101 | ${e.error} 102 |
${e.xml}
103 |
` 104 | )}` 105 | : html`` 106 | } 107 | `; 108 | } 109 | } 110 | 111 | customElements.define("drv-mail", Mail); 112 | -------------------------------------------------------------------------------- /ui/components/mails.js: -------------------------------------------------------------------------------- 1 | import { LitElement, html } from "lit"; 2 | import { globalStyle } from "./style.js"; 3 | 4 | export class Mails extends LitElement { 5 | static styles = [globalStyle]; 6 | 7 | static properties = { 8 | params: { type: Object }, 9 | mails: { type: Array }, 10 | }; 11 | 12 | constructor() { 13 | super(); 14 | this.params = {}; 15 | this.mails = []; 16 | this.filtered = false; 17 | } 18 | 19 | updated(changedProperties) { 20 | if (changedProperties.has("params")) { 21 | this.updateMails(); 22 | } 23 | } 24 | 25 | async updateMails() { 26 | const queryParams = []; 27 | if (this.params.oversized === "true" || this.params.oversized === "false") { 28 | queryParams.push("oversized=" + this.params.oversized); 29 | } 30 | if (this.params.sender) { 31 | queryParams.push("sender=" + encodeURIComponent(this.params.sender)); 32 | } 33 | if (this.params.count) { 34 | queryParams.push("count=" + encodeURIComponent(this.params.count)); 35 | } 36 | if (this.params.errors === "true" || this.params.errors === "false") { 37 | queryParams.push("errors=" + this.params.errors); 38 | } 39 | let url = "mails"; 40 | if (queryParams.length > 0) { 41 | url += "?" + queryParams.join("&"); 42 | } 43 | const mailsResponse = await fetch(url); 44 | this.mails = await mailsResponse.json(); 45 | this.mails.sort((a, b) => b.date - a.date); 46 | this.filtered = this.filtered = queryParams.length > 0; 47 | } 48 | 49 | render() { 50 | return html` 51 |

Mails

52 |
53 | ${this.filtered ? 54 | html`Filter active! Show all Mails` : 55 | html`Filters: Oversized Mails 56 | Without XML Files 57 | Parsing Errors` 58 | } 59 |
60 | 61 | `; 62 | } 63 | } 64 | 65 | customElements.define("drv-mails", Mails); 66 | -------------------------------------------------------------------------------- /ui/components/style.js: -------------------------------------------------------------------------------- 1 | import { css } from "lit"; 2 | 3 | export const globalStyle = css` 4 | a { 5 | color: rgb(0, 123, 255); 6 | text-decoration: none; 7 | } 8 | 9 | a:hover { 10 | color: rgb(0, 86, 179); 11 | } 12 | 13 | .badge { 14 | border-radius: 3px; 15 | padding-left: 4px; 16 | padding-right: 4px; 17 | background-color: #888; 18 | color: white; 19 | } 20 | 21 | .badge-negative { 22 | background-color: rgb(220, 53, 69); 23 | } 24 | 25 | .badge-positive { 26 | background-color: rgb(25, 135, 84); 27 | } 28 | 29 | .faded { 30 | color: #ccc; 31 | } 32 | 33 | .help { 34 | cursor: help; 35 | text-decoration-line: underline; 36 | text-decoration-style: dotted; 37 | } 38 | 39 | table { 40 | width: 100%; 41 | margin-top: 15px; 42 | border-collapse: collapse; 43 | } 44 | 45 | th { 46 | color: #495057; 47 | background-color: #e9ecef; 48 | border-bottom: 2px solid #dee2e6; 49 | text-align: left; 50 | font-weight: 700; 51 | font-size: 17px; 52 | } 53 | 54 | td { 55 | border-top: 1px solid #dee2e6; 56 | } 57 | 58 | td, th { 59 | padding-left: 15px; 60 | padding-right: 15px; 61 | padding-top: 5px; 62 | padding-bottom: 5px; 63 | } 64 | 65 | tr:hover { 66 | background-color: #f4f4f4; 67 | } 68 | 69 | td.name { 70 | font-weight: 700; 71 | width: 175px; 72 | color: rgb(73, 80, 87); 73 | } 74 | 75 | h1, h2, h3 { 76 | padding: 0px; 77 | margin-top: 15px; 78 | margin-bottom: 15px; 79 | } 80 | 81 | h1 { 82 | margin-top: 0px; 83 | } 84 | 85 | .button { 86 | background: none; 87 | border: none; 88 | font: inherit; 89 | cursor: pointer; 90 | outline: inherit; 91 | display: inline-block; 92 | padding: 5px; 93 | padding-left: 8px; 94 | padding-right: 8px; 95 | margin-right: 10px; 96 | color: white; 97 | border-radius: 3px; 98 | background-color: rgb(108, 117, 125); 99 | margin-bottom: 3px; 100 | } 101 | 102 | .button:hover { 103 | color: white; 104 | background-color: rgb(90, 98, 104); 105 | } 106 | 107 | .button.sm { 108 | padding: 1px; 109 | padding-left: 4px; 110 | padding-right: 4px; 111 | margin-right: 5px; 112 | } 113 | 114 | .ml { 115 | margin-left: 10px; 116 | } 117 | 118 | .sourceip .name { 119 | padding-left: 40px; 120 | } 121 | 122 | .mr-5 { 123 | margin-right: 5px; 124 | } 125 | 126 | @media only screen and (max-width: 1100px) { 127 | .md-hidden { 128 | display: none; 129 | } 130 | } 131 | 132 | @media only screen and (max-width: 800px) { 133 | .sm-hidden { 134 | display: none; 135 | } 136 | } 137 | 138 | @media only screen and (max-width: 600px) { 139 | .xs-hidden { 140 | display: none; 141 | } 142 | } 143 | `; 144 | -------------------------------------------------------------------------------- /ui/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | DMARC Report Viewer & Analyzer 7 | 8 | 9 | 10 | 13 | 24 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /ui/lit-core.3.1.4.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2019 Google LLC 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | const t=globalThis,s=t.ShadowRoot&&(void 0===t.ShadyCSS||t.ShadyCSS.nativeShadow)&&"adoptedStyleSheets"in Document.prototype&&"replace"in 
CSSStyleSheet.prototype,i=Symbol(),e=new WeakMap;class o{constructor(t,s,e){if(this._$cssResult$=!0,e!==i)throw Error("CSSResult is not constructable. Use `unsafeCSS` or `css` instead.");this.cssText=t,this.t=s}get styleSheet(){let t=this.i;const i=this.t;if(s&&void 0===t){const s=void 0!==i&&1===i.length;s&&(t=e.get(i)),void 0===t&&((this.i=t=new CSSStyleSheet).replaceSync(this.cssText),s&&e.set(i,t))}return t}toString(){return this.cssText}}const h=t=>new o("string"==typeof t?t:t+"",void 0,i),r=(t,...s)=>{const e=1===t.length?t[0]:s.reduce(((s,i,e)=>s+(t=>{if(!0===t._$cssResult$)return t.cssText;if("number"==typeof t)return t;throw Error("Value passed to 'css' function must be a 'css' function result: "+t+". Use 'unsafeCSS' to pass non-literal values, but take care to ensure page security.")})(i)+t[e+1]),t[0]);return new o(e,t,i)},n=(i,e)=>{if(s)i.adoptedStyleSheets=e.map((t=>t instanceof CSSStyleSheet?t:t.styleSheet));else for(const s of e){const e=document.createElement("style"),o=t.litNonce;void 0!==o&&e.setAttribute("nonce",o),e.textContent=s.cssText,i.appendChild(e)}},c=s?t=>t:t=>t instanceof CSSStyleSheet?(t=>{let s="";for(const i of t.cssRules)s+=i.cssText;return h(s)})(t):t 7 | /** 8 | * @license 9 | * Copyright 2017 Google LLC 10 | * SPDX-License-Identifier: BSD-3-Clause 11 | */,{is:a,defineProperty:l,getOwnPropertyDescriptor:u,getOwnPropertyNames:d,getOwnPropertySymbols:f,getPrototypeOf:p}=Object,v=globalThis,m=v.trustedTypes,y=m?m.emptyScript:"",g=v.reactiveElementPolyfillSupport,_=(t,s)=>t,b={toAttribute(t,s){switch(s){case Boolean:t=t?y:null;break;case Object:case Array:t=null==t?t:JSON.stringify(t)}return t},fromAttribute(t,s){let i=t;switch(s){case Boolean:i=null!==t;break;case Number:i=null===t?null:Number(t);break;case Object:case Array:try{i=JSON.parse(t)}catch(t){i=null}}return i}},S=(t,s)=>!a(t,s),w={attribute:!0,type:String,converter:b,reflect:!1,hasChanged:S};Symbol.metadata??=Symbol("metadata"),v.litPropertyMetadata??=new WeakMap;class $ extends HTMLElement{static addInitializer(t){this.o(),(this.l??=[]).push(t)}static get observedAttributes(){return this.finalize(),this.u&&[...this.u.keys()]}static createProperty(t,s=w){if(s.state&&(s.attribute=!1),this.o(),this.elementProperties.set(t,s),!s.noAccessor){const i=Symbol(),e=this.getPropertyDescriptor(t,i,s);void 0!==e&&l(this.prototype,t,e)}}static getPropertyDescriptor(t,s,i){const{get:e,set:o}=u(this.prototype,t)??{get(){return this[s]},set(t){this[s]=t}};return{get(){return e?.call(this)},set(s){const h=e?.call(this);o.call(this,s),this.requestUpdate(t,h,i)},configurable:!0,enumerable:!0}}static getPropertyOptions(t){return this.elementProperties.get(t)??w}static o(){if(this.hasOwnProperty(_("elementProperties")))return;const t=p(this);t.finalize(),void 0!==t.l&&(this.l=[...t.l]),this.elementProperties=new Map(t.elementProperties)}static finalize(){if(this.hasOwnProperty(_("finalized")))return;if(this.finalized=!0,this.o(),this.hasOwnProperty(_("properties"))){const t=this.properties,s=[...d(t),...f(t)];for(const i of s)this.createProperty(i,t[i])}const t=this[Symbol.metadata];if(null!==t){const s=litPropertyMetadata.get(t);if(void 0!==s)for(const[t,i]of s)this.elementProperties.set(t,i)}this.u=new Map;for(const[t,s]of this.elementProperties){const i=this.p(t,s);void 0!==i&&this.u.set(i,t)}this.elementStyles=this.finalizeStyles(this.styles)}static finalizeStyles(t){const s=[];if(Array.isArray(t)){const i=new Set(t.flat(1/0).reverse());for(const t of i)s.unshift(c(t))}else void 0!==t&&s.push(c(t));return s}static 
p(t,s){const i=s.attribute;return!1===i?void 0:"string"==typeof i?i:"string"==typeof t?t.toLowerCase():void 0}constructor(){super(),this.v=void 0,this.isUpdatePending=!1,this.hasUpdated=!1,this.m=null,this._()}_(){this.S=new Promise((t=>this.enableUpdating=t)),this._$AL=new Map,this.$(),this.requestUpdate(),this.constructor.l?.forEach((t=>t(this)))}addController(t){(this.P??=new Set).add(t),void 0!==this.renderRoot&&this.isConnected&&t.hostConnected?.()}removeController(t){this.P?.delete(t)}$(){const t=new Map,s=this.constructor.elementProperties;for(const i of s.keys())this.hasOwnProperty(i)&&(t.set(i,this[i]),delete this[i]);t.size>0&&(this.v=t)}createRenderRoot(){const t=this.shadowRoot??this.attachShadow(this.constructor.shadowRootOptions);return n(t,this.constructor.elementStyles),t}connectedCallback(){this.renderRoot??=this.createRenderRoot(),this.enableUpdating(!0),this.P?.forEach((t=>t.hostConnected?.()))}enableUpdating(t){}disconnectedCallback(){this.P?.forEach((t=>t.hostDisconnected?.()))}attributeChangedCallback(t,s,i){this._$AK(t,i)}C(t,s){const i=this.constructor.elementProperties.get(t),e=this.constructor.p(t,i);if(void 0!==e&&!0===i.reflect){const o=(void 0!==i.converter?.toAttribute?i.converter:b).toAttribute(s,i.type);this.m=t,null==o?this.removeAttribute(e):this.setAttribute(e,o),this.m=null}}_$AK(t,s){const i=this.constructor,e=i.u.get(t);if(void 0!==e&&this.m!==e){const t=i.getPropertyOptions(e),o="function"==typeof t.converter?{fromAttribute:t.converter}:void 0!==t.converter?.fromAttribute?t.converter:b;this.m=e,this[e]=o.fromAttribute(s,t.type),this.m=null}}requestUpdate(t,s,i){if(void 0!==t){if(i??=this.constructor.getPropertyOptions(t),!(i.hasChanged??S)(this[t],s))return;this.T(t,s,i)}!1===this.isUpdatePending&&(this.S=this.A())}T(t,s,i){this._$AL.has(t)||this._$AL.set(t,s),!0===i.reflect&&this.m!==t&&(this.M??=new Set).add(t)}async A(){this.isUpdatePending=!0;try{await this.S}catch(t){Promise.reject(t)}const t=this.scheduleUpdate();return null!=t&&await t,!this.isUpdatePending}scheduleUpdate(){return this.performUpdate()}performUpdate(){if(!this.isUpdatePending)return;if(!this.hasUpdated){if(this.renderRoot??=this.createRenderRoot(),this.v){for(const[t,s]of this.v)this[t]=s;this.v=void 0}const t=this.constructor.elementProperties;if(t.size>0)for(const[s,i]of t)!0!==i.wrapped||this._$AL.has(s)||void 0===this[s]||this.T(s,this[s],i)}let t=!1;const s=this._$AL;try{t=this.shouldUpdate(s),t?(this.willUpdate(s),this.P?.forEach((t=>t.hostUpdate?.())),this.update(s)):this.k()}catch(s){throw t=!1,this.k(),s}t&&this._$AE(s)}willUpdate(t){}_$AE(t){this.P?.forEach((t=>t.hostUpdated?.())),this.hasUpdated||(this.hasUpdated=!0,this.firstUpdated(t)),this.updated(t)}k(){this._$AL=new Map,this.isUpdatePending=!1}get updateComplete(){return this.getUpdateComplete()}getUpdateComplete(){return this.S}shouldUpdate(t){return!0}update(t){this.M&&=this.M.forEach((t=>this.C(t,this[t]))),this.k()}updated(t){}firstUpdated(t){}}$.elementStyles=[],$.shadowRootOptions={mode:"open"},$[_("elementProperties")]=new Map,$[_("finalized")]=new Map,g?.({ReactiveElement:$}),(v.reactiveElementVersions??=[]).push("2.0.4"); 12 | /** 13 | * @license 14 | * Copyright 2017 Google LLC 15 | * SPDX-License-Identifier: BSD-3-Clause 16 | */ 17 | const P=globalThis,C=P.trustedTypes,T=C?C.createPolicy("lit-html",{createHTML:t=>t}):void 0,x="$lit$",A=`lit$${Math.random().toFixed(9).slice(2)}$`,M="?"+A,k=`<${M}>`,E=document,U=()=>E.createComment(""),N=t=>null===t||"object"!=typeof t&&"function"!=typeof 
t,O=Array.isArray,R=t=>O(t)||"function"==typeof t?.[Symbol.iterator],z="[ \t\n\f\r]",V=/<(?:(!--|\/[^a-zA-Z])|(\/?[a-zA-Z][^>\s]*)|(\/?$))/g,L=/-->/g,I=/>/g,j=RegExp(`>|${z}(?:([^\\s"'>=/]+)(${z}*=${z}*(?:[^ \t\n\f\r"'\`<>=]|("|')|))|$)`,"g"),D=/'/g,H=/"/g,B=/^(?:script|style|textarea|title)$/i,W=t=>(s,...i)=>({_$litType$:t,strings:s,values:i}),q=W(1),J=W(2),Z=Symbol.for("lit-noChange"),F=Symbol.for("lit-nothing"),G=new WeakMap,K=E.createTreeWalker(E,129);function Q(t,s){if(!Array.isArray(t)||!t.hasOwnProperty("raw"))throw Error("invalid template strings array");return void 0!==T?T.createHTML(s):s}const X=(t,s)=>{const i=t.length-1,e=[];let o,h=2===s?"":"",r=V;for(let s=0;s"===c[0]?(r=o??V,a=-1):void 0===c[1]?a=-2:(a=r.lastIndex-c[2].length,n=c[1],r=void 0===c[3]?j:'"'===c[3]?H:D):r===H||r===D?r=j:r===L||r===I?r=V:(r=j,o=void 0);const u=r===j&&t[s+1].startsWith("/>")?" ":"";h+=r===V?i+k:a>=0?(e.push(n),i.slice(0,a)+x+i.slice(a)+A+u):i+A+(-2===a?s:u)}return[Q(t,h+(t[i]||"")+(2===s?"":"")),e]};class Y{constructor({strings:t,_$litType$:s},i){let e;this.parts=[];let o=0,h=0;const r=t.length-1,n=this.parts,[c,a]=X(t,s);if(this.el=Y.createElement(c,i),K.currentNode=this.el.content,2===s){const t=this.el.content.firstChild;t.replaceWith(...t.childNodes)}for(;null!==(e=K.nextNode())&&n.length0){e.textContent=C?C.emptyScript:"";for(let i=0;i2||""!==i[0]||""!==i[1]?(this._$AH=Array(i.length-1).fill(new String),this.strings=i):this._$AH=F}_$AI(t,s=this,i,e){const o=this.strings;let h=!1;if(void 0===o)t=tt(this,t,s,0),h=!N(t)||t!==this._$AH&&t!==Z,h&&(this._$AH=t);else{const e=t;let r,n;for(t=o[0],r=0;r{const e=i?.renderBefore??s;let o=e._$litPart$;if(void 0===o){const t=i?.renderBefore??null;e._$litPart$=o=new it(s.insertBefore(U(),t),t,void 0,i??{})}return o._$AI(t),o}; 18 | /** 19 | * @license 20 | * Copyright 2017 Google LLC 21 | * SPDX-License-Identifier: BSD-3-Clause 22 | */class ut extends ${constructor(){super(...arguments),this.renderOptions={host:this},this.ht=void 0}createRenderRoot(){const t=super.createRenderRoot();return this.renderOptions.renderBefore??=t.firstChild,t}update(t){const s=this.render();this.hasUpdated||(this.renderOptions.isConnected=this.isConnected),super.update(t),this.ht=lt(s,this.renderRoot,this.renderOptions)}connectedCallback(){super.connectedCallback(),this.ht?.setConnected(!0)}disconnectedCallback(){super.disconnectedCallback(),this.ht?.setConnected(!1)}render(){return Z}}ut._$litElement$=!0,ut[("finalized","finalized")]=!0,globalThis.litElementHydrateSupport?.({LitElement:ut});const dt=globalThis.litElementPolyfillSupport;dt?.({LitElement:ut});const ft={_$AK:(t,s,i)=>{t._$AK(s,i)},_$AL:t=>t._$AL};(globalThis.litElementVersions??=[]).push("4.0.6"); 23 | /** 24 | * @license 25 | * Copyright 2022 Google LLC 26 | * SPDX-License-Identifier: BSD-3-Clause 27 | */ 28 | const pt=!1;export{o as CSSResult,ut as LitElement,$ as ReactiveElement,ft as _$LE,ct as _$LH,n as adoptStyles,r as css,b as defaultConverter,c as getCompatibleStyle,q as html,pt as isServer,Z as noChange,S as notEqual,F as nothing,lt as render,s as supportsAdoptingStyleSheets,J as svg,h as unsafeCSS}; 29 | --------------------------------------------------------------------------------
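Finally, for reference when reading ui/components/app.js above: navigation is driven purely by the URL hash, which onHashChange() splits into a route and optional query parameters. A short usage sketch follows; the route strings and parameter names are taken from the component code, while the concrete values are illustrative only:

```js
// Changing the hash is all that is needed to navigate; App.onHashChange()
// picks the component and parses optional query parameters.
window.location.hash = "#/dmarc-reports?org=" + encodeURIComponent("google.com");
// -> component = "dmarc-reports", params = { org: "google.com" }

window.location.hash = "#/mails/42";
// -> component = "mail", mailUid = "42" (everything after "#/mails/")

window.location.hash = "#/about";
// -> component = "about"; any unknown route falls back to "dashboard"
```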