├── .github └── workflows │ ├── codespell.excludelines │ ├── codespell.excludewords │ ├── codespell.yml │ ├── debian-package.yml │ ├── docs.yml │ ├── pycodestyle.yml │ ├── scripts │ ├── build-docs.sh │ └── debian-package.sh │ └── unittest.yml ├── .gitignore ├── AGPL-3 ├── CHANGELOG ├── COPYING ├── LGPL-2.1 ├── Makefile ├── NEWS.md ├── README.rst ├── debian ├── NEWS ├── changelog ├── compat ├── control ├── copyright ├── docs ├── intelmq-mailgen.docs ├── intelmq-mailgen.examples ├── intelmq-mailgen.install ├── rules └── source │ ├── format │ └── options ├── docs ├── Makefile ├── README.rst ├── concept.rst ├── conf.py ├── contactdb-design.png ├── index.rst ├── intelmqcbmail.rst ├── notification_overview.svg ├── requirements.txt └── scripts.rst ├── example_scripts ├── 00add_variables_to_context.py ├── 10shadowservercsv.py ├── 11malware-infection.py ├── 13vulnerable-service.py ├── 20xarf.py ├── 99fallback.py └── variables.json ├── extras └── dirty_setup │ ├── dirty_setup.sh │ ├── ds-templates │ ├── README │ ├── pipeline.conf │ └── runtime.conf │ └── mg-templates │ ├── template-DNS-open-resolvers.txt │ ├── template-NTP-Monitor.txt │ ├── template-Open-Chargen.txt │ ├── template-Open-Elasticsearch.txt │ ├── template-Open-IPMI.txt │ ├── template-Open-MSSQL.txt │ ├── template-Open-Memcached.txt │ ├── template-Open-MongoDB.txt │ ├── template-Open-NetBIOS.txt │ ├── template-Open-Portmapper.txt │ ├── template-Open-Redis.txt │ ├── template-Open-SNMP.txt │ ├── template-Open-SSDP.txt │ ├── template-Open-mDNS.txt │ ├── template-Ssl-Freak-Scan.txt │ ├── template-Ssl-Scan.txt │ └── template-generic_malware.txt ├── intelmq-mailgen.conf.example ├── intelmqmail ├── __init__.py ├── cb.py ├── db.py ├── mail.py ├── notification.py ├── script.py ├── tableformat.py └── templates.py ├── setup.cfg ├── setup.py ├── sql ├── notifications.sql └── updates.md ├── templates ├── example-template-dronereport.txt ├── example-template-sslfreak.txt └── example-template.txt └── tests ├── __init__.py ├── 
keys ├── Readme.md ├── test1.pub └── test1.sec ├── test_db.py ├── test_mail.py ├── test_notifications.py ├── test_script.py ├── test_sign.py ├── test_templates.py └── util.py /.github/workflows/codespell.excludelines: -------------------------------------------------------------------------------- 1 | # ./tests/test_sign.py:64: 2 | self.assertEqual(vsig.fpr, '5F503EFAC8C89323D54C252591B8CD7E15925678') 3 | # extras/dirty_setup/dirty_setup.sh: 4 | cat - >"$ghome/gpg.conf" < 4 | #SPDX-License-Identifier: AGPL-3.0-or-later 5 | # 6 | name: "Build Debian packages" 7 | on: 8 | push: 9 | pull_request: 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-24.04 14 | env: 15 | # Fixes https://github.com/actions/virtual-environments/issues/3080 16 | STORAGE_OPTS: overlay.mount_program=/usr/bin/fuse-overlayfs 17 | name: Build Debian packages 18 | strategy: 19 | matrix: 20 | codename: ['bullseye', 'bookworm'] 21 | 22 | steps: 23 | - name: Checkout repository 24 | uses: actions/checkout@v3 25 | 26 | - name: Build package 27 | run: bash .github/workflows/scripts/debian-package.sh ${{ matrix.codename }} 28 | 29 | - name: Upload artifact 30 | if: ${{ github.event_name == 'push' }} 31 | uses: actions/upload-artifact@v4 32 | with: 33 | name: debian-package-${{ matrix.codename }}-${{ github.sha }} 34 | path: '~/artifacts' 35 | retention-days: 5 36 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | #SPDX-FileCopyrightText: 2023 Intevation GmbH 2 | #SPDX-License-Identifier: AGPL-3.0-or-later 3 | # Template: https://github.com/actions/starter-workflows/blob/main/pages/hugo.yml 4 | # 5 | name: "Build and push docs" 6 | on: 7 | # Runs on pushes targeting the master branch 8 | push: 9 | branches: [master] 10 | 11 | # Allows you to run this workflow manually from the Actions tab 12 | workflow_dispatch: 13 | 14 | # Sets permissions of the GITHUB_TOKEN to 
allow deployment to GitHub Pages 15 | permissions: 16 | contents: read 17 | pages: write 18 | id-token: write 19 | 20 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. 21 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 22 | concurrency: 23 | group: "pages" 24 | cancel-in-progress: false 25 | 26 | jobs: 27 | build-docs: 28 | runs-on: ubuntu-latest 29 | name: Build the documentation 30 | 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v2 34 | - name: Setup Pages 35 | id: pages 36 | uses: actions/configure-pages@v3 37 | with: 38 | enablement: true 39 | 40 | - name: Set up Python 3.8 41 | uses: actions/setup-python@v2 42 | with: 43 | python-version: 3.8 44 | 45 | - name: Install requirements 46 | run: pip install -r docs/requirements.txt 47 | 48 | - name: Build docs 49 | run: make -C docs html 50 | 51 | - name: Upload artifact 52 | uses: actions/upload-pages-artifact@v3 53 | with: 54 | path: ./docs/_build/html/ 55 | 56 | # Deployment job 57 | deploy-docs: 58 | environment: 59 | name: github-pages 60 | url: ${{ steps.deployment.outputs.page_url }} 61 | runs-on: ubuntu-latest 62 | needs: build-docs 63 | steps: 64 | - name: Deploy to GitHub Pages 65 | id: deployment 66 | uses: actions/deploy-pages@v4 67 | -------------------------------------------------------------------------------- /.github/workflows/pycodestyle.yml: -------------------------------------------------------------------------------- 1 | #Github Workflow to run pycodestyle 2 | # 3 | #SPDX-FileCopyrightText: 2020 Birger Schacht 4 | #SPDX-License-Identifier: AGPL-3.0-or-later 5 | 6 | name: "Run pycodestyle on repository" 7 | 8 | on: [push, pull_request] 9 | 10 | jobs: 11 | pycodestyle: 12 | name: Run pycodestyle 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v2 18 | - name: Install pycodestyle 19 | run: | 20 | 
sudo apt update 21 | sudo apt install pycodestyle -y 22 | - name: Run pycodestyle 23 | run: | 24 | pycodestyle docs/ example_scripts/ extras/ intelmqmail/ sql/ templates/ tests/ 25 | -------------------------------------------------------------------------------- /.github/workflows/scripts/build-docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | pushd docs 4 | make html 5 | popd 6 | -------------------------------------------------------------------------------- /.github/workflows/scripts/debian-package.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # SPDX-FileCopyrightText: 2020 Birger Schacht 3 | # SPDX-License-Identifier: AGPL-3.0-or-later 4 | # 5 | # Bash script for a github action to build a Debian 6 | # package in a user-defined Debian container 7 | 8 | 9 | set -x 10 | set -e 11 | 12 | # A list of known Debian releases 13 | knowncodenames=("bullseye" "bookworm" "stable" "testing") 14 | 15 | # We want exactly one argument: the name of the release 16 | if (( $# != 1 )) 17 | then 18 | >&2 echo "Illegal number of parameters" 19 | exit 1 20 | fi 21 | 22 | codename=$1 23 | 24 | # check if the releasename is in the list of known Debian distributions 25 | validcodename=false 26 | for value in "${knowncodenames[@]}" 27 | do 28 | [[ "$codename" = "$value" ]] && validcodename=true 29 | done 30 | 31 | # If the release name is not valid, simply exit 32 | unknowncodename () { 33 | >&2 echo "Debian distribution not known. 
Valid arguments are: ${knowncodenames[*]}" 34 | exit 1 35 | } 36 | 37 | # Build the package in the container 38 | build () { 39 | codename=$1 40 | echo "Building on ${codename}" 41 | # run installation in buildah 42 | 43 | ARTIFACTEXTENSIONS=("deb" "xz" "dsc" "buildinfo" "changes") 44 | PARENT=$(dirname "${GITHUB_WORKSPACE}") 45 | echo "Building on ${codename} in ${GITHUB_WORKSPACE}" 46 | 47 | # fetch and configure the container 48 | CONTAINER=$(buildah from docker.io/debian:"${codename}"-slim) 49 | buildah config --workingdir "${GITHUB_WORKSPACE}" "${CONTAINER}" 50 | 51 | # install build dependencies in the container 52 | BR="buildah run -v ${PARENT}:${PARENT}" 53 | ${BR} "${CONTAINER}" apt-get update -qq 54 | ${BR} "${CONTAINER}" apt-get install dpkg-dev lintian -y 55 | ${BR} "${CONTAINER}" apt-get build-dep -y . 56 | 57 | ${BR} "${CONTAINER}" /bin/sh -c 'DEB_BUILD_OPTIONS=nocheck dpkg-buildpackage -us -uc -b' 58 | 59 | # create a directory for the artifacts 60 | # and copy the relevant files there 61 | mkdir -p "${HOME}/artifacts" 62 | for extension in "${ARTIFACTEXTENSIONS[@]}" 63 | do 64 | find "${PARENT}" -type f -name "*.${extension}" -exec cp '{}' "${HOME}/artifacts/" \; 65 | done 66 | } 67 | 68 | # check if release name is valid; build if it is, exit if it isn't 69 | if [ "$validcodename" = true ] 70 | then 71 | build "$codename" 72 | else 73 | unknowncodename 74 | fi 75 | -------------------------------------------------------------------------------- /.github/workflows/unittest.yml: -------------------------------------------------------------------------------- 1 | #SPDX-FileCopyrightText: 2023-2025 Intevation GmbH 2 | #SPDX-License-Identifier: AGPL-3.0-or-later 3 | # 4 | name: "Unit tests" 5 | on: [push, pull_request] 6 | 7 | jobs: 8 | unittests: 9 | runs-on: ubuntu-22.04 10 | name: Run unit tests with pytest 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] 15 | 16 | 17 | steps: 18 | 
- name: Checkout repository 19 | uses: actions/checkout@v2 20 | 21 | - name: Set up Python ${{ matrix.python-version }} 22 | uses: actions/setup-python@v2 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | 26 | - name: Install gpgme dev 27 | run: DEBIAN_FRONTEND=noninteractive sudo -E apt-get update -qq && DEBIAN_FRONTEND=noninteractive sudo -E apt-get install -y libgpgme-dev 28 | 29 | - name: Install test dependencies 30 | run: pip install pytest 31 | - run: echo $PYTHON 32 | - name: Install gpgme python bindings 33 | run: pip install gpgme 34 | 35 | - name: Install dependencies 36 | run: pip install -e . 37 | 38 | - name: Run basic testsuite 39 | run: pytest 40 | 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.bak 2 | *.swp 3 | *.log 4 | *.*~ 5 | __pycache__/ 6 | dist 7 | build 8 | .pybuild 9 | 10 | # packaging-related 11 | intelmqmail.egg-info/ 12 | debian/files 13 | debian/intelmq-mailgen.substvars 14 | debian/intelmq-mailgen/ 15 | 16 | # docs 17 | docs/_build/ 18 | docs/source/ 19 | -------------------------------------------------------------------------------- /CHANGELOG: -------------------------------------------------------------------------------- 1 | debian/changelog -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | IntelMQ Mailgen is Free Software under the GNU Affero GPL v>=3 4 | and comes with ABSOLUTELY NO WARRANTY! 5 | See AGPL-3 for details. 6 | 7 | Copyright (C) 2016-2021 by Bundesamt für Sicherheit in der Informationstechnik 8 | Software engineering by Intevation GmbH 9 | 10 | tests/util.py is Copyright (C) 2006 James Henstridge 11 | and Free Software under GNU LGPL v>=2.1. 12 | See source code header lines and LGPL-2.1 for details. 
13 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | check: 2 | pytest tests/ 3 | 4 | check_all: 5 | ALLTESTS=1 pytest tests/ -v 6 | 7 | .PHONY: check docs 8 | 9 | docs: 10 | make -C docs html 11 | 12 | pycodestyle: 13 | pycodestyle docs/ example_scripts/ extras/ intelmqmail/ sql/ templates/ tests/ 14 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | As this component is to be used and released together with others, see 2 | [intelmq-cb-mailgen/NEWS](https://github.com/Intevation/intelmq-mailgen-release). 3 | 4 | ## 1.3.7 5 | 6 | To use the `JSONB` type of IntelMQ's `extra` field directly without conversion, re-create these adjusted functions: 7 | 8 | ```sql 9 | CREATE OR REPLACE FUNCTION json_object_as_text_array(obj JSONB) 10 | RETURNS TEXT[][] 11 | AS $$ 12 | DECLARE 13 | arr TEXT[][] = '{}'::TEXT[][]; 14 | k TEXT; 15 | v TEXT; 16 | BEGIN 17 | FOR k, v IN 18 | SELECT * FROM jsonb_each_text(obj) ORDER BY key 19 | LOOP 20 | arr := arr || ARRAY[ARRAY[k, v]]; 21 | END LOOP; 22 | RETURN arr; 23 | END 24 | $$ LANGUAGE plpgsql IMMUTABLE; 25 | 26 | CREATE OR REPLACE FUNCTION insert_directive( 27 | event_id BIGINT, 28 | directive JSONB, 29 | endpoint ip_endpoint 30 | ) RETURNS VOID 31 | AS $$ 32 | DECLARE 33 | medium TEXT := directive ->> 'medium'; 34 | recipient_address TEXT := directive ->> 'recipient_address'; 35 | template_name TEXT := directive ->> 'template_name'; 36 | notification_format TEXT := directive ->> 'notification_format'; 37 | event_data_format TEXT := directive ->> 'event_data_format'; 38 | aggregate_identifier TEXT[][] 39 | := json_object_as_text_array(directive -> 'aggregate_identifier'); 40 | notification_interval interval 41 | := coalesce(((directive ->> 'notification_interval') :: INT) 42 | * 
interval '1 second', 43 | interval '0 second'); 44 | BEGIN 45 | IF medium IS NOT NULL 46 | AND recipient_address IS NOT NULL 47 | AND template_name IS NOT NULL 48 | AND notification_format IS NOT NULL 49 | AND event_data_format IS NOT NULL 50 | AND notification_interval IS NOT NULL 51 | AND notification_interval != interval '-1 second' 52 | THEN 53 | INSERT INTO directives (events_id, 54 | medium, 55 | recipient_address, 56 | template_name, 57 | notification_format, 58 | event_data_format, 59 | aggregate_identifier, 60 | notification_interval, 61 | endpoint) 62 | VALUES (event_id, 63 | medium, 64 | recipient_address, 65 | template_name, 66 | notification_format, 67 | event_data_format, 68 | aggregate_identifier, 69 | notification_interval, 70 | endpoint); 71 | END IF; 72 | END 73 | $$ LANGUAGE plpgsql VOLATILE; 74 | 75 | 76 | CREATE OR REPLACE FUNCTION directives_from_extra( 77 | event_id BIGINT, 78 | extra JSONB 79 | ) RETURNS VOID 80 | AS $$ 81 | DECLARE 82 | json_directives JSONB := extra -> 'certbund' -> 'source_directives'; 83 | directive JSONB; 84 | BEGIN 85 | IF json_directives IS NOT NULL THEN 86 | FOR directive 87 | IN SELECT * FROM jsonb_array_elements(json_directives) LOOP 88 | PERFORM insert_directive(event_id, directive, 'source'); 89 | END LOOP; 90 | END IF; 91 | END 92 | $$ LANGUAGE plpgsql VOLATILE; 93 | ``` 94 | 95 | ## 1.02 to 1.3.0 96 | 97 | * Changed dependency to use the official Python GnuPG bindings 98 | and drop support for old pygpgme bindings. 99 | * Dropped support for Python `v<=3.5.*` 100 | * Make depending on `pyxarf` module optional. 101 | -------------------------------------------------------------------------------- /debian/NEWS: -------------------------------------------------------------------------------- 1 | intelmq-mailgen (0.94~alpha3) unstable; urgency=medium 2 | 3 | The notification table scheme has been changed to include feed names which 4 | are used by intelmq-mailgen to group notifications by. 
5 | 6 | Due to this change, the notification table will have to be reinitialized 7 | after this update. 8 | 9 | -- Gernot Schulz Tue, 21 Jun 2016 14:50:38 +0200 10 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | intelmq-mailgen (1.3.8-1) UNRELEASED; urgency=medium 2 | 3 | * notifications: 4 | * allow to pass an existing ticket number to mail_format_as_csv 5 | 6 | -- Sebastian Wagner Wed, 30 Apr 2025 17:30:43 +0200 7 | 8 | intelmq-mailgen (1.3.7-1) stable; urgency=medium 9 | 10 | * cli interface: 11 | * do not commit the directive to db if only in preview-mode 12 | * better error message when format scripts did not generate a notification 13 | * in Dry runs and debug logging level, also log envelop to addresses if set 14 | * notifications: 15 | * allow for different envelope-to than header-to in mail_format_as_csv and EmailNotification 16 | * Packaging: 17 | * Add GitHub Workflows to build Debian Package for Debian 11 and Debian 12 18 | * Fix Build for Debian 12 / Ubuntu 24.04 (#47) 19 | * Documentation: 20 | * Include the README in the rendered documentation, converted to RST 21 | * Add conceptual overview graphics and some texts 22 | * add database schema visualization 23 | * convert sql/updates to markdown for readability 24 | * SQL/Database: 25 | * Fix script to create Type `ip_endpoint` as superuser 26 | * use JSONB of events.extra 27 | IntelMQ's eventdb events.extra switched from type JSON to JSONB: 28 | https://github.com/certtools/intelmq/pull/2597 29 | directly use the JSONB type in the functions 30 | * github workflows: 31 | * update some actions for compatibility, update runners 32 | * fix package build script 33 | * update python versions: remove 3.7, add 3.8-3.13 34 | * use gpgme from pypi instead of compiling ourself 35 | 36 | -- Sebastian Wagner Wed, 30 Apr 2025 15:03:19 +0200 37 | 38 | intelmq-mailgen (1.3.6-1) 
stable; urgency=medium 39 | 40 | * gnupg: set home dir with parameter, not env variable 41 | * Documentation: 42 | * add docs on format specs 43 | * small fixes 44 | * render API docs 45 | 46 | -- Sebastian Wagner Fri, 30 Aug 2024 18:54:13 +0200 47 | 48 | intelmq-mailgen (1.3.5-2) stable; urgency=medium 49 | 50 | * ScriptContext: Handle self.templates = None and read from directory 51 | (default with intelmqmail cli interface). 52 | 53 | -- Sebastian Wagner Tue, 16 Jan 2024 16:32:08 +0100 54 | 55 | intelmq-mailgen (1.3.5-1) stable; urgency=medium 56 | 57 | * allow setting default table format by calling scripts 58 | is used if the called script (template) does not set one 59 | set an internal fallback value, if neither a default is provided, nor 60 | the script sets a table format 61 | 62 | -- Sebastian Wagner Fri, 08 Sep 2023 12:19:14 +0200 63 | 64 | intelmq-mailgen (1.3.4-1) stable; urgency=medium 65 | 66 | * notifications: fix bug when templates are given as parameter 67 | due to a variable name clash, the loop over all templates, has set the 68 | fallback_template to a string, causing a type error 69 | 70 | -- Sebastian Wagner Fri, 30 Jun 2023 17:11:41 +0200 71 | 72 | intelmq-mailgen (1.3.3-1) stable; urgency=medium 73 | 74 | * Allow passing templates as parameters 75 | 76 | -- Sebastian Wagner Wed, 21 Jun 2023 15:11:50 +0200 77 | 78 | intelmq-mailgen (1.3.2-1) stable; urgency=medium 79 | 80 | * Documentation available at http://intevation.github.io/intelmq-mailgen/ 81 | * Enhancements to the library-mode: 82 | * Template can be passed as parameter to mailgen 83 | * dry run (simulation mode) 84 | * Preview mode: returning notifications as string to the caller instead of sending them via SMTP 85 | * `additional_directive_where` as parameter 86 | * Existing database connection can be passed as parameter to mailgen 87 | * Many small style and formatting fixes 88 | 89 | -- Sebastian Wagner Wed, 31 May 2023 15:55:13 +0200 90 | 91 | intelmq-mailgen (1.3.1-1) stable; 
urgency=medium 92 | 93 | * use sys.exit instead of exit 94 | * `additional_directive_where`: allow to use events 95 | * new parameter additional_directive_where 96 | * add parameter --verbose for debug logging 97 | * add option to load custom config file per parameter 98 | * better help messages in program 99 | 100 | -- Sebastian Wagner Wed, 15 Mar 2023 11:28:24 +0100 101 | 102 | intelmq-mailgen (1.3.0-3) stable; urgency=medium 103 | 104 | * Remove postgresql dependency. mailgen only needs the client on the same 105 | machine, not the server. 106 | * remove recommended python3-pyxarf, is unavailable 107 | fixes #45 108 | * Add pkg-resources as runtime dependency 109 | 110 | -- Sebastian Wagner Tue, 31 Jan 2023 09:51:07 +0100 111 | 112 | intelmq-mailgen (1.3.0-2) stable; urgency=medium 113 | 114 | * Remove python package's dependency on 'gpg'. 115 | Due to a bug in python3-gpg of Ubuntu Focal, intelmqcbmail would always 116 | fail on start. 117 | See https://bugs.launchpad.net/ubuntu/+source/gpgme1.0/+bug/1977645 118 | 119 | -- Sebastian Wagner Wed, 15 Jun 2022 14:36:56 +0200 120 | 121 | intelmq-mailgen (1.3.0-1) stable; urgency=medium 122 | 123 | * Add revision. 124 | 125 | -- Sebastian Wagner Fri, 10 Jun 2022 17:36:29 +0200 126 | 127 | intelmq-mailgen (1.3.0) unstable; urgency=medium 128 | 129 | * Changed dependency to use the official Python GnuPG bindings 130 | and drop support for old pygpgme bindings. 131 | * Dropped support for Python `v<=3.5.*` 132 | * Make depending on `pyxarf` module optional. 
133 | 134 | -- Sascha Wilde Mon, 24 May 2021 19:16:13 +0200 135 | 136 | intelmq-mailgen (1.02) unstable; urgency=medium 137 | 138 | * Improve performance: 139 | - Assume newest inserted_at correlates to newest sent_at 140 | * Store timestamp of email Date header in sent 141 | 142 | -- Sascha Wilde Fri, 12 Jul 2019 10:04:01 +0200 143 | 144 | intelmq-mailgen (1.01) unstable; urgency=medium 145 | 146 | * To improve speed, determine last_sent over the last month only 147 | * Allow log-level to be set for the intelmqmail.cb logger, too and add 148 | some more debug log output 149 | 150 | -- Sascha Wilde Thu, 27 Jun 2019 18:08:05 +0200 151 | 152 | intelmq-mailgen (1.00~rc3) unstable; urgency=medium 153 | 154 | * Allow passing a logger instance to load_scripts. 155 | This is needed by current version of intelmq-certbund-contact 156 | 157 | -- Sascha Wilde Wed, 12 Sep 2018 15:45:25 +0200 158 | 159 | intelmq-mailgen (1.00~rc2) unstable; urgency=medium 160 | 161 | * No microseconds in time stamps 162 | * Explicitly generate a Message-Id for notification mails 163 | * Make it easier to use items from the extra field in table formats 164 | 165 | -- Gernot Schulz Mon, 13 Aug 2018 16:12:56 +0200 166 | 167 | intelmq-mailgen (1.00~rc1) unstable; urgency=medium 168 | 169 | * Enforce quoted-printable for text MIME parts. 170 | This enforces splitting of long lines on transport. 171 | * Start a new SMTP session more often. This fixes problems with session 172 | Tmieouts when big amounts of directives are processerd in a batch. 173 | 174 | -- Sascha Wilde > Wed, 07 Feb 2018 16:51:58 +0100 175 | 176 | intelmq-mailgen (0.99~rc5) unstable; urgency=medium 177 | 178 | * Extended data format for avalanche. 179 | * Extended and improved documentation. 
180 | 181 | -- Sascha Wilde Mon, 17 Jul 2017 17:55:25 +0200 182 | 183 | intelmq-mailgen (0.99~rc4) unstable; urgency=medium 184 | 185 | * New release candidate 186 | * Update maintainer 187 | 188 | -- Gernot Schulz Fri, 16 Jun 2017 11:53:20 +0200 189 | 190 | intelmq-mailgen (0.99~rc3) unstable; urgency=medium 191 | 192 | * Updates rules. 193 | 194 | -- Sascha Wilde Thu, 20 Apr 2017 13:07:57 +0200 195 | 196 | intelmq-mailgen (0.99~rc2) unstable; urgency=high 197 | 198 | * CSV attachments 199 | * OpenPGP/MIME signatures 200 | 201 | -- Gernot Schulz Mon, 10 Apr 2017 15:53:10 +0200 202 | 203 | intelmq-mailgen (0.99~rc1) unstable; urgency=high 204 | 205 | * The functionality from db-api was removed and moved to new repo (and 206 | package) intelmq-fody-api. 207 | * New notification concept. 208 | * Preliminary Xarf Support 209 | * New, more flexible configuration concept. 210 | 211 | -- Sascha Wilde Thu, 30 Mar 2017 15:16:39 +0200 212 | 213 | intelmq-mailgen (0.95.1) unstable; urgency=low 214 | 215 | * Added Db-api backends for Fody to packaging. 216 | 217 | -- Sascha Wilde Mon, 06 Mar 2017 17:01:15 +0100 218 | 219 | intelmq-mailgen (0.95) testing; urgency=medium 220 | 221 | * Release 0.95 222 | 223 | -- Sascha Wilde Tue, 31 Jan 2017 18:58:59 +0100 224 | 225 | intelmq-mailgen (0.95~rc2) unstable; urgency=low 226 | 227 | * Added fix for #864 (Shadowserver Open-LDAP key mapping) 228 | 229 | -- Sascha Wilde Mon, 30 Jan 2017 18:10:51 +0100 230 | 231 | intelmq-mailgen (0.95~rc1) unstable; urgency=low 232 | 233 | * Customer changes 234 | * Work with current intelmq 235 | 236 | -- Sascha Wilde Fri, 27 Jan 2017 12:47:18 +0100 237 | 238 | intelmq-mailgen (0.95~beta2) unstable; urgency=low 239 | 240 | * Removed amplification field from cvs in mssql report. 241 | 242 | -- Sascha Wilde Mon, 21 Nov 2016 16:58:33 +0100 243 | 244 | intelmq-mailgen (0.95~beta1) unstable; urgency=low 245 | 246 | * New release 247 | * Various fixes and refactoring. 
248 | 249 | -- Sascha Wilde Fri, 18 Nov 2016 12:55:45 +0100 250 | 251 | intelmq-mailgen (0.94~beta5) unstable; urgency=low 252 | 253 | * New release 254 | 255 | -- Gernot Schulz Tue, 09 Aug 2016 16:26:15 +0200 256 | 257 | intelmq-mailgen (0.94~beta4) unstable; urgency=low 258 | 259 | * New release 260 | * Check for appropriate intelmq version 261 | 262 | -- Gernot Schulz Thu, 04 Aug 2016 16:06:24 +0200 263 | 264 | intelmq-mailgen (0.94~beta2) unstable; urgency=low 265 | 266 | * New release 267 | * Add Open-MSSQL and Open-Mongo DB feed formats 268 | 269 | -- Gernot Schulz Mon, 18 Jul 2016 13:40:49 +0200 270 | 271 | intelmq-mailgen (0.94~beta1) unstable; urgency=low 272 | 273 | * New release 274 | * Improved documentation 275 | * Simplified code to send mails with cvs data for a limited selection of 276 | feeds. 277 | 278 | -- Sascha Wilde Wed, 13 Jul 2016 17:21:59 +0200 279 | 280 | intelmq-mailgen (0.94~alpha6) unstable; urgency=low 281 | 282 | * New release 283 | 284 | -- Bernhard Reiter Fri, 08 Jul 2016 09:30:00 +0200 285 | 286 | intelmq-mailgen (0.94~alpha5) unstable; urgency=medium 287 | 288 | * Add an example template 289 | 290 | -- Gernot Schulz Fri, 01 Jul 2016 18:25:40 +0200 291 | 292 | intelmq-mailgen (0.94~alpha4) unstable; urgency=medium 293 | 294 | * New release 295 | 296 | -- Gernot Schulz Fri, 01 Jul 2016 11:07:30 +0200 297 | 298 | intelmq-mailgen (0.94~alpha3) unstable; urgency=medium 299 | 300 | * New release (requires reinitialization of the notification table!) 
301 | * Fix license information 302 | * Update maintainer information 303 | * Run tests when building package 304 | 305 | -- Gernot Schulz Tue, 21 Jun 2016 14:32:50 +0200 306 | 307 | intelmq-mailgen (0.94~alpha2) unstable; urgency=medium 308 | 309 | * New upstream release 310 | 311 | -- Gernot Schulz Wed, 08 Jun 2016 13:10:52 +0200 312 | 313 | intelmq-mailgen (0.94~alpha1) unstable; urgency=medium 314 | 315 | * New upstream release 316 | * New dependencies: gnupg2, python3-gpgme 317 | 318 | -- Gernot Schulz Fri, 03 Jun 2016 15:47:54 +0200 319 | 320 | intelmq-mailgen (0.93~alpha1) unstable; urgency=medium 321 | 322 | * Mark as alpha again, as most functionality is still inkomplete. 323 | * Merged in xarf and related code changes. 324 | 325 | -- Sascha Wilde Fri, 27 May 2016 10:52:55 +0200 326 | 327 | intelmq-mailgen (0.92~rc1) unstable; urgency=medium 328 | 329 | * Don't require both (user and system) configuration files. 330 | 331 | -- Sascha Wilde Mon, 25 May 2016 11:15:00 +0200 332 | 333 | intelmq-mailgen (0.91~rc1) unstable; urgency=high 334 | 335 | * Added generic csv output for unknown classification types. 336 | 337 | -- Sascha Wilde Mon, 25 May 2016 10:43:00 +0200 338 | 339 | intelmq-mailgen (0.90~rc2) unstable; urgency=low 340 | 341 | * Drop dependency on intelmq. 342 | 343 | -- Sascha Wilde Mon, 24 May 2016 11:44:37 +0200 344 | 345 | intelmq-mailgen (0.90~rc1) unstable; urgency=low 346 | 347 | * New Release. 348 | 349 | -- Sascha Wilde Mon, 23 May 2016 15:46:37 +0200 350 | 351 | intelmq-mailgen (0.90~alpha1) UNRELEASED; urgency=medium 352 | 353 | * Initial release. 
(2auto/issue20) 354 | 355 | -- Sascha Wilde Tue, 17 May 2016 16:48:03 +0200 356 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 10 2 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: intelmq-mailgen 2 | Maintainer: Sascha Wilde 3 | Section: net 4 | Priority: optional 5 | Build-Depends: dh-python, python3-setuptools, python3, debhelper (>= 9), python3-psycopg2 6 | Standards-Version: 3.9.5.0 7 | 8 | Package: intelmq-mailgen 9 | Architecture: all 10 | Depends: ${misc:Depends}, python3-psycopg2, ${python3:Depends}, 11 | gnupg (>=2.2), python3-gpg, python3-pkg-resources 12 | Recommends: pinentry-curses 13 | Breaks: intelmq (<< 1.0.0~dev6+intevation.1.2~rc1) 14 | Description: Generate and send emails using data from the IntelMQ ContactDB. 15 | . 16 | The tool processes events written to the PostgreSQL database and send 17 | email notifications based on data from the IntelMQ Contacts Database. 
18 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: intelmq-mailgen 3 | Upstream-Contact: 4 | Source: https://github.com/Intevation/intelmq-mailgen.git 5 | 6 | Files: intelmqmail/cb.py 7 | Copyright: 2016 Aaron Kaplan, cert.at, Intevation GmbH 8 | License: AGPL-3+ 9 | 10 | Files: tests/util.py 11 | Copyright: 2006 James Henstridge 12 | License: LGPL-2.1+ 13 | 14 | Files: * 15 | Copyright: 2016 Intevation GmbH 16 | License: AGPL-3+ 17 | 18 | License: LGPL-2.1+ 19 | This library is free software; you can redistribute it and/or 20 | modify it under the terms of the GNU Lesser General Public 21 | License as published by the Free Software Foundation; either 22 | version 2.1 of the License, or (at your option) any later version. 23 | . 24 | This library is distributed in the hope that it will be useful, 25 | but WITHOUT ANY WARRANTY; without even the implied warranty of 26 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 27 | Lesser General Public License for more details. 28 | . 29 | You should have received a copy of the GNU Lesser General Public 30 | License along with this library; if not, write to the Free Software 31 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 32 | . 33 | On Debian systems, the full text of the GNU Lesser General Public 34 | License version 2.1 can be found in the file 35 | `/usr/share/common-licenses/LGPL-2.1'. 36 | 37 | License: AGPL-3+ 38 | This program is free software: you can redistribute it and/or modify 39 | it under the terms of the GNU Affero General Public License as published 40 | by the Free Software Foundation, either version 3 of the License, or 41 | (at your option) any later version. 42 | . 
43 | This program is distributed in the hope that it will be useful, 44 | but WITHOUT ANY WARRANTY; without even the implied warranty of 45 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 46 | GNU General Public License for more details. 47 | . 48 | You should have received a copy of the GNU General Public License 49 | along with this program. If not, see . 50 | . 51 | In this package the license is included as /usr/share/intelmq-mailgen/AGPL-3 52 | -------------------------------------------------------------------------------- /debian/docs: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /debian/intelmq-mailgen.docs: -------------------------------------------------------------------------------- 1 | NEWS.md 2 | AGPL-3 3 | -------------------------------------------------------------------------------- /debian/intelmq-mailgen.examples: -------------------------------------------------------------------------------- 1 | intelmq-mailgen.conf.example 2 | templates/example-template-dronereport.txt 3 | templates/example-template-sslfreak.txt 4 | templates/example-template.txt 5 | -------------------------------------------------------------------------------- /debian/intelmq-mailgen.install: -------------------------------------------------------------------------------- 1 | sql/* usr/share/intelmq-mailgen/sql/ 2 | tests/* usr/share/intelmq-mailgen/tests/ 3 | intelmq-mailgen.conf.example etc/intelmq/ 4 | example_scripts/* etc/intelmq/mailgen/formats/ 5 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | export PYBUILD_NAME=intelmqmail 4 | %: 5 | dh $@ --with python3 --without python2 --buildsystem=pybuild 6 | 7 | 8 | override_dh_install: 9 | dh_install 10 | dh_installexamples 11 
| # Fix paths for Debian 12 | mkdir -p debian/intelmq-mailgen/usr/share/doc/intelmq-mailgen 13 | sed 's@src/\(intelmq-mailgen/tests/keys/test1.sec\)@/usr/share/\1@; s@sql/notifications.sql@/usr/share/intelmq-mailgen/sql/notifications.sql@; s@intelmq-mailgen.conf.example@/usr/share/doc/intelmq-mailgen/examples/intelmq-mailgen.conf.example@' \ 14 | README.rst \ 15 | > debian/intelmq-mailgen/usr/share/doc/intelmq-mailgen/README.rst 16 | 17 | # vim :set noet sts=0 sw=2 ts=2: 18 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 1.0 2 | -------------------------------------------------------------------------------- /debian/source/options: -------------------------------------------------------------------------------- 1 | tar-ignore = ".git" 2 | tar-ignore = ".gitignore" 3 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/IntelMQMailgen.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/IntelMQMailgen.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/IntelMQMailgen" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/IntelMQMailgen" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 
110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 
157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/README.rst: -------------------------------------------------------------------------------- 1 | ../README.rst -------------------------------------------------------------------------------- /docs/concept.rst: -------------------------------------------------------------------------------- 1 | Concept: Handling notifications with IntelMQ Mailgen 2 | ==================================================== 3 | 4 | .. 
image:: notification_overview.svg 5 | 6 | certbund-contact expert: 7 | queries the contact db, adds all resulting contacts, including all tags, to the event 8 | certbund-rule expert: 9 | based on the rules, decides whom to send the notification 10 | prioritization of contacts: automatic vs manual contacts, more specific vs less specific contacts 11 | how often (notification interval) 12 | -> high priority classifications or VIP networks may have shorter intervals 13 | which format (CSV inline, CSV attachment, XARF) 14 | -> may depend on the contact or data 15 | which template 16 | -> may depend on the classification/feed and the recipient 17 | result: *"Directives"* 18 | postgresql output and database: 19 | allows asynchronous and batched processing of notifications 20 | intelmqmail (command line) tool 21 | formats, applied to directives 22 | format the notification (generates e-mail from template, substitutes variables, defines CSV columns) 23 | may postpone the sending, e.g. based on the directive age 24 | e-mail templates 25 | 26 | Overview 27 | -------- 28 | 29 | When sending notifications based on the events processed by IntelMQ, the 30 | system---consisting of both IntelMQ and Mailgen---has to decide for each 31 | event who is to be notified when about the event and in which way. This 32 | information is represented by a `notification directive` that contains 33 | all the information needed for sending the notifications for an event. 34 | IntelMQ Mailgen processes these directives and sends mails accordingly. 35 | 36 | The directives are created by the IntelMQ side of the system. Expert 37 | bots in IntelMQ read contact information from a database (the CERT-bund 38 | Contact Database bot) and decide based on that information and the other 39 | event data which directives to create for an individual event (the 40 | CERT-bund Contact Rules bot). 
The directives are then written to the 41 | event database by IntelMQ's ``postgresql`` output bot automatically as 42 | part of the event data. 43 | 44 | IntelMQ Mailgen reads the directives that have not yet been processed 45 | from the database, aggregates them when possible, and generates and 46 | sends mails when the time has come. When mails are sent, the sending 47 | time and some other information such as the ticket number is stored in 48 | the database, indicating that the directive has been processed and to 49 | allow any questions the recipients of the mails may have to be linked to 50 | the events the notification was about. 51 | 52 | Both the rules expert bot and Mailgen can be configured with python 53 | scripts that are run for all events and directives respectively. 54 | 55 | The rest of this part of the document describes this in more detail. 56 | 57 | 58 | Notification Directives 59 | ----------------------- 60 | 61 | A `notification directive` describes one notification for one event. 62 | There may be zero or more directives for each event, though. Its 63 | meaning is something like "Send a mail to foo@example.com using template 64 | T once a day and include the event data as CSV". This meaning is encoded 65 | in the attributes listed in the next section. The precise meaning is 66 | ultimately up to conventions used by both the scripts in the rule expert 67 | bot which generate the directives and the scripts in mailgen which 68 | interpret them. 69 | 70 | Mailgen can aggregate directives, that is it can process directives for 71 | different events as if it were one directive for all those events, if 72 | the directives are similar enough. Two directives are considered similar 73 | enough in this sense when most of their attributes are equal. For 74 | instance, the address of the recipient, the data format and the template 75 | must be equal, otherwise it would not make sense to aggregate them at 76 | all. 
There are some other considerations, see :ref:`aggregation` for 77 | details. 78 | 79 | 80 | Attributes 81 | .......... 82 | 83 | A directive contains the following information (the names of these 84 | attributes are the actual identifiers used in the code): 85 | 86 | :recipient_address: The email address of the recipient. 87 | :notification_format: The main format of the notification. 88 | :template_name: The name of the template for the contents of the 89 | notification. 90 | :event_data_format: The format to use for event data included in the 91 | notification. 92 | :aggregate_identifier: Additional key/value pairs used when 93 | aggregating directives. Both keys and values 94 | are strings. (See also :ref:`aggregation`) 95 | :notification_interval: Interval between notifications for similar 96 | events (see 97 | :ref:`aggregation_and_notification_interval`) 98 | 99 | 100 | The three attributes ``notification_format``, ``template_name`` and 101 | ``event_data_format`` are the main parameters that define the contents 102 | of the generated mails. The ``notification_format`` is intended to name 103 | the overall format of the mail and the template and data format specify 104 | some of the details. 105 | 106 | There's some other information related to directives once they're in the 107 | event database: 108 | 109 | ``inserted_at`` 110 | 111 | Date/Time when the directive was added to the event database. 112 | 113 | Event ID 114 | 115 | The ID in the event database of the event the directive belongs 116 | to. 117 | 118 | Ticket number 119 | 120 | A unique identifier included in notifications sent by mailgen 121 | that can be used to identify which directives (and therefore 122 | which events) were included in a particular notification that a 123 | recipient may have questions about. 124 | 125 | The ticket number is generated when mailgen actually generates a 126 | mail for a given directive. 127 | 128 | Sent At 129 | 130 | Date/time when the notification was sent. 
131 | 132 | 133 | 134 | .. _aggregation: 135 | 136 | Aggregation 137 | ........... 138 | 139 | For two directives to be considered similar enough to be aggregated, all 140 | of these attributes must be equal: 141 | 142 | * ``recipient_address`` 143 | * ``template_name`` 144 | * ``notification_format`` 145 | * ``event_data_format`` 146 | * ``aggregate_identifier`` 147 | 148 | That the first four of these must be equal is obvious enough. They 149 | directly influence the contents of the mails. The aggregate identifier 150 | is a collection of key/value pairs that can be used by the rule in the 151 | rule expert bot to further control how directives are aggregated. For 152 | example, you could aggregate directives for events with the same 153 | ``classification.type``. The key/value pairs are available in the 154 | mailgen scripts when the directives are processed and can be referenced 155 | in templates. 156 | 157 | 158 | .. _aggregation_and_notification_interval: 159 | 160 | Aggregation and notification intervals 161 | ...................................... 162 | 163 | Aggregation only makes sense if directives are not processed immediately 164 | in order to let directives accumulate for a while. The main parameter in 165 | a directive that can be used to control this is the 166 | ``notification_interval`` attribute which holds the minimum duration 167 | between two similar notifications, where similar means exactly the same 168 | thing as for aggregation. How this is interpreted exactly, and whether 169 | this or some other criterion is used, is up to the scripts in mailgen, 170 | however. 171 | 172 | 173 | Mailgen 174 | ------- 175 | 176 | Mailgen reads directives from the event database, processes them and 177 | sends mail. In particular, it performs these steps: 178 | 179 | 1. Load the scripts from the script directory (see :ref:`mailgen_scripts`) 180 | 181 | 2. Read the aggregated pending directives from the database 182 | 183 | 3. 
For each group of directives, perform the following steps: 184 | 185 | 1. call each script and if one of the scripts generates a message, 186 | stop processing (see :ref:`mailgen_scripts`) 187 | 188 | 2. Send the messages 189 | 190 | 3. Mark the messages as sent in the database, recording the 191 | date/time when the message was sent. 192 | 193 | `Pending directives` are the directives for which no mail has been sent 194 | yet. Aggregation is done according to the criteria described in 195 | :ref:`aggregation`. 196 | 197 | For each group of directives some more attributes are read from the 198 | database in addition to the attributes that were used for aggregation: 199 | 200 | :last_sent: When the last similar mail was sent (see 201 | :ref:`aggregation_and_notification_interval`) 202 | :inserted_at: When the newest of the directives in the group was 203 | added to the database. 204 | :event_ids: A list with the database IDs of all the events whose 205 | directives have been accumulated in the group 206 | :directive_ids: A list with the database IDs of all the directives 207 | that have been accumulated in the group 208 | :notification_interval: The longest of the ``notification_interval`` 209 | values of all the directives in the group. 210 | 211 | 212 | 213 | .. _mailgen_scripts: 214 | 215 | Mailgen Scripts 216 | ............... 217 | 218 | Most of the logic for handling the directives is implemented with python 219 | scripts, like the examples in the ``example_scripts/`` subdirectory. 220 | When mailgen is started it reads all the python files in the configured 221 | script directory that have names starting with two decimal digits. 222 | 223 | Each of the scripts must define a function called 224 | ``create_notifications``. Mailgen calls this function with a ``Context`` 225 | object as parameter which provides access to the group of directives 226 | being processed (see the doc-strings in 227 | ``intelmqmail/notification.py``). 
The function is expected to return one 228 | of three possible results: 229 | 230 | ``None`` 231 | 232 | Indicates that the script is not interested in processing the 233 | directive. 234 | 235 | A list of ``EmailNotification`` objects 236 | 237 | Each of these objects represents a complete email that has not 238 | been sent yet. Typically the script uses helper methods on the 239 | context object to create these, like ``mail_format_as_csv`` (see 240 | the doc-strings for details) 241 | 242 | ``Postponed`` 243 | 244 | A predefined constant in the ``intelmqmail.notification`` 245 | module. This constant indicates that the script would handle the 246 | directive if sufficient time has passed. For instance, it may 247 | return this constant if the time that passed since the 248 | ``last_sent`` date is shorter than the 249 | ``notification_interval``. 250 | 251 | 252 | When mailgen processes a group of directives, it calls the 253 | ``create_notifications`` function of each of the scripts in turn in 254 | alphabetical order of the script name (hence the two leading digits that 255 | provide a simple way to order the scripts). Mailgen stops once one of 256 | the functions returns something other than None. If the return value is a 257 | list of ``EmailNotification`` objects, mailgen sends those mails as 258 | described in :ref:`mailgen_sending_mails`. 259 | 260 | 261 | 262 | 263 | Contact-DB Bot 264 | -------------- 265 | 266 | On the other end of the notification processing is the `Contact-DB bot`. 267 | This expert bot in IntelMQ reads contact information from the contact 268 | database and adds it to the event. This is done twice, once for contacts 269 | related to the source of the event and once for the destination, 270 | yielding two sets of contact information. Each set uses these types of 271 | data: 272 | 273 | matches 274 | 275 | These describe which parts of the event matched some entry in 276 | the database. 
This is the field name without the `source.` or 277 | `destination.` prefix and the ID of the organisation it belongs 278 | to. For network matches it also contains the network address 279 | because in this case the field does not contain the same 280 | information because a match means that the IP address in the 281 | event is contained in the network. 282 | 283 | organisations 284 | 285 | An organisation links the matches with the actual contact 286 | information. 287 | 288 | contacts 289 | 290 | An actual contact which is mostly just an email address. 291 | 292 | annotations 293 | 294 | Matches, organisation and contacts may have any number of 295 | annotations. Annotations have a tag (just a string) and an 296 | optional condition. The condition is a simple comparison of an 297 | event field with a constant. The idea is that the annotation 298 | should only be used to make decisions about notifications when 299 | the condition is true. 300 | 301 | 302 | 303 | Rule-Expert Bot 304 | --------------- 305 | 306 | This expert bot makes the decisions about the notifications. It takes an 307 | event with contact information added by the contact db bot and generates 308 | directives based on that contact information and the event data. 309 | 310 | In order to be flexible this bot uses python scripts in very much the 311 | same way as mailgen. In the rule expert bot, the function is called 312 | `determine_directives` and like in mailgen gets a context object as 313 | parameter. The class is different, of course, this time it's `Context` 314 | in `intelmq.bots.experts.certbund_contact.rulesupport`. The context 315 | object provides access to the event data and the contact information. 316 | The script should examine the information and depending on what it 317 | finds, create directives and add them to the context. The return value 318 | of the `determine_directives` function is a boolean. Returning true 319 | means that no further scripts should be executed. 
320 | 321 | There are some example scripts in 322 | `intelmq/bots/experts/certbund_contact/example-rules/` which demonstrate 323 | how to write such scripts. 324 | 325 | .. _mailgen_sending_mails: 326 | 327 | Mailgen sending mails 328 | --------------------- 329 | 330 | TODO 331 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # IntelMQ Mailgen documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Jun 21 15:46:29 2017. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another directory, 16 | # add these directories to sys.path here. If the directory is relative to the 17 | # documentation root, use os.path.abspath to make it absolute, like shown here. 18 | # sys.path.insert(0, os.path.abspath('.')) 19 | 20 | import sys 21 | import os 22 | import subprocess 23 | 24 | 25 | sys.path.insert(0, os.path.abspath('../')) # make intelmqmail importable by apidoc 26 | 27 | 28 | # -- General configuration ------------------------------------------------ 29 | 30 | # If your documentation needs a minimal Sphinx version, state it here. 31 | # needs_sphinx = '1.0' 32 | 33 | # Add any Sphinx extension module names here, as strings. They can be 34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 35 | # ones. 36 | extensions = [ 37 | 'sphinx.ext.autodoc' 38 | ] 39 | 40 | # Add any paths that contain templates here, relative to this directory. 41 | templates_path = ['_templates'] 42 | 43 | # The suffix of source filenames. 
44 | source_suffix = '.rst' 45 | 46 | # The encoding of source files. 47 | # source_encoding = 'utf-8-sig' 48 | 49 | # The master toctree document. 50 | master_doc = 'index' 51 | 52 | # General information about the project. 53 | project = 'IntelMQ Mailgen' 54 | copyright = '2023, Intevation GmbH' 55 | 56 | # The version info for the project you're documenting, acts as replacement for 57 | # |version| and |release|, also used in various other places throughout the 58 | # built documents. 59 | # 60 | # The short X.Y version. 61 | version = '0.99' 62 | # The full version, including alpha/beta/rc tags. 63 | release = '0.99' 64 | 65 | # The language for content autogenerated by Sphinx. Refer to documentation 66 | # for a list of supported languages. 67 | # language = None 68 | 69 | # There are two options for replacing |today|: either, you set today to some 70 | # non-false value, then it is used: 71 | # today = '' 72 | # Else, today_fmt is used as the format for a strftime call. 73 | # today_fmt = '%B %d, %Y' 74 | 75 | # List of patterns, relative to source directory, that match files and 76 | # directories to ignore when looking for source files. 77 | exclude_patterns = ['_build'] 78 | 79 | # The reST default role (used for this markup: `text`) to use for all 80 | # documents. 81 | # default_role = None 82 | 83 | # If true, '()' will be appended to :func: etc. cross-reference text. 84 | # add_function_parentheses = True 85 | 86 | # If true, the current module name will be prepended to all description 87 | # unit titles (such as .. function::). 88 | # add_module_names = True 89 | 90 | # If true, sectionauthor and moduleauthor directives will be shown in the 91 | # output. They are ignored by default. 92 | # show_authors = False 93 | 94 | # The name of the Pygments (syntax highlighting) style to use. 95 | pygments_style = 'sphinx' 96 | 97 | # A list of ignored prefixes for module index sorting. 
98 | # modindex_common_prefix = [] 99 | 100 | # If true, keep warnings as "system message" paragraphs in the built documents. 101 | # keep_warnings = False 102 | 103 | 104 | # -- Options for HTML output ---------------------------------------------- 105 | 106 | # The theme to use for HTML and HTML Help pages. See the documentation for 107 | # a list of builtin themes. 108 | html_theme = 'default' 109 | 110 | # Theme options are theme-specific and customize the look and feel of a theme 111 | # further. For a list of options available for each theme, see the 112 | # documentation. 113 | # html_theme_options = {} 114 | 115 | # Add any paths that contain custom themes here, relative to this directory. 116 | # html_theme_path = [] 117 | 118 | # The name for this set of Sphinx documents. If None, it defaults to 119 | # " v documentation". 120 | # html_title = None 121 | 122 | # A shorter title for the navigation bar. Default is the same as html_title. 123 | # html_short_title = None 124 | 125 | # The name of an image file (relative to this directory) to place at the top 126 | # of the sidebar. 127 | # html_logo = None 128 | 129 | # The name of an image file (within the static path) to use as favicon of the 130 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 131 | # pixels large. 132 | # html_favicon = None 133 | 134 | # Add any paths that contain custom static files (such as style sheets) here, 135 | # relative to this directory. They are copied after the builtin static files, 136 | # so a file named "default.css" will overwrite the builtin "default.css". 137 | html_static_path = ['_static'] 138 | 139 | # Add any extra paths that contain custom files (such as robots.txt or 140 | # .htaccess) here, relative to this directory. These files are copied 141 | # directly to the root of the documentation. 
142 | # html_extra_path = [] 143 | 144 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 145 | # using the given strftime format. 146 | # html_last_updated_fmt = '%b %d, %Y' 147 | 148 | # If true, SmartyPants will be used to convert quotes and dashes to 149 | # typographically correct entities. 150 | # html_use_smartypants = True 151 | 152 | # Custom sidebar templates, maps document names to template names. 153 | # html_sidebars = {} 154 | 155 | # Additional templates that should be rendered to pages, maps page names to 156 | # template names. 157 | # html_additional_pages = {} 158 | 159 | # If false, no module index is generated. 160 | # html_domain_indices = True 161 | 162 | # If false, no index is generated. 163 | # html_use_index = True 164 | 165 | # If true, the index is split into individual pages for each letter. 166 | # html_split_index = False 167 | 168 | # If true, links to the reST sources are added to the pages. 169 | # html_show_sourcelink = True 170 | 171 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 172 | # html_show_sphinx = True 173 | 174 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 175 | # html_show_copyright = True 176 | 177 | # If true, an OpenSearch description file will be output, and all pages will 178 | # contain a tag referring to it. The value of this option must be the 179 | # base URL from which the finished HTML is served. 180 | # html_use_opensearch = '' 181 | 182 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 183 | # html_file_suffix = None 184 | 185 | # Output file base name for HTML help builder. 186 | htmlhelp_basename = 'IntelMQMailgendoc' 187 | 188 | 189 | # -- Options for LaTeX output --------------------------------------------- 190 | 191 | latex_elements = { 192 | # The paper size ('letterpaper' or 'a4paper'). 193 | # 'papersize': 'letterpaper', 194 | 195 | # The font size ('10pt', '11pt' or '12pt'). 
196 | # 'pointsize': '10pt', 197 | 198 | # Additional stuff for the LaTeX preamble. 199 | # 'preamble': '', 200 | } 201 | 202 | # Grouping the document tree into LaTeX files. List of tuples 203 | # (source start file, target name, title, 204 | # author, documentclass [howto, manual, or own class]). 205 | latex_documents = [ 206 | ('index', 'IntelMQMailgen.tex', 'IntelMQ Mailgen Documentation', 207 | 'Intevation GmbH', 'manual'), 208 | ] 209 | 210 | # The name of an image file (relative to this directory) to place at the top of 211 | # the title page. 212 | # latex_logo = None 213 | 214 | # For "manual" documents, if this is true, then toplevel headings are parts, 215 | # not chapters. 216 | # latex_use_parts = False 217 | 218 | # If true, show page references after internal links. 219 | # latex_show_pagerefs = False 220 | 221 | # If true, show URL addresses after external links. 222 | # latex_show_urls = False 223 | 224 | # Documents to append as an appendix to all manuals. 225 | # latex_appendices = [] 226 | 227 | # If false, no module index is generated. 228 | # latex_domain_indices = True 229 | 230 | 231 | # -- Options for manual page output --------------------------------------- 232 | 233 | # One entry per manual page. List of tuples 234 | # (source start file, name, description, authors, manual section). 235 | man_pages = [ 236 | ('index', 'intelmqmailgen', 'IntelMQ Mailgen Documentation', 237 | ['Intevation GmbH'], 1) 238 | ] 239 | 240 | # If true, show URL addresses after external links. 241 | # man_show_urls = False 242 | 243 | 244 | # -- Options for Texinfo output ------------------------------------------- 245 | 246 | # Grouping the document tree into Texinfo files. 
List of tuples 247 | # (source start file, target name, title, author, 248 | # dir menu entry, description, category) 249 | texinfo_documents = [ 250 | ('index', 'IntelMQMailgen', 'IntelMQ Mailgen Documentation', 251 | 'Intevation GmbH', 'IntelMQMailgen', 'One line description of project.', 252 | 'Miscellaneous'), 253 | ] 254 | 255 | # Documents to append as an appendix to all manuals. 256 | # texinfo_appendices = [] 257 | 258 | # If false, no module index is generated. 259 | # texinfo_domain_indices = True 260 | 261 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 262 | # texinfo_show_urls = 'footnote' 263 | 264 | # If true, do not generate a @detailmenu in the "Top" node's menu. 265 | # texinfo_no_detailmenu = False 266 | 267 | # -- Custom options ------------------------------------------------------- 268 | 269 | 270 | def run_apidoc(_): 271 | subprocess.check_call("sphinx-apidoc --implicit-namespaces -o source ../intelmqmail", shell=True) 272 | 273 | 274 | # Always document the __init__ methods 275 | # https://stackoverflow.com/a/5599712/2851664 276 | def skip(app, what, name, obj, would_skip, options): 277 | if name == "__init__": 278 | return False 279 | return would_skip 280 | 281 | 282 | def setup(app): 283 | app.connect("autodoc-skip-member", skip) 284 | app.connect("builder-inited", run_apidoc) 285 | -------------------------------------------------------------------------------- /docs/contactdb-design.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Intevation/intelmq-mailgen/3de33d7ab84ddc2d9a4bb26033c157713a925353/docs/contactdb-design.png -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. IntelMQ Mailgen documentation master file, created by 2 | sphinx-quickstart on Wed Jun 21 15:46:29 2017. 
A connection to the SMTP server is only opened for testing.
24 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | Sphinx 2 | sphinxcontrib-apidoc 3 | -------------------------------------------------------------------------------- /docs/scripts.rst: -------------------------------------------------------------------------------- 1 | Mailgen Scripts (formats) 2 | ========================= 3 | 4 | For the overall concept of mailgen scripts (also: formats), please see 5 | :ref:`mailgen_scripts`. 6 | 7 | Format spec (also: table format) 8 | -------------------------------- 9 | 10 | The format spec specifies which parts of the data event will become part of the 11 | data sent (usually CSV format) in the notifications (usually e-mail). 12 | 13 | This corresponds to the CSV columns in most cases. 14 | 15 | The format spec can be set in different ways for the notifications. The order 16 | is the following: 17 | 18 | 1. The table format specified by the mailgen script: ``create_notifications`` returning ``context.mail_format_as_csv(table_format, ...)`` 19 | 2. The parameter passed to ``cb.create_notifications``/``cb.send_notifications``/``cb.start``/``cb.mailgen``/``intelmqmail.notification.ScriptContext``. 20 | IntelMQ Webinput CSV uses this. 21 | 3. The internal default, see :py:mod:`intelmqmail.notification.ScriptContext` 22 | 23 | Different Envelope-To from Header-To 24 | ------------------------------------ 25 | 26 | Normally, the recipient (Header-To and Envelope-To) of the E-Mail is the ``recipient_address`` of the Directive. 27 | Format Scripts can generate ``EmailNotification`` objects with a differing Envelope-To. 28 | 29 | ``context.mail_format_as_csv`` takes an argument ``envelope_tos`` with a list of email-addresses. 30 | The Header-To is always taken from the directive. 
def create_notifications(context):
    """Read template substitutions from disk and attach them to the context.

    The context is an object which is available in all mailgen scripts
    and passed from script to script, so the substitutions added here
    can be used by the later format scripts.

    Always returns None: this script only enriches the context and
    never creates notifications itself.
    """
    with open('/etc/intelmq/mailgen/formats/variables.json', 'r') as j:
        js = json.load(j)

    # Default to an empty dict so a file without a "substitutions" key
    # does not crash with a TypeError on the assignment below.
    substitution_variables = js.get("substitutions", {})

    # Determine the kind of aggregation: the aggregate identifier tells
    # us whether the directive groups events by ASN, CIDR or country.
    aggregation = context.directive.aggregate_identifier
    asn_or_cidr = ""  # Can also be a CC
    if "source.asn" in aggregation:
        asn_or_cidr = "about AS %s" % aggregation["source.asn"]
    elif "cidr" in aggregation:
        asn_or_cidr = "about CIDR %s" % aggregation["cidr"]
    elif "source.geolocation.cc" in aggregation:
        asn_or_cidr = "about your Country %s" % aggregation["source.geolocation.cc"]

    substitution_variables["asn_or_cidr"] = asn_or_cidr

    context.substitutions = substitution_variables

    return None
def add_default_titles(columns):
    """Attach the standard column title to every plain column name.

    Entries that are already (name, title) pairs or column objects are
    passed through untouched; plain strings are looked up in
    standard_column_titles, and "extra:" attributes become ExtraColumn
    instances.

    Args:
        columns: mixed sequence of column names, pairs and column objects

    Returns:
        A new list where every plain string has been replaced by a
        titled column specification.
    """
    titled = []
    for entry in columns:
        if not isinstance(entry, str):
            # Already a (name, title) pair or a column object.
            titled.append(entry)
            continue
        default_title = standard_column_titles[entry]
        if entry.startswith("extra:"):
            titled.append(ExtraColumn(entry[len("extra:"):], default_title))
        else:
            titled.append((entry, default_title))
    return titled
def table_formats_with_default_titles(formats):
    """Frontend for build_table_formats that adds standard column titles.

    Args:
        formats: sequence of (format name, column list) pairs

    Returns:
        The table formats produced by build_table_formats after every
        column list has been run through add_default_titles.
    """
    titled_formats = [(format_name, add_default_titles(columns))
                      for format_name, columns in formats]
    return build_table_formats(titled_formats)
| "source.ip", 154 | "time.source", 155 | "extra:elasticsearch_version", 156 | ExtraColumn("instance_name", "name"), 157 | ]), 158 | ("csv_Open-mDNS", [ 159 | "source.asn", 160 | "source.ip", 161 | "time.source", 162 | "extra:workstation_info", 163 | ]), 164 | ("csv_Open-Memcached", [ 165 | "source.asn", 166 | "source.ip", 167 | "time.source", 168 | "extra:memcached_version", 169 | ]), 170 | ("csv_Open-Redis", [ 171 | "source.asn", 172 | "source.ip", 173 | "time.source", 174 | "extra:redis_version", 175 | ]), 176 | ("csv_Open-SSDP", [ 177 | "source.asn", 178 | "source.ip", 179 | "time.source", 180 | "extra:ssdp_server", 181 | ]), 182 | ("csv_Ssl-Freak-Scan", [ 183 | "source.asn", 184 | "source.ip", 185 | "time.source", 186 | "source.reverse_dns", 187 | "extra:subject_common_name", 188 | "extra:issuer_common_name", 189 | "extra:freak_cipher_suite", 190 | ]), 191 | ("csv_Ssl-Scan", [ 192 | "source.asn", 193 | "source.ip", 194 | "time.source", 195 | "source.reverse_dns", 196 | "extra:subject_common_name", 197 | "extra:issuer_common_name", 198 | ]), 199 | ]) 200 | 201 | 202 | # Minimum age of the newest of a group of directives being aggregated 203 | # 204 | # The value should be chosen such that it's very unlikely that any more 205 | # directives will be added to the event database that would end up in 206 | # the same aggregation when the newest directive has reached at least 207 | # this age. 208 | minimum_directive_age = datetime.timedelta(minutes=15) 209 | 210 | minimum_observation_age = datetime.timedelta(hours=2) 211 | 212 | 213 | def create_notifications(context): 214 | """ 215 | 216 | Args: 217 | context: 218 | 219 | Returns: 220 | 221 | """ 222 | if context.directive.notification_format == "shadowserver": 223 | 224 | # Copy substitutions from the context. 225 | # This way we can edit the variables in this script 226 | # without changing the context. 
227 | substitution_variables = copy.copy(context.substitutions) 228 | 229 | format_spec = table_formats.get(context.directive.event_data_format) 230 | if format_spec is not None: 231 | if (context.age_of_newest_directive() < minimum_directive_age or 232 | context.age_of_observation() < minimum_observation_age): 233 | return Postponed 234 | 235 | substitution_variables["data_location_en"] = substitution_variables["data_location_inline_en"] 236 | substitution_variables["data_location_de"] = substitution_variables["data_location_inline_de"] 237 | return context.mail_format_as_csv(format_spec, substitutions=substitution_variables) 238 | 239 | return None 240 | -------------------------------------------------------------------------------- /example_scripts/11malware-infection.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import datetime 3 | 4 | from intelmqmail.tableformat import build_table_format, ExtraColumn 5 | from intelmqmail.notification import Postponed 6 | 7 | 8 | SPECIAL_MATTERS = ['avalanche', 9 | 'ebury', 10 | 'mumblehard' 11 | ] 12 | 13 | MATTER = 'malware-infection' 14 | # Minimum age of the newest of a group of directives being aggregated 15 | # 16 | # The value should be chosen such that it's very unlikely that any more 17 | # directives will be added to the event database that would end up in 18 | # the same aggregation when the newest directive has reached at least 19 | # this age. 20 | minimum_directive_age = datetime.timedelta(minutes=15) 21 | minimum_observation_age = datetime.timedelta(hours=2) 22 | 23 | 24 | def create_notifications(context): 25 | matter = context.directive.notification_format 26 | 27 | # Copy Substitutions from the context to this script. 28 | # This way we can edit the variables in this script 29 | # without changing the context. 
30 | substitution_variables = copy.copy(context.substitutions) 31 | 32 | data_format = context.directive.event_data_format 33 | 34 | if matter in SPECIAL_MATTERS: 35 | # Prepare Message in CSV-Format for avalanche, 36 | # ebury, mumblehard 37 | # For avalanche, we are accessing the ExtraColumn 38 | if matter == 'avalanche': 39 | formats = ( 40 | ("source.asn", "ASN"), 41 | ("source.ip", "IP"), 42 | ("time.source", "Time"), 43 | ("classification.identifier", "Identifier"), 44 | ("malware.name", "Malware"), 45 | ("source.port", "Port"), 46 | ("destination.ip", "Destination-IP"), 47 | ("destination.port", "Destination-Port"), 48 | ExtraColumn("Destination-FQDN", "http_host_header")) 49 | else: 50 | formats = ( 51 | ("source.asn", "ASN"), 52 | ("source.ip", "IP"), 53 | ("time.source", "Time"), 54 | ("classification.identifier", "Identifier"), 55 | ("malware.name", "Malware"), 56 | ("source.port", "Port"), 57 | ("destination.ip", "Destination-IP"), 58 | ("destination.port", "Destination-Port"), 59 | ("destination.fqdn", "Destination-FQDN")) 60 | elif matter == MATTER: 61 | # Prepare Message in CSV-Format for other matters, 62 | # most likely shadowserver sinkhole data 63 | formats = ( 64 | ("source.asn", "asn"), 65 | ("source.ip", "ip"), 66 | ("time.source", "timestamp"), 67 | ("classification.identifier", "malware"), 68 | ("source.port", "src_port"), 69 | ("destination.ip", "dst_ip"), 70 | ("destination.port", "dst_port"), 71 | ("destination.fqdn", "dst_host"), 72 | ("protocol.transport", "proto")) 73 | 74 | else: 75 | return None 76 | 77 | # TODO: Update this mechanism! 78 | # If the matter is not a SPECIAL_MATTER, use the common notification_interval 79 | if matter not in SPECIAL_MATTERS: 80 | if not context.notification_interval_exceeded(): 81 | return Postponed 82 | else: 83 | # If the matter is a SPECIAL_MATTER, it should be aggregatable by time.observation 84 | # (32ct_botnet-drone.py should have done that!) 
def create_csv_mail(data_format, csv_format, substitution_variables, context):
    """Create a CSV mail, inline or as an attachment, based on data_format.

    Args:
        data_format: the directive's event_data_format; its suffix
            decides whether the event data goes into the mail body or
            into an attachment
        csv_format: the table format used to render the event data
        substitution_variables: template substitutions (modified in place)
        context: the ScriptContext used to build the mail

    Returns:
        The notifications created by context.mail_format_as_csv, or
        None if data_format matches neither known suffix.
    """
    if data_format.endswith("_csv_inline"):
        # Event data is wanted inline in the mail body.
        substitution_variables["data_location_en"] = substitution_variables["data_location_inline_en"]
        substitution_variables["data_location_de"] = substitution_variables["data_location_inline_de"]
        return context.mail_format_as_csv(csv_format, substitutions=substitution_variables)

    if data_format.endswith("_csv_attachment"):
        # Event data is wanted as an attachment.
        # TODO There is no attachment, yet!
        substitution_variables["data_location_en"] = substitution_variables["data_location_attached_en"]
        substitution_variables["data_location_de"] = substitution_variables["data_location_attached_de"]
        substitution_variables["data_inline_separator_en"] = ""
        substitution_variables["data_inline_separator_de"] = ""
        return context.mail_format_as_csv(csv_format, substitutions=substitution_variables,
                                          attach_event_data=True)

    # Unknown data format: explicitly create no notifications.
    return None
standard_column_titles = {
    # column titles for standard event attributes
    'classification.identifier': 'malware',
    'destination.fqdn': 'dst_host',
    'destination.ip': 'dst_ip',
    'destination.port': 'dst_port',
    'protocol.transport': 'proto',
    'source.asn': 'asn',
    'source.ip': 'ip',
    'source.local_hostname': 'server_name',
    'source.port': 'src_port',
    'source.reverse_dns': 'hostname',
    'time.source': 'timestamp',

    # column titles for extra attributes
    'extra:system_desc': 'sysdesc',
    "extra:mssql_version": "version",
    "extra:mongodb_version": "version",
    "extra:workgroup_name": "workgroup",
    "extra:machine_name": "machine_name",
    "extra:elasticsearch_version": "version",
    "extra:workstation_info": "workstation_info",
    "extra:memcached_version": "version",
    "extra:redis_version": "version",
    "extra:ssdp_server": "server",
    "extra:subject_common_name": "subject_common_name",
    "extra:issuer_common_name": "issuer_common_name",
    "extra:freak_cipher_suite": "freak_cipher_suite",
}


def add_default_titles(columns):
    """Attach the standard column title to every plain column name.

    Plain strings are looked up in standard_column_titles; "extra:"
    attributes become ExtraColumn instances. Entries that are not plain
    strings (pairs, column objects) are passed through unchanged.

    Args:
        columns: mixed sequence of column names, pairs and column objects

    Returns:
        A new list of titled column specifications.
    """
    titled = []
    for entry in columns:
        if not isinstance(entry, str):
            # Already carries its own title.
            titled.append(entry)
            continue
        default_title = standard_column_titles[entry]
        if entry.startswith("extra:"):
            titled.append(ExtraColumn(entry[len("extra:"):], default_title))
        else:
            titled.append((entry, default_title))
    return titled
73 | 74 | Args: 75 | formats: 76 | 77 | Returns: 78 | 79 | """ 80 | 81 | return build_table_formats([(name, add_default_titles(columns)) 82 | for name, columns in formats]) 83 | 84 | 85 | table_formats = table_formats_with_default_titles([ 86 | ("opendns", [ 87 | "source.asn", 88 | "source.ip", 89 | "time.source", 90 | ]), 91 | ("openportmapper", [ 92 | "source.asn", 93 | "source.ip", 94 | "time.source", 95 | ]), 96 | ("opensnmp", [ 97 | "source.asn", 98 | "source.ip", 99 | "time.source", 100 | "extra:system_desc", 101 | ]), 102 | ("openldap", [ 103 | "source.asn", 104 | "source.ip", 105 | "time.source", 106 | ("source.local_hostname", "dns_hostname"), 107 | ]), 108 | ("openmssql", [ 109 | "source.asn", 110 | "source.ip", 111 | "time.source", 112 | "extra:mssql_version", 113 | "source.local_hostname", 114 | ExtraColumn("instance_name", "instance_name"), 115 | ]), 116 | ("openmongodb", [ 117 | "source.asn", 118 | "source.ip", 119 | "time.source", 120 | "extra:mongodb_version", 121 | ]), 122 | ("openchargen", [ 123 | "source.asn", 124 | "source.ip", 125 | "time.source", 126 | ]), 127 | ("openipmi", [ 128 | "source.asn", 129 | "source.ip", 130 | "time.source", 131 | ]), 132 | ("opennetbios", [ 133 | "source.asn", 134 | "source.ip", 135 | "time.source", 136 | "extra:workgroup_name", 137 | "extra:machine_name", 138 | ]), 139 | ("openntp", [ 140 | "source.asn", 141 | "source.ip", 142 | "time.source", 143 | ]), 144 | ("openelasticsearch", [ 145 | "source.asn", 146 | "source.ip", 147 | "time.source", 148 | "extra:elasticsearch_version", 149 | ExtraColumn("instance_name", "name"), 150 | ]), 151 | ("openmdns", [ 152 | "source.asn", 153 | "source.ip", 154 | "time.source", 155 | "extra:workstation_info", 156 | ]), 157 | ("openmemcached", [ 158 | "source.asn", 159 | "source.ip", 160 | "time.source", 161 | "extra:memcached_version", 162 | ]), 163 | ("openredis", [ 164 | "source.asn", 165 | "source.ip", 166 | "time.source", 167 | "extra:redis_version", 168 | ]), 169 | 
("openssdp", [ 170 | "source.asn", 171 | "source.ip", 172 | "time.source", 173 | "extra:ssdp_server", 174 | ]), 175 | ("ssl-freak", [ 176 | "source.asn", 177 | "source.ip", 178 | "time.source", 179 | "source.reverse_dns", 180 | "extra:subject_common_name", 181 | "extra:issuer_common_name", 182 | "extra:freak_cipher_suite", 183 | ]), 184 | ("ssl-poodle", [ 185 | "source.asn", 186 | "source.ip", 187 | "time.source", 188 | "source.reverse_dns", 189 | "extra:subject_common_name", 190 | "extra:issuer_common_name", 191 | ]), 192 | ]) 193 | 194 | 195 | def create_notifications(context): 196 | 197 | if context.directive.notification_format == "vulnerable-service": 198 | 199 | if (context.age_of_newest_directive() < minimum_directive_age or 200 | context.age_of_observation() < minimum_observation_age): 201 | return Postponed 202 | 203 | # Copy Substitutions from the context to this script. 204 | # This way we can edit the variables in this script 205 | # without changing the context. 206 | substitution_variables = copy.copy(context.substitutions) 207 | 208 | data_format = context.directive.event_data_format 209 | template_name = context.directive.template_name 210 | 211 | # The template name is expected to look like 212 | # openportmapper_provider 213 | # which is sth. like the lowercase classification.identifier 214 | # and the target group. 215 | # If the classification.identifier contains underscores, 216 | # we'll ge in trouble here. You need to make sure in 217 | # the scripts generating the directives, that this does never 218 | # happen. 
219 | csv_header_style = template_name.split(sep='_')[0] 220 | 221 | format_spec = table_formats.get(csv_header_style) 222 | 223 | return create_csv_mail(data_format, format_spec, substitution_variables, context) 224 | 225 | return None 226 | 227 | 228 | def create_csv_mail(data_format, csv_format, substitution_variables, context): 229 | if data_format.endswith("_csv_inline"): 230 | # If Inline-Messages are wanted 231 | substitution_variables["data_location_en"] = substitution_variables["data_location_inline_en"] 232 | substitution_variables["data_location_de"] = substitution_variables["data_location_inline_de"] 233 | return context.mail_format_as_csv(csv_format, substitutions=substitution_variables) 234 | 235 | elif data_format.endswith("_csv_attachment"): 236 | # TODO There is no attachment, yet! 237 | substitution_variables["data_location_en"] = substitution_variables["data_location_attached_en"] 238 | substitution_variables["data_location_de"] = substitution_variables["data_location_attached_de"] 239 | substitution_variables["data_inline_separator_en"] = "" 240 | substitution_variables["data_inline_separator_de"] = "" 241 | return context.mail_format_as_csv(csv_format, substitutions=substitution_variables, 242 | attach_event_data=True) 243 | -------------------------------------------------------------------------------- /example_scripts/20xarf.py: -------------------------------------------------------------------------------- 1 | """X-ARF for IntelMQ-Mailgen 2 | 3 | This script looks for xarf directives within IntelMQ-Mailgen's context 4 | objects. 
class Formatter:
    """Pair an IntelMQ field name with a conversion function.

    Combines the name of the IntelMQ field whose value should be
    inserted into the X-ARF message with a function that converts that
    value into an X-ARF compliant representation.
    """

    def __init__(self, field, formatter=lambda x: x):
        """Remember the field name and the conversion function.

        Args:
            field: name of the IntelMQ field, for instance "source.ip"
            formatter: callable applied to the field's value; defaults
                to the identity function
        """
        self.field = field
        self.formatter = formatter

    def format(self, event):
        """Return the converted value of this formatter's field.

        Args:
            event: an IntelMQ event

        Returns:
            The formatted value of self.field taken from the event.
        """
        raw_value = event[self.field]
        return self.formatter(raw_value)
    def event_columns(self):
        """Return the IntelMQ field names used by this schema's mapping.

        Returns:
            A list of IntelMQ field names, e.g. ["source.ip", ...],
            one per entry of self.event_mapping.
        """
        return [formatter.field for formatter in self.event_mapping.values()]

    def xarf_params(self, event):
        """Create X-ARF key-value pairs for one event.

        Starts from a copy of the schema's static fields, then adds one
        entry per event mapping, applying the mapping's formatting
        function to the event data.

        Args:
            event: an IntelMQ event

        Returns:
            A dictionary mapping X-ARF field names to formatted event
            data; entries whose formatted value is None are omitted.
        """
        params = self.static_fields.copy()
        for key, formatter in self.event_mapping.items():
            formatted = formatter.format(event)
            if formatted is not None:
                # Only add non-Null values to the dict
                params[key] = formatted
        return params
def datetime_to_rfc2822(eventdatetime):
    """Convert a datetime object to an RFC 2822 date string.

    Args:
        eventdatetime: a datetime object with timezone information

    Returns:
        The datetime encoded as an RFC 2822 string (as produced by
        email.utils.formatdate in local time).
    """
    epoch_seconds = eventdatetime.timestamp()
    return formatdate(epoch_seconds, localtime=True)
def create_notifications(context):
    """Entry point of intelmq-mailgen for X-ARF directives.

    Args:
        context: the ScriptContext for the current directive

    Returns:
        None if the directive is not an X-ARF directive, Postponed if
        the notification interval has not been exceeded yet, otherwise
        the notifications built by context.mail_format_as_xarf.

    Raises:
        RuntimeError: if the directive requests an X-ARF schema that is
            not configured in known_xarf_schema.
    """
    if context.directive.notification_format != "xarf":
        return None

    if not context.notification_interval_exceeded():
        return Postponed

    schema_name = context.directive.event_data_format

    xarf_schema = known_xarf_schema.get(schema_name)
    if xarf_schema is None:
        # X-ARF was requested, but the schema is not configured.
        raise RuntimeError("Unknown X-ARF schema %r" % (schema_name,))
    # Note: the original trailing "return None" after this if/else was
    # unreachable (both branches return or raise) and has been removed.
    return context.mail_format_as_xarf(xarf_schema)
6 | """ 7 | 8 | from intelmqmail.tableformat import build_table_format 9 | from intelmqmail.templates import Template 10 | from intelmqmail.notification import Postponed 11 | 12 | 13 | table_format = build_table_format( 14 | "Fallback", 15 | (("source.asn", "asn"), 16 | ("source.ip", "ip"), 17 | ("time.source", "timestamp"), 18 | ("source.port", "src_port"), 19 | ("destination.ip", "dst_ip"), 20 | ("destination.port", "dst_port"), 21 | ("destination.fqdn", "dst_host"), 22 | ("protocol.transport", "proto"), 23 | )) 24 | 25 | # The text of the template is inlined here to make sure creating the 26 | # mail does not fail due to a missing template file. 27 | template = Template.from_strings("CB-Report#${ticket_number}", 28 | "Dear Sir or Madam,\n" 29 | "\n" 30 | "Please find below a list of affected systems" 31 | " on your network(s).\n" 32 | "\n" 33 | "Events:\n" 34 | "${events_as_csv}") 35 | 36 | 37 | def create_notifications(context): 38 | if not context.notification_interval_exceeded(): 39 | return Postponed 40 | 41 | # If there are some additional substitutions to be performed in the 42 | # above template, add them to the substitutions dictionary. By 43 | # passing it to the mail_format_as_csv method below they will be 44 | # substituted into the template when the mail is created. 
45 | substitutions = dict() 46 | 47 | return context.mail_format_as_csv(table_format, template=template, 48 | substitutions=substitutions) 49 | -------------------------------------------------------------------------------- /example_scripts/variables.json: -------------------------------------------------------------------------------- 1 | { 2 | "substitutions": { 3 | "ticket_prefix": "IntelMQ-Mailgen#", 4 | "data_location_inline_de": "Nachfolgend", 5 | "data_location_attached_de": "Im Anhang", 6 | "data_location_inline_en": "below", 7 | "data_location_attached_en": "attached", 8 | "signature_de": "Mit freundlichen Grüßen\ndie IntelMQ-Mailgen Entwickler\n", 9 | "signature_de_en": "Mit freundlichen Grüßen / Kind regards\n\ndie IntelMQ-Mailgen Entwickler\nthe developers of IntelMQ-Mailgen\n", 10 | "signature_en": "Kind regards\nthe developers of IntelMQ-Mailgen\n", 11 | "note_automated_de": "Bitte beachten Sie:\nDies ist eine automatisch generierte Nachricht.\n", 12 | "note_automated_en": "Please note:\nThis is an automatically generated message.\n", 13 | "note_pgp_signature_de": "Diese E-Mail ist mittels OpenPGP digital signiert.", 14 | "note_pgp_signature_en": "This message is digitally signed using OpenPGP.", 15 | "data_inline_separator_de": "======================================================================\n\nBetroffene Systeme\n\n", 16 | "data_inline_separator_de_en": "======================================================================\n\nBetroffene Systeme\nAffected hosts\n\n", 17 | "data_inline_separator_en": "======================================================================\n\nAffected hosts\n\n" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /extras/dirty_setup/dirty_setup.sh: -------------------------------------------------------------------------------- 1 | # quick and dirty setup for a complete testing system 2 | # 3 | # WARNING use at your own risk! 
# Expand @varname@ placeholders in a template string.
#
# For every variable name listed in the global TEMPLATE_VARS, each
# occurrence of @varname@ in the input is replaced by the value of the
# global shell variable of that name.
fill_in_template()
# $1 TEMPLATE_CONTENT
# return TEMPLATE_CONTENT with variables of the form @varname@ substituted.
{
    local template="$1"
    local substexp=""
    # Assemble a single sed program with one s///g command per variable.
    # ${$var//\//\\\\/} (expanded later by eval) escapes any slashes in
    # the variable's value so they do not terminate the sed expression.
    for var in $TEMPLATE_VARS ; do
        substexp="${substexp}s/@$var@/\${$var//\//\\\\/}/g;"
    done
    substexp="\"${substexp}\""
    # eval is required so that the ${var//...} pattern substitutions in
    # the program are performed against the current shell's variables.
    local content=$( echo "$template" | eval sed $substexp )
    echo "$content"
}
-s "$INITDB_FILE" ] ; then 85 | intelmq_psql_initdb 86 | fi 87 | 88 | sudo -u postgres bash -x << EOF 89 | if psql -lqt | cut -d \| -f 1 | grep -qw intelmq-events; then 90 | echo "database already exists - no need for setup" 91 | else 92 | createdb --encoding=UTF8 --template=template0 --owner=$dbuser intelmq-events 93 | psql intelmq-events <"$INITDB_FILE" 94 | fi 95 | EOF 96 | 97 | # 98 | # intelmq mailgen setup 99 | # MUST adhere to /usr/share/doc/intelmq-mailgen/README.md.gz 100 | # 101 | maildbuser=intelmq_mailgen 102 | maildbpasswd=`tr -dc A-Za-z0-9_ < /dev/urandom | head -c 14` 103 | 104 | sudo -u postgres bash -x << EOF 105 | psql -c "CREATE USER $maildbuser WITH PASSWORD '$maildbpasswd';" 106 | psql -f /usr/share/intelmq-mailgen/sql/notifications.sql intelmq-events 107 | psql -c "GRANT eventdb_insert TO $dbuser" intelmq-events 108 | psql -c "GRANT eventdb_send_notifications TO $maildbuser" intelmq-events 109 | EOF 110 | 111 | cat /usr/share/doc/intelmq-mailgen/examples/intelmq-mailgen.conf.example | \ 112 | sed -e "s/your DB password/$maildbpasswd/" \ 113 | -e 's/"port": 25/"port": 8025/' \ 114 | > /etc/intelmq/intelmq-mailgen.conf 115 | 116 | ghome=/etc/intelmq/mailgen/gnupghome 117 | 118 | mkdir "$ghome" 119 | chown intelmq.intelmq "$ghome" 120 | sudo -u intelmq bash -x <"$ghome/gpg.conf" < 129 | FOF 130 | EOF 131 | 132 | cp /usr/share/doc/intelmq-mailgen/examples/example-template.txt \ 133 | /etc/intelmq/mailgen/templates/template-generic_malware.txt 134 | 135 | 136 | # intelmq overall setup 137 | etcdir=/opt/intelmq/etc 138 | 139 | declare -A default_templates 140 | 141 | default_templates[runtime.conf]=$( cat <\"$etcdir/$conf\"" 267 | done 268 | 269 | if [ -d "$MG_TEMPLATE_PATH" ] ; then 270 | mg_tmpl_dst=`sed -n '/"template_dir":/s/.*:[^"]*"\(.*\)".*/\1/p' /etc/intelmq/intelmq-mailgen.conf` 271 | mkdir -p "$mg_tmpl_dst" 272 | cp "$MG_TEMPLATE_PATH"/* "$mg_tmpl_dst" 273 | fi 274 | 275 | echo importing a revision of the ripe database for DE from file 276 
| ripedir=/opt/INTELMQ-DIST-REPO/ripe 277 | ripeexport=`ls -d "$ripedir"/2???-??-?? 2>/dev/null | tail -1` 278 | if [ -d "$ripeexport" ] ; then 279 | pushd "$ripeexport" 280 | cat delegated-ripencc-latest | \ 281 | awk -F'|' '{if ($2=="DE" && $3=="asn") print "AS"$4}' >asn-DE.txt 282 | 283 | sudo -u postgres bash -x << EOF 284 | ripe_import.py --conninfo dbname=contactdb --asn-whitelist-file=asn-DE.txt -v 285 | EOF 286 | 287 | popd 288 | fi 289 | 290 | echo TODO: as root: start dsmtp 291 | echo TODO: e.g. as intelmq: copy shadowdsrv_botnet_droneTESTDATA.csv in /tmp/ 292 | cat << EOF 293 | # When using intelmq/tests/bots/parsers/shadowserver/testdata you could: 294 | git clone --depth=1 -b integrated https://github.com/intevation/intelmq intelmq-git 295 | pushd intelmq-git/intelmq/tests/bots/parsers/shadowserver/testdata 296 | cp accessible-open-mongodb.csv /tmp/input_open-mongodb/ 297 | cp chargen.csv /tmp/input_open-chargen 298 | cp ipmi.csv /tmp/input_open-ipmi 299 | cp microsoft-sinkhole.csv /tmp/input_microsoft-sinkhole 300 | popd 301 | EOF 302 | -------------------------------------------------------------------------------- /extras/dirty_setup/ds-templates/README: -------------------------------------------------------------------------------- 1 | This setup needs various databases, which are regularly updated via 2 | /opt/intelmq/etc/runtime.conf.new 3 | To create/update them manually execute as intelmq user: 4 | 5 | /usr/bin/update-tor-nodes /opt/intelmq/var/lib/bots/tor_nodes/tor_nodes.dat 6 | /usr/bin/update-geoip-data /opt/intelmq/var/lib/bots/maxmind_geoip/GeoLite2-City.mmdb 7 | /usr/bin/update-asn-data /opt/intelmq/var/lib/bots/asn_lookup/ipasn.dat 8 | -------------------------------------------------------------------------------- /extras/dirty_setup/ds-templates/pipeline.conf: -------------------------------------------------------------------------------- 1 | { 2 | "taxonomy-expert": { 3 | "source-queue": "taxonomy-expert-queue", 4 | 
"destination-queues": [ 5 | "asn-lookup-expert-queue" 6 | ] 7 | }, 8 | "asn-lookup-expert": { 9 | "source-queue": "asn-lookup-expert-queue", 10 | "destination-queues": [ 11 | "maxmind-geoip-expert-queue" 12 | ] 13 | }, 14 | "maxmind-geoip-expert": { 15 | "source-queue": "maxmind-geoip-expert-queue", 16 | "destination-queues": [ 17 | "modify-expert-queue" 18 | ] 19 | }, 20 | "modify-expert": { 21 | "source-queue": "modify-expert-queue", 22 | "destination-queues": [ 23 | "cert-bund-contact-database-expert-queue" 24 | ] 25 | }, 26 | "cert-bund-contact-database-expert": { 27 | "source-queue": "cert-bund-contact-database-expert-queue", 28 | "destination-queues": [ 29 | "postgresql-output-queue" 30 | ] 31 | }, 32 | "postgresql-output": { 33 | "source-queue": "postgresql-output-queue" 34 | }, 35 | "deduplicator-expert": { 36 | "source-queue": "deduplicator-expert-queue", 37 | "destination-queues": [ 38 | "tor-nodes-expert-queue" 39 | ] 40 | }, 41 | "tor-nodes-expert": { 42 | "source-queue": "tor-nodes-expert-queue", 43 | "destination-queues": [ 44 | "filter-drop-tornode-queue", 45 | "filter-NOT-drop-tornode-queue" 46 | ] 47 | }, 48 | "filter-drop-tornode": { 49 | "source-queue": "filter-drop-tornode-queue", 50 | "destination-queues": [ 51 | "taxonomy-expert-queue" 52 | ] 53 | }, 54 | "filter-NOT-drop-tornode": { 55 | "source-queue": "filter-NOT-drop-tornode-queue", 56 | "destination-queues": [ 57 | "taxonomy-expert-copy-queue" 58 | ] 59 | }, 60 | "taxonomy-expert-copy": { 61 | "source-queue": "taxonomy-expert-copy-queue", 62 | "destination-queues": [ 63 | "asn-lookup-expert-copy-queue" 64 | ] 65 | }, 66 | "asn-lookup-expert-copy": { 67 | "source-queue": "asn-lookup-expert-copy-queue", 68 | "destination-queues": [ 69 | "maxmind-geoip-expert-copy-queue" 70 | ] 71 | }, 72 | "maxmind-geoip-expert-copy": { 73 | "source-queue": "maxmind-geoip-expert-copy-queue", 74 | "destination-queues": [ 75 | "modify-expert-copy-queue" 76 | ] 77 | }, 78 | "modify-expert-copy": { 79 | 
"source-queue": "modify-expert-copy-queue", 80 | "destination-queues": [ 81 | "postgresql-output-queue" 82 | ] 83 | }, 84 | "shadowserver-parser-sinkhole-http-drone": { 85 | "source-queue": "shadowserver-parser-sinkhole-http-drone-queue", 86 | "destination-queues": [ 87 | "deduplicator-expert-queue" 88 | ] 89 | }, 90 | "TEST-Shadowserver-DRONE-REPORT": { 91 | "destination-queues": [ 92 | "shadowserver-parser-botnet-drone-hadoop-queue" 93 | ] 94 | }, 95 | "shadowserver-parser-botnet-drone-hadoop": { 96 | "source-queue": "shadowserver-parser-botnet-drone-hadoop-queue", 97 | "destination-queues": [ 98 | "deduplicator-expert-queue" 99 | ] 100 | }, 101 | "TEST-Shadowserver-Sinkhole-HTTP-Drone": { 102 | "destination-queues": [ 103 | "shadowserver-parser-sinkhole-http-drone-queue" 104 | ] 105 | }, 106 | "TEST-Shadowserver-Open-SNMP": { 107 | "destination-queues": [ 108 | "shadowserver-parser-open-snmp-queue" 109 | ] 110 | }, 111 | "shadowserver-parser-open-snmp": { 112 | "source-queue": "shadowserver-parser-open-snmp-queue", 113 | "destination-queues": [ 114 | "deduplicator-expert-opensnmp-queue" 115 | ] 116 | }, 117 | "deduplicator-expert-opensnmp": { 118 | "source-queue": "deduplicator-expert-opensnmp-queue", 119 | "destination-queues": [ 120 | "filter-drop-empty-sysdesc-queue", 121 | "filter-NOT-drop-empty-sysdesc-queue" 122 | ] 123 | }, 124 | "filter-drop-empty-sysdesc": { 125 | "source-queue": "filter-drop-empty-sysdesc-queue", 126 | "destination-queues": [ 127 | "tor-nodes-expert-queue" 128 | ] 129 | }, 130 | "filter-NOT-drop-empty-sysdesc": { 131 | "source-queue": "filter-NOT-drop-empty-sysdesc-queue", 132 | "destination-queues": [ 133 | "taxonomy-expert-copy-queue" 134 | ] 135 | }, 136 | "shadowserver-parser-open-portmapper": { 137 | "source-queue": "shadowserver-parser-open-portmapper-queue", 138 | "destination-queues": [ 139 | "deduplicator-expert-queue" 140 | ] 141 | }, 142 | "TEST-Shadowserver-Open-Portmapper": { 143 | "destination-queues": [ 144 | 
"shadowserver-parser-open-portmapper-queue" 145 | ] 146 | }, 147 | "TEST-Shadowserver-DNS-open-resolvers": { 148 | "destination-queues": [ 149 | "shadowserver-parser-dns-open-resolvers-queue" 150 | ] 151 | }, 152 | "shadowserver-parser-dns-open-resolvers": { 153 | "source-queue": "shadowserver-parser-dns-open-resolvers-queue", 154 | "destination-queues": [ 155 | "deduplicator-expert-opendns-queue" 156 | ] 157 | }, 158 | "deduplicator-expert-opendns": { 159 | "source-queue": "deduplicator-expert-opendns-queue", 160 | "destination-queues": [ 161 | "filter-drop-empty-amplification-queue", 162 | "filter-NOT-drop-empty-amplification-queue" 163 | ] 164 | }, 165 | "filter-drop-empty-amplification": { 166 | "source-queue": "filter-drop-empty-amplification-queue", 167 | "destination-queues": [ 168 | "tor-nodes-expert-queue" 169 | ] 170 | }, 171 | "filter-NOT-drop-empty-amplification": { 172 | "source-queue": "filter-NOT-drop-empty-amplification-queue", 173 | "destination-queues": [ 174 | "taxonomy-expert-copy-queue" 175 | ] 176 | }, 177 | "shadowserver-parser-microsoft-sinkhole": { 178 | "source-queue": "shadowserver-parser-microsoft-sinkhole-queue", 179 | "destination-queues": [ 180 | "deduplicator-expert-queue" 181 | ] 182 | }, 183 | "TEST-Shadowserver-Microsoft-Sinkhole": { 184 | "destination-queues": [ 185 | "shadowserver-parser-microsoft-sinkhole-queue" 186 | ] 187 | }, 188 | "shadowserver-parser-ntp-monitor": { 189 | "source-queue": "shadowserver-parser-ntp-monitor-queue", 190 | "destination-queues": [ 191 | "deduplicator-expert-queue" 192 | ] 193 | }, 194 | "TEST-Shadowserver-NTP-Monitor": { 195 | "destination-queues": [ 196 | "shadowserver-parser-ntp-monitor-queue" 197 | ] 198 | }, 199 | "shadowserver-parser-open-chargen": { 200 | "source-queue": "shadowserver-parser-open-chargen-queue", 201 | "destination-queues": [ 202 | "deduplicator-expert-queue" 203 | ] 204 | }, 205 | "TEST-Shadowserver-Open-Chargen": { 206 | "destination-queues": [ 207 | 
"shadowserver-parser-open-chargen-queue" 208 | ] 209 | }, 210 | "TEST-Shadowserver-Open-Elasticsearch": { 211 | "destination-queues": [ 212 | "shadowserver-parser-open-elasticsearch-queue" 213 | ] 214 | }, 215 | "shadowserver-parser-open-elasticsearch": { 216 | "source-queue": "shadowserver-parser-open-elasticsearch-queue", 217 | "destination-queues": [ 218 | "deduplicator-expert-group2-queue" 219 | ] 220 | }, 221 | "deduplicator-expert-group2": { 222 | "source-queue": "deduplicator-expert-group2-queue", 223 | "destination-queues": [ 224 | "filter-drop-empty-port-queue", 225 | "filter-NOT-drop-empty-port-queue" 226 | ] 227 | }, 228 | "filter-drop-empty-port": { 229 | "source-queue": "filter-drop-empty-port-queue", 230 | "destination-queues": [ 231 | "filter-drop-empty-version-queue", 232 | "filter-NOT-drop-empty-version-queue" 233 | ] 234 | }, 235 | "filter-NOT-drop-empty-port": { 236 | "source-queue": "filter-NOT-drop-empty-port-queue", 237 | "destination-queues": [ 238 | "taxonomy-expert-copy-queue" 239 | ] 240 | }, 241 | "filter-drop-empty-version": { 242 | "source-queue": "filter-drop-empty-version-queue", 243 | "destination-queues": [ 244 | "tor-nodes-expert-queue" 245 | ] 246 | }, 247 | "filter-NOT-drop-empty-version": { 248 | "source-queue": "filter-NOT-drop-empty-version-queue", 249 | "destination-queues": [ 250 | "taxonomy-expert-copy-queue" 251 | ] 252 | }, 253 | "shadowserver-parser-open-ipmi": { 254 | "source-queue": "shadowserver-parser-open-ipmi-queue", 255 | "destination-queues": [ 256 | "deduplicator-expert-queue" 257 | ] 258 | }, 259 | "TEST-Shadowserver-Open-IPMI": { 260 | "destination-queues": [ 261 | "shadowserver-parser-open-ipmi-queue" 262 | ] 263 | }, 264 | "shadowserver-parser-open-mdns": { 265 | "source-queue": "shadowserver-parser-open-mdns-queue", 266 | "destination-queues": [ 267 | "deduplicator-expert-queue" 268 | ] 269 | }, 270 | "TEST-Shadowserver-Open-MDNS": { 271 | "destination-queues": [ 272 | "shadowserver-parser-open-mdns-queue" 
273 | ] 274 | }, 275 | "TEST-Shadowserver-Open-MS-SQL": { 276 | "destination-queues": [ 277 | "shadowserver-parser-open-ms-sql-queue" 278 | ] 279 | }, 280 | "shadowserver-parser-open-ms-sql": { 281 | "source-queue": "shadowserver-parser-open-ms-sql-queue", 282 | "destination-queues": [ 283 | "deduplicator-expert-group2-queue" 284 | ] 285 | }, 286 | "TEST-Shadowserver-Open-Memcached": { 287 | "destination-queues": [ 288 | "shadowserver-parser-open-memcached-queue" 289 | ] 290 | }, 291 | "shadowserver-parser-open-memcached": { 292 | "source-queue": "shadowserver-parser-open-memcached-queue", 293 | "destination-queues": [ 294 | "deduplicator-expert-group2-queue" 295 | ] 296 | }, 297 | "TEST-Shadowserver-Open-MongoDB": { 298 | "destination-queues": [ 299 | "shadowserver-parser-open-mongodb-queue" 300 | ] 301 | }, 302 | "shadowserver-parser-open-mongodb": { 303 | "source-queue": "shadowserver-parser-open-mongodb-queue", 304 | "destination-queues": [ 305 | "deduplicator-expert-openmongodb-queue" 306 | ] 307 | }, 308 | "deduplicator-expert-openmongodb": { 309 | "source-queue": "deduplicator-expert-openmongodb-queue", 310 | "destination-queues": [ 311 | "filter-drop-empty-visible-database-queue", 312 | "filter-NOT-drop-empty-visible-database-queue" 313 | ] 314 | }, 315 | "filter-drop-empty-visible-database": { 316 | "source-queue": "filter-drop-empty-visible-database-queue", 317 | "destination-queues": [ 318 | "filter-drop-empty-version-queue", 319 | "filter-NOT-drop-empty-version-queue" 320 | ] 321 | }, 322 | "filter-NOT-drop-empty-visible-database": { 323 | "source-queue": "filter-NOT-drop-empty-visible-database-queue", 324 | "destination-queues": [ 325 | "taxonomy-expert-copy-queue" 326 | ] 327 | }, 328 | "shadowserver-parser-open-netbios": { 329 | "source-queue": "shadowserver-parser-open-netbios-queue", 330 | "destination-queues": [ 331 | "deduplicator-expert-queue" 332 | ] 333 | }, 334 | "TEST-Shadowserver-Open-NetBIOS": { 335 | "destination-queues": [ 336 | 
"shadowserver-parser-open-netbios-queue" 337 | ] 338 | }, 339 | "TEST-Shadowserver-Open-Redis": { 340 | "destination-queues": [ 341 | "shadowserver-parser-open-redis-queue" 342 | ] 343 | }, 344 | "shadowserver-parser-open-redis": { 345 | "source-queue": "shadowserver-parser-open-redis-queue", 346 | "destination-queues": [ 347 | "deduplicator-expert-group2-queue" 348 | ] 349 | }, 350 | "TEST-Shadowserver-Open-SSDP": { 351 | "destination-queues": [ 352 | "shadowserver-parser-open-ssdp-queue" 353 | ] 354 | }, 355 | "shadowserver-parser-open-ssdp": { 356 | "source-queue": "shadowserver-parser-open-ssdp-queue", 357 | "destination-queues": [ 358 | "deduplicator-expert-openssdp-queue" 359 | ] 360 | }, 361 | "deduplicator-expert-openssdp": { 362 | "source-queue": "deduplicator-expert-openssdp-queue", 363 | "destination-queues": [ 364 | "filter-drop-empty-server-queue", 365 | "filter-NOT-drop-empty-server-queue" 366 | ] 367 | }, 368 | "shadowserver-parser-ssl-freak": { 369 | "source-queue": "shadowserver-parser-ssl-freak-queue", 370 | "destination-queues": [ 371 | "deduplicator-expert-queue" 372 | ] 373 | }, 374 | "TEST-Shadowserver-SSL-FREAK": { 375 | "destination-queues": [ 376 | "shadowserver-parser-ssl-freak-queue" 377 | ] 378 | }, 379 | "shadowserver-parser-ssl-poodle": { 380 | "source-queue": "shadowserver-parser-ssl-poodle-queue", 381 | "destination-queues": [ 382 | "deduplicator-expert-queue" 383 | ] 384 | }, 385 | "TEST-Shadowserver-SSL-POODLE": { 386 | "destination-queues": [ 387 | "shadowserver-parser-ssl-poodle-queue" 388 | ] 389 | }, 390 | "filter-drop-empty-server": { 391 | "source-queue": "filter-drop-empty-server-queue", 392 | "destination-queues": [ 393 | "tor-nodes-expert-queue" 394 | ] 395 | }, 396 | "filter-NOT-drop-empty-server": { 397 | "source-queue": "filter-NOT-drop-empty-server-queue", 398 | "destination-queues": [ 399 | "taxonomy-expert-copy-queue" 400 | ] 401 | } 402 | } 403 | 
-------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-DNS-open-resolvers.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene DNS-Resolver in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-NTP-Monitor.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] NTP-Server mit aktiver 'monlist' Funktion in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 
17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-Chargen.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offenes Chargen in AS${asn} 2 | 3 | Format: ? 4 | 5 | ${events_as_csv} 6 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-Elasticsearch.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene Elasticsearch-Server in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | Elasticsearch version | Instance name 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-IPMI.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offenes IPMI in AS${asn} 2 | 3 | Format: ? 
4 | 5 | ${events_as_csv} 6 | 7 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-MSSQL.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene MS-SQL Browserdienste in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | Version | Server Name | Instance Name | Amplification 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-Memcached.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene Memcached-Server in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 
17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | Port | Memcached version 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-MongoDB.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene MongoDB-Server in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | Version | Databases (excerpt) 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-NetBIOS.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offenes NetBIOS in AS${asn} 2 | 3 | Format: ? 
4 | 5 | ${events_as_csv} 6 | 7 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-Portmapper.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene Portmapper-Dienste in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-Redis.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene Redis-Server in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 
17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | Port | Redis version 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-SNMP.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene SNMP-Dienste in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | Device ID 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-SSDP.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene SSDP-Dienste in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 
10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | SSDP server 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Open-mDNS.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Offene mDNS-Server in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 17 | 18 | ====================================================================== 19 | 20 | Betroffene Systeme in Ihrem Netzbereich: 21 | Affected systems on your network: 22 | 23 | Format: ASN | IP address | Timestamp (UTC) | Workstation Info 24 | 25 | ${events_as_csv} 26 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Ssl-Freak-Scan.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] SSL-Freak in AS${asn} 2 | 3 | Format: ? 
4 | 5 | ${events_as_csv} 6 | 7 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-Ssl-Scan.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] SSL-Scan in AS${asn} 2 | 3 | Format: ? 4 | 5 | ${events_as_csv} 6 | 7 | 8 | -------------------------------------------------------------------------------- /extras/dirty_setup/mg-templates/template-generic_malware.txt: -------------------------------------------------------------------------------- 1 | [TEST-Report#${ticket_number}] Schadprogramm-Infektionen in AS${asn} 2 | 3 | [English version below] 4 | 5 | Sehr geehrte Damen und Herren, 6 | 7 | dies ist ein Test-Report für Entwicklungs- und 8 | Qualitätssicherungszwecke, ein Versnd an externe Empfänger ist nicht 9 | sinnvoll oder beabsichtigt. 10 | 11 | ====================================================================== 12 | 13 | Dear Sir or Madam, 14 | 15 | this is a test report for development and quality assurance, sending 16 | to external percipience is neither sensible nor intend. 
def open_db_connection(config,
                       connection_factory=None) -> psycopg2_connection:
    """Open a psycopg2 connection to the event database.

    Connection parameters are read from config["database"]["event"].
    Autocommit is deliberately *not* enabled, so calling code must take
    care of transaction handling itself.
    """
    params = config['database']['event']
    connect_args = {
        'database': params['name'],
        'user': params['username'],
        'password': params['password'],
        'host': params['host'],
        'port': params['port'],
        # 'sslmode': params['sslmode'],
    }
    return psycopg2.connect(connection_factory=connection_factory,
                            **connect_args)


PENDING_DIRECTIVES_QUERY = """\
SELECT d.recipient_address AS recipient_address,
       d.template_name AS template_name,
       d.notification_format AS notification_format,
       d.event_data_format AS event_data_format,
       d.aggregate_identifier AS aggregate_identifier,
       array_agg(d.events_id) AS event_ids,
       array_agg(d.id) AS directive_ids,
       max(d.inserted_at) AS inserted_at,
       max(d.notification_interval) AS notification_interval,
       (SELECT s.sent_at
          FROM directives AS d2
          JOIN sent s ON d2.sent_id = s.id
         WHERE d2.recipient_address = d.recipient_address
           AND d2.template_name = d.template_name
           AND d2.notification_format = d.notification_format
           AND d2.event_data_format = d.event_data_format
           AND d2.aggregate_identifier = d.aggregate_identifier
         ORDER BY d2.inserted_at DESC
         LIMIT 1) AS last_sent
  FROM (SELECT d3.id, events_id, recipient_address, template_name,
               notification_format, event_data_format, notification_interval,
               aggregate_identifier, inserted_at
          FROM directives AS d3
          {additional_directive_join}
         WHERE sent_id IS NULL
           AND medium = 'email'
           AND endpoint = 'source'
           {additional_directive_where}
           FOR UPDATE NOWAIT) AS d
 GROUP BY d.recipient_address, d.template_name, d.notification_format,
          d.event_data_format, d.aggregate_identifier;
"""
68 | """Retrieve all pending directives from the database. 69 | Directives are pending if the notification they describe hasn't been 70 | sent yet and the last time a similar notification has been sent was 71 | long enough ago that the notification interval has been exceeded. 72 | The directives are grouped according to the aggregation identifier. 73 | 74 | :returns: list of aggregated directives 75 | :rtype: list 76 | """ 77 | try: 78 | additional_directive_join = "" 79 | if additional_directive_where: 80 | if 'events.' in additional_directive_where: 81 | additional_directive_join = "JOIN events ON d3.events_id = events.id" 82 | additional_directive_where = f"AND {additional_directive_where}" 83 | else: 84 | additional_directive_where = "" 85 | cur.execute(PENDING_DIRECTIVES_QUERY.format(additional_directive_where=additional_directive_where, 86 | additional_directive_join=additional_directive_join)) 87 | except psycopg2.OperationalError as exc: 88 | if exc.pgcode == psycopg2.errorcodes.LOCK_NOT_AVAILABLE: 89 | log.info("Could not get db lock for pending notifications. " 90 | "Probably another instance of myself is running.") 91 | return None 92 | else: 93 | raise 94 | 95 | return cur.fetchall() 96 | 97 | 98 | # characters allowed in identifiers in escape_sql_identifier. There are 99 | # just the characters that are used in IntelMQ for identifiers in the 100 | # events table. 101 | sql_identifier_charset = set(string.ascii_letters + string.digits + "_.") 102 | 103 | 104 | def escape_sql_identifier(ident): 105 | if set(ident) - sql_identifier_charset: 106 | raise ValueError("Event column identifier %r contains invalid" 107 | " characters (%r)" 108 | % (ident, set(ident) - sql_identifier_charset)) 109 | return '"' + ident + '"' 110 | 111 | 112 | def load_events(cur, event_ids, columns=None): 113 | """Return events for the ids with all or a subset of available columns. 114 | 115 | Use the columns parameter to specify which columns to return. 
116 | 117 | :param cur: database connection 118 | :param event_ids: list of events ids 119 | :param columns: list of column names, defaults to all if 'None' is given. 120 | returns: corresponding events as a list of dictionaries 121 | """ 122 | if columns is not None: 123 | sql_columns = ", ".join(escape_sql_identifier(col) for col in columns) 124 | else: 125 | sql_columns = "*" 126 | cur.execute("SELECT {} FROM events WHERE id = ANY (%s)".format(sql_columns), 127 | (event_ids,)) 128 | 129 | return cur.fetchall() 130 | 131 | 132 | def new_ticket_number(cur): 133 | """Draw a new unique ticket number. 134 | 135 | Check the database and reset the ticket counter if 136 | our day is past the last initialisation day. 137 | Raise RuntimeError if last initialisation is in the future, because 138 | we may potentially reuse ticket numbers if we get to this day. 139 | 140 | :returns: a unique ticket-number string in format YYYYMMDD-XXXXXXXX 141 | :rtype: string 142 | """ 143 | sqlQuery = """SELECT to_char(now(), 'YYYYMMDD') AS date, 144 | (SELECT to_char(initialized_for_day, 'YYYYMMDD') 145 | FROM ticket_day) AS init_date, 146 | nextval('intelmq_ticket_seq');""" 147 | cur.execute(sqlQuery) 148 | result = cur.fetchall() 149 | 150 | date_str = result[0]["date"] 151 | if date_str != result[0]["init_date"]: 152 | if date_str < result[0]["init_date"]: 153 | raise RuntimeError( 154 | f"initialized_for_day='{result[0]['init_date']}' is in the future from now(). 
" 155 | "Stopping to avoid reusing ticket numbers.") 156 | 157 | log.debug("We have a new day, resetting the ticket generator.") 158 | cur.execute("ALTER SEQUENCE intelmq_ticket_seq RESTART;") 159 | cur.execute("UPDATE ticket_day SET initialized_for_day=%s;", 160 | (date_str,)) 161 | 162 | cur.execute(sqlQuery) 163 | result = cur.fetchall() 164 | 165 | ticket = _format_ticket(date_str, result[0]["nextval"]) 166 | log.debug('New ticket number %r.', ticket) 167 | 168 | return ticket 169 | 170 | 171 | def _format_ticket(date_str, sequence_number: int) -> str: 172 | # num_str from integer: fill with 0s and cut out 8 chars from the right 173 | num_str = "{:08d}".format(sequence_number)[-8:] 174 | ticket = "{:s}-{:s}".format(date_str, num_str) 175 | 176 | return ticket 177 | 178 | 179 | def last_ticket_number(cur) -> str: 180 | """Return a ticket number that has recently been drawn. 181 | 182 | Because of race conditions, there might by other tickets numbers already 183 | drawn or the emails may not be send out yet. 184 | """ 185 | sql_query = """SELECT 186 | (SELECT to_char(initialized_for_day, 'YYYYMMDD') 187 | FROM ticket_day) AS day, 188 | last_value FROM intelmq_ticket_seq;""" 189 | 190 | cur.execute(sql_query) 191 | result = cur.fetchone() 192 | 193 | return _format_ticket(result["day"], result["last_value"]) 194 | 195 | 196 | def mark_as_sent(cur, directive_ids, ticket, sent_at): 197 | """Mark directives as sent. 198 | Args: 199 | directive_ids (list of int): IDs of the directives to be marked as sent 200 | ticket (string): The ticket number 201 | sent_at (datetime): When the mail was sent. Should be the value 202 | used in the Date header of the mail. 
203 | """ 204 | log.debug("Marking directive ids %r as sent.", directive_ids) 205 | cur.execute("""\ 206 | WITH sent_row AS (INSERT INTO sent (intelmq_ticket, sent_at) 207 | VALUES (%s, %s) 208 | RETURNING id) 209 | UPDATE directives 210 | SET sent_id = (SELECT id FROM sent_row) 211 | WHERE id = ANY (%s);""", 212 | (ticket, sent_at, directive_ids,)) 213 | -------------------------------------------------------------------------------- /intelmqmail/mail.py: -------------------------------------------------------------------------------- 1 | """Email-related functions 2 | * SPDX-License-Identifier: AGPL-3.0-or-later 3 | 4 | * SPDX-FileCopyrightText: 2016-2019,2021 BSI 5 | * Software-Engineering: 2016-2019,2021 Intevation GmbH 6 | Authors: 7 | * 2016-2019 Bernhard Herzog 8 | """ 9 | 10 | import logging 11 | from email.message import EmailMessage 12 | from email.contentmanager import ContentManager, raw_data_manager 13 | from email.policy import SMTP 14 | from email.utils import formatdate, make_msgid, parseaddr 15 | 16 | import gpg 17 | 18 | 19 | log = logging.getLogger(__name__) 20 | 21 | 22 | class DomainNotFound(Exception): 23 | 24 | """Exception raised when no domain could be extracted from the sender""" 25 | 26 | 27 | def domain_from_sender(sender): 28 | """Extract the domain of the email address in sender. 29 | 30 | The argument is expected to be a string that could be used as the 31 | value of the From: headerfield, e.g. a plain email address or it 32 | could include both a display name and the email address. 33 | 34 | If the plain email address included in sender does not have a 35 | domain, an exception is raised. 36 | """ 37 | address = parseaddr(sender)[1] 38 | domain = address.partition("@")[-1] 39 | if not domain: 40 | raise DomainNotFound("Could not extract the domain from the sender (%r)" 41 | % (sender,)) 42 | return domain 43 | 44 | 45 | # Map gpgme hash algorithm IDs to OpenPGP/MIME micalg strings. 
GPG 46 | # supports more algorithms than are listed here, but this should cover 47 | # the algorithms that are likely to be used. 48 | hash_algorithms = { 49 | gpg._gpgme.GPGME_MD_SHA1: "pgp-sha1", 50 | gpg._gpgme.GPGME_MD_SHA256: "pgp-sha256", 51 | gpg._gpgme.GPGME_MD_SHA384: "pgp-sha384", 52 | gpg._gpgme.GPGME_MD_SHA512: "pgp-sha512", 53 | } 54 | 55 | 56 | class MailgenContentManager(ContentManager): 57 | """ContentManager enforcing mailgen specific goals. 58 | 59 | This content manager delegates all functionality to the 60 | raw_data_manager except for these: 61 | 62 | - quoted-printable transfer encoding for text 63 | 64 | Always using quoted-printable has the advantage that the text 65 | parts of the generated mail will have only ASCII characters and 66 | reasonably short lines, even if the original text does not. 67 | 68 | - Escaping "From " at the beginning of lines in text 69 | 70 | "From " at the beginning of lines can be problematic because for 71 | some tools it indicates the beginning of a message and some mail 72 | agents therefore modify such mails by prepending a '>' character 73 | to the line, breaking cryptographic signatures. Since we're 74 | enforcing quoted-printable for all text content, we can simply 75 | replace "From " with "From=20" in the quoted printable encoded 76 | text. 
77 | """ 78 | 79 | def get_content(self, msg, *args, **kw): 80 | return raw_data_manager.get_content(msg, *args, **kw) 81 | 82 | def set_content(self, msg, obj, *args, **kw): 83 | if isinstance(obj, str): 84 | kw["cte"] = "quoted-printable" 85 | 86 | raw_data_manager.set_content(msg, obj, *args, **kw) 87 | 88 | if msg.get("content-transfer-encoding") == "quoted-printable": 89 | content = msg.get_payload(decode=False) 90 | from_escaped = content.replace("From ", "From=20") 91 | msg.set_payload(from_escaped) 92 | 93 | 94 | mailgen_policy = SMTP.clone(cte_type="7bit", 95 | content_manager=MailgenContentManager()) 96 | 97 | 98 | def create_mail(sender, recipient, subject, body, attachments, gpgme_ctx): 99 | """Create an email either as single or multi-part with attachments. 100 | """ 101 | msg = EmailMessage(policy=mailgen_policy) 102 | msg.set_content(body) 103 | attachment_parent = msg 104 | if gpgme_ctx is not None: 105 | msg.make_mixed() 106 | attachment_parent = next(msg.iter_parts()) 107 | 108 | if attachments: 109 | for args, kw in attachments: 110 | attachment_parent.add_attachment(*args, **kw) 111 | 112 | if gpgme_ctx is not None: 113 | signed_bytes = attachment_parent.as_bytes() 114 | hash_algo, signature = detached_signature(gpgme_ctx, signed_bytes) 115 | 116 | msg.add_attachment(signature, "application", "pgp-signature", 117 | cte="8bit") 118 | # the signature part should now be the last of two parts in the 119 | # message, the first one being the signed part. 
def create_mail(sender, recipient, subject, body, attachments, gpgme_ctx):
    """Create an email either as single or multi-part with attachments.

    :param sender: value for the From: header (its domain is also used
        for the Message-Id)
    :param recipient: value for the To: header
    :param subject: value for the Subject: header
    :param body: the main text of the mail
    :param attachments: iterable of (args, kwargs) pairs forwarded to
        add_attachment
    :param gpgme_ctx: gpgme context used to create a multipart/signed
        mail, or None for an unsigned mail
    :returns: the finished EmailMessage
    :raises RuntimeError: if gpgme uses a hash algorithm not covered by
        hash_algorithms
    """
    msg = EmailMessage(policy=mailgen_policy)
    msg.set_content(body)
    attachment_parent = msg
    if gpgme_ctx is not None:
        msg.make_mixed()
        attachment_parent = next(msg.iter_parts())

    if attachments:
        for args, kw in attachments:
            attachment_parent.add_attachment(*args, **kw)

    if gpgme_ctx is not None:
        signed_bytes = attachment_parent.as_bytes()
        hash_algo, signature = detached_signature(gpgme_ctx, signed_bytes)

        msg.add_attachment(signature, "application", "pgp-signature",
                           cte="8bit")
        # the signature part should now be the last of two parts in the
        # message, the first one being the signed part.
        signature_part = list(msg.iter_parts())[1]
        if "Content-Disposition" in signature_part:
            del signature_part["Content-Disposition"]

        msg.replace_header("Content-Type", "multipart/signed")

        micalg = hash_algorithms.get(hash_algo)
        if micalg is None:
            # FIX: report hash_algo directly. The previous code used
            # signature[0].hash_algo, but signature is a bytestring
            # here, so raising this error crashed with AttributeError.
            raise RuntimeError("Unexpected hash algorithm %r from gpgme"
                               % (hash_algo,))

        msg.set_param("protocol", "application/pgp-signature")
        msg.set_param("micalg", micalg)

    msg.add_header("From", sender)
    msg.add_header("To", recipient)
    msg.add_header("Subject", subject)
    msg.add_header("Date", formatdate(timeval=None, localtime=True))

    # take the domain part of sender as the domain part of the message ID.
    msg.add_header("Message-Id", make_msgid(domain=domain_from_sender(sender)))

    return msg


def clearsign(gpgme_ctx, text):
    """Return text clear-signed with the keys configured in gpgme_ctx."""
    try:
        signature, signResult = gpgme_ctx.sign(
            text.encode(),
            mode=gpg.constants.sig.mode.CLEAR)
    except Exception:
        log.error("OpenPGP signing failed!")
        raise

    return signature.decode()


def detached_signature(gpgme_ctx, plainbytes):
    """Create a detached signature for multipart/signed messages.

    The signature created by this function is ascii armored because
    that's required for multipart/signed messages.

    Args:
        gpgme_ctx (gpgme context): The gpgme context to use for signing.
            The signature is made with whatever keys are set as signing
            keys in this context.
        plainbytes (bytes): The data to sign

    Return:
        Tuple of (hash_algo, signature). The hash_algo is one of the
        relevant constants in gpgme. The signature is a bytestring
        with the signature.
    """
    try:
        gpgme_ctx.armor = True
        signature, signResult = gpgme_ctx.sign(
            plainbytes,
            mode=gpg.constants.sig.mode.DETACH)
    except Exception:
        # FIX: use the module logger like clearsign does instead of
        # print(), so the failure ends up in the log file.
        log.error("OpenPGP signing for multipart/signed failed!")
        raise

    return (signResult.signatures[0].hash_algo, signature)
22 | """ 23 | 24 | def __init__(self, filename, entry_point): 25 | self.filename = filename 26 | self.entry_point = entry_point 27 | 28 | def __call__(self, *args, **kw): 29 | return self.entry_point(*args, **kw) 30 | 31 | 32 | def load_scripts(script_directory, entry_point, logger=None): 33 | if logger is None: 34 | logger = log 35 | entry_points = [] 36 | found_errors = False 37 | glob_pattern = os.path.join(glob.escape(script_directory), 38 | "[0-9][0-9]*.py") 39 | for filename in sorted(glob.glob(glob_pattern)): 40 | try: 41 | with open(filename, "r") as scriptfile: 42 | logger.debug("Loading script %r.", filename) 43 | my_globals = {} 44 | exec(compile(scriptfile.read(), filename, "exec"), 45 | my_globals) 46 | entry = my_globals.get(entry_point) 47 | if entry is not None: 48 | entry_points.append(Script(filename, entry)) 49 | else: 50 | found_errors = True 51 | logger.error("Cannot find entry point %r in %r", 52 | entry_point, filename) 53 | except Exception: 54 | found_errors = True 55 | logger.exception("Exception while trying to find entry point %r in %r", 56 | entry_point, filename) 57 | if found_errors: 58 | raise RuntimeError("Errors found while loading scripts." 59 | " See log file for details") 60 | return entry_points 61 | -------------------------------------------------------------------------------- /intelmqmail/tableformat.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016 by Bundesamt für Sicherheit in der Informationstechnik 2 | # Software engineering by Intevation GmbH 3 | # 4 | # This program is Free Software: you can redistribute it and/or modify 5 | # it under the terms of the GNU Affero General Public License as published by 6 | # the Free Software Foundation, either version 3 of the License, or 7 | # (at your option) any later version. 
class TableFormat:

    """Describes a table format as a named, ordered list of columns."""

    def __init__(self, name, columns):
        """Initialize the format specification.
        The columns parameter should be a list of Column instances."""
        self.name = name
        self.columns = columns

    def column_titles(self):
        """Return a dictionary with the column titles for use as a header.

        This could be the header line in a CSV file, for instance. The
        keys of the dictionary are the same that row_from_event also
        uses.
        """
        return {col.column_key: col.title for col in self.columns}

    def event_table_columns(self):
        """Return a list with the (deduplicated) event table columns
        to retrieve from the event table.
        """
        return list({col.event_table_column for col in self.columns})

    def column_keys(self):
        """Return a list with the keys used for the rows.

        Intended to be used as the field names parameter for e.g. the
        csv.DictWriter class; matches the dictionaries returned by the
        row_from_event method.
        """
        return [col.column_key for col in self.columns]

    def row_from_event(self, event):
        """Return the row for the given event as a dictionary."""
        return {col.column_key: col.value_from_event(event)
                for col in self.columns}


class Column:

    """Specifies a single column for a TableFormat.

    This base class only provides a title for the column.

    Derived classes should implement the following attributes and methods:

    :title: the column title
    :event_table_column: the column of the event table to retrieve
    :column_key: a key to use for the row dictionary.
                 All columns of a single format must have different
                 column_key values.
    :value_from_event(event): Return the value of the column for the
        given event. The event parameter is a dictionary that has at
        least a value for the event_table_column.
    """

    def __init__(self, title):
        self.title = title


class IntelMQColumn(Column):

    """Column filled directly from an IntelMQ field."""

    def __init__(self, title, field_name):
        super().__init__(title)
        self.field_name = field_name

    @property
    def event_table_column(self):
        return self.field_name

    @property
    def column_key(self):
        return self.field_name

    def value_from_event(self, event):
        return event[self.field_name]


class ExtraColumn(Column):

    """Column filled with a value taken from the IntelMQ extra field.

    The extra_key parameter of the constructor gives the key to look
    up in the JSON dictionary contained in the extra field.
    """

    def __init__(self, title, extra_key):
        super().__init__(title)
        self.extra_key = extra_key

    @property
    def event_table_column(self):
        return "extra"

    @property
    def column_key(self):
        return "extra:" + self.extra_key

    def value_from_event(self, event):
        value = event[self.event_table_column]
        # With psycopg 2.4.5 values of type JSON in the database are
        # returned as strings. In newer psycopg versions they are
        # converted automatically, so we may not have to convert the
        # value.
        # FIXME: This aspect (not having to convert with newer psycopg
        # versions) has not been tested.
        if isinstance(value, str):
            value = json.loads(value)
        return value.get(self.extra_key)


# convenience functions for building the format datastructures in a more
# declarative way.


def build_table_formats(formats):
    """Return a dictionary mapping format names to format specifications.

    The parameter is a list of (formatname, columns) pairs, where
    formatname is the name of the format as a string and columns is a
    list of column specifications. The formatname values are used as
    the keys in the dictionary and both formatname and columns are
    passed to build_table_format to create the corresponding format
    specification.
    """
    return {name: build_table_format(name, columns)
            for name, columns in formats}


def build_table_format(name, columns):
    """Build a TableFormat instance for name.

    The columns parameter should be a list of column specifications
    which are passed to build_table_column to create the list of
    columns for the TableFormat instance.
    """
    return TableFormat(name, [build_table_column(col) for col in columns])


def build_table_column(col):
    """Return a Column instance built from a column specification.

    A column specification may either be a tuple of strings of the form
    (intelmq_field, column_title) or an instance of Column.

    In the former case, if intelmq_field starts with "extra.", an
    ExtraColumn instance is created using the rest of intelmq_field as
    the extra_key parameter. Otherwise an IntelMQColumn instance is
    created.

    Instances of Column will be used as is.
    """
    if not isinstance(col, tuple):
        return col
    intelmq_field, column_title = col
    first_component, _, rest = intelmq_field.partition(".")
    if first_component == "extra":
        return ExtraColumn(column_title, rest)
    return IntelMQColumn(column_title, intelmq_field)


def format_as_csv(table_format, events):
    """Return a list of event dictionaries as a CSV formatted string.

    :table_format: The table format, assumed to be a TableFormat instance.
    :events: list of event dictionaries
    """
    out = io.StringIO()
    writer = csv.DictWriter(out, table_format.column_keys(), delimiter=",",
                            quotechar='"', quoting=csv.QUOTE_ALL)
    writer.writerow(table_format.column_titles())

    for event in events:
        row = table_format.row_from_event(event)
        source_time = row.get('time.source')
        if source_time:
            # drop timezone and sub-second precision for readability
            row['time.source'] = source_time.replace(tzinfo=None,
                                                     microsecond=0)
        writer.writerow(row)

    return out.getvalue()
def full_template_filename(template_dir, template_name):
    """Return the full absolute file name of a template.

    The template_name parameter is interpreted relative to template_dir
    and must refer to a file under that directory. If the resulting file
    name would name a file outside of template_dir, a ValueError
    exception is raised. This check is done to guard against malicious
    template names.
    """
    # make sure absbase ends with "/" so that the check whether the
    # resulting template file name is located under template_dir
    # actually works. os.path.abspath will remove any trailing slashes
    # from its parameter so we can simply append a single one.
    absbase = os.path.abspath(template_dir) + os.path.sep
    absfilename = os.path.abspath(os.path.join(template_dir, template_name))
    if not absfilename.startswith(absbase):
        raise ValueError(f"Invalid template name {template_name!r}! Full template filename"
                         f" would be outside of the template base directory {template_dir!r}.")
    return absfilename


def read_template(template_dir, template_name):
    """Read the email template indicated by template_dir and template_name.

    The name of the template file is determined with full_template_filename.

    File Format:

    - The first non-empty line of the file is assumed to be the
      template string for the subject line of the email.

    - The rest of the lines are the email body. Leading and trailing
      white space is removed from the body and a newline added at the
      end. This allows e.g. an empty line in the template between the
      subject line and the body.

    The resulting strings are used as template strings, allowing simple
    substitutions. See the different formatter implementations for the
    substitutions they support.

    :returns: an instance of the Template class
    :raises ValueError: if the template file contains no non-empty line
        (and therefore no subject)
    """
    with open(full_template_filename(template_dir, template_name)) as infile:
        subject = None
        while not subject:
            line = infile.readline()
            if not line:
                # FIX: at EOF readline() returns "" forever, so the
                # previous implementation looped infinitely on a file
                # without any non-empty line. Fail explicitly instead.
                raise ValueError(
                    f"Template {template_name!r} contains no subject line")
            subject = line.strip()
        return Template.from_strings(subject, infile.read().strip() + "\n")


class IntelMQStringTemplate(string.Template):

    """Variant of string.Template that allows '.' characters in identifiers."""

    idpattern = "[_a-z][_a-z0-9.]*"


class Template:

    """A template for email contents.

    The template contains two separate templates, one for the subject
    and one for the body. To fill in values, use the substitute()
    method.
    """

    def __init__(self, subject, body):
        """Initialize the template with subject and body.
        Both parameters should behave like string.Template instances.
        """
        self.subject = subject
        self.body = body

    @classmethod
    def from_strings(cls, subject, body):
        """Convenience method that creates a template from strings.
        The strings are converted to templates with IntelMQStringTemplate.
        """
        return cls(IntelMQStringTemplate(subject),
                   IntelMQStringTemplate(body))

    def __repr__(self):
        return f"Template({self.subject!r}, {self.body!r})"

    def substitute(self, substitutions):
        """Fill-in the template with the given substitutions.

        The substitutions parameter should be a dictionary mapping the
        keys that might be in the template to the respective values.
        This is done by passing the dictionary to the subject/body's
        substitute method.

        :returns: a pair (subject, body) with the filled in subject and
            body
        """
        return (self.subject.substitute(substitutions),
                self.body.substitute(substitutions))
For a discussion on single-sourcing 15 | # the version across setup.py and the project code, see 16 | # https://packaging.python.org/en/latest/single_source_version.html 17 | version='1.3.8-dev', 18 | 19 | description='A mail sending module for IntelMQ', 20 | 21 | # The project's main homepage. 22 | url='https://github.com/intevation/intelmq-mailgen', 23 | 24 | # Author details 25 | author='Intevation GmbH', 26 | author_email='info@intevation.de', 27 | 28 | # Choose your license 29 | license='GNU Affero General Public License', 30 | 31 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers 32 | classifiers=[ 33 | # Pick your license as you wish (should match "license" above) 34 | 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', 35 | 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)', 36 | 37 | 'Programming Language :: Python :: 3', 38 | 'Programming Language :: Python :: 3.6', 39 | 'Programming Language :: Python :: 3.7', 40 | 'Programming Language :: Python :: 3.8', 41 | 'Programming Language :: Python :: 3.9', 42 | 'Programming Language :: Python :: 3.10', 43 | 'Programming Language :: Python :: 3.11', 44 | ], 45 | 46 | # What does your project relate to? 47 | keywords='intelmq mailer postgresql abuse-handling', 48 | 49 | packages=['intelmqmail'], 50 | 51 | install_requires=[ 52 | 'psycopg2', 53 | # Ubuntu 20.04 focal's python3-gpg does not correctly provide the python-package's metadata. 
54 | # Requiring gpg here would result in 55 | # > pkg_resources.DistributionNotFound: The 'gpg>=1.10' distribution was not found and is required by intelmqmail 56 | # https://bugs.launchpad.net/ubuntu/+source/gpgme1.0/+bug/1977645 57 | # The dependency can be reactivated when the bug is fixed upstream 58 | # 'gpg >= 1.10', # /!\ can (probably) **not** be installed via pip 59 | # Later versions can use gpgme from pypi 60 | # 'gpgme', 61 | 62 | # * (optional) pyxarf v>0.0.5 for python3 installed 63 | # https://github.com/xarf/python-xarf 64 | # (v==0.0.5 does **not** work) 65 | # version 2502a80ae9178a1ba0b76106c800d0e4b779d8da shall work 66 | ], 67 | 68 | entry_points={ 69 | 'console_scripts': [ 70 | 'intelmqcbmail = intelmqmail.cb:main', 71 | ], 72 | }, 73 | ) 74 | -------------------------------------------------------------------------------- /sql/notifications.sql: -------------------------------------------------------------------------------- 1 | -- Initialize the notifications part of the event DB. 2 | -- 3 | -- The notifications part keeps track of which emails are to be sent and 4 | -- which have already been sent in the notifications table. There's also 5 | -- a trigger on the events table that automatically extracts the 6 | -- notification information added to events by the certbund_contact bot 7 | -- and inserts it into notifications. 
8 | 9 | BEGIN; 10 | 11 | 12 | CREATE ROLE eventdb_owner 13 | NOLOGIN NOSUPERUSER NOINHERIT NOCREATEDB CREATEROLE; 14 | CREATE ROLE eventdb_insert 15 | NOLOGIN NOSUPERUSER NOINHERIT NOCREATEDB CREATEROLE; 16 | CREATE ROLE eventdb_send_notifications 17 | NOLOGIN NOSUPERUSER NOINHERIT NOCREATEDB CREATEROLE; 18 | 19 | ALTER DATABASE :"DBNAME" OWNER TO eventdb_owner; 20 | 21 | ALTER TABLE events OWNER TO eventdb_owner; 22 | 23 | -- must be superuser to create type 24 | CREATE TYPE ip_endpoint AS ENUM ('source', 'destination'); 25 | 26 | CREATE SEQUENCE intelmq_ticket_seq MINVALUE 10000001; 27 | ALTER SEQUENCE intelmq_ticket_seq OWNER TO eventdb_send_notifications; 28 | 29 | SET ROLE eventdb_owner; 30 | 31 | GRANT INSERT ON events TO eventdb_insert; 32 | GRANT USAGE ON events_id_seq TO eventdb_insert; 33 | GRANT SELECT ON events TO eventdb_send_notifications; 34 | 35 | -- a single row table to save which day we currently use for intelmq_ticket 36 | CREATE TABLE ticket_day ( 37 | initialized_for_day DATE 38 | ); 39 | INSERT INTO ticket_day (initialized_for_day) VALUES('20160101'); 40 | GRANT SELECT, UPDATE ON ticket_day TO eventdb_send_notifications; 41 | 42 | 43 | CREATE TABLE sent ( 44 | id BIGSERIAL UNIQUE PRIMARY KEY, 45 | intelmq_ticket VARCHAR(18) UNIQUE NOT NULL, 46 | sent_at TIMESTAMP WITH TIME ZONE 47 | ); 48 | 49 | 50 | GRANT SELECT, INSERT ON sent TO eventdb_send_notifications; 51 | GRANT USAGE ON sent_id_seq TO eventdb_send_notifications; 52 | 53 | 54 | CREATE TABLE directives ( 55 | id BIGSERIAL UNIQUE PRIMARY KEY, 56 | events_id BIGINT NOT NULL, 57 | sent_id BIGINT, 58 | 59 | medium VARCHAR(100) NOT NULL, 60 | recipient_address VARCHAR(100) NOT NULL, 61 | template_name VARCHAR(100) NOT NULL, 62 | notification_format VARCHAR(100) NOT NULL, 63 | event_data_format VARCHAR(100) NOT NULL, 64 | aggregate_identifier TEXT[][], 65 | notification_interval INTERVAL NOT NULL, 66 | endpoint ip_endpoint NOT NULL, 67 | 68 | inserted_at TIMESTAMP WITH TIME ZONE NOT NULL 
DEFAULT CURRENT_TIMESTAMP, 69 | 70 | FOREIGN KEY (events_id) REFERENCES events(id), 71 | FOREIGN KEY (sent_id) REFERENCES sent(id) 72 | ); 73 | 74 | 75 | CREATE INDEX directives_grouping_inserted_at_idx 76 | ON directives (recipient_address, template_name, 77 | notification_format, event_data_format, 78 | aggregate_identifier, inserted_at); 79 | CREATE INDEX directives_events_id_idx 80 | ON directives (events_id); 81 | CREATE INDEX directives_sent_id_idx 82 | ON directives (sent_id); 83 | 84 | -- Use https://www.postgresql.org/docs/9.5/pgtrgm.html to allow for 85 | -- fast ILIKE search in tags saved in the aggregate_identifier. 86 | -- If additional tags are entered there, additional indixes may be advisable. 87 | CREATE EXTENSION pg_trgm; 88 | CREATE INDEX directives_recipient_group_idx 89 | ON directives USING gist ( 90 | (json_object(aggregate_identifier) ->> 'recipient_group') 91 | gist_trgm_ops 92 | ); 93 | 94 | GRANT SELECT, UPDATE ON directives TO eventdb_send_notifications; 95 | 96 | 97 | -- Converts a JSON object used as aggregate identifier to a 98 | -- 2-dimensional TEXT array usable as a value in the database for 99 | -- grouping. Doing this properly is a bit tricky. Requirements: 100 | -- 101 | -- 1. the type must allow comparison because we need to be able to 102 | -- GROUP BY the aggregate_identifier column 103 | -- 104 | -- 2. The value must be chosen to preserve the equivalence relation on 105 | -- the abstract aggregate identifier, meaning 106 | -- 107 | -- (a) Equal aggregate identifiers have to be mapped to the equal 108 | -- values 109 | -- 110 | -- (b) equal values must imply equal aggregate identifiers 111 | -- 112 | -- Requirement 1 rules out using JSON directly because it doesn't 113 | -- support comparison. We cannot use JSONB either because that type is 114 | -- not available in PostgreSQL 9.3 (JSONB requires at least 9.4). 
Simply 115 | -- converting the JSON object to TEXT is not an option either since, for 116 | -- instance, the order of the keys would not be predictable. 117 | -- 118 | -- Requirement 2 means we need to be careful when choosing the 119 | -- representation. An easy solution would be to iterate over the JSON 120 | -- object with the json_each or json_each_text functions. Neither is 121 | -- really good. json_each returns the values as JSON objects in which 122 | -- case the conversion to TEXT will not preserve equality in the case of 123 | -- Strings because escape sequences will not be normalized. 124 | -- json_each_text returns the values as text which means that numbers 125 | -- and strings cannot be distinguished reliably (123 and "123" would be 126 | -- considered equal). 127 | -- 128 | -- Given that we might switch to PostgreSQL 9.5 which comes with Ubuntu 129 | -- 16.4 LTS we go with json_each_text because in most cases the values 130 | -- will have come from IntelMQ events where the values have been 131 | -- validated and e.g. ASNs will always be numbers. 
-- Build a canonical 2-dimensional TEXT array from a JSONB object;
-- see the extensive comment above for the design rationale.
CREATE OR REPLACE FUNCTION json_object_as_text_array(obj JSONB)
RETURNS TEXT[][]
AS $$
DECLARE
    arr TEXT[][] = '{}'::TEXT[][];
    k TEXT;
    v TEXT;
BEGIN
    -- Iterate in key order so that equal JSON objects always map to
    -- identical arrays (needed for GROUP BY on aggregate_identifier).
    FOR k, v IN
       SELECT * FROM jsonb_each_text(obj) ORDER BY key
    LOOP
       arr := arr || ARRAY[ARRAY[k, v]];
    END LOOP;
    RETURN arr;
END
$$ LANGUAGE plpgsql IMMUTABLE;


-- Insert one notification directive for an event into the directives
-- table.  Directives missing any mandatory attribute, or with a
-- notification_interval of -1 second, are silently skipped.
CREATE OR REPLACE FUNCTION insert_directive(
    event_id BIGINT,
    directive JSONB,
    endpoint ip_endpoint
) RETURNS VOID
AS $$
DECLARE
    medium TEXT := directive ->> 'medium';
    recipient_address TEXT := directive ->> 'recipient_address';
    template_name TEXT := directive ->> 'template_name';
    notification_format TEXT := directive ->> 'notification_format';
    event_data_format TEXT := directive ->> 'event_data_format';
    aggregate_identifier TEXT[][]
        := json_object_as_text_array(directive -> 'aggregate_identifier');
    -- interval is given in seconds in the JSON; a missing value
    -- defaults to 0 seconds
    notification_interval interval
        := coalesce(((directive ->> 'notification_interval') :: INT)
                    * interval '1 second',
                    interval '0 second');
BEGIN
    IF medium IS NOT NULL
       AND recipient_address IS NOT NULL
       AND template_name IS NOT NULL
       AND notification_format IS NOT NULL
       AND event_data_format IS NOT NULL
       AND notification_interval IS NOT NULL
       AND notification_interval != interval '-1 second'
    THEN
        INSERT INTO directives (events_id,
                                medium,
                                recipient_address,
                                template_name,
                                notification_format,
                                event_data_format,
                                aggregate_identifier,
                                notification_interval,
                                endpoint)
        VALUES (event_id,
                medium,
                recipient_address,
                template_name,
                notification_format,
                event_data_format,
                aggregate_identifier,
                notification_interval,
                endpoint);
    END IF;
END
$$ LANGUAGE plpgsql VOLATILE;


-- Extract all source directives from an event's 'extra' JSON data (as
-- added by the certbund_contact bot) and insert them individually.
CREATE OR REPLACE FUNCTION directives_from_extra(
    event_id BIGINT,
    extra JSONB
) RETURNS VOID
AS $$
DECLARE
    json_directives JSONB := extra -> 'certbund' -> 'source_directives';
    directive JSONB;
BEGIN
    IF json_directives IS NOT NULL THEN
        FOR directive
            IN SELECT * FROM jsonb_array_elements(json_directives) LOOP
            PERFORM insert_directive(event_id, directive, 'source');
        END LOOP;
    END IF;
END
$$ LANGUAGE plpgsql VOLATILE;


-- Row-level trigger function.  SECURITY DEFINER so that the role
-- inserting events does not itself need INSERT rights on directives.
CREATE OR REPLACE FUNCTION events_insert_directives_for_row()
RETURNS TRIGGER
AS $$
BEGIN
    PERFORM directives_from_extra(NEW.id, NEW.extra);
    RETURN NEW;
END
$$ LANGUAGE plpgsql VOLATILE EXTERNAL SECURITY DEFINER;

GRANT EXECUTE ON FUNCTION events_insert_directives_for_row()
    TO eventdb_insert;


-- Automatically create notification directives for every new event.
CREATE TRIGGER events_insert_directive_trigger
    AFTER INSERT ON events
    FOR EACH ROW
    EXECUTE PROCEDURE events_insert_directives_for_row();


COMMIT;
directive ->> 'medium'; 35 | recipient_address TEXT := directive ->> 'recipient_address'; 36 | template_name TEXT := directive ->> 'template_name'; 37 | notification_format TEXT := directive ->> 'notification_format'; 38 | event_data_format TEXT := directive ->> 'event_data_format'; 39 | aggregate_identifier TEXT[][] 40 | := json_object_as_text_array(directive -> 'aggregate_identifier'); 41 | notification_interval interval 42 | := coalesce(((directive ->> 'notification_interval') :: INT) 43 | * interval '1 second', 44 | interval '0 second'); 45 | BEGIN 46 | IF medium IS NOT NULL 47 | AND recipient_address IS NOT NULL 48 | AND template_name IS NOT NULL 49 | AND notification_format IS NOT NULL 50 | AND event_data_format IS NOT NULL 51 | AND notification_interval IS NOT NULL 52 | AND notification_interval != interval '-1 second' 53 | THEN 54 | INSERT INTO directives (events_id, 55 | medium, 56 | recipient_address, 57 | template_name, 58 | notification_format, 59 | event_data_format, 60 | aggregate_identifier, 61 | notification_interval, 62 | endpoint) 63 | VALUES (event_id, 64 | medium, 65 | recipient_address, 66 | template_name, 67 | notification_format, 68 | event_data_format, 69 | aggregate_identifier, 70 | notification_interval, 71 | endpoint); 72 | END IF; 73 | END 74 | $$ LANGUAGE plpgsql VOLATILE; 75 | 76 | 77 | CREATE OR REPLACE FUNCTION directives_from_extra( 78 | event_id BIGINT, 79 | extra JSONB 80 | ) RETURNS VOID 81 | AS $$ 82 | DECLARE 83 | json_directives JSONB := extra -> 'certbund' -> 'source_directives'; 84 | directive JSONB; 85 | BEGIN 86 | IF json_directives IS NOT NULL THEN 87 | FOR directive 88 | IN SELECT * FROM jsonb_array_elements(json_directives) LOOP 89 | PERFORM insert_directive(event_id, directive, 'source'); 90 | END LOOP; 91 | END IF; 92 | END 93 | $$ LANGUAGE plpgsql VOLATILE; 94 | ``` 95 | 96 | ## Add expression index for recipient_group to directives (2019-10) 97 | 98 | For each tag that is saved in the `aggregate_identifier` in the 
directives 99 | table, an index is needed if fast substring searches shall be done. 100 | Note that `intelmq-fody-backend` version>=0.6.4 offers those searches 101 | for the event statistics. 102 | 103 | The PostgreSQL extension `pg_trgm` is 104 | packaged in `postgresql-contrib-9.5` for Ubuntu 16.04 LTS. 105 | 106 | ### forward 107 | 108 | ```sql 109 | CREATE EXTENSION pg_trgm; 110 | CREATE INDEX directives_recipient_group_idx 111 | ON directives USING gist ( 112 | (json_object(aggregate_identifier) ->> 'recipient_group') 113 | gist_trgm_ops 114 | ); 115 | ``` 116 | 117 | ### backward 118 | 119 | ```sql 120 | DROP INDEX directives_recipient_group_idx; 121 | DROP EXTENSION pg_trgm CASCADE; 122 | ``` 123 | 124 | ## Directive Insertion time-stamp 125 | 126 | ### forward 127 | 128 | ```sql 129 | ALTER TABLE directives ADD COLUMN inserted_at TIMESTAMP WITH TIME ZONE; 130 | ALTER TABLE directives ALTER COLUMN inserted_at SET DEFAULT CURRENT_TIMESTAMP; 131 | UPDATE directives 132 | SET inserted_at = (SELECT "time.observation" FROM events 133 | WHERE id = events_id) 134 | WHERE inserted_at IS NULL; 135 | ALTER TABLE directives ALTER COLUMN inserted_at SET NOT NULL; 136 | ``` 137 | 138 | ### backward 139 | 140 | ```sql 141 | ALTER TABLE DROP COLUMN inserted_at; 142 | ``` 143 | 144 | ## Adapt directives_grouping_idx to actually used grouping columns 145 | 146 | ### forward 147 | 148 | ```sql 149 | DROP INDEX directives_grouping_idx; 150 | CREATE INDEX directives_grouping_idx 151 | ON directives (recipient_address, template_name, 152 | notification_format, event_data_format, 153 | aggregate_identifier); 154 | ``` 155 | 156 | ## backward 157 | 158 | ```sql 159 | DROP INDEX directives_grouping_idx; 160 | CREATE INDEX directives_grouping_idx 161 | ON directives (medium, recipient_address, template_name, 162 | notification_format, event_data_format, 163 | aggregate_identifier, endpoint); 164 | ``` 165 | 166 | ## New notification handling 167 | 168 | See git history 169 | 170 | 
171 | ## adding ticket_number #28 172 | 173 | ### forward 174 | 175 | ```sql 176 | CREATE TABLE ticket_day ( 177 | initialized_for_day DATE 178 | ); 179 | GRANT SELECT, UPDATE ON ticket_day TO eventdb_send_notifications; 180 | 181 | ALTER TABLE notifications ALTER COLUMN intelmq_ticket TYPE VARCHAR(18); 182 | 183 | DROP SEQUENCE intelmq_ticket_seq; 184 | CREATE SEQUENCE intelmq_ticket_seq MINVALUE 10000001; 185 | ALTER SEQUENCE intelmq_ticket_seq OWNER TO eventdb_send_notifications; 186 | ``` 187 | 188 | ### backwards 189 | 190 | ```sql 191 | DROP SEQUENCE intelmq_ticket_seq; 192 | CREATE SEQUENCE intelmq_ticket_seq; 193 | GRANT USAGE ON intelmq_ticket_seq TO eventdb_send_notifications; 194 | 195 | DROP TABLE ticket_day; 196 | 197 | -- will only work if all old entries can still be converted 198 | ALTER TABLE notifications ALTER COLUMN intelmq_ticket TYPE BIGINT; 199 | ``` 200 | -------------------------------------------------------------------------------- /templates/example-template-dronereport.txt: -------------------------------------------------------------------------------- 1 | Drone Report AS${source.asn} 2 | 3 | EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE 4 | 5 | Dear Sir or Madam, 6 | 7 | Please find below a list of affected systems on your network. Each 8 | record includes the IP address of the affected system, a timestamp 9 | and the name of the related malware family. If available, the record 10 | also includes the source port, target IP, target port and target 11 | hostname for the connection most probably triggered by the malware 12 | to connect to a command-and-control server. 13 | 14 | We would like to ask you to check the issues reported and to take 15 | appropriate steps to get the infected hosts cleaned up or notify 16 | your customers accordingly. 17 | 18 | This message is digitally signed using OpenPGP. 
19 | Details on the signature key used are available on our website at: 20 | 21 | example.com 22 | 23 | Please note: 24 | This is an automatically generated message. 25 | Replying to the sender address is not possible. 26 | 27 | 28 | ----------------------------------------------------------------------- 29 | 30 | Affected systems on your network: 31 | 32 | ${events_as_csv} 33 | 34 | 35 | Kind regards 36 | 37 | -------------------------------------------------------------------------------- /templates/example-template-sslfreak.txt: -------------------------------------------------------------------------------- 1 | Report on vulnerable systems against SSL FREAK Attack AS${source.asn} 2 | 3 | EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE 4 | 5 | Dear Sir or Madam, 6 | 7 | Please find below a list of systems on your network which our sources 8 | identifed as vulnerable to the SSL FREAK Attack 9 | 10 | We would like to ask you to check the issues reported and to take 11 | appropriate steps to repair these vulnerabilities. 12 | 13 | 14 | This message is digitally signed using OpenPGP. 15 | Details on the signature key used are available on our website at: 16 | 17 | example.com 18 | 19 | Please note: 20 | This is an automatically generated message. 21 | Replying to the sender address is not possible. 
22 | 23 | More information about the FREAK Attack can be obtained from: 24 | 25 | https://mitls.org/pages/attacks/SMACK#freak 26 | 27 | ######################################################################## 28 | 29 | Vulnerable systems on your network: 30 | 31 | ${events_as_csv} 32 | 33 | ######################################################################## 34 | 35 | Kind regards 36 | 37 | -------------------------------------------------------------------------------- /templates/example-template.txt: -------------------------------------------------------------------------------- 1 | ${ticket_prefix}${ticket_number} Example Report about ${asn_or_cidr} 2 | 3 | EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE EXAMPLE 4 | 5 | Dear Sir or Madam, 6 | 7 | Please find ${data_location} a list of affected systems. 8 | Each record includes the IP address of the affected system, a timestamp 9 | and the name of the related malware family. If available, the record 10 | also includes the source port, target IP, target port and target 11 | hostname for the connection most probably triggered by the malware 12 | to connect to a command-and-control server. 13 | 14 | We would like to ask you to check the issues reported and to take 15 | appropriate steps to get the infected hosts cleaned up or notify 16 | your customers accordingly. 17 | 18 | 19 | ${note_pgp_signature_en} 20 | 21 | 22 | Please note: 23 | This is the automatically generated message ${ticket_prefix}${ticket_number}. 24 | Replying to the sender address is not possible. 
25 | 26 | 27 | ${data_inline_separator_en} 28 | 29 | ${events_as_csv} 30 | 31 | 32 | ${signature_en} 33 | 34 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Intevation/intelmq-mailgen/3de33d7ab84ddc2d9a4bb26033c157713a925353/tests/__init__.py -------------------------------------------------------------------------------- /tests/keys/Readme.md: -------------------------------------------------------------------------------- 1 | First test key material created with 2 | 3 | Package: gnupg2 4 | Architecture: i386 5 | Version: 2.0.25-99intevation2 6 | 7 | libgcrypt 1.5.3 8 | 9 | test1.intelmq@example.org 10 | -------------------------- 11 | 12 | ``` 13 | LANG=C GNUPGHOME=~/tmp/dot.gnupg gpg2 --full-gen-key 14 | RSA (sign only) 15 | Requested keysize is 4096 bits 16 | Key does not expire at all 17 | Real name: Test1 IntelMQ 18 | Email address: test1.intelmq@example.org 19 | Comment: no passphrase 20 | ``` 21 | 22 | ``` 23 | LANG=C GNUPGHOME=~/tmp/dot.gnupg gpg2 --export-secret-key test1 >test1.gpg 24 | LANG=C GNUPGHOME=~/tmp/dot.gnupg gpg2 --armor --export test1 >test1.pub 25 | ``` 26 | -------------------------------------------------------------------------------- /tests/keys/test1.pub: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP PUBLIC KEY BLOCK----- 2 | Version: GnuPG v2 3 | 4 | mQINBFdO3UsBEADLQUlMuWZbj64i54kO4/4I6JIjzd9ko4VTKYm/1BHeWA6+lxBk 5 | sxzQ+9oYFJqqG5F22iJ1T/x0W5cgXqGz5ihyAXld6EfbDi6NbDVtbtYrYYZQcHCs 6 | TAhk4RT9Eqsrn4rRDGailYw/j9exBFmg3Gb3QB17PEBFIwHH9UcMJYFuGVKsefMd 7 | aKZbWWWLqMRLDoE1Y/JBF9q52aZjGtBwS0lqwct8zSbpBHCZWjaUIayAw40UOLIo 8 | MVbyHponGkfJgJfbkEqy6POgV5tLDXp+Q6tDybho7jL7/gDAd7a75o+wcv2qNhdU 9 | V4oSFUBMJiBlyx1uJM7W0mDIC4KwTlZUoB7TgZ+2MEUzABtc3T/+XqBmvGsJiTb3 10 | 
/66TacjROahueBNShPd0Ok3aJ9hXwjGQRz3GRxYkVpBV5nQgOU5/YDzH1NTtcZSD 11 | KN04d5LDpDVsbtUmgbjvI9Brr2Q2oxQ+/nqziB29ie5MsWkDhQ6vh+7D6RBFEseR 12 | lU2WMssclSetrqOc3BEObg5xgFlng+eS7tarQU9opQYLIYrHu/wmiNc9XCA81FoO 13 | +xashBziNlLpO7/minFHVU8ZQd2KEY4PcNDC5ECANlwtC6t2PkihFaVNp3jgfHn8 14 | VpcFQ0cdoUIxsxiJe87ArYDt/F4miRzEmHEUvXrf2GxWi+RLtObEiAOJBQARAQAB 15 | tDlUZXN0MSBJbnRlbE1RIChubyBwYXNzcGhyYXNlKSA8dGVzdDEuaW50ZWxtcUBl 16 | eGFtcGxlLm9yZz6JAjkEEwECACMFAldO3UsCGwMHCwkIBwMCAQYVCAIJCgsEFgID 17 | AQIeAQIXgAAKCRCRuM1+FZJWeDzfD/9JyGpZKT6Pin/KlQTBtUSwS/Lj6zlqrCmv 18 | aFpcARph8+5QSdusEs02cpiaexz+h94BvGu6Zckffk+lvekQYCF+3XYnUGxkSVnY 19 | TeiFGCR6XqPceSgb+ZsmK9+I2Aa25/vQ7n+Fxyf+lTGNZQkn38WVRzAXF8KSXrVG 20 | n3lRkT5UVCMjZR/72n46bDIKvqQ3TBPRI3htHZH9M80vcoik0MXg58y/c5dnO5rR 21 | Fr+wXRoe12srrjbzGyR9Nu4aN7U7AkjvSnmZ0yF+xqkHQ3cK4Wuc37iwJnNxUEfD 22 | DzOuEWw0+0XNAszXCvc9DqO/1WlVOI8mtTXuunY3sK8MJ1IRgPjUaJT2waYWDELb 23 | icFYwvVdnJr+I+y+b4mc/RL8hAgjMAY/nf+MAVg1l6XORtlGvGbTsH1/wLQcmPre 24 | jnY7ufWYn5afvxo/XZt6/T8SAOYf0y89D1zN0ZGzLEf7QD4+3YQ+M/+i/qYe4W/m 25 | MolwGaLqrXDEJTkCYf/I1C/Q3q0R68nighXl7hAbnJstWYc8Pik7uy2+xKfK5adS 26 | GCR/TEvGT1BtVQVSdPlMf6bLRtnVp+361tOOR+Sw3HbTfdmxJ5qN7gsqhI4Va5LS 27 | QZUfa8SQ3qtwqa2OhKQDsIbjpZGmcTn77gRB17PNR/VTecvQzOKPGcI9lYGbfl1a 28 | IRFOLVjGJQ== 29 | =/QOZ 30 | -----END PGP PUBLIC KEY BLOCK----- 31 | -------------------------------------------------------------------------------- /tests/keys/test1.sec: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Intevation/intelmq-mailgen/3de33d7ab84ddc2d9a4bb26033c157713a925353/tests/keys/test1.sec -------------------------------------------------------------------------------- /tests/test_db.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Test the intelmqmail.db module. 3 | 4 | Basic test. 5 | 6 | Dependencies: 7 | (none) 8 | Authors: 9 | * Bernhard E. 
class Tests(unittest.TestCase):

    def test_escape_sql_identifier(self):
        """Valid identifiers are double-quoted; invalid ones raise ValueError."""
        expected_quoting = {
            'abc.def': '"abc.def"',
            'AB_cde4': '"AB_cde4"',
        }
        for identifier, quoted in expected_quoting.items():
            self.assertEqual(db.escape_sql_identifier(identifier), quoted)

        for invalid in ('oh-no', '%s \\")$'):
            self.assertRaises(ValueError, db.escape_sql_identifier, invalid)
42 | 43 | Its contents must match the expected body_content and it must be 44 | text/plain with a quoted-printable transfer encoding. 45 | """ 46 | self.assertEqual(part.get_content(), self.body_content) 47 | self.assertEqual(part.get_content_maintype(), "text") 48 | self.assertEqual(part.get_content_subtype(), "plain") 49 | self.assertEqual(part["content-transfer-encoding"], "quoted-printable") 50 | 51 | def check_csv_attachment(self, part): 52 | """Check that the part's content is the expected csv attacment. 53 | 54 | Its contents must match the expected csv_content and it must be 55 | text/csv with a quoted-printable transfer encoding. 56 | """ 57 | self.assertEqual(part.get_content(), self.csv_content) 58 | self.assertEqual(part.get_content_maintype(), "text") 59 | self.assertEqual(part.get_content_subtype(), "csv") 60 | self.assertEqual(part["content-transfer-encoding"], "quoted-printable") 61 | 62 | def check_unpack_multipart(self, part, subtype): 63 | """Check that part is a multipart with the given subpart. 64 | Return the parts for further inspection 65 | """ 66 | self.assertEqual(part.get_content_maintype(), "multipart") 67 | self.assertEqual(part.get_content_subtype(), subtype) 68 | return list(part.iter_parts()) 69 | 70 | def check_no_from(self, msg): 71 | """Check that msg has no lines starting with 'From '. 72 | 73 | The serialized representation of msg must not contain any line 74 | starting with "From ". 
75 | """ 76 | self.assertNotRegex(str(msg), re.compile("^From ", re.MULTILINE)) 77 | 78 | 79 | class TestCreateUnsignedMail(MailCreationTest, unittest.TestCase): 80 | 81 | def test_unsigned_text_mail_with_attachment(self): 82 | """Test one simple notification message with an attachment.""" 83 | msg = self.create_text_mail_with_attachment(None) 84 | self.check_no_from(msg) 85 | 86 | # the mail itself is multipart/mixed with the first part being 87 | # the body and the second the CSV attachment 88 | body, csv = self.check_unpack_multipart(msg, "mixed") 89 | self.check_body_part(body) 90 | self.check_csv_attachment(csv) 91 | 92 | def test_message_id_with_display_name_in_sender(self): 93 | """Test Message-ID when sender contains a display name. 94 | 95 | The domain of the Message-ID is derived from the sender. 96 | Originally this was done in a naive way because the code assumed 97 | the sender to be given in the user@domain form. When the sender 98 | includes a display name, this must be done differently. 99 | """ 100 | msg = self.create_text_mail_with_attachment(None, 101 | sender="Real Name ") 102 | self.assertRegex(msg["Message-ID"], r"@example\.com>$") 103 | 104 | def test_message_id_with_plain_email_address_in_sender(self): 105 | """Test Message-ID when sender is a plain email address. 
106 | """ 107 | msg = self.create_text_mail_with_attachment(None, 108 | sender="rn@example.com") 109 | self.assertRegex(msg["Message-ID"], r"@example\.com>$") 110 | 111 | 112 | class TestCreateSignedMail(MailCreationTest, GpgHomeTestCase): 113 | 114 | import_keys = ['test1.sec'] 115 | 116 | def test_signed_text_mail_with_attachment(self): 117 | """Test one simple signed notification message with an attachment.""" 118 | ctx = gpg.Context() 119 | key = ctx.get_key('5F503EFAC8C89323D54C252591B8CD7E15925678') 120 | ctx.signers = [key] 121 | 122 | msg = self.create_text_mail_with_attachment(ctx) 123 | # print mail 124 | # print(msg) 125 | self.check_no_from(msg) 126 | 127 | # the mail itself is multipart/signed with the first part being 128 | # the signed part the second the signature 129 | signed, signature = self.check_unpack_multipart(msg, "signed") 130 | self.assertEqual(signature.get_content_type(), 131 | "application/pgp-signature") 132 | 133 | # the signed part is multipart/mixed with the first part being 134 | # the body and the second the CSV attachment 135 | body, csv = self.check_unpack_multipart(signed, "mixed") 136 | self.check_body_part(body) 137 | self.check_csv_attachment(csv) 138 | -------------------------------------------------------------------------------- /tests/test_notifications.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Tests for intelmqmail.notifications. 
4 | """ 5 | 6 | import unittest 7 | from datetime import datetime, timedelta, timezone 8 | 9 | from intelmqmail.notification import ScriptContext, Directive, SendContext 10 | from intelmqmail.templates import Template 11 | 12 | 13 | class TestScriptContext(unittest.TestCase): 14 | 15 | def context_with_directive(self, recipient_address="admin@example.com", 16 | template_name="generic_plaintext.txt", 17 | notification_format="generic_plaintext", 18 | event_data_format="inline_csv", 19 | aggregate_identifier=(), 20 | event_ids=(100001, 100302), 21 | directive_ids=(10, 11, 12), inserted_at=None, 22 | last_sent=None, notification_interval=None, 23 | cur=None): 24 | directive = Directive(recipient_address=recipient_address, 25 | template_name=template_name, 26 | notification_format=notification_format, 27 | event_data_format=event_data_format, 28 | aggregate_identifier=aggregate_identifier, 29 | event_ids=event_ids, directive_ids=directive_ids, 30 | inserted_at=inserted_at, last_sent=last_sent, 31 | notification_interval=notification_interval) 32 | return ScriptContext(config={'sender': 'intelmqmail@intelmq.example'}, cur=cur, gpgme_ctx=None, directive=directive, logger=None) 33 | 34 | def test_notification_interval_exceeded_no_last_sent(self): 35 | """Notification interval is exceeded if no mail has been sent before""" 36 | context = self.context_with_directive( 37 | last_sent=None, 38 | notification_interval=timedelta(hours=2)) 39 | self.assertTrue(context.notification_interval_exceeded()) 40 | 41 | def test_notification_interval_exceeded_last_sent_old_enough(self): 42 | """Notification interval is exceeded if last mail is too old""" 43 | context = self.context_with_directive( 44 | last_sent=datetime.now(timezone.utc) - timedelta(hours=3), 45 | notification_interval=timedelta(hours=2)) 46 | self.assertTrue(context.notification_interval_exceeded()) 47 | 48 | def test_notification_interval_exceeded_last_sent_too_new(self): 49 | """Notification interval is not exceeded 
if last mail was sent in interval""" 50 | context = self.context_with_directive( 51 | last_sent=datetime.now(timezone.utc) - timedelta(hours=1), 52 | notification_interval=timedelta(hours=2)) 53 | self.assertFalse(context.notification_interval_exceeded()) 54 | 55 | def test_email_notification_envelope_to(self): 56 | """ 57 | Test setting the envelope_to in mail_format_as_csv / EmailNotification 58 | """ 59 | with unittest.mock.patch('psycopg2.connect', autospec=True) as mock_connect: 60 | cursor = mock_connect.return_value.cursor 61 | script_context = self.context_with_directive(cur=cursor) 62 | with unittest.mock.patch('intelmqmail.notification.ScriptContext.new_ticket_number') as new_ticket_number: 63 | new_ticket_number.return_value = 1 64 | email_notifications = script_context.mail_format_as_csv(template=Template.from_strings('${ticket_number} Test Subject', 'Body\n${events_as_csv}'), 65 | envelope_tos=['contact@example.com']) # the internal contact, Envelope-To 66 | assert len(email_notifications) == 1 67 | assert email_notifications[0].email.get_all('To') == ['admin@example.com'] # the normal recipient, header-to 68 | assert email_notifications[0].email.get('Subject') == '1 Test Subject' 69 | with unittest.mock.patch('smtplib.SMTP', autospec=True) as mock_smtp: 70 | import smtplib 71 | email_notifications[0].send(SendContext(cur=cursor, smtp=smtplib.SMTP())) 72 | mock_smtp.return_value.send_message.assert_called_with(email_notifications[0].email, to_addrs=['contact@example.com']) 73 | 74 | def test_mail_format_as_csv_ticket_number(self): 75 | """ Test parameter ticket_number of mail_format_as_csv """ 76 | with unittest.mock.patch('psycopg2.connect', autospec=True) as mock_connect: 77 | cursor = mock_connect.return_value.cursor 78 | script_context = self.context_with_directive(cur=cursor) 79 | with unittest.mock.patch('intelmqmail.notification.ScriptContext.new_ticket_number') as new_ticket_number: 80 | new_ticket_number.return_value = 1 81 | 
email_notifications = script_context.mail_format_as_csv(template=Template.from_strings('${ticket_number} Test Subject', 'Body\n${events_as_csv}'), 82 | ticket_number=2) 83 | assert len(email_notifications) == 1 84 | assert email_notifications[0].email.get('Subject') == '2 Test Subject' 85 | assert email_notifications[0].ticket == 2 86 | -------------------------------------------------------------------------------- /tests/test_script.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Test the intelmqmail.script module 3 | """ 4 | 5 | import os 6 | import string 7 | from tempfile import TemporaryDirectory 8 | import unittest 9 | import logging 10 | 11 | 12 | from intelmqmail.script import load_scripts 13 | 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | 18 | class LoadScriptTest(unittest.TestCase): 19 | 20 | def setUp(self): 21 | self.tempdir = TemporaryDirectory() 22 | 23 | for filename, contents in self.script_files: 24 | with open(os.path.join(self.tempdir.name, filename), "xt") as f: 25 | f.write(contents) 26 | 27 | def tearDown(self): 28 | self.tempdir.cleanup() 29 | 30 | 31 | class TestLoadScriptSimple(LoadScriptTest): 32 | 33 | """Very basic load_scripts test, that tests in a rudimentary way 34 | 35 | - that the named entry point function has been extracted 36 | 37 | - the order of the entry points is the one determined by the 38 | numbers in the script file names. 

    - the names associated with the entry point objects
    """

    # (filename, source) pairs; LoadScriptTest.setUp writes them to disk
    script_files = [("10preparation.py", """\
def entry_point():
    return "preparation"
"""),
                    ("45special_rule1.py", """\
def entry_point():
    return "special rule 1"
""")]

    def test(self):
        entry_points = load_scripts(self.tempdir.name, "entry_point")
        # order follows the numeric prefix of the file names
        self.assertEqual([f() for f in entry_points],
                         ["preparation", "special rule 1"])
        # each entry point carries the full path of its script file
        self.assertEqual([f.filename for f in entry_points],
                         [os.path.join(self.tempdir.name, name)
                          for name in ["10preparation.py",
                                       "45special_rule1.py"]])


class TestLoadScriptMissingEntryPoint(LoadScriptTest):

    """Test that load_scripts raises an exception if the entry point is missing
    """

    # only the first file defines "main"; the second must trigger the error
    script_files = [("10preparation.py", """\
def main():
    return "preparation"
"""),
                    ("45special_rule1.py", """\
def some_other_function():
    pass
""")]

    def test(self):
        with self.assertLogs("intelmqmail.script") as logs:
            with self.assertRaises(RuntimeError,
                                   msg="Errors found while loading scripts"):
                load_scripts(self.tempdir.name, "main")
        self.assertEqual(logs.output,
                         ["ERROR:intelmqmail.script:Cannot find entry point"
                          " 'main' in '%s'"
                          % os.path.join(self.tempdir.name,
                                         "45special_rule1.py")])


class TestLoadScriptExecErrors(LoadScriptTest):

    """Test that load_scripts raises an exception if loading the module fails
    """

    # the first file is deliberately broken Python (SyntaxError on load)
    script_files = [("10preparation.py", """\
def eventhandler(event):
    syntax error
"""),
                    ("45special_rule1.py", """\
def eventhandler(event):
    pass
""")]

    def test(self):
        # use a new logger so that we can test passing an explicit
        # logger to load_scripts
        logger = log.getChild(self.__class__.__name__)
        with self.assertLogs(logger) as logs:
            with self.assertRaises(RuntimeError,
                                   msg="Errors found while loading scripts"):
                load_scripts(self.tempdir.name, "eventhandler", logger=logger)

        # there should be one log message with some specific content
        # (reproducing the whole content would be hard to maintain
        # because of too many irrelevant details in the traceback in the
        # message.)
        self.assertEqual(len(logs.output), 1)
        self.assertTrue(logs.output[0].startswith(
            "ERROR:tests.test_script.TestLoadScriptExecErrors:"
            "Exception while trying to find entry point 'eventhandler'"))
        self.assertTrue(logs.output[0].endswith("SyntaxError: invalid syntax"))
--------------------------------------------------------------------------------
/tests/test_sign.py:
--------------------------------------------------------------------------------
"""Test how to OpenPGP-sign data for emails.

* SPDX-License-Identifier: AGPL-3.0-or-later

* SPDX-FileCopyrightText: 2016,2021 BSI
* Software-Engineering: 2016,2021 Intevation GmbH

Dependencies:
 * python3-gpg (official GnuPG Python bindings released with gpgme)
Authors:
 * 2016,2021 Bernhard E. Reiter
"""

from timeit import default_timer as timer
import unittest
from io import BytesIO

import gpg
from .util import GpgHomeTestCase

from os import environ

# Read env var to enable all tests, including tests which may be
# hardware-dependent.
run_all_tests = False
if 'ALLTESTS' in environ:
    if environ['ALLTESTS'] == '1':
        run_all_tests = True


class SignTestCase(GpgHomeTestCase):

    # secret key imported into the temporary GNUPGHOME by GpgHomeTestCase
    import_keys = ['test1.sec']

    def test_sign_nomime(self):
        """Clear-sign a mail body and verify the resulting signature."""
        email_body = """Hello,

this is my email body,
which shall be signed."""

        # from https://www.gnupg.org/documentation/manuals/gpgme/Text-Mode.html
        # | the updated RFC 3156 mandates that the mail user agent
        # | does some preparations so that text mode is not needed anymore.
        ctx = gpg.Context(armor=True, textmode=False, offline=True)

        key = ctx.get_key('5F50 3EFA C8C8 9323 D54C 2525 91B8 CD7E 1592 5678')
        ctx.signers = [key]

        signedText, signResult = ctx.sign(
            email_body.encode(), mode=gpg.constants.sig.mode.CLEAR)
        self.assertEqual(len(signResult.signatures), 1)

        sig = signResult.signatures[0]
        self.assertEqual(sig.type, gpg.constants.sig.mode.CLEAR)
        self.assertIsInstance(sig, gpg.results.NewSignature)

        # print out the unicode string of the signed email body
        # print('\n' + signedText.decode())

        # let us verify the signature
        newPlainText, results = ctx.verify(signedText)

        # clear-signing appends a final newline to the plaintext
        self.assertEqual(newPlainText.decode(), email_body + '\n')
        self.assertEqual(len(results.signatures), 1)
        vsig = results.signatures[0]
        self.assertEqual(vsig.fpr, '5F503EFAC8C89323D54C252591B8CD7E15925678')

    @unittest.skipUnless(run_all_tests,
                         'Set ALLTESTS=1 to include this test.')
    def test_speed(self):
        """Hardware-dependent benchmark of the clear-signing rate."""
        email_body = """Hello,

this is my email body,
which shall be signed."""

        ctx = gpg.Context()
        key = ctx.get_key('5F50 3EFA C8C8 9323 D54C 2525 91B8 CD7E 1592 5678')
        ctx.signers = [key]

        plainText = BytesIO(email_body.encode())

        start = timer()
        n = 100
        for i in range(n):
            plainText.seek(0)

            signedText, signResult = ctx.sign(
                plainText, mode=gpg.constants.sig.mode.CLEAR)
            self.assertEqual(len(signResult.signatures), 1)

            sig = signResult.signatures[0]
            self.assertEqual(sig.type, gpg.constants.sig.mode.CLEAR)
            self.assertIsInstance(sig, gpg.results.NewSignature)

        end = timer()
        time_spent = end - start  # in fractions of seconds
        # print("\nTime elapsed for {:d} iterations: {:.3f}".format(n, time_spent))
        # print("That is {:.1f} signatures per second.".format(n/time_spent))
        # we want to process at least 12 per second
        self.assertGreater(n / time_spent, 12)
--------------------------------------------------------------------------------
/tests/test_templates.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""Test the templates module of intelmqmail.

Basic test.

Dependencies:
    (none)
Authors:
 * Bernhard E. Reiter
"""

import os
import unittest
from datetime import datetime, timedelta, timezone
from logging import getLogger
from tempfile import TemporaryDirectory
from unittest.mock import patch

from intelmqmail import templates
from intelmqmail.notification import Directive, ScriptContext
from intelmqmail.tableformat import build_table_format


# fallback table format mapping IntelMQ event fields to CSV column names
table_format = build_table_format(
    "Fallback",
    (("source.asn", "asn"),
     ("source.ip", "ip"),
     ("time.source", "timestamp"),
     ("source.port", "src_port"),
     ("destination.ip", "dst_ip"),
     ("destination.port", "dst_port"),
     ("destination.fqdn", "dst_host"),
     ("protocol.transport", "proto"),
     ))
NOW = datetime.now()
# expected CSV block produced from the stub events below (\r\n line ending)
CSV = f'''"asn","ip","timestamp","src_port","dst_ip","dst_port","dst_host","proto"\r
"1","1","{NOW.strftime('%Y-%m-%d %H:%M:%S')}","1","1","1","1","1"'''


def load_events(self, columns):
    # stub for ScriptContext.load_events: one event, '1' everywhere
    # except the timestamp column
    return [{i: NOW if i == 'time.source' else '1' for i in
columns}]


class TemplatesTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        "Sets up a directory structure for all template tests."
        cls.top_dir_obj = TemporaryDirectory()  # will clean itself up
        cls.template_dir = os.path.join(cls.top_dir_obj.name, 'templates')
        os.mkdir(cls.template_dir)

        # first line is the subject, the rest (after a blank line) the body
        cls.test_contents = """Subject for report #${ticket}

Body of report #${ticket} for AS ${source.asn}. Events:
${events}"""

        with open(os.path.join(cls.template_dir, "test-template"), "xt") as f:
            f.write(cls.test_contents)

    def test_full_template_filename(self):
        """Resolves a template name and rejects paths escaping the directory."""
        self.assertEqual(
            templates.full_template_filename(self.template_dir, "test-template"),
            os.path.join(self.template_dir, "test-template"))

        # ".." must not allow escaping the template directory
        self.assertRaises(ValueError,
                          templates.full_template_filename,
                          self.template_dir,
                          "../test-template")

    def test_read_template(self):
        """Reads a template from disk and substitutes its placeholders."""
        tmpl = templates.read_template(self.template_dir, "test-template")
        subject, body = tmpl.substitute({"ticket": "8172",
                                         "source.asn": "3269",
                                         "events": ""})
        self.assertEqual(subject, "Subject for report #8172")
        self.assertEqual(body, ("Body of report #8172 for AS 3269. Events:\n"
                                "\n"))

    def test_template_from_parameter(self):
        "Tests usage of template given as parameter"
        directive = Directive(recipient_address="admin@example.com",
                              template_name="generic_plaintext.txt",
                              notification_format="generic_plaintext",
                              event_data_format="inline_csv",
                              aggregate_identifier=(),
                              event_ids=(100001, 100302), directive_ids=(10, 11, 12),
                              inserted_at=None, last_sent=datetime.now(timezone.utc) - timedelta(hours=1),
                              notification_interval=timedelta(hours=2))
        # template is supplied via the templates mapping instead of a file
        context = ScriptContext(config={'sender': 'origin@localhost'}, cur=None, gpgme_ctx=None, directive=directive, logger=getLogger('test_templates'),
                                templates={'generic_plaintext.txt': templates.Template.from_strings('This is the subject!', 'and the body!\n${events_as_csv}')})
        self.assertFalse(context.notification_interval_exceeded())
        with patch.object(ScriptContext, 'load_events', new=load_events):
            with patch.object(ScriptContext, 'new_ticket_number', new=lambda cur: 1):
                retval = context.mail_format_as_csv(format_spec=table_format)
        assert retval[0].ticket == 1
        email = retval[0].email.as_string()
        assert 'Subject: This is the subject!' in email
        assert 'and the body' in email
        assert CSV in email
--------------------------------------------------------------------------------
/tests/util.py:
--------------------------------------------------------------------------------
"""Support test cases with GPGME.
2 | 3 | Initially imported from https://pypi.python.org/pypi/pygpgme/0.3 4 | sha256sum pygpgme-0.3.tar.gz 5 | 5fd887c407015296a8fd3f4b867fe0fcca3179de97ccde90449853a3dfb802e1 6 | 7 | # pygpgme - a Python wrapper for the gpgme library 8 | # Copyright (C) 2006 James Henstridge 9 | # 10 | # This library is free software; you can redistribute it and/or 11 | # modify it under the terms of the GNU Lesser General Public 12 | # License as published by the Free Software Foundation; either 13 | # version 2.1 of the License, or (at your option) any later version. 14 | # 15 | # This library is distributed in the hope that it will be useful, 16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 18 | # Lesser General Public License for more details. 19 | # 20 | # You should have received a copy of the GNU Lesser General Public 21 | # License along with this library; if not, write to the Free Software 22 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA 23 | """ 24 | 25 | import os 26 | import shutil 27 | import tempfile 28 | import unittest 29 | 30 | import gpg 31 | 32 | __all__ = ['GpgHomeTestCase'] 33 | 34 | keydir = os.path.join(os.path.dirname(__file__), 'keys') 35 | 36 | 37 | class GpgHomeTestCase(unittest.TestCase): 38 | 39 | gpg_conf_contents = '' 40 | import_keys = [] 41 | 42 | def keyfile(self, key): 43 | return open(os.path.join(keydir, key), 'rb') 44 | 45 | def setUp(self): 46 | self._gpghome = tempfile.mkdtemp(prefix='tmp.gpghome') 47 | os.environ['GNUPGHOME'] = self._gpghome 48 | fp = open(os.path.join(self._gpghome, 'gpg.conf'), 'wb') 49 | fp.write(self.gpg_conf_contents.encode('UTF-8')) 50 | fp.close() 51 | 52 | # import requested keys into the keyring 53 | ctx = gpg.Context() 54 | for key in self.import_keys: 55 | with self.keyfile(key) as fp: 56 | ctx.key_import(fp) 57 | 58 | def tearDown(self): 59 | del os.environ['GNUPGHOME'] 60 | 
shutil.rmtree(self._gpghome, ignore_errors=True) 61 | --------------------------------------------------------------------------------