├── .gitignore ├── .gitmodules ├── BUILD.txt ├── GNUmakefile ├── LICENSE ├── README.adoc ├── bin ├── dsv ├── dsv-cdns-to-pcap ├── dsv-cdns-to-tsv ├── dsv-clickhouse-sys-info ├── dsv-clickhouse-update ├── dsv-config ├── dsv-datastore-setup ├── dsv-find-node-id ├── dsv-geo-update ├── dsv-import ├── dsv-import-freeze ├── dsv-import-mirror ├── dsv-import-thaw ├── dsv-import-tsv ├── dsv-log ├── dsv-nodes-update ├── dsv-postgres-update ├── dsv-prune ├── dsv-queue ├── dsv-queue-details ├── dsv-queue-freeze ├── dsv-queue-thaw ├── dsv-rssac-daemon ├── dsv-rssac-daemon-tester ├── dsv-rssac-notify ├── dsv-rssac-reports ├── dsv-status ├── dsv-tld-update └── dsv-worker ├── debian ├── changelog ├── compat ├── control ├── dns-stats-visualizer-base.install ├── dns-stats-visualizer-base.manpages ├── dns-stats-visualizer-base.postinst ├── dns-stats-visualizer-base.prerm ├── dns-stats-visualizer-clickhouse-server.install ├── dns-stats-visualizer-clickhouse-server.manpages ├── dns-stats-visualizer-clickhouse-server.postinst ├── dns-stats-visualizer-doc.doc-base ├── dns-stats-visualizer-doc.install ├── dns-stats-visualizer-grafana-main.install ├── dns-stats-visualizer-grafana-main.postinst ├── dns-stats-visualizer-import-mirror.dirs ├── dns-stats-visualizer-import-mirror.install ├── dns-stats-visualizer-import.dirs ├── dns-stats-visualizer-import.install ├── dns-stats-visualizer-import.manpages ├── dns-stats-visualizer-import.postinst ├── dns-stats-visualizer-import.postrm ├── dns-stats-visualizer-import.preinst ├── dns-stats-visualizer-import.prerm ├── dns-stats-visualizer-rssac-notify.install ├── dns-stats-visualizer-rssac-notify.manpages ├── dns-stats-visualizer-rssac-notify.postinst ├── dns-stats-visualizer-rssac-reports.install ├── dns-stats-visualizer-rssac-reports.manpages ├── dns-stats-visualizer-rssac-reports.postinst ├── rules └── source │ └── format ├── doc ├── Advanced_User_Guide.adoc ├── Overview_and_Basic_Install.adoc ├── adoc-source │ ├── admin.adoc │ ├── appendices.adoc 
│ ├── background.adoc │ ├── basic_install.adoc │ ├── configuring.adoc │ ├── custom_install.adoc │ ├── developing.adoc │ ├── dsv-overview.png │ ├── dsv-queues.png │ ├── operations.adoc │ ├── overview.adoc │ └── packages.adoc ├── images │ ├── client_subnet.png │ ├── ip_version.png │ ├── location_city.png │ └── query_stats.png ├── man │ ├── man1 │ │ ├── dsv-clickhouse-sys-info.adoc │ │ ├── dsv-clickhouse-update.adoc │ │ ├── dsv-config.adoc │ │ ├── dsv-datastore-setup.adoc │ │ ├── dsv-find-node-id.adoc │ │ ├── dsv-geo-update.adoc │ │ ├── dsv-import-freeze.adoc │ │ ├── dsv-import-mirror.adoc │ │ ├── dsv-import-thaw.adoc │ │ ├── dsv-import.adoc │ │ ├── dsv-log.adoc │ │ ├── dsv-nodes-update.adoc │ │ ├── dsv-postgres-update.adoc │ │ ├── dsv-prune.adoc │ │ ├── dsv-queue-details.adoc │ │ ├── dsv-queue-freeze.adoc │ │ ├── dsv-queue-thaw.adoc │ │ ├── dsv-rssac-daemon-tester.adoc │ │ ├── dsv-rssac-daemon.adoc │ │ ├── dsv-rssac-notify.adoc │ │ ├── dsv-rssac-reports.adoc │ │ ├── dsv-status.adoc │ │ ├── dsv-tld-update.adoc │ │ ├── dsv-worker.adoc │ │ └── dsv.adoc │ └── man5 │ │ └── dsv.cfg.adoc └── release-notes.adoc ├── etc ├── GeoIP.conf ├── clickhouse │ ├── config.d │ │ ├── cluster.xml.dsv │ │ ├── compression.xml.dsv │ │ └── config.xml.dsv │ ├── dictionaries.d │ │ ├── geolocation.xml │ │ ├── iana_text.xml │ │ ├── node_text.xml │ │ ├── server_address.xml │ │ ├── sysinfo.xml │ │ └── tld_text.xml │ └── users.d │ │ ├── profiles.xml.dsv │ │ └── users.xml.dsv ├── dns-stats-visualizer │ ├── dsv.cfg │ └── nodes.csv.sample ├── odbc.ini.dsv ├── supervisor │ └── conf.d │ │ └── dsv.conf.sample └── systemd │ └── system │ ├── dsv-import-mirror.service │ ├── dsv-rssac.service │ └── dsv-rssac.socket ├── grafana ├── bin │ ├── dash-depends │ └── generate-dashboard ├── common │ ├── active_disabled_nodes.html │ ├── city_country.html │ ├── dashboards │ │ └── aggregated │ │ │ ├── client_subnet_statistics.py │ │ │ ├── client_subnet_statistics_detail.py │ │ │ ├── client_subnet_statistics_header.html │ 
│ │ ├── geolocation_city.py │ │ │ ├── geolocation_country.py │ │ │ ├── ip_protocol.py │ │ │ ├── qtype.py │ │ │ ├── qtype_vs_tld.py │ │ │ ├── query_attributes.py │ │ │ ├── query_statistics.py │ │ │ ├── query_statistics_detail.py │ │ │ ├── rcode.py │ │ │ ├── rssac_other.py │ │ │ ├── rssac_reporting.py │ │ │ ├── rssac_sources.py │ │ │ ├── rssac_volumes.py │ │ │ └── server_ip_address.py │ ├── footer.html │ ├── footer_test.html │ ├── grafanacommon.py │ └── querystatsgraph.py ├── dashboards │ └── main-site │ │ ├── menu │ │ ├── main.dashboard.py │ │ ├── menu-other.html │ │ └── menu-timelines.html │ │ ├── other-metrics │ │ ├── client-subnet-statistics-detail.dashboard.py │ │ ├── client-subnet-statistics.dashboard.py │ │ ├── geolocation_city.dashboard.py │ │ ├── geolocation_country.dashboard.py │ │ ├── qtype-vs-tld.dashboard.py │ │ ├── rssac-other.dashboard.py │ │ ├── rssac-reporting.dashboard.py │ │ ├── rssac-sources.dashboard.py │ │ └── rssac-volumes.dashboard.py │ │ ├── other │ │ ├── about.dashboard.py │ │ └── about.md │ │ └── timelines │ │ ├── ip-protocol.dashboard.py │ │ ├── qtype.dashboard.py │ │ ├── query-attributes.dashboard.py │ │ ├── query-statistics-detail.dashboard.py │ │ ├── query-statistics.dashboard.py │ │ ├── rcode.dashboard.py │ │ └── server-ip-address.dashboard.py └── provisioning │ ├── dashboards │ ├── main-site.yml │ └── test-site.yml │ └── datasources │ ├── dsv-main.yml.sample │ └── dsv-test.yml.sample ├── mkdeb.sh ├── sampledata └── testnode.cdns.xz ├── sql ├── clickhouse │ ├── ddl │ │ ├── 0001.sql │ │ ├── 0002.sql │ │ ├── 0010-rollback.sql │ │ ├── 0010.sql │ │ ├── 0011-rollback.sql │ │ └── 0011.sql │ ├── info.awk │ └── tsv.tpl └── postgres │ └── ddl │ └── 0001.sql ├── src └── python3 │ ├── clickhouse_driver │ ├── dsv.pylintrc │ └── dsv │ ├── __init__.py │ ├── commands │ ├── __init__.py │ ├── clickhouse_sys_info.py │ ├── clickhouse_update.py │ ├── config.py │ ├── find_node_id.py │ ├── geo_update.py │ ├── import.py │ ├── import_freeze.py │ ├── 
import_thaw.py │ ├── log.py │ ├── nodes_update.py │ ├── postgres_update.py │ ├── prune.py │ ├── queue.py │ ├── queue_details.py │ ├── queue_freeze.py │ ├── queue_thaw.py │ ├── rssac_daemon.py │ ├── rssac_daemon_tester.py │ ├── rssac_notify.py │ ├── rssac_reports.py │ ├── status.py │ ├── tld_update.py │ └── worker.py │ └── common │ ├── Config.py │ ├── DDL.py │ ├── DateTime.py │ ├── Lock.py │ ├── NodeFlag.py │ ├── Path.py │ └── Queue.py └── tests ├── python3 └── integration │ ├── common.py │ ├── ddl-applied │ └── 0001.sql │ ├── ddl-toapply │ ├── 0001-rollback.sql │ ├── 0001.sql │ └── 0002.sql │ ├── gear.py │ ├── test_DDL.py │ ├── test_clickhouse_sys_info.py │ ├── test_config.py │ ├── test_import_freeze_thaw.py │ ├── test_log.py │ ├── test_queue.py │ ├── test_queue_freeze_thaw.py │ ├── test_rssac_notify.py │ └── test_status.py └── tools └── log-requests ├── background.js └── manifest.json /.gitignore: -------------------------------------------------------------------------------- 1 | *.1 2 | *.5 3 | *.d 4 | *.pdf 5 | *.html 6 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "submodule.tools/grafanalib.path"] 2 | path = tools/grafanalib 3 | url = https://github.com/weaveworks/grafanalib 4 | branch = master 5 | [submodule "submodule.tools/natel-plotly-panel.path"] 6 | path = tools/sinodun-natel-plotly-panel 7 | url = https://github.com/Sinodun/grafana-plotly-panel 8 | branch = dist 9 | [submodule "submodule.tools/clickhouse-driver.path"] 10 | path = tools/clickhouse-driver 11 | url = https://github.com/Sinodun/clickhouse-driver.git 12 | branch = purepython3-notz 13 | -------------------------------------------------------------------------------- /BUILD.txt: -------------------------------------------------------------------------------- 1 | * make deb 2 | 
-------------------------------------------------------------------------------- /GNUmakefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean test 2 | 3 | # Putting the command straight into $(shell) gives 'missing ')'' error. 4 | # This two-stage process works. 5 | VERCMD := sed -e "s/.*(\([0-9][0-9.~rc]*\).*/\1/;q" debian/changelog 6 | DSVVERSION := $(shell $(VERCMD)) 7 | 8 | MAN1=$(patsubst %.adoc,%.1, $(wildcard doc/man/man1/*.adoc)) 9 | MAN5=$(patsubst %.adoc,%.5, $(wildcard doc/man/man5/*.adoc)) 10 | MANS=$(MAN1) $(MAN5) 11 | MANDOCS=$(MAN1:.1=.html) $(MAN5:.5=.html) 12 | MANPDFS=$(MANDOCS:.html=.pdf) 13 | 14 | DOCS=$(patsubst %.adoc,%.html, \ 15 | $(wildcard doc/user/*.adoc doc/*.adoc) README.adoc) 16 | DOCPDFS=$(DOCS:.html=.pdf) 17 | 18 | JSON=$(patsubst %.dashboard.py,%.dashboard.json,$(wildcard \ 19 | grafana/dashboards/main-site/*/*.dashboard.py)) 20 | JSONDEPS=$(JSON:.json=.d) 21 | 22 | all: $(MANS) $(MANDOCS) $(DOCS) $(JSON) 23 | 24 | %.1 :: %.adoc ; asciidoctor -b manpage -d manpage -o $@ $< 25 | 26 | %.5 :: %.adoc ; asciidoctor -b manpage -d manpage -o $@ $< 27 | 28 | %.html :: %.adoc ; asciidoctor -b html5 -d article -a dsvversion=$(DSVVERSION) -o $@ $< 29 | 30 | %.pdf :: %.adoc ; asciidoctor-pdf -b pdf -d article -a dsvversion=$(DSVVERSION) -o $@ $< 31 | 32 | %.dashboard.json :: %.dashboard.py ; DSVVERSION=$(DSVVERSION) grafana/bin/generate-dashboard -o $@ $< 33 | 34 | %.dashboard.d :: %.dashboard.py ; grafana/bin/dash-depends $< > $@ 35 | 36 | doc: $(MANS) $(MANDOCS) $(DOCS) 37 | 38 | pdf: $(MANPDFS) $(DOCPDFS) 39 | 40 | json: $(JSON) 41 | 42 | clean: ; rm -rf $(MANS) $(MANDOCS) $(DOCS) $(MANPDFS) $(DOCPDFS) $(JSON) dist_deb 43 | 44 | distclean: clean ; rm -f $(JSONDEPS) 45 | 46 | test: 47 | PYTHONPATH=src/python3 python3 -m unittest discover -s tests/python3/integration/ 48 | 49 | pylint: 50 | `command -v pylint3 2> /dev/null || echo "pylint"` --rcfile=src/python3/dsv.pylintrc src/python3/dsv 51 
| 52 | deb: $(MANS) $(DOCS) $(JSON) test 53 | DSVVERSION=$(DSVVERSION) ./mkdeb.sh 54 | 55 | include $(JSONDEPS) 56 | 57 | doc/Overview_and_Basic_Install.html: $(wildcard doc/adoc-source/*.adoc) 58 | doc/Overview_and_Basic_Install.pdf: $(wildcard doc/adoc-source/*.adoc) 59 | doc/Advanced_User_Guide.html: $(wildcard doc/adoc-source/*.adoc) 60 | doc/Advanced_User_Guide.pdf: $(wildcard doc/adoc-source/*.adoc) 61 | -------------------------------------------------------------------------------- /README.adoc: -------------------------------------------------------------------------------- 1 | = DNS-STATS Visualizer 2 | 3 | == About 4 | 5 | DNS-STATS Visualizer is a system which can: 6 | 7 | * Consume DNS traffic data files recorded in Compacted-DNS (C-DNS, https://tools.ietf.org/html/rfc8618[RFC8618]) 8 | format from nameservers. (Files in C-DNS format can be generated by 9 | https://github.com/dns-stats/compactor/wiki[DNS-STATS compactor].) 10 | 11 | * Populate a ClickHouse database with per query/response level data 12 | (and additionally aggregate data at a chosen time interval) 13 | 14 | * Produce https://www.dns-oarc.net/tools/dsc[DSC]-like statistics graphs 15 | of the recorded traffic in Grafana. 16 | 17 | RSSAC reports can also be made available via the link at the foot of the main dashboard. 18 | 19 | The project was initially developed for https://www.dns.icann.org/imrs/[ICANN] 20 | by http://sinodun.com/[Sinodun IT], and is now released via 21 | http://dns-stats.org[DNS-STATS] as an open source project licensed 22 | under the Mozilla Public License v2.0. 23 | 24 | For more information see the 25 | https://github.com/dns-stats/visualizer/wiki[DNS-STATS Visualizer wiki] or 26 | https://github.com/dns-stats/visualizer[github repository]. 27 | 28 | == Packages 29 | 30 | The latest Visualizer packages are available from 31 | https://launchpad.net/~dns-stats/+archive/ubuntu/visualizer[ppa:dns-stats/visualizer]. 
32 | 33 | == Support 34 | 35 | - Mailing list: https://mm.dns-stats.org/mailman/listinfo/dns-stats-users 36 | - Issue tracker: https://github.com/dns-stats/visualizer/issues 37 | 38 | == Screenshots 39 | 40 | A customised version of this framework is used by ICANN for their 41 | https://stats.dns.icann.org[public DNS statistics]. 42 | These screenshots are from the ICANN site. 43 | 44 | image::doc/images/query_stats.png[DNS-STATS Visualizer Query Volumes] 45 | image::doc/images/ip_version.png[DNS-STATS Visualizer IP Version] 46 | image::doc/images/location_city.png[DNS-STATS Visualizer GeoLocation] 47 | image::doc/images/client_subnet.png[DNS-STATS Visualizer Client Subnets] 48 | -------------------------------------------------------------------------------- /bin/dsv: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # 3 | # Copyright 2018-2019, 2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Start a program from dsv.incoming depending on contents of argv[0]. 12 | 13 | import logging 14 | import os 15 | import pathlib 16 | import sys 17 | 18 | # For running from the source tree (even if soft linked from elsewhere) 19 | # or if not whether from a Debian package and add the necessary Python path. 20 | # If running from source tree, add that bin to the front of the PATH so 21 | # that queue jobs get picked up from there too. 
22 | me = pathlib.Path(sys.argv[0]) 23 | localbase = me.resolve().parent.parent 24 | locallib = localbase / 'src/python3/dsv' 25 | if locallib.is_dir(): 26 | sys.path.append(str(locallib.parent)) 27 | localbin = localbase / 'bin' 28 | ospath = os.environ["PATH"] 29 | os.environ["PATH"] = str(localbin) + os.pathsep + ospath 30 | else: 31 | deb = pathlib.Path('/usr/share/dns-stats-visualizer/python3') 32 | if deb.is_dir(): 33 | sys.path.append(str(deb)) 34 | 35 | exit_status = 1 36 | 37 | import dsv.commands 38 | 39 | cmdlist = ' '.join(dsv.commands.available_commands()) 40 | _, sep, cmd = me.name.partition('dsv-') 41 | if not sep: 42 | del sys.argv[0] 43 | if not sys.argv: 44 | print('Usage: dsv []\nAvailable commands: {cmds}'.format(cmds=cmdlist), file=sys.stderr) 45 | cmd = None 46 | else: 47 | cmd = sys.argv[0] 48 | try: 49 | if cmd: 50 | exit_status = dsv.commands.run_command(cmd, sys.argv) 51 | except ImportError: 52 | print('No Visualizer command {cmd}\nAvailable commands: {cmds}'.format(cmd=cmd, cmds=cmdlist), file=sys.stderr) 53 | except KeyboardInterrupt: 54 | pass 55 | except Exception as e: 56 | logging.error('Exception {exc} ({args})'.format( 57 | exc=type(e).__name__, 58 | args=str(e))) 59 | print('Error {exc} ({args}).'.format( 60 | exc=type(e).__name__, 61 | args=str(e)), file=sys.stderr) 62 | sys.exit(exit_status) 63 | 64 | # Local Variables: 65 | # mode: Python 66 | # End: 67 | -------------------------------------------------------------------------------- /bin/dsv-cdns-to-pcap: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright 2018-2019, 2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # dsv-cdns-to-pcap 12 | # 13 | # Convert a C-DNS file to a PCAP file in the current directory, or the 14 | # parent directory if the current directory is 'pending'. 15 | # 16 | # If the input file name ends '.xz', decompress first. 17 | # 18 | # The output PCAP may be compressed and may contain only queries, depending 19 | # on Visualizer configuration. 20 | # 21 | # On successful conversion, give exit code 0. Otherwise if anything goes 22 | # wrong with decompression and conversion, there doesn't seem any likelihood 23 | # it's going to be temporary, so fail with exit code 1. Finally, 24 | # if we're missing necessary required executables, that's an 25 | # infrastructure/setup error, so exit with error code 99. 26 | # 27 | # This script is stuffed with bash-isms. Sorry, but it does make life 28 | # much easier. 29 | 30 | #set -x 31 | 32 | for cmd in inspector xz cat dsv-config dsv-log 33 | do 34 | command -v $cmd > /dev/null 2>&1 || { echo "No $cmd." 
>&2; exit 99; } 35 | done 36 | 37 | set -o pipefail 38 | 39 | INSPECTOR="inspector" 40 | XZ="xz --decompress --stdout" 41 | DSVLOG="dsv-log -l warning" 42 | 43 | COMPRESS_PCAP=$(dsv-config pcap compress) 44 | COMPRESS_PCAP_LEVEL=$(dsv-config pcap compression-level) 45 | REPLACE_PCAP=$(dsv-config pcap replace) 46 | QUERY_ONLY=$(dsv-config pcap query-only) 47 | ANONYMISE=$(dsv-config pcap pseudo-anonymise) 48 | ANON_KEY=$(dsv-config pcap pseudo-anonymisation-key) 49 | ANON_PASSPHRASE=$(dsv-config pcap pseudo-anonymisation-passphrase) 50 | 51 | declare -a INSPECTOR_ARGS 52 | if [[ "$COMPRESS_PCAP" ]]; then 53 | INSPECTOR_ARGS+=(-x -u) 54 | INSPECTOR_ARGS+=("$COMPRESS_PCAP_LEVEL") 55 | fi 56 | if [[ "$QUERY_ONLY" ]]; then 57 | INSPECTOR_ARGS+=(-q) 58 | fi 59 | if [[ "$ANONYMISE" ]]; then 60 | if [[ "$ANON_KEY" ]]; then 61 | INSPECTOR_ARGS+=(-p -k) 62 | INSPECTOR_ARGS+=("$ANON_KEY") 63 | elif [[ "$ANON_PASSPHRASE" ]]; then 64 | INSPECTOR_ARGS+=(-p -P) 65 | INSPECTOR_ARGS+=("$ANON_PASSPHRASE") 66 | fi 67 | fi 68 | 69 | usage() 70 | { 71 | echo "Usage: dsv-cdns-to-pcap " 1>&2 72 | exit 1 73 | } 74 | 75 | if [[ $# -ne 1 ]]; then 76 | usage 77 | fi 78 | file=$1 79 | 80 | dir=$(dirname $file) 81 | dirname=$(basename $dir) 82 | if [[ "$dirname" = "pending" ]]; then 83 | outdir=$(dirname $dir) 84 | else 85 | outdir=$dir 86 | fi 87 | 88 | name=$(basename $file) 89 | basename=${name%.xz} 90 | 91 | error() 92 | { 93 | if [[ -n $1 ]]; then 94 | echo $1 1>&2 95 | fi 96 | rm -f $outdir/$basename.pcap* 97 | exit 1 98 | } 99 | 100 | # If file does not exist it's been successfully dealt with. 101 | if [[ ! -f $file ]]; then 102 | $DSVLOG "$file not found." 103 | exit 0 104 | fi 105 | 106 | if [[ $name != $basename ]]; then 107 | decomp=$XZ 108 | else 109 | decomp=cat 110 | fi 111 | 112 | trap "error Interrupted" HUP INT TERM 113 | 114 | # Ensure if re-generating we don't end up with -1 etc. outputs but replace. 
115 | if [[ "$REPLACE_PCAP" ]]; then 116 | rm -f $outdir/$basename.pcap $outdir/$basename.pcap.xz $outdir/$basename.pcap.info 117 | fi 118 | if ! $decomp $file | $INSPECTOR "${INSPECTOR_ARGS[@]}" -o $outdir/$basename.pcap; then 119 | error "Error converting file" 120 | fi 121 | 122 | exit 0 123 | 124 | # Local Variables: 125 | # mode: sh 126 | # End: 127 | -------------------------------------------------------------------------------- /bin/dsv-cdns-to-tsv: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright 2018-2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # dsv-cdns-to-tsv 12 | # 13 | # Convert a C-DNS file to (potentially) multiple TSV files in the same 14 | # directory as the C-DNS file. Add these files to the job queue import-tsv. 15 | # 16 | # If the input file name ends '.xz', decompress first. 17 | # 18 | # The C-DNS file will be in a directory ///pending. 19 | # 20 | # On successful conversion, give exit code 0. Otherwise if anything goes 21 | # wrong with decompression and conversion, there doesn't seem any likelihood 22 | # it's going to be temporary, so fail with exit code 1. Finally, 23 | # if we're missing necessary required executables, that's an 24 | # infrastructure/setup error, so exit with error code 99. 25 | # 26 | # This script is stuffed with bash-isms. Sorry, but it does make life 27 | # much easier. 28 | 29 | #set -x 30 | 31 | for cmd in inspector xz cat awk dsv-find-node-id dsv-queue dsv-log 32 | do 33 | command -v $cmd > /dev/null 2>&1 || { echo "No $cmd." 
>&2; exit 99; } 34 | done 35 | 36 | set -o pipefail 37 | 38 | TEMPLATE="/usr/share/dns-stats-visualizer/sql/clickhouse/tsv.tpl" 39 | if [[ ! -f $TEMPLATE ]]; then 40 | # Check old location for file. 41 | TEMPLATE="/etc/dns-stats-visualizer/tsv-clickhouse.tpl" 42 | if [[ ! -f $TEMPLATE ]]; then 43 | echo "Template file missing." >&2 44 | exit 99 45 | fi 46 | fi 47 | 48 | INSPECTOR="inspector --output-format template --template $TEMPLATE" 49 | XZ="xz --decompress --stdout" 50 | DSVLOG="dsv-log -l warning" 51 | 52 | ADDTSVQUEUE="dsv-queue -q import-tsv" 53 | 54 | GETPACKETINFO="awk -f /usr/share/dns-stats-visualizer/sql/clickhouse/info.awk" 55 | 56 | usage() 57 | { 58 | echo "Usage: dsv-cdns-to-tsv " 1>&2 59 | exit 1 60 | } 61 | 62 | if [[ $# -ne 1 ]]; then 63 | usage 64 | fi 65 | file=$1 66 | 67 | dir=$(dirname $file) 68 | dirname=$(basename $dir) 69 | ndir=$dir 70 | if [[ "$dirname" = "pending" ]]; then 71 | ndir=$(dirname $ndir) 72 | fi 73 | nodedir=$(dirname $ndir) 74 | nodename=$(basename $nodedir) 75 | 76 | serverdir=$(dirname $nodedir) 77 | servername=$(basename $serverdir) 78 | 79 | name=$(basename $file) 80 | basename=${name%.xz} 81 | 82 | error() 83 | { 84 | if [[ -n $1 ]]; then 85 | echo $1 1>&2 86 | fi 87 | rm -f $dir/$basename.tsv* $dir/$basename.info 88 | exit 1 89 | } 90 | 91 | # Look up server/nodename in Postgres and find its nodeID. 92 | nodeid=$(dsv-find-node-id $servername $nodename) 93 | if [[ -z $nodeid ]]; then 94 | error "Can't find server $servername node $nodename" 95 | fi 96 | 97 | # If file does not exist it's been successfully dealt with. 98 | if [[ ! -f $file ]]; then 99 | $DSVLOG "$file not found." 100 | exit 0 101 | fi 102 | 103 | if [[ $name != $basename ]]; then 104 | decomp=$XZ 105 | else 106 | decomp=cat 107 | fi 108 | 109 | trap "error Interrupted" HUP INT TERM 110 | 111 | if ! 
$decomp $file | $INSPECTOR --output $dir/$basename.tsv --value node=$nodeid --report-info > $dir/$basename.info; then 112 | error "Error converting file" 113 | fi 114 | 115 | if ! $GETPACKETINFO -v node_id=$nodeid $dir/$basename.info > $dir/$basename.tsv.info; then 116 | rm -f $dir/$basename.tsv.info 117 | fi 118 | rm -f $dir/$basename.info 119 | 120 | if ! $ADDTSVQUEUE $dir/$basename.tsv; then 121 | error "Error adding TSV file $dir/$basename.tsv to import queue" 122 | fi 123 | 124 | exit 0 125 | 126 | # Local Variables: 127 | # mode: sh 128 | # End: 129 | -------------------------------------------------------------------------------- /bin/dsv-clickhouse-sys-info: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-clickhouse-update: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-config: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-datastore-setup: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright 2018, 2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Create PostGres dsv user and dsv database. 12 | 13 | command -v adduser > /dev/null 2>&1 || { echo "'adduser' not found, cannot proceed." >&2 ; exit 2; } 14 | command -v psql > /dev/null 2>&1 || { echo "'psql' not found, cannot proceed." 
>&2 ; exit 2; } 15 | command -v sudo > /dev/null 2>&1 || { echo "'sudo' not found, cannot proceed." >&2 ; exit 2; } 16 | 17 | err=0 18 | 19 | # Create system user 'dsv' if not already present. 20 | echo "Create system user 'dsv'." 21 | if grep --quiet dsv /etc/passwd; then 22 | echo "'dsv' user already exists." 23 | else 24 | if sudo adduser --system --group --gecos "DNS-STATS Visualizer" --disabled-login dsv; then 25 | echo "Added user 'dsv'." 26 | else 27 | echo "Adding user 'dsv' failed." 28 | err=$? 29 | fi 30 | fi 31 | 32 | # Create Postgres 'dsv' user. 33 | echo "Create Postgres user 'dsv'." 34 | if sudo -u postgres psql -c "CREATE USER dsv PASSWORD 'dsv';"; then 35 | echo "Created Postgres user 'dsv'." 36 | else 37 | err=$? 38 | fi 39 | 40 | # Create Postgres 'dsv' database. 41 | echo "Create Postgres database 'dsv'." 42 | if sudo -u postgres psql -c "CREATE DATABASE dsv OWNER dsv;"; then 43 | echo "Created Postgres database 'dsv'." 44 | else 45 | err=$? 46 | fi 47 | 48 | exit $err 49 | -------------------------------------------------------------------------------- /bin/dsv-find-node-id: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-geo-update: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-import: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-import-freeze: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-import-mirror: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright 
2020, 2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # dsv-import-mirror [ ...] 12 | # 13 | # This is a continuously running server process. It creates a 14 | # parallel copy of datastore incoming directories under 15 | # directory . New data files created under 16 | # the datastore for hosts that match the hostname pattern get 17 | # a hard link to the file created in a matching directory in the 18 | # target root. 19 | # 20 | # A periodic rsync on a receiving host can then copy these files 21 | # to a mirror. E.g. 22 | # 23 | # rsync --archive --chmod=g+w --remove-source-files --rsh="ssh -i ~/.ssh/cbor.rsa" :/ 24 | # 25 | # Exit codes: 0 = success, 1 = error, 99 = missing pre-requisite. 26 | # 27 | 28 | #set -x 29 | 30 | for cmd in inotifywait dsv-config 31 | do 32 | command -v $cmd > /dev/null 2>&1 || { echo "No $cmd." >&2; exit 99; } 33 | done 34 | 35 | if [[ $# -lt 2 ]]; then 36 | echo "Usage: dsv-import-mirror [ ...]" 1>&2 37 | exit 1 38 | fi 39 | 40 | FROMROOT=$(dsv-config datastore path) 41 | if [[ $? -ne 0 ]]; then 42 | echo "No datastore path configured." 
1>&2 43 | exit 99 44 | fi 45 | 46 | TOROOT=$1 47 | shift 48 | 49 | DSDIRS="" 50 | for pat in "$@" 51 | do 52 | DSDIRS="$DSDIRS $pat/incoming" 53 | done 54 | 55 | cd $FROMROOT 56 | 57 | inotifywait -m -q -e moved_to --format "%w%f" $DSDIRS | while read f; do 58 | mkdir -p $TOROOT/$(dirname $f) 59 | ln $f $TOROOT/$f 60 | done 61 | 62 | exit 0 63 | 64 | # Local Variables: 65 | # mode: sh 66 | # End: 67 | -------------------------------------------------------------------------------- /bin/dsv-import-thaw: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-log: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-nodes-update: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-postgres-update: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-prune: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-queue: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-queue-details: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-queue-freeze: -------------------------------------------------------------------------------- 1 | dsv 
-------------------------------------------------------------------------------- /bin/dsv-queue-thaw: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-rssac-daemon: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-rssac-daemon-tester: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-rssac-notify: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-rssac-reports: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-status: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-tld-update: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /bin/dsv-worker: -------------------------------------------------------------------------------- 1 | dsv -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | dns-stats-visualizer (1.0.1-1) bionic; urgency=low 2 | 3 | * Change name of bar chart plugin to fix clash with original plugin. 4 | * Fix instance selector to select on all items. 5 | * Add more Chrome pre-requisites to Grafana package for image 6 | rendering plugin. 
7 | 8 | -- Jim Hague Fri, 28 May 2021 16:47:00 +0100 9 | 10 | dns-stats-visualizer (1.0.0-1) bionic; urgency=low 11 | 12 | * Initial release. 13 | 14 | -- Jim Hague Fri, 12 Feb 2021 15:09:00 +0000 15 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 10 2 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: dns-stats-visualizer 2 | Standards-Version: 3.9.7 3 | Section: net 4 | Priority: optional 5 | Maintainer: Jim Hague 6 | Homepage: https://github.com/Sinodun/dns-stats-visualizer 7 | Build-Depends: debhelper (>=9), asciidoctor, coreutils, findutils, make, python3 (>= 3.5), python3-attr, python3-dnspython, python3-psycopg2 8 | 9 | Package: dns-stats-visualizer-base 10 | Architecture: all 11 | Depends: python3 (>= 3.5), ${misc:Depends} 12 | Suggests: dns-stats-visualizer-doc 13 | Description: Base DNS STATS Visualizer infrastructure. 14 | Base infrastructure for DNS STATS Visualizer. 15 | 16 | Package: dns-stats-visualizer-import 17 | Architecture: all 18 | Depends: dns-stats-visualizer-base, bash, python3-gear, python3-psycopg2, clickhouse-client, libmaxminddb0, geoipupdate, gearman-job-server, gearman-tools, dns-stats-inspector (>= 1.1.0), ${misc:Depends} 19 | Description: Data import for DNS STATS Visualizer 20 | Commands required for DNS STATS Visualizer data import. 21 | 22 | Package: dns-stats-visualizer-import-mirror 23 | Architecture: all 24 | Depends: dns-stats-visualizer-base, bash, inotify-tools, ${misc:Depends} 25 | Description: Mirror incoming datastore files 26 | Utility to assist in mirroring incoming datastore files. 
27 | 28 | Package: dns-stats-visualizer-clickhouse-server 29 | Architecture: all 30 | Depends: dns-stats-visualizer-base, clickhouse-client, clickhouse-server (>= 19.16), odbcinst, odbc-postgresql, unixodbc, ${misc:Depends} 31 | Description: DNS STATS Visualizer support for ClickHouse servers 32 | Commands required for DNS STATS Visualizer ClickHouse servers. 33 | 34 | Package: dns-stats-visualizer-rssac-reports 35 | Architecture: all 36 | Depends: dns-stats-visualizer-base, ${misc:Depends} 37 | Description: DNS STATS Visualizer RSSAC reports. 38 | Produce DNS STATS Visualizer RSSAC reports. 39 | 40 | Package: dns-stats-visualizer-rssac-notify 41 | Architecture: all 42 | Depends: dns-stats-visualizer-base, python3-dnspython, python3-systemd, ${misc:Depends} 43 | Description: DNS STATS Visualizer RSSAC notify daemon. 44 | DNS STATS Visualizer daemon for monitoring RSSAC zone info and updating DNS STATS Visualizer database. 45 | 46 | Package: dns-stats-visualizer-grafana-main 47 | Architecture: all 48 | Depends: grafana (>= 6.4), libx11-6, libx11-xcb1, libxcb-dri3-0, libxcomposite1, libxcursor1, libxdamage1, libxext6, libxfixes3, libxi6, libxrender1, libxtst6, libglib2.0-0, libnss3, libcups2, libdbus-1-3, libxss1, libxrandr2, libgbm1, libgtk-3-0, libasound2, coreutils, ${misc:Depends} 49 | Description: DNS STATS Visualizer Grafana main site dashboards and datasource. 50 | DNS STATS Visualizer main site dashboards and other provisioning for Grafana. 51 | 52 | Package: dns-stats-visualizer-doc 53 | Architecture: all 54 | Depends: ${misc:Depends} 55 | Description: DNS STATS Visualizer documentation 56 | DNS STATS Visualizer documentation in HTML format. 
57 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-base.install: -------------------------------------------------------------------------------- 1 | bin/dsv usr/bin 2 | bin/dsv-config usr/bin 3 | bin/dsv-log usr/bin 4 | etc/dns-stats-visualizer/dsv.cfg etc/dns-stats-visualizer 5 | tools/clickhouse-driver/clickhouse_driver usr/share/dns-stats-visualizer/python3 6 | src/python3/dsv/common usr/share/dns-stats-visualizer/python3/dsv 7 | src/python3/dsv/commands/__init__.py usr/share/dns-stats-visualizer/python3/dsv/commands 8 | src/python3/dsv/commands/config.py usr/share/dns-stats-visualizer/python3/dsv/commands 9 | src/python3/dsv/commands/log.py usr/share/dns-stats-visualizer/python3/dsv/commands 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-base.manpages: -------------------------------------------------------------------------------- 1 | doc/man/man1/dsv.1 2 | doc/man/man1/dsv-config.1 3 | doc/man/man1/dsv-log.1 4 | doc/man/man5/dsv.cfg.5 5 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-base.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if which py3compile >/dev/null 2>&1; then 4 | py3compile /usr/share/dns-stats-visualizer/python3/ 5 | fi 6 | 7 | #DEBHELPER# 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-base.prerm: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if which py3clean >/dev/null 2>&1; then 4 | py3clean /usr/share/dns-stats-visualizer/python3 5 | fi 6 | 7 | #DEBHELPER# 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-clickhouse-server.install: 
-------------------------------------------------------------------------------- 1 | bin/dsv-clickhouse-sys-info usr/bin 2 | bin/dsv-clickhouse-update usr/bin 3 | etc/clickhouse/config.d etc/clickhouse-server 4 | etc/clickhouse/dictionaries.d etc/clickhouse-server 5 | etc/clickhouse/users.d etc/clickhouse-server 6 | etc/odbc.ini.dsv etc 7 | src/python3/dsv/commands/clickhouse_sys_info.py usr/share/dns-stats-visualizer/python3/dsv/commands 8 | src/python3/dsv/commands/clickhouse_update.py usr/share/dns-stats-visualizer/python3/dsv/commands 9 | sql/clickhouse/ddl* usr/share/dns-stats-visualizer/sql/clickhouse/ 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-clickhouse-server.manpages: -------------------------------------------------------------------------------- 1 | doc/man/man1/dsv-clickhouse-sys-info.1 2 | doc/man/man1/dsv-clickhouse-update.1 3 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-clickhouse-server.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if which py3compile >/dev/null 2>&1; then 4 | py3compile /usr/share/dns-stats-visualizer/python3/ 5 | fi 6 | 7 | for i in node node_server node_region node_country node_city node_instance 8 | do 9 | rm -f /etc/clickhouse-server/dictionaries.d/${i}.xml 10 | done 11 | 12 | #DEBHELPER# 13 | 14 | exit 0 15 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-doc.doc-base: -------------------------------------------------------------------------------- 1 | Document: dns-stats-visualizer 2 | Title: DNS STATS Visualizer documentation 3 | Author: Jim Hague 4 | Abstract: Collected documents on DNS STATS Visualizer operation 5 | Section: System/Monitoring 6 | Format: HTML 7 | Index: /usr/share/doc/dns-stats-visualizer/Overview_and_Basic_Install.html 8 | Files: 
/usr/share/doc/dns-stats-visualizer/*.html -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-doc.install: -------------------------------------------------------------------------------- 1 | doc/*.html usr/share/doc/dns-stats-visualizer/ 2 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-grafana-main.install: -------------------------------------------------------------------------------- 1 | grafana/dashboards/main-site var/lib/grafana/dashboards 2 | grafana/plugins/sinodun-natel-plotly-panel var/lib/grafana/plugins 3 | provisioning/dashboards/main-site.yml etc/grafana/provisioning/dashboards 4 | provisioning/datasources/dsv-main.yml.sample etc/grafana/provisioning/datasources 5 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-grafana-main.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | 4 | if [ "$1" = "configure" ]; then 5 | if [ -f /etc/grafana/provisioning/dashboards/main-site.yml ]; then 6 | chgrp grafana /etc/grafana/provisioning/dashboards/main-site.yml 7 | chmod 0640 /etc/grafana/provisioning/dashboards/main-site.yml 8 | fi 9 | if [ -f /etc/grafana/provisioning/datasources/dsv-main.yml.sample ]; then 10 | chgrp grafana /etc/grafana/provisioning/datasources/dsv-main.yml.sample 11 | chmod 0640 /etc/grafana/provisioning/datasources/dsv-main.yml.sample 12 | fi 13 | 14 | # Ensure required plugins are installed, and update to latest version 15 | # if already installed. 16 | plugins_before=$(mktemp -t "plugins-before.XXXXXX") 17 | plugins_after=$(mktemp -t "plugins-before.XXXXXX") 18 | grafana-cli plugins ls > $plugins_before 19 | for plugin in vertamedia-clickhouse-datasource grafana-image-renderer grafana-worldmap-panel 20 | do 21 | if ! 
grep $plugin $plugins_before > /dev/null; then 22 | grafana-cli plugins install $plugin > /dev/null 23 | else 24 | grafana-cli plugins update $plugin > /dev/null 25 | fi 26 | done 27 | grafana-cli plugins ls > $plugins_after 28 | if ! cmp -s $plugins_before $plugins_after; then 29 | systemctl restart grafana-server 30 | fi 31 | rm -f $plugins_before $plugins_after 32 | fi 33 | 34 | #DEBHELPER# 35 | 36 | exit 0 37 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import-mirror.dirs: -------------------------------------------------------------------------------- 1 | var/lib/dns-stats-visualizer/mirror 2 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import-mirror.install: -------------------------------------------------------------------------------- 1 | bin/dsv-import-mirror usr/bin 2 | etc/systemd/system/dsv-import-mirror.service lib/systemd/system 3 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import.dirs: -------------------------------------------------------------------------------- 1 | var/lib/dns-stats-visualizer/cdns 2 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import.install: -------------------------------------------------------------------------------- 1 | bin/dsv-cdns-to-pcap usr/bin 2 | bin/dsv-cdns-to-tsv usr/bin 3 | bin/dsv-datastore-setup usr/bin 4 | bin/dsv-find-node-id usr/bin 5 | bin/dsv-geo-update usr/bin 6 | bin/dsv-import usr/bin 7 | bin/dsv-import-freeze usr/bin 8 | bin/dsv-import-thaw usr/bin 9 | bin/dsv-import-tsv usr/bin 10 | bin/dsv-nodes-update usr/bin 11 | bin/dsv-postgres-update usr/bin 12 | bin/dsv-prune usr/bin 13 | bin/dsv-queue usr/bin 14 | bin/dsv-queue-details usr/bin 15 | bin/dsv-queue-freeze usr/bin 16 | bin/dsv-queue-thaw usr/bin 17 | bin/dsv-rssac-reports usr/bin 18 
| bin/dsv-status usr/bin 19 | bin/dsv-tld-update usr/bin 20 | bin/dsv-worker usr/bin 21 | etc/dns-stats-visualizer/nodes.csv.sample etc/dns-stats-visualizer 22 | etc/supervisor/conf.d/dsv.conf.sample etc/supervisor/conf.d 23 | sampledata usr/share/dns-stats-visualizer 24 | src/python3/dsv/commands/find_node_id.py usr/share/dns-stats-visualizer/python3/dsv/commands 25 | src/python3/dsv/commands/geo_update.py usr/share/dns-stats-visualizer/python3/dsv/commands 26 | src/python3/dsv/commands/import.py usr/share/dns-stats-visualizer/python3/dsv/commands 27 | src/python3/dsv/commands/import_freeze.py usr/share/dns-stats-visualizer/python3/dsv/commands 28 | src/python3/dsv/commands/import_thaw.py usr/share/dns-stats-visualizer/python3/dsv/commands 29 | src/python3/dsv/commands/nodes_update.py usr/share/dns-stats-visualizer/python3/dsv/commands 30 | src/python3/dsv/commands/postgres_update.py usr/share/dns-stats-visualizer/python3/dsv/commands 31 | src/python3/dsv/commands/prune.py usr/share/dns-stats-visualizer/python3/dsv/commands 32 | src/python3/dsv/commands/queue_details.py usr/share/dns-stats-visualizer/python3/dsv/commands 33 | src/python3/dsv/commands/queue_freeze.py usr/share/dns-stats-visualizer/python3/dsv/commands 34 | src/python3/dsv/commands/queue_thaw.py usr/share/dns-stats-visualizer/python3/dsv/commands 35 | src/python3/dsv/commands/queue.py usr/share/dns-stats-visualizer/python3/dsv/commands 36 | src/python3/dsv/commands/status.py usr/share/dns-stats-visualizer/python3/dsv/commands 37 | src/python3/dsv/commands/tld_update.py usr/share/dns-stats-visualizer/python3/dsv/commands 38 | src/python3/dsv/commands/worker.py usr/share/dns-stats-visualizer/python3/dsv/commands 39 | sql/clickhouse/tsv.tpl usr/share/dns-stats-visualizer/sql/clickhouse 40 | sql/clickhouse/info.awk usr/share/dns-stats-visualizer/sql/clickhouse 41 | sql/postgres usr/share/dns-stats-visualizer/sql 42 | -------------------------------------------------------------------------------- 
/debian/dns-stats-visualizer-import.manpages: -------------------------------------------------------------------------------- 1 | doc/man/man1/dsv-find-node-id.1 2 | doc/man/man1/dsv-geo-update.1 3 | doc/man/man1/dsv-import.1 4 | doc/man/man1/dsv-import-freeze.1 5 | doc/man/man1/dsv-import-thaw.1 6 | doc/man/man1/dsv-nodes-update.1 7 | doc/man/man1/dsv-datastore-setup.1 8 | doc/man/man1/dsv-postgres-update.1 9 | doc/man/man1/dsv-prune.1 10 | doc/man/man1/dsv-queue-freeze.1 11 | doc/man/man1/dsv-queue-thaw.1 12 | doc/man/man1/dsv-rssac-reports.1 13 | doc/man/man1/dsv-status.1 14 | doc/man/man1/dsv-tld-update.1 15 | doc/man/man1/dsv-worker.1 16 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if which py3compile >/dev/null 2>&1; then 4 | py3compile /usr/share/dns-stats-visualizer/python3/ 5 | fi 6 | 7 | if [ -x /usr/bin/supervisorctl ]; then 8 | /usr/bin/supervisorctl restart all 9 | fi 10 | 11 | #DEBHELPER# 12 | 13 | exit 0 14 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import.postrm: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ -x /usr/bin/supervisorctl ]; then 4 | /usr/bin/supervisorctl restart all 5 | fi 6 | 7 | #DEBHELPER# 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import.preinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ -x /usr/bin/supervisorctl ]; then 4 | /usr/bin/supervisorctl stop all 5 | fi 6 | 7 | #DEBHELPER# 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-import.prerm: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ -x /usr/bin/supervisorctl ]; then 4 | /usr/bin/supervisorctl stop all 5 | fi 6 | 7 | #DEBHELPER# 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-rssac-notify.install: -------------------------------------------------------------------------------- 1 | bin/dsv-rssac-daemon usr/bin 2 | bin/dsv-rssac-daemon-tester usr/bin 3 | bin/dsv-rssac-notify usr/bin 4 | etc/systemd/system/dsv-rssac.service lib/systemd/system 5 | etc/systemd/system/dsv-rssac.socket lib/systemd/system 6 | src/python3/dsv/commands/rssac_daemon.py usr/share/dns-stats-visualizer/python3/dsv/commands 7 | src/python3/dsv/commands/rssac_daemon_tester.py usr/share/dns-stats-visualizer/python3/dsv/commands 8 | src/python3/dsv/commands/rssac_notify.py usr/share/dns-stats-visualizer/python3/dsv/commands 9 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-rssac-notify.manpages: -------------------------------------------------------------------------------- 1 | doc/man/man1/dsv-rssac-daemon.1 2 | doc/man/man1/dsv-rssac-notify.1 3 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-rssac-notify.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if which py3compile >/dev/null 2>&1; then 4 | py3compile /usr/share/dns-stats-visualizer/python3/ 5 | fi 6 | 7 | #DEBHELPER# 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-rssac-reports.install: -------------------------------------------------------------------------------- 1 | bin/dsv-rssac-reports usr/bin 2 | src/python3/dsv/commands/rssac_reports.py usr/share/dns-stats-visualizer/python3/dsv/commands 3 | 
-------------------------------------------------------------------------------- /debian/dns-stats-visualizer-rssac-reports.manpages: -------------------------------------------------------------------------------- 1 | doc/man/man1/dsv-rssac-reports.1 2 | -------------------------------------------------------------------------------- /debian/dns-stats-visualizer-rssac-reports.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if which py3compile >/dev/null 2>&1; then 4 | py3compile /usr/share/dns-stats-visualizer/python3/ 5 | fi 6 | 7 | #DEBHELPER# 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | %: 4 | dh $@ 5 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (quilt) 2 | -------------------------------------------------------------------------------- /doc/Advanced_User_Guide.adoc: -------------------------------------------------------------------------------- 1 | = DNS-STATS Visualizer Advanced User Guide 2 | DNS-STATS 3 | v{dsvversion} 4 | :source-highlighter: highlightjs 5 | :toc: left 6 | :toclevels: 3 7 | :sectnums: 8 | :sectnumlevels: 5 9 | :icons: font 10 | :imagesdir: adoc-source 11 | :xrefstyle: full 12 | 13 | 14 | include::adoc-source/background.adoc[] 15 | 16 | include::adoc-source/packages.adoc[] 17 | 18 | include::adoc-source/operations.adoc[] 19 | 20 | include::adoc-source/admin.adoc[] 21 | 22 | include::adoc-source/configuring.adoc[] 23 | 24 | include::adoc-source/custom_install.adoc[] 25 | 26 | include::adoc-source/developing.adoc[] 27 | 28 | include::adoc-source/appendices.adoc[] 29 | -------------------------------------------------------------------------------- 
/doc/Overview_and_Basic_Install.adoc: -------------------------------------------------------------------------------- 1 | = DNS-STATS Visualizer Overview and Basic Install 2 | DNS-STATS 3 | v{dsvversion} 4 | :source-highlighter: highlightjs 5 | :toc: left 6 | :toclevels: 3 7 | :sectnums: 8 | :sectnumlevels: 5 9 | :icons: font 10 | :imagesdir: adoc-source 11 | :xrefstyle: full 12 | 13 | include::adoc-source/overview.adoc[] 14 | 15 | include::adoc-source/basic_install.adoc[] 16 | -------------------------------------------------------------------------------- /doc/adoc-source/background.adoc: -------------------------------------------------------------------------------- 1 | == Background 2 | 3 | The DNS-STATS Visualizer Overview and Basic Installation document provides a high 4 | level view of Visualizer and instructions on how to use the packages to perform 5 | a standard, default installation on 3 hosts. 6 | 7 | This document provides more detailed information on day to day operation of Visualizer, 8 | details on advanced configuration options and how to customise the installation. 9 | 10 | It also covers how to develop new code on top of the default Visualizer framework. 
11 | -------------------------------------------------------------------------------- /doc/adoc-source/dsv-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/doc/adoc-source/dsv-overview.png -------------------------------------------------------------------------------- /doc/adoc-source/dsv-queues.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/doc/adoc-source/dsv-queues.png -------------------------------------------------------------------------------- /doc/adoc-source/packages.adoc: -------------------------------------------------------------------------------- 1 | == Packages 2 | 3 | A number of packages are available for Visualizer. The core packages are required for a default install as described in 4 | xref:Overview_and_Basic_install.adoc#_installing_dns_stats_visualizer[Installing DNS-STATS Visualizer] 5 | 6 | 7 | .Visualiser core packages 8 | [cols="2a,3a"] 9 | |=== 10 | | `dns-stats-visualizer-base` 11 | | Base package required for all installs. 12 | 13 | | `dns-stats-visualizer-import` 14 | | Install on the Datastore host to provide the import framework and PostgreSQL database. 15 | 16 | | `dns-stats-visualizer-clickhouse-server` 17 | | Install on the ClickHouse host to configure the ClickHouse database and schema. 18 | 19 | | `dns-stats-visualizer-grafana-main` 20 | | Install on the Grafana host to provide the default 'main' Visualizer dashboard. 21 | 22 | |=== 23 | 24 | The optional packages provide additional functionality as described in <<_optional_modules>>. 25 | 26 | .Visualiser optional packages 27 | [cols="2a,3a"] 28 | |=== 29 | 30 | | `dns-stats-visualizer-doc` 31 | | Install the documentation. 
32 | 33 | | `dns-stats-visualizer-rssac-notify` 34 | | Install on the Datastore host to gather data for the RSSAC `zone load` metric 35 | 36 | | `dns-stats-visualizer-rssac-reports` 37 | | Install on the Grafana host to generate RSSAC02 format yaml reports. 38 | 39 | | `dns-stats-visualizer-import-mirror` 40 | | Install on the Datastore host to provide an `rsync` mechanism for file import. 41 | 42 | |=== -------------------------------------------------------------------------------- /doc/images/client_subnet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/doc/images/client_subnet.png -------------------------------------------------------------------------------- /doc/images/ip_version.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/doc/images/ip_version.png -------------------------------------------------------------------------------- /doc/images/location_city.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/doc/images/location_city.png -------------------------------------------------------------------------------- /doc/images/query_stats.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/doc/images/query_stats.png -------------------------------------------------------------------------------- /doc/man/man1/dsv-clickhouse-sys-info.adoc: -------------------------------------------------------------------------------- 1 | = dsv-clickhouse-sys-info(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | 
:mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-clickhouse-sys-info - report information on the host ClickHouse system 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-sys-info* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Reads one or more request names from standard input, one per line, and 18 | for each output the corresponding information, one piece per line. 19 | 20 | All disc quantities refer to the disc holding the ClickHouse Visualizer tables, 21 | obtained from the Visualizer configuration. 22 | 23 | This command is intended for use as the backend to a ClickHouse dictionary, 24 | hence the rather curious usage. 25 | 26 | The commands available are: 27 | 28 | * `disc-block-size`. Report the size of a disc block. 29 | * `disc-size`. Report the size of the disc in units of 1K. 30 | * `disc-available`. Report the space free on the disc in units of 1K. 31 | * `disc-percent-free`. Report the percentage of the disc free. 32 | * `disc-size`. Report the percentage of the disc used. 33 | 34 | == OPTIONS 35 | 36 | *-c, --config* [_arg_]:: 37 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 38 | 39 | == EXIT STATUS 40 | 41 | 0 on success, non-zero on any error. 42 | 43 | == SEE ALSO 44 | 45 | link:dsv.cfg.adoc[dsv.cfg]. 46 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-clickhouse-update.adoc: -------------------------------------------------------------------------------- 1 | = dsv-clickhouse-update(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-clickhouse-update - update Visualizer ClickHouse schema on the host 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-clickhouse-update* [_OPTION_]... [_DIRECTORY_] 14 | 15 | == DESCRIPTION 16 | 17 | Given a directory containing numbered DDL files, manage the ClickHouse schema status. 
18 | The command can be used to display the current schema status, apply any schema 19 | updates, or roll back the last installed schema update. The DDL directory defaults to 20 | `/usr/share/dns-stats-visualizer/sql/clickhouse/ddl`, the standard install location 21 | for Visualizer ClickHouse DDL files. 22 | 23 | `dsv-clickhouse-update` first queries a table `dsv.ddl_history` to 24 | determine the highest numbered DDL currently applied. If this table is not found, 25 | `schema.sql` from the DDL directory is applied to create the table. 26 | 27 | The directory is then scanned for all files named `.sql`. Any with a number 28 | higher than the topmost version recorded as applied as considered as 29 | available updates. 30 | 31 | Three actions are then available: 32 | 33 | *update*:: 34 | Apply all DDL files that are not currently applied. This is the default action. 35 | 36 | *rollback*:: 37 | Find version number of the highest DDL currently applied, and check to see if a file 38 | `-rollback.sql` is present. If so, apply that file and mark that version 39 | as no longer applied. 40 | 41 | *status*:: 42 | Report which DDLs are applied and when they were applied. Also report on any 43 | available unapplied DDLs that will be applied during an update. 44 | 45 | == OPTIONS 46 | 47 | *-c, --config* [_arg_]:: 48 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 49 | *-a, --action* [_arg_]:: 50 | Which action to perform. Options are *update* (default), *rollback* or *status*. 51 | *-v, --verbose*:: 52 | Report all DDL actions. When reporting status, report on any rollbacks too. 53 | *-n, --dry-run*:: 54 | Report as if applying any update or rollback, but do not actually apply the change. 55 | *-r, --update-required*:: 56 | Check to see whether unapplied DDL updates are available. 57 | 58 | == EXIT STATUS 59 | 60 | 0 on success, non-zero on any error. 
If checking for unapplied updates, 0 if 61 | all updates are applied, 1 if there are outstanding unapplied updates, 2 on error. 62 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-config.adoc: -------------------------------------------------------------------------------- 1 | = dsv-config(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-config - get Visualizer configuration value 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-config* [_OPTION_]... _KEY_ [_KEY_]... 14 | 15 | == DESCRIPTION 16 | 17 | Obtain a value from Visualizer configuration. Descend the key hierarchy and return 18 | the value found at the end. 19 | 20 | == OPTIONS 21 | 22 | *-c, --config* [_arg_]:: 23 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 24 | 25 | *-r, --random*:: 26 | If the value is a set of comma-separated items, pick one of the items at 27 | random and return just that item. 28 | 29 | == EXAMPLE 30 | 31 | ---- 32 | $ dsv-config -r clickhouse servers 33 | dsv-clickhouse2 34 | ---- 35 | 36 | == EXIT STATUS 37 | 38 | 0 on success, non-zero on any error. 39 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-datastore-setup.adoc: -------------------------------------------------------------------------------- 1 | = dsv-datastore-setup(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-datastore-setup - create Visualizer user and Postgres access on the host 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-datastore-setup* 14 | 15 | == DESCRIPTION 16 | 17 | If no system user `dsv` exists, create one as a system user with disabled login. 18 | 19 | Then create a Postgres user `dsv` and a Postgres database `dsv`. 
20 | 21 | == EXIT STATUS 22 | 23 | 0 on success, non-zero on any error. 24 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-find-node-id.adoc: -------------------------------------------------------------------------------- 1 | = dsv-find-node-id(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-find-node-id - get Visualizer node ID for a node server 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-find-node-id* [_OPTION_]... _SERVERNAME_ _NODENAME_ 14 | 15 | == DESCRIPTION 16 | 17 | Find the node ID of a Visualizer node, given the server name and the node name. 18 | If an alternate server or node name is specified in Postgres, either the primary 19 | or the alternate name may be used. See *dsv-nodes-update(1)*. 20 | 21 | == OPTIONS 22 | 23 | *-c, --config* [_arg_]:: 24 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 25 | 26 | == EXIT STATUS 27 | 28 | 0 on success, non-zero on any error. 29 | 30 | == SEE ALSO 31 | 32 | link:dsv-nodes-update.adoc[dsv-nodes-update],link:dsv.cfg.adoc[dsv.cfg]. 33 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-geo-update.adoc: -------------------------------------------------------------------------------- 1 | = dsv-geo-update(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-geo-update - update Visualizer geographical locations 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-geo-update* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Download the latest set of geographic locations used in Visualizer data, and add or update 18 | the geographic location information in the Postgres database. 
19 | 20 | MaxMind require that users obtain a licence key in order to download the zip archive 21 | with the location data. See https://dev.maxmind.com/geoip/geoip2/geolite2/. 22 | This licence key must be specified in Visualizer configuration. 23 | 24 | == OPTIONS 25 | 26 | *-c, --config* [_arg_]:: 27 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 28 | 29 | *-v, --verbose*:: 30 | Report progress to standard output. 31 | 32 | *-n, --dry-run*:: 33 | If enabled, treats update as a trial run. The information is read and the current set 34 | of location data printed to standard output. The Postgres database is not altered. 35 | 36 | *--zipfile* [_arg_]:: 37 | The MaxMind City CSV data archive `GeoLite2-City-CSV.zip` has already been 38 | downloaded as _arg_. Update the location data from the file. 39 | 40 | == EXIT STATUS 41 | 42 | Non-zero on any error. 43 | 44 | == SEE ALSO 45 | 46 | link:dsv.cfg.adoc[dsv.cfg]. 47 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-import-freeze.adoc: -------------------------------------------------------------------------------- 1 | = dsv-import-freeze(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-import-freeze - freeze Visualizer data import 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-import-freeze* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Freeze Visualizer import. All subsequent calls to `dsv-import` will 18 | exit with the message _Import is frozen_. 19 | 20 | This command must be run as the user owning the datastore, or `root`. 21 | 22 | == OPTIONS 23 | 24 | *-c, --config* [_arg_]:: 25 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 26 | 27 | == EXAMPLE 28 | 29 | ---- 30 | $ dsv-import-freeze 31 | Visualizer import frozen. 
32 | ---- 33 | 34 | == EXIT STATUS 35 | 36 | 0 on success, non-zero on any error or import already frozen. 37 | 38 | == SEE ALSO 39 | 40 | link:dsv-import-thaw.adoc[dsv-import-thaw(1)], 41 | link:dsv-import.adoc[dsv-import(1)], 42 | link:dsv.cfg.adoc[dsv.cfg(5)]. 43 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-import-mirror.adoc: -------------------------------------------------------------------------------- 1 | = dsv-import-mirror(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-import-mirror - create parallel datastore containing hard links to new incoming files 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-import-mirror* _TARGET_ROOT_ _SERVER_NODE_PATTERN_ [_SERVER_NODE_PATTERN_]... 14 | 15 | == DESCRIPTION 16 | 17 | A server process that monitors new files in datastore `incoming` directories and 18 | creates a parallel directory structure containing hard links to the new files. 19 | 20 | _TARGET_ROOT_ specifies the root directory of the mirroring directory structure. 21 | 22 | _SERVER_NODE_PATTERN_ gives a *glob(7)* pattern specifying server 23 | and node directories to be mirrored. 24 | 25 | == EXAMPLE 26 | 27 | ---- 28 | $ dsv-import-mirror /opt/data/outgoing_staging Server/* 29 | ---- 30 | 31 | == EXIT STATUS 32 | 33 | 0 on success, non-zero on any error. 34 | 35 | == SEE ALSO 36 | 37 | link:dsv.cfg.adoc[dsv.cfg(5)], 38 | glob(7).
39 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-import-thaw.adoc: -------------------------------------------------------------------------------- 1 | = dsv-import-thaw(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-import-thaw - thaw Visualizer data import 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-import-thaw* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Thaw Visualizer import. All subsequent calls to `dsv-import` will 18 | proceed with the import. 19 | 20 | This command must be run as the user owning the datastore, or `root`. 21 | 22 | == OPTIONS 23 | 24 | *-c, --config* [_arg_]:: 25 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 26 | 27 | == EXAMPLE 28 | 29 | ---- 30 | $ dsv-import-thaw 31 | Visualizer import thawed. 32 | ---- 33 | 34 | == EXIT STATUS 35 | 36 | 0 on success, non-zero on any error or import already thawed. 37 | 38 | 39 | == SEE ALSO 40 | 41 | link:dsv-import-freeze.adoc[dsv-import-freeze(1)], 42 | link:dsv-import.adoc[dsv-import(1)], 43 | link:dsv.cfg.adoc[dsv.cfg(5)]. 44 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-log.adoc: -------------------------------------------------------------------------------- 1 | = dsv-log(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-log - log to Visualizer logging 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-log* [_OPTION_]... [_TEXT_]... 14 | 15 | == DESCRIPTION 16 | 17 | Log text, or an empty line if no text specified, to the Visualizer logger. 18 | 19 | == OPTIONS 20 | 21 | *-c, --config* [_arg_]:: 22 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 
23 | 24 | *-l, --level*:: 25 | The logging level to use. Must be `critical`, `error`, `warning`, 26 | `info` or `debug`. If not specified, `info` is used. 27 | 28 | == EXAMPLE 29 | 30 | ---- 31 | $ dsv-log -l debug File already present. 32 | ---- 33 | 34 | == EXIT STATUS 35 | 36 | 0 on success, non-zero on any error. 37 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-nodes-update.adoc: -------------------------------------------------------------------------------- 1 | = dsv-nodes-update(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-nodes-update - update Visualizer node details from file 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-nodes-update* [_OPTION_]... _NODES FILE_ 14 | 15 | == DESCRIPTION 16 | 17 | Read node information from a CSV file and update the node information in 18 | the Postgres database. 19 | 20 | The input CSV file must contain the following fields: 21 | 22 | 1. The server name(s). Mandatory. 23 | 2. The node name(s). Mandatory. 24 | 3. The node region name. Mandatory. 25 | 4. The node country name. Mandatory. 26 | 5. The node city name. Mandatory. 27 | 6. The node instance name. Mandatory. 28 | 7. The node service address. Optional. 29 | 8. Visibility value for the node. Optional. 30 | 31 | Any additional fields are ignored. 32 | 33 | === Node service address 34 | 35 | If no node service address is given, the node is not used in determining zone load times. 36 | 37 | === Visibility values 38 | 39 | The node visibility value determines whether a node is included or not included in 40 | Grafana dashboards. 41 | 42 | *all*:: 43 | The node is included on all dashboards. This is the default if no visibility value 44 | is specified. 45 | 46 | *test*:: 47 | The node is included on only the test dashboards. It is not included in main 48 | dashboards. 
49 | 50 | *main*:: 51 | The node is included on only the main dashboards. It is not included in test 52 | dashboards. 53 | 54 | *none*:: 55 | The node is not included on any dashboards. 56 | 57 | === Alternate names 58 | 59 | In the case of server name and node name, an optional alternate name may be 60 | given in addition to the primary name by separating it from the primary name with 61 | a | character. The primary name is always the name displayed, but the alternate 62 | names are recognised when looking up node IDs with *dsv-find-node-id(1)*. 63 | 64 | The following line is an example input line from a CSV file. 65 | 66 | ---- 67 | server|altserver,name|altname,region,country,city,instance,169.254.0.1 68 | ---- 69 | 70 | Server, node and instance names can contain only alphanumeric characters, 71 | `.` and `-`. 72 | 73 | Rows from the input file are processed in sequence. If any row contains the same 74 | server or node name as a previous row, the server or node data will be updated 75 | to the value in the row, and the values in the previous row will be discarded. 76 | If supplying alternate names for a server, therefore, the alternate name must appear 77 | in the last record containing the server. To avoid confusion, specifying the 78 | alternate name for all lines containing the server is recommended. 79 | 80 | A server or a node can be renamed by initially giving the new name as an alternate 81 | name, and then after that is successfully updated removing the old name. 82 | 83 | == OPTIONS 84 | 85 | *-c, --config* [_arg_]:: 86 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 87 | 88 | == BUGS 89 | 90 | There is currently no way to remove a node, This is perhaps not a bad thing, 91 | as it ensures that any historic node data in the Visualizer database will always have 92 | associated node details. 93 | 94 | == EXIT STATUS 95 | 96 | Non-zero on any error. 
97 | 98 | == SEE ALSO 99 | 100 | link:dsv-find-node-id.adoc[dsv-find-node-id], link:dsv.cfg.adoc[dsv.cfg]. 101 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-postgres-update.adoc: -------------------------------------------------------------------------------- 1 | = dsv-postgres-update(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-postgres-update - update Visualizer Postgres schema on the host 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-postgres-update* [_OPTION_]... [_DIRECTORY_] 14 | 15 | == DESCRIPTION 16 | 17 | Given a directory containing numbered DDL files, manage the Postgres schema status. 18 | The command can be used to display the current schema status, apply any schema 19 | updates, or roll back the last installed schema update. The DDL directory defaults to 20 | `/usr/share/dns-stats-visualizer/sql/postgres/ddl`, the standard install location 21 | for Visualizer Postgres DDL files. 22 | 23 | `dsv-postgres-update` first queries a table `dsv.ddl_history` to 24 | determine the highest numbered DDL currently applied. If this table is not found, 25 | `schema.sql` from the DDL directory is applied to create the table. 26 | 27 | The directory is then scanned for all files named `.sql`. Any with a number 28 | higher than the topmost version recorded as applied are considered as 29 | available updates. 30 | 31 | Three actions are then available: 32 | 33 | *update*:: 34 | Apply all DDL files that are not currently applied. This is the default action. 35 | 36 | *rollback*:: 37 | Find version number of the highest DDL currently applied, and check to see if a file 38 | `-rollback.sql` is present. If so, apply that file and mark that version 39 | as no longer applied. 40 | 41 | *status*:: 42 | Report which DDLs are applied and when they were applied.
Also report on any 43 | available unapplied DDLs that will be applied during an update. 44 | 45 | == OPTIONS 46 | 47 | *-c, --config* [_arg_]:: 48 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 49 | *-a, --action* [_arg_]:: 50 | Which action to perform. Options are *update* (default), *rollback* or *status*. 51 | *-v, --verbose*:: 52 | Report all DDL actions. When reporting status, report on any rollbacks too. 53 | *-n, --dry-run*:: 54 | Report as if applying any update or rollback, but do not actually apply the change. 55 | *-r, --update-required*:: 56 | Check to see whether unapplied DDL updates are available. 57 | 58 | == EXIT STATUS 59 | 60 | 0 on success, non-zero on any error. If checking for unapplied updates, 0 if 61 | all updates are applied, 1 if there are outstanding unapplied updates, 2 on error. 62 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-prune.adoc: -------------------------------------------------------------------------------- 1 | = dsv-prune(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-prune - report Visualizer database disc usage and optionally delete oldest partition 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-prune* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Reports the current percentage of available disc space used by the Visualizer 18 | database on each of the Visualizer cluster machines. 19 | 20 | If the used disc space percentage is at or above a threshold, attempt to 21 | free some disc space by deleting all data partitions in a specified category that are 22 | more than a given number of days old. 23 | 24 | The available categories of data are: 25 | 26 | . `raw`. Raw data. Records of individual queries and responses. 27 | . `5min`. Summaries of raw data aggregated over 5 minutes. 
28 | 29 | If the *--incremental* flag is given, partitions are deleted one at a time starting with the 30 | oldest and deletion stops once disc usage falls below the specified disc usage threshold. 31 | 32 | In all cases, user confirmation or use of the *--force* flag is required before 33 | a partition is deleted. 34 | 35 | Deleted partitions are deleted from all Visualizer database cluster servers on which 36 | the partition is present. 37 | 38 | Data is partitioned into weekly partitions. The partition name displayed will be 39 | of the form `YYYYWW` where `YYYY` is the year and `WW` is the week number 40 | within the year. 41 | 42 | == OPTIONS 43 | 44 | *-c, --config* [_arg_]:: 45 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 46 | 47 | *-t, --threshold* [_arg_]:: 48 | Set the disc usage percent threshold. If the disc usage is at or above this percentage, 49 | find the oldest partition and ask if should be deleted. Default is 80. 50 | 51 | *--force*:: 52 | If disc usage is at or above the threshold, don't ask for confirmation, but 53 | delete partitions. Use this option with caution. 54 | 55 | *-l, --list-partitions*:: 56 | Don't check disc usage, just list the current partitions for all data categories 57 | to standard output. 58 | 59 | *--report-only*:: 60 | Only report disc usage. Do not delete any partition, regardless of other options. 61 | 62 | *-n, --dry-run*:: 63 | If deleting partitions, report the partitions to be deleted, but do not delete them. 64 | 65 | *-d, --age-data* [_arg_]:: 66 | The category of data to consider for deletion. If specified, must be one 67 | of the following options: 68 | * `raw`. Raw data. Records of individual queries and responses. 69 | * `5min`. Summaries of raw data aggregated over 5 minutes. 70 | Default is `raw`. 71 | 72 | *-a, --max-age* [_arg_]:: 73 | Delete all partitions in the specified data category which are 74 | older than the given age in days. 
Default is 365, so where data is more than a 75 | year old. 76 | 77 | *-i, --incremental*:: 78 | After deleting a partition, re-check the disc space and stop if now below 79 | the threshold. Default is to delete all over-age partitions. 80 | 81 | == EXIT STATUS 82 | 83 | 0 on success, non-zero on any error. 84 | 85 | == SEE ALSO 86 | 87 | link:dsv.cfg.adoc[dsv.cfg]. 88 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-queue-details.adoc: -------------------------------------------------------------------------------- 1 | = dsv-queue-details(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-queue-details - report per-node counts for import queues 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-queue-details* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Generate per-node counts for files currently in the import queues. 18 | Print a report, or update ClickHouse with the data. 19 | 20 | == OPTIONS 21 | 22 | *-c, --config* [_arg_]:: 23 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 24 | 25 | *-p, --print*:: 26 | Print a report to standard output 27 | 28 | *-s, --store*:: 29 | Update the queue size data in ClickHouse. 30 | 31 | == EXIT STATUS 32 | 33 | Non-zero on any error. 34 | 35 | == SEE ALSO 36 | 37 | link:dsv.cfg.adoc[dsv.cfg]. 38 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-queue-freeze.adoc: -------------------------------------------------------------------------------- 1 | = dsv-queue-freeze(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-queue-freeze - freeze processing for named Visualizer queue 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-queue-freeze* [_OPTION_]... 
[_QUEUENAME_]... 14 | 15 | == DESCRIPTION 16 | 17 | Freeze named Visualizer queue. All subsequent calls to `dsv-queue` will 18 | exit with the message _Visualizer queue already frozen_. 19 | 20 | This command must be run as the user owning the datastore, or `root`. 21 | 22 | == OPTIONS 23 | 24 | *-c, --config* [_arg_]:: 25 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 26 | 27 | == EXAMPLE 28 | 29 | ---- 30 | $ dsv-queue-freeze import-tsv 31 | Visualizer queue import-tsv frozen. 32 | ---- 33 | 34 | == EXIT STATUS 35 | 36 | 0 on success, non-zero on any error or queue already frozen. 37 | 38 | == SEE ALSO 39 | 40 | link:dsv-queue-thaw.adoc[dsv-queue-thaw(1)], 41 | link:dsv-status.adoc[dsv-status(1)], 42 | link:dsv.cfg.adoc[dsv.cfg(5)]. 43 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-queue-thaw.adoc: -------------------------------------------------------------------------------- 1 | = dsv-queue-thaw(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-queue-thaw - thaw processing for named Visualizer queue 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-queue-thaw* [_OPTION_]... [_QUEUENAME_]... 14 | 15 | == DESCRIPTION 16 | 17 | Thaw named Visualizer queue. All subsequent calls to `dsv-queue` will 18 | proceed with processing the queue. 19 | 20 | This command must be run as the user owning the datastore, or `root`. 21 | 22 | == OPTIONS 23 | 24 | *-c, --config* [_arg_]:: 25 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 26 | 27 | == EXAMPLE 28 | 29 | ---- 30 | $ dsv-queue-thaw import-tsv 31 | Visualizer queue import-tsv thawed. 32 | ---- 33 | 34 | == EXIT STATUS 35 | 36 | 0 on success, non-zero on any error or queue already thawed.
37 | 38 | == SEE ALSO 39 | 40 | link:dsv-queue-freeze.adoc[dsv-queue-freeze(1)], 41 | link:dsv-status.adoc[dsv-status(1)], 42 | link:dsv.cfg.adoc[dsv.cfg(5)]. 43 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-rssac-daemon-tester.adoc: -------------------------------------------------------------------------------- 1 | = dsv-rssac-daemon-tester(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-rssac-daemon-tester - send a NOTIFY message for a zone 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-rssac-daemon-tester* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Send a DNS NOTIFY message for a zone. 18 | This is a utility to aid troubleshooting of the Visualizer RSSAC daemon. 19 | 20 | == OPTIONS 21 | 22 | *-c, --config* [_arg_]:: 23 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 24 | 25 | *-s, --server* [_arg_]:: 26 | The server to send the message to. Default is `localhost`. 27 | 28 | *-p, --port* [_arg_]:: 29 | The port to send the message to. Default is 53. 30 | 31 | *-n, --serial* [_arg_]:: 32 | The zone serial number to send. Required option. 33 | 34 | *-z, --zone* [_arg_]:: 35 | The zone to send. Required option. 36 | 37 | == EXIT STATUS 38 | 39 | Non-zero on any error. 40 | 41 | == SEE ALSO 42 | 43 | link:dsv-rssac-daemon.adoc[dsv-rssac-daemon], link:dsv.cfg.adoc[dsv.cfg]. 
44 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-rssac-daemon.adoc: -------------------------------------------------------------------------------- 1 | = dsv-rssac-daemon(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-rssac-daemon - listen for NOTIFY message for a zone 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-rssac-daemon* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Started by `systemd`, listens for NOTIFY messages for the zone configured 18 | by `rssac.notify-zone`. When notify is received, extracts the serial number 19 | and runs `dsv-rssac-notify`, passing the serial number as the only 20 | argument. 21 | 22 | As installed, this configures `systemd` to open a socket on port 53 listening 23 | on all interfaces. It passes this socket to `dsv-rssac-daemon`. 24 | See `/lib/systemd/system/dsv--rssac.socket` and 25 | override it with an entry in `/etc/systemd/system` if you want to change 26 | the port or restrict listening to specific interfaces. 27 | 28 | == OPTIONS 29 | 30 | *-c, --config* [_arg_]:: 31 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 32 | 33 | == EXIT STATUS 34 | 35 | Non-zero on any error. 36 | 37 | == SEE ALSO 38 | 39 | link:dsv-rssac-notify.adoc[dsv-rssac-notify], link:dsv.cfg.adoc[dsv.cfg]. 40 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-rssac-notify.adoc: -------------------------------------------------------------------------------- 1 | = dsv-rssac-notify(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-rssac-notify - get latency and size info for a zone 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-rssac-notify* [_OPTION_]... 
_SERIAL_ 14 | 15 | == DESCRIPTION 16 | 17 | Check to see if latency and size information is already recorded for the 18 | zone configured by `rssac.zone`. If none is present, and the given serial 19 | number is later than the last seen, probe for and record the zone 20 | latency on all active Visualizer nodes that do not have the `not-rssac` flag set, 21 | and also record the zone size. 22 | 23 | This is designed to be run from `dsv-rssac-daemon`. 24 | 25 | == OPTIONS 26 | 27 | *-c, --config* [_arg_]:: 28 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 29 | 30 | *-n, --dryrun*:: 31 | Perform a trial run. Print results but do not record them in the database. 32 | 33 | *-t, --timeout* [_arg_]:: 34 | Overall timeout in seconds for load time. Give up on nodes that have not been 35 | updated after this time. Default is 800 seconds. 36 | 37 | == EXIT STATUS 38 | 39 | Non-zero on any error, or if the check is not considered successful due to too few nodes 40 | responding. 41 | 42 | == SEE ALSO 43 | 44 | link:dsv-rssac-daemon.adoc[dsv-rssac-daemon], link:dsv.cfg.adoc[dsv.cfg]. 45 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-rssac-reports.adoc: -------------------------------------------------------------------------------- 1 | = dsv-rssac-reports(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-rssac-reports - generate RSSAC reports for a date 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-rssac-reports* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Generate the RSSAC reports for a given date. You can choose to generate 18 | individual reports, or to generate only plots or YAML. 19 | 20 | == OPTIONS 21 | 22 | *-c, --config* [_arg_]:: 23 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`.
24 | 25 | *--date* [_arg_]:: 26 | The date for which to generate reports. The date is specified as YYYY-MM-DD. 27 | Default is 7 days before the current date. 28 | 29 | *--output-dir* [_arg_]:: 30 | The base directory for the output files. Default is the current directory. 31 | 32 | *-r, --report* [_arg_]:: 33 | The report to generate. The options are `load-time`, `rcode-volume`, 34 | `traffic-sizes`, `traffic-volume`, `unique-sources`, `zone-size` 35 | or `all`, which generates all available reports. Required value. 36 | 37 | *-s, --server* [_arg_]:: 38 | The server for which to generate reports. Required value. 39 | 40 | *--report-server-name* [_arg_]:: 41 | The service name to put into the RSSAC YAML reports. 42 | Defaults to the given server name, lowercased. 43 | 44 | *--report-file-prefix* [_arg_]:: 45 | The prefix for the RSSAC report output filenames. 46 | Defaults to the given server name, lowercased. 47 | 48 | *--no-plots*:: 49 | Do not generate the plots. 50 | 51 | *--no-yaml*:: 52 | Do not generate the YAML reports. 53 | 54 | *--no-cert-check*:: 55 | Do not check the hostname in the certificate for HTTPS connections. 56 | 57 | == EXIT STATUS 58 | 59 | Non-zero on any error, or if any YAML report is missing data. 60 | 61 | == SEE ALSO 62 | 63 | link:dsv.cfg.adoc[dsv.cfg]. 64 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-status.adoc: -------------------------------------------------------------------------------- 1 | = dsv-status(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-status - print the current Visualizer queue status 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-status* [_OPTION_]... [_STATUS ITEM_]... 14 | 15 | == DESCRIPTION 16 | 17 | Display the current Visualizer queue status.
You can choose to display individual 18 | status items for a particular queue, all items for a particular queue, or (if no 19 | arguments are given) all items for all queues. 20 | 21 | You can select from the following status items: 22 | 23 | * `len`. The number of items in the queue. 24 | * `running`. The number of items in the queue currently being processed. 25 | * `workers`. The number of worker processes configured to run jobs from this queue. 26 | * `frozen`. Whether the queue is currently running or frozen. 27 | 28 | == OPTIONS 29 | 30 | *-c, --config* [_arg_]:: 31 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 32 | 33 | *-q, --queue* [_arg_]:: 34 | The name of the queue to show details for. Possible names are `cdns-to-tsv`, 35 | `cdns-to-pcap` and `import-tsv`. If not specified, show details for all queues. 36 | 37 | == EXIT STATUS 38 | 39 | Non-zero on any error. 40 | 41 | == SEE ALSO 42 | 43 | link:dsv.cfg.adoc[dsv.cfg]. 44 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-tld-update.adoc: -------------------------------------------------------------------------------- 1 | = dsv-tld-update(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-tld-update - update Visualizer details of Top Level Domains (TLDs) 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-tld-update* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Download the latest list of Top Level Domains from IANA and update the 18 | TLD information in the Postgres database. 19 | 20 | TLDs no longer present in the IANA list are deleted. 21 | New TLDs are added. If they can be determined to be country code TLDs, 22 | they are assigned a TLD type of `ccTLD`. Otherwise they are assigned a 23 | type of `New-gTLD`. 24 | 25 | Each addition or removal is logged.
26 | 27 | == OPTIONS 28 | 29 | *-c, --config* [_arg_]:: 30 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 31 | 32 | *-v, --verbose*:: 33 | Report each addition or removal to standard output. 34 | 35 | *-n, --dry-run*:: 36 | If enabled, treats update as a trial run. Changes are logged and optionally reported, 37 | but the Postgres database is not altered. 38 | 39 | == EXIT STATUS 40 | 41 | Non-zero on any error. 42 | 43 | == SEE ALSO 44 | 45 | link:dsv.cfg.adoc[dsv.cfg]. 46 | -------------------------------------------------------------------------------- /doc/man/man1/dsv-worker.adoc: -------------------------------------------------------------------------------- 1 | = dsv-worker(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv-worker - process jobs from Visualizer queues 10 | 11 | == SYNOPSIS 12 | 13 | *dsv-worker* [_OPTION_]... 14 | 15 | == DESCRIPTION 16 | 17 | Process jobs from Visualizer job queue. If no jobs are available, wait indefinitely 18 | for a job to be added to a queue. 19 | 20 | This command must be run as the user owning the datastore, or `root`. 21 | 22 | dsv-worker processes jobs from three queues; `cdns-to-tsv`, `cdns-to-pcap` 23 | and `import-tsv`. A job is processed by reading the job argument. The job 24 | argument is the path of the file to be processed. This may be optionally followed 25 | by `|` and the retry count of the job. 26 | 27 | The job is executed by passing its file path argument to an external 28 | process named `dsv-`. 29 | 30 | When a job is placed on the queue, a hard link to the job argument (the file path to 31 | process) is created in a subdirectory `pending` of the source directory. 32 | 33 | On completion, the external process exit code is examined. 34 | 35 | [start=0] 36 | . The job succeeded. Unlink the pending link, and inform GearMan the job is complete. 
37 | . A permanent failure. Move the link into a directory `error` under the node 38 | directory and mark the job failed in GearMan. Log the failure and the job 39 | standard output and standard error. 40 | . A transient failure. If the retry limit has been reached, handle as a permanent 41 | failure. Otherwise re-add the job to the job queue after incrementing the 42 | retry counter. Log the transient failure and the job standard output and standard 43 | error. 44 | . The job succeeded, but no unlink is required. Inform GearMan the job is complete. 45 | 46 | Any other return code is logged as an infrastructure error and *dsv-worker* exits. 47 | 48 | == OPTIONS 49 | 50 | *-c, --config* [_arg_]:: 51 | Configuration file location. Default is `/etc/dns-stats-visualizer/dsv.conf`. 52 | 53 | *--fail-delay* [_arg_]:: 54 | The number of seconds to delay when a job fails before processing the next 55 | job. Default 5. 56 | 57 | *--max-retries* [_arg_]:: 58 | The number of times to retry a job that fails with a transient failure error code 59 | before treating the failure as permanent. Default 5. 60 | 61 | *--ignore-queue* [_arg_]:: 62 | Do not register to process jobs on queue _arg_. This option may be specified 63 | multiple times. 64 | 65 | == EXIT STATUS 66 | 67 | Non-zero on any error. 68 | 69 | == SEE ALSO 70 | 71 | link:dsv.cfg.adoc[dsv.cfg]. 72 | -------------------------------------------------------------------------------- /doc/man/man1/dsv.adoc: -------------------------------------------------------------------------------- 1 | = dsv(1) 2 | Jim Hague, Sinodun Internet Technologies 3 | :manmanual: DNS-STATS-VISUALIZER 4 | :mansource: DNS-STATS-VISUALIZER 5 | :man-linkstyle: blue R <> 6 | 7 | == NAME 8 | 9 | dsv - issue DNS STATS Visualizer commands 10 | 11 | == SYNOPSIS 12 | 13 | *dsv* _COMMAND_ [_OPTION_]... [_ARGS_]... 14 | 15 | == DESCRIPTION 16 | 17 | All Visualizer commands can be run using the `dsv` command, followed by the 18 | sub-command required.
Alternatively they can be run as `dsv-`. 19 | 20 | The following commands may be available: 21 | 22 | * `clickhouse-sys-info` - report system info for ClickHouse to use. 23 | * `clickhouse-update` - update Visualizer ClickHouse schema on host. 24 | * `config` - get Visualizer configuration value. 25 | * `find-node-id` - get Visualizer node ID for server. 26 | * `import-freeze` - freeze Visualizer imports. 27 | * `import-thaw` - resume Visualizer imports. 28 | * `import` - scan file store and add new jobs to processing queue. 29 | * `log` - log text to Visualizer logging. 30 | * `nodes-update` - update Visualizer node details from file. 31 | * `postgres-update` - update Visualizer PostgreSQL schema on host. 32 | * `prune` - delete old Visualizer ClickHouse partitions. 33 | * `queue-freeze` - freeze Visualizer queue. 34 | * `queue-thaw` - resume Visualizer queue. 35 | * `rssac-daemon-tester` - send DNS NOTIFY message. 36 | * `rssac-daemon` - daemon listening for DNS NOTIFY. 37 | * `rssac-notify` - performs actions to be done on receiving DNS NOTIFY. 38 | * `rssac-reports` - generate RSSAC reports. 39 | * `status` - print current Visualizer queue status. 40 | * `tld-update` - update current list of TLDs. 41 | * `worker` - process jobs from Visualizer queues. 42 | 43 | == SEE ALSO 44 | 45 | `dsv-clickhouse-update`(1), `dsv-config`(1), `dsv-find-node-id`(1), 46 | `dsv-import`(1), `dsv-nodes-update`(1), `dsv-postgres-update`(1), 47 | `dsv-status`(1), `dsv-worker`(1) 48 | 49 | == EXIT STATUS 50 | 51 | 0 on success, non-zero on any error. 52 | -------------------------------------------------------------------------------- /doc/release-notes.adoc: -------------------------------------------------------------------------------- 1 | = DNS-STATS Visualizer release notes 2 | Jim Hague 3 | 4 | == Release 1.0.1 5 | Released 2021-05-28. 6 | 7 | * Change name of bar chart plugin to fix clash with original plugin. 8 | * Fix instance selector to select on all items.
9 | * Add more Chrome pre-requisites to Grafana package for image rendering plugin. 10 | 11 | == Release 1.0.0 12 | Released 2021-02-11. 13 | 14 | * Initial release. 15 | -------------------------------------------------------------------------------- /etc/GeoIP.conf: -------------------------------------------------------------------------------- 1 | # The following AccountID and LicenseKey are required placeholders. 2 | # For geoipupdate versions earlier than 2.5.0, use UserId here instead of AccountID. 3 | AccountId 0 4 | LicenseKey 000000000000 5 | 6 | # Include one or more of the following edition IDs: 7 | # * GeoLite2-City - GeoLite 2 City 8 | # * GeoLite2-Country - GeoLite2 Country 9 | # For geoipupdate versions earlier than 2.5.0, use ProductIds here instead of EditionIDs. 10 | EditionIds GeoLite2-City GeoLite2-Country GeoLite2-ASN 11 | -------------------------------------------------------------------------------- /etc/clickhouse/config.d/cluster.xml.dsv: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 100 7 | true 8 | 9 | dsv-clickhouse 10 | 9000 11 | 12 | 13 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /etc/clickhouse/config.d/compression.xml.dsv: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 10000000000 6 | 7 | 0.01 8 | 9 | 10 | zstd 11 | 6 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /etc/clickhouse/config.d/config.xml.dsv: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | :: 6 | 7 | notice 8 | /var/log/clickhouse-server/clickhouse-server.log 9 | /var/log/clickhouse-server/clickhouse-server.err.log 10 | 1000M 11 | 10 12 | 13 | 22 | UTC 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | dictionaries.d/*.xml 33 | 34 | 
-------------------------------------------------------------------------------- /etc/clickhouse/dictionaries.d/geolocation.xml: -------------------------------------------------------------------------------- 1 | 2 | Geolocation data 3 | 4 | geolocation 5 | 6 | 7 | DSN=dsv 8 | geolocation
9 |
10 | 11 | 300 12 | 13 | 14 | 15 | 16 | 17 | id 18 | 19 | 20 | country_id 21 | UInt64 22 | 0 23 | 24 | 25 | name 26 | String 27 | Unknown 28 | 29 | 30 | latitude 31 | Float32 32 | 0.0 33 | 34 | 35 | longitude 36 | Float32 37 | 0.0 38 | 39 | 40 |
41 |
42 | -------------------------------------------------------------------------------- /etc/clickhouse/dictionaries.d/iana_text.xml: -------------------------------------------------------------------------------- 1 | 2 | IANA text metadata 3 | 4 | iana_text 5 | 6 | 7 | DSN=dsv 8 | iana_text
9 |
10 | 11 | 300 12 | 13 | 14 | 15 | 16 | 17 | 18 | registry_name 19 | String 20 | 21 | 22 | 23 | value_name 24 | String 25 | 26 | 27 | 28 | 29 | value 30 | UInt32 31 | 0 32 | 33 | 34 | value_description 35 | String 36 | Unknown 37 | 38 | 39 |
40 |
41 | -------------------------------------------------------------------------------- /etc/clickhouse/dictionaries.d/node_text.xml: -------------------------------------------------------------------------------- 1 | 2 | Node text metadata 3 | 4 | node_text 5 | 6 | 7 | DSN=dsv 8 | node_text
9 |
10 | 11 | 300 12 | 13 | 14 | 15 | 16 | 17 | 18 | name 19 | String 20 | 21 | 22 | 23 | server_name 24 | String 25 | 26 | 27 | 28 | 29 | node_id 30 | UInt16 31 | 0 32 | 33 | 34 | instance_name 35 | String 36 | Unknown 37 | 38 | 39 | city_name 40 | String 41 | Unknown 42 | 43 | 44 | country_name 45 | String 46 | Unknown 47 | 48 | 49 | region_name 50 | String 51 | Unknown 52 | 53 | 54 | flags 55 | UInt64 56 | 0 57 | 58 | 59 |
60 |
61 | -------------------------------------------------------------------------------- /etc/clickhouse/dictionaries.d/server_address.xml: -------------------------------------------------------------------------------- 1 | 2 | Server address metadata 3 | 4 | server_address 5 | 6 | 7 | DSN=dsv 8 | server_address
9 |
10 | 11 | 300 12 | 13 | 14 | 15 | 16 | 17 | id 18 | 19 | 20 | address 21 | String 22 | Unknown 23 | 24 | 25 |
26 |
27 | -------------------------------------------------------------------------------- /etc/clickhouse/dictionaries.d/sysinfo.xml: -------------------------------------------------------------------------------- 1 | 2 | System info 3 | 4 | sysinfo 5 | 6 | 7 | dsv-clickhouse-sys-info 8 | TabSeparated 9 | 10 | 11 | 1 12 | 13 | 14 | 20 15 | 16 | 17 | 18 | 19 | 20 | item 21 | String 22 | 23 | 24 | 25 | value 26 | String 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /etc/clickhouse/dictionaries.d/tld_text.xml: -------------------------------------------------------------------------------- 1 | 2 | TLD text metadata 3 | 4 | tld_text 5 | 6 | 7 | DSN=dsv 8 | tld_text
9 |
10 | 11 | 300 12 | 13 | 14 | 15 | 16 | 17 | 18 | name 19 | String 20 | 21 | 22 | 23 | 24 | tld_type 25 | String 26 | Unknown 27 | 28 | 29 | ulabel 30 | String 31 | 32 | 33 | 34 |
35 |
36 | -------------------------------------------------------------------------------- /etc/clickhouse/users.d/profiles.xml.dsv: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 10000000000 9 | 10 | 11 | 0 12 | 13 | 20 | nearest_hostname 21 | 22 | 23 | 24 | 25 | 1 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /etc/clickhouse/users.d/users.xml.dsv: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 20 | 21 | 24 | 25 | 26 | dsv 27 | 28 | ::/0 29 | 30 | default 31 | default 32 | 33 | 34 | 35 | 36 | ::/0 37 | 38 | readonly 39 | default 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /etc/dns-stats-visualizer/dsv.cfg: -------------------------------------------------------------------------------- 1 | # 2 | # Sample Visualizer configuration file. 3 | # 4 | # This file contains a subset of the available configuration items. 5 | # The items here are those that are commonly modified for individual 6 | # installations. The values given here are the default values. 7 | # 8 | # For the full list of configuration items, see dsv.cfg(5). 9 | # 10 | [datastore] 11 | path=/var/lib/dns-stats-visualizer/cdns/ 12 | cdns_file_pattern=*.cdns.xz 13 | user=dsv 14 | 15 | [pcap] 16 | compress=Y 17 | compression-level=2 18 | 19 | [postgres] 20 | user=dsv 21 | password=dsv 22 | 23 | [clickhouse] 24 | # For clusters, separate hostnames with commas. 25 | # servers=dsv-clickhouse,dsv-clickhouse2,dsv-clickhouse3,dsv-clickhouse4 26 | servers=dsv-clickhouse 27 | dbdir=/var/lib/clickhouse 28 | import-server=dsv-clickhouse 29 | user=dsv 30 | password=dsv 31 | 32 | [rssac] 33 | grafana-url=https://localhost 34 | outdir=. 35 | server=Z-Root 36 | zone=. 
37 | 38 | [geo] 39 | licencekey=XXXXXXXXXXXX 40 | 41 | [loggers] 42 | keys=root,gear 43 | 44 | [logger_root] 45 | #level=DEBUG 46 | level=INFO 47 | handlers=syslog 48 | 49 | [logger_gear] 50 | level=ERROR 51 | qualname=gear 52 | propagate=0 53 | handlers=syslog 54 | -------------------------------------------------------------------------------- /etc/dns-stats-visualizer/nodes.csv.sample: -------------------------------------------------------------------------------- 1 | ######################### NODES CONFIGURATION FILE ################### 2 | # Format is: server-name, node-name, node-region, node-country, 3 | # node-city,node-instance,[node-ip],[node-visibility] 4 | # 5 | # '#' starts a comment. Any line that is blank after comments and leading 6 | # or trailing spaces are removed is ignored. 7 | # 8 | # Server name, node name and instance fields can contain alphanumeric 9 | # characters, full stops (.) and hyphens (-) only. 10 | # Server name and node name can each contain an alternate to the primary 11 | # name. The alternate name is separated from the primary name by '|'. 12 | # E.g. 'NewName|OldName'. 13 | # 14 | # server-name - The primary name and any alternate name must be unique. 15 | # Either the primary name or the alternate name must match 16 | # the name used in the data directory path for uploaded files. 17 | # If an alternate name is given, it must be given for 18 | # every node belonging to the server. 19 | # node-name - The primary name and any alternate name must be unique for a 20 | # given server. Either the primary name or the alternate name 21 | # must match the name used in the data directory path for 22 | # uploaded files. 23 | # node-region - The region that the node is located in. 24 | # node-country - The country that the node is located in. 25 | # node-city - The city that the node is located in. 26 | # node-instance - This is used to collect nodes into subgroups called 27 | # 'Instances' (e.g. 
nodes within the same city) for 28 | # convenient selection and plotting. It should be unique 29 | # for a given country. 30 | # node-ip - The management IP address of the node (IPv4 or IPv6) 31 | # It is optional and if not specified is left blank. This is 32 | # required if RSSAC load-time/zone-size data is to be 33 | # collected. 34 | # node-visibility - This field determines the Grafana dashboards that include 35 | # the node. It is optional. If present, it must contain 36 | # one of the following values: 37 | # all - the node appears in all dashboards. This is 38 | # the default used if no value is given. 39 | # test - the node only appears in test 40 | # Grafana dashboards, not in the main ones. 41 | # main - the node only appears in the main 42 | # Grafana dashboards, not in the test ones. 43 | # none - the node does not appear in any Grafana 44 | # dashboards. 45 | # 46 | # Examples: 47 | # 48 | #Server-A,Node-1|Node-a,Region-1,Country-1,City-1,Instance-1,10.0.0.2 49 | #Server-A,Node-2,Region-1,Country-1,City-2,Instance-2, 50 | #Server-A,Node-3,Region-1,Country-2,City-3,Instance-3, 51 | #Server-A,Node-4,Region-1,Country-2,City-3,Instance-3, 52 | #Server-A,Node-5,Region-2,Country-3,City-4,Instance-4,,none 53 | #Server-A,Node-6,Region-2,Country-3,City-4,Instance-4,,test 54 | #Server-A,Node-7,Region-3,Country-3,City-4,Instance-5,10.0.0.3 55 | #Server-B|Server-2,Node-1,Region-1,Country-4,City-5,Instance-6, 56 | #Server-B|Server-2,Node-2,Region-2,Country-5,City-6,Instance-7, 57 | ###################################################################### 58 | #TestServer,TestNode,TestRegion,TestCountry,TestCity,TestInstance 59 | -------------------------------------------------------------------------------- /etc/odbc.ini.dsv: -------------------------------------------------------------------------------- 1 | # Sample odbc.ini file for Visualizer. This must be at /etc/odbc.ini. 
2 | # 3 | # You will need to ensure packages unixodbc, odbcinst and odbc-postgresql 4 | # are installed. 5 | 6 | [DEFAULT] 7 | Driver = dsv 8 | 9 | [dsv] 10 | Description = Connection to Visualizer PostgreSQL 11 | Driver = /usr/lib/x86_64-linux-gnu/odbc/psqlodbcw.so 12 | Database = dsv 13 | Servername = dsv-datastore 14 | UserName = dsv 15 | Password = dsv 16 | Port = 5432 17 | Protocol = 9.3 18 | ReadOnly = Yes 19 | RowVersioning = No 20 | ShowSystemTables = No 21 | ConnSettings = 22 | -------------------------------------------------------------------------------- /etc/supervisor/conf.d/dsv.conf.sample: -------------------------------------------------------------------------------- 1 | [program:dsv_worker] 2 | command=/usr/bin/dsv-worker 3 | process_name=%(program_name)s_%(process_num)02d 4 | numprocs=5 5 | user=dsv 6 | -------------------------------------------------------------------------------- /etc/systemd/system/dsv-import-mirror.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=DNS-STATS Visualizer Datastore Mirror service 3 | 4 | [Service] 5 | Type=simple 6 | ExecStart=/usr/bin/dsv-import-mirror /var/lib/dns-stats-visualizer/mirror * 7 | TimeoutStopSec=5 8 | Restart=on-failure 9 | User=dsv 10 | 11 | [Install] 12 | WantedBy=default.target 13 | -------------------------------------------------------------------------------- /etc/systemd/system/dsv-rssac.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=DNS-STATS Visualizer RSSAC Service 3 | After=network.target dsv-rssac.socket 4 | Requires=dsv-rssac.socket 5 | 6 | [Service] 7 | Type=simple 8 | ExecStart=/usr/bin/dsv-rssac-daemon --socket-type ipv6 9 | TimeoutStopSec=5 10 | Restart=on-failure 11 | User=dsv 12 | 13 | [Install] 14 | WantedBy=default.target 15 | -------------------------------------------------------------------------------- 
/etc/systemd/system/dsv-rssac.socket: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=DNS-STATS Visualizer RSSAC Notify Socket 3 | PartOf=dsv-rssac.service 4 | 5 | [Socket] 6 | ListenDatagram=[::]:53 7 | 8 | [Install] 9 | WantedBy=sockets.target 10 | -------------------------------------------------------------------------------- /grafana/bin/dash-depends: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # 3 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Scan a Visualizer Grafana dashboard Python file and extract the 12 | # dependencies. These are either Python imported modules or 13 | # HTML panel files. 14 | # 15 | # This is very dependent on current dashboard source practices. 
#

import argparse
import pathlib
import re
import sys
import traceback

# Directories searched (in order) for locally-provided Python modules.
PYTHON_PATHS = [
    'grafana/common',
    'src/python3',
]

# Regexes extracting the module name from 'import X' and 'from X import Y'
# lines. NOTE: these use re.search() on whole lines, so the first pattern
# also matches the 'import' inside a 'from ... import ...' statement.
python_re = [
    re.compile(r'import +(?P<module>[^ ]+)'),
    re.compile(r'from +(?P<module>[^ ]+) +import'),
]

# Regex extracting the file argument of HTMLPanel('...') / MarkdownPanel('...').
other_re = [
    re.compile(r'(HTML|Markdown)Panel\([\'"](?P<module>[^\'"]+)'),
]

def match_res(relist, lines):
    """Return the 'module' group of every match of any regex in relist
    against any line in lines, in regex-major order."""
    res = []
    for r in relist:
        for line in lines:
            m = r.search(line)
            if m:
                res.append(m.group('module'))
    return res

def get_local_python(modules):
    """Map dotted module names to existing local .py paths under
    PYTHON_PATHS. Modules with no local file are silently dropped
    (assumed to be system or third-party modules)."""
    res = []
    for m in modules:
        fname = m.replace('.', '/') + '.py'
        for path in PYTHON_PATHS:
            f = pathlib.Path(path + '/' + fname)
            if f.exists():
                res.append(str(f))
                break
    return res

def read_python_file(pyfile):
    """Return the dependencies of a dashboard Python file: local Python
    modules it imports plus HTML/Markdown panel files, followed by the
    (de-duplicated) indirect dependencies of those local modules."""
    res = []
    with open(pyfile) as f:
        lines = f.readlines()
    python = get_local_python(match_res(python_re, lines))
    html = match_res(other_re, lines)
    res.extend(python)
    res.extend(html)
    for p in python:
        for dep in read_python_file(p):
            if dep not in res:
                res.append(dep)
    return res

def main():
    """CLI entry point: print a make-style dependency rule for each input
    file, naming the generated .json/.d targets and the dependencies."""
    parser = argparse.ArgumentParser('Extract dependencies from Grafana dashboard Python')
    parser.add_argument('--traceback',
                        dest='traceback', action='store_true',
                        help=argparse.SUPPRESS)
    parser.add_argument('f', nargs='+')
    args = parser.parse_args()

    try:
        for f in args.f:
            deps = read_python_file(f)
            print('{} {}: {} {}'.format(
                f.replace('.py', '.json'),
                f.replace('.py', '.d'),
                f,
                ' '.join(deps)))
    except Exception as e:
        if args.traceback:
            traceback.print_exc()
        print('Error {exc} ({args}).'.format(
            exc=type(e).__name__,
            args=str(e)), file=sys.stderr)
        sys.exit(1)

    sys.exit(0)

if __name__ == '__main__':
    main()


# Local Variables:
# mode: Python
# End:
-------------------------------------------------------------------------------- /grafana/bin/generate-dashboard: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # 3 | # Copyright 2019-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Build a Grafana dashboard defined in GrafanaLib to JSON. 12 | 13 | import os 14 | import sys 15 | 16 | mydir = os.path.dirname(os.path.realpath(__file__)) 17 | 18 | for p in ['/../common', 19 | '/../../src/python3', 20 | '/../../tools/grafanalib', 21 | ]: 22 | dir = os.path.realpath(mydir + p) 23 | if os.path.exists(dir): 24 | sys.path.insert(0, dir) 25 | 26 | import grafanalib._gen as gen 27 | 28 | gen.generate_dashboard_script() 29 | -------------------------------------------------------------------------------- /grafana/common/active_disabled_nodes.html: -------------------------------------------------------------------------------- 1 | 10 |
11 |

12 | In the following tables, nodes that participate in the zone load 13 | time measurements are considered active. Those that do not 14 | (because they either do not have a configured service address or 15 | are no longer present in nodes.csv) are 16 | considered disabled. 17 |

18 |
19 | -------------------------------------------------------------------------------- /grafana/common/city_country.html: -------------------------------------------------------------------------------- 1 | 10 |
11 |

12 | A restriction of the MaxMind database used to generate this graph 13 | is that certain client locations (AS subnets) are only linked with 14 | a country, not a specific city. 15 |

16 |

17 | These client locations are plotted on the map using the 18 | latitude/longitude of the capital of the country, but 19 | are listed in the table under the country name. Hence 20 | the table contains a mixture of country and city names. 21 |

22 |
23 | -------------------------------------------------------------------------------- /grafana/common/dashboards/aggregated/client_subnet_statistics_header.html: -------------------------------------------------------------------------------- 1 | 10 | 18 | -------------------------------------------------------------------------------- /grafana/common/dashboards/aggregated/query_statistics.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation query statistics 10 | 11 | import textwrap 12 | 13 | import grafanalib.core as GCore 14 | 15 | import querystatsgraph as qsg 16 | import grafanacommon as GCommon 17 | 18 | def dash(myuid, agginfo, nodesel, **kwargs): 19 | return GCommon.Dashboard( 20 | title = 'Query statistics', 21 | tags = [ 22 | agginfo['graph_tag'] 23 | ], 24 | uid = myuid, 25 | rows = [ 26 | GCore.Row( 27 | panels = [ 28 | GCommon.QPSGraph( 29 | title = 'Queries', 30 | targets = [ 31 | GCommon.ClickHouseTarget( 32 | database = agginfo['database'], 33 | table = 'Queries' + agginfo['table_suffix'], 34 | round = agginfo['round'], 35 | query = textwrap.dedent("""\ 36 | SELECT 37 | $timeSeries AS t, 38 | sum(QueryCount)/{interval_divisor} AS QPS 39 | FROM $table 40 | WHERE 41 | $timeFilter 42 | AND NodeID IN {nodesel} 43 | GROUP BY t 44 | ORDER BY t""".format( 45 | interval_divisor=agginfo['interval_divisor'], 46 | nodesel=nodesel)), 47 | refId = 'A' 48 | ) 49 | ], 50 | ), 51 | ], 52 | ), 53 | GCore.Row( 54 | panels = [ 55 | GCommon.QPSGraph( 56 | title = 'Queries by region', 57 | targets = [ qsg.QPSTargetGroup(agginfo, 'region_name', nodesel) ], 58 | ), 59 | 
GCommon.QPSGraph( 60 | title = 'Queries by country', 61 | targets = [ qsg.QPSTargetGroup(agginfo, 'country_name', nodesel) ], 62 | ), 63 | ] 64 | ), 65 | ] 66 | ) 67 | -------------------------------------------------------------------------------- /grafana/common/footer.html: -------------------------------------------------------------------------------- 1 | 10 |
11 | 16 |
17 |
18 |

19 | Version $version 20 | © Internet Corporation for Assigned Names and Numbers 21 |
22 | Developed by Sinodun Internet Technologies 23 |

24 |
25 |
26 |

27 | Location and network data from GeoLite2 data created by MaxMind, available from www.maxmind.com 28 |
29 | Map data from OpenStreetMap and CartoDB 30 |
31 | Powered by PostgreSQL, ClickHouse, Grafana 32 |

33 |
34 | -------------------------------------------------------------------------------- /grafana/common/footer_test.html: -------------------------------------------------------------------------------- 1 | 10 |
11 |
12 |

13 | TEST SITE - Version $version 14 |
15 | Developed by Sinodun Internet Technologies 16 |

17 |
18 |
19 |

20 | Location and network data from GeoLite2 data created by MaxMind, available from www.maxmind.com 21 |
22 | Map data from OpenStreetMap and CartoDB 23 |
24 | Powered by PostgreSQL, ClickHouse, Grafana 25 |

26 |
27 | -------------------------------------------------------------------------------- /grafana/common/querystatsgraph.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Query statistics graph common target 10 | 11 | import textwrap 12 | 13 | import grafanacommon as GCommon 14 | 15 | def QPSTargetGroup(agginfo, field, nodesel): 16 | return GCommon.ClickHouseTarget( 17 | database = agginfo['database'], 18 | table = 'Queries' + agginfo['table_suffix'], 19 | round = agginfo['round'], 20 | query = textwrap.dedent("""\ 21 | SELECT t, groupArray((Name, qc)) 22 | FROM 23 | ( 24 | SELECT 25 | t,Name,sum(cnt)/{interval_divisor} AS qc 26 | FROM 27 | ( 28 | SELECT 29 | $timeSeries AS t, 30 | NodeID, 31 | sum(QueryCount) AS cnt 32 | FROM $table 33 | WHERE $timeFilter AND NodeID IN {nodesel} 34 | GROUP BY t,NodeID 35 | ORDER BY t,NodeID 36 | ) AS NodeCount 37 | ALL INNER JOIN 38 | ( 39 | SELECT 40 | {field} AS Name, 41 | toUInt16(node_id) AS NodeID 42 | FROM {nodeinfo_database}.node_text 43 | ) AS NodeName USING NodeID 44 | GROUP BY t, Name 45 | ORDER BY t, Name 46 | ) 47 | GROUP BY t 48 | ORDER BY t""".format( 49 | field=field, 50 | interval_divisor=agginfo['interval_divisor'], 51 | nodesel=nodesel, 52 | nodeinfo_database=agginfo['nodeinfo_database'])), 53 | refId = 'A' 54 | ) 55 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/menu/main.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 
2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Visualizer main dashboard 10 | 11 | import textwrap 12 | 13 | import grafanalib.core as GCore 14 | 15 | import grafanacommon as GCommon 16 | 17 | dashboard = GCommon.Dashboard( 18 | title = "DNS-STATS Visualizer main menu", 19 | timePicker = GCommon.TimePicker( 20 | refreshIntervals = GCore.DEFAULT_TIME_PICKER.refreshIntervals, 21 | timeOptions = GCore.DEFAULT_TIME_PICKER.timeOptions, 22 | nowDelay = '1h', 23 | hidden=True), 24 | uid = "dsv-main", 25 | rows = [ 26 | GCore.Row( 27 | height = GCore.Pixels(330), 28 | panels = [ 29 | GCommon.HTMLPanel('grafana/dashboards/main-site/menu/menu-timelines.html', 'Timelines'), 30 | ], 31 | ), 32 | GCore.Row( 33 | height = GCore.Pixels(360), 34 | panels = [ 35 | GCommon.HTMLPanel('grafana/dashboards/main-site/menu/menu-other.html', 'Other metrics'), 36 | ], 37 | ), 38 | GCommon.FOOTER_ROW, 39 | ], 40 | ).auto_panel_ids() 41 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/menu/menu-other.html: -------------------------------------------------------------------------------- 1 | 10 | 70 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/menu/menu-timelines.html: -------------------------------------------------------------------------------- 1 | 10 | 63 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/client-subnet-statistics-detail.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 
2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation client subnet statistics 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.client_subnet_statistics_detail import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-clientsubnetstatsdetail').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/client-subnet-statistics.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation client subnet statistics 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.client_subnet_statistics import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-clientsubnetstats').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/geolocation_city.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation geolocation plots 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.geolocation_city import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-geolocation-city').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/geolocation_country.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation geolocation plots 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.geolocation_country import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-geolocation-country').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/qtype-vs-tld.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation IP/Protocol 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.qtype_vs_tld import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-qtype-vs-tld').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/rssac-other.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation RSSAC plots 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.rssac_other import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-rssacother', zone_load_name_col='instance_name').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/rssac-reporting.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation RSSAC plots 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.rssac_reporting import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-rssac-report', tags=[], zone_load_name_col='instance_name').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/rssac-sources.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation RSSAC plots 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.rssac_sources import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-rssacsources', zone_load_name_col='instance_name').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other-metrics/rssac-volumes.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation RSSAC plots 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.rssac_volumes import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-rssacvolumes', zone_load_name_col='instance_name').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other/about.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Visualizer main help dashboard 10 | 11 | import textwrap 12 | 13 | import grafanalib.core as GCore 14 | 15 | import grafanacommon as GCommon 16 | 17 | dashboard = GCommon.Dashboard( 18 | title = "About", 19 | timePicker = GCommon.TimePicker( 20 | refreshIntervals = GCore.DEFAULT_TIME_PICKER.refreshIntervals, 21 | timeOptions = GCore.DEFAULT_TIME_PICKER.timeOptions, 22 | nowDelay = '1h', 23 | hidden=True), 24 | uid = "dsv-about", 25 | rows = [ 26 | GCore.Row( 27 | height = GCore.Pixels(350), 28 | panels = [ 29 | GCommon.MarkdownPanel('grafana/dashboards/main-site/other/about.md'), 30 | ], 31 | ), 32 | GCommon.FOOTER_ROW, 33 | ], 34 | ).auto_panel_ids() 35 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/other/about.md: -------------------------------------------------------------------------------- 1 | ## About 2 | 3 | DNS-STATS Visualizer is a system which can 4 | 5 | * Consume DNS traffic data files recorded in Compacted-DNS (C-DNS, RFC8618) format from nameservers. 
(Files in C-DNS format can be generated by [DNS-STATS 6 | compactor](https://github.com/dns-stats/compactor/wiki).) 7 | 8 | * Populate a ClickHouse database with per query/response level data (and additionally aggregate data at a chosen time interval) 9 | 10 | * Produce [DSC](https://www.dns-oarc.net/tools/dsc)-like statistics graphs of the recorded traffic in Grafana. 11 | 12 | RSSAC reports can also be made available via the link at the foot of the main dashboard. 13 | 14 | The project was initially developed for [ICANN](http://www.icann.org/) by [Sinodun IT](http://sinodun.com/), and is now released via [DNS-STATS](http://dns-stats.org) as an open source project licensed under the Mozilla Public License v2.0. 15 | 16 | For more information see the [DNS-STATS Visualizer wiki](https://github.com/dns-stats/visualizer/wiki) or [github repository](https://github.com/dns-stats/visualizer). 17 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/timelines/ip-protocol.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation IP/Protocol 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.ip_protocol import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-ipprotocol').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/timelines/qtype.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation query attributes 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.qtype import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-qtype').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/timelines/query-attributes.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation query attributes 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.query_attributes import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-queryattribs').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/timelines/query-statistics-detail.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation query statistics 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.query_statistics_detail import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-querystatsdetail').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/timelines/query-statistics.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation query statistics 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.query_statistics import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-querystats').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/timelines/rcode.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation rcodes 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.rcode import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-rcode').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/dashboards/main-site/timelines/server-ip-address.dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | # 9 | # Aggregation server IP address plots 10 | 11 | import grafanacommon as GCommon 12 | 13 | from dashboards.aggregated.server_ip_address import dash 14 | 15 | dashboard = GCommon.MakeAggDashboard(dash, 'dsv-5minagg-servip-addr').auto_panel_ids() 16 | -------------------------------------------------------------------------------- /grafana/provisioning/dashboards/main-site.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | providers: 4 | - name: 'Timelines' 5 | orgId: 1 6 | folder: 'Timelines' 7 | type: file 8 | disableDeletion: false 9 | editable: false 10 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 11 | options: 12 | path: /var/lib/grafana/dashboards/main-site/timelines 13 | - name: 'Other metrics' 14 | orgId: 1 15 | folder: 'OtherMetrics' 16 | type: file 17 | disableDeletion: false 18 | editable: false 19 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 20 | options: 21 | path: /var/lib/grafana/dashboards/main-site/other-metrics 22 | - name: 'Other dashboards' 23 | orgId: 1 24 | folder: 'OtherDashboards' 25 | type: file 26 | disableDeletion: false 27 | editable: false 28 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 29 | options: 30 | path: /var/lib/grafana/dashboards/main-site/other 31 | - name: 'Public Site' 32 | orgId: 1 33 | folder: '' 34 | type: file 35 | disableDeletion: true 36 | editable: false 37 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 38 | options: 39 | path: /var/lib/grafana/dashboards/main-site/menu 40 | -------------------------------------------------------------------------------- /grafana/provisioning/dashboards/test-site.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | providers: 4 | - name: 'Timelines' 5 | orgId: 1 6 | folder: 
'Timelines' 7 | type: file 8 | disableDeletion: false 9 | editable: false 10 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 11 | options: 12 | path: /var/lib/grafana/dashboards/test-site/timelines 13 | - name: 'Other metrics' 14 | orgId: 1 15 | folder: 'OtherMetrics' 16 | type: file 17 | disableDeletion: false 18 | editable: false 19 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 20 | options: 21 | path: /var/lib/grafana/dashboards/test-site/other-metrics 22 | - name: 'Other dashboards' 23 | orgId: 1 24 | folder: 'OtherDashboards' 25 | type: file 26 | disableDeletion: false 27 | editable: false 28 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 29 | options: 30 | path: /var/lib/grafana/dashboards/test-site/other 31 | - name: 'Public Site' 32 | orgId: 1 33 | folder: '' 34 | type: file 35 | disableDeletion: true 36 | editable: false 37 | updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards 38 | options: 39 | path: /var/lib/grafana/dashboards/test-site/menu 40 | -------------------------------------------------------------------------------- /grafana/provisioning/datasources/dsv-main.yml.sample: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Visualizer 5 | type: vertamedia-clickhouse-datasource 6 | access: proxy 7 | url: http://dsv-clickhouse:8123 8 | 9 | orgId: 1 10 | 11 | # enable/disable basic auth 12 | basicAuth: true 13 | # basic auth username 14 | basicAuthUser: dsv_main 15 | # basic auth password 16 | basicAuthPassword: 17 | # enable/disable with credentials headers 18 | withCredentials: false 19 | # mark as default datasource. 
Max one per org 20 | isDefault: true 21 | # fields that will be converted to json and stored in json_data 22 | jsonData: 23 | # enable/disable sending 'add_http_cors_header=1' parameter 24 | addCorsHeader: false 25 | # enable/disable using POST method for sending queries 26 | usePOST: true 27 | # default database name 28 | defaultDatabase: dsv_five_minute 29 | -------------------------------------------------------------------------------- /grafana/provisioning/datasources/dsv-test.yml.sample: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Visualizer 5 | type: vertamedia-clickhouse-datasource 6 | access: proxy 7 | url: http://dsv-clickhouse:8123 8 | 9 | orgId: 1 10 | 11 | # enable/disable basic auth 12 | basicAuth: false 13 | # basic auth username 14 | basicAuthUser: 15 | # basic auth password 16 | basicAuthPassword: 17 | # enable/disable with credentials headers 18 | withCredentials: false 19 | # mark as default datasource. Max one per org 20 | isDefault: true 21 | # fields that will be converted to json and stored in json_data 22 | jsonData: 23 | # enable/disable sending 'add_http_cors_header=1' parameter 24 | addCorsHeader: false 25 | # enable/disable using POST method for sending queries 26 | usePOST: true 27 | # default database name 28 | defaultDatabase: dsv_five_minute 29 | -------------------------------------------------------------------------------- /mkdeb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Make Debian package in subdir dist_deb. 4 | # 5 | # Requires VERSION set in environment. 6 | 7 | rm -rf dist_deb 8 | 9 | # Copy source tree into dist_deb/dsv-packages 10 | # 11 | # First the directories copied in toto. 12 | mkdir -p dist_deb/dsv-packages 13 | cp -pR bin doc etc sql src sampledata dist_deb/dsv-packages 14 | 15 | # All Grafana dashboard JSON and Grafana provisioning. 
16 | find grafana/dashboards -name "*.json" | xargs tar -cf - | tar -C dist_deb/dsv-packages -xf - 17 | cp -pR grafana/provisioning dist_deb/dsv-packages 18 | 19 | # Now the Grafana bar chart. This needs to go into Grafana plugins. 20 | mkdir -p dist_deb/dsv-packages/grafana/plugins/sinodun-natel-plotly-panel 21 | (cd tools/sinodun-natel-plotly-panel; git archive HEAD) | tar -C dist_deb/dsv-packages/grafana/plugins/sinodun-natel-plotly-panel -xf - 22 | 23 | # The Python ClickHouse driver. 24 | mkdir -p dist_deb/dsv-packages/tools/clickhouse-driver 25 | (cd tools/clickhouse-driver; git archive HEAD) | tar -C dist_deb/dsv-packages/tools/clickhouse-driver -xf - 26 | 27 | # Remove any Python cache dirs. 28 | find dist_deb/dsv-packages -name __pycache__ -type d | xargs rm -rf 29 | 30 | # Make the source tarball. 31 | tar -c -z -f dist_deb/dns-stats-visualizer_${DSVVERSION}.orig.tar.gz -C dist_deb/dsv-packages bin doc etc grafana provisioning sql src sampledata tools 32 | 33 | # Copy the debian dir into the build dir and make the packages. 34 | cp -pR debian dist_deb/dsv-packages 35 | cd dist_deb/dsv-packages 36 | exec dpkg-buildpackage -us -uc 37 | -------------------------------------------------------------------------------- /sampledata/testnode.cdns.xz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/sampledata/testnode.cdns.xz -------------------------------------------------------------------------------- /sql/clickhouse/ddl/0002.sql: -------------------------------------------------------------------------------- 1 | --- Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | --- 3 | --- This Source Code Form is subject to the terms of the Mozilla Public 4 | --- License, v. 2.0. If a copy of the MPL was not distributed with this 5 | --- file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | --- 7 | --- Developed by Sinodun IT (sinodun.com) 8 | --- 9 | --- Create tables accessing dictionaries. 10 | --- 11 | CREATE TABLE dsv.node_text 12 | ( 13 | node_id UInt16, 14 | name String, 15 | server_name String, 16 | instance_name String, 17 | city_name String, 18 | country_name String, 19 | region_name String, 20 | flags UInt64 21 | ) 22 | ENGINE = Dictionary('node_text'); 23 | 24 | CREATE TABLE dsv.server_address 25 | ( 26 | id UInt64, 27 | address String 28 | ) 29 | ENGINE = Dictionary('server_address'); 30 | 31 | CREATE TABLE dsv.iana_text 32 | ( 33 | registry_name String, 34 | value_name String, 35 | value UInt32, 36 | value_description String 37 | ) 38 | ENGINE = Dictionary('iana_text'); 39 | 40 | CREATE TABLE dsv.tld_text 41 | ( 42 | name String, 43 | tld_type String, 44 | ulabel String 45 | ) 46 | ENGINE = Dictionary('tld_text'); 47 | 48 | CREATE TABLE dsv.geolocation 49 | ( 50 | id UInt64, 51 | country_id UInt64, 52 | name String, 53 | latitude Float32, 54 | longitude Float32 55 | ) 56 | ENGINE = Dictionary('geolocation'); 57 | -------------------------------------------------------------------------------- /sql/clickhouse/ddl/0010-rollback.sql: -------------------------------------------------------------------------------- 1 | --- Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | --- 3 | --- This Source Code Form is subject to the terms of the Mozilla Public 4 | --- License, v. 2.0. If a copy of the MPL was not distributed with this 5 | --- file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | --- 7 | --- Developed by Sinodun IT (sinodun.com) 8 | DROP TABLE dsv.AAATopUndelegatedTldPerFiveMinsShard; 9 | DROP TABLE dsv.AAATopUndelegatedTldPerFiveMinsShardMV; 10 | DROP TABLE dsv.AAATopUndelegatedTldPerFiveMins; 11 | -------------------------------------------------------------------------------- /sql/clickhouse/ddl/0010.sql: -------------------------------------------------------------------------------- 1 | --- Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | --- 3 | --- This Source Code Form is subject to the terms of the Mozilla Public 4 | --- License, v. 2.0. If a copy of the MPL was not distributed with this 5 | --- file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | --- 7 | --- Developed by Sinodun IT (sinodun.com) 8 | --- 9 | --- Create supporting tables for aggregations. 10 | --- 11 | --- These are tables that are not directly part of the aggregation, 12 | --- but data from which is used when building an aggregation table. 13 | --- 14 | 15 | --- 16 | --- Create local shard aggregating top 40 undelegated TLDs. 17 | --- This is just used for tracking the most popular TLDs, to determine 18 | --- whether a particular TLD should be added to 19 | --- QtypeUndelegatedTldShard. When making that decision, we 20 | --- consider only the last hour at 5 minute granularity, to ensure 21 | --- that temporary bursts of popularity fade from view. 22 | --- 23 | --- It appears that additions to materialized views are done in 24 | --- alphabetical order. So put this first, so that aggregating 25 | --- into the main data table proper based on the contents of this 26 | --- table happens after this table is updated. 27 | --- 28 | --- NOTE. I tried calculating TLD in a subquery and using that value 29 | --- instead of repeating the QueryName modification. But I found 30 | --- that in some ClickHouse versions the select 31 | --- works fine on its own, but as the AS clause in a CREATE 32 | --- MATERIALIZED VIEW it errors. 
33 | --- I suspect ClickHouse has problems with subqueries in this context. 34 | --- 35 | CREATE TABLE dsv.AAATopUndelegatedTldPerFiveMinsShard 36 | ( 37 | Date Date, 38 | DateTime DateTime, 39 | NodeID UInt16, 40 | TopTlds AggregateFunction(topK(40), String) 41 | ) 42 | ENGINE = AggregatingMergeTree() 43 | PARTITION BY toYearWeek(Date) 44 | ORDER BY (Date, DateTime, NodeID); 45 | 46 | CREATE MATERIALIZED VIEW dsv.AAATopUndelegatedTldPerFiveMinsShardMV 47 | TO dsv.AAATopUndelegatedTldPerFiveMinsShard 48 | AS SELECT 49 | today() AS Date, 50 | toStartOfFiveMinute(now()) AS DateTime, 51 | NodeID, 52 | topKState(40)(lower(topLevelDomain(QueryName))) AS TopTlds 53 | FROM dsv.QueryResponseShard 54 | WHERE QueryResponseHasQuery 55 | AND notEmpty(lower(topLevelDomain(QueryName))) 56 | AND lower(topLevelDomain(QueryName)) NOT IN (SELECT name FROM dsv.tld_text) 57 | GROUP BY Date, DateTime, NodeID; 58 | 59 | --- 60 | --- Create distributed table for aggregated top 40 undelegated TLDs. 61 | --- 62 | CREATE TABLE dsv.AAATopUndelegatedTldPerFiveMins 63 | ( 64 | Date Date, 65 | DateTime DateTime, 66 | NodeID UInt16, 67 | TopTlds AggregateFunction(topK(40), String) 68 | ) 69 | ENGINE = Distributed(dsv, dsv, AAATopUndelegatedTldPerFiveMinsShard); 70 | -------------------------------------------------------------------------------- /sql/clickhouse/ddl/0011-rollback.sql: -------------------------------------------------------------------------------- 1 | --- Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | --- 3 | --- This Source Code Form is subject to the terms of the Mozilla Public 4 | --- License, v. 2.0. If a copy of the MPL was not distributed with this 5 | --- file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | --- 7 | --- Developed by Sinodun IT (sinodun.com) 8 | DROP DATABASE IF EXISTS dsv_five_minute; 9 | -------------------------------------------------------------------------------- /sql/clickhouse/info.awk: -------------------------------------------------------------------------------- 1 | BEGIN { FS = ":"; OFS = "\t" } 2 | /Total Packets processed/ { total_packets = $2 + 0; } 3 | /Malformed DNS packets/ { malformed_packets = $2 + 0; } 4 | /Non-DNS packets/ { non_dns_packets = $2 + 0; } 5 | /File duration/ { file_duration = $2; } 6 | /Collection started/ { collection_started = $2; } 7 | /Earliest data/ { earliest_data = $2; } 8 | END { 9 | duration = 300; 10 | # If they exist, we expect durations to be 0000s9999u. 11 | # Report duration in s, round up if us >= 500000 or s = 0. 12 | # Obtain duration from file duration if present. If not, default to 300s. 13 | if ( length(file_duration) > 0 ) { 14 | spos = index(file_duration, "s"); 15 | upos = index(file_duration, "us"); 16 | if ( spos > 0 && upos == length(file_duration) - 1 ) { 17 | secs = substr(file_duration, 1, spos - 1) + 0; 18 | usecs = substr(file_duration, spos + 1, upos - spos - 1) + 0; 19 | if ( usecs > 500000 || secs == 0 ) 20 | secs++; 21 | duration = secs; 22 | } 23 | } 24 | if ( length(collection_started) == 0 ) { 25 | collection_started = earliest_data; 26 | } 27 | if ( length(collection_started) < 20 ) { 28 | exit 1 29 | } 30 | split(collection_started, start, " ") 31 | date = start[1]; 32 | time = substr(start[2], 1, 2) ":" substr(start[2], 4, 2) ":" substr(start[2], 7, 2); 33 | print date, date " " time, node_id, total_packets, malformed_packets, non_dns_packets, duration; 34 | } 35 | -------------------------------------------------------------------------------- /sql/clickhouse/tsv.tpl: -------------------------------------------------------------------------------- 1 | {{#QUERY_RESPONSE_HEADER}}Date DateTime NanoSecondsSinceEpoch NodeID ClientAddress ClientPort ClientHoplimit 
ClientGeoLocation ClientASN ClientASNetmask ServerAddress ServerPort TransportTCP TransportIPv6 QueryResponseHasQuery QueryResponseHasResponse QueryResponseQueryHasQuestion QueryResponseQueryHasOpt QueryResponseResponseHasQuestion QueryResponseResponseHasOpt QueryLength ResponseLength ID QueryOpcode QueryCheckingDisabled QueryAuthenticatedData QueryZ QueryRecursionAvailable QueryRecursionDesired QueryTruncated QueryAuthoritativeAnswer QueryDO QueryRcode QueryClass QueryType QueryName QueryQDCount QueryANCount QueryARCount QueryNSCount QueryEDNSVersion QueryEDNSUDPMessageSize ResponseDelayNanoSeconds ResponseCheckingDisabled ResponseAuthenticatedData ResponseZ ResponseRecursionAvailable ResponseRecursionDesired ResponseTruncated ResponseAuthoritativeAnswer ResponseRcode ResponseQDCount ResponseANCount ResponseARCount ResponseNSCount 2 | {{/QUERY_RESPONSE_HEADER}}{{timestamp_secs:x-date}} {{timestamp_secs}} {{timestamp_nanosecs}} {{node}} {{client_address:x-ip6addr-bin:x-hexstring}} {{client_port}} {{client_hoplimit}} {{client_address:x-ipaddr-geo-location}} {{client_address:x-ipaddr-geo-asn}} {{client_address:x-ipaddr-geo-as-netmask}} {{server_address:x-ip6addr-bin:x-hexstring}} {{server_port}} {{transport_tcp}} {{transport_ipv6}} {{query_response_has_query}} {{query_response_has_response}} {{query_response_query_has_question}} {{query_response_query_has_opt}} {{query_response_response_has_question}} {{query_response_response_has_opt}} {{query_len}} {{response_len}} {{id}} {{query_opcode}} {{query_checking_disabled}} {{query_authenticated_data}} {{query_z}} {{query_recursion_available}} {{query_recursion_desired}} {{query_truncated}} {{query_authoritative_answer}} {{query_do}} {{query_rcode}} {{query_class}} {{query_type}} {{query_name:x-cstring}} {{query_qdcount}} {{query_ancount}} {{query_arcount}} {{query_nscount}} {{query_edns_version}} {{query_edns_udp_payload_size}} {{response_delay_nanosecs}} {{response_checking_disabled}} {{response_authenticated_data}} 
{{response_z}} {{response_recursion_available}} {{response_recursion_desired}} {{response_truncated}} {{response_authoritative_answer}} {{response_rcode}} {{response_qdcount}} {{response_ancount}} {{response_arcount}} {{response_nscount}} 3 | -------------------------------------------------------------------------------- /src/python3/clickhouse_driver: -------------------------------------------------------------------------------- 1 | ../../tools/clickhouse-driver/clickhouse_driver -------------------------------------------------------------------------------- /src/python3/dsv/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["common", "commands"] 2 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Start a program from dsv.commands depending on contents of argv[0]. 12 | 13 | import argparse 14 | import pathlib 15 | import sys 16 | import traceback 17 | import importlib.util 18 | 19 | import dsv.common.Config as dc 20 | 21 | def module_name(cmd): 22 | return 'dsv.commands.' 
+ cmd.replace('-', '_') 23 | 24 | def available_commands(): 25 | res = [] 26 | for pentry in sys.path: 27 | cmddir = pathlib.Path(pentry) / 'dsv' / 'commands' 28 | if cmddir.is_dir(): 29 | for mod in cmddir.glob('*.py'): 30 | name = mod.stem 31 | if name and name[0] != '_' and \ 32 | importlib.util.spec_from_file_location(module_name(name), str(mod)): 33 | res.append(name.replace('_', '-')) 34 | return sorted(res) 35 | 36 | def run_command(cmd, argv): 37 | exe = importlib.import_module(module_name(cmd)) 38 | 39 | parser = argparse.ArgumentParser(description=exe.description) 40 | parser.add_argument('-c', '--config', 41 | dest='conf_file', action='store', default=None, 42 | help='configuration file location', 43 | metavar='CONFIG_FILE') 44 | parser.add_argument('--traceback', 45 | dest='traceback', action='store_true', 46 | help=argparse.SUPPRESS) 47 | exe.add_args(parser) 48 | args = parser.parse_args(argv[1:]) 49 | cfg = dc.Config(args.conf_file) 50 | try: 51 | return exe.main(args, cfg) 52 | except Exception as e: 53 | if args.traceback: 54 | traceback.print_exc() 55 | raise e 56 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/clickhouse_sys_info.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2019 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Return data on a Visualizer database server. Intended for use with a 12 | # ClickHouse dictionary with a complex key, so listens on stdin for 13 | # the names of keys and outputs key\tvalue in reply. 
14 | # 15 | # Usage: dsv-clickhouse-sys-info [-c|--config conf_file] 16 | # 17 | # Keys recognised: disc-block-size, disc-size, disc-available, 18 | # disc-percent-free, disc-percent-used 19 | # 20 | 21 | import logging 22 | import os 23 | import sys 24 | 25 | description = 'read keys from stdin, return system quantities.' 26 | 27 | def add_args(_): 28 | pass 29 | 30 | def main(_, cfg): 31 | statfs = os.statvfs(cfg['clickhouse']['dbdir']) 32 | 33 | for line in sys.stdin.readlines(): 34 | line = line.strip() 35 | if line == 'disc-block-size': 36 | val = statfs.f_bsize 37 | elif line == 'disc-size': 38 | val = statfs.f_blocks * statfs.f_frsize // 1024 39 | elif line == 'disc-available': 40 | val = statfs.f_bavail * statfs.f_frsize // 1024 41 | elif line == 'disc-percent-free': 42 | val = 100 *statfs.f_bavail // statfs.f_blocks 43 | elif line == 'disc-percent-used': 44 | val = 100 - (100 * statfs.f_bavail // statfs.f_blocks) 45 | else: 46 | print('Unknown key: {key}'.format(key=line)) 47 | logging.error('Unknown key: {key}'.format(key=line)) 48 | return 1 49 | 50 | print('{key}\t{val}'.format(key=line, val=val)) 51 | return 0 52 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2019 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # A utility to obtain Visualizer config for use from non-Python worlds. 12 | 13 | import random 14 | 15 | description = 'get Visualizer configuration value.' 
16 | 17 | def add_args(parser): 18 | parser.add_argument('-r', '--random', 19 | dest='random', action='store_true', 20 | help='pick random entry from command separated list') 21 | parser.add_argument('cfg', nargs='+', 22 | help='config key', 23 | metavar='KEY') 24 | 25 | def main(args, cfg): 26 | try: 27 | for conf in args.cfg: 28 | cfg = cfg[conf] 29 | except KeyError: 30 | return 1 31 | 32 | if args.random: 33 | print(random.choice(cfg.split(','))) 34 | else: 35 | print(cfg) 36 | return 0 37 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/find_node_id.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Find the node ID given server and node names. 12 | # 13 | # Usage: dsv-find-node-id 14 | # 15 | 16 | import psycopg2 17 | 18 | description = 'find node ID from Postgres.' 
19 | 20 | def add_args(parser): 21 | parser.add_argument('servername', 22 | help='the server name', 23 | metavar='SERVERNAME') 24 | parser.add_argument('nodename', 25 | help='the node name', 26 | metavar='NODENAME') 27 | 28 | def main(args, cfg): 29 | conn = None 30 | 31 | try: 32 | pgcfg = cfg['postgres'] 33 | conn = psycopg2.connect(host=pgcfg['host'], 34 | dbname=pgcfg['database'], 35 | user=pgcfg['user'], 36 | password=pgcfg['password']) 37 | 38 | with conn.cursor() as cur: 39 | cur.execute('SELECT node.id FROM node ' 40 | 'INNER JOIN node_server ON node_server.id = node.server_id ' 41 | 'WHERE (node_server.name=%(server)s OR ' 42 | ' node_server.altname=%(server)s) ' 43 | 'AND (node.name=%(node)s OR node.altname=%(node)s)', 44 | {'server': args.servername, 'node': args.nodename}) 45 | res = cur.fetchone() 46 | 47 | conn.close() 48 | 49 | if res: 50 | print(res[0]) 51 | return 0 52 | return 1 53 | except Exception: 54 | if conn is not None: 55 | conn.rollback() 56 | conn.close() 57 | raise 58 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/import_freeze.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Freeze Visualizer import by removing write permission from the lock file. 12 | 13 | import logging 14 | 15 | import dsv.common.Lock as dl 16 | 17 | description = 'freeze Visualizer import processing.' 
18 | 19 | def add_args(_): 20 | pass 21 | 22 | def main(_, cfg): 23 | datastore_cfg = cfg['datastore'] 24 | lock = dl.DSVLock(datastore_cfg['user'], datastore_cfg['lockfile'].format('import')) 25 | if lock.is_frozen(): 26 | logging.info('Visualizer import freeze - already frozen.') 27 | print('Visualizer import already frozen.') 28 | return 1 29 | lock.freeze() 30 | logging.info('Visualizer import frozen.') 31 | print('Visualizer import frozen.') 32 | return 0 33 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/import_thaw.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Thaw Visualizer import by adding write permission to the lock file. 12 | 13 | import logging 14 | 15 | import dsv.common.Lock as dl 16 | 17 | description = 'thaw Visualizer import processing.' 
18 | 19 | def add_args(_): 20 | pass 21 | 22 | def main(_, cfg): 23 | datastore_cfg = cfg['datastore'] 24 | lock = dl.DSVLock(datastore_cfg['user'], datastore_cfg['lockfile'].format('import')) 25 | if not lock.is_frozen(): 26 | logging.info('Visualizer import thaw - already thawed.') 27 | print('Visualizer import already thawed.') 28 | return 1 29 | lock.thaw() 30 | logging.info('Visualizer import thawed.') 31 | print('Visualizer import thawed.') 32 | return 0 33 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/log.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2019 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # A utility to log text via Visualizer logging for use from non-Python worlds. 12 | 13 | import logging 14 | 15 | description = 'get Visualizer configuration value.' 
16 | 17 | def add_args(parser): 18 | parser.add_argument('-l', '--level', 19 | dest='level', action='store', 20 | choices=['critical', 'error', 'warning', 'info', 'debug'], 21 | default='info', 22 | help='log level: critical, error, warning, info (default), debug', 23 | metavar='LEVEL') 24 | parser.add_argument('txt', nargs='*', 25 | help='text to log', 26 | metavar='TEXT') 27 | 28 | def main(args, _): 29 | txt = " ".join(args.txt) 30 | for lvl in (logging.CRITICAL, logging.ERROR, logging.WARNING, 31 | logging.INFO, logging.DEBUG): 32 | if logging.getLevelName(lvl).lower() == args.level: 33 | logging.log(lvl, txt) 34 | return 0 35 | return 1 36 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/postgres_update.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Keep the Postgres schema up to date. The Postgres user 'dsv' and 12 | # database 'dsv' must have already been created using the script 13 | # dsv-postgres-create. 14 | # 15 | # Usage: dsv-postgres-update 16 | # 17 | 18 | import logging 19 | import textwrap 20 | 21 | import psycopg2 22 | import psycopg2.errorcodes 23 | 24 | import dsv.common.DDL as dd 25 | 26 | description = 'update Postgres schema.' 27 | 28 | # SQL for managing the DDL management schema. 
29 | SQL_CREATE_HISTORY = textwrap.dedent("""\ 30 | CREATE TABLE ddl_history ( 31 | version INTEGER NOT NULL, 32 | applied TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP, 33 | action INTEGER NOT NULL DEFAULT 0 34 | )""") 35 | SQL_UPDATE_HISTORY_FROM_V1 = textwrap.dedent("""\ 36 | ALTER TABLE ddl_history DROP CONSTRAINT ddl_history_version_key, 37 | ALTER COLUMN applied SET DEFAULT CURRENT_TIMESTAMP, 38 | ADD COLUMN action INTEGER NOT NULL DEFAULT 0""") 39 | 40 | class PostgresDDLActions(dd.DDLActions): 41 | def __init__(self, conn): 42 | self.conn = conn 43 | 44 | def read_ddl_info(self, ddl_path): 45 | query = 'SELECT version,applied,action FROM ddl_history ORDER BY applied' 46 | try: 47 | with self.conn.cursor() as cur: 48 | cur.execute(query) 49 | ddl_info = cur.fetchall() 50 | except psycopg2.Error as err: 51 | if err.pgcode == psycopg2.errorcodes.UNDEFINED_COLUMN: 52 | self.conn.rollback() 53 | with self.conn.cursor() as cur: 54 | cur.execute(SQL_UPDATE_HISTORY_FROM_V1) 55 | self.conn.commit() 56 | logging.info('Apply Postgres ddl_history v2 update') 57 | with self.conn.cursor() as cur: 58 | cur.execute(query) 59 | ddl_info = cur.fetchall() 60 | elif err.pgcode == psycopg2.errorcodes.UNDEFINED_TABLE: 61 | self.conn.rollback() 62 | with self.conn.cursor() as cur: 63 | cur.execute(SQL_CREATE_HISTORY) 64 | self.conn.commit() 65 | logging.info('Create Postgres ddl_history') 66 | ddl_info = [] 67 | else: 68 | raise err 69 | return ddl_info 70 | 71 | def apply_ddl(self, ddl_file, version, action=dd.ACTION_APPLY): 72 | sql = ddl_file.open().read() 73 | with self.conn.cursor() as cur: 74 | cur.execute(sql) 75 | cur.execute('INSERT INTO ddl_history (version, action) ' 76 | 'VALUES (%s, %s)', 77 | [version, action]) 78 | self.conn.commit() 79 | logging.info('Postgres DDL: {action} {version}'.format( 80 | version=version, action='Rollback' if action else 'Apply')) 81 | 82 | def add_args(parser): 83 | dd.add_args(parser) 84 | 85 | def main(args, cfg): 86 | if 
not args.ddl_path: 87 | args.ddl_path = cfg['postgres']['default_ddl_path'] 88 | 89 | conn = None 90 | try: 91 | pgcfg = cfg['postgres'] 92 | conn = psycopg2.connect(host=pgcfg['host'], 93 | dbname=pgcfg['database'], 94 | user=pgcfg['user'], 95 | password=pgcfg['password']) 96 | ddl_actions = PostgresDDLActions(conn) 97 | res = dd.main(args, ddl_actions) 98 | conn.close() 99 | return res 100 | except Exception: 101 | if conn: 102 | conn.rollback() 103 | conn.close() 104 | raise 105 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/queue.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2019 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Add a job to a Visualizer queue. 12 | 13 | import datetime 14 | import sys 15 | 16 | import dsv.common.DateTime as dd 17 | import dsv.common.Queue as dq 18 | 19 | description = 'add a job to a processing queue.' 
20 | 21 | def add_args(parser): 22 | parser.add_argument('-q', '--queue', 23 | dest='queue', action='store', 24 | choices=['cdns-to-tsv', 'cdns-to-pcap', 'import-tsv'], 25 | required=True, 26 | help='the queue to use - cdns-to-tsv, cdns-to-pcap or import-tsv', 27 | metavar='QUEUE') 28 | parser.add_argument('--not-before', 29 | dest='notbefore', action='store', 30 | type=dd.arg_valid_date_type, 31 | default=None, 32 | help='don\'t execute job before this date/time', 33 | metavar='DATETIME') 34 | parser.add_argument('--delay', 35 | dest='delay', type=int, action='store', 36 | default=None, 37 | help='delay for SECS before executing job', 38 | metavar='SECS') 39 | parser.add_argument('jobs', 40 | nargs='+', 41 | help='the job string to add to the queue', 42 | metavar='JOB') 43 | 44 | def main(args, cfg): 45 | if args.delay and args.notbefore: 46 | print('Only one of --delay and --notbefore is allowed.', file=sys.stderr) 47 | return 1 48 | 49 | if args.delay: 50 | notbefore = datetime.datetime.now() + datetime.timedelta(seconds=args.delay) 51 | else: 52 | notbefore = args.notbefore 53 | with dq.QueueContext(cfg, sys.argv[0]).writer() as writer: 54 | for job in args.jobs: 55 | writer.add(args.queue, job, notbefore=notbefore) 56 | return 0 57 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/queue_freeze.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2019-2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Freeze Visualizer queue execution by removing write permission from the lock file. 
12 | 13 | import logging 14 | 15 | import dsv.common.Lock as dl 16 | 17 | description = 'freeze Visualizer queue processing.' 18 | 19 | def add_args(parser): 20 | parser.add_argument('queue', nargs='+', 21 | help='queue name', 22 | metavar='QUEUE') 23 | 24 | def main(args, cfg): 25 | datastore_cfg = cfg['datastore'] 26 | exit_status = 0 27 | for q in args.queue: 28 | if q not in datastore_cfg['queues']: 29 | logging.error('{}: no such queue.'.format(q)) 30 | print('{}: no such queue.'.format(q)) 31 | exit_status = 1 32 | continue 33 | 34 | lock = dl.DSVLock(datastore_cfg['user'], datastore_cfg['lockfile'].format(q)) 35 | if lock.is_frozen(): 36 | logging.info('Visualizer queue {} freeze - already frozen.'.format(q)) 37 | print('Visualizer queue {} already frozen.'.format(q)) 38 | exit_status = 1 39 | continue 40 | lock.freeze() 41 | logging.info('Visualizer queue {} frozen.'.format(q)) 42 | print('Visualizer queue {} frozen.'.format(q)) 43 | return exit_status 44 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/queue_thaw.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2019-2021 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Thaw Visualizer queue execution by adding write permission from the lock file. 12 | 13 | import logging 14 | 15 | import dsv.common.Lock as dl 16 | 17 | description = 'Thaw Visualizer queue processing.' 
18 | 19 | def add_args(parser): 20 | parser.add_argument('queue', nargs='+', 21 | help='queue name', 22 | metavar='QUEUE') 23 | 24 | def main(args, cfg): 25 | datastore_cfg = cfg['datastore'] 26 | exit_status = 0 27 | for q in args.queue: 28 | if q not in datastore_cfg['queues']: 29 | logging.error('{}: no such queue.'.format(q)) 30 | print('{}: no such queue.'.format(q)) 31 | exit_status = 1 32 | continue 33 | 34 | lock = dl.DSVLock(datastore_cfg['user'], datastore_cfg['lockfile'].format(q)) 35 | if not lock.is_frozen(): 36 | logging.info('Visualizer queue {} thaw - already thawed.'.format(q)) 37 | print('Visualizer queue {} already thawed.'.format(q)) 38 | exit_status = 1 39 | continue 40 | lock.thaw() 41 | logging.info('Visualizer queue {} thawed.'.format(q)) 42 | print('Visualizer queue {} thawed.'.format(q)) 43 | return exit_status 44 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/rssac_daemon.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2019-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Listen for NOTIFY messages on a zone, and when we get one for the 12 | # zone of interest, run notifier program and pass it the serial number. 13 | # 14 | # This program typically listens on port 53. We use the good offices 15 | # of systemd to handle all the socket setup, and configure it to pass 16 | # us a single socket to work with. We therefore avoid all the brouhaha 17 | # associated with running as root, dropping privs etc. 
18 | # 19 | 20 | import logging 21 | import socket 22 | import socketserver 23 | import subprocess 24 | 25 | import dns.exception 26 | import dns.message 27 | import dns.opcode 28 | 29 | import systemd.daemon 30 | 31 | description = 'listen for notifications to trigger RSSAC zone stats collection.' 32 | 33 | class RSSACHandler(socketserver.DatagramRequestHandler): 34 | def handle(self): 35 | data = self.request[0] 36 | try: 37 | message = dns.message.from_wire(data) 38 | if self.send_response(message): 39 | response = dns.message.make_response(message) 40 | self.socket.sendto(response.to_wire(), self.client_address) 41 | self.run_notify(message) 42 | except (dns.exception.DNSException, OSError) as err: 43 | logging.error('Error {err} on data from {client}'.format( 44 | err=err, client=self.client_address[0])) 45 | 46 | def send_response(self, message): 47 | # pylint: disable=no-member 48 | if message.opcode() != dns.opcode.NOTIFY: 49 | logging.info('Non-NOTIFY from {client}, ignoring'.format( 50 | client=self.client_address[0])) 51 | return False 52 | 53 | if len(message.question) == 0: 54 | logging.info('NOTIFY from {client} with no question, ignoring'.format( 55 | client=self.client_address[0])) 56 | return False 57 | 58 | qname = str(message.question[0].name) 59 | if self.server.zone != qname: 60 | logging.info('NOTIFY from {client} for zone "{zone}" not "{server}", ignoring'.format( 61 | client=self.client_address[0], 62 | zone=qname, 63 | server=self.server.zone)) 64 | return False 65 | 66 | return True 67 | 68 | def run_notify(self, message): 69 | if len(message.answer) == 0: 70 | logging.error('NOTIFY from {client} has no serial, ignoring'.format( 71 | client=self.client_address[0])) 72 | return 73 | 74 | serial = message.answer[0].to_rdataset()[0].serial 75 | 76 | logging.info('NOTIFY from {client} serial {serial}'.format( 77 | client=self.client_address[0], 78 | serial=serial)) 79 | try: 80 | subprocess.run([self.server.notifier, str(serial)], check=True) 
81 | except (subprocess.SubprocessError, OSError) as err: 82 | logging.error('dsv-rssac-notify: {}'.format(err)) 83 | 84 | class RSSACServer(socketserver.UDPServer): 85 | def __init__(self, fd, family, handler, notifier, zone): 86 | super().__init__(None, handler, bind_and_activate=False) 87 | self.socket = socket.fromfd(fd, family, self.socket_type) 88 | self.notifier = notifier 89 | self.zone = zone 90 | 91 | def add_args(parser): 92 | parser.add_argument('--notifier', 93 | dest='notifier', action='store', 94 | default='dsv-rssac-notify', 95 | help='notification process to run (default %(default)s)', 96 | metavar='NOTIFIER') 97 | parser.add_argument('-t', '--socket-type', 98 | dest='socktype', action='store', 99 | choices=['ipv4', 'ipv6'], 100 | required=True, 101 | help='Type of socket systemd is listening on') 102 | 103 | def main(args, cfg): 104 | fds = systemd.daemon.listen_fds() 105 | if len(fds) != 1: 106 | logging.error('Requires a single socket from systemd') 107 | return 1 108 | fd = fds[0] 109 | if not systemd.daemon.is_socket(fd): 110 | logging.error('Passed object from systemd is not a socket') 111 | return 1 112 | 113 | family = socket.AF_INET if args.socktype == 'ipv4' else socket.AF_INET6 114 | server = RSSACServer(fd, family, RSSACHandler, args.notifier, cfg['rssac']['zone']) 115 | logging.debug('RSSAC daemon started') 116 | server.serve_forever() 117 | # Keep pylint happy. 118 | return 0 119 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/rssac_daemon_tester.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2019-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Send a NOTIFY to the designated server and port with the given zone 12 | # and serial number. 13 | # 14 | 15 | import socket 16 | import sys 17 | 18 | import dns.exception 19 | import dns.message 20 | import dns.opcode 21 | import dns.rdatatype 22 | import dns.rrset 23 | import dns.query 24 | 25 | description = 'send a NOTIFY to test the RSSAC daemon.' 26 | 27 | def add_args(parser): 28 | parser.add_argument('-s', '--server', 29 | dest='server', action='store', 30 | default='localhost', 31 | help='server to send message to (default %(default)s)', 32 | metavar='SERVER') 33 | parser.add_argument('-p', '--port', 34 | dest='port', action='store', type=int, 35 | default=53, 36 | help='port to send message to (default %(default)s)', 37 | metavar='PORT') 38 | parser.add_argument('-n', '--serial', 39 | dest='serial', action='store', type=int, 40 | required=True, 41 | help='serial number', 42 | metavar='SERIAL') 43 | parser.add_argument('-z', '--zone', 44 | dest='zone', action='store', 45 | required=True, 46 | help='the zone', 47 | metavar='ZONE') 48 | 49 | def main(args, _): 50 | # pylint: disable=no-member 51 | message = dns.message.make_query(args.zone, dns.rdatatype.SOA) 52 | message.set_opcode(dns.opcode.NOTIFY) 53 | rrset = dns.rrset.from_text(args.zone, 0, 'IN', 'SOA', '. . {} 0 0 0 0'.format(args.serial)) 54 | message.answer.append(rrset) 55 | 56 | try: 57 | # dns.query.udp needs a string with an IP address. 58 | # Otherwise it messes up address comparison on the return packet. 
59 | dest = socket.getaddrinfo(args.server, args.port) 60 | server_addr = dest[0][4][0] 61 | except OSError: 62 | print('Unknown server {}'.format(args.server), file=sys.stderr) 63 | return 1 64 | 65 | try: 66 | dns.query.udp(message, server_addr, port=args.port, timeout=1) 67 | return 0 68 | except dns.exception.Timeout: 69 | print('No response received.', file=sys.stderr) 70 | return 1 71 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/status.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Get Visualizer queue status info 12 | 13 | import sys 14 | 15 | import dsv.common.Lock as dl 16 | import dsv.common.Queue as dq 17 | 18 | description = 'get queue status.' 
19 | 20 | def add_args(parser): 21 | parser.add_argument('-q', '--queue', 22 | dest='queue', action='store', 23 | choices=['cdns-to-tsv', 'cdns-to-pcap', 'import-tsv'], 24 | default=None, 25 | help='the queue to use - cdns-to-tsv, cdns-to-pcap or import-tsv', 26 | metavar='QUEUE') 27 | parser.add_argument('items', 28 | nargs='*', 29 | help='the status item to show (len, running, workers, frozen)', 30 | metavar='ITEM') 31 | 32 | def main(args, cfg): 33 | datastore_cfg = cfg['datastore'] 34 | 35 | ctx = dq.QueueContext(cfg, sys.argv[0]) 36 | for q in ctx.status(): 37 | if args.queue: 38 | if args.queue != q[0]: 39 | continue 40 | qinfo = {'name': q[0], 'len': q[1], 'running': q[2], 'workers': q[3]} 41 | lock = dl.DSVLock(datastore_cfg['user'], datastore_cfg['lockfile'].format(q[0])) 42 | qinfo['frozen'] = 'frozen' if lock.is_frozen() else 'active' 43 | if args.items: 44 | first = True 45 | for item in args.items: 46 | try: 47 | if first: 48 | first = False 49 | else: 50 | print(',', end='') 51 | print(qinfo[item], end='') 52 | except KeyError: 53 | print('Unknown item {} - use len, running, ' 54 | 'workers or frozen'.format(item), file=sys.stderr) 55 | return 1 56 | print() 57 | else: 58 | print('{queue}: {items} queued, {running} running, ' 59 | '{workers} workers, {frozen}'.format( 60 | queue=qinfo['name'], items=qinfo['len'], 61 | running=qinfo['running'], workers=qinfo['workers'], 62 | frozen=qinfo['frozen'])) 63 | return 0 64 | -------------------------------------------------------------------------------- /src/python3/dsv/commands/tld_update.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright 2018-2020 Internet Corporation for Assigned Names and Numbers. 4 | # 5 | # This Source Code Form is subject to the terms of the Mozilla Public 6 | # License, v. 2.0. If a copy of the MPL was not distributed with this 7 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
8 | # 9 | # Developed by Sinodun IT (sinodun.com) 10 | # 11 | # Update the list of top level domains. Uses the list of TLDs published 12 | # by IANA at https://data.iana.org/TLD/tlds-alpha-by-domain.txt and 13 | # checks individual TLDs using the IANA TLD page at 14 | # https://www.iana.org/domains/root/db/.html. 15 | # 16 | # Usage: dsv-tld-update 17 | 18 | import logging 19 | import urllib.request 20 | import encodings.idna 21 | import sys 22 | 23 | import psycopg2 24 | 25 | description = 'Update the list of top level domains' 26 | 27 | def read_url(url, encoding='utf-8'): 28 | with urllib.request.urlopen(url) as page: 29 | return page.read().decode(encoding) 30 | 31 | def tld_type(tld, tldurlfmt): 32 | # Legacy TLD are all in the base set. New ones will be either 33 | # ccTLD or New-gTLD. New ccTLDs are either (a) any 2 character TLD, 34 | # or (b) an IDN flagged as a ccTLD. To determine the latter, 35 | # check the IANA TLD page and look for the text 36 | # 'Country-code top-level domain'. 37 | if len(tld) == 2 or \ 38 | (tld.startswith('xn--') and \ 39 | read_url(tldurlfmt.format(tld)).find('Country-code top-level domain') != -1): 40 | return 'ccTLD' 41 | return 'New-gTLD' 42 | 43 | def add_args(parser): 44 | parser.add_argument('-n', '--dry-run', 45 | dest='dryrun', action='store_true', default=False, 46 | help='perform a trial run') 47 | parser.add_argument('-v', '--verbose', 48 | action='store_true', default=False, 49 | help='enable verbosity') 50 | 51 | def main(args, cfg): 52 | conn = None 53 | tlds = set() 54 | pg_tlds = set() 55 | try: 56 | # pylint: disable=no-member 57 | if args.verbose and not sys.stdout.encoding.lower().startswith('utf'): 58 | print('Output path is not Unicode. 
U-Labels will not display correctly.') 59 | 60 | for line in read_url(cfg['urls']['tldlist'], 'ascii').splitlines(): 61 | tld = line.split('#')[0].strip().lower() 62 | if tld: 63 | tlds.add(tld) 64 | 65 | pgcfg = cfg['postgres'] 66 | conn = psycopg2.connect(host=pgcfg['host'], 67 | dbname=pgcfg['database'], 68 | user=pgcfg['user'], 69 | password=pgcfg['password']) 70 | 71 | with conn.cursor() as cur: 72 | cur.execute('SELECT name FROM tld') 73 | pg_tlds_tuples = cur.fetchall() 74 | for pg_tlds_tuple in pg_tlds_tuples: 75 | pg_tlds.add(pg_tlds_tuple[0]) 76 | 77 | to_remove = pg_tlds - tlds 78 | to_add = tlds - pg_tlds 79 | 80 | # Historically the tld table had no ulabel column so do a 81 | # quick check to ensure this is populated for every tld. 82 | for tld in sorted(pg_tlds): 83 | ulabel = encodings.idna.ToUnicode(tld) 84 | msg = 'Update TLD {tld} U-Label {ulabel}'.format(tld=tld, ulabel=ulabel) 85 | logging.info(msg) 86 | if args.verbose: 87 | print(msg) 88 | if not args.dryrun: 89 | cur.execute('UPDATE tld SET ulabel = %s ' 90 | 'WHERE name = %s AND ulabel IS null', 91 | (ulabel, tld)) 92 | 93 | for tld in sorted(to_add): 94 | tldtype = tld_type(tld, cfg['urls']['tldpage']) 95 | ulabel = encodings.idna.ToUnicode(tld) 96 | msg = 'Adding TLD {tld} U-Label {ulabel} ({tldtype})'.format( 97 | tld=tld, ulabel=ulabel, tldtype=tldtype) 98 | logging.info(msg) 99 | if args.verbose: 100 | print(msg) 101 | if not args.dryrun: 102 | cur.execute('INSERT INTO tld (name, ulabel, type_id) ' 103 | 'SELECT %s, %s, id FROM tld_type WHERE name=%s', 104 | (tld, ulabel, tldtype)) 105 | 106 | for tld in sorted(to_remove): 107 | msg = 'Removing TLD {tld}'.format(tld=tld) 108 | logging.info(msg) 109 | if args.verbose: 110 | print(msg) 111 | if not args.dryrun: 112 | cur.execute('DELETE FROM tld WHERE name = %s', (tld,)) 113 | 114 | conn.commit() 115 | conn.close() 116 | return 0 117 | except psycopg2.Error: 118 | if conn is not None: 119 | conn.rollback() 120 | conn.close() 121 | raise 
122 | -------------------------------------------------------------------------------- /src/python3/dsv/common/DateTime.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018-2019 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import argparse 10 | import datetime 11 | 12 | DATE_TIME_FORMATS = ['%Y-%m-%d %H:%M:%S', '%Y%m%d_%H%M%S', '%Y-%m-%d'] 13 | 14 | def parse_datetime(arg): 15 | for dtf in DATE_TIME_FORMATS: 16 | try: 17 | return datetime.datetime.strptime(arg, dtf) 18 | except ValueError: 19 | pass 20 | raise ValueError('{0} is not a valid date'.format(arg)) 21 | 22 | def arg_valid_date_type(arg): 23 | try: 24 | return parse_datetime(arg) 25 | except ValueError: 26 | raise argparse.ArgumentTypeError( 27 | 'Date {0} not valid. Expected format YYYY-MM-DD or YYYY-MM-DD HH:MM:SS.'.format(arg)) 28 | -------------------------------------------------------------------------------- /src/python3/dsv/common/Lock.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018-2019 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import fcntl 10 | import pathlib 11 | import pwd 12 | import os 13 | 14 | class NoSuchUserException(Exception): 15 | """Exception raised when there is no such user as the one specified for locking.""" 16 | def __init__(self, user): 17 | super().__init__("Configured user {} does not exist.".format(user)) 18 | 19 | class WrongUserException(Exception): 20 | """Exception raised when we aren't the right user to use a Visualizer lock.""" 21 | def __init__(self, user): 22 | super().__init__("You need to be user {} to use this command.".format(user)) 23 | 24 | class DSVUser: 25 | # pylint: disable=too-few-public-methods 26 | def __init__(self, user): 27 | self._user = user 28 | try: 29 | passwd = pwd.getpwnam(user) 30 | self._uid = passwd.pw_uid 31 | self._gid = passwd.pw_gid 32 | except KeyError: 33 | raise NoSuchUserException(user) 34 | 35 | def ensure_user(self): 36 | """Ensure we are the nominated user.""" 37 | try: 38 | os.setgid(self._gid) 39 | os.setuid(self._uid) 40 | except PermissionError: 41 | raise WrongUserException(self._user) 42 | 43 | class DSVLock: 44 | def __init__(self, user, lockpath): 45 | self._lock = pathlib.Path(lockpath) 46 | self._user = DSVUser(user) 47 | 48 | def _ensure_lockfile(self): 49 | # pylint: disable=no-member 50 | """Ensure the lockfile and its dir exist.""" 51 | self._user.ensure_user() 52 | lockdir = self._lock.parent 53 | lockdir.mkdir(0o755, parents=True, exist_ok=True) 54 | self._lock.touch(0o666) 55 | 56 | def _get_mode(self): 57 | """Get the current lockfile mode.""" 58 | return self._lock.stat().st_mode 59 | 60 | def lock(self): 61 | """Obtain the lock, or throw. 62 | 63 | WrongUserException: Incorrect user permissions. 64 | PermissionError: Lock frozen. 65 | BlockingError: Lock already taken. 
66 | """ 67 | self._ensure_lockfile() 68 | f = self._lock.open('w') 69 | fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) 70 | 71 | def is_frozen(self): 72 | """Is the lock currently frozen? 73 | 74 | So status can be read by all users, assume not frozen 75 | if the lock file does not exist but directory is readable.""" 76 | try: 77 | if not self._lock.exists(): 78 | return False 79 | except PermissionError: 80 | self._ensure_lockfile() 81 | return (self._get_mode() & 0o200) == 0 82 | 83 | def freeze(self): 84 | """Freeze the lock. 85 | 86 | Set permissions to read-only. 87 | """ 88 | self._ensure_lockfile() 89 | self._lock.chmod(self._get_mode() & 0o444) 90 | 91 | def thaw(self): 92 | """Thaw the lock. 93 | 94 | Set permissions to read/write. 95 | """ 96 | self._ensure_lockfile() 97 | self._lock.chmod(self._get_mode() | 0o200) 98 | -------------------------------------------------------------------------------- /src/python3/dsv/common/NodeFlag.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020, 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import enum 10 | 11 | class NodeFlag(enum.Enum): 12 | NONE = 0 # No flags set 13 | 14 | # Flags set on nodes update. 15 | HIDDEN = 0b0000000000001 # Not visible on Grafana 16 | HIDDEN_TEST = 0b0000000000010 # Not visible on test Grafana 17 | INACTIVE = 0b0000000000100 # Not present in nodes.csv 18 | NO_SERVICE_ADDR = 0b0000000001000 # No service address 19 | 20 | # Derived flag combinations. 
21 | NOT_RSSAC = 0b0000000001100 # Inactive or no service address 22 | -------------------------------------------------------------------------------- /tests/python3/integration/ddl-applied/0001.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/tests/python3/integration/ddl-applied/0001.sql -------------------------------------------------------------------------------- /tests/python3/integration/ddl-toapply/0001-rollback.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/tests/python3/integration/ddl-toapply/0001-rollback.sql -------------------------------------------------------------------------------- /tests/python3/integration/ddl-toapply/0001.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/tests/python3/integration/ddl-toapply/0001.sql -------------------------------------------------------------------------------- /tests/python3/integration/ddl-toapply/0002.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dns-stats/visualizer/20fba91f0d26b98531f97f643c8329640d1c0d11/tests/python3/integration/ddl-toapply/0002.sql -------------------------------------------------------------------------------- /tests/python3/integration/gear.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | # Test stub for gear module. 10 | 11 | PRECEDENCE_NORMAL = 0 12 | PRECEDENCE_LOW = 1 13 | PRECEDENCE_HIGH = 2 14 | -------------------------------------------------------------------------------- /tests/python3/integration/test_DDL.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import argparse 10 | import datetime 11 | import pathlib 12 | import unittest 13 | 14 | import dsv.common.DDL as DDL 15 | 16 | import common 17 | 18 | class TestDDLActions(DDL.DDLActions): 19 | def __init__(self): 20 | self.actions = [] 21 | 22 | def apply_ddl(self, ddl_file, version, action=DDL.ACTION_APPLY): 23 | self.actions.append('{}|{}|{}|'.format(ddl_file.stem, version, 'apply' if action == DDL.ACTION_APPLY else 'rollback')) 24 | 25 | def read_ddl_info(self, ddl_path): 26 | return [ 27 | (1, datetime.datetime(2021, 1, 19, 10, 0, 0), DDL.ACTION_APPLY), 28 | (1, datetime.datetime(2021, 1, 19, 10, 5, 0), DDL.ACTION_ROLLBACK), 29 | (1, datetime.datetime(2021, 1, 19, 10, 10, 0), DDL.ACTION_APPLY), 30 | ] 31 | 32 | def get_args(argv=[]): 33 | parser = argparse.ArgumentParser('DDL') 34 | DDL.add_args(parser) 35 | return parser.parse_args(argv) 36 | 37 | class TestRun(common.DSVTestCase): 38 | def __init__(self, methodName='runTest'): 39 | super().__init__(methodName) 40 | self.ddl_toapply_path = pathlib.Path('tests/python3/integration/ddl-toapply') 41 | self.ddl_applied_path = pathlib.Path('tests/python3/integration/ddl-applied') 42 | 43 | def test_ddl_files_toapply(self): 44 | files = DDL.ddl_files(self.ddl_toapply_path) 45 | 
self.assertEqual(len(files), 2) 46 | self.assertEqual(int(files[0].stem), 1) 47 | self.assertEqual(int(files[1].stem), 2) 48 | 49 | def test_ddl_files_applied(self): 50 | files = DDL.ddl_files(self.ddl_applied_path) 51 | self.assertEqual(len(files), 1) 52 | self.assertEqual(int(files[0].stem), 1) 53 | 54 | def test_ddl_rollback_files(self): 55 | files = DDL.ddl_rollback_files(self.ddl_toapply_path, 1) 56 | self.assertEqual(len(files), 1) 57 | files = DDL.ddl_rollback_files(self.ddl_toapply_path, 2) 58 | self.assertEqual(len(files), 0) 59 | 60 | def test_active_ddls(self): 61 | actions = TestDDLActions() 62 | active = DDL.active_ddls(actions.read_ddl_info(self.ddl_toapply_path)) 63 | self.assertEqual(len(active), 1) 64 | self.assertEqual(active[1], datetime.datetime(2021, 1, 19, 10, 10, 00)) 65 | 66 | def test_apply_toapply(self): 67 | actions = TestDDLActions() 68 | args = get_args(['--quiet', '--action', 'update', str(self.ddl_toapply_path)]) 69 | res = DDL.main(args, actions) 70 | self.assertEqual(res, 0) 71 | self.assertEqual(len(actions.actions), 1) 72 | self.assertEqual(actions.actions[0], '0002|2|apply|') 73 | 74 | def test_apply_applied(self): 75 | actions = TestDDLActions() 76 | args = get_args(['--quiet', '--action', 'update', str(self.ddl_applied_path)]) 77 | res = DDL.main(args, actions) 78 | self.assertEqual(res, 0) 79 | self.assertEqual(len(actions.actions), 0) 80 | 81 | def test_rollback_toapply(self): 82 | actions = TestDDLActions() 83 | args = get_args(['--quiet', '--action', 'rollback', str(self.ddl_toapply_path)]) 84 | res = DDL.main(args, actions) 85 | self.assertEqual(res, 0) 86 | self.assertEqual(len(actions.actions), 1) 87 | self.assertEqual(actions.actions[0], '0001-rollback|1|rollback|') 88 | 89 | def test_rollback_applied(self): 90 | actions = TestDDLActions() 91 | args = get_args(['--quiet', '--action', 'rollback', str(self.ddl_applied_path)]) 92 | res = DDL.main(args, actions) 93 | self.assertEqual(res, 1) 94 | 
self.assertEqual(len(actions.actions), 0) 95 | 96 | def test_status_toapply(self): 97 | actions = TestDDLActions() 98 | args = get_args(['--quiet', '--action', 'status', str(self.ddl_toapply_path)]) 99 | res = DDL.main(args, actions) 100 | self.assertEqual(res, 1) 101 | 102 | def test_status_applied(self): 103 | actions = TestDDLActions() 104 | args = get_args(['--quiet', '--action', 'status', str(self.ddl_applied_path)]) 105 | res = DDL.main(args, actions) 106 | self.assertEqual(res, 0) 107 | 108 | def test_exception_raised(self): 109 | actions = DDL.DDLActions() 110 | args = get_args(['--quiet', '--action', 'status', str(self.ddl_applied_path)]) 111 | res = DDL.main(args, actions) 112 | self.assertEqual(res, 2) 113 | -------------------------------------------------------------------------------- /tests/python3/integration/test_clickhouse_sys_info.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | 11 | import common 12 | 13 | dcsi = importlib.import_module('dsv.commands.clickhouse_sys_info') 14 | 15 | class TestRun(common.DSVTestCase): 16 | def test_all_used(self): 17 | print('disc-block-size\ndisc-size\ndisc-available\ndisc-percent-free\ndisc-percent-used', file=self._stdin) 18 | self._stdin.seek(0) 19 | args = common.get_args(dcsi) 20 | self.assertEqual(dcsi.main(args, self._config), 0) 21 | 22 | self._stdout.seek(0) 23 | output = self._stdout.readlines() 24 | out_split = [o.split('\t') for o in output] 25 | for o in out_split: 26 | d = int(o[1]) 27 | self.assertTrue(d >= 0) 28 | if o[0].find('percent') != -1: 29 | self.assertTrue(d <= 100) 30 | self.assertEqual(out_split[0][0], 'disc-block-size') 31 | self.assertEqual(out_split[1][0], 'disc-size') 32 | self.assertEqual(out_split[2][0], 'disc-available') 33 | self.assertEqual(out_split[3][0], 'disc-percent-free') 34 | self.assertEqual(out_split[4][0], 'disc-percent-used') 35 | 36 | def test_unused(self): 37 | print('xyzzy', file=self._stdin) 38 | self._stdin.seek(0) 39 | args = common.get_args(dcsi) 40 | self.assertEqual(dcsi.main(args, self._config), 1) 41 | -------------------------------------------------------------------------------- /tests/python3/integration/test_config.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | 11 | import common 12 | 13 | dc = importlib.import_module('dsv.commands.config') 14 | 15 | class TestRun(common.DSVTestCase): 16 | def test_config(self): 17 | args = common.get_args(dc, ['datastore', 'queues']) 18 | self.assertEqual(dc.main(args, self._config), 0) 19 | self._stdout.seek(0) 20 | output = self._stdout.readlines() 21 | self.assertEqual(len(output), 1) 22 | self.assertEqual(output[0], 'cdns-to-pcap,cdns-to-tsv,import-tsv\n') 23 | 24 | def test_bad_config(self): 25 | args = common.get_args(dc, ['plugh', 'xyzzy']) 26 | self.assertNotEqual(dc.main(args, self._config), 0) 27 | 28 | def test_random_config(self): 29 | args = common.get_args(dc, ['-r', 'datastore', 'queues']) 30 | self.assertEqual(dc.main(args, self._config), 0) 31 | self._stdout.seek(0) 32 | output = self._stdout.readlines() 33 | self.assertEqual(len(output), 1) 34 | self.assertIn(output[0].strip(), ['cdns-to-pcap', 'cdns-to-tsv', 'import-tsv']) 35 | -------------------------------------------------------------------------------- /tests/python3/integration/test_import_freeze_thaw.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | import pathlib 11 | 12 | import common 13 | 14 | dif = importlib.import_module('dsv.commands.import_freeze') 15 | dit = importlib.import_module('dsv.commands.import_thaw') 16 | di = importlib.import_module('dsv.commands.import') 17 | 18 | class TestRun(common.DSVTestCase): 19 | def test_freeze(self): 20 | args = common.get_args(dif) 21 | self.assertEqual(dif.main(args, self._config), 0) 22 | self.assertNotEqual(dif.main(args, self._config), 0) 23 | lockpath = self._config['datastore']['lockfile'].format('import') 24 | lockfile = pathlib.Path(lockpath) 25 | self.assertTrue(lockfile.is_file()) 26 | fmode = lockfile.stat().st_mode 27 | self.assertEqual(fmode & 0o200, 0) 28 | 29 | di_args = common.get_args(di, ['-s', 'incoming']) 30 | self.assertNotEqual(di.main(di_args, self._config), 0) 31 | 32 | self.assertEqual(dit.main(args, self._config), 0) 33 | self.assertTrue(lockfile.is_file()) 34 | fmode = lockfile.stat().st_mode 35 | self.assertNotEqual(fmode & 0o200, 0) 36 | self.assertNotEqual(dit.main(args, self._config), 0) 37 | -------------------------------------------------------------------------------- /tests/python3/integration/test_log.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | 11 | import common 12 | 13 | dl = importlib.import_module('dsv.commands.log') 14 | 15 | class TestRun(common.DSVTestCase): 16 | def test_log(self): 17 | args = common.get_args(dl, ['-l', 'critical', 'log message']) 18 | self.assertEqual(dl.main(args, self._config), 0) 19 | self._log.seek(0) 20 | output = self._log.readlines() 21 | self.assertEqual(len(output), 1) 22 | self.assertEqual(output[0], 'root:CRITICAL:log message\n') 23 | -------------------------------------------------------------------------------- /tests/python3/integration/test_queue.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | import pdb 11 | import sys 12 | 13 | from unittest.mock import patch 14 | 15 | import common 16 | import dsv.common.Queue as dq 17 | 18 | cmd = importlib.import_module('dsv.commands.queue') 19 | 20 | class TestRun(common.DSVTestCase): 21 | def test_queue(self): 22 | with patch('dsv.common.Queue.QueueWriter', autospec=True) as MockQueueWriter: 23 | args = common.get_args(cmd, ['-q', 'import-tsv', 'job1']) 24 | self.assertEqual(cmd.main(args, self._config), 0) 25 | -------------------------------------------------------------------------------- /tests/python3/integration/test_queue_freeze_thaw.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. 
If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | import pathlib 11 | 12 | import common 13 | 14 | dqf = importlib.import_module('dsv.commands.queue_freeze') 15 | dqt = importlib.import_module('dsv.commands.queue_thaw') 16 | 17 | class TestRun(common.DSVTestCase): 18 | def test_freeze(self): 19 | queues = self._config['datastore']['queues'].split(',') 20 | for q in queues: 21 | args = common.get_args(dqf, [q]) 22 | self.assertEqual(dqf.main(args, self._config), 0) 23 | self.assertNotEqual(dqf.main(args, self._config), 0) 24 | lockpath = self._config['datastore']['lockfile'].format(q) 25 | lockfile = pathlib.Path(lockpath) 26 | self.assertTrue(lockfile.is_file()) 27 | fmode = lockfile.stat().st_mode 28 | self.assertEqual(fmode & 0o200, 0) 29 | self.assertEqual(dqt.main(args, self._config), 0) 30 | self.assertTrue(lockfile.is_file()) 31 | fmode = lockfile.stat().st_mode 32 | self.assertNotEqual(fmode & 0o200, 0) 33 | self.assertNotEqual(dqt.main(args, self._config), 0) 34 | 35 | def test_bad_freeze(self): 36 | queues = ['plugh', 'xyzzy'] 37 | for q in queues: 38 | args = common.get_args(dqf, [q]) 39 | self.assertNotEqual(dqf.main(args, self._config), 0) 40 | self.assertNotEqual(dqt.main(args, self._config), 0) 41 | -------------------------------------------------------------------------------- /tests/python3/integration/test_rssac_notify.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | 11 | import common 12 | import dsv.common.Queue as dq 13 | 14 | drn = importlib.import_module('dsv.commands.rssac_notify') 15 | 16 | class TestRun(common.DSVTestCase): 17 | def test_cmp_serial(self): 18 | self.assertEqual(0, drn.cmp_serial(2019090101, 2019090101)) 19 | self.assertEqual(-1, drn.cmp_serial(2019090101, 2019090102)) 20 | self.assertEqual(1, drn.cmp_serial(2019090102, 2019090101)) 21 | 22 | self.assertEqual(-1, drn.cmp_serial(1, 0x7FFFFFFF)) 23 | self.assertEqual(1, drn.cmp_serial(1, 0x80000000)) 24 | 25 | self.assertEqual(-1, drn.cmp_serial(2, 0x80000000)) 26 | self.assertEqual(1, drn.cmp_serial(2, 0x80000001)) 27 | 28 | def test_find_latest_serial(self): 29 | self.assertEqual(None, drn.find_latest_serial([])) 30 | self.assertEqual(2019090101, drn.find_latest_serial([ 31 | 2019090101, 32 | ])) 33 | self.assertEqual(2019090101, drn.find_latest_serial([ 34 | 2019083101, 35 | 2019090101, 36 | ])) 37 | self.assertEqual(2019090103, drn.find_latest_serial([ 38 | 2019083101, 39 | 2019090101, 40 | 2019090103, 41 | 2019083102, 42 | ])) 43 | 44 | def test_get6addr(self): 45 | self.assertFalse(drn.get6addr('ipv6.google.com')[0].startswith('::ffff:')) 46 | self.assertTrue(drn.get6addr('ipv4.google.com')[0].startswith('::ffff:')) 47 | -------------------------------------------------------------------------------- /tests/python3/integration/test_status.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Internet Corporation for Assigned Names and Numbers. 2 | # 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this 5 | # file, you can obtain one at https://mozilla.org/MPL/2.0/. 
6 | # 7 | # Developed by Sinodun IT (sinodun.com) 8 | 9 | import importlib 10 | 11 | from unittest.mock import patch 12 | 13 | import common 14 | import dsv.common.Queue as dq 15 | 16 | ws = importlib.import_module('dsv.commands.status') 17 | 18 | class TestRun(common.DSVTestCase): 19 | def test_status(self): 20 | with patch.object(dq.QueueContext, 'status', return_value=[['queue', 'len', 'running', 'workers']]): 21 | args = common.get_args(ws) 22 | self.assertEqual(ws.main(args, self._config), 0) 23 | self._stdout.seek(0) 24 | output = self._stdout.readlines() 25 | self.assertEqual(len(output), 1) 26 | self.assertEqual(output[0], 'queue: len queued, running running, workers workers, active\n') 27 | -------------------------------------------------------------------------------- /tests/tools/log-requests/background.js: -------------------------------------------------------------------------------- 1 | function logURL(requestDetails) { 2 | const body = requestDetails.requestBody; 3 | if ( body ) { 4 | if ( body.raw ) { 5 | let dec = new TextDecoder("utf-8"); 6 | for ( let i = 0; i < body.raw.length; i++ ) { 7 | let s = body.raw[i].bytes; 8 | console.log("\n%s\n", dec.decode(s)); 9 | } 10 | } 11 | } 12 | } 13 | 14 | chrome.webRequest.onBeforeRequest.addListener( 15 | logURL, 16 | {urls: ["*://*/api*","*://*/stats/api*"]}, 17 | ["requestBody"] 18 | ); 19 | -------------------------------------------------------------------------------- /tests/tools/log-requests/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Log Grafana API requests", 3 | "manifest_version": 2, 4 | "name": "grafana-api-logger", 5 | "version": "0.2", 6 | 7 | "permissions": [ 8 | "webRequest", 9 | "*://*/api*", 10 | "*://*/stats/api*" 11 | ], 12 | 13 | "background": { 14 | "scripts": ["background.js"] 15 | } 16 | } 17 | --------------------------------------------------------------------------------