├── docs
│   ├── CHANGELOG.rst
│   ├── requirements.txt
│   ├── api
│   │   ├── modules.rst
│   │   └── grafanalib.rst
│   ├── CODE_OF_CONDUCT.rst
│   ├── index.rst
│   ├── Makefile
│   ├── releasing.rst
│   ├── conf.py
│   ├── CONTRIBUTING.rst
│   └── getting-started.rst
├── requirements.txt
├── .flake8
├── grafanalib
│   ├── __init__.py
│   ├── tests
│   │   ├── examples
│   │   │   ├── sqltarget_example_files
│   │   │   │   ├── example.sql
│   │   │   │   └── example_with_params.sql
│   │   │   ├── example.dashboard-with-sql.py
│   │   │   ├── example.dashboard.py
│   │   │   ├── upload_grafana_dashboard.sh
│   │   │   ├── example.upload-dashboard.py
│   │   │   ├── example.upload-alerts.py
│   │   │   ├── example-elasticsearch.dashboard.py
│   │   │   ├── example.alertsv8.alertgroup.py
│   │   │   ├── table-example-dashboard.py
│   │   │   ├── example.alertsv9.alertgroup.py
│   │   │   └── example.alertsv9.alertfilebasedprovisioning.py
│   │   ├── test_azuredataexplorer.py
│   │   ├── test_humio.py
│   │   ├── test_opentsdb.py
│   │   ├── test_elasticsearch.py
│   │   ├── test_examples.py
│   │   ├── test_zabbix.py
│   │   ├── test_validators.py
│   │   ├── test_cloudwatch.py
│   │   ├── test_azuremonitor.py
│   │   └── test_grafanalib.py
│   ├── humio.py
│   ├── prometheus.py
│   ├── azuredataexplorer.py
│   ├── influxdb.py
│   ├── weave.py
│   ├── validators.py
│   ├── azuremonitor.py
│   ├── cloudwatch.py
│   ├── opentsdb.py
│   ├── _gen.py
│   ├── formatunits.py
│   ├── elasticsearch.py
│   └── zabbix.py
├── .coveragerc
├── .gitignore
├── .github
│   ├── ISSUE_TEMPLATE.md
│   ├── workflows
│   │   ├── run-tests.yml
│   │   ├── publish-to-pypi.yml
│   │   └── check-sphinx-and-links.yml
│   ├── dependabot.yml
│   └── PULL_REQUEST_TEMPLATE.md
├── MAINTAINERS
├── .readthedocs.yml
├── tox.ini
├── Makefile
├── setup.py
├── README.rst
├── LICENSE
└── CHANGELOG.rst

/docs/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | ../CHANGELOG.rst
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | tox
2 | pytest
3 | flake8
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 | ignore = E501
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx == 7.4.7
2 | sphinx_rtd_theme == 2.0.0
--------------------------------------------------------------------------------
/grafanalib/__init__.py:
--------------------------------------------------------------------------------
1 | """Routines for building Grafana dashboards."""
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | include =
4 |     grafanalib/*.py
5 |     grafanalib/**/*.py
--------------------------------------------------------------------------------
/docs/api/modules.rst:
--------------------------------------------------------------------------------
1 | grafanalib
2 | ==========
3 | 
4 | ..
toctree:: 5 | :maxdepth: 4 6 | 7 | grafanalib 8 | -------------------------------------------------------------------------------- /grafanalib/tests/examples/sqltarget_example_files/example.sql: -------------------------------------------------------------------------------- 1 | SELECT example, count(id) 2 | FROM test 3 | GROUP BY example; 4 | -------------------------------------------------------------------------------- /grafanalib/tests/examples/sqltarget_example_files/example_with_params.sql: -------------------------------------------------------------------------------- 1 | SELECT example 2 | FROM test 3 | WHERE example='{example}' AND example_date BETWEEN '{starting_date}' AND '{ending_date}'; 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | __pycache__ 3 | *.egg-info 4 | build/ 5 | dist/ 6 | .uptodate 7 | /.env 8 | test-results/junit-*.xml 9 | /.cache 10 | .ensure-* 11 | /.tox 12 | /.coverage 13 | /venv*/ 14 | /.idea/ 15 | /.vscode/ 16 | 17 | # Documentation 18 | docs/build 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 4 | 5 | ## What you expected to happen? 6 | 7 | ## What happened? 8 | 9 | 10 | ## How to reproduce it? 11 | 12 | -------------------------------------------------------------------------------- /docs/CODE_OF_CONDUCT.rst: -------------------------------------------------------------------------------- 1 | ========================= 2 | Community Code of Conduct 3 | ========================= 4 | 5 | Weaveworks follows the `CNCF Community Code of Conduct v1.0`_. 6 | 7 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting 8 | a Weaveworks project maintainer, 9 | or `Alexis Richardson `. 10 | 11 | .. _`CNCF Community Code of Conduct v1.0`: https://github.com/cncf/foundation/blob/0ce4694e5103c0c24ca90c189da81e5408a46632/code-of-conduct.md 12 | -------------------------------------------------------------------------------- /MAINTAINERS: -------------------------------------------------------------------------------- 1 | In alphabetical order: 2 | 3 | The maintainers are generally available in Slack at 4 | https://weave-community.slack.com/ in #grafanalib (https://weave-community.slack.com/archives/C9C9K6T4P) 5 | (obtain an invitation at https://slack.weave.works/). 6 | 7 | 8 | James Gibson, BBC (github: @JamesGibo, slack: James G) 9 | 10 | Retired maintainers: 11 | 12 | - Bryan Boreham 13 | - Daniel Holbach 14 | - Jonathan Lange 15 | - Matt Richter 16 | 17 | Thank you for your involvement, and let us not say "farewell" ... 
18 | -------------------------------------------------------------------------------- /.github/workflows/run-tests.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build-n-publish: 7 | name: Run tests - Python ${{ matrix.python }} 8 | runs-on: ubuntu-20.04 9 | strategy: 10 | matrix: 11 | python: ['3.8', '3.9', '3.10', '3.11'] 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Set up Python 15 | uses: actions/setup-python@v5.1.1 16 | with: 17 | python-version: ${{ matrix.python }} 18 | - name: Run tests 19 | run: | 20 | pip3 install tox flake8 21 | make deps 22 | make all 23 | -------------------------------------------------------------------------------- /grafanalib/tests/test_azuredataexplorer.py: -------------------------------------------------------------------------------- 1 | import grafanalib.core as G 2 | import grafanalib.azuredataexplorer as A 3 | from grafanalib import _gen 4 | from io import StringIO 5 | 6 | 7 | def test_serialization_azuredataexplorer_metrics_target(): 8 | """Serializing a graph doesn't explode.""" 9 | graph = G.Graph( 10 | title="Azure Data Explorer graph", 11 | dataSource="default", 12 | targets=[ 13 | A.AzureDataExplorerTarget() 14 | ], 15 | ) 16 | stream = StringIO() 17 | _gen.write_dashboard(graph, stream) 18 | assert stream.getvalue() != '' 19 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. grafanalib documentation master file, created by 2 | sphinx-quickstart on Mon Feb 17 14:29:44 2020. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to grafanalib's documentation! 7 | ====================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | getting-started 14 | 15 | api/grafanalib 16 | api/modules 17 | 18 | CONTRIBUTING 19 | CODE_OF_CONDUCT 20 | releasing 21 | 22 | CHANGELOG 23 | 24 | 25 | Indices and tables 26 | ================== 27 | 28 | * :ref:`genindex` 29 | * :ref:`modindex` 30 | * :ref:`search` 31 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= -W 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | 13 | - package-ecosystem: "github-actions" # See documentation for possible values 14 | directory: "/" # Location of package manifests 15 | schedule: 16 | interval: "weekly" 17 | -------------------------------------------------------------------------------- /grafanalib/tests/test_humio.py: -------------------------------------------------------------------------------- 1 | """Tests for Humio Datasource""" 2 | 3 | import grafanalib.core as G 4 | import grafanalib.humio as H 5 | from grafanalib import _gen 6 | from io import StringIO 7 | 8 | 9 | def test_serialization_humio_metrics_target(): 10 | """Serializing a graph doesn't explode.""" 11 | graph = G.Graph( 12 | title="Humio Logs", 13 | dataSource="Humio data source", 14 | targets=[ 15 | H.HumioTarget(), 16 | ], 17 | id=1, 18 | yAxes=G.YAxes( 19 | G.YAxis(format=G.SHORT_FORMAT, label="ms"), 20 | G.YAxis(format=G.SHORT_FORMAT), 21 | ), 22 | ) 23 | stream = StringIO() 24 | _gen.write_dashboard(graph, stream) 25 | assert stream.getvalue() != '' 26 | -------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python 🐍 distributions 📦 to PyPI 2 | 3 | on: push 4 | 5 | jobs: 6 | build-n-publish: 7 | name: Build and publish Python 🐍 distributions 📦 to PyPI 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v4 11 | - name: Set up Python 3.10 12 | uses: actions/setup-python@v5.1.1 13 | with: 14 | python-version: '3.10' 15 | - name: Build a binary wheel and a source tarball 16 | run: >- 17 | pip install wheel; 18 | rm -rf dist; 19 | python setup.py sdist bdist_wheel 20 | - name: Publish distribution 📦 to PyPI 21 | if: startsWith(github.event.ref, 'refs/tags') 22 | uses: pypa/gh-action-pypi-publish@master 23 | with: 24 | password: ${{ secrets.pypi_password }} 25 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.10" 13 | 14 | # Build documentation in the docs/ directory with Sphinx 15 | sphinx: 16 | configuration: docs/conf.py 17 | 18 | # Build documentation with MkDocs 19 | #mkdocs: 20 | # configuration: mkdocs.yml 21 | 22 | # Optionally build your docs in additional formats such as PDF and ePub 23 | formats: all 24 | 25 | # Optionally declare the Python requirements required to build your docs 26 | python: 27 | install: 28 | - requirements: docs/requirements.txt 29 | - method: setuptools 30 | path: . 31 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Tox (https://tox.readthedocs.io/) is a tool for running tests 2 | # in multiple virtualenvs. 
This configuration file will run the 3 | # test suite on all supported python versions. To use it, "pip install tox" 4 | # and then run "tox" from this directory. 5 | 6 | [tox] 7 | envlist = py37, py38, py39, py310, py311 8 | 9 | [testenv] 10 | commands = pytest -o junit_family=xunit2 --junitxml=test-results/junit-{envname}.xml 11 | deps = 12 | pytest 13 | 14 | [testenv:coverage] 15 | deps = 16 | coverage 17 | pytest 18 | commands = 19 | python -m coverage run --rcfile=.coveragerc -m pytest --strict-markers --maxfail=1 --ff {posargs} 20 | # Had 88% test coverage at time of introducing coverage ratchet. 21 | # This number must only go up. 22 | python -m coverage report --rcfile=.coveragerc --show-missing --fail-under=88 23 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 11 | 12 | ## What does this do? 13 | 14 | 15 | ## Why is it a good idea? 16 | 17 | 18 | ## Context 19 | 20 | 21 | ## Questions 22 | 23 | -------------------------------------------------------------------------------- /grafanalib/humio.py: -------------------------------------------------------------------------------- 1 | """Helpers to create Humio-specific Grafana queries.""" 2 | 3 | import attr 4 | 5 | 6 | @attr.s 7 | class HumioTarget(object): 8 | """ 9 | Generates Humio target JSON structure. 10 | 11 | Link to Humio Grafana plugin https://grafana.com/grafana/plugins/humio-datasource/ 12 | 13 | Humio docs on query language https://library.humio.com/humio-server/syntax.html 14 | 15 | :param humioQuery: Query that will be executed on Humio 16 | :param humioRepository: Repository to execute query on. 17 | :param refId: target reference id 18 | """ 19 | 20 | humioQuery = attr.ib(default="") 21 | humioRepository = attr.ib(default="") 22 | refId = attr.ib(default="") 23 | 24 | def to_json_data(self): 25 | 26 | return { 27 | "humioQuery": self.humioQuery, 28 | "humioRepository": self.humioRepository, 29 | "refId": self.refId 30 | } 31 | -------------------------------------------------------------------------------- /.github/workflows/check-sphinx-and-links.yml: -------------------------------------------------------------------------------- 1 | name: "Check docs and links" 2 | on: 3 | - pull_request 4 | - push 5 | 6 | jobs: 7 | docs: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v4 11 | 12 | # - uses: ammaraskar/sphinx-action@master 13 | # Using fork of sphinx-action to support python version > 3.9 14 | # As sphinx-action not updated the sphinx docker image it uses 15 | - uses: erpcya/sphinx-action@feature/#update-sphinx-base-image 16 | with: 17 | docs-folder: "docs/" 18 | pre-build-command: | 19 | pip3 install --upgrade pip 20 | sphinx-apidoc -f grafanalib -o docs/api 21 | python3 setup.py install --user 22 | build-command: "make html" 23 | 24 | - name: Link Checker 25 | id: lc 26 | uses: lycheeverse/lychee-action@v1.10.0 27 | with: 28 | args: --verbose **/*.html 29 | - name: Fail if there were link errors 30 | run: exit ${{ steps.lc.outputs.exit_code }} 31 | -------------------------------------------------------------------------------- /grafanalib/tests/examples/example.dashboard-with-sql.py: -------------------------------------------------------------------------------- 1 | from grafanalib.core import ( 2 | Dashboard, 3 | Graph, 4 | GridPos, 5 | OPS_FORMAT, 6 | RowPanel, 7 | SHORT_FORMAT, 8 | SqlTarget, 9 | YAxes, 10 | YAxis, 11 | ) 12 | 13 | 14 | 
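# Grafana expands the $__timeFilter("time") macro in the raw SQL below into a
# WHERE clause that limits the "time" column to the dashboard's currently
# selected time range, so the query only scans the visible window.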
dashboard = Dashboard(
15 |     title="Random stats from SQL DB",
16 |     panels=[
17 |         RowPanel(title="New row", gridPos=GridPos(h=1, w=24, x=0, y=8)),
18 |         Graph(
19 |             title="Some SQL Queries",
20 |             dataSource="Your SQL Source",
21 |             targets=[
22 |                 SqlTarget(
23 |                     rawSql='SELECT date as "time", metric FROM example WHERE $__timeFilter("time")',
24 |                     refId="A",
25 |                 ),
26 |             ],
27 |             yAxes=YAxes(
28 |                 YAxis(format=OPS_FORMAT),
29 |                 YAxis(format=SHORT_FORMAT),
30 |             ),
31 |             gridPos=GridPos(h=8, w=24, x=0, y=9),
32 |         ),
33 |     ],
34 | ).auto_panel_ids()
--------------------------------------------------------------------------------
/docs/releasing.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Release process
3 | ===============
4 | 
5 | Pre-release
6 | -----------
7 | 
8 | * Pick a new version number (e.g. ``X.Y.Z``)
9 | * Update `CHANGELOG `_ with that number
10 | * Update `setup.py `_ with that number
11 | 
12 | Smoke-testing
13 | -------------
14 | 
15 | * Run
16 | 
17 |   .. code-block:: console
18 | 
19 |      $ python setup.py install --user
20 | 
21 | * Check ``~/.local/bin/generate-dashboard`` for the updated version.
22 | * Try the example in the `README `_.
23 | 
24 | Releasing
25 | ---------
26 | 
27 | * Head to ``_ and create the release there.
28 | * Wait for GitHub Actions to complete the build and release.
29 | * Confirm on ``_ that the release made it there.
30 | 
31 | Follow-up
32 | ---------
33 | 
34 | * Run
35 | 
36 |   .. code-block:: console
37 | 
38 |      $ pip install grafanalib -U
39 | 
40 | * Check if the upgrade worked and the test above still passes.
--------------------------------------------------------------------------------
/grafanalib/tests/test_opentsdb.py:
--------------------------------------------------------------------------------
1 | """Tests for OpenTSDB datasource"""
2 | 
3 | import grafanalib.core as G
4 | from grafanalib.opentsdb import (
5 |     OpenTSDBFilter,
6 |     OpenTSDBTarget,
7 | )
8 | from grafanalib import _gen
9 | 
10 | import sys
11 | if sys.version_info[0] < 3:
12 |     from io import BytesIO as StringIO
13 | else:
14 |     from io import StringIO
15 | 
16 | 
17 | def test_serialization_opentsdb_target():
18 |     """Serializing a graph doesn't explode."""
19 |     graph = G.Graph(
20 |         title="CPU Usage",
21 |         dataSource="OpenTSDB data source",
22 |         targets=[
23 |             OpenTSDBTarget(
24 |                 metric='cpu',
25 |                 alias='$tag_instance',
26 |                 filters=[
27 |                     OpenTSDBFilter(value='*', tag='instance',
28 |                                    type='wildcard', groupBy=True),
29 |                 ]),
30 |         ],
31 |         id=1,
32 |         yAxes=G.YAxes(
33 |             G.YAxis(format=G.SHORT_FORMAT, label="CPU seconds / second"),
34 |             G.YAxis(format=G.SHORT_FORMAT),
35 |         ),
36 |     )
37 |     stream = StringIO()
38 |     _gen.write_dashboard(graph, stream)
39 |     assert stream.getvalue() != ''
--------------------------------------------------------------------------------
/grafanalib/tests/test_elasticsearch.py:
--------------------------------------------------------------------------------
1 | """Tests for elasticsearch."""
2 | 
3 | import grafanalib.elasticsearch as E
4 | import pytest
5 | 
6 | 
7 | def test_rate_metric_agg():
8 |     t = E.RateMetricAgg()
9 |     json_data = t.to_json_data()
10 | 
11 |     assert json_data["id"] == "0"
12 |     assert json_data["hide"] is False
13 |     assert json_data["field"] == ""
14 |     assert len(json_data["settings"]) == 0
15 |     assert json_data["type"] == "rate"
16 |     assert len(json_data) == 5
17 | 
18 |     t = E.RateMetricAgg(
19 |         field="some-field",
20 |         hide=True,
21 |         id=2,
22 |         unit="minute",
23 |         mode="sum",
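        # unit, mode and script are serialized under the aggregation's
        # "settings" key; id, hide, field and type stay top-level
        # (see the assertions below).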
24 | script="some script" 25 | ) 26 | json_data = t.to_json_data() 27 | 28 | assert json_data["id"] == "2" 29 | assert json_data["hide"] is True 30 | assert json_data["field"] == "some-field" 31 | assert len(json_data["settings"]) == 3 32 | assert json_data["settings"]["unit"] == "minute" 33 | assert json_data["settings"]["mode"] == "sum" 34 | assert json_data["settings"]["script"] == "some script" 35 | assert json_data["type"] == "rate" 36 | assert len(json_data) == 5 37 | 38 | with pytest.raises(ValueError): 39 | t = E.RateMetricAgg( 40 | mode="invalid mode" 41 | ) 42 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all clean clean-deps lint test deps coverage 2 | .DEFAULT_GOAL := all 3 | 4 | # Python-specific stuff 5 | TOX := $(shell command -v tox 2> /dev/null) 6 | PIP := $(shell command -v pip3 2> /dev/null) 7 | FLAKE8 := $(shell command -v flake8 2> /dev/null) 8 | 9 | .ensure-tox: .ensure-pip 10 | ifndef TOX 11 | rm -f .ensure-tox 12 | $(error "tox is not installed. Install with `pip install [--user] tox`.") 13 | endif 14 | touch .ensure-tox 15 | 16 | .ensure-pip: 17 | ifndef PIP 18 | rm -f .ensure-pip 19 | $(error "pip is not installed. Install with `python -m [--user] ensurepip`.") 20 | endif 21 | touch .ensure-pip 22 | 23 | .ensure-flake8: .ensure-pip 24 | ifndef FLAKE8 25 | rm -f .ensure-flake8 26 | $(error "flake8 is not installed. Install with `pip install [--user] flake8`.") 27 | endif 28 | touch .ensure-pip 29 | 30 | all: test lint coverage 31 | 32 | deps: setup.py .ensure-tox tox.ini 33 | 34 | $(VIRTUALENV_BIN)/flake8 $(VIRTUALENV_BIN)/py.test: $(DEPS_UPTODATE) 35 | 36 | lint: .ensure-flake8 37 | $(FLAKE8) grafanalib 38 | 39 | test: .ensure-tox 40 | $(TOX) --skip-missing-interpreters 41 | 42 | coverage: 43 | $(TOX) -e coverage 44 | 45 | clean: 46 | rm -rf grafanalib.egg-info 47 | rm -f .ensure-pip .ensure-tox .ensure-flake8 48 | find . -name '*.pyc' | xargs rm 49 | 50 | clean-deps: 51 | rm -rf $(VIRTUALENV_DIR) 52 | -------------------------------------------------------------------------------- /grafanalib/prometheus.py: -------------------------------------------------------------------------------- 1 | """Helpers for Prometheus-driven graphs.""" 2 | 3 | import string 4 | 5 | import grafanalib.core as G 6 | 7 | 8 | def PromGraph(data_source, title, expressions, **kwargs): 9 | """Create a graph that renders Prometheus data. 10 | 11 | :param str data_source: The name of the data source that provides 12 | Prometheus data. 13 | :param title: The title of the graph. 14 | :param expressions: List of tuples of (legend, expr), where 'expr' is a 15 | Prometheus expression. Or a list of dict where keys are Target's args. 16 | :param kwargs: Passed on to Graph. 17 | """ 18 | letters = string.ascii_uppercase 19 | expressions = list(expressions) 20 | if len(expressions) > len(letters): 21 | raise ValueError( 22 | 'Too many expressions. 
Can support at most {}, but got {}'.format( 23 | len(letters), len(expressions))) 24 | 25 | if all(isinstance(expr, dict) for expr in expressions): 26 | targets = [ 27 | G.Target(refId=refId, **args) 28 | for (args, refId) in zip(expressions, letters)] 29 | else: 30 | targets = [ 31 | G.Target(expr=expr, legendFormat=legend, refId=refId) 32 | for ((legend, expr), refId) in zip(expressions, letters)] 33 | return G.Graph( 34 | title=title, 35 | dataSource=data_source, 36 | targets=targets, 37 | **kwargs 38 | ) 39 | -------------------------------------------------------------------------------- /grafanalib/azuredataexplorer.py: -------------------------------------------------------------------------------- 1 | """Helpers to create Azure Data Explorer specific Grafana queries.""" 2 | 3 | import attr 4 | 5 | TIME_SERIES_RESULT_FORMAT = 'time_series' 6 | TABLE_RESULT_FORMAT = 'table' 7 | ADX_TIME_SERIES_RESULT_FORMAT = 'time_series_adx_series' 8 | 9 | 10 | @attr.s 11 | class AzureDataExplorerTarget(object): 12 | """ 13 | Generates Azure Data Explorer target JSON structure. 14 | 15 | Link to Azure Data Explorer datasource Grafana plugin: 16 | https://grafana.com/grafana/plugins/grafana-azure-data-explorer-datasource/ 17 | 18 | Azure Data Explorer docs on query language (KQL): 19 | https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/ 20 | 21 | :param database: Database to execute query on 22 | :param query: Query in Kusto Query Language (KQL) 23 | :param resultFormat: Output format of the query result 24 | :param alias: legend alias 25 | :param refId: target reference id 26 | """ 27 | 28 | database = attr.ib(default="") 29 | query = attr.ib(default="") 30 | resultFormat = attr.ib(default=TIME_SERIES_RESULT_FORMAT) 31 | alias = attr.ib(default="") 32 | refId = attr.ib(default="") 33 | 34 | def to_json_data(self): 35 | return { 36 | 'database': self.database, 37 | 'query': self.query, 38 | 'resultFormat': self.resultFormat, 39 | 'alias': self.alias, 40 | 'refId': self.refId 41 | } 42 | -------------------------------------------------------------------------------- /grafanalib/influxdb.py: -------------------------------------------------------------------------------- 1 | """Helpers to create InfluxDB-specific Grafana queries.""" 2 | 3 | import attr 4 | 5 | TIME_SERIES_TARGET_FORMAT = 'time_series' 6 | 7 | 8 | @attr.s 9 | class InfluxDBTarget(object): 10 | """ 11 | Generates InfluxDB target JSON structure. 
12 | 
13 |     Grafana docs on using InfluxDB:
14 |     https://grafana.com/docs/features/datasources/influxdb/
15 |     InfluxDB docs on querying or reading data:
16 |     https://v2.docs.influxdata.com/v2.0/query-data/
17 | 
18 |     :param alias: legend alias
19 |     :param format: format of the query results (defaults to 'time_series')
20 |     :param datasource: name of the InfluxDB datasource (useful when one panel mixes several datasources)
21 |     :param measurement: the InfluxDB measurement to query
22 |     :param query: the InfluxDB query to run
23 |     :param rawQuery: whether the query was written in raw form rather than via the query editor
24 |     :param refId: target reference id
25 |     """
26 | 
27 |     alias = attr.ib(default="")
28 |     format = attr.ib(default=TIME_SERIES_TARGET_FORMAT)
29 |     datasource = attr.ib(default="")
30 |     measurement = attr.ib(default="")
31 |     query = attr.ib(default="")
32 |     rawQuery = attr.ib(default=True)
33 |     refId = attr.ib(default="")
34 | 
35 |     def to_json_data(self):
36 |         return {
37 |             'query': self.query,
38 |             'resultFormat': self.format,
39 |             'alias': self.alias,
40 |             'datasource': self.datasource,
41 |             'measurement': self.measurement,
42 |             'rawQuery': self.rawQuery,
43 |             'refId': self.refId
44 |         }
--------------------------------------------------------------------------------
/grafanalib/tests/test_examples.py:
--------------------------------------------------------------------------------
1 | '''Run examples.'''
2 | 
3 | from contextlib import redirect_stdout
4 | import glob
5 | import io
6 | import os
7 | 
8 | from grafanalib import _gen
9 | 
10 | 
11 | def test_examples():
12 |     '''Run examples in ./examples directory.'''
13 | 
14 |     # Run dashboard examples
15 |     examples_dir = os.path.join(os.path.dirname(__file__), 'examples')
16 |     dashboards = glob.glob('{}/*.dashboard.py'.format(examples_dir))
17 |     assert len(dashboards) == 2
18 | 
19 |     stdout = io.StringIO()
20 |     for example in dashboards:
21 |         with redirect_stdout(stdout):
22 |             ret = _gen.generate_dashboard([example])
23 |             assert ret == 0
24 |     assert stdout.getvalue() != ''
25 | 
26 |     # Run alertgroup example
27 |     alerts = glob.glob('{}/*.alertgroup.py'.format(examples_dir))
28 |     assert len(alerts) == 2
29 | 
30 |     stdout = io.StringIO()
31 |     for example in alerts:
32 |         with redirect_stdout(stdout):
33 |             ret = _gen.generate_alertgroup([example])
34 |             assert ret == 0
35 |     assert stdout.getvalue() != ''
36 | 
37 |     # Run file based provisioning of alerts example
38 |     alerts = glob.glob('{}/*.alertfilebasedprovisioning.py'.format(examples_dir))
39 |     assert len(alerts) == 1
40 | 
41 |     stdout = io.StringIO()
42 |     for example in alerts:
43 |         with redirect_stdout(stdout):
44 |             ret = _gen.generate_alertgroup([example])
45 |             assert ret == 0
46 |     assert stdout.getvalue() != ''
--------------------------------------------------------------------------------
/grafanalib/tests/examples/example.dashboard.py:
--------------------------------------------------------------------------------
1 | 
2 | from grafanalib.core import (
3 |     Dashboard, TimeSeries, GaugePanel,
4 |     Target, GridPos,
5 |     OPS_FORMAT
6 | )
7 | 
8 | dashboard = Dashboard(
9 |     title="Python generated example dashboard",
10 |     description="Example dashboard using the Random Walk and default Prometheus datasource",
11 |     tags=[
12 |         'example'
13 |     ],
14 |     timezone="browser",
15 |     panels=[
16 |         TimeSeries(
17 |             title="Random Walk",
18 |             dataSource='default',
19 |             targets=[
20 |                 Target(
21 |                     datasource='grafana',
22 |                     expr='example',
23 |                 ),
24 |             ],
25 |             gridPos=GridPos(h=8, w=16, x=0, y=0),
26 |         ),
27 |         GaugePanel(
28 |             title="Random Walk",
29 |             dataSource='default',
30 |             targets=[
31 |                 Target(
32 |                     datasource='grafana',
33 |                     expr='example',
34 |                 ),
35 |             ],
36 |             gridPos=GridPos(h=4, w=4, x=17, y=0),
37 |         ),
38 |         TimeSeries(
39 |             title="Prometheus http requests",
40 |             dataSource='prometheus',
41 |             targets=[
42 |                 Target(
43 |                     expr='rate(prometheus_http_requests_total[5m])',
44 |                     legendFormat="{{ handler }}",
45 |                     refId='A',
46 |                 ),
47 |             ],
48 |             unit=OPS_FORMAT,
49 |             gridPos=GridPos(h=8, w=16, x=0, y=10),
50 |         ),
51 |     ],
52 | ).auto_panel_ids()
53 | 
--------------------------------------------------------------------------------
/grafanalib/tests/examples/upload_grafana_dashboard.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | show_help_info () {
4 |     echo -e "\n\tERROR: $1"
5 | 
6 |     cat <<HELPINFO
18 | 
19 | Example:
20 | 
21 | ./upload_grafana_dashboard.sh dash.json
22 | 
23 | HELPINFO
24 | }
25 | 
26 | function msg () { echo -e "$*"; }
27 | function bail () { msg "\nError: ${1:-Unknown Error}\n"; exit ${2:-1}; }
28 | 
29 | # -------------------------------------------------------------------------
30 | if [ -z "$1" ];then
31 |     show_help_info "No dashboard parameter received"
32 |     exit 1
33 | fi
34 | 
35 | GRAFANA_API_KEY=${GRAFANA_API_KEY:-Unset}
36 | if [[ $GRAFANA_API_KEY == Unset ]]; then
37 |     echo -e "\\nError: GRAFANA_API_KEY environment variable not defined.\\n"
38 |     exit 1
39 | fi
40 | GRAFANA_SERVER=${GRAFANA_SERVER:-Unset}
41 | if [[ $GRAFANA_SERVER == Unset ]]; then
42 |     echo -e "\\nError: GRAFANA_SERVER environment variable not defined.\\n"
43 |     exit 1
44 | fi
45 | logfile="grafana_upload.log"
46 | 
47 | # Get path/file param
48 | DASHBOARD=$1
49 | 
50 | # Pull through jq to validate the JSON (a copy is appended to the log)
51 | payload="$(jq . "${DASHBOARD}" | tee -a "$logfile")"
52 | 
53 | # Upload the JSON to Grafana
54 | curl -X POST \
55 |     -H 'Content-Type: application/json' \
56 |     -d "${payload}" \
57 |     "http://api_key:$GRAFANA_API_KEY@$GRAFANA_SERVER/api/dashboards/db" -w "\n" | tee -a "$logfile"
--------------------------------------------------------------------------------
/grafanalib/tests/test_zabbix.py:
--------------------------------------------------------------------------------
1 | """Tests for Zabbix Datasource"""
2 | 
3 | import grafanalib.core as G
4 | import grafanalib.zabbix as Z
5 | from grafanalib import _gen
6 | 
7 | import sys
8 | if sys.version_info[0] < 3:
9 |     from io import BytesIO as StringIO
10 | else:
11 |     from io import StringIO
12 | 
13 | 
14 | def test_serialization_zabbix_target():
15 |     """Serializing a graph doesn't explode."""
16 |     graph = G.Graph(
17 |         title="CPU Usage",
18 |         dataSource="Zabbix data source",
19 |         targets=[
20 |             Z.zabbixMetricTarget(
21 |                 group="Zabbix Group",
22 |                 host="Zabbix Host",
23 |                 application="CPU",
24 |                 item="/CPU (load)/",
25 |                 functions=[
26 |                     Z.ZabbixSetAliasFunction("View alias"),
27 |                 ]),
28 |         ],
29 |         id=1,
30 |         yAxes=G.YAxes(
31 |             G.YAxis(format=G.SHORT_FORMAT, label="CPU seconds / second"),
32 |             G.YAxis(format=G.SHORT_FORMAT),
33 |         ),
34 |     )
35 |     stream = StringIO()
36 |     _gen.write_dashboard(graph, stream)
37 |     assert stream.getvalue() != ''
38 | 
39 | 
40 | def test_serialization_zabbix_trigger_panel():
41 |     """Serializing a graph doesn't explode."""
42 |     graph = Z.ZabbixTriggersPanel(
43 |         id=1,
44 |         title="Zabbix Triggers",
45 |         dataSource="Zabbix data source",
46 |         triggers=Z.ZabbixTrigger(
47 |             group='Zabbix Group',
48 |             application="",
49 |             trigger="/trigger.regexp/",
50 |             host="/zabbix.host/"))
51 |     stream = StringIO()
52 |     _gen.write_dashboard(graph, stream)
53 |     assert stream.getvalue() != ''
54 | 
--------------------------------------------------------------------------------
/grafanalib/tests/examples/example.upload-dashboard.py:
--------------------------------------------------------------------------------
1 | from grafanalib.core import Dashboard
2 | from grafanalib._gen import DashboardEncoder
3 | import json
4 | import requests
5 | from os import getenv
6 | 
7 | 
8 | def get_dashboard_json(dashboard, overwrite=False, message="Updated by grafanalib"):
9 |     '''
10 |     get_dashboard_json generates JSON from a grafanalib Dashboard object
11 | 
12 |     :param dashboard - Dashboard() created via grafanalib
13 |     '''
14 | 
15 |     # grafanalib generates JSON which needs to be wrapped in a "dashboard" root element
16 |     return json.dumps(
17 |         {
18 |             "dashboard": dashboard.to_json_data(),
19 |             "overwrite": overwrite,
20 |             "message": message
21 |         }, sort_keys=True, indent=2, cls=DashboardEncoder)
22 | 
23 | 
24 | def upload_to_grafana(json, server, api_key, verify=True):
25 |     '''
26 |     upload_to_grafana tries to upload the dashboard to Grafana and prints the response
27 | 
28 |     :param json - dashboard json generated by grafanalib
29 |     :param server - grafana server name
30 |     :param api_key - grafana api key with read and write privileges
31 |     '''
32 | 
33 |     headers = {'Authorization': f"Bearer {api_key}", 'Content-Type': 'application/json'}
34 |     r = requests.post(f"https://{server}/api/dashboards/db", data=json, headers=headers, verify=verify)
35 |     # TODO: add error handling
36 |     print(f"{r.status_code} - {r.content}")
37 | 
38 | 
39 | grafana_api_key = getenv("GRAFANA_API_KEY")
40 | grafana_server = getenv("GRAFANA_SERVER")
41 | 
42 | my_dashboard = Dashboard(title="My awesome dashboard", uid='abifsd')
43 | my_dashboard_json = get_dashboard_json(my_dashboard, overwrite=True)
44 | upload_to_grafana(my_dashboard_json, grafana_server, grafana_api_key)
--------------------------------------------------------------------------------
/docs/api/grafanalib.rst:
--------------------------------------------------------------------------------
1 | grafanalib package
2 | ==================
3 | 
4 | Submodules
5 | ----------
6 | 
7 | grafanalib.cloudwatch module
8 | ----------------------------
9 | 
10 | .. automodule:: grafanalib.cloudwatch
11 |     :members:
12 |     :undoc-members:
13 |     :show-inheritance:
14 | 
15 | grafanalib.core module
16 | ----------------------
17 | 
18 | .. automodule:: grafanalib.core
19 |     :members:
20 |     :undoc-members:
21 |     :show-inheritance:
22 | 
23 | grafanalib.elasticsearch module
24 | -------------------------------
25 | 
26 | .. automodule:: grafanalib.elasticsearch
27 |     :members:
28 |     :undoc-members:
29 |     :show-inheritance:
30 | 
31 | grafanalib.formatunits module
32 | -----------------------------
33 | 
34 | .. automodule:: grafanalib.formatunits
35 |     :members:
36 |     :undoc-members:
37 |     :show-inheritance:
38 | 
39 | grafanalib.influxdb module
40 | --------------------------
41 | 
42 | .. automodule:: grafanalib.influxdb
43 |     :members:
44 |     :undoc-members:
45 |     :show-inheritance:
46 | 
47 | grafanalib.opentsdb module
48 | --------------------------
49 | 
50 | .. automodule:: grafanalib.opentsdb
51 |     :members:
52 |     :undoc-members:
53 |     :show-inheritance:
54 | 
55 | grafanalib.prometheus module
56 | ----------------------------
57 | 
58 | .. automodule:: grafanalib.prometheus
59 |     :members:
60 |     :undoc-members:
61 |     :show-inheritance:
62 | 
63 | grafanalib.validators module
64 | ----------------------------
65 | 
66 | ..
automodule:: grafanalib.validators 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | grafanalib.weave module 72 | ----------------------- 73 | 74 | .. automodule:: grafanalib.weave 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | grafanalib.zabbix module 80 | ------------------------ 81 | 82 | .. automodule:: grafanalib.zabbix 83 | :members: 84 | :undoc-members: 85 | :show-inheritance: 86 | 87 | Module contents 88 | --------------- 89 | 90 | .. automodule:: grafanalib 91 | :members: 92 | :undoc-members: 93 | :show-inheritance: 94 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | 14 | import os 15 | import sys 16 | sys.path.insert(0, os.path.abspath('.')) 17 | 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = 'grafanalib' 22 | copyright = '2021, grafanalib community' 23 | author = 'grafanalib community' 24 | 25 | 26 | # -- General configuration --------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'sphinx.ext.autodoc', 33 | ] 34 | 35 | # Add any paths that contain templates here, relative to this directory. 36 | templates_path = ['_templates'] 37 | 38 | # List of patterns, relative to source directory, that match files and 39 | # directories to ignore when looking for source files. 40 | # This pattern also affects html_static_path and html_extra_path. 41 | exclude_patterns = [] 42 | 43 | 44 | # -- Options for HTML output ------------------------------------------------- 45 | 46 | # The theme to use for HTML and HTML Help pages. See the documentation for 47 | # a list of builtin themes. 48 | # 49 | html_theme = 'sphinx_rtd_theme' 50 | 51 | # Add any paths that contain custom static files (such as style sheets) here, 52 | # relative to this directory. They are copied after the builtin static files, 53 | # so a file named "default.css" will overwrite the builtin "default.css". 
54 | html_static_path = ['_static'] 55 | -------------------------------------------------------------------------------- /grafanalib/tests/test_validators.py: -------------------------------------------------------------------------------- 1 | import attr 2 | import pytest 3 | 4 | import grafanalib.validators as validators 5 | 6 | 7 | def create_attribute(): 8 | return attr.Attribute( 9 | name='x', 10 | default=None, 11 | validator=None, 12 | repr=True, 13 | cmp=None, 14 | eq=True, 15 | order=False, 16 | hash=True, 17 | init=True, 18 | inherited=False) 19 | 20 | 21 | def test_is_in(): 22 | item = 1 23 | choices = (1, 2, 3) 24 | val = validators.is_in(choices) 25 | res = val(None, create_attribute(), item) 26 | assert res is None 27 | 28 | 29 | def test_is_in_raises(): 30 | item = 0 31 | choices = (1, 2, 3) 32 | val = validators.is_in(choices) 33 | with pytest.raises(ValueError): 34 | val(None, create_attribute(), item) 35 | 36 | 37 | @pytest.mark.parametrize("item", ( 38 | '24h', '7d', '1M', '+24h', '-24h', '60s', '2m')) 39 | def test_is_interval(item): 40 | assert validators.is_interval(None, create_attribute(), item) is None 41 | 42 | 43 | def test_is_interval_raises(): 44 | with pytest.raises(ValueError): 45 | validators.is_interval(None, create_attribute(), '1') 46 | 47 | 48 | @pytest.mark.parametrize("color", ( 49 | "#111111", "#ffffff")) 50 | def test_is_color_code(color): 51 | res = validators.is_color_code(None, create_attribute(), color) 52 | assert res is None 53 | 54 | 55 | @pytest.mark.parametrize("color", ( 56 | "111111", "#gggggg", "#1111111", "#11111")) 57 | def test_is_color_code_raises(color): 58 | with pytest.raises(ValueError): 59 | validators.is_color_code(None, create_attribute(), color) 60 | 61 | 62 | def test_list_of(): 63 | etype = int 64 | check = (1, 2, 3) 65 | val = validators.is_list_of(etype) 66 | res = val(None, create_attribute(), check) 67 | assert res is None 68 | 69 | 70 | def test_list_of_raises(): 71 | etype = int 72 | check = ("a") 73 | with pytest.raises(ValueError): 74 | val = validators.is_list_of(etype) 75 | val(None, create_attribute(), check) 76 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup, find_packages 3 | 4 | 5 | def local_file(name): 6 | return os.path.relpath(os.path.join(os.path.dirname(__file__), name)) 7 | 8 | 9 | README = local_file('README.rst') 10 | 11 | 12 | setup( 13 | name='grafanalib', 14 | 15 | # Versions should comply with PEP440. 
For a discussion on single-sourcing
16 |     # the version across setup.py and the project code, see
17 |     # https://packaging.python.org/en/latest/guides/single-sourcing-package-version/
18 |     version='0.7.1',
19 |     description='Library for building Grafana dashboards',
20 |     long_description=open(README).read(),
21 |     url='https://github.com/weaveworks/grafanalib',
22 |     project_urls={
23 |         "Documentation": "https://grafanalib.readthedocs.io",
24 |         "Source": "https://github.com/weaveworks/grafanalib",
25 |     },
26 |     author='Weaveworks',
27 |     author_email='help+grafanalib@weave.works',
28 |     license='Apache',
29 |     packages=find_packages(),
30 |     classifiers=[
31 |         'Development Status :: 3 - Alpha',
32 |         'Environment :: Console',
33 |         'Intended Audience :: Developers',
34 |         'Intended Audience :: System Administrators',
35 |         'License :: OSI Approved :: Apache Software License',
36 |         'Programming Language :: Python :: 3.8',
37 |         'Programming Language :: Python :: 3.9',
38 |         'Programming Language :: Python :: 3.10',
39 |         'Programming Language :: Python :: 3.11',
40 |         'Topic :: System :: Monitoring',
41 |     ],
42 |     install_requires=[
43 |         'attrs>=15.2.0',
44 |     ],
45 |     extras_require={
46 |         'dev': [
47 |             'flake8',
48 |             'pytest',
49 |         ],
50 |     },
51 |     entry_points={
52 |         'console_scripts': [
53 |             'generate-dashboard=grafanalib._gen:generate_dashboard_script',
54 |             'generate-dashboards=grafanalib._gen:generate_dashboards_script',
55 |             'generate-alertgroup=grafanalib._gen:generate_alertgroup_script',
56 |             'generate-alertgroups=grafanalib._gen:generate_alertgroups_script'
57 |         ],
58 |     },
59 | )
--------------------------------------------------------------------------------
/grafanalib/weave.py:
--------------------------------------------------------------------------------
1 | """Weave-specific dashboard configuration.
2 | 
3 | Unlike 'core', which has logic for building generic Grafana dashboards, this
4 | has our Weave-specific preferences.
5 | """
6 | 
7 | import attr
8 | 
9 | import grafanalib.core as G
10 | from grafanalib import prometheus
11 | 
12 | 
13 | YELLOW = '#EAB839'
14 | GREEN = '#7EB26D'
15 | BLUE = '#6ED0E0'
16 | ORANGE = '#EF843C'
17 | RED = '#E24D42'
18 | 
19 | ALIAS_COLORS = {
20 |     '1xx': YELLOW,
21 |     '2xx': GREEN,
22 |     '3xx': BLUE,
23 |     '4xx': ORANGE,
24 |     '5xx': RED,
25 |     'success': GREEN,
26 |     'error': RED,
27 | }
28 | 
29 | 
30 | def QPSGraph(data_source, title, expressions, **kwargs):
31 |     """Create a graph of QPS, broken up by response code.
32 | 
33 |     Data is drawn from Prometheus.
34 | 
35 |     :param title: Title of the graph.
36 |     :param expressions: List of Prometheus expressions. Must be 5 or 7.
37 |     :param kwargs: Passed on to Graph.
38 | """ 39 | if len(expressions) != 5 and len(expressions) != 7: 40 | raise ValueError('Expected 5 or 7 expressions, got {}: {}'.format( 41 | len(expressions), expressions)) 42 | legends = sorted(ALIAS_COLORS.keys()) 43 | exprs = zip(legends, expressions) 44 | return stacked(prometheus.PromGraph( 45 | data_source=data_source, 46 | title=title, 47 | expressions=exprs, 48 | aliasColors=ALIAS_COLORS, 49 | yAxes=G.YAxes( 50 | G.YAxis(format=G.OPS_FORMAT), 51 | G.YAxis(format=G.SHORT_FORMAT), 52 | ), 53 | **kwargs 54 | )) 55 | 56 | 57 | def stacked(graph): 58 | """Turn a graph into a stacked graph.""" 59 | return attr.evolve( 60 | graph, 61 | lineWidth=0, 62 | nullPointMode=G.NULL_AS_ZERO, 63 | stack=True, 64 | fill=10, 65 | tooltip=G.Tooltip( 66 | sort=G.SORT_DESC, 67 | valueType=G.INDIVIDUAL, 68 | ), 69 | ) 70 | 71 | 72 | def PercentUnitAxis(label=None): 73 | """A Y axis that shows a percentage based on a unit value.""" 74 | return G.YAxis( 75 | format=G.PERCENT_UNIT_FORMAT, 76 | label=label, 77 | logBase=1, 78 | max=1, 79 | min=0, 80 | ) 81 | -------------------------------------------------------------------------------- /grafanalib/tests/test_cloudwatch.py: -------------------------------------------------------------------------------- 1 | """Tests for Cloudwatch Datasource""" 2 | 3 | import grafanalib.core as G 4 | import grafanalib.cloudwatch as C 5 | from grafanalib import _gen 6 | from io import StringIO 7 | 8 | 9 | def test_serialization_cloudwatch_metrics_target(): 10 | """Serializing a graph doesn't explode.""" 11 | graph = G.Graph( 12 | title="Lambda Duration", 13 | dataSource="Cloudwatch data source", 14 | targets=[ 15 | C.CloudwatchMetricsTarget(), 16 | ], 17 | id=1, 18 | yAxes=G.YAxes( 19 | G.YAxis(format=G.SHORT_FORMAT, label="ms"), 20 | G.YAxis(format=G.SHORT_FORMAT), 21 | ), 22 | ) 23 | stream = StringIO() 24 | _gen.write_dashboard(graph, stream) 25 | assert stream.getvalue() != '' 26 | 27 | 28 | def test_serialization_cloudwatch_logs_insights_target(): 29 | """Serializing a graph doesn't explode.""" 30 | graph = G.Logs( 31 | title="Lambda Duration", 32 | dataSource="Cloudwatch data source", 33 | targets=[ 34 | C.CloudwatchLogsInsightsTarget(), 35 | ], 36 | id=1, 37 | wrapLogMessages=True 38 | ) 39 | stream = StringIO() 40 | _gen.write_dashboard(graph, stream) 41 | assert stream.getvalue() != '' 42 | 43 | 44 | def test_cloudwatch_logs_insights_target(): 45 | """Test Cloudwatch Logs Insights target""" 46 | cloudwatch_logs_insights_expression = "fields @timestamp, @xrayTraceId, @message | filter @message like /^(?!.*(START|END|REPORT|LOGS|EXTENSION)).*$/ | sort @timestamp desc" 47 | ref_id = "A" 48 | log_group_names = ["/aws/lambda/foo", "/aws/lambda/bar"] 49 | 50 | target = C.CloudwatchLogsInsightsTarget( 51 | expression=cloudwatch_logs_insights_expression, 52 | logGroupNames=log_group_names, 53 | refId=ref_id 54 | ) 55 | 56 | data = target.to_json_data() 57 | 58 | assert data["expression"] == cloudwatch_logs_insights_expression 59 | assert data["id"] == "" 60 | assert data["logGroupNames"] == log_group_names 61 | assert data["namespace"] == "" 62 | assert data["queryMode"] == "Logs" 63 | assert data["refId"] == ref_id 64 | assert data["region"] == "default" 65 | assert data["statsGroups"] == [] 66 | assert data["hide"] is False 67 | -------------------------------------------------------------------------------- /grafanalib/tests/examples/example.upload-alerts.py: -------------------------------------------------------------------------------- 1 | from grafanalib.core 
import AlertGroup
2 | from grafanalib._gen import DashboardEncoder, loader
3 | import json
4 | import requests
5 | from os import getenv
6 | 
7 | 
8 | def get_alert_json(alert: AlertGroup):
9 |     '''
10 |     get_alert_json generates JSON from a grafanalib AlertGroup object
11 | 
12 |     :param alert - AlertGroup created via grafanalib
13 |     '''
14 | 
15 |     return json.dumps(alert.to_json_data(), sort_keys=True, indent=4, cls=DashboardEncoder)
16 | 
17 | 
18 | def upload_to_grafana(alertjson, folder, server, api_key, session_cookie, verify=True):
19 |     '''
20 |     upload_to_grafana tries to upload the AlertGroup to Grafana and prints the response
21 |     WARNING: This will first delete all alerts in the AlertGroup before replacing them with the provided AlertGroup.
22 | 
23 |     :param alertjson - AlertGroup json generated by grafanalib
24 |     :param folder - Folder to upload the AlertGroup into
25 |     :param server - grafana server name
26 |     :param api_key - grafana api key with read and write privileges
27 |     '''
28 |     groupName = json.loads(alertjson)['name']
29 | 
30 |     headers = {}
31 |     if api_key:
32 |         print("using bearer auth")
33 |         headers['Authorization'] = f"Bearer {api_key}"
34 | 
35 |     if session_cookie:
36 |         print("using session cookie")
37 |         headers['Cookie'] = session_cookie
38 | 
39 |     print(f"deleting AlertGroup {groupName} in folder {folder}")
40 |     r = requests.delete(f"https://{server}/api/ruler/grafana/api/v1/rules/{folder}/{groupName}", headers=headers, verify=verify)
41 |     print(f"{r.status_code} - {r.content}")
42 | 
43 |     headers['Content-Type'] = 'application/json'
44 | 
45 |     print(f"ensuring folder {folder} exists")
46 |     r = requests.post(f"https://{server}/api/folders", json={"title": folder}, headers=headers, verify=verify)
47 |     print(f"{r.status_code} - {r.content}")
48 | 
49 |     print(f"uploading AlertGroup {groupName} to folder {folder}")
50 |     r = requests.post(f"https://{server}/api/ruler/grafana/api/v1/rules/{folder}", data=alertjson, headers=headers, verify=verify)
51 |     # TODO: add error handling
52 |     print(f"{r.status_code} - {r.content}")
53 | 
54 | 
55 | grafana_api_key = getenv("GRAFANA_API_KEY")
56 | grafana_server = getenv("GRAFANA_SERVER")
57 | grafana_cookie = getenv("GRAFANA_COOKIE")
58 | 
59 | # Generate an alert group from the v8 example shipped with the tests
60 | my_alertgroup_json = get_alert_json(loader("./grafanalib/tests/examples/example.alertsv8.alertgroup.py"))
61 | upload_to_grafana(my_alertgroup_json, "testfolder", grafana_server, grafana_api_key, grafana_cookie)
--------------------------------------------------------------------------------
/grafanalib/validators.py:
--------------------------------------------------------------------------------
1 | import re
2 | import attr
3 | 
4 | 
5 | @attr.attributes(repr=False, slots=True)
6 | class _IsInValidator(object):
7 |     choices = attr.attr()
8 | 
9 |     def __call__(self, inst, attr, value):
10 |         if value not in self.choices:
11 |             raise ValueError("{attr} should be one of {choice}".format(
12 |                 attr=attr.name, choice=self.choices))
13 | 
14 |     def __repr__(self):
15 |         return (
16 |             "<is_in validator with choices {choice!r}>"
17 |             .format(choice=self.choices)
18 |         )
19 | 
20 | 
21 | def is_in(choices):
22 |     """
23 |     A validator that raises a :exc:`ValueError` if the attribute value is not
24 |     in a provided list.
25 | 
26 |     :param choices: List of valid choices
27 |     """
28 |     return _IsInValidator(choices)
29 | 
30 | 
31 | def is_interval(instance, attribute, value):
32 |     """
33 |     A validator that raises a :exc:`ValueError` if the attribute value does
34 |     not match the interval regular expression.
35 |     """
36 |     if not re.match(r"^[+-]?\d*[smhdMY]$", value):
37 |         raise ValueError(
38 |             "valid interval should be a string "
39 |             r"matching an expression: ^[+-]?\d*[smhdMY]$. "
40 |             "Examples: 24h 7d 1M +24h -24h")
41 | 
42 | 
43 | def is_color_code(instance, attribute, value):
44 |     """
45 |     A validator that raises a :exc:`ValueError` if the attribute value is not
46 |     a valid color code.
47 |     A value is considered a valid color code if it starts with a # char
48 |     followed by six hexadecimal digits.
49 |     """
50 |     err = "{attr} should be a valid color code (e.g. #37872D)".format(attr=attribute.name)
51 |     if not value.startswith("#"):
52 |         raise ValueError(err)
53 |     if len(value) != 7:
54 |         raise ValueError(err)
55 |     try:
56 |         int(value[1:], 16)
57 |     except ValueError:
58 |         raise ValueError(err)
59 | 
60 | 
61 | @attr.attributes(repr=False, slots=True)
62 | class _ListOfValidator(object):
63 |     etype = attr.attr()
64 | 
65 |     def __call__(self, inst, attr, value):
66 |         if False in set(map(lambda el: isinstance(el, self.etype), value)):
67 |             raise ValueError("{attr} should be list of {etype}".format(
68 |                 attr=attr.name, etype=self.etype))
69 | 
70 |     def __repr__(self):
71 |         return (
72 |             "<is_list_of validator for type {etype!r}>"
73 |             .format(etype=self.etype)
74 |         )
75 | 
76 | 
77 | def is_list_of(etype):
78 |     """
79 |     A validator that raises a :exc:`ValueError` if any element of the
80 |     attribute value is not of the expected type.
81 | 
82 |     :param etype: expected type of each list element
83 |     """
84 |     return _ListOfValidator(etype)
--------------------------------------------------------------------------------
/grafanalib/tests/test_azuremonitor.py:
--------------------------------------------------------------------------------
1 | """Tests for Azure Monitor Datasource"""
2 | 
3 | import grafanalib.core as G
4 | import grafanalib.azuremonitor as A
5 | from grafanalib import _gen
6 | from io import StringIO
7 | 
8 | 
9 | def test_serialization_azure_metrics_target():
10 |     """Serializing a graph doesn't explode."""
11 |     graph = G.TimeSeries(
12 |         title="Test Azure Monitor",
13 |         dataSource="default",
14 |         targets=[
15 |             A.AzureMonitorMetricsTarget(
16 |                 aggregation="Total",
17 |                 metricDefinition="Microsoft.Web/sites",
18 |                 metricName="Requests",
19 |                 metricNamespace="Microsoft.Web/sites",
20 |                 resourceGroup="test-grafana",
21 |                 resourceName="test-grafana",
22 |                 subscription="3a680d1a-9310-4667-9e6a-9fcd2ecddd86",
23 |                 refId="Requests",
24 |             ),
25 |         ],
26 |     )
27 |     stream = StringIO()
28 |     _gen.write_dashboard(graph, stream)
29 |     assert stream.getvalue() != ""
30 | 
31 | 
32 | def test_serialization_azure_logs_target():
33 |     """Serializing a graph doesn't explode."""
34 | 
35 |     logs_query = """AzureMetrics
36 | | where TimeGenerated > ago(30d)
37 | | extend tail_latency = Maximum / Average
38 | | where MetricName == "Http5xx" or (MetricName == "HttpResponseTime" and Average >= 3) or (MetricName == "HttpResponseTime" and tail_latency >= 10 and Average >= 0.5)
39 | | summarize dcount(TimeGenerated) by Resource
40 | | order by dcount_TimeGenerated"""
41 | 
42 |     graph = G.GaugePanel(
43 |         title="Test Logs",
44 |         dataSource="default",
45 |         targets=[
46 |             A.AzureLogsTarget(
47 |                 query=logs_query,
48 |                 resource="/subscriptions/3a680d1a-9310-4667-9e6a-9fcd2ecddd86",
49 |                 subscription="3a680d1a-9310-4667-9e6a-9fcd2ecddd86",
50 |                 refId="Bad Minutes",
51 |             ),
52 |         ],
53 |     )
54 |     stream = StringIO()
55 |     _gen.write_dashboard(graph, stream)
56 |     assert stream.getvalue() != ""
57 | 
58 | 
59 | def test_serialization_azure_graph_target():
60 |     """Serializing a graph doesn't explode."""
61 | 
62 |
graph_query = """Resources 63 | | project name, type, location 64 | | order by name asc""" 65 | 66 | graph = G.GaugePanel( 67 | title="Test Logs", 68 | dataSource="default", 69 | targets=[ 70 | A.AzureLogsTarget( 71 | query=graph_query, 72 | subscription="3a680d1a-9310-4667-9e6a-9fcd2ecddd86", 73 | refId="Resources", 74 | ), 75 | ], 76 | ) 77 | stream = StringIO() 78 | _gen.write_dashboard(graph, stream) 79 | assert stream.getvalue() != "" 80 | -------------------------------------------------------------------------------- /grafanalib/tests/examples/example-elasticsearch.dashboard.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is an exemplary Grafana board that uses an elasticsearch datasource. 3 | 4 | The graph shows the following metrics for HTTP requests to the URL path "/login": 5 | - number of successful requests resulted in a HTTP response code between 200-300 6 | - number of failed requests resulted in a HTTP response code between 400-500, 7 | - Max. response time per point of time of HTTP requests 8 | """ 9 | 10 | from grafanalib.core import ( 11 | Dashboard, Graph, Legend, NULL_AS_NULL, Row, SECONDS_FORMAT, 12 | SHORT_FORMAT, YAxes, YAxis 13 | ) 14 | 15 | from grafanalib.elasticsearch import ( 16 | DateHistogramGroupBy, ElasticsearchTarget, Filter, 17 | FiltersGroupBy, MaxMetricAgg 18 | ) 19 | 20 | suc_label = "Success (200-300)" 21 | clt_err_label = "Client Errors (400-500)" 22 | resptime_label = "Max response time" 23 | 24 | filters = [ 25 | Filter(query="response: [200 TO 300]", label=suc_label), 26 | Filter(query="response: [400 TO 500]", label=clt_err_label), 27 | ] 28 | 29 | tgts = [ 30 | ElasticsearchTarget( 31 | query='request: "/login"', 32 | bucketAggs=[ 33 | FiltersGroupBy(filters=filters), 34 | DateHistogramGroupBy(interval="10m")], 35 | ).auto_bucket_agg_ids(), 36 | ElasticsearchTarget( 37 | query='request: "/login"', 38 | metricAggs=[MaxMetricAgg(field="resptime")], 39 | alias=resptime_label, 40 | ).auto_bucket_agg_ids(), 41 | ] 42 | 43 | g = Graph( 44 | title="login requests", 45 | dataSource="elasticsearch", 46 | targets=tgts, 47 | lines=False, 48 | legend=Legend(alignAsTable=True, rightSide=True, 49 | total=True, current=True, max=True), 50 | lineWidth=1, 51 | nullPointMode=NULL_AS_NULL, 52 | seriesOverrides=[ 53 | { 54 | "alias": suc_label, 55 | "bars": True, 56 | "lines": False, 57 | "stack": "A", 58 | "yaxis": 1, 59 | "color": "#629E51" 60 | }, 61 | { 62 | "alias": clt_err_label, 63 | "bars": True, 64 | "lines": False, 65 | "stack": "A", 66 | "yaxis": 1, 67 | "color": "#E5AC0E" 68 | }, 69 | { 70 | "alias": resptime_label, 71 | "lines": True, 72 | "fill": 0, 73 | "nullPointMode": "connected", 74 | "steppedLine": True, 75 | "yaxis": 2, 76 | "color": "#447EBC" 77 | }, 78 | ], 79 | yAxes=YAxes( 80 | YAxis( 81 | label="Count", 82 | format=SHORT_FORMAT, 83 | decimals=0 84 | ), 85 | YAxis( 86 | label="Response Time", 87 | format=SECONDS_FORMAT, 88 | decimals=2 89 | ), 90 | ), 91 | transparent=True, 92 | span=12, 93 | ) 94 | 95 | dashboard = Dashboard(title="HTTP dashboard", rows=[Row(panels=[g])]) 96 | -------------------------------------------------------------------------------- /docs/CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | ========================== 2 | Contributing to grafanalib 3 | ========================== 4 | 5 | Thank you for contributing to grafanalib! 
6 | Here are some notes to help you get your PR merged as quickly as possible,
7 | and to help us remember how to review things properly.
8 | 
9 | If something comes up during a code review or on a ticket that you think should be part of these guidelines, please say so, or even file a PR to make this doc better!
10 | 
11 | Code of conduct
12 | ===============
13 | 
14 | We have a :doc:`code of conduct <../CODE_OF_CONDUCT>`, and we enforce it. Please take a look!
15 | 
16 | Coding guidelines
17 | =================
18 | 
19 | * Python 3 all the way
20 | * Must be `flake8`_ compliant
21 | * We use `attrs`_ everywhere
22 | * Avoid inheritance as much as possible
23 | * Avoid mutation as much as possible—keep things purely functional
24 | * Docstrings are great, let's have more of those
25 | * Link to official Grafana docs in comments as much as possible
26 | 
27 | Conventions
28 | -----------
29 | 
30 | * Classes are ``StudlyCaps``
31 | * Attributes are ``camelCased``
32 | * Methods are ``snake_cased``
33 | * Local variables are ``snake_cased``
34 | * We're kind of fussy about indentation:
35 |   4 spaces everywhere, follow the examples in `core.py`_ if you're uncertain
36 | * Triple double quotes `"""` for docstrings
37 | * Double quotes "" for human-readable messages or when a string is used for interpolation
38 | * Single quotes '' for symbol-like strings
39 | 
40 | Testing
41 | -------
42 | 
43 | Much of grafanalib is just simple data structures, so we aren't fastidious about test coverage.
44 | 
45 | However, tests are strongly encouraged for anything with non-trivial logic.
46 | Please try to use `hypothesis`_ for your tests.
47 | 
48 | .. code-block:: console
49 | 
50 |    $ make all
51 | 
52 | Gotchas
53 | -------
54 | 
55 | * Do **not** use mutable values as default values for attributes.
56 |   Mutable values include lists (e.g. ``default=[RED, GREEN]``) and other grafanalib objects (e.g. ``default=Annotations()``).
57 |   Instead, use `attr.Factory`_.
58 |   e.g. ``default=attr.Factory(Annotations)`` or ``default=attr.Factory(lambda: [RED, GREEN])``.
59 | 
60 | Submitting a PR
61 | ===============
62 | 
63 | * We are very grateful for all PRs, and deeply appreciate the work and effort involved!
64 | * We try to review PRs as quickly as possible, but it might take a couple of weeks to get around to reviewing your PR—sorry, we know that sucks
65 | * Please add an entry to the :doc:`CHANGELOG <../CHANGELOG>` in your PR
66 | * It helps a lot if the PR description provides some context on what you are trying to do and why you think it's a good idea
67 | * The smaller the PR, the more quickly we'll be able to review it
68 | 
69 | Filing a bug
70 | ============
71 | 
72 | * Please say what you saw, what you expected to see, and how someone else can reproduce the bug
73 | * If it comes with a test case, even better!
74 | 
75 | 
76 | .. _`flake8`: http://flake8.pycqa.org/en/latest/
77 | .. _`attrs`: http://www.attrs.org/en/stable/
78 | .. _`CHANGELOG`: ../CHANGELOG.rst
79 | .. _`attr.Factory`: http://www.attrs.org/en/stable/api.html#attr.Factory
80 | .. _`hypothesis`: http://hypothesis.works/
81 | .. _`core.py`: https://github.com/weaveworks/grafanalib/blob/main/grafanalib/core.py
82 | 
--------------------------------------------------------------------------------
/grafanalib/tests/examples/example.alertsv8.alertgroup.py:
--------------------------------------------------------------------------------
1 | """Example grafana 8.x+ Alert"""
2 | 
3 | 
4 | from grafanalib.core import (
5 |     AlertGroup,
6 |     AlertRulev8,
7 |     Target,
8 |     AlertCondition,
9 |     LowerThan,
10 |     OP_OR,
11 |     OP_AND,
12 |     RTYPE_LAST
13 | )
14 | 
15 | # An AlertGroup is one group contained in an alert folder.
16 | alertgroup = AlertGroup(
17 |     name="Production Alerts",
18 |     # Each AlertRule forms a separate alert.
19 |     rules=[
20 |         AlertRulev8(
21 |             # Each rule must have a unique title
22 |             title="Database is unresponsive",
23 |             # Several triggers can be used per alert; a trigger is a combination of a Target and its AlertCondition in a tuple.
24 |             triggers=[
25 |                 (
26 |                     # A target refId must be assigned, and exist only once per AlertRule.
27 |                     Target(
28 |                         expr='sum(kube_pod_container_status_ready{exported_pod=~"database-/*"})',
29 |                         # Set datasource to name of your datasource
30 |                         datasource="VictoriaMetrics",
31 |                         refId="A",
32 |                     ),
33 |                     AlertCondition(
34 |                         evaluator=LowerThan(1),
35 |                         # To have the alert fire when either of the triggers is met in the rule, set both AlertCondition operators to OP_OR.
36 |                         operator=OP_OR,
37 |                         reducerType=RTYPE_LAST
38 |                     )
39 |                 ),
40 |                 (
41 |                     Target(
42 |                         expr='sum by (app) (count_over_time({app="database"}[5m]))',
43 |                         # Set datasource to name of your datasource
44 |                         datasource="Loki",
45 |                         refId="B",
46 |                     ),
47 |                     AlertCondition(
48 |                         evaluator=LowerThan(1000),
49 |                         operator=OP_OR,
50 |                         reducerType=RTYPE_LAST
51 |                     )
52 |                 )
53 |             ],
54 |             annotations={
55 |                 "summary": "The database is down",
56 |                 "runbook_url": "runbook-for-this-scenario.com/foo",
57 |             },
58 |             labels={
59 |                 "environment": "prod",
60 |                 "slack": "prod-alerts",
61 |             },
62 |             evaluateInterval="1m",
63 |             evaluateFor="3m",
64 |         ),
65 | 
66 |         # Second alert
67 |         AlertRulev8(
68 |             title="Service API blackbox failure",
69 |             triggers=[
70 |                 (
71 |                     Target(
72 |                         expr='probe_success{instance="my-service.foo.com/ready"}',
73 |                         # Set datasource to name of your datasource
74 |                         datasource="VictoriaMetrics",
75 |                         refId="A",
76 |                     ),
77 |                     AlertCondition(
78 |                         evaluator=LowerThan(1),
79 |                         operator=OP_AND,
80 |                         reducerType=RTYPE_LAST,
81 |                     )
82 |                 )
83 |             ],
84 |             annotations={
85 |                 "summary": "Service API has been unavailable for 3 minutes",
86 |                 "runbook_url": "runbook-for-this-scenario.com/foo",
87 |             },
88 |             labels={
89 |                 "environment": "prod",
90 |                 "slack": "prod-alerts",
91 |             },
92 |             evaluateInterval="1m",
93 |             evaluateFor="3m",
94 |         )
95 |     ]
96 | )
97 | 
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ===============================
2 | Getting Started with grafanalib
3 | ===============================
4 | 
5 | .. image:: https://readthedocs.org/projects/grafanalib/badge/?version=main
6 |     :alt: Documentation Status
7 |     :scale: 100%
8 |     :target: https://grafanalib.readthedocs.io/en/main
9 | 
10 | Do you like `Grafana <https://grafana.com/>`_ but wish you could version your
11 | dashboard configuration? Do you find yourself repeating common patterns? If
12 | so, grafanalib is for you.
13 | 
14 | grafanalib lets you generate Grafana dashboards from simple Python scripts.
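A minimal sketch of such a script (the panel, metric, and datasource names
here are illustrative):

.. code-block:: python

    from grafanalib.core import Dashboard, Graph, Row, Target

    dashboard = Dashboard(
        title="Frontend",
        rows=[Row(panels=[
            Graph(
                title="QPS",
                dataSource="prometheus",
                targets=[
                    Target(expr='sum(rate(http_requests_total[5m]))', refId='A'),
                ],
            ),
        ])],
    ).auto_panel_ids()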
15 | 
16 | How it works
17 | ============
18 | 
19 | Take a look at `the examples directory
20 | <https://github.com/weaveworks/grafanalib/blob/main/grafanalib/tests/examples>`_,
21 | e.g. `this dashboard
22 | <https://github.com/weaveworks/grafanalib/blob/main/grafanalib/tests/examples/example.dashboard.py>`_
23 | will configure a dashboard with a single row, with one QPS graph broken down
24 | by status code and another latency graph showing median and 99th percentile
25 | latency.
26 | 
27 | There is a fair bit of repetition here, but once you figure out what
28 | works for your needs, you can factor that out.
29 | See `our Weave-specific customizations
30 | <https://github.com/weaveworks/grafanalib/blob/main/grafanalib/weave.py>`_
31 | for inspiration.
32 | 
33 | You can read the entire grafanalib documentation on `readthedocs.io
34 | <https://grafanalib.readthedocs.io/>`_.
35 | 
36 | Getting started
37 | ===============
38 | 
39 | grafanalib is just a Python package, so:
40 | 
41 | .. code-block:: console
42 | 
43 |    $ pip install grafanalib
44 | 
45 | 
46 | Generate the JSON dashboard like so:
47 | 
48 | .. code-block:: console
49 | 
50 |    $ curl -o example.dashboard.py https://raw.githubusercontent.com/weaveworks/grafanalib/main/grafanalib/tests/examples/example.dashboard.py
51 |    $ generate-dashboard -o frontend.json example.dashboard.py
52 | 
53 | 
54 | Support
55 | =======
56 | 
57 | This library is in its very early stages. We'll probably make changes that
58 | break backwards compatibility, although we'll try hard not to.
59 | 
60 | grafanalib works with Python 3.7 through 3.11.
61 | 
62 | Developing
63 | ==========
64 | If you're working on the project, and need to build from source, it's done as follows:
65 | 
66 | .. code-block:: console
67 | 
68 |    $ virtualenv .env
69 |    $ . ./.env/bin/activate
70 |    $ pip install -e .
71 | 
72 | Configuring Grafana Datasources
73 | ===============================
74 | 
75 | This repo used to contain a program ``gfdatasource`` for configuring
76 | Grafana data sources, but it has been retired since Grafana now has a
77 | built-in way to do it. See https://grafana.com/docs/administration/provisioning/#datasources
78 | 
79 | Community
80 | =========
81 | 
82 | We currently don't follow a roadmap for ``grafanalib`` and both `maintainers
83 | <https://github.com/weaveworks/grafanalib/blob/main/MAINTAINERS>`_ have recently
84 | become somewhat occupied otherwise.
85 | 
86 | We'd like you to join the ``grafanalib`` community! If you would like to
87 | help out maintaining ``grafanalib`` that would be great. It's a fairly laid-back
88 | and straightforward project. Please talk to us on Slack (see the links below).
89 | 
90 | We follow the `CNCF Code of Conduct <https://github.com/cncf/foundation/blob/main/code-of-conduct.md>`_.
91 | 
92 | Getting Help
93 | ------------
94 | 
95 | If you have any questions about, feedback for, or problems with ``grafanalib``:
96 | 
97 | - Read the documentation at https://grafanalib.readthedocs.io
98 | - Invite yourself to the `Weave Users Slack <https://slack.weave.works/>`_.
99 | - Ask a question on the `#grafanalib <https://weave-community.slack.com/archives/C9C9K6T4P>`_ slack channel.
100 | - `File an issue <https://github.com/weaveworks/grafanalib/issues/new>`_.
101 | 
102 | Your feedback is always welcome!
103 | 
--------------------------------------------------------------------------------
/grafanalib/tests/examples/table-example-dashboard.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | 
4 | """
5 | NAME:
6 |     table-example-dashboard.py
7 | 
8 | DESCRIPTION:
9 |     This script creates Grafana dashboards using Grafanalib, and a static table
10 |     which defines metrics/dashboards.
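    Each entry in the metrics table is either a section header ('section'),
    a row marker ('row'), or a panel definition (a title plus one or more
    Prometheus expressions).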
11 | 
12 |     The resulting dashboard can be easily uploaded to Grafana with the associated script:
13 | 
14 |         upload_grafana_dashboard.sh
15 | 
16 | USAGE:
17 |     Create and upload the dashboard:
18 | 
19 |         ./table-example-dashboard.py --title "My python dashboard" > dash.json
20 |         ./upload_grafana_dashboard.sh dash.json
21 | 
22 | """
23 | 
24 | import textwrap
25 | import argparse
26 | import sys
27 | import io
28 | import grafanalib.core as G
29 | from grafanalib._gen import write_dashboard
30 | 
31 | DEFAULT_TITLE = "Python Example Dashboard"
32 | 
33 | # Simple example of a table-driven dashboard - good to enhance with grid position, legend, etc.
34 | metrics = [
35 |     {'section': 'Monitor Tracking'},
36 |     {'row': 1},
37 |     {'title': 'Monitor Processes (by cmd)',
38 |      'expr': ['monitor_by_cmd{serverid="$serverid"}',
39 |               'sum(monitor_by_cmd{serverid="$serverid"})']},
40 |     {'title': 'Monitor Processes (by user)',
41 |      'expr': ['monitor_by_user{serverid="$serverid"}',
42 |               'sum(monitor_by_user{serverid="$serverid"})']},
43 | ]
44 | 
45 | 
46 | class CreateDashboard():
47 |     "See module doc string for details"
48 | 
49 |     def __init__(self, *args, **kwargs):
50 |         self.parse_args(__doc__, args)
51 | 
52 |     def parse_args(self, doc, args):
53 |         "Common parsing and setting up of args"
54 |         desc = textwrap.dedent(doc)
55 |         parser = argparse.ArgumentParser(
56 |             formatter_class=argparse.RawDescriptionHelpFormatter,
57 |             description=desc)
58 |         parser.add_argument('-t', '--title', default=DEFAULT_TITLE,
59 |                             help="Dashboard title. Default: " + DEFAULT_TITLE)
60 |         self.options = parser.parse_args(args=args)
61 | 
62 |     def run(self):
63 |         templateList = [G.Template(default="",
64 |                                    dataSource="default",
65 |                                    name="serverid",
66 |                                    label="ServerID",
67 |                                    query="label_values(serverid)")]
68 | 
69 |         dashboard = G.Dashboard(title=self.options.title,
70 |                                 templating=G.Templating(list=templateList))
71 | 
72 |         # Simple table processing - could be enhanced to use GridPos etc.
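        # Dispatch on the entry type: a 'section' entry starts a new titled
        # row, a 'row' entry starts an untitled row, and any other entry
        # becomes a graph panel whose expressions are added as successive
        # targets (refIds A, B, ...).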
73 | for metric in metrics: 74 | if 'section' in metric: 75 | dashboard.rows.append(G.Row(title=metric['section'], showTitle=True)) 76 | continue 77 | if 'row' in metric: 78 | dashboard.rows.append(G.Row(title='', showTitle=False)) 79 | continue 80 | graph = G.Graph(title=metric['title'], 81 | dataSource='default', 82 | maxDataPoints=1000, 83 | legend=G.Legend(show=True, alignAsTable=True, 84 | min=True, max=True, avg=True, current=True, total=True, 85 | sort='max', sortDesc=True), 86 | yAxes=G.single_y_axis()) 87 | ref_id = 'A' 88 | for texp in metric['expr']: 89 | graph.targets.append(G.Target(expr=texp, 90 | refId=ref_id)) 91 | ref_id = chr(ord(ref_id) + 1) 92 | dashboard.rows[-1].panels.append(graph) 93 | 94 | # Auto-number panels - returns new dashboard 95 | dashboard = dashboard.auto_panel_ids() 96 | 97 | s = io.StringIO() 98 | write_dashboard(dashboard, s) 99 | print("""{ 100 | "dashboard": %s 101 | } 102 | """ % s.getvalue()) 103 | 104 | 105 | if __name__ == '__main__': 106 | """ Main Program""" 107 | obj = CreateDashboard(*sys.argv[1:]) 108 | obj.run() 109 | -------------------------------------------------------------------------------- /grafanalib/tests/examples/example.alertsv9.alertgroup.py: -------------------------------------------------------------------------------- 1 | """Example grafana 9.x+ Alert""" 2 | 3 | 4 | from grafanalib.core import ( 5 | AlertGroup, 6 | AlertRulev9, 7 | Target, 8 | AlertCondition, 9 | AlertExpression, 10 | GreaterThan, 11 | OP_AND, 12 | RTYPE_LAST, 13 | EXP_TYPE_CLASSIC, 14 | EXP_TYPE_REDUCE, 15 | EXP_TYPE_MATH 16 | ) 17 | 18 | # An AlertGroup is one group contained in an alert folder. 19 | alertgroup = AlertGroup( 20 | name="Production Alerts", 21 | # Each AlertRule forms a separate alert. 22 | rules=[ 23 | # Alert rule using classic condition > 3 24 | AlertRulev9( 25 | # Each rule must have a unique title 26 | title="Alert for something 1", 27 | uid='alert1', 28 | # Several triggers can be used per alert 29 | condition='B', 30 | triggers=[ 31 | # A target refId must be assigned, and exist only once per AlertRule. 32 | Target( 33 | expr="from(bucket: \"sensors\")\n |> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n |> filter(fn: (r) => r[\"_measurement\"] == \"remote_cpu\")\n |> filter(fn: (r) => r[\"_field\"] == \"usage_system\")\n |> filter(fn: (r) => r[\"cpu\"] == \"cpu-total\")\n |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)\n |> yield(name: \"mean\")", 34 | # Set datasource to name of your datasource 35 | datasource="influxdb", 36 | refId="A", 37 | ), 38 | AlertExpression( 39 | refId="B", 40 | expressionType=EXP_TYPE_CLASSIC, 41 | expression='A', 42 | conditions=[ 43 | AlertCondition( 44 | evaluator=GreaterThan(3), 45 | operator=OP_AND, 46 | reducerType=RTYPE_LAST 47 | ) 48 | ] 49 | ) 50 | ], 51 | annotations={ 52 | "summary": "The database is down", 53 | "runbook_url": "runbook-for-this-scenario.com/foo", 54 | }, 55 | labels={ 56 | "environment": "prod", 57 | "slack": "prod-alerts", 58 | }, 59 | evaluateFor="3m", 60 | ), 61 | # Alert rule using reduce and Math 62 | AlertRulev9( 63 | # Each rule must have a unique title 64 | title="Alert for something 2", 65 | uid='alert2', 66 | condition='C', 67 | # Several triggers can be used per alert 68 | triggers=[ 69 | # A target refId must be assigned, and exist only once per AlertRule. 
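                # refId "A" below is the raw Flux query; expression "B"
                # reduces it to a single value and expression "C" fires the
                # alert when that value is below 3.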
70 | Target( 71 | expr="from(bucket: \"sensors\")\n |> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n |> filter(fn: (r) => r[\"_measurement\"] == \"remote_cpu\")\n |> filter(fn: (r) => r[\"_field\"] == \"usage_system\")\n |> filter(fn: (r) => r[\"cpu\"] == \"cpu-total\")\n |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)\n |> yield(name: \"mean\")", 72 | # Set datasource to name of your datasource 73 | datasource="influxdb", 74 | refId="A", 75 | ), 76 | AlertExpression( 77 | refId="B", 78 | expressionType=EXP_TYPE_REDUCE, 79 | expression='A', 80 | reduceFunction='mean', 81 | reduceMode='dropNN' 82 | ), 83 | AlertExpression( 84 | refId="C", 85 | expressionType=EXP_TYPE_MATH, 86 | expression='$B < 3' 87 | ) 88 | ], 89 | annotations={ 90 | "summary": "The database is down", 91 | "runbook_url": "runbook-for-this-scenario.com/foo", 92 | }, 93 | labels={ 94 | "environment": "prod", 95 | "slack": "prod-alerts", 96 | }, 97 | evaluateFor="3m", 98 | ) 99 | ] 100 | ) 101 | -------------------------------------------------------------------------------- /grafanalib/tests/examples/example.alertsv9.alertfilebasedprovisioning.py: -------------------------------------------------------------------------------- 1 | """Example grafana 9.x+ Alert""" 2 | 3 | 4 | from grafanalib.core import ( 5 | AlertGroup, 6 | AlertRulev9, 7 | Target, 8 | AlertCondition, 9 | AlertExpression, 10 | AlertFileBasedProvisioning, 11 | GreaterThan, 12 | OP_AND, 13 | RTYPE_LAST, 14 | EXP_TYPE_CLASSIC, 15 | EXP_TYPE_REDUCE, 16 | EXP_TYPE_MATH 17 | ) 18 | 19 | # An AlertGroup is one group contained in an alert folder. 20 | alertgroup = AlertGroup( 21 | name="Production Alerts", 22 | # Each AlertRule forms a separate alert. 23 | rules=[ 24 | # Alert rule using classic condition > 3 25 | AlertRulev9( 26 | # Each rule must have a unique title 27 | title="Alert for something 3", 28 | uid='alert3', 29 | # Several triggers can be used per alert 30 | condition='B', 31 | triggers=[ 32 | # A target refId must be assigned, and exist only once per AlertRule. 33 | Target( 34 | expr="from(bucket: \"sensors\")\n |> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n |> filter(fn: (r) => r[\"_measurement\"] == \"remote_cpu\")\n |> filter(fn: (r) => r[\"_field\"] == \"usage_system\")\n |> filter(fn: (r) => r[\"cpu\"] == \"cpu-total\")\n |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)\n |> yield(name: \"mean\")", 35 | # Set datasource to name of your datasource 36 | datasource="influxdb", 37 | refId="A", 38 | ), 39 | AlertExpression( 40 | refId="B", 41 | expressionType=EXP_TYPE_CLASSIC, 42 | expression='A', 43 | conditions=[ 44 | AlertCondition( 45 | evaluator=GreaterThan(3), 46 | operator=OP_AND, 47 | reducerType=RTYPE_LAST 48 | ) 49 | ] 50 | ) 51 | ], 52 | annotations={ 53 | "summary": "The database is down", 54 | "runbook_url": "runbook-for-this-scenario.com/foo", 55 | }, 56 | labels={ 57 | "environment": "prod", 58 | "slack": "prod-alerts", 59 | }, 60 | evaluateFor="3m", 61 | ), 62 | # Alert rule using reduce and Math 63 | AlertRulev9( 64 | # Each rule must have a unique title 65 | title="Alert for something 4", 66 | uid='alert4', 67 | condition='C', 68 | # Several triggers can be used per alert 69 | triggers=[ 70 | # A target refId must be assigned, and exist only once per AlertRule. 
71 |                     Target(
72 |                         expr="from(bucket: \"sensors\")\n  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n  |> filter(fn: (r) => r[\"_measurement\"] == \"remote_cpu\")\n  |> filter(fn: (r) => r[\"_field\"] == \"usage_system\")\n  |> filter(fn: (r) => r[\"cpu\"] == \"cpu-total\")\n  |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)\n  |> yield(name: \"mean\")",
73 |                         # Set datasource to name of your datasource
74 |                         datasource="influxdb",
75 |                         refId="A",
76 |                     ),
77 |                     AlertExpression(
78 |                         refId="B",
79 |                         expressionType=EXP_TYPE_REDUCE,
80 |                         expression='A',
81 |                         reduceFunction='mean',
82 |                         reduceMode='dropNN'
83 |                     ),
84 |                     AlertExpression(
85 |                         refId="C",
86 |                         expressionType=EXP_TYPE_MATH,
87 |                         expression='$B < 3'
88 |                     )
89 |                 ],
90 |             annotations={
91 |                 "summary": "The database is down",
92 |                 "runbook_url": "runbook-for-this-scenario.com/foo",
93 |             },
94 |             labels={
95 |                 "environment": "prod",
96 |                 "slack": "prod-alerts",
97 |             },
98 |             evaluateFor="3m",
99 |         )
100 |     ]
101 | )
102 | 
103 | alertfilebasedprovisioning = AlertFileBasedProvisioning([alertgroup])
104 | 
--------------------------------------------------------------------------------
/grafanalib/azuremonitor.py:
--------------------------------------------------------------------------------
1 | """Helpers to create Azure Monitor specific Grafana queries."""
2 | 
3 | import attr
4 | from attr.validators import instance_of
5 | 
6 | 
7 | @attr.s
8 | class AzureMonitorMetricsTarget(object):
9 |     """
10 |     Generates Azure Monitor Metrics target JSON structure.
11 | 
12 |     Grafana docs on using Azure Monitor:
13 |     https://grafana.com/docs/grafana/latest/datasources/azuremonitor/#querying-azure-monitor-metrics
14 | 
15 |     :param aggregation: Metrics Aggregation (Total, None, Minimum, Maximum, Average, Count)
16 |     :param dimensionFilters: Dimension Filters
17 |     :param metricDefinition: Metrics Definition https://docs.microsoft.com/en-us/azure/azure-monitor/essentials/metrics-supported
18 |     :param metricNamespace: Metrics Namespace
19 |     :param resourceGroup: Resource Group the resource resides in
20 |     :param timeGrain: Time Granularity
21 |     :param queryType: Type of Query (Azure Monitor in this case)
22 |     :param subscription: Azure Subscription ID to scope to
23 |     :param refId: Reference ID for the target
24 |     """
25 | 
26 |     aggregation = attr.ib(default="Total")
27 |     dimensionFilters = attr.ib(factory=list, validator=instance_of(list))
28 |     metricDefinition = attr.ib(default="")
29 |     metricName = attr.ib(default="")
30 |     metricNamespace = attr.ib(default="")
31 |     resourceGroup = attr.ib(default="")
32 |     resourceName = attr.ib(default="")
33 |     timeGrain = attr.ib(default="auto")
34 |     queryType = attr.ib(default="Azure Monitor")
35 |     subscription = attr.ib(default="")
36 |     refId = attr.ib(default="")
37 |     alias = attr.ib(default="")
38 | 
39 |     def to_json_data(self):
40 |         return {
41 |             "azureMonitor": {
42 |                 "aggregation": self.aggregation,
43 |                 "alias": self.alias,
44 |                 "dimensionFilters": self.dimensionFilters,
45 |                 "metricDefinition": self.metricDefinition,
46 |                 "metricName": self.metricName,
47 |                 "metricNamespace": self.metricNamespace,
48 |                 "resourceGroup": self.resourceGroup,
49 |                 "resourceName": self.resourceName,
50 |                 "timeGrain": self.timeGrain,
51 |             },
52 |             "queryType": self.queryType,
53 |             "refId": self.refId,
54 |             "subscription": self.subscription,
55 |         }
56 | 
57 | 
58 | @attr.s
59 | class AzureLogsTarget(object):
60 |     """
61 |     Generates Azure Monitor Logs target JSON structure.
62 | 63 | Grafana docs on using Azure Logs: 64 | https://grafana.com/docs/grafana/latest/datasources/azuremonitor/#querying-azure-monitor-logs 65 | 66 | :param query: Query to execute 67 | :param resource: Identification string for resource e.g. /subscriptions/1234-abcd/resourceGroups/myResourceGroup/providers/Microsoft.DataFactory/factories/myDataFactory 68 | :param resultFormat: Output Format of the logs 69 | :param queryType: Type of Query (Azure Log Analytics in this case) 70 | :param subscription: Azure Subscription ID to scope to 71 | :param refId: Reference ID for the target 72 | """ 73 | 74 | query = attr.ib(default="") 75 | resource = attr.ib(default="") 76 | resultFormat = attr.ib(default="table") 77 | queryType = attr.ib(default="Azure Log Analytics") 78 | subscription = attr.ib(default="") 79 | refId = attr.ib(default="") 80 | 81 | def to_json_data(self): 82 | return { 83 | "azureLogAnalytics": { 84 | "query": self.query, 85 | "resource": self.resource, 86 | "resultFormat": self.resultFormat, 87 | }, 88 | "queryType": self.queryType, 89 | "refId": self.refId, 90 | "subscription": self.subscription, 91 | } 92 | 93 | 94 | @attr.s 95 | class AzureResourceGraphTarget(object): 96 | """ 97 | Generates Azure Resource Graph target JSON structure. 98 | 99 | Grafana docs on using Azure Resource Graph: 100 | https://grafana.com/docs/grafana/latest/datasources/azuremonitor/#querying-azure-resource-graph 101 | 102 | :param query: Query to execute 103 | :param queryType: Type of Query (Azure Resource Graph in this case) 104 | :param subscription: Azure Subscription ID to scope to 105 | :param refId: Reference ID for the target 106 | """ 107 | 108 | query = attr.ib(default="") 109 | resource = attr.ib(default="") 110 | queryType = attr.ib(default="Azure Resource Graph") 111 | subscription = attr.ib(default="") 112 | refId = attr.ib(default="") 113 | 114 | def to_json_data(self): 115 | return { 116 | "azureResourceGraph": {"query": self.query}, 117 | "queryType": self.queryType, 118 | "refId": self.refId, 119 | "subscription": self.subscription, 120 | } 121 | -------------------------------------------------------------------------------- /grafanalib/cloudwatch.py: -------------------------------------------------------------------------------- 1 | """Helpers to create Cloudwatch-specific Grafana queries.""" 2 | 3 | import attr 4 | from attr.validators import instance_of 5 | 6 | from grafanalib.core import Target 7 | 8 | 9 | @attr.s 10 | class CloudwatchMetricsTarget(Target): 11 | """ 12 | Generates Cloudwatch target JSON structure. 13 | 14 | Grafana docs on using Cloudwatch: 15 | https://grafana.com/docs/grafana/latest/datasources/cloudwatch/ 16 | 17 | AWS docs on Cloudwatch metrics: 18 | https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/aws-services-cloudwatch-metrics.html 19 | 20 | :param alias: legend alias 21 | :param dimensions: Cloudwatch dimensions dict 22 | :param expression: Cloudwatch Metric math expressions 23 | :param id: unique id 24 | :param matchExact: Only show metrics that exactly match all defined dimension names. 
25 |     :param account: AWS Account where Cloudwatch is used
26 |     :param accountId: AWS Account ID where Cloudwatch is used
27 |     :param metricName: Cloudwatch metric name
28 |     :param namespace: Cloudwatch namespace
29 |     :param period: Cloudwatch data period
30 |     :param refId: target reference id
31 |     :param region: Cloudwatch region
32 |     :param statistics: Cloudwatch mathematic statistics (to be deprecated, prefer `statistic` instead)
33 |     :param statistic: Cloudwatch mathematic statistic
34 |     :param hide: controls if given metric is displayed on visualization
35 |     :param datasource: Grafana datasource name
36 |     :param queryMode: queryMode for cloudwatch metric request
37 |     """
38 | 
39 |     alias = attr.ib(default="")
40 |     dimensions = attr.ib(factory=dict, validator=instance_of(dict))
41 |     expression = attr.ib(default="")
42 |     id = attr.ib(default="")
43 |     matchExact = attr.ib(default=True, validator=instance_of(bool))
44 |     account = attr.ib(default="")
45 |     accountId = attr.ib(default="")
46 |     metricName = attr.ib(default="")
47 |     namespace = attr.ib(default="")
48 |     period = attr.ib(default="")
49 |     refId = attr.ib(default="")
50 |     region = attr.ib(default="default")
51 |     statistics = attr.ib(default=attr.Factory(lambda: ["Average"]), validator=instance_of(list))  # attr.Factory avoids a shared mutable default
52 |     statistic = attr.ib(default="Average")
53 |     hide = attr.ib(default=False, validator=instance_of(bool))
54 |     datasource = attr.ib(default=None)
55 |     queryMode = attr.ib(default="")
56 | 
57 |     def to_json_data(self):
58 |         return {
59 |             "alias": self.alias,
60 |             "dimensions": self.dimensions,
61 |             "expression": self.expression,
62 |             "id": self.id,
63 |             "matchExact": self.matchExact,
64 |             "account": self.account,
65 |             "accountId": self.accountId,
66 |             "metricName": self.metricName,
67 |             "namespace": self.namespace,
68 |             "period": self.period,
69 |             "refId": self.refId,
70 |             "region": self.region,
71 |             "statistics": self.statistics,
72 |             "statistic": self.statistic,
73 |             "hide": self.hide,
74 |             "datasource": self.datasource,
75 |             "queryMode": self.queryMode,
76 |         }
77 | 
78 | 
79 | @attr.s
80 | class CloudwatchLogsInsightsTarget(Target):
81 |     """
82 |     Generates Cloudwatch Logs Insights target JSON structure.
83 | 84 | Grafana docs on using Cloudwatch: 85 | https://grafana.com/docs/grafana/latest/datasources/cloudwatch/ 86 | 87 | AWS docs on Cloudwatch Logs Insights: 88 | https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/AnalyzingLogData.html 89 | 90 | :param expression: Cloudwatch Logs Insights expressions 91 | :param id: unique id 92 | :param logGroupNames: List of Cloudwatch log groups to query 93 | :param namespace: Cloudwatch namespace 94 | :param refId: target reference id 95 | :param region: Cloudwatch region 96 | :param statsGroups: Cloudwatch statsGroups 97 | :param hide: controls if given metric is displayed on visualization 98 | :param datasource: Grafana datasource name 99 | """ 100 | 101 | expression = attr.ib(default="") 102 | id = attr.ib(default="") 103 | logGroupNames = attr.ib(factory=list, validator=instance_of(list)) 104 | namespace = attr.ib(default="") 105 | refId = attr.ib(default="") 106 | region = attr.ib(default="default") 107 | statsGroups = attr.ib(factory=list, validator=instance_of(list)) 108 | hide = attr.ib(default=False, validator=instance_of(bool)) 109 | datasource = attr.ib(default=None) 110 | 111 | def to_json_data(self): 112 | return { 113 | "expression": self.expression, 114 | "id": self.id, 115 | "logGroupNames": self.logGroupNames, 116 | "namespace": self.namespace, 117 | "queryMode": "Logs", 118 | "refId": self.refId, 119 | "region": self.region, 120 | "statsGroups": self.statsGroups, 121 | "hide": self.hide, 122 | "datasource": self.datasource, 123 | } 124 | -------------------------------------------------------------------------------- /grafanalib/opentsdb.py: -------------------------------------------------------------------------------- 1 | """Support for OpenTSDB.""" 2 | 3 | import attr 4 | from attr.validators import instance_of 5 | from grafanalib.validators import is_in 6 | 7 | # OpenTSDB aggregators 8 | OTSDB_AGG_AVG = 'avg' 9 | OTSDB_AGG_COUNT = 'count' 10 | OTSDB_AGG_DEV = 'dev' 11 | OTSDB_AGG_EP50R3 = 'ep50r3' 12 | OTSDB_AGG_EP50R7 = 'ep50r7' 13 | OTSDB_AGG_EP75R3 = 'ep75r3' 14 | OTSDB_AGG_EP75R7 = 'ep75r7' 15 | OTSDB_AGG_EP90R3 = 'ep90r3' 16 | OTSDB_AGG_EP90R7 = 'ep90r7' 17 | OTSDB_AGG_EP95R3 = 'ep95r3' 18 | OTSDB_AGG_EP95R7 = 'ep95r7' 19 | OTSDB_AGG_EP99R3 = 'ep99r3' 20 | OTSDB_AGG_EP99R7 = 'ep99r7' 21 | OTSDB_AGG_EP999R3 = 'ep999r3' 22 | OTSDB_AGG_EP999R7 = 'ep999r7' 23 | OTSDB_AGG_FIRST = 'first' 24 | OTSDB_AGG_LAST = 'last' 25 | OTSDB_AGG_MIMMIN = 'mimmin' 26 | OTSDB_AGG_MIMMAX = 'mimmax' 27 | OTSDB_AGG_MIN = 'min' 28 | OTSDB_AGG_MAX = 'max' 29 | OTSDB_AGG_NONE = 'none' 30 | OTSDB_AGG_P50 = 'p50' 31 | OTSDB_AGG_P75 = 'p75' 32 | OTSDB_AGG_P90 = 'p90' 33 | OTSDB_AGG_P95 = 'p95' 34 | OTSDB_AGG_P99 = 'p99' 35 | OTSDB_AGG_P999 = 'p999' 36 | OTSDB_AGG_SUM = 'sum' 37 | OTSDB_AGG_ZIMSUM = 'zimsum' 38 | 39 | OTSDB_DOWNSAMPLING_FILL_POLICIES = ('none', 'nan', 'null', 'zero') 40 | OTSDB_DOWNSAMPLING_FILL_POLICY_DEFAULT = 'none' 41 | 42 | OTSDB_QUERY_FILTERS = ( 43 | 'literal_or', 'iliteral_or', 'not_literal_or', 44 | 'not_iliteral_or', 'wildcard', 'iwildcard', 'regexp') 45 | OTSDB_QUERY_FILTER_DEFAULT = 'literal_or' 46 | 47 | 48 | @attr.s 49 | class OpenTSDBFilter(object): 50 | 51 | value = attr.ib() 52 | tag = attr.ib() 53 | type = attr.ib( 54 | default=OTSDB_QUERY_FILTER_DEFAULT, 55 | validator=is_in(OTSDB_QUERY_FILTERS)) 56 | groupBy = attr.ib(default=False, validator=instance_of(bool)) 57 | 58 | def to_json_data(self): 59 | return { 60 | 'filter': self.value, 61 | 'tagk': self.tag, 62 | 'type': self.type, 63 | 'groupBy': self.groupBy 
64 |         }
65 | 
66 | 
67 | @attr.s
68 | class OpenTSDBTarget(object):
69 |     """Generates OpenTSDB target JSON structure.
70 | 
71 |     Grafana docs on using OpenTSDB:
72 |     http://docs.grafana.org/features/datasources/opentsdb/
73 |     OpenTSDB docs on querying or reading data:
74 |     http://opentsdb.net/docs/build/html/user_guide/query/index.html
75 | 
76 | 
77 |     :param metric: OpenTSDB metric name
78 |     :param refId: target reference id
79 |     :param aggregator: defines metric aggregator.
80 |         The list of OpenTSDB aggregators:
81 |         http://opentsdb.net/docs/build/html/user_guide/query/aggregators.html#available-aggregators
82 |     :param alias: legend alias. Use patterns like $tag_tagname to replace part
83 |         of the alias for a tag value.
84 |     :param isCounter: defines if rate function results should
85 |         be interpreted as a counter
86 |     :param counterMax: defines rate counter max value
87 |     :param counterResetValue: defines rate counter reset value
88 |     :param disableDownsampling: defines if downsampling should be disabled.
89 |         OpenTSDB docs on downsampling:
90 |         http://opentsdb.net/docs/build/html/user_guide/query/index.html#downsampling
91 |     :param downsampleAggregator: defines downsampling aggregator
92 |     :param downsampleFillPolicy: defines downsampling fill policy
93 |     :param downsampleInterval: defines downsampling interval
94 |     :param filters: defines the list of metric query filters.
95 |         OpenTSDB docs on filters:
96 |         http://opentsdb.net/docs/build/html/user_guide/query/index.html#filters
97 |     :param shouldComputeRate: defines if rate function should be used.
98 |         OpenTSDB docs on rate function:
99 |         http://opentsdb.net/docs/build/html/user_guide/query/index.html#rate
100 |     :param currentFilterGroupBy: defines if grouping should be enabled for
101 |         the current filter
102 |     :param currentFilterKey: defines current filter key
103 |     :param currentFilterType: defines current filter type
104 |     :param currentFilterValue: defines current filter value
105 |     """
106 | 
107 |     metric = attr.ib()
108 |     refId = attr.ib(default="")
109 |     aggregator = attr.ib(default='sum')
110 |     alias = attr.ib(default=None)
111 |     isCounter = attr.ib(default=False, validator=instance_of(bool))
112 |     counterMax = attr.ib(default=None)
113 |     counterResetValue = attr.ib(default=None)
114 |     disableDownsampling = attr.ib(default=False, validator=instance_of(bool))
115 |     downsampleAggregator = attr.ib(default=OTSDB_AGG_SUM)
116 |     downsampleFillPolicy = attr.ib(
117 |         default=OTSDB_DOWNSAMPLING_FILL_POLICY_DEFAULT,
118 |         validator=is_in(OTSDB_DOWNSAMPLING_FILL_POLICIES))
119 |     downsampleInterval = attr.ib(default=None)
120 |     filters = attr.ib(default=attr.Factory(list))
121 |     shouldComputeRate = attr.ib(default=False, validator=instance_of(bool))
122 |     currentFilterGroupBy = attr.ib(default=False, validator=instance_of(bool))
123 |     currentFilterKey = attr.ib(default="")
124 |     currentFilterType = attr.ib(default=OTSDB_QUERY_FILTER_DEFAULT)
125 |     currentFilterValue = attr.ib(default="")
126 | 
127 |     def to_json_data(self):
128 | 
129 |         return {
130 |             'aggregator': self.aggregator,
131 |             'alias': self.alias,
132 |             'isCounter': self.isCounter,
133 |             'counterMax': self.counterMax,
134 |             'counterResetValue': self.counterResetValue,
135 |             'disableDownsampling': self.disableDownsampling,
136 |             'downsampleAggregator': self.downsampleAggregator,
137 |             'downsampleFillPolicy': self.downsampleFillPolicy,
138 |             'downsampleInterval': self.downsampleInterval,
139 |             'filters': self.filters,
140 |             'metric': self.metric,
141 |             'refId': self.refId,
142 | 
'shouldComputeRate': self.shouldComputeRate, 143 | 'currentFilterGroupBy': self.currentFilterGroupBy, 144 | 'currentFilterKey': self.currentFilterKey, 145 | 'currentFilterType': self.currentFilterType, 146 | 'currentFilterValue': self.currentFilterValue, 147 | } 148 | -------------------------------------------------------------------------------- /grafanalib/tests/test_grafanalib.py: -------------------------------------------------------------------------------- 1 | """Tests for Grafanalib.""" 2 | 3 | import grafanalib.core as G 4 | from grafanalib import _gen 5 | 6 | import sys 7 | if sys.version_info[0] < 3: 8 | from io import BytesIO as StringIO 9 | else: 10 | from io import StringIO 11 | 12 | # TODO: Use Hypothesis to generate a more thorough battery of smoke tests. 13 | 14 | 15 | def test_serialization(): 16 | """Serializing a graph doesn't explode.""" 17 | graph = G.Graph( 18 | title="CPU Usage by Namespace (rate[5m])", 19 | dataSource="My data source", 20 | targets=[ 21 | G.Target( 22 | expr='namespace:container_cpu_usage_seconds_total:sum_rate', 23 | legendFormat='{{namespace}}', 24 | refId='A', 25 | ), 26 | ], 27 | id=1, 28 | yAxes=G.YAxes( 29 | G.YAxis(format=G.SHORT_FORMAT, label="CPU seconds / second"), 30 | G.YAxis(format=G.SHORT_FORMAT), 31 | ), 32 | ) 33 | stream = StringIO() 34 | _gen.write_dashboard(graph, stream) 35 | assert stream.getvalue() != '' 36 | 37 | 38 | def test_auto_id(): 39 | """auto_panel_ids() provides IDs for all panels without IDs already set.""" 40 | dashboard = G.Dashboard( 41 | title="Test dashboard", 42 | rows=[ 43 | G.Row(panels=[ 44 | G.Graph( 45 | title="CPU Usage by Namespace (rate[5m])", 46 | dataSource="My data source", 47 | targets=[ 48 | G.Target( 49 | expr='whatever', 50 | legendFormat='{{namespace}}', 51 | refId='A', 52 | ), 53 | ], 54 | yAxes=G.YAxes( 55 | G.YAxis(format=G.SHORT_FORMAT, label="CPU seconds"), 56 | G.YAxis(format=G.SHORT_FORMAT), 57 | ), 58 | ) 59 | ]), 60 | ], 61 | ).auto_panel_ids() 62 | assert dashboard.rows[0].panels[0].id == 1 63 | 64 | dashboard = G.Dashboard( 65 | title="Test dashboard", 66 | panels=[ 67 | G.RowPanel(gridPos=G.GridPos(h=1, w=24, x=0, y=8)), 68 | G.Graph( 69 | title="CPU Usage by Namespace (rate[5m])", 70 | dataSource="My data source", 71 | targets=[ 72 | G.Target( 73 | expr='whatever', 74 | legendFormat='{{namespace}}', 75 | refId='A', 76 | ), 77 | ], 78 | yAxes=G.YAxes( 79 | G.YAxis(format=G.SHORT_FORMAT, label="CPU seconds"), 80 | G.YAxis(format=G.SHORT_FORMAT), 81 | ), 82 | gridPos=G.GridPos(h=1, w=24, x=0, y=8) 83 | ) 84 | ], 85 | ).auto_panel_ids() 86 | assert dashboard.panels[0].id == 1 87 | 88 | 89 | def test_auto_refids_preserves_provided_ids(): 90 | """ 91 | auto_ref_ids() provides refIds for all targets without refIds already 92 | set. 
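    Targets that already carry a refId (like 'Q' below) keep it, and the
    remaining targets are assigned the next free letters.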
93 | """ 94 | dashboard = G.Dashboard( 95 | title="Test dashboard", 96 | rows=[ 97 | G.Row(panels=[ 98 | G.Graph( 99 | title="CPU Usage by Namespace (rate[5m])", 100 | targets=[ 101 | G.Target( 102 | expr='whatever #Q', 103 | legendFormat='{{namespace}}', 104 | ), 105 | G.Target( 106 | expr='hidden whatever', 107 | legendFormat='{{namespace}}', 108 | refId='Q', 109 | ), 110 | G.Target( 111 | expr='another target' 112 | ), 113 | ], 114 | ).auto_ref_ids() 115 | ]), 116 | ], 117 | ) 118 | assert dashboard.rows[0].panels[0].targets[0].refId == 'A' 119 | assert dashboard.rows[0].panels[0].targets[1].refId == 'Q' 120 | assert dashboard.rows[0].panels[0].targets[2].refId == 'B' 121 | 122 | dashboard = G.Dashboard( 123 | title="Test dashboard", 124 | panels=[ 125 | G.Graph( 126 | title="CPU Usage by Namespace (rate[5m])", 127 | dataSource="My data source", 128 | targets=[ 129 | G.Target( 130 | expr='whatever #Q', 131 | legendFormat='{{namespace}}', 132 | ), 133 | G.Target( 134 | expr='hidden whatever', 135 | legendFormat='{{namespace}}', 136 | refId='Q', 137 | ), 138 | G.Target( 139 | expr='another target' 140 | ), 141 | ], 142 | yAxes=G.YAxes( 143 | G.YAxis(format=G.SHORT_FORMAT, label="CPU seconds"), 144 | G.YAxis(format=G.SHORT_FORMAT), 145 | ), 146 | gridPos=G.GridPos(h=1, w=24, x=0, y=8) 147 | ).auto_ref_ids() 148 | ], 149 | ).auto_panel_ids() 150 | assert dashboard.panels[0].targets[0].refId == 'A' 151 | assert dashboard.panels[0].targets[1].refId == 'Q' 152 | assert dashboard.panels[0].targets[2].refId == 'B' 153 | 154 | 155 | def test_auto_refids(): 156 | """ 157 | auto_ref_ids() provides refIds for all targets without refIds already 158 | set. 159 | """ 160 | dashboard = G.Dashboard( 161 | title="Test dashboard", 162 | rows=[ 163 | G.Row(panels=[ 164 | G.Graph( 165 | title="CPU Usage by Namespace (rate[5m])", 166 | targets=[G.Target(expr="metric %d" % i) 167 | for i in range(53)], 168 | ).auto_ref_ids() 169 | ]), 170 | ], 171 | ) 172 | assert dashboard.rows[0].panels[0].targets[0].refId == 'A' 173 | assert dashboard.rows[0].panels[0].targets[25].refId == 'Z' 174 | assert dashboard.rows[0].panels[0].targets[26].refId == 'AA' 175 | assert dashboard.rows[0].panels[0].targets[51].refId == 'AZ' 176 | assert dashboard.rows[0].panels[0].targets[52].refId == 'BA' 177 | 178 | 179 | def test_row_show_title(): 180 | row = G.Row().to_json_data() 181 | assert row['title'] == 'New row' 182 | assert not row['showTitle'] 183 | 184 | row = G.Row(title='My title').to_json_data() 185 | assert row['title'] == 'My title' 186 | assert row['showTitle'] 187 | 188 | row = G.Row(title='My title', showTitle=False).to_json_data() 189 | assert row['title'] == 'My title' 190 | assert not row['showTitle'] 191 | 192 | 193 | def test_row_panel_show_title(): 194 | row = G.RowPanel().to_json_data() 195 | assert row['title'] == '' 196 | assert row['panels'] == [] 197 | 198 | row = G.RowPanel(title='My title').to_json_data() 199 | assert row['title'] == 'My title' 200 | 201 | row = G.RowPanel(title='My title', panels=['a', 'b']).to_json_data() 202 | assert row['title'] == 'My title' 203 | assert row['panels'][0] == 'a' 204 | 205 | 206 | def test_row_panel_collapsed(): 207 | row = G.RowPanel().to_json_data() 208 | assert row['collapsed'] is False 209 | 210 | row = G.RowPanel(collapsed=True).to_json_data() 211 | assert row['collapsed'] is True 212 | -------------------------------------------------------------------------------- /grafanalib/_gen.py: 
--------------------------------------------------------------------------------
1 | """Generate JSON Grafana dashboards."""
2 | 
3 | import argparse
4 | import json
5 | import os
6 | import sys
7 | 
8 | 
9 | DASHBOARD_SUFFIX = '.dashboard.py'
10 | ALERTGROUP_SUFFIX = '.alertgroup.py'
11 | 
12 | """
13 | Common generation functionality
14 | """
15 | 
16 | 
17 | class DashboardEncoder(json.JSONEncoder):
18 |     """Encode dashboard objects."""
19 | 
20 |     def default(self, obj):
21 |         to_json_data = getattr(obj, 'to_json_data', None)
22 |         if to_json_data:
23 |             return to_json_data()
24 |         return json.JSONEncoder.default(self, obj)
25 | 
26 | 
27 | class DashboardError(Exception):
28 |     """Raised when there is something wrong with a dashboard."""
29 | 
30 | 
31 | class AlertGroupError(Exception):
32 |     """Raised when there is something wrong with an alertgroup."""
33 | 
34 | 
35 | def write_dashboard(dashboard, stream):
36 |     json.dump(
37 |         dashboard.to_json_data(), stream, sort_keys=True, indent=2,
38 |         cls=DashboardEncoder)
39 |     stream.write('\n')
40 | 
41 | 
42 | write_alertgroup = write_dashboard
43 | 
44 | 
45 | class DefinitionError(Exception):
46 |     """Raised when there is a problem loading a Grafanalib type from a python definition."""
47 | 
48 | 
49 | def loader(path):
50 |     """Load a grafanalib type from a Python definition.
51 | 
52 |     :param str path: Path to a *.<type>.py file that defines a variable called <type>.
53 |     """
54 |     gtype = path.split(".")[-2]
55 | 
56 |     if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
57 |         import importlib.util
58 |         spec = importlib.util.spec_from_file_location(gtype, path)
59 |         module = importlib.util.module_from_spec(spec)
60 |         spec.loader.exec_module(module)
61 |     elif sys.version_info[0] == 3 and 3 <= sys.version_info[1] <= 4:
62 |         from importlib.machinery import SourceFileLoader
63 |         module = SourceFileLoader(gtype, path).load_module()
64 |     elif sys.version_info[0] == 2:
65 |         import imp
66 |         module = imp.load_source(gtype, path)
67 |     else:
68 |         import importlib
69 |         module = importlib.load_source(gtype, path)
70 | 
71 |     marker = object()
72 |     grafanalibtype = getattr(module, gtype, marker)
73 |     if grafanalibtype is marker:
74 |         raise DefinitionError(
75 |             "Definition {} does not define a variable '{}'".format(path, gtype))
76 |     return grafanalibtype
77 | 
78 | 
79 | def run_script(f):
80 |     sys.exit(f(sys.argv[1:]))
81 | 
82 | 
83 | """
84 | AlertGroup generation
85 | """
86 | 
87 | 
88 | def print_alertgroup(dashboard):
89 |     write_dashboard(dashboard, stream=sys.stdout)
90 | 
91 | 
92 | def write_alertgroups(paths):
93 |     for path in paths:
94 |         assert path.endswith(ALERTGROUP_SUFFIX)
95 |         dashboard = loader(path)
96 |         with open(get_alertgroup_json_path(path), 'w') as json_file:
97 |             write_dashboard(dashboard, json_file)
98 | 
99 | 
100 | def get_alertgroup_json_path(path):
101 |     assert path.endswith(ALERTGROUP_SUFFIX)
102 |     return '{}.json'.format(path[:-len(ALERTGROUP_SUFFIX)])
103 | 
104 | 
105 | def alertgroup_path(path):
106 |     abspath = os.path.abspath(path)
107 |     if not abspath.endswith(ALERTGROUP_SUFFIX):
108 |         raise argparse.ArgumentTypeError(
109 |             'AlertGroup file {} does not end with {}'.format(
110 |                 path, ALERTGROUP_SUFFIX))
111 |     return abspath
112 | 
113 | 
114 | def generate_alertgroups(args):
115 |     """Script for generating multiple alertgroups at a time"""
116 |     parser = argparse.ArgumentParser(prog='generate-alertgroups')
117 |     parser.add_argument(
118 |         'alertgroups', metavar='ALERT', type=os.path.abspath,
119 |         nargs='+', help='Path to alertgroup 
definition', 120 | ) 121 | opts = parser.parse_args(args) 122 | try: 123 | write_alertgroups(opts.alertgroups) 124 | except AlertGroupError as e: 125 | sys.stderr.write('ERROR: {}\n'.format(e)) 126 | return 1 127 | return 0 128 | 129 | 130 | def generate_alertgroup(args): 131 | parser = argparse.ArgumentParser(prog='generate-alertgroup') 132 | parser.add_argument( 133 | '--output', '-o', type=os.path.abspath, 134 | help='Where to write the alertgroup JSON' 135 | ) 136 | parser.add_argument( 137 | 'alertgroup', metavar='ALERT', type=os.path.abspath, 138 | help='Path to alertgroup definition', 139 | ) 140 | opts = parser.parse_args(args) 141 | try: 142 | alertgroup = loader(opts.alertgroup) 143 | if not opts.output: 144 | print_alertgroup(alertgroup) 145 | else: 146 | with open(opts.output, 'w') as output: 147 | write_alertgroup(alertgroup, output) 148 | except AlertGroupError as e: 149 | sys.stderr.write('ERROR: {}\n'.format(e)) 150 | return 1 151 | return 0 152 | 153 | 154 | def generate_alertgroups_script(): 155 | """Entry point for generate-alertgroups.""" 156 | run_script(generate_alertgroups) 157 | 158 | 159 | def generate_alertgroup_script(): 160 | """Entry point for generate-alertgroup.""" 161 | run_script(generate_alertgroup) 162 | 163 | 164 | """ 165 | Dashboard generation 166 | """ 167 | 168 | 169 | def print_dashboard(dashboard): 170 | write_dashboard(dashboard, stream=sys.stdout) 171 | 172 | 173 | def write_dashboards(paths): 174 | for path in paths: 175 | assert path.endswith(DASHBOARD_SUFFIX) 176 | dashboard = loader(path) 177 | with open(get_dashboard_json_path(path), 'w') as json_file: 178 | write_dashboard(dashboard, json_file) 179 | 180 | 181 | def get_dashboard_json_path(path): 182 | assert path.endswith(DASHBOARD_SUFFIX) 183 | return '{}.json'.format(path[:-len(DASHBOARD_SUFFIX)]) 184 | 185 | 186 | def dashboard_path(path): 187 | abspath = os.path.abspath(path) 188 | if not abspath.endswith(DASHBOARD_SUFFIX): 189 | raise argparse.ArgumentTypeError( 190 | 'Dashboard file {} does not end with {}'.format( 191 | path, DASHBOARD_SUFFIX)) 192 | return abspath 193 | 194 | 195 | def generate_dashboards(args): 196 | """Script for generating multiple dashboards at a time.""" 197 | parser = argparse.ArgumentParser(prog='generate-dashboards') 198 | parser.add_argument( 199 | 'dashboards', metavar='DASHBOARD', type=os.path.abspath, 200 | nargs='+', help='Path to dashboard definition', 201 | ) 202 | opts = parser.parse_args(args) 203 | try: 204 | write_dashboards(opts.dashboards) 205 | except DashboardError as e: 206 | sys.stderr.write('ERROR: {}\n'.format(e)) 207 | return 1 208 | return 0 209 | 210 | 211 | def generate_dashboard(args): 212 | parser = argparse.ArgumentParser(prog='generate-dashboard') 213 | parser.add_argument( 214 | '--output', '-o', type=os.path.abspath, 215 | help='Where to write the dashboard JSON' 216 | ) 217 | parser.add_argument( 218 | 'dashboard', metavar='DASHBOARD', type=os.path.abspath, 219 | help='Path to dashboard definition', 220 | ) 221 | opts = parser.parse_args(args) 222 | try: 223 | dashboard = loader(opts.dashboard) 224 | if not opts.output: 225 | print_dashboard(dashboard) 226 | else: 227 | with open(opts.output, 'w') as output: 228 | write_dashboard(dashboard, output) 229 | except DashboardError as e: 230 | sys.stderr.write('ERROR: {}\n'.format(e)) 231 | return 1 232 | return 0 233 | 234 | 235 | def generate_dashboards_script(): 236 | """Entry point for generate-dashboards.""" 237 | run_script(generate_dashboards) 238 | 239 | 240 | def 
generate_dashboard_script():
241 |     """Entry point for generate-dashboard."""
242 |     run_script(generate_dashboard)
243 | 
--------------------------------------------------------------------------------
/docs/getting-started.rst:
--------------------------------------------------------------------------------
1 | ===============================
2 | Getting Started with grafanalib
3 | ===============================
4 | 
5 | Do you like `Grafana <https://grafana.com/>`_ but wish you could version your
6 | dashboard configuration? Do you find yourself repeating common patterns? If
7 | so, grafanalib is for you.
8 | 
9 | grafanalib lets you generate Grafana dashboards from simple Python scripts.
10 | 
11 | Grafana migrates dashboards to the latest Grafana schema version on import,
12 | meaning that dashboards created with grafanalib are supported by
13 | all versions of Grafana. You may find that some of the latest features are
14 | missing from grafanalib; please refer to the `module documentation
15 | <https://grafanalib.readthedocs.io/en/main/api/grafanalib.html>`_ for information
16 | about supported features. If you find a missing feature, please raise an issue
17 | or submit a PR to the GitHub `repository <https://github.com/weaveworks/grafanalib>`_.
18 | 
19 | Writing dashboards
20 | ==================
21 | 
22 | The following will configure a dashboard with a couple of example panels that
23 | use the random walk and Prometheus datasources.
24 | 
25 | .. literalinclude:: ../grafanalib/tests/examples/example.dashboard.py
26 |    :language: python
27 | 
28 | There is a fair bit of repetition here, but once you figure out what works for
29 | your needs, you can factor that out.
30 | See `our Weave-specific customizations
31 | <https://github.com/weaveworks/grafanalib/blob/main/grafanalib/weave.py>`_
32 | for inspiration.
33 | 
34 | Generating dashboards
35 | =====================
36 | 
37 | If you save the above as ``example.dashboard.py`` (the suffix must be
38 | ``.dashboard.py``), you can then generate the JSON dashboard with:
39 | 
40 | .. code-block:: console
41 | 
42 |    $ generate-dashboard -o frontend.json example.dashboard.py
43 | 
44 | Uploading dashboards from code
45 | ===============================
46 | 
47 | Sometimes you may need to generate and upload a dashboard directly from Python
48 | code. The following example provides minimal code boilerplate for it:
49 | 
50 | .. literalinclude:: ../grafanalib/tests/examples/example.upload-dashboard.py
51 |    :language: python
52 | 
53 | Alternatively, Grafana supports file-based provisioning, where dashboard files
54 | are periodically loaded into the Grafana database. Tools like Ansible can
55 | assist with the deployment.
56 | 
57 | Writing Alerts
58 | ==============
59 | 
60 | Between Grafana versions there have been significant changes in how alerts
61 | are managed. Below are some examples of how to configure alerting in
62 | Grafana v8 and Grafana v9.
63 | 
64 | Alerts in Grafana v8
65 | --------------------
66 | 
67 | The following will configure a couple of alerts inside a group.
68 | 
69 | .. literalinclude:: ../grafanalib/tests/examples/example.alertsv8.alertgroup.py
70 |    :language: python
71 | 
72 | Although this example has a fair amount of boilerplate, when creating large numbers
73 | of similar alerts it can save lots of time to programmatically fill these fields.
74 | 
75 | Each ``AlertGroup`` represents a folder within Grafana's alerts tab. This consists
76 | of one or more ``AlertRulev8``, which contains one or more triggers. Triggers define
77 | what will cause the alert to fire.
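Concretely, a trigger pairs a query with a condition, as described below. A
minimal sketch of a single trigger (the expression, datasource name, and
threshold are illustrative):

.. code-block:: python

    from grafanalib.core import (
        AlertCondition, LowerThan, OP_AND, RTYPE_LAST, Target
    )

    trigger = (
        Target(expr='up{job="api"}', datasource="Prometheus", refId="A"),
        AlertCondition(
            evaluator=LowerThan(1),
            operator=OP_AND,
            reducerType=RTYPE_LAST,
        ),
    )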
78 | 
79 | A trigger is made up of a ``Target`` (a Grafana query on a datasource) and an
80 | ``AlertCondition`` (a condition this query must satisfy in order to alert).
81 | 
82 | Finally, there are additional settings like:
83 | 
84 | * How the alert will behave when data sources have problems (``noDataAlertState`` and ``errorAlertState``)
85 | 
86 | * How frequently the trigger is evaluated (``evaluateInterval``)
87 | 
88 | * How long the AlertCondition needs to be met before the alert fires (``evaluateFor``)
89 | 
90 | * Annotations and labels, which help provide contextual information and direct where
91 |   your alerts will go
92 | 
93 | Alerts in Grafana v9
94 | --------------------
95 | 
96 | The following will configure a couple of alerts inside a group for Grafana v9.x+.
97 | 
98 | .. literalinclude:: ../grafanalib/tests/examples/example.alertsv9.alertgroup.py
99 |    :language: python
100 | 
101 | Although this example has a fair amount of boilerplate, when creating large numbers
102 | of similar alerts it can save lots of time to programmatically fill these fields.
103 | 
104 | Each ``AlertGroup`` represents a folder within Grafana's alerts tab. This consists
105 | of one or more ``AlertRulev9``, which contains a list of triggers that define what
106 | will cause the alert to fire.
107 | 
108 | A trigger can either be a ``Target`` (a Grafana query on a datasource) or an
109 | ``AlertExpression`` (an expression performed on one of the triggers).
110 | 
111 | An ``AlertExpression`` can be one of four types:
112 | 
113 | * Classic - Contains a list of ``AlertCondition`` objects that are evaluated
114 | * Reduce - Reduces the queried data
115 | * Resample - Resamples the queried data
116 | * Math - An expression with the condition for the rule
117 | 
118 | Finally, there are additional settings like:
119 | 
120 | * How the alert will behave when data sources have problems (``noDataAlertState`` and ``errorAlertState``)
121 | 
122 | * How frequently each rule in the Alert Group is evaluated (``evaluateInterval``)
123 | 
124 | * How long the AlertCondition needs to be met before the alert fires (``evaluateFor``)
125 | 
126 | * Annotations and labels, which help provide contextual information and direct where
127 |   your alerts will go
128 | 
129 | 
130 | Generating Alerts
131 | =================
132 | 
133 | If you save either of the above examples for Grafana v8 or v9 as ``example.alertgroup.py``
134 | (the suffix must be ``.alertgroup.py``), you can then generate the JSON alert with:
135 | 
136 | .. code-block:: console
137 | 
138 |    $ generate-alertgroup -o alerts.json example.alertgroup.py
139 | 
140 | Uploading alerts from code
141 | ==========================
142 | 
143 | As Grafana does not currently have a user interface for importing alertgroup JSON,
144 | you must either upload the alerts via Grafana's REST API or use file-based provisioning.
145 | 
146 | Uploading alerts from code using REST API
147 | -----------------------------------------
148 | 
149 | The following example provides minimal code boilerplate for it:
150 | 
151 | .. literalinclude:: ../grafanalib/tests/examples/example.upload-alerts.py
152 |    :language: python
153 | 
154 | Uploading alerts from code using File Based Provisioning
155 | --------------------------------------------------------
156 | 
157 | The alternative to using Grafana's REST API is to use its file-based provisioning for
158 | alerting.
159 | 
160 | The following example uses the ``AlertFileBasedProvisioning`` class to provision a list
161 | of alert groups:
162 | 
163 | .. literalinclude:: ../grafanalib/tests/examples/example.alertsv9.alertfilebasedprovisioning.py
164 |    :language: python
165 | 
166 | If you save the above example as ``example.alertfilebasedprovisioning.py``
167 | (the suffix must be ``.alertfilebasedprovisioning.py``), you can then generate the JSON alert with:
168 | 
169 | .. code-block:: console
170 | 
171 |    $ generate-alertgroup -o alerts.json example.alertfilebasedprovisioning.py
172 | 
173 | Then place the file in the ``provisioning/alerting`` directory and start Grafana.
174 | Tools like Ansible can assist with the deployment of the alert file.
175 | 
176 | Installation
177 | ============
178 | 
179 | grafanalib is just a Python package, so:
180 | 
181 | .. code-block:: console
182 | 
183 |    $ pip install grafanalib
184 | 
185 | Support
186 | =======
187 | 
188 | This library is in its very early stages. We'll probably make changes that
189 | break backwards compatibility, although we'll try hard not to.
190 | 
191 | grafanalib works with Python 3.7, 3.8, 3.9, 3.10 and 3.11.
192 | 
193 | Developing
194 | ==========
195 | If you're working on the project, and need to build from source, it's done as follows:
196 | 
197 | .. code-block:: console
198 | 
199 |    $ virtualenv .env
200 |    $ . ./.env/bin/activate
201 |    $ pip install -e .
202 | 
203 | Configuring Grafana Datasources
204 | ===============================
205 | 
206 | This repo used to contain a program ``gfdatasource`` for configuring
207 | Grafana data sources, but it has been retired since Grafana now has a
208 | built-in way to do it. See https://grafana.com/docs/administration/provisioning/#datasources
209 | 
--------------------------------------------------------------------------------
/grafanalib/formatunits.py:
--------------------------------------------------------------------------------
1 | """
2 | Grafana unit formats
3 | (https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts)
4 | 
5 | To use:
6 |   from grafanalib import formatunits as UNITS
7 | 
8 |   format = UNITS.BYTES
9 | """
10 | 
11 | NO_FORMAT = 'none'
12 | NONE_FORMAT = 'none'
13 | NUMBER_FORMAT = 'none'
14 | STRING_FORMAT = 'string'
15 | PERCENT_UNIT = 'percentunit'
16 | PERCENT_FORMAT = 'percent'
17 | SHORT = 'short'
18 | HUMIDITY = 'humidity'  # %H
19 | DECIBEL = 'dB'
20 | HEXADECIMAL_OX = 'hex0x'  # 0x
21 | HEXADECIMAL = 'hex'
22 | SCI_NOTATION = 'sci'
23 | LOCAL_FORMAT = 'locale'
24 | PIXELS = 'pixel'
25 | # Acceleration
26 | METERS_SEC_2 = 'accMS2'  # m/sec²
27 | FEET_SEC_2 = 'accFS2'  # f/sec²
28 | G_UNIT = 'accG'  # g
29 | # Angle
30 | DEGREES = 'degree'  # °
31 | RADIANS = 'radian'  # rad
32 | GRADIAN = 'grad'  # grad
33 | ARC_MINUTES = 'arcmin'  # arcmin
34 | ARC_SECONDS = 'arcsec'  # arcsec
35 | # Area
36 | SQUARE_METERS = 'areaM2'  # m²
37 | SQUARE_FEET = 'areaF2'  # ft²
38 | SQUARE_MILES = 'areaMI2'  # mi²
39 | # Computation
40 | FLOPS_PER_SEC = 'flops'  # FLOP/s
41 | MEGA_FLOPS_PER_SEC = 'mflops'  # MFLOP/s
42 | GIGA_FLOPS_PER_SEC = 'gflops'  # GFLOP/s
43 | TERA_FLOPS_PER_SEC = 'tflops'  # TFLOP/s
44 | PETA_FLOPS_PER_SEC = 'pflops'  # PFLOP/s
45 | EXA_FLOPS_PER_SEC = 'eflops'  # EFLOP/s
46 | ZETTA_FLOPS_PER_SEC = 'zflops'  # ZFLOP/s
47 | YOTTA_FLOPS_PER_SEC = 'yflops'  # YFLOP/s
48 | # Concentration
49 | PARTS_PER_MILLION = 'ppm'  # ppm
50 | PARTS_PER_BILLION = 'conppb'  # ppb
51 | NANO_GRAM_PER_CUBIC_METER = 'conngm3'  # ng/m³
52 | NANO_GRAM_PER_NORMAL_CUBIC_METER = 'conngNm3'  # ng/Nm³
53 | MICRO_GRAM_PER_CUBIC_METER = 'conμgm3'  # μg/m³
54 | MICRO_GRAM_PER_NORMAL_CUBIC_METER = 'conμgNm3'  # μg/Nm³
55 | 
MILLI_GRAM_PER_CUBIC_METER = 'conmgm3' # mg/m³ 56 | MILLI_GRAM_PER_NORMAL_CUBIC_METER = 'conmgNm3' # mg/Nm³ 57 | GRAM_PER_CUBIC_METER = 'congm3' # g/m³ 58 | GRAM_PER_NORMAL_CUBIC_METER = 'congNm3' # g/Nm³ 59 | MILLI_GRAM_PER_DECI_LITRE = 'conmgdL' # mg/dL 60 | MILLI_MOLES_PER_LITRE = 'conmmolL' # mmol/L 61 | # Currency 62 | DOLLARS = 'currencyUSD' # $ 63 | POUNDS = 'currencyGBP' # £ 64 | EURO = 'currencyEUR' # € 65 | YEN = 'currencyJPY' # ¥ 66 | RUBLES = 'currencyRUB' # ₽ 67 | HRYVNIAS = 'currencyUAH' # ₴ 68 | REAL = 'currencyBRL' # R$ 69 | DANISH_KRONE = 'currencyDKK' # kr 70 | ICELANDIC_KRONA = 'currencyISK' # kr 71 | NORWEGIAN_KRONE = 'currencyNOK' # kr 72 | SWEDISH_KORNA = 'currencySEK' # kr 73 | CZECH_KORUNA = 'currencyCZK' # czk 74 | SWISS_FRANC = 'currencyCHF' # CHF 75 | POLISH_ZLOTY = 'currencyPLN' # PLN 76 | BITCOIN = 'currencyBTC' # ฿ 77 | MILLI_BITCOIN = 'currencymBTC' # mBTC 78 | MICRO_BITCOIN = 'currencyμBTC' # μBTC 79 | SOUTH_AFRICAN_RAND = 'currencyZAR' # R 80 | INDIAN_RUPEE = 'currencyINR' # ₹ 81 | SOUTH_KOREAN_WON = 'currencyKRW' # ₩ 82 | INDONESIAN_RUPIAH = 'currencyIDR' # Rp 83 | PHILIPPINE_PESO = 'currencyPHP' # PHP 84 | # Data 85 | BYTES_IEC = 'bytes' 86 | BYTES = 'decbytes' # B 87 | BITS_IEC = 'bits' 88 | BITS = 'decbits' 89 | KIBI_BYTES = 'kbytes' # KiB 90 | KILO_BYTES = 'deckbytes' # kB 91 | MEBI_BYTES = 'mbytes' # MiB 92 | MEGA_BYTES = 'decmbytes' # MB 93 | GIBI_BYTES = 'gbytes' # GiB 94 | GIGA_BYTES = 'decgbytes' # GB 95 | TEBI_BYTES = 'tbytes' # TiB 96 | TERA_BYTES = 'dectbytes' # TB 97 | PEBI_BYTES = 'pbytes' # PiB 98 | PETA_BYTES = 'decpbytes' # PB 99 | # Data Rate 100 | PACKETS_SEC = 'pps' # p/s 101 | 102 | BYTES_SEC_IEC = 'binBps' # B/s 103 | KIBI_BYTES_SEC = 'KiBs' # KiB/s 104 | MEBI_BYTES_SEC = 'MiBs' # MiB/s 105 | GIBI_BYTES_SEC = 'GiBs' # GiB/s 106 | TEBI_BYTES_SEC = 'TiBs' # TiB/s 107 | PEBI_BYTES_SEC = 'PiBs' # PB/s 108 | 109 | BYTES_SEC = 'Bps' # B/s 110 | KILO_BYTES_SEC = 'KBs' # kB/s 111 | MEGA_BYTES_SEC = 'MBs' # MB/s 112 | GIGA_BYTES_SEC = 'GBs' # GB/s 113 | TERA_BYTES_SEC = 'TBs' # TB/s 114 | PETA_BYTES_SEC = 'PBs' # PB/s 115 | 116 | BITS_SEC_IEC = 'binbps' # b/s 117 | KIBI_BITS_SEC = 'Kibits' # Kib/s 118 | MEBI_BITS_SEC = 'Mibits' # Mib/s 119 | GIBI_BITS_SEC = 'Gibits' # Gib/s 120 | TEBI_BITS_SEC = 'Tibits' # Tib/s 121 | PEBI_BITS_SEC = 'Pibits' # Pib/s 122 | 123 | BITS_SEC = 'bps' # b/s 124 | KILO_BITS_SEC = 'Kbits' # kb/s 125 | MEGA_BITS_SEC = 'Mbits' # Mb/s 126 | GIGA_BITS_SEC = 'Gbits' # Gb/s 127 | TERA_BITS_SEC = 'Tbits' # Tb/s 128 | PETA_BITS_SEC = 'Pbits' # Pb/s 129 | # Date & Time 130 | DATE_TIME_ISO = 'dateTimeAsIso' 131 | DATE_TIME_ISO_TODAY = 'dateTimeAsIsoNoDateIfToday' 132 | DATE_TIME_US = 'dateTimeAsUS' 133 | DATE_TIME_US_TODAY = 'dateTimeAsUSNoDateIfToday' 134 | DATE_TIME_LOCAL = 'dateTimeAsLocal' 135 | DATE_TIME_LOCAL_TODAY = 'dateTimeAsLocalNoDateIfToday' 136 | DATE_TIME_DEFAULT = 'dateTimeAsSystem' 137 | DATE_TIME_FROM_NOW = 'dateTimeFromNow' 138 | # Energy 139 | WATT = 'watt' # W 140 | KILO_WATT = 'kwatt' # kW 141 | MEGA_WATT = 'megwatt' # MW 142 | GIGA_WATT = 'gwatt' # GW 143 | MILLI_WATT = 'mwatt' # mW 144 | WATT_SQUARE_METER = 'Wm2' # W/m² 145 | VOLT_AMPERE = 'voltamp' # VA 146 | KILO_VOLT_AMPERE = 'kvoltamp' # kVA 147 | VAR = 'voltampreact' # VAR 148 | KILO_VAR = 'kvoltampreact' # kVAR 149 | WATT_HOUR = 'watth' # Wh 150 | WATT_HOUR_KILO = 'watthperkg' # Wh/kg 151 | KILO_WATT_HOUR = 'kwatth' # kWh 152 | KILO_WATT_MIN = 'kwattm' # kWm 153 | AMPERE_HOUR = 'amph' # Ah 154 | KILO_AMPERE_HR = 'kamph' # kAh 155 | MILLI_AMPER_HOUR 
= 'mamph' # mAh 156 | JOULE = 'joule' # J 157 | ELECTRON_VOLT = 'ev' # eV 158 | AMPERE = 'amp' # A 159 | KILO_AMPERE = 'kamp' # kA 160 | MILLI_AMPERE = 'mamp' # mA 161 | VOLT = 'volt' # V 162 | KILO_VOLT = 'kvolt' # kV 163 | MILLI_VOLT = 'mvolt' # mV 164 | DECIBEL_MILLI_WATT = 'dBm' # dBm 165 | OHM = 'ohm' # Ω 166 | KILO_OHM = 'kohm' # kΩ 167 | MEGA_OHM = 'Mohm' # MΩ 168 | FARAD = 'farad' # F 169 | MICRO_FARAD = 'µfarad' # µF 170 | NANO_FARAD = 'nfarad' # nF 171 | PICO_FARAD = 'pfarad' # pF 172 | FEMTO_FARAD = 'ffarad' # fF 173 | HENRY = 'henry' # H 174 | MILLI_HENRY = 'mhenry' # mH 175 | MICRO_HENRY = 'µhenry' # µH 176 | LUMENS = 'lumens' # Lm 177 | # Flow 178 | GALLONS_PER_MIN = 'flowgpm' # gpm 179 | CUBIC_METERS_PER_SEC = 'flowcms' # cms 180 | CUBIC_FEET_PER_SEC = 'flowcfs' # cfs 181 | CUBIC_FEET_PER_MIN = 'flowcfm' # cfm 182 | LITRES_PER_HOUR = 'litreh' # L/h 183 | LITRES_PER_MIN = 'flowlpm' # L/min 184 | MILLI_LITRE_PER_MIN = 'flowmlpm' # mL/min 185 | LUX = 'lux' # lx 186 | # Force 187 | NEWTON_METERS = 'forceNm' # Nm 188 | KILO_NEWTON_METERS = 'forcekNm' # kNm 189 | NEWTONS = 'forceN' # N 190 | KILO_NEWTONS = 'forcekN' # kN 191 | # Hash Rate 192 | HASHES_PER_SEC = 'Hs' # H/s 193 | KILO_HASHES_PER_SEC = 'KHs' # kH/s 194 | MEGA_HASHES_PER_SEC = 'MHs' # MH/s 195 | GIGA_HASHES_PER_SEC = 'GHs' # GH/s 196 | TERA_HASHES_PER_SEC = 'THs' # TH/s 197 | PETA_HASHES_PER_SEC = 'PHs' # PH/s 198 | EXA_HASHES_PER_SEC = 'EHs' # EH/s 199 | # Mass 200 | MILLI_GRAM = 'massmg' # mg 201 | GRAM = 'massg' # g 202 | POUND = 'masslb' # lb 203 | KILO_GRAM = 'masskg' # kg 204 | METRIC_TON = 'masst' # t 205 | # Length 206 | MILLI_METER = 'lengthmm' # mm 207 | INCH = 'lengthin' # in 208 | METER = 'lengthm' # m 209 | KILO_METER = 'lengthkm' # km 210 | FEET = 'lengthft' # ft 211 | MILE = 'lengthmi' # mi 212 | # Pressure 213 | MILLI_BARS = 'pressurembar' # mBar, 214 | BARS = 'pressurebar' # Bar, 215 | KILO_BARS = 'pressurekbar' # kBar, 216 | PASCALS = 'pressurepa' # Pa 217 | HECTO_PASCALS = 'pressurehpa' # hPa 218 | KILO_PASCALS = 'pressurekpa' # kPa 219 | INCHES_OF_MERCURY = 'pressurehg' # "Hg 220 | PSI = 'pressurepsi' # psi 221 | # Radiation 222 | BECQUEREL = 'radbq' # Bq 223 | CURIE = 'radci' # Ci 224 | GRAY = 'radgy' # Gy 225 | RAD = 'radrad' # rad 226 | MICROSIEVERT = 'radusv' # µSv 227 | MILLI_SIEVERT = 'radmsv' # mSv 228 | SIEVERT = 'radsv' # Sv 229 | REM = 'radrem' # rem 230 | EXPOSURE = 'radexpckg' # C/kg 231 | ROENTGEN = 'radr' # R 232 | MICRO_SIEVERT_PER_HOUR = 'radusvh' # µSv/h 233 | MILLI_SIEVERT_PER_HOUR = 'radmsvh' # mSv/h 234 | SIEVERT_PER_HOUR = 'radsvh' # Sv/h 235 | # Rotational Speed 236 | RPM = 'rotrpm' # rpm 237 | HERTZ_ROTATION = 'rothz' # Hz 238 | RADS_PER_SEC = 'rotrads' # rad/s 239 | DEGREES_PER_SECOND = 'rotdegs' # °/s 240 | # Temperature 241 | CELSIUS = 'celsius' # °C 242 | FAHRENHEIT = 'fahrenheit' # °F 243 | KELVIN = 'kelvin' # K 244 | # Time 245 | HERTZ = 'hertz' # Hz 246 | NANO_SECONDS = 'ns' # ns 247 | MICRO_SECONDS = 'µs' # µs 248 | MILLI_SECONDS = 'ms' # ms 249 | SECONDS = 's' # s 250 | MINUTES = 'm' # m 251 | HOURS = 'h' # h 252 | DAYS = 'd' # d 253 | DURATION_MILLI_SECONDS = 'dtdurationms' # ms 254 | DURATION_SECONDS = 'dtdurations' # s 255 | HH_MM_SS = 'dthms' # hh:mm:ss 256 | D_HH_MM_SS = 'dtdhms' # d hh:mm:ss 257 | TIME_TICKS = 'timeticks' # s/100 258 | CLOCK_MSEC = 'clockms' # ms 259 | CLOCK_SEC = 'clocks' # s 260 | # Throughput 261 | COUNTS_PER_SEC = 'cps' # cps 262 | OPS_PER_SEC = 'ops' # ops 263 | REQUESTS_PER_SEC = 'reqps' # rps 264 | READS_PER_SEC = 'rps' # rps 265 | 
WRITES_PER_SEC = 'wps' # wps 266 | IO_OPS_PER_SEC = 'iops' # iops 267 | COUNTS_PER_MIN = 'cpm' # cpm 268 | OPS_PER_MIN = 'opm' # opm 269 | READS_PER_MIN = 'rpm' # rpm 270 | WRITES_PER_MIN = 'wpm' # wpm 271 | # Velocity 272 | METERS_PER_SEC = 'velocityms' # m/s 273 | KILO_METERS_PER_SEC = 'velocitykmh' # km/h 274 | MILES_PER_HOUR = 'velocitymph' # mph 275 | KNOTS = 'velocityknot' # kn 276 | # Volume 277 | MILLI_LITRE = 'mlitre' # mL 278 | LITRE = 'litre' # L 279 | CUBIC_METER = 'm3' # m³ 280 | NORMAL_CUBIC_METER = 'Nm3' # Nm³ 281 | CUBIC_DECI_METER = 'dm3' # dm³ 282 | GALLONS = 'gallons' # g 283 | # Boolean 284 | TRUE_FALSE = 'bool' # True/False 285 | YES_NO = 'bool_yes_no' # Yes/No 286 | ON_OFF = 'bool_on_off' # On/Off 287 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   Copyright 2014-2016 Weaveworks Ltd.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
=========
Changelog
=========

x.x.x ?
=======

* Add ``QueryMode`` parameter in CloudwatchMetricsTarget
* Added support for ``alias`` via the ``legendFormat`` option for ``Target``
* Added ``neutral`` option for ``GaugePanel`` (supported by Grafana 9.3.0 - https://github.com/grafana/grafana/discussions/38273)
* **Breaking change:** Fixed spelling errors for temperature units, corrected 'CELSUIS' to 'CELSIUS' and 'FARENHEIT' to 'FAHRENHEIT'.
* Added ``tooltipSort`` parameter to PieChartv2 panel
* Fix mappings for Table
* Added support for AWS Cross-Account in CloudwatchMetricsTarget
* Added ``LokiTarget``

0.7.1 (2024-01-12)
==================

* Extended DashboardLink to support links to dashboards and URLs, as per the docs_

..
_`docs`: https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/manage-dashboard-links/#dashboard-links 24 | 25 | * Fix default options for Heatmap 26 | * Add Unit option for Graph panel 27 | * Added Minimum option for Timeseries 28 | * Added Maximum option for Timeseries 29 | * Added Number of decimals displays option for Timeseries 30 | * Added Bar_Chart_ panel support 31 | * Extended SqlTarget to support parsing queries from files 32 | * Fix AlertCondition backwards compatibility (``useNewAlerts`` default to ``False``) 33 | * Added RateMetricAgg_ for ElasticSearch 34 | * added axisSoftMin and axisSoftMax options for TimeSeries 35 | * Added support for Azure Data Explorer datasource plugin (https://github.com/grafana/azure-data-explorer-datasource) 36 | * Added ``sortBy`` parameter to Table panel 37 | * Added ``tooltipSort`` parameter to TimeSeries panel 38 | * Added unit parameter to the Table class in core 39 | * Added a hide parameter to ElasticsearchTarget 40 | * Fix value literal GAUGE_CALC_TOTAL to sum instead of total 41 | * Fix `BarGauge` orientation validation to accept `'auto'` 42 | 43 | .. _`Bar_Chart`: https://grafana.com/docs/grafana/latest/panels-visualizations/visualizations/bar-chart/ 44 | .. _`RateMetricAgg`: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-rate-aggregation.html 45 | 46 | 47 | 0.7.0 (2022-10-02) 48 | ================== 49 | 50 | * Added Grafana 8.x new Alert Rule 51 | * Added Grafana 9.x new Alert Rule 52 | * Added ePict_ plugin. 53 | * Added ae3e plotly panel support 54 | * Added datasource parameter to Influxdb targets 55 | * Added missing units for Boolean, Data, Data Rate, Date & Time, Energy, Length, 56 | Mass, and Misc 57 | * Fix typo in unit constant ``GIGA_WATT`` (was ``GAGA_WATT``) 58 | * Fix typo in unit constant ``NORMAL_CUBIC_METER`` (was ``NORMAIL_CUBIC_METER``) 59 | 60 | .. 
_`ePict`: https://grafana.com/grafana/plugins/larona-epict-panel/ 61 | 62 | 0.6.3 (2022-03-30) 63 | ================== 64 | 65 | * Added Azure Monitor Target 66 | * Added ``legendCalcs`` parameter to TimeSeries Panel 67 | * Added ``hide`` parameter to ElasticsearchTarget 68 | * Added ExpressionTarget support for ElasticSearch data sources 69 | 70 | 71 | 0.6.2 (2022-02-24) 72 | ================== 73 | 74 | * Added percentage type for thresholds 75 | * Added ``datasource`` parameter to CloudWatch targets 76 | * Added support for auto panels ids to AlertList panel 77 | * Added ``SeriesOverride`` options (dashes and Z-index) 78 | * Added support for fields value in Stat panel 79 | * Added ``alertName`` parameter to AlertList panel 80 | * Added ``thresholdsStyleMode`` parameter to TimeSeries panel 81 | * Added Histogram panel support 82 | * Dashboard upload script updated to support overwriting dashboards 83 | 84 | 0.6.1 (2021-11-23) 85 | ================== 86 | 87 | * Added new SqlTarget to core to be able to define SQL queries as well 88 | * Added missing attributes to the Logs panel 89 | * Added Cloudwatch Logs Insights Target 90 | * Added overrides to panels 91 | * Extend ``SeriesOverride`` options 92 | 93 | Changes 94 | ------- 95 | 96 | * Fix Text panel (and add tests) 97 | 98 | **ATTENTION:** This might break panels generated for Grafana <8.0.6 99 | 100 | 0.6.0 (2021-10-26) 101 | =================== 102 | 103 | * Added Discrete panel (https://grafana.com/grafana/plugins/natel-discrete-panel/) 104 | * Added support for colors in stat mapping panel with StatValueMappings & StatRangeMappings 105 | * Added missing auto interval properties in Template 106 | * Added param to RowPanel to collapse the row 107 | * Added StateTimeline panel which was added in Grafana v8 108 | * Added support for timeseries panel added in Grafana v8 109 | * Added MinMetricAgg and PercentilesMetricAgg to Elasticsearch 110 | * Added support for News panel 111 | * Added support for Pie Chart v2 from Grafana v8 112 | 113 | Changes 114 | ------- 115 | 116 | * Refine expectations of is_color_code 117 | * Deprecated StatMapping, StatValueMapping & StatRangeMapping 118 | * Change YAxis min value default from 0 to None 119 | * Support for Table panel for Grafana v8 may have broken backwards compatibility in old Table panel 120 | * Breaking change, support for styled columns in tables removed, no longer used in Grafana v8 new Table 121 | * Move development to ``main`` branch on GitHub. If you have work tracking the ``master`` you will need to update this. 122 | 123 | 0.5.14 (2021-09-14) 124 | ================== 125 | 126 | * Added colour overrides to pie chart panel 127 | * Added missing attributes from xAxis class 128 | * Added transformations for the Panel class (https://grafana.com/docs/grafana/latest/panels/reference-transformation-functions/) 129 | * Added Worldmap panel (https://grafana.com/grafana/plugins/grafana-worldmap-panel/) 130 | * Added missing fill gradient to Graph panel 131 | * Added missing align to graph panel 132 | * Added missing show percentage attribute to Pie chart panel 133 | * Added ``extraJson`` attribute to the Panel class for overriding the panel with raw JSON 134 | * Added inline script support for Elasticsearch metrics 135 | * Selected needs to be set as a bool value for templating to work. 136 | 137 | 0.5.13 (2021-05-17) 138 | =================== 139 | 140 | * Added a test for the Alert class. 
141 | 142 | Changes 143 | ------- 144 | 145 | * Bugfix: changed 'target' validator in AlertNotification to accept CloudwatchMetricsTarget 146 | * Moved the alertRuleTag field from Graph to Alert. 147 | 148 | 0.5.12 (2021-04-24) 149 | =================== 150 | 151 | * Added hide parameter to CloudwatchMetricsTarget class 152 | * Added table-driven example dashboard and upload script 153 | 154 | Changes 155 | ------- 156 | 157 | * bugfix load_dashboard add support for old python version 2.x, 3.3 and 3.4 158 | * Fix default target datasource to work with newer versions of Grafana 159 | * Removed re-defined maxDataPoints field from multiple panels 160 | * Fix the AlertList class and add a test for it 161 | 162 | Thanks to all those who have contributed to this release. 163 | 164 | 165 | 0.5.11 (2021-04-06) 166 | =================== 167 | 168 | * Added timeField field for the Elasticsearch target to allow the alert to change its state 169 | * Added nameFilter field for the AlertList panel 170 | * Added dashboardTags field for the AlertList panel 171 | 172 | Thanks a lot for your contributions to this release, @dafna-starkware 173 | 174 | 0.5.10 (2021-03-21) 175 | =================== 176 | 177 | * Added Logs panel (https://grafana.com/docs/grafana/latest/panels/visualizations/logs-panel/) 178 | * Added Cloudwatch metrics datasource (https://grafana.com/docs/grafana/latest/datasources/cloudwatch/) 179 | * Added option to hide dashboard time picker 180 | * Added Notification for Alert 181 | * Added alertRuleTags field to the graph panel 182 | * Added support for thresholds to graph panel 183 | * Added support for Elasticsearch alert condition 184 | * Added support for using gridPos for dashboard panels 185 | * Added support for Humio Data Source. (https://grafana.com/grafana/plugins/humio-datasource/) 186 | 187 | 188 | Changes 189 | ------- 190 | 191 | * Replace deprecated attr.assoc with attr.evolve 192 | 193 | 194 | 195 | 0.5.9 (2020-12-18) 196 | ================== 197 | 198 | * Added Alert Threshold enabled/disabled to Graphs. 199 | * Added constants for all Grafana value formats 200 | * Added support for repetitions to Stat Panels 201 | * Added textMode option to Stat Panels 202 | * Add Panel object for all panels to inherit from 203 | * Add Dashboard list panel (https://grafana.com/docs/grafana/latest/panels/visualizations/dashboard-list-panel/) 204 | 205 | 206 | Changes 207 | ------- 208 | 209 | * Change supported python versions from 3.6 to 3.9 210 | * Added hide parameter to Target 211 | * Updated dependencies (docs, build, CI) 212 | * Consistent coding style 213 | 214 | 215 | 0.5.8 (2020-11-02) 216 | ================== 217 | 218 | This release adds quite a few new classes to grafanalib, ElasticSearch support was improved and support for InfluxDB data sources was added. 219 | 220 | We would also very much like to welcome James Gibson as new maintainer of grafanalib. Thanks a lot for stepping up to this role! 

Changes
-------

* Added more YAxis formats, added Threshold and SeriesOverride types
* dataLinks support in graphs
* Add Elasticsearch bucket script pipeline aggregator
* Added ability to hide metrics for Elasticsearch MetricAggs
* Add derivative metric aggregation for Elasticsearch
* Add ``Stat`` class (and ``StatMapping``, ``StatValueMapping``, ``StatRangeMapping``) to support the Stat panel
* Add ``Svg`` class to support the SVG panel
* Add ``PieChart`` class for creating Pie Chart panels
* Add ``transparent`` setting to classes that were missing it (Heatmap, PieChart)
* Add InfluxDB data source
* Add ``auto_ref_ids`` to ``Graph``

Thanks a lot for your contributions to this release, @DWalker487, @JamesGibo, @daveworth, @dholbach, @fauust, @larsderidder, @matthewmrichter.


0.5.7 (2020-05-11)
==================

Changes
-------

* Fix crasher instantiating elasticsearch panels.
* Remove unused ``tools/`` directory.

Thanks a lot for your contributions to this release, @DWalker487, @dholbach and @matthewmrichter.


0.5.6 (2020-05-05)
==================

Changes
-------

* Add ``Heatmap`` class (and ``HeatmapColor``) to support the Heatmap panel (#170)
* Add ``BarGauge`` for creating bar gauge panels in Grafana 6
* Add ``GaugePanel`` for creating gauges in Grafana 6
* Add data links support to ``Graph``, ``BarGauge``, and ``GaugePanel`` panels
* Removed gfdatasource - feature is built into Grafana since v5.
* Generate API docs for readthedocs.org
* Fix AlertList panel generation
* Add both upper and lower case ``"time"`` pattern for time_series column format in Table class
* Drop testing of Python 2.7; it has been EOL'ed and CI was broken
  due to this.
* Automatically test documentation examples.
* Point to dev meeting resources.
* Add description attribute to Dashboard.
* Add support for custom variables.
* Point out documentation on readthedocs more clearly.
* Add average metric aggregation for Elasticsearch
* Bugfix to query ordering in Elasticsearch TermsGroupBy
* Added all parameters for StringColumnStyle
* Add Elasticsearch Sum metric aggregator
* Add ``Statusmap`` class (and ``StatusmapColor``) to support the Statusmap panel plugin
* Bugfix to update default ``Threshold`` values for ``GaugePanel`` and ``BarGauge``
* Use GitHub Actions for CI.
* Fix test warnings.
* Update ``BarGauge`` and ``GaugePanel`` default Threshold values.
* Update release instructions.

Thanks a lot to @DWalker487, @bboreham, @butlerx, @dholbach, @franzs, @jaychitalia95, @matthewmrichter and @number492 for their contributions to this release!

0.5.5 (2020-02-17)
==================

It's been a while since the last release and we are happy to get this one into your hands.
0.5.5 is a maintenance release; most importantly, it adds support for Python >= 3.5.

We are very delighted to welcome Matt Richter on board as maintainer.
293 | 294 | Changes 295 | ------- 296 | 297 | * Automate publishing to PyPI with GitHub Actions 298 | * Update README.rst to make the example work 299 | * Bump Dockerfile to use Alpine 3.10 as base 300 | * Fix up ``load_source()`` call which doesn't exist in Python 3.5 301 | * Update versions of Python tested 302 | * Repair tests 303 | * pin to attrs 19.2 and fix deprecated arguments 304 | 305 | Many thanks to contributors @bboreham, @dholbach, @ducksecops, @kevingessner, @matthewmrichter, @uritau. 306 | 307 | 0.5.4 (2019-08-30) 308 | ================== 309 | 310 | Changes 311 | ------- 312 | 313 | * Add 'diff', 'percent_diff' and 'count_non_null' as RTYPE 314 | * Support for changing sort value in Template Variables. 315 | * Sort tooltips by value in Weave/Stacked-Charts 316 | * Add ``for`` parameter for alerts on Grafana 6.X 317 | * Add ``STATE_OK`` for alerts 318 | * Add named values for the Template.hide parameter 319 | * Add cardinality metric aggregator for ElasticSearch 320 | * Add Threshold and Series Override types 321 | * Add more YAxis formats 322 | 323 | Many thanks to contributors @kevingessner, @2easy, @vicmarbev, @butlerx. 324 | 325 | 0.5.3 (2018-07-19) 326 | ================== 327 | 328 | Changes 329 | ------- 330 | 331 | * Minor markup tweaks to the README 332 | 333 | 0.5.2 (2018-07-19) 334 | ================== 335 | 336 | Fixes 337 | ----- 338 | 339 | * ``PromGraph`` was losing all its legends. It doesn't any more. (`#130`_) 340 | 341 | .. _`#130`: https://github.com/weaveworks/grafanalib/pull/130 342 | 343 | Changes 344 | ------- 345 | 346 | * Add ``AlertList`` panel support 347 | * Add support for mixed data sources 348 | * Add ``ExternalLink`` class for dashboard-level links to other pages 349 | * Template now supports 'type' and 'hide' attributes 350 | * Legend now supports ``sort`` and ``sortDesc`` attributes 351 | * Tables now support ``timeFrom`` attribute 352 | * Update README.rst with information on how to get help. 353 | 354 | 355 | 0.5.1 (2018-02-27) 356 | ================== 357 | 358 | Fixes 359 | ----- 360 | 361 | * Fix for crasher bug that broke ``SingleStat``, introduced by `#114`_ 362 | 363 | .. _`#114`: https://github.com/weaveworks/grafanalib/pull/114 364 | 365 | 366 | 0.5.0 (2018-02-26) 367 | ================== 368 | 369 | New features 370 | ------------ 371 | 372 | * grafanalib now supports Python 2.7. This enables it to be used within `Bazel `_. 373 | * Partial support for graphs against Elasticsearch datasources (https://github.com/weaveworks/grafanalib/pull/99) 374 | 375 | Extensions 376 | ---------- 377 | 378 | * Constants for days, hours, and minutes (https://github.com/weaveworks/grafanalib/pull/98) 379 | * Groups and tags can now be used with templates (https://github.com/weaveworks/grafanalib/pull/97) 380 | 381 | 382 | 0.4.0 (2017-11-23) 383 | ================== 384 | 385 | Massive release! 386 | 387 | It's Thanksgiving today, so more than ever I want to express my gratitude to 388 | all the people who have contributed to this release! 389 | 390 | * @aknuds1 391 | * @atopuzov 392 | * @bboreham 393 | * @fho 394 | * @filippog 395 | * @gaelL 396 | * @lalinsky 397 | * @leth 398 | * @lexfrei 399 | * @mikebryant 400 | 401 | New features 402 | ------------ 403 | 404 | * Support for ``Text`` panels 405 | (https://github.com/weaveworks/grafanalib/pull/63) 406 | * ``PromGraph`` is now more powerful. 
407 | If you want to pass extra parameters like ``intervalFactor`` to your 408 | targets, you can do so by listing targets as dictionaries, 409 | rather than tuples. 410 | (https://github.com/weaveworks/grafanalib/pull/66) 411 | * Support for absolute links to drill-down in graphs 412 | (https://github.com/weaveworks/grafanalib/pull/86) 413 | 414 | Changes 415 | ------- 416 | 417 | * Breaking change to ``weave.QPSGraph()`` - added ``data_source`` 418 | parameter and removed old hard-coded setting. 419 | (https://github.com/weaveworks/grafanalib/pull/77) 420 | 421 | Extensions 422 | ---------- 423 | 424 | Generally adding more parameters to existing things: 425 | 426 | * Graphs can now have descriptions or be transparent 427 | (https://github.com/weaveworks/grafanalib/pull/62 https://github.com/weaveworks/grafanalib/pull/89) 428 | * New formats: "bps" and "Bps" 429 | (https://github.com/weaveworks/grafanalib/pull/68) 430 | * Specify the "Min step" for a ``Target`` 431 | using the ``interval`` attribute. 432 | * Specify the number of decimals shown on the ``YAxis`` 433 | with the ``decimals`` attribute 434 | * Specify multiple ``Dashboard`` inputs, 435 | allowing dashboards to be parametrized by data source. 436 | (https://github.com/weaveworks/grafanalib/pull/83) 437 | * Templates 438 | * ``label`` is now optional (https://github.com/weaveworks/grafanalib/pull/92) 439 | * ``allValue`` and ``includeAll`` attributes now available (https://github.com/weaveworks/grafanalib/pull/67) 440 | * ``regex`` and ``multi`` attributes now available (https://github.com/weaveworks/grafanalib/pull/82) 441 | * Rows can now repeat (https://github.com/weaveworks/grafanalib/pull/82) 442 | * Add missing ``NULL_AS_NULL`` constant 443 | * Specify the "Instant" for a ``Target`` using the ``instant`` attribute. 444 | 445 | Fixes 446 | ----- 447 | 448 | * The ``showTitle`` parameter in ``Row`` is now respected 449 | (https://github.com/weaveworks/grafanalib/pull/80) 450 | 451 | 452 | 453 | 0.3.0 (2017-07-27) 454 | ================== 455 | 456 | New features 457 | ------------ 458 | 459 | * OpenTSDB datasource support (https://github.com/weaveworks/grafanalib/pull/27) 460 | * Grafana Zabbix plugin support 461 | (https://github.com/weaveworks/grafanalib/pull/31, https://github.com/weaveworks/grafanalib/pull/36) 462 | * ``Dashboard`` objects now have an ``auto_panel_id`` method which will 463 | automatically supply unique panel (i.e. graph) IDs for any panels that don't 464 | have one set. Dashboard config files no longer need to track their own 465 | ``GRAPH_ID`` counter. 466 | * Support for ``SingleStat`` panels 467 | (https://github.com/weaveworks/grafanalib/pull/22) 468 | * ``single_y_axis`` helper for the common case of a graph that has only one Y axis 469 | 470 | Improvements 471 | ------------ 472 | 473 | * ``PromGraph`` now lives in ``grafanalib.prometheus``, and takes a 474 | ``data_source`` parameter 475 | * Additional fields for ``Legend`` (https://github.com/weaveworks/grafanalib/pull/25) 476 | * Additional fields for ``XAxis`` 477 | (https://github.com/weaveworks/grafanalib/pull/28) 478 | * Get an error when you specify the wrong number of Y axes 479 | 480 | Changes 481 | ------- 482 | 483 | * New ``YAxes`` type for specifying Y axes. Using a list of two ``YAxis`` 484 | objects is deprecated. 


0.1.2 (2017-01-02)
==================

* Add support for Grafana Templates (https://github.com/weaveworks/grafanalib/pull/9)

0.1.1 (2016-12-02)
==================

* Include README on PyPI page

0.1.0 (2016-12-02)
==================

Initial release.

--------------------------------------------------------------------------------
/grafanalib/elasticsearch.py:
--------------------------------------------------------------------------------
"""Helpers to create Elasticsearch-specific Grafana queries."""

import attr
import itertools
from attr.validators import in_, instance_of
from grafanalib.core import AlertCondition

DATE_HISTOGRAM_DEFAULT_FIELD = 'time_iso8601'
ORDER_ASC = 'asc'
ORDER_DESC = 'desc'


@attr.s
class CountMetricAgg(object):
    """An aggregator that counts the number of values.

    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-valuecount-aggregation.html

    It's the default aggregator for elasticsearch queries.

    :param hide: show/hide the metric in the final panel display
    :param id: id of the metric
    :param inline: script to apply to the data, using '_value'
    """
    id = attr.ib(default=0, validator=instance_of(int))
    hide = attr.ib(default=False, validator=instance_of(bool))
    inline = attr.ib(default="", validator=instance_of(str))

    def to_json_data(self):
        self.settings = {}

        if self.inline:
            self.settings['script'] = {'inline': self.inline}

        return {
            'id': str(self.id),
            'hide': self.hide,
            'type': 'count',
            'field': 'select field',
            'inlineScript': self.inline,
            'settings': self.settings,
        }


@attr.s
class MaxMetricAgg(object):
    """An aggregator that provides the max. value among the values.

    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-max-aggregation.html

    :param field: name of elasticsearch field to provide the maximum for
    :param hide: show/hide the metric in the final panel display
    :param id: id of the metric
    :param inline: script to apply to the data, using '_value'
    """
    field = attr.ib(default="", validator=instance_of(str))
    id = attr.ib(default=0, validator=instance_of(int))
    hide = attr.ib(default=False, validator=instance_of(bool))
    inline = attr.ib(default="", validator=instance_of(str))

    def to_json_data(self):
        self.settings = {}

        if self.inline:
            self.settings['script'] = {'inline': self.inline}

        return {
            'id': str(self.id),
            'hide': self.hide,
            'type': 'max',
            'field': self.field,
            'inlineScript': self.inline,
            'settings': self.settings,
        }


@attr.s
class CardinalityMetricAgg(object):
    """An aggregator that provides the cardinality of the values.

    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-cardinality-aggregation.html

    :param field: name of elasticsearch field to provide the cardinality for
    :param id: id of the metric
    :param hide: show/hide the metric in the final panel display
    :param inline: script to apply to the data, using '_value'
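
    A minimal usage sketch (the field name is illustrative)::

        CardinalityMetricAgg(field='hostname.keyword', id=1)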
    """
    field = attr.ib(default="", validator=instance_of(str))
    id = attr.ib(default=0, validator=instance_of(int))
    hide = attr.ib(default=False, validator=instance_of(bool))
    inline = attr.ib(default="", validator=instance_of(str))

    def to_json_data(self):
        self.settings = {}

        if self.inline:
            self.settings['script'] = {'inline': self.inline}

        return {
            'id': str(self.id),
            'hide': self.hide,
            'type': 'cardinality',
            'field': self.field,
            'inlineScript': self.inline,
            'settings': self.settings,
        }


@attr.s
class AverageMetricAgg(object):
    """An aggregator that provides the average value of the values.

    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-avg-aggregation.html

    :param field: name of the elasticsearch field to average
    :param id: id of the metric
    :param hide: show/hide the metric in the final panel display
    :param inline: script to apply to the data, using '_value'
    """

    field = attr.ib(default="", validator=instance_of(str))
    id = attr.ib(default=0, validator=instance_of(int))
    hide = attr.ib(default=False, validator=instance_of(bool))
    inline = attr.ib(default="", validator=instance_of(str))

    def to_json_data(self):
        self.settings = {}

        if self.inline:
            self.settings['script'] = {'inline': self.inline}

        return {
            'id': str(self.id),
            'hide': self.hide,
            'type': 'avg',
            'field': self.field,
            'inlineScript': self.inline,
            'settings': self.settings,
            'meta': {}
        }


@attr.s
class DerivativeMetricAgg(object):
    """An aggregator that takes the derivative of another metric aggregator.

    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-derivative-aggregation.html

    :param field: id of the elasticsearch metric aggregator to take the derivative of
    :param hide: show/hide the metric in the final panel display
    :param id: id of the metric
    :param pipelineAgg: pipeline aggregator id
    :param unit: derivative units
    """
    field = attr.ib(default="", validator=instance_of(str))
    hide = attr.ib(default=False, validator=instance_of(bool))
    id = attr.ib(default=0, validator=instance_of(int))
    pipelineAgg = attr.ib(default=1, validator=instance_of(int))
    unit = attr.ib(default="", validator=instance_of(str))

    def to_json_data(self):
        settings = {}
        if self.unit != "":
            settings['unit'] = self.unit

        return {
            'id': str(self.id),
            'pipelineAgg': str(self.pipelineAgg),
            'hide': self.hide,
            'type': 'derivative',
            'field': self.field,
            'settings': settings,
        }


@attr.s
class SumMetricAgg(object):
    """An aggregator that provides the sum of the values.
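
    A minimal usage sketch (the field name is illustrative)::

        SumMetricAgg(field='bytes', id=1)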
178 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-sum-aggregation.html 179 | :param field: name of elasticsearch field to provide the sum over 180 | :param hide: show/hide the metric in the final panel display 181 | :param id: id of the metric 182 | :param inline: script to apply to the data, using '_value' 183 | """ 184 | field = attr.ib(default="", validator=instance_of(str)) 185 | id = attr.ib(default=0, validator=instance_of(int)) 186 | hide = attr.ib(default=False, validator=instance_of(bool)) 187 | inline = attr.ib(default="", validator=instance_of(str)) 188 | 189 | def to_json_data(self): 190 | self.settings = {} 191 | 192 | if self.inline: 193 | self.settings['script'] = {'inline': self.inline} 194 | 195 | return { 196 | 'id': str(self.id), 197 | 'hide': self.hide, 198 | 'type': 'sum', 199 | 'field': self.field, 200 | 'inlineScript': self.inline, 201 | 'settings': self.settings, 202 | } 203 | 204 | 205 | @attr.s 206 | class DateHistogramGroupBy(object): 207 | """A bucket aggregator that groups results by date. 208 | 209 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-datehistogram-aggregation.html 210 | 211 | :param id: ascending unique number per GroupBy clause 212 | :param field: name of the elasticsearch field to group by 213 | :param interval: interval to group by 214 | :param minDocCount: min. amount of records in the timespan to return a 215 | result 216 | """ 217 | id = attr.ib(default=0, validator=instance_of(int)) 218 | field = attr.ib( 219 | default=DATE_HISTOGRAM_DEFAULT_FIELD, 220 | validator=instance_of(str), 221 | ) 222 | interval = attr.ib(default='auto', validator=instance_of(str)) 223 | minDocCount = attr.ib(default=0, validator=instance_of(int)) 224 | 225 | def to_json_data(self): 226 | return { 227 | 'field': self.field, 228 | 'id': str(self.id), 229 | 'settings': { 230 | 'interval': self.interval, 231 | 'min_doc_count': self.minDocCount, 232 | 'trimEdges': 0, 233 | }, 234 | 'type': 'date_histogram', 235 | } 236 | 237 | 238 | @attr.s 239 | class BucketScriptAgg(object): 240 | """An aggregator that applies a bucket script to the results of previous aggregations. 241 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline-bucket-script-aggregation.html 242 | 243 | :param fields: dictionary of field names mapped to aggregation IDs to be used in the bucket script 244 | e.g. 
{ "field1":1 }, which allows the output of aggregate ID 1 to be referenced as 245 | params.field1 in the bucket script 246 | :param script: script to apply to the data using the variables specified in 'fields' 247 | :param id: id of the aggregator 248 | :param hide: show/hide the metric in the final panel display 249 | """ 250 | fields = attr.ib(factory=dict, validator=instance_of(dict)) 251 | id = attr.ib(default=0, validator=instance_of(int)) 252 | hide = attr.ib(default=False, validator=instance_of(bool)) 253 | script = attr.ib(default="", validator=instance_of(str)) 254 | 255 | def to_json_data(self): 256 | pipelineVars = [] 257 | for field in self.fields: 258 | pipelineVars.append({ 259 | 'name': str(field), 260 | 'pipelineAgg': str(self.fields[field]) 261 | }) 262 | 263 | return { 264 | 'field': 'select field', 265 | 'type': 'bucket_script', 266 | 'id': str(self.id), 267 | 'hide': self.hide, 268 | 'pipelineVariables': pipelineVars, 269 | 'settings': { 270 | 'script': self.script 271 | }, 272 | } 273 | 274 | 275 | @attr.s 276 | class Filter(object): 277 | """ A Filter for a FilterGroupBy aggregator. 278 | 279 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html 280 | 281 | :param label: label for the metric that is shown in the graph 282 | :param query: the query to filter by 283 | """ 284 | label = attr.ib(default="", validator=instance_of(str)) 285 | query = attr.ib(default="", validator=instance_of(str)) 286 | 287 | def to_json_data(self): 288 | return { 289 | 'label': self.label, 290 | 'query': self.query, 291 | } 292 | 293 | 294 | @attr.s 295 | class FiltersGroupBy(object): 296 | """ A bucket aggregator that groups records by a filter expression. 297 | 298 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html 299 | 300 | :param id: ascending unique number per GroupBy clause 301 | :param filters: list of Filter objects 302 | """ 303 | id = attr.ib(default=0, validator=instance_of(int)) 304 | filters = attr.ib(default=attr.Factory(list)) 305 | 306 | def to_json_data(self): 307 | return { 308 | 'id': str(self.id), 309 | 'settings': { 310 | 'filters': self.filters, 311 | }, 312 | 'type': 'filters', 313 | } 314 | 315 | 316 | @attr.s 317 | class TermsGroupBy(object): 318 | """ A multi-bucket aggregator based on field values. 319 | 320 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html 321 | 322 | :param id: ascending unique number per GroupBy clause 323 | :param field: name of the field to group by 324 | :param minDocCount: min. 
amount of matching records to return a result
    :param order: ORDER_ASC or ORDER_DESC
    :param orderBy: how to order the buckets: by term value ('_term'), by doc
        count ('_count'), or by a metric aggregator (use that aggregator's id
        as a string, e.g. "2")
    :param size: how many buckets are returned
    """
    field = attr.ib(validator=instance_of(str))
    id = attr.ib(default=0, validator=instance_of(int))
    minDocCount = attr.ib(default=1, validator=instance_of(int))
    order = attr.ib(default=ORDER_DESC, validator=instance_of(str))
    orderBy = attr.ib(default='_term', validator=instance_of(str))
    size = attr.ib(default=0, validator=instance_of(int))

    def to_json_data(self):
        return {
            'id': str(self.id),
            'type': 'terms',
            'field': self.field,
            'settings': {
                'min_doc_count': self.minDocCount,
                'order': self.order,
                'orderBy': self.orderBy,
                'size': self.size,
            },
        }


@attr.s
class ElasticsearchTarget(object):
    """Generates Elasticsearch target JSON structure.

    Grafana docs on using Elasticsearch:
    http://docs.grafana.org/features/datasources/elasticsearch/
    Elasticsearch docs on querying or reading data:
    https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html

    :param alias: legend alias
    :param bucketAggs: Bucket aggregators
    :param metricAggs: Metric Aggregators
    :param query: query
    :param refId: target reference id
    :param timeField: name of the elasticsearch time field
    :param hide: show/hide the target result in the final panel display
    """

    alias = attr.ib(default=None)
    bucketAggs = attr.ib(
        default=attr.Factory(lambda: [DateHistogramGroupBy()]),
    )
    metricAggs = attr.ib(default=attr.Factory(lambda: [CountMetricAgg()]))
    query = attr.ib(default="", validator=instance_of(str))
    refId = attr.ib(default="", validator=instance_of(str))
    timeField = attr.ib(default="@timestamp", validator=instance_of(str))
    hide = attr.ib(default=False, validator=instance_of(bool))

    def _map_bucket_aggs(self, f):
        return attr.evolve(self, bucketAggs=list(map(f, self.bucketAggs)))

    def auto_bucket_agg_ids(self):
        """Give unique IDs to all bucketAggs that lack one.

        Returns a new ``ElasticsearchTarget`` that is the same as this one,
        except all of the bucketAggs have their ``id`` property set. Any
        aggregators that already had an ``id`` keep it; all others get an
        auto-generated ID.

        If the bucketAggs do not have unique IDs, the generated graph will
        be broken.
        """
        ids = set([agg.id for agg in self.bucketAggs if agg.id])
        auto_ids = (i for i in itertools.count(1) if i not in ids)

        def set_id(agg):
            if agg.id:
                return agg

            return attr.evolve(agg, id=next(auto_ids))

        return self._map_bucket_aggs(set_id)

    def to_json_data(self):
        return {
            'alias': self.alias,
            'bucketAggs': self.bucketAggs,
            'metrics': self.metricAggs,
            'query': self.query,
            'refId': self.refId,
            'timeField': self.timeField,
            'hide': self.hide,
        }


@attr.s
class ElasticsearchAlertCondition(AlertCondition):
    """
    Override alert condition to support Elasticsearch targets.

    See AlertCondition for more information.
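
    A minimal, illustrative sketch (the query and threshold are made up, and
    the helper classes come from ``grafanalib.core``)::

        ElasticsearchAlertCondition(
            target=ElasticsearchTarget(query='status:500'),
            evaluator=GreaterThan(5),
            timeRange=TimeRange('5m', 'now'),
            operator=OP_AND,
            reducerType=RTYPE_SUM,
        )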
422 | 423 | :param Target target: Metric the alert condition is based on. 424 | :param Evaluator evaluator: How we decide whether we should alert on the 425 | metric. e.g. ``GreaterThan(5)`` means the metric must be greater than 5 426 | to trigger the condition. See ``GreaterThan``, ``LowerThan``, 427 | ``WithinRange``, ``OutsideRange``, ``NoValue``. 428 | :param TimeRange timeRange: How long the condition must be true for before 429 | we alert. 430 | :param operator: One of ``OP_AND`` or ``OP_OR``. How this condition 431 | combines with other conditions. 432 | :param reducerType: RTYPE_* 433 | :param type: CTYPE_* 434 | """ 435 | 436 | target = attr.ib(default=None, validator=instance_of(ElasticsearchTarget)) 437 | 438 | 439 | @attr.s 440 | class MinMetricAgg(object): 441 | """An aggregator that provides the min. value among the values. 442 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-min-aggregation.html 443 | :param field: name of elasticsearch field to provide the minimum for 444 | :param hide: show/hide the metric in the final panel display 445 | :param id: id of the metric 446 | :param inline: script to apply to the data, using '_value' 447 | """ 448 | 449 | field = attr.ib(default="", validator=instance_of(str)) 450 | id = attr.ib(default=0, validator=instance_of(int)) 451 | hide = attr.ib(default=False, validator=instance_of(bool)) 452 | inline = attr.ib(default="", validator=instance_of(str)) 453 | 454 | def to_json_data(self): 455 | self.settings = {} 456 | 457 | if self.inline: 458 | self.settings['script'] = {'inline': self.inline} 459 | 460 | return { 461 | 'id': str(self.id), 462 | 'hide': self.hide, 463 | 'type': 'min', 464 | 'field': self.field, 465 | 'inlineScript': self.inline, 466 | 'settings': self.settings, 467 | } 468 | 469 | 470 | @attr.s 471 | class PercentilesMetricAgg(object): 472 | """A multi-value metrics aggregation that calculates one or more percentiles over numeric values extracted from the aggregated documents 473 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-percentile-aggregation.html 474 | :param field: name of elasticsearch field to provide the percentiles for 475 | :param hide: show/hide the metric in the final panel display 476 | :param id: id of the metric 477 | :param inline: script to apply to the data, using '_value' 478 | :param percents: list of percentiles, like [95,99] 479 | """ 480 | 481 | field = attr.ib(default="", validator=instance_of(str)) 482 | id = attr.ib(default=0, validator=instance_of(int)) 483 | hide = attr.ib(default=False, validator=instance_of(bool)) 484 | inline = attr.ib(default="", validator=instance_of(str)) 485 | percents = attr.ib(default=attr.Factory(list)) 486 | settings = attr.ib(factory=dict) 487 | 488 | def to_json_data(self): 489 | self.settings = {} 490 | 491 | self.settings['percents'] = self.percents 492 | 493 | if self.inline: 494 | self.settings['script'] = {'inline': self.inline} 495 | 496 | return { 497 | 'id': str(self.id), 498 | 'hide': self.hide, 499 | 'type': 'percentiles', 500 | 'field': self.field, 501 | 'inlineScript': self.inline, 502 | 'settings': self.settings, 503 | } 504 | 505 | 506 | @attr.s 507 | class RateMetricAgg(object): 508 | """An aggregator that provides the rate of the values. 
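
    A minimal usage sketch (the field, unit and mode are illustrative)::

        RateMetricAgg(field='bytes', id=1, unit='minute', mode='sum')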
509 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-rate-aggregation.html 510 | :param field: name of elasticsearch field to provide the sum over 511 | :param hide: show/hide the metric in the final panel display 512 | :param id: id of the metric 513 | :param unit: calendar interval to group by 514 | supported calendar intervals 515 | https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-datehistogram-aggregation.html#calendar_intervals 516 | "minute" 517 | "hour" 518 | "day" 519 | "week" 520 | "month" 521 | "quarter" 522 | "year" 523 | :param mode: sum or count the values 524 | :param script: script to apply to the data, using '_value' 525 | """ 526 | 527 | field = attr.ib(default="", validator=instance_of(str)) 528 | id = attr.ib(default=0, validator=instance_of(int)) 529 | hide = attr.ib(default=False, validator=instance_of(bool)) 530 | unit = attr.ib(default="", validator=instance_of(str)) 531 | mode = attr.ib(default="", validator=in_(["", "value_count", "sum"])) 532 | script = attr.ib(default="", validator=instance_of(str)) 533 | 534 | def to_json_data(self): 535 | self.settings = {} 536 | 537 | if self.unit: 538 | self.settings["unit"] = self.unit 539 | 540 | if self.mode: 541 | self.settings["mode"] = self.mode 542 | 543 | if self.script: 544 | self.settings["script"] = self.script 545 | 546 | return { 547 | "id": str(self.id), 548 | "hide": self.hide, 549 | "field": self.field, 550 | "settings": self.settings, 551 | "type": "rate", 552 | } 553 | -------------------------------------------------------------------------------- /grafanalib/zabbix.py: -------------------------------------------------------------------------------- 1 | import attr 2 | import itertools 3 | from attr.validators import instance_of 4 | from numbers import Number 5 | from grafanalib.validators import is_interval, is_in, is_color_code, is_list_of 6 | from grafanalib.core import ( 7 | RGBA, Percent, Pixels, DashboardLink, 8 | DEFAULT_ROW_HEIGHT, BLANK, GREEN) 9 | 10 | ZABBIX_TRIGGERS_TYPE = 'alexanderzobnin-zabbix-triggers-panel' 11 | 12 | ZABBIX_QMODE_METRICS = 0 13 | ZABBIX_QMODE_SERVICES = 1 14 | ZABBIX_QMODE_TEXT = 2 15 | 16 | ZABBIX_SLA_PROP_STATUS = { 17 | 'name': 'Status', 18 | 'property': 'status'} 19 | 20 | ZABBIX_SLA_PROP_SLA = { 21 | 'name': 'SLA', 22 | 'property': 'sla'} 23 | 24 | ZABBIX_SLA_PROP_OKTIME = { 25 | 'name': 'OK time', 26 | 'property': 'okTime'} 27 | 28 | ZABBIX_SLA_PROP_PROBTIME = { 29 | 'name': 'Problem time', 30 | 'property': 'problemTime'} 31 | 32 | ZABBIX_SLA_PROP_DOWNTIME = { 33 | 'name': 'Down time', 34 | 'property': 'downtimeTime', 35 | } 36 | 37 | ZABBIX_EVENT_PROBLEMS = { 38 | 'text': 'Problems', 39 | 'value': [1]} 40 | 41 | ZABBIX_EVENT_OK = { 42 | 'text': 'OK', 43 | 'value': [0]} 44 | 45 | ZABBIX_EVENT_ALL = { 46 | 'text': 'All', 47 | 'value': [0, 1]} 48 | 49 | ZABBIX_TRIGGERS_SHOW_ALL = 'all triggers' 50 | ZABBIX_TRIGGERS_SHOW_ACK = 'acknowledged' 51 | ZABBIX_TRIGGERS_SHOW_NACK = 'unacknowledged' 52 | 53 | ZABBIX_SORT_TRIGGERS_BY_CHANGE = { 54 | 'text': 'last change', 55 | 'value': 'lastchange', 56 | } 57 | ZABBIX_SORT_TRIGGERS_BY_SEVERITY = { 58 | 'text': 'severity', 59 | 'value': 'priority', 60 | } 61 | 62 | ZABBIX_SEVERITY_COLORS = ( 63 | ('#B7DBAB', 'Not classified'), 64 | ('#82B5D8', 'Information'), 65 | ('#E5AC0E', 'Warning'), 66 | ('#C15C17', 'Average'), 67 | ('#BF1B00', 'High'), 68 | ('#890F02', 'Disaster'), 69 | ) 70 | 71 | 72 | def convertZabbixSeverityColors(colors): 73 | 
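    """Convert (color, severity) pairs into a list of ZabbixColor objects.

    Priorities are assigned in ascending order, so passing
    ``ZABBIX_SEVERITY_COLORS`` yields colors with priorities 0 through 5.
    """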
--------------------------------------------------------------------------------
/grafanalib/zabbix.py:
--------------------------------------------------------------------------------
1 | import attr
2 | import itertools
3 | from attr.validators import instance_of
4 | from numbers import Number
5 | from grafanalib.validators import is_interval, is_in, is_color_code, is_list_of
6 | from grafanalib.core import (
7 |     RGBA, Percent, Pixels, DashboardLink,
8 |     DEFAULT_ROW_HEIGHT, BLANK, GREEN)
9 | 
10 | ZABBIX_TRIGGERS_TYPE = 'alexanderzobnin-zabbix-triggers-panel'
11 | 
12 | ZABBIX_QMODE_METRICS = 0
13 | ZABBIX_QMODE_SERVICES = 1
14 | ZABBIX_QMODE_TEXT = 2
15 | 
16 | ZABBIX_SLA_PROP_STATUS = {
17 |     'name': 'Status',
18 |     'property': 'status'}
19 | 
20 | ZABBIX_SLA_PROP_SLA = {
21 |     'name': 'SLA',
22 |     'property': 'sla'}
23 | 
24 | ZABBIX_SLA_PROP_OKTIME = {
25 |     'name': 'OK time',
26 |     'property': 'okTime'}
27 | 
28 | ZABBIX_SLA_PROP_PROBTIME = {
29 |     'name': 'Problem time',
30 |     'property': 'problemTime'}
31 | 
32 | ZABBIX_SLA_PROP_DOWNTIME = {
33 |     'name': 'Down time',
34 |     'property': 'downtimeTime',
35 | }
36 | 
37 | ZABBIX_EVENT_PROBLEMS = {
38 |     'text': 'Problems',
39 |     'value': [1]}
40 | 
41 | ZABBIX_EVENT_OK = {
42 |     'text': 'OK',
43 |     'value': [0]}
44 | 
45 | ZABBIX_EVENT_ALL = {
46 |     'text': 'All',
47 |     'value': [0, 1]}
48 | 
49 | ZABBIX_TRIGGERS_SHOW_ALL = 'all triggers'
50 | ZABBIX_TRIGGERS_SHOW_ACK = 'acknowledged'
51 | ZABBIX_TRIGGERS_SHOW_NACK = 'unacknowledged'
52 | 
53 | ZABBIX_SORT_TRIGGERS_BY_CHANGE = {
54 |     'text': 'last change',
55 |     'value': 'lastchange',
56 | }
57 | ZABBIX_SORT_TRIGGERS_BY_SEVERITY = {
58 |     'text': 'severity',
59 |     'value': 'priority',
60 | }
61 | 
62 | ZABBIX_SEVERITY_COLORS = (
63 |     ('#B7DBAB', 'Not classified'),
64 |     ('#82B5D8', 'Information'),
65 |     ('#E5AC0E', 'Warning'),
66 |     ('#C15C17', 'Average'),
67 |     ('#BF1B00', 'High'),
68 |     ('#890F02', 'Disaster'),
69 | )
70 | 
71 | 
72 | def convertZabbixSeverityColors(colors):
73 |     priorities = itertools.count(0)
74 |     return [ZabbixColor(color=c, priority=next(priorities), severity=s)
75 |             for c, s in colors]
76 | 
77 | 
78 | @attr.s
79 | class ZabbixTargetOptions(object):
80 |     showDisabledItems = attr.ib(default=False, validator=instance_of(bool))
81 | 
82 |     def to_json_data(self):
83 |         return {
84 |             'showDisabledItems': self.showDisabledItems
85 |         }
86 | 
87 | 
88 | @attr.s
89 | class ZabbixTargetField(object):
90 |     filter = attr.ib(default="", validator=instance_of(str))
91 | 
92 |     def to_json_data(self):
93 |         return {
94 |             'filter': self.filter
95 |         }
96 | 
97 | 
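# Usage sketch: convertZabbixSeverityColors pairs each (color, severity)
# tuple with an increasing priority, producing ZabbixColor instances
# (ZabbixColor is defined further down in this module).
from grafanalib.zabbix import (
    convertZabbixSeverityColors, ZABBIX_SEVERITY_COLORS)

colors = convertZabbixSeverityColors(ZABBIX_SEVERITY_COLORS)
assert colors[0].priority == 0 and colors[0].severity == 'Not classified'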
98 | @attr.s
99 | class ZabbixTarget(object):
100 |     """Generates Zabbix datasource target JSON structure.
101 | 
102 |     Grafana-Zabbix is a plugin for Grafana that allows you to visualize
103 |     monitoring data from Zabbix and create dashboards for analyzing
104 |     metrics and real-time monitoring.
105 | 
106 |     Grafana docs on using Zabbix plugin: https://alexanderzobnin.github.io/grafana-zabbix/
107 | 
108 |     :param application: zabbix application name
109 |     :param expr: zabbix arbitrary query
110 |     :param functions: list of zabbix aggregation functions
111 |     :param group: zabbix host group
112 |     :param host: hostname
113 |     :param intervalFactor: defines interval between metric queries
114 |     :param item: regexp that defines which metric to query
115 |     :param itService: zabbix IT service name
116 |     :param mode: query mode type
117 |     :param options: additional query options
118 |     :param refId: target reference id
119 |     :param slaProperty: zabbix IT service SLA property.
120 |         Zabbix returns the following availability information about an IT service:
121 |         Status - current status of the IT service
122 |         SLA - SLA for the given time interval
123 |         OK time - time the service was in OK state, in seconds
124 |         Problem time - time the service was in problem state, in seconds
125 |         Down time - time the service was in scheduled downtime, in seconds
126 |     :param textFilter: query text filter. Use regex to extract a part of
127 |         the returned value.
128 |     :param useCaptureGroups: defines if capture groups should be used during
129 |         metric query
130 |     """
131 | 
132 |     application = attr.ib(default="", validator=instance_of(str))
133 |     expr = attr.ib(default="")
134 |     functions = attr.ib(default=attr.Factory(list))
135 |     group = attr.ib(default="", validator=instance_of(str))
136 |     host = attr.ib(default="", validator=instance_of(str))
137 |     intervalFactor = attr.ib(default=2, validator=instance_of(int))
138 |     item = attr.ib(default="", validator=instance_of(str))
139 |     itService = attr.ib(default="", validator=instance_of(str))
140 |     mode = attr.ib(default=ZABBIX_QMODE_METRICS, validator=instance_of(int))
141 |     options = attr.ib(default=attr.Factory(ZabbixTargetOptions),
142 |                       validator=instance_of(ZabbixTargetOptions))
143 |     refId = attr.ib(default="")
144 |     slaProperty = attr.ib(default=attr.Factory(dict))
145 |     textFilter = attr.ib(default="", validator=instance_of(str))
146 |     useCaptureGroups = attr.ib(default=False, validator=instance_of(bool))
147 | 
148 |     def to_json_data(self):
149 |         obj = {
150 |             'application': ZabbixTargetField(self.application),
151 |             'expr': self.expr,
152 |             'functions': self.functions,
153 |             'group': ZabbixTargetField(self.group),
154 |             'host': ZabbixTargetField(self.host),
155 |             'intervalFactor': self.intervalFactor,
156 |             'item': ZabbixTargetField(self.item),
157 |             'mode': self.mode,
158 |             'options': self.options,
159 |             'refId': self.refId,
160 |         }
161 |         if self.mode == ZABBIX_QMODE_SERVICES:
162 |             obj['slaProperty'] = self.slaProperty
163 |             obj['itservice'] = {'name': self.itService}
164 |         if self.mode == ZABBIX_QMODE_TEXT:
165 |             obj['textFilter'] = self.textFilter
166 |             obj['useCaptureGroups'] = self.useCaptureGroups
167 |         return obj
168 | 
169 | 
170 | @attr.s
171 | class ZabbixDeltaFunction(object):
172 |     """ZabbixDeltaFunction
173 | 
174 |     Convert absolute values to delta, for example, bits to bits/sec.
175 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#delta
176 |     """
177 |     added = attr.ib(default=False, validator=instance_of(bool))
178 | 
179 |     def to_json_data(self):
180 |         text = "delta()"
181 |         definition = {
182 |             'category': 'Transform',
183 |             'name': 'delta',
184 |             'defaultParams': [],
185 |             'params': []}
186 |         return {
187 |             'added': self.added,
188 |             'text': text,
189 |             'def': definition,
190 |             'params': [],
191 |         }
192 | 
193 | 
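# Usage sketch: a minimal metrics-mode target. The string attributes
# (application, group, host, item) are wrapped in ZabbixTargetField when
# serialized; all values here are hypothetical.
from grafanalib.zabbix import ZabbixTarget

cpu = ZabbixTarget(
    group='Linux servers',
    host='web01',
    application='CPU',
    item='CPU user time',
    refId='A',
)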
194 | @attr.s
195 | class ZabbixGroupByFunction(object):
196 |     """ZabbixGroupByFunction
197 | 
198 |     Takes each timeseries and consolidates the points falling within the given
199 |     interval into one point, using one of the functions: avg, min, max, median.
200 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#groupBy
201 |     """
202 | 
203 |     _options = ('avg', 'min', 'max', 'median')
204 |     _default_interval = '1m'
205 |     _default_function = 'avg'
206 | 
207 |     added = attr.ib(default=False, validator=instance_of(bool))
208 |     interval = attr.ib(default=_default_interval, validator=is_interval)
209 |     function = attr.ib(default=_default_function,
210 |                        validator=is_in(_options))
211 | 
212 |     def to_json_data(self):
213 |         text = "groupBy({interval}, {function})"
214 |         definition = {
215 |             'category': 'Transform',
216 |             'name': 'groupBy',
217 |             'defaultParams': [
218 |                 self._default_interval,
219 |                 self._default_function,
220 |             ],
221 |             'params': [
222 |                 {'name': 'interval',
223 |                  'type': 'string'},
224 |                 {'name': 'function',
225 |                  'options': self._options,
226 |                  'type': 'string'}]}
227 |         return {
228 |             'def': definition,
229 |             'text': text.format(
230 |                 interval=self.interval, function=self.function),
231 |             'params': [self.interval, self.function],
232 |             'added': self.added,
233 |         }
234 | 
235 | 
236 | @attr.s
237 | class ZabbixScaleFunction(object):
238 |     """ZabbixScaleFunction
239 | 
240 |     Takes a timeseries and multiplies each point by the given factor.
241 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#scale
242 |     """
243 | 
244 |     _default_factor = 100
245 | 
246 |     added = attr.ib(default=False, validator=instance_of(bool))
247 |     factor = attr.ib(default=_default_factor, validator=instance_of(Number))
248 | 
249 |     def to_json_data(self):
250 |         text = "scale({factor})"
251 |         definition = {
252 |             'category': 'Transform',
253 |             'name': 'scale',
254 |             'defaultParams': [self._default_factor],
255 |             'params': [
256 |                 {'name': 'factor',
257 |                  'options': [100, 0.01, 10, -1],
258 |                  'type': 'float'}]
259 |         }
260 |         return {
261 |             'def': definition,
262 |             'text': text.format(factor=self.factor),
263 |             'params': [self.factor],
264 |             'added': self.added,
265 |         }
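# Usage sketch: functions attach to a target through its `functions` list;
# here a series is downsampled to 5-minute averages and rescaled to percent
# (host and item names are hypothetical).
from grafanalib.zabbix import (
    ZabbixTarget, ZabbixGroupByFunction, ZabbixScaleFunction)

cpu_pct = ZabbixTarget(
    group='Linux servers',
    host='web01',
    item='CPU user time',
    functions=[
        ZabbixGroupByFunction(interval='5m', function='avg'),
        ZabbixScaleFunction(factor=100),
    ],
)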
266 | 
267 | 
268 | @attr.s
269 | class ZabbixAggregateByFunction(object):
270 |     """ZabbixAggregateByFunction
271 | 
272 |     Takes all timeseries and consolidates all points falling within the given
273 |     interval into one point, using one of the functions:
274 |     avg, min, max, median.
275 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#aggregateBy
276 |     """
277 | 
278 |     _options = ('avg', 'min', 'max', 'median')
279 |     _default_interval = '1m'
280 |     _default_function = 'avg'
281 | 
282 |     added = attr.ib(default=False, validator=instance_of(bool))
283 |     interval = attr.ib(default=_default_interval, validator=is_interval)
284 |     function = attr.ib(default=_default_function,
285 |                        validator=is_in(_options))
286 | 
287 |     def to_json_data(self):
288 |         text = "aggregateBy({interval}, {function})"
289 |         definition = {
290 |             'category': 'Aggregate',
291 |             'name': 'aggregateBy',
292 |             'defaultParams': [
293 |                 self._default_interval,
294 |                 self._default_function,
295 |             ],
296 |             'params': [
297 |                 {'name': 'interval',
298 |                  'type': 'string'},
299 |                 {'name': 'function',
300 |                  'options': self._options,
301 |                  'type': 'string'}]}
302 |         return {
303 |             'def': definition,
304 |             'text': text.format(
305 |                 interval=self.interval, function=self.function),
306 |             'params': [self.interval, self.function],
307 |             'added': self.added,
308 |         }
309 | 
310 | 
311 | @attr.s
312 | class ZabbixAverageFunction(object):
313 |     """ZabbixAverageFunction
314 | 
315 |     Deprecated; use aggregateBy(interval, avg) instead.
316 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#average
317 |     """
318 | 
319 |     _default_interval = '1m'
320 | 
321 |     added = attr.ib(default=False, validator=instance_of(bool))
322 |     interval = attr.ib(default=_default_interval, validator=is_interval)
323 | 
324 |     def to_json_data(self):
325 |         text = "average({interval})"
326 |         definition = {
327 |             'category': 'Aggregate',
328 |             'name': 'average',
329 |             'defaultParams': [
330 |                 self._default_interval,
331 |             ],
332 |             'params': [
333 |                 {'name': 'interval',
334 |                  'type': 'string'}]
335 |         }
336 |         return {
337 |             'def': definition,
338 |             'text': text.format(
339 |                 interval=self.interval),
340 |             'params': [self.interval],
341 |             'added': self.added,
342 |         }
343 | 
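# Usage sketch: the deprecated ZabbixAverageFunction above is equivalent to
# aggregateBy(interval, avg); both serialize to the same
# {'def', 'text', 'params', 'added'} shape shown in to_json_data() above.
from grafanalib.zabbix import ZabbixAggregateByFunction

agg = ZabbixAggregateByFunction(interval='10m', function='max')
assert agg.to_json_data()['text'] == 'aggregateBy(10m, max)'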
344 | 
345 | @attr.s
346 | class ZabbixMaxFunction(object):
347 |     """ZabbixMaxFunction
348 | 
349 |     Deprecated; use aggregateBy(interval, max) instead.
350 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#max
351 |     """
352 | 
353 |     _default_interval = '1m'
354 | 
355 |     added = attr.ib(default=False, validator=instance_of(bool))
356 |     interval = attr.ib(default=_default_interval, validator=is_interval)
357 | 
358 |     def to_json_data(self):
359 |         text = "max({interval})"
360 |         definition = {
361 |             'category': 'Aggregate',
362 |             'name': 'max',
363 |             'defaultParams': [
364 |                 self._default_interval,
365 |             ],
366 |             'params': [
367 |                 {'name': 'interval',
368 |                  'type': 'string'}]
369 |         }
370 |         return {
371 |             'def': definition,
372 |             'text': text.format(
373 |                 interval=self.interval),
374 |             'params': [self.interval],
375 |             'added': self.added,
376 |         }
377 | 
378 | 
379 | @attr.s
380 | class ZabbixMedianFunction(object):
381 |     """ZabbixMedianFunction
382 | 
383 |     Deprecated; use aggregateBy(interval, median) instead.
384 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#median
385 |     """
386 | 
387 |     _default_interval = '1m'
388 | 
389 |     added = attr.ib(default=False, validator=instance_of(bool))
390 |     interval = attr.ib(default=_default_interval, validator=is_interval)
391 | 
392 |     def to_json_data(self):
393 |         text = "median({interval})"
394 |         definition = {
395 |             'category': 'Aggregate',
396 |             'name': 'median',
397 |             'defaultParams': [
398 |                 self._default_interval,
399 |             ],
400 |             'params': [
401 |                 {'name': 'interval',
402 |                  'type': 'string'}]
403 |         }
404 |         return {
405 |             'def': definition,
406 |             'text': text.format(
407 |                 interval=self.interval),
408 |             'params': [self.interval],
409 |             'added': self.added,
410 |         }
411 | 
412 | 
413 | @attr.s
414 | class ZabbixMinFunction(object):
415 |     """ZabbixMinFunction
416 | 
417 |     Deprecated; use aggregateBy(interval, min) instead.
418 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#min
419 |     """
420 | 
421 |     _default_interval = '1m'
422 | 
423 |     added = attr.ib(default=False, validator=instance_of(bool))
424 |     interval = attr.ib(default=_default_interval, validator=is_interval)
425 | 
426 |     def to_json_data(self):
427 |         text = "min({interval})"
428 |         definition = {
429 |             'category': 'Aggregate',
430 |             'name': 'min',
431 |             'defaultParams': [
432 |                 self._default_interval,
433 |             ],
434 |             'params': [
435 |                 {'name': 'interval',
436 |                  'type': 'string'}]
437 |         }
438 |         return {
439 |             'def': definition,
440 |             'text': text.format(
441 |                 interval=self.interval),
442 |             'params': [self.interval],
443 |             'added': self.added,
444 |         }
445 | 
446 | 
447 | @attr.s
448 | class ZabbixSumSeriesFunction(object):
449 |     """ZabbixSumSeriesFunction
450 | 
451 |     This will add metrics together and return the sum at each datapoint.
452 |     This method requires interpolation of each timeseries, so it may
453 |     cause high CPU load.
454 |     Try to combine it with the groupBy() function to reduce load.
455 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#sumSeries
456 |     """
457 |     added = attr.ib(default=False, validator=instance_of(bool))
458 | 
459 |     def to_json_data(self):
460 |         text = "sumSeries()"
461 |         definition = {
462 |             'category': 'Aggregate',
463 |             'name': 'sumSeries',
464 |             'defaultParams': [],
465 |             'params': []}
466 |         return {
467 |             'added': self.added,
468 |             'text': text,
469 |             'def': definition,
470 |             'params': [],
471 |         }
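# Usage sketch: as the sumSeries docstring advises, pre-aggregating with
# groupBy() leaves fewer points to interpolate before summing.
from grafanalib.zabbix import ZabbixGroupByFunction, ZabbixSumSeriesFunction

functions = [
    ZabbixGroupByFunction(interval='1m', function='avg'),
    ZabbixSumSeriesFunction(),
]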
472 | 
473 | 
474 | @attr.s
475 | class ZabbixBottomFunction(object):
476 | 
477 |     _options = ('avg', 'min', 'max', 'median')
478 |     _default_number = 5
479 |     _default_function = 'avg'
480 | 
481 |     added = attr.ib(default=False, validator=instance_of(bool))
482 |     number = attr.ib(default=_default_number, validator=instance_of(int))
483 |     function = attr.ib(default=_default_function,
484 |                        validator=is_in(_options))
485 | 
486 |     def to_json_data(self):
487 |         text = "bottom({number}, {function})"
488 |         definition = {
489 |             'category': 'Filter',
490 |             'name': 'bottom',
491 |             'defaultParams': [
492 |                 self._default_number,
493 |                 self._default_function,
494 |             ],
495 |             'params': [
496 |                 {'name': 'number',
497 |                  'type': 'string'},
498 |                 {'name': 'function',
499 |                  'options': self._options,
500 |                  'type': 'string'}]}
501 |         return {
502 |             'def': definition,
503 |             'text': text.format(
504 |                 number=self.number, function=self.function),
505 |             'params': [self.number, self.function],
506 |             'added': self.added,
507 |         }
508 | 
509 | 
510 | @attr.s
511 | class ZabbixTopFunction(object):
512 | 
513 |     _options = ('avg', 'min', 'max', 'median')
514 |     _default_number = 5
515 |     _default_function = 'avg'
516 | 
517 |     added = attr.ib(default=False, validator=instance_of(bool))
518 |     number = attr.ib(default=_default_number, validator=instance_of(int))
519 |     function = attr.ib(default=_default_function,
520 |                        validator=is_in(_options))
521 | 
522 |     def to_json_data(self):
523 |         text = "top({number}, {function})"
524 |         definition = {
525 |             'category': 'Filter',
526 |             'name': 'top',
527 |             'defaultParams': [
528 |                 self._default_number,
529 |                 self._default_function,
530 |             ],
531 |             'params': [
532 |                 {'name': 'number',
533 |                  'type': 'string'},
534 |                 {'name': 'function',
535 |                  'options': self._options,
536 |                  'type': 'string'}]}
537 |         return {
538 |             'def': definition,
539 |             'text': text.format(
540 |                 number=self.number, function=self.function),
541 |             'params': [self.number, self.function],
542 |             'added': self.added,
543 |         }
544 | 
545 | 
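# Usage sketch: top()/bottom() keep only the N series ranked by the chosen
# aggregate, e.g. the five series with the highest average.
from grafanalib.zabbix import ZabbixTopFunction

busiest = ZabbixTopFunction(number=5, function='avg')
assert busiest.to_json_data()['text'] == 'top(5, avg)'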
546 | @attr.s
547 | class ZabbixTrendValueFunction(object):
548 |     """ZabbixTrendValueFunction
549 | 
550 |     Specifies the type of trend value returned by Zabbix when
551 |     trends are used (avg, min or max).
552 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#trendValue
553 |     """
554 | 
555 |     _options = ('avg', 'min', 'max')
556 |     _default_type = 'avg'
557 |     added = attr.ib(default=False, validator=instance_of(bool))
558 |     type = attr.ib(default=_default_type,
559 |                    validator=is_in(_options))
560 | 
561 |     def to_json_data(self):
562 |         text = "trendValue({type})"
563 |         definition = {
564 |             'category': 'Trends',
565 |             'name': 'trendValue',
566 |             'defaultParams': [
567 |                 self._default_type,
568 |             ],
569 |             'params': [
570 |                 {'name': 'type',
571 |                  'options': self._options,
572 |                  'type': 'string'}]}
573 |         return {
574 |             'def': definition,
575 |             'text': text.format(
576 |                 type=self.type),
577 |             'params': [self.type],
578 |             'added': self.added,
579 |         }
580 | 
581 | 
582 | @attr.s
583 | class ZabbixTimeShiftFunction(object):
584 |     """ZabbixTimeShiftFunction
585 | 
586 |     Draws the selected metrics shifted in time.
587 |     If no sign is given, a minus sign (-) is implied, which shifts
588 |     the metric back in time.
589 |     If a plus sign (+) is given, the metric is shifted forward in time.
590 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#timeShift
591 |     """
592 | 
593 |     _options = ('24h', '7d', '1M', '+24h', '-24h')
594 |     _default_interval = '24h'
595 | 
596 |     added = attr.ib(default=False, validator=instance_of(bool))
597 |     interval = attr.ib(default=_default_interval)
598 | 
599 |     def to_json_data(self):
600 |         text = "timeShift({interval})"
601 |         definition = {
602 |             'category': 'Time',
603 |             'name': 'timeShift',
604 |             'defaultParams': [
605 |                 self._default_interval,
606 |             ],
607 |             'params': [
608 |                 {'name': 'interval',
609 |                  'options': self._options,
610 |                  'type': 'string'}]}
611 |         return {
612 |             'def': definition,
613 |             'text': text.format(
614 |                 interval=self.interval),
615 |             'params': [self.interval],
616 |             'added': self.added,
617 |         }
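# Usage sketch: timeShift() overlays a series against an earlier window;
# without an explicit sign the shift is backwards in time.
from grafanalib.zabbix import ZabbixTimeShiftFunction

week_ago = ZabbixTimeShiftFunction(interval='7d')
assert week_ago.to_json_data()['text'] == 'timeShift(7d)'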
618 | 
619 | 
620 | @attr.s
621 | class ZabbixSetAliasFunction(object):
622 |     """ZabbixSetAliasFunction
623 | 
624 |     Returns the given alias instead of the metric name.
625 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#setAlias
626 |     """
627 |     alias = attr.ib(validator=instance_of(str))
628 |     added = attr.ib(default=False, validator=instance_of(bool))
629 | 
630 |     def to_json_data(self):
631 |         text = "setAlias({alias})"
632 |         definition = {
633 |             'category': 'Alias',
634 |             'name': 'setAlias',
635 |             'defaultParams': [],
636 |             'params': [
637 |                 {'name': 'alias',
638 |                  'type': 'string'}]}
639 |         return {
640 |             'def': definition,
641 |             'text': text.format(alias=self.alias),
642 |             'params': [self.alias],
643 |             'added': self.added,
644 |         }
645 | 
646 | 
647 | @attr.s
648 | class ZabbixSetAliasByRegexFunction(object):
649 |     """ZabbixSetAliasByRegexFunction
650 | 
651 |     Returns the part of the metric name matched by the regex.
652 |     https://alexanderzobnin.github.io/grafana-zabbix/reference/functions/#setAliasByRegex
653 |     """
654 | 
655 |     regexp = attr.ib(validator=instance_of(str))
656 |     added = attr.ib(default=False, validator=instance_of(bool))
657 | 
658 |     def to_json_data(self):
659 |         text = "setAliasByRegex({regexp})"
660 |         definition = {
661 |             'category': 'Alias',
662 |             'name': 'setAliasByRegex',
663 |             'defaultParams': [],
664 |             'params': [
665 |                 {'name': 'aliasByRegex',
666 |                  'type': 'string'}]}
667 |         return {
668 |             'def': definition,
669 |             'text': text.format(regexp=self.regexp),
670 |             'params': [self.regexp],
671 |             'added': self.added,
672 |         }
673 | 
674 | 
675 | def zabbixMetricTarget(application, group, host, item, functions=None):
676 |     return ZabbixTarget(
677 |         mode=ZABBIX_QMODE_METRICS,
678 |         application=application,
679 |         group=group,
680 |         host=host,
681 |         item=item,
682 |         functions=functions or [],
683 |     )
684 | 
685 | 
686 | def zabbixServiceTarget(service, sla=ZABBIX_SLA_PROP_STATUS):
687 |     return ZabbixTarget(
688 |         mode=ZABBIX_QMODE_SERVICES,
689 |         itService=service,
690 |         slaProperty=sla,
691 |     )
692 | 
693 | 
694 | def zabbixTextTarget(application, group, host, item, text,
695 |                      useCaptureGroups=False):
696 |     return ZabbixTarget(
697 |         mode=ZABBIX_QMODE_TEXT,
698 |         application=application,
699 |         group=group,
700 |         host=host,
701 |         item=item,
702 |         textFilter=text,
703 |         useCaptureGroups=useCaptureGroups,
704 |     )
705 | 
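# Usage sketch: the zabbix*Target helpers above preselect the query mode;
# a metrics target with an alias function attached (names are
# hypothetical).
from grafanalib.zabbix import zabbixMetricTarget, ZabbixSetAliasFunction

user_time = zabbixMetricTarget(
    application='CPU',
    group='Linux servers',
    host='web01',
    item='CPU user time',
    functions=[ZabbixSetAliasFunction(alias='cpu user')],
)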
706 | 
707 | @attr.s
708 | class ZabbixColor(object):
709 |     color = attr.ib(validator=is_color_code)
710 |     priority = attr.ib(validator=instance_of(int))
711 |     severity = attr.ib(validator=instance_of(str))
712 |     show = attr.ib(default=True, validator=instance_of(bool))
713 | 
714 |     def to_json_data(self):
715 |         return {
716 |             'color': self.color,
717 |             'priority': self.priority,
718 |             'severity': self.severity,
719 |             'show': self.show,
720 |         }
721 | 
722 | 
723 | @attr.s
724 | class ZabbixTrigger(object):
725 | 
726 |     application = attr.ib(default="", validator=instance_of(str))
727 |     group = attr.ib(default="", validator=instance_of(str))
728 |     host = attr.ib(default="", validator=instance_of(str))
729 |     trigger = attr.ib(default="", validator=instance_of(str))
730 | 
731 |     def to_json_data(self):
732 |         return {
733 |             'application': ZabbixTargetField(self.application),
734 |             'group': ZabbixTargetField(self.group),
735 |             'host': ZabbixTargetField(self.host),
736 |             'trigger': ZabbixTargetField(self.trigger),
737 |         }
738 | 
739 | 
740 | @attr.s
741 | class ZabbixTriggersPanel(object):
742 |     """ZabbixTriggersPanel
743 | 
744 |     :param dataSource: query datasource name
745 |     :param title: panel title
746 |     :param ackEventColor: acknowledged triggers color
747 |     :param customLastChangeFormat: defines last change field data format.
748 |         See momentjs docs for time format:
749 |         http://momentjs.com/docs/#/displaying/format/
750 |     :param description: additional panel description
751 |     :param fontSize: panel font size
752 |     :param height: panel height in Pixels
753 |     :param hideHostsInMaintenance: defines if triggers from hosts
754 |         in maintenance should be shown
755 |     :param hostField: defines if host field should be shown
756 |     :param hostTechNameField: defines if field with host technical name should
757 |         be shown
758 |     :param id: panel identifier
759 |     :param infoField: defines if field with host info should be shown
760 |     :param lastChangeField: defines if field with last change
761 |         time should be shown
762 |     :param limit: defines number of queried triggers
763 |     :param links: list of dashboard links
764 |     :param markAckEvents: defines if acknowledged triggers should be colored
765 |         with a different color
766 |     :param minSpan: defines panel minimum span
767 |     :param okEventColor: defines color for triggers with Ok status
768 |     :param pageSize: defines number of triggers per panel page
769 |     :param scroll: defines if scroll should be shown
770 |     :param severityField: defines if severity field should be shown
771 |     :param showEvents: defines event type to query (Ok, Problems, All)
772 |     :param showTriggers: defines trigger type to query
773 |         (all, acknowledged, unacknowledged)
774 |     :param sortTriggersBy: defines trigger sort policy
775 |     :param span: defines span number for panel
776 |     :param statusField: defines if status field should be shown
777 |     :param transparent: defines if panel should be transparent
778 |     :param triggerSeverity: defines colors for trigger severity
779 |     :param triggers: trigger query
780 | 
781 |     """
782 |     dataSource = attr.ib()
783 |     title = attr.ib()
784 | 
785 |     ackEventColor = attr.ib(default=attr.Factory(lambda: BLANK),
786 |                             validator=instance_of(RGBA))
787 |     ageField = attr.ib(default=True, validator=instance_of(bool))
788 |     customLastChangeFormat = attr.ib(default=False,
789 |                                      validator=instance_of(bool))
790 |     description = attr.ib(default="", validator=instance_of(str))
791 |     fontSize = attr.ib(default=attr.Factory(Percent),
792 |                        validator=instance_of(Percent))
793 |     height = attr.ib(default=DEFAULT_ROW_HEIGHT, validator=instance_of(Pixels))
794 |     hideHostsInMaintenance = attr.ib(default=False,
795 |                                      validator=instance_of(bool))
796 |     hostField = attr.ib(default=True, validator=instance_of(bool))
797 |     hostTechNameField = attr.ib(default=False, validator=instance_of(bool))
798 |     id = attr.ib(default=None)
799 |     infoField = attr.ib(default=True, validator=instance_of(bool))
800 |     lastChangeField = attr.ib(default=True, validator=instance_of(bool))
801 | 
802 |     lastChangeFormat = attr.ib(default="")
803 |     limit = attr.ib(default=10, validator=instance_of(int))
804 |     links = attr.ib(default=attr.Factory(list),
805 |                     validator=is_list_of(DashboardLink))
806 |     markAckEvents = attr.ib(default=False, validator=instance_of(bool))
807 |     minSpan = attr.ib(default=None)
808 |     okEventColor = attr.ib(default=attr.Factory(lambda: GREEN),
809 |                            validator=instance_of(RGBA))
810 |     pageSize = attr.ib(default=10, validator=instance_of(int))
811 |     repeat = attr.ib(default=None)
812 |     scroll = attr.ib(default=True, validator=instance_of(bool))
813 |     severityField = attr.ib(default=False, validator=instance_of(bool))
814 |     showEvents = attr.ib(default=attr.Factory(lambda: ZABBIX_EVENT_PROBLEMS))
815 |     showTriggers = attr.ib(default=ZABBIX_TRIGGERS_SHOW_ALL)
816 |     sortTriggersBy = attr.ib(
817 |         default=attr.Factory(lambda: ZABBIX_SORT_TRIGGERS_BY_CHANGE),
818 |     )
819 |     span = attr.ib(default=None)
820 |     statusField = attr.ib(default=False, validator=instance_of(bool))
821 |     transparent = attr.ib(default=False, validator=instance_of(bool))
822 |     try:  # older attrs releases spell 'converter' as 'convert'; support both
823 |         triggerSeverity = attr.ib(
824 |             default=ZABBIX_SEVERITY_COLORS,
825 |             converter=convertZabbixSeverityColors,
826 |         )
827 |     except TypeError:
828 |         triggerSeverity = attr.ib(
829 |             default=ZABBIX_SEVERITY_COLORS,
830 |             convert=convertZabbixSeverityColors,
831 |         )
832 |     triggers = attr.ib(
833 |         default=attr.Factory(ZabbixTrigger),
834 |         validator=instance_of(ZabbixTrigger),
835 |     )
836 | 
837 |     def to_json_data(self):
838 |         return {
839 |             'type': ZABBIX_TRIGGERS_TYPE,
840 |             'datasource': self.dataSource,
841 |             'title': self.title,
842 |             'ackEventColor': self.ackEventColor,
843 |             'ageField': self.ageField,
844 |             'customLastChangeFormat': self.customLastChangeFormat,
845 |             'description': self.description,
846 |             'fontSize': self.fontSize,
847 |             'height': self.height,
848 |             'hideHostsInMaintenance': self.hideHostsInMaintenance,
849 |             'hostField': self.hostField,
850 |             'hostTechNameField': self.hostTechNameField,
851 |             'id': self.id,
852 |             'infoField': self.infoField,
853 |             'lastChangeField': self.lastChangeField,
854 |             'lastChangeFormat': self.lastChangeFormat,
855 |             'limit': self.limit,
856 |             'links': self.links,
857 |             'markAckEvents': self.markAckEvents,
858 |             'minSpan': self.minSpan,
859 |             'okEventColor': self.okEventColor,
860 |             'pageSize': self.pageSize,
861 |             'repeat': self.repeat,
862 |             'scroll': self.scroll,
863 |             'severityField': self.severityField,
864 |             'showEvents': self.showEvents,
865 |             'showTriggers': self.showTriggers,
866 |             'sortTriggersBy': self.sortTriggersBy,
867 |             'span': self.span,
868 |             'statusField': self.statusField,
869 |             'transparent': self.transparent,
870 |             'triggers': self.triggers,
871 |             'triggerSeverity': self.triggerSeverity,
872 |         }
873 | 
--------------------------------------------------------------------------------
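# Usage sketch: a minimal ZabbixTriggersPanel bound to a Zabbix datasource
# (the datasource name and host group are hypothetical; all other
# attributes fall back to the defaults shown in the class above).
from grafanalib.zabbix import (
    ZabbixTriggersPanel, ZabbixTrigger, ZABBIX_EVENT_PROBLEMS)

panel = ZabbixTriggersPanel(
    dataSource='zabbix',
    title='Active problems',
    triggers=ZabbixTrigger(group='Linux servers'),
    showEvents=ZABBIX_EVENT_PROBLEMS,
)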