├── .gitignore ├── requirements.txt ├── .github ├── collectd.png ├── immobiliare-labs.png ├── dependabot.yml ├── ISSUE_TEMPLATE │ ├── Feature_Request.md │ ├── Support_Question.md │ ├── BC_Break.md │ └── Bug.md ├── workflows │ ├── commit-lint.yml │ └── test.yml ├── CONTRIBUTING.md ├── PULL_REQUEST_TEMPLATE │ ├── Improvement.md │ └── New_Feature.md ├── CHANGELOG.md └── README.md ├── LICENSE ├── haproxy.py └── haproxy_test.py /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | mock 2 | pycodestyle 3 | pytest 4 | pytest-cov -------------------------------------------------------------------------------- /.github/collectd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/immobiliare/collectd-haproxy-plugin/HEAD/.github/collectd.png -------------------------------------------------------------------------------- /.github/immobiliare-labs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/immobiliare/collectd-haproxy-plugin/HEAD/.github/immobiliare-labs.png -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/Feature_Request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🎉 Feature Request 3 | about: You have a neat idea that should be implemented? 🎩 4 | --- 5 | 6 | ### Feature Request 7 | 8 | 9 | 10 | | Q | A 11 | |------------ | ------ 12 | | New Feature | yes 13 | | RFC | yes/no 14 | | BC Break | yes/no 15 | 16 | #### Summary 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/Support_Question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: ❓ Support Question 3 | about: "Have a problem that you can't figure out? \U0001F914" 4 | --- 5 | 6 | 7 | 8 | | Q | A | 9 | | ------------------- | ------ | 10 | | BC Break | yes/no | 11 | | Version | x.y.z | 12 | 13 | 18 | 19 | ### Support Question 20 | 21 | 22 | -------------------------------------------------------------------------------- /.github/workflows/commit-lint.yml: -------------------------------------------------------------------------------- 1 | name: commit-lint 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | commit-lint: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | 11 | - name: Checkout repository 12 | uses: actions/checkout@v5 13 | with: 14 | fetch-depth: ${{ github.event.pull_request.commits }} 15 | ref: ${{ github.event.pull_request.head.sha }} 16 | 17 | - name: Commits linter 18 | uses: bugbundle/commits@v1.1.0 19 | id: commits 20 | 21 | - name: Preview the version 22 | run: echo ${{ steps.commits.outputs.major }}.${{ steps.commits.outputs.minor }}.${{ steps.commits.outputs.patch }} 23 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | 1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device. 4 | 2. Create a new branch `git checkout -b MY_BRANCH_NAME` 5 | 3. Install the dependencies: `python3 -m pip install mock pycodestyle pytest` 6 | 7 | ## Testing 8 | 9 | Simply run `py.test haproxy_test.py`. 10 | 11 | ## Contributors 12 | 13 | * [`@mleinart`](https://github.com/mleinart) 14 | * [`@wt`](https://github.com/wt) 15 | * [`@vzhabiuks`](https://github.com/vzhabiuks) 16 | * [`@rhyss`](https://github.com/rhyss) 17 | * [`@streambinder`](https://github.com/streambinder) -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/Improvement.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: ⚙ Improvement 3 | about: You have some improvement to make the library better? 🎁 4 | --- 5 | 6 | 11 | 12 | ### Improvement 13 | 14 | 15 | 16 | | Q | A 17 | |------------ | ------ 18 | | New Feature | yes 19 | | RFC | yes/no 20 | | BC Break | yes/no 21 | | Issue | Close #... 22 | 23 | #### Summary 24 | 25 | 26 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/New_Feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🎉 New Feature 3 | about: You have implemented some neat idea that you want to make part of the library? 🎩 4 | --- 5 | 6 | 11 | 12 | ### New Feature 13 | 14 | 15 | 16 | | Q | A 17 | |------------ | ------ 18 | | New Feature | yes 19 | | RFC | yes/no 20 | | BC Break | yes/no 21 | | Issue | Close #... 22 | 23 | #### Summary 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /.github/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [Unreleased] 9 | 10 | ## [0.1.1] - 2021-11-25 11 | 12 | ### Fixed 13 | 14 | - Sending (bytes) commands to HAProxy connections 15 | 16 | ## [0.1.0] - 2021-11-08 17 | 18 | ### Added 19 | 20 | - Support for multi-sockets statistics aggregation 21 | - Support for HAproxy 1.7.x 22 | - Cross Python versions compatibility 23 | 24 | [Unreleased]: https://github.com/immobiliare/collectd-haproxy-plugin/compare/0.1.1...HEAD 25 | [0.1.1]: https://github.com/immobiliare/collectd-haproxy-plugin/releases/tag/0.1.1 26 | [0.1.0]: https://github.com/immobiliare/collectd-haproxy-plugin/releases/tag/0.1.0 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/BC_Break.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 💥 BC Break 3 | about: Have you encountered an issue during upgrade? 💣 4 | --- 5 | 6 | ### BC Break Report 7 | 8 | 9 | 10 | | Q | A 11 | |------------ | ------ 12 | | BC Break | yes 13 | | Version | x.y.z 14 | 15 | #### Summary 16 | 17 | 18 | 19 | #### Previous behavior 20 | 21 | 22 | 23 | #### Current behavior 24 | 25 | 26 | 27 | #### How to reproduce 28 | 29 | 34 | 35 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/Bug.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U0001F41E Bug Report" 3 | about: "Something is broken? \U0001F528" 4 | --- 5 | 6 | ### Bug Report 7 | 8 | 9 | 10 | | Q | A | 11 | | ------------------- | ------ | 12 | | BC Break | yes/no | 13 | | Version | x.y.z | 14 | 15 | #### Summary 16 | 17 | 18 | 19 | #### Current behavior 20 | 21 | 22 | 23 | #### How to reproduce 24 | 25 | 30 | 31 | #### Expected behavior 32 | 33 | 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Immobiliare Labs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | test: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.7', 'pypy3.8', 'pypy3.9', 'pypy3.10', 'pypy3.11'] 17 | 18 | steps: 19 | - name: Checkout repository 20 | uses: actions/checkout@v5 21 | 22 | - name: Setup Python environment 23 | uses: actions/setup-python@v6 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | cache: 'pip' 27 | 28 | - name: Install Python dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | pip install -r requirements.txt 32 | 33 | - name: Run tests 34 | run: pytest haproxy_test.py --doctest-modules --junitxml=junit/test-results-${{ matrix.python-version }}.xml 35 | 36 | - name: Upload pytest test results 37 | uses: actions/upload-artifact@v4 38 | with: 39 | name: pytest-results-${{ matrix.python-version }} 40 | path: junit/test-results-${{ matrix.python-version }}.xml 41 | # Use always() to always run this step to publish test results when there are test failures 42 | if: ${{ always() }} 43 | 44 | - name: Run codestyle check 45 | run: | 46 | pycodestyle *.py 47 | -------------------------------------------------------------------------------- /.github/README.md: -------------------------------------------------------------------------------- 1 | # Collectd HAProxy plugin 2 | 3 | [![pipeline status](https://github.com/immobiliare/collectd-haproxy-plugin/actions/workflows/test.yml/badge.svg)](https://github.com/immobiliare/collectd-haproxy-plugin/actions/workflows/test.yml) 4 | 5 | This is a [collectd](https://collectd.org) plugin for [HAProxy](https://haproxy.com). 6 | 7 | It uses the exposed HAProxy socket commands (on defined TCP/Unix sockets) to monitor statistics from the `show info`, `show stat` and `show resolvers` (if on HAProxy 1.8+) commands. 8 | This allows monitoring of the overall service status as well as frontends, backends, servers and resolvers configured. 9 | It also supports multi-process statistics aggregation, allowing to configure multiple sockets to collect metrics from. 10 | 11 | It's the result of the [hmrc/collectd-haproxy](https://github.com/hmrc/collectd-haproxy) repository fork, to which, mainly, the support for multi stats sockets aggregation has been added. 12 | 13 | ## Table of Contents 14 | 15 | - [Install](#install) 16 | - [Usage](#usage) 17 | - [Compatibility](#compatibility) 18 | - [Requirements](#requirements) 19 | - [Changelog](#changelog) 20 | - [Contributing](#contributing) 21 | - [Issues](#issues) 22 | 23 | ## Install 24 | 25 | Download the latest release of `haproxy.py` file into an arbirtrary path `/usr/local/lib/collectd`, e.g. for version 0.1.0: 26 | 27 | ```bash 28 | curl -Lo /usr/local/lib/collectd/haproxy.py --create-dirs \ 29 | https://github.com/immobiliare/collectd-haproxy-plugin/releases/download/0.1.0/haproxy.py 30 | ``` 31 | 32 | ## Usage 33 | 34 | Enabling the plugin follows the widely known standard collectd's way. 35 | 36 | ```bash 37 | cat < /etc/collectd/plugins/haproxy.conf 38 | 39 | Globals true 40 | 41 | 42 | 43 | ModulePath "/usr/local/lib/collectd" 44 | Import "haproxy" 45 | 46 | Socket "/var/run/haproxy/proc1.sock" 47 | Socket "unix:///var/run/haproxy/proc2.sock" 48 | Socket "tcp://127.0.0.1:8080" 49 | ProxyMonitor "backend" 50 | # ProxyMonitor "server" or "frontend" 51 | # ProxyIgnore to ignore metrics 52 | # Verbose to increase verbosity 53 | 54 | 55 | EOF 56 | ``` 57 | 58 | ## Compatibility 59 | 60 | | Version | Status | Python compatibility | HAproxy compatibility | 61 | | ------- | ----------------------- | -------------------- | --------------------- | 62 | | 1.x | maintained but untested | cpython >=2.7 | >=1.8 | 63 | | 1.x | maintained and tested | cpython >=3.8 | >=1.8 | 64 | | 1.x | maintained and tested | pypy >=3.7 | >=1.8 | 65 | 66 | ## Requirements 67 | 68 | There's no particular requirement other than collectd/HAproxy and Python, as per the matrice above. 69 | 70 | ## Changelog 71 | 72 | See [changelog](./CHANGELOG.md). 73 | 74 | ## Contributing 75 | 76 | See [contributing](./CONTRIBUTING.md). 77 | 78 | ## Issues 79 | 80 | You found a bug or need a new feature? Please open an issue. 81 | -------------------------------------------------------------------------------- /haproxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Author: Michael Leinartas 4 | # Description: This is a collectd plugin which runs under the Python plugin to 5 | # collect metrics from haproxy. 6 | # Plugin structure and logging func taken from 7 | # https://github.com/phrawzty/rabbitmq-collectd-plugin 8 | # 9 | # Modified by: 10 | # - "Warren Turkal" 11 | # - "Volodymyr Zhabiuk" 12 | # - "HM Revenue & Customs" 13 | # - "Davide Pucci" 14 | 15 | import collectd 16 | import csv 17 | import re 18 | import socket 19 | import sys 20 | 21 | PLUGIN_NAME = 'haproxy' 22 | RECV_SIZE = 1024 23 | 24 | METRICS_TO_COLLECT = { 25 | 'CompressBpsIn': 'derive', 26 | 'CompressBpsOut': 'derive', 27 | 'ConnRate': 'gauge', 28 | 'CumConns': 'derive', 29 | 'CumReq': 'derive', 30 | 'CumSslConns': 'derive', 31 | 'CurrConns': 'gauge', 32 | 'CurrSslConns': 'gauge', 33 | 'Idle_pct': 'gauge', 34 | 'MaxConn': 'gauge', 35 | 'MaxConnRate': 'gauge', 36 | 'MaxPipes': 'gauge', 37 | 'MaxSessRate': 'gauge', 38 | 'MaxSslConns': 'gauge', 39 | 'PipesFree': 'gauge', 40 | 'PipesUsed': 'gauge', 41 | 'Run_queue': 'gauge', 42 | 'SessRate': 'gauge', 43 | 'SslBackendKeyRate': 'gauge', 44 | 'SslCacheLookups': 'derive', 45 | 'SslCacheMisses': 'derive', 46 | 'SslFrontendKeyRate': 'gauge', 47 | 'SslRate': 'gauge', 48 | 'Tasks': 'gauge', 49 | 'Uptime_sec': 'derive', 50 | 'ZlibMemUsage': 'gauge', 51 | 'act': 'gauge', 52 | 'any_err': 'gauge', 53 | 'bck': 'gauge', 54 | 'bin': 'derive', 55 | 'bout': 'derive', 56 | 'check_duration': 'gauge', 57 | 'chkfail': 'derive', 58 | 'cli_abrt': 'derive', 59 | 'cname': 'gauge', 60 | 'cname_error': 'gauge', 61 | 'comp_byp': 'derive', 62 | 'comp_in': 'derive', 63 | 'comp_out': 'derive', 64 | 'comp_rsp': 'derive', 65 | 'conn_rate': 'gauge', 66 | 'conn_rate_max': 'gauge', 67 | 'conn_tot': 'counter', 68 | 'ctime': 'gauge', 69 | 'dcon': 'gauge', 70 | 'downtime': 'derive', 71 | 'dreq': 'derive', 72 | 'dresp': 'derive', 73 | 'dses': 'gauge', 74 | 'econ': 'derive', 75 | 'ereq': 'derive', 76 | 'eresp': 'derive', 77 | 'hrsp_1xx': 'derive', 78 | 'hrsp_2xx': 'derive', 79 | 'hrsp_3xx': 'derive', 80 | 'hrsp_4xx': 'derive', 81 | 'hrsp_5xx': 'derive', 82 | 'hrsp_other': 'derive', 83 | 'intercepted': 'gauge', 84 | 'invalid': 'gauge', 85 | 'lastsess': 'gauge', 86 | 'lbtot': 'counter', 87 | 'nx': 'gauge', 88 | 'other': 'gauge', 89 | 'outdated': 'gauge', 90 | 'qcur': 'gauge', 91 | 'qlimit': 'gauge', 92 | 'qmax': 'gauge', 93 | 'qtime': 'gauge', 94 | 'rate': 'gauge', 95 | 'rate_lim': 'gauge', 96 | 'rate_max': 'gauge', 97 | 'refused': 'gauge', 98 | 'req_rate': 'gauge', 99 | 'req_rate_max': 'gauge', 100 | 'rtime': 'gauge', 101 | 'scur': 'gauge', 102 | 'sent': 'gauge', 103 | 'slim': 'gauge', 104 | 'smax': 'gauge', 105 | 'snd_error': 'gauge', 106 | 'srv_abrt': 'derive', 107 | 'stot': 'derive', 108 | 'throttle': 'gauge', 109 | 'timeout': 'gauge', 110 | 'too_big': 'gauge', 111 | 'truncated': 'gauge', 112 | 'ttime': 'gauge', 113 | 'update': 'gauge', 114 | 'valid': 'gauge', 115 | 'wredis': 'derive', 116 | 'wretr': 'derive', 117 | } 118 | 119 | # svname, pxname, type are absolutely mandatory 120 | # here to keep the overall plugin flow working 121 | METRICS_AGGR_PULL = [ 122 | 'pxname', 123 | 'svname', 124 | 'type', 125 | ] 126 | METRICS_AGGR_SUM = [ 127 | 'CompressBpsIn', 128 | 'CompressBpsOut', 129 | 'CumConns', 130 | 'CumReq', 131 | 'CumSslConns', 132 | 'CurrConns', 133 | 'CurrSslConns', 134 | 'Idle_pct', 135 | 'MaxConn', 136 | 'MaxPipes', 137 | 'MaxSslConns', 138 | 'PipesFree', 139 | 'PipesUsed', 140 | 'Run_queue', 141 | 'SslCacheMisses', 142 | 'Tasks', 143 | 'act', 144 | 'any_err', 145 | 'bck', 146 | 'bin', 147 | 'bout', 148 | 'check_duration', 149 | 'chkfail', 150 | 'cli_abrt', 151 | 'cname', 152 | 'cname_error', 153 | 'comp_byp', 154 | 'comp_in', 155 | 'comp_out', 156 | 'comp_rsp', 157 | 'conn_tot', 158 | 'dcon', 159 | 'dreq', 160 | 'dresp', 161 | 'dses', 162 | 'econ', 163 | 'ereq', 164 | 'eresp', 165 | 'hrsp_1xx', 166 | 'hrsp_2xx', 167 | 'hrsp_3xx', 168 | 'hrsp_4xx', 169 | 'hrsp_5xx', 170 | 'hrsp_other', 171 | 'intercepted', 172 | 'invalid', 173 | 'lastsess', 174 | 'lbtot', 175 | 'nx', 176 | 'other', 177 | 'outdated', 178 | 'qcur', 179 | 'refused', 180 | 'scur', 181 | 'sent', 182 | 'slim', 183 | 'snd_error', 184 | 'srv_abrt', 185 | 'stot', 186 | 'throttle', 187 | 'timeout', 188 | 'too_big', 189 | 'truncated', 190 | 'update', 191 | 'valid', 192 | 'wredis', 193 | 'wretr', 194 | ] 195 | METRICS_AGGR_MEAN = [ 196 | 'ConnRate', 197 | 'MaxConnRate', 198 | 'MaxSessRate', 199 | 'SessRate', 200 | 'SslBackendKeyRate', 201 | 'SslCacheLookups', 202 | 'SslFrontendKeyRate', 203 | 'SslRate', 204 | 'Uptime_sec', 205 | 'ZlibMemUsage', 206 | 'conn_rate', 207 | 'conn_rate_max', 208 | 'ctime', 209 | 'downtime', 210 | 'qlimit', 211 | 'qmax', 212 | 'qtime', 213 | 'rate', 214 | 'rate_lim', 215 | 'rate_max', 216 | 'req_rate', 217 | 'req_rate_max', 218 | 'rtime', 219 | 'smax', 220 | 'ttime', 221 | ] 222 | 223 | DEFAULT_SOCKET = '/var/run/haproxy.sock' 224 | DEFAULT_PROXY_MONITORS = ['server', 'frontend', 'backend'] 225 | 226 | 227 | class HAProxySocket(object): 228 | ''' 229 | Encapsulates communication with HAProxy via the socket interface 230 | ''' 231 | 232 | def __init__(self, socket_files=[DEFAULT_SOCKET]): 233 | self.sockets = socket_files 234 | # for socket in socket_files: 235 | # self.sockets[socket] = None 236 | 237 | def communicate(self, command): 238 | ''' 239 | Get response from single command. 240 | 241 | Args: 242 | command: string command to send to haproxy stat socket 243 | 244 | Returns: 245 | a string of the response data 246 | ''' 247 | if not command.endswith('\n'): 248 | command += '\n' 249 | 250 | outputs = [] 251 | for socket in self.sockets: 252 | conn = HAProxySocket._connect(socket) 253 | if conn is None: 254 | collectd.warning('unable to connect to {}'.format(socket)) 255 | continue 256 | 257 | if sys.version_info[0] >= 3: 258 | command = command.encode('utf-8') 259 | conn.sendall(command) 260 | result_buf = str() 261 | buf = conn.recv(RECV_SIZE) 262 | while buf: 263 | result_buf += str(buf.decode('utf-8')) 264 | buf = conn.recv(RECV_SIZE) 265 | 266 | conn.close() 267 | outputs.append(result_buf) 268 | 269 | return outputs 270 | 271 | # this method isn't nice but there's no other way 272 | # to parse the output of show resolvers from haproxy 273 | def get_resolvers(self): 274 | ''' 275 | Gets the resolver config and return s, 276 | whish is a map of nameserver -> nameservermetrics. 277 | 278 | The output from the socket looks like 279 | Resolvers section mydns 280 | nameserver dns1: 281 | sent: 8 282 | ... 283 | 284 | :return: 285 | map of nameserver -> nameservermetrics 286 | e.g. '{dns1': {'sent': '8', ...}, ...} 287 | ''' 288 | result = {} 289 | sockets_stats = self.communicate('show resolvers') 290 | nameserver = None 291 | 292 | for stats in sockets_stats: 293 | lines = stats.splitlines() 294 | # check if command is supported 295 | if any(lines) and lines[0].lower().startswith('unknown command'): 296 | continue 297 | 298 | for line in lines: 299 | try: 300 | if 'Resolvers section' in line or line.strip() == '': 301 | continue 302 | elif 'nameserver' in line: 303 | _, unsanitied_nameserver = line.strip().split(' ', 1) 304 | # remove trailing ':' 305 | nameserver = unsanitied_nameserver[:-1] 306 | result[nameserver] = {} 307 | elif nameserver: 308 | key, val = line.split(':', 1) 309 | current_nameserver_stats = result[nameserver] 310 | current_nameserver_stats[key.strip()] = val.strip() 311 | result[nameserver] = current_nameserver_stats 312 | except ValueError: 313 | continue 314 | 315 | return result 316 | 317 | def get_server_info(self): 318 | result = {} 319 | sockets_stats = self.communicate('show info') 320 | 321 | for stats in sockets_stats: 322 | stats_proc = self.get_server_info_proc_num(stats) 323 | 324 | for line in stats.splitlines(): 325 | try: 326 | key, val = line.split(':', 1) 327 | except ValueError: 328 | continue 329 | result['{}#{}'.format(key.strip(), stats_proc)] = val.strip() 330 | 331 | return result 332 | 333 | def get_server_info_proc_num(self, data): 334 | for _, match in enumerate( 335 | re.finditer(r'Process_num: ([0-9]+)', data, re.MULTILINE), 336 | start=1): 337 | for groupNum in range(0, len(match.groups())): 338 | groupNum = groupNum + 1 339 | return match.group(groupNum).strip() 340 | return 'U' 341 | 342 | def get_server_stats(self): 343 | result = [] 344 | sockets_stats = self.communicate('show stat') 345 | for stat in sockets_stats: 346 | # sanitize and make a list of lines 347 | output = stat.lstrip('# ').strip() 348 | output = [line.strip(',') for line in output.splitlines()] 349 | csvreader = csv.DictReader(output) 350 | result += [d.copy() for d in csvreader] 351 | 352 | return HAProxySocket._aggregate(result) 353 | 354 | @staticmethod 355 | def _aggregate(stats): 356 | aggregate = {} 357 | 358 | for stat in stats: 359 | aggr_key = _format_plugin_instance(stat) 360 | if aggr_key not in aggregate: 361 | aggregate[aggr_key] = {} 362 | 363 | for key in set(aggregate[aggr_key]) | set(stat): 364 | val_left = aggregate[aggr_key].get(key, 0) 365 | val_right = stat.get(key, '0') 366 | if key in METRICS_AGGR_PULL: 367 | aggregate[aggr_key][key] = val_right 368 | elif key in METRICS_AGGR_SUM: 369 | if not val_right or not val_right.isdigit(): 370 | continue 371 | aggregate[aggr_key][key] = val_left + int(val_right) 372 | elif key in METRICS_AGGR_MEAN: 373 | if not val_right or not val_right.isdigit(): 374 | continue 375 | key_aggr_mean_label = '{}_aggr_mean_cnt'.format(key) 376 | if key_aggr_mean_label not in aggregate[aggr_key]: 377 | aggregate[aggr_key][key_aggr_mean_label] = 0 378 | 379 | aggregate[aggr_key][key_aggr_mean_label] = \ 380 | aggregate[aggr_key][key_aggr_mean_label] + 1 381 | # compute a progressive mean as we don't know 382 | # how many elements to do the calculation for apriori. 383 | # this way, at any step the calculation is computed, 384 | # it represents a perfectly valid mean. 385 | nxt_mean_cnt = aggregate[aggr_key][key_aggr_mean_label] 386 | crr_mean_cnt = aggregate[aggr_key][key_aggr_mean_label] - 1 387 | aggregate[aggr_key][key] = \ 388 | ((val_left * crr_mean_cnt) + int(val_right)) \ 389 | / nxt_mean_cnt 390 | else: 391 | pass 392 | 393 | return list(aggregate.values()) 394 | 395 | @staticmethod 396 | def _connect(payload): 397 | if payload.startswith('file://') \ 398 | or payload.startswith('unix://') \ 399 | or payload.startswith('/'): 400 | fname = payload.replace('file://', '').replace('unix://', '') 401 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 402 | sock.connect(fname) 403 | return sock 404 | elif payload.startswith('tcp://'): 405 | host, port = payload.replace('tcp://', '').split(':') 406 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 407 | sock.connect((host, int(port))) 408 | return sock 409 | elif payload.startswith('http://'): 410 | pass 411 | 412 | collectd.warning('{} socket type not recognized'.format(payload)) 413 | return None 414 | 415 | 416 | def get_stats(module_config): 417 | ''' 418 | Makes two calls to haproxy to fetch server info and server stats. 419 | Returns the dict containing metric name as the key 420 | and a tuple of metric value and the dict of dimensions if any. 421 | ''' 422 | if 'sockets' not in module_config or len(module_config['sockets']) == 0: 423 | collectd.error( 424 | "At least a socket must be given as a configuration parameter") 425 | return 426 | 427 | stats = [] 428 | haproxy = HAProxySocket(module_config['sockets']) 429 | 430 | try: 431 | server_info = haproxy.get_server_info() 432 | server_stats = haproxy.get_server_stats() 433 | resolver_stats = haproxy.get_resolvers() 434 | except socket.error as e: 435 | collectd.warning( 436 | 'unable to connect to the HAProxy socket: {}'.format(str(e))) 437 | return stats 438 | 439 | # server wide stats 440 | for key, val in server_info.items(): 441 | try: 442 | stats.append((key, int(val), dict())) 443 | except (TypeError, ValueError): 444 | pass 445 | 446 | # proxy specific stats 447 | for statdict in server_stats: 448 | if not should_capture_metric(statdict, module_config): 449 | continue 450 | for metricname, val in statdict.items(): 451 | try: 452 | stats.append((metricname, int(val), statdict)) 453 | except (TypeError, ValueError): 454 | pass 455 | 456 | for resolver, resolver_stats in resolver_stats.items(): 457 | for metricname, val in resolver_stats.items(): 458 | try: 459 | stats.append((metricname, int(val), { 460 | 'is_resolver': True, 461 | 'nameserver': resolver 462 | })) 463 | except (TypeError, ValueError): 464 | pass 465 | 466 | return stats 467 | 468 | 469 | def should_capture_metric(statdict, module_config): 470 | return ( 471 | ( 472 | 'svname' in statdict and 473 | statdict['svname'].lower() in module_config['proxy_monitors'] 474 | ) or ( 475 | 'pxname' in statdict and 476 | statdict['pxname'].lower() in module_config['proxy_monitors'] 477 | ) or is_backend_server_metric(statdict) and 478 | 'backend' in module_config['proxy_monitors'] 479 | ) 480 | 481 | 482 | def is_backend_server_metric(statdict): 483 | return 'type' in statdict and _get_proxy_type(statdict['type']) == 'server' 484 | 485 | 486 | def is_resolver_metric(statdict): 487 | return 'is_resolver' in statdict and statdict['is_resolver'] 488 | 489 | 490 | def config(config_values): 491 | ''' 492 | A callback method that loads information 493 | from the HaProxy collectd plugin config file. 494 | 495 | Args: 496 | config_values (collectd.Config): Object containing config values 497 | ''' 498 | 499 | module_config = {} 500 | sockets = [] 501 | proxy_monitors = [] 502 | excluded_metrics = set() 503 | enhanced_metrics = False 504 | interval = None 505 | testing = False 506 | custom_dimensions = {} 507 | 508 | for node in config_values.children: 509 | if node.key == "ProxyMonitor" and node.values[0]: 510 | proxy_monitors.extend(node.values) 511 | elif node.key == "Socket" and node.values: 512 | sockets.extend(node.values) 513 | elif node.key == "Interval" and node.values[0]: 514 | interval = node.values[0] 515 | elif node.key == "Testing" and node.values[0]: 516 | testing = _str_to_bool(node.values[0]) 517 | elif node.key == 'Dimension': 518 | if len(node.values) == 2: 519 | custom_dimensions.update({node.values[0]: node.values[1]}) 520 | else: 521 | collectd.warning("WARNING: Check configuration \ 522 | setting for %s" % node.key) 523 | else: 524 | collectd.warning('Unknown config key: %s' % node.key) 525 | 526 | if not sockets: 527 | sockets.append(DEFAULT_SOCKET) 528 | if not proxy_monitors: 529 | proxy_monitors += DEFAULT_PROXY_MONITORS 530 | 531 | module_config = { 532 | 'sockets': sockets, 533 | 'proxy_monitors': proxy_monitors, 534 | 'interval': interval, 535 | 'enhanced_metrics': enhanced_metrics, 536 | 'excluded_metrics': excluded_metrics, 537 | 'custom_dimensions': custom_dimensions, 538 | 'testing': testing, 539 | } 540 | 541 | if testing: 542 | return module_config 543 | 544 | # pass interval only if not None 545 | interval_kwarg = {} 546 | if interval: 547 | interval_kwarg['interval'] = interval 548 | 549 | collectd.register_read( 550 | collect_metrics, data=module_config, 551 | name='node_{}_{}'.format('_'.join(sockets), '_'.join(proxy_monitors)), 552 | **interval_kwarg) 553 | 554 | 555 | def _format_plugin_instance(dimensions): 556 | if is_backend_server_metric(dimensions): 557 | return "{0}.{1}.{2}".format( 558 | "backend", 559 | dimensions['pxname'].lower(), 560 | dimensions['svname'] 561 | ) 562 | elif is_resolver_metric(dimensions): 563 | return "nameserver.{0}".format( 564 | dimensions['nameserver'] 565 | ) 566 | else: 567 | return "{0}.{1}".format( 568 | dimensions['svname'].lower(), 569 | dimensions['pxname'] 570 | ) 571 | 572 | 573 | def _get_proxy_type(type_id): 574 | ''' 575 | Return human readable proxy type 576 | ''' 577 | return { 578 | 0: 'frontend', 579 | 1: 'backend', 580 | 2: 'server', 581 | 3: 'socket/listener', 582 | }.get(int(type_id)) 583 | 584 | 585 | def _str_to_bool(val): 586 | ''' 587 | Converts a true/false string to a boolean 588 | ''' 589 | val = str(val).strip().lower() 590 | if val == 'true': 591 | return True 592 | elif val != 'false': 593 | collectd.warning( 594 | '"%s" cannot be converted to a bool: returning false.' % val) 595 | 596 | return False 597 | 598 | 599 | def submit_metrics(metric_datapoint): 600 | datapoint = collectd.Values() 601 | datapoint.type = metric_datapoint['type'] 602 | datapoint.type_instance = metric_datapoint['type_instance'] 603 | datapoint.plugin = metric_datapoint['plugin'] 604 | if 'plugin_instance' in metric_datapoint.keys(): 605 | datapoint.plugin_instance = metric_datapoint['plugin_instance'] 606 | datapoint.values = metric_datapoint['values'] 607 | datapoint.dispatch() 608 | 609 | 610 | def collect_metrics(module_config): 611 | ''' 612 | A callback method that gets metrics from HAProxy 613 | and records them to collectd. 614 | ''' 615 | 616 | collectd.debug('beginning collect_metrics') 617 | info = get_stats(module_config) 618 | 619 | if not info: 620 | collectd.warning('%s: No data received' % PLUGIN_NAME) 621 | return 622 | 623 | for metric_name, metric_value, dimensions in info: 624 | # assert metric is in valid metrics lists 625 | if metric_name not in METRICS_TO_COLLECT: 626 | collectd.debug( 627 | "metric %s is ignored" % metric_name.lower()) 628 | continue 629 | 630 | metric_datapoint = { 631 | 'plugin': PLUGIN_NAME, 632 | 'type': METRICS_TO_COLLECT[metric_name], 633 | 'type_instance': metric_name.lower(), 634 | 'values': (metric_value,) 635 | } 636 | if len(dimensions) > 0: 637 | metric_datapoint['plugin_instance'] = _format_plugin_instance( 638 | dimensions) 639 | submit_metrics(metric_datapoint) 640 | 641 | 642 | collectd.register_config(config) 643 | -------------------------------------------------------------------------------- /haproxy_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2020 Immobiliare Labs 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy 6 | # of this software and associated documentation files (the "Software"), to deal 7 | # in the Software without restriction, including without limitation the rights 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | # copies of the Software, and to permit persons to whom the Software is 10 | # furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in 13 | # all copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | # SOFTWARE. 22 | 23 | from __future__ import print_function 24 | 25 | import collections 26 | import sys 27 | 28 | from mock import call 29 | from mock import MagicMock 30 | from mock import Mock 31 | from mock import patch 32 | 33 | 34 | class MockCollectd(MagicMock): 35 | """ 36 | Mocks the functions and objects provided by the collectd module 37 | """ 38 | 39 | @staticmethod 40 | def log(log_str): 41 | print(log_str) 42 | 43 | debug = log 44 | info = log 45 | warning = log 46 | error = log 47 | 48 | 49 | class MockHAProxySocketSimple: 50 | def __init__(self, sockets=["whatever"]): 51 | self.sockets = sockets 52 | 53 | def get_resolvers(self): 54 | return {} 55 | 56 | def get_server_info(self): 57 | return { 58 | 'ConnRate': '3', 59 | 'CumReq': '5', 60 | 'Idle_pct': '78' 61 | } 62 | 63 | def get_server_stats(self): 64 | return [{ 65 | 'bin': '3120628', 66 | 'lastchg': '', 67 | 'lbt': '', 68 | 'weight': '', 69 | 'wretr': '', 70 | 'slim': '50', 71 | 'pid': '1', 72 | 'wredis': '', 73 | 'dresp': '0', 74 | 'ereq': '0', 75 | 'pxname': 'sample_proxy', 76 | 'stot': '39728', 77 | 'sid': '0', 78 | 'bout': '188112702395', 79 | 'qlimit': '', 80 | 'status': 'OPEN', 81 | 'smax': '2', 82 | 'dreq': '0', 83 | 'econ': '', 84 | 'iid': '2', 85 | 'chkfail': '', 86 | 'downtime': '', 87 | 'qcur': '', 88 | 'eresp': '', 89 | 'throttle': '', 90 | 'scur': '0', 91 | 'bck': '', 92 | 'qmax': '', 93 | 'act': '', 94 | 'chkdown': '', 95 | 'svname': 'FRONTEND' 96 | }] 97 | 98 | 99 | class MockHAProxySocketComplex: 100 | def __init__(self, socket_file="whatever"): 101 | self.socket_file = socket_file 102 | 103 | def get_resolvers(self): 104 | return { 105 | 'dns1': { 106 | 'sent': '8', 107 | 'snd_error': '0', 108 | 'valid': '4', 109 | 'update': '0', 110 | 'cname': '0', 111 | 'cname_error': '4', 112 | 'any_err': '0', 113 | 'nx': '0', 114 | 'timeout': '0', 115 | 'refused': '0', 116 | 'other': '0', 117 | 'invalid': '0', 118 | 'too_big': '0', 119 | 'truncated': '0', 120 | 'outdated': '0' 121 | }, 'dns2': { 122 | 'sent': '0', 123 | 'snd_error': '0', 124 | 'valid': '0', 125 | 'update': '0', 126 | 'cname': '0', 127 | 'cname_error': '0', 128 | 'any_err': '0', 129 | 'nx': '0', 130 | 'timeout': '0', 131 | 'refused': '0', 132 | 'other': '0', 133 | 'invalid': '0', 134 | 'too_big': '0', 135 | 'truncated': '0', 136 | 'outdated': '0' 137 | } 138 | } 139 | 140 | def get_server_info(self): 141 | return { 142 | 'ConnRate': '3', 143 | 'CumReq': '5', 144 | 'Idle_pct': '78' 145 | } 146 | 147 | def get_server_stats(self): 148 | return [{ 149 | 'lastchg': '321093', 150 | 'agent_health': '', 151 | 'check_desc': 'Layer7 check passed', 152 | 'smax': '2', 153 | 'agent_rise': '', 154 | 'req_rate': '', 155 | 'check_status': 'L7OK', 156 | 'wredis': '0', 157 | 'comp_out': '', 158 | 'conn_rate': '', 159 | 'cli_abrt': '0', 160 | 'pxname': 'elasticsearch_backend', 161 | 'check_code': '0', 162 | 'check_health': '4', 163 | 'check_fall': '3', 164 | 'qlimit': '', 165 | 'bin': '0', 166 | 'conn_rate_max': '', 167 | 'hrsp_5xx': '', 168 | 'stot': '344777', 169 | 'econ': '0', 170 | 'iid': '3', 171 | 'hrsp_4xx': '', 172 | 'hanafail': '', 173 | 'downtime': '0', 174 | 'eresp': '0', 175 | 'bout': '0', 176 | 'dses': '', 177 | 'qtime': '0', 178 | 'srv_abrt': '0', 179 | 'throttle': '', 180 | 'ctime': '0', 181 | 'scur': '0', 182 | 'type': '2', 183 | 'check_rise': '2', 184 | 'intercepted': '', 185 | 'hrsp_2xx': '', 186 | 'mode': 'tcp', 187 | 'agent_code': '', 188 | 'qmax': '0', 189 | 'agent_desc': '', 190 | 'weight': '1', 191 | 'slim': '', 192 | 'pid': '1', 193 | 'comp_byp': '', 194 | 'lastsess': '0', 195 | 'comp_rsp': '', 196 | 'agent_status': '', 197 | 'check_duration': '0', 198 | 'rate': '2', 199 | 'rate_max': '9', 200 | 'dresp': '0', 201 | 'ereq': '', 202 | 'addr': '192.168.1.1:6379', 203 | 'comp_in': '', 204 | 'dcon': '', 205 | 'last_chk': '(tcp-check)', 206 | 'sid': '1', 207 | 'ttime': '18', 208 | 'hrsp_1xx': '', 209 | 'agent_duration': '', 210 | 'hrsp_other': '', 211 | 'status': 'UP', 212 | 'wretr': '0', 213 | 'lbtot': '344777', 214 | 'dreq': '', 215 | 'req_rate_max': '', 216 | 'conn_tot': '', 217 | 'chkfail': '0', 218 | 'cookie': '', 219 | 'qcur': '0', 220 | 'tracked': '', 221 | 'rtime': '0', 222 | 'last_agt': '', 223 | 'bck': '0', 224 | 'req_tot': '', 225 | 'rate_lim': '', 226 | 'hrsp_3xx': '', 227 | 'algo': '', 228 | 'act': '1', 229 | 'chkdown': '0', 230 | 'svname': 'elasticache', 231 | 'agent_fall': '' 232 | }, { 233 | 'lastchg': '321093', 234 | 'agent_health': '', 235 | 'check_desc': '', 236 | 'smax': '2', 237 | 'agent_rise': '', 238 | 'req_rate': '', 239 | 'check_status': '', 240 | 'wredis': '0', 241 | 'comp_out': '0', 242 | 'conn_rate': '', 243 | 'cli_abrt': '0', 244 | 'pxname': 'elasticsearch_backend', 245 | 'check_code': '', 246 | 'check_health': '', 247 | 'check_fall': '', 248 | 'qlimit': '', 249 | 'bin': '0', 250 | 'conn_rate_max': '', 251 | 'hrsp_5xx': '', 252 | 'stot': '515751', 253 | 'econ': '0', 254 | 'iid': '3', 255 | 'hrsp_4xx': '', 256 | 'hanafail': '', 257 | 'downtime': '0', 258 | 'eresp': '0', 259 | 'bout': '0', 260 | 'dses': '', 261 | 'qtime': '0', 262 | 'srv_abrt': '0', 263 | 'throttle': '', 264 | 'ctime': '0', 265 | 'scur': '0', 266 | 'type': '1', 267 | 'check_rise': '', 268 | 'intercepted': '', 269 | 'hrsp_2xx': '', 270 | 'mode': 'tcp', 271 | 'agent_code': '', 272 | 'qmax': '0', 273 | 'agent_desc': '', 274 | 'weight': '1', 275 | 'slim': '800', 276 | 'pid': '1', 277 | 'comp_byp': '0', 278 | 'lastsess': '0', 279 | 'comp_rsp': '0', 280 | 'agent_status': '', 281 | 'check_duration': '', 282 | 'rate': '3', 283 | 'rate_max': '9', 284 | 'dresp': '0', 285 | 'ereq': '', 286 | 'addr': '', 287 | 'comp_in': '0', 288 | 'dcon': '', 289 | 'last_chk': '', 290 | 'sid': '0', 291 | 'ttime': '18', 292 | 'hrsp_1xx': '', 293 | 'agent_duration': '', 294 | 'hrsp_other': '', 295 | 'status': 'UP', 296 | 'wretr': '0', 297 | 'lbtot': '344777', 298 | 'dreq': '0', 299 | 'req_rate_max': '', 300 | 'conn_tot': '', 301 | 'chkfail': '', 302 | 'cookie': '', 303 | 'qcur': '0', 304 | 'tracked': '', 305 | 'rtime': '0', 306 | 'last_agt': '', 307 | 'bck': '0', 308 | 'req_tot': '', 309 | 'rate_lim': '', 310 | 'hrsp_3xx': '', 311 | 'algo': 'roundrobin', 312 | 'act': '1', 313 | 'chkdown': '0', 314 | 'svname': 'BACKEND', 315 | 'agent_fall': '' 316 | }, { 317 | 'lastchg': '', 318 | 'agent_health': None, 319 | 'check_desc': None, 320 | 'smax': '0', 321 | 'agent_rise': None, 322 | 'req_rate': '0', 323 | 'check_status': '', 324 | 'wredis': '', 325 | 'comp_out': None, 326 | 'conn_rate': None, 327 | 'cli_abrt': None, 328 | 'pxname': 'sensu_frontend', 329 | 'check_code': '', 330 | 'check_health': None, 331 | 'check_fall': None, 332 | 'qlimit': '', 333 | 'bin': '0', 334 | 'conn_rate_max': None, 335 | 'hrsp_5xx': '', 336 | 'stot': '0', 337 | 'econ': '', 338 | 'iid': '4', 339 | 'hrsp_4xx': '', 340 | 'hanafail': '', 341 | 'downtime': '', 342 | 'eresp': '', 343 | 'bout': '0', 344 | 'dses': None, 345 | 'qtime': None, 346 | 'srv_abrt': None, 347 | 'throttle': '', 348 | 'ctime': None, 349 | 'scur': '0', 350 | 'type': '0', 351 | 'check_rise': None, 352 | 'intercepted': None, 353 | 'hrsp_2xx': '', 354 | 'mode': None, 355 | 'agent_code': None, 356 | 'qmax': '', 357 | 'agent_desc': None, 358 | 'weight': '', 359 | 'slim': '8000', 360 | 'pid': '1', 361 | 'comp_byp': None, 362 | 'lastsess': None, 363 | 'comp_rsp': None, 364 | 'agent_status': None, 365 | 'check_duration': '', 366 | 'rate': '0', 367 | 'rate_max': '10', 368 | 'dresp': '0', 369 | 'ereq': '0', 370 | 'addr': None, 371 | 'comp_in': None, 372 | 'dcon': None, 373 | 'last_chk': None, 374 | 'sid': '0', 375 | 'ttime': None, 376 | 'hrsp_1xx': '', 377 | 'agent_duration': None, 378 | 'hrsp_other': '', 379 | 'status': 'OPEN', 380 | 'wretr': '', 381 | 'lbtot': '', 382 | 'dreq': '0', 383 | 'req_rate_max': '0', 384 | 'conn_tot': None, 385 | 'chkfail': '', 386 | 'cookie': None, 387 | 'qcur': '', 388 | 'tracked': '', 389 | 'rtime': None, 390 | 'last_agt': None, 391 | 'bck': '', 392 | 'req_tot': '', 393 | 'rate_lim': '0', 394 | 'hrsp_3xx': '', 395 | 'algo': None, 396 | 'act': '', 397 | 'chkdown': '', 398 | 'svname': 'FRONTEND', 399 | }] 400 | 401 | 402 | # don't move the block below 403 | sys.modules['collectd'] = MockCollectd() 404 | import haproxy # nopep8 405 | ConfigOption = collections.namedtuple('ConfigOption', ('key', 'values')) 406 | mock_config_default_values = Mock() 407 | mock_config_default_values.children = [ 408 | ConfigOption('Testing', ('True',)) 409 | ] 410 | 411 | 412 | def test_default_config(): 413 | module_config = haproxy.config(mock_config_default_values) 414 | assert module_config['sockets'] == ['/var/run/haproxy.sock'] 415 | assert module_config['proxy_monitors'] == ['server', 'frontend', 'backend'] 416 | assert module_config['testing'] 417 | 418 | 419 | @patch('haproxy.HAProxySocket', MockHAProxySocketComplex) 420 | def test_metrics_submitted_for_frontend_with_correct_names(): 421 | haproxy.submit_metrics = MagicMock() 422 | mock_config = Mock() 423 | mock_config.children = [ 424 | ConfigOption('ProxyMonitor', ('frontend',)), 425 | ConfigOption('EnhancedMetrics', ('True',)), 426 | ConfigOption('Testing', ('True',)) 427 | ] 428 | haproxy.collect_metrics(haproxy.config(mock_config)) 429 | haproxy.submit_metrics.assert_has_calls([ 430 | call({ 431 | 'values': (3,), 432 | 'type_instance': 'connrate', 433 | 'type': 'gauge', 434 | 'plugin': 'haproxy' 435 | }), call({ 436 | 'values': (5,), 437 | 'type_instance': 'cumreq', 438 | 'type': 'derive', 439 | 'plugin': 'haproxy' 440 | }), call({ 441 | 'values': (78,), 442 | 'type_instance': 'idle_pct', 443 | 'type': 'gauge', 444 | 'plugin': 'haproxy' 445 | }), call({ 446 | 'values': (0,), 447 | 'plugin_instance': 'frontend.sensu_frontend', 448 | 'type_instance': 'smax', 449 | 'type': 'gauge', 450 | 'plugin': 'haproxy' 451 | }), call({ 452 | 'values': (0,), 453 | 'plugin_instance': 'frontend.sensu_frontend', 454 | 'type_instance': 'rate', 455 | 'type': 'gauge', 456 | 'plugin': 'haproxy' 457 | }), call({ 458 | 'values': (0,), 459 | 'plugin_instance': 'frontend.sensu_frontend', 460 | 'type_instance': 'req_rate', 461 | 'type': 'gauge', 462 | 'plugin': 'haproxy' 463 | }), call({ 464 | 'values': (0,), 465 | 'plugin_instance': 'frontend.sensu_frontend', 466 | 'type_instance': 'dresp', 467 | 'type': 'derive', 468 | 'plugin': 'haproxy' 469 | }), call({ 470 | 'values': (0,), 471 | 'plugin_instance': 'frontend.sensu_frontend', 472 | 'type_instance': 'ereq', 473 | 'type': 'derive', 474 | 'plugin': 'haproxy' 475 | }), call({ 476 | 'values': (0,), 477 | 'plugin_instance': 'frontend.sensu_frontend', 478 | 'type_instance': 'dreq', 479 | 'type': 'derive', 480 | 'plugin': 'haproxy' 481 | }), call({ 482 | 'values': (0,), 483 | 'plugin_instance': 'frontend.sensu_frontend', 484 | 'type_instance': 'bin', 485 | 'type': 'derive', 486 | 'plugin': 'haproxy' 487 | }), call({ 488 | 'values': (0,), 489 | 'plugin_instance': 'frontend.sensu_frontend', 490 | 'type_instance': 'stot', 491 | 'type': 'derive', 492 | 'plugin': 'haproxy' 493 | }), call({ 494 | 'values': (0,), 495 | 'plugin_instance': 'frontend.sensu_frontend', 496 | 'type_instance': 'req_rate_max', 497 | 'type': 'gauge', 498 | 'plugin': 'haproxy' 499 | }), call({ 500 | 'values': (8000,), 501 | 'plugin_instance': 'frontend.sensu_frontend', 502 | 'type_instance': 'slim', 503 | 'type': 'gauge', 504 | 'plugin': 'haproxy' 505 | }), call({ 506 | 'values': (0,), 507 | 'plugin_instance': 'frontend.sensu_frontend', 508 | 'type_instance': 'rate_lim', 509 | 'type': 'gauge', 510 | 'plugin': 'haproxy' 511 | }), call({ 512 | 'values': (0,), 513 | 'plugin_instance': 'frontend.sensu_frontend', 514 | 'type_instance': 'bout', 515 | 'type': 'derive', 516 | 'plugin': 'haproxy' 517 | }), call({ 518 | 'values': (0,), 519 | 'plugin_instance': 'frontend.sensu_frontend', 520 | 'type_instance': 'scur', 521 | 'type': 'gauge', 522 | 'plugin': 'haproxy' 523 | }), call({ 524 | 'values': (10,), 525 | 'plugin_instance': 'frontend.sensu_frontend', 526 | 'type_instance': 'rate_max', 527 | 'type': 'gauge', 528 | 'plugin': 'haproxy' 529 | }) 530 | ], any_order=True) 531 | 532 | 533 | @patch('haproxy.HAProxySocket', MockHAProxySocketComplex) 534 | def test_metrics_submitted_for_backend_and_server_with_correct_names(): 535 | haproxy.submit_metrics = MagicMock() 536 | mock_config = Mock() 537 | mock_config.children = [ 538 | ConfigOption('ProxyMonitor', ('backend',)), 539 | ConfigOption('EnhancedMetrics', ('True',)), 540 | ConfigOption('Testing', ('True',)) 541 | ] 542 | haproxy.collect_metrics(haproxy.config(mock_config)) 543 | haproxy.submit_metrics.assert_has_calls([ 544 | call({ 545 | 'values': (0,), 546 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 547 | 'type_instance': 'rtime', 548 | 'type': 'gauge', 549 | 'plugin': 'haproxy' 550 | }), call({ 551 | 'values': (2,), 552 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 553 | 'type_instance': 'smax', 554 | 'type': 'gauge', 555 | 'plugin': 'haproxy' 556 | }), call({ 557 | 'values': (0,), 558 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 559 | 'type_instance': 'lastsess', 560 | 'type': 'gauge', 561 | 'plugin': 'haproxy' 562 | }), call({ 563 | 'values': (0,), 564 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 565 | 'type_instance': 'check_duration', 566 | 'type': 'gauge', 567 | 'plugin': 'haproxy' 568 | }), call({ 569 | 'values': (2,), 570 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 571 | 'type_instance': 'rate', 572 | 'type': 'gauge', 573 | 'plugin': 'haproxy' 574 | }), call({ 575 | 'values': (0,), 576 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 577 | 'type_instance': 'wredis', 578 | 'type': 'derive', 579 | 'plugin': 'haproxy' 580 | }), call({ 581 | 'values': (0,), 582 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 583 | 'type_instance': 'eresp', 584 | 'type': 'derive', 585 | 'plugin': 'haproxy' 586 | }), call({ 587 | 'values': (0,), 588 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 589 | 'type_instance': 'dresp', 590 | 'type': 'derive', 591 | 'plugin': 'haproxy' 592 | }), call({ 593 | 'values': (0,), 594 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 595 | 'type_instance': 'cli_abrt', 596 | 'type': 'derive', 597 | 'plugin': 'haproxy' 598 | }), call({ 599 | 'values': (0,), 600 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 601 | 'type_instance': 'bin', 602 | 'type': 'derive', 603 | 'plugin': 'haproxy' 604 | }), call({ 605 | 'values': (344777,), 606 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 607 | 'type_instance': 'lbtot', 608 | 'type': 'counter', 609 | 'plugin': 'haproxy' 610 | }), call({ 611 | 'values': (344777,), 612 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 613 | 'type_instance': 'stot', 614 | 'type': 'derive', 615 | 'plugin': 'haproxy' 616 | }), call({ 617 | 'values': (0,), 618 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 619 | 'type_instance': 'econ', 620 | 'type': 'derive', 621 | 'plugin': 'haproxy' 622 | }), call({ 623 | 'values': (18,), 624 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 625 | 'type_instance': 'ttime', 626 | 'type': 'gauge', 627 | 'plugin': 'haproxy' 628 | }), call({ 629 | 'values': (0,), 630 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 631 | 'type_instance': 'downtime', 632 | 'type': 'derive', 633 | 'plugin': 'haproxy' 634 | }), call({ 635 | 'values': (0,), 636 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 637 | 'type_instance': 'qcur', 638 | 'type': 'gauge', 639 | 'plugin': 'haproxy' 640 | }), call({ 641 | 'values': (0,), 642 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 643 | 'type_instance': 'wretr', 644 | 'type': 'derive', 645 | 'plugin': 'haproxy' 646 | }), call({ 647 | 'values': (0,), 648 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 649 | 'type_instance': 'qtime', 650 | 'type': 'gauge', 651 | 'plugin': 'haproxy' 652 | }), call({ 653 | 'values': (0,), 654 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 655 | 'type_instance': 'srv_abrt', 656 | 'type': 'derive', 657 | 'plugin': 'haproxy' 658 | }), call({ 659 | 'values': (0,), 660 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 661 | 'type_instance': 'bout', 662 | 'type': 'derive', 663 | 'plugin': 'haproxy' 664 | }), call({ 665 | 'values': (0,), 666 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 667 | 'type_instance': 'ctime', 668 | 'type': 'gauge', 669 | 'plugin': 'haproxy' 670 | }), call({ 671 | 'values': (0,), 672 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 673 | 'type_instance': 'scur', 674 | 'type': 'gauge', 675 | 'plugin': 'haproxy' 676 | }), call({ 677 | 'values': (0,), 678 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 679 | 'type_instance': 'bck', 680 | 'type': 'gauge', 681 | 'plugin': 'haproxy' 682 | }), call({ 683 | 'values': (0,), 684 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 685 | 'type_instance': 'qmax', 686 | 'type': 'gauge', 687 | 'plugin': 'haproxy' 688 | }), call({ 689 | 'values': (9,), 690 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 691 | 'type_instance': 'rate_max', 692 | 'type': 'gauge', 693 | 'plugin': 'haproxy' 694 | }), call({ 695 | 'values': (1,), 696 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 697 | 'type_instance': 'act', 698 | 'type': 'gauge', 699 | 'plugin': 'haproxy' 700 | }), call({ 701 | 'values': (0,), 702 | 'plugin_instance': 'backend.elasticsearch_backend.elasticache', 703 | 'type_instance': 'chkfail', 704 | 'type': 'derive', 705 | 'plugin': 'haproxy' 706 | }), call({ 707 | 'values': (0,), 708 | 'plugin_instance': 'backend.elasticsearch_backend', 709 | 'type_instance': 'rtime', 710 | 'type': 'gauge', 711 | 'plugin': 'haproxy' 712 | }), call({ 713 | 'values': (2,), 714 | 'plugin_instance': 'backend.elasticsearch_backend', 715 | 'type_instance': 'smax', 716 | 'type': 'gauge', 717 | 'plugin': 'haproxy' 718 | }), call({ 719 | 'values': (0,), 720 | 'plugin_instance': 'backend.elasticsearch_backend', 721 | 'type_instance': 'comp_byp', 722 | 'type': 'derive', 723 | 'plugin': 'haproxy' 724 | }), call({ 725 | 'values': (0,), 726 | 'plugin_instance': 'backend.elasticsearch_backend', 727 | 'type_instance': 'lastsess', 728 | 'type': 'gauge', 729 | 'plugin': 'haproxy' 730 | }), call({ 731 | 'values': (3,), 732 | 'plugin_instance': 'backend.elasticsearch_backend', 733 | 'type_instance': 'rate', 734 | 'type': 'gauge', 735 | 'plugin': 'haproxy' 736 | }), call({ 737 | 'values': (0,), 738 | 'plugin_instance': 'backend.elasticsearch_backend', 739 | 'type_instance': 'wredis', 740 | 'type': 'derive', 741 | 'plugin': 'haproxy' 742 | }), call({ 743 | 'values': (0,), 744 | 'plugin_instance': 'backend.elasticsearch_backend', 745 | 'type_instance': 'comp_out', 746 | 'type': 'derive', 747 | 'plugin': 'haproxy' 748 | }), call({ 749 | 'values': (0,), 750 | 'plugin_instance': 'backend.elasticsearch_backend', 751 | 'type_instance': 'eresp', 752 | 'type': 'derive', 753 | 'plugin': 'haproxy' 754 | }), call({ 755 | 'values': (0,), 756 | 'plugin_instance': 'backend.elasticsearch_backend', 757 | 'type_instance': 'dresp', 758 | 'type': 'derive', 759 | 'plugin': 'haproxy' 760 | }), call({ 761 | 'values': (0,), 762 | 'plugin_instance': 'backend.elasticsearch_backend', 763 | 'type_instance': 'comp_in', 764 | 'type': 'derive', 765 | 'plugin': 'haproxy' 766 | }), call({ 767 | 'values': (0,), 768 | 'plugin_instance': 'backend.elasticsearch_backend', 769 | 'type_instance': 'dreq', 770 | 'type': 'derive', 771 | 'plugin': 'haproxy' 772 | }), call({ 773 | 'values': (0,), 774 | 'plugin_instance': 'backend.elasticsearch_backend', 775 | 'type_instance': 'cli_abrt', 776 | 'type': 'derive', 777 | 'plugin': 'haproxy' 778 | }), call({ 779 | 'values': (0,), 780 | 'plugin_instance': 'backend.elasticsearch_backend', 781 | 'type_instance': 'bin', 782 | 'type': 'derive', 783 | 'plugin': 'haproxy' 784 | }), call({ 785 | 'values': (344777,), 786 | 'plugin_instance': 'backend.elasticsearch_backend', 787 | 'type_instance': 'lbtot', 788 | 'type': 'counter', 789 | 'plugin': 'haproxy' 790 | }), call({ 791 | 'values': (515751,), 792 | 'plugin_instance': 'backend.elasticsearch_backend', 793 | 'type_instance': 'stot', 794 | 'type': 'derive', 795 | 'plugin': 'haproxy' 796 | }), call({ 797 | 'values': (0,), 798 | 'plugin_instance': 'backend.elasticsearch_backend', 799 | 'type_instance': 'econ', 800 | 'type': 'derive', 801 | 'plugin': 'haproxy' 802 | }), call({ 803 | 'values': (18,), 804 | 'plugin_instance': 'backend.elasticsearch_backend', 805 | 'type_instance': 'ttime', 806 | 'type': 'gauge', 807 | 'plugin': 'haproxy' 808 | }), call({ 809 | 'values': (800,), 810 | 'plugin_instance': 'backend.elasticsearch_backend', 811 | 'type_instance': 'slim', 812 | 'type': 'gauge', 813 | 'plugin': 'haproxy' 814 | }), call({ 815 | 'values': (0,), 816 | 'plugin_instance': 'backend.elasticsearch_backend', 817 | 'type_instance': 'downtime', 818 | 'type': 'derive', 819 | 'plugin': 'haproxy' 820 | }), call({ 821 | 'values': (0,), 822 | 'plugin_instance': 'backend.elasticsearch_backend', 823 | 'type_instance': 'qcur', 824 | 'type': 'gauge', 825 | 'plugin': 'haproxy' 826 | }), call({ 827 | 'values': (0,), 828 | 'plugin_instance': 'backend.elasticsearch_backend', 829 | 'type_instance': 'comp_rsp', 830 | 'type': 'derive', 831 | 'plugin': 'haproxy' 832 | }), call({ 833 | 'values': (0,), 834 | 'plugin_instance': 'backend.elasticsearch_backend', 835 | 'type_instance': 'wretr', 836 | 'type': 'derive', 837 | 'plugin': 'haproxy' 838 | }), call({ 839 | 'values': (0,), 840 | 'plugin_instance': 'backend.elasticsearch_backend', 841 | 'type_instance': 'qtime', 842 | 'type': 'gauge', 843 | 'plugin': 'haproxy' 844 | }), call({ 845 | 'values': (0,), 846 | 'plugin_instance': 'backend.elasticsearch_backend', 847 | 'type_instance': 'srv_abrt', 848 | 'type': 'derive', 849 | 'plugin': 'haproxy' 850 | }), call({ 851 | 'values': (0,), 852 | 'plugin_instance': 'backend.elasticsearch_backend', 853 | 'type_instance': 'bout', 854 | 'type': 'derive', 855 | 'plugin': 'haproxy' 856 | }), call({ 857 | 'values': (0,), 858 | 'plugin_instance': 'backend.elasticsearch_backend', 859 | 'type_instance': 'ctime', 860 | 'type': 'gauge', 861 | 'plugin': 'haproxy' 862 | }), call({ 863 | 'values': (0,), 864 | 'plugin_instance': 'backend.elasticsearch_backend', 865 | 'type_instance': 'scur', 866 | 'type': 'gauge', 867 | 'plugin': 'haproxy' 868 | }), call({ 869 | 'values': (0,), 870 | 'plugin_instance': 'backend.elasticsearch_backend', 871 | 'type_instance': 'bck', 872 | 'type': 'gauge', 873 | 'plugin': 'haproxy' 874 | }), call({ 875 | 'values': (0,), 876 | 'plugin_instance': 'backend.elasticsearch_backend', 877 | 'type_instance': 'qmax', 878 | 'type': 'gauge', 879 | 'plugin': 'haproxy' 880 | }), call({ 881 | 'values': (9,), 882 | 'plugin_instance': 'backend.elasticsearch_backend', 883 | 'type_instance': 'rate_max', 884 | 'type': 'gauge', 885 | 'plugin': 'haproxy' 886 | }), call({ 887 | 'values': (1,), 888 | 'plugin_instance': 'backend.elasticsearch_backend', 889 | 'type_instance': 'act', 890 | 'type': 'gauge', 891 | 'plugin': 'haproxy' 892 | }) 893 | ], any_order=True) 894 | 895 | 896 | @patch('haproxy.HAProxySocket', MockHAProxySocketComplex) 897 | def test_metrics_submitted_for_resolvers(): 898 | haproxy.submit_metrics = MagicMock() 899 | mock_config = Mock() 900 | mock_config.children = [ 901 | ConfigOption('Testing', ('True',)) 902 | ] 903 | haproxy.collect_metrics(haproxy.config(mock_config)) 904 | haproxy.submit_metrics.assert_has_calls([ 905 | call({ 906 | 'values': (0,), 907 | 'plugin_instance': 'nameserver.dns2', 908 | 'type_instance': 'cname_error', 909 | 'type': 'gauge', 910 | 'plugin': 'haproxy' 911 | }), call({ 912 | 'values': (0,), 913 | 'plugin_instance': 'nameserver.dns2', 914 | 'type_instance': 'truncated', 915 | 'type': 'gauge', 916 | 'plugin': 'haproxy' 917 | }), call({ 918 | 'values': (0,), 919 | 'plugin_instance': 'nameserver.dns2', 920 | 'type_instance': 'update', 921 | 'type': 'gauge', 922 | 'plugin': 'haproxy' 923 | }), call({ 924 | 'values': (0,), 925 | 'plugin_instance': 'nameserver.dns2', 926 | 'type_instance': 'refused', 927 | 'type': 'gauge', 928 | 'plugin': 'haproxy' 929 | }), call({ 930 | 'values': (0,), 931 | 'plugin_instance': 'nameserver.dns2', 932 | 'type_instance': 'any_err', 933 | 'type': 'gauge', 934 | 'plugin': 'haproxy' 935 | }), call({ 936 | 'values': (0,), 937 | 'plugin_instance': 'nameserver.dns2', 938 | 'type_instance': 'cname', 939 | 'type': 'gauge', 940 | 'plugin': 'haproxy' 941 | }), call({ 942 | 'values': (0,), 943 | 'plugin_instance': 'nameserver.dns2', 944 | 'type_instance': 'outdated', 945 | 'type': 'gauge', 946 | 'plugin': 'haproxy' 947 | }), call({ 948 | 'values': (0,), 949 | 'plugin_instance': 'nameserver.dns2', 950 | 'type_instance': 'too_big', 951 | 'type': 'gauge', 952 | 'plugin': 'haproxy' 953 | }), call({ 954 | 'values': (0,), 955 | 'plugin_instance': 'nameserver.dns2', 956 | 'type_instance': 'invalid', 957 | 'type': 'gauge', 958 | 'plugin': 'haproxy' 959 | }), call({ 960 | 'values': (0,), 961 | 'plugin_instance': 'nameserver.dns2', 962 | 'type_instance': 'snd_error', 963 | 'type': 'gauge', 964 | 'plugin': 'haproxy' 965 | }), call({ 966 | 'values': (0,), 967 | 'plugin_instance': 'nameserver.dns2', 968 | 'type_instance': 'nx', 969 | 'type': 'gauge', 970 | 'plugin': 'haproxy' 971 | }), call({ 972 | 'values': (0,), 973 | 'plugin_instance': 'nameserver.dns2', 974 | 'type_instance': 'valid', 975 | 'type': 'gauge', 976 | 'plugin': 'haproxy' 977 | }), call({ 978 | 'values': (0,), 979 | 'plugin_instance': 'nameserver.dns2', 980 | 'type_instance': 'timeout', 981 | 'type': 'gauge', 982 | 'plugin': 'haproxy' 983 | }), call({ 984 | 'values': (0,), 985 | 'plugin_instance': 'nameserver.dns2', 986 | 'type_instance': 'other', 987 | 'type': 'gauge', 988 | 'plugin': 'haproxy' 989 | }), call({ 990 | 'values': (0,), 991 | 'plugin_instance': 'nameserver.dns2', 992 | 'type_instance': 'sent', 993 | 'type': 'gauge', 994 | 'plugin': 'haproxy' 995 | }), call({ 996 | 'values': (4,), 997 | 'plugin_instance': 'nameserver.dns1', 998 | 'type_instance': 'cname_error', 999 | 'type': 'gauge', 1000 | 'plugin': 'haproxy' 1001 | }), call({ 1002 | 'values': (0,), 1003 | 'plugin_instance': 'nameserver.dns1', 1004 | 'type_instance': 'truncated', 1005 | 'type': 'gauge', 1006 | 'plugin': 'haproxy' 1007 | }), call({ 1008 | 'values': (0,), 1009 | 'plugin_instance': 'nameserver.dns1', 1010 | 'type_instance': 'update', 1011 | 'type': 'gauge', 1012 | 'plugin': 'haproxy' 1013 | }), call({ 1014 | 'values': (0,), 1015 | 'plugin_instance': 'nameserver.dns1', 1016 | 'type_instance': 'refused', 1017 | 'type': 'gauge', 1018 | 'plugin': 'haproxy' 1019 | }), call({ 1020 | 'values': (0,), 1021 | 'plugin_instance': 'nameserver.dns1', 1022 | 'type_instance': 'any_err', 1023 | 'type': 'gauge', 1024 | 'plugin': 'haproxy' 1025 | }), call({ 1026 | 'values': (0,), 1027 | 'plugin_instance': 'nameserver.dns1', 1028 | 'type_instance': 1029 | 'cname', 1030 | 'type': 'gauge', 1031 | 'plugin': 'haproxy' 1032 | }), call({ 1033 | 'values': (0,), 1034 | 'plugin_instance': 'nameserver.dns1', 1035 | 'type_instance': 'outdated', 1036 | 'type': 'gauge', 1037 | 'plugin': 'haproxy' 1038 | }), call({ 1039 | 'values': (0,), 1040 | 'plugin_instance': 'nameserver.dns1', 1041 | 'type_instance': 'too_big', 1042 | 'type': 'gauge', 1043 | 'plugin': 'haproxy' 1044 | }), call({ 1045 | 'values': (0,), 1046 | 'plugin_instance': 'nameserver.dns1', 1047 | 'type_instance': 'invalid', 1048 | 'type': 'gauge', 1049 | 'plugin': 'haproxy' 1050 | }), call({ 1051 | 'values': (0,), 1052 | 'plugin_instance': 'nameserver.dns1', 1053 | 'type_instance': 'snd_error', 1054 | 'type': 'gauge', 1055 | 'plugin': 'haproxy' 1056 | }), call({ 1057 | 'values': (0,), 1058 | 'plugin_instance': 'nameserver.dns1', 1059 | 'type_instance': 'nx', 1060 | 'type': 'gauge', 1061 | 'plugin': 'haproxy' 1062 | }), call({ 1063 | 'values': (4,), 1064 | 'plugin_instance': 'nameserver.dns1', 1065 | 'type_instance': 'valid', 1066 | 'type': 'gauge', 1067 | 'plugin': 'haproxy' 1068 | }), call({ 1069 | 'values': (0,), 1070 | 'plugin_instance': 'nameserver.dns1', 1071 | 'type_instance': 'timeout', 1072 | 'type': 'gauge', 1073 | 'plugin': 'haproxy' 1074 | }), call({ 1075 | 'values': (0,), 1076 | 'plugin_instance': 'nameserver.dns1', 1077 | 'type_instance': 'other', 1078 | 'type': 'gauge', 1079 | 'plugin': 'haproxy' 1080 | }), call({ 1081 | 'values': (8,), 1082 | 'plugin_instance': 'nameserver.dns1', 1083 | 'type_instance': 'sent', 1084 | 'type': 'gauge', 1085 | 'plugin': 'haproxy' 1086 | }) 1087 | ], any_order=True) 1088 | 1089 | 1090 | def test_resolver_stats_can_be_parsed(): 1091 | haproxy_socket = haproxy.HAProxySocket(MagicMock()) 1092 | haproxy_socket.communicate = MagicMock( 1093 | return_value=["""Resolvers section mydns 1094 | nameserver dns1: 1095 | sent: 8 1096 | snd_error: 0 1097 | valid: 4 1098 | update: 0 1099 | cname: 0 1100 | cname_error: 4 1101 | any_err: 0 1102 | nx: 0 1103 | timeout: 0 1104 | refused: 0 1105 | other: 0 1106 | invalid: 0 1107 | too_big: 0 1108 | truncated: 0 1109 | outdated: 0 1110 | 1111 | Resolvers section mydns2 1112 | nameserver dns2: 1113 | sent: 0 1114 | snd_error: 0 1115 | valid: 0 1116 | update: 0 1117 | cname: 0 1118 | cname_error: 0 1119 | any_err: 0 1120 | nx: 0 1121 | timeout: 0 1122 | refused: 0 1123 | other: 0 1124 | invalid: 0 1125 | too_big: 0 1126 | truncated: 0 1127 | outdated: 0"""]) 1128 | assert haproxy_socket.get_resolvers() == { 1129 | 'dns1': { 1130 | 'sent': '8', 1131 | 'snd_error': '0', 1132 | 'valid': '4', 1133 | 'update': '0', 1134 | 'cname': '0', 1135 | 'cname_error': '4', 1136 | 'any_err': '0', 1137 | 'nx': '0', 1138 | 'timeout': '0', 1139 | 'refused': '0', 1140 | 'other': '0', 1141 | 'invalid': '0', 1142 | 'too_big': '0', 1143 | 'truncated': '0', 1144 | 'outdated': '0' 1145 | }, 'dns2': { 1146 | 'sent': '0', 1147 | 'snd_error': '0', 1148 | 'valid': '0', 1149 | 'update': '0', 1150 | 'cname': '0', 1151 | 'cname_error': '0', 1152 | 'any_err': '0', 1153 | 'nx': '0', 1154 | 'timeout': '0', 1155 | 'refused': '0', 1156 | 'other': '0', 1157 | 'invalid': '0', 1158 | 'too_big': '0', 1159 | 'truncated': '0', 1160 | 'outdated': '0' 1161 | }} 1162 | --------------------------------------------------------------------------------