├── .bumpversion.cfg ├── .github └── workflows │ ├── release.yaml │ └── test.yaml ├── .gitignore ├── LICENSE ├── README.md ├── docs ├── admin1.png └── metrics.txt ├── poetry.lock ├── prometheus_http_sd ├── __init__.py ├── app.py ├── cli.py ├── config.py ├── const.py ├── decorator.py ├── exceptions.py ├── mem_perf.py ├── sd.py ├── targets.py ├── templates │ └── admin.html ├── validate.py └── version.py ├── pyproject.toml └── test ├── app_root ├── a.yaml ├── cached_target │ └── a.py ├── echo_target │ ├── sleep2_target.py │ ├── sleep_target.py │ └── target.py └── error │ └── error.py ├── conftest.py ├── test_app.py ├── test_generator ├── root │ ├── empty │ │ └── node.json │ ├── json │ │ └── target.json │ └── yaml │ │ └── target.yaml └── test_generator.py ├── test_ignore ├── good_root │ ├── .hidden.json │ ├── .shoud_ignore │ │ └── a.txt │ ├── .should_ignore_file.txt │ ├── _utils │ │ └── utils.py │ ├── gateway │ │ ├── nginx │ │ │ ├── edge.py │ │ │ └── targets.json │ │ └── targets.json │ ├── queue │ │ ├── _queue_utils │ │ │ └── utils.py │ │ ├── kafka.py │ │ └── zookeeper.json │ └── victoriametrics.json └── test_genrator_ignore.py ├── test_timeout ├── test_selector.py └── test_timeout.py └── test_validate ├── root_dir ├── bad.yaml ├── good.yaml ├── py_should_run_test_func │ └── a.py └── should_ignore │ ├── a.yaml │ └── b.yaml └── test_validate.py /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 1.3.16 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:prometheus_http_sd/version.py] 7 | 8 | [bumpversion:file:pyproject.toml] 9 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - v* 7 | 8 | jobs: 9 | release-pypi: 10 | name: release-pypi 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-python@v1 16 | with: 17 | python-version: "3.10" 18 | architecture: "x64" 19 | 20 | - name: Install Dependencies 21 | run: | 22 | python3 -m venv venv 23 | . venv/bin/activate 24 | pip install -U pip 25 | pip install poetry 26 | poetry install 27 | python -c "import sys; print(sys.version)" 28 | pip list 29 | 30 | - name: Poetry Build 31 | run: | 32 | . venv/bin/activate 33 | pip list 34 | poetry build 35 | 36 | - name: Test Build 37 | run: | 38 | python3 -m venv fresh_env 39 | . fresh_env/bin/activate 40 | pip install dist/*.whl 41 | 42 | prometheus-http-sd --help 43 | 44 | - name: Upload to Pypi 45 | env: 46 | PASSWORD: ${{ secrets.PYPI_TOKEN }} 47 | run: | 48 | . 
venv/bin/activate 49 | poetry publish --username __token__ --password ${PASSWORD} 50 | -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - master 8 | 9 | jobs: 10 | test: 11 | name: Pytest 12 | strategy: 13 | matrix: 14 | os: [ubuntu-latest] 15 | python: ["3.9", "3.10"] 16 | runs-on: ${{ matrix.os }} 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | - uses: actions/setup-python@v2 21 | with: 22 | python-version: ${{ matrix.python }} 23 | architecture: "x64" 24 | - name: Cache venv 25 | uses: actions/cache@v2 26 | with: 27 | path: venv 28 | # Look to see if there is a cache hit for the corresponding requirements file 29 | key: 30 | poetryenv-${{ matrix.os }}-${{ matrix.python }}-${{ 31 | hashFiles('poetry.lock') }} 32 | - name: Install Dependencies 33 | run: | 34 | python3 -m venv venv 35 | . venv/bin/activate 36 | pip install -U pip==21.1 setuptools 37 | pip install poetry 38 | poetry install 39 | python -c "import sys; print(sys.version)" 40 | pip list 41 | - name: Pytest 42 | run: | 43 | . venv/bin/activate 44 | pytest 45 | lint: 46 | name: flake8 & black 47 | runs-on: ubuntu-latest 48 | 49 | steps: 50 | - uses: actions/checkout@v2 51 | - uses: codespell-project/actions-codespell@master 52 | with: 53 | ignore_words_list: fo,ists,oll,optin,ot,smove,tre,whe 54 | - uses: actions/setup-python@v1 55 | with: 56 | python-version: 3.9 57 | architecture: "x64" 58 | - name: Cache venv 59 | uses: actions/cache@v2 60 | with: 61 | path: venv 62 | # Look to see if there is a cache hit for the corresponding requirements file 63 | key: lintenv-v2 64 | - name: Install Dependencies 65 | run: | 66 | python3 -m venv venv 67 | . venv/bin/activate 68 | pip install -U pip flake8 black 69 | - name: Flake8 test 70 | run: | 71 | . venv/bin/activate 72 | flake8 --exclude venv . 73 | - name: Black test 74 | run: | 75 | . venv/bin/activate 76 | black --check . 77 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # prometheus-http-sd 2 | 3 | This is a 4 | [Prometheus HTTP SD](https://prometheus.io/docs/prometheus/latest/http_sd/) 5 | framework. 
6 |
7 | [![Test](https://github.com/laixintao/prometheus-http-sd/actions/workflows/test.yaml/badge.svg?branch=main)](https://github.com/laixintao/prometheus-http-sd/actions/workflows/test.yaml)
8 |
9 |
10 |
11 | - [Features](#features)
12 | - [Installation](#installation)
13 | - [Usage](#usage)
14 |   - [The Python Target Generator](#the-python-target-generator)
15 |   - [Python Target Generator Cache and Throttle](#python-target-generator-cache-and-throttle)
16 |   - [Manage prometheus-http-sd by systemd](#manage-prometheus-http-sd-by-systemd)
17 |   - [Admin Page](#admin-page)
18 |   - [Serve under a different root path](#serve-under-a-different-root-path)
19 |   - [Cache](#cache)
20 |     - [Timeout](#timeout)
21 |     - [None](#none)
22 |   - [Sentry APM](#sentry-apm)
23 | - [Define your targets](#define-your-targets)
24 |   - [Your target generator](#your-target-generator)
25 |   - [The Target Path](#the-target-path)
26 |   - [Overwriting `job_name` labels](#overwriting-job_name-labels)
27 |   - [Check and Validate your Targets](#check-and-validate-your-targets)
28 |   - [Script Dependencies](#script-dependencies)
29 | - [Update Your Scripts](#update-your-scripts)
30 | - [Debug Your Scripts](#debug-your-scripts)
31 | - [Best Practice](#best-practice)
32 |
33 |
34 |
35 | ## Features
36 |
37 | - Support static targets from JSON files;
38 | - Support static targets from YAML files;
39 | - Support generating target lists using Python scripts;
40 | - Support a `check` command, to test that the generated targets are as expected, and
41 |   to count the targets;
42 | - You can monitor your target generators via `/metrics`, see
43 |   [metrics](./docs/metrics.txt);
44 | - Admin page to list all target paths;
45 | - Auto reload when generators or targets change;
46 | - Support managing targets in a hierarchical way;
47 | - Throttle parallel executions and cache the results of Python scripts;
48 | - Support Sentry APM.
49 |
50 | ## Installation
51 |
52 | ```shell
53 | pip install prometheus-http-sd
54 | ```
55 |
56 | ## Usage
57 |
58 | First, you need a directory; everything in this directory will be used to
59 | generate targets for prometheus-http-sd.
60 |
61 | ```shell
62 | $ mkdir targets
63 | ```
64 |
65 | In this directory, every file is called a target "generator":
66 |
67 | - Filenames ending with `.json` will be exposed directly
68 | - Filenames ending with `.yaml` will be exposed directly
69 | - Filenames ending with `.py` must include a `generate_targets()` function;
70 |   the function will be run, and it must return a `TargetList` (type helper in
71 |   `prometheus_http_sd.targets`)
72 | - Filenames starting with `_` will be ignored, so you can keep some Python
73 |   utils there, e.g. a `_utils/__init__.py` that you can import in your
74 |   `generate_targets()`
75 | - Filenames starting with `.` (hidden files on Linux) will also be ignored
76 |
77 | Let's write our first target generator in YAML; put this into your
78 | `targets/first_target.yaml`:
79 |
80 | ```yaml
81 | ---
82 | - targets:
83 |     - "10.1.1.9:9100"
84 |     - "10.1.1.10:9100"
85 |   labels:
86 |     job: node
87 |     datacenter: nyc
88 |     group: g1
89 | - targets:
90 |     - "10.2.1.9:9100"
91 |     - "10.2.1.10:9100"
92 |   labels:
93 |     job: node
94 |     datacenter: sg
95 |     group: g2
96 | ```
97 |
98 | If you use JSON, the data structure is the same, just in JSON format.
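For example, the first target group above could be written as `targets/first_target.json` (an illustrative file name; the data simply mirrors the YAML example):

```json
[
  {
    "targets": ["10.1.1.9:9100", "10.1.1.10:9100"],
    "labels": {
      "job": "node",
      "datacenter": "nyc",
      "group": "g1"
    }
  }
]
```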
99 |
100 | ### The Python Target Generator
101 |
102 | Let's write another generator using Python.
103 |
104 | Put this into your `targets/by_python.py`:
105 |
106 | ```python
107 | def generate_targets(**extra_parameters):
108 |     return [{"targets": ["10.1.1.22:2379"], "labels": {"app": "etcd"}}]
109 | ```
110 |
111 | Then you can run `prometheus-http-sd serve -h 0.0.0.0 -p 8080 /tmp/targets`, and
112 | prometheus-http-sd will start to expose targets at http://0.0.0.0:8080/targets.
113 |
114 | The `-h` and `-p` are optional; they default to `127.0.0.1` and `8080`.
115 |
116 | ```shell
117 | $ prometheus-http-sd serve /tmp/targets # replace this with your target path
118 | [2022-07-24 00:52:03,896] {wasyncore.py:486} INFO - Serving on http://127.0.0.1:8080
119 | ```
120 |
121 | If you run `curl http://127.0.0.1:8080/targets`, you will get:
122 |
123 | ```shell
124 | [{"targets": ["10.1.1.22:2379"], "labels": {"app": "etcd"}}]
125 | ```
126 |
127 | Finally, you can tell your Prometheus to find targets under
128 | http://127.0.0.1:8080/targets, by adding this to your Prometheus config:
129 |
130 | ```yaml
131 | scrape_configs:
132 |   - job_name: "etcd"
133 |     http_sd_config:
134 |       url: http://127.0.0.1:8080/targets/
135 | ```
136 |
137 | The Python target generator also supports URL query params. You can read the
138 | params in your `generate_targets()` function.
139 |
140 | For example:
141 |
142 | ```python
143 | def generate_targets(**params):
144 |     cluster = params.get("cluster")
145 |     return [{"targets": ["10.1.1.22:2379"], "labels": {"app": "etcd", "cluster": cluster}}]
146 | ```
147 |
148 | Then from `curl http://127.0.0.1:8080/targets?cluster=us1` you will get:
149 |
150 | ```shell
151 | [{"targets": ["10.1.1.22:2379"], "labels": {"app": "etcd", "cluster": "us1"}}]
152 | ```
153 |
154 | ### Python Target Generator Cache and Throttle
155 |
156 | Suppose you have 10 Prometheus instances requesting targets from http-sd every
157 | minute. For a Python script target generator, it doesn't make sense for the same
158 | script to run 10 times every minute; instead, it should run only once, and its
159 | result should be used to respond to all 10 Prometheus instances.
160 |
161 | prometheus-http-sd caches and throttles by default, which means:
162 |
163 | - At any time, there is only one instance of a Python script running
164 | - The result will be cached for 1 minute (this means that each script will run
165 |   at most once per minute, and your target updates will be delayed by at most
166 |   1 minute)
167 |
168 | ### Manage prometheus-http-sd by systemd
169 |
170 | Just put this file under `/lib/systemd/system/http-sd.service` (remember to
171 | change your installation path and root_dir path):
172 |
173 | ```
174 | # /lib/systemd/system/http-sd.service
175 | [Unit]
176 | Description=Prometheus HTTP SD Service
177 | Wants=network-online.target
178 | After=network-online.target
179 |
180 | [Service]
181 | Type=simple
182 | ExecStart=/opt/httpsd_env/bin/prometheus-http-sd serve \
183 |     -h 0.0.0.0 \
184 |     -p 8080 \
185 |     /opt/httpsd_targets
186 |
187 | Restart=always
188 | RestartSec=90
189 |
190 | [Install]
191 | WantedBy=multi-user.target
192 | ```
193 |
194 | ### Admin Page
195 |
196 | You can open the root path, `http://127.0.0.1:8080/` in this example, and you
197 | will see all of the available paths listed on the admin page.
198 |
199 | ![](./docs/admin1.png)
200 |
201 | ### Serve under a different root path
202 |
203 | If you put prometheus-http-sd behind a reverse proxy such as Nginx, configured like this:
204 |
205 | ```
206 | location /http_sd/ {
207 |     proxy_pass http://prometheus_http_sd;
208 | }
209 | ```
210 |
211 | Then you need to tell prometheus_http_sd to serve all HTTP requests under this
212 | path, by using the `--url_prefix /http_sd` CLI option (or `-r /http_sd` for
213 | short).
214 |
215 | ### Cache
216 |
217 | By default, `prometheus-http-sd` caches results for Python target generators, to avoid server crashes due to too many queries.
218 | ```
219 | +------------+
220 | |            |
221 | |            |
222 | |            |                   +-----------+
223 | |  Caller 1  +----+              |           |
224 | |            |    |              |           |
225 | |            |    |              |           |
226 | |            |    |              |           |
227 | +------------+    |              |           |
228 |                   |              |           |
229 |                   |              |           |
230 | +------------+    |              |           |                    +----------+
231 | |            |    |              |           |                    |          |
232 | |            |    | call at the  |           | only single call   |          |
233 | |            |    | same time    |  Timeout  | to the back        |          |
234 | |  Caller 2  +----|------------->+   Cache   +------------------->+ Function |
235 | |            |    |              |           |                    |          |
236 | |            |    |              |           |                    |          |
237 | |            |    |              |           |                    |          |
238 | +------------+    |              |           |                    +----------+
239 |                   |              |           |
240 |                   |              |           |
241 | +------------+    |              |           |
242 | |            |    |              |           |
243 | |            |    |              |           |
244 | |            |    |              |           |
245 | |  Caller 3  +----+              |           |
246 | |            |                   +-----------+
247 | |            |
248 | |            |
249 | +------------+
250 | ```
251 |
252 | To change this behavior, you can use the `--cache-type` option.
253 |
254 | Also, you can use the `--cache-opt` option to set the cache's parameters.
255 | For example:
256 |
257 | ```bash
258 | prometheus-http-sd serve \
259 |   -h 0.0.0.0 \
260 |   -p 8080 \
261 |   --cache-type="Timeout" \
262 |   --cache-opt="timeout=360" \
263 |   /opt/httpsd_targets
264 |
265 | ```
266 |
267 | #### Timeout
268 |
269 | This is the default value. It will cache the result or exception from the target function.
270 | * `timeout=`:
271 |   function timeout; if exceeded, a TimeoutException is raised (in seconds).
272 | * `cache_time=`:
273 |   after the function returns normally, how long we should cache the result (in seconds).
274 | * `cache_exception_time=`:
275 |   after the function fails, how long we should cache the exception (in seconds).
276 | * `name=`:
277 |   prometheus_client metrics prefix.
278 | * `garbage_collection_count=`:
279 |   garbage collection threshold.
280 | * `garbage_collection_interval=`:
281 |   the minimum interval in seconds, to avoid collecting too often.
282 | * `copy_response=`:
283 |   if true, use copy.deepcopy on the response from the target function.
284 |
285 |
286 | #### None
287 |
288 | This is a dummy cache; use it if you don't need any caching.
289 |
290 | ### Sentry APM
291 |
292 | You can use the option `--sentry-url <url>` (or `-s <url>`)
293 | to enable Sentry APM.
294 |
295 | Exceptions from users' scripts will be sent to Sentry.
296 |
297 | ## Define your targets
298 |
299 | ### Your target generator
300 |
301 | Please see [Usage](#usage) to learn how to define your generators.
302 |
303 | ### The Target Path
304 |
305 | prometheus-http-sd supports sub-paths.
306 |
307 | For example, if we use `prometheus-http-sd serve gateway`, and the `gateway`
308 | directory's structure is as follows:
309 |
310 | ```shell
311 | gateway
312 | ├── nginx
313 | │   ├── edge.py
314 | │   └── targets.json
315 | └── targets.json
316 | ```
317 |
318 | Then:
319 |
320 | - `/targets/gateway` will return the targets from:
321 |   - `gateway/nginx/edge.py`
322 |   - `gateway/nginx/targets.json`
323 |   - `gateway/targets.json`
324 | - `/targets/gateway/nginx` will return the targets from:
325 |   - `gateway/nginx/edge.py`
326 |   - `gateway/nginx/targets.json`
327 |
328 | This is very useful when you split the scrape load across Prometheus instances.
329 | Say you have 5 Prometheus instances, and you want each one of them to scrape
330 | different targets; then you can use the sub-path feature of prometheus-http-sd.
331 |
332 | For example, in one Prometheus's scrape config:
333 |
334 | ```yaml
335 | scrape_configs:
336 |   - job_name: "nginx"
337 |     http_sd_config:
338 |       url: http://prometheus-http-sd:8080/targets/nginx
339 |
340 |   - job_name: "etcd"
341 |     http_sd_config:
342 |       url: http://prometheus-http-sd:8080/targets/etcd
343 | ```
344 |
345 | And in another one:
346 |
347 | ```yaml
348 | scrape_configs:
349 |   - job_name: "nginx"
350 |     http_sd_config:
351 |       url: http://prometheus-http-sd:8080/targets/database
352 |
353 |   - job_name: "etcd"
354 |     http_sd_config:
355 |       url: http://prometheus-http-sd:8080/targets/application
356 | ```
357 |
358 | ### Overwriting `job_name` labels
359 |
360 | You may want to put all of your etcd targets in one generator, including port
361 | 2379 for etcd metrics and port 9100 for the node_exporter metrics of the etcd
362 | servers. But the `job_name` setting is per scrape URL.
363 |
364 | The trick is that you can overwrite the `job` label in the target labels, like
365 | this:
366 |
367 | ```yaml
368 | ---
369 | - targets:
370 |     - "10.1.1.9:9100"
371 |   labels:
372 |     job: node
373 |     datacenter: nyc
374 |     group: g1
375 | - targets:
376 |     - "10.1.1.9:2379"
377 |   labels:
378 |     job: etcd
379 |     datacenter: nyc
380 |     group: g1
381 | ```
382 |
383 | ### Check and Validate your Targets
384 |
385 | You can use the `prometheus-http-sd check` command to test your targets dir. It
386 | will run all of your generators, validate the targets, and print the target
387 | count that each generator generates.
388 |
389 | ```shell
390 | $ prometheus-http-sd check test/test_generator/root
391 | [2022-08-06 00:50:11,095] {validate.py:16} INFO - Run generator test/test_generator/root/json/target.json, took 0.0011398792266845703s, generated 1 targets.
392 | [2022-08-06 00:50:11,100] {validate.py:16} INFO - Run generator test/test_generator/root/yaml/target.yaml, took 0.0043718814849853516s, generated 2 targets.
393 | [2022-08-06 00:50:11,100] {validate.py:22} INFO - Done! Generated {total_targets} in total.
394 | ```
395 |
396 | It's a good idea to use `prometheus-http-sd check` in your CI system to validate
397 | your target generator scripts and target files.
398 |
399 | For Python scripts, the `prometheus-http-sd check` command will run
400 | `generate_targets` in each script, without any params. However, you can
401 | overwrite the `check` logic by providing a function called
402 | `test_generate_targets()` (without any function args); then `check` will run
403 | `test_generate_targets` instead. (So you can call `generate_targets(foo="bar")`
404 | inside it to set up test logic of your own, as in the sketch below.)
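For instance, a generator with a custom check hook might look like this (a minimal sketch; the file name, the `cluster` param, and the target values are made up for illustration):

```python
# targets/etcd.py -- an illustrative generator with a custom check hook


def generate_targets(**params):
    cluster = params.get("cluster", "default")
    return [
        {
            "targets": ["10.1.1.22:2379"],
            "labels": {"app": "etcd", "cluster": cluster},
        }
    ]


def test_generate_targets():
    # `prometheus-http-sd check` calls this instead of generate_targets(),
    # so we can pass the params the generator expects in production.
    targets = generate_targets(cluster="us1")
    assert targets, "expected at least one target group"
```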
405 |
406 | ### Script Dependencies
407 |
408 | If you want your scripts to use some other Python library, just install it
409 | into the **same virtualenv** in which you installed prometheus-http-sd, so that
410 | prometheus-http-sd can import it.
411 |
412 | ## Update Your Scripts
413 |
414 | If you want to update your script file or target JSON file, just upload and
415 | overwrite it with your new version; it will take effect immediately after you
416 | make the change. **There is no need to restart** prometheus-http-sd;
417 | prometheus-http-sd will read the file (or reload the Python script) every time
418 | it serves a request.
419 |
420 | It is worth noting that restarting is safe, because if Prometheus fails to get
421 | the target list via an HTTP request, it won't update its current target list to
422 | an empty one; instead,
423 | [it will keep using the current list](https://prometheus.io/docs/prometheus/latest/http_sd/).
424 |
425 | > Prometheus caches target lists. If an error occurs while fetching an updated
426 | > targets list, Prometheus keeps using the current targets list.
427 |
428 | For the same reason, if there are 3 scripts under `/targets/mysystem` and only
429 | one fails for a request, prometheus-http-sd will return an HTTP 500 error for
430 | the whole request instead of returning the partial targets from the other two
431 | scripts.
432 |
433 | Also for the same reason, if your script hits an error, you should let the
434 | `Exception` propagate all the way to the top instead of catching it in your
435 | script and returning an empty `TargetList`; if you return an empty `TargetList`,
436 | prometheus-http-sd will think that your script ran successfully and will empty
437 | the target list as well.
438 |
439 | You can notice such errors from the stdout logs or from prometheus-http-sd's
440 | `/metrics`.
441 |
442 | ## Debug Your Scripts
443 |
444 | To debug script latency, you can add `?debug=true` at the end of your target
445 | URL to see the time cost of each generator on a path.
446 |
447 | For example:
448 |
449 | ```shell
450 | curl http://127.0.0.1:8080/targets/echo_target\?debug\=true
451 | {"generator_run_seconds":{"./test/app_root/echo_target/sleep2_target.py":2.005011796951294,"./test/app_root/echo_target/sleep_target.py":3.00480318069458,"./test/app_root/echo_target/target.py":0.0009987354278564453}}
452 | ```
453 |
454 | ## Best Practice
455 |
456 | You can use a git repository to manage your target generators.
457 |
458 | -------------------------------------------------------------------------------- /docs/admin1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laixintao/prometheus-http-sd/0017ce62e51312ab3f891866354c99d48d2e6350/docs/admin1.png -------------------------------------------------------------------------------- /docs/metrics.txt: -------------------------------------------------------------------------------- 1 | # HELP python_gc_objects_collected_total Objects collected during gc 2 | # TYPE python_gc_objects_collected_total counter 3 | python_gc_objects_collected_total{generation="0"} 482.0 4 | python_gc_objects_collected_total{generation="1"} 0.0 5 | python_gc_objects_collected_total{generation="2"} 0.0 6 | # HELP python_gc_objects_uncollectable_total Uncollectable object found during GC 7 | # TYPE python_gc_objects_uncollectable_total counter 8 | python_gc_objects_uncollectable_total{generation="0"} 0.0 9 | python_gc_objects_uncollectable_total{generation="1"} 0.0 10 | python_gc_objects_uncollectable_total{generation="2"} 0.0 11 | # HELP python_gc_collections_total Number of times this generation was collected 12 | # TYPE python_gc_collections_total counter 13 | python_gc_collections_total{generation="0"} 79.0 14 | python_gc_collections_total{generation="1"} 7.0 15 | python_gc_collections_total{generation="2"} 0.0 16 | # HELP python_info Python platform information 17 | # TYPE python_info gauge 18 | python_info{implementation="CPython",major="3",minor="9",patchlevel="0",version="3.9.0"} 1.0 19 | # HELP httpsd_generator_requests_total The total count that this generator executed, status can be success/fail 20 | # TYPE httpsd_generator_requests_total counter 21 | httpsd_generator_requests_total{generator="test/test_validate/root_dir/bad.yaml",status="success"} 7.0 22 | httpsd_generator_requests_total{generator="test/test_validate/root_dir/good.yaml",status="success"} 7.0 23 | # HELP httpsd_generator_requests_created The total count that this generator executed, status can be success/fail 24 | # TYPE httpsd_generator_requests_created gauge 25 | httpsd_generator_requests_created{generator="test/test_validate/root_dir/bad.yaml",status="success"} 1.659772343690829e+09 26 | httpsd_generator_requests_created{generator="test/test_validate/root_dir/good.yaml",status="success"} 1.6597723436915379e+09 27 | # HELP httpsd_generator_last_generated_targets The target count that this generator gets during its last execution 28 | # TYPE httpsd_generator_last_generated_targets gauge 29 | httpsd_generator_last_generated_targets{generator="test/test_validate/root_dir/bad.yaml"} 4.0 30 | httpsd_generator_last_generated_targets{generator="test/test_validate/root_dir/good.yaml"} 4.0 31 | # HELP httpsd_generator_run_duration_seconds The time cost that this generator run 32 | # TYPE httpsd_generator_run_duration_seconds histogram 33 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.005"} 7.0 34 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.01"} 7.0 35 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.025"} 7.0 36 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.05"} 7.0 37 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.075"} 7.0 38 | 
httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.1"} 7.0 39 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.25"} 7.0 40 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.5"} 7.0 41 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="0.75"} 7.0 42 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="1.0"} 7.0 43 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="2.5"} 7.0 44 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="5.0"} 7.0 45 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="7.5"} 7.0 46 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="10.0"} 7.0 47 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/bad.yaml",le="+Inf"} 7.0 48 | httpsd_generator_run_duration_seconds_count{generator="test/test_validate/root_dir/bad.yaml"} 7.0 49 | httpsd_generator_run_duration_seconds_sum{generator="test/test_validate/root_dir/bad.yaml"} 0.004312008999999506 50 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.005"} 7.0 51 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.01"} 7.0 52 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.025"} 7.0 53 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.05"} 7.0 54 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.075"} 7.0 55 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.1"} 7.0 56 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.25"} 7.0 57 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.5"} 7.0 58 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="0.75"} 7.0 59 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="1.0"} 7.0 60 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="2.5"} 7.0 61 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="5.0"} 7.0 62 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="7.5"} 7.0 63 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="10.0"} 7.0 64 | httpsd_generator_run_duration_seconds_bucket{generator="test/test_validate/root_dir/good.yaml",le="+Inf"} 7.0 65 | httpsd_generator_run_duration_seconds_count{generator="test/test_validate/root_dir/good.yaml"} 7.0 66 | httpsd_generator_run_duration_seconds_sum{generator="test/test_validate/root_dir/good.yaml"} 0.002305905000003605 67 | # HELP httpsd_generator_run_duration_seconds_created The time cost that this generator run 68 | # TYPE httpsd_generator_run_duration_seconds_created gauge 69 | 
httpsd_generator_run_duration_seconds_created{generator="test/test_validate/root_dir/bad.yaml"} 1.659772343689007e+09 70 | httpsd_generator_run_duration_seconds_created{generator="test/test_validate/root_dir/good.yaml"} 1.6597723436908538e+09 71 | # HELP httpsd_path_last_generated_targets Generated targets count in last request 72 | # TYPE httpsd_path_last_generated_targets gauge 73 | httpsd_path_last_generated_targets{path="test_validate/root_dir"} 8.0 74 | # HELP httpsd_version_info prometheus_http_sd version info 75 | # TYPE httpsd_version_info gauge 76 | httpsd_version_info{version="0.4.1"} 1.0 77 | # HELP httpsd_path_requests_total The total count of a path being requested, status label can be success/fail 78 | # TYPE httpsd_path_requests_total counter 79 | httpsd_path_requests_total{path="test_validate/root_dir",status="success"} 7.0 80 | # HELP httpsd_path_requests_created The total count of a path being requested, status label can be success/fail 81 | # TYPE httpsd_path_requests_created gauge 82 | httpsd_path_requests_created{path="test_validate/root_dir",status="success"} 1.659772343691556e+09 83 | # HELP httpsd_target_path_request_duration_seconds The bucket of request duration in seconds 84 | # TYPE httpsd_target_path_request_duration_seconds histogram 85 | httpsd_target_path_request_duration_seconds_bucket{le="0.005",path="test_validate/root_dir"} 7.0 86 | httpsd_target_path_request_duration_seconds_bucket{le="0.01",path="test_validate/root_dir"} 7.0 87 | httpsd_target_path_request_duration_seconds_bucket{le="0.025",path="test_validate/root_dir"} 7.0 88 | httpsd_target_path_request_duration_seconds_bucket{le="0.05",path="test_validate/root_dir"} 7.0 89 | httpsd_target_path_request_duration_seconds_bucket{le="0.075",path="test_validate/root_dir"} 7.0 90 | httpsd_target_path_request_duration_seconds_bucket{le="0.1",path="test_validate/root_dir"} 7.0 91 | httpsd_target_path_request_duration_seconds_bucket{le="0.25",path="test_validate/root_dir"} 7.0 92 | httpsd_target_path_request_duration_seconds_bucket{le="0.5",path="test_validate/root_dir"} 7.0 93 | httpsd_target_path_request_duration_seconds_bucket{le="0.75",path="test_validate/root_dir"} 7.0 94 | httpsd_target_path_request_duration_seconds_bucket{le="1.0",path="test_validate/root_dir"} 7.0 95 | httpsd_target_path_request_duration_seconds_bucket{le="2.5",path="test_validate/root_dir"} 7.0 96 | httpsd_target_path_request_duration_seconds_bucket{le="5.0",path="test_validate/root_dir"} 7.0 97 | httpsd_target_path_request_duration_seconds_bucket{le="7.5",path="test_validate/root_dir"} 7.0 98 | httpsd_target_path_request_duration_seconds_bucket{le="10.0",path="test_validate/root_dir"} 7.0 99 | httpsd_target_path_request_duration_seconds_bucket{le="+Inf",path="test_validate/root_dir"} 7.0 100 | httpsd_target_path_request_duration_seconds_count{path="test_validate/root_dir"} 7.0 101 | httpsd_target_path_request_duration_seconds_sum{path="test_validate/root_dir"} 0.009562472999999017 102 | # HELP httpsd_target_path_request_duration_seconds_created The bucket of request duration in seconds 103 | # TYPE httpsd_target_path_request_duration_seconds_created gauge 104 | httpsd_target_path_request_duration_seconds_created{path="test_validate/root_dir"} 1.659772343688767e+09 105 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.2 and should not be changed by 
hand. 2 | 3 | [[package]] 4 | name = "blinker" 5 | version = "1.6.3" 6 | description = "Fast, simple object-to-object and broadcast signaling" 7 | optional = false 8 | python-versions = ">=3.7" 9 | files = [ 10 | {file = "blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa"}, 11 | {file = "blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d"}, 12 | ] 13 | 14 | [[package]] 15 | name = "certifi" 16 | version = "2024.2.2" 17 | description = "Python package for providing Mozilla's CA Bundle." 18 | optional = false 19 | python-versions = ">=3.6" 20 | files = [ 21 | {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, 22 | {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, 23 | ] 24 | 25 | [[package]] 26 | name = "click" 27 | version = "8.1.7" 28 | description = "Composable command line interface toolkit" 29 | optional = false 30 | python-versions = ">=3.7" 31 | files = [ 32 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, 33 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, 34 | ] 35 | 36 | [package.dependencies] 37 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 38 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 39 | 40 | [[package]] 41 | name = "colorama" 42 | version = "0.4.6" 43 | description = "Cross-platform colored terminal text." 44 | optional = false 45 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 46 | files = [ 47 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 48 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 49 | ] 50 | 51 | [[package]] 52 | name = "exceptiongroup" 53 | version = "1.2.1" 54 | description = "Backport of PEP 654 (exception groups)" 55 | optional = false 56 | python-versions = ">=3.7" 57 | files = [ 58 | {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, 59 | {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, 60 | ] 61 | 62 | [package.extras] 63 | test = ["pytest (>=6)"] 64 | 65 | [[package]] 66 | name = "flake8" 67 | version = "4.0.1" 68 | description = "the modular source code checker: pep8 pyflakes and co" 69 | optional = false 70 | python-versions = ">=3.6" 71 | files = [ 72 | {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, 73 | {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, 74 | ] 75 | 76 | [package.dependencies] 77 | importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} 78 | mccabe = ">=0.6.0,<0.7.0" 79 | pycodestyle = ">=2.8.0,<2.9.0" 80 | pyflakes = ">=2.4.0,<2.5.0" 81 | 82 | [[package]] 83 | name = "flask" 84 | version = "2.2.5" 85 | description = "A simple framework for building complex web applications." 
86 | optional = false 87 | python-versions = ">=3.7" 88 | files = [ 89 | {file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"}, 90 | {file = "Flask-2.2.5.tar.gz", hash = "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"}, 91 | ] 92 | 93 | [package.dependencies] 94 | click = ">=8.0" 95 | importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} 96 | itsdangerous = ">=2.0" 97 | Jinja2 = ">=3.0" 98 | Werkzeug = ">=2.2.2" 99 | 100 | [package.extras] 101 | async = ["asgiref (>=3.2)"] 102 | dotenv = ["python-dotenv"] 103 | 104 | [[package]] 105 | name = "importlib-metadata" 106 | version = "4.2.0" 107 | description = "Read metadata from Python packages" 108 | optional = false 109 | python-versions = ">=3.6" 110 | files = [ 111 | {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, 112 | {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, 113 | ] 114 | 115 | [package.dependencies] 116 | typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} 117 | zipp = ">=0.5" 118 | 119 | [package.extras] 120 | docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] 121 | testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] 122 | 123 | [[package]] 124 | name = "iniconfig" 125 | version = "2.0.0" 126 | description = "brain-dead simple config-ini parsing" 127 | optional = false 128 | python-versions = ">=3.7" 129 | files = [ 130 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 131 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 132 | ] 133 | 134 | [[package]] 135 | name = "itsdangerous" 136 | version = "2.1.2" 137 | description = "Safely pass data to untrusted environments and back." 138 | optional = false 139 | python-versions = ">=3.7" 140 | files = [ 141 | {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, 142 | {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, 143 | ] 144 | 145 | [[package]] 146 | name = "jinja2" 147 | version = "3.1.4" 148 | description = "A very fast and expressive template engine." 149 | optional = false 150 | python-versions = ">=3.7" 151 | files = [ 152 | {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, 153 | {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, 154 | ] 155 | 156 | [package.dependencies] 157 | MarkupSafe = ">=2.0" 158 | 159 | [package.extras] 160 | i18n = ["Babel (>=2.7)"] 161 | 162 | [[package]] 163 | name = "markupsafe" 164 | version = "2.1.5" 165 | description = "Safely add untrusted strings to HTML/XML markup." 
166 | optional = false 167 | python-versions = ">=3.7" 168 | files = [ 169 | {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, 170 | {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, 171 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, 172 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, 173 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, 174 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, 175 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, 176 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, 177 | {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, 178 | {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, 179 | {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, 180 | {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, 181 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, 182 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, 183 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, 184 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, 185 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, 186 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, 187 | {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, 188 | {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, 189 | {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, 190 | {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, 191 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, 192 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, 193 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, 194 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, 195 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, 196 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, 197 | {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, 198 | {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, 199 | {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, 200 | {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, 201 | {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, 202 | {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, 203 | {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, 204 | {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, 205 | {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, 206 | {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, 207 | {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, 208 | {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, 209 | {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, 210 | {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, 211 | {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, 212 | {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, 213 | {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, 214 | {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, 215 | {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, 216 | {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, 217 | {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, 218 | {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, 219 | {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, 220 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, 221 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, 222 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, 223 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, 224 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, 225 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, 226 | {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, 227 | {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, 228 | {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, 229 | ] 230 | 231 | [[package]] 232 | name = "mccabe" 233 | version = "0.6.1" 234 | description = "McCabe checker, plugin for flake8" 235 | optional = false 236 | python-versions = "*" 237 | files = [ 238 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 239 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 240 | ] 241 | 242 | [[package]] 243 | name = "packaging" 244 | version = "24.0" 245 | description = "Core utilities for Python packages" 246 | optional = false 247 | python-versions = ">=3.7" 248 | files = [ 249 | {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, 250 | {file = "packaging-24.0.tar.gz", hash = 
"sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, 251 | ] 252 | 253 | [[package]] 254 | name = "pluggy" 255 | version = "1.2.0" 256 | description = "plugin and hook calling mechanisms for python" 257 | optional = false 258 | python-versions = ">=3.7" 259 | files = [ 260 | {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, 261 | {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, 262 | ] 263 | 264 | [package.dependencies] 265 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 266 | 267 | [package.extras] 268 | dev = ["pre-commit", "tox"] 269 | testing = ["pytest", "pytest-benchmark"] 270 | 271 | [[package]] 272 | name = "prometheus-client" 273 | version = "0.14.1" 274 | description = "Python client for the Prometheus monitoring system." 275 | optional = false 276 | python-versions = ">=3.6" 277 | files = [ 278 | {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, 279 | {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, 280 | ] 281 | 282 | [package.extras] 283 | twisted = ["twisted"] 284 | 285 | [[package]] 286 | name = "pycodestyle" 287 | version = "2.8.0" 288 | description = "Python style guide checker" 289 | optional = false 290 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 291 | files = [ 292 | {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, 293 | {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, 294 | ] 295 | 296 | [[package]] 297 | name = "pyflakes" 298 | version = "2.4.0" 299 | description = "passive checker of Python programs" 300 | optional = false 301 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 302 | files = [ 303 | {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, 304 | {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, 305 | ] 306 | 307 | [[package]] 308 | name = "pytest" 309 | version = "7.4.4" 310 | description = "pytest: simple powerful testing with Python" 311 | optional = false 312 | python-versions = ">=3.7" 313 | files = [ 314 | {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, 315 | {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, 316 | ] 317 | 318 | [package.dependencies] 319 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 320 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 321 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 322 | iniconfig = "*" 323 | packaging = "*" 324 | pluggy = ">=0.12,<2.0" 325 | tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} 326 | 327 | [package.extras] 328 | testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 329 | 330 | [[package]] 331 | name = "pyyaml" 332 | version = "6.0.1" 333 | description = "YAML parser and emitter for 
Python" 334 | optional = false 335 | python-versions = ">=3.6" 336 | files = [ 337 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, 338 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, 339 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, 340 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, 341 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, 342 | {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, 343 | {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, 344 | {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, 345 | {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, 346 | {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, 347 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, 348 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, 349 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, 350 | {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, 351 | {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, 352 | {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, 353 | {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, 354 | {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, 355 | {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, 356 | {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, 357 | {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, 358 | {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, 359 | {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, 360 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, 361 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, 362 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, 363 | {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, 364 | {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, 365 | {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, 366 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, 367 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, 368 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, 369 | {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, 370 | {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, 371 | {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, 372 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, 373 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, 374 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, 375 | {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, 376 | {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 377 | {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, 378 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, 379 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, 380 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, 381 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, 382 | {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, 383 | {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, 384 | {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, 385 | {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, 386 | {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, 387 | ] 388 | 389 | [[package]] 390 | name = "sentry-sdk" 391 | version = "0.10.2" 392 | description = "Python client for Sentry (https://getsentry.com)" 393 | optional = false 394 | python-versions = "*" 395 | files = [ 396 | {file = "sentry-sdk-0.10.2.tar.gz", hash = "sha256:d491aa6399eaa3eded433972751a9770180730fd8b4c225b0b7f49c4fa2af70b"}, 397 | {file = "sentry_sdk-0.10.2-py2.py3-none-any.whl", hash = "sha256:d68003cdffbbfcadaa2c445b72e1050b0a44406f94199866f192986c016b23f5"}, 398 | ] 399 | 400 | [package.dependencies] 401 | blinker = {version = ">=1.1", optional = true, markers = "extra == \"flask\""} 402 | certifi = "*" 403 | flask = {version = ">=0.8", optional = true, markers = "extra == \"flask\""} 404 | urllib3 = "*" 405 | 406 | [package.extras] 407 | bottle = ["bottle (>=0.12.13)"] 408 | falcon = ["falcon (>=1.4)"] 409 | flask = ["blinker (>=1.1)", "flask (>=0.8)"] 410 | 411 | [[package]] 412 | name = "tomli" 413 | version = "2.0.1" 414 | description = "A lil' TOML parser" 415 | optional = false 416 | python-versions = ">=3.7" 417 | files = [ 418 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 419 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 420 | ] 421 | 422 | [[package]] 423 | name = "typing-extensions" 424 | version = "4.7.1" 425 | description = "Backported and Experimental Type Hints for Python 3.7+" 426 | optional = false 427 | python-versions = ">=3.7" 428 | files = [ 429 | {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, 430 | {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, 431 | ] 432 | 433 | [[package]] 434 | name = "urllib3" 435 | version = "2.0.7" 436 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
437 | optional = false 438 | python-versions = ">=3.7" 439 | files = [ 440 | {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, 441 | {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, 442 | ] 443 | 444 | [package.extras] 445 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] 446 | secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] 447 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] 448 | zstd = ["zstandard (>=0.18.0)"] 449 | 450 | [[package]] 451 | name = "waitress" 452 | version = "2.1.2" 453 | description = "Waitress WSGI server" 454 | optional = false 455 | python-versions = ">=3.7.0" 456 | files = [ 457 | {file = "waitress-2.1.2-py3-none-any.whl", hash = "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a"}, 458 | {file = "waitress-2.1.2.tar.gz", hash = "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba"}, 459 | ] 460 | 461 | [package.extras] 462 | docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] 463 | testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] 464 | 465 | [[package]] 466 | name = "werkzeug" 467 | version = "2.2.3" 468 | description = "The comprehensive WSGI web application library." 469 | optional = false 470 | python-versions = ">=3.7" 471 | files = [ 472 | {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, 473 | {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, 474 | ] 475 | 476 | [package.dependencies] 477 | MarkupSafe = ">=2.1.1" 478 | 479 | [package.extras] 480 | watchdog = ["watchdog"] 481 | 482 | [[package]] 483 | name = "zipp" 484 | version = "3.15.0" 485 | description = "Backport of pathlib-compatible object wrapper for zip files" 486 | optional = false 487 | python-versions = ">=3.7" 488 | files = [ 489 | {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, 490 | {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, 491 | ] 492 | 493 | [package.extras] 494 | docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 495 | testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] 496 | 497 | [metadata] 498 | lock-version = "2.0" 499 | python-versions = "^3.7" 500 | content-hash = "a885c915b909fd3c3dc566652cc9a6231c7e5e8b384742d96d54fed1ac4aea1e" 501 | -------------------------------------------------------------------------------- /prometheus_http_sd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laixintao/prometheus-http-sd/0017ce62e51312ab3f891866354c99d48d2e6350/prometheus_http_sd/__init__.py -------------------------------------------------------------------------------- /prometheus_http_sd/app.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | from pathlib import Path 4 | 5 | 6 | from flask import Flask, jsonify, render_template, 
request 7 | from .sd import generate, generate_perf, run_python 8 | from .version import VERSION 9 | from .config import config 10 | from prometheus_client import Gauge, Counter, Histogram, Info 11 | from werkzeug.middleware.dispatcher import DispatcherMiddleware 12 | from prometheus_client import make_wsgi_app 13 | 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | path_last_generated_targets = Gauge( 19 | "httpsd_path_last_generated_targets", 20 | "Generated targets count in last request", 21 | ["path"], 22 | ) 23 | version_info = Info( 24 | "httpsd_version", 25 | "prometheus_http_sd version info", 26 | ) 27 | version_info.info({"version": VERSION}) 28 | target_path_requests_total = Counter( 29 | "httpsd_path_requests_total", 30 | "The total count of a path being requested, status label can be" 31 | " success/fail", 32 | ["path", "status", "l1_dir", "l2_dir"], 33 | ) 34 | target_path_request_duration_seconds = Histogram( 35 | "httpsd_target_path_request_duration_seconds", 36 | "The bucket of request duration in seconds", 37 | ["path"], 38 | ) 39 | 40 | 41 | def create_app(prefix): 42 | app = Flask( 43 | __name__, 44 | template_folder=str(Path(__file__).parent / "templates"), 45 | ) 46 | 47 | # Add prometheus wsgi middleware to route /metrics requests 48 | prometheus_wsgi_app = make_wsgi_app() 49 | app.wsgi_app = DispatcherMiddleware( 50 | app.wsgi_app, 51 | { 52 | "/metrics": prometheus_wsgi_app, 53 | f"{prefix}/metrics": prometheus_wsgi_app, 54 | }, 55 | ) 56 | 57 | # temp solution, return dynamic scrape configs from python file. 58 | # only support python file, not directory. 59 | @app.route(f"{prefix}/scrape_configs/<path:rest_path>") 60 | def get_scrape_configs(rest_path): 61 | generated = run_python( 62 | str(Path(config.root_dir) / (rest_path + ".py")), **request.args 63 | ) 64 | return generated 65 | 66 | @app.route(f"{prefix}/targets", defaults={"rest_path": ""}) 67 | @app.route(f"{prefix}/targets/", defaults={"rest_path": ""}) 68 | # match the rest of the path 69 | @app.route(f"{prefix}/targets/<path:rest_path>") 70 | def get_targets(rest_path): 71 | 72 | if request.args.get("debug") == "true": 73 | arg_list = dict(request.args) 74 | del arg_list["debug"] 75 | return generate_perf(config.root_dir, rest_path, **arg_list) 76 | 77 | logger.info( 78 | "request target path: {}, with parameters: {}".format( 79 | rest_path, 80 | request.args, 81 | ) 82 | ) 83 | 84 | l1_dir = l2_dir = "" 85 | path_splits = rest_path.split("/") 86 | if len(path_splits) > 0: 87 | l1_dir = path_splits[0] 88 | if len(path_splits) > 1: 89 | l2_dir = path_splits[1] 90 | 91 | with target_path_request_duration_seconds.labels( 92 | path=rest_path 93 | ).time(): 94 | try: 95 | targets = generate(config.root_dir, rest_path, **request.args) 96 | except: # noqa: E722 97 | target_path_requests_total.labels( 98 | path=rest_path, status="fail", l1_dir=l1_dir, l2_dir=l2_dir 99 | ).inc() 100 | raise 101 | else: 102 | target_path_requests_total.labels( 103 | path=rest_path, 104 | status="success", 105 | l1_dir=l1_dir, 106 | l2_dir=l2_dir, 107 | ).inc() 108 | if ( 109 | isinstance(targets, list) 110 | and len(targets) > 0 111 | and isinstance(targets[0], dict) 112 | ): 113 | path_last_generated_targets.labels(path=rest_path).set( 114 | sum(len(t.get("targets", []) or []) for t in targets) 115 | ) 116 | 117 | return jsonify(targets) 118 | 119 | @app.route(f"{prefix}/") 120 | def admin(): 121 | paths = [] 122 | 123 | for dirpath, _, _ in os.walk(config.root_dir): 124 | should_ignore_underscore = any( 125 | p.startswith("_") 126 | for p in
os.path.normpath(dirpath).split(os.sep) 127 | ) 128 | if should_ignore_underscore: 129 | continue 130 | 131 | dirpath = dirpath.removeprefix(config.root_dir) 132 | dirpath = dirpath.removeprefix("/") 133 | paths.append(dirpath) 134 | 135 | paths = sorted(list(set(paths))) 136 | return render_template( 137 | "admin.html", prefix=prefix, paths=paths, version=VERSION 138 | ) 139 | 140 | return app 141 | -------------------------------------------------------------------------------- /prometheus_http_sd/cli.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | import click 4 | import waitress 5 | 6 | from .mem_perf import start_tracing_thread 7 | from .config import config 8 | from .validate import validate 9 | from .app import create_app 10 | from .sd import py_cache 11 | 12 | 13 | def config_log(level): 14 | stdout_handler = logging.StreamHandler(stream=sys.stdout) 15 | logging.basicConfig( 16 | level=level, 17 | format=( 18 | "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s -" 19 | " %(message)s" 20 | ), 21 | handlers=[stdout_handler], 22 | ) 23 | 24 | 25 | @click.group() 26 | @click.option( 27 | "--log-level", 28 | default=20, 29 | help=( 30 | "Python logging level, default 20, can set from 0 to 50, step 10:" 31 | " https://docs.python.org/3/library/logging.html" 32 | ), 33 | ) 34 | def main(log_level): 35 | config_log(log_level) 36 | 37 | 38 | @main.command(help="Start an HTTP SD server for Prometheus.") 39 | @click.option( 40 | "--host", "-h", default="127.0.0.1", help="The interface to bind to." 41 | ) 42 | @click.option("--port", "-p", default=8080, help="The port to bind to.") 43 | @click.option( 44 | "--connection-limit", "-c", default=1000, help="Server connection limit" 45 | ) 46 | @click.option("--threads", "-t", default=64, help="Server threads") 47 | @click.option( 48 | "--url_prefix", 49 | "-r", 50 | default="", 51 | help=( 52 | "The global URL prefix; if set to /foo, then /targets will be" 53 | " available under /foo/targets" 54 | ), 55 | ) 56 | @click.argument( 57 | "root_dir", 58 | type=click.Path(exists=True, file_okay=False, dir_okay=True), 59 | ) 60 | @click.option( 61 | "--enable-tracer", 62 | "-v", 63 | is_flag=True, 64 | help="Enable the memory tracer, which prints memory usage into logs", 65 | ) 66 | @click.option( 67 | "--sentry-url", 68 | "-s", 69 | help=( 70 | "Use Sentry APM (sentry.io). You need to manually pip install" 71 | " sentry-sdk" 72 | ), 73 | ) 74 | @click.option( 75 | "--cache-type", 76 | help='Cache for the "run_python" function. Can be None or Timeout.', 77 | type=click.Choice(["Timeout", "None"]), 78 | default="Timeout", 79 | ) 80 | @click.option( 81 | "--cache-opt", 82 | help=( 83 | "Options passed to the cache object. " 84 | "Input format should be k=v. 
ex: timeout=1" 85 | ), 86 | multiple=True, 87 | default=[ 88 | "timeout=60", 89 | "cache_time=60", 90 | "name=target_generator", 91 | "garbage_collection_count=100", 92 | ], 93 | ) 94 | def serve( 95 | host, 96 | port, 97 | connection_limit, 98 | threads, 99 | url_prefix, 100 | root_dir, 101 | enable_tracer, 102 | sentry_url, 103 | cache_type, 104 | cache_opt, 105 | ): 106 | if sentry_url: 107 | try: 108 | import sentry_sdk 109 | from sentry_sdk.integrations.flask import FlaskIntegration 110 | except ImportError: 111 | print( 112 | "import sentry_sdk failed, please pip install" 113 | " 'sentry-sdk[flask]'" 114 | ) 115 | sys.exit(2) 116 | 117 | sentry_sdk.init( 118 | dsn=sentry_url, 119 | integrations=[ 120 | FlaskIntegration( 121 | transaction_style="url", 122 | ), 123 | ], 124 | ) 125 | print("sentry sdk initialized!") 126 | config.root_dir = root_dir 127 | app = create_app(url_prefix) 128 | 129 | setup_cache(cache_type, cache_opt) 130 | 131 | if enable_tracer: 132 | start_tracing_thread() 133 | 134 | waitress.serve( 135 | app, 136 | host=host, 137 | port=port, 138 | connection_limit=connection_limit, 139 | threads=threads, 140 | ) 141 | 142 | 143 | @main.command(help="Run and verify the generators under target directory.") 144 | @click.argument( 145 | "root_dir", 146 | type=click.Path(exists=True, file_okay=False, dir_okay=True), 147 | ) 148 | @click.option( 149 | "--ignore-path", 150 | "-i", 151 | multiple=True, 152 | help="Don't check this directory; the path starts from the same location as root", 153 | ) 154 | def check(root_dir, ignore_path): 155 | config.root_dir = root_dir.rstrip("/") 156 | validate(root_dir, ignore_dirs=ignore_path) 157 | 158 | 159 | def setup_cache(cache_type, config_opt): 160 | kwargs = {} 161 | for opt in config_opt: 162 | try: 163 | key, value = opt.split("=", 1) 164 | except ValueError: 165 | print( 166 | "value format incorrect. 
required key=value, but got %s" % opt, 167 | file=sys.stderr, 168 | ) 169 | sys.exit(127) 170 | kwargs[key] = value 171 | py_cache.select_decorator(cache_type, **kwargs) 172 | 173 | 174 | if __name__ == "__main__": 175 | main() 176 | -------------------------------------------------------------------------------- /prometheus_http_sd/config.py: -------------------------------------------------------------------------------- 1 | class Config: 2 | root_dir: str 3 | 4 | def __init__(self) -> None: 5 | self.root_dir = "" 6 | 7 | 8 | config = Config() 9 | -------------------------------------------------------------------------------- /prometheus_http_sd/const.py: -------------------------------------------------------------------------------- 1 | TEST_ENV_NAME = "PROMETHEUS_HTTP_SD_IS_TEST" 2 | -------------------------------------------------------------------------------- /prometheus_http_sd/decorator.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import time 3 | import heapq 4 | import traceback 5 | import threading 6 | 7 | from prometheus_client import Gauge, Counter, Histogram 8 | 9 | _collected_total = Counter( 10 | "httpsd_garbage_collection_collected_items_total", 11 | "The total count of the garbage collection collected items.", 12 | ["name"], 13 | ) 14 | 15 | _thread_cache_count = Gauge( 16 | "httpsd_garbage_collection_cache_count", 17 | "Show current thread_cache count", 18 | ["name"], 19 | ) 20 | 21 | _heap_cache_count = Gauge( 22 | "httpsd_garbage_collection_heap_count", 23 | "Show current heap length", 24 | ["name"], 25 | ) 26 | 27 | _collection_run_interval = Histogram( 28 | "http_sd_garbage_collection_run_interval_seconds_bucket", 29 | "The interval of two garbage collection run.", 30 | ["name"], 31 | ) 32 | 33 | 34 | class TimeoutException(Exception): 35 | """Raised when the target function times out.""" 36 | 37 | pass 38 | 39 | 40 | class TimeoutDecorator: 41 | """ 42 | TimeoutDecorator runs the target function in a single thread. 43 | 44 | +------------+ 45 | | | 46 | | | 47 | | | +-----------+ 48 | | Caller 1 +----+ | | 49 | | | | | | 50 | | | | | | 51 | | | | | | 52 | +------------+ | | | 53 | | | | 54 | | | | 55 | +------------+ | | | +----------+ 56 | | | | | | | | 57 | | | | call at the | | only single call | | 58 | | | | same time | Timeout | to the back | | 59 | | Caller 2 +----+------------>+ Cache +----------------->+ Function | 60 | | | | | | | | 61 | | | | | | | | 62 | | | | | | | | 63 | +------------+ | | | +----------+ 64 | | | | 65 | | | | 66 | +------------+ | | | 67 | | | | | | 68 | | | | | | 69 | | | | | | 70 | | Caller 3 +----+ | | 71 | | | +-----------+ 72 | | | 73 | | | 74 | +------------+ 75 | """ 76 | 77 | def __init__( 78 | self, 79 | timeout=None, 80 | cache_time=0, 81 | cache_exception_time=0, 82 | name="", 83 | garbage_collection_interval=5, 84 | garbage_collection_count=30, 85 | copy_response=False, 86 | ): 87 | """ 88 | Use threading and cache to store the function result. 89 | 90 | Garbage Collection time complexity: 91 | worst: O(nlogn) 92 | average in every operation: O(logn) 93 | 94 | Parameters 95 | ---------- 96 | timeout: int 97 | function timeout; if exceeded, raise TimeoutException (in sec). 98 | cache_time: int 99 | after the function returns normally, 100 | how long should we cache the result (in sec). 101 | cache_exception_time: int 102 | after the function raises an exception, 103 | how long should we cache the exception (in sec). 
104 | name: str 105 | prometheus_client metrics prefix 106 | garbage_collection_count: int 107 | garbage collection threshold (run only when the heap is larger than this). 108 | garbage_collection_interval: int 109 | the minimum interval in seconds between two collection runs, to avoid collecting too often. 110 | copy_response: bool 111 | use copy.deepcopy on the response from the target function. 112 | 113 | Returns 114 | ------- 115 | TimeoutDecorator 116 | decorator class. 117 | """ 118 | self.timeout = timeout 119 | self.cache_time = cache_time 120 | self.cache_exception_time = cache_exception_time 121 | self.name = name 122 | self.garbage_collection_interval = garbage_collection_interval 123 | self.garbage_collection_count = garbage_collection_count 124 | self.copy_response = copy_response 125 | 126 | self.thread_cache = {} 127 | self.cache_lock = threading.Lock() 128 | self.heap = [] 129 | self.heap_lock = threading.Lock() 130 | self.garbage_collection_timestamp = 0 131 | self.garbage_collection_lock = threading.Lock() 132 | 133 | def can_garbage_collection(self): 134 | """Check whether the current state allows running garbage collection.""" 135 | return ( 136 | self.garbage_collection_interval 137 | + self.garbage_collection_timestamp 138 | < time.time() 139 | and len(self.heap) > self.garbage_collection_count 140 | ) 141 | 142 | def _cache_garbage_collection(self): 143 | def can_iterate(): 144 | with self.heap_lock: 145 | if len(self.heap) == 0 or self.heap[0][0] > time.time(): 146 | return False 147 | return True 148 | 149 | worked_keys = {} 150 | while can_iterate(): 151 | _timestamp, _key = None, None 152 | with self.heap_lock: 153 | _timestamp, _key = heapq.heappop(self.heap) 154 | if _key in worked_keys: 155 | continue 156 | worked_keys[_key] = True 157 | with self.cache_lock: 158 | if _key not in self.thread_cache: 159 | continue 160 | if self.is_expired(self.thread_cache[_key]): 161 | if "traceback" in self.thread_cache[_key]: 162 | traceback.clear_frames( 163 | self.thread_cache[_key]["traceback"], 164 | ) 165 | del self.thread_cache[_key] 166 | _collected_total.labels(name=self.name).inc(1) 167 | _heap_cache_count.labels( 168 | name=self.name, 169 | ).set(len(self.heap)) 170 | _thread_cache_count.labels( 171 | name=self.name, 172 | ).set(len(self.thread_cache)) 173 | current_time = time.time() 174 | if self.garbage_collection_timestamp != 0: 175 | _collection_run_interval.labels( 176 | name=self.name, 177 | ).observe(current_time - self.garbage_collection_timestamp) 178 | self.garbage_collection_timestamp = current_time 179 | 180 | def is_expired(self, cache): 181 | """Check whether a thread_cache entry is expired.""" 182 | return cache["expired_timestamp"] < time.time() 183 | 184 | def _cal_cache_key(self, *arg, **kwargs): 185 | return hash(tuple([hash(arg), tuple(sorted(kwargs.items()))])) 186 | 187 | def __call__(self, function): 188 | """ 189 | Call target function with response cache. 190 | 191 | Raises 192 | ------ 193 | TimeoutException 194 | If the target function exceeds the executing time. 195 | """ 196 | 197 | def wrapper(*arg, **kwargs): 198 | # cache stores the context for this function call. 199 | # same function call will use the same cache. 
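# Each cache entry tracks one in-flight or finished call: "thread" runs
# the real function, "response"/"error" record its outcome, and
# "expired_timestamp" stays at +inf until the call finishes, so an
# unfinished entry can never be garbage-collected.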
200 | cache = { 201 | "thread": None, 202 | "error": None, 203 | "response": None, 204 | "expired_timestamp": float("inf"), 205 | } 206 | 207 | # target_function is a wrapper of the real function 208 | def target_function(key): 209 | try: 210 | if self.copy_response: 211 | cache["response"] = copy.deepcopy( 212 | function(*arg, **kwargs), 213 | ) 214 | else: 215 | cache["response"] = function(*arg, **kwargs) 216 | cache["expired_timestamp"] = time.time() + self.cache_time 217 | except Exception as e: 218 | cache["error"] = e 219 | cache["expired_timestamp"] = ( 220 | time.time() + self.cache_exception_time 221 | ) 222 | with self.heap_lock: 223 | heapq.heappush( 224 | self.heap, 225 | ( 226 | cache["expired_timestamp"], 227 | key, 228 | ), 229 | ) 230 | _heap_cache_count.labels( 231 | name=self.name, 232 | ).set(len(self.heap)) 233 | 234 | key = self._cal_cache_key(*arg, **kwargs) 235 | with self.cache_lock: 236 | if key in self.thread_cache: 237 | if self.thread_cache[key][ 238 | "thread" 239 | ].is_alive() or not self.is_expired( 240 | self.thread_cache[key] 241 | ): 242 | cache = self.thread_cache[key] 243 | if cache["thread"] is None: 244 | cache["thread"] = threading.Thread( 245 | target=target_function, 246 | args=(key,), 247 | ) 248 | cache["thread"].start() 249 | self.thread_cache[key] = cache 250 | _thread_cache_count.labels( 251 | name=self.name, 252 | ).set(len(self.thread_cache)) 253 | cache["thread"].join(self.timeout) 254 | 255 | if ( 256 | self.can_garbage_collection() 257 | and self.garbage_collection_lock.acquire(False) 258 | ): 259 | try: 260 | self._cache_garbage_collection() 261 | finally: 262 | self.garbage_collection_lock.release() 263 | if cache["thread"].is_alive(): 264 | raise TimeoutException("target function timeout!") 265 | if cache["error"]: 266 | e = cache["error"] 267 | raise copy.copy(e).with_traceback(e.__traceback__) 268 | if self.copy_response: 269 | return copy.deepcopy(cache["response"]) 270 | else: 271 | return cache["response"] 272 | 273 | return wrapper 274 | 275 | 276 | class NoDecoratorException(Exception): 277 | """Raised if cache type not found.""" 278 | 279 | pass 280 | 281 | 282 | class DecoratorSelector: 283 | """Wrapper that selects the cache method used by the "run_python" function.""" 284 | 285 | def __init__( 286 | self, 287 | cache_type="None", 288 | **kwargs, 289 | ): 290 | """ 291 | Init function to select the target decorators. 292 | 293 | Parameters 294 | ---------- 295 | cache_type: str 296 | select different decorators. 297 | kwargs: Dict[string, Any] 298 | parameters passed to the target cache decorators. 299 | 300 | Raises 301 | ------ 302 | NoDecoratorException 303 | If no cache type matches. 304 | """ 305 | self._functions = [] 306 | self.select_decorator(cache_type, **kwargs) 307 | 308 | def select_decorator( 309 | self, 310 | cache_type="Timeout", 311 | **kwargs, 312 | ): 313 | """ 314 | Re-init the decorator. 315 | 316 | Parameters 317 | ---------- 318 | cache_type: str 319 | select different decorators. 320 | kwargs: Dict[string, Any] 321 | parameters passed to the target cache decorators. 322 | 323 | Raises 324 | ------ 325 | NoDecoratorException 326 | If no cache type matches. 
327 | """ 328 | if cache_type == "Timeout": 329 | self._decorator = self._timeout_decorator_init(**kwargs) 330 | elif cache_type == "None": 331 | self._decorator = lambda function: function 332 | else: 333 | raise NoDecoratorException( 334 | "cache_type %s not supported" % cache_type 335 | ) 336 | for index in range(0, len(self._functions)): 337 | function = self._functions[index][0] 338 | self._functions[index] = ( 339 | function, 340 | self._decorator(function), 341 | ) 342 | 343 | def _timeout_decorator_init( 344 | self, 345 | timeout=None, 346 | cache_time=0, 347 | cache_exception_time=0, 348 | name="", 349 | garbage_collection_interval=5, 350 | garbage_collection_count=30, 351 | copy_response=False, 352 | ): 353 | if timeout is not None: 354 | timeout = int(timeout) 355 | cache_time = int(cache_time) 356 | cache_exception_time = int(cache_exception_time) 357 | name = str(name) 358 | garbage_collection_interval = int(garbage_collection_interval) 359 | garbage_collection_count = int(garbage_collection_count) 360 | copy_response = bool(copy_response) 361 | return TimeoutDecorator( 362 | timeout=timeout, 363 | cache_time=cache_time, 364 | cache_exception_time=cache_exception_time, 365 | name=name, 366 | garbage_collection_interval=garbage_collection_interval, 367 | garbage_collection_count=garbage_collection_count, 368 | copy_response=copy_response, 369 | ) 370 | 371 | def __call__(self, function): 372 | """Call the decorator that we initialized.""" 373 | target_function = self._decorator(function) 374 | self._functions.append( 375 | ( 376 | function, 377 | target_function, 378 | ) 379 | ) 380 | index = len(self._functions) - 1 381 | 382 | def wrapper(*arg, **kwargs): 383 | return self._functions[index][1](*arg, **kwargs) 384 | 385 | return wrapper 386 | -------------------------------------------------------------------------------- /prometheus_http_sd/exceptions.py: -------------------------------------------------------------------------------- 1 | class HTTPSDException(Exception): 2 | pass 3 | 4 | 5 | class SDResultNotValidException(Exception): 6 | """The generated targets are not valid.""" 7 | -------------------------------------------------------------------------------- /prometheus_http_sd/mem_perf.py: -------------------------------------------------------------------------------- 1 | import time 2 | import tracemalloc 3 | import logging 4 | import threading 5 | 6 | logger = logging.getLogger("memorytracer") 7 | 8 | INTERVAL_SECONDS = 10 9 | TOPN = 10 10 | 11 | 12 | class MemTracer: 13 | def __init__(self): 14 | self.last_snapshot = None 15 | 16 | def snapshot_once(self): 17 | snapshot = tracemalloc.take_snapshot() 18 | if self.last_snapshot: 19 | top_stats = snapshot.compare_to(self.last_snapshot, "lineno") 20 | logger.info("[ Top 10 differences ]") 21 | for stat in top_stats[:TOPN]: 22 | logger.info(stat) 23 | self.last_snapshot = snapshot 24 | 25 | top_stats = snapshot.statistics("lineno") 26 | 27 | logger.info("[ Top 10 ]") 28 | for stat in top_stats[:TOPN]: 29 | logger.info(stat) 30 | 31 | def run_forever(self): 32 | tracemalloc.start() 33 | logger.info( 34 | "Memory tracer started, will print out the memory usage every %d" 35 | " seconds...", 36 | INTERVAL_SECONDS, 37 | ) 38 | while 1: 39 | self.snapshot_once() 40 | 41 | time.sleep(INTERVAL_SECONDS) 42 | 43 | 44 | def start_tracing_thread(): 45 | tracer = MemTracer() 46 | t = threading.Thread(target=tracer.run_forever, daemon=True) 47 | t.start() 48 | --------------------------------------------------------------------------------
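
For reference, a minimal usage sketch of the decorator module above; this is a hypothetical file, not part of the repository, and the 1-second timeout and 60-second cache_time are illustrative only:

import time

from prometheus_http_sd.decorator import DecoratorSelector, TimeoutException

# "Timeout" selects TimeoutDecorator; the kwargs are forwarded to it.
cache = DecoratorSelector(cache_type="Timeout", timeout=1, cache_time=60)


@cache
def slow_generator():
    time.sleep(5)  # simulates an expensive service-discovery call
    return [{"targets": ["127.0.0.1:9100"], "labels": {"job": "node"}}]


try:
    slow_generator()  # the worker thread outlives the 1s budget
except TimeoutException:
    pass  # the caller times out, but the thread keeps running
time.sleep(5)  # let the background call finish and populate the cache
print(slow_generator())  # now served from the 60s result cache

--------------------------------------------------------------------------------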
/prometheus_http_sd/sd.py: -------------------------------------------------------------------------------- 1 | from concurrent.futures import ThreadPoolExecutor, as_completed 2 | import importlib 3 | import importlib.machinery 4 | import importlib.util 5 | import json 6 | import logging 7 | import os 8 | from pathlib import Path 9 | import time 10 | from typing import Dict, List 11 | 12 | from prometheus_client import Counter, Gauge, Histogram 13 | import yaml 14 | 15 | from prometheus_http_sd.decorator import DecoratorSelector 16 | from prometheus_http_sd.exceptions import SDResultNotValidException 17 | 18 | from .const import TEST_ENV_NAME 19 | from .targets import TargetList 20 | 21 | try: 22 | from yaml import CLoader as Loader 23 | except ImportError: 24 | from yaml import Loader 25 | 26 | logger = logging.getLogger(__name__) 27 | 28 | generator_requests_total = Counter( 29 | "httpsd_generator_requests_total", 30 | "The total number of times this generator executed; status can be success/fail", 31 | ["generator", "status"], 32 | ) 33 | 34 | generator_last_generated_targets = Gauge( 35 | "httpsd_generator_last_generated_targets", 36 | "The target count that this generator produced during its last execution", 37 | ["generator"], 38 | ) 39 | 40 | generator_run_duration_seconds = Histogram( 41 | "httpsd_generator_run_duration_seconds", 42 | "The time this generator took to run", 43 | ["generator"], 44 | ) 45 | 46 | generator_executor = ThreadPoolExecutor(max_workers=400) 47 | 48 | 49 | def should_ignore(full_path, ignore_dirs): 50 | if ignore_dirs: 51 | for ignore in ignore_dirs: 52 | if full_path.startswith(ignore): 53 | logger.warning( 54 | f"{full_path} is ignored because it matches ignore" 55 | f" pattern {ignore}" 56 | ) 57 | return True 58 | 59 | should_ignore_this = any( 60 | p.startswith("_") or (p.startswith(".") and p != "..") 61 | for p in os.path.normpath(full_path).split(os.sep) 62 | ) 63 | 64 | if should_ignore_this: 65 | return True 66 | 67 | return False 68 | 69 | 70 | def get_generator_list( 71 | root: str, path: str = "", ignore_dirs=None 72 | ) -> List[str]: 73 | """ 74 | Collect generator files starting from ``path``; 75 | if ``path`` is None or empty, then start from 76 | the ``root`` directory. 77 | """ 78 | logger.debug(f"{root=}, {path=}") 79 | if path: 80 | root = os.path.join(root, path) 81 | 82 | generators = [] 83 | 84 | if not Path(root).exists(): 85 | raise FileNotFoundError(f"{root} does not exist!") 86 | 87 | for root, _, files in os.walk(root): 88 | for file in files: 89 | full_path = os.path.join(root, file) 90 | 91 | ignore = should_ignore(full_path, ignore_dirs) 92 | logger.info(f"{file=}, ignore={ignore}") 93 | if ignore: 94 | continue 95 | 96 | generators.append(full_path) 97 | 98 | logger.debug(f"{generators=}") 99 | return generators 100 | 101 | 102 | def generate(root: str, path: str = "", **extra_args) -> TargetList: 103 | generators = get_generator_list(root, path) 104 | all_targets = [] 105 | 106 | futures = [] 107 | for generator in generators: 108 | future = generator_executor.submit( 109 | run_generator, generator, **extra_args 110 | ) 111 | futures.append(future) 112 | 113 | for future in as_completed(futures): 114 | target_list = future.result() 115 | if isinstance(target_list, list): 116 | all_targets.extend(target_list) 117 | else: 118 | all_targets.append(target_list) 119 | 120 | return all_targets 121 | 122 | 123 | def _timed_wrapper(*args, **kwargs): 124 | start = time.time() 125 | run_generator(*args, **kwargs) 126 | return time.time() - start
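# Note: _timed_wrapper above discards the generator's result on purpose;
# generate_perf() below only reports wall-clock seconds per generator.
# app.py routes /targets/<path>?debug=true to generate_perf, so the HTTP
# response has the shape {"generator_run_seconds": {"<generator path>": 0.42}}.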
127 | 128 | 129 | def generate_perf(root: str, path: str = "", **extra_args) -> Dict[str, float]: 130 | generators = get_generator_list(root, path) 131 | futures = {} 132 | result = {} 133 | 134 | for generator in generators: 135 | futures[generator] = generator_executor.submit( 136 | _timed_wrapper, generator, **extra_args 137 | ) 138 | 139 | for generator, future in futures.items(): 140 | time_cost = future.result() 141 | result[generator] = time_cost 142 | return {"generator_run_seconds": result} 143 | 144 | 145 | def run_generator(generator_path: str, **extra_args) -> TargetList: 146 | if generator_path.endswith(".json"): 147 | executor = run_json 148 | elif generator_path.endswith(".py"): 149 | executor = run_python 150 | elif generator_path.endswith(".yaml"): 151 | executor = run_yaml 152 | else: 153 | generator_requests_total.labels( 154 | generator=generator_path, status="fail" 155 | ).inc() 156 | raise Exception(f"Unknown File Type: {generator_path}") 157 | 158 | with generator_run_duration_seconds.labels( 159 | generator=generator_path 160 | ).time(): 161 | try: 162 | result = executor(generator_path, **extra_args) 163 | if result is None: 164 | raise SDResultNotValidException("Generated result is None") 165 | except: # noqa: E722 166 | generator_requests_total.labels( 167 | generator=generator_path, status="fail" 168 | ).inc() 169 | raise 170 | else: 171 | generator_requests_total.labels( 172 | generator=generator_path, status="success" 173 | ).inc() 174 | 175 | if ( 176 | isinstance(result, list) 177 | and len(result) > 0 178 | and isinstance(result[0], dict) 179 | ): 180 | generator_last_generated_targets.labels( 181 | generator=generator_path 182 | ).set(sum(len(t.get("targets", []) or []) for t in result)) 183 | 184 | return result 185 | 186 | 187 | def run_json(file_path: str) -> TargetList: 188 | with open(file_path) as jsonf: 189 | return json.load(jsonf) 190 | 191 | 192 | py_cache = DecoratorSelector() 193 | 194 | 195 | @py_cache 196 | def run_python(generator_path, **extra_args) -> TargetList: 197 | logger.debug(f"start to import module {generator_path}...") 198 | 199 | loader = importlib.machinery.SourceFileLoader("mymodule", generator_path) 200 | spec = importlib.util.spec_from_loader("mymodule", loader) 201 | if spec: 202 | mymodule = importlib.util.module_from_spec(spec) 203 | loader.exec_module(mymodule) 204 | else: 205 | raise Exception("Module spec is None, cannot load it!") 206 | func = getattr(mymodule, "generate_targets") 207 | 208 | if os.getenv(TEST_ENV_NAME) == "1": 209 | try: 210 | test_func = getattr(mymodule, "test_generate_targets") 211 | except AttributeError: 212 | pass 213 | else: 214 | func = test_func 215 | return func(**extra_args) 216 | 217 | 218 | def run_yaml(file_path: str): 219 | with open(file_path) as yamlf: 220 | data = yaml.load(yamlf, Loader=Loader) 221 | return data 222 | 223 | 224 | if __name__ == "__main__": 225 | generate("") 226 | generate("spex") 227 | -------------------------------------------------------------------------------- /prometheus_http_sd/targets.py: -------------------------------------------------------------------------------- 1 | import typing 2 | 3 | 4 | class Target(typing.TypedDict): 5 | targets: typing.List[str] 6 | labels: typing.Dict[str, str] 7 | 8 | 9 | TargetList = typing.List[Target] 10 | -------------------------------------------------------------------------------- /prometheus_http_sd/templates/admin.html: -------------------------------------------------------------------------------- 1 | <html> 2 | <head> 3 | <meta charset="utf-8" /> 4 | <title> 5 | Prometheus
HTTP SD Admin 6 | </title> 7 | </head> 8 | <body> 9 | <h1>Prometheus HTTP SD Admin</h1> 10 | <div> 11 | <a href="https://github.com/laixintao/prometheus-http-sd" target="_blank"> 12 | Documentation 13 | </a> 14 | <span> 15 | HTTP SD Version: {{ version }} 16 | </span> 17 | </div> 18 | <h2>Targets Paths</h2> 19 | <ul> 20 | {% for path in paths %} 21 | <li> 22 | <a href="{{ prefix }}/targets/{{ path }}"> 23 | {{ prefix }}/targets/{{ path }} 24 | </a> 25 | </li> 26 | {% endfor %} 27 | </ul> 28 | <h2>Metrics Path</h2> 29 | {% if prefix != "" %} 30 | <p> 31 | Prometheus HTTP SD exposes metrics for monitoring itself. Metrics are 32 | available under /metrics and {{ prefix }}/metrics (by prefix). 33 | </p> 34 | {% endif %} 35 | <ul> 36 | <li> 37 | <a href="/metrics">/metrics</a> 38 | </li> 39 | {% if prefix != "" %} 40 | <li> 41 | <a href="{{ prefix }}/metrics">{{ prefix }}/metrics</a> 42 | </li> 43 | {% endif %} 44 | </ul> 45 | 46 |
47 | </body> 48 | </html> 49 | -------------------------------------------------------------------------------- /prometheus_http_sd/validate.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import time 4 | import logging 5 | 6 | from .const import TEST_ENV_NAME 7 | from .sd import get_generator_list, run_generator 8 | 9 | logger = logging.getLogger("checker") 10 | 11 | 12 | def validate(root_dir, ignore_dirs=None): 13 | os.environ[TEST_ENV_NAME] = "1" 14 | 15 | logger.info(f"validate dir, {ignore_dirs=}") 16 | generators = get_generator_list(root_dir, ignore_dirs=ignore_dirs) 17 | 18 | total_targets = 0 19 | exit_0 = True 20 | 21 | by_generator = {} 22 | for generator in generators: 23 | start = time.time() 24 | try: 25 | target_list = run_generator(generator) 26 | except: # noqa: E722 27 | logger.exception("Error when running generator: %s", generator) 28 | sys.exit(1) 29 | 30 | all_good = True 31 | for t in target_list: 32 | all_good = check_content(t) and all_good 33 | if not all_good: 34 | exit_0 = False 35 | end = time.time() 36 | count = 0 37 | count = sum(len(t["targets"]) for t in target_list) 38 | status = "PASS" 39 | if not all_good: 40 | status = "FAIL" 41 | logger.info( 42 | f"{status} ran generator {generator}, took {end-start}s, generated" 43 | f" {count} targets." 44 | ) 45 | by_generator[str(generator)] = count 46 | total_targets += count 47 | 48 | logger.info(f"Done! Generated {total_targets} targets in total.") 49 | 50 | by_generator["_total"] = total_targets 51 | if exit_0: 52 | return by_generator 53 | sys.exit(1) 54 | 55 | 56 | def check_content(target): 57 | if "targets" not in target: 58 | logger.warning(f"`targets` key is not in {target}") 59 | return False 60 | 61 | host_ports = target["targets"] 62 | if not isinstance(host_ports, list): 63 | logger.warning(f"`targets` key in {target} is not an array.") 64 | return False 65 | for k in host_ports: 66 | if ":" not in k: 67 | logger.warning(f"is target {k} missing a port?") 68 | return False 69 | 70 | labels = target.get("labels") 71 | if labels: 72 | if not isinstance(labels, dict): 73 | logger.warning(f"`labels` key in {target} is not a dict.") 74 | return False 75 | for k, v in labels.items(): 76 | if not isinstance(k, str) or not isinstance(v, str): 77 | logger.warning(f"label pair {k}:{v} is not a string.") 78 | return False 79 | return True 80 | -------------------------------------------------------------------------------- /prometheus_http_sd/version.py: -------------------------------------------------------------------------------- 1 | VERSION = "1.3.16" 2 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "prometheus-http-sd" 3 | version = "1.3.16" 4 | description = "Prometheus HTTP SD framework."
5 | authors = ["laixintao"] 6 | readme = 'README.md' 7 | homepage = "https://github.com/laixintao/prometheus-http-sd" 8 | 9 | 10 | [tool.poetry.dependencies] 11 | python = "^3.7" 12 | Flask = "^2.1.3" 13 | waitress = "^2.1.2" 14 | prometheus-client = "^0.14.1" 15 | PyYAML = "^6.0" 16 | sentry-sdk = {extras = ["flask"], version = "0.10.2"} 17 | 18 | [tool.poetry.dev-dependencies] 19 | pytest = "^7.1.2" 20 | flake8 = "^4.0.1" 21 | 22 | [build-system] 23 | requires = ["poetry-core>=1.3.16"] 24 | build-backend = "poetry.core.masonry.api" 25 | 26 | [tool.black] 27 | line-length = 79 28 | 29 | [tool.poetry.scripts] 30 | prometheus-http-sd = 'prometheus_http_sd.cli:main' 31 | -------------------------------------------------------------------------------- /test/app_root/a.yaml: -------------------------------------------------------------------------------- 1 | [] 2 | -------------------------------------------------------------------------------- /test/app_root/cached_target/a.py: -------------------------------------------------------------------------------- 1 | def generate_targets(**kwargs): 2 | import time 3 | 4 | time.sleep(5) 5 | return [ 6 | { 7 | "labels": {"foo": "bar"}, 8 | "targets": ["127.0.0.1:8080"], 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /test/app_root/echo_target/sleep2_target.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | 4 | def generate_targets(**kwargs): 5 | """Sleep 2 seconds, then return a fixed target.""" 6 | time.sleep(2) 7 | return [ 8 | { 9 | "labels": {"sleep": "2"}, 10 | "targets": ["127.0.0.1:8080"], 11 | } 12 | ] 13 | 14 | 15 | if __name__ == "__main__": 16 | generate_targets() 17 | -------------------------------------------------------------------------------- /test/app_root/echo_target/sleep_target.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | 4 | def generate_targets(**kwargs): 5 | """Sleep 3 seconds, then return a fixed target.""" 6 | time.sleep(3) 7 | return [ 8 | { 9 | "labels": {"sleep": "3"}, 10 | "targets": ["127.0.0.1:8080"], 11 | } 12 | ] 13 | 14 | 15 | if __name__ == "__main__": 16 | generate_targets() 17 | -------------------------------------------------------------------------------- /test/app_root/echo_target/target.py: -------------------------------------------------------------------------------- 1 | def generate_targets(**args): 2 | """Return the request args as the target's labels.""" 3 | return [ 4 | { 5 | "labels": args, 6 | "targets": ["127.0.0.1:8080"], 7 | } 8 | ] 9 | 10 | 11 | if __name__ == "__main__": 12 | generate_targets() 13 | -------------------------------------------------------------------------------- /test/app_root/error/error.py: -------------------------------------------------------------------------------- 1 | def generate_targets(**args): 2 | """Always raise ZeroDivisionError, for failure-path tests.""" 3 | return 1 / 0 4 | -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from prometheus_http_sd.app import create_app 3 | 4 | 5 | @pytest.fixture() 6 | def app(): 7 | app = create_app("/") 8 | app.config.update( 9 | { 10 | "TESTING": True, 11 | } 12 | ) 13 | 14 | yield app 15 | 16 | 17 | @pytest.fixture() 18 | def client(app): 19 | return app.test_client() 20 | 
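
For reference, a hedged sketch of how the ?debug=true code path in app.py could be exercised with the same fixtures; this is a hypothetical test, not part of the repository's suite, and the test name and assertions are illustrative only:

import json
from pathlib import Path


def test_app_target_debug_timings(client):  # hypothetical, for illustration
    from prometheus_http_sd.config import config

    config.root_dir = str(Path(__file__).parent / "app_root")

    # debug=true makes get_targets() call generate_perf() instead of generate()
    response = client.get("/targets/echo_target?debug=true")
    assert response.status_code == 200
    body = json.loads(response.data.decode("utf-8"))
    # one wall-clock duration per generator file, keyed by its path
    for path, seconds in body["generator_run_seconds"].items():
        assert path.endswith(".py")
        assert seconds >= 0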
-------------------------------------------------------------------------------- /test/test_app.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | 4 | def test_app_target_with_parameters(client): 5 | from prometheus_http_sd.config import config 6 | import json 7 | 8 | config.root_dir = str(Path(__file__).parent / "app_root") 9 | 10 | response = client.get("/targets/echo_target?domain=example.com&info=test") 11 | assert response.status_code == 200 12 | body = json.loads(response.data.decode("utf-8")) 13 | assert body == [ 14 | { 15 | "labels": {"domain": "example.com", "info": "test"}, 16 | "targets": ["127.0.0.1:8080"], 17 | }, 18 | {"labels": {"sleep": "2"}, "targets": ["127.0.0.1:8080"]}, 19 | {"labels": {"sleep": "3"}, "targets": ["127.0.0.1:8080"]}, 20 | ] 21 | -------------------------------------------------------------------------------- /test/test_generator/root/empty/node.json: -------------------------------------------------------------------------------- 1 | [] 2 | -------------------------------------------------------------------------------- /test/test_generator/root/json/target.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "targets": [ 4 | "192.168.19.2:9100", 5 | "192.168.19.3:9100", 6 | "192.168.19.4:9100", 7 | "192.168.19.5:9100" 8 | ], 9 | "labels": { 10 | "__meta_datacenter": "singapore", 11 | "__meta_prometheus_job": "gateway" 12 | } 13 | } 14 | ] 15 | -------------------------------------------------------------------------------- /test/test_generator/root/yaml/target.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - targets: 3 | - "10.1.1.9:9100" 4 | - "10.1.1.10:9100" 5 | labels: 6 | job: node 7 | datacenter: nyc 8 | group: g1 9 | - targets: 10 | - "10.2.1.9:9100" 11 | - "10.2.1.10:9100" 12 | labels: 13 | job: node 14 | datacenter: sg 15 | group: g2 16 | -------------------------------------------------------------------------------- /test/test_generator/test_generator.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from prometheus_http_sd.sd import generate 3 | from pathlib import Path 4 | 5 | 6 | root = str(Path(__file__).parent / "root") 7 | 8 | 9 | def test_parse_json(): 10 | targets = generate(root, "json") 11 | assert targets == [ 12 | { 13 | "targets": [ 14 | "192.168.19.2:9100", 15 | "192.168.19.3:9100", 16 | "192.168.19.4:9100", 17 | "192.168.19.5:9100", 18 | ], 19 | "labels": { 20 | "__meta_datacenter": "singapore", 21 | "__meta_prometheus_job": "gateway", 22 | }, 23 | } 24 | ] 25 | 26 | 27 | def test_parse_yaml(): 28 | targets = generate(root, "yaml") 29 | assert targets == [ 30 | { 31 | "targets": ["10.1.1.9:9100", "10.1.1.10:9100"], 32 | "labels": {"job": "node", "datacenter": "nyc", "group": "g1"}, 33 | }, 34 | { 35 | "targets": ["10.2.1.9:9100", "10.2.1.10:9100"], 36 | "labels": {"job": "node", "datacenter": "sg", "group": "g2"}, 37 | }, 38 | ] 39 | 40 | 41 | def test_non_exist(): 42 | with pytest.raises(FileNotFoundError): 43 | generate(root, "non-exist") 44 | 45 | 46 | def test_empty(): 47 | targets = generate(root, "empty") 48 | assert targets == [] 49 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/.hidden.json: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/laixintao/prometheus-http-sd/0017ce62e51312ab3f891866354c99d48d2e6350/test/test_ignore/good_root/.hidden.json -------------------------------------------------------------------------------- /test/test_ignore/good_root/.shoud_ignore/a.txt: -------------------------------------------------------------------------------- 1 | foo 2 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/.should_ignore_file.txt: -------------------------------------------------------------------------------- 1 | bar 2 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/_utils/utils.py: -------------------------------------------------------------------------------- 1 | def foo(): 2 | return "bar" 3 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/gateway/nginx/edge.py: -------------------------------------------------------------------------------- 1 | def generate_targets(**kwargs): 2 | return [ 3 | { 4 | "targets": ["10.71.99.1:3333", "10.71.99.2:3333"], 5 | "labels": { 6 | "__meta_datacenter": "singapore", 7 | "__meta_prometheus_job": "nginx", 8 | }, 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/gateway/nginx/targets.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "targets": [ 4 | "10.71.11.1:3333", 5 | "10.71.11.2:3333" 6 | ], 7 | "labels": { 8 | "__meta_datacenter": "singapore", 9 | "__meta_prometheus_job": "nginx" 10 | } 11 | } 12 | ] 13 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/gateway/targets.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "targets": [ 4 | "192.168.19.2:9100", 5 | "192.168.19.3:9100", 6 | "192.168.19.4:9100", 7 | "192.168.19.5:9100" 8 | ], 9 | "labels": { 10 | "__meta_datacenter": "singapore", 11 | "__meta_prometheus_job": "gateway" 12 | } 13 | } 14 | ] 15 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/queue/_queue_utils/utils.py: -------------------------------------------------------------------------------- 1 | def hello(): 2 | return "world" 3 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/queue/kafka.py: -------------------------------------------------------------------------------- 1 | def generate_targets(**kwargs): 2 | return [ 3 | { 4 | "targets": [ 5 | "10.0.10.2:9100", 6 | "10.0.10.3:9100", 7 | "10.0.10.4:9100", 8 | "10.0.10.5:9100", 9 | ], 10 | "labels": { 11 | "__meta_datacenter": "london", 12 | "__meta_prometheus_job": "node", 13 | }, 14 | }, 15 | { 16 | "targets": ["10.0.40.2:9100", "10.0.40.3:9100"], 17 | "labels": { 18 | "__meta_datacenter": "london", 19 | "__meta_prometheus_job": "alertmanager", 20 | }, 21 | }, 22 | { 23 | "targets": ["10.0.40.2:9093", "10.0.40.3:9093"], 24 | "labels": { 25 | "__meta_datacenter": "newyork", 26 | "__meta_prometheus_job": "alertmanager", 27 | }, 28 | }, 29 | ] 30 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/queue/zookeeper.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "targets": [ 4 | "192.168.19.2:9100", 5 | 
"192.168.19.3:9100", 6 | "192.168.19.4:9100", 7 | "192.168.19.5:9100" 8 | ], 9 | "labels": { 10 | "__meta_datacenter": "singapore", 11 | "__meta_prometheus_job": "node" 12 | } 13 | } 14 | ] 15 | -------------------------------------------------------------------------------- /test/test_ignore/good_root/victoriametrics.json: -------------------------------------------------------------------------------- 1 | [] 2 | -------------------------------------------------------------------------------- /test/test_ignore/test_genrator_ignore.py: -------------------------------------------------------------------------------- 1 | from prometheus_http_sd.sd import get_generator_list 2 | from pathlib import Path 3 | 4 | 5 | good_root = str(Path(__file__).parent / "good_root") 6 | 7 | 8 | def test_underscore_should_be_ignored(): 9 | generators = get_generator_list(good_root) 10 | for g in generators: 11 | assert "utils" not in g 12 | 13 | 14 | def test_dot_should_be_ignored(): 15 | generators = get_generator_list(good_root) 16 | for g in generators: 17 | assert "hidden" not in g 18 | 19 | 20 | def test_dot_directory_should_be_ignored(): 21 | generators = get_generator_list(good_root) 22 | for g in generators: 23 | assert "should_ignore" not in g 24 | -------------------------------------------------------------------------------- /test/test_timeout/test_selector.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from prometheus_http_sd.decorator import ( 4 | TimeoutException, 5 | DecoratorSelector, 6 | ) 7 | 8 | 9 | def test_decorator_select(): 10 | selector = DecoratorSelector( 11 | "None", 12 | ) 13 | 14 | @selector 15 | def function(): 16 | time.sleep(2) 17 | return "hello" 18 | 19 | assert function() == "hello", "should be none selector" 20 | 21 | selector.select_decorator( 22 | cache_type="Timeout", 23 | timeout=1, 24 | ) 25 | try: 26 | function() 27 | assert False, "it should raise a timeout exception" 28 | pass 29 | except TimeoutException: 30 | pass 31 | -------------------------------------------------------------------------------- /test/test_timeout/test_timeout.py: -------------------------------------------------------------------------------- 1 | import time 2 | import pytest 3 | import threading 4 | import traceback 5 | 6 | from random import random 7 | from prometheus_http_sd.decorator import TimeoutDecorator, TimeoutException 8 | 9 | 10 | def test_timeout_cache(): 11 | @TimeoutDecorator( 12 | timeout=0.5, 13 | cache_time=1, 14 | garbage_collection_interval=0, 15 | garbage_collection_count=0, 16 | ) 17 | def havy_function(): 18 | time.sleep(2) 19 | return random() 20 | 21 | # test if the decorator can raise an exception after timeout. 22 | with pytest.raises(TimeoutException): 23 | _ = havy_function() 24 | 25 | # test if the decorator can cache the result. 26 | time.sleep(2) 27 | first_call = havy_function() 28 | second_call = havy_function() 29 | assert first_call is second_call, "the function did't cache the result :(" 30 | 31 | time.sleep(1) 32 | # after cache_time, the next call should returns different value. 33 | with pytest.raises(TimeoutException): 34 | _ = havy_function() 35 | time.sleep(2) 36 | third_call = havy_function() 37 | assert first_call != third_call, "oops, cache_time doesn't work!" 
38 | 39 | 40 | def test_garbage_collection(): 41 | decorator = TimeoutDecorator( 42 | timeout=0.5, 43 | cache_time=1, 44 | garbage_collection_interval=0, 45 | garbage_collection_count=1000000, # avoid automatic garbage collection 46 | ) 47 | 48 | @decorator 49 | def function(n): 50 | return object() 51 | 52 | expired_result = [] 53 | alive_result = [] 54 | old_object = function(10) # cached now; this entry expires during the sleep below 55 | for i in range(5): 56 | expired_result.append((i, function(i))) 57 | 58 | time.sleep(1.2) # longer than cache_time, so the entries above expire 59 | for j in range(5, 10): 60 | alive_result.append((j, function(j))) 61 | 62 | new_object = function(10) # recomputed, because the old entry for key 10 expired 63 | decorator._cache_garbage_collection() # manually sweep expired cache entries 64 | for key, _ in alive_result: 65 | assert decorator._cal_cache_key(key) in decorator.thread_cache 66 | 67 | for key, _ in expired_result: 68 | assert decorator._cal_cache_key(key) not in decorator.thread_cache 69 | 70 | assert old_object is not new_object 71 | assert decorator._cal_cache_key(10) in decorator.thread_cache 72 | 73 | 74 | def test_exception_cache(): 75 | decorator = TimeoutDecorator( 76 | timeout=0.5, 77 | cache_time=999, 78 | garbage_collection_interval=0, 79 | garbage_collection_count=1000000, # avoid automatic garbage collection 80 | ) 81 | 82 | global a 83 | a = 0 84 | 85 | class TestException(Exception): 86 | pass 87 | 88 | @decorator 89 | def function(): 90 | global a 91 | a += 1 92 | raise TestException("A") 93 | 94 | first_error = None 95 | second_error = None 96 | try: 97 | function() 98 | assert False, "function should raise error" 99 | except TestException as e: 100 | traceback.print_exc() 101 | first_error = e 102 | 103 | try: 104 | function() 105 | assert False, "function should raise error" 106 | except TestException as e: 107 | traceback.print_exc() 108 | second_error = e 109 | 110 | assert first_error is not second_error, "returned the same error object" 111 | # the first traceback item is the call site in this test,
which differs between the two calls. 112 | assert ( 113 | traceback.format_tb(first_error.__traceback__)[0] 114 | != traceback.format_tb(second_error.__traceback__)[0] 115 | ), "the error must not be served from the cache" 116 | assert ( 117 | traceback.format_tb(first_error.__traceback__)[1:] 118 | == traceback.format_tb(second_error.__traceback__)[1:] 119 | ), "traceback stack should be the same" 120 | assert a == 2 # ran twice: with no cache_exception_time set, errors are not cached 121 | 122 | 123 | def test_duplicated_append_traceback_problem(): 124 | decorator = TimeoutDecorator( 125 | timeout=2, 126 | cache_time=999, 127 | cache_exception_time=10, 128 | garbage_collection_interval=0, 129 | garbage_collection_count=1000000, # avoid automatic garbage collection 130 | ) 131 | 132 | global first_error 133 | global second_error 134 | global call_count 135 | first_error = None 136 | second_error = None 137 | call_count = 0 138 | 139 | @decorator 140 | def function(): 141 | time.sleep(1) 142 | global call_count 143 | call_count += 1 144 | raise Exception("A") 145 | 146 | def first_function(): 147 | global first_error 148 | try: 149 | function() 150 | assert False, "function should raise error" 151 | except Exception as e: 152 | first_error = e 153 | 154 | def second_function(): 155 | global second_error 156 | try: 157 | function() 158 | assert False, "function should raise error" 159 | except Exception as e: 160 | second_error = e 161 | 162 | first_thread = threading.Thread( 163 | target=first_function, 164 | ) 165 | 166 | second_thread = threading.Thread( 167 | target=second_function, 168 | ) 169 | first_thread.start() 170 | second_thread.start() 171 | 172 | first_thread.join() 173 | second_thread.join() 174 | 175 | assert first_error is not second_error 176 | 177 | # since Python stores the traceback on the exception object itself, 178 | # raising the same cached exception twice would be a problem;
179 | # therefore, the cache decorator copies the exception 180 | # so that each re-raise gets a correct traceback 181 | assert ( 182 | traceback.extract_tb(first_error.__traceback__)[0] 183 | != traceback.extract_tb(second_error.__traceback__)[0] 184 | ) 185 | # except for the first item (the function call from "test_timeout"), 186 | # the other traceback entries should be the same 187 | assert ( 188 | traceback.extract_tb(first_error.__traceback__)[1:] 189 | == traceback.extract_tb(second_error.__traceback__)[1:] 190 | ) 191 | 192 | assert call_count == 1, "target function should only be called once" 193 | -------------------------------------------------------------------------------- /test/test_validate/root_dir/bad.yaml: -------------------------------------------------------------------------------- 1 | - targets: 2 | - "10.0.0.1:8080" 3 | - "10.0.0.2:8080" 4 | labels: 5 | app: nginx 6 | group: sg 7 | 8 | - targets: 9 | - "10.0.0.4:8080" 10 | - "10.0.0.6" # no port: this is what makes the file invalid 11 | labels: 12 | app: nginx 13 | group: sg 14 | -------------------------------------------------------------------------------- /test/test_validate/root_dir/good.yaml: -------------------------------------------------------------------------------- 1 | - targets: 2 | - "10.0.0.1:8080" 3 | - "10.0.0.2:8080" 4 | labels: 5 | app: nginx 6 | group: sg 7 | 8 | - targets: 9 | - "10.0.0.4:8080" 10 | - "10.0.0.6:8080" 11 | labels: 12 | app: nginx 13 | group: sg 14 | -------------------------------------------------------------------------------- /test/test_validate/root_dir/py_should_run_test_func/a.py: -------------------------------------------------------------------------------- 1 | def generate_targets(**args): 2 | return [ 3 | { 4 | "labels": {"arg_foo": args["foo"]}, 5 | "targets": ["127.0.0.1:8080"], 6 | } 7 | ] 8 | 9 | 10 | def test_generate_targets(*args, **kwargs): 11 | return generate_targets(foo="bar") 12 | -------------------------------------------------------------------------------- /test/test_validate/root_dir/should_ignore/a.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - labels: 3 | a:b 4 | -------------------------------------------------------------------------------- /test/test_validate/root_dir/should_ignore/b.yaml: -------------------------------------------------------------------------------- 1 | abc 2 | -------------------------------------------------------------------------------- /test/test_validate/test_validate.py: -------------------------------------------------------------------------------- 1 | from prometheus_http_sd.validate import check_content 2 | from prometheus_http_sd.validate import validate 3 | 4 | from pathlib import Path 5 | 6 | 7 | def test_run_test_generate_method(): 8 | result = validate( 9 | Path(__file__).parent / "root_dir" / "py_should_run_test_func" 10 | ) 11 | assert result["_total"] == 1 12 | 13 | 14 | def test_no_port(): 15 | assert not check_content( 16 | { 17 | "targets": [ 18 | "192.168.19.2:9100", 19 | "192.168.19.3:9100", 20 | "192.168.19.4:9100", 21 | "192.168.19.5", 22 | ], 23 | "labels": { 24 | "__meta_datacenter": "singapore", 25 | "__meta_prometheus_job": "gateway", 26 | }, 27 | } 28 | ) 29 | 30 | 31 | def test_no_targets(): 32 | assert not check_content( 33 | { 34 | "labels": { 35 | "__meta_datacenter": "singapore", 36 | "__meta_prometheus_job": "gateway", 37 | }, 38 | } 39 | ) 40 | 41 | 42 | def test_label_notdict(): 43 | assert not check_content( 44 | { 45 | "targets": ["10.0.0.1:123"], 46 | "labels": ["__meta_datacenter", "__meta_prometheus_job"], 47 | } 48 | 
) 49 | 50 | 51 | def test_label_no_bool(): 52 | assert not check_content( 53 | {"targets": ["10.0.0.1:123"], "labels": {"abc": False}} 54 | ) 55 | --------------------------------------------------------------------------------
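Taken together, these fixtures pin down the generator contract the test suite exercises: a Python file under the target root exposes generate_targets(**kwargs), receives URL query parameters as keyword arguments (test_app.py), returns a list of {"targets": [...], "labels": {...}} groups in which every target carries an explicit port (test_no_port), and may define a test_generate_targets hook that validate will run (test_run_test_generate_method). A minimal sketch of a conforming generator follows; the file name hosts.py and the CMDB_HOSTS inventory are hypothetical stand-ins, not part of this repository.

# hosts.py (hypothetical): a generator script placed under the target root.
# CMDB_HOSTS stands in for a real inventory source (database, CMDB API, ...).
CMDB_HOSTS = {
    "sg": ["10.3.1.1", "10.3.1.2"],
    "nyc": ["10.4.1.1"],
}


def generate_targets(**kwargs):
    """Build one target group per datacenter from the inventory."""
    port = kwargs.get("port", "9100")  # query parameters arrive as strings
    return [
        {
            # every target must be "host:port"; check_content rejects bare hosts
            "targets": [f"{host}:{port}" for host in hosts],
            "labels": {"datacenter": dc, "job": "node"},
        }
        for dc, hosts in CMDB_HOSTS.items()
    ]


def test_generate_targets(*args, **kwargs):
    """Hook for validate, mirroring py_should_run_test_func/a.py."""
    return generate_targets(port="9100")

A request such as /targets/hosts?port=9100 would then reach this generator with port="9100" in kwargs, exactly as test_app_target_with_parameters demonstrates for echo_target.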