├── .coveragerc ├── .gitignore ├── .travis.sh ├── .travis.yml ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── README.rst ├── bin └── graphite ├── docs ├── Makefile ├── api.rst ├── conf.py ├── configuration.rst ├── custom-functions.rst ├── deployment.rst ├── finders.rst ├── functions.rst ├── index.rst ├── installation.rst └── releases.rst ├── fpm ├── README.md ├── build-deb.sh ├── cloud-init.sh └── conf │ ├── etc │ ├── default │ │ └── graphite-api │ ├── graphite-api.yaml │ └── init.d │ │ └── graphite-api │ ├── post-install │ ├── post-remove │ └── pre-remove ├── graphite_api ├── __init__.py ├── _vendor │ ├── __init__.py │ └── whisper.py ├── app.py ├── carbonlink.py ├── config.py ├── encoders.py ├── evaluator.py ├── finders │ ├── __init__.py │ └── whisper.py ├── functions.py ├── intervals.py ├── middleware.py ├── node.py ├── readers.py ├── render │ ├── __init__.py │ ├── attime.py │ ├── datalib.py │ ├── glyph.py │ └── grammar.py ├── storage.py └── utils.py ├── requirements-dev.txt ├── requirements.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── conf.yaml ├── data │ └── index ├── test_attime.py ├── test_carbonlink.py ├── test_encoders.py ├── test_finders.py ├── test_functions.py ├── test_http.py ├── test_intervals.py ├── test_metrics.py ├── test_paths.py ├── test_render.py ├── test_render_datalib.py ├── test_render_glyph.py └── test_storage.py ├── tox.ini └── unittest_main.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = graphite_api 4 | 5 | [report] 6 | omit = *_vendor* 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .tox 2 | *.egg-info 3 | *.pyc 4 | .coverage 5 | htmlcov 6 | docs/_build 7 | dist 8 | build 9 | -------------------------------------------------------------------------------- /.travis.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | set -xe 3 | 4 | if [ $TOXENV == "coverage" ] 5 | then 6 | pip install -r requirements-dev.txt coverage coveralls 7 | coverage run unittest_main.py 8 | coveralls 9 | else 10 | tox -e $TOXENV 11 | fi 12 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3.6 3 | sudo: false 4 | cache: pip 5 | env: 6 | - TOXENV=pyparsing1 7 | - TOXENV=flask08 8 | - TOXENV=flask09 9 | - TOXENV=no-flask-cache 10 | - TOXENV=coverage 11 | - TOXENV=lint 12 | - TOXENV=docs 13 | 14 | addons: 15 | apt: 16 | packages: 17 | - libcairo2-dev 18 | 19 | install: 20 | - pip install tox 21 | 22 | script: "./.travis.sh" 23 | 24 | matrix: 25 | include: 26 | - python: 3.6 27 | env: 28 | - TOXENV=py36 29 | - python: 3.5 30 | env: 31 | - TOXENV=py35 32 | - python: 3.4 33 | env: 34 | - TOXENV=py34 35 | - python: 2.7 36 | env: 37 | - TOXENV=py27 38 | - python: pypy 39 | env: 40 | - TOXENV=pypy 41 | allow_failures: 42 | - env: TOXENV=coverage 43 | - env: TOXENV=pyparsing1 44 | - env: TOXENV=pypy 45 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Contributions to Graphite-API are welcome! 
Before you start, here are a couple 2 | of indications: 3 | 4 | * If you want to report a bug, please search in the existing issues to see if 5 | the bug has already been reported. 6 | 7 | * If you want to write a patch, make sure there is no pending pull request for 8 | the same thing to avoid duplicate efforts. 9 | 10 | * If you're fixing a bug, please add a regression test that reproduces the 11 | bug and proves your fix is actually working. 12 | 13 | Pull requests must pass the travis builds to get merged. Please validate that 14 | your code passes flake8 checks and that the documentation builds without 15 | warnings. 16 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | 3 | MAINTAINER Bruno Renié 4 | 5 | VOLUME /srv/graphite 6 | 7 | RUN apt-get update 8 | RUN apt-get upgrade -y 9 | 10 | RUN apt-get install -y language-pack-en 11 | ENV LANGUAGE en_US.UTF-8 12 | ENV LANG en_US.UTF-8 13 | ENV LC_ALL en_US.UTF-8 14 | 15 | RUN locale-gen en_US.UTF-8 16 | RUN dpkg-reconfigure locales 17 | 18 | RUN apt-get install -y build-essential python-dev libffi-dev libcairo2-dev python-pip 19 | 20 | RUN pip install gunicorn graphite-api[sentry,cyanite] 21 | 22 | ONBUILD ADD graphite-api.yaml /etc/graphite-api.yaml 23 | ONBUILD RUN chmod 0644 /etc/graphite-api.yaml 24 | 25 | EXPOSE 8000 26 | 27 | CMD exec gunicorn -b 0.0.0.0:8000 -w 2 --log-level debug graphite_api.app:app 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 
40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2008 Chris Davis 191 | Copyright 2014 Bruno Renié 192 | 193 | Licensed under the Apache License, Version 2.0 (the "License"); 194 | you may not use this file except in compliance with the License. 195 | You may obtain a copy of the License at 196 | 197 | http://www.apache.org/licenses/LICENSE-2.0 198 | 199 | Unless required by applicable law or agreed to in writing, software 200 | distributed under the License is distributed on an "AS IS" BASIS, 201 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 202 | See the License for the specific language governing permissions and 203 | limitations under the License. 204 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst LICENSE 2 | recursive-exclude tests * 3 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Graphite-API 2 | ============ 3 | 4 | .. 
image:: https://travis-ci.org/brutasse/graphite-api.svg?branch=master 5 | :alt: Build Status 6 | :target: https://travis-ci.org/brutasse/graphite-api 7 | 8 | .. image:: https://img.shields.io/coveralls/brutasse/graphite-api/master.svg 9 | :alt: Coverage Status 10 | :target: https://coveralls.io/r/brutasse/graphite-api?branch=master 11 | 12 | Graphite-web, without the interface. Just the rendering HTTP API. 13 | 14 | This is a minimalistic API server that replicates the behavior of 15 | Graphite-web. I removed everything I could and simplified as much code as 16 | possible while keeping the basic functionality. 17 | 18 | Implemented API calls: 19 | 20 | * ``/metrics/find`` 21 | * ``/metrics/expand`` 22 | * ``/render`` 23 | 24 | No-ops: 25 | 26 | * ``/dashboard/find`` 27 | * ``/dashboard/load/`` 28 | * ``/events/get_data`` 29 | 30 | Difference from graphite-web 31 | ---------------------------- 32 | 33 | * Stateless. No need for a database. 34 | * No Pickle rendering. 35 | * No remote rendering. 36 | * JSON data in request bodies is supported, additionally to form data and 37 | querystring parameters. 38 | * Ceres integration will be as an external backend. 39 | * Compatibility with python 2 and 3. 40 | * Easy to install and configure. 41 | 42 | Goals 43 | ----- 44 | 45 | * Solid codebase. Strict flake8 compatibility, good test coverage. 46 | * Ease of installation/use/configuration. 47 | * Compatibility with the original Graphite-web API and 3rd-party dashboards. 48 | 49 | Non-goals 50 | --------- 51 | 52 | * Support for very old Python versions (Python 2.6 is still supported but 53 | maybe not for long). 54 | * Built-in support for every metric storage system in the world. Whisper is 55 | included by default, other storages are added via 3rd-party backends. 56 | 57 | Documentation 58 | ------------- 59 | 60 | `On readthedocs.org`_ or in the ``docs/`` directory. 61 | 62 | .. _On readthedocs.org: https://graphite-api.readthedocs.io/en/latest/ 63 | 64 | Hacking 65 | ------- 66 | 67 | `Tox`_ is used to run the tests for all supported environments. To get started 68 | from a fresh clone of the repository: 69 | 70 | .. code-block:: bash 71 | 72 | pip install tox 73 | tox 74 | 75 | .. _Tox: https://testrun.org/tox/ 76 | -------------------------------------------------------------------------------- /bin/graphite: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import os 3 | 4 | os.environ.setdefault('DEBUG', '1') 5 | 6 | import graphite_api 7 | 8 | graphite_api.DEBUG = bool(int(os.environ['DEBUG'])) 9 | 10 | from graphite_api.app import app 11 | 12 | 13 | if __name__ == '__main__': 14 | port = int(os.environ.get('PORT', 8888)) 15 | host = os.environ.get('BIND_ADDRESS', '127.0.0.1') 16 | app.run(debug=graphite_api.DEBUG, port=port, host=host) 17 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Graphite-API.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Graphite-API.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Graphite-API" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Graphite-API" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. 
The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # coding: utf-8 3 | 4 | import os 5 | import re 6 | import sys 7 | 8 | import sphinx_rtd_theme 9 | 10 | from sphinx.ext import autodoc 11 | 12 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir)) 13 | 14 | extensions = [ 15 | 'sphinx.ext.autodoc', 16 | ] 17 | 18 | templates_path = ['_templates'] 19 | 20 | source_suffix = '.rst' 21 | 22 | master_doc = 'index' 23 | 24 | project = 'Graphite-API' 25 | copyright = u'2014, Bruno Renié' 26 | 27 | version = '1.1.3' 28 | release = '1.1.3' 29 | 30 | exclude_patterns = ['_build'] 31 | 32 | pygments_style = 'sphinx' 33 | 34 | html_theme = 'sphinx_rtd_theme' 35 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 36 | 37 | htmlhelp_basename = 'Graphite-APIdoc' 38 | 39 | latex_elements = { 40 | } 41 | 42 | latex_documents = [ 43 | ('index', 'Graphite-API.tex', 'Graphite-API Documentation', 44 | 'Bruno Renié', 'manual'), 45 | ] 46 | 47 | man_pages = [ 48 | ('index', 'graphite-api', 'Graphite-API Documentation', 49 | ['Bruno Renié'], 1) 50 | ] 51 | 52 | texinfo_documents = [ 53 | ('index', 'Graphite-API', 'Graphite-API Documentation', 54 | 'Bruno Renié', 'Graphite-API', 'One line description of project.', 55 | 'Miscellaneous'), 56 | ] 57 | 58 | 59 | class RenderFunctionDocumenter(autodoc.FunctionDocumenter): 60 | priority = 10 61 | 62 | @classmethod 63 | def can_document_member(cls, member, membername, isattr, parent): 64 | return autodoc.FunctionDocumenter.can_document_member( 65 | member, membername, isattr, parent 66 | ) and parent.name == 'graphite_api.functions' 67 | 68 | def format_args(self): 69 | args = super(RenderFunctionDocumenter, self).format_args() 70 | if args is not None: 71 | return re.sub('requestContext, ', '', args) 72 | 73 | 74 | suppress_warnings = ['app.add_directive'] 75 | 76 | 77 | def setup(app): 78 | app.add_autodocumenter(RenderFunctionDocumenter) 79 | 80 | 81 | add_module_names = False 82 | 83 | 84 | class Mock(object): 85 | __all__ = [] 86 | 87 | def __init__(self, *args, **kwargs): 88 | pass 89 | 90 | def __call__(self, *args, **kwargs): 91 | return Mock() 92 | 93 | @classmethod 94 | def __getattr__(cls, name): 95 | if name in ('__file__', '__path__'): 96 | return '/dev/null' 97 | elif name[0] == name[0].upper(): 98 | mockType = type(name, (), {}) 99 | mockType.__module__ = __name__ 100 | return mockType 101 | else: 102 | return Mock() 103 | 104 | 105 | for mod_name in ['cairocffi']: 106 | sys.modules[mod_name] = Mock() 107 | -------------------------------------------------------------------------------- /docs/configuration.rst: -------------------------------------------------------------------------------- 1 | Configuration 2 | ============= 3 | 4 | /etc/graphite-api.yaml 5 | ---------------------- 6 | 7 | The configuration file for Graphite-API lives at ``/etc/graphite-api.yaml`` 8 | and uses the YAML format. Creating the configuration file is optional: if 9 | Graphite-API doesn't find the file, sane defaults are used. They are described 10 | below. 11 | 12 | Default values 13 | `````````````` 14 | 15 | .. 
code-block:: yaml 16 | 17 | search_index: /srv/graphite/index 18 | finders: 19 | - graphite_api.finders.whisper.WhisperFinder 20 | functions: 21 | - graphite_api.functions.SeriesFunctions 22 | - graphite_api.functions.PieFunctions 23 | whisper: 24 | directories: 25 | - /srv/graphite/whisper 26 | time_zone: or UTC 27 | 28 | Config sections 29 | ``````````````` 30 | 31 | Default sections 32 | ^^^^^^^^^^^^^^^^ 33 | 34 | *search_index* 35 | 36 | The location of the search index used for searching metrics. Note that it 37 | needs to be a file that is writable by the Graphite-API process. 38 | 39 | *finders* 40 | 41 | A list of python paths to the storage finders you want to use when fetching 42 | metrics. 43 | 44 | *functions* 45 | 46 | A list of python paths to function definitions for transforming / analyzing 47 | time series data. 48 | 49 | *whisper* 50 | 51 | The configuration information for whisper. Only relevant when using 52 | WhisperFinder. Simply holds a ``directories`` key listing all directories 53 | containing whisper data. 54 | 55 | *time_zone* 56 | 57 | The time zone to use when generating graphs. By default, Graphite-API tries 58 | to detect your system timezone. If detection fails it falls back to UTC. You 59 | can also manually override it if you want another value than your system's 60 | timezone. 61 | 62 | Extra sections 63 | ^^^^^^^^^^^^^^ 64 | 65 | *carbon* 66 | 67 | Configuration information for reading data from carbon's cache. Items: 68 | 69 | *hosts* 70 | List of carbon-cache hosts, in the format ``hostname:port[:instance]``. 71 | 72 | *timeout* 73 | Socket timeout for carbon connections, in seconds. 74 | 75 | *retry_delay* 76 | Time to wait before trying to re-establish a failed carbon connection, in 77 | seconds. 78 | 79 | *hashing_keyfunc* 80 | Python path to a hashing function for metrics. If you use Carbon with 81 | consistent hashing and a custom function, you need to point to the same 82 | hashing function. 83 | 84 | *hashing_type* 85 | Type of metric hashing function. The default ``carbon_ch`` is Graphite's 86 | traditional consistent-hashing implementation. Alternatively, you can use 87 | ``fnv1a_ch``, which supports the Fowler-Noll-Vo hash function (FNV-1a) hash 88 | implementation offered by the carbon-c-relay project. 89 | Default: ``carbon_ch`` 90 | 91 | *carbon_prefix* 92 | Prefix for carbon's internal metrics. When querying metrics starting with 93 | this prefix, requests are made to all carbon-cache instances instead of 94 | one instance selected by the key function. Default: ``carbon``. 95 | 96 | *replication_factor* 97 | The replication factor of your carbon setup. Default: ``1``. 98 | 99 | Example: 100 | 101 | .. code-block:: yaml 102 | 103 | carbon: 104 | hosts: 105 | - 127.0.0.1:7002 106 | timeout: 1 107 | retry_delay: 15 108 | carbon_prefix: carbon 109 | replication_factor: 1 110 | 111 | *sentry_dsn* 112 | 113 | This is useful if you want to send Graphite-API's exceptions to a `Sentry`_ 114 | instance for easier debugging. 115 | 116 | Example:: 117 | 118 | sentry_dsn: https://key:secret@app.getsentry.com/12345 119 | 120 | .. note:: 121 | 122 | Sentry integration requires Graphite-API to be installed with the 123 | corresponding extra dependency:: 124 | 125 | $ pip install graphite-api[sentry] 126 | 127 | .. _Sentry: https://docs.getsentry.com 128 | 129 | *allowed_origins* 130 | 131 | Allows you to do cross-domain (CORS) requests to the Graphite API. 
Say you 132 | have a dashboard at ``dashboard.example.com`` that makes AJAX requests to 133 | ``graphite.example.com``, just set the value accordingly:: 134 | 135 | allowed_origins: 136 | - dashboard.example.com 137 | 138 | You can specify as many origins as you want. A wildcard can be used to allow 139 | all origins:: 140 | 141 | allowed_origins: 142 | - * 143 | 144 | *cache* 145 | 146 | Lets you configure a cache for graph rendering. This is done via 147 | `Flask-Cache `_ which supports a 148 | number of backends including memcache, Redis, filesystem or in-memory 149 | caching. 150 | 151 | Cache configuration maps directly to Flask-Cache's config values. For each 152 | ``CACHE_*`` config value, set the lowercased name in the ``cache`` section, 153 | without the prefix. Example:: 154 | 155 | cache: 156 | type: redis 157 | redis_host: localhost 158 | 159 | This would configure Flask-Cache with ``CACHE_TYPE = 'redis'`` and 160 | ``CACHE_REDIS_HOST = 'localhost'``. 161 | 162 | Some cache options have default values defined by Graphite-API: 163 | 164 | * ``default_timeout``: 60 165 | 166 | * ``key_prefix``: ``'graphite-api:``. 167 | 168 | .. note:: 169 | 170 | Caching functionality requires you to install the cache extra dependency 171 | but also the underlying driver. E.g. for redis, you'll need:: 172 | 173 | $ pip install graphite-api[cache] redis 174 | 175 | *statsd* 176 | 177 | Attaches a statsd object to the application, which can be used for 178 | instrumentation. Currently Graphite-API itself doesn't use this, 179 | but some backends do, like `Graphite-Influxdb`_. 180 | 181 | Example:: 182 | 183 | statsd: 184 | host: 'statsd_host' 185 | port: 8125 # not needed if default 186 | 187 | .. note:: 188 | 189 | This requires the statsd module:: 190 | 191 | $ pip install statsd 192 | 193 | .. _Graphite-Influxdb: https://github.com/vimeo/graphite-influxdb 194 | 195 | *render_errors* 196 | 197 | If ``True`` (default), full tracebacks are returned in the HTTP 198 | response in case of application errors. 199 | 200 | Custom location 201 | --------------- 202 | 203 | If you need the Graphite-API config file to be stored in another place than 204 | ``/etc/graphite-api.yaml``, you can set a custom location using the 205 | ``GRAPHITE_API_CONFIG`` environment variable:: 206 | 207 | export GRAPHITE_API_CONFIG=/var/lib/graphite/config.yaml 208 | -------------------------------------------------------------------------------- /docs/custom-functions.rst: -------------------------------------------------------------------------------- 1 | Custom functions 2 | ================ 3 | 4 | Just like with storage finders, it is possible to extend Graphite-API to add 5 | custom processing functions. 6 | 7 | To give an example, let's implement a function that reverses the time series, 8 | placing old values at the end and recent values at the beginning. 9 | 10 | .. code-block:: python 11 | 12 | # reverse.py 13 | 14 | def reverseSeries(requestContex, seriesList): 15 | reverse = [] 16 | for series in seriesList: 17 | reverse.append(TimeSeries(series.name, series.start, series.end, 18 | series.step, series[::-1])) 19 | return reverse 20 | 21 | The first argument, ``requestContext``, holds some information about the 22 | request parameters. ``seriesList`` is the list of paths found for the request 23 | target. 24 | 25 | Once you've created your function, declare it in a dictionnary: 26 | 27 | .. 
code-block:: python 28 | 29 | ReverseFunctions = { 30 | 'reverseSeries': reverseSeries, 31 | } 32 | 33 | Add your module to the Graphite-API Python path and add it to the 34 | configuration: 35 | 36 | .. code-block:: yaml 37 | 38 | functions: 39 | - graphite_api.functions.SeriesFunctions 40 | - graphite_api.functions.PieFunctions 41 | - reverse.ReverseFunctions 42 | -------------------------------------------------------------------------------- /docs/deployment.rst: -------------------------------------------------------------------------------- 1 | Deployment 2 | ========== 3 | 4 | There are several options available, depending on your setup. 5 | 6 | Gunicorn + nginx 7 | ---------------- 8 | 9 | First, you need to install Gunicorn. The easiest way is to use ``pip``:: 10 | 11 | $ pip install gunicorn 12 | 13 | If you have installed Graphite-API in a virtualenv, install Gunicorn in the 14 | same virtualenv:: 15 | 16 | $ /usr/share/python/graphite/bin/pip install gunicorn 17 | 18 | Next, create the script that will run Graphite-API using your process watcher 19 | of choice. 20 | 21 | *Upstart* 22 | 23 | :: 24 | 25 | description "Graphite-API server" 26 | start on runlevel [2345] 27 | stop on runlevel [!2345] 28 | 29 | respawn 30 | 31 | exec gunicorn -w2 graphite_api.app:app -b 127.0.0.1:8888 32 | 33 | *Supervisor* 34 | 35 | :: 36 | 37 | [program:graphite-api] 38 | command = gunicorn -w2 graphite_api.app:app -b 127.0.0.1:8888 39 | autostart = true 40 | autorestart = true 41 | 42 | *systemd* 43 | 44 | :: 45 | 46 | # This is /etc/systemd/system/graphite-api.socket 47 | [Unit] 48 | Description=graphite-api socket 49 | 50 | [Socket] 51 | ListenStream=/run/graphite-api.sock 52 | ListenStream=127.0.0.1:8888 53 | 54 | [Install] 55 | WantedBy=sockets.target 56 | 57 | :: 58 | 59 | # This is /etc/systemd/system/graphite-api.service 60 | [Unit] 61 | Description=Graphite-API service 62 | Requires=graphite-api.socket 63 | 64 | [Service] 65 | ExecStart=/usr/bin/gunicorn -w2 graphite_api.app:app 66 | Restart=on-failure 67 | #User=graphite 68 | #Group=graphite 69 | ExecReload=/bin/kill -s HUP $MAINPID 70 | ExecStop=/bin/kill -s TERM $MAINPID 71 | PrivateTmp=true 72 | 73 | [Install] 74 | WantedBy=multi-user.target 75 | 76 | .. note:: 77 | 78 | If you have installed Graphite-API and Gunicorn in a virtualenv, you 79 | need to use the full path to Gunicorn. Instead of ``gunicorn``, use 80 | ``/usr/share/python/graphite/bin/gunicorn`` (assuming your virtualenv is 81 | at ``/usr/share/python/graphite``). 82 | 83 | See the `Gunicorn docs`_ for configuration options and command-line flags. 84 | 85 | .. _Gunicorn docs: http://docs.gunicorn.org/en/latest/ 86 | 87 | Finally, configure the nginx vhost: 88 | 89 | .. code-block:: nginx 90 | 91 | # /etc/nginx/sites-available/graphite.conf 92 | 93 | upstream graphite { 94 | server 127.0.0.1:8888 fail_timeout=0; 95 | } 96 | 97 | server { 98 | server_name graph; 99 | listen 80 default; 100 | root /srv/www/graphite; 101 | 102 | location / { 103 | try_files $uri @graphite; 104 | } 105 | 106 | location @graphite { 107 | proxy_pass http://graphite; 108 | } 109 | } 110 | 111 | Enable the vhost and restart nginx:: 112 | 113 | $ ln -s /etc/nginx/sites-available/graphite.conf /etc/nginx/sites-enabled 114 | $ service nginx restart 115 | 116 | Apache + mod_wsgi 117 | ----------------- 118 | 119 | First, you need to install mod_wsgi. 120 | 121 | See the `mod_wsgi InstallationInstructions`_ for installation instructions. 122 | 123 | .. 
_mod_wsgi InstallationInstructions: https://code.google.com/p/modwsgi/wiki/InstallationInstructions 124 | 125 | Then create the graphite-api.wsgi: 126 | 127 | .. code-block:: bash 128 | 129 | # /var/www/wsgi-scripts/graphite-api.wsgi 130 | 131 | from graphite_api.app import app as application 132 | 133 | Finally, configure the apache vhost: 134 | 135 | .. code-block:: apache 136 | 137 | # /etc/httpd/conf.d/graphite.conf 138 | 139 | LoadModule wsgi_module modules/mod_wsgi.so 140 | 141 | WSGISocketPrefix /var/run/wsgi 142 | 143 | Listen 8013 144 | 145 | 146 | WSGIDaemonProcess graphite-api processes=5 threads=5 display-name='%{GROUP}' inactivity-timeout=120 147 | WSGIProcessGroup graphite-api 148 | WSGIApplicationGroup %{GLOBAL} 149 | WSGIImportScript /var/www/wsgi-scripts/graphite-api.wsgi process-group=graphite-api application-group=%{GLOBAL} 150 | 151 | WSGIScriptAlias / /var/www/wsgi-scripts/graphite-api.wsgi 152 | 153 | 154 | Order deny,allow 155 | Allow from all 156 | 157 | 158 | 159 | Adapt the mod_wsgi configuration to your requirements. 160 | 161 | See the `mod_wsgi QuickConfigurationGuide`_ for an overview of configurations and `mod_wsgi ConfigurationDirectives`_ to see all configuration directives 162 | 163 | .. _mod_wsgi QuickConfigurationGuide: https://code.google.com/p/modwsgi/wiki/QuickConfigurationGuide 164 | 165 | .. _mod_wsgi ConfigurationDirectives: https://code.google.com/p/modwsgi/wiki/ConfigurationDirectives 166 | 167 | Restart apache:: 168 | 169 | $ service httpd restart 170 | 171 | Docker 172 | ------ 173 | 174 | Create a ``graphite-api.yaml`` configuration file with your desired config. 175 | 176 | Create a ``Dockerfile``:: 177 | 178 | FROM brutasse/graphite-api 179 | 180 | Build your container:: 181 | 182 | docker build -t graphite-api . 183 | 184 | Run it:: 185 | 186 | docker run -t -i -p 8888:8888 graphite-api 187 | 188 | ``/srv/graphite`` is a docker ``VOLUME``. You can use that to provide whisper 189 | data from the host (or from another docker container) to the graphite-api 190 | container:: 191 | 192 | docker run -t -i -v /path/to/graphite:/srv/graphite -p 8888:8888 graphite-api 193 | 194 | This container has all the :ref:`extra packages ` included. Cyanite 195 | backend and Sentry integration are available. 196 | 197 | Nginx + uWSGI 198 | ------------- 199 | 200 | First, you need to install uWSGI with Python support. On Debian, install ``uwsgi-plugin-python``. 201 | 202 | Then create the uWSGI file for Graphite-API in 203 | ``/etc/uwsgi/apps-available/graphite-api.ini``: 204 | 205 | .. code-block:: ini 206 | 207 | [uwsgi] 208 | processes = 2 209 | socket = localhost:8080 210 | plugins = python27 211 | module = graphite_api.app:app 212 | 213 | If you installed Graphite-API in a virtualenv, specify the virtualenv path: 214 | 215 | .. code-block:: ini 216 | 217 | home = /var/www/wsgi-scripts/env 218 | 219 | If you need a custom location for Graphite-API's config file, set the 220 | environment variable like this: 221 | 222 | .. code-block:: ini 223 | 224 | env = GRAPHITE_API_CONFIG=/var/www/wsgi-scripts/config.yml 225 | 226 | Enable ``graphite-api.ini`` and restart uWSGI: 227 | 228 | .. code-block:: bash 229 | 230 | $ ln -s /etc/uwsgi/apps-available/graphite-api.ini /etc/uwsgi/apps-enabled 231 | $ service uwsgi restart 232 | 233 | Finally, configure the nginx vhost: 234 | 235 | .. 
code-block:: nginx 236 | 237 | # /etc/nginx/sites-available/graphite.conf 238 | 239 | server { 240 | listen 80; 241 | 242 | location / { 243 | include uwsgi_params; 244 | uwsgi_pass localhost:8080; 245 | } 246 | } 247 | 248 | Enable the vhost and restart nginx: 249 | 250 | .. code-block:: bash 251 | 252 | $ ln -s /etc/nginx/sites-available/graphite.conf /etc/nginx/sites-enabled 253 | $ service nginx restart 254 | 255 | Other deployment methods 256 | ------------------------ 257 | 258 | They currently aren't described here but there are several other ways to serve 259 | Graphite-API: 260 | 261 | * nginx + circus + chaussette 262 | 263 | If you feel like contributing some documentation, feel free to open pull a 264 | request on the `Graphite-API repository`_. 265 | 266 | .. _Graphite-API repository: https://github.com/brutasse/graphite-api 267 | -------------------------------------------------------------------------------- /docs/finders.rst: -------------------------------------------------------------------------------- 1 | Storage finders 2 | --------------- 3 | 4 | Graphite-API searches and fetches metrics from time series databases using an 5 | interface called *finders*. The default finder provided with Graphite-API is 6 | the one that integrates with Whisper databases. 7 | 8 | Customizing finders can be done in the ``finders`` section of the Graphite-API 9 | configuration file: 10 | 11 | .. code-block:: yaml 12 | 13 | finders: 14 | - graphite_api.finders.whisper.WhisperFinder 15 | 16 | Several values are allowed, to let you store different kinds of metrics at 17 | different places or smoothly handle transitions from one time series database 18 | to another. 19 | 20 | The default finder reads data from a Whisper database. 21 | 22 | Custom finders 23 | ^^^^^^^^^^^^^^ 24 | 25 | ``finders`` being a list of arbitrary python paths, it is relatively easy to 26 | write a custom finder if you want to read data from other places than Whisper. 27 | A finder is a python class with a ``find_nodes()`` method: 28 | 29 | .. code-block:: python 30 | 31 | class CustomFinder(object): 32 | def find_nodes(self, query): 33 | # ... 34 | 35 | ``query`` is a ``FindQuery`` object. ``find_nodes()`` is the entry point when 36 | browsing the metrics tree. It must yield leaf or branch nodes matching the 37 | query: 38 | 39 | .. code-block:: python 40 | 41 | from graphite_api.node import LeafNode, BranchNode 42 | 43 | class CustomFinder(object): 44 | def find_nodes(self, query): 45 | # find some paths matching the query, then yield them 46 | # is_branch or is_leaf are predicates you need to implement 47 | for path in matches: 48 | if is_branch(path): 49 | yield BranchNode(path) 50 | if is_leaf(path): 51 | yield LeafNode(path, CustomReader(path)) 52 | 53 | 54 | ``LeafNode`` is created with a *reader*, which is the class responsible for 55 | fetching the datapoints for the given path. It is a simple class with 2 56 | methods: ``fetch()`` and ``get_intervals()``: 57 | 58 | .. 
code-block:: python 59 | 60 | from graphite_api.intervals import IntervalSet, Interval 61 | 62 | class CustomReader(object): 63 | __slots__ = ('path',) # __slots__ is recommended to save memory on readers 64 | 65 | def __init__(self, path): 66 | self.path = path 67 | 68 | def fetch(self, start_time, end_time): 69 | # fetch data 70 | time_info = _from_, _to_, _step_ 71 | return time_info, series 72 | 73 | def get_intervals(self): 74 | return IntervalSet([Interval(start, end)]) 75 | 76 | ``fetch()`` must return a list of 2 elements: the time info for the data and 77 | the datapoints themselves. The time info is a list of 3 items: the start time 78 | of the datapoints (in unix time), the end time and the time step (in seconds) 79 | between the datapoints. 80 | 81 | The datapoints is a list of points found in the database for the required 82 | interval. There must be ``(end - start) / step`` points in the dataset even if 83 | the database has gaps: gaps can be filled with ``None`` values. 84 | 85 | ``get_intervals()`` is a method that hints graphite-web about the time range 86 | available for this given metric in the database. It must return an 87 | ``IntervalSet`` of one or more ``Interval`` objects. 88 | 89 | Fetching multiple paths at once 90 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 91 | 92 | If your storage backend allows it, fetching multiple paths at once is useful 93 | to avoid sequential fetches and save time and resources. This can be achieved 94 | in three steps: 95 | 96 | * Subclass ``LeafNode`` and add a ``__fetch_multi__`` class attribute to your 97 | subclass:: 98 | 99 | class CustomLeafNode(LeafNode): 100 | __fetch_multi__ = 'custom' 101 | 102 | The string ``'custom'`` is used to identify backends and needs to be unique 103 | per-backend. 104 | 105 | * Add the ``__fetch_multi__`` attribute to your finder class:: 106 | 107 | class CustomFinder(objects): 108 | __fetch_multi__ = 'custom' 109 | 110 | * Implement a ``fetch_multi()`` method on your finder:: 111 | 112 | class CustomFinder(objects): 113 | def fetch_multi(self, nodes, start_time, end_time): 114 | paths = [node.path for node in nodes] 115 | # fetch paths 116 | return time_info, series 117 | 118 | ``time_info`` is the same structure as the one returned by ``fetch()``. 119 | ``series`` is a dictionnary with paths as keys and datapoints as values. 120 | 121 | Installing custom finders 122 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 123 | 124 | In order for your custom finder to be importable, you need to package it under 125 | a namespace of your choice. Python packaging won't be covered here but you can 126 | look at third-party finders to get some inspiration: 127 | 128 | * `Cyanite finder `_ 129 | 130 | Configuration 131 | ^^^^^^^^^^^^^ 132 | 133 | Graphite-API instantiates finders and passes it its whole parsed configuration 134 | file, as a Python data structure. External finders can require extra sections 135 | in the configuration file to setup access to the time series database they 136 | communicate with. For instance, let's say your ``CustomFinder`` needs two 137 | configuration parameters, a host and a user: 138 | 139 | .. code-block:: python 140 | 141 | class CustomFinder(object): 142 | def __init__(self, config): 143 | config.setdefault('custom', {}) 144 | self.user = config['custom'].get('user', 'default') 145 | self.host = config['custom'].get('host', 'localhost') 146 | 147 | The configuration file would look like: 148 | 149 | .. 
code-block:: yaml 150 | 151 | finders: 152 | - custom.CustomFinder 153 | custom: 154 | user: myuser 155 | host: example.com 156 | 157 | When possible, try to use sane defaults that would "just work" for most common 158 | setups. Here if the ``custom`` section isn't provided, the finder uses 159 | ``default`` as user and ``localhost`` as host. 160 | -------------------------------------------------------------------------------- /docs/functions.rst: -------------------------------------------------------------------------------- 1 | Built-in functions 2 | ================== 3 | 4 | Functions are used to transform, combine, and perform computations on series 5 | data. They are applied by manipulating the ``target`` parameters in the 6 | :ref:`Render API `. 7 | 8 | Usage 9 | ----- 10 | 11 | Most functions are applied to one series list. Functions with the 12 | parameter ``*seriesLists`` can take an arbitrary number of series lists. 13 | To pass multiple series lists to a function which only takes one, use the 14 | :py:func:`group` function. 15 | 16 | .. _list-of-functions: 17 | 18 | List of functions 19 | ----------------- 20 | 21 | .. automodule:: graphite_api.functions 22 | :members: 23 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Graphite-API documentation 2 | ========================== 3 | 4 | .. raw:: html 5 | 6 |

7 | <!-- badge: GitHub project -->
8 | 
9 | <!-- badge: Build Status -->
10 | 
11 | <!-- badge: PyPI version -->
12 | 

13 | 14 | Graphite-API is an alternative to Graphite-web, without any built-in 15 | dashboard. Its role is solely to fetch metrics from a time-series database 16 | (whisper, cyanite, etc.) and rendering graphs or JSON data out of these 17 | time series. It is meant to be consumed by any of the numerous Graphite 18 | dashboard applications. 19 | 20 | Graphite-API is a fork of Graphite-web and couldn't have existed without the 21 | fantastic prior work done by the Graphite team. 22 | 23 | Why should I use it? 24 | -------------------- 25 | 26 | Graphite-API offers a number of improvements over Graphite-web that you might 27 | find useful. Namely: 28 | 29 | * The Graphite-API application is completely stateless and doesn't need a SQL 30 | database. It only needs to talk to a time series database. 31 | 32 | * Python 2 and 3 are both supported. 33 | 34 | * The HTTP API accepts JSON data additionnaly to form data and querystring 35 | parameters. 36 | 37 | * The application is extremely simple to :doc:`install ` and 38 | :doc:`configure `. 39 | 40 | * The architecture has been drastically simplified and there are many fewer 41 | moving parts than in graphite-web: 42 | 43 | * No memcache integration -- rendering is live. 44 | 45 | * No support for the Pickle format when rendering. 46 | 47 | * Plugin architecture for :doc:`integrating with time series databases 48 | ` or :doc:`adding more analysis functions `. 49 | 50 | * The codebase has been thoroughly updated with a focus on test coverage and 51 | code quality. 52 | 53 | .. note:: 54 | 55 | Graphite-API does **not** provide any web/graphical interface. If you 56 | currently rely on the built-in Graphite composer, Graphite-API might not 57 | be for you. However, if you're using a third-party dashboard interface, 58 | Graphite-API will do just fine. 59 | 60 | Contents 61 | -------- 62 | 63 | .. toctree:: 64 | :maxdepth: 2 65 | 66 | installation 67 | configuration 68 | deployment 69 | api 70 | functions 71 | finders 72 | custom-functions 73 | releases 74 | 75 | Indices and tables 76 | ================== 77 | 78 | * :ref:`genindex` 79 | * :ref:`modindex` 80 | * :ref:`search` 81 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Debian / Ubuntu: native package 6 | =============================== 7 | 8 | If you run Debian 8 or Ubuntu 14.04 LTS, you can use one of the available 9 | packages which provides a self-contained build of graphite-api. Builds are 10 | available on the `releases`_ page. 11 | 12 | .. _releases: https://github.com/brutasse/graphite-api/releases 13 | 14 | Once installed, Graphite-api should be running as a service and available on 15 | port 8888. The package contains all the :ref:`optional dependencies `. 16 | 17 | Python package 18 | ============== 19 | 20 | Prerequisites 21 | ------------- 22 | 23 | Installing Graphite-API requires: 24 | 25 | * Python 2 (2.6 and above) or 3 (3.3 and above), with development files. On 26 | debian/ubuntu, you'll want to install ``python-dev``. 27 | 28 | * ``gcc``. On debian/ubuntu, install ``build-essential``. 29 | 30 | * Cairo, including development files. On debian/ubuntu, install the 31 | ``libcairo2-dev`` package. 32 | 33 | * ``libffi`` with development files, ``libffi-dev`` on debian/ubuntu. 34 | 35 | * Pip, the Python package manager. On debian/ubuntu, install ``python-pip``. 
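
On debian/ubuntu, everything above can be installed in one go. A minimal
sketch, using only the package names listed in the prerequisites::

    $ sudo apt-get install build-essential python-dev libffi-dev \
          libcairo2-dev python-pip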
36 | 37 | Global installation 38 | ------------------- 39 | 40 | To install Graphite-API globally on your system, run as root:: 41 | 42 | $ pip install graphite-api 43 | 44 | Isolated installation (virtualenv) 45 | ---------------------------------- 46 | 47 | If you want to isolate Graphite-API from the system-wide python environment, 48 | you can install it in a virtualenv. 49 | 50 | :: 51 | 52 | $ virtualenv /usr/share/python/graphite 53 | $ /usr/share/python/graphite/bin/pip install graphite-api 54 | 55 | .. _extras: 56 | 57 | Extra dependencies 58 | ------------------ 59 | 60 | When you install ``graphite-api``, all the dependencies for running a Graphite 61 | server that uses Whisper as a storage backend are installed. You can specify 62 | extra dependencies: 63 | 64 | * For `Sentry`_ integration: ``pip install graphite-api[sentry]``. 65 | 66 | * For `Cyanite`_ integration: ``pip install graphite-api[cyanite]``. 67 | 68 | * For Cache support: ``pip install graphite-api[cache]``. You'll also need the 69 | driver for the type of caching you want to use (Redis, Memcache, etc.). See 70 | the `Flask-Cache docs`_ for supported cache types. 71 | 72 | 73 | .. _Sentry: https://docs.getsentry.com 74 | .. _Cyanite: https://github.com/brutasse/graphite-cyanite 75 | .. _Flask-Cache docs: http://pythonhosted.org/Flask-Cache/#configuring-flask-cache 76 | 77 | You can also combine several extra dependencies:: 78 | 79 | $ pip install graphite-api[sentry,cyanite] 80 | -------------------------------------------------------------------------------- /docs/releases.rst: -------------------------------------------------------------------------------- 1 | Graphite-API releases 2 | ===================== 3 | 4 | 1.1.3 -- 2016-05-23 5 | ------------------- 6 | 7 | * Remove extra parenthesis from ``aliasByMetric()``. 8 | * Fix leap year handling in ``graphite_api.render.attime``. 9 | * Allow colon and hash in node names in ``aliasByNode()`` 10 | * Fix calling ``reduceFunction`` in ``reduceSeries`` 11 | * Revert a whisper patch which broke multiple retentions handling. 12 | * Specify which function is invalid when providing an invalid consolidation 13 | function. 14 | 15 | 1.1.2 -- 2015-11-19 16 | ------------------- 17 | 18 | * Fix regression in multi fetch handling: paths were queried multiple times, 19 | leading to erroneous behaviour and slowdown. 20 | * Continue on IndexError in ``remove{Above,Below}Percentile`` functions. 21 | 22 | 1.1.1 -- 2015-10-23 23 | ------------------- 24 | 25 | * Fix ``areaMode=stacked``. 26 | 27 | * Fix error when calling functions that use ``fetchWithBootstrap`` and the 28 | bootstrap range isn't available (fill with nulls instead). 29 | 30 | 1.1 -- 2015-10-05 31 | ----------------- 32 | 33 | * Add CarbonLink support. 34 | 35 | * Add support for configuring a cache backend and the ``noCache`` and 36 | ``cacheTimeout`` API options. 37 | 38 | * When no timezone is provided in the configuration file, try to guess from 39 | the system's timezone with a fallback to UTC. 40 | 41 | * Now supporting Flask >= 0.8 and Pyparsing >= 1.5.7. 42 | 43 | * Add support for ``fetch_multi()`` in storage finders. This is useful for 44 | database-backed finders such as Cyanite because it allows fetching all time 45 | series at once instead of sequentially. 46 | 47 | * Add ``multiplySeriesWithWildcards``, ``minimumBelow``, ``changed``, 48 | ``timeSlice`` and ``removeEmptySeries`` functions. 49 | 50 | * Add optional ``step`` argument to ``time``, ``sin`` and ``randomWalk`` 51 | functions. 
52 | 
53 | * Add ``/metrics`` API call as an alias to ``/metrics/find``.
54 | 
55 | * Add missing ``/metrics/index.json`` API call.
56 | 
57 | * Allow wildcard origins (``*``) in CORS configuration.
58 | 
59 | * Whisper finder now logs debug information.
60 | 
61 | * Fix parsing dates such as "feb27" when the current day of month is > 28.
62 | 
63 | * Change ``sum()`` to return ``null`` instead of 0 when all series' datapoints
64 |   are null at the same time. This matches graphite-web's behavior.
65 | 
66 | * Extract paths of all targets before fetching data. This is a significant
67 |   optimization for storage backends such as Cyanite that allow bulk-fetching
68 |   metrics.
69 | 
70 | * Add JSONP support to all API endpoints that can return JSON.
71 | 
72 | * Fix 500 error when generating an SVG graph without any data.
73 | 
74 | * Return tracebacks in the HTTP response when app errors occur. This behavior
75 |   can be disabled in the configuration.
76 | 
77 | * Fixes for the following graphite-web issues:
78 | 
79 |   * `#639 <https://github.com/graphite-project/graphite-web/issues/639>`_ --
80 |     proper timezone handling of ``from`` and ``until`` with client-supplied
81 |     timezones.
82 |   * `#540 <https://github.com/graphite-project/graphite-web/issues/540>`_ --
83 |     provide the last data point when rendering to JSON format.
84 |   * `#381 <https://github.com/graphite-project/graphite-web/issues/381>`_ --
85 |     make ``areaBetween()`` work either when passed 2 arguments or a single
86 |     wildcard series of length 2.
87 |   * `#702 <https://github.com/graphite-project/graphite-web/issues/702>`_ --
88 |     handle backslash as path separator on windows.
89 |   * `#410 <https://github.com/graphite-project/graphite-web/issues/410>`_ -- SVG
90 |     output sometimes had an extra ``</g>`` tag.
91 | 
92 | 1.0.1 -- 2014-03-21
93 | -------------------
94 | 
95 | * ``time_zone`` set to UTC by default instead of Europe/Berlin.
96 | * Properly log app exceptions.
97 | * Fix ``constantLine`` for Python 3.
98 | * Create whisper directories if they don't exist.
99 | * Fixes for the following graphite-web issues:
100 | 
101 |   * `#645 <https://github.com/graphite-project/graphite-web/issues/645>`_, `#625
102 |     <https://github.com/graphite-project/graphite-web/issues/625>`_ -- allow
103 |     ``constantLine`` to work even if there are no other targets in the graph.
104 | 
105 | 1.0.0 -- 2014-03-20
106 | -------------------
107 | 
108 | Version 1.0 is based on the master branch of Graphite-web as of mid-March
109 | 2014, with the following modifications:
110 | 
111 | * New ``/index`` API endpoint for re-building the index (replaces the
112 |   build-index command-line script from graphite-web).
113 | 
114 | * Removal of memcache integration.
115 | 
116 | * Removal of Pickle integration.
117 | 
118 | * Removal of remote rendering.
119 | 
120 | * Support for Python 3.
121 | 
122 | * A lot more tests and test coverage.
123 | 
124 | * Fixes for the following graphite-web issues:
125 | 
126 |   * (meta) `#647 <https://github.com/graphite-project/graphite-web/issues/647>`_
127 |     -- strip out the API from graphite-web.
128 |   * `#665 <https://github.com/graphite-project/graphite-web/issues/665>`_ --
129 |     address some DeprecationWarnings.
130 |   * `#658 <https://github.com/graphite-project/graphite-web/issues/658>`_ --
131 |     accept a float value in ``maxDataPoints``.
132 |   * `#654 <https://github.com/graphite-project/graphite-web/issues/654>`_ --
133 |     ignore invalid ``logBase`` values (<=1).
134 |   * `#591 <https://github.com/graphite-project/graphite-web/issues/591>`_ --
135 |     accept JSON data in addition to querystring params or form data.
--------------------------------------------------------------------------------
/fpm/README.md:
--------------------------------------------------------------------------------
1 | FPM
2 | ===
3 | 
4 | The files in this folder allow you to build a package for graphite-api with
5 | [fpm](https://github.com/jordansissel/fpm). The resulting package is
6 | completely isolated from your system-wide python packages. It's a packaged
7 | virtual environment located at `/usr/share/python/graphite` that provides:
8 | 
9 | * Graphite-API with all its requirements and optional dependencies
10 | * Gunicorn
11 | * Config files (`/etc/graphite-api.yaml`, `/etc/default/graphite-api`)
12 | * Creation of a `graphite` user on installation
13 | * An init script in `/etc/init.d`
14 | 
15 | Prerequisites (to build a package)
16 | ----------------------------------
17 | 
18 | * FPM (`sudo gem install fpm`)
19 | * `sudo apt-get install python-dev libffi-dev python-virtualenv`
20 | * virtualenv-tools (either build it with fpm and install it or `sudo pip
21 |   install virtualenv-tools`).
22 | 
23 | Building a package
24 | ------------------
25 | 
26 | ```
27 | ./build-deb.sh
28 | ```
29 | 
30 | Deb packages are generated with fresh Debian and Ubuntu cloud instances and
31 | the following cloud-init configuration:
32 | 
33 | ```
34 | #cloud-config
35 | runcmd:
36 |   - "wget https://raw.githubusercontent.com/brutasse/graphite-api/master/fpm/cloud-init.sh"
37 |   - "sh cloud-init.sh"
38 | ```
39 | 
--------------------------------------------------------------------------------
/fpm/build-deb.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 | set -xe
3 | 
4 | export VERSION=1.1.3
5 | 
6 | apt-get -y install build-essential python-dev python-virtualenv libffi-dev
7 | 
8 | rm -rf build
9 | 
10 | mkdir -p build/usr/share/python
11 | virtualenv build/usr/share/python/graphite
12 | build/usr/share/python/graphite/bin/pip install -U pip distribute
13 | build/usr/share/python/graphite/bin/pip uninstall -y distribute
14 | 
15 | build/usr/share/python/graphite/bin/pip install graphite-api[sentry,cyanite] gunicorn==18.0 virtualenv-tools
16 | 
17 | find build ! -perm -a+r -exec chmod a+r {} \;
18 | 
19 | cd build/usr/share/python/graphite
20 | sed -i "s/'\/bin\/python'/\('\/bin\/python','\/bin\/python2'\)/g" lib/python2.7/site-packages/virtualenv_tools.py
21 | ./bin/virtualenv-tools --update-path /usr/share/python/graphite
22 | cd -
23 | 
24 | find build -iname '*.pyc' -exec rm {} \;
25 | find build -iname '*.pyo' -exec rm {} \;
26 | 
27 | cp -a conf/etc build
28 | 
29 | fpm \
30 |     -t deb -s dir -C build -n graphite-api -v $VERSION \
31 |     --iteration `date +%s` \
32 |     --deb-default conf/etc/default/graphite-api \
33 |     --deb-init conf/etc/init.d/graphite-api \
34 |     --deb-user root \
35 |     --deb-group root \
36 |     --config-files /etc/graphite-api.yaml \
37 |     --config-files /etc/init.d/graphite-api \
38 |     --config-files /etc/default/graphite-api \
39 |     -d libcairo2 \
40 |     -d "libffi5 | libffi6" \
41 |     -d adduser \
42 |     -d python \
43 |     --after-install conf/post-install \
44 |     --before-remove conf/pre-remove \
45 |     --after-remove conf/post-remove \
46 |     --url https://github.com/brutasse/graphite-api \
47 |     --description 'Graphite-web, without the interface. Just the rendering HTTP API.' \
48 |     --license 'Apache 2.0' \
49 |     .
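# Smoke test for the freshly-built package (illustrative only -- the exact
# filename depends on $VERSION and the --iteration timestamp):
#
#   dpkg -i graphite-api_*.deb
#   service graphite-api status
#   curl "http://localhost:8888/metrics/find?query=*"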
50 | -------------------------------------------------------------------------------- /fpm/cloud-init.sh: -------------------------------------------------------------------------------- 1 | # Run with cloud-init: 2 | # 3 | # #cloud-config 4 | # runcmd: 5 | # - "wget https://raw.githubusercontent.com/brutasse/graphite-api/master/fpm/cloud-init.sh" 6 | # - "sh cloud-init.sh" 7 | 8 | set -e 9 | export DEBIAN_FRONTEND=noninteractive 10 | apt-get update 11 | apt-get -y upgrade 12 | apt-get -y install git ruby ruby-dev build-essential 13 | gem install fpm --no-rdoc --no-ri 14 | export PATH=$PATH:/usr/local/bin 15 | git clone https://github.com/brutasse/graphite-api.git 16 | cd graphite-api/fpm 17 | ./build-deb.sh 18 | -------------------------------------------------------------------------------- /fpm/conf/etc/default/graphite-api: -------------------------------------------------------------------------------- 1 | GRAPHITE_API_CONFIG=/etc/graphite-api.yaml 2 | -------------------------------------------------------------------------------- /fpm/conf/etc/graphite-api.yaml: -------------------------------------------------------------------------------- 1 | search_index: /srv/graphite/index 2 | finders: 3 | - graphite_api.finders.whisper.WhisperFinder 4 | functions: 5 | - graphite_api.functions.SeriesFunctions 6 | - graphite_api.functions.PieFunctions 7 | whisper: 8 | directories: 9 | - /srv/graphite/whisper 10 | time_zone: UTC 11 | -------------------------------------------------------------------------------- /fpm/conf/etc/init.d/graphite-api: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ### BEGIN INIT INFO 3 | # Provides: graphite-api 4 | # Required-Start: $remote_fs $syslog 5 | # Required-Stop: $remote_fs $syslog 6 | # Default-Start: 2 3 4 5 7 | # Default-Stop: 0 1 6 8 | # Short-Description: Graphite-API server 9 | # Description: Stateless graphite API server 10 | ### END INIT INFO 11 | 12 | # Author: Bruno Renié 13 | 14 | # Do NOT "set -e" 15 | 16 | # PATH should only include /usr/* if it runs after the mountnfs.sh script 17 | PATH=/sbin:/usr/sbin:/bin:/usr/bin:/usr/local/bin 18 | DESC="Graphite-API" 19 | NAME=graphite-api 20 | DAEMON=/usr/share/python/graphite/bin/gunicorn 21 | DAEMON_ARGS="graphite_api.app:app -b 0.0.0.0:8888" 22 | DAEMON_USER=graphite 23 | PIDFILE=/var/run/$NAME.pid 24 | SCRIPTNAME=/etc/init.d/$NAME 25 | 26 | # Exit if the package is not installed 27 | [ -x "$DAEMON" ] || exit 0 28 | 29 | # Read configuration variable file if it is present 30 | [ -r /etc/default/$NAME ] && . /etc/default/$NAME 31 | 32 | # Load the VERBOSE setting and other rcS variables 33 | . /lib/init/vars.sh 34 | 35 | # Define LSB log_* functions. 36 | # Depend on lsb-base (>= 3.2-14) to ensure that this file is present 37 | # and status_of_proc is working. 38 | . 
/lib/lsb/init-functions 39 | 40 | # Function that starts the daemon/service 41 | do_start() 42 | { 43 | # Return 44 | # 0 if daemon has been started 45 | # 1 if daemon was already running 46 | # 2 if daemon could not be started 47 | pid=$( pidofproc -p $PIDFILE "$NAME") 48 | if [ -n "$pid" ] ; then 49 | log_daemon_msg "$DESC is already running (PID `cat ${PIDFILE}`)" 50 | return 1 51 | fi 52 | start-stop-daemon --start --quiet --chuid $DAEMON_USER --chdir / --make-pidfile --background --pidfile $PIDFILE --exec $DAEMON -- \ 53 | $DAEMON_ARGS \ 54 | || return 2 55 | # Add code here, if necessary, that waits for the process to be ready 56 | # to handle requests from services started subsequently which depend 57 | # on this one. As a last resort, sleep for some time. 58 | } 59 | 60 | # Function that stops the daemon/service 61 | do_stop() 62 | { 63 | # Return 64 | # 0 if daemon has been stopped 65 | # 1 if daemon was already stopped 66 | # 2 if daemon could not be stopped 67 | # other if a failure occurred 68 | start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --pidfile $PIDFILE 69 | RETVAL="$?" 70 | [ "$RETVAL" = 2 ] && return 2 71 | # Wait for children to finish too if this is a daemon that forks 72 | # and if the daemon is only ever run from this initscript. 73 | # If the above conditions are not satisfied then add some other code 74 | # that waits for the process to drop all resources that could be 75 | # needed by services started subsequently. A last resort is to 76 | # sleep for some time. 77 | start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --exec $DAEMON 78 | [ "$?" = 2 ] && return 2 79 | # Many daemons don't delete their pidfiles when they exit. 80 | rm -f $PIDFILE 81 | return "$RETVAL" 82 | } 83 | 84 | # Function that sends a SIGHUP to the daemon/service 85 | do_reload() { 86 | # 87 | # If the daemon can reload its configuration without 88 | # restarting (for example, when it is sent a SIGHUP), 89 | # then implement that here. 90 | # 91 | start-stop-daemon --stop --quiet --signal HUP --pidfile $PIDFILE 92 | return $? 93 | } 94 | 95 | case "$1" in 96 | start) 97 | [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME" 98 | do_start 99 | case "$?" in 100 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 101 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; 102 | esac 103 | ;; 104 | stop) 105 | [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME" 106 | do_stop 107 | case "$?" in 108 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 109 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; 110 | esac 111 | ;; 112 | status) 113 | status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $? 114 | ;; 115 | reload|force-reload) 116 | log_daemon_msg "Reloading $DESC" "$NAME" 117 | do_reload 118 | log_end_msg $? 119 | ;; 120 | restart) 121 | log_daemon_msg "Restarting $DESC" "$NAME" 122 | do_stop 123 | case "$?" in 124 | 0|1) 125 | do_start 126 | case "$?" in 127 | 0) log_end_msg 0 ;; 128 | 1) log_end_msg 1 ;; # Old process is still running 129 | *) log_end_msg 1 ;; # Failed to start 130 | esac 131 | ;; 132 | *) 133 | # Failed to stop 134 | log_end_msg 1 135 | ;; 136 | esac 137 | ;; 138 | *) 139 | echo "Usage: $SCRIPTNAME {start|stop|status|restart|reload|force-reload}" >&2 140 | exit 3 141 | ;; 142 | esac 143 | 144 | : 145 | -------------------------------------------------------------------------------- /fpm/conf/post-install: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env bash
2 | 
3 | set -e
4 | 
5 | pythonver=$(python --version 2>&1 | awk -F'.' '{print $1}')
6 | 
7 | # Create bytecode
8 | if [[ x$pythonver == 'xPython 2' ]]; then
9 |     python -m compileall /usr/share/python/graphite/lib > /dev/null || true
10 |     python -O -m compileall /usr/share/python/graphite/lib > /dev/null || true
11 | else
12 |     python -m compileall /usr/share/python/graphite/lib > /dev/null
13 |     python -O -m compileall /usr/share/python/graphite/lib > /dev/null
14 | fi
15 | 
16 | # Create user if it doesn't exist
17 | if ! id graphite > /dev/null 2>&1 ; then
18 |     adduser --system --home /usr/share/python/graphite --no-create-home \
19 |         --ingroup nogroup --disabled-password --shell /bin/false \
20 |         --gecos 'Graphite API' \
21 |         graphite
22 | fi
23 | 
24 | # Create path if it doesn't exist
25 | if ! test -d /srv/graphite ; then
26 |     mkdir -p /srv/graphite
27 |     chown graphite:nogroup /srv/graphite
28 |     chmod -R 750 /srv/graphite
29 | fi
30 | 
31 | # Register service
32 | if [ -x "/etc/init.d/graphite-api" ]; then
33 |     update-rc.d graphite-api start 50 2 3 4 5 . stop 50 0 1 6 . >/dev/null
34 |     invoke-rc.d graphite-api start || exit $?
35 | fi
36 | 
--------------------------------------------------------------------------------
/fpm/conf/post-remove:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | set -e
3 | if [ "$1" = "purge" ] ; then
4 |     update-rc.d graphite-api remove >/dev/null
5 | fi
6 | 
--------------------------------------------------------------------------------
/fpm/conf/pre-remove:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 | set -e
3 | 
4 | find /usr/share/python/graphite/lib -iname '*.pyc' -exec rm {} \;
5 | find /usr/share/python/graphite/lib -iname '*.pyo' -exec rm {} \;
6 | 
7 | if [ -x "/etc/init.d/graphite-api" ]; then
8 |     invoke-rc.d graphite-api stop || exit $?
9 | fi
10 | 
--------------------------------------------------------------------------------
/graphite_api/__init__.py:
--------------------------------------------------------------------------------
1 | DEBUG = False
2 | 
--------------------------------------------------------------------------------
/graphite_api/_vendor/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/brutasse/graphite-api/0886b7adcf985a1e8bcb084f6dd1dc166a3f3dff/graphite_api/_vendor/__init__.py
--------------------------------------------------------------------------------
/graphite_api/app.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import json
3 | import math
4 | import time
5 | from collections import defaultdict
6 | from datetime import datetime
7 | from io import BytesIO, StringIO
8 | 
9 | import pytz
10 | import six
11 | from flask import Flask
12 | from structlog import get_logger
13 | from werkzeug.http import http_date
14 | 
15 | from .config import configure
16 | from .encoders import JSONEncoder
17 | from .render.attime import parseATTime
18 | from .render.datalib import fetchData
19 | from .render.glyph import GraphTypes
20 | from .utils import hash_request, RequestParams
21 | 
22 | logger = get_logger()
23 | 
24 | 
25 | def jsonify(data, status=200, headers=None):
26 |     if headers is None:
27 |         headers = {}
28 | 
29 |     jsonp = RequestParams.get('jsonp', False)
30 | 
31 |     body = json.dumps(data, cls=JSONEncoder)
32 |     if jsonp:
33 |         headers['Content-Type'] = 'text/javascript'
34 |         body = '{0}({1})'.format(jsonp, body)
35 |     else:
36 |         headers['Content-Type'] = 'application/json'
37 |     return body, status, headers
38 | 
39 | 
40 | class Graphite(Flask):
41 |     @property
42 |     def store(self):
43 |         return self.config['GRAPHITE']['store']
44 | 
45 |     @property
46 |     def functions(self):
47 |         return self.config['GRAPHITE']['functions']
48 | 
49 |     @property
50 |     def logger(self):
51 |         # Flask has its own logger that doesn't get any handler if we use
52 |         # dictconfig(). Replace it with our structlog logger.
53 |         return logger
54 | 
55 | 
56 | app = Graphite(__name__)
57 | try:
58 |     configure(app)
59 | except Exception:
60 |     import traceback
61 |     print(traceback.format_exc())
62 |     raise
63 | 
64 | methods = ('GET', 'POST')
65 | 
66 | 
67 | # No-op routes, non-essential for creating dashboards
68 | @app.route('/dashboard/find', methods=methods)
69 | def dashboard_find():
70 |     return jsonify({'dashboards': []})
71 | 
72 | 
73 | @app.route('/dashboard/load/<name>', methods=methods)
74 | def dashboard_load(name):
75 |     return jsonify({'error': "Dashboard '{0}' does not exist.".format(name)},
76 |                    status=404)
77 | 
78 | 
79 | @app.route('/events/get_data', methods=methods)
80 | def events():
81 |     return json.dumps([]), 200, {'Content-Type': 'application/json'}
82 | 
83 | 
84 | # API calls that actually do something
85 | @app.route('/metrics', methods=methods)
86 | @app.route('/metrics/find', methods=methods)
87 | def metrics_find():
88 |     errors = {}
89 |     from_time = None
90 |     until_time = None
91 |     wildcards = False
92 | 
93 |     try:
94 |         wildcards = bool(int(RequestParams.get('wildcards', 0)))
95 |     except ValueError:
96 |         errors['wildcards'] = 'must be 0 or 1.'
97 | 
98 |     try:
99 |         from_time = int(RequestParams.get('from', -1))
100 |     except ValueError:
101 |         errors['from'] = 'must be an epoch timestamp.'
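    # 'from'/'until' are epoch timestamps; the -1 defaults are normalized to
    # None (meaning "no bound") a few lines below.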
102 |     try:
103 |         until_time = int(RequestParams.get('until', -1))
104 |     except ValueError:
105 |         errors['until'] = 'must be an epoch timestamp.'
106 | 
107 |     try:
108 |         node_position = int(RequestParams.get('position', -1))
109 |     except ValueError:
110 |         errors['position'] = 'must be an integer.'
111 | 
112 |     if from_time == -1:
113 |         from_time = None
114 |     if until_time == -1:
115 |         until_time = None
116 | 
117 |     format = RequestParams.get('format', 'treejson')
118 |     if format not in ['treejson', 'completer', 'nodelist', 'json']:
119 |         errors['format'] = 'unrecognized format: "{0}".'.format(format)
120 | 
121 |     if 'query' not in RequestParams:
122 |         errors['query'] = 'this parameter is required.'
123 | 
124 |     if errors:
125 |         return jsonify({'errors': errors}, status=400)
126 | 
127 |     query = RequestParams['query']
128 |     matches = sorted(
129 |         app.store.find(query, from_time, until_time),
130 |         key=lambda node: node.name
131 |     )
132 | 
133 |     base_path = query.rsplit('.', 1)[0] + '.' if '.' in query else ''
134 | 
135 |     if format == 'treejson':
136 |         data = tree_json(matches, base_path, wildcards=wildcards)
137 |         return (
138 |             json.dumps(data),
139 |             200,
140 |             {'Content-Type': 'application/json'}
141 |         )
142 |     elif format == 'nodelist':
143 |         found = set()
144 |         for metric in matches:
145 |             nodes = metric.path.split('.')
146 |             found.add(nodes[node_position])
147 |         return jsonify({'nodes': sorted(found)})
148 |     elif format == 'json':
149 |         content = json_nodes(matches)
150 |         return jsonify(content)
151 | 
152 |     results = []
153 |     for node in matches:
154 |         node_info = {
155 |             'path': node.path,
156 |             'name': node.name,
157 |             'is_leaf': int(node.is_leaf),  # XXX why was this cast to str?
158 |         }
159 |         if not node.is_leaf:
160 |             node_info['path'] += '.'
161 |         results.append(node_info)
162 | 
163 |     if len(results) > 1 and wildcards:
164 |         results.append({'name': '*'})
165 | 
166 |     return jsonify({'metrics': results})
167 | 
168 | 
169 | @app.route('/metrics/expand', methods=methods)
170 | def metrics_expand():
171 |     errors = {}
172 |     try:
173 |         group_by_expr = bool(int(RequestParams.get('groupByExpr', 0)))
174 |     except ValueError:
175 |         errors['groupByExpr'] = 'must be 0 or 1.'
176 |     try:
177 |         leaves_only = bool(int(RequestParams.get('leavesOnly', 0)))
178 |     except ValueError:
179 |         errors['leavesOnly'] = 'must be 0 or 1.'
180 | 
181 |     if 'query' not in RequestParams:
182 |         errors['query'] = 'this parameter is required.'
183 |     if errors:
184 |         return jsonify({'errors': errors}, status=400)
185 | 
186 |     results = defaultdict(set)
187 |     for query in RequestParams.getlist('query'):
188 |         for node in app.store.find(query):
189 |             if node.is_leaf or not leaves_only:
190 |                 results[query].add(node.path)
191 | 
192 |     if group_by_expr:
193 |         for query, matches in results.items():
194 |             results[query] = sorted(matches)
195 |     else:
196 |         new_results = set()
197 |         for value in results.values():
198 |             new_results = new_results.union(value)
199 |         results = sorted(new_results)
200 | 
201 |     return jsonify({'results': results})
202 | 
203 | 
204 | def recurse(query, index):
205 |     """
206 |     Recursively walk across paths, adding leaves to the index as they're found.
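    For example, recurse('*', index) walks the entire metric tree; this is
    how the /metrics/index.json endpoint builds its index (see metrics_index).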
207 |     """
208 |     for node in app.store.find(query):
209 |         if node.is_leaf:
210 |             index.add(node.path)
211 |         else:
212 |             recurse('{0}.*'.format(node.path), index)
213 | 
214 | 
215 | @app.route('/metrics/index.json', methods=methods)
216 | def metrics_index():
217 |     index = set()
218 |     recurse('*', index)
219 |     return jsonify(sorted(index))
220 | 
221 | 
222 | def prune_datapoints(series, max_datapoints, start, end):
223 |     time_range = end - start
224 |     points = time_range // series.step
225 |     if max_datapoints < points:
226 |         values_per_point = int(
227 |             math.ceil(float(points) / float(max_datapoints))
228 |         )
229 |         seconds_per_point = values_per_point * series.step
230 |         nudge = (
231 |             seconds_per_point +
232 |             (series.start % series.step) -
233 |             (series.start % seconds_per_point)
234 |         )
235 |         series.start += nudge
236 |         values_to_lose = nudge // series.step
237 |         del series[:values_to_lose-1]
238 |         series.consolidate(values_per_point)
239 |         step = seconds_per_point
240 |     else:
241 |         step = series.step
242 | 
243 |     timestamps = range(series.start, series.end + series.step, step)
244 |     datapoints = zip(series, timestamps)
245 |     return {'target': series.name, 'datapoints': datapoints}
246 | 
247 | 
248 | @app.route('/render', methods=methods)
249 | def render():
250 |     start = time.time()
251 |     # Start with some defaults
252 |     errors = {}
253 |     graph_options = {
254 |         'width': 600,
255 |         'height': 300,
256 |     }
257 |     request_options = {}
258 | 
259 |     # Fill in the request_options
260 |     graph_type = RequestParams.get('graphType', 'line')
261 | 
262 |     # Resolve the requested graph type into a graph class
263 |     try:
264 |         graph_class = GraphTypes[graph_type]
265 |         request_options['graphType'] = graph_type
266 |         request_options['graphClass'] = graph_class
267 |     except KeyError:
268 |         errors['graphType'] = (
269 |             "Invalid graphType '{0}', must be one of '{1}'.".format(
270 |                 graph_type, "', '".join(sorted(GraphTypes))))
271 |     request_options['pieMode'] = RequestParams.get('pieMode', 'average')
272 |     targets = RequestParams.getlist('target')
273 |     if not targets:
274 |         errors['target'] = 'This parameter is required.'
275 |     request_options['targets'] = targets
276 | 
277 |     if 'rawData' in RequestParams:
278 |         request_options['format'] = 'raw'
279 |     if 'format' in RequestParams:
280 |         request_options['format'] = RequestParams['format']
281 |     if 'jsonp' in RequestParams:
282 |         request_options['jsonp'] = RequestParams['jsonp']
283 |     if 'maxDataPoints' in RequestParams:
284 |         try:
285 |             request_options['maxDataPoints'] = int(
286 |                 float(RequestParams['maxDataPoints']))
287 |         except ValueError:
288 |             errors['maxDataPoints'] = 'Must be an integer.'
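    # For illustration: a request with maxDataPoints=720 makes the JSON
    # rendering branch below run each series through prune_datapoints(),
    # returning at most 720 consolidated points per series.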
289 | if 'noNullPoints' in RequestParams: 290 | request_options['noNullPoints'] = True 291 | 292 | if errors: 293 | return jsonify({'errors': errors}, status=400) 294 | 295 | # Fill in the graph_options 296 | for opt in graph_class.customizable: 297 | if opt in RequestParams: 298 | value = RequestParams[opt] 299 | try: 300 | intvalue = int(value) 301 | if str(intvalue) == str(value): 302 | value = intvalue 303 | except ValueError: 304 | try: 305 | value = float(value) 306 | except ValueError: 307 | if value.lower() in ('true', 'false'): 308 | value = value.lower() == 'true' 309 | elif value.lower() == 'default' or not value: 310 | continue 311 | graph_options[opt] = value 312 | 313 | tzinfo = pytz.timezone(app.config['TIME_ZONE']) 314 | tz = RequestParams.get('tz') 315 | if tz: 316 | try: 317 | tzinfo = pytz.timezone(tz) 318 | except pytz.UnknownTimeZoneError: 319 | errors['tz'] = "Unknown timezone: '{0}'.".format(tz) 320 | request_options['tzinfo'] = tzinfo 321 | 322 | # Get the time interval for time-oriented graph types 323 | now = parseATTime(RequestParams.get('now', 'now'), tzinfo) 324 | until_time = parseATTime(RequestParams.get('until', 'now'), tzinfo, now) 325 | from_time = parseATTime(RequestParams.get('from', '-1d'), tzinfo, now) 326 | 327 | start_time = min(from_time, until_time) 328 | end_time = max(from_time, until_time) 329 | if start_time == end_time: 330 | errors['from'] = errors['until'] = 'Invalid empty time range' 331 | 332 | request_options['startTime'] = start_time 333 | request_options['endTime'] = end_time 334 | request_options['now'] = now 335 | 336 | template = dict() 337 | for key in RequestParams.keys(): 338 | if key.startswith('template['): 339 | template[key[9:-1]] = RequestParams.get(key) 340 | request_options['template'] = template 341 | 342 | use_cache = app.cache is not None and 'noCache' not in RequestParams 343 | cache_timeout = RequestParams.get('cacheTimeout') 344 | if cache_timeout is not None: 345 | cache_timeout = int(cache_timeout) 346 | 347 | if errors: 348 | return jsonify({'errors': errors}, status=400) 349 | 350 | # Done with options. 
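    # e.g. ?cacheTimeout=120 keeps the rendered response cached for 120
    # seconds, while ?noCache=1 bypasses the cache (use_cache is False when
    # noCache is passed or no cache backend is configured).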
351 | 352 | if use_cache: 353 | request_key = hash_request() 354 | response = app.cache.get(request_key) 355 | if response is not None: 356 | logger.debug("cached response", time=(time.time() - start), 357 | targets=targets) 358 | return response 359 | 360 | headers = { 361 | 'Last-Modified': http_date(time.time()), 362 | 'Expires': http_date(time.time() + (cache_timeout or 60)), 363 | 'Cache-Control': 'max-age={0}'.format(cache_timeout or 60) 364 | } if use_cache else { 365 | 'Pragma': 'no-cache', 366 | 'Cache-Control': 'no-cache', 367 | } 368 | 369 | context = { 370 | 'startTime': request_options['startTime'], 371 | 'endTime': request_options['endTime'], 372 | 'now': request_options['now'], 373 | 'tzinfo': request_options['tzinfo'], 374 | 'template': request_options['template'], 375 | 'data': [], 376 | } 377 | 378 | # Gather all data to take advantage of backends with fetch_multi 379 | fdstart = time.time() 380 | paths = [] 381 | for target in request_options['targets']: 382 | if request_options['graphType'] == 'pie': 383 | if ':' in target: 384 | continue 385 | if target.strip(): 386 | paths += pathsFromTarget(context, target) 387 | data_store = fetchData(context, paths) 388 | logger.debug("fetched data", time=(time.time() - fdstart), paths=paths) 389 | 390 | if request_options['graphType'] == 'pie': 391 | for target in request_options['targets']: 392 | if ':' in target: 393 | name, value = target.split(':', 1) 394 | try: 395 | value = float(value) 396 | except ValueError: 397 | errors['target'] = "Invalid target: '{0}'.".format(target) 398 | context['data'].append((name, value)) 399 | else: 400 | series_list = evaluateTarget(context, target, data_store) 401 | 402 | for series in series_list: 403 | func = app.functions[request_options['pieMode']] 404 | context['data'].append((series.name, 405 | func(context, series) or 0)) 406 | 407 | if errors: 408 | return jsonify({'errors': errors}, status=400) 409 | 410 | else: # graphType == 'line' 411 | for target in request_options['targets']: 412 | if not target.strip(): 413 | continue 414 | emstart = time.time() 415 | series_list = evaluateTarget(context, target, data_store) 416 | logger.debug("evaluated metric", time=(time.time() - emstart), 417 | target=target) 418 | context['data'].extend(series_list) 419 | 420 | request_options['format'] = request_options.get('format') 421 | 422 | if request_options['format'] == 'csv': 423 | response = BytesIO() if six.PY2 else StringIO() 424 | writer = csv.writer(response, dialect='excel') 425 | for series in context['data']: 426 | for index, value in enumerate(series): 427 | ts = datetime.fromtimestamp( 428 | series.start + index * series.step, 429 | request_options['tzinfo'] 430 | ) 431 | writer.writerow((series.name, 432 | ts.strftime("%Y-%m-%d %H:%M:%S"), value)) 433 | response.seek(0) 434 | headers['Content-Type'] = 'text/csv' 435 | response = (response.read(), 200, headers) 436 | if use_cache: 437 | app.cache.add(request_key, response, cache_timeout) 438 | return response 439 | 440 | if request_options['format'] == 'json': 441 | series_data = [] 442 | if 'maxDataPoints' in request_options and any(context['data']): 443 | start_time = min([s.start for s in context['data']]) 444 | end_time = max([s.end for s in context['data']]) 445 | for series in context['data']: 446 | series_data.append(prune_datapoints( 447 | series, request_options['maxDataPoints'], 448 | start_time, end_time)) 449 | elif 'noNullPoints' in request_options and any(context['data']): 450 | for series in context['data']: 451 | values = 
[] 452 | for (index, v) in enumerate(series): 453 | if v is not None: 454 | timestamp = series.start + (index * series.step) 455 | values.append((v, timestamp)) 456 | if len(values) > 0: 457 | series_data.append({'target': series.name, 458 | 'datapoints': values}) 459 | else: 460 | for series in context['data']: 461 | timestamps = range(series.start, series.end + series.step, 462 | series.step) 463 | datapoints = zip(series, timestamps) 464 | series_data.append({'target': series.name, 465 | 'datapoints': datapoints}) 466 | 467 | response = jsonify(series_data, headers=headers) 468 | if use_cache: 469 | app.cache.add(request_key, response, cache_timeout) 470 | logger.debug("rendered json", time=(time.time() - start), 471 | targets=targets) 472 | return response 473 | 474 | if request_options['format'] == 'dygraph': 475 | series_data = {} 476 | labels = ['Time'] 477 | if any(context['data']): 478 | datapoints = [[ts * 1000] 479 | for ts in range(context['data'][0].start, 480 | context['data'][0].end, 481 | context['data'][0].step)] 482 | for series in context['data']: 483 | labels.append(series.name) 484 | for i, point in enumerate(series): 485 | datapoints[i].append(point) 486 | series_data = {'labels': labels, 'data': datapoints} 487 | 488 | logger.debug("rendered dygraph", time=(time.time() - start), 489 | targets=targets) 490 | return jsonify(series_data, headers=headers) 491 | 492 | if request_options['format'] == 'rickshaw': 493 | series_data = [] 494 | for series in context['data']: 495 | timestamps = range(series.start, series.end, series.step) 496 | datapoints = [{'x': x, 'y': y} 497 | for x, y in zip(timestamps, series)] 498 | series_data.append(dict(target=series.name, 499 | datapoints=datapoints)) 500 | logger.debug("rendered rickshaw", time=(time.time() - start), 501 | targets=targets) 502 | return jsonify(series_data, headers=headers) 503 | 504 | if request_options['format'] == 'raw': 505 | response = StringIO() 506 | for series in context['data']: 507 | response.write(u"%s,%d,%d,%d|" % ( 508 | series.name, series.start, series.end, series.step)) 509 | response.write(u','.join(map(repr, series))) 510 | response.write(u'\n') 511 | response.seek(0) 512 | headers['Content-Type'] = 'text/plain' 513 | response = (response.read(), 200, headers) 514 | if use_cache: 515 | app.cache.add(request_key, response, cache_timeout) 516 | logger.debug("rendered rawData", time=(time.time() - start), 517 | targets=targets) 518 | return response 519 | 520 | if request_options['format'] == 'svg': 521 | graph_options['outputFormat'] = 'svg' 522 | elif request_options['format'] == 'pdf': 523 | graph_options['outputFormat'] = 'pdf' 524 | 525 | graph_options['data'] = context['data'] 526 | image = doImageRender(request_options['graphClass'], graph_options) 527 | 528 | use_svg = graph_options.get('outputFormat') == 'svg' 529 | 530 | if use_svg and 'jsonp' in request_options: 531 | headers['Content-Type'] = 'text/javascript' 532 | response = ('{0}({1})'.format(request_options['jsonp'], 533 | json.dumps(image.decode('utf-8'))), 534 | 200, headers) 535 | else: 536 | if use_svg: 537 | ctype = 'image/svg+xml' 538 | elif graph_options.get('outputFormat') == 'pdf': 539 | ctype = 'application/x-pdf' 540 | else: 541 | ctype = 'image/png' 542 | headers['Content-Type'] = ctype 543 | response = image, 200, headers 544 | 545 | if use_cache: 546 | app.cache.add(request_key, response, cache_timeout) 547 | logger.debug("rendered graph", time=(time.time() - start), targets=targets) 548 | return response 549 | 550 | 551 | 
def tree_json(nodes, base_path, wildcards=False): 552 | results = [] 553 | 554 | branchNode = { 555 | 'allowChildren': 1, 556 | 'expandable': 1, 557 | 'leaf': 0, 558 | } 559 | leafNode = { 560 | 'allowChildren': 0, 561 | 'expandable': 0, 562 | 'leaf': 1, 563 | } 564 | 565 | # Add a wildcard node if appropriate 566 | if len(nodes) > 1 and wildcards: 567 | wildcardNode = {'text': '*', 'id': base_path + '*'} 568 | 569 | if any(not n.is_leaf for n in nodes): 570 | wildcardNode.update(branchNode) 571 | 572 | else: 573 | wildcardNode.update(leafNode) 574 | 575 | results.append(wildcardNode) 576 | 577 | found = set() 578 | results_leaf = [] 579 | results_branch = [] 580 | for node in nodes: # Now let's add the matching children 581 | if node.name in found: 582 | continue 583 | 584 | found.add(node.name) 585 | resultNode = { 586 | 'text': str(node.name), 587 | 'id': base_path + str(node.name), 588 | } 589 | 590 | if node.is_leaf: 591 | resultNode.update(leafNode) 592 | results_leaf.append(resultNode) 593 | else: 594 | resultNode.update(branchNode) 595 | results_branch.append(resultNode) 596 | 597 | results.extend(results_branch) 598 | results.extend(results_leaf) 599 | return results 600 | 601 | 602 | def json_nodes(nodes): 603 | nodes_info = [] 604 | 605 | for node in nodes: 606 | info = { 607 | 'path': node.path, 608 | 'is_leaf': node.is_leaf, 609 | 'intervals': [], 610 | } 611 | if node.is_leaf: 612 | for i in node.intervals: 613 | interval = {'start': i.start, 'end': i.end} 614 | info['intervals'].append(interval) 615 | 616 | nodes_info.append(info) 617 | 618 | return sorted(nodes_info, key=lambda item: item['path']) 619 | 620 | 621 | def doImageRender(graphClass, graphOptions): 622 | pngData = BytesIO() 623 | img = graphClass(**graphOptions) 624 | img.output(pngData) 625 | imageData = pngData.getvalue() 626 | pngData.close() 627 | return imageData 628 | 629 | 630 | from .evaluator import evaluateTarget, pathsFromTarget # noqa 631 | -------------------------------------------------------------------------------- /graphite_api/carbonlink.py: -------------------------------------------------------------------------------- 1 | import bisect 2 | import random 3 | import socket 4 | import struct 5 | import time 6 | 7 | from hashlib import md5 8 | from importlib import import_module 9 | from io import BytesIO 10 | 11 | import six 12 | from six.moves import cPickle as pickle # noqa 13 | from structlog import get_logger 14 | 15 | logger = get_logger() 16 | 17 | pickle_safe = { 18 | 'copy_reg': set(['_reconstructor']), 19 | '__builtin__': set(['object', 'list']), 20 | 'collections': set(['deque']), 21 | } 22 | renames = { 23 | 'copy_reg': 'copyreg', 24 | '__builtin__': 'builtins', 25 | } 26 | 27 | 28 | try: 29 | import pyhash 30 | hasher = pyhash.fnv1a_32() 31 | 32 | def fnv32a(string, seed=0x811c9dc5): 33 | return hasher(string, seed=seed) 34 | except ImportError: 35 | def fnv32a(string, seed=0x811c9dc5): 36 | """ 37 | FNV-1a Hash (http://isthe.com/chongo/tech/comp/fnv/) in Python. 
38 | Taken from https://gist.github.com/vaiorabbit/5670985 39 | """ 40 | hval = seed 41 | fnv_32_prime = 0x01000193 42 | uint32_max = 2 ** 32 43 | for s in string: 44 | hval = hval ^ ord(s) 45 | hval = (hval * fnv_32_prime) % uint32_max 46 | return hval 47 | 48 | 49 | def allowed_module(module, name): 50 | if module not in pickle_safe: 51 | raise pickle.UnpicklingError( 52 | 'Attempting to unpickle unsafe module %s' % module) 53 | if name not in pickle_safe[module]: 54 | raise pickle.UnpicklingError( 55 | 'Attempting to unpickle unsafe class %s' % name) 56 | if module in renames: 57 | module = 'six.moves.{0}'.format(renames[module]) 58 | mod = import_module(module) 59 | return getattr(mod, name) 60 | 61 | 62 | if six.PY2: 63 | class SafeUnpickler(object): 64 | @classmethod 65 | def find_class(cls, module, name): 66 | return allowed_module(module, name) 67 | 68 | @classmethod 69 | def loads(cls, s): 70 | obj = pickle.Unpickler(BytesIO(s)) 71 | obj.find_global = cls.find_class 72 | return obj.load() 73 | else: 74 | class SafeUnpickler(pickle.Unpickler): 75 | def find_class(self, module, name): 76 | return allowed_module(module, name) 77 | 78 | @classmethod 79 | def loads(cls, s): 80 | obj = SafeUnpickler(BytesIO(s)) 81 | return obj.load() 82 | 83 | 84 | class ConsistentHashRing(object): 85 | def __init__(self, nodes, replica_count=100, hash_type='carbon_ch'): 86 | self.ring = [] 87 | self.ring_len = len(self.ring) 88 | self.nodes = set() 89 | self.nodes_len = len(self.nodes) 90 | self.replica_count = replica_count 91 | self.hash_type = hash_type 92 | for node in nodes: 93 | self.add_node(node) 94 | 95 | def compute_ring_position(self, key): 96 | if self.hash_type == 'fnv1a_ch': 97 | big_hash = '{0:x}'.format(int(fnv32a(str(key)))) 98 | small_hash = int(big_hash[:4], 16) ^ int(big_hash[4:], 16) 99 | else: 100 | big_hash = md5(str(key).encode()).hexdigest() 101 | small_hash = int(big_hash[:4], 16) 102 | return small_hash 103 | 104 | def add_node(self, key): 105 | self.nodes.add(key) 106 | self.nodes_len = len(self.nodes) 107 | for i in range(self.replica_count): 108 | if self.hash_type == 'fnv1a_ch': 109 | replica_key = "%d-%s" % (i, key[1]) 110 | else: 111 | replica_key = "%s:%d" % (key, i) 112 | position = self.compute_ring_position(replica_key) 113 | entry = position, key 114 | bisect.insort(self.ring, entry) 115 | self.ring_len = len(self.ring) 116 | 117 | def remove_node(self, key): 118 | self.nodes.discard(key) 119 | self.nodes_len = len(self.nodes) 120 | self.ring = [entry for entry in self.ring if entry[1] != key] 121 | self.ring_len = len(self.ring) 122 | 123 | def get_node(self, key): 124 | assert self.ring 125 | position = self.compute_ring_position(key) 126 | search_entry = position, None 127 | index = bisect.bisect_left(self.ring, search_entry) % self.ring_len 128 | entry = self.ring[index] 129 | return entry[1] 130 | 131 | def get_nodes(self, key): 132 | nodes = [] 133 | position = self.compute_ring_position(key) 134 | search_entry = position, None 135 | index = bisect.bisect_left(self.ring, search_entry) % self.ring_len 136 | last_index = (index - 1) % self.ring_len 137 | nodes_len = len(nodes) 138 | while nodes_len < self.nodes_len and index != last_index: 139 | position, next_node = self.ring[index] 140 | if next_node not in nodes: 141 | nodes.append(next_node) 142 | nodes_len += 1 143 | index = (index + 1) % self.ring_len 144 | return nodes 145 | 146 | 147 | class CarbonLinkPool(object): 148 | def __init__(self, hosts, timeout=1, retry_delay=15, 149 | carbon_prefix='carbon', 
replication_factor=1,
150 |                  hashing_keyfunc=lambda x: x, hashing_type='carbon_ch'):
151 |         self.carbon_prefix = carbon_prefix
152 |         self.retry_delay = retry_delay
153 |         self.hosts = []
154 |         self.ports = {}
155 |         servers = set()
156 |         for host in hosts:
157 |             parts = host.split(':')
158 |             if len(parts) == 2:
159 |                 parts.append(None)
160 |             server, port, instance = parts
161 |             self.hosts.append((server, instance))
162 |             self.ports[(server, instance)] = port
163 |             servers.add(server)
164 | 
165 |         self.timeout = float(timeout)
166 |         if len(servers) < replication_factor:
167 |             raise Exception(
168 |                 "replication_factor=%d cannot exceed servers=%d" % (
169 |                     replication_factor, len(servers)))
170 |         self.replication_factor = replication_factor
171 | 
172 |         self.hash_ring = ConsistentHashRing(self.hosts, hash_type=hashing_type)
173 |         self.keyfunc = hashing_keyfunc
174 |         self.connections = {}
175 |         self.last_failure = {}
176 |         # Create a connection pool for each host
177 |         for host in self.hosts:
178 |             self.connections[host] = set()
179 | 
180 |     def select_host(self, metric):
181 |         """
182 |         Returns the carbon host that has data for the given metric.
183 |         """
184 |         key = self.keyfunc(metric)
185 |         nodes = []
186 |         servers = set()
187 |         for node in self.hash_ring.get_nodes(key):
188 |             server, instance = node
189 |             if server in servers:
190 |                 continue
191 |             servers.add(server)
192 |             nodes.append(node)
193 |             if len(servers) >= self.replication_factor:
194 |                 break
195 |         available = [n for n in nodes if self.is_available(n)]
196 |         return random.choice(available or nodes)
197 | 
198 |     def is_available(self, host):
199 |         now = time.time()
200 |         last_fail = self.last_failure.get(host, 0)
201 |         return (now - last_fail) >= self.retry_delay  # skip recently-failed hosts
202 | 
203 |     def get_connection(self, host):
204 |         # First try to take one out of the pool for this host
205 |         server, instance = host
206 |         port = self.ports[host]
207 |         pool = self.connections[host]
208 |         try:
209 |             return pool.pop()
210 |         except KeyError:
211 |             pass  # nothing left in the pool, gotta make a new connection
212 | 
213 |         logger.info("new carbonlink socket", host=str(host))
214 |         connection = socket.socket()
215 |         connection.settimeout(self.timeout)
216 |         try:
217 |             connection.connect((server, int(port)))
218 |         except Exception:
219 |             self.last_failure[host] = time.time()
220 |             raise
221 |         connection.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
222 |         return connection
223 | 
224 |     def query(self, metric):
225 |         if not self.hosts:
226 |             return []
227 |         request = dict(type='cache-query', metric=metric)
228 |         results = self.send_request(request)
229 |         logger.debug("carbonlink request returned", metric=metric,
230 |                      datapoints=len(results['datapoints']))
231 |         return results['datapoints']
232 | 
233 |     def get_metadata(self, metric, key):
234 |         request = dict(type='get-metadata', metric=metric, key=key)
235 |         results = self.send_request(request)
236 |         logger.debug("carbonlink get-metadata request received",
237 |                      metric=metric, key=key)
238 |         return results['value']
239 | 
240 |     def set_metadata(self, metric, key, value):
241 |         request = dict(type='set-metadata', metric=metric,
242 |                        key=key, value=value)
243 |         results = self.send_request(request)
244 |         logger.debug("carbonlink set-metadata request received",
245 |                      metric=metric, key=key, value=value)
246 |         return results
247 | 
248 |     def send_request(self, request):
249 |         metric = request['metric']
250 |         serialized_request = pickle.dumps(request, protocol=2)
251 |         len_prefix = struct.pack("!L", 
len(serialized_request)) 252 | request_packet = len_prefix + serialized_request 253 | result = {} 254 | result.setdefault('datapoints', []) 255 | 256 | if metric.startswith(self.carbon_prefix): 257 | return self.send_request_to_all(request) 258 | 259 | host = self.select_host(metric) 260 | conn = self.get_connection(host) 261 | logger.debug("carbonlink request", metric=metric, host=str(host)) 262 | try: 263 | conn.sendall(request_packet) 264 | result = self.recv_response(conn) 265 | except Exception: 266 | self.last_failure[host] = time.time() 267 | logger.info("carbonlink exception", exc_info=True, host=str(host)) 268 | else: 269 | self.connections[host].add(conn) 270 | if 'error' in result: 271 | logger.info("carbonlink error", error=result['error']) 272 | raise CarbonLinkRequestError(result['error']) 273 | logger.debug("carbonlink finished receiving", 274 | metric=metric, host=host) 275 | return result 276 | 277 | def send_request_to_all(self, request): 278 | metric = request['metric'] 279 | serialized_request = pickle.dumps(request, protocol=2) 280 | len_prefix = struct.pack("!L", len(serialized_request)) 281 | request_packet = len_prefix + serialized_request 282 | results = {} 283 | results.setdefault('datapoints', []) 284 | 285 | for host in self.hosts: 286 | conn = self.get_connection(host) 287 | logger.debug("carbonlink request", metric=metric, host=str(host)) 288 | try: 289 | conn.sendall(request_packet) 290 | result = self.recv_response(conn) 291 | except Exception: 292 | self.last_failure[host] = time.time() 293 | logger.info("carbonlink exception", exc_info=True, 294 | host=str(host)) 295 | else: 296 | self.connections[host].add(conn) 297 | if 'error' in result: 298 | logger.info("carbonlink error", 299 | host=str(host), error=result['error']) 300 | else: 301 | if len(result['datapoints']) > 1: 302 | results['datapoints'].extend(result['datapoints']) 303 | logger.debug("carbonlink finished receiving", 304 | metric=metric, host=str(host)) 305 | return results 306 | 307 | def recv_response(self, conn): 308 | len_prefix = recv_exactly(conn, 4) 309 | body_size = struct.unpack("!L", len_prefix)[0] 310 | body = recv_exactly(conn, body_size) 311 | return SafeUnpickler.loads(body) 312 | 313 | 314 | class CarbonLinkRequestError(Exception): 315 | pass 316 | 317 | 318 | # Socket helper functions 319 | def recv_exactly(conn, num_bytes): 320 | buf = b'' 321 | while len(buf) < num_bytes: 322 | data = conn.recv(num_bytes - len(buf)) 323 | if not data: 324 | raise Exception("Connection lost") 325 | buf += data 326 | return buf 327 | -------------------------------------------------------------------------------- /graphite_api/config.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import traceback 4 | import warnings 5 | from importlib import import_module 6 | from logging.config import dictConfig 7 | 8 | import structlog 9 | import yaml 10 | from flask import make_response 11 | from structlog.processors import (format_exc_info, JSONRenderer, 12 | KeyValueRenderer) 13 | from tzlocal import get_localzone 14 | 15 | from . 
import DEBUG 16 | from .middleware import CORS, TrailingSlash 17 | from .storage import Store 18 | 19 | if DEBUG: 20 | processors = (format_exc_info, KeyValueRenderer()) 21 | else: 22 | processors = (format_exc_info, JSONRenderer()) 23 | 24 | logger = structlog.get_logger() 25 | 26 | default_conf = { 27 | 'search_index': '/srv/graphite/index', 28 | 'finders': [ 29 | 'graphite_api.finders.whisper.WhisperFinder', 30 | ], 31 | 'functions': [ 32 | 'graphite_api.functions.SeriesFunctions', 33 | 'graphite_api.functions.PieFunctions', 34 | ], 35 | 'whisper': { 36 | 'directories': [ 37 | '/srv/graphite/whisper', 38 | ], 39 | }, 40 | 'time_zone': get_localzone().zone, 41 | } 42 | if default_conf['time_zone'] == 'local': # tzlocal didn't find anything 43 | default_conf['time_zone'] = 'UTC' 44 | 45 | 46 | # attributes of a classical log record 47 | NON_EXTRA = set(['module', 'filename', 'levelno', 'exc_text', 'pathname', 48 | 'lineno', 'msg', 'funcName', 'relativeCreated', 49 | 'levelname', 'msecs', 'threadName', 'name', 'created', 50 | 'process', 'processName', 'thread']) 51 | 52 | 53 | class StructlogFormatter(logging.Formatter): 54 | def __init__(self, *args, **kwargs): 55 | self._bound = structlog.BoundLoggerBase(None, processors, {}) 56 | 57 | def format(self, record): 58 | if not record.name.startswith('graphite_api'): 59 | kw = dict(((k, v) for k, v in record.__dict__.items() 60 | if k not in NON_EXTRA)) 61 | kw['logger'] = record.name 62 | return self._bound._process_event( 63 | record.levelname.lower(), record.getMessage(), kw)[0] 64 | return record.getMessage() 65 | 66 | 67 | def load_by_path(path): 68 | module, klass = path.rsplit('.', 1) 69 | finder = import_module(module) 70 | return getattr(finder, klass) 71 | 72 | 73 | def error_handler(e): 74 | return make_response(traceback.format_exc(), 500, 75 | {'Content-Type': 'text/plain'}) 76 | 77 | 78 | def configure(app): 79 | config_file = os.environ.get('GRAPHITE_API_CONFIG', 80 | '/etc/graphite-api.yaml') 81 | if os.path.exists(config_file): 82 | with open(config_file) as f: 83 | config = yaml.safe_load(f) 84 | config['path'] = config_file 85 | else: 86 | warnings.warn("Unable to find configuration file at {0}, using " 87 | "default config.".format(config_file)) 88 | config = {} 89 | 90 | configure_logging(config) 91 | 92 | for key, value in list(default_conf.items()): 93 | config.setdefault(key, value) 94 | 95 | app.statsd = None 96 | if 'statsd' in config: 97 | try: 98 | from statsd import StatsClient 99 | except ImportError: 100 | warnings.warn("'statsd' is provided in the configuration but " 101 | "the statsd client is not installed. Please `pip " 102 | "install statsd`.") 103 | else: 104 | c = config['statsd'] 105 | app.statsd = StatsClient(c['host'], c.get('port', 8125)) 106 | 107 | app.cache = None 108 | if 'cache' in config: 109 | try: 110 | from flask.ext.cache import Cache 111 | except ImportError: 112 | warnings.warn("'cache' is provided in the configuration but " 113 | "Flask-Cache is not installed. 
Please `pip install " 114 | "Flask-Cache`.") 115 | else: 116 | cache_conf = {'CACHE_DEFAULT_TIMEOUT': 60, 117 | 'CACHE_KEY_PREFIX': 'graphite-api:'} 118 | for key, value in config['cache'].items(): 119 | cache_conf['CACHE_{0}'.format(key.upper())] = value 120 | app.cache = Cache(app, config=cache_conf) 121 | 122 | loaded_config = {'functions': {}} 123 | for functions in config['functions']: 124 | loaded_config['functions'].update(load_by_path(functions)) 125 | 126 | if 'carbon' in config: 127 | if 'hashing_keyfunc' in config['carbon']: 128 | config['carbon']['hashing_keyfunc'] = load_by_path( 129 | config['carbon']['hashing_keyfunc']) 130 | else: 131 | config['carbon']['hashing_keyfunc'] = lambda x: x 132 | loaded_config['carbon'] = config.get('carbon', None) 133 | 134 | finders = [] 135 | for finder in config['finders']: 136 | finders.append(load_by_path(finder)(config)) 137 | loaded_config['store'] = Store(finders) 138 | app.config['GRAPHITE'] = loaded_config 139 | app.config['TIME_ZONE'] = config['time_zone'] 140 | logger.info("configured timezone", timezone=app.config['TIME_ZONE']) 141 | 142 | if 'sentry_dsn' in config: 143 | try: 144 | from raven.contrib.flask import Sentry 145 | except ImportError: 146 | warnings.warn("'sentry_dsn' is provided in the configuration but " 147 | "the sentry client is not installed. Please `pip " 148 | "install raven[flask]`.") 149 | else: 150 | Sentry(app, dsn=config['sentry_dsn']) 151 | 152 | app.wsgi_app = TrailingSlash(CORS(app.wsgi_app, 153 | config.get('allowed_origins'))) 154 | if config.get('render_errors', True): 155 | app.errorhandler(500)(error_handler) 156 | 157 | 158 | def configure_logging(config): 159 | structlog.configure(processors=processors, 160 | logger_factory=structlog.stdlib.LoggerFactory(), 161 | wrapper_class=structlog.stdlib.BoundLogger, 162 | cache_logger_on_first_use=True) 163 | config.setdefault('logging', {}) 164 | config['logging'].setdefault('version', 1) 165 | config['logging'].setdefault('handlers', {}) 166 | config['logging'].setdefault('formatters', {}) 167 | config['logging'].setdefault('loggers', {}) 168 | config['logging']['handlers'].setdefault('raw', { 169 | 'level': 'DEBUG', 170 | 'class': 'logging.StreamHandler', 171 | 'formatter': 'raw', 172 | }) 173 | config['logging']['loggers'].setdefault('root', { 174 | 'handlers': ['raw'], 175 | 'level': 'DEBUG', 176 | 'propagate': False, 177 | }) 178 | config['logging']['loggers'].setdefault('graphite_api', { 179 | 'handlers': ['raw'], 180 | 'level': 'DEBUG', 181 | }) 182 | config['logging']['formatters']['raw'] = {'()': StructlogFormatter} 183 | dictConfig(config['logging']) 184 | if 'path' in config: 185 | logger.info("loading configuration", path=config['path']) 186 | else: 187 | logger.info("loading default configuration") 188 | -------------------------------------------------------------------------------- /graphite_api/encoders.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | 4 | class JSONEncoder(json.JSONEncoder): 5 | """ 6 | JSONEncoder subclass that knows how to encode generators. 
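    It also serializes objects exposing ``tolist()`` (e.g. numpy arrays) and
    mapping-like objects, per the fallbacks in ``default()`` below.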
7 | """ 8 | def default(self, o): 9 | if hasattr(o, 'tolist'): 10 | return o.tolist() 11 | elif hasattr(o, '__getitem__'): 12 | try: 13 | return dict(o) 14 | except TypeError: 15 | pass 16 | elif hasattr(o, '__iter__'): 17 | return [i for i in o] 18 | return super(JSONEncoder, self).default(o) 19 | -------------------------------------------------------------------------------- /graphite_api/evaluator.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import re 3 | 4 | import six 5 | 6 | from .render.datalib import fetchData, TimeSeries 7 | from .render.grammar import grammar 8 | 9 | 10 | def pathsFromTarget(requestContext, target): 11 | tokens = grammar.parseString(target) 12 | paths = list(pathsFromTokens(requestContext, tokens)) 13 | return paths 14 | 15 | 16 | def pathsFromTokens(requestContext, tokens, replacements=None): 17 | iters = [] 18 | 19 | if tokens.template: 20 | arglist = dict() 21 | if tokens.template.kwargs: 22 | for kwarg in tokens.template.kwargs: 23 | arg = kwarg.args[0] 24 | if arg.string: 25 | arglist[kwarg.argname] = arg.string[1:-1] 26 | if tokens.template.args: 27 | for i, arg in enumerate(tokens.template.args): 28 | if arg.string: 29 | arglist[str(i + 1)] = arg.string[1:-1] 30 | if 'template' in requestContext: 31 | arglist.update(requestContext['template']) 32 | iters.append(pathsFromTokens(requestContext, tokens.template, arglist)) 33 | 34 | elif tokens.expression: 35 | iters.append(pathsFromTokens(requestContext, tokens.expression, 36 | replacements)) 37 | 38 | elif tokens.pathExpression: 39 | expression = tokens.pathExpression 40 | if replacements: 41 | for name in replacements: 42 | val = replacements[name] 43 | expression = expression.replace('$'+name, str(val)) 44 | iters.append([expression]) 45 | 46 | elif tokens.call: 47 | if tokens.call.funcname == 'template': 48 | # if template propagates down here, it means the grammar didn't 49 | # match the invocation as tokens.template. 
this generally happens
50 |             # if you try to pass non-numeric/string args
51 |             raise ValueError("invalid template() syntax, only string/numeric "
52 |                              "arguments are allowed")
53 | 
54 |         iters.extend([pathsFromTokens(requestContext, arg, replacements)
55 |                       for arg in tokens.call.args])
56 |         iters.extend([pathsFromTokens(requestContext, kwarg.args[0],
57 |                                       replacements)
58 |                       for kwarg in tokens.call.kwargs])
59 | 
60 |     for path in itertools.chain(*iters):
61 |         yield path
62 | 
63 | 
64 | def evaluateTarget(requestContext, target, data_store=None):
65 |     tokens = grammar.parseString(target)
66 | 
67 |     if data_store is None:
68 |         paths = list(pathsFromTokens(requestContext, tokens))
69 |         data_store = fetchData(requestContext, paths)
70 | 
71 |     result = evaluateTokens(requestContext, tokens, data_store)
72 |     if isinstance(result, TimeSeries):
73 |         return [result]  # we have to return a list of TimeSeries objects
74 | 
75 |     return result
76 | 
77 | 
78 | def evaluateTokens(requestContext, tokens, data_store=None, replacements=None):
79 |     if data_store is None:
80 |         paths = list(pathsFromTokens(requestContext, tokens))
81 |         data_store = fetchData(requestContext, paths)
82 | 
83 |     if tokens.template:
84 |         arglist = dict()
85 |         if tokens.template.kwargs:
86 |             args = [(kwarg.argname, evaluateTokens(requestContext,
87 |                                                    kwarg.args[0],
88 |                                                    data_store))
89 |                     for kwarg in tokens.template.kwargs]
90 |             arglist.update(dict(args))
91 |         if tokens.template.args:
92 |             args = [(str(i + 1), evaluateTokens(requestContext, arg,
93 |                                                 data_store))
94 |                     for i, arg in enumerate(tokens.template.args)]
95 |             arglist.update(dict(args))
96 |         if 'template' in requestContext:
97 |             arglist.update(requestContext['template'])
98 |         return evaluateTokens(requestContext, tokens.template, data_store,
99 |                               arglist)
100 | 
101 |     elif tokens.expression:
102 |         return evaluateTokens(requestContext, tokens.expression, data_store,
103 |                               replacements)
104 | 
105 |     elif tokens.pathExpression:
106 |         expression = tokens.pathExpression
107 |         if replacements:
108 |             for name in replacements:
109 |                 val = replacements[name]
110 |                 if expression == '$'+name:
111 |                     if not isinstance(val, six.string_types):
112 |                         return val
113 |                     elif re.match(r'^-?[\d.]+$', val):
114 |                         return float(val)
115 |                     else:
116 |                         return val
117 |                 else:
118 |                     expression = expression.replace('$'+name, str(val))
119 |         return data_store.get_series_list(expression)
120 | 
121 |     elif tokens.call:
122 |         if tokens.call.funcname == 'template':
123 |             # if template propagates down here, it means the grammar didn't
124 |             # match the invocation as tokens.template. 
this generally happens 125 | # if you try to pass non-numeric/string args 126 | raise ValueError("invalid template() syntax, only string/numeric " 127 | "arguments are allowed") 128 | 129 | func = app.functions[tokens.call.funcname] 130 | args = [evaluateTokens(requestContext, arg, data_store, replacements) 131 | for arg in tokens.call.args] 132 | requestContext['args'] = tokens.call.args 133 | kwargs = dict([(kwarg.argname, 134 | evaluateTokens(requestContext, kwarg.args[0], 135 | data_store, replacements)) 136 | for kwarg in tokens.call.kwargs]) 137 | ret = func(requestContext, *args, **kwargs) 138 | return ret 139 | 140 | elif tokens.number: 141 | if tokens.number.integer: 142 | return int(tokens.number.integer) 143 | elif tokens.number.float: 144 | return float(tokens.number.float) 145 | elif tokens.number.scientific: 146 | return float(tokens.number.scientific[0]) 147 | 148 | elif tokens.string: 149 | return tokens.string[1:-1] 150 | 151 | elif tokens.boolean: 152 | return tokens.boolean[0] == 'true' 153 | 154 | else: 155 | raise ValueError("unknown token in target evaluator") 156 | 157 | from .app import app # noqa 158 | -------------------------------------------------------------------------------- /graphite_api/finders/__init__.py: -------------------------------------------------------------------------------- 1 | import fnmatch 2 | import os.path 3 | import re 4 | 5 | EXPAND_BRACES_RE = re.compile(r'.*(\{.*?[^\\]?\})') 6 | 7 | 8 | def get_real_metric_path(absolute_path, metric_path): 9 | # Support symbolic links (real_metric_path ensures proper cache queries) 10 | real_fs_path = os.path.realpath(absolute_path) 11 | if absolute_path != real_fs_path: 12 | relative_fs_path = metric_path.replace('.', os.sep) 13 | abs_fs_path = os.path.dirname(absolute_path[:-len(relative_fs_path)]) 14 | base_fs_path = os.path.realpath(abs_fs_path) 15 | relative_real_fs_path = real_fs_path[len(base_fs_path):].lstrip('/') 16 | return fs_to_metric(relative_real_fs_path) 17 | 18 | return metric_path 19 | 20 | 21 | def fs_to_metric(path): 22 | dirpath = os.path.dirname(path) 23 | filename = os.path.basename(path) 24 | return os.path.join(dirpath, filename.split('.')[0]).replace(os.sep, '.') 25 | 26 | 27 | def _deduplicate(entries): 28 | yielded = set() 29 | for entry in entries: 30 | if entry not in yielded: 31 | yielded.add(entry) 32 | yield entry 33 | 34 | 35 | def extract_variants(pattern): 36 | """Extract the pattern variants (ie. {foo,bar}baz = foobaz or barbaz).""" 37 | v1, v2 = pattern.find('{'), pattern.find('}') 38 | if v1 > -1 and v2 > v1: 39 | variations = pattern[v1+1:v2].split(',') 40 | variants = [pattern[:v1] + v + pattern[v2+1:] for v in variations] 41 | else: 42 | variants = [pattern] 43 | return list(_deduplicate(variants)) 44 | 45 | 46 | def match_entries(entries, pattern): 47 | """A drop-in replacement for fnmatch.filter that supports pattern 48 | variants (ie. {foo,bar}baz = foobaz or barbaz).""" 49 | matching = [] 50 | 51 | for variant in expand_braces(pattern): 52 | matching.extend(fnmatch.filter(entries, variant)) 53 | 54 | return list(_deduplicate(matching)) 55 | 56 | 57 | def expand_braces(pattern): 58 | """Find the rightmost, innermost set of braces and, if it contains a 59 | comma-separated list, expand its contents recursively (any of its items 60 | may itself be a list enclosed in braces). 61 | 62 | Return the full list of expanded strings. 63 | """ 64 | res = set() 65 | 66 | # Used instead of s.strip('{}') because strip is greedy. 
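# --- Editor's note: illustrative sketch, not part of the original source. ---
# The helpers above extend fnmatch with {a,b} alternatives:
#
#     extract_variants('{foo,bar}baz')
#     # -> ['foobaz', 'barbaz']
#     match_entries(['foobaz', 'barbaz', 'quux'], '{foo,bar}baz')
#     # -> ['foobaz', 'barbaz'] (order may vary)
#
# expand_braces(), continued below, handles nested alternatives recursively.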
67 | # We want to remove only ONE leading { and ONE trailing }, if both exist 68 | def remove_outer_braces(s): 69 | if s[0] == '{' and s[-1] == '}': 70 | return s[1:-1] 71 | return s 72 | 73 | match = EXPAND_BRACES_RE.search(pattern) 74 | if match is not None: 75 | sub = match.group(1) 76 | v1, v2 = match.span(1) 77 | if "," in sub: 78 | for pat in sub.strip('{}').split(','): 79 | subpattern = pattern[:v1] + pat + pattern[v2:] 80 | res.update(expand_braces(subpattern)) 81 | else: 82 | subpattern = pattern[:v1] + remove_outer_braces(sub) + pattern[v2:] 83 | res.update(expand_braces(subpattern)) 84 | else: 85 | res.add(pattern.replace('\\}', '}')) 86 | 87 | return list(res) 88 | -------------------------------------------------------------------------------- /graphite_api/finders/whisper.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import gzip 4 | import os.path 5 | import time 6 | 7 | from structlog import get_logger 8 | 9 | from . import fs_to_metric, get_real_metric_path, match_entries 10 | from .._vendor import whisper 11 | from ..carbonlink import CarbonLinkPool 12 | from ..intervals import Interval, IntervalSet 13 | from ..node import BranchNode, LeafNode 14 | from ..utils import is_pattern 15 | 16 | try: 17 | from os import scandir, stat, walk 18 | except ImportError: 19 | from scandir import scandir, stat, walk 20 | 21 | logger = get_logger() 22 | 23 | 24 | class WhisperFinder(object): 25 | def __init__(self, config): 26 | self.directories = config['whisper']['directories'] 27 | self.carbonlink = None 28 | if 'carbon' in config: 29 | self.carbonlink = CarbonLinkPool(**config['carbon']) 30 | else: 31 | self.carbonlink = None 32 | 33 | def find_nodes(self, query): 34 | logger.debug("find_nodes", finder="whisper", start=query.startTime, 35 | end=query.endTime, pattern=query.pattern) 36 | clean_pattern = query.pattern.replace('\\', '') 37 | pattern_parts = clean_pattern.split('.') 38 | 39 | for root_dir in self.directories: 40 | if not os.path.isdir(root_dir): 41 | os.makedirs(root_dir) 42 | for absolute_path in self._find_paths(root_dir, pattern_parts): 43 | if os.path.basename(absolute_path).startswith('.'): 44 | continue 45 | 46 | relative_path = absolute_path[len(root_dir):].lstrip(os.sep) 47 | metric_path = fs_to_metric(relative_path) 48 | real_metric_path = get_real_metric_path(absolute_path, 49 | metric_path) 50 | 51 | metric_path_parts = metric_path.split('.') 52 | for field_index in find_escaped_pattern_fields(query.pattern): 53 | metric_path_parts[field_index] = pattern_parts[ 54 | field_index].replace('\\', '') 55 | metric_path = '.'.join(metric_path_parts) 56 | 57 | # Now we construct and yield an appropriate Node object 58 | if os.path.isdir(absolute_path): 59 | yield BranchNode(metric_path) 60 | 61 | elif os.path.isfile(absolute_path): 62 | if absolute_path.endswith('.wsp'): 63 | reader = WhisperReader(absolute_path, real_metric_path, 64 | self.carbonlink) 65 | yield LeafNode(metric_path, reader) 66 | 67 | elif absolute_path.endswith('.wsp.gz'): 68 | reader = GzippedWhisperReader(absolute_path, 69 | real_metric_path, 70 | self.carbonlink) 71 | yield LeafNode(metric_path, reader) 72 | 73 | def _find_paths(self, current_dir, patterns): 74 | """Recursively generates absolute paths whose components 75 | underneath current_dir match the corresponding pattern in 76 | patterns""" 77 | pattern = patterns[0] 78 | patterns = patterns[1:] 79 | has_wildcard = is_pattern(pattern) 80 | 
using_globstar = pattern == "**" 81 | 82 | # This avoids os.listdir() for performance 83 | if has_wildcard: 84 | entries = [x.name for x in scandir(current_dir)] 85 | else: 86 | entries = [pattern] 87 | 88 | if using_globstar: 89 | matching_subdirs = map(lambda x: x[0], walk(current_dir)) 90 | else: 91 | subdirs = [e for e in entries 92 | if os.path.isdir(os.path.join(current_dir, e))] 93 | matching_subdirs = match_entries(subdirs, pattern) 94 | 95 | # For terminal globstar, add a pattern for all files in subdirs 96 | if using_globstar and not patterns: 97 | patterns = ['*'] 98 | 99 | if patterns: # we've still got more directories to traverse 100 | for subdir in matching_subdirs: 101 | absolute_path = os.path.join(current_dir, subdir) 102 | for match in self._find_paths(absolute_path, patterns): 103 | yield match 104 | 105 | else: # we've got the last pattern 106 | if not has_wildcard: 107 | entries = [pattern + '.wsp', pattern + '.wsp.gz'] 108 | files = [e for e in entries 109 | if os.path.isfile(os.path.join(current_dir, e))] 110 | matching_files = match_entries(files, pattern + '.*') 111 | 112 | for _basename in matching_files + matching_subdirs: 113 | yield os.path.join(current_dir, _basename) 114 | 115 | 116 | class WhisperReader(object): 117 | 118 | __slots__ = ('fs_path', 'real_metric_path', 'carbonlink') 119 | 120 | def __init__(self, fs_path, real_metric_path, carbonlink=None): 121 | self.fs_path = fs_path 122 | self.real_metric_path = real_metric_path 123 | self.carbonlink = carbonlink 124 | 125 | def get_intervals(self): 126 | start = time.time() - whisper.info(self.fs_path)['maxRetention'] 127 | end = max(stat(self.fs_path).st_mtime, start) 128 | return IntervalSet([Interval(start, end)]) 129 | 130 | def fetch(self, startTime, endTime): # noqa 131 | logger.debug("fetch", reader="whisper", path=self.fs_path, 132 | metric_path=self.real_metric_path, 133 | start=startTime, end=endTime) 134 | data = whisper.fetch(self.fs_path, startTime, endTime) 135 | if not data: 136 | return None 137 | 138 | time_info, values = data 139 | start, end, step = time_info 140 | 141 | if self.carbonlink: 142 | cached_datapoints = self.carbonlink.query(self.real_metric_path) 143 | if isinstance(cached_datapoints, dict): 144 | cached_datapoints = cached_datapoints.items() 145 | for timestamp, value in sorted(cached_datapoints): 146 | # filter only to cached datapoints within [start, end) 147 | if not (timestamp >= start and timestamp < end): 148 | continue 149 | interval = timestamp - (timestamp % step) 150 | i = int(interval - start) // step 151 | values[i] = value 152 | 153 | return time_info, values 154 | 155 | 156 | class GzippedWhisperReader(WhisperReader): 157 | def get_intervals(self): 158 | fh = gzip.GzipFile(self.fs_path, 'rb') 159 | try: 160 | info = getattr(whisper, '__readHeader')(fh) # evil, but necessary. 
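            # (Editor's note: a plain ``whisper.__readHeader`` reference here
            # would be name-mangled by the enclosing class to
            # ``whisper._GzippedWhisperReader__readHeader``, so getattr() with
            # the literal name is required to reach whisper's module-private
            # header parser; the returned info dict provides the
            # 'maxRetention' value used below.)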
161 | finally: 162 | fh.close() 163 | 164 | start = time.time() - info['maxRetention'] 165 | end = max(stat(self.fs_path).st_mtime, start) 166 | return IntervalSet([Interval(start, end)]) 167 | 168 | def fetch(self, startTime, endTime): 169 | logger.debug("fetch", reader="gzip_whisper", path=self.fs_path, 170 | metric_path=self.real_metric_path, 171 | start=startTime, end=endTime) 172 | fh = gzip.GzipFile(self.fs_path, 'rb') 173 | try: 174 | return whisper.file_fetch(fh, startTime, endTime) 175 | finally: 176 | fh.close() 177 | 178 | 179 | def find_escaped_pattern_fields(pattern_string): 180 | pattern_parts = pattern_string.split('.') 181 | for index, part in enumerate(pattern_parts): 182 | if is_escaped_pattern(part): 183 | yield index 184 | 185 | 186 | def is_escaped_pattern(s): 187 | for symbol in '*?[{': 188 | i = s.find(symbol) 189 | if i > 0: 190 | if s[i-1] == '\\': 191 | return True 192 | return False 193 | -------------------------------------------------------------------------------- /graphite_api/intervals.py: -------------------------------------------------------------------------------- 1 | INFINITY = float('inf') 2 | NEGATIVE_INFINITY = -INFINITY 3 | 4 | 5 | class IntervalSet(object): 6 | __slots__ = ('intervals', 'size') 7 | 8 | def __init__(self, intervals, disjoint=False): 9 | self.intervals = intervals 10 | 11 | if not disjoint: 12 | self.intervals = union_overlapping(self.intervals) 13 | 14 | self.size = sum(i.size for i in self.intervals) 15 | 16 | def __repr__(self): 17 | return repr(self.intervals) 18 | 19 | def __eq__(self, other): 20 | return self.intervals == other.intervals 21 | 22 | def __iter__(self): 23 | return iter(self.intervals) 24 | 25 | def __len__(self): 26 | return len(self.intervals) 27 | 28 | def __getitem__(self, i): 29 | return self.intervals[i] 30 | 31 | def __bool__(self): 32 | return self.size != 0 33 | __nonzero__ = __bool__ # python 2 34 | 35 | def __sub__(self, other): 36 | return self.intersect(other.complement()) 37 | 38 | def complement(self): 39 | complementary = [] 40 | cursor = NEGATIVE_INFINITY 41 | 42 | for interval in self.intervals: 43 | if cursor < interval.start: 44 | complementary.append(Interval(cursor, interval.start)) 45 | cursor = interval.end 46 | 47 | if cursor < INFINITY: 48 | complementary.append(Interval(cursor, INFINITY)) 49 | 50 | return IntervalSet(complementary, disjoint=True) 51 | 52 | def intersect(self, other): 53 | # XXX The last major bottleneck. Factorial-time hell. 54 | # Then again, this function is entirely unused... 
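        # (Editor's note: "factorial-time" above overstates it -- the pairwise
        # intersection below is O(len(self) * len(other)), i.e. quadratic in
        # the number of intervals.)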
55 | if not self or not other: 56 | return IntervalSet([]) 57 | 58 | intersections = [x for x in (i.intersect(j) 59 | for i in self.intervals 60 | for j in other.intervals) 61 | if x] 62 | 63 | return IntervalSet(intersections, disjoint=True) 64 | 65 | def intersect_interval(self, interval): 66 | intersections = [x for x in (i.intersect(interval) 67 | for i in self.intervals) 68 | if x] 69 | return IntervalSet(intersections, disjoint=True) 70 | 71 | def union(self, other): 72 | return IntervalSet(sorted(self.intervals + other.intervals)) 73 | 74 | 75 | class Interval(object): 76 | __slots__ = ('start', 'end', 'tuple', 'size') 77 | 78 | def __init__(self, start, end): 79 | if end - start < 0: 80 | raise ValueError("Invalid interval start=%s end=%s" % (start, end)) 81 | 82 | self.start = start 83 | self.end = end 84 | self.tuple = (start, end) 85 | self.size = self.end - self.start 86 | 87 | def __eq__(self, other): 88 | return self.tuple == other.tuple 89 | 90 | def __hash__(self): 91 | return hash(self.tuple) 92 | 93 | def __lt__(self, other): 94 | return (self.start < other.start) - (self.start > other.start) 95 | 96 | def __len__(self): 97 | raise TypeError("len() doesn't support infinite values, use the " 98 | "'size' attribute instead") 99 | 100 | def __bool__(self): 101 | return self.size != 0 102 | __nonzero__ = __bool__ # python 2 103 | 104 | def __repr__(self): 105 | return '' % str(self.tuple) 106 | 107 | def intersect(self, other): 108 | start = max(self.start, other.start) 109 | end = min(self.end, other.end) 110 | 111 | if end > start: 112 | return Interval(start, end) 113 | 114 | def overlaps(self, other): 115 | earlier = self if self.start <= other.start else other 116 | later = self if earlier is other else other 117 | return earlier.end >= later.start 118 | 119 | def union(self, other): 120 | if not self.overlaps(other): 121 | raise TypeError("Union of disjoint intervals is not an interval") 122 | 123 | start = min(self.start, other.start) 124 | end = max(self.end, other.end) 125 | return Interval(start, end) 126 | 127 | 128 | def union_overlapping(intervals): 129 | """Union any overlapping intervals in the given set.""" 130 | disjoint_intervals = [] 131 | 132 | for interval in intervals: 133 | if disjoint_intervals and disjoint_intervals[-1].overlaps(interval): 134 | disjoint_intervals[-1] = disjoint_intervals[-1].union(interval) 135 | else: 136 | disjoint_intervals.append(interval) 137 | 138 | return disjoint_intervals 139 | -------------------------------------------------------------------------------- /graphite_api/middleware.py: -------------------------------------------------------------------------------- 1 | from six.moves.urllib.parse import urlparse 2 | 3 | 4 | class CORS(object): 5 | """ 6 | Simple middleware that adds CORS headers. 
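    Usage (editor's sketch, not part of the original source):

        app.wsgi_app = CORS(app.wsgi_app, origins=['example.com'])

    Requests whose Origin header does not match a configured origin (and
    when no '*' wildcard is configured) pass through without CORS headers.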
7 | """ 8 | def __init__(self, app, origins=None): 9 | self.app = app 10 | self.origins = origins 11 | 12 | def __call__(self, environ, start_response): 13 | origin = environ.get('HTTP_ORIGIN') 14 | if origin is None or self.origins is None: 15 | return self.app(environ, start_response) 16 | 17 | netloc = urlparse(origin).netloc 18 | if netloc in self.origins or '*' in self.origins: 19 | allow_origin = [ 20 | ('Access-Control-Allow-Origin', origin), 21 | ('Access-Control-Allow-Credentials', 'true'), 22 | ] 23 | if environ['REQUEST_METHOD'] == 'OPTIONS': 24 | start_response('204 No Content', allow_origin) 25 | return [] 26 | 27 | def custom_start_response(status, headers, exc_info=None): 28 | headers.extend(allow_origin) 29 | return start_response(status, headers, exc_info) 30 | else: 31 | custom_start_response = start_response 32 | return self.app(environ, custom_start_response) 33 | 34 | 35 | class TrailingSlash(object): 36 | """ 37 | Middleware that strips trailing slashes from URLs. 38 | """ 39 | def __init__(self, app): 40 | self.app = app 41 | 42 | def __call__(self, environ, start_response): 43 | path_info = environ['PATH_INFO'] 44 | if len(path_info) > 1 and path_info.endswith('/'): 45 | environ['PATH_INFO'] = path_info.rstrip('/') 46 | return self.app(environ, start_response) 47 | -------------------------------------------------------------------------------- /graphite_api/node.py: -------------------------------------------------------------------------------- 1 | class Node(object): 2 | __slots__ = ('name', 'path', 'local', 'is_leaf') 3 | 4 | def __init__(self, path): 5 | self.path = path 6 | self.name = path.split('.')[-1] 7 | self.local = True 8 | self.is_leaf = False 9 | 10 | def __repr__(self): 11 | return '<%s[%x]: %s>' % (self.__class__.__name__, id(self), self.path) 12 | 13 | 14 | class BranchNode(Node): 15 | pass 16 | 17 | 18 | class LeafNode(Node): 19 | __slots__ = ('reader', 'is_leaf') 20 | 21 | def __init__(self, path, reader): 22 | super(LeafNode, self).__init__(path) 23 | self.reader = reader 24 | self.is_leaf = True 25 | 26 | def fetch(self, startTime, endTime, now=None, requestContext=None): 27 | try: 28 | result = self.reader.fetch(startTime, endTime, now, requestContext) 29 | except TypeError: 30 | result = self.reader.fetch(startTime, endTime) 31 | 32 | return result 33 | 34 | @property 35 | def intervals(self): 36 | return self.reader.get_intervals() 37 | 38 | def __repr__(self): 39 | return '' % (id(self), self.path, self.reader) 40 | -------------------------------------------------------------------------------- /graphite_api/readers.py: -------------------------------------------------------------------------------- 1 | from structlog import get_logger 2 | 3 | from .intervals import IntervalSet 4 | 5 | logger = get_logger() 6 | 7 | 8 | class MultiReader(object): 9 | __slots__ = ('nodes',) 10 | 11 | def __init__(self, nodes): 12 | self.nodes = nodes 13 | 14 | def get_intervals(self): 15 | interval_sets = [] 16 | for node in self.nodes: 17 | interval_sets.extend(node.intervals.intervals) 18 | return IntervalSet(sorted(interval_sets)) 19 | 20 | def fetch(self, startTime, endTime, now=None, requestContext=None): 21 | # Start the fetch on each node 22 | results = [] 23 | 24 | for node in self.nodes: 25 | try: 26 | results.append(node.fetch(startTime, endTime, now, 27 | requestContext)) 28 | except Exception: 29 | logger.error("fetch error", exc_info=True) 30 | 31 | data = None 32 | for r in filter(None, results): 33 | if data is None: 34 | data = r 35 | else: 
36 | data = self.merge(data, r) 37 | if data is None: 38 | raise Exception("All sub-fetches failed") 39 | return data 40 | 41 | def merge(self, results1, results2): 42 | # Ensure results1 is finer than results2 43 | if results1[0][2] > results2[0][2]: 44 | results1, results2 = results2, results1 45 | 46 | time_info1, values1 = results1 47 | time_info2, values2 = results2 48 | start1, end1, step1 = time_info1 49 | start2, end2, step2 = time_info2 50 | 51 | step = step1 # finest step 52 | start = min(start1, start2) # earliest start 53 | end = max(end1, end2) # latest end 54 | time_info = start, end, step 55 | values = [] 56 | 57 | t = start 58 | while t < end: 59 | # Look for the finer precision value first if available 60 | i1 = (t - start1) // step1 61 | 62 | if len(values1) > i1: 63 | v1 = values1[i1] 64 | else: 65 | v1 = None 66 | 67 | if v1 is None: 68 | i2 = (t - start2) // step2 69 | 70 | if len(values2) > i2: 71 | v2 = values2[i2] 72 | else: 73 | v2 = None 74 | 75 | values.append(v2) 76 | else: 77 | values.append(v1) 78 | 79 | t += step 80 | 81 | return (time_info, values) 82 | -------------------------------------------------------------------------------- /graphite_api/render/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brutasse/graphite-api/0886b7adcf985a1e8bcb084f6dd1dc166a3f3dff/graphite_api/render/__init__.py -------------------------------------------------------------------------------- /graphite_api/render/attime.py: -------------------------------------------------------------------------------- 1 | """Copyright 2008 Orbitz WorldWide 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | See the License for the specific language governing permissions and 13 | limitations under the License.""" 14 | from datetime import datetime, timedelta 15 | from time import daylight 16 | 17 | import pytz 18 | 19 | months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 20 | 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'] 21 | weekdays = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat'] 22 | 23 | 24 | def parseATTime(s, tzinfo=None, now=None): 25 | if tzinfo is None: 26 | from ..app import app 27 | tzinfo = pytz.timezone(app.config['TIME_ZONE']) 28 | s = s.strip().lower().replace('_', '').replace(',', '').replace(' ', '') 29 | if s.isdigit(): 30 | if ( 31 | len(s) == 8 and 32 | int(s[:4]) > 1900 and 33 | int(s[4:6]) < 13 and 34 | int(s[6:]) < 32 35 | ): 36 | pass # Fall back because its not a timestamp, its YYYYMMDD form 37 | else: 38 | return datetime.fromtimestamp(int(s), tzinfo) 39 | elif ':' in s and len(s) == 13: 40 | return tzinfo.localize(datetime.strptime(s, '%H:%M%Y%m%d'), daylight) 41 | if '+' in s: 42 | ref, offset = s.split('+', 1) 43 | offset = '+' + offset 44 | elif '-' in s: 45 | ref, offset = s.split('-', 1) 46 | offset = '-' + offset 47 | else: 48 | ref, offset = s, '' 49 | return (parseTimeReference(ref or now) + 50 | parseTimeOffset(offset)).astimezone(tzinfo) 51 | 52 | 53 | def parseTimeReference(ref): 54 | if isinstance(ref, datetime): 55 | return ref 56 | if not ref or ref == 'now': 57 | return datetime.utcnow().replace(tzinfo=pytz.utc) 58 | 59 | # Time-of-day reference 60 | i = ref.find(':') 61 | hour, min = 0, 0 62 | if i != -1: 63 | hour = int(ref[:i]) 64 | min = int(ref[i+1:i+3]) 65 | ref = ref[i+3:] 66 | if ref[:2] == 'am': 67 | ref = ref[2:] 68 | elif ref[:2] == 'pm': 69 | hour = (hour + 12) % 24 70 | ref = ref[2:] 71 | if ref.startswith('noon'): 72 | hour, min = 12, 0 73 | ref = ref[4:] 74 | elif ref.startswith('midnight'): 75 | hour, min = 0, 0 76 | ref = ref[8:] 77 | elif ref.startswith('teatime'): 78 | hour, min = 16, 0 79 | ref = ref[7:] 80 | 81 | refDate = datetime.utcnow().replace(hour=hour, minute=min, second=0, 82 | tzinfo=pytz.utc) 83 | 84 | # Day reference 85 | if ref in ('yesterday', 'today', 'tomorrow'): # yesterday, today, tomorrow 86 | if ref == 'yesterday': 87 | refDate = refDate - timedelta(days=1) 88 | if ref == 'tomorrow': 89 | refDate = refDate + timedelta(days=1) 90 | elif ref.count('/') == 2: # MM/DD/YY[YY] 91 | m, d, y = map(int, ref.split('/')) 92 | if y < 1900: 93 | y += 1900 94 | if y < 1970: 95 | y += 100 96 | refDate = replace_date(refDate, y, m, d) 97 | 98 | elif len(ref) == 8 and ref.isdigit(): # YYYYMMDD 99 | refDate = replace_date(refDate, int(ref[:4]), int(ref[4:6]), 100 | int(ref[6:8])) 101 | 102 | elif ref[:3] in months: # MonthName DayOfMonth 103 | month = months.index(ref[:3]) + 1 104 | if ref[-2:].isdigit(): 105 | day = int(ref[-2]) 106 | elif ref[-1:].isdigit(): 107 | day = int(ref[-1:]) 108 | else: 109 | raise Exception("Day of month required after month name") 110 | refDate = replace_date(refDate, None, month, day) 111 | elif ref[:3] in weekdays: # DayOfWeek (Monday, etc) 112 | todayDayName = refDate.strftime("%a").lower()[:3] 113 | today = weekdays.index(todayDayName) 114 | twoWeeks = weekdays * 2 115 | dayOffset = today - twoWeeks.index(ref[:3]) 116 | if dayOffset < 0: 117 | dayOffset += 7 118 | refDate -= timedelta(days=dayOffset) 119 | elif ref: 120 | raise Exception("Unknown day reference") 121 | return refDate 122 | 123 | 124 | def replace_date(date, year, month, day): 125 | if year is not None: 126 | try: 127 | date = 
date.replace(year=year) 128 | except ValueError: # Feb 29. 129 | date = date.replace(year=year, day=28) 130 | try: 131 | date = date.replace(month=month) 132 | date = date.replace(day=day) 133 | except ValueError: # day out of range for month, or vice versa 134 | date = date.replace(day=day) 135 | date = date.replace(month=month) 136 | return date 137 | 138 | 139 | def parseTimeOffset(offset): 140 | if not offset: 141 | return timedelta() 142 | 143 | t = timedelta() 144 | 145 | if offset[0].isdigit(): 146 | sign = 1 147 | else: 148 | sign = {'+': 1, '-': -1}[offset[0]] 149 | offset = offset[1:] 150 | 151 | while offset: 152 | i = 1 153 | while offset[:i].isdigit() and i <= len(offset): 154 | i += 1 155 | num = int(offset[:i-1]) 156 | offset = offset[i-1:] 157 | i = 1 158 | while offset[:i].isalpha() and i <= len(offset): 159 | i += 1 160 | unit = offset[:i-1] 161 | offset = offset[i-1:] 162 | unitString = getUnitString(unit) 163 | if unitString == 'months': 164 | unitString = 'days' 165 | num = num * 30 166 | if unitString == 'years': 167 | unitString = 'days' 168 | num = num * 365 169 | t += timedelta(**{unitString: sign * num}) 170 | 171 | return t 172 | 173 | 174 | def getUnitString(s): 175 | if s.startswith('s'): 176 | return 'seconds' 177 | if s.startswith('min'): 178 | return 'minutes' 179 | if s.startswith('h'): 180 | return 'hours' 181 | if s.startswith('d'): 182 | return 'days' 183 | if s.startswith('w'): 184 | return 'weeks' 185 | if s.startswith('mon'): 186 | return 'months' 187 | if s.startswith('y'): 188 | return 'years' 189 | raise Exception("Invalid offset unit '%s'" % s) 190 | -------------------------------------------------------------------------------- /graphite_api/render/datalib.py: -------------------------------------------------------------------------------- 1 | """Copyright 2008 Orbitz WorldWide 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | See the License for the specific language governing permissions and 13 | limitations under the License.""" 14 | from collections import defaultdict 15 | 16 | from structlog import get_logger 17 | 18 | from ..utils import epoch 19 | 20 | logger = get_logger() 21 | 22 | 23 | class TimeSeries(list): 24 | def __init__(self, name, start, end, step, values, consolidate='average'): 25 | list.__init__(self, values) 26 | self.name = name 27 | self.start = start 28 | self.end = end 29 | self.step = step 30 | self.consolidationFunc = consolidate 31 | self.valuesPerPoint = 1 32 | self.options = {} 33 | self.pathExpression = name 34 | 35 | def __eq__(self, other): 36 | if isinstance(other, TimeSeries): 37 | color_eq = True 38 | if hasattr(self, 'color'): 39 | if hasattr(other, 'color'): 40 | color_eq = (self.color == other.color) 41 | else: 42 | color_eq = False 43 | elif hasattr(other, 'color'): 44 | color_eq = False 45 | 46 | return ((self.name, self.start, self.step, self.consolidationFunc, 47 | self.valuesPerPoint, self.options) == 48 | (other.name, other.start, other.step, 49 | other.consolidationFunc, other.valuesPerPoint, 50 | other.options)) and list.__eq__(self, other) and color_eq 51 | return False 52 | 53 | def __iter__(self): 54 | if self.valuesPerPoint > 1: 55 | return self.__consolidatingGenerator(list.__iter__(self)) 56 | else: 57 | return list.__iter__(self) 58 | 59 | def consolidate(self, valuesPerPoint): 60 | self.valuesPerPoint = int(valuesPerPoint) 61 | 62 | def __consolidatingGenerator(self, gen): 63 | buf = [] 64 | for x in gen: 65 | buf.append(x) 66 | if len(buf) == self.valuesPerPoint: 67 | while None in buf: 68 | buf.remove(None) 69 | if buf: 70 | yield self.__consolidate(buf) 71 | buf = [] 72 | else: 73 | yield None 74 | while None in buf: 75 | buf.remove(None) 76 | if buf: 77 | yield self.__consolidate(buf) 78 | else: 79 | yield None 80 | return 81 | 82 | def __consolidate(self, values): 83 | usable = [v for v in values if v is not None] 84 | if not usable: 85 | return None 86 | if self.consolidationFunc == 'sum': 87 | return sum(usable) 88 | if self.consolidationFunc == 'average': 89 | return float(sum(usable)) / len(usable) 90 | if self.consolidationFunc == 'max': 91 | return max(usable) 92 | if self.consolidationFunc == 'min': 93 | return min(usable) 94 | raise Exception( 95 | "Invalid consolidation function: '%s'" % self.consolidationFunc) 96 | 97 | def __repr__(self): 98 | return 'TimeSeries(name=%s, start=%s, end=%s, step=%s)' % ( 99 | self.name, self.start, self.end, self.step) 100 | 101 | 102 | class DataStore(object): 103 | """ 104 | Simple object to store results of multi fetches. 105 | Also aids in looking up data by pathExpressions. 
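    Usage (editor's sketch, not part of the original source):

        store = DataStore()
        store.add_data('web1.cpu', (60, 180, 60), [1, 2], ['web*.cpu'])
        store.get_paths('web*.cpu')                # -> ['web1.cpu']
        store.get_series_list('web*.cpu')[0].name  # -> 'web1.cpu'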
106 | """ 107 | def __init__(self): 108 | self.paths = defaultdict(set) 109 | self.data = defaultdict(list) 110 | 111 | def get_paths(self, path_expr): 112 | """ 113 | Returns all paths found for path_expr 114 | """ 115 | return sorted(self.paths[path_expr]) 116 | 117 | def add_data(self, path, time_info, data, exprs): 118 | """ 119 | Stores data before it can be put into a time series 120 | """ 121 | # Dont add if empty 122 | if not nonempty(data): 123 | for d in self.data[path]: 124 | if nonempty(d['values']): 125 | return 126 | 127 | # Add data to path 128 | for expr in exprs: 129 | self.paths[expr].add(path) 130 | self.data[path].append({ 131 | 'time_info': time_info, 132 | 'values': data 133 | }) 134 | 135 | def get_series_list(self, path_expr): 136 | series_list = [] 137 | for path in self.get_paths(path_expr): 138 | for data in self.data.get(path): 139 | start, end, step = data['time_info'] 140 | series = TimeSeries(path, start, end, step, data['values']) 141 | series.pathExpression = path_expr 142 | series_list.append(series) 143 | return series_list 144 | 145 | 146 | def fetchData(requestContext, pathExprs): 147 | from ..app import app 148 | startTime = int(epoch(requestContext['startTime'])) 149 | endTime = int(epoch(requestContext['endTime'])) 150 | if 'now' in requestContext: 151 | now = int(epoch(requestContext['now'])) 152 | else: 153 | now = None 154 | 155 | # Convert to list if given single path 156 | if not isinstance(pathExprs, list): 157 | pathExprs = [pathExprs] 158 | 159 | data_store = DataStore() 160 | multi_nodes = defaultdict(list) 161 | single_nodes = [] 162 | 163 | path_to_exprs = defaultdict(list) 164 | 165 | # Group nodes that support multiple fetches 166 | for pathExpr in pathExprs: 167 | for node in app.store.find(pathExpr, startTime, endTime): 168 | if not node.is_leaf: 169 | continue 170 | if node.path not in path_to_exprs: 171 | if hasattr(node, '__fetch_multi__'): 172 | multi_nodes[node.__fetch_multi__].append(node) 173 | else: 174 | single_nodes.append(node) 175 | path_to_exprs[node.path].append(pathExpr) 176 | 177 | # Multi fetches 178 | for finder in app.store.finders: 179 | if not hasattr(finder, '__fetch_multi__'): 180 | continue 181 | nodes = multi_nodes[finder.__fetch_multi__] 182 | if not nodes: 183 | continue 184 | try: 185 | time_info, series = finder.fetch_multi(nodes, startTime, endTime, 186 | now, requestContext) 187 | except TypeError: 188 | time_info, series = finder.fetch_multi(nodes, startTime, endTime) 189 | for path, values in series.items(): 190 | data_store.add_data(path, time_info, values, 191 | path_to_exprs[path]) 192 | 193 | # Single fetches 194 | fetches = [ 195 | (node.path, node.fetch(startTime, endTime, now, requestContext)) 196 | for node in single_nodes 197 | ] 198 | for path, results in fetches: 199 | if not results: 200 | logger.info("no results", path=path, start=startTime, 201 | end=endTime) 202 | continue 203 | 204 | try: 205 | time_info, values = results 206 | except ValueError as e: 207 | raise Exception("could not parse timeInfo/values from metric " 208 | "'%s': %s" % (path, e)) 209 | data_store.add_data(path, time_info, values, path_to_exprs[path]) 210 | 211 | return data_store 212 | 213 | 214 | def nonempty(series): 215 | for value in series: 216 | if value is not None: 217 | return True 218 | return False 219 | -------------------------------------------------------------------------------- /graphite_api/render/grammar.py: -------------------------------------------------------------------------------- 1 | from 
distutils.version import StrictVersion 2 | 3 | from pyparsing import ( 4 | __version__, alphanums, alphas, CaselessKeyword, CaselessLiteral, Combine, 5 | delimitedList, FollowedBy, Forward, Group, LineEnd, Literal, OneOrMore, 6 | Optional, printables, quotedString, Regex, Word, ZeroOrMore, 7 | ) 8 | 9 | grammar = Forward() 10 | 11 | expression = Forward() 12 | 13 | # Literals 14 | intNumber = Regex(r'-?\d+')('integer') 15 | 16 | floatNumber = Regex(r'-?\d+\.\d+')('float') 17 | 18 | sciNumber = Combine( 19 | (floatNumber | intNumber) + CaselessLiteral('e') + intNumber 20 | )('scientific') 21 | 22 | aString = quotedString('string') 23 | 24 | # Use lookahead to match only numbers in a list (can't remember why this 25 | # is necessary) 26 | afterNumber = FollowedBy(",") ^ FollowedBy(")") ^ FollowedBy(LineEnd()) 27 | number = Group( 28 | (sciNumber + afterNumber) | 29 | (floatNumber + afterNumber) | 30 | (intNumber + afterNumber) 31 | )('number') 32 | 33 | boolean = Group( 34 | CaselessKeyword("true") | 35 | CaselessKeyword("false") 36 | )('boolean') 37 | 38 | argname = Word(alphas + '_', alphanums + '_')('argname') 39 | funcname = Word(alphas + '_', alphanums + '_')('funcname') 40 | 41 | # Symbols 42 | leftParen = Literal('(').suppress() 43 | rightParen = Literal(')').suppress() 44 | comma = Literal(',').suppress() 45 | equal = Literal('=').suppress() 46 | 47 | # Function calls 48 | 49 | # Symbols 50 | leftBrace = Literal('{') 51 | rightBrace = Literal('}') 52 | leftParen = Literal('(').suppress() 53 | rightParen = Literal(')').suppress() 54 | comma = Literal(',').suppress() 55 | equal = Literal('=').suppress() 56 | backslash = Literal('\\').suppress() 57 | 58 | symbols = '''(){},=.'"\\''' 59 | arg = Group( 60 | boolean | 61 | number | 62 | aString | 63 | expression 64 | )('args*') 65 | kwarg = Group(argname + equal + arg)('kwargs*') 66 | 67 | # lookahead to prevent failing on equals 68 | args = delimitedList(~kwarg + arg) 69 | kwargs = delimitedList(kwarg) 70 | 71 | call = Group( 72 | funcname + leftParen + 73 | Optional( 74 | args + Optional( 75 | comma + kwargs 76 | ) 77 | ) + rightParen 78 | )('call') 79 | 80 | # Metric pattern (aka. 
pathExpression) 81 | validMetricChars = ''.join((set(printables) - set(symbols))) 82 | escapedChar = backslash + Word(symbols, exact=1) 83 | partialPathElem = Combine( 84 | OneOrMore( 85 | escapedChar | Word(validMetricChars) 86 | ) 87 | ) 88 | 89 | matchEnum = Combine( 90 | leftBrace + 91 | delimitedList(partialPathElem, combine=True) + 92 | rightBrace 93 | ) 94 | 95 | pathElement = Combine( 96 | Group(partialPathElem | matchEnum) + 97 | ZeroOrMore(matchEnum | partialPathElem) 98 | ) 99 | pathExpression = delimitedList(pathElement, 100 | delim='.', combine=True)('pathExpression') 101 | 102 | litarg = Group( 103 | number | aString 104 | )('args*') 105 | litkwarg = Group(argname + equal + litarg)('kwargs*') 106 | 107 | # lookahead to prevent failing on equals 108 | litargs = delimitedList(~litkwarg + litarg) 109 | litkwargs = delimitedList(litkwarg) 110 | 111 | template = Group( 112 | Literal('template') + leftParen + 113 | (call | pathExpression) + 114 | Optional(comma + (litargs | litkwargs)) + 115 | rightParen 116 | )('template') 117 | 118 | if StrictVersion(__version__) >= StrictVersion('2.0.0'): 119 | expression <<= Group(template | call | pathExpression)('expression') 120 | grammar <<= expression 121 | else: 122 | expression << (Group(template | call | pathExpression)('expression')) 123 | grammar << expression 124 | -------------------------------------------------------------------------------- /graphite_api/storage.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from collections import defaultdict 4 | 5 | from .intervals import Interval 6 | from .node import LeafNode 7 | from .readers import MultiReader 8 | from .utils import is_pattern 9 | 10 | 11 | class Store(object): 12 | def __init__(self, finders=None): 13 | self.finders = finders 14 | 15 | def find(self, pattern, startTime=None, endTime=None, local=True): 16 | query = FindQuery(pattern, startTime, endTime) 17 | 18 | matching_nodes = set() 19 | 20 | # Search locally 21 | for finder in self.finders: 22 | for node in finder.find_nodes(query): 23 | matching_nodes.add(node) 24 | 25 | # Group matching nodes by their path 26 | nodes_by_path = defaultdict(list) 27 | for node in matching_nodes: 28 | nodes_by_path[node.path].append(node) 29 | 30 | # Reduce matching nodes for each path to a minimal set 31 | found_branch_nodes = set() 32 | 33 | for path, nodes in sorted(nodes_by_path.items(), key=lambda k: k[0]): 34 | leaf_nodes = set() 35 | 36 | # First we dispense with the BranchNodes 37 | for node in nodes: 38 | if node.is_leaf: 39 | leaf_nodes.add(node) 40 | elif node.path not in found_branch_nodes: 41 | # TODO need to filter branch nodes based on requested 42 | # interval... how?!?!? 
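                    # (Editor's note: branch nodes are deduplicated by path
                    # and yielded directly; leaf nodes sharing a path are
                    # wrapped in a single MultiReader-backed LeafNode below.)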
43 | yield node 44 | found_branch_nodes.add(node.path) 45 | 46 | if not leaf_nodes: 47 | continue 48 | 49 | if len(leaf_nodes) == 1: 50 | yield leaf_nodes.pop() 51 | elif len(leaf_nodes) > 1: 52 | reader = MultiReader(leaf_nodes) 53 | yield LeafNode(path, reader) 54 | 55 | 56 | class FindQuery(object): 57 | def __init__(self, pattern, startTime, endTime): 58 | self.pattern = pattern 59 | self.startTime = startTime 60 | self.endTime = endTime 61 | self.isExact = is_pattern(pattern) 62 | self.interval = Interval( 63 | float('-inf') if startTime is None else startTime, 64 | float('inf') if endTime is None else endTime) 65 | 66 | def __repr__(self): 67 | if self.startTime is None: 68 | startString = '*' 69 | else: 70 | startString = time.ctime(self.startTime) 71 | 72 | if self.endTime is None: 73 | endString = '*' 74 | else: 75 | endString = time.ctime(self.endTime) 76 | 77 | return '' % (self.pattern, startString, 78 | endString) 79 | -------------------------------------------------------------------------------- /graphite_api/utils.py: -------------------------------------------------------------------------------- 1 | """Copyright 2008 Orbitz WorldWide 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License.""" 14 | import calendar 15 | import hashlib 16 | 17 | import pytz 18 | 19 | from flask import request 20 | 21 | 22 | def is_pattern(s): 23 | return '*' in s or '?' 
in s or '[' in s or '{' in s 24 | 25 | 26 | class RequestParams(object): 27 | """Dict-like structure that allows accessing request params 28 | whatever their origin (json body, form body, request args).""" 29 | 30 | def __getitem__(self, key): 31 | json = request_json() 32 | if json and key in json: 33 | return json[key] 34 | if key in request.form: 35 | return request.form.getlist(key)[-1] 36 | if key in request.args: 37 | return request.args.getlist(key)[-1] 38 | raise KeyError 39 | 40 | def __contains__(self, key): 41 | try: 42 | self[key] 43 | return True 44 | except KeyError: 45 | return False 46 | 47 | def get(self, key, default=None): 48 | try: 49 | return self[key] 50 | except KeyError: 51 | return default 52 | 53 | def getlist(self, key): 54 | json = request_json() 55 | if json and key in json: 56 | value = self[key] 57 | if not isinstance(value, list): 58 | value = [value] 59 | return value 60 | if key in request.form: 61 | return request.form.getlist(key) 62 | return request.args.getlist(key) 63 | 64 | def keys(self): 65 | keys = set() 66 | json = request_json() 67 | if json: 68 | keys.update(json.keys()) 69 | if request.form: 70 | keys.update(request.form.keys()) 71 | keys.update(request.args.keys()) 72 | return keys 73 | 74 | 75 | RequestParams = RequestParams() 76 | 77 | 78 | def request_json(): 79 | if hasattr(request, 'get_json'): 80 | return request.get_json() 81 | else: 82 | return request.json 83 | 84 | 85 | def hash_request(): 86 | keys = set() 87 | json = request_json() 88 | if json: 89 | keys.update(json.keys()) 90 | if request.form: 91 | keys.update(request.form.keys()) 92 | keys.update(request.args.keys()) 93 | params = u",".join([ 94 | u"{0}={1}".format(key, u"&".join(sorted(RequestParams.getlist(key)))) 95 | for key in sorted(keys) if not key.startswith('_') 96 | ]) 97 | md5 = hashlib.md5() 98 | md5.update(params.encode('utf-8')) 99 | return md5.hexdigest() 100 | 101 | 102 | def to_seconds(delta): 103 | """ 104 | Convert a timedelta object into seconds 105 | (same as delta.total_seconds() in Python 2.7+) 106 | """ 107 | return abs(delta.seconds + delta.days * 86400) 108 | 109 | 110 | def epoch(dt): 111 | """ 112 | Returns the epoch timestamp of a timezone-aware datetime object. 
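    Example (editor's sketch):

        epoch(datetime(1970, 1, 2, tzinfo=pytz.utc))   # -> 86400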
113 | """ 114 | return calendar.timegm(dt.astimezone(pytz.utc).timetuple()) 115 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | mock 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Flask 2 | Flask-Cache 3 | cairocffi 4 | pyparsing>=1.5.7 5 | pytz 6 | pyyaml 7 | raven[flask] 8 | six 9 | structlog 10 | tzlocal 11 | scandir 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | exclude = _vendor,.tox,wsp 6 | import-order-style=smarkets 7 | application-import-names= 8 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | import sys 3 | 4 | from setuptools import setup, find_packages 5 | 6 | install_requires = [ 7 | 'Flask', 8 | 'PyYAML', 9 | 'cairocffi', 10 | 'pyparsing>=1.5.7', 11 | 'pytz', 12 | 'six', 13 | 'structlog', 14 | 'tzlocal', 15 | ] 16 | 17 | if sys.version_info < (3, 5): 18 | install_requires.append('scandir') 19 | 20 | with open('README.rst') as f: 21 | long_description = f.read() 22 | 23 | setup( 24 | name='graphite-api', 25 | version='1.1.3', 26 | url='https://github.com/brutasse/graphite-api', 27 | author="Bruno Renié, based on Chris Davis's graphite-web", 28 | author_email='bruno@renie.fr', 29 | license='Apache Software License 2.0', 30 | description=('Graphite-web, without the interface. 
' 31 | 'Just the rendering HTTP API.'), 32 | long_description=long_description, 33 | packages=find_packages(exclude=['tests']), 34 | include_package_data=True, 35 | install_requires=install_requires, 36 | extras_require={ 37 | 'sentry': ['raven[flask]'], 38 | 'cyanite': ['cyanite'], 39 | 'cache': ['Flask-Cache'], 40 | 'statsd': ['statsd'], 41 | }, 42 | zip_safe=False, 43 | platforms='any', 44 | classifiers=( 45 | 'Development Status :: 4 - Beta', 46 | 'Environment :: Web Environment', 47 | 'Framework :: Flask', 48 | 'Intended Audience :: Developers', 49 | 'Intended Audience :: System Administrators', 50 | 'License :: OSI Approved :: Apache Software License', 51 | 'Operating System :: OS Independent', 52 | 'Programming Language :: Python', 53 | 'Programming Language :: Python :: 2', 54 | 'Programming Language :: Python :: 2.6', 55 | 'Programming Language :: Python :: 2.7', 56 | 'Programming Language :: Python :: 3', 57 | 'Programming Language :: Python :: 3.2', 58 | 'Programming Language :: Python :: 3.3', 59 | 'Programming Language :: Python :: 3.4', 60 | 'Topic :: Scientific/Engineering :: Information Analysis', 61 | 'Topic :: Scientific/Engineering :: Visualization', 62 | 'Topic :: System :: Monitoring', 63 | ), 64 | test_suite='tests', 65 | ) 66 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import shutil 4 | from logging.config import dictConfig 5 | 6 | os.environ.setdefault( 7 | 'GRAPHITE_API_CONFIG', 8 | os.path.join(os.path.dirname(__file__), 'conf.yaml')) # noqa 9 | 10 | try: 11 | import unittest2 as unittest 12 | except ImportError: 13 | import unittest 14 | 15 | from graphite_api._vendor import whisper 16 | from graphite_api.app import app 17 | from graphite_api.finders.whisper import WhisperFinder 18 | from graphite_api.storage import Store 19 | 20 | 21 | DATA_DIR = '/tmp/graphite-api-data.{0}'.format(os.getpid()) 22 | WHISPER_DIR = os.path.join(DATA_DIR, 'whisper') 23 | SEARCH_INDEX = os.path.join(DATA_DIR, 'index') 24 | 25 | 26 | dictConfig({ 27 | 'version': 1, 28 | 'handlers': { 29 | 'raw': { 30 | 'level': 'DEBUG', 31 | 'class': 'logging.NullHandler', 32 | }, 33 | }, 34 | }) 35 | 36 | 37 | class TestCase(unittest.TestCase): 38 | def _cleanup(self): 39 | shutil.rmtree(DATA_DIR, ignore_errors=True) 40 | 41 | def setUp(self): 42 | self._cleanup() 43 | os.makedirs(WHISPER_DIR) 44 | app.config['TESTING'] = True 45 | whisper_conf = {'whisper': {'directories': [WHISPER_DIR]}} 46 | app.config['GRAPHITE']['store'] = Store([WhisperFinder(whisper_conf)]) 47 | self.app = app.test_client() 48 | 49 | def tearDown(self): 50 | self._cleanup() 51 | 52 | def assertJSON(self, response, data, status_code=200): 53 | self.assertEqual(response.status_code, status_code) 54 | self.assertEqual(json.loads(response.data.decode('utf-8')), data) 55 | 56 | def write_series(self, series, retentions=((1, 180),)): 57 | file_name = os.path.join( 58 | WHISPER_DIR, 59 | '{0}.wsp'.format(series.pathExpression.replace('.', os.sep))) 60 | dir_name = os.path.dirname(file_name) 61 | if not os.path.isdir(dir_name): 62 | os.makedirs(dir_name) 63 | whisper.create(file_name, retentions) 64 | data = [] 65 | for index, value in enumerate(series): 66 | if value is None: 67 | continue 68 | data.append((series.start + index * series.step, value)) 69 | whisper.update_many(file_name, data) 70 | 
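# --- Editor's note: illustrative sketch, not part of the original source. ---
# A typical test writes a synthetic series into the temporary whisper store
# via write_series() and exercises the HTTP API through the Flask test
# client, e.g.:
#
#     import time
#     from graphite_api.render.datalib import TimeSeries
#
#     class RenderTest(TestCase):
#         def test_render_json(self):
#             now = int(time.time())
#             self.write_series(TimeSeries('foo.bar', now - 3, now, 1,
#                                          [1, 2, 3]))
#             response = self.app.get('/render', query_string={
#                 'target': 'foo.bar', 'format': 'json'})
#             self.assertEqual(response.status_code, 200)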
-------------------------------------------------------------------------------- /tests/conf.yaml: -------------------------------------------------------------------------------- 1 | search_index: /tmp/graphite-api-index 2 | time_zone: UTC 3 | allowed_origins: 4 | - example.com 5 | - foo.example.com:8888 6 | cache: 7 | type: simple 8 | default_timeout: 0 9 | logging: 10 | version: 1 11 | disable_existing_loggers: true 12 | handlers: 13 | stdout: 14 | class: logging.StreamHandler 15 | loggers: 16 | graphite_api: 17 | handlers: 18 | - stdout 19 | propagate: true 20 | level: INFO 21 | -------------------------------------------------------------------------------- /tests/data/index: -------------------------------------------------------------------------------- 1 | collectd.test.df-root.df_complex-free 2 | collectd.test.df-root.df_complex-used.value 3 | collectd.test.df-root.df_complex-reserved 4 | collectd.test.df-root.df_complex-reserved.value 5 | collectd.test.df-root.df_complex-free.value 6 | collectd.test.df-root.df_complex-used 7 | collectd.test.load.load.midterm 8 | collectd.test.load.load.shortterm 9 | collectd.test.load.load.longterm 10 | -------------------------------------------------------------------------------- /tests/test_attime.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import time 3 | import pytz 4 | 5 | from graphite_api.render.attime import parseATTime 6 | 7 | from . import TestCase 8 | 9 | 10 | class AtTestCase(TestCase): 11 | default_tz = pytz.utc 12 | specified_tz = pytz.timezone('America/Los_Angeles') 13 | 14 | def test_absolute_time(self): 15 | time_string = '12:0020150308' 16 | expected_time = self.default_tz.localize( 17 | datetime.datetime.strptime(time_string, '%H:%M%Y%m%d')) 18 | actual_time = parseATTime(time_string) 19 | self.assertEqual(actual_time, expected_time) 20 | 21 | expected_time = self.specified_tz.localize( 22 | datetime.datetime.strptime(time_string, '%H:%M%Y%m%d')) 23 | actual_time = parseATTime(time_string, self.specified_tz) 24 | self.assertEqual(actual_time, expected_time) 25 | 26 | def test_parse(self): 27 | for value in [ 28 | str(int(time.time())), 29 | '20140319', 30 | '20130319+1y', 31 | '20130319+1mon', 32 | '20130319+1w', 33 | '12:12_20130319', 34 | '3:05am_20130319', 35 | '3:05pm_20130319', 36 | 'noon20130319', 37 | 'midnight20130319', 38 | 'teatime20130319', 39 | 'yesterday', 40 | 'tomorrow', 41 | '03/19/2014', 42 | '03/19/1800', 43 | '03/19/1950', 44 | 'feb 27', 45 | 'mar 5', 46 | 'mon', 47 | 'tue', 48 | 'wed', 49 | 'thu', 50 | 'fri', 51 | 'sat', 52 | 'sun', 53 | '10:00', 54 | '20160229-1year', 55 | '20160229-4years', 56 | '20160229-1month', 57 | '20130228-1year', 58 | '20130228-4years', 59 | '20130228-1month', 60 | ]: 61 | self.assertIsInstance(parseATTime(value), datetime.datetime) 62 | 63 | for value in [ 64 | '20130319+1foo', 65 | 'mar', 66 | 'wat', 67 | ]: 68 | with self.assertRaises(Exception): 69 | parseATTime(value) 70 | -------------------------------------------------------------------------------- /tests/test_carbonlink.py: -------------------------------------------------------------------------------- 1 | import struct 2 | 3 | try: 4 | from unittest.mock import patch 5 | except ImportError: 6 | from mock import patch 7 | 8 | from graphite_api import carbonlink 9 | from graphite_api.carbonlink import CarbonLinkPool, ConsistentHashRing 10 | from six.moves import cPickle as pickle 11 | 12 | from . 
import TestCase 13 | 14 | 15 | class CarbonLinkTestCase(TestCase): 16 | def test_allowed_modules(self): 17 | with self.assertRaises(pickle.UnpicklingError) as context: 18 | carbonlink.allowed_module('foo', 'bar') 19 | self.assertIn('Attempting to unpickle unsafe module foo', 20 | str(context.exception)) 21 | 22 | with self.assertRaises(pickle.UnpicklingError) as context: 23 | carbonlink.allowed_module('__builtin__', 'bar') 24 | self.assertIn('Attempting to unpickle unsafe class bar', 25 | str(context.exception)) 26 | 27 | self.assertIsNotNone(carbonlink.allowed_module('collections', 'deque')) 28 | self.assertIsNotNone(carbonlink.allowed_module('__builtin__', 'list')) 29 | 30 | 31 | class ConsistentHashRingTest(TestCase): 32 | def test_chr_compute_ring_position(self): 33 | hosts = [ 34 | ("127.0.0.1", "cache0"), 35 | ("127.0.0.1", "cache1"), 36 | ("127.0.0.1", "cache2"), 37 | ] 38 | hashring = ConsistentHashRing(hosts) 39 | self.assertEqual(hashring.compute_ring_position('hosts.worker1.cpu'), 40 | 64833) 41 | self.assertEqual(hashring.compute_ring_position('hosts.worker2.cpu'), 42 | 38509) 43 | 44 | def test_chr_add_node(self): 45 | hosts = [ 46 | ("127.0.0.1", "cache0"), 47 | ("127.0.0.1", "cache1"), 48 | ("127.0.0.1", "cache2"), 49 | ] 50 | hashring = ConsistentHashRing(hosts) 51 | self.assertEqual(hashring.nodes, set(hosts)) 52 | hashring.add_node(("127.0.0.1", "cache3")) 53 | hosts.insert(0, ("127.0.0.1", "cache3")) 54 | self.assertEqual(hashring.nodes, set(hosts)) 55 | self.assertEqual(hashring.nodes_len, 4) 56 | 57 | def test_chr_add_node_duplicate(self): 58 | hosts = [ 59 | ("127.0.0.1", "cache0"), 60 | ("127.0.0.1", "cache1"), 61 | ("127.0.0.1", "cache2"), 62 | ] 63 | hashring = ConsistentHashRing(hosts) 64 | self.assertEqual(hashring.nodes, set(hosts)) 65 | hashring.add_node(("127.0.0.1", "cache2")) 66 | self.assertEqual(hashring.nodes, set(hosts)) 67 | self.assertEqual(hashring.nodes_len, 3) 68 | 69 | def test_chr_remove_node(self): 70 | hosts = [ 71 | ("127.0.0.1", "cache0"), 72 | ("127.0.0.1", "cache1"), 73 | ("127.0.0.1", "cache2"), 74 | ] 75 | hashring = ConsistentHashRing(hosts) 76 | self.assertEqual(hashring.nodes, set(hosts)) 77 | hashring.remove_node(("127.0.0.1", "cache2")) 78 | hosts.pop() 79 | self.assertEqual(hashring.nodes, set(hosts)) 80 | self.assertEqual(hashring.nodes_len, 2) 81 | 82 | def test_chr_remove_node_missing(self): 83 | hosts = [ 84 | ("127.0.0.1", "cache0"), 85 | ("127.0.0.1", "cache1"), 86 | ("127.0.0.1", "cache2"), 87 | ] 88 | hashring = ConsistentHashRing(hosts) 89 | self.assertEqual(hashring.nodes, set(hosts)) 90 | hashring.remove_node(("127.0.0.1", "cache4")) 91 | self.assertEqual(hashring.nodes, set(hosts)) 92 | self.assertEqual(hashring.nodes_len, 3) 93 | 94 | def test_chr_get_node(self): 95 | hosts = [ 96 | ("127.0.0.1", "cache0"), 97 | ("127.0.0.1", "cache1"), 98 | ("127.0.0.1", "cache2"), 99 | ] 100 | hashring = ConsistentHashRing(hosts) 101 | node = hashring.get_node('hosts.worker1.cpu') 102 | self.assertEqual(node, ('127.0.0.1', 'cache2')) 103 | 104 | def test_chr_get_nodes(self): 105 | hosts = [ 106 | ("127.0.0.1", "cache0"), 107 | ("127.0.0.1", "cache1"), 108 | ("127.0.0.1", "cache2"), 109 | ] 110 | hashring = ConsistentHashRing(hosts) 111 | node = hashring.get_nodes('hosts.worker1.cpu') 112 | expected = [ 113 | ("127.0.0.1", "cache2"), 114 | ("127.0.0.1", "cache0"), 115 | ("127.0.0.1", "cache1"), 116 | ] 117 | self.assertEqual(node, expected) 118 | 119 | 120 | class ConsistentHashRingTestFNV1A(TestCase): 121 | def 
test_chr_compute_ring_position_fnv1a(self): 122 | hosts = [ 123 | ("127.0.0.1", "ba603c36342304ed77953f84ac4d357b"), 124 | ("127.0.0.2", "5dd63865534f84899c6e5594dba6749a"), 125 | ("127.0.0.3", "866a18b81f2dc4649517a1df13e26f28"), 126 | ] 127 | hashring = ConsistentHashRing(hosts, hash_type='fnv1a_ch') 128 | self.assertEqual(hashring.compute_ring_position('hosts.worker1.cpu'), 129 | 59573) 130 | self.assertEqual(hashring.compute_ring_position('hosts.worker2.cpu'), 131 | 35749) 132 | 133 | def test_chr_get_node_fnv1a(self): 134 | hosts = [ 135 | ("127.0.0.1", "ba603c36342304ed77953f84ac4d357b"), 136 | ("127.0.0.2", "5dd63865534f84899c6e5594dba6749a"), 137 | ("127.0.0.3", "866a18b81f2dc4649517a1df13e26f28"), 138 | ] 139 | hashring = ConsistentHashRing(hosts, hash_type='fnv1a_ch') 140 | self.assertEqual(hashring.get_node('hosts.worker1.cpu'), 141 | ('127.0.0.1', 'ba603c36342304ed77953f84ac4d357b')) 142 | self.assertEqual(hashring.get_node('hosts.worker2.cpu'), 143 | ('127.0.0.3', '866a18b81f2dc4649517a1df13e26f28')) 144 | 145 | 146 | class CarbonLinkPoolTest(TestCase): 147 | def test_clp_replication_factor(self): 148 | with self.assertRaises(Exception) as context: 149 | CarbonLinkPool(['127.0.0.1:2003'], replication_factor=2) 150 | self.assertIn('replication_factor=2 cannot exceed servers=1', 151 | str(context.exception)) 152 | 153 | def test_clp_requests(self): 154 | hosts = [ 155 | '10.0.0.1:2003:cache0', 156 | '10.0.0.2:2003:cache1', 157 | '10.0.0.3:2003:cache2', 158 | ] 159 | carbonlink = CarbonLinkPool(hosts, replication_factor=3) 160 | 161 | with patch('socket.socket'): 162 | for host in hosts: 163 | server, port, instance = host.split(':') 164 | conn = carbonlink.get_connection((server, instance)) 165 | conn.connect.assert_called_with((server, int(port))) 166 | carbonlink.connections[(server, instance)].add(conn) 167 | 168 | def mock_recv_query(size): 169 | data = pickle.dumps(dict(datapoints=[1, 2, 3])) 170 | if size == 4: 171 | return struct.pack('!I', len(data)) 172 | elif size == len(data): 173 | return data 174 | else: 175 | raise ValueError('unexpected size %s' % size) 176 | 177 | conn.recv.side_effect = mock_recv_query 178 | datapoints = carbonlink.query('hosts.worker1.cpu') 179 | self.assertEqual(datapoints, [1, 2, 3]) 180 | 181 | datapoints = carbonlink.query('carbon.send_to_all.request') 182 | self.assertEqual(datapoints, [1, 2, 3] * 3) 183 | 184 | def mock_recv_get_metadata(size): 185 | data = pickle.dumps(dict(value='foo')) 186 | if size == 4: 187 | return struct.pack('!I', len(data)) 188 | elif size == len(data): 189 | return data 190 | else: 191 | raise ValueError('unexpected size %s' % size) 192 | 193 | conn.recv.side_effect = mock_recv_get_metadata 194 | metadata = carbonlink.get_metadata('hosts.worker1.cpu', 'key') 195 | self.assertEqual(metadata, 'foo') 196 | 197 | def mock_recv_set_metadata(size): 198 | data = pickle.dumps(dict(old_value='foo', new_value='bar')) 199 | if size == 4: 200 | return struct.pack('!I', len(data)) 201 | elif size == len(data): 202 | return data 203 | else: 204 | raise ValueError('unexpected size %s' % size) 205 | 206 | conn.recv.side_effect = mock_recv_set_metadata 207 | results = carbonlink.set_metadata('hosts.worker1.cpu', 'foo', 'bar') 208 | self.assertEqual(results, {'old_value': 'foo', 'new_value': 'bar'}) 209 | -------------------------------------------------------------------------------- /tests/test_encoders.py: -------------------------------------------------------------------------------- 1 | from graphite_api.encoders import 
JSONEncoder 2 | 3 | from . import TestCase 4 | 5 | 6 | class EncoderTestCase(TestCase): 7 | def test_json_encoder(self): 8 | encoder = JSONEncoder() 9 | 10 | with self.assertRaises(TypeError): 11 | encoder.default(object()) 12 | 13 | self.assertEqual(encoder.default(dict({1: 2})), {1: 2}) 14 | self.assertEqual(encoder.default(set([4, 5, 6])), [4, 5, 6]) 15 | self.assertEqual(encoder.default(DummyObject()), [7, 8, 9]) 16 | 17 | 18 | class DummyObject(object): 19 | def tolist(self): 20 | return list([7, 8, 9]) 21 | -------------------------------------------------------------------------------- /tests/test_finders.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import os 3 | import random 4 | import shutil 5 | import time 6 | 7 | try: 8 | from unittest.mock import patch 9 | except ImportError: 10 | from mock import patch 11 | 12 | from graphite_api._vendor import whisper 13 | from graphite_api.app import app 14 | from graphite_api.finders.whisper import scandir 15 | from graphite_api.intervals import Interval, IntervalSet 16 | from graphite_api.node import BranchNode, LeafNode 17 | from graphite_api.storage import Store 18 | 19 | from . import TestCase, WHISPER_DIR 20 | 21 | 22 | class FinderTest(TestCase): 23 | 24 | def test_custom_finder(self): 25 | store = Store([DummyFinder()]) 26 | nodes = list(store.find("foo")) 27 | self.assertEqual(len(nodes), 1) 28 | self.assertEqual(nodes[0].path, 'foo') 29 | 30 | nodes = list(store.find('bar.*')) 31 | self.assertEqual(len(nodes), 10) 32 | node = nodes[0] 33 | self.assertEqual(node.path.split('.')[0], 'bar') 34 | 35 | time_info, series = node.fetch(100, 200) 36 | self.assertEqual(time_info, (100, 200, 10)) 37 | self.assertEqual(len(series), 10) 38 | 39 | def test_multi_finder(self): 40 | store = Store([DummyFinder(), DummyFinder()]) 41 | nodes = list(store.find("foo")) 42 | self.assertEqual(len(nodes), 1) 43 | self.assertEqual(nodes[0].path, 'foo') 44 | 45 | nodes = list(store.find('bar.*')) 46 | self.assertEqual(len(nodes), 10) 47 | node = nodes[0] 48 | self.assertEqual(node.path.split('.')[0], 'bar') 49 | 50 | time_info, series = node.fetch(100, 200) 51 | self.assertEqual(time_info, (100, 200, 10)) 52 | self.assertEqual(len(series), 10) 53 | 54 | 55 | class DummyReader(object): 56 | __slots__ = ('path',) 57 | 58 | def __init__(self, path): 59 | self.path = path 60 | 61 | def fetch(self, start_time, end_time): 62 | npoints = (end_time - start_time) // 10 63 | return (start_time, end_time, 10), [ 64 | random.choice([None, 1, 2, 3]) for i in range(npoints) 65 | ] 66 | 67 | def get_intervals(self): 68 | return IntervalSet([Interval(time.time() - 3600, time.time())]) 69 | 70 | 71 | class DummyFinder(object): 72 | def find_nodes(self, query): 73 | if query.pattern == 'foo': 74 | yield BranchNode('foo') 75 | 76 | elif query.pattern == 'bar.*': 77 | for i in range(10): 78 | path = 'bar.{0}'.format(i) 79 | yield LeafNode(path, DummyReader(path)) 80 | 81 | 82 | class WhisperFinderTest(TestCase): 83 | 84 | def scandir_mock(d): 85 | return scandir(d) 86 | 87 | @patch('graphite_api.finders.whisper.scandir', wraps=scandir_mock) 88 | def test_whisper_finder(self, scandir_mocked): 89 | for db in ( 90 | ('whisper_finder', 'foo.wsp'), 91 | ('whisper_finder', 'foo', 'bar', 'baz.wsp'), 92 | ('whisper_finder', 'bar', 'baz', 'baz.wsp'), 93 | ): 94 | db_path = os.path.join(WHISPER_DIR, *db) 95 | if not os.path.exists(os.path.dirname(db_path)): 96 | os.makedirs(os.path.dirname(db_path)) 97 | 
whisper.create(db_path, [(1, 60)]) 98 | 99 | try: 100 | store = app.config['GRAPHITE']['store'] 101 | scandir_mocked.call_count = 0 102 | nodes = store.find('whisper_finder.foo') 103 | self.assertEqual(len(list(nodes)), 2) 104 | self.assertEqual(scandir_mocked.call_count, 0) 105 | 106 | scandir_mocked.call_count = 0 107 | nodes = store.find('whisper_finder.foo.bar.baz') 108 | self.assertEqual(len(list(nodes)), 1) 109 | self.assertEqual(scandir_mocked.call_count, 0) 110 | scandir_mocked.call_count = 0 111 | nodes = store.find('whisper_finder.*.ba?.{baz,foo}') 112 | self.assertEqual(len(list(nodes)), 2) 113 | self.assertEqual(scandir_mocked.call_count, 5) 114 | 115 | scandir_mocked.call_count = 0 116 | nodes = store.find('whisper_finder.{foo,bar}.{baz,bar}.{baz,foo}') 117 | self.assertEqual(len(list(nodes)), 2) 118 | self.assertEqual(scandir_mocked.call_count, 5) 119 | 120 | scandir_mocked.call_count = 0 121 | nodes = store.find('whisper_finder.{foo}.bar.*') 122 | self.assertEqual(len(list(nodes)), 1) 123 | self.assertEqual(scandir_mocked.call_count, 2) 124 | 125 | scandir_mocked.call_count = 0 126 | nodes = store.find('whisper_finder.foo.{ba{r,z},baz}.baz') 127 | self.assertEqual(len(list(nodes)), 1) 128 | self.assertEqual(scandir_mocked.call_count, 1) 129 | 130 | scandir_mocked.call_count = 0 131 | nodes = store.find('whisper_finder.{foo,garbage}.bar.baz') 132 | self.assertEqual(len(list(nodes)), 1) 133 | self.assertEqual(scandir_mocked.call_count, 1) 134 | 135 | scandir_mocked.call_count = 0 136 | nodes = store.find('whisper_finder.{fo{o}}.bar.baz') 137 | self.assertEqual(len(list(nodes)), 1) 138 | self.assertEqual(scandir_mocked.call_count, 1) 139 | 140 | scandir_mocked.call_count = 0 141 | nodes = store.find('whisper_finder.foo{}.bar.baz') 142 | self.assertEqual(len(list(nodes)), 1) 143 | self.assertEqual(scandir_mocked.call_count, 1) 144 | 145 | scandir_mocked.call_count = 0 146 | nodes = store.find('whisper_finder.{fo,ba}{o}.bar.baz') 147 | self.assertEqual(len(list(nodes)), 1) 148 | self.assertEqual(scandir_mocked.call_count, 1) 149 | 150 | scandir_mocked.call_count = 0 151 | nodes = store.find('whisper_finder.{fo,ba}{o,o}.bar.baz') 152 | self.assertEqual(len(list(nodes)), 1) 153 | self.assertEqual(scandir_mocked.call_count, 1) 154 | 155 | scandir_mocked.call_count = 0 156 | nodes = store.find('whisper_finder.{fo,ba}{o,z}.bar.baz') 157 | self.assertEqual(len(list(nodes)), 1) 158 | self.assertEqual(scandir_mocked.call_count, 1) 159 | 160 | finally: 161 | scandir_mocked.call_count = 0 162 | 163 | @patch('graphite_api.finders.whisper.scandir', wraps=scandir_mock) 164 | def test_gzipped_whisper_finder(self, scandir_mocked): 165 | for db in ( 166 | ('gzwhisper_finder', 'foo.wsp'), 167 | ('gzwhisper_finder', 'foo', 'bar', 'baz.wsp'), 168 | ('gzwhisper_finder', 'bar', 'baz', 'baz.wsp'), 169 | ): 170 | db_path = os.path.join(WHISPER_DIR, *db) 171 | if not os.path.exists(os.path.dirname(db_path)): 172 | os.makedirs(os.path.dirname(db_path)) 173 | whisper.create(db_path, [(1, 60)]) 174 | with open(db_path, 'rb') as f_in: 175 | f_out = gzip.open("%s.gz" % db_path, 'wb') 176 | shutil.copyfileobj(f_in, f_out) 177 | f_out.close() 178 | os.remove(db_path) 179 | 180 | try: 181 | store = app.config['GRAPHITE']['store'] 182 | scandir_mocked.call_count = 0 183 | nodes = store.find('gzwhisper_finder.foo') 184 | self.assertEqual(len(list(nodes)), 2) 185 | self.assertEqual(scandir_mocked.call_count, 0) 186 | 187 | scandir_mocked.call_count = 0 188 | nodes = store.find('gzwhisper_finder.foo{}.bar.baz') 189 
| self.assertEqual(len(list(nodes)), 1) 190 | self.assertEqual(scandir_mocked.call_count, 1) 191 | 192 | finally: 193 | scandir_mocked.call_count = 0 194 | 195 | def test_globstar(self): 196 | store = app.config['GRAPHITE']['store'] 197 | query = "x.**.x" 198 | hits = ["x.x", "x._.x", "x._._.x"] 199 | misses = ["x.x.o", "o.x.x", "x._.x._.o", "o._.x._.x"] 200 | for path in hits + misses: 201 | db_path = os.path.join(WHISPER_DIR, path.replace(".", os.sep)) 202 | if not os.path.exists(os.path.dirname(db_path)): 203 | os.makedirs(os.path.dirname(db_path)) 204 | whisper.create(db_path + '.wsp', [(1, 60)]) 205 | 206 | paths = [node.path for node in store.find(query, local=True)] 207 | for hit in hits: 208 | self.assertIn(hit, paths) 209 | for miss in misses: 210 | self.assertNotIn(miss, paths) 211 | 212 | def test_multiple_globstars(self): 213 | store = app.config['GRAPHITE']['store'] 214 | query = "y.**.y.**.y" 215 | hits = [ 216 | "y.y.y", "y._.y.y", "y.y._.y", "y._.y._.y", 217 | "y._._.y.y", "y.y._._.y" 218 | ] 219 | misses = [ 220 | "y.o.y", "o.y.y", "y.y.o", "o.y.y.y", "y.y.y.o", 221 | "o._.y._.y", "y._.o._.y", "y._.y._.o" 222 | ] 223 | for path in hits + misses: 224 | db_path = os.path.join(WHISPER_DIR, path.replace(".", os.sep)) 225 | if not os.path.exists(os.path.dirname(db_path)): 226 | os.makedirs(os.path.dirname(db_path)) 227 | whisper.create(db_path + '.wsp', [(1, 60)]) 228 | 229 | paths = [node.path for node in store.find(query, local=True)] 230 | for hit in hits: 231 | self.assertIn(hit, paths) 232 | for miss in misses: 233 | self.assertNotIn(miss, paths) 234 | 235 | def test_terminal_globstar(self): 236 | store = app.config['GRAPHITE']['store'] 237 | query = "z.**" 238 | hits = ["z._", "z._._", "z._._._"] 239 | misses = ["z", "o._", "o.z._", "o._.z"] 240 | for path in hits + misses: 241 | db_path = os.path.join(WHISPER_DIR, path.replace(".", os.sep)) 242 | if not os.path.exists(os.path.dirname(db_path)): 243 | os.makedirs(os.path.dirname(db_path)) 244 | whisper.create(db_path + '.wsp', [(1, 60)]) 245 | 246 | paths = [node.path for node in store.find(query, local=True)] 247 | for hit in hits: 248 | self.assertIn(hit, paths) 249 | for miss in misses: 250 | self.assertNotIn(miss, paths) 251 | -------------------------------------------------------------------------------- /tests/test_http.py: -------------------------------------------------------------------------------- 1 | from . 
import TestCase
2 | 
3 | 
4 | class HttpTestCase(TestCase):
5 |     def test_cors(self):
6 |         response = self.app.options('/render')
7 |         self.assertFalse(
8 |             'Access-Control-Allow-Origin' in response.headers)
9 | 
10 |         response = self.app.options('/render', headers=(
11 |             ('Origin', 'https://example.com'),
12 |         ))
13 |         self.assertEqual(response.headers['Access-Control-Allow-Origin'],
14 |                          'https://example.com')
15 | 
16 |         response = self.app.options('/render', headers=(
17 |             ('Origin', 'http://foo.example.com:8888'),
18 |         ))
19 |         self.assertEqual(response.headers['Access-Control-Allow-Origin'],
20 |                          'http://foo.example.com:8888')
21 | 
22 |         response = self.app.options('/', headers=(
23 |             ('Origin', 'http://foo.example.com'),
24 |         ))
25 |         self.assertFalse(
26 |             'Access-Control-Allow-Origin' in response.headers)
27 | 
28 |     def test_trailing_slash(self):
29 |         response = self.app.get('/render?target=foo')
30 |         self.assertEqual(response.status_code, 200)
31 | 
32 |         response = self.app.get('/render/?target=foo')
33 |         self.assertEqual(response.status_code, 200)
34 | 
--------------------------------------------------------------------------------
/tests/test_intervals.py:
--------------------------------------------------------------------------------
1 | from graphite_api.intervals import Interval, IntervalSet, union_overlapping
2 | 
3 | from . import TestCase
4 | 
5 | 
6 | class IntervalTestCase(TestCase):
7 |     def test_interval(self):
8 |         with self.assertRaises(ValueError):
9 |             Interval(1, 0)
10 | 
11 |         i = Interval(0, 1)
12 |         j = Interval(1, 2)
13 |         k = Interval(0, 1)
14 |         l = Interval(0, 0)
15 |         self.assertNotEqual(i, j)
16 |         self.assertEqual(i, k)
17 |         self.assertEqual(hash(i), hash(k))
18 | 
19 |         with self.assertRaises(TypeError):
20 |             len(i)
21 | 
22 |         self.assertTrue(j > i)
23 | 
24 |         self.assertTrue(i)
25 |         self.assertFalse(l)
26 | 
27 |         self.assertEqual(repr(i), '<Interval: (0, 1)>')
28 | 
29 |         self.assertIsNone(i.intersect(j))
30 |         self.assertEqual(i.intersect(k), k)
31 |         self.assertTrue(i.overlaps(j))
32 |         self.assertEqual(i.union(j), Interval(0, 2))
33 | 
34 |         with self.assertRaises(TypeError):
35 |             j.union(l)
36 | 
37 |         self.assertEqual(union_overlapping([i, j, k, l]),
38 |                          [Interval(0, 2)])
39 | 
40 |     def test_interval_set(self):
41 |         i = Interval(0, 1)
42 |         j = Interval(1, 2)
43 | 
44 |         s = IntervalSet([i, j])
45 |         self.assertEqual(repr(s), '[<Interval: (0, 2)>]')
46 |         s = IntervalSet([i, j], disjoint=True)
47 | 
48 |         it = iter(s)
49 |         self.assertEqual(next(it), i)
50 |         self.assertEqual(next(it), j)
51 | 
52 |         self.assertTrue(s)
53 |         self.assertFalse(IntervalSet([]))
54 | 
55 |         self.assertEqual(s - IntervalSet([i]),
56 |                          IntervalSet([j]))
57 | 
58 |         self.assertFalse(IntervalSet([]).intersect(s))
59 | 
60 |         self.assertEqual(s.union(IntervalSet([Interval(3, 4)])),
61 |                          IntervalSet([Interval(3, 4), i, j]))
62 | 
--------------------------------------------------------------------------------
/tests/test_metrics.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os.path
3 | import time
4 | 
5 | from graphite_api._vendor import whisper
6 | 
7 | from . 
import TestCase, WHISPER_DIR 8 | 9 | 10 | class MetricsTests(TestCase): 11 | def _create_dbs(self, ts=None): 12 | ts = ts or int(time.time()) 13 | for db in ( 14 | ('test', 'foo.wsp'), 15 | ('test', 'wat', 'welp.wsp'), 16 | ('test', 'bar', 'baz.wsp'), 17 | ): 18 | db_path = os.path.join(WHISPER_DIR, *db) 19 | os.makedirs(os.path.dirname(db_path)) 20 | whisper.create(db_path, [(1, 60)]) 21 | whisper.update(db_path, 1, ts) 22 | whisper.update(db_path, 2, ts) 23 | 24 | def test_find(self): 25 | url = '/metrics/find' 26 | 27 | response = self.app.get(url) 28 | self.assertEqual(response.status_code, 400) 29 | 30 | response = self.app.get(url, query_string={'query': 'test'}) 31 | self.assertJSON(response, []) 32 | 33 | response = self.app.get(url, query_string={'query': 'test', 34 | 'format': 'completer'}) 35 | self.assertJSON(response, {'metrics': []}) 36 | 37 | response = self.app.get(url, query_string={'query': 'test', 38 | 'format': 'nodelist'}) 39 | self.assertJSON(response, {'nodes': []}) 40 | 41 | ts = int(time.time()) 42 | self._create_dbs(ts) 43 | 44 | for _url in ['/metrics/find', '/metrics']: 45 | response = self.app.get(_url, query_string={'query': 'test.*', 46 | 'format': 'treejson'}) 47 | self.assertJSON(response, [{ 48 | 'allowChildren': 1, 49 | 'expandable': 1, 50 | 'id': 'test.bar', 51 | 'leaf': 0, 52 | 'text': 'bar', 53 | }, { 54 | 'allowChildren': 1, 55 | 'expandable': 1, 56 | 'id': 'test.wat', 57 | 'leaf': 0, 58 | 'text': 'wat', 59 | }, { 60 | 'allowChildren': 0, 61 | 'expandable': 0, 62 | 'id': 'test.foo', 63 | 'leaf': 1, 64 | 'text': 'foo', 65 | }]) 66 | 67 | response = self.app.get(url, query_string={'query': 'test.*', 68 | 'format': 'treejson', 69 | 'wildcards': 1}) 70 | self.assertJSON(response, [{ 71 | 'text': '*', 72 | 'expandable': 1, 73 | 'leaf': 0, 74 | 'id': 'test.*', 75 | 'allowChildren': 1, 76 | }, { 77 | 'allowChildren': 1, 78 | 'expandable': 1, 79 | 'id': 'test.bar', 80 | 'leaf': 0, 81 | 'text': 'bar', 82 | }, { 83 | 'allowChildren': 1, 84 | 'expandable': 1, 85 | 'id': 'test.wat', 86 | 'leaf': 0, 87 | 'text': 'wat', 88 | }, { 89 | 'allowChildren': 0, 90 | 'expandable': 0, 91 | 'id': 'test.foo', 92 | 'leaf': 1, 93 | 'text': 'foo', 94 | }]) 95 | 96 | response = self.app.get(url, query_string={'query': 'test.*', 97 | 'format': 'completer'}) 98 | self.assertJSON(response, {'metrics': [{ 99 | 'is_leaf': 0, 100 | 'name': 'bar', 101 | 'path': 'test.bar.', 102 | }, { 103 | 'is_leaf': 1, 104 | 'name': 'foo', 105 | 'path': 'test.foo', 106 | }, { 107 | 'is_leaf': 0, 108 | 'name': 'wat', 109 | 'path': 'test.wat.', 110 | }]}) 111 | 112 | response = self.app.get(url, query_string={'query': 'test.*', 113 | 'wildcards': 1, 114 | 'format': 'completer'}) 115 | self.assertJSON(response, {'metrics': [{ 116 | 'is_leaf': 0, 117 | 'name': 'bar', 118 | 'path': 'test.bar.', 119 | }, { 120 | 'is_leaf': 1, 121 | 'name': 'foo', 122 | 'path': 'test.foo', 123 | }, { 124 | 'is_leaf': 0, 125 | 'name': 'wat', 126 | 'path': 'test.wat.', 127 | }, { 128 | 'name': '*', 129 | }]}) 130 | 131 | response = self.app.get(url, query_string={'query': 'test.*', 132 | 'format': 'json'}) 133 | data = json.loads(response.data.decode('utf-8')) 134 | self.assertEqual(len(data), 3) 135 | self.assertEqual(data[0]['is_leaf'], False) 136 | self.assertEqual(len(data[0]['intervals']), 0) 137 | self.assertEqual(data[0]['path'], 'test.bar') 138 | 139 | self.assertEqual(data[1]['is_leaf'], True) 140 | self.assertEqual(len(data[1]['intervals']), 1) 141 | # Adjustment by 1 is a race condition. 
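        # (the leaf's interval is computed from time.time() when
        # /metrics/find is served; if the clock ticks over between
        # _create_dbs() capturing ts and the request, the 60-point,
        # 1-second whisper window shifts by one second, so both
        # boundary values are accepted on each side below)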
142 | self.assertTrue(int(data[1]['intervals'][0]['start']) in [ts - 60, 143 | ts - 59]) 144 | self.assertTrue(int(data[1]['intervals'][0]['end']) in [ts, ts + 1]) 145 | self.assertEqual(data[1]['path'], 'test.foo') 146 | 147 | self.assertEqual(data[2]['is_leaf'], False) 148 | self.assertEqual(len(data[2]['intervals']), 0) 149 | self.assertEqual(data[2]['path'], 'test.wat') 150 | 151 | response = self.app.get(url, query_string={'query': '*', 152 | 'jsonp': 'foo', 153 | 'format': 'json'}) 154 | data = response.data.decode('utf-8') 155 | self.assertEqual(json.loads(data.split("(")[1].strip(")")), 156 | [{'is_leaf': False, 'intervals': [], 'path': 'test'}]) 157 | 158 | response = self.app.get(url, query_string={'query': '*', 159 | 'format': 'nodelist'}) 160 | self.assertJSON(response, {'nodes': ['test']}) 161 | 162 | response = self.app.get(url, query_string={'query': '*.*', 163 | 'format': 'nodelist'}) 164 | self.assertJSON(response, {'nodes': ['bar', 'foo', 'wat']}) 165 | 166 | response = self.app.get(url, query_string={'query': '*.*.*', 167 | 'format': 'nodelist'}) 168 | self.assertJSON(response, {'nodes': ['baz', 'welp']}) 169 | 170 | response = self.app.get(url, query_string={'query': '*.*.*', 171 | 'format': 'nodelist', 172 | 'position': '0'}) 173 | self.assertJSON(response, {'nodes': ['test']}) 174 | 175 | def test_find_validation(self): 176 | url = '/metrics/find' 177 | response = self.app.get(url, query_string={'query': 'foo', 178 | 'wildcards': 'aaa'}) 179 | self.assertJSON(response, {'errors': {'wildcards': 'must be 0 or 1.'}}, 180 | status_code=400) 181 | 182 | response = self.app.get(url, query_string={'query': 'foo', 183 | 'from': 'aaa', 184 | 'until': 'bbb'}) 185 | self.assertJSON(response, {'errors': { 186 | 'from': 'must be an epoch timestamp.', 187 | 'until': 'must be an epoch timestamp.', 188 | }}, status_code=400) 189 | 190 | response = self.app.get(url, query_string={'query': 'foo', 191 | 'format': 'other'}) 192 | self.assertJSON(response, {'errors': { 193 | 'format': 'unrecognized format: "other".', 194 | }}, status_code=400) 195 | 196 | def test_expand(self): 197 | url = '/metrics/expand' 198 | 199 | response = self.app.get(url) 200 | self.assertJSON(response, {'errors': 201 | {'query': 'this parameter is required.'}}, 202 | status_code=400) 203 | 204 | response = self.app.get(url, query_string={'query': 'test'}) 205 | self.assertJSON(response, {'results': []}) 206 | 207 | self._create_dbs() 208 | response = self.app.get(url, query_string={'query': 'test'}) 209 | self.assertJSON(response, {'results': ['test']}) 210 | 211 | response = self.app.get(url, query_string={'query': 'test.*'}) 212 | self.assertJSON(response, {'results': ['test.bar', 'test.foo', 213 | 'test.wat']}) 214 | 215 | response = self.app.get(url, query_string={'query': 'test.*', 216 | 'leavesOnly': 1}) 217 | self.assertJSON(response, {'results': ['test.foo']}) 218 | 219 | response = self.app.get(url, query_string={'query': 'test.*', 220 | 'groupByExpr': 1}) 221 | self.assertJSON(response, {'results': {'test.*': ['test.bar', 222 | 'test.foo', 223 | 'test.wat']}}) 224 | 225 | def test_expand_validation(self): 226 | url = '/metrics/expand' 227 | response = self.app.get(url, query_string={'query': 'foo', 228 | 'leavesOnly': 'bbb', 229 | 'groupByExpr': 'aaa'}) 230 | self.assertJSON(response, {'errors': { 231 | 'groupByExpr': 'must be 0 or 1.', 232 | 'leavesOnly': 'must be 0 or 1.', 233 | }}, status_code=400) 234 | 235 | def test_noop(self): 236 | url = '/dashboard/find' 237 | response = self.app.get(url) 238 
| self.assertJSON(response, {'dashboards': []}) 239 | 240 | url = '/dashboard/load/foo' 241 | response = self.app.get(url) 242 | self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, 243 | status_code=404) 244 | 245 | url = '/events/get_data' 246 | response = self.app.get(url) 247 | self.assertJSON(response, []) 248 | 249 | def test_metrics_index(self): 250 | url = '/metrics/index.json' 251 | response = self.app.get(url) 252 | self.assertJSON(response, []) 253 | self.assertEqual(response.headers['Content-Type'], 'application/json') 254 | 255 | response = self.app.get(url, query_string={'jsonp': 'foo'}) 256 | self.assertEqual(response.data, b'foo([])') 257 | self.assertEqual(response.headers['Content-Type'], 'text/javascript') 258 | 259 | parent = os.path.join(WHISPER_DIR, 'collectd') 260 | os.makedirs(parent) 261 | 262 | for metric in ['load', 'memory', 'cpu']: 263 | db = os.path.join(parent, '{0}.wsp'.format(metric)) 264 | whisper.create(db, [(1, 60)]) 265 | 266 | response = self.app.get(url) 267 | self.assertJSON(response, [ 268 | u'collectd.cpu', 269 | u'collectd.load', 270 | u'collectd.memory', 271 | ]) 272 | response = self.app.get(url, query_string={'jsonp': 'bar'}) 273 | self.assertEqual( 274 | response.data, 275 | b'bar(["collectd.cpu", "collectd.load", "collectd.memory"])') 276 | -------------------------------------------------------------------------------- /tests/test_paths.py: -------------------------------------------------------------------------------- 1 | from graphite_api.app import pathsFromTarget 2 | 3 | from . import TestCase 4 | 5 | 6 | class PathsTest(TestCase): 7 | """ 8 | TestCase for pathsFromTarget function 9 | 10 | """ 11 | def validate_paths(self, expected, test): 12 | """ 13 | Assert the lengths of the expected list and the test list are the same 14 | Also assert that every member of the expected list is present in the 15 | test list. 16 | 17 | """ 18 | # Check that test is a list 19 | self.assertTrue(isinstance(test, list)) 20 | # Check length before converting to sets 21 | self.assertEqual(len(expected), len(test)) 22 | # Convert lists to sets and verify equality 23 | self.assertEqual(set(expected), set(test)) 24 | 25 | def test_simple(self): 26 | """ 27 | Tests a target containing a single path expression. 28 | 29 | """ 30 | target = 'test.simple.metric' 31 | expected = [target] 32 | self.validate_paths(expected, pathsFromTarget({}, target)) 33 | 34 | def test_func_args(self): 35 | """ 36 | Tests a target containing function call with path expressions as 37 | arguments. 38 | 39 | """ 40 | path_1 = 'test.1.metric' 41 | path_2 = 'test.2.metric' 42 | target = 'sumSeries(%s,%s)' % (path_1, path_2) 43 | expected = [path_1, path_2] 44 | self.validate_paths(expected, pathsFromTarget({}, target)) 45 | 46 | def test_func_kwargs(self): 47 | """ 48 | Tests a target containing a function call with path expressions as 49 | a kwarg. 50 | 51 | """ 52 | path_a = 'test.a.metric' 53 | path_b = 'test.b.metric' 54 | target = 'someFunc(%s,b=%s)' % (path_a, path_b) 55 | expected = [path_a, path_b] 56 | self.validate_paths(expected, pathsFromTarget({}, target)) 57 | 58 | def test_func_nested(self): 59 | """ 60 | Tests a target containing nested functions with a mix of args and 61 | kwargs. 
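        All four path expressions should be collected, whether they
        appear as positional or keyword arguments and at any nesting
        depth.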
62 | 63 | """ 64 | paths = ( 65 | 'test.a.metric', 66 | 'test.b.metric', 67 | 'test.c.metric', 68 | 'test.d.metric', 69 | ) 70 | target = 'outerFunc(innerFunc(%s, %s), s=innerFunc(%s, %s))' % paths 71 | expected = list(paths) 72 | self.validate_paths(expected, pathsFromTarget({}, target)) 73 | -------------------------------------------------------------------------------- /tests/test_render_datalib.py: -------------------------------------------------------------------------------- 1 | from graphite_api.render.datalib import nonempty, TimeSeries 2 | 3 | from . import TestCase 4 | 5 | 6 | class TimeSeriesTest(TestCase): 7 | def test_TimeSeries_init_no_args(self): 8 | with self.assertRaises(TypeError): 9 | TimeSeries() 10 | 11 | def test_TimeSeries_init_string_values(self): 12 | series = TimeSeries("collectd.test-db.load.value", 13 | 0, 2, 1, "ab") 14 | expected = TimeSeries("collectd.test-db.load.value", 15 | 0, 2, 1, ["a", "b"]) 16 | self.assertEqual(series, expected) 17 | 18 | def test_TimeSeries_equal_list(self): 19 | values = range(0, 100) 20 | series = TimeSeries("collectd.test-db.load.value", 21 | 0, len(values), 1, values) 22 | with self.assertRaises(AssertionError): 23 | self.assertEqual(values, series) 24 | 25 | def test_TimeSeries_equal_list_color(self): 26 | values = range(0, 100) 27 | series1 = TimeSeries("collectd.test-db.load.value", 28 | 0, len(values), 1, values) 29 | series1.color = 'white' 30 | series2 = TimeSeries("collectd.test-db.load.value", 31 | 0, len(values), 1, values) 32 | series2.color = 'white' 33 | self.assertEqual(series1, series2) 34 | 35 | def test_TimeSeries_equal_list_color_bad(self): 36 | values = range(0, 100) 37 | series1 = TimeSeries("collectd.test-db.load.value", 38 | 0, len(values), 1, values) 39 | series2 = TimeSeries("collectd.test-db.load.value", 40 | 0, len(values), 1, values) 41 | series2.color = 'white' 42 | with self.assertRaises(AssertionError): 43 | self.assertEqual(series1, series2) 44 | 45 | def test_TimeSeries_equal_list_color_bad2(self): 46 | values = range(0, 100) 47 | series1 = TimeSeries("collectd.test-db.load.value", 48 | 0, len(values), 1, values) 49 | series2 = TimeSeries("collectd.test-db.load.value", 50 | 0, len(values), 1, values) 51 | series1.color = 'white' 52 | with self.assertRaises(AssertionError): 53 | self.assertEqual(series1, series2) 54 | 55 | def test_TimeSeries_consolidate(self): 56 | values = range(0, 100) 57 | series = TimeSeries("collectd.test-db.load.value", 58 | 0, len(values)/2, 1, values) 59 | self.assertEqual(series.valuesPerPoint, 1) 60 | series.consolidate(2) 61 | self.assertEqual(series.valuesPerPoint, 2) 62 | 63 | def test_TimeSeries_iterate(self): 64 | values = range(0, 100) 65 | series = TimeSeries("collectd.test-db.load.value", 66 | 0, len(values), 1, values) 67 | for i, val in enumerate(series): 68 | self.assertEqual(val, values[i]) 69 | 70 | def test_TimeSeries_iterate_valuesPerPoint_2_none_values(self): 71 | values = [None, None, None, None, None] 72 | series = TimeSeries("collectd.test-db.load.value", 73 | 0, len(values)/2, 1, values) 74 | self.assertEqual(series.valuesPerPoint, 1) 75 | series.consolidate(2) 76 | self.assertEqual(series.valuesPerPoint, 2) 77 | expected = TimeSeries("collectd.test-db.load.value", 78 | 0, 5, 1, [None, None, None]) 79 | self.assertEqual(list(series), list(expected)) 80 | 81 | def test_TimeSeries_iterate_valuesPerPoint_2_avg(self): 82 | values = range(0, 100) 83 | series = TimeSeries("collectd.test-db.load.value", 84 | 0, len(values)/2, 1, values) 85 | 
self.assertEqual(series.valuesPerPoint, 1) 86 | series.consolidate(2) 87 | self.assertEqual(series.valuesPerPoint, 2) 88 | expected = TimeSeries("collectd.test-db.load.value", 0, 5, 1, 89 | list(map(lambda x: x+0.5, range(0, 100, 2))) + 90 | [None]) 91 | self.assertEqual(list(series), list(expected)) 92 | 93 | def test_TimeSeries_iterate_valuesPerPoint_2_sum(self): 94 | values = range(0, 100) 95 | series = TimeSeries("collectd.test-db.load.value", 96 | 0, 5, 1, values, consolidate='sum') 97 | self.assertEqual(series.valuesPerPoint, 1) 98 | series.consolidate(2) 99 | self.assertEqual(series.valuesPerPoint, 2) 100 | expected = TimeSeries("collectd.test-db.load.value", 101 | 0, 5, 1, list(range(1, 200, 4)) + [None]) 102 | self.assertEqual(list(series), list(expected)) 103 | 104 | def test_TimeSeries_iterate_valuesPerPoint_2_max(self): 105 | values = range(0, 100) 106 | series = TimeSeries("collectd.test-db.load.value", 107 | 0, 5, 1, values, consolidate='max') 108 | self.assertEqual(series.valuesPerPoint, 1) 109 | series.consolidate(2) 110 | self.assertEqual(series.valuesPerPoint, 2) 111 | expected = TimeSeries("collectd.test-db.load.value", 112 | 0, 5, 1, list(range(1, 100, 2)) + [None]) 113 | self.assertEqual(list(series), list(expected)) 114 | 115 | def test_TimeSeries_iterate_valuesPerPoint_2_min(self): 116 | values = range(0, 100) 117 | series = TimeSeries("collectd.test-db.load.value", 118 | 0, 5, 1, values, consolidate='min') 119 | self.assertEqual(series.valuesPerPoint, 1) 120 | series.consolidate(2) 121 | self.assertEqual(series.valuesPerPoint, 2) 122 | expected = TimeSeries("collectd.test-db.load.value", 123 | 0, 5, 1, list(range(0, 100, 2)) + [None]) 124 | self.assertEqual(list(series), list(expected)) 125 | 126 | def test_TimeSeries_iterate_valuesPerPoint_2_invalid(self): 127 | values = range(0, 100) 128 | series = TimeSeries("collectd.test-db.load.value", 129 | 0, 5, 1, values, consolidate='bogus') 130 | self.assertEqual(series.valuesPerPoint, 1) 131 | series.consolidate(2) 132 | self.assertEqual(series.valuesPerPoint, 2) 133 | with self.assertRaises(Exception): 134 | list(series) 135 | 136 | 137 | class DatalibFunctionTest(TestCase): 138 | def test_nonempty_true(self): 139 | values = range(0, 100) 140 | series = TimeSeries("collectd.test-db.load.value", 141 | 0, len(values), 1, values) 142 | self.assertTrue(nonempty(series)) 143 | 144 | def test_nonempty_false_empty(self): 145 | series = TimeSeries("collectd.test-db.load.value", 0, 1, 1, []) 146 | self.assertFalse(nonempty(series)) 147 | 148 | def test_nonempty_false_nones(self): 149 | series = TimeSeries("collectd.test-db.load.value", 150 | 0, 4, 1, [None, None, None, None]) 151 | self.assertFalse(nonempty(series)) 152 | -------------------------------------------------------------------------------- /tests/test_storage.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from graphite_api.storage import FindQuery 4 | 5 | from . 
import TestCase
6 | 
7 | 
8 | class StorageTestCase(TestCase):
9 |     def test_find_query(self):
10 |         end = int(time.time())
11 |         start = end - 3600
12 | 
13 |         query = FindQuery('collectd', None, None)
14 |         self.assertEqual(repr(query), '<FindQuery: collectd from * until *>')
15 | 
16 |         query = FindQuery('collectd', start, None)
17 |         self.assertEqual(repr(query), '<FindQuery: collectd from %s until *>'
18 |                          % time.ctime(start))
19 | 
20 |         query = FindQuery('collectd', None, end)
21 |         self.assertEqual(repr(query), '<FindQuery: collectd from * until %s>'
22 |                          % time.ctime(end))
23 | 
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 |     py26,
4 |     py27,
5 |     py34,
6 |     py35,
7 |     py36,
8 |     pypy,
9 |     pyparsing1,
10 |     flask08,
11 |     flask09,
12 |     no-flask-cache,
13 |     lint,
14 |     docs
15 | 
16 | [testenv]
17 | setenv =
18 |     PYTHONPATH={toxinidir}
19 |     GRAPHITE_API_CONFIG={toxinidir}/tests/conf.yaml
20 | commands =
21 |     python -Werror -m unittest discover
22 | deps =
23 |     PyYAML
24 |     cairocffi
25 |     pytz
26 |     raven[flask]
27 |     six
28 |     structlog
29 |     tzlocal
30 | 
31 | [testenv:py27]
32 | basepython = python2.7
33 | deps =
34 |     {[testenv]deps}
35 |     Flask
36 |     Flask-Cache
37 |     pyparsing
38 |     mock
39 |     scandir
40 | 
41 | [testenv:py34]
42 | basepython = python3.4
43 | commands =
44 |     python -Wall -m unittest discover
45 | deps =
46 |     {[testenv]deps}
47 |     Flask
48 |     Flask-Cache
49 |     pyparsing
50 |     scandir
51 | 
52 | [testenv:py35]
53 | basepython = python3.5
54 | commands =
55 |     python -Wall -m unittest discover
56 | deps =
57 |     {[testenv]deps}
58 |     Flask
59 |     Flask-Cache
60 |     pyparsing
61 | 
62 | [testenv:py36]
63 | basepython = python3.6
64 | commands =
65 |     python -Wall -m unittest discover
66 | deps =
67 |     {[testenv]deps}
68 |     Flask
69 |     Flask-Cache
70 |     pyparsing
71 | 
72 | [testenv:pyparsing1]
73 | basepython = python2.7
74 | deps =
75 |     {[testenv]deps}
76 |     Flask
77 |     Flask-Cache
78 |     pyparsing==1.5.7
79 |     mock
80 | 
81 | [testenv:pypy]
82 | basepython = pypy
83 | deps =
84 |     {[testenv]deps}
85 |     Flask
86 |     Flask-Cache
87 |     pyparsing
88 |     mock
89 | 
90 | [testenv:flask08]
91 | basepython = python2.7
92 | deps =
93 |     {[testenv]deps}
94 |     Flask<0.9
95 |     Flask-Cache
96 |     pyparsing
97 |     mock
98 | 
99 | [testenv:flask09]
100 | basepython = python2.7
101 | deps =
102 |     {[testenv]deps}
103 |     Flask<0.10
104 |     Flask-Cache
105 |     pyparsing
106 |     mock
107 | 
108 | [testenv:no-flask-cache]
109 | basepython = python3.6
110 | commands =
111 |     python -Wall -m unittest discover
112 | deps =
113 |     {[testenv]deps}
114 |     Flask
115 |     pyparsing
116 | 
117 | [testenv:lint]
118 | deps =
119 |     flake8
120 |     flake8-import-order
121 |     flake8-bugbear
122 | commands =
123 |     flake8 {toxinidir}/graphite_api {toxinidir}/tests
124 | 
125 | [testenv:docs]
126 | changedir = docs
127 | deps =
128 |     Sphinx
129 |     sphinx_rtd_theme
130 |     structlog
131 | commands =
132 |     sphinx-build -W -b html -d {envtmpdir}/doctrees . {envtmpdir}/html
133 | 
--------------------------------------------------------------------------------
/unittest_main.py:
--------------------------------------------------------------------------------
1 | """Main entry point"""
2 | 
3 | import sys
4 | if sys.argv[0].endswith("__main__.py"):
5 |     import os.path
6 |     # We change sys.argv[0] to make help message more useful
7 |     # use executable without path, unquoted
8 |     # (it's just a hint anyway)
9 |     # (if you have spaces in your executable you get what you deserve!) 
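    # (illustrative: with sys.executable "/usr/bin/python3", the usage
    # line in --help output becomes "python3 -m unittest" instead of
    # the full path to unittest/__main__.py)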
10 | executable = os.path.basename(sys.executable) 11 | sys.argv[0] = executable + " -m unittest" 12 | del os 13 | 14 | __unittest = True 15 | 16 | from unittest.main import main, TestProgram 17 | 18 | main(module=None) 19 | --------------------------------------------------------------------------------