├── .gitignore ├── .pre-commit-config.yaml ├── .pylintrc ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docs ├── conf.py ├── index.rst └── readme.rst ├── graypy ├── __init__.py ├── handler.py └── rabbitmq.py ├── perftest.py ├── readthedocs.yml ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── config ├── create_ssl_certs.sh ├── docker-compose.yml ├── inputs.json ├── start_local_graylog_server.sh └── stop_local_graylog_server.sh ├── helper.py ├── integration ├── __init__.py ├── helper.py ├── test_chunked_logging.py ├── test_common_logging.py ├── test_debugging_fields.py ├── test_extra_fields.py └── test_status_issue.py └── unit ├── __init__.py ├── helper.py ├── test_ExcludeFilter.py ├── test_GELFRabbitHandler.py ├── test_chunking.py └── test_handler.py /.gitignore: -------------------------------------------------------------------------------- 1 | ### Python template 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | db.sqlite3 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # Sphinx API documentation 71 | docs/api/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # pyenv 80 | .python-version 81 | 82 | # celery beat schedule file 83 | celerybeat-schedule 84 | 85 | # SageMath parsed files 86 | *.sage.py 87 | 88 | # Environments 89 | .env 90 | .venv 91 | env/ 92 | venv/ 93 | ENV/ 94 | env.bak/ 95 | venv.bak/ 96 | 97 | # Spyder project settings 98 | .spyderproject 99 | .spyproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | 104 | # mkdocs documentation 105 | /site 106 | 107 | # mypy 108 | .mypy_cache/ 109 | 110 | # Pycharm 111 | .idea/ 112 | 113 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | 3 | # Automatic source code formatting 4 | - repo: https://github.com/psf/black 5 | rev: stable 6 | hooks: 7 | - id: black 8 | args: [--safe, --quiet] 9 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # Specify a configuration file. 4 | #rcfile= 5 | 6 | # Python code to execute, usually for sys.path manipulation such as 7 | # pygtk.require(). 
init-hook='import sys, os; sys.path.append(os.path.abspath(os.path.curdir))'
You can 54 | # either give multiple identifier separated by comma (,) or put this option 55 | # multiple time (only on the command line, not in the configuration file where 56 | # it should appear only once). See also the "--disable" option for examples. 57 | #enable= 58 | 59 | # Disable the message, report, category or checker with the given id(s). You 60 | # can either give multiple identifiers separated by comma (,) or put this 61 | # option multiple times (only on the command line, not in the configuration 62 | # file where it should appear only once).You can also use "--disable=all" to 63 | # disable everything first and then reenable specific checks. For example, if 64 | # you want to run only the similarities checker, you can use "--disable=all 65 | # --enable=similarities". If you want to run only the classes checker, but have 66 | # no Warning level messages displayed, use"--disable=all --enable=classes 67 | # --disable=W" 68 | disable=locally-disabled,too-few-public-methods,no-self-use,too-many-ancestors,bad-continuation 69 | 70 | 71 | [REPORTS] 72 | 73 | # Set the output format. Available formats are text, parseable, colorized, msvs 74 | # (visual studio) and html. You can also give a reporter class, eg 75 | # mypackage.mymodule.MyReporterClass. 76 | output-format=text 77 | 78 | # Put messages in a separate file for each module / package specified on the 79 | # command line instead of printing them on stdout. Reports (if any) will be 80 | # written in a file name "pylint_global.[txt|html]". This option is deprecated 81 | # and it will be removed in Pylint 2.0. 82 | files-output=no 83 | 84 | # Tells whether to display a full report or only the messages 85 | reports=yes 86 | 87 | # Python expression which should return a note less than 10 (10 is the highest 88 | # note). You have access to the variables errors warning, statement which 89 | # respectively contain the number of errors / warnings messages and the total 90 | # number of statements analyzed. 
This is used by the global evaluation report 91 | # (RP0004). 92 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 93 | 94 | # Template used to display messages. This is a python new-style format string 95 | # used to format the message information. See doc for all details 96 | #msg-template= 97 | 98 | 99 | [LOGGING] 100 | 101 | # Logging modules to check that the string format arguments are in logging 102 | # function parameter format 103 | logging-modules=logging 104 | 105 | 106 | [SIMILARITIES] 107 | 108 | # Minimum lines number of a similarity. 109 | min-similarity-lines=8 110 | 111 | # Ignore comments when computing similarities. 112 | ignore-comments=yes 113 | 114 | # Ignore docstrings when computing similarities. 115 | ignore-docstrings=yes 116 | 117 | # Ignore imports when computing similarities. 118 | ignore-imports=no 119 | 120 | 121 | [FORMAT] 122 | 123 | # Maximum number of characters on a single line. 124 | max-line-length=100 125 | 126 | # Regexp for a line that is allowed to be longer than the limit. 127 | ignore-long-lines=^\s*(# )??$ 128 | 129 | # Allow the body of an if to be on the same line as the test if there is no 130 | # else. 131 | single-line-if-stmt=no 132 | 133 | # List of optional constructs for which whitespace checking is disabled. `dict- 134 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 135 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 136 | # `empty-line` allows space-only lines. 137 | no-space-check=trailing-comma,dict-separator 138 | 139 | # Maximum number of lines in a module 140 | max-module-lines=1000 141 | 142 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 143 | # tab). 144 | indent-string=' ' 145 | 146 | # Number of spaces of indent required inside a hanging or continued line. 147 | indent-after-paren=4 148 | 149 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 
150 | expected-line-ending-format= 151 | 152 | 153 | [SPELLING] 154 | 155 | # Spelling dictionary name. Available dictionaries: none. To make it working 156 | # install python-enchant package. 157 | spelling-dict= 158 | 159 | # List of comma separated words that should not be checked. 160 | spelling-ignore-words= 161 | 162 | # A path to a file that contains private dictionary; one word per line. 163 | spelling-private-dict-file= 164 | 165 | # Tells whether to store unknown words to indicated private dictionary in 166 | # --spelling-private-dict-file option instead of raising a message. 167 | spelling-store-unknown-words=no 168 | 169 | 170 | [VARIABLES] 171 | 172 | # Tells whether we should check for unused import in __init__ files. 173 | init-import=no 174 | 175 | # A regular expression matching the name of dummy variables (i.e. expectedly 176 | # not used). 177 | dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy 178 | 179 | # List of additional names supposed to be defined in builtins. Remember that 180 | # you should avoid to define new builtins when possible. 181 | additional-builtins= 182 | 183 | # List of strings which can identify a callback function by name. A callback 184 | # name must start or end with one of those strings. 185 | callbacks=cb_,_cb 186 | 187 | # List of qualified module names which can have objects that can redefine 188 | # builtins. 189 | redefining-builtins-modules=six.moves,future.builtins 190 | 191 | 192 | [TYPECHECK] 193 | 194 | # Tells whether missing members accessed in mixin class should be ignored. A 195 | # mixin class is detected if its name ends with "mixin" (case insensitive). 196 | ignore-mixin-members=yes 197 | 198 | # List of module names for which member attributes should not be checked 199 | # (useful for modules/projects where namespaces are manipulated during runtime 200 | # and thus existing member attributes cannot be deduced by static analysis. It 201 | # supports qualified module names, as well as Unix pattern matching. 
202 | ignored-modules=flask_sqlalchemy,app.extensions.flask_sqlalchemy 203 | 204 | # List of class names for which member attributes should not be checked (useful 205 | # for classes with dynamically set attributes). This supports the use of 206 | # qualified names. 207 | ignored-classes=optparse.Values,thread._local,_thread._local 208 | 209 | # List of members which are set dynamically and missed by pylint inference 210 | # system, and so shouldn't trigger E1101 when accessed. Python regular 211 | # expressions are accepted. 212 | generated-members=fget,query,begin,add,merge,delete,commit,rollback 213 | 214 | # List of decorators that produce context managers, such as 215 | # contextlib.contextmanager. Add to this list to register other decorators that 216 | # produce valid context managers. 217 | contextmanager-decorators=contextlib.contextmanager 218 | 219 | 220 | [MISCELLANEOUS] 221 | 222 | # List of note tags to take in consideration, separated by a comma. 223 | notes=FIXME,XXX,TODO 224 | 225 | 226 | [BASIC] 227 | 228 | # Good variable names which should always be accepted, separated by a comma 229 | good-names=i,j,k,ex,Run,_,log,api 230 | 231 | # Bad variable names which should always be refused, separated by a comma 232 | bad-names=foo,bar,baz,toto,tutu,tata 233 | 234 | # Colon-delimited sets of names that determine each other's naming style when 235 | # the name regexes allow several styles. 236 | name-group= 237 | 238 | # Include a hint for the correct naming format with invalid-name 239 | include-naming-hint=no 240 | 241 | # List of decorators that produce properties, such as abc.abstractproperty. Add 242 | # to this list to register other decorators that produce valid properties. 
243 | property-classes=abc.abstractproperty 244 | 245 | # Regular expression matching correct class names 246 | class-rgx=[A-Z_][a-zA-Z0-9]+$ 247 | 248 | # Naming hint for class names 249 | class-name-hint=[A-Z_][a-zA-Z0-9]+$ 250 | 251 | # Regular expression matching correct constant names 252 | const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ 253 | 254 | # Naming hint for constant names 255 | const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ 256 | 257 | # Regular expression matching correct argument names 258 | argument-rgx=[a-z_][a-z0-9_]{2,40}$ 259 | 260 | # Naming hint for argument names 261 | argument-name-hint=[a-z_][a-z0-9_]{2,40}$ 262 | 263 | # Regular expression matching correct inline iteration names 264 | inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ 265 | 266 | # Naming hint for inline iteration names 267 | inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ 268 | 269 | # Regular expression matching correct method names 270 | method-rgx=[a-z_][a-z0-9_]{2,40}$ 271 | 272 | # Naming hint for method names 273 | method-name-hint=[a-z_][a-z0-9_]{2,40}$ 274 | 275 | # Regular expression matching correct function names 276 | function-rgx=[a-z_][a-z0-9_]{2,40}$ 277 | 278 | # Naming hint for function names 279 | function-name-hint=[a-z_][a-z0-9_]{2,40}$ 280 | 281 | # Regular expression matching correct class attribute names 282 | class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,40}|(__.*__))$ 283 | 284 | # Naming hint for class attribute names 285 | class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,40}|(__.*__))$ 286 | 287 | # Regular expression matching correct attribute names 288 | attr-rgx=[a-z_][a-z0-9_]{2,40}$ 289 | 290 | # Naming hint for attribute names 291 | attr-name-hint=[a-z_][a-z0-9_]{2,40}$ 292 | 293 | # Regular expression matching correct variable names 294 | variable-rgx=[a-z_][a-z0-9_]{2,40}$ 295 | 296 | # Naming hint for variable names 297 | variable-name-hint=[a-z_][a-z0-9_]{2,40}$ 298 | 299 | # Regular expression matching correct module names 300 | 
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ 301 | 302 | # Naming hint for module names 303 | module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ 304 | 305 | # Regular expression which should only match function or class names that do 306 | # not require a docstring. 307 | no-docstring-rgx=^_ 308 | 309 | # Minimum line length for functions/classes that require docstrings, shorter 310 | # ones are exempt. 311 | docstring-min-length=5 312 | 313 | 314 | [ELIF] 315 | 316 | # Maximum number of nested blocks for function / method body 317 | max-nested-blocks=5 318 | 319 | 320 | [DESIGN] 321 | 322 | # Maximum number of arguments for function / method 323 | max-args=5 324 | 325 | # Argument names that match this expression will be ignored. Default to name 326 | # with leading underscore 327 | ignored-argument-names=_.* 328 | 329 | # Maximum number of locals for function / method body 330 | max-locals=15 331 | 332 | # Maximum number of return / yield for function / method body 333 | max-returns=6 334 | 335 | # Maximum number of branch for function / method body 336 | max-branches=12 337 | 338 | # Maximum number of statements in function / method body 339 | max-statements=50 340 | 341 | # Maximum number of parents for a class (see R0901). 342 | max-parents=10 343 | 344 | # Maximum number of attributes for a class (see R0902). 345 | max-attributes=7 346 | 347 | # Minimum number of public methods for a class (see R0903). 348 | min-public-methods=2 349 | 350 | # Maximum number of public methods for a class (see R0904). 351 | max-public-methods=20 352 | 353 | # Maximum number of boolean expressions in a if statement 354 | max-bool-expr=5 355 | 356 | 357 | [IMPORTS] 358 | 359 | # Deprecated modules which should not be used, separated by a comma 360 | deprecated-modules=optparse 361 | 362 | # Create a graph of every (i.e. 
internal and external) dependencies in the 363 | # given file (report RP0402 must not be disabled) 364 | import-graph= 365 | 366 | # Create a graph of external dependencies in the given file (report RP0402 must 367 | # not be disabled) 368 | ext-import-graph= 369 | 370 | # Create a graph of internal dependencies in the given file (report RP0402 must 371 | # not be disabled) 372 | int-import-graph= 373 | 374 | # Force import order to recognize a module as part of the standard 375 | # compatibility libraries. 376 | known-standard-library= 377 | 378 | # Force import order to recognize a module as part of a third party library. 379 | known-third-party=flask_restplus_patched 380 | 381 | # Analyse import fallback blocks. This can be used to support both Python 2 and 382 | # 3 compatible code, which means that the block might have code that exists 383 | # only in one or another interpreter, leading to false positives when analysed. 384 | analyse-fallback-blocks=no 385 | 386 | 387 | [CLASSES] 388 | 389 | # List of method names used to declare (i.e. assign) instance attributes. 390 | defining-attr-methods=__init__,__new__,setUp 391 | 392 | # List of valid names for the first argument in a class method. 393 | valid-classmethod-first-arg=cls 394 | 395 | # List of valid names for the first argument in a metaclass class method. 396 | valid-metaclass-classmethod-first-arg=mcs 397 | 398 | # List of member names, which should be excluded from the protected access 399 | # warning. 400 | exclude-protected=_asdict,_fields,_replace,_source,_make 401 | 402 | 403 | [EXCEPTIONS] 404 | 405 | # Exceptions that will emit a warning when being caught. 
Defaults to 406 | # "Exception" 407 | overgeneral-exceptions=Exception 408 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | stages: 4 | - lint 5 | - test 6 | - build 7 | - deploy 8 | 9 | before_install: 10 | - pip install -U pip setuptools 11 | - pip install codecov 12 | 13 | install: 14 | - pip install . 15 | 16 | before_script: 17 | - bash tests/config/start_local_graylog_server.sh 18 | 19 | script: 20 | - python setup.py test 21 | 22 | after_script: 23 | - bash tests/config/stop_local_graylog_server.sh 24 | 25 | after_success: 26 | - codecov 27 | 28 | jobs: 29 | include: 30 | - python: 2.7 31 | - python: 3.4 32 | - python: 3.5 33 | - python: 3.6 34 | - python: 3.7 35 | dist: xenial 36 | - python: "pypy2.7-5.10.0" 37 | - python: "pypy3.5-5.10.0" 38 | - stage: lint 39 | name: "black" 40 | python: 41 | - 3.6 42 | before_install: 43 | - pip install black 44 | before_script: skip 45 | script: 46 | - black --check setup.py tests graypy 47 | after_script: skip 48 | - name: "pylint" 49 | python: 50 | - 3.6 51 | before_script: skip 52 | script: 53 | - python setup.py lint || exit $(($? & 35)) 54 | after_script: skip 55 | - name: "twine check" 56 | python: 57 | - 3.6 58 | before_install: 59 | - pip install docutils twine 60 | before_script: 61 | - python setup.py sdist 62 | script: 63 | - twine check dist/* 64 | after_script: skip 65 | - stage: build 66 | name: "sdist" 67 | python: 68 | - 3.6 69 | before_script: skip 70 | script: 71 | - python setup.py sdist 72 | after_script: skip 73 | - name: "bdist_wheel" 74 | python: 75 | - 3.6 76 | install: 77 | - pip install . 
wheel 78 | before_script: skip 79 | script: 80 | - python setup.py bdist_wheel 81 | after_script: skip 82 | - name: "sphinx docs" 83 | python: 84 | - 3.6 85 | install: 86 | - pip install .[docs,amqp] 87 | before_script: skip 88 | script: 89 | - sphinx-build docs/ build/ 90 | after_script: skip 91 | - stage: deploy 92 | deploy: 93 | python: 94 | - 3.6 95 | provider: pypi 96 | user: nklapste 97 | password: 98 | secure: Ryu87EzCW+k/quo2yC05cejU29zo9Xwlv9f2zxwMhCDRwMGt4GCSJ4zR7GXvYT0B5H6S+Y+gCxTmSSA53V1aRvCX0A42i6P9jef2yWR34aYe0+lmYwwW1wNKo/SLD1x5cXw59m5pcVGPvAVY9aQ88DwR9gn6aMr76A6LJet/ykY= 99 | on: 100 | tags: true 101 | branch: master 102 | repo: severb/graypy 103 | distributions: "sdist bdist_wheel" 104 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011, Sever Băneşiu 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | * Redistributions of source code must retain the above copyright 7 | notice, this list of conditions and the following disclaimer. 8 | * Redistributions in binary form must reproduce the above copyright 9 | notice, this list of conditions and the following disclaimer in the 10 | documentation and/or other materials provided with the distribution. 11 | * Neither the name of the author nor the 12 | names of its contributors may be used to endorse or promote products 13 | derived from this software without specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR 19 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.rst 3 | recursive-include tests *.py 4 | recursive-include tests/config * -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ###### 2 | graypy 3 | ###### 4 | 5 | .. image:: https://img.shields.io/pypi/v/graypy.svg 6 | :target: https://pypi.python.org/pypi/graypy 7 | :alt: PyPI Status 8 | 9 | .. image:: https://travis-ci.org/severb/graypy.svg?branch=master 10 | :target: https://travis-ci.org/severb/graypy 11 | :alt: Build Status 12 | 13 | .. image:: https://readthedocs.org/projects/graypy/badge/?version=stable 14 | :target: https://graypy.readthedocs.io/en/stable/?badge=stable 15 | :alt: Documentation Status 16 | 17 | .. image:: https://codecov.io/gh/severb/graypy/branch/master/graph/badge.svg 18 | :target: https://codecov.io/gh/severb/graypy 19 | :alt: Coverage Status 20 | 21 | Description 22 | =========== 23 | 24 | Python logging handlers that send log messages in the 25 | Graylog Extended Log Format (GELF_). 26 | 27 | graypy supports sending GELF logs to both Graylog2 and Graylog3 servers. 
28 | 29 | Installing 30 | ========== 31 | 32 | Using pip 33 | --------- 34 | 35 | Install the basic graypy python logging handlers: 36 | 37 | .. code-block:: console 38 | 39 | pip install graypy 40 | 41 | Install with requirements for ``GELFRabbitHandler``: 42 | 43 | .. code-block:: console 44 | 45 | pip install graypy[amqp] 46 | 47 | Using easy_install 48 | ------------------ 49 | 50 | Install the basic graypy python logging handlers: 51 | 52 | .. code-block:: console 53 | 54 | easy_install graypy 55 | 56 | Install with requirements for ``GELFRabbitHandler``: 57 | 58 | .. code-block:: console 59 | 60 | easy_install graypy[amqp] 61 | 62 | Usage 63 | ===== 64 | 65 | graypy sends GELF logs to a Graylog server via subclasses of the python 66 | `logging.Handler`_ class. 67 | 68 | Below is the list of ready to run GELF logging handlers defined by graypy: 69 | 70 | * ``GELFUDPHandler`` - UDP log forwarding 71 | * ``GELFTCPHandler`` - TCP log forwarding 72 | * ``GELFTLSHandler`` - TCP log forwarding with TLS support 73 | * ``GELFHTTPHandler`` - HTTP log forwarding 74 | * ``GELFRabbitHandler`` - RabbitMQ log forwarding 75 | 76 | UDP Logging 77 | ----------- 78 | 79 | UDP Log forwarding to a locally hosted Graylog server can be easily done with 80 | the ``GELFUDPHandler``: 81 | 82 | .. code-block:: python 83 | 84 | import logging 85 | import graypy 86 | 87 | my_logger = logging.getLogger('test_logger') 88 | my_logger.setLevel(logging.DEBUG) 89 | 90 | handler = graypy.GELFUDPHandler('localhost', 12201) 91 | my_logger.addHandler(handler) 92 | 93 | my_logger.debug('Hello Graylog.') 94 | 95 | 96 | UDP GELF Chunkers 97 | ^^^^^^^^^^^^^^^^^ 98 | 99 | `GELF UDP Chunking`_ is supported by the ``GELFUDPHandler`` and is defined by 100 | the ``gelf_chunker`` argument within its constructor. By default the 101 | ``GELFWarningChunker`` is used, thus, GELF messages that chunk overflow 102 | (i.e. 
consisting of more than 128 chunks) will issue a 103 | ``GELFChunkOverflowWarning`` and **will be dropped**. 104 | 105 | Other ``gelf_chunker`` options are also available: 106 | 107 | * ``BaseGELFChunker`` silently drops GELF messages that chunk overflow 108 | * ``GELFTruncatingChunker`` issues a ``GELFChunkOverflowWarning`` and 109 | simplifies and truncates GELF messages that chunk overflow in a attempt 110 | to send some content to Graylog. If this process fails to prevent 111 | another chunk overflow a ``GELFTruncationFailureWarning`` is issued. 112 | 113 | RabbitMQ Logging 114 | ---------------- 115 | 116 | Alternately, use ``GELFRabbitHandler`` to send messages to RabbitMQ and 117 | configure your Graylog server to consume messages via AMQP. This prevents log 118 | messages from being lost due to dropped UDP packets (``GELFUDPHandler`` sends 119 | messages to Graylog using UDP). You will need to configure RabbitMQ with a 120 | ``gelf_log`` queue and bind it to the ``logging.gelf`` exchange so messages 121 | are properly routed to a queue that can be consumed by Graylog (the queue and 122 | exchange names may be customized to your liking). 123 | 124 | .. code-block:: python 125 | 126 | import logging 127 | import graypy 128 | 129 | my_logger = logging.getLogger('test_logger') 130 | my_logger.setLevel(logging.DEBUG) 131 | 132 | handler = graypy.GELFRabbitHandler('amqp://guest:guest@localhost/', exchange='logging.gelf') 133 | my_logger.addHandler(handler) 134 | 135 | my_logger.debug('Hello Graylog.') 136 | 137 | Django Logging 138 | -------------- 139 | 140 | It's easy to integrate ``graypy`` with Django's logging settings. Just add a 141 | new handler in your ``settings.py``: 142 | 143 | .. code-block:: python 144 | 145 | LOGGING = { 146 | 'version': 1, 147 | # other dictConfig keys here... 
148 | 'handlers': { 149 | 'graypy': { 150 | 'level': 'WARNING', 151 | 'class': 'graypy.GELFUDPHandler', 152 | 'host': 'localhost', 153 | 'port': 12201, 154 | }, 155 | }, 156 | 'loggers': { 157 | 'django.request': { 158 | 'handlers': ['graypy'], 159 | 'level': 'ERROR', 160 | 'propagate': True, 161 | }, 162 | }, 163 | } 164 | 165 | Traceback Logging 166 | ----------------- 167 | 168 | By default log captured exception tracebacks are added to the GELF log as 169 | ``full_message`` fields: 170 | 171 | .. code-block:: python 172 | 173 | import logging 174 | import graypy 175 | 176 | my_logger = logging.getLogger('test_logger') 177 | my_logger.setLevel(logging.DEBUG) 178 | 179 | handler = graypy.GELFUDPHandler('localhost', 12201) 180 | my_logger.addHandler(handler) 181 | 182 | try: 183 | puff_the_magic_dragon() 184 | except NameError: 185 | my_logger.debug('No dragons here.', exc_info=1) 186 | 187 | Default Logging Fields 188 | ---------------------- 189 | 190 | By default a number of debugging logging fields are automatically added to the 191 | GELF log if available: 192 | 193 | * function 194 | * pid 195 | * process_name 196 | * thread_name 197 | 198 | You can disable automatically adding these debugging logging fields by 199 | specifying ``debugging_fields=False`` in the handler's constructor: 200 | 201 | .. code-block:: python 202 | 203 | handler = graypy.GELFUDPHandler('localhost', 12201, debugging_fields=False) 204 | 205 | Adding Custom Logging Fields 206 | ---------------------------- 207 | 208 | graypy also supports including custom fields in the GELF logs sent to Graylog. 209 | This can be done by using Python's LoggerAdapter_ and Filter_ classes. 210 | 211 | Using LoggerAdapter 212 | ^^^^^^^^^^^^^^^^^^^ 213 | 214 | LoggerAdapter_ makes it easy to add static information to your GELF log 215 | messages: 216 | 217 | .. 
code-block:: python 218 | 219 | import logging 220 | import graypy 221 | 222 | my_logger = logging.getLogger('test_logger') 223 | my_logger.setLevel(logging.DEBUG) 224 | 225 | handler = graypy.GELFUDPHandler('localhost', 12201) 226 | my_logger.addHandler(handler) 227 | 228 | my_adapter = logging.LoggerAdapter(logging.getLogger('test_logger'), 229 | {'username': 'John'}) 230 | 231 | my_adapter.debug('Hello Graylog from John.') 232 | 233 | Using Filter 234 | ^^^^^^^^^^^^ 235 | 236 | Filter_ gives more flexibility and allows for dynamic information to be 237 | added to your GELF logs: 238 | 239 | .. code-block:: python 240 | 241 | import logging 242 | import graypy 243 | 244 | class UsernameFilter(logging.Filter): 245 | def __init__(self): 246 | # In an actual use case would dynamically get this 247 | # (e.g. from memcache) 248 | self.username = 'John' 249 | 250 | def filter(self, record): 251 | record.username = self.username 252 | return True 253 | 254 | my_logger = logging.getLogger('test_logger') 255 | my_logger.setLevel(logging.DEBUG) 256 | 257 | handler = graypy.GELFUDPHandler('localhost', 12201) 258 | my_logger.addHandler(handler) 259 | 260 | my_logger.addFilter(UsernameFilter()) 261 | 262 | my_logger.debug('Hello Graylog from John.') 263 | 264 | Contributors 265 | ============ 266 | 267 | * Sever Banesiu 268 | * Daniel Miller 269 | * Tushar Makkar 270 | * Nathan Klapstein 271 | 272 | .. _GELF: https://docs.graylog.org/en/latest/pages/gelf.html 273 | .. _logging.Handler: https://docs.python.org/3/library/logging.html#logging.Handler 274 | .. _GELF UDP Chunking: https://docs.graylog.org/en/latest/pages/gelf.html#chunking 275 | .. _LoggerAdapter: https://docs.python.org/howto/logging-cookbook.html#using-loggeradapters-to-impart-contextual-information 276 | .. 
def find_version(*file_paths):
    """Return the ``(version, release)`` pair parsed from a ``__version__`` tuple.

    The *file_paths* components are joined relative to this file's directory.
    Returns e.g. ``("1.2", "1.2.3")`` — the short X.Y version first, then the
    full X.Y.Z release.  Raises ``RuntimeError`` when no version tuple is found.
    """
    import codecs
    import os
    import re

    here = os.path.abspath(os.path.dirname(__file__))
    with codecs.open(os.path.join(here, *file_paths), "r") as handle:
        contents = handle.read()
    match = re.search(
        r"^__version__ = \((\d+), ?(\d+), ?(\d+)\)", contents, re.M
    )
    if not match:
        raise RuntimeError("Unable to find a valid version")
    major, minor, patch = match.groups()
    return "{}.{}".format(major, minor), "{}.{}.{}".format(major, minor, patch)
52 | # 53 | # This is also used if you do content translation via gettext catalogs. 54 | # Usually you set "language" from the command line for these cases. 55 | language = None 56 | 57 | # List of patterns, relative to source directory, that match files and 58 | # directories to ignore when looking for source files. 59 | # This pattern also affects html_static_path and html_extra_path . 60 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 61 | 62 | # The name of the Pygments (syntax highlighting) style to use. 63 | pygments_style = "sphinx" 64 | 65 | 66 | # -- Options for HTML output ------------------------------------------------- 67 | 68 | # The theme to use for HTML and HTML Help pages. See the documentation for 69 | # a list of builtin themes. 70 | # 71 | html_theme = "sphinx_rtd_theme" 72 | 73 | html_theme_options = { 74 | "logo_only": False, 75 | "display_version": True, 76 | "prev_next_buttons_location": "bottom", 77 | "style_external_links": False, 78 | # Toc options 79 | "collapse_navigation": True, 80 | "sticky_navigation": True, 81 | "navigation_depth": 4, 82 | "includehidden": True, 83 | "titles_only": False, 84 | } 85 | 86 | 87 | # -- Options for HTMLHelp output --------------------------------------------- 88 | 89 | # Output file base name for HTML help builder. 90 | htmlhelp_basename = "graypydoc" 91 | 92 | 93 | # -- Options for LaTeX output ------------------------------------------------ 94 | 95 | latex_elements = { 96 | # The paper size ('letterpaper' or 'a4paper'). 97 | # 98 | # 'papersize': 'letterpaper', 99 | # The font size ('10pt', '11pt' or '12pt'). 100 | # 101 | # 'pointsize': '10pt', 102 | # Additional stuff for the LaTeX preamble. 103 | # 104 | # 'preamble': '', 105 | # Latex figure (float) alignment 106 | # 107 | # 'figure_align': 'htbp', 108 | } 109 | 110 | # Grouping the document tree into LaTeX files. List of tuples 111 | # (source start file, target name, title, author, documentclass [howto, manual, or own class]). 
112 | latex_documents = [(master_doc, "graypy.tex", "graypy Documentation", author, "manual")] 113 | 114 | 115 | # -- Options for manual page output ------------------------------------------ 116 | 117 | # One entry per manual page. List of tuples 118 | # (source start file, name, description, authors, manual section). 119 | man_pages = [(master_doc, "graypy", "graypy Documentation", author.split(", "), 1)] 120 | 121 | 122 | # -- Options for Texinfo output ---------------------------------------------- 123 | 124 | # Grouping the document tree into Texinfo files. List of tuples 125 | # ( 126 | # source start file, 127 | # target name, 128 | # title, 129 | # author, 130 | # dir menu entry, 131 | # description, 132 | # category 133 | # ) 134 | texinfo_documents = [ 135 | ( 136 | master_doc, 137 | "graypy", 138 | "graypy Documentation", 139 | author, 140 | "graypy", 141 | "Python logging handlers that send messages in the Graylog Extended Log Format (GELF).", 142 | "Miscellaneous", 143 | ) 144 | ] 145 | 146 | 147 | # -- Extension configuration ------------------------------------------------- 148 | 149 | intersphinx_mapping = { 150 | "python": ("https://docs.python.org/3", (None, "python-inv.txt")) 151 | } 152 | 153 | 154 | # -- auto api docs generation -- 155 | 156 | # order auto doc members by position in source code 157 | autodoc_member_order = "bysource" 158 | 159 | 160 | def run_apidoc(_): 161 | from sphinx.ext.apidoc import main 162 | import os 163 | import sys 164 | 165 | sys.path.append(os.path.join(os.path.dirname(__file__), "..")) 166 | cur_dir = os.path.abspath(os.path.dirname(__file__)) 167 | module = os.path.join(cur_dir, "..", "graypy") 168 | main( 169 | [ 170 | "--module-first", 171 | "--separate", 172 | "--implicit-namespaces", 173 | module, 174 | "--output-dir", 175 | os.path.join(cur_dir, "api"), 176 | ] 177 | ) 178 | 179 | 180 | def skip(app, what, name, obj, would_skip, options): 181 | if name == "__init__": 182 | return False 183 | return 
would_skip 184 | 185 | 186 | def setup(app): 187 | app.connect("builder-inited", run_apidoc) 188 | app.connect("autodoc-skip-member", skip) 189 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. graypy documentation master file 2 | 3 | Welcome to graypy's documentation! 4 | ===================================== 5 | 6 | This code is open source, and is `available on GitHub`_. 7 | 8 | .. _available on GitHub: https://github.com/severb/graypy 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | :caption: Contents: 13 | 14 | Overview 15 | Basic GELF Handlers 16 | RabbitMQ GELF Handler 17 | 18 | Indices and tables 19 | ================== 20 | 21 | * :ref:`genindex` 22 | * :ref:`modindex` 23 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /graypy/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """graypy 5 | 6 | Python logging handlers that send messages in the 7 | Graylog Extended Log Format (GELF). 
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Logging Handlers that send messages in Graylog Extended Log Format (GELF)"""

import warnings
import abc
import datetime
import json
import logging
import math
import random
import socket
import ssl
import struct
import sys
import traceback
import zlib
from logging.handlers import DatagramHandler, SocketHandler


# recommended maximum UDP payload sizes for WAN and LAN links
WAN_CHUNK = 1420
LAN_CHUNK = 8154

# python 2/3 compatible aliases for the byte and text string types
if sys.version_info[0] == 3:  # check if python3+
    data, text = bytes, str
else:
    data, text = str, unicode  # pylint: disable=undefined-variable

# fixes for using ABC
if sys.version_info >= (3, 4):  # check if python3.4+
    ABC = abc.ABC
else:
    ABC = abc.ABCMeta(str("ABC"), (), {})

try:
    import httplib
except ImportError:
    import http.client as httplib

# mapping of python logging levels to syslog severity levels
SYSLOG_LEVELS = {
    logging.CRITICAL: 2,
    logging.ERROR: 3,
    logging.WARNING: 4,
    logging.INFO: 6,
    logging.DEBUG: 7,
}

# a chunked GELF UDP message may span at most this many chunks
GELF_MAX_CHUNK_NUMBER = 128


class BaseGELFHandler(logging.Handler, ABC):
    """Abstract class defining the basic functionality of converting a
    :obj:`logging.LogRecord` into a GELF log. Provides the boilerplate for
    all GELF handlers defined within graypy."""

    def __init__(
        self,
        debugging_fields=True,
        extra_fields=True,
        fqdn=False,
        localname=None,
        facility=None,
        level_names=False,
        compress=True,
    ):
        """Initialize the BaseGELFHandler

        :param debugging_fields: If :obj:`True` add debug fields from the
            log record into the GELF logs to be sent to Graylog.
        :type debugging_fields: bool

        :param extra_fields: If :obj:`True` add extra fields from the log
            record into the GELF logs to be sent to Graylog.
        :type extra_fields: bool

        :param fqdn: If :obj:`True` use the fully qualified domain name of
            localhost to populate the ``host`` GELF field.
        :type fqdn: bool

        :param localname: If specified and ``fqdn`` is :obj:`False`, use the
            specified hostname to populate the ``host`` GELF field.
        :type localname: str or None

        :param facility: If specified, replace the ``facility`` GELF field
            with the specified value. Also add a additional ``_logger``
            GELF field containing the ``LogRecord.name``.
        :type facility: str

        :param level_names: If :obj:`True` add a ``level_name`` GELF field
            containing the python logging level name (in addition to the
            numerical ``level`` field).
        :type level_names: bool

        :param compress: If :obj:`True` compress the GELF message before
            sending it to the Graylog server.
        :type compress: bool

        :raises ValueError: If both ``fqdn`` and ``localname`` are
            specified; they are mutually exclusive ways of populating
            the ``host`` GELF field.
        """
        logging.Handler.__init__(self)
        self.debugging_fields = debugging_fields
        self.extra_fields = extra_fields

        if fqdn and localname:
            raise ValueError("cannot specify 'fqdn' and 'localname' arguments together")

        self.fqdn = fqdn
        self.localname = localname
        self.facility = facility
        self.level_names = level_names
        self.compress = compress

    def makePickle(self, record):
        """Convert a :class:`logging.LogRecord` into bytes representing
        a GELF log

        :param record: :class:`logging.LogRecord` to convert into a GELF log.
        :type record: logging.LogRecord

        :return: Bytes representing a GELF log (zlib compressed if
            ``compress`` was set).
        :rtype: bytes
        """
        gelf_dict = self._make_gelf_dict(record)
        packed = self._pack_gelf_dict(gelf_dict)
        pickle = zlib.compress(packed) if self.compress else packed
        return pickle

    def _make_gelf_dict(self, record):
        """Create a dictionary representing a GELF log from a
        python :class:`logging.LogRecord`

        :param record: :class:`logging.LogRecord` to create a GELF log from.
        :type record: logging.LogRecord

        :return: Dictionary representing a GELF log.
        :rtype: dict
        """
        # construct the base GELF format
        gelf_dict = {
            "version": "1.0",
            "host": self._resolve_host(self.fqdn, self.localname),
            "short_message": self.formatter.format(record)
            if self.formatter
            else record.getMessage(),
            "timestamp": record.created,
            "level": SYSLOG_LEVELS.get(record.levelno, record.levelno),
            "facility": self.facility or record.name,
        }

        # add in specified optional extras
        self._add_full_message(gelf_dict, record)
        if self.level_names:
            self._add_level_names(gelf_dict, record)
        if self.facility is not None:
            self._set_custom_facility(gelf_dict, self.facility, record)
        if self.debugging_fields:
            self._add_debugging_fields(gelf_dict, record)
        if self.extra_fields:
            self._add_extra_fields(gelf_dict, record)
        return gelf_dict

    @staticmethod
    def _add_level_names(gelf_dict, record):
        """Add the ``level_name`` field to the ``gelf_dict`` which notes
        the logging level via the string error level names instead of
        numerical values

        :param gelf_dict: Dictionary representing a GELF log.
        :type gelf_dict: dict

        :param record: :class:`logging.LogRecord` to extract a logging
            level from to insert into the given ``gelf_dict``.
        :type record: logging.LogRecord
        """
        gelf_dict["level_name"] = logging.getLevelName(record.levelno)

    @staticmethod
    def _set_custom_facility(gelf_dict, facility_value, record):
        """Set the ``gelf_dict``'s ``facility`` field to the specified value

        Also add a additional ``_logger`` field containing the
        ``LogRecord.name``.

        :param gelf_dict: Dictionary representing a GELF log.
        :type gelf_dict: dict

        :param facility_value: Value to set as the ``gelf_dict``'s
            ``facility`` field.
        :type facility_value: str

        :param record: :class:`logging.LogRecord` to extract it's record
            name to insert into the given ``gelf_dict`` as the ``_logger``
            field.
        :type record: logging.LogRecord
        """
        gelf_dict.update({"facility": facility_value, "_logger": record.name})

    @staticmethod
    def _add_full_message(gelf_dict, record):
        """Add the ``full_message`` field to the ``gelf_dict`` if any
        traceback information exists within the logging record

        :param gelf_dict: Dictionary representing a GELF log.
        :type gelf_dict: dict

        :param record: :class:`logging.LogRecord` to extract a full
            logging message from to insert into the given ``gelf_dict``.
        :type record: logging.LogRecord
        """
        # if a traceback exists add it to the log as the full_message field
        full_message = None
        # format exception information if present
        if record.exc_info:
            full_message = "\n".join(traceback.format_exception(*record.exc_info))
        # use pre-formatted exception information in cases where the primary
        # exception information was removed, e.g. for LogRecord serialization
        if record.exc_text:
            full_message = record.exc_text
        if full_message:
            gelf_dict["full_message"] = full_message

    @staticmethod
    def _resolve_host(fqdn, localname):
        """Resolve the ``host`` GELF field

        :param fqdn: Boolean indicating whether to use :meth:`socket.getfqdn`
            to obtain the ``host`` GELF field.
        :type fqdn: bool

        :param localname: Use specified hostname as the ``host`` GELF field.
        :type localname: str or None

        :return: String representing the ``host`` GELF field.
        :rtype: str
        """
        if fqdn:
            return socket.getfqdn()
        elif localname is not None:
            return localname
        return socket.gethostname()

    @staticmethod
    def _add_debugging_fields(gelf_dict, record):
        """Add debugging fields to the given ``gelf_dict``

        :param gelf_dict: Dictionary representing a GELF log.
        :type gelf_dict: dict

        :param record: :class:`logging.LogRecord` to extract debugging
            fields from to insert into the given ``gelf_dict``.
        :type record: logging.LogRecord
        """
        gelf_dict.update(
            {
                "file": record.pathname,
                "line": record.lineno,
                "_function": record.funcName,
                "_pid": record.process,
                "_thread_name": record.threadName,
            }
        )
        # record.processName was added in Python 2.6.2
        pn = getattr(record, "processName", None)
        if pn is not None:
            gelf_dict["_process_name"] = pn

    @staticmethod
    def _add_extra_fields(gelf_dict, record):
        """Add extra fields to the given ``gelf_dict``

        However, this does not add additional fields in to ``message_dict``
        that are either duplicated from standard :class:`logging.LogRecord`
        attributes, duplicated from the python logging module source
        (e.g. ``exc_text``), or violate GELF format (i.e. ``id``).

        .. seealso::

            The list of standard :class:`logging.LogRecord` attributes can be
            found at:

            http://docs.python.org/library/logging.html#logrecord-attributes

        :param gelf_dict: Dictionary representing a GELF log.
        :type gelf_dict: dict

        :param record: :class:`logging.LogRecord` to extract extra fields
            from to insert into the given ``gelf_dict``.
        :type record: logging.LogRecord
        """
        # skip_list is used to filter additional fields in a log message.
        skip_list = (
            "args",
            "asctime",
            "created",
            "exc_info",
            "exc_text",
            "filename",
            "funcName",
            "id",
            "levelname",
            "levelno",
            "lineno",
            "module",
            "msecs",
            "message",
            "msg",
            "name",
            "pathname",
            "process",
            "processName",
            "relativeCreated",
            "thread",
            "threadName",
        )

        for key, value in record.__dict__.items():
            if key not in skip_list and not key.startswith("_"):
                gelf_dict["_%s" % key] = value

    @classmethod
    def _pack_gelf_dict(cls, gelf_dict):
        """Convert a given ``gelf_dict`` into JSON-encoded UTF-8 bytes, thus,
        creating an uncompressed GELF log ready for consumption by Graylog.

        Since we cannot be 100% sure of what is contained in the ``gelf_dict``
        we have to do some sanitation.

        :param gelf_dict: Dictionary representing a GELF log.
        :type gelf_dict: dict

        :return: Bytes representing a uncompressed GELF log.
        :rtype: bytes
        """
        gelf_dict = cls._sanitize_to_unicode(gelf_dict)
        # separators is the (item, key) separator pair; passing an explicit
        # tuple instead of the string ",:" (which json.dumps only accepts
        # because a 2-char string happens to unpack into two items) makes
        # the intent clear and matches the documented API
        packed = json.dumps(
            gelf_dict, separators=(",", ":"), default=cls._object_to_json
        )
        return packed.encode("utf-8")

    @classmethod
    def _sanitize_to_unicode(cls, obj):
        """Convert all strings records of the object to unicode

        :param obj: Object to sanitize to unicode.
        :type obj: object

        :return: Unicode string representing the given object.
        :rtype: str
        """
        if isinstance(obj, dict):
            return dict(
                (cls._sanitize_to_unicode(k), cls._sanitize_to_unicode(v))
                for k, v in obj.items()
            )
        if isinstance(obj, (list, tuple)):
            return obj.__class__([cls._sanitize_to_unicode(i) for i in obj])
        if isinstance(obj, data):
            obj = obj.decode("utf-8", errors="replace")
        return obj

    @staticmethod
    def _object_to_json(obj):
        """Convert objects that cannot be natively serialized into JSON
        into their string representation (for later JSON serialization).

        :class:`datetime.datetime` based objects will be converted into a
        ISO formatted timestamp string.

        :param obj: Object to convert into a string representation.
        :type obj: object

        :return: String representing the given object.
        :rtype: str
        """
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        return repr(obj)


class BaseGELFChunker(object):
    """Base UDP GELF message chunker

    .. warning::
        This will silently drop chunk overflowing GELF messages.
        (i.e. GELF messages that consist of more than 128 chunks)

    .. note::
        UDP GELF message chunking is only supported for the
        :class:`.handler.GELFUDPHandler`.
    """

    def __init__(self, chunk_size=WAN_CHUNK):
        """Initialize the BaseGELFChunker.

        :param chunk_size: Message chunk size. Messages larger than this
            size should be sent to Graylog in multiple chunks.
        :type chunk_size: int
        """
        self.chunk_size = chunk_size

    def _message_chunk_number(self, message):
        """Get the number of chunks a GELF message requires

        :param message: GELF message to size up.
        :type message: bytes

        :return: Number of chunks the specified GELF message requires.
        :rtype: int
        """
        # the * 1.0 forces true division under python 2
        return int(math.ceil(len(message) * 1.0 / self.chunk_size))

    @staticmethod
    def _encode(message_id, chunk_seq, total_chunks, chunk):
        """Prepend the GELF chunked-message header to a chunk payload

        The header is the two magic bytes ``\\x1e\\x0f``, an 8 byte
        message id shared by all chunks of one message, a 1 byte chunk
        sequence number, and a 1 byte total chunk count.
        """
        return b"".join(
            [
                b"\x1e\x0f",
                struct.pack("Q", message_id),
                struct.pack("B", chunk_seq),
                struct.pack("B", total_chunks),
                chunk,
            ]
        )

    def _gen_gelf_chunks(self, message):
        """Generate and iter chunks for a GELF message

        :param message: GELF message to generate and iter chunks for.
        :type message: bytes

        :return: Iterator of the chunks of a GELF message.
        :rtype: Iterator[bytes]
        """
        total_chunks = self._message_chunk_number(message)
        # random 64-bit id shared by every chunk of this message
        message_id = random.randint(0, 0xFFFFFFFFFFFFFFFF)
        for sequence, chunk in enumerate(
            (
                message[i : i + self.chunk_size]
                for i in range(0, len(message), self.chunk_size)
            )
        ):
            yield self._encode(message_id, sequence, total_chunks, chunk)

    def chunk_message(self, message):
        """Chunk a GELF message

        Silently drop chunk overflowing GELF messages.

        :param message: GELF message to chunk.
        :type message: bytes

        :return: Iterator of the chunks of a GELF message.
        :rtype: Iterator[bytes], None
        """
        if self._message_chunk_number(message) > GELF_MAX_CHUNK_NUMBER:
            return
        for chunk in self._gen_gelf_chunks(message):
            yield chunk


class GELFChunkOverflowWarning(Warning):
    """Warning that a chunked GELF UDP message requires more than 128 chunks"""
class GELFWarningChunker(BaseGELFChunker):
    """GELF UDP message chunker that warns and drops overflowing messages"""

    def chunk_message(self, message):
        """Chunk a GELF message

        Issue a :class:`.handler.GELFChunkOverflowWarning` on chunk
        overflowing GELF messages. Then drop them.

        :param message: GELF message to chunk.
        :type message: bytes

        :return: Iterator of the chunks of a GELF message.
        :rtype: Iterator[bytes], None
        """
        if self._message_chunk_number(message) > GELF_MAX_CHUNK_NUMBER:
            warnings.warn(
                "chunk overflowing GELF message: {}".format(message),
                GELFChunkOverflowWarning,
            )
            return
        for chunk in self._gen_gelf_chunks(message):
            yield chunk


class GELFTruncationFailureWarning(GELFChunkOverflowWarning):
    """Warning that the truncation of a chunked GELF UDP message failed
    to prevent chunk overflowing"""


class GELFTruncatingChunker(BaseGELFChunker):
    """GELF UDP message chunker that truncates overflowing messages"""

    def __init__(
        self,
        chunk_size=WAN_CHUNK,
        compress=True,
        gelf_packer=BaseGELFHandler._pack_gelf_dict,
    ):
        """Initialize the GELFTruncatingChunker

        :param chunk_size: Message chunk size. Messages larger than this
            size are sent to Graylog in multiple chunks.
        :type chunk_size: int

        :param compress: Boolean noting whether the given GELF messages are
            originally compressed.
        :type compress: bool

        :param gelf_packer: Function handle for packing a GELF dictionary
            into bytes. Should be of the form ``gelf_packer(gelf_dict)``.
        :type gelf_packer: Callable[dict]
        """
        BaseGELFChunker.__init__(self, chunk_size)
        self.gelf_packer = gelf_packer
        self.compress = compress

    def gen_chunk_overflow_gelf_log(self, raw_message):
        """Attempt to truncate a chunk overflowing GELF message

        The message is simplified down to a few essential fields, the
        ``short_message`` is clipped to fit, a ``_chunk_overflow`` flag
        is added, and the log level is forced to error.

        :param raw_message: Original bytes of a chunk overflowing GELF message.
        :type raw_message: bytes

        :return: Truncated and simplified version of raw_message.
        :rtype: bytes

        :raises GELFTruncationFailureWarning: If no amount of clipping
            brings the message under the chunk limit.
        """
        if self.compress:
            message = zlib.decompress(raw_message)
        else:
            message = raw_message

        gelf_dict = json.loads(message.decode("UTF-8"))
        # Simplified GELF message dictionary to base the truncated
        # GELF message from
        simplified_gelf_dict = {
            "version": gelf_dict["version"],
            "host": gelf_dict["host"],
            "short_message": "",
            "timestamp": gelf_dict["timestamp"],
            # force the error level so the overflow is noticeable
            "level": SYSLOG_LEVELS.get(logging.ERROR, logging.ERROR),
            "facility": gelf_dict["facility"],
            "_chunk_overflow": True,
        }

        # compute a estimate of the number of message chunks left this is
        # used to estimate the amount of truncation to apply
        gelf_chunks_free = GELF_MAX_CHUNK_NUMBER - self._message_chunk_number(
            zlib.compress(self.gelf_packer(simplified_gelf_dict))
            if self.compress
            else self.gelf_packer(simplified_gelf_dict)
        )
        truncated_short_message = gelf_dict["short_message"][
            : self.chunk_size * gelf_chunks_free
        ]
        # progressively clip one chunk's worth of characters off the
        # short_message until the packed message fits the chunk limit
        for clip in range(gelf_chunks_free, -1, -1):
            simplified_gelf_dict["short_message"] = truncated_short_message
            packed_message = self.gelf_packer(simplified_gelf_dict)
            if self.compress:
                packed_message = zlib.compress(packed_message)
            if self._message_chunk_number(packed_message) <= GELF_MAX_CHUNK_NUMBER:
                return packed_message
            else:
                truncated_short_message = truncated_short_message[: -self.chunk_size]
        else:
            raise GELFTruncationFailureWarning(
                "truncation failed preventing chunk overflowing for GELF message: {}".format(
                    raw_message
                )
            )

    def chunk_message(self, message):
        """Chunk a GELF message

        Issue a :class:`.handler.GELFChunkOverflowWarning` on chunk
        overflowing GELF messages. Then attempt to truncate and simplify the
        chunk overflowing GELF message so that it may be successfully
        chunked without overflowing.

        If the truncation and simplification of the chunk overflowing GELF
        message fails issue a :class:`.handler.GELFTruncationFailureWarning`
        and drop the overflowing GELF message.

        :param message: GELF message to chunk.
        :type message: bytes

        :return: Iterator of the chunks of a GELF message.
        :rtype: Iterator[bytes], None
        """
        if self._message_chunk_number(message) > GELF_MAX_CHUNK_NUMBER:
            warnings.warn(
                "truncating GELF chunk overflowing message: {}".format(message),
                GELFChunkOverflowWarning,
            )
            try:
                message = self.gen_chunk_overflow_gelf_log(message)
            except GELFTruncationFailureWarning as w:
                warnings.warn(w)
                return
        for chunk in self._gen_gelf_chunks(message):
            yield chunk


class GELFUDPHandler(BaseGELFHandler, DatagramHandler):
    """GELF UDP handler"""

    def __init__(self, host, port=12202, gelf_chunker=None, **kwargs):
        """Initialize the GELFUDPHandler

        .. note::
            By default a :class:`.handler.GELFWarningChunker` is used as the
            ``gelf_chunker``. Thus, GELF messages that chunk overflow will
            issue a :class:`.handler.GELFChunkOverflowWarning` and will be
            dropped.

        :param host: GELF UDP input host.
        :type host: str

        :param port: GELF UDP input port.
        :type port: int

        :param gelf_chunker: :class:`.handler.BaseGELFChunker` instance to
            handle chunking larger GELF messages. Defaults to a fresh
            :class:`.handler.GELFWarningChunker` per handler.
        :type gelf_chunker: BaseGELFChunker or None
        """
        BaseGELFHandler.__init__(self, **kwargs)
        DatagramHandler.__init__(self, host, port)
        # NOTE: the default chunker used to be a single instance created
        # at import time and shared by every GELFUDPHandler (a mutable
        # default argument); create one per handler instead
        if gelf_chunker is None:
            gelf_chunker = GELFWarningChunker()
        self.gelf_chunker = gelf_chunker

    def send(self, s):
        """Send a GELF message over UDP, chunking it if necessary

        :param s: GELF message (optionally zlib compressed) to send.
        :type s: bytes
        """
        if len(s) < self.gelf_chunker.chunk_size:
            super(GELFUDPHandler, self).send(s)
        else:
            for chunk in self.gelf_chunker.chunk_message(s):
                super(GELFUDPHandler, self).send(chunk)
class GELFTCPHandler(BaseGELFHandler, SocketHandler):
    """GELF TCP handler"""

    def __init__(self, host, port=12201, **kwargs):
        """Initialize the GELFTCPHandler

        :param host: GELF TCP input host.
        :type host: str

        :param port: GELF TCP input port.
        :type port: int

        .. attention::
            GELF TCP does not support compression due to the use of the null
            byte (``\\0``) as frame delimiter.

            Thus, :class:`.handler.GELFTCPHandler` does not support setting
            ``compress`` to :obj:`True` and is locked to :obj:`False`.
        """
        BaseGELFHandler.__init__(self, compress=False, **kwargs)
        SocketHandler.__init__(self, host, port)

    def makePickle(self, record):
        """Add a null terminator to generated pickles as TCP frame objects
        need to be null terminated

        :param record: :class:`logging.LogRecord` to create a null
            terminated GELF log.
        :type record: logging.LogRecord

        :return: Null terminated bytes representing a GELF log.
        :rtype: bytes
        """
        return super(GELFTCPHandler, self).makePickle(record) + b"\x00"


class GELFTLSHandler(GELFTCPHandler):
    """GELF TCP handler with TLS support"""

    def __init__(
        self,
        host,
        port=12204,
        validate=False,
        ca_certs=None,
        certfile=None,
        keyfile=None,
        **kwargs
    ):
        """Initialize the GELFTLSHandler

        :param host: GELF TLS input host.
        :type host: str

        :param port: GELF TLS input port.
        :type port: int

        :param validate: If :obj:`True`, validate the Graylog server's
            certificate. In this case specifying ``ca_certs`` is also
            required.
        :type validate: bool

        :param ca_certs: Path to CA bundle file.
        :type ca_certs: str

        :param certfile: Path to the client certificate file.
        :type certfile: str

        :param keyfile: Path to the client private key. If the private key is
            stored with the certificate, this parameter can be ignored.
        :type keyfile: str

        :raises ValueError: If ``validate`` is set without ``ca_certs``,
            or ``keyfile`` is given without ``certfile``.
        """
        if validate and ca_certs is None:
            raise ValueError("CA bundle file path must be specified")

        if keyfile is not None and certfile is None:
            raise ValueError("certfile must be specified")

        GELFTCPHandler.__init__(self, host=host, port=port, **kwargs)

        self.ca_certs = ca_certs
        self.reqs = ssl.CERT_REQUIRED if validate else ssl.CERT_NONE
        self.certfile = certfile
        # fall back to the certfile in case the key is stored within it
        self.keyfile = keyfile if keyfile else certfile

    def makeSocket(self, timeout=1):
        """Create a TLS wrapped socket connected to the Graylog server

        :param timeout: Socket timeout in seconds.
        :type timeout: int

        :return: TLS wrapped socket.
        :rtype: ssl.SSLSocket
        """
        plain_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        if hasattr(plain_socket, "settimeout"):
            plain_socket.settimeout(timeout)

        # ssl.wrap_socket was deprecated in python 3.7 and removed in
        # python 3.12; build an equivalent SSLContext explicitly
        # (no hostname checking, certificate verification per ``validate``)
        context = ssl.SSLContext(ssl.PROTOCOL_TLS)
        context.check_hostname = False
        context.verify_mode = self.reqs
        if self.ca_certs is not None:
            context.load_verify_locations(self.ca_certs)
        if self.certfile is not None:
            context.load_cert_chain(self.certfile, keyfile=self.keyfile)

        wrapped_socket = context.wrap_socket(plain_socket)
        wrapped_socket.connect((self.host, self.port))

        return wrapped_socket


# TODO: add https?
class GELFHTTPHandler(BaseGELFHandler):
    """GELF HTTP handler"""

    def __init__(
        self, host, port=12203, compress=True, path="/gelf", timeout=5, **kwargs
    ):
        """Initialize the GELFHTTPHandler

        :param host: GELF HTTP input host.
        :type host: str

        :param port: GELF HTTP input port.
        :type port: int

        :param compress: If :obj:`True` compress the GELF message before
            sending it to the Graylog server.
        :type compress: bool

        :param path: Path of the HTTP input.
            (see http://docs.graylog.org/en/latest/pages/sending_data.html#gelf-via-http)
        :type path: str

        :param timeout: Number of seconds the HTTP client should wait before
            it discards the request if the Graylog server doesn't respond.
        :type timeout: int
        """
        BaseGELFHandler.__init__(self, compress=compress, **kwargs)

        self.host = host
        self.port = port
        self.path = path
        self.timeout = timeout
        self.headers = {}

        if compress:
            self.headers["Content-Encoding"] = "gzip,deflate"

    def emit(self, record):
        """Convert a :class:`logging.LogRecord` to GELF and emit it to Graylog
        via a HTTP POST request

        :param record: :class:`logging.LogRecord` to convert into a GELF log
            and emit to Graylog via a HTTP POST request.
        :type record: logging.LogRecord
        """
        pickle = self.makePickle(record)
        connection = httplib.HTTPConnection(
            host=self.host, port=self.port, timeout=self.timeout
        )
        try:
            connection.request("POST", self.path, pickle, self.headers)
        finally:
            # close explicitly instead of leaving the socket to be
            # reclaimed by garbage collection, which may be arbitrarily
            # late on some interpreters
            connection.close()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Logging Handler integrating RabbitMQ and
Graylog Extended Log Format (GELF)"""

import json
from logging import Filter
from logging.handlers import SocketHandler

from amqplib import client_0_8 as amqp  # pylint: disable=import-error

from graypy.handler import BaseGELFHandler

try:
    from urllib.parse import urlparse, unquote
except ImportError:
    from urlparse import urlparse
    from urllib import unquote


def _ifnone(value, fallback):
    """Return ``value`` unless it is :obj:`None`, then ``fallback``."""
    return fallback if value is None else value


class GELFRabbitHandler(BaseGELFHandler, SocketHandler):
    """RabbitMQ / GELF handler

    .. note::

        This handler ignores all messages logged by amqplib.
    """

    def __init__(
        self,
        url,
        exchange="logging.gelf",
        exchange_type="fanout",
        virtual_host="/",
        routing_key="",
        **kwargs
    ):
        """Initialize the GELFRabbitHandler

        :param url: RabbitMQ URL (ex: amqp://guest:guest@localhost:5672/)
        :type url: str

        :param exchange: RabbitMQ exchange. A queue binding must be defined
            on the server to prevent GELF logs from being dropped.
        :type exchange: str

        :param exchange_type: RabbitMQ exchange type.
        :type exchange_type: str

        :param virtual_host: RabbitMQ virtual host; overridden by a
            virtual host embedded within ``url``.
        :type virtual_host: str

        :param routing_key: RabbitMQ routing key for published GELF logs.
        :type routing_key: str

        :raises ValueError: If ``url`` does not use the ``amqp`` scheme.
        """
        self.url = url
        parsed = urlparse(url)
        if parsed.scheme != "amqp":
            raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
        host = parsed.hostname or "localhost"
        port = _ifnone(parsed.port, 5672)
        # a non-empty (url-decoded) URL path overrides the virtual_host arg
        url_vhost = unquote(parsed.path[1:])
        self.virtual_host = url_vhost if url_vhost else virtual_host
        self.cn_args = {
            "host": "%s:%s" % (host, port),
            "userid": _ifnone(parsed.username, "guest"),
            "password": _ifnone(parsed.password, "guest"),
            "virtual_host": self.virtual_host,
            "insist": False,
        }
        self.exchange = exchange
        self.exchange_type = exchange_type
        self.routing_key = routing_key
        BaseGELFHandler.__init__(self, **kwargs)
        SocketHandler.__init__(self, host, port)
        # suppress records emitted by amqplib itself to avoid feedback loops
        self.addFilter(ExcludeFilter("amqplib"))

    def makeSocket(self, timeout=1):
        """Create a :class:`RabbitSocket` connection to RabbitMQ

        :param timeout: Connection timeout in seconds.
        :type timeout: int

        :return: Open connection to RabbitMQ.
        :rtype: RabbitSocket
        """
        return RabbitSocket(
            self.cn_args, timeout, self.exchange, self.exchange_type, self.routing_key
        )

    def makePickle(self, record):
        """Convert a :class:`logging.LogRecord` into a JSON GELF log

        :param record: :class:`logging.LogRecord` to convert.
        :type record: logging.LogRecord

        :return: JSON string representing a GELF log.
        :rtype: str
        """
        return json.dumps(self._make_gelf_dict(record))


class RabbitSocket(object):
    """Socket-like adapter that publishes GELF logs to a RabbitMQ exchange"""

    def __init__(self, cn_args, timeout, exchange, exchange_type, routing_key):
        """Open a RabbitMQ connection and declare the target exchange

        :param cn_args: Keyword arguments for :class:`amqp.Connection`.
        :type cn_args: dict

        :param timeout: Connection timeout in seconds.
        :type timeout: int

        :param exchange: RabbitMQ exchange to declare and publish to.
        :type exchange: str

        :param exchange_type: RabbitMQ exchange type.
        :type exchange_type: str

        :param routing_key: RabbitMQ routing key for published messages.
        :type routing_key: str
        """
        self.cn_args = cn_args
        self.timeout = timeout
        self.exchange = exchange
        self.exchange_type = exchange_type
        self.routing_key = routing_key
        self.connection = amqp.Connection(connection_timeout=timeout, **self.cn_args)
        self.channel = self.connection.channel()
        # durable, non-auto-deleted exchange so logs survive broker restarts
        self.channel.exchange_declare(
            exchange=self.exchange,
            type=self.exchange_type,
            durable=True,
            auto_delete=False,
        )

    def sendall(self, data):
        """Publish ``data`` as a persistent message to the exchange"""
        message = amqp.Message(data, delivery_mode=2)
        self.channel.basic_publish(
            message, exchange=self.exchange, routing_key=self.routing_key
        )

    def close(self):
        """Close the connection to the RabbitMQ socket"""
        try:
            self.connection.close()
        except Exception:  # best-effort close; the broker may already be gone
            pass
class ExcludeFilter(Filter):
    """A :class:`logging.Filter` that drops (filters out) all records
    produced by the named logger and any of its child loggers."""

    def __init__(self, name):
        """Initialize the ExcludeFilter

        :param name: Name to match for within a :class:`logging.LogRecord`'s
            ``name`` field for filtering.
        :type name: str

        :raises ValueError: if ``name`` is empty.
        """
        if not name:
            raise ValueError("ExcludeFilter requires a non-empty name")
        Filter.__init__(self, name)

    def filter(self, record):
        # records from unrelated loggers always pass through
        if not record.name.startswith(self.name):
            return True
        # an exact match on the excluded logger is dropped
        if len(record.name) == self.nlen:
            return False
        # only drop true children ("name.child"), not lookalike prefixes
        # such as "nameX"
        return record.name[self.nlen] != "."
" 32 | "Do not test GELFRabbitHandler if not specified.", 33 | ) 34 | parser.add_argument( 35 | "--rabbit-exchange", 36 | default="logging.gelf", 37 | help="RabbitMQ exchange. Default: logging.gelf", 38 | ) 39 | parser.add_argument("--console-logger", action="store_true", default=None) 40 | parser.add_argument( 41 | "--stress", 42 | action="store_true", 43 | help="Enable performance/stress test. WARNING this logs MANY warnings.", 44 | ) 45 | args = parser.parse_args(argv[1:]) 46 | 47 | if all( 48 | v is None for v in [args.graylog_host, args.rabbit_url, args.console_logger] 49 | ): 50 | parser.print_help() 51 | 52 | config = { 53 | "version": 1, 54 | "formatters": { 55 | "brief": {"format": "%(levelname)-7s %(name)s - %(message)s"}, 56 | "message": {"format": "%(message)s"}, 57 | }, 58 | "handlers": {}, 59 | "root": {"handlers": [], "level": "DEBUG"}, 60 | "disable_existing_loggers": False, 61 | } 62 | 63 | if args.graylog_host is not None: 64 | config["handlers"]["graylog_udp"] = { 65 | "class": "graypy.GELFUDPHandler", 66 | "host": args.graylog_host, 67 | "port": args.graylog_port, 68 | "debugging_fields": 0, 69 | "formatter": "message", 70 | } 71 | if args.graylog_chunked: 72 | config["handlers"]["graylog_udp"]["chunk_size"] = 1 73 | 74 | config["root"]["handlers"].append("graylog_udp") 75 | 76 | if args.rabbit_url is not None: 77 | config["handlers"]["graylog_rabbit"] = { 78 | "class": "graypy.GELFRabbitHandler", 79 | "url": args.rabbit_url, 80 | "exchange": args.rabbit_exchange, 81 | "debugging_fields": 0, 82 | "formatter": "message", 83 | } 84 | config["root"]["handlers"].append("graylog_rabbit") 85 | 86 | if args.console_logger: 87 | config["handlers"]["console"] = { 88 | "class": "logging.StreamHandler", 89 | "formatter": "brief", 90 | } 91 | config["root"]["handlers"].append("console") 92 | 93 | logging.config.dictConfig(config) 94 | 95 | log = logging.getLogger() 96 | t_start = time.time() 97 | total = 0 98 | 99 | log.debug("debug") 100 | log.info("info") 
101 | log.warn("warning") 102 | log.error("error") 103 | log.critical("critical") 104 | total += 5 105 | 106 | if args.stress: 107 | t_end = time.time() + 10 108 | tx = t_end - 9 109 | cx = 0 110 | while True: 111 | log.warn("warning") 112 | cx += 1 113 | total += 1 114 | if time.time() > tx: 115 | elapsed = time.time() - (tx - 1) 116 | tx += 1 117 | print( 118 | "%s messages in %.3f seconds (%.3f msg/s)" 119 | % (cx, elapsed, cx / elapsed) 120 | ) 121 | cx = 0 122 | if tx > t_end: 123 | break 124 | 125 | elapsed = time.time() - t_start 126 | print( 127 | "%s messages in %.3f seconds (%.3f msg/s)" % (total, elapsed, total / elapsed) 128 | ) 129 | 130 | 131 | if __name__ == "__main__": 132 | main() 133 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | 3 | build: 4 | image: latest 5 | 6 | python: 7 | version: 3.6 8 | pip_install: true 9 | extra_requirements: 10 | - docs 11 | - amqp 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 3 | 4 | [metadata] 5 | license_file = LICENSE 6 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """setup.py for graypy""" 5 | 6 | import codecs 7 | import re 8 | import sys 9 | import os 10 | 11 | from setuptools import setup, find_packages 12 | from setuptools.command.test import test 13 | 14 | 15 | def find_version(*file_paths): 16 | with codecs.open( 17 | os.path.join(os.path.abspath(os.path.dirname(__file__)), *file_paths), "r" 18 | ) as fp: 19 | version_file = fp.read() 20 | m = re.search(r"^__version__ = \((\d+), ?(\d+), ?(\d+)\)", 
class PyTest(test):
    """``setup.py test`` command that runs the pytest suite."""

    user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]

    def initialize_options(self):
        test.initialize_options(self)
        # default: verbose run with coverage over the graypy package
        self.pytest_args = "-v --cov=graypy"

    def run_tests(self):
        # imported lazily: pytest/shlex are only needed once the
        # eggs are loaded and the test command actually runs
        import shlex

        import pytest

        sys.exit(pytest.main(shlex.split(self.pytest_args)))
:: Python", 97 | "Programming Language :: Python :: 2", 98 | "Programming Language :: Python :: 2.7", 99 | "Programming Language :: Python :: 3", 100 | "Programming Language :: Python :: 3.2", 101 | "Programming Language :: Python :: 3.3", 102 | "Programming Language :: Python :: 3.4", 103 | "Programming Language :: Python :: 3.5", 104 | "Programming Language :: Python :: 3.6", 105 | "Programming Language :: Python :: 3.7", 106 | "Programming Language :: Python :: Implementation :: CPython", 107 | "Programming Language :: Python :: Implementation :: PyPy", 108 | "Topic :: System :: Logging", 109 | ], 110 | cmdclass={"test": PyTest, "lint": Pylint}, 111 | ) 112 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """pytests for :mod:`graypy`""" 5 | -------------------------------------------------------------------------------- /tests/config/create_ssl_certs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # 4 | # This scrip generate self-signed certificate to be used in development. 5 | # it sets the CN to the first provided hostname and will add all other 6 | # provided names to subjectAltName. 7 | # 8 | # Some Magic is added to the script that tries to find some settings for the 9 | # current host where this script is started. 10 | # 11 | # This script was first created by Jan Doberstein 2017-07-30 12 | # 13 | # This script is tested on CentOS 7, Ubuntu 14.04, Ubuntu 16.04, MacOS 10.12 14 | 15 | OPSSLBIN=$(which openssl) 16 | 17 | while getopts "d:h:i:m?" opt; do 18 | case ${opt} in 19 | h) HNAME+=("${OPTARG}");; 20 | i) HIP+=("${OPTARG}");; 21 | m) MMODE=active;; 22 | d) VALIDDAYS=${OPTARG};; 23 | s) KEYSECRET=${OPTARG};; 24 | ?) 
HELPME=yes;;
*) HELPME=yes;;
esac
done

if [ -n "${HELPME}" ]; then
echo "
This script will generate self-signed ssl certificates, they will be written to the current directory
Options available:
-h to set Hostnames (can be used multiple times)
-i to set IP Adresses (can be used multiple times)
-m (optional) activates a magic mode where the script try to find Hostnames and IPs of the current Host
-d (optional) Number of Days the certificate is valid (default=365)
-s (optional) The secret that is used for the crypted key (default=secret)
"
exit 0

fi

if [ -n "${MMODE}" ]; then
echo "Magic Mode is on
this will try to find the hostname and IP of host where this script is executed.
it will then add this to the list of possible Hostnames and IPs

If you get an error with the Magic Mode then retry with only one hostname set via -h option

"

HOSTNAME_BIN=$(type -p hostname)

# possible addition
#
# try if dig is installed and check the hostname and ip resolve
# dig_bin=$(which dig)

if [ -n "${HOSTNAME_BIN}" ];then
HNAME+=("$(hostname -s)")
HNAME+=("$(hostname -A)")
# add localhost as hostname to easy up debugging
HNAME+=(localhost)
# try if hostname -I returns the IP, if not
# nasty workaround two steps because the array will get
# entries that can't be parsed out correct
# FIX: was `$({hostname ...` — the stray "{" made bash look for a
# command literally named "{hostname", so the echo fallback always
# ran and the detected IP was always 127.0.0.1
GETIP=$(hostname -I 2>/dev/null || echo "127.0.0.1")
HIP+=($(echo $GETIP | tr -d '[:blank:]'))
else
echo "The command hostname can't be found
aborting Magic mode
please use manual mode and provide at least one hostname with -h
"
exit 1
fi

# take all IP addresses returned by the command IP into the list
# first check if all binaries are present that are needed
# (when only bash build-ins are needed would be awesome)
IPCMD=$(type -p ip)
81 | GRPCMD=$(type -p grep) 82 | AWKCMD=$(type -p awk) 83 | CUTCMD=$(type -p cut) 84 | 85 | if [ -n "${IPCMD}" ] && [ -n "${GRPCMD}" ] && [ -n "${AWKCMD}" ] && [ -n "${CUTCMD}" ]; then 86 | # to avoid error output in the array 2>/dev/null 87 | # every IP that is returned will be added to the array 88 | # ip addr show | grep 'inet ' | awk '{ print $2}' | cut -d"/" -f1 89 | HIP+=($("${IPCMD}" addr show 2>/dev/null | "${GRPCMD}" 'inet ' 2>/dev/null| "${AWKCMD}" '{print $2}' 2>/dev/null| "${CUTCMD}" -d"/" -f1 2>/dev/null)) 90 | fi 91 | fi 92 | 93 | if [ -z "${HNAME}" ]; then 94 | echo "please provide hostname (-h) at least once. Try -? for help."; 95 | exit 1; 96 | fi 97 | 98 | if [ -z "${OPSSLBIN}" ]; then 99 | echo "no openssl detected aborting" 100 | exit 1; 101 | fi 102 | 103 | # set localhost IP if no other set 104 | if [ -z "${HIP}" ]; then 105 | HIP+=(127.0.0.1) 106 | fi 107 | 108 | # if no VALIDDAYS are set, default 365 109 | if [ -z "${VALIDDAYS}" ]; then 110 | VALIDDAYS=365 111 | fi 112 | 113 | # if no Key provided, set default secret 114 | if [ -z "${KEYSECRET}" ]; then 115 | KEYSECRET=secret 116 | fi 117 | 118 | 119 | # sort array entries and make them uniq 120 | NAMES=($(printf "DNS:%q\n" ${HNAME[@]} | sort -u)) 121 | IPADD=($(printf "IP:%q\n" ${HIP[@]} | sort -u)) 122 | 123 | # print each elemet of both arrays with comma seperator 124 | # and create a string from the array content 125 | SUBALT=$(IFS=','; echo "${NAMES[*]},${IPADD[*]}") 126 | 127 | #### output some informatione 128 | echo "This script will generate a SSL certificate with the following settings: 129 | CN Hostname = ${HNAME} 130 | subjectAltName = ${SUBALT} 131 | " 132 | # --------------------------- 133 | 134 | local_openssl_config=" 135 | [ req ] 136 | prompt = no 137 | distinguished_name = req_distinguished_name 138 | x509_extensions = san_self_signed 139 | [ req_distinguished_name ] 140 | CN=${HNAME} 141 | [ san_self_signed ] 142 | subjectAltName = ${SUBALT} 143 | subjectKeyIdentifier = 
hash 144 | authorityKeyIdentifier = keyid:always,issuer 145 | basicConstraints = CA:true 146 | " 147 | 148 | ${OPSSLBIN} req \ 149 | -newkey rsa:2048 -nodes \ 150 | -keyout "${HNAME}.pkcs5-plain.key.pem" \ 151 | -x509 -sha256 -days ${VALIDDAYS} \ 152 | -config <(echo "$local_openssl_config") \ 153 | -out "${HNAME}.cert.pem" 2>openssl_error.log || { echo -e "ERROR !\nOpenSSL returns an error, sorry this script will not work \n Possible reason: the openssl version is to old and does not support self signed san certificates \n Check openssl_error.log in your current directory for details"; exit 1; } 154 | 155 | ${OPSSLBIN} pkcs8 -in "${HNAME}.pkcs5-plain.key.pem" -topk8 -nocrypt -out "${HNAME}.pkcs8-plain.key.pem" 156 | ${OPSSLBIN} pkcs8 -in "${HNAME}.pkcs5-plain.key.pem" -topk8 -passout pass:"${KEYSECRET}" -out "${HNAME}.pkcs8-encrypted.key.pem" 157 | 158 | echo "the following files are written to the current directory:" 159 | echo " - ${HNAME}.pkcs5-plain.key.pem" 160 | echo " - ${HNAME}.pkcs8-plain.key.pem" 161 | echo " - ${HNAME}.pkcs8-encrypted.key.pem" 162 | echo " with the password: ${KEYSECRET}" 163 | echo "" 164 | 165 | rm openssl_error.log 166 | 167 | #EOF 168 | -------------------------------------------------------------------------------- /tests/config/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | mongo: 4 | image: "mongo:3" 5 | elasticsearch: 6 | image: "elasticsearch:2" 7 | command: "elasticsearch -Des.cluster.name='graylog'" 8 | graylog: 9 | image: graylog2/server 10 | environment: 11 | GRAYLOG_PASSWORD_SECRET: CVanHILkuYhsxE50BrNR6FFt75rS3h0V2uUlHxAshGB90guZznEoDxN7zhPx6Bcn61mfhY2T5r0PRkZVwowsTkHU2rBZnv0d 12 | GRAYLOG_ROOT_PASSWORD_SHA2: 8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918 13 | GRAYLOG_WEB_ENDPOINT_URI: http://127.0.0.1:9000/api 14 | GRAYLOG_CONTENT_PACKS_AUTO_LOAD: grok-patterns.json,inputs.json 15 | GRAYLOG_ELASTICSEARCH_HOSTS: 
http://elasticsearch:9200 16 | volumes: 17 | - ./inputs.json:/usr/share/graylog/data/contentpacks/inputs.json 18 | - ./localhost.cert.pem:/usr/share/graylog/data/cert.pem 19 | - ./localhost.pkcs8-encrypted.key.pem:/usr/share/graylog/data/key.pem 20 | links: 21 | - mongo 22 | - elasticsearch 23 | depends_on: 24 | - mongo 25 | - elasticsearch 26 | ports: 27 | - "9000:9000" 28 | - "12201:12201/tcp" 29 | - "12202:12202/udp" 30 | - "12203:12203" 31 | - "12204:12204/tcp" 32 | -------------------------------------------------------------------------------- /tests/config/inputs.json: -------------------------------------------------------------------------------- 1 | { 2 | "inputs": [ 3 | { 4 | "title": "tcp", 5 | "configuration": { 6 | "bind_address": "0.0.0.0", 7 | "port": 12201 8 | }, 9 | "type": "org.graylog2.inputs.gelf.tcp.GELFTCPInput", 10 | "global": true 11 | }, 12 | { 13 | "title": "udp", 14 | "configuration": { 15 | "bind_address": "0.0.0.0", 16 | "port": 12202 17 | }, 18 | "type": "org.graylog2.inputs.gelf.udp.GELFUDPInput", 19 | "global": true 20 | }, 21 | { 22 | "title": "http", 23 | "configuration": { 24 | "bind_address": "0.0.0.0", 25 | "port": 12203 26 | }, 27 | "type": "org.graylog2.inputs.gelf.http.GELFHttpInput", 28 | "global": true 29 | }, 30 | { 31 | "title": "tls", 32 | "configuration": { 33 | "bind_address": "0.0.0.0", 34 | "port": 12204, 35 | "tls_enable": true, 36 | "tls_cert_file": "/usr/share/graylog/data/cert.pem", 37 | "tls_key_file": "/usr/share/graylog/data/key.pem", 38 | "tls_key_password": "secret" 39 | }, 40 | "type": "org.graylog2.inputs.gelf.tcp.GELFTCPInput", 41 | "global": true 42 | } 43 | ], 44 | "streams": [], 45 | "outputs": [], 46 | "dashboards": [], 47 | "grok_patterns": [] 48 | } -------------------------------------------------------------------------------- /tests/config/start_local_graylog_server.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # start a 
local graylog server for integration testing graypy 4 | 5 | # do work within ./test/config directory 6 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" 7 | cd ${DIR} 8 | 9 | # create ssl certs for enabling the graylog server to use a 10 | # TLS connection for GELF input 11 | bash create_ssl_certs.sh -h localhost -i 127.0.0.1 12 | 13 | # start the graylog server docker container 14 | docker-compose -f docker-compose.yml down 15 | docker-compose -f docker-compose.yml up -d 16 | 17 | # wait for the graylog server docker container to start 18 | sleep 40 19 | 20 | # test that the graylog server docker container is started 21 | curl -u admin:admin 'http://127.0.0.1:9000/api/search/universal/relative?query=test&range=5&fields=message' || true 22 | -------------------------------------------------------------------------------- /tests/config/stop_local_graylog_server.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # stop the local graylog server used for integration testing graypy 4 | 5 | # do work within ./test/config directory 6 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" 7 | cd ${DIR} 8 | 9 | docker-compose -f docker-compose.yml down 10 | -------------------------------------------------------------------------------- /tests/helper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """helper functions for testing graypy 5 | 6 | These functions are used for both the integration and unit testing. 
7 | """ 8 | 9 | import logging 10 | import pytest 11 | 12 | from graypy import GELFUDPHandler, GELFTCPHandler, GELFTLSHandler, GELFHTTPHandler 13 | 14 | TEST_CERT = "tests/config/localhost.cert.pem" 15 | KEY_PASS = "secret" 16 | 17 | TEST_TCP_PORT = 12201 18 | TEST_UDP_PORT = 12202 19 | TEST_HTTP_PORT = 12203 20 | TEST_TLS_PORT = 12204 21 | 22 | 23 | @pytest.fixture( 24 | params=[ 25 | GELFTCPHandler("127.0.0.1", TEST_TCP_PORT), 26 | GELFTCPHandler("127.0.0.1", TEST_TCP_PORT, extra_fields=True), 27 | GELFTCPHandler( 28 | "127.0.0.1", TEST_TCP_PORT, extra_fields=True, debugging_fields=True 29 | ), 30 | GELFTLSHandler("localhost", TEST_TLS_PORT), 31 | GELFTLSHandler("localhost", TEST_TLS_PORT, validate=True, ca_certs=TEST_CERT), 32 | GELFTLSHandler("127.0.0.1", TEST_TLS_PORT), 33 | GELFTLSHandler("127.0.0.1", TEST_TLS_PORT, validate=True, ca_certs=TEST_CERT), 34 | GELFHTTPHandler("127.0.0.1", TEST_HTTP_PORT), 35 | GELFHTTPHandler("127.0.0.1", TEST_HTTP_PORT, compress=False), 36 | GELFUDPHandler("127.0.0.1", TEST_UDP_PORT), 37 | GELFUDPHandler("127.0.0.1", TEST_UDP_PORT, compress=False), 38 | # the below handler tests are essentially smoke tests 39 | # that help cover the argument permutations of BaseGELFHandler 40 | GELFUDPHandler( 41 | "127.0.0.1", 42 | TEST_UDP_PORT, 43 | debugging_fields=True, 44 | extra_fields=True, 45 | localname="foobar_localname", 46 | facility="foobar_facility", 47 | level_names=True, 48 | compress=False, 49 | ), 50 | GELFUDPHandler( 51 | "127.0.0.1", 52 | TEST_UDP_PORT, 53 | debugging_fields=True, 54 | extra_fields=True, 55 | fqdn=True, 56 | facility="foobar_facility", 57 | level_names=True, 58 | compress=False, 59 | ), 60 | ] 61 | ) 62 | def handler(request): 63 | return request.param 64 | 65 | 66 | @pytest.fixture 67 | def logger(handler): 68 | logger_ = logging.getLogger("test_logger") 69 | logger_.addHandler(handler) 70 | yield logger_ 71 | logger_.removeHandler(handler) 72 | 73 | 74 | @pytest.fixture 75 | def formatted_logger(handler): 
def validate_local_graylog_up():
    """Test to see if a localhost instance of Graylog is currently running.

    :return: :obj:`True` if the local Graylog REST API on 127.0.0.1:9000
        answered, :obj:`False` otherwise.
    :rtype: bool
    """
    try:
        # bound the probe: requests has no default timeout, so a wedged
        # or firewalled server would otherwise hang test collection
        requests.get("http://127.0.0.1:9000/api", timeout=5)
        return True
    except Exception:
        # any failure (connection refused, timeout, DNS, ...) simply
        # means the integration tests should be skipped
        return False
| return _parse_api_response( 40 | api_response=_get_api_response(message, fields), wanted_message=message 41 | ) 42 | except ValueError: 43 | sleep(2) 44 | if tries == 5: 45 | raise 46 | tries += 1 47 | 48 | 49 | def _build_api_string(message, fields): 50 | return BASE_API_URL.format(message) + "%2C".join(set(DEFAULT_FIELDS + fields)) 51 | 52 | 53 | def _get_api_response(message, fields): 54 | url = _build_api_string(message, fields) 55 | api_response = requests.get( 56 | url, auth=("admin", "admin"), headers={"accept": "application/json"} 57 | ) 58 | return api_response 59 | 60 | 61 | def _parse_api_response(api_response, wanted_message): 62 | assert api_response.status_code == 200 63 | print(api_response.json()) 64 | for message in api_response.json()["messages"]: 65 | if message["message"]["message"] == wanted_message: 66 | return message["message"] 67 | raise ValueError( 68 | "wanted_message: '{}' not within api_response: {}".format( 69 | wanted_message, api_response 70 | ) 71 | ) 72 | -------------------------------------------------------------------------------- /tests/integration/test_chunked_logging.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """pytests sending logs to a local Graylog instance that need to be chunked""" 5 | 6 | import logging 7 | 8 | import pytest 9 | 10 | from graypy.handler import ( 11 | SYSLOG_LEVELS, 12 | GELFUDPHandler, 13 | GELFWarningChunker, 14 | BaseGELFChunker, 15 | GELFTruncatingChunker, 16 | ) 17 | 18 | from tests.helper import TEST_UDP_PORT 19 | from tests.integration import LOCAL_GRAYLOG_UP 20 | from tests.integration.helper import get_unique_message, get_graylog_response 21 | 22 | 23 | @pytest.mark.parametrize( 24 | "gelf_chunker", [BaseGELFChunker, GELFWarningChunker, GELFTruncatingChunker] 25 | ) 26 | @pytest.mark.skipif(not LOCAL_GRAYLOG_UP, reason="local Graylog instance not up") 27 | def 
test_chunked_logging(gelf_chunker): 28 | """Test sending a log that requires chunking to be fully sent""" 29 | logger = logging.getLogger("test_chunked_logger") 30 | handler = GELFUDPHandler( 31 | "127.0.0.1", TEST_UDP_PORT, gelf_chunker=gelf_chunker(chunk_size=10) 32 | ) 33 | logger.addHandler(handler) 34 | message = get_unique_message() 35 | logger.error(message) 36 | graylog_response = get_graylog_response(message) 37 | assert message == graylog_response["message"] 38 | assert "long_message" not in graylog_response 39 | assert "timestamp" in graylog_response 40 | assert SYSLOG_LEVELS[logging.ERROR] == graylog_response["level"] 41 | -------------------------------------------------------------------------------- /tests/integration/test_common_logging.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """pytests sending logs to a local Graylog instance""" 5 | 6 | import logging 7 | 8 | import pytest 9 | 10 | from graypy.handler import SYSLOG_LEVELS 11 | 12 | from tests.helper import handler, logger 13 | from tests.integration import LOCAL_GRAYLOG_UP 14 | from tests.integration.helper import get_unique_message, get_graylog_response 15 | 16 | 17 | @pytest.mark.skipif(not LOCAL_GRAYLOG_UP, reason="local Graylog instance not up") 18 | def test_common_logging(logger): 19 | """Test sending a common usage log""" 20 | message = get_unique_message() 21 | logger.error(message) 22 | graylog_response = get_graylog_response(message) 23 | assert message == graylog_response["message"] 24 | assert "long_message" not in graylog_response 25 | assert "timestamp" in graylog_response 26 | assert SYSLOG_LEVELS[logging.ERROR] == graylog_response["level"] 27 | -------------------------------------------------------------------------------- /tests/integration/test_debugging_fields.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 
# -*- coding: utf-8 -*- 3 | 4 | """pytests validating the emitting of valid debugging fields for graypy 5 | loggers""" 6 | 7 | import pytest 8 | 9 | from tests.helper import ( 10 | logger, 11 | TEST_CERT, 12 | TEST_TCP_PORT, 13 | TEST_HTTP_PORT, 14 | TEST_TLS_PORT, 15 | TEST_UDP_PORT, 16 | ) 17 | from tests.integration import LOCAL_GRAYLOG_UP 18 | from tests.integration.helper import get_graylog_response, get_unique_message 19 | 20 | from graypy import GELFUDPHandler, GELFTCPHandler, GELFTLSHandler, GELFHTTPHandler 21 | 22 | 23 | @pytest.fixture( 24 | params=[ 25 | GELFTCPHandler("127.0.0.1", TEST_TCP_PORT, debugging_fields=True), 26 | GELFUDPHandler("127.0.0.1", TEST_UDP_PORT, debugging_fields=True), 27 | GELFUDPHandler( 28 | "127.0.0.1", TEST_UDP_PORT, compress=False, debugging_fields=True 29 | ), 30 | GELFHTTPHandler("127.0.0.1", TEST_HTTP_PORT, debugging_fields=True), 31 | GELFHTTPHandler( 32 | "127.0.0.1", TEST_HTTP_PORT, compress=False, debugging_fields=True 33 | ), 34 | GELFTLSHandler("127.0.0.1", TEST_TLS_PORT, debugging_fields=True), 35 | GELFTLSHandler( 36 | "127.0.0.1", 37 | TEST_TLS_PORT, 38 | debugging_fields=True, 39 | validate=True, 40 | ca_certs=TEST_CERT, 41 | ), 42 | ] 43 | ) 44 | def handler(request): 45 | return request.param 46 | 47 | 48 | @pytest.mark.skipif(not LOCAL_GRAYLOG_UP, reason="local Graylog instance not up") 49 | def test_debug_mode(logger): 50 | message = get_unique_message() 51 | logger.error(message) 52 | graylog_response = get_graylog_response( 53 | message, fields=["function", "pid", "thread_name"] 54 | ) 55 | assert message == graylog_response["message"] 56 | assert "long_message" not in graylog_response 57 | assert "timestamp" in graylog_response 58 | assert graylog_response["file"].endswith("test_debugging_fields.py") 59 | assert "test_debug_mode" == graylog_response["function"] 60 | assert "line" in graylog_response 61 | assert "file" in graylog_response 62 | assert "pid" in graylog_response 63 | assert "thread_name" in 
class DummyFilter(logging.Filter):
    """Logging filter that tags every record with fixed extra attributes
    so the GELF handlers' ``extra_fields`` support can be exercised.

    Always lets the record through.
    """

    def filter(self, record):
        record.id = 42
        record.van_halen = 1984
        record.ozzy = "diary of a madman"
        return True
# ---- tests/integration/test_extra_fields.py (continued) ----
@pytest.mark.skipif(not LOCAL_GRAYLOG_UP, reason="local Graylog instance not up")
def test_dynamic_fields(logger):
    """Fields injected by a logging.Filter surface as extra GELF fields,
    while the reserved ``id`` field is dropped rather than forwarded."""
    message = get_unique_message()
    logger.error(message)
    graylog_response = get_graylog_response(message, fields=["ozzy", "van_halen"])
    assert graylog_response["message"] == message
    assert "long_message" not in graylog_response
    assert "timestamp" in graylog_response
    assert graylog_response["ozzy"] == "diary of a madman"
    assert graylog_response["van_halen"] == 1984
    # graypy must not clobber Graylog's own document ``_id`` with the
    # record's injected ``id`` attribute
    assert graylog_response["_id"] != 42
    assert "id" not in graylog_response


# ---- tests/integration/test_status_issue.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""pytests for addressing potential issues with adding a ``status`` extra
field within a given log and having the log failing to appear within graylog.

Related issue:
    - Fails to log silently with specific extra field #85

URL:
    - https://github.com/severb/graypy/issues/85
"""

import pytest

from tests.helper import handler, logger
from tests.integration import LOCAL_GRAYLOG_UP
from tests.integration.helper import get_unique_message, get_graylog_response


@pytest.mark.skipif(not LOCAL_GRAYLOG_UP, reason="local Graylog instance not up")
def test_non_status_field_log(logger):
    """Baseline: a non-``status`` extra field round-trips to Graylog."""
    message = get_unique_message()
    logger.error(message, extra={"foo": "bar"})
    graylog_response = get_graylog_response(message, fields=["foo"])
    assert graylog_response["message"] == message
    assert "long_message" not in graylog_response
    assert "timestamp" in graylog_response
    assert graylog_response["foo"] == "bar"


@pytest.mark.skipif(not LOCAL_GRAYLOG_UP, reason="local Graylog instance not up")
def test_status_field_issue(logger):
    """A log carrying a ``status`` extra field must still reach Graylog
    (regression test for issue #85)."""
    message = get_unique_message()
    logger.error(message, extra={"status": "OK"})
    graylog_response = get_graylog_response(message, fields=["status"])
    assert graylog_response["message"] == message
    assert "long_message" not in graylog_response
    assert "timestamp" in graylog_response
    assert graylog_response["status"] == "OK"


@pytest.mark.skipif(not LOCAL_GRAYLOG_UP, reason="local Graylog instance not up")
def test_status_field_issue_multi(logger):
    """``status`` combined with another extra field must also reach Graylog."""
    message = get_unique_message()
    logger.error(message, extra={"foo": "bar", "status": "OK"})
    graylog_response = get_graylog_response(message, fields=["foo", "status"])
    assert graylog_response["message"] == message
    assert "long_message" not in graylog_response
    assert "timestamp" in graylog_response
    assert graylog_response["foo"] == "bar"
    assert graylog_response["status"] == "OK"
# ---- tests/unit/__init__.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""unit pytests for :mod:`graypy`

.. note::

    These tests mock sending to Graylog, thus, do not require a local instance
    of Graylog to successfully run.
"""

# ---- tests/unit/helper.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""helper functions for testing graypy with mocks of python logging and
Graylog services"""

import logging

# record name shared with the ExcludeFilter tests below
MOCK_LOG_RECORD_NAME = "MOCK_LOG_RECORD"
MOCK_LOG_RECORD = logging.LogRecord(
    MOCK_LOG_RECORD_NAME,
    logging.INFO,
    pathname=None,
    lineno=None,
    msg="Log message",
    args=(),
    exc_info=None,
)

# ---- tests/unit/test_ExcludeFilter.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""pytests for :class:`graypy.rabbitmq.ExcludeFilter`"""

import pytest

from graypy import ExcludeFilter

from tests.unit.helper import MOCK_LOG_RECORD_NAME, MOCK_LOG_RECORD


@pytest.mark.parametrize("name", [None, ""])
def test_invalid_name(name):
    """Test constructing :class:`graypy.rabbitmq.ExcludeFilter` with an
    invalid ``name`` argument"""
    with pytest.raises(ValueError):
        ExcludeFilter(name)


@pytest.mark.parametrize("name", ["foobar", ".", " "])
def test_valid_name(name):
    """Test constructing :class:`graypy.rabbitmq.ExcludeFilter` with a
    valid ``name`` argument"""
    instance = ExcludeFilter(name)
    assert instance
    assert instance.name == name
    assert instance.nlen == len(name)


def test_non_filtering_record():
    """A record whose name differs from the filter's name passes through."""
    instance = ExcludeFilter("NOT" + MOCK_LOG_RECORD_NAME)
    assert instance.filter(MOCK_LOG_RECORD)
    assert instance.name != MOCK_LOG_RECORD.name


def test_filtering_record():
    """A record whose name matches the filter's name is excluded."""
    instance = ExcludeFilter(MOCK_LOG_RECORD_NAME)
    assert not instance.filter(MOCK_LOG_RECORD)
    assert instance.name == MOCK_LOG_RECORD.name


# ---- tests/unit/test_GELFRabbitHandler.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""pytests for :class:`graypy.rabbitmq.GELFRabbitHandler`"""

import json

import pytest

from graypy.rabbitmq import GELFRabbitHandler
from graypy.handler import SYSLOG_LEVELS

from tests.unit.helper import MOCK_LOG_RECORD


def test_invalid_url():
    """Test constructing :class:`graypy.rabbitmq.GELFRabbitHandler` with
    an invalid rabbitmq url"""
    with pytest.raises(ValueError):
        GELFRabbitHandler("BADURL")


def test_valid_url():
    """Test constructing :class:`graypy.rabbitmq.GELFRabbitHandler` with
    a valid rabbitmq url"""
    rabbit_handler = GELFRabbitHandler("amqp://localhost")
    assert rabbit_handler
    assert rabbit_handler.url == "amqp://localhost"


@pytest.mark.xfail(reason="rabbitmq service is not up")
def test_socket_creation_failure():
    """Test attempting to open a socket to a rabbitmq instance when no such
    service exists"""
    rabbit_handler = GELFRabbitHandler("amqp://localhost")
    rabbit_handler.makeSocket()


def test_make_pickle():
    """makePickle emits JSON carrying the record's message and syslog level."""
    rabbit_handler = GELFRabbitHandler("amqp://localhost")
    gelf = json.loads(rabbit_handler.makePickle(MOCK_LOG_RECORD))
    assert gelf["short_message"] == "Log message"
    assert gelf["level"] == SYSLOG_LEVELS[MOCK_LOG_RECORD.levelno]
# ---- tests/unit/test_chunking.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""pytests for various GELF UDP message chunkers"""

import json
import logging
import struct
import zlib

import pytest

from graypy.handler import (
    GELFTruncatingChunker,
    GELFWarningChunker,
    BaseGELFChunker,
    BaseGELFHandler,
    SYSLOG_LEVELS,
    GELFChunkOverflowWarning,
    GELFTruncationFailureWarning,
)

# every chunker implementation under test
CHUNKERS = [BaseGELFChunker, GELFWarningChunker, GELFTruncatingChunker]


@pytest.mark.parametrize("gelf_chunker", CHUNKERS)
def test_gelf_chunking(gelf_chunker):
    """Each chunker splits a payload into correctly-headed GELF chunks."""
    chunks = list(gelf_chunker(chunk_size=2).chunk_message(b"12345"))
    magic = b"\x1e\x0f"
    expected = [
        (struct.pack("b", 0), struct.pack("b", 3), b"12"),
        (struct.pack("b", 1), struct.pack("b", 3), b"34"),
        (struct.pack("b", 2), struct.pack("b", 3), b"5"),
    ]

    assert len(chunks) == len(expected)

    for chunk, (seq_index, seq_total, payload) in zip(chunks, expected):
        assert chunk[:2] == magic
        assert chunk[10:11] == seq_index
        assert chunk[11:12] == seq_total
        assert chunk[12:] == payload


def rebuild_gelf_bytes_from_udp_chunks(chunks):
    """Reassemble the original GELF payload from UDP chunks by dropping
    each chunk's header (2 magic bytes + packed "QBB" id/seq/total).

    NOTE(review): the short-chunk branch recovers the payload via the size
    difference from the first chunk; this is correct for the chunk sizes
    used in these tests — confirm before reusing elsewhere.
    """
    header_size = 2 + struct.calcsize("QBB")
    full_size = len(chunks[0])
    gelf_bytes = b""
    for chunk in chunks:
        if len(chunk) < full_size:
            gelf_bytes += chunk[-(full_size - len(chunk)):]
        else:
            gelf_bytes += chunk[header_size - len(chunk):]
    return gelf_bytes


@pytest.mark.parametrize("gelf_chunker", CHUNKERS)
def test_gelf_chunkers(gelf_chunker):
    """A modest message never produces more than 128 chunks."""
    pickle = BaseGELFHandler().makePickle(
        logging.LogRecord(
            "test_gelf_chunkers", logging.INFO, None, None, "1" * 10, None, None
        )
    )
    chunks = list(gelf_chunker(chunk_size=2).chunk_message(pickle))
    assert len(chunks) <= 128


@pytest.mark.parametrize("gelf_chunker", CHUNKERS)
def test_gelf_chunkers_overflow(gelf_chunker):
    """Even a chunk-overflowing message is capped at 128 chunks."""
    pickle = BaseGELFHandler().makePickle(
        logging.LogRecord(
            "test_gelf_chunkers_overflow",
            logging.INFO,
            None,
            None,
            "1" * 1000,
            None,
            None,
        )
    )
    chunks = list(gelf_chunker(chunk_size=1).chunk_message(pickle))
    assert len(chunks) <= 128


def test_chunk_overflow_truncate_uncompressed():
    """On overflow the truncating chunker warns and emits a truncated,
    uncompressed GELF payload flagged with ``_chunk_overflow``."""
    pickle = BaseGELFHandler(compress=False).makePickle(
        logging.LogRecord(
            "test_chunk_overflow_truncate_uncompressed",
            logging.INFO,
            None,
            None,
            "1" * 1000,
            None,
            None,
        )
    )
    with pytest.warns(GELFChunkOverflowWarning):
        chunks = list(
            GELFTruncatingChunker(chunk_size=2, compress=False).chunk_message(pickle)
        )
    assert len(chunks) <= 128
    payload = rebuild_gelf_bytes_from_udp_chunks(chunks).decode("UTF-8")
    gelf_json = json.loads(payload)
    assert gelf_json["_chunk_overflow"] is True
    assert gelf_json["short_message"] in "1" * 1000
    assert gelf_json["level"] == SYSLOG_LEVELS.get(logging.ERROR, logging.ERROR)


def test_chunk_overflow_truncate_compressed():
    """Same overflow handling, but for a zlib-compressed payload."""
    pickle = BaseGELFHandler(compress=True).makePickle(
        logging.LogRecord(
            "test_chunk_overflow_truncate_compressed",
            logging.INFO,
            None,
            None,
            "123412345" * 5000,
            None,
            None,
        )
    )
    with pytest.warns(GELFChunkOverflowWarning):
        chunks = list(
            GELFTruncatingChunker(chunk_size=2, compress=True).chunk_message(pickle)
        )
    assert len(chunks) <= 128
    payload = zlib.decompress(rebuild_gelf_bytes_from_udp_chunks(chunks)).decode(
        "UTF-8"
    )
    gelf_json = json.loads(payload)
    assert gelf_json["_chunk_overflow"] is True
    assert gelf_json["short_message"] in "123412345" * 5000
    assert gelf_json["level"] == SYSLOG_LEVELS.get(logging.ERROR, logging.ERROR)


def test_chunk_overflow_truncate_fail():
    """Truncation that cannot fit even a stub payload warns of failure."""
    pickle = BaseGELFHandler().makePickle(
        logging.LogRecord(
            "test_chunk_overflow_truncate_fail",
            logging.INFO,
            None,
            None,
            "1" * 1000,
            None,
            None,
        )
    )
    with pytest.warns(GELFTruncationFailureWarning):
        list(GELFTruncatingChunker(1).chunk_message(pickle))


def test_chunk_overflow_truncate_fail_large_inherited_field():
    """A huge inherited field (facility) defeats truncation and warns."""
    pickle = BaseGELFHandler(
        facility="this is a really long facility" * 5000
    ).makePickle(
        logging.LogRecord(
            "test_chunk_overflow_truncate_fail",
            logging.INFO,
            None,
            None,
            "reasonable message",
            None,
            None,
        )
    )
    with pytest.warns(GELFTruncationFailureWarning):
        list(GELFTruncatingChunker(2).chunk_message(pickle))
# ---- tests/unit/test_handler.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""pytests for the formatting and construction of GELF logs by the graypy
logging handlers

.. note::

    These tests mock sending to Graylog and do not require an active graylog
    instance to operate.
"""

import datetime
import json
import logging
import socket
import sys
import zlib

import mock
import pytest

from graypy.handler import BaseGELFHandler, GELFHTTPHandler, GELFTLSHandler

from tests.helper import handler, logger, formatted_logger
from tests.unit.helper import MOCK_LOG_RECORD, MOCK_LOG_RECORD_NAME

UNICODE_REPLACEMENT = u"\ufffd"


class TestClass(object):
    # NOTE(review): repr returns the empty string and test_arbitrary_object
    # asserts on that exact value — looks possibly garbled in extraction;
    # confirm against upstream before changing.
    def __repr__(self):
        return ""


@pytest.fixture
def mock_send(handler):
    """Patch the handler's transport so tests can inspect the GELF payload.

    Socket-based handlers expose ``send``; others (e.g. GELFHTTPHandler)
    only expose ``emit``, so fall back to patching that.

    Fix: the previous implementation wrapped ``with ...: yield`` in a
    ``try/except Exception`` to fall back — but an exception thrown into the
    yield by a failing test was then caught too, causing the fixture to
    swallow the failure and yield a second time. Selecting the patch target
    up front avoids both problems.
    """
    target = "send" if hasattr(handler, "send") else "emit"
    with mock.patch.object(handler, target) as mocked:
        yield mocked


def get_mock_send_arg(mock_send):
    """Decode the single GELF payload captured by the mocked transport.

    Handles three capture shapes: a raw :class:`logging.LogRecord` (when
    ``emit`` was mocked), a zlib-compressed JSON payload, and a plain
    (possibly null-terminated) JSON payload.
    """
    assert mock_send.call_args_list != []
    [[[arg], _]] = mock_send.call_args_list

    # TODO: this is inaccurate solution for mocking non-send handlers
    if isinstance(arg, logging.LogRecord):
        return json.loads(
            BaseGELFHandler(compress=False).makePickle(arg).decode("utf-8")
        )
    try:
        return json.loads(zlib.decompress(arg).decode("utf-8"))
    except zlib.error:  # we have an uncompressed message
        try:
            return json.loads(arg.decode("utf-8"))
        except Exception:  # that is null terminated
            return json.loads(arg[:-1].decode("utf-8"))


@pytest.mark.parametrize(
    "message,expected",
    [
        (u"\u20AC", u"\u20AC"),
        (u"\u20AC".encode("utf-8"), u"\u20AC"),
        (b"\xc3", UNICODE_REPLACEMENT),
        (["a", b"\xc3"], ["a", UNICODE_REPLACEMENT]),
    ],
)
def test_pack(message, expected):
    """_pack_gelf_dict sanitizes str/bytes/list content into valid UTF-8."""
    assert expected == json.loads(
        BaseGELFHandler._pack_gelf_dict(message).decode("utf-8")
    )


def test_manual_exc_info_handler(logger, mock_send):
    """Check that the ``full_message`` traceback info is passed when
    the ``exc_info=1`` flag is given within a log message"""
    try:
        raise SyntaxError("Syntax error")
    except SyntaxError:
        logger.error("Failed", exc_info=1)
    arg = get_mock_send_arg(mock_send)
    assert "Failed" == arg["short_message"]
    assert arg["full_message"].startswith("Traceback (most recent call last):")

    # GELFHTTPHandler mocking does not complete the stacktrace
    # thus a missing \n
    assert arg["full_message"].endswith("SyntaxError: Syntax error") or arg[
        "full_message"
    ].endswith("SyntaxError: Syntax error\n")


def test_normal_exception_handler(logger, mock_send):
    """logger.exception attaches the traceback as ``full_message``."""
    try:
        raise SyntaxError("Syntax error")
    except SyntaxError:
        logger.exception("Failed")
    arg = get_mock_send_arg(mock_send)
    assert "Failed" == arg["short_message"]
    assert arg["full_message"].startswith("Traceback (most recent call last):")

    # GELFHTTPHandler mocking does not complete the stacktrace
    # thus a missing \n
    assert arg["full_message"].endswith("SyntaxError: Syntax error") or arg[
        "full_message"
    ].endswith("SyntaxError: Syntax error\n")


def test_unicode(logger, mock_send):
    """Non-ASCII text survives the GELF round trip untouched."""
    logger.error(u"Mensaje de registro espa\xf1ol")
    arg = get_mock_send_arg(mock_send)
    assert u"Mensaje de registro espa\xf1ol" == arg["short_message"]


@pytest.mark.skipif(sys.version_info[0] >= 3, reason="python2 only")
def test_broken_unicode_python2(logger, mock_send):
    # py2 record.getMessage() returns a binary string here
    # which is safely converted to unicode during the sanitization
    # process
    logger.error(b"Broken \xde log message")
    decoded = get_mock_send_arg(mock_send)
    assert u"Broken %s log message" % UNICODE_REPLACEMENT == decoded["short_message"]
# ---- tests/unit/test_handler.py (continued) ----
@pytest.mark.skipif(sys.version_info[0] < 3, reason="python3 only")
def test_broken_unicode_python3(logger, mock_send):
    # py3 record.getMessage() returns somewhat broken "b"foo"" if the
    # message string is not a string, but a binary object: b"foo"
    logger.error(b"Broken \xde log message")
    decoded = get_mock_send_arg(mock_send)
    assert decoded["short_message"] == "b'Broken \\xde log message'"


def test_extra_field(logger, mock_send):
    """An ``extra`` mapping key surfaces as an underscore-prefixed field."""
    logger.error("Log message", extra={"foo": "bar"})
    decoded = get_mock_send_arg(mock_send)
    assert decoded["short_message"] == "Log message"
    assert decoded["_foo"] == "bar"


def test_list(logger, mock_send):
    """List-valued extras are preserved as JSON arrays."""
    logger.error("Log message", extra={"foo": ["bar", "baz"]})
    decoded = get_mock_send_arg(mock_send)
    assert decoded["short_message"] == "Log message"
    assert decoded["_foo"] == ["bar", "baz"]


def test_arbitrary_object(logger, mock_send):
    """Objects with no native JSON mapping fall back to their repr()."""
    logger.error("Log message", extra={"foo": TestClass()})
    decoded = get_mock_send_arg(mock_send)
    assert decoded["short_message"] == "Log message"
    assert decoded["_foo"] == ""


def test_message_to_pickle_serializes_datetime_objects_instead_of_blindly_repring_them(
    logger, mock_send
):
    """datetime extras are serialized via isoformat, not repr."""
    moment = datetime.datetime(2001, 2, 3, 4, 5, 6, 7)
    logger.error("Log message", extra={"ts": moment})
    decoded = get_mock_send_arg(mock_send)
    assert "datetime.datetime" not in decoded["_ts"]
    assert decoded["_ts"] == moment.isoformat()


def test_status_field_issue(logger, mock_send):
    """A ``status`` extra field must not block the log (issue #85)."""
    logger.error("Log message", extra={"status": "OK"})
    decoded = get_mock_send_arg(mock_send)
    assert decoded["short_message"] == "Log message"
    assert decoded["_status"] == "OK"


def test_add_level_name():
    """_add_level_names stamps the record's textual level name."""
    gelf_dict = {}
    BaseGELFHandler._add_level_names(gelf_dict, MOCK_LOG_RECORD)
    assert gelf_dict["level_name"] == "INFO"


def test_resolve_host():
    """Test all possible resolutions of :meth:`BaseGELFHandler._resolve_host`"""
    assert BaseGELFHandler._resolve_host(False, None) == socket.gethostname()
    assert BaseGELFHandler._resolve_host(True, None) == socket.getfqdn()
    # fqdn=True wins over an explicit localname
    assert BaseGELFHandler._resolve_host(True, "localhost") == socket.getfqdn()
    assert BaseGELFHandler._resolve_host(False, "localhost") == "localhost"
    assert BaseGELFHandler._resolve_host(False, "") == ""


def test_set_custom_facility():
    """A custom facility moves the record's name into ``_logger``."""
    gelf_dict = {}
    custom_facility = "test facility"
    BaseGELFHandler._set_custom_facility(gelf_dict, custom_facility, MOCK_LOG_RECORD)
    assert gelf_dict["_logger"] == MOCK_LOG_RECORD_NAME
    assert gelf_dict["facility"] == "test facility"


def test_formatted_logger(formatted_logger, mock_send):
    """Test the ability to set and modify the graypy handler's
    :class:`logging.Formatter` and have the resultant ``short_message`` be
    formatted by the set :class:`logging.Formatter`"""
    for attached_handler in formatted_logger.handlers:
        if isinstance(attached_handler, GELFHTTPHandler):
            pytest.skip("formatting not mocked for GELFHTTPHandler")
    formatted_logger.error("Log message")
    decoded = get_mock_send_arg(mock_send)
    assert decoded["short_message"] == "ERROR : Log message"


def test_invalid_fqdn_localhost():
    """Test constructing :class:`graypy.handler.BaseGELFHandler` with
    specifying conflicting arguments ``fqdn`` and ``localname``"""
    with pytest.raises(ValueError):
        BaseGELFHandler(fqdn=True, localname="localhost")


def test_invalid_ca_certs():
    """Test constructing :class:`graypy.handler.GELFTLSHandler` with
    incorrect arguments specifying server ca cert verification"""
    with pytest.raises(ValueError):
        GELFTLSHandler("127.0.0.1", validate=True)


def test_invalid_client_certs():
    """Test constructing :class:`graypy.handler.GELFTLSHandler` with
    incorrect arguments specifying client cert/key verification"""
    with pytest.raises(ValueError):
        # missing client cert
        GELFTLSHandler("127.0.0.1", keyfile="/dev/null")