├── tests ├── __init__.py ├── unittest.cfg ├── test_add_request_context.py ├── test_regex.py ├── test_const_attribute.py ├── test_add_thread_context_filter.py ├── test_log_request_context.py ├── test_log_record_missing.py ├── test_flask_json_formatter.py ├── test_attr_type_filter.py ├── test_django_attribute.py ├── test_logging_context.py ├── test_extra_formatter.py ├── test_flask_attribute.py └── test_json_formatter.py ├── logging_utilities ├── __init__.py ├── django_middlewares │ ├── __init__.py │ └── add_request_context.py ├── context │ ├── __init__.py │ ├── base.py │ ├── context.py │ └── thread_context.py ├── thread_context.py ├── formatters │ ├── __init__.py │ ├── extra_formatter.py │ └── json_formatter.py ├── filters │ ├── add_thread_context_filter.py │ ├── attr_type_filter.py │ ├── flask_attribute.py │ ├── __init__.py │ └── django_request.py └── log_record.py ├── .coveragerc ├── setup.py ├── .github ├── workflows │ ├── semver.yml │ └── pr-auto-semver.yml └── release.yml ├── .isort.cfg ├── Pipfile ├── .style.yapf ├── LICENSE ├── pyproject.toml ├── .gitignore ├── Makefile └── .pylintrc /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /logging_utilities/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | show_missing = True -------------------------------------------------------------------------------- /logging_utilities/django_middlewares/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.py: 
-------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup() 4 | -------------------------------------------------------------------------------- /logging_utilities/context/__init__.py: -------------------------------------------------------------------------------- 1 | from .context import get_logging_context 2 | from .context import remove_logging_context 3 | from .context import set_logging_context 4 | -------------------------------------------------------------------------------- /logging_utilities/thread_context.py: -------------------------------------------------------------------------------- 1 | from threading import local 2 | 3 | 4 | class ThreadContext(local): 5 | """ThreadContext is a store for data that is thread specific. 6 | """ 7 | 8 | 9 | thread_context = ThreadContext() 10 | -------------------------------------------------------------------------------- /.github/workflows/semver.yml: -------------------------------------------------------------------------------- 1 | name: on-push 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - develop 8 | 9 | jobs: 10 | release: 11 | uses: geoadmin/.github/.github/workflows/semver-release.yml@master 12 | 13 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | known_third_party=pytest 3 | known_django=django 4 | known_flask=flask 5 | known_logging_utilities=logging_utilities 6 | force_single_line=True 7 | sections=FUTURE,STDLIB,THIRDPARTY,DJANGO,FLASK,LOGGING_UTILITIES,FIRSTPARTY,LOCALFOLDER -------------------------------------------------------------------------------- /.github/workflows/pr-auto-semver.yml: -------------------------------------------------------------------------------- 1 | name: on-pr 2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - reopened 8 | - synchronize 9 | - 
edited 10 | 11 | jobs: 12 | pr-edit: 13 | uses: geoadmin/.github/.github/workflows/pr-auto-semver.yml@master -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [dev-packages] 7 | # update to latest major 8 | build = "*" 9 | pylint = "*" 10 | isort = "*" 11 | yapf = "*" 12 | coverage = "*" 13 | django = "*" 14 | flask = "*" 15 | nose2 = "*" 16 | twine = "*" 17 | setuptools = "*" 18 | 19 | [requires] 20 | -------------------------------------------------------------------------------- /tests/unittest.cfg: -------------------------------------------------------------------------------- 1 | [unittest] 2 | plugins = nose2.plugins.junitxml 3 | 4 | [junit-xml] 5 | always-on = True 6 | 7 | [log-capture] 8 | always-on = True 9 | # clear the handler to not polute the stdout and stderr with logging messages 10 | clear-handlers = True 11 | log-level = DEBUG 12 | 13 | [coverage] 14 | always-on = True 15 | coverage = logging_utilities 16 | -------------------------------------------------------------------------------- /logging_utilities/formatters/__init__.py: -------------------------------------------------------------------------------- 1 | RECORD_DFT_ATTR = { 2 | 'name', 3 | 'created', 4 | 'msecs', 5 | 'relativeCreated', 6 | 'asctime', 7 | 'levelname', 8 | 'levelno', 9 | 'pathname', 10 | 'filename', 11 | 'module', 12 | 'lineno', 13 | 'funcName', 14 | 'thread', 15 | 'threadName', 16 | 'processName', 17 | 'process', 18 | 'exc_info', 19 | 'exc_text', 20 | 'stack_info', 21 | 'msg', 22 | 'args', 23 | 'message', 24 | 'taskName' 25 | } 26 | -------------------------------------------------------------------------------- /logging_utilities/django_middlewares/add_request_context.py: -------------------------------------------------------------------------------- 1 | 
from logging_utilities.thread_context import thread_context 2 | 3 | 4 | class AddToThreadContextMiddleware(object): 5 | """Django middleware that stores request to thread local variable. 6 | """ 7 | 8 | def __init__(self, get_response): 9 | self.get_response = get_response 10 | 11 | def __call__(self, request): 12 | setattr(thread_context, 'request', request) 13 | response = self.get_response(request) 14 | setattr(thread_context, 'request', None) 15 | return response 16 | -------------------------------------------------------------------------------- /logging_utilities/context/base.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod 2 | from collections.abc import MutableMapping 3 | 4 | 5 | class BaseContext(MutableMapping): 6 | __marker = object() 7 | 8 | @abstractmethod 9 | def init(self, data): 10 | pass # pragma: no cover 11 | 12 | @abstractmethod 13 | def get(self, key, default=None): 14 | pass # pragma: no cover 15 | 16 | @abstractmethod 17 | def pop(self, key, default=__marker): 18 | pass # pragma: no cover 19 | 20 | @abstractmethod 21 | def set(self, key, value): 22 | pass # pragma: no cover 23 | 24 | @abstractmethod 25 | def delete(self, key: str): 26 | pass # pragma: no cover 27 | 28 | @abstractmethod 29 | def clear(self): 30 | pass # pragma: no cover 31 | -------------------------------------------------------------------------------- /tests/test_add_request_context.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from django.conf import settings 4 | from django.test import RequestFactory 5 | 6 | from logging_utilities.django_middlewares.add_request_context import \ 7 | AddToThreadContextMiddleware 8 | from logging_utilities.thread_context import thread_context 9 | 10 | if not settings.configured: 11 | settings.configure() 12 | 13 | 14 | class AddToThreadContextMiddlewareTest(unittest.TestCase): 15 | 16 | def setUp(self) -> 
None: 17 | self.factory = RequestFactory() 18 | 19 | def test_add_request(self): 20 | 21 | def test_handler(request): 22 | r_from_var = getattr(thread_context, 'request', None) 23 | self.assertEqual(request, r_from_var) 24 | 25 | request = self.factory.get("/some_path?test=some_value") 26 | middleware = AddToThreadContextMiddleware(test_handler) 27 | middleware(request) 28 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | # .github/release.yml 2 | # WARNING this file is managed by terraform and cannot be edited manually, see 3 | # geoadmin/infra-terraform-bgdi/tf/github/geoadmin/modules/service-repository-semver/release_config.tf 4 | 5 | changelog: 6 | exclude: 7 | labels: 8 | - ignore-for-release 9 | - skip-changelog 10 | - skip-rn 11 | - skip-release-note 12 | - no-release-note 13 | - no-rn 14 | - no-changelog 15 | - new-release 16 | authors: 17 | - terraform-bgdi 18 | categories: 19 | - title: Breaking Changes 🛠 20 | labels: 21 | - breaking-change 22 | - title: New Features 23 | labels: 24 | - feature 25 | - enhancement 26 | - title: Data Updates 27 | labels: 28 | - data 29 | - data-integration 30 | - title: Bug Fixes 31 | labels: 32 | - fix 33 | - bugfix 34 | - bug 35 | - title: Other Changes 36 | labels: 37 | - "*" 38 | -------------------------------------------------------------------------------- /.style.yapf: -------------------------------------------------------------------------------- 1 | [style] 2 | based_on_style=google 3 | # Put closing brackets on a separate line, dedented, if the bracketed 4 | # expression can't fit in a single line. Applies to all kinds of brackets, 5 | # including function definitions and calls. 
For example: 6 | # 7 | # config = { 8 | # 'key1': 'value1', 9 | # 'key2': 'value2', 10 | # } # <--- this bracket is dedented and on a separate line 11 | # 12 | # time_series = self.remote_client.query_entity_counters( 13 | # entity='dev3246.region1', 14 | # key='dns.query_latency_tcp', 15 | # transform=Transformation.AVERAGE(window=timedelta(seconds=60)), 16 | # start_ts=now()-timedelta(days=3), 17 | # end_ts=now(), 18 | # ) # <--- this bracket is dedented and on a separate line 19 | dedent_closing_brackets=True 20 | coalesce_brackets=True 21 | 22 | # Split before arguments, but do not split all subexpressions recursively 23 | # (unless needed). 24 | split_all_top_level_comma_separated_values=True 25 | 26 | # Split lines longuer than 100 characters (this only applies to code not to 27 | # comment and docstring) 28 | column_limit=100 -------------------------------------------------------------------------------- /logging_utilities/filters/add_thread_context_filter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from logging import LogRecord 3 | from typing import List 4 | 5 | from logging_utilities.thread_context import thread_context 6 | 7 | 8 | class AddThreadContextFilter(logging.Filter): 9 | """Add local thread attributes to the log record. 10 | """ 11 | 12 | def __init__(self, contexts: List[dict] = None) -> None: 13 | """Initialize the filter 14 | 15 | Args: 16 | contexts (List[dict], optional): 17 | List of values to add to the log record. Dictionary must contain value for 18 | 'context_key' to read value from thread local variable. Dictionary must also contain 19 | 'logger_key' to set the value on the log record. 
20 | """ 21 | self.contexts: List[dict] = [] if contexts is None else contexts 22 | super().__init__() 23 | 24 | def filter(self, record: LogRecord) -> bool: 25 | for ctx in self.contexts: 26 | if getattr(thread_context, ctx['context_key'], None) is not None: 27 | setattr(record, ctx['logger_key'], getattr(thread_context, ctx['context_key'])) 28 | return True 29 | -------------------------------------------------------------------------------- /tests/test_regex.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from nose2.tools import params 4 | 5 | from logging_utilities.formatters.json_formatter import EnhancedPercentStyle 6 | 7 | 8 | class RegexTests(unittest.TestCase): 9 | 10 | @params( 11 | '%(asctime)s', 12 | '%(my.dotted.key)s', 13 | '%(my.dotted.key)s with constant', 14 | 'leading constant %(my.dotted.key)s with constant', 15 | 'constant%(my.dotted.key)sconstant', 16 | '%(my.dotted.key)d', 17 | '%(my.dotted.key)3.4d', 18 | ) 19 | def test_json_formatter_enhanced_percent_style_regex_match(self, value): 20 | self.assertIsNotNone(EnhancedPercentStyle.validation_pattern.search(value)) 21 | 22 | @params( 23 | 'my.key', 24 | 'my.key2', 25 | 'my.', 26 | 'k1.k3.', 27 | 'k_1.', 28 | 'k_.sk_2', 29 | '_my.key', 30 | '_1.', 31 | '_1._1', 32 | '_1a._2b._3b', 33 | 'a.b.c.', 34 | 'a1a.b2b.c2c.', 35 | 'a1a_.b2_b.c_2c.', 36 | 'my key.', 37 | '1.2', 38 | 'This is a text. 
It contains dot.', 39 | 'my.key ', 40 | ' my.key', 41 | 'my.key-test', 42 | '1', 43 | '1.test', 44 | '_1.1test', 45 | '%()s', 46 | 'asd %()s asdf' 47 | ) 48 | def test_json_formatter_enhanced_percent_style_regex_not_match(self, value): 49 | self.assertIsNone(EnhancedPercentStyle.validation_pattern.search(value)) 50 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020, swisstopo 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "build", "setuptools-git-versioning<2"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "logging-utilities" 7 | requires-python = ">=3.0" 8 | description = """\ 9 | A collection of useful logging formatters and filters.\ 10 | Logging Context, JSON Formatter, Extra Formatter, ISO Time Filter, Flask Filter, Django Filter, ...\ 11 | """ 12 | readme = "README.md" 13 | license = {text = "BSD 3-Clause License"} 14 | authors = [ 15 | {name = "Brice Schaffner", email = "brice.schaffner@swisstopo.ch"} 16 | ] 17 | classifiers = [ 18 | "Development Status :: 5 - Production/Stable", 19 | "Intended Audience :: Developers", 20 | "License :: OSI Approved :: BSD License", 21 | "Operating System :: OS Independent", 22 | "Programming Language :: Python :: 3", 23 | "Programming Language :: Python :: 3 :: Only", 24 | "Topic :: Utilities", 25 | "Topic :: System :: Logging", 26 | "Topic :: Software Development :: Libraries", 27 | "Topic :: Software Development :: Libraries :: Python Modules", 28 | "Framework :: Django", 29 | "Framework :: Flask" 30 | ] 31 | keywords = ["context", "json", "logging", "flask", "django", "pyramid", "format"] 32 | dynamic = ["version"] 33 | 34 | [project.urls] 35 | 
Homepage = "https://github.com/geoadmin/lib-py-logging-utilities" 36 | Documentation = "https://github.com/geoadmin/lib-py-logging-utilities#readme" 37 | Source = "https://github.com/geoadmin/lib-py-logging-utilities" 38 | Tracker = "https://github.com/geoadmin/lib-py-logging-utilities/issues" 39 | 40 | [tool.setuptools.packages.find] 41 | include = ["logging_utilities*"] 42 | 43 | [tool.setuptools-git-versioning] 44 | enabled = true 45 | dev_template = "{tag}+git.{sha}" 46 | dirty_template = "{tag}+git.{sha}.dirty" -------------------------------------------------------------------------------- /logging_utilities/context/context.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from functools import wraps 3 | 4 | from .thread_context import ThreadMappingContext 5 | 6 | __record_factory_wrapped = None # pylint: disable=invalid-name 7 | __initial_record_factory = logging.getLogRecordFactory() 8 | __context = None # pylint: disable=invalid-name 9 | 10 | 11 | def set_logging_context(context=None): 12 | '''Set a logging context 13 | 14 | The context is set per thread (each thread can have different context) and is set to every 15 | log record in the `context` attribute. 16 | 17 | Args: 18 | context: (dict, None) 19 | Context to set, by default `None`. 
The context can be later retrieved and modified using 20 | `get_logging_context()` 21 | ''' 22 | global __record_factory_wrapped # pylint: disable=global-statement, invalid-name 23 | global __context # pylint: disable=global-statement, invalid-name 24 | current_factory = logging.getLogRecordFactory() 25 | if __context is None: 26 | __context = ThreadMappingContext() 27 | __context.init(context) 28 | if current_factory != __record_factory_wrapped: 29 | __initial_record_factory = logging.getLogRecordFactory() 30 | __record_factory_wrapped = __wrap_log_record_with_context(current_factory, __context) 31 | logging.setLogRecordFactory(__record_factory_wrapped) 32 | 33 | 34 | def get_logging_context(): 35 | '''Return the current logging context if set or `None` otherwise''' 36 | return __context 37 | 38 | 39 | def remove_logging_context(): 40 | '''Remove the logging context''' 41 | global __context # pylint: disable=global-statement, invalid-name 42 | logging.setLogRecordFactory(__initial_record_factory) 43 | __context = None 44 | 45 | 46 | def __wrap_log_record_with_context(record_factory, context): 47 | 48 | @wraps(record_factory) 49 | def wrapper(*args, **kwargs): 50 | record = record_factory(*args, **kwargs) 51 | record.context = context 52 | return record 53 | 54 | return wrapper 55 | -------------------------------------------------------------------------------- /logging_utilities/log_record.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from logging import LogRecord 3 | from logging import setLogRecordFactory 4 | 5 | _dict_ignore_missing_types = {} 6 | 7 | 8 | class _DictIgnoreMissing(dict): 9 | _dft_value = '' 10 | 11 | def __getitem__(self, __k): 12 | try: 13 | return super().__getitem__(__k) 14 | except KeyError: 15 | return self._dft_value 16 | 17 | 18 | class LogRecordIgnoreMissing(LogRecord): 19 | '''LogRecord that don't raise ValueError exception when trying to access missing extra 
attribute 20 | 21 | Missing/unknown extra attribute will returns `''` when accessed via the `__dict__` attribute. 22 | 23 | This is particularly usefull when using a style formatter with attribute that might be missing 24 | from the log record; e.g. `"%(message)s - %(extra_attribute)s"` 25 | 26 | The default value for missing attribute can be changed via `__dft_value` parameter. 27 | ''' 28 | 29 | def __init__(self, *args, **kwargs): 30 | __dft_value = kwargs.pop('__dft_value', '') 31 | __dft_value_hash = str(__dft_value) 32 | super().__init__(*args, **kwargs) 33 | if __dft_value_hash not in _dict_ignore_missing_types: 34 | _dict_ignore_missing_type_name = '_DictIgnoreMissing_{}'.format( 35 | len(_dict_ignore_missing_types) 36 | ) 37 | _dict_ignore_missing_types[__dft_value_hash] = type( 38 | _dict_ignore_missing_type_name, (_DictIgnoreMissing,), {"_dft_value": __dft_value} 39 | ) 40 | self.__dict__ = _dict_ignore_missing_types[__dft_value_hash](self.__dict__) 41 | 42 | 43 | def set_log_record_ignore_missing_factory(dft_value=''): 44 | '''Globally change the log record factory to the LogRecordIgnoreMissing factory 45 | 46 | This new log record won't raise any exception on unknown/missing attribute access, but will 47 | return the `dft_value` instead. 48 | ''' 49 | setLogRecordFactory(partial(LogRecordIgnoreMissing, __dft_value=dft_value)) 50 | 51 | 52 | def reset_log_record_factory(): 53 | '''Reset the log record factory to the original one LogRecord. 
54 | ''' 55 | setLogRecordFactory(LogRecord) # pragma: no cover 56 | -------------------------------------------------------------------------------- /tests/test_const_attribute.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import unittest 3 | from logging import Formatter 4 | 5 | from nose2.tools import params 6 | 7 | from logging_utilities.filters import ConstAttribute 8 | 9 | 10 | class RecordAttributesIgnoreMissingTest(unittest.TestCase): 11 | 12 | def setUp(self): 13 | super().setUp() 14 | self.maxDiff = None 15 | 16 | @classmethod 17 | def _configure_const_attribute(cls, logger, fmt, value): 18 | logger.setLevel(logging.DEBUG) 19 | 20 | for handler in logger.handlers: 21 | const_attribute = ConstAttribute(const_key=value) 22 | handler.addFilter(const_attribute) 23 | handler.setFormatter(Formatter(fmt)) 24 | 25 | @params('string value', 1, 1.1, [1, 'b'], None, {}, {'a': 1, 'b': {'c': ['a']}}) 26 | def test_const_attribute(self, value): 27 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 28 | logger = logging.getLogger('test_formatter') 29 | self._configure_const_attribute( 30 | logger, "%(levelname)s:%(const_key)s:%(message)s", value 31 | ) 32 | logger.info('Simple message') 33 | logger.info('Composed message: %s', 'this is a composed message') 34 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 35 | self.assertEqual( 36 | ctx.output, 37 | [ 38 | 'INFO:{}:Simple message'.format(value), 39 | 'INFO:{}:Composed message: this is a composed message'.format(value), 40 | 'INFO:{}:Composed message with extra'.format(value) 41 | ] 42 | ) 43 | 44 | @params('string value', 1, 1.1, [1, 'b'], None, {}, {'a': 1, 'b': {'c': ['a']}}) 45 | def test_const_attribute_and_extra(self, value): 46 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 47 | logger = logging.getLogger('test_formatter') 48 | self._configure_const_attribute( 49 | logger, 
"%(levelname)s:%(const_key)s:%(message)s:%(extra1)s", value 50 | ) 51 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 52 | self.assertEqual(ctx.output, ['INFO:{}:Composed message with extra:23'.format(value)]) 53 | -------------------------------------------------------------------------------- /logging_utilities/filters/attr_type_filter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | 4 | def is_instance(attr, of_type): 5 | if isinstance(of_type, str): 6 | attr_class = type(attr) 7 | if '.' in of_type: 8 | return ( 9 | '{}.{}'.format(attr_class.__module__, attr_class.__name__) == of_type or 10 | of_type in ['{}.{}'.format(c.__module__, c.__name__) for c in attr_class.__bases__] 11 | ) 12 | return attr_class.__name__ == of_type or of_type in [ 13 | c.__name__ for c in attr_class.__bases__ 14 | ] 15 | return isinstance(attr, of_type) 16 | 17 | 18 | class AttrTypeFilter(logging.Filter): 19 | """Filter attributes based on their types 20 | 21 | This filter can help in case multiple libraries/frameworks etc. use the 22 | same extra properties in the extra parameter of the logging. It filters 23 | these extra properties by type, either with a whitelist (the default) or 24 | with a blacklist. 25 | """ 26 | 27 | def __init__(self, typecheck_list, *, is_blacklist=False): 28 | """Initialize the filter 29 | 30 | Args: 31 | typecheck_list: dict(key, type|list of types) 32 | A dictionary that maps keys to a type or a list of types. 33 | By default, it will only keep a parameter matching a key 34 | if the types match or if any of the types in the list match 35 | (white list). If in black list mode, it will only keep a 36 | parameter if the types don't match. Parameters not appearing 37 | in the dict will be ignored and passed though regardless of the 38 | mode (whitelist or blacklist). 39 | is_blacklist: bool (default: false) 40 | Whether the list passed should be a blacklist or a whitelist. 
41 | To use both, simply include this filter two times, one time with 42 | this parameter set true and one time with this parameter set false. 43 | """ 44 | self.typecheck_list = typecheck_list 45 | for key in self.typecheck_list: 46 | if not isinstance(self.typecheck_list[key], list): 47 | self.typecheck_list[key] = [self.typecheck_list[key]] 48 | self.is_blacklist = is_blacklist 49 | super().__init__() 50 | 51 | def filter(self, record): 52 | for key, whitelisted_types in self.typecheck_list.items(): 53 | if not hasattr(record, key): 54 | continue 55 | item = getattr(record, key) 56 | if any(is_instance(item, t) for t in whitelisted_types) is self.is_blacklist: 57 | delattr(record, key) 58 | 59 | return True 60 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # vscode 132 | .vscode 133 | 134 | # test report 135 | nose2-junit.xml 136 | 137 | # makefile 138 | .timestamps 139 | 140 | # git 141 | *.orig 142 | 143 | # Example files for testing README.md examples 144 | example-config.yaml 145 | example.py -------------------------------------------------------------------------------- /logging_utilities/filters/flask_attribute.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from werkzeug.datastructures import ImmutableDict 4 | from werkzeug.datastructures import ImmutableMultiDict 5 | from werkzeug.datastructures import MultiDict 6 | from werkzeug.exceptions import HTTPException 7 | 8 | from flask import has_request_context 9 | from flask import request 10 | 11 | 12 | class FlaskRequestAttribute(logging.Filter): 13 | """Logging Flask attributes record 14 | 15 | This filter adds Flask request context attributes to the log record. 16 | 17 | Flask request attributes are added as record attributes with the 'flask_request_' prefix. 
18 | """ 19 | 20 | def __init__(self, attributes=None): 21 | """Initialize the filter 22 | 23 | Args: 24 | attributes: (list) 25 | Flask request attribute names list to add to the log record 26 | """ 27 | super().__init__() 28 | self.attributes = attributes if attributes else list() 29 | 30 | def filter(self, record): 31 | for attribute in self.attributes: 32 | if has_request_context(): 33 | rec_attribute = 'flask_request_' + attribute 34 | try: 35 | value = getattr(request, attribute) 36 | except HTTPException: 37 | # accessing the request.json might raise an HTTPException if the request 38 | # is malformed for json data. In this case we don't want the filter to crash 39 | # but simply set an empty value 40 | if attribute == 'json': 41 | if isinstance(request.data, bytes): 42 | value = request.data.decode('utf-8') 43 | else: 44 | value = str(request.data) 45 | else: 46 | raise 47 | # Accessing flask_request_view_args. might rise an exception if 48 | # flask_request_view_args is Null. To safely access flask_request_view_args 49 | # None is replaced by an empty dict. 
import threading
from collections.abc import Mapping

from .base import BaseContext


class ThreadMappingContext(BaseContext):
    '''Thread local mapping context

    This class implements all Mapping (e.g. dictionary) functionality but on a
    thread local context.
    '''
    # Unique sentinel used by pop() to detect "no default supplied".
    __marker = object()

    def __init__(self):
        self.__local = threading.local()
        self.ensure_data()

    def ensure_data(self):
        """Ensure the current thread has a `data` attribute in its local storage.

        The `threading.local()` object provides each thread with its own independent attribute
        namespace. Attributes created in one thread are not visible to other threads. This means
        that even if `data` was initialized in the thread where this object was constructed,
        new threads will not automatically have a `data` attribute since the constructor is not
        run again.

        Calling this method guarantees that `self.__local.data` exists in the *current* thread,
        creating an empty dictionary if needed. It must be invoked on every access path
        (e.g., __getitem__, __iter__).
        """
        if not hasattr(self.__local, 'data'):
            self.__local.data = {}

    def __str__(self):
        self.ensure_data()
        return str(self.__local.data)

    def __getitem__(self, __key):
        self.ensure_data()
        return self.__local.data[__key]

    def __setitem__(self, __key, __value):
        self.ensure_data()
        self.__local.data[__key] = __value

    def __delitem__(self, __key):
        self.ensure_data()
        del self.__local.data[__key]

    def __len__(self):
        self.ensure_data()
        return len(self.__local.data)

    def __iter__(self):
        self.ensure_data()
        return self.__local.data.__iter__()

    def __contains__(self, __o):
        self.ensure_data()
        return self.__local.data.__contains__(__o)

    def init(self, data=None):
        '''Replace the whole per-thread mapping

        Args:
            data: (Mapping | None)
                New mapping content. None resets to an empty dictionary.

        Raises:
            ValueError: when data is neither None nor a Mapping
        '''
        self.ensure_data()
        if data is None:
            self.__local.data = {}
        else:
            if not isinstance(data, Mapping):
                raise ValueError('Data must be a Mapping sequence')
            self.__local.data = data

    def get(self, key, default=None):
        self.ensure_data()
        return self.__local.data.get(key, default)

    def pop(self, key, default=__marker):
        self.ensure_data()
        # Compare the sentinel by identity: an equality check (`==`) could be
        # fooled by a caller-supplied default object with a custom __eq__.
        if default is self.__marker:
            return self.__local.data.pop(key)
        return self.__local.data.pop(key, default)

    def set(self, key, value):
        self.ensure_data()
        self.__local.data[key] = value

    def delete(self, key):
        self.ensure_data()
        del self.__local.data[key]

    def clear(self):
        self.ensure_data()
        self.__local.data = {}

from logging_utilities.filters.add_thread_context_filter import \
    AddThreadContextFilter
from logging_utilities.formatters.json_formatter import JsonFormatter
from logging_utilities.thread_context import thread_context

# RequestFactory used below requires configured Django settings; configure an
# empty default when the test runner has not already done so.
if not settings.configured:
    settings.configure()

# From python3.7, dict is ordered
if sys.version_info.major >= 3 and sys.version_info.minor >= 7:
    dictionary = dict
else:
    dictionary = OrderedDict

logger = logging.getLogger(__name__)


class AddThreadContextFilterTest(unittest.TestCase):
    """Tests for AddThreadContextFilter.

    The filter copies values stored on the thread local `thread_context`
    object onto log records under a configurable attribute name
    (`logger_key`), as exercised by the cases below.
    """

    def setUp(self) -> None:
        self.factory = RequestFactory()

    @classmethod
    def _configure_json_filter(cls, _logger):
        # Render records as JSON including all extra attributes, so that any
        # attribute added by the filter shows up in the captured output.
        _logger.setLevel(logging.DEBUG)
        for handler in _logger.handlers:
            handler.setFormatter(JsonFormatter(add_always_extra=True))

    def test_add_thread_context_no_request(self):
        # When nothing was stored on thread_context, the filter must not add
        # the configured 'http_request' key to the record at all.
        with self.assertLogs('test_logger', level=logging.DEBUG) as ctx:
            test_logger = logging.getLogger("test_logger")
            self._configure_json_filter(test_logger)
            test_logger.addFilter(
                AddThreadContextFilter(
                    contexts=[{
                        'logger_key': 'http_request', 'context_key': 'request'
                    }]
                )
            )
            test_logger.debug("some message")

        message1 = json.loads(ctx.output[0], object_pairs_hook=dictionary)
        self.assertDictEqual(
            message1,
            dictionary([("levelname", "DEBUG"), ("name", "test_logger"),
                        ("message", "some message")])
        )

    def test_add_thread_context(self):
        # Table driven test: each case stores a value on thread_context and
        # expects it on the record under 'attr_name' with 'expect_value'.
        test_cases = [
            {
                'logger_name': 'test_1',
                'var_key': 'request',
                'var_val': "some value",
                'attr_name': 'http_request',
                'expect_value': "some value",
                'log_message': 'a log message has appeared',
            },
            {
                'logger_name': 'test_2',
                'var_key': 'request',
                # A raw Django HttpRequest is expected to serialize to an
                # empty string in the JSON output (see expect_value).
                'var_val': self.factory.get("/some_path"),
                'attr_name': 'request',
                'expect_value': "",
                'log_message': 'another log message has appeared',
            },
        ]

        for tc in test_cases:
            with self.assertLogs(tc['logger_name'], level=logging.DEBUG) as ctx:
                test_logger = logging.getLogger(tc['logger_name'])
                setattr(thread_context, tc['var_key'], tc['var_val'])
                self._configure_json_filter(test_logger)
                test_logger.addFilter(
                    AddThreadContextFilter(
                        contexts=[{
                            'logger_key': tc['attr_name'], 'context_key': tc['var_key']
                        }]
                    )
                )

                test_logger.debug(tc['log_message'])
                # Reset the thread local value so cases do not leak into each
                # other (the loop reuses the same thread).
                setattr(thread_context, tc['var_key'], None)

            message1 = json.loads(ctx.output[0], object_pairs_hook=dictionary)
            self.assertDictEqual(
                message1,
                dictionary([("levelname", "DEBUG"), ("name", tc['logger_name']),
                            ("message", tc['log_message']), (tc['attr_name'], tc['expect_value'])])
            )
This means that if you want to display a log 22 | extra, you have to make sure that every log message contains this extra. 23 | 24 | This formatter allow you to provide an `extra_fmt` parameter that will add record extra to the 25 | log message when available. You can either add the entire extra dictionary: `extra_fmt='%s'` or 26 | only some extras: `extra_fmt='%(extra1)s:%(extra2)s'`. In the latest case, when a key is missing 27 | in extra, the value is replaced by `extra_default`. 28 | When using the whole `extra` dictionary, you can use `extra_pretty_print` to improve the 29 | formatting, note that in this case the log might be on multiline (this use pprint.pformat). 30 | """ 31 | 32 | def __init__( 33 | self, 34 | fmt=None, 35 | datefmt=None, 36 | style='%', 37 | validate=True, 38 | extra_fmt=None, 39 | extra_default='', 40 | extra_pretty_print=False, 41 | pretty_print_kwargs=None 42 | ): 43 | ''' 44 | Initialize the formatter with specified format strings. 45 | 46 | Initialize the formatter either with the specified format string, or a default as described 47 | in logging.Formatter. 48 | 49 | Args: 50 | extra_fmt: string 51 | String format (old percent style only) for log extras. 
This can be used for instance 52 | to automatically add all extras, e.g: `extra_fmt='extras=%s'` or to add only some 53 | extra: `extra_fmt='%(extra1)s:%(extra2)s'` 54 | extra_default: any 55 | Default value to use for missing extra in record 56 | extra_pretty_print: boolean 57 | Set to true to use pprint.pformat on the extra dictionary 58 | ''' 59 | super().__init__(fmt=fmt, datefmt=datefmt, style=style) 60 | self._fmt_keys = re.findall(KEYS_PATTERN, fmt) 61 | self.extra_fmt = extra_fmt 62 | self._extras_keys = re.findall(KEYS_PATTERN, self.extra_fmt if self.extra_fmt else '') 63 | self._default = extra_default 64 | self._extra_pretty_print = extra_pretty_print 65 | self._pretty_print_kwargs = pretty_print_kwargs if pretty_print_kwargs is not None else {} 66 | 67 | def formatMessage(self, record): 68 | message = self._style.format(record) 69 | if self.extra_fmt: 70 | extra_keys = set(record.__dict__.keys()) - RECORD_DFT_ATTR - set(self._fmt_keys) 71 | extras = {key: getattr(record, key) for key in extra_keys} 72 | if extras: 73 | missing_keys = set(self._extras_keys) - set(extras.keys()) 74 | extras.update({key: self._default for key in missing_keys}) 75 | if self._extra_pretty_print: 76 | try: 77 | message = '%s%s' % ( 78 | message, self.extra_fmt % pformat(extras, **self._pretty_print_kwargs) 79 | ) 80 | except TypeError as err: 81 | if err.args[0] == 'format requires a mapping': 82 | raise ValueError( 83 | 'Cannot use extra_pretty_print with named placeholder' 84 | ) from err 85 | raise err 86 | else: 87 | message = '%s%s' % (message, self.extra_fmt % extras) 88 | return message 89 | -------------------------------------------------------------------------------- /logging_utilities/filters/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from datetime import datetime 3 | from datetime import timezone 4 | 5 | 6 | class ConstAttribute(logging.Filter): 7 | '''Logging constant record attribute 8 | 
9 | This filter add a constant attribute to the log record. 10 | ''' 11 | 12 | def __init__(self, **kwargs): 13 | '''Initialize filter 14 | 15 | Args: 16 | kwargs: 17 | All keyword arguments are added as LogRecord attributes 18 | ''' 19 | self.kwargs = kwargs 20 | super().__init__() 21 | 22 | def filter(self, record): 23 | for key, value in self.kwargs.items(): 24 | setattr(record, key, value) 25 | return True 26 | 27 | 28 | class LevelFilter(logging.Filter): 29 | '''Logging level filter 30 | 31 | This filter can be used on a handler to filter logger message based on their level. 32 | 33 | For example if you have two DEBUG loggers; A and B, with both two handlers; console and file. 34 | On the file handler you want all message of A but only WARNING message of B. You can then use 35 | the following configuration: 36 | 37 | root: 38 | handlers: 39 | - "console" 40 | - "file" 41 | level: "DEBUG" 42 | propagate: "True" 43 | 44 | filters: 45 | BFilter: 46 | class: logging_utilities.filters.LevelFilter 47 | level: "WARNING" 48 | logger: 'B' 49 | 50 | loggers: 51 | A: 52 | level: "DEBUG" 53 | B: 54 | level: "DEBUG" 55 | 56 | handlers: 57 | console: 58 | class: "logging.StreamHandler" 59 | 60 | file: 61 | class: "logging.handlers.RotatingFileHandler" 62 | filters: 63 | - "BFilter" 64 | ''' 65 | 66 | def __init__(self, level='DEBUG', logger=''): 67 | '''Initialize the filter 68 | 69 | Args: 70 | level: (str|int) 71 | Level to filter, all message with a lower level will be filtered 72 | logger: (str) 73 | Logger name on which to apply the level filtering, if empty then the filtering is 74 | applied to all loggers 75 | 76 | Raises: 77 | ValueError: when an invalid level is given 78 | ''' 79 | if not isinstance(level, (str, int)): 80 | raise ValueError('Unsupported level type: must be int or string') 81 | self.level = level 82 | if isinstance(self.level, str): 83 | # translate level to int 84 | self.level = logging.getLevelName(self.level) 85 | if not isinstance(self.level, 
int): 86 | raise ValueError('Unsupported level string') 87 | elif isinstance(self.level, int) \ 88 | and logging.getLevelName(self.level) == "Level %d" % (self.level): 89 | raise ValueError('Undefined level integer') 90 | 91 | self.logger = logger 92 | super().__init__() 93 | 94 | def filter(self, record): 95 | if self.logger == '' or record.name.startswith(self.logger): 96 | if record.levelno < self.level: 97 | return False 98 | return True 99 | 100 | 101 | class TimeAttribute(logging.Filter): 102 | '''Logging time record attribute 103 | 104 | This filter can be used on a handler to add iso 8601 time attribute to the record. 105 | ''' 106 | 107 | def __init__(self, isotime=True, utc_isotime=False): 108 | '''Initialize the filter 109 | 110 | Args: 111 | isotime: (bool) 112 | Add local time in `YYYY-MM-DDThh:mm:ss±hh:mm` format as `isotime` 113 | attribute to LogRecord 114 | utc_isotime: (bool) 115 | Add utc time in `YYYY-MM-DDThh:mm:ss±hh:mm` format as `utc_isotime` 116 | attribute to LogRecord 117 | ''' 118 | self.isotime = isotime 119 | self.utc_isotime = utc_isotime 120 | super().__init__() 121 | 122 | def filter(self, record): 123 | if self.isotime: 124 | record.isotime = datetime.fromtimestamp(record.created).astimezone().isoformat() 125 | if self.utc_isotime: 126 | record.utc_isotime = datetime.fromtimestamp(record.created, tz=timezone.utc) \ 127 | .isoformat().replace('+00:00', 'Z') 128 | return True 129 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SHELL = /bin/bash 2 | 3 | .DEFAULT_GOAL := help 4 | 5 | 6 | CURRENT_DIR := $(shell pwd) 7 | 8 | # Test reports configuration 9 | TEST_REPORT_DIR ?= $(CURRENT_DIR)/tests/report 10 | TEST_REPORT_FILE ?= nose2-junit.xml 11 | 12 | # general targets timestamps 13 | TIMESTAMPS = .timestamps 14 | REQUIREMENTS := $(TIMESTAMPS) $(PIP_FILE) $(PIP_FILE_LOCK) 15 | 16 | # Find all python files 
# Find all python files that are not inside a hidden directory (directory starting with .)
PYTHON_FILES := $(shell find ./* -type d \( -path ./build -o -path ./dist \) -prune -false -o -type f -name "*.py" -print)

# PIPENV files
PIP_FILE = Pipfile
PIP_FILE_LOCK = Pipfile.lock

# default configuration
ENV_FILE ?= .env.local

# Commands
PIPENV_RUN := pipenv run
PYTHON := $(PIPENV_RUN) python3
PIP := $(PIPENV_RUN) pip3
YAPF := $(PIPENV_RUN) yapf
ISORT := $(PIPENV_RUN) isort
NOSE := $(PIPENV_RUN) nose2
PYLINT := $(PIPENV_RUN) pylint

PACKAGE_VERSION = $(shell awk '/^Version:/ {print $$2}' logging_utilities.egg-info/PKG-INFO)


all: help


.PHONY: help
help:
	@echo "Usage: make <target>"
	@echo
	@echo "Possible targets:"
	@echo -e " \033[1mSetup TARGETS\033[0m "
	@echo "- setup              Create the python virtual environment with developper tools"
	@echo -e " \033[1mFORMATING, LINTING AND TESTING TOOLS TARGETS\033[0m "
	@echo "- format             Format the python source code"
	@echo "- lint               Lint the python source code"
	@echo "- format-lint        Format and lint the python source code"
	@echo "- test               Run the tests"
	@echo -e " \033[1mPACKAGING TARGETS\033[0m "
	@echo "- package            Create package"
	@echo "- publish            Tag and publish package to PyPI"
	@echo -e " \033[1mCLEANING TARGETS\033[0m "
	@echo "- clean              Clean genereated files"
	@echo "- clean-venv         Clean python venv"
	@echo "- clean-all          Clean everything"
	@echo "- python-version     Show python version"


# Build targets. Calling setup is all that is needed for the local files to be installed as needed.

.PHONY: setup
setup: $(REQUIREMENTS)
	pipenv install --dev
	pipenv shell

# linting target, calls upon yapf to make sure your code is easier to read and respects some conventions.

.PHONY: format
format: $(REQUIREMENTS)
	$(YAPF) -p -i --style .style.yapf $(PYTHON_FILES)
	$(ISORT) $(PYTHON_FILES)


.PHONY: ci-check-format
ci-check-format: format
	@if [[ -n `git status --porcelain` ]]; then \
		>&2 echo "ERROR: the following files are not formatted correctly:"; \
		>&2 git status --porcelain; \
		exit 1; \
	fi


.PHONY: lint
lint: $(REQUIREMENTS)
	$(PYLINT) $(PYTHON_FILES)


.PHONY: format-lint
format-lint: format lint


# Test target

# NOTE(review): DEV_REQUIREMENTS_TIMESTAMP, VENV, VENV_TIMESTAMP,
# SYSTEM_PYTHON and DEV_REQUIREMENTS are not defined in this Makefile, so the
# corresponding prerequisites expand to nothing — confirm against the full
# build setup.

.PHONY: test
test: $(DEV_REQUIREMENTS_TIMESTAMP)
	mkdir -p $(TEST_REPORT_DIR)
	$(NOSE) -v -c tests/unittest.cfg --junit-xml-path $(TEST_REPORT_DIR)/$(TEST_REPORT_FILE) -s tests/


# Packaging target

.PHONY: package
package: $(DEV_REQUIREMENTS_TIMESTAMP)
	$(PYTHON) -m build


.PHONY: publish
publish: publish-check package
	@echo "Upload package version=$(PACKAGE_VERSION)"
	$(PYTHON) -m twine upload dist/*


# Clean targets

.PHONY: clean-venv
clean-venv:
	pipenv --rm

.PHONY: clean
clean: clean-venv
	@# clean python cache files
	find . -name __pycache__ -type d -print0 | xargs -I {} -0 rm -rf "{}"
	rm -rf $(TEST_REPORT_DIR)
	rm -rf $(TIMESTAMPS)
	rm -rf dist
	rm -rf build
	rm -rf *.egg-info
	rm -f .coverage

.PHONY: clean-all
clean-all: clean

.PHONY: python-version
python-version:
	# BUGFIX: was `$(PYTHON) python --version`, which expanded to
	# `pipenv run python3 python --version` and failed.
	$(PYTHON) --version



# Actual builds targets with dependencies

$(TIMESTAMPS):
	mkdir -p $(TIMESTAMPS)


$(VENV_TIMESTAMP):
	test -d $(VENV) || $(SYSTEM_PYTHON) -m venv $(VENV) && $(PIP) install --upgrade pip setuptools
	@touch $(VENV_TIMESTAMP)


$(DEV_REQUIREMENTS_TIMESTAMP): $(VENV_TIMESTAMP) $(DEV_REQUIREMENTS)
	$(PIP) install -r $(DEV_REQUIREMENTS)
	@touch $(DEV_REQUIREMENTS_TIMESTAMP)


publish-check:
	@echo "Check if publish is allowed"
	@if [ -n "`git status --porcelain`" ]; then echo "ERROR: Repo is dirty !" >&2; exit 1; fi
# RequestFactory used below requires configured Django settings.
if not settings.configured:
    settings.configure()

logger = logging.getLogger(__name__)


class AddRequestToLogTest(unittest.TestCase):
    """End to end test: the middleware stores the request on the thread
    context, then the filter chain copies and jsonifies it onto the record."""

    def setUp(self) -> None:
        self.factory = RequestFactory()

    @classmethod
    def _configure_django_filter(cls, _logger):
        # Filter chain on each handler: JSON output with extras, copy the
        # thread-context request onto the record, then jsonify it keeping
        # only path and QUERY_STRING.
        for handler in _logger.handlers:
            handler.setFormatter(JsonFormatter(add_always_extra=True))
            handler.addFilter(
                AddThreadContextFilter(
                    contexts=[{
                        'logger_key': 'request', 'context_key': 'request'
                    }]
                )
            )
            handler.addFilter(
                JsonDjangoRequest(
                    include_keys=["request.path", "request.META.QUERY_STRING"], attr_name="request"
                )
            )

    def test_log_request_context(self):

        def test_handler(request):
            # Log from within the request handling; the middleware has
            # already stored the request at this point.
            logger.info("some value")
            return "some response"

        with self.assertLogs(logger, level=logging.DEBUG) as ctx:
            # Global config of filter
            self._configure_django_filter(logger)
            request = self.factory.get("/some_path?test=some_value")
            middleware = AddToThreadContextMiddleware(test_handler)
            middleware(request)

        print(ctx.output[0])
        message1 = json.loads(ctx.output[0], object_pairs_hook=dictionary)
        self.assertDictEqual(
            message1,
            dictionary([
                ("levelname", "INFO"),
                ("name", "tests.test_log_request_context"),
                ("message", "some value"),
                (
                    "request",
                    dictionary([("path", "/some_path"),
                                ("META", dictionary([("QUERY_STRING", "test=some_value")]))])
                ),
            ])
        )


class MultiprocessLoggingTest(unittest.TestCase):
    """Checks that concurrent requests do not leak their request object into
    each other's log records (thread local isolation)."""

    def setUp(self) -> None:
        self.factory = RequestFactory()

    @classmethod
    def _configure_django_filter(cls, _logger):
        for handler in _logger.handlers:
            handler.setFormatter(JsonFormatter(add_always_extra=True))
            handler.addFilter(
                AddThreadContextFilter(
                    contexts=[{
                        'logger_key': 'request', 'context_key': 'request'
                    }]
                )
            )
            handler.addFilter(JsonDjangoRequest(include_keys=["request.path"], attr_name="request"))

    def test_threaded_logging(self):

        def test_handler(request):
            # Sleep so that all submitted requests overlap in time, which
            # would expose any cross-thread leakage of the stored request.
            time.sleep(1)
            logger.info(request.path)
            return "some response"

        paths = [
            "/first_path",
            "/second_path",
            "/third_path",
        ]

        def execute_request(path):
            request = self.factory.get(path)
            middleware = AddToThreadContextMiddleware(test_handler)
            middleware(request)

        with self.assertLogs(logger, level=logging.DEBUG) as ctx:
            # Global config of filter
            self._configure_django_filter(logger)
            with ThreadPoolExecutor() as executor:
                futures = []
                for path in paths:
                    futures.append(executor.submit(execute_request, path))

            for output in ctx.output:
                msg = json.loads(output, object_pairs_hook=dictionary)
                # The logged message (the path) must match the request that
                # was stored on the same thread.
                self.assertEqual(msg["message"], msg["request"]["path"])
        class _DictIgnoreMissingX(_DictIgnoreMissing):
            # Override the class-level default returned for missing keys.
            _dft_value = default

        dct = _DictIgnoreMissingX()
        self.assertEqual(dct['unknown'], default)


class LogRecordIgnoreMissingTest(unittest.TestCase):
    # Tests LogRecordIgnoreMissing both directly and installed as the global
    # log record factory.

    def test_log_record_missing_default(self):
        record = LogRecordIgnoreMissing(
            'my-record', logging.INFO, 'my/path', 0, 'my message', None, False
        )
        # Unknown attributes fall back to the empty-string default ...
        self.assertEqual(record.__dict__['unknown'], '')
        # ... while regular record attributes keep working.
        self.assertEqual(record.__dict__['name'], 'my-record')
        self.assertEqual(record.__dict__['levelno'], logging.INFO)

    def test_logger_missing_default(self):
        set_log_record_ignore_missing_factory()
        logger = Logger('my-logger')

        record = logger.makeRecord(
            'my-record',
            logging.INFO,
            'test',
            0,
            'my message: %s %d', ('test', 2),
            False,
            extra={'my-extra': 'this is an extra'}
        )

        self.assertEqual(record.__dict__['name'], 'my-record')
        self.assertEqual(record.__dict__['levelno'], logging.INFO)
        self.assertEqual(record.getMessage(), 'my message: test 2')
        self.assertEqual(record.__dict__['my-extra'], 'this is an extra')
        self.assertEqual(record.__dict__['unknown'], '')
        # Restore the default record factory so other tests are unaffected.
        reset_log_record_factory()

    @params(None, 'my-default', 0, 100, {'1': 'default'}, {}, [], [1], ['default'])
    def test_logger_missing(self, default):
        # Same as above but with a custom default for missing attributes.
        set_log_record_ignore_missing_factory(dft_value=default)
        logger = Logger('my-logger')

        record = logger.makeRecord(
            'my-record',
            logging.INFO,
            'test',
            0,
            'my message: %s %d', ('test', 2),
            False,
            extra={'my-extra': 'this is an extra'}
        )

        self.assertEqual(record.__dict__['name'], 'my-record')
        self.assertEqual(record.__dict__['levelno'], logging.INFO)
        self.assertEqual(record.getMessage(), 'my message: test 2')
        self.assertEqual(record.__dict__['my-extra'], 'this is an extra')
        self.assertEqual(record.__dict__['unknown'], default)
        reset_log_record_factory()


class LoggerIgnoreMissingTest(unittest.TestCase):
    # Exercises the factory through a real formatter whose format string
    # references an attribute that no record carries.

    def setUp(self):
        super().setUp()
        self.maxDiff = None

    @classmethod
    def _configure_logging(cls, logger, fmt):
        logger.setLevel(logging.DEBUG)

        for handler in logger.handlers:
            handler.setFormatter(Formatter(fmt))

    def test_logger_ignore_missing_extra_default(self):
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            set_log_record_ignore_missing_factory()
            logger = logging.getLogger('test_formatter')
            self._configure_logging(logger, "%(levelname)s:%(message)s:%(missing_attribute)s")
            logger.info('Simple message')
            logger.info('Composed message: %s', 'this is a composed message')
            logger.info('Composed message %s', 'with extra', extra={'extra1': 23})
            reset_log_record_factory()
        self.assertEqual(
            ctx.output,
            [
                'INFO:Simple message:',
                'INFO:Composed message: this is a composed message:',
                'INFO:Composed message with extra:'
            ]
        )

    @params(None, 'my-default', 0, 100, {'1': 'default'}, {}, [], [1], ['default'])
    def test_logger_ignore_missing_extra(self, default):
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            set_log_record_ignore_missing_factory(default)
            logger = logging.getLogger('test_formatter')
            self._configure_logging(logger, "%(levelname)s:%(message)s:%(missing_attribute)s")
            logger.info('Simple message')
            logger.info('Composed message: %s', 'this is a composed message')
            logger.info('Composed message %s', 'with extra', extra={'extra1': 23})
            reset_log_record_factory()
        self.assertEqual(
            ctx.output,
            [
                'INFO:Simple message:' + str(default),
                'INFO:Composed message: this is a composed message:' + str(default),
                'INFO:Composed message with extra:' + str(default)
            ]
        )
import logging
import re
import sys
import warnings
from collections import OrderedDict

from django.http import HttpRequest

# From python3.7, dict is ordered. Ordered dict are preferred in order to keep the json output
# in the same order as its definition
if sys.version_info.major >= 3 and sys.version_info.minor >= 7:
    dictionary = dict
else:
    dictionary = OrderedDict


def _pattern(text):
    # Anchor the dotted key at the start of the string and require a word
    # boundary after it; dots are escaped so that they only match literally.
    return r'^{}\b'.format(text.replace('.', '[.]'))


class JsonDjangoRequest(logging.Filter):
    """Convert Django request to a json object

    This filter recursively converts a django HttpRequest object to a python dictionary that can be
    dumped into a json string. This is useful for example if you want to log an extra parameter of
    type HttpRequest with the JSON formatter. If the specified attribute is not of type HttpRequest,
    it will simply be ignored and passed through.

    Additionally the attributes of the request that needs to be jsonify can be configured using the
    `include_keys` and/or `exclude_keys` parameters.

    The django framework adds sometimes an HttpRequest or socket object under "record.request" when
    logging. So if you decide to use the attribute name "request" for this filter, beware that you
    will need to handle the case where the attribute is of type 'socket' separately, for example by
    filtering it out using the attribute type filter. (see example in README)
    """

    def __init__(self, include_keys=None, exclude_keys=None, attr_name='http_request'):
        """Initialize the filter

        Args:
            include_keys: (list | None)
                All request attributes that match any of the dotted keys of the list will be added
                to the jsonifiable object. When None then all attributes are added
                (default behavior).
            exclude_keys: (list | None)
                All request attributes that match any of the dotted keys of the list will not be
                added to the jsonifiable object. NOTE this has precedence to include_keys
                which means that if a key is in both lists, then it is not added.
            attr_name: str
                The name of the attribute that stores the HttpRequest object. The default is
                'http_request'.
                (Note that django sometimes stores an "HttpRequest" under the attribute "request".
                This is however not the default as django also stores other types of objects under
                this attribute name.)
        """
        self.include_keys = include_keys
        self.exclude_keys = exclude_keys
        self.attr_name = attr_name
        super().__init__()

    def filter(self, record):
        # Only records carrying the configured attribute are touched; the
        # record is never dropped.
        if not hasattr(record, self.attr_name):
            return True

        self._jsonify_request(record)

        return True

    def _jsonify_request(self, record):
        # Replace the HttpRequest on the record by its jsonifiable dict;
        # other attribute types are passed through untouched.
        orig_request = getattr(record, self.attr_name)
        if isinstance(orig_request, HttpRequest) and hasattr(orig_request, '__dict__'):
            request = self._jsonify_dict(self.attr_name, orig_request.__dict__)
            if self._add_key(self.attr_name + '.headers', 'headers'):
                # HttpRequest has a special headers property that is cached and is not always in
                # record.http_request.__dict__
                request['headers'] = self._jsonify_dict(
                    self.attr_name + '.headers', orig_request.headers
                )
            setattr(record, self.attr_name, request)

    def _jsonify_dict(self, prefix, dct):
        # Recursively convert `dct` into a json-serializable dictionary,
        # keeping only the keys allowed by include/exclude.
        json_obj = dictionary()
        # BUGFIX: the original condition `major < 3 and minor < 7` was never
        # true on any Python 3 interpreter, so the sort compensating for
        # unordered dicts never ran on Python < 3.7.
        if sys.version_info < (3, 7):
            dct = OrderedDict(sorted(dct.items(), key=lambda t: t[0]))
        for key, value in dct.items():
            dotted_key = '{}.{}'.format(prefix, key)
            if not self._add_key(dotted_key, key):
                continue
            if hasattr(value, '__dict__'):
                json_obj[key] = self._jsonify_dict(dotted_key, value.__dict__)
            elif isinstance(value, dict):
                json_obj[key] = self._jsonify_dict(dotted_key, value)
            elif isinstance(value, (str, int, float, type(None), bool, tuple, list)):
                json_obj[key] = value
            elif isinstance(value, (bytes)):
                json_obj[key] = str(value)
            else:
                # Unsupported type: keep a string representation but warn so
                # the configuration can be fixed.
                warnings.warn(
                    "Cannot jsonify key {} with value {}: unsupported type={}".format(
                        dotted_key, value, type(value)
                    )
                )
                json_obj[key] = str(value)
        return json_obj

    def _add_key(self, dotted_key, key):
        # exclude_keys has precedence over include_keys.
        return self._include_key(dotted_key, key) and not self._exclude_key(dotted_key, key)

    def _include_key(self, dotted_key, key):
        if self.include_keys is None:
            # if no include_keys is configured add all keys except for private
            if key.startswith('_'):
                return False
            return True

        def match(item):
            # Either the configured item is a prefix of the dotted key, or the
            # dotted key is a (non-private) prefix of the configured item.
            return (
                re.match(_pattern(dotted_key), item) or
                (re.match(_pattern(item), dotted_key) and not key.startswith('_'))
            )

        return any(map(match, self.include_keys))

    def _exclude_key(self, dotted_key, key):
        if self.exclude_keys is None:
            # if no exclude_keys is configured only exclude private key
            if key.startswith('_'):
                return True
            return False

        def match(item):
            return re.match(_pattern(item), dotted_key)

        return any(map(match, self.exclude_keys))
class FlaskJsonFormatterTest(unittest.TestCase):
    """JsonFormatter output combined with the FlaskRequestAttribute filter.

    Covers logging outside of any Flask request context, inside a test
    request context, and the interaction with `remove_empty`.
    """

    @classmethod
    def _configure_logger(
        cls,
        logger,
        fmt=None,
        add_always_extra=False,
        remove_empty=False,
        ignore_missing=False,
        flask_attributes=None,
        style='%'
    ):
        # Attach a JsonFormatter and a FlaskRequestAttribute filter to every
        # handler of `logger` (handlers are installed by assertLogs).
        logger.setLevel(logging.DEBUG)

        for handler in logger.handlers:
            formatter = JsonFormatter(
                fmt,
                add_always_extra=add_always_extra,
                remove_empty=remove_empty,
                ignore_missing=ignore_missing,
                style=style
            )
            handler.setFormatter(formatter)
            flask_attribute = FlaskRequestAttribute(attributes=flask_attributes)
            handler.addFilter(flask_attribute)

    def test_json_formatter_flask_no_context(self):
        # Outside of a request context the flask attributes are empty; with
        # remove_empty/ignore_missing the whole "request" entry is dropped.
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            logger = logging.getLogger('test_formatter')
            self._configure_logger(
                logger,
                fmt=dictionary([
                    ('level', 'levelname'),
                    (
                        'request',
                        dictionary([
                            ('path', '%(flask_request_path)s'),
                            ('headers', dictionary([('Accept', 'flask_request_headers.Accept')]))
                        ])
                    ),
                    ('message', 'message'),
                ]),
                remove_empty=True,
                ignore_missing=True,
                flask_attributes=['path', 'headers']
            )
            logger.info('Simple message')

        self.assertDictEqual(
            json.loads(ctx.output[0], object_pairs_hook=dictionary),
            dictionary([
                ("level", "INFO"),
                ("message", "Simple message"),
            ])
        )

    def test_json_formatter_flask(self):
        # Inside a request context; without remove_empty a missing Accept
        # header shows up as an empty string.
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            logger = logging.getLogger('test_formatter')
            self._configure_logger(
                logger,
                fmt=dictionary([
                    ('level', 'levelname'),
                    (
                        'request',
                        dictionary([
                            ('path', '%(flask_request_path)s'),
                            ('headers', dictionary([('Accept', 'flask_request_headers.Accept')]))
                        ]),
                    ),
                    ('message', 'message'),
                ]),
                remove_empty=False,
                ignore_missing=True,
                flask_attributes=['path', 'headers']
            )

            with app.test_request_context('/make_report/2017'):
                logger.info('Simple message')

            with app.test_request_context('/make_report/2017', headers={'Accept': '*/*'}):
                logger.info('Simple message with headers')

        self.assertDictEqual(
            json.loads(ctx.output[0], object_pairs_hook=dictionary),
            dictionary([
                ("level", "INFO"),
                ("message", "Simple message"),
                (
                    "request",
                    dictionary([("path", "/make_report/2017"),
                                ("headers", dictionary([("Accept", "")]))])
                ),
            ])
        )

        self.assertDictEqual(
            json.loads(ctx.output[1], object_pairs_hook=dictionary),
            dictionary([
                ("level", "INFO"),
                ("message", "Simple message with headers"),
                (
                    "request",
                    dictionary([("path", "/make_report/2017"),
                                ("headers", dictionary([("Accept", "*/*")]))])
                ),
            ])
        )

    def test_json_formatter_flask_remove_empty(self):
        # Same as above but with remove_empty=True: the empty Accept header
        # (and therefore the whole "headers" dict) is dropped in the first
        # output while the populated one is kept in the second.
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            logger = logging.getLogger('test_formatter')
            self._configure_logger(
                logger,
                fmt=dictionary([
                    ('level', 'levelname'),
                    (
                        'request',
                        dictionary([
                            ('path', '%(flask_request_path)s'),
                            ('headers', dictionary([('Accept', 'flask_request_headers.Accept')]))
                        ]),
                    ),
                    ('message', 'message'),
                ]),
                remove_empty=True,
                ignore_missing=True,
                flask_attributes=['path', 'headers']
            )

            with app.test_request_context('/make_report/2017'):
                logger.info('Simple message')

            with app.test_request_context('/make_report/2017', headers={'Accept': '*/*'}):
                logger.info('Simple message with headers')

        self.assertDictEqual(
            json.loads(ctx.output[0], object_pairs_hook=dictionary),
            dictionary([
                ("level", "INFO"),
                ("message", "Simple message"),
                ("request", dictionary([("path", "/make_report/2017")])),
            ])
        )

        self.assertDictEqual(
            json.loads(ctx.output[1], object_pairs_hook=dictionary),
            dictionary([
                ("level", "INFO"),
                ("message", "Simple message with headers"),
                (
                    "request",
                    dictionary([("path", "/make_report/2017"),
                                ("headers", dictionary([("Accept", "*/*")]))])
                ),
            ])
        )
class AttrTypeFilterTest(unittest.TestCase):
    """AttrTypeFilter in whitelist, blacklist and combined mode.

    Each test logs once with a set of extras and checks which extras survive
    the type filtering in the JSON output.
    """

    def setUp(self):
        self.logger = logging.getLogger('test_formatter')
        self.logger.setLevel(logging.DEBUG)

    def log_and_assert(self, type_validators, extra_dict, filtered_extra_dict):
        # Install the given filters on every handler, emit one record with
        # `extra_dict` and assert that exactly `filtered_extra_dict` remains.
        with self.assertLogs(self.logger, level=logging.INFO) as ctx:
            for handler in self.logger.handlers:
                handler.setFormatter(JsonFormatter(add_always_extra=True))
                for validator in type_validators:
                    handler.addFilter(validator)
            self.logger.info('Composed message %s', 'with extra', extra=extra_dict)
        message = json.loads(ctx.output[0], object_pairs_hook=dict)
        self.assertDictEqual(
            message,
            {
                "levelname": "INFO",
                "name": "test_formatter",
                "message": "Composed message with extra",
                **filtered_extra_dict
            },
        )

    def test_include_filter(self):
        # Whitelist mode: only attributes whose type matches are kept.
        self.log_and_assert(
            type_validators=[
                AttrTypeFilter({
                    'request': 'dict',
                    'unexistent': 'str',
                    'entry': bool,
                    'abc': ['bool', str, 'int', 'TestSubobject'],
                    'def': 'TestObject',
                    'ghi': 'TestObject',
                    'ghi2': 'tests.test_attr_type_filter.TestObject',
                    'jkl': ['bool', str, 'int', 'TestSubobject'],
                    'mno': 'TestSubobject'
                })
            ],
            extra_dict={
                'entry': 'hey',  # no match
                'abc': TestObject(),  # no match
                'def': TestObject(),  # match
                'ghi': TestSubobject(),  # match (checks that super class detection works)
                'ghi2': TestSubobject(),  # match
                'jkl': TestSubobject(),  # match
                'mno': TestObject(),  # no match
                'request': {  # match
                    'path': '/my/path', 'method': 'GET', 'comment': TestObject()
                }
            },
            filtered_extra_dict={
                "request": {
                    "path": '/my/path',
                    "method": "GET",
                    "comment": "Test Object"  # str() serializer is used if extra is not serializable
                },
                'def': 'Test Object',
                'ghi': 'Test Subobject',
                'ghi2': 'Test Subobject',
                'jkl': 'Test Subobject',
            }
        )

    def test_exclude_filter(self):
        # Blacklist mode: attributes whose type matches are removed.
        self.log_and_assert(
            type_validators=[
                AttrTypeFilter(
                    is_blacklist=True,
                    typecheck_list={
                        'request': 'dict',
                        'unexistent': 'str',
                        'entry': bool,
                        'abc': ['bool', str, 'int'],
                    }
                )
            ],
            extra_dict={
                'entry': 'hey',  # no match
                'abc': TestObject(),  # no match
                'request': {  # match
                    'path': '/my/path', 'method': 'GET', 'comment': TestObject()
                }
            },
            filtered_extra_dict={
                'entry': 'hey', 'abc': 'Test Object'
            }
        )

    def test_include_and_exclude_filter(self):
        # Combined mode: an attribute survives only if it matches the
        # whitelist AND is not matched by the blacklist.
        self.log_and_assert(
            type_validators=[
                AttrTypeFilter({
                    'entry': 'bool',
                    'entry3': 'int',
                    'request': 'builtins.dict',
                    'abc': 'tests.test_attr_type_filter.TestObject',
                    'unexistent': 'str',
                }),
                AttrTypeFilter(
                    is_blacklist=True,
                    typecheck_list={
                        'entry2': str,
                        'entry3': int,
                        'abc': [bool, str, int],
                        'nested': dict,
                    }
                )
            ],
            extra_dict={
                'entry': 'hey',  # no match, no filter => hidden
                'abc': TestObject(),  # no filter, no match => show
                "entry2": "hey2",  # no filter, match => hidden
                'request': {  # match, no filter => show
                    'path': '/my/path',
                    'scheme': 'https',
                },
                "entry3": "hey3",  # no match, no match => hidden
                'nested': {  # no filter, match => hidden
                    'param1': 'text1',
                    'param2': 'text2',
                }
            },
            filtered_extra_dict={
                'abc': 'Test Object', 'request': {
                    'path': '/my/path', 'scheme': 'https'
                }
            }
        )
    def setUp(self):
        # Every test needs access to the request factory.
        self.factory = RequestFactory()

    @classmethod
    def _configure_django_filter(
        cls, _logger, include_keys=None, exclude_keys=None, attr_name='http_request'
    ):
        # Attach a JsonDjangoRequest filter and a JsonFormatter (rendering
        # level/message plus the jsonified request under `attr_name`) to
        # every handler of `_logger`.
        _logger.setLevel(logging.DEBUG)

        for handler in _logger.handlers:
            django_filter = JsonDjangoRequest(
                include_keys=include_keys, exclude_keys=exclude_keys, attr_name=attr_name
            )
            handler.addFilter(django_filter)
            formatter = JsonFormatter(
                dictionary([
                    ('level', 'levelname'),
                    ('message', 'message'),
                    ('request', attr_name),
                ]),
                remove_empty=True
            )
            handler.setFormatter(formatter)

    def test_django_include_keys(self):
        # Prefix semantics of the include list: a dotted key is included when
        # it is a prefix of a configured entry or vice versa (private keys
        # only when listed explicitly).
        # pylint: disable=protected-access
        django_filter = JsonDjangoRequest(
            include_keys=[
                'request.META.METHOD',
                'request.environ',
                'request.environ._include',
            ]
        )
        # True assertions
        self.assertTrue(django_filter._include_key('request', 'request'))
        self.assertTrue(django_filter._include_key('request.META', 'META'))
        self.assertTrue(django_filter._include_key('request.META.METHOD', 'METHOD'))
        self.assertTrue(django_filter._include_key('request.environ', 'environ'))
        self.assertTrue(django_filter._include_key('request.environ.CONTENT_TYPE', 'CONTENT_TYPE'))
        self.assertTrue(django_filter._include_key('request.environ._include', '_include'))
        # False assertions
        self.assertFalse(django_filter._include_key('test', 'test'))
        self.assertFalse(django_filter._include_key('request.path', 'path'))
        self.assertFalse(django_filter._include_key('request.path.full', 'full'))
        self.assertFalse(django_filter._include_key('request.META.CONTENT_TYPE', 'CONTENT_TYPE'))
        self.assertFalse(django_filter._include_key('request.META.extend.TYPE', 'TYPE'))
        self.assertFalse(django_filter._include_key('request.environ._type', '_type'))

    def test_django_exclude_keys(self):
        # Prefix semantics of the exclude list: a dotted key is excluded when
        # a configured entry is a prefix of it (but a parent of an excluded
        # child is itself kept).
        # pylint: disable=protected-access
        django_filter = JsonDjangoRequest(exclude_keys=['request.META.METHOD', 'request.environ'])
        # True assertions
        self.assertTrue(django_filter._exclude_key('request.META.METHOD', 'METHOD'))
        self.assertTrue(django_filter._exclude_key('request.environ', 'environ'))
        self.assertTrue(django_filter._exclude_key('request.environ.CONTENT_TYPE', 'CONTENT_TYPE'))
        self.assertTrue(django_filter._exclude_key('request.environ._include', '_include'))
        self.assertTrue(django_filter._exclude_key('request.environ._type', '_type'))
        # False assertions
        self.assertFalse(django_filter._exclude_key('test', 'test'))
        self.assertFalse(django_filter._exclude_key('request.path', 'path'))
        self.assertFalse(django_filter._exclude_key('request.path.full', 'full'))
        self.assertFalse(django_filter._exclude_key('request.META.CONTENT_TYPE', 'CONTENT_TYPE'))
        self.assertFalse(django_filter._exclude_key('request.META.extend.TYPE', 'TYPE'))
        self.assertFalse(django_filter._exclude_key('request', 'request'))
        self.assertFalse(django_filter._exclude_key('request.META', 'META'))

    def test_django_request_jsonify(self):
        # End-to-end: a WSGIRequest attached as extra is jsonified according
        # to the include/exclude configuration (exclude wins over include).
        request = self.factory.get('/my_path?test=true&test_2=false')
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            test_logger = logging.getLogger('test_formatter')
            self._configure_django_filter(
                test_logger,
                include_keys=[
                    'my_http_request.META.REQUEST_METHOD',
                    'my_http_request.META.SERVER_NAME',
                    'my_http_request.environ'
                ],
                exclude_keys=['my_http_request.META.SERVER_NAME', 'my_http_request.environ.wsgi'],
                attr_name='my_http_request'
            )
            test_logger.info('Simple message', extra={'my_http_request': request})
            test_logger.info(
                'Composed message: %s',
                'this is a composed message',
                extra={'my_http_request': request}
            )
        message1 = json.loads(ctx.output[0], object_pairs_hook=dictionary)
        message2 = json.loads(ctx.output[1], object_pairs_hook=dictionary)
        logger.debug('message1=%s', message1)
        logger.debug('message2=%s', message2)
        self.assertDictEqual(
            message1,
            dictionary([
                ("level", "INFO"),
                ("message", "Simple message"),
                (
                    "request",
                    dictionary([("META", dictionary([("REQUEST_METHOD", "GET")])),
                                (
                                    "environ",
                                    dictionary([
                                        ("HTTP_COOKIE", ""),
                                        ("PATH_INFO", "/my_path"),
                                        ("QUERY_STRING", "test=true&test_2=false"),
                                        ("REMOTE_ADDR", "127.0.0.1"),
                                        ("REQUEST_METHOD", "GET"),
                                        ("SCRIPT_NAME", ""),
                                        ("SERVER_NAME", "testserver"),
                                        ("SERVER_PORT", "80"),
                                        ("SERVER_PROTOCOL", "HTTP/1.1"),
                                    ])
                                )])
                ),
            ]),
            msg="First message differ"
        )
        self.assertDictEqual(
            message2,
            dictionary([("level", "INFO"),
                        ("message", "Composed message: this is a composed message"),
                        (
                            "request",
                            dictionary([("META", dictionary([("REQUEST_METHOD", "GET")])),
                                        (
                                            "environ",
                                            dictionary([
                                                ("HTTP_COOKIE", ""),
                                                ("PATH_INFO", "/my_path"),
                                                ("QUERY_STRING", "test=true&test_2=false"),
                                                ("REMOTE_ADDR", "127.0.0.1"),
                                                ("REQUEST_METHOD", "GET"),
                                                ("SCRIPT_NAME", ""),
                                                ("SERVER_NAME", "testserver"),
                                                ("SERVER_PORT", "80"),
                                                ("SERVER_PROTOCOL", "HTTP/1.1"),
                                            ])
                                        )])
                        )]),
            msg="Second message differ"
        )

    def test_django_request_jsonify_other(self):
        # Non-request values attached under `attr_name` must pass through the
        # filter unchanged.
        requests = ({'a': 1}, OrderedDict([('a', 1)]), ['a'], 45, 45.5, 'a')
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            test_logger = logging.getLogger('test_formatter')
            self._configure_django_filter(
                test_logger,
                include_keys=[
                    'request.META.REQUEST_METHOD', 'request.META.SERVER_NAME', 'request.environ'
                ],
                exclude_keys=['request.META.SERVER_NAME', 'request.environ.wsgi'],
                attr_name='request'
            )
            for request in requests:
                test_logger.info('Simple message', extra={'request': request})

        for i, request in enumerate(requests):
            message = json.loads(ctx.output[i], object_pairs_hook=dictionary)
            self.assertEqual(request, message['request'])
test_thread_context_len(self): 44 | ctx = ThreadMappingContext() 45 | ctx.init({'a': 1}) 46 | self.assertEqual(len(ctx), 1) 47 | 48 | def test_thread_context_get(self): 49 | ctx = ThreadMappingContext() 50 | ctx.init({'a': 1}) 51 | self.assertEqual(ctx.get('b', 'not found'), 'not found') 52 | try: 53 | self.assertEqual(ctx.get('b'), None) 54 | except KeyError: 55 | self.fail('get() should never raise KeyError') 56 | 57 | self.assertEqual(ctx.get('a'), 1) 58 | 59 | self.assertEqual(ctx['a'], 1) 60 | 61 | def test_thread_context_pop(self): 62 | ctx = ThreadMappingContext() 63 | ctx.init({'a': 1, 'b': 2}) 64 | 65 | self.assertEqual(ctx.pop('a'), 1) 66 | self.assertNotIn('a', ctx) 67 | 68 | self.assertIn('b', ctx) 69 | 70 | self.assertRaises(KeyError, ctx.pop, 'c') 71 | self.assertIsNone(ctx.pop('c', None)) 72 | self.assertEqual(ctx.pop('c', 'not found'), 'not found') 73 | 74 | def test_thread_context_set(self): 75 | ctx = ThreadMappingContext() 76 | 77 | ctx['a'] = 1 78 | self.assertEqual(ctx, {'a': 1}) 79 | 80 | ctx.set('b', 2) 81 | self.assertEqual(ctx, {'a': 1, 'b': 2}) 82 | 83 | def test_thread_context_del(self): 84 | ctx = ThreadMappingContext() 85 | ctx.init({'a': 1, 'b': 2, 'c': 3}) 86 | 87 | del ctx['a'] 88 | self.assertEqual(ctx, {'b': 2, 'c': 3}) 89 | 90 | ctx.delete('b') 91 | self.assertEqual(ctx, {'c': 3}) 92 | 93 | def test_thread_context_clear(self): 94 | ctx = ThreadMappingContext() 95 | ctx.init({'a': 1, 'b': 2, 'c': 3}) 96 | 97 | ctx.clear() 98 | self.assertEqual(ctx, {}) 99 | 100 | def test_thread_context_contains(self): 101 | ctx = ThreadMappingContext() 102 | ctx.init({'a': 1, 'b': 2}) 103 | 104 | self.assertTrue('a' in ctx) 105 | self.assertFalse('c' in ctx) 106 | 107 | def test_thread_context_iter(self): 108 | ctx = ThreadMappingContext() 109 | ctx.init({'a': 1, 'b': 2}) 110 | 111 | for k, v in ctx.items(): 112 | self.assertIn(k, ['a', 'b']) 113 | if k == 'a': 114 | self.assertEqual(v, 1) 115 | elif k == 'b': 116 | self.assertEqual(v, 2) 117 | 
else: 118 | self.fail(f'Invalid key {k}') 119 | 120 | self.assertListEqual(list(ctx.keys()), ['a', 'b']) 121 | 122 | self.assertListEqual(list(ctx.values()), [1, 2]) 123 | 124 | def test_thread_context_str(self): 125 | ctx = ThreadMappingContext() 126 | ctx.init({'a': 1, 'b': 2, 'c': 'my string'}) 127 | self.assertEqual(str(ctx), "{'a': 1, 'b': 2, 'c': 'my string'}") 128 | 129 | def test_thread_context_local_data(self): 130 | ctx = ThreadMappingContext() 131 | ctx['thread'] = 'main' 132 | results = {} 133 | 134 | def worker(): 135 | assert 'thread' not in ctx 136 | ctx['thread'] = 'worker' 137 | 138 | t = Thread(target=worker) 139 | t.start() 140 | t.join() 141 | 142 | assert ctx['thread'] == 'main' 143 | 144 | 145 | class LoggingContextTest(unittest.TestCase): 146 | 147 | def tearDown(self): 148 | super().tearDown() 149 | remove_logging_context() 150 | self.assertEqual(initial_factory, logging.getLogRecordFactory()) 151 | 152 | def test_logging_context_empty(self): 153 | set_logging_context() 154 | factory_with_context = logging.getLogRecordFactory() 155 | 156 | # Make sure the factory has been changed 157 | self.assertNotEqual(initial_factory, factory_with_context) 158 | 159 | # Create a dummy log to check the contex 160 | record = create_dummy_log(factory_with_context) 161 | self.assertTrue(hasattr(record, 'context')) 162 | self.assertIn('context', record.__dict__) 163 | self.assertEqual({}, record.context) 164 | 165 | def test_logging_context_set(self): 166 | ctx1 = {'a': 1} 167 | set_logging_context(ctx1) 168 | record = create_dummy_log(logging.getLogRecordFactory()) 169 | self.assertEqual(ctx1, record.context) 170 | 171 | ctx2 = {'a': 2} 172 | set_logging_context(ctx2) 173 | record = create_dummy_log(logging.getLogRecordFactory()) 174 | self.assertEqual(ctx2, record.context) 175 | 176 | def test_logging_context_get(self): 177 | self.assertIsNone(get_logging_context()) 178 | 179 | ctx1 = {'a': 1} 180 | set_logging_context(ctx1) 181 | 182 | 
self.assertEqual(ctx1, get_logging_context()) 183 | 184 | def test_logging_context_modify(self): 185 | set_logging_context() 186 | factory_with_context = logging.getLogRecordFactory() 187 | 188 | record = create_dummy_log(factory_with_context) 189 | self.assertTrue(hasattr(record, 'context')) 190 | self.assertIn('context', record.__dict__) 191 | self.assertEqual(record.context, {}) 192 | 193 | # Modify the context 194 | context = get_logging_context() 195 | context['a'] = 'added a string' 196 | record = create_dummy_log(factory_with_context) 197 | self.assertTrue(hasattr(record, 'context')) 198 | self.assertIn('context', record.__dict__) 199 | self.assertEqual(record.context, context) 200 | self.assertEqual(record.__dict__['context'], context) 201 | 202 | # Modify the context again 203 | context['a'] = 1 204 | context['b'] = 2 205 | self.assertEqual(record.context, context) 206 | 207 | def test_logging_context_set_in_thread(self): 208 | set_logging_context() 209 | contexts = {'1': {'a': 1}, '2': {'a': 2}, '3': {'b': 3}} 210 | 211 | def _test_logging_context_thread(ctx_id): 212 | set_logging_context(contexts[ctx_id]) 213 | record = create_dummy_log(logging.getLogRecordFactory()) 214 | self.assertTrue(hasattr(record, 'context')) 215 | self.assertEqual(contexts[ctx_id], record.context) 216 | return str(record.context) 217 | 218 | with ThreadPoolExecutor(max_workers=len(contexts)) as executor: 219 | future_to_ctx_id = { 220 | executor.submit(_test_logging_context_thread, ctx_id): ctx_id for ctx_id in contexts 221 | } 222 | for future in as_completed(future_to_ctx_id): 223 | ctx_id = future_to_ctx_id[future] 224 | try: 225 | context = future.result() 226 | except Exception as exception: # pylint: disable=broad-except 227 | self.fail(f'Excpetion {exception} raised in thread') 228 | self.assertEqual(str(contexts[ctx_id]), context) 229 | 230 | def test_logging_context_with_custom_log_record(self): 231 | record = create_dummy_log(logging.getLogRecordFactory()) 232 | with 
self.assertRaises(KeyError): 233 | record.__dict__['non_existant_attribute'] # pylint: disable=pointless-statement 234 | 235 | set_log_record_ignore_missing_factory() 236 | record = create_dummy_log(logging.getLogRecordFactory()) 237 | self.assertEqual(record.__dict__['non_existant_attribute'], '') 238 | 239 | set_logging_context({'a': 'my-context'}) 240 | record = create_dummy_log(logging.getLogRecordFactory()) 241 | self.assertTrue(hasattr(record, 'context')) 242 | self.assertEqual(record.context, {'a': 'my-context'}) 243 | self.assertEqual(record.__dict__['non_existant_attribute'], '') 244 | reset_log_record_factory() 245 | 246 | record = create_dummy_log(logging.getLogRecordFactory()) 247 | self.assertFalse(hasattr(record, 'context')) 248 | 249 | def test_logging_context_logger_standard_fmt(self): 250 | set_logging_context({'a': 'my-context'}) 251 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 252 | logger = logging.getLogger('test_formatter') 253 | logger.setLevel(logging.DEBUG) 254 | 255 | for handler in logger.handlers: 256 | formatter = logging.Formatter("%(message)s - %(context)s") 257 | handler.setFormatter(formatter) 258 | 259 | logger.debug('My message with context') 260 | self.assertEqual(ctx.output[0], "My message with context - {'a': 'my-context'}") 261 | 262 | def test_logging_context_logger_json_fmt(self): 263 | set_logging_context({'a': 'my-context'}) 264 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 265 | logger = logging.getLogger('test_formatter') 266 | logger.setLevel(logging.DEBUG) 267 | 268 | for handler in logger.handlers: 269 | formatter = JsonFormatter( 270 | { 271 | "message": "message", "context": "context" 272 | }, 273 | default=dict, 274 | ) 275 | handler.setFormatter(formatter) 276 | 277 | logger.debug('My message with context') 278 | self.assertEqual( 279 | ctx.output[0], '{"message": "My message with context", "context": {"a": "my-context"}}' 280 | ) 281 | 282 | def 
test_logging_context_logger_json_fmt_sub_element(self): 283 | set_logging_context({'a': 'my-context'}) 284 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 285 | logger = logging.getLogger('test_formatter') 286 | logger.setLevel(logging.DEBUG) 287 | 288 | for handler in logger.handlers: 289 | formatter = JsonFormatter( 290 | { 291 | "message": "message", "context": "context.a" 292 | }, 293 | default=dict, 294 | ) 295 | handler.setFormatter(formatter) 296 | 297 | logger.debug('My message with context') 298 | self.assertEqual( 299 | ctx.output[0], '{"message": "My message with context", "context": "my-context"}' 300 | ) 301 | -------------------------------------------------------------------------------- /tests/test_extra_formatter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import unittest 3 | from collections import OrderedDict 4 | 5 | from logging_utilities.formatters.extra_formatter import ExtraFormatter 6 | 7 | 8 | class ExtraFormatterTest(unittest.TestCase): 9 | maxDiff = None 10 | 11 | @classmethod 12 | def _configure_logger( 13 | cls, 14 | logger, 15 | fmt=None, 16 | extra_fmt=None, 17 | extra_default='', 18 | extra_pretty_print=False, 19 | pretty_print_kwargs=None 20 | ): 21 | logger.setLevel(logging.DEBUG) 22 | 23 | for handler in logger.handlers: 24 | formatter = ExtraFormatter( 25 | fmt, 26 | extra_default=extra_default, 27 | extra_fmt=extra_fmt, 28 | extra_pretty_print=extra_pretty_print, 29 | pretty_print_kwargs=pretty_print_kwargs 30 | ) 31 | handler.setFormatter(formatter) 32 | 33 | def test_missing_extra(self): 34 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 35 | logger = logging.getLogger('test_formatter') 36 | self._configure_logger(logger, fmt="%(message)s", extra_fmt=':%(extra1)s') 37 | logger.info('Simple message') 38 | logger.info('Composed message: %s', 'this is a composed message') 39 | logger.info('Simple message with extra', 
    def test_missing_extra_default_none(self):
        # extra_default=None: attributes missing from `extra` render as the
        # literal string "None" instead of the default empty string.
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            logger = logging.getLogger('test_formatter')
            self._configure_logger(
                logger, fmt="%(message)s", extra_fmt=':%(extra1)s:%(extra2)s', extra_default=None
            )
            logger.info('Simple message')
            logger.info('Composed message: %s', 'this is a composed message')
            logger.info('Simple message with extra', extra={'extra1': 23})
            logger.info('Composed message %s', 'with extra', extra={'extra1': 23})
        self.assertEqual(ctx.output[0], 'Simple message')
        self.assertEqual(ctx.output[1], 'Composed message: this is a composed message')
        self.assertEqual(ctx.output[2], 'Simple message with extra:23:None')
        self.assertEqual(ctx.output[3], 'Composed message with extra:23:None')

    def test_extra_format_as_dict(self):
        # A bare %s in extra_fmt receives the whole extra dict.
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            logger = logging.getLogger('test_formatter')
            self._configure_logger(logger, fmt="%(message)s", extra_fmt=':extra=%s')
            logger.info('Simple message')
            logger.info('Composed message: %s', 'this is a composed message')
            logger.info('Simple message with extra', extra={'extra2': 1})
            logger.info('Composed message %s', 'with extra', extra={'extra2': 1})
        self.assertEqual(ctx.output[0], 'Simple message')
        self.assertEqual(ctx.output[1], 'Composed message: this is a composed message')
        self.assertEqual(ctx.output[2], 'Simple message with extra:extra={\'extra2\': 1}')
        self.assertEqual(ctx.output[3], 'Composed message with extra:extra={\'extra2\': 1}')

    def test_extra_format_as_dict_duplicate_extra(self):
        # Make sure to not replicate extra in the dict that have been added to the standard format
        with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx:
            logger = logging.getLogger('test_formatter')
            self._configure_logger(
                logger, fmt="%(message)s - %(extra1)s", extra_fmt=' - other_extra=%s'
            )
            logger.info('Simple message with extra', extra={'extra1': 1, 'extra2': 2})
            logger.info('Composed message %s', 'with extra', extra={'extra1': 1, 'extra2': 2})
        self.assertEqual(
            ctx.output[0], 'Simple message with extra - 1 - other_extra={\'extra2\': 2}'
        )
        self.assertEqual(
            ctx.output[1], 'Composed message with extra - 1 - other_extra={\'extra2\': 2}'
        )
extra using pretty print:extra={'extra1': ['', 116 | 'test ', 117 | 'test test ', 118 | 'test test test ', 119 | 'test test test test '], 120 | 'extra2': {'extra2.1': ['', 121 | 'test ', 122 | 'test test ', 123 | 'test test test ', 124 | 'test test test test ']}}""" 125 | ) 126 | 127 | def test_extra_format_as_dict_pretty_print_with_args(self): 128 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 129 | logger = logging.getLogger('test_formatter') 130 | self._configure_logger( 131 | logger, 132 | fmt="%(message)s", 133 | extra_fmt=':extra=%s', 134 | extra_pretty_print=True, 135 | pretty_print_kwargs={ 136 | 'indent': 2, 'width': 20 137 | } 138 | ) 139 | logger.info('Simple message') 140 | logger.info('Composed message: %s', 'this is a composed message') 141 | logger.info('Simple message with extra', extra={'extra2': 1}) 142 | logger.info('Composed message %s', 'with extra', extra={'extra2': 1}) 143 | logger.info( 144 | 'Big extra using pretty print', 145 | extra=OrderedDict([ 146 | ('extra1', list(map(lambda i: 'test ' * i, range(5)))), 147 | ('extra2', { 148 | 'extra2.1': list(map(lambda i: 'test ' * i, range(5))) 149 | }), 150 | ]) 151 | ) 152 | self.assertEqual(ctx.output[0], 'Simple message') 153 | self.assertEqual(ctx.output[1], 'Composed message: this is a composed message') 154 | self.assertEqual(ctx.output[2], 'Simple message with extra:extra={\'extra2\': 1}') 155 | self.assertEqual(ctx.output[3], 'Composed message with extra:extra={\'extra2\': 1}') 156 | # yapf: disable 157 | self.assertEqual( 158 | ctx.output[4], 159 | """Big extra using pretty print:extra={ 'extra1': [ '', 160 | 'test ', 161 | 'test ' 162 | 'test ', 163 | 'test ' 164 | 'test ' 165 | 'test ', 166 | 'test ' 167 | 'test ' 168 | 'test ' 169 | 'test '], 170 | 'extra2': { 'extra2.1': [ '', 171 | 'test ', 172 | 'test ' 173 | 'test ', 174 | 'test ' 175 | 'test ' 176 | 'test ', 177 | 'test ' 178 | 'test ' 179 | 'test ' 180 | 'test ']}}""" 181 | ) 182 | # yapf: enable 183 | 184 
| def test_extra_format_custom(self): 185 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 186 | logger = logging.getLogger('test_formatter') 187 | self._configure_logger( 188 | logger, fmt="%(message)s", extra_fmt=':extra2=%(extra2)s:extra3=%(extra3)s' 189 | ) 190 | logger.info('Simple message') 191 | logger.info('Composed message: %s', 'this is a composed message') 192 | logger.info('Simple message with extra', extra={'extra1': 23, 'extra2': 1}) 193 | logger.info( 194 | 'Composed message %s', 195 | 'with extra', 196 | extra={ 197 | 'extra1': 23, 'extra2': 1, 'extra3': 'test' 198 | } 199 | ) 200 | self.assertEqual(ctx.output[0], 'Simple message') 201 | self.assertEqual(ctx.output[1], 'Composed message: this is a composed message') 202 | self.assertEqual(ctx.output[2], 'Simple message with extra:extra2=1:extra3=') 203 | self.assertEqual(ctx.output[3], 'Composed message with extra:extra2=1:extra3=test') 204 | 205 | def test_extra_format_custom_default_none(self): 206 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 207 | logger = logging.getLogger('test_formatter') 208 | self._configure_logger( 209 | logger, 210 | fmt="%(message)s", 211 | extra_fmt=':extra2=%(extra2)s:extra3=%(extra3)s', 212 | extra_default=None 213 | ) 214 | logger.info('Simple message') 215 | logger.info('Composed message: %s', 'this is a composed message') 216 | logger.info('Simple message with extra', extra={'extra1': 23, 'extra2': 1}) 217 | logger.info( 218 | 'Composed message %s', 219 | 'with extra', 220 | extra={ 221 | 'extra1': 23, 'extra2': 1, 'extra3': 'test' 222 | } 223 | ) 224 | self.assertEqual(ctx.output[0], 'Simple message') 225 | self.assertEqual(ctx.output[1], 'Composed message: this is a composed message') 226 | self.assertEqual(ctx.output[2], 'Simple message with extra:extra2=1:extra3=None') 227 | self.assertEqual(ctx.output[3], 'Composed message with extra:extra2=1:extra3=test') 228 | 229 | def test_extra_format_custom_pretty_print(self): 
230 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 231 | logger = logging.getLogger('test_formatter') 232 | self._configure_logger( 233 | logger, 234 | fmt="%(message)s", 235 | extra_fmt=':extra2=%(extra2)s:extra3=%(extra3)s', 236 | extra_pretty_print=True 237 | ) 238 | logger.info('Simple message') 239 | logger.info('Composed message: %s', 'this is a composed message') 240 | with self.assertRaises(ValueError): 241 | logger.info('Simple message with extra', extra={'extra1': 23, 'extra2': 1}) 242 | logger.info( 243 | 'Composed message %s', 244 | 'with extra', 245 | extra={ 246 | 'extra1': 23, 'extra2': 1, 'extra3': 'test' 247 | } 248 | ) 249 | self.assertEqual(ctx.output[0], 'Simple message') 250 | self.assertEqual(ctx.output[1], 'Composed message: this is a composed message') 251 | -------------------------------------------------------------------------------- /tests/test_flask_attribute.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import unittest 3 | from logging import Formatter 4 | from urllib.parse import quote 5 | 6 | from flask import Flask 7 | 8 | from logging_utilities.filters.flask_attribute import FlaskRequestAttribute 9 | from logging_utilities.log_record import reset_log_record_factory 10 | from logging_utilities.log_record import set_log_record_ignore_missing_factory 11 | 12 | app = Flask(__name__) 13 | 14 | FLASK_DEFAULT_FMT = "%(levelname)s:%(message)s:%(flask_request_url)s:%(flask_request_json)s:" \ 15 | "%(flask_request_query_string)s" 16 | FLASK_DEFAULT_ATTRIBUTES = ['url', 'method', 'headers', 'json', 'query_string'] 17 | 18 | 19 | class FlaskAttributeTest(unittest.TestCase): 20 | 21 | def setUp(self): 22 | super().setUp() 23 | self.maxDiff = None 24 | 25 | @classmethod 26 | def _configure_flask_attribute(cls, logger, fmt, flask_attributes): 27 | logger.setLevel(logging.DEBUG) 28 | 29 | for handler in logger.handlers: 30 | flask_attribute = 
FlaskRequestAttribute(attributes=flask_attributes) 31 | handler.addFilter(flask_attribute) 32 | handler.setFormatter(Formatter(fmt)) 33 | 34 | def test_empty_flask_attribute_no_context(self): 35 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 36 | logger = logging.getLogger('test_formatter') 37 | self._configure_flask_attribute(logger, FLASK_DEFAULT_FMT, FLASK_DEFAULT_ATTRIBUTES) 38 | reset_log_record_factory() 39 | with self.assertRaises((ValueError, KeyError)): 40 | logger.info('Simple message') 41 | 42 | def test_empty_flask_attribute_no_context_ignore(self): 43 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 44 | logger = logging.getLogger('test_formatter') 45 | self._configure_flask_attribute(logger, FLASK_DEFAULT_FMT, FLASK_DEFAULT_ATTRIBUTES) 46 | set_log_record_ignore_missing_factory() 47 | logger.info('Simple message') 48 | logger.info('Composed message: %s', 'this is a composed message') 49 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 50 | reset_log_record_factory() 51 | self.assertEqual( 52 | ctx.output, 53 | [ 54 | 'INFO:Simple message:::', 55 | 'INFO:Composed message: this is a composed message:::', 56 | 'INFO:Composed message with extra:::' 57 | ] 58 | ) 59 | 60 | def test_flask_attribute_json_data(self): 61 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 62 | logger = logging.getLogger('test_formatter') 63 | self._configure_flask_attribute(logger, FLASK_DEFAULT_FMT, FLASK_DEFAULT_ATTRIBUTES) 64 | 65 | with app.test_request_context('/make_report/2017', data={'format': 'short'}): 66 | logger.info('Simple message') 67 | logger.info('Composed message: %s', 'this is a composed message') 68 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 69 | 70 | with app.test_request_context('/make_report/2017', data=''): 71 | logger.info('Simple message') 72 | 73 | with app.test_request_context( 74 | '/make_report/2017', data='non json data', 
content_type='application/json' 75 | ): 76 | logger.info('Simple message') 77 | 78 | with app.test_request_context( 79 | '/make_report/2017', data='{}', content_type='application/json' 80 | ): 81 | logger.info('Simple message') 82 | 83 | with app.test_request_context( 84 | '/make_report/2017', 85 | data='{"jsonData": "this is a json data"}', 86 | content_type='application/json' 87 | ): 88 | logger.info('Simple message') 89 | self.assertEqual( 90 | ctx.output, 91 | [ 92 | # pylint: disable=line-too-long 93 | 'INFO:Simple message:http://localhost/make_report/2017::', 94 | 'INFO:Composed message: this is a composed message:http://localhost/make_report/2017::', 95 | 'INFO:Composed message with extra:http://localhost/make_report/2017::', 96 | 'INFO:Simple message:http://localhost/make_report/2017::', 97 | "INFO:Simple message:http://localhost/make_report/2017:non json data:", 98 | 'INFO:Simple message:http://localhost/make_report/2017:{}:', 99 | "INFO:Simple message:http://localhost/make_report/2017:{'jsonData': 'this is a json data'}:", 100 | ] 101 | ) 102 | 103 | def test_flask_attribute_query_string(self): 104 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 105 | logger = logging.getLogger('test_formatter') 106 | self._configure_flask_attribute(logger, FLASK_DEFAULT_FMT, FLASK_DEFAULT_ATTRIBUTES) 107 | with app.test_request_context('/make_report/2017?param1=value1'): 108 | logger.info('Simple message') 109 | logger.info('Composed message: %s', 'this is a composed message') 110 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 111 | 112 | with app.test_request_context('/make_report/2017?param1=value1&param2=value2'): 113 | logger.info('Simple message') 114 | 115 | with app.test_request_context( 116 | '/make_report/2017?param1={}'.format(quote("This a string ?")) 117 | ): 118 | logger.info('Simple message') 119 | 120 | self.assertEqual( 121 | ctx.output, 122 | [ 123 | # pylint: disable=line-too-long 124 | 'INFO:Simple
message:http://localhost/make_report/2017?param1=value1::param1=value1', 125 | 'INFO:Composed message: this is a composed message:http://localhost/make_report/2017?param1=value1::param1=value1', 126 | 'INFO:Composed message with extra:http://localhost/make_report/2017?param1=value1::param1=value1', 127 | 'INFO:Simple message:http://localhost/make_report/2017?param1=value1&param2=value2::param1=value1&param2=value2', 128 | 'INFO:Simple message:http://localhost/make_report/2017?param1=This%20a%20string%20?::param1=This%20a%20string%20%3F', 129 | ] 130 | ) 131 | 132 | def test_flask_attribute_data(self): 133 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 134 | logger = logging.getLogger('test_formatter') 135 | self._configure_flask_attribute(logger, "%(message)s:%(flask_request_data)s", ['data']) 136 | data = "arbitrary data " 137 | with app.test_request_context( 138 | '/make_report/2017', data=data, headers={"Content-Type": "text/plain"} 139 | ): 140 | logger.info('Simple message') 141 | logger.info('Composed message: %s', 'this is a composed message') 142 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 143 | 144 | self.assertEqual( 145 | ctx.output, 146 | [ 147 | 'Simple message:{}'.format(data), 148 | 'Composed message: this is a composed message:{}'.format(data), 149 | 'Composed message with extra:{}'.format(data), 150 | ] 151 | ) 152 | 153 | def test_flask_attribute_form(self): 154 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 155 | logger = logging.getLogger('test_formatter') 156 | self._configure_flask_attribute( 157 | logger, 158 | "%(message)s:%(flask_request_form)s:%(flask_request_mimetype)s", 159 | ['form', 'mimetype'] 160 | ) 161 | form = {'key1': 'value2', 'key2': '234'} 162 | with app.test_request_context('/make_report/2017', data=form): 163 | logger.info('Simple message') 164 | logger.info('Composed message: %s', 'this is a composed message') 165 | logger.info('Composed message %s', 'with
extra', extra={'extra1': 23}) 166 | 167 | self.assertEqual( 168 | ctx.output, 169 | [ 170 | 'Simple message:{}:application/x-www-form-urlencoded'.format(form), 171 | 'Composed message: this is a composed message:{}:application/x-www-form-urlencoded'. 172 | format(form), 173 | 'Composed message with extra:{}:application/x-www-form-urlencoded'.format(form), 174 | ] 175 | ) 176 | 177 | def test_flask_attribute_args(self): 178 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 179 | logger = logging.getLogger('test_formatter') 180 | self._configure_flask_attribute(logger, "%(message)s:%(flask_request_args)s", ['args']) 181 | 182 | with app.test_request_context('/make_report/2017?key1=value1&key2={"a":2}'): 183 | logger.info('Simple message') 184 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 185 | 186 | self.assertEqual( 187 | ctx.output, 188 | [ 189 | "Simple message:{'key1': 'value1', 'key2': '{\"a\":2}'}", 190 | "Composed message with extra:{'key1': 'value1', 'key2': '{\"a\":2}'}", 191 | ] 192 | ) 193 | 194 | def test_flask_attribute_mimetype(self): 195 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 196 | logger = logging.getLogger('test_formatter') 197 | self._configure_flask_attribute( 198 | logger, 199 | "%(message)s:%(flask_request_mimetype)s:%(flask_request_mimetype_params)s", 200 | ['mimetype', 'mimetype_params'] 201 | ) 202 | data = "arbitrary data " 203 | with app.test_request_context( 204 | '/make_report/2017', data=data, headers={"Content-Type": "text/plain"} 205 | ): 206 | logger.info('Simple message') 207 | logger.info('Composed message: %s', 'this is a composed message') 208 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 209 | 210 | with app.test_request_context( 211 | '/make_report/2017', 212 | data=data, 213 | headers={"Content-Type": "text/plain; charset=utf-8"} 214 | ): 215 | logger.info('Simple message') 216 | logger.info('Composed message: %s', 'this is a 
composed message') 217 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 218 | 219 | self.assertEqual( 220 | ctx.output, 221 | [ 222 | 'Simple message:text/plain:{}', 223 | 'Composed message: this is a composed message:text/plain:{}', 224 | 'Composed message with extra:text/plain:{}', 225 | "Simple message:text/plain:{'charset': 'utf-8'}", 226 | "Composed message: this is a composed message:text/plain:{'charset': 'utf-8'}", 227 | "Composed message with extra:text/plain:{'charset': 'utf-8'}", 228 | ] 229 | ) 230 | 231 | def test_flask_attribute_view_args(self): 232 | with self.assertLogs('test_formatter', level=logging.DEBUG) as ctx: 233 | logger = logging.getLogger('test_formatter') 234 | self._configure_flask_attribute( 235 | logger, "%(message)s:%(flask_request_view_args)s", ['view_args'] 236 | ) 237 | 238 | # Handler required by add_url_rule function 239 | def handle_time(time): 240 | return 241 | 242 | # Request without view_args 243 | with app.test_request_context('/make_report'): 244 | logger.info('Simple message') 245 | logger.info('Composed message: %s', 'this is a composed message') 246 | logger.info('Composed message %s', 'with extra', extra={'extra1': 23}) 247 | 248 | # Request with view args 249 | app.add_url_rule('/make_report/