├── tests
├── __init__.py
├── test_deprecation.py
├── test_missing.py
├── test_dictconfig.py
└── test_formatters.py
├── SECURITY.md
├── CODE_OF_CONDUCT.md
├── mypy.ini
├── MANIFEST.in
├── src
└── pythonjsonlogger
│ ├── py.typed
│ ├── __init__.py
│ ├── jsonlogger.py
│ ├── exception.py
│ ├── utils.py
│ ├── msgspec.py
│ ├── orjson.py
│ ├── json.py
│ ├── defaults.py
│ └── core.py
├── NOTICE
├── .gitignore
├── tox.ini
├── docs
├── security.md
├── index.md
├── quickstart.md
├── contributing.md
├── style-guide.md
├── cookbook.md
└── changelog.md
├── .github
└── workflows
│ └── test-suite.yml
├── scripts
└── gen_ref_nav.py
├── LICENSE
├── README.md
├── pyproject.toml
├── mkdocs.yml
└── pylintrc
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | docs/security.md
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | docs/contributing.md
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 |
3 | [mypy-orjson.*]
4 | ignore_missing_imports = True
5 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include README.md
3 | recursive-include tests *.py
4 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/py.typed:
--------------------------------------------------------------------------------
1 | # PEP-561 marker. https://mypy.readthedocs.io/en/latest/installed_packages.html
2 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | This software includes the following licenced software:
2 | - mkdocstrings-python
3 | Copyright (c) 2021, Timothée Mazzucotelli
4 | Licenced under ISC Licence
5 | Source: https://github.com/mkdocstrings/python
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | *.swp
3 | build
4 | dist
5 | dist_uploaded
6 | *.egg-info
7 |
8 | # Tests and validation
9 | .tox/
10 | .mypy_cache
11 |
12 | # Python's venv
13 | .env
14 | .venv
15 | env
16 |
17 | # IDE
18 | .vscode
19 | .idea
20 |
21 | # generated docs
22 | site
23 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/__init__.py:
--------------------------------------------------------------------------------
1 | ### IMPORTS
2 | ### ============================================================================
3 | ## Future
4 |
5 | ## Standard Library
6 | import warnings
7 |
8 | ## Installed
9 |
10 | ## Application
11 | from . import json
12 | from . import utils
13 |
14 | ### CONSTANTS
15 | ### ============================================================================
16 | ORJSON_AVAILABLE = utils.package_is_available("orjson")
17 | MSGSPEC_AVAILABLE = utils.package_is_available("msgspec")
18 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/jsonlogger.py:
--------------------------------------------------------------------------------
1 | """Stub module retained for compatibility.
2 |
3 | It retains access to old names whilst sending deprecation warnings.
4 | """
5 |
6 | # pylint: disable=wrong-import-position,unused-import
7 |
8 | import warnings
9 |
10 | ## Throw warning
11 | warnings.warn(
12 | "pythonjsonlogger.jsonlogger has been moved to pythonjsonlogger.json",
13 | DeprecationWarning,
14 | )
15 |
16 | ## Import names
17 | from .json import JsonFormatter, JsonEncoder
18 | from .core import RESERVED_ATTRS
19 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | requires = tox>=3,tox-uv
3 | envlist = py{38,39,310,311,312,313,314}, pypy{38,39,310,311}
4 |
5 | [testenv]
6 | description = run unit tests
7 | extras = dev
8 | commands =
9 | pytest tests
10 |
11 | [testenv:format]
12 | description = run formatters
13 | extras = dev
14 | commands =
15 | black src tests
16 |
17 | [testenv:lint]
18 | description = run linters
19 | extras = dev
20 | commands =
21 | validate-pyproject pyproject.toml
22 | black --check --diff src tests
23 | pylint src
24 | mypy src tests
25 |
--------------------------------------------------------------------------------
/docs/security.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | Security support for Python JSON Logger is provided for all [security supported versions of Python](https://endoflife.date/python) and for unsupported versions of Python where [recent downloads over the last 90 days exceeds 5% of all downloads](https://pypistats.org/packages/python-json-logger).
6 |
7 |
8 | As of 2024-04-24 security support is provided for Python versions `3.8+`.
9 |
10 |
11 | ## Reporting a Vulnerability
12 |
13 | Please report vulnerabilities [using GitHub](https://github.com/nhairs/python-json-logger/security/advisories/new).
14 |
--------------------------------------------------------------------------------
/.github/workflows/test-suite.yml:
--------------------------------------------------------------------------------
1 | name: Test python-json-logger
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | pull_request:
9 | branches:
10 | - main
11 |
12 | jobs:
13 | lint:
14 | name: "Python Lint"
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v4
18 |
19 | - uses: astral-sh/setup-uv@v3
20 |
21 | - name: Lint with tox
22 | run: uvx tox -e lint
23 |
24 | test:
25 | name: "Python Test ${{ matrix.os }}"
26 | needs: [lint]
27 | runs-on: "${{ matrix.os }}"
28 | strategy:
29 | fail-fast: false # allow tests to run on all platforms
30 | matrix:
31 | os:
32 | - ubuntu-latest
33 | - windows-latest
34 | - macos-latest
35 |
36 | steps:
37 | - uses: actions/checkout@v4
38 | - uses: astral-sh/setup-uv@v3
39 |
40 | - name: Test with tox
41 | run: uvx tox
42 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/exception.py:
--------------------------------------------------------------------------------
1 | ### IMPORTS
2 | ### ============================================================================
3 | ## Future
4 | from __future__ import annotations
5 |
6 | ## Standard Library
7 |
8 | ## Installed
9 |
10 | ## Application
11 |
12 |
13 | ### CLASSES
14 | ### ============================================================================
class PythonJsonLoggerError(Exception):
    "Generic base class for all Python JSON Logger exceptions"
17 |
18 |
class MissingPackageError(ImportError, PythonJsonLoggerError):
    "A required package is missing"

    def __init__(self, name: str, extras_name: str | None = None) -> None:
        """
        Args:
            name: import name of the missing package
            extras_name: optional extras group that would install the package
        """
        # Assemble the message parts, appending the install hint only when an
        # extras group is known.
        parts = [f"The {name!r} package is required but could not be found."]
        if extras_name is not None:
            parts.append(f"It can be installed using 'python-json-logger[{extras_name}]'.")
        super().__init__(" ".join(parts))
        return
28 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/utils.py:
--------------------------------------------------------------------------------
1 | """Utilities for Python JSON Logger"""
2 |
3 | ### IMPORTS
4 | ### ============================================================================
5 | ## Future
6 | from __future__ import annotations
7 |
8 | ## Standard Library
9 | import importlib.util
10 |
11 | ## Installed
12 |
13 | ## Application
14 | from .exception import MissingPackageError
15 |
16 |
17 | ### FUNCTIONS
18 | ### ============================================================================
def package_is_available(
    name: str, *, throw_error: bool = False, extras_name: str | None = None
) -> bool:
    """Check whether a package can be imported in the current environment.

    Args:
        name: Import name of the package to check.
        throw_error: Throw an error if the package is unavailable.
        extras_name: Extra dependency name to use in `throw_error`'s message.

    Raises:
        MissingPackageError: When `throw_error` is `True` and the return value would be `False`

    Returns:
        If the package is available for import.
    """
    # find_spec returns None when no importable spec exists for the name.
    if importlib.util.find_spec(name) is not None:
        return True
    if throw_error:
        raise MissingPackageError(name, extras_name)
    return False
41 |
--------------------------------------------------------------------------------
/scripts/gen_ref_nav.py:
--------------------------------------------------------------------------------
1 | # NOTICE: This file is from mkdocstrings-python see NOTICE for details
2 | """Generate the code reference pages and navigation."""
3 |
4 | from pathlib import Path
5 |
6 | import mkdocs_gen_files
7 |
nav = mkdocs_gen_files.Nav()
mod_symbol = ''  # prefix shown before each module name in the nav (currently empty)

# Walk every Python module under src/ and generate a stub doc page for each.
for path in sorted(Path("src").rglob("*.py")):
    module_path = path.relative_to("src").with_suffix("")  # module path without .py
    doc_path = path.relative_to("src").with_suffix(".md")  # page path within the reference docs
    full_doc_path = Path("reference", doc_path)

    parts = tuple(module_path.parts)

    if parts[-1] == "__init__":
        # A package __init__ becomes the package's index page.
        parts = parts[:-1]
        doc_path = doc_path.with_name("index.md")
        full_doc_path = full_doc_path.with_name("index.md")
    elif parts[-1].startswith("_"):
        # Skip private modules.
        continue

    nav_parts = [f"{mod_symbol} {part}" for part in parts]
    nav[tuple(nav_parts)] = doc_path.as_posix()

    # Each generated page is a single mkdocstrings directive for the module.
    with mkdocs_gen_files.open(full_doc_path, "w") as fd:
        ident = ".".join(parts)
        fd.write(f"::: {ident}")

    # Point the "edit this page" link back at the real source file.
    mkdocs_gen_files.set_edit_path(full_doc_path, ".." / path)

# Emit the navigation summary consumed by the literate-nav plugin.
with mkdocs_gen_files.open("reference/SUMMARY.txt", "w") as nav_file:
    nav_file.writelines(nav.build_literate_nav())
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2011, Zakaria Zajac and the python-json-logger Contributors
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
5 |
6 | * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
7 | * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8 |
9 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
10 |
--------------------------------------------------------------------------------
/tests/test_deprecation.py:
--------------------------------------------------------------------------------
1 | ### IMPORTS
2 | ### ============================================================================
3 | ## Future
4 | from __future__ import annotations
5 |
6 | ## Standard Library
7 | import subprocess
8 | import sys
9 |
10 | ## Installed
11 | import pytest
12 |
13 | ## Application
14 | import pythonjsonlogger
15 |
16 |
17 | ### TESTS
18 | ### ============================================================================
19 | def test_jsonlogger_deprecated():
20 | with pytest.deprecated_call():
21 | import pythonjsonlogger.jsonlogger
22 | return
23 |
24 |
25 | def test_jsonlogger_reserved_attrs_deprecated():
26 | with pytest.deprecated_call():
27 | # Note: We use json instead of jsonlogger as jsonlogger will also produce
28 | # a DeprecationWarning and we specifically want the one for RESERVED_ATTRS
29 | pythonjsonlogger.json.RESERVED_ATTRS
30 | return
31 |
32 |
33 | @pytest.mark.parametrize(
34 | "command",
35 | [
36 | "from pythonjsonlogger import jsonlogger",
37 | "import pythonjsonlogger.jsonlogger",
38 | "from pythonjsonlogger.jsonlogger import JsonFormatter",
39 | "from pythonjsonlogger.jsonlogger import RESERVED_ATTRS",
40 | ],
41 | )
42 | def test_import(command: str):
43 | output = subprocess.check_output([sys.executable, "-c", f"{command};print('OK')"])
44 | assert output.strip() == b"OK"
45 | return
46 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://pypi.python.org/pypi/python-json-logger/)
2 | [](https://pypi.python.org/pypi/python-json-logger/)
3 | [](https://pypi.python.org/pypi/python-json-logger/)
4 | [](https://github.com/nhairs/python-json-logger)
5 | [](https://github.com/nhairs/python-json-logger)
6 | 
7 | #
8 | # Python JSON Logger
9 |
10 | Python JSON Logger enables you to produce JSON logs when using Python's `logging` package.
11 |
12 | JSON logs are machine readable allowing for much easier parsing and ingestion into log aggregation tools.
13 |
14 |
15 | ## Documentation
16 |
17 | - [Documentation](https://nhairs.github.io/python-json-logger/latest/)
18 | - [Quickstart Guide](https://nhairs.github.io/python-json-logger/latest/quickstart/)
19 | - [Change Log](https://nhairs.github.io/python-json-logger/latest/changelog/)
20 | - [Contributing](https://nhairs.github.io/python-json-logger/latest/contributing/)
21 |
22 | ## License
23 |
24 | This project is licensed under the BSD 2 Clause License - see [`LICENSE`](https://github.com/nhairs/python-json-logger/blob/main/LICENSE)
25 |
26 | ## Authors and Maintainers
27 |
28 | This project was originally authored by [Zakaria Zajac](https://github.com/madzak) and our wonderful [contributors](https://github.com/nhairs/python-json-logger/graphs/contributors)
29 |
30 | It is currently maintained by:
31 |
32 | - [Nicholas Hairs](https://github.com/nhairs) - [nicholashairs.com](https://www.nicholashairs.com)
33 |
--------------------------------------------------------------------------------
/tests/test_missing.py:
--------------------------------------------------------------------------------
1 | ### IMPORTS
2 | ### ============================================================================
3 | ## Future
4 | from __future__ import annotations
5 |
6 | ## Standard Library
7 |
8 | ## Installed
9 | import pytest
10 |
11 | ## Application
12 | import pythonjsonlogger
13 | from pythonjsonlogger.utils import package_is_available
14 | from pythonjsonlogger.exception import MissingPackageError
15 |
16 | ### CONSTANTS
17 | ### ============================================================================
18 | MISSING_PACKAGE_NAME = "package_name_is_definintely_not_available"
19 | MISSING_PACKAGE_EXTRA = "package_extra_that_is_unique"
20 |
21 |
22 | ### TESTS
23 | ### ============================================================================
24 | def test_package_is_available():
25 | assert package_is_available("json")
26 | return
27 |
28 |
29 | def test_package_not_available():
30 | assert not package_is_available(MISSING_PACKAGE_NAME)
31 | return
32 |
33 |
34 | def test_package_not_available_throw():
35 | with pytest.raises(MissingPackageError) as e:
36 | package_is_available(MISSING_PACKAGE_NAME, throw_error=True)
37 | assert MISSING_PACKAGE_NAME in e.value.msg
38 | assert MISSING_PACKAGE_EXTRA not in e.value.msg
39 | return
40 |
41 |
42 | def test_package_not_available_throw_extras():
43 | with pytest.raises(MissingPackageError) as e:
44 | package_is_available(
45 | MISSING_PACKAGE_NAME, throw_error=True, extras_name=MISSING_PACKAGE_EXTRA
46 | )
47 | assert MISSING_PACKAGE_NAME in e.value.msg
48 | assert MISSING_PACKAGE_EXTRA in e.value.msg
49 | return
50 |
51 |
52 | ## Python JSON Logger Specific
53 | ## -----------------------------------------------------------------------------
54 | if not pythonjsonlogger.ORJSON_AVAILABLE:
55 |
56 | def test_orjson_import_error():
57 | with pytest.raises(MissingPackageError, match="orjson"):
58 | import pythonjsonlogger.orjson
59 | return
60 |
61 |
62 | if not pythonjsonlogger.MSGSPEC_AVAILABLE:
63 |
64 | def test_msgspec_import_error():
65 | with pytest.raises(MissingPackageError, match="msgspec"):
66 | import pythonjsonlogger.msgspec
67 | return
68 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/msgspec.py:
--------------------------------------------------------------------------------
1 | """JSON Formatter using [`msgspec`](https://github.com/jcrist/msgspec)"""
2 |
3 | ### IMPORTS
4 | ### ============================================================================
5 | ## Future
6 | from __future__ import annotations
7 |
8 | ## Standard Library
9 | from typing import Any, Optional, Callable
10 |
11 | ## Installed
12 |
13 | ## Application
14 | from . import core
15 | from . import defaults as d
16 | from .utils import package_is_available
17 |
18 | # We import msgspec after checking it is available
19 | package_is_available("msgspec", throw_error=True)
20 | import msgspec.json # pylint: disable=wrong-import-position,wrong-import-order
21 |
22 |
23 | ### FUNCTIONS
24 | ### ============================================================================
def msgspec_default(obj: Any) -> Any:
    """msgspec default encoder function for non-standard types"""
    # Try each specialised serializer in priority order; fall back to the
    # catch-all handler for anything unrecognised.
    handlers = (
        (d.use_exception_default, d.exception_default),
        (d.use_traceback_default, d.traceback_default),
        (d.use_enum_default, d.enum_default),
        (d.use_type_default, d.type_default),
    )
    for matches, encode in handlers:
        if matches(obj):
            return encode(obj)
    return d.unknown_default(obj)
36 |
37 |
38 | ### CLASSES
39 | ### ============================================================================
class MsgspecFormatter(core.BaseJsonFormatter):
    """JSON formatter using [`msgspec.json.Encoder`](https://jcristharif.com/msgspec/api.html#msgspec.json.Encoder) for encoding."""

    def __init__(
        self,
        *args,
        json_default: Optional[Callable] = msgspec_default,
        **kwargs,
    ) -> None:
        """
        Args:
            args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
            json_default: a function for encoding non-standard objects
            kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
        """
        super().__init__(*args, **kwargs)
        self.json_default = json_default
        # The encoder is constructed once and reused for every log record;
        # enc_hook handles any type msgspec cannot serialize natively.
        self._encoder = msgspec.json.Encoder(enc_hook=self.json_default)
        return

    def jsonify_log_record(self, log_data: core.LogData) -> str:
        """Returns a json string of the log data."""
        encoded: bytes = self._encoder.encode(log_data)
        return encoded.decode("utf8")
64 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "python-json-logger"
7 | version = "4.1.0.dev1"
8 | description = "JSON Log Formatter for the Python Logging Package"
9 | authors = [
10 | {name = "Zakaria Zajac", email = "zak@madzak.com"},
11 | {name = "Nicholas Hairs", email = "info+python-json-logger@nicholashairs.com"},
12 | ]
13 | maintainers = [
14 | {name = "Nicholas Hairs", email = "info+python-json-logger@nicholashairs.com"},
15 | ]
16 |
17 | # Dependency Information
18 | requires-python = ">=3.8"
19 | dependencies = [
20 | "typing_extensions;python_version<'3.10'",
21 | ]
22 |
23 | # Extra information
24 | readme = "README.md"
25 | license = {text = "BSD-2-Clause License"}
26 | classifiers = [
27 | "Development Status :: 6 - Mature",
28 | "Intended Audience :: Developers",
29 | "License :: OSI Approved :: BSD License",
30 | "Operating System :: OS Independent",
31 | "Programming Language :: Python :: 3 :: Only",
32 | "Programming Language :: Python :: 3.8",
33 | "Programming Language :: Python :: 3.9",
34 | "Programming Language :: Python :: 3.10",
35 | "Programming Language :: Python :: 3.11",
36 | "Programming Language :: Python :: 3.12",
37 | "Programming Language :: Python :: 3.13",
38 | "Programming Language :: Python :: 3.14",
39 | "Topic :: System :: Logging",
40 | "Typing :: Typed",
41 | ]
42 |
43 | [project.urls]
44 | Homepage = "https://nhairs.github.io/python-json-logger"
45 | GitHub = "https://github.com/nhairs/python-json-logger"
46 |
47 | [project.optional-dependencies]
48 | dev = [
49 | ## Optional but required for dev
50 | "orjson;implementation_name!='pypy'",
51 | "msgspec;implementation_name!='pypy'",
52 | ## Lint
53 | "validate-pyproject[all]",
54 | "black",
55 | "pylint",
56 | "mypy",
57 | ## Test
58 | "pytest",
59 | "freezegun",
60 | "backports.zoneinfo;python_version<'3.9'",
61 | "tzdata",
62 | ## Build
63 | "build",
64 | ## Docs
65 | "mkdocs",
66 | "mkdocs-material>=8.5",
67 | "mkdocs-awesome-pages-plugin",
68 | "mdx_truly_sane_lists",
69 | "mkdocstrings[python]",
70 | "mkdocs-gen-files",
71 | "mkdocs-literate-nav",
72 | "mike",
73 | ]
74 |
75 | [tool.setuptools.packages.find]
76 | where = ["src"]
77 | include = ["pythonjsonlogger*"]
78 |
79 | [tool.setuptools.package-data]
80 | pythonjsonlogger = ["py.typed"]
81 |
82 | [tool.black]
83 | line-length = 100
84 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/orjson.py:
--------------------------------------------------------------------------------
1 | """JSON Formatter using [orjson](https://github.com/ijl/orjson)"""
2 |
3 | ### IMPORTS
4 | ### ============================================================================
5 | ## Future
6 | from __future__ import annotations
7 |
8 | ## Standard Library
9 | from typing import Any, Optional, Callable
10 |
11 | ## Installed
12 |
13 | ## Application
14 | from . import core
15 | from . import defaults as d
16 | from .utils import package_is_available
17 |
18 | # We import orjson after checking it is available
19 | package_is_available("orjson", throw_error=True)
20 | import orjson # pylint: disable=wrong-import-position,wrong-import-order
21 |
22 |
23 | ### FUNCTIONS
24 | ### ============================================================================
def orjson_default(obj: Any) -> Any:
    """orjson default encoder function for non-standard types"""
    # Try each specialised serializer in priority order; fall back to the
    # catch-all handler for anything unrecognised.
    handlers = (
        (d.use_exception_default, d.exception_default),
        (d.use_traceback_default, d.traceback_default),
        (d.use_bytes_default, d.bytes_default),
        (d.use_enum_default, d.enum_default),
        (d.use_type_default, d.type_default),
    )
    for matches, encode in handlers:
        if matches(obj):
            return encode(obj)
    return d.unknown_default(obj)
38 |
39 |
40 | ### CLASSES
41 | ### ============================================================================
class OrjsonFormatter(core.BaseJsonFormatter):
    """JSON formatter using [orjson](https://github.com/ijl/orjson) for encoding."""

    def __init__(
        self,
        *args,
        json_default: Optional[Callable] = orjson_default,
        json_indent: bool = False,
        **kwargs,
    ) -> None:
        """
        Args:
            args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
            json_default: a function for encoding non-standard objects
            json_indent: indent output with 2 spaces.
            kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
        """
        super().__init__(*args, **kwargs)
        self.json_default = json_default
        self.json_indent = json_indent
        return

    def jsonify_log_record(self, log_data: core.LogData) -> str:
        """Returns a json string of the log data."""
        # OPT_NON_STR_KEYS allows non-string dict keys; indentation is added
        # only when requested at construction time.
        options = orjson.OPT_NON_STR_KEYS
        if self.json_indent:
            options |= orjson.OPT_INDENT_2
        return orjson.dumps(log_data, default=self.json_default, option=options).decode("utf8")
72 |
--------------------------------------------------------------------------------
/tests/test_dictconfig.py:
--------------------------------------------------------------------------------
1 | ### IMPORTS
2 | ### ============================================================================
3 | ## Future
4 | from __future__ import annotations
5 |
6 | ## Standard Library
7 | from dataclasses import dataclass
8 | import io
9 | import json
10 | import logging
11 | import logging.config
12 | from typing import Any, Generator
13 |
14 | ## Installed
15 | import pytest
16 |
17 | ### SETUP
18 | ### ============================================================================
19 | _LOGGER_COUNT = 0
20 | EXT_VAL = 999
21 |
22 |
23 | class Dummy:
24 | pass
25 |
26 |
27 | def my_json_default(obj: Any) -> Any:
28 | if isinstance(obj, Dummy):
29 | return "DUMMY"
30 | return obj
31 |
32 |
33 | LOGGING_CONFIG = {
34 | "version": 1,
35 | "disable_existing_loggers": False,
36 | "formatters": {
37 | "default": {
38 | "()": "pythonjsonlogger.json.JsonFormatter",
39 | "json_default": "ext://tests.test_dictconfig.my_json_default",
40 | "static_fields": {"ext-val": "ext://tests.test_dictconfig.EXT_VAL"},
41 | }
42 | },
43 | "handlers": {
44 | "default": {
45 | "level": "DEBUG",
46 | "formatter": "default",
47 | "class": "logging.StreamHandler",
48 | "stream": "ext://sys.stdout", # Default is stderr
49 | },
50 | },
51 | "loggers": {
52 | "": {"handlers": ["default"], "level": "WARNING", "propagate": False}, # root logger
53 | },
54 | }
55 |
56 |
57 | @dataclass
58 | class LoggingEnvironment:
59 | logger: logging.Logger
60 | buffer: io.StringIO
61 |
62 | def load_json(self) -> Any:
63 | return json.loads(self.buffer.getvalue())
64 |
65 |
@pytest.fixture
def env() -> Generator[LoggingEnvironment, None, None]:
    # Yields a logger wired to an in-memory buffer using the formatter that
    # dictConfig built from LOGGING_CONFIG, so tests can inspect the JSON output.
    global _LOGGER_COUNT  # pylint: disable=global-statement
    _LOGGER_COUNT += 1  # unique logger name per test run to avoid handler reuse
    logging.config.dictConfig(LOGGING_CONFIG)
    # Reuse the formatter instance that dictConfig attached to the root handler.
    default_formatter = logging.root.handlers[0].formatter
    logger = logging.getLogger(f"pythonjsonlogger.tests.{_LOGGER_COUNT}")
    logger.setLevel(logging.DEBUG)
    buffer = io.StringIO()
    handler = logging.StreamHandler(buffer)
    handler.setFormatter(default_formatter)
    logger.addHandler(handler)
    yield LoggingEnvironment(logger=logger, buffer=buffer)
    # Teardown: detach the handler and release the buffer.
    logger.removeHandler(handler)
    logger.setLevel(logging.NOTSET)
    buffer.close()
    return
83 |
84 |
85 | ### TESTS
86 | ### ============================================================================
87 | def test_external_reference_support(env: LoggingEnvironment):
88 |
89 | assert logging.root.handlers[0].formatter.json_default is my_json_default # type: ignore[union-attr]
90 |
91 | env.logger.info("hello", extra={"dummy": Dummy()})
92 | log_json = env.load_json()
93 |
94 | assert log_json["ext-val"] == EXT_VAL
95 | assert log_json["dummy"] == "DUMMY"
96 | return
97 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: "Python JSON Logger"
2 | site_url: https://nhairs.github.io/python-json-logger
3 | repo_url: https://github.com/nhairs/python-json-logger
4 | edit_uri: tree/master/docs
5 | copyright: " Copyright © Python JSON Logger Contributors"
6 | watch:
7 | - mkdocs.yml
8 | - README.md
9 | - src/pythonjsonlogger
10 | - docs
11 |
12 | nav:
13 | - "Home": index.md
14 | - quickstart.md
15 | - cookbook.md
16 | - changelog.md
17 | - security.md
18 | - contributing.md
19 | - API Reference:
20 | - ... | reference/pythonjsonlogger/*
21 |
22 | theme:
23 | name: material
24 |
25 | icon:
26 | logo: material/code-braces
27 |
28 | features:
29 | - navigation.instant
30 | - navigation.sections
31 | - navigation.indexes
32 | - navigation.expand
33 | - navigation.top
34 | - content.code.annotate
35 | - content.code.copy
36 | - toc.follow
37 |
38 | palette:
39 | - media: "(prefers-color-scheme: light)"
40 | primary: amber
41 | scheme: default
42 | toggle:
43 | icon: material/weather-night
44 | name: Switch to dark mode
45 | - media: "(prefers-color-scheme: dark)"
46 | primary: amber
47 | scheme: slate
48 | toggle:
49 | icon: material/weather-sunny
50 | name: Switch to light mode
51 |
52 | extra:
53 | social:
54 | - icon: fontawesome/brands/github
55 | link: https://github.com/nhairs/python-json-logger
56 | version:
57 | provider: mike
58 |
59 | markdown_extensions:
60 | - toc:
61 | permalink: "🔗"
62 | - admonition
63 | - def_list
64 | - mdx_truly_sane_lists
65 | - pymdownx.highlight:
66 | anchor_linenums: true
67 | - pymdownx.inlinehilite
68 | - pymdownx.snippets
69 | - pymdownx.superfences
70 | - pymdownx.details
71 | - pymdownx.caret
72 |
73 | plugins:
74 | - autorefs
75 | - search:
76 | lang: en
77 | - awesome-pages:
78 | collapse_single_pages: true
79 | - gen-files:
80 | scripts:
81 | - scripts/gen_ref_nav.py
82 | - mkdocstrings:
83 | default_handler: python
84 | handlers:
85 | python:
86 | paths:
87 | - src
88 | import:
89 | - https://docs.python.org/3/objects.inv
90 | # - https://mkdocstrings.github.io/objects.inv
91 | # - https://mkdocstrings.github.io/griffe/objects.inv
92 | options:
93 | filters:
94 | - "!^_"
95 | heading_level: 1
96 | inherited_members: true
97 | merge_init_into_class: true
98 | #preload_modules: []
99 | separate_signature: true
100 | show_root_heading: true
101 | show_root_full_path: true
102 | show_signature_annotations: true
103 | show_symbol_type_heading: true
104 | show_symbol_type_toc: true
105 | signature_crossrefs: true
106 | summary: true
107 | unwrap_annotated: true
108 | show_source: false
109 | docstring_section_style: spacy
110 | - literate-nav:
111 | nav_file: SUMMARY.txt
112 | - mike:
113 | canonical_version: latest
114 |
115 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Python JSON Logger
2 |
3 | [](https://pypi.python.org/pypi/python-json-logger/)
4 | [](https://pypi.python.org/pypi/python-json-logger/)
5 | [](https://pypi.python.org/pypi/python-json-logger/)
6 | [](https://github.com/nhairs/python-json-logger)
7 | [](https://github.com/nhairs/python-json-logger)
8 | 
9 |
10 | ## Introduction
11 |
12 | Python JSON Logger enables you to produce JSON logs when using Python's `logging` package.
13 |
14 | JSON logs are machine readable allowing for much easier parsing and ingestion into log aggregation tools.
15 |
16 | This library assumes that you are familiar with the `logging` standard library package; if you are not you should start by reading the official [Logging HOWTO](https://docs.python.org/3/howto/logging.html).
17 |
18 |
19 | ## Features
20 |
21 | - **Standard Library Compatible:** Implement JSON logging without modifying your existing log setup.
- **Supports Multiple JSON Encoders:** In addition to the standard library's `json` module, also supports the [`orjson`][pythonjsonlogger.orjson] and [`msgspec`][pythonjsonlogger.msgspec] JSON encoders.
23 | - **Fully Customizable Output Fields:** Control required, excluded, and static fields including automatically picking up custom attributes on `LogRecord` objects. Fields can be renamed before they are output.
24 | - **Encode Any Type:** Encoders are customized to ensure that something sane is logged for any input including those that aren't supported by default. For example formatting UUID objects into their string representation and bytes objects into a base 64 encoded string.
25 |
26 | ## Getting Started
27 |
28 | Jump right in with our [Quickstart Guide](quickstart.md) to get `python-json-logger` integrated into your project quickly.
29 |
30 | Here's a small taste of what it looks like:
31 |
32 | ```python title="Example Usage"
33 | import logging
34 | from pythonjsonlogger.json import JsonFormatter
35 |
36 | logger = logging.getLogger()
37 | logger.setLevel(logging.INFO)
38 |
39 | handler = logging.StreamHandler()
40 | handler.setFormatter(JsonFormatter())
41 |
42 | logger.addHandler(handler)
43 |
44 | logger.info("Logging using python-json-logger!", extra={"more_data": True})
45 | # {"message": "Logging using python-json-logger!", "more_data": true}
46 | ```
47 |
48 | ## Where to Go Next
49 |
50 | * **[Quickstart Guide](quickstart.md):** For installation and basic setup.
51 | * **[Cookbook](cookbook.md):** For more advanced usage patterns and recipes.
52 | * **API Reference:** Dive into the details of specific formatters, functions, and classes (see navigation menu).
53 | * **[Contributing Guidelines](contributing.md):** If you'd like to contribute to the project.
54 | * **[Changelog](changelog.md):** To see what's new in recent versions.
55 |
56 | ## Project Information
57 |
58 | ### Bugs, Feature Requests, etc.
59 | Please [submit an issue on GitHub](https://github.com/nhairs/python-json-logger/issues).
60 |
61 | In the case of bug reports, please help us help you by following best practices [^1^](https://marker.io/blog/write-bug-report/) [^2^](https://www.chiark.greenend.org.uk/~sgtatham/bugs.html).
62 |
63 | In the case of feature requests, please provide background to the problem you are trying to solve so that we can find a solution that makes the most sense for the library as well as your use case.
64 |
65 | ### License
66 | This project is licensed under the BSD 2 Clause License - see the [LICENSE file](https://github.com/nhairs/python-json-logger/blob/main/LICENSE) on GitHub.
67 |
68 | ### Authors and Maintainers
69 | This project was originally authored by [Zakaria Zajac](https://github.com/madzak) and our wonderful [contributors](https://github.com/nhairs/python-json-logger/graphs/contributors).
70 |
71 | It is currently maintained by:
72 | - [Nicholas Hairs](https://github.com/nhairs) - [nicholashairs.com](https://www.nicholashairs.com)
73 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/json.py:
--------------------------------------------------------------------------------
1 | """JSON formatter using the standard library's `json` for encoding.
2 |
3 | Module contains the `JsonFormatter` and a custom `JsonEncoder` which supports a greater
4 | variety of types.
5 | """
6 |
7 | ### IMPORTS
8 | ### ============================================================================
9 | ## Future
10 | from __future__ import annotations
11 |
12 | ## Standard Library
13 | import datetime
14 | import json
15 | from typing import Any, Callable, Optional, Union
16 | import warnings
17 |
18 | ## Application
19 | from . import core
20 | from . import defaults as d
21 |
22 |
23 | ### CLASSES
24 | ### ============================================================================
class JsonEncoder(json.JSONEncoder):
    """A custom encoder extending [json.JSONEncoder](https://docs.python.org/3/library/json.html#json.JSONEncoder)"""

    def default(self, o: Any) -> Any:
        # Datetime-like objects go through format_datetime_obj so subclasses
        # can customise the format (see below).
        if d.use_datetime_any(o):
            return self.format_datetime_obj(o)

        # Remaining types are dispatched through the shared default handlers.
        # Order matters and mirrors the specificity of the checks.
        dispatch = (
            (d.use_exception_default, d.exception_default),
            (d.use_traceback_default, d.traceback_default),
            (d.use_enum_default, d.enum_default),
            (d.use_bytes_default, d.bytes_default),
            (d.use_dataclass_default, d.dataclass_default),
            (d.use_type_default, d.type_default),
        )
        for use_handler, handler in dispatch:
            if use_handler(o):
                return handler(o)

        # Fall back to the stdlib encoder; anything it rejects gets a
        # best-effort string representation instead of raising.
        try:
            return super().default(o)
        except TypeError:
            return d.unknown_default(o)

    def format_datetime_obj(self, o: datetime.time | datetime.date | datetime.datetime) -> str:
        """Format datetime objects found in `self.default`

        Subclasses may override this to change the datetime format without
        needing to understand the internals of the default method.
        """
        return d.datetime_any(o)
62 |
63 |
class JsonFormatter(core.BaseJsonFormatter):
    """JSON formatter using the standard library's [`json`](https://docs.python.org/3/library/json.html) for encoding"""

    def __init__(
        self,
        *args,
        json_default: Optional[Callable] = None,
        json_encoder: Optional[Callable] = None,
        json_serializer: Callable = json.dumps,
        json_indent: Optional[Union[int, str]] = None,
        json_ensure_ascii: bool = True,
        **kwargs,
    ) -> None:
        """
        Args:
            args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
            json_default: a function for encoding non-standard objects
            json_encoder: custom JSON encoder
            json_serializer: a [`json.dumps`](https://docs.python.org/3/library/json.html#json.dumps)-compatible callable
                that will be used to serialize the log record.
            json_indent: indent parameter for the `json_serializer`
            json_ensure_ascii: `ensure_ascii` parameter for the `json_serializer`
            kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
        """
        super().__init__(*args, **kwargs)

        self.json_default = json_default
        self.json_encoder = json_encoder
        self.json_serializer = json_serializer
        self.json_indent = json_indent
        self.json_ensure_ascii = json_ensure_ascii
        # Only install our extended encoder when the caller supplied neither
        # a custom encoder nor a custom default function.
        if not (self.json_encoder or self.json_default):
            self.json_encoder = JsonEncoder
        return

    def jsonify_log_record(self, log_data: core.LogData) -> str:
        """Returns a json string of the log data."""
        serializer_options = {
            "default": self.json_default,
            "cls": self.json_encoder,
            "indent": self.json_indent,
            "ensure_ascii": self.json_ensure_ascii,
        }
        return self.json_serializer(log_data, **serializer_options)
108 |
109 |
110 | ### DEPRECATED COMPATIBILITY
111 | ### ============================================================================
def __getattr__(name: str):
    # Module-level __getattr__ (PEP 562): keeps the deprecated module-level
    # alias importable while steering users to its new home.
    if name != "RESERVED_ATTRS":
        raise AttributeError(f"module {__name__} has no attribute {name}")
    warnings.warn(
        "RESERVED_ATTRS has been moved to pythonjsonlogger.core",
        DeprecationWarning,
    )
    return core.RESERVED_ATTRS
120 |
--------------------------------------------------------------------------------
/docs/quickstart.md:
--------------------------------------------------------------------------------
1 | # Quick Start
2 |
3 | ## Installation
4 |
5 | !!! note
6 | All versions of this fork use version `>=3.0.0`.
7 |
8 | To use pre-fork versions use `python-json-logger<3`.
9 |
10 | ### Install via pip
11 |
12 | ```shell
13 | pip install python-json-logger
14 | ```
15 |
16 | ### Install from GitHub
17 |
18 | To install from [releases](https://github.com/nhairs/python-json-logger/releases) (including development releases), you can use the URL to the specific wheel.
19 |
20 | ```shell
21 | # e.g. 3.0.0 wheel
22 | pip install 'python-json-logger@https://github.com/nhairs/python-json-logger/releases/download/v3.0.0/python_json_logger-3.0.0-py3-none-any.whl'
23 | ```
24 |
25 | ## Usage
26 |
Python JSON Logger provides [`logging.Formatter`](https://docs.python.org/3/library/logging.html#logging.Formatter) classes that encode the logged message into JSON. Although [a variety of JSON encoders are supported](#alternate-json-encoders), the following examples will use the [JsonFormatter][pythonjsonlogger.json.JsonFormatter] which uses the `json` module from the standard library.
28 |
29 | ### Integrating with Python's logging framework
30 |
31 | To produce JSON output, attach the formatter to a logging handler:
32 |
33 | ```python
34 | import logging
35 | from pythonjsonlogger.json import JsonFormatter
36 |
37 | logger = logging.getLogger()
38 |
39 | logHandler = logging.StreamHandler()
40 | formatter = JsonFormatter()
41 | logHandler.setFormatter(formatter)
42 | logger.addHandler(logHandler)
43 | ```
44 |
45 | ### Output fields
46 |
47 | #### Required Fields
48 | You can control the logged fields by setting the `fmt` argument when creating the formatter. A variety of different formats are supported including:
49 |
50 | - Standard library formats: where `style` is one of `%`, `$`, or `{`. This allows using Python JSON Logger Formatters with your existing config.
51 | - Comma format: where `style` is `,` which simplifies the writing of formats where you can't use more complex formats.
52 | - A sequence of string: e.g. lists or tuples.
53 |
54 | All [`LogRecord` attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes) can be output using their name.
55 |
56 | ```python
57 | # Standard library format
58 | formatter = JsonFormatter("{message}{asctime}{exc_info}", style="{")
59 |
60 | # Comma format
61 | formatter = JsonFormatter("message,asctime,exc_info", style=",")
62 |
63 | # Sequence of strings format
64 | formatter = JsonFormatter(["message", "asctime", "exc_info"])
65 | ```
66 |
67 | #### Message Fields
68 |
69 | Instead of logging a string message you can log using a `dict`.
70 |
71 | ```python
72 | logger.info({
73 | "my_data": 1,
74 | "message": "if you don't include this it will be an empty string",
75 | "other_stuff": False,
76 | })
77 | ```
78 |
79 | !!! warning
80 | Be aware that if you log using a `dict`, other formatters may not be able to handle it.
81 |
82 | You can also add additional message fields using the `extra` argument.
83 |
84 | ```python
85 | logger.info(
86 | "this logs the same additional fields as above",
87 | extra={
88 | "my_data": 1,
89 | "other_stuff": False,
90 | },
91 | )
92 | ```
93 |
94 | Finally, any non-standard attributes added to a `LogRecord` will also be included in the logged data. See [Cookbook: Request / Trace IDs](cookbook.md#request-trace-ids) for an example.
95 |
96 | #### Default Fields
97 |
Default fields that are added to every log record prior to any other field can be set using the `defaults` argument.
99 |
100 | ```python
101 | formatter = JsonFormatter(
102 | defaults={"environment": "dev"}
103 | )
104 | # ...
105 | logger.info("this message will have environment=dev by default")
106 | logger.info("this overwrites the environment field", extra={"environment": "prod"})
107 | ```
108 |
109 | #### Static Fields
110 |
111 | Static fields that are added to every log record can be set using the `static_fields` argument.
112 |
113 | ```python
114 | formatter = JsonFormatter(
115 | static_fields={"True gets logged on every record?": True}
116 | )
117 | ```
118 |
119 | ### Excluding fields
120 |
121 | You can prevent fields being added to the output data by adding them to `reserved_attrs`. By default all [`LogRecord` attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes) are excluded.
122 |
123 | ```python
124 | from pythonjsonlogger.core import RESERVED_ATTRS
125 |
126 | formatter = JsonFormatter(
127 | reserved_attrs=RESERVED_ATTRS+["request_id", "my_other_field"]
128 | )
129 | ```
130 |
131 | ### Renaming fields
132 |
133 | You can rename fields using the `rename_fields` argument.
134 |
135 | ```python
136 | formatter = JsonFormatter(
137 | "{message}{levelname}",
138 | style="{",
139 | rename_fields={"levelname": "LEVEL"},
140 | )
141 | ```
142 |
143 | ### Custom object serialization
144 |
145 | Most formatters support `json_default` which is used to control how objects are serialized.
146 |
147 | ```python
148 | def my_default(obj):
149 | if isinstance(obj, MyClass):
150 | return {"special": obj.special}
151 |
152 | formatter = JsonFormatter(json_default=my_default)
153 | ```
154 |
155 | !!! note
156 | When providing your own `json_default`, you likely want to call the original `json_default` for your encoder. Python JSON Logger provides custom default serializers for each encoder that tries very hard to ensure sane output is always logged.
157 |
158 | ### Alternate JSON Encoders
159 |
160 | The following JSON encoders are also supported:
161 |
162 | - [orjson](https://github.com/ijl/orjson) - [pythonjsonlogger.orjson.OrjsonFormatter][]
163 | - [msgspec](https://github.com/jcrist/msgspec) - [pythonjsonlogger.msgspec.MsgspecFormatter][]
164 |
--------------------------------------------------------------------------------
/docs/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Contributions are welcome!
4 |
5 | ## Code of Conduct
6 |
7 | In general we follow the [Python Software Foundation Code of Conduct](https://policies.python.org/python.org/code-of-conduct/). Please note that we are not affiliated with the PSF.
8 |
9 | ## Pull Request Process
10 |
11 | **0. Before you begin**
12 |
13 | If you're not familiar with contributing to open source software, [start by reading this guide](https://opensource.guide/how-to-contribute/).
14 |
15 | Be aware that anything you contribute will be licenced under [the project's licence](https://github.com/nhairs/python-json-logger/blob/main/LICENSE). If you are making a change as a part of your job, be aware that your employer might own your work and you'll need their permission in order to licence the code.
16 |
17 | ### 1. Find something to work on
18 |
19 | Where possible it's best to stick to established issues where discussion has already taken place. Contributions that haven't come from a discussed issue are less likely to be accepted.
20 |
21 | The following are things that can be worked on without an existing issue:
22 |
23 | - Updating documentation. This includes fixing in-code documentation / comments, and the overall docs.
24 | - Small changes that don't change functionality such as refactoring or adding / updating tests.
25 |
26 | ### 2. Fork the repository and make your changes
27 |
28 | #### Coding Style
29 |
30 | Before writing any code, please familiarize yourself with our [Python Style Guide](style-guide.md). This document outlines our coding conventions, formatting expectations, and common patterns used in the project. Adhering to this guide is crucial for maintaining code consistency and readability.
31 |
32 | While the style guide covers detailed conventions, always try to match the style of existing code in the module you are working on, especially regarding local patterns and structure.
33 |
34 | #### Development Setup
35 |
All development tooling can be installed (usually into a virtual environment), using the `dev` optional dependency:
37 |
38 | ```shell
pip install -e '.[dev]'
40 | ```
41 |
42 | Before creating your pull request you'll want to format your code and run the linters and tests:
43 |
44 | ```shell
45 | # Format
46 | black src tests
47 |
48 | # Lint
49 | pylint --output-format=colorized src
50 | mypy src tests
51 |
52 | # Tests
53 | pytest
54 | ```
55 |
56 | The above commands (`black`, `pylint`, `mypy`, `pytest`) should all be run before submitting a pull request.
57 |
58 | If making changes to the documentation you can preview the changes locally using `mkdocs`. Changes to the `README.md` can be previewed using a tool like [`grip`](https://github.com/joeyespo/grip) (installable via `pip install grip`).
59 |
60 | ```shell
61 | mkdocs serve
62 | # For README preview (after installing grip):
63 | # grip
64 | ```
65 |
66 | !!! note
67 | In general we will always squash merge pull requests so you do not need to worry about a "clean" commit history.
68 |
69 | ### 3. Checklist
70 |
71 | Before pushing and creating your pull request, you should make sure you've done the following:
72 |
73 | - Updated any relevant tests.
74 | - Formatted your code and run the linters and tests.
75 | - Updated the version number in `pyproject.toml`. In general using a `.devN` suffix is acceptable.
    This is not required for changes that do not affect the code such as documentation.
- Add details of the changes to the change log (`docs/changelog.md`), creating a new section if needed.
78 | - Add notes for new / changed features in the relevant docstring.
79 |
80 | **4. Create your pull request**
81 |
82 | When creating your pull request be aware that the title and description will be used for the final commit so pay attention to them.
83 |
84 | Your pull request description should include the following:
85 |
86 | - Why the pull request is being made
87 | - Summary of changes
88 | - How the pull request was tested - especially if not covered by unit testing.
89 |
90 | Once you've submitted your pull request make sure that all CI jobs are passing. Pull requests with failing jobs will not be reviewed.
91 |
92 | ### 5. Code review
93 |
94 | Your code will be reviewed by a maintainer.
95 |
96 | If you're not familiar with code review start by reading [this guide](https://google.github.io/eng-practices/review/).
97 |
98 | !!! tip "Remember you are not your work"
99 |
100 | You might be asked to explain or justify your choices. This is not a criticism of your value as a person!
101 |
102 | Often this is because there are multiple ways to solve the same problem and the reviewer would like to understand more about the way you solved.
103 |
104 | ## Common Topics
105 |
106 | ### Adding a new encoder
107 |
108 | New encoders may be added, however how popular / common a library is will be taken into consideration before being added. You should open an issue before creating a pull request.
109 |
### Versioning and breaking compatibility
111 |
112 | This project uses semantic versioning.
113 |
In general backwards compatibility is always preferred. This library is widely used and not particularly sophisticated and as such there must be a good reason for breaking changes.
115 |
116 | Feature changes MUST be compatible with all [security supported versions of Python](https://endoflife.date/python) and SHOULD be compatible with all unsupported versions of Python where [recent downloads over the last 90 days exceeds 5% of all downloads](https://pypistats.org/packages/python-json-logger).
117 |
118 | In general, only the latest `major.minor` version of Python JSON Logger is supported. Bug fixes and feature backports requiring a version branch may be considered but must be discussed with the maintainers first.
119 |
120 | See also [Security Policy](security.md).
121 |
122 | ### Spelling
123 |
124 | The original implementation of this project used US spelling so it will continue to use US spelling for all code.
125 |
126 | Documentation is more flexible and may use a variety of English spellings.
127 |
128 | ### Contacting the Maintainers
129 |
130 | In general it is preferred to keep communication to GitHub, e.g. through comments on issues and pull requests. If you do need to contact the maintainers privately, please do so using the email addresses in the maintainers section of the `pyproject.toml`.
131 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/defaults.py:
--------------------------------------------------------------------------------
1 | """Collection of functions for building custom `json_default` functions.
2 |
3 | In general functions come in pairs of `use_x_default` and `x_default`, where the former is used
4 | to determine if you should call the latter.
5 |
6 | Most `use_x_default` functions also act as a [`TypeGuard`](https://mypy.readthedocs.io/en/stable/type_narrowing.html#user-defined-type-guards).
7 | """
8 |
9 | ### IMPORTS
10 | ### ============================================================================
11 | ## Future
12 | from __future__ import annotations
13 |
14 | ## Standard Library
15 | import base64
16 | import dataclasses
17 | import datetime
18 | import enum
19 | import sys
20 | from types import TracebackType
21 | from typing import Any
22 | import traceback
23 | import uuid
24 |
25 | if sys.version_info >= (3, 10):
26 | from typing import TypeGuard
27 | else:
28 | from typing_extensions import TypeGuard
29 |
30 | ## Installed
31 |
32 | ## Application
33 |
34 |
35 | ### FUNCTIONS
36 | ### ============================================================================
def unknown_default(obj: Any) -> str:
    """Backup default function for any object type.

    Tries `str(obj)` first, then `repr(obj)`. If both raise, returns the
    literal string `"__could_not_encode__"` so that encoding never fails.

    Args:
        obj: object to handle
    """
    for encode in (str, repr):
        try:
            return encode(obj)
        except Exception:  # pylint: disable=broad-exception-caught
            continue
    return "__could_not_encode__"
55 |
56 |
57 | ## Types
58 | ## -----------------------------------------------------------------------------
59 | def use_type_default(obj: Any) -> TypeGuard[type]:
60 | """Default check function for `type` objects (aka classes)."""
61 | return isinstance(obj, type)
62 |
63 |
def type_default(obj: type) -> str:
    """Encode a class by its (unqualified) name.

    Args:
        obj: class to handle
    """
    name: str = obj.__name__
    return name
71 |
72 |
73 | ## Dataclasses
74 | ## -----------------------------------------------------------------------------
def use_dataclass_default(obj: Any) -> bool:
    """Check whether `obj` is a dataclass *instance* (dataclass classes are excluded)."""
    if isinstance(obj, type):
        # A dataclass class itself is handled by the type default functions.
        return False
    return dataclasses.is_dataclass(obj)
78 |
79 |
def dataclass_default(obj: Any) -> dict[str, Any]:
    """Default function for dataclass instances

    Recursively converts the instance (including nested dataclasses, lists,
    and dicts) to a plain dict via `dataclasses.asdict`.

    Args:
        obj: dataclass instance to handle
    """
    return dataclasses.asdict(obj)
87 |
88 |
89 | ## Dates and Times
90 | ## -----------------------------------------------------------------------------
91 | def use_time_default(obj: Any) -> TypeGuard[datetime.time]:
92 | """Default check function for `datetime.time` instances"""
93 | return isinstance(obj, datetime.time)
94 |
95 |
def time_default(obj: datetime.time) -> str:
    """Encode a `datetime.time` as an ISO 8601 string.

    Args:
        obj: time value to handle
    """
    return obj.isoformat()
103 |
104 |
105 | def use_date_default(obj: Any) -> TypeGuard[datetime.date]:
106 | """Default check function for `datetime.date` instances"""
107 | return isinstance(obj, datetime.date)
108 |
109 |
def date_default(obj: datetime.date) -> str:
    """Encode a `datetime.date` as an ISO 8601 string.

    Args:
        obj: date value to handle
    """
    return obj.isoformat()
117 |
118 |
119 | def use_datetime_default(obj: Any) -> TypeGuard[datetime.datetime]:
120 | """Default check function for `datetime.datetime` instances"""
121 | return isinstance(obj, datetime.datetime)
122 |
123 |
def datetime_default(obj: datetime.datetime) -> str:
    """Encode a `datetime.datetime` as an ISO 8601 string.

    Args:
        obj: datetime value to handle
    """
    return obj.isoformat()
131 |
132 |
133 | def use_datetime_any(obj: Any) -> TypeGuard[datetime.time | datetime.date | datetime.datetime]:
134 | """Default check function for `datetime` related instances"""
135 | return isinstance(obj, (datetime.time, datetime.date, datetime.datetime))
136 |
137 |
def datetime_any(obj: datetime.time | datetime.date | datetime.datetime) -> str:
    """Default function for `datetime` related instances

    Accepts any of `datetime.time`, `datetime.date`, or `datetime.datetime`
    (the annotation previously repeated `datetime.date` instead of naming
    `datetime.datetime`) and encodes it as an ISO 8601 string.

    Args:
        obj: object to handle
    """
    return obj.isoformat()
145 |
146 |
147 | ## Exception and Tracebacks
148 | ## -----------------------------------------------------------------------------
149 | def use_exception_default(obj: Any) -> TypeGuard[BaseException]:
150 | """Default check function for exception instances.
151 |
152 | Exception classes are not treated specially and should be handled by the
153 | `[use_]type_default` functions.
154 | """
155 | return isinstance(obj, BaseException)
156 |
157 |
def exception_default(obj: BaseException) -> str:
    """Encode an exception instance as `"ClassName: message"`.

    Args:
        obj: exception to handle
    """
    return f"{type(obj).__name__}: {obj}"
165 |
166 |
167 | def use_traceback_default(obj: Any) -> TypeGuard[TracebackType]:
168 | """Default check function for tracebacks"""
169 | return isinstance(obj, TracebackType)
170 |
171 |
def traceback_default(obj: TracebackType) -> str:
    """Format a traceback as a single, stripped multi-line string.

    Args:
        obj: traceback to handle
    """
    frames = traceback.format_tb(obj)
    return "".join(frames).strip()
179 |
180 |
181 | ## Enums
182 | ## -----------------------------------------------------------------------------
183 | def use_enum_default(obj: Any) -> TypeGuard[enum.Enum | enum.EnumMeta]:
184 | """Default check function for enums.
185 |
186 | Supports both enum classes and enum values.
187 | """
188 | return isinstance(obj, (enum.Enum, enum.EnumMeta))
189 |
190 |
def enum_default(obj: enum.Enum | enum.EnumMeta) -> Any | list[Any]:
    """Encode enum-related objects.

    An enum member is encoded as its value; an enum class is encoded as the
    list of all member values (in definition order).

    Args:
        obj: enum member or enum class to handle
    """
    if not isinstance(obj, enum.Enum):
        # Enum class: iterate its members and collect their values.
        return [member.value for member in obj]  # type: ignore[var-annotated]
    return obj.value
202 |
203 |
204 | ## UUIDs
205 | ## -----------------------------------------------------------------------------
206 | def use_uuid_default(obj: Any) -> TypeGuard[uuid.UUID]:
207 | """Default check function for `uuid.UUID` instances"""
208 | return isinstance(obj, uuid.UUID)
209 |
210 |
def uuid_default(obj: uuid.UUID) -> str:
    """Encode a `uuid.UUID` using its canonical hyphenated string form.

    Args:
        obj: UUID to handle
    """
    return f"{obj}"
220 |
221 |
222 | ## Bytes
223 | ## -----------------------------------------------------------------------------
224 | def use_bytes_default(obj: Any) -> TypeGuard[bytes | bytearray]:
225 | """Default check function for bytes"""
226 | return isinstance(obj, (bytes, bytearray))
227 |
228 |
def bytes_default(obj: bytes | bytearray, url_safe: bool = True) -> str:
    """Encode binary data as base 64 text.

    Args:
        obj: bytes-like object to handle
        url_safe: use the URL safe base 64 alphabet.

    Returns:
        The byte data as a base 64 string.
    """
    encoder = base64.urlsafe_b64encode if url_safe else base64.b64encode
    return encoder(obj).decode("utf8")
242 |
--------------------------------------------------------------------------------
/docs/style-guide.md:
--------------------------------------------------------------------------------
1 | # Python Style Guide
2 |
3 | This document outlines the coding style, conventions, and common patterns for the `python-json-logger` project. Adhering to this guide will help maintain code consistency, readability, and quality.
4 |
5 | ## General Principles
6 |
7 | * **Readability Counts:** Write code that is easy for others (and your future self) to understand. This aligns with [PEP 20 (The Zen of Python)](https://peps.python.org/pep-0020/).
8 | * **Consistency:** Strive for consistency in naming, formatting, and structure throughout the codebase.
9 | * **Simplicity:** Prefer simple, straightforward solutions over overly complex ones.
10 | * **PEP 8:** Follow [PEP 8 (Style Guide for Python Code)](https://peps.python.org/pep-0008/) for all Python code. The automated tools mentioned below will enforce many of these rules. This guide highlights project-specific conventions or particularly important PEP 8 aspects.
11 |
12 | ## Formatting and Linting
13 |
14 | We use automated tools to enforce a consistent code style and catch potential errors. These include:
15 |
16 | * **Black:** For opinionated code formatting.
17 | * **Pylint:** For static code analysis and error detection.
18 | * **MyPy:** For static type checking.
19 |
20 | Ensure these tools are run before committing code. Configuration for these tools can be found in `pyproject.toml`, `pylintrc`, and `mypy.ini` respectively. This guide primarily focuses on conventions not automatically verifiable by these tools.
21 |
22 | ## Imports
23 |
24 | Imports should be structured into the following groups, separated by a blank line, and generally alphabetized within each group:
25 |
26 | 1. **Future Imports:** e.g., `from __future__ import annotations`
27 | 2. **Standard Library Imports:** e.g., `import sys`, `from datetime import datetime`
28 | 3. **Installed (Third-Party) Library Imports:** e.g., `import pytest`
29 | 4. **Application (Local) Imports:** e.g., `from .core import BaseJsonFormatter` (This project-specific pattern is crucial for internal organization).
30 |
31 | ## Naming Conventions
32 |
33 | While PEP 8 covers most naming, we emphasize:
34 |
35 | * **Modules:** `lowercase_with_underscores.py`
36 | * **Packages:** `lowercase`
37 | * **Classes & Type Aliases:** `CapWords` (e.g., `BaseJsonFormatter`, `OptionalCallableOrStr`). This is standard, but explicitly stated for clarity.
38 | * **Constants:** `UPPERCASE_WITH_UNDERSCORES` (e.g., `RESERVED_ATTRS`). This is a project convention for module-level constants.
39 |
40 | (Functions, methods, and variables follow standard PEP 8 `lowercase_with_underscores`).
41 |
42 | ## Comments
43 |
44 | * Use comments to explain non-obvious code, complex logic, or important design decisions. Avoid comments that merely restate what the code does.
45 | * For internal code organization within files, especially in longer modules or classes, use comments like `## Section Title ##` or `### Subsection Title ###` to delineate logical blocks of code (e.g., `## Parent Methods ##` as seen in `src/pythonjsonlogger/core.py`). This is distinct from Markdown headings used in this document.
46 |
47 | ## Docstrings
48 |
49 | * All public modules, classes, functions, and methods **must** have docstrings.
50 | * We use `mkdocstrings` for generating API documentation, which defaults to the **Google Python Style Guide** for docstrings. Please adhere to this style. You can find the guide [here](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings).
51 | * Docstrings should clearly explain the purpose, arguments, return values, and any exceptions raised.
52 | * **Project Convention:** Use the following markers to indicate changes over time:
53 | * `*New in version_number*`: For features added in a specific version.
54 | * `*Changed in version_number*`: For changes made in a specific version.
55 | * `*Deprecated in version_number*`: For features deprecated in a specific version.
56 |
57 | Example:
58 | ```python
59 | def my_function(param1: str, param2: int) -> bool:
60 | """Does something interesting.
61 |
62 | Args:
63 | param1: The first parameter, a string.
64 | param2: The second parameter, an integer.
65 |
66 | Returns:
67 | True if successful, False otherwise.
68 |
69 | Raises:
70 | ValueError: If param2 is negative.
71 |
72 | *New in 3.1*
73 | """
74 | # ... function logic ...
75 | return True # See 'Return Statements'
76 | ```
77 |
78 | ## Type Hinting
79 |
80 | * All new code **must** include type hints for function arguments, return types, and variables where appropriate, as per PEP 484.
81 | * Use standard types from the `typing` module.
82 | * **Project Convention:** For Python versions older than 3.10, use `typing_extensions.TypeAlias` for creating type aliases. For Python 3.10+, use `typing.TypeAlias` (e.g., `OptionalCallableOrStr: TypeAlias = ...`).
83 |
84 | ## Return Statements
85 |
86 | * **Project Convention:** All functions and methods **must** have an explicit `return` statement.
87 | * If a function does not logically return a value, it should end with `return None` or simply `return`. This makes the intent clear and consistent across the codebase.
88 |
89 | Example:
90 | ```python
91 | def process_data(data: dict) -> None:
92 | """Processes the given data."""
93 | # ... processing logic ...
94 | print(data)
95 | return # or return None
96 | ```
97 |
98 | ## Class Structure
99 |
100 | * Group methods logically within a class (e.g., initialization, public, protected/private, special methods).
101 | * The use of internal code comments like `## Parent Methods ##` (as seen in `src/pythonjsonlogger/core.py`) is encouraged for readability in complex classes.
102 |
103 | ## Project-Specific Code Patterns and Idioms
104 |
105 | Familiarize yourself with these patterns commonly used in this project:
106 |
107 | * **Version-Specific Logic:** Using `sys.version_info` for compatibility:
108 | ```python
109 | if sys.version_info >= (3, 10):
110 | # Python 3.10+ specific code
111 | else:
112 | # Code for older versions
113 | ```
114 | * **Type Aliases for Clarity:** As mentioned in Type Hinting, using `TypeAlias` for complex type combinations improves readability.
115 | * **Custom Exceptions:** Defining custom exception classes for application-specific error conditions (e.g., `MissingPackageError` in `src/pythonjsonlogger/exception.py`).
116 | * **Helper/Utility Functions:** Encapsulating reusable logic in utility modules (e.g., functions in `src/pythonjsonlogger/utils.py`).
117 | * **Conditional Imports for Optional Dependencies:** The pattern in `src/pythonjsonlogger/__init__.py` for checking optional dependencies like `orjson` and `msgspec` using `package_is_available` from `utils.py`.
118 |
119 | ## Testing
120 |
121 | This project uses `pytest` for testing. Adherence to good testing practices is crucial.
122 |
123 | * **Test Location:** Tests are located in the `tests/` directory.
124 | * **Test Naming:** Test files `test_*.py`; test functions `test_*`.
125 | * **Fixtures:** Utilize `pytest` fixtures (`@pytest.fixture`) for setup.
126 | * **Project Pattern:** The `LoggingEnvironment` dataclass and `env` fixture in `tests/test_formatters.py` is a key pattern for testing logger behavior. Adapt this for similar scenarios.
127 | * **Parametrization:** Use `@pytest.mark.parametrize` extensively to cover multiple scenarios efficiently.
128 | * **Clarity and Focus:** Each test should be focused and its name descriptive.
129 | * **Assertions:** Use clear, specific `pytest` assertions.
130 |
131 | By following these guidelines, we can ensure that `python-json-logger` remains a high-quality, maintainable, and developer-friendly library.
132 |
--------------------------------------------------------------------------------
/docs/cookbook.md:
--------------------------------------------------------------------------------
1 | # Cookbook
2 |
3 | Recipes for common tasks.
4 |
5 | ## Include all fields
6 |
7 | By default Python JSON Logger will not include fields [defined in the standard library](https://docs.python.org/3/library/logging.html#logrecord-attributes) unless they are included in the format. Manually including all these fields is tedious and Python version specific. Instead of adding them as explicit fields, we can add them implicitly by ensuring they are not in the `reserved_attrs` argument of the formatter.
8 |
9 | ```python
10 | all_fields_formatter = JsonFormatter(reserved_attrs=[])
11 | ```
12 |
13 | ## Custom Styles
14 |
15 | It is possible to support custom `style`s by setting `validate=False` and overriding the `parse` method.
16 |
17 | For example:
18 |
19 | ```python
20 | class CommaSupport(JsonFormatter):
21 | def parse(self) -> list[str]:
22 | if isinstance(self._style, str) and self._style == ",":
23 | return self._fmt.split(",")
24 | return super().parse()
25 |
26 | formatter = CommaSupport("message,asctime", style=",", validate=False)
27 | ```
28 |
29 | ## Modifying the logged data
30 |
31 | You can modify the `dict` of data that will be logged by overriding the `process_log_record` method to modify fields before they are serialized to JSON.
32 |
33 | ```python
34 | class SillyFormatter(JsonFormatter):
35 |     def process_log_record(self, log_data):
36 | new_record = {k[::-1]: v for k, v in log_data.items()}
37 | return new_record
38 | ```
39 |
40 |
41 | ## Request / Trace IDs
42 |
43 | There are many ways to add consistent request IDs to your logging. The exact method will depend on your needs and application.
44 |
45 | ```python
46 | ## Common Setup
47 | ## -----------------------------------------------------------------------------
48 | import logging
49 | import uuid
50 | from pythonjsonlogger.json import JsonFormatter
51 |
52 | logger = logging.getLogger("test")
53 | logger.setLevel(logging.INFO)
54 | handler = logging.StreamHandler()
55 | logger.addHandler(handler)
56 | ```
57 |
58 | One method would be to inject the request ID into each log call using the `extra` argument.
59 | ```python
60 | ## Solution 1
61 | ## -----------------------------------------------------------------------------
62 | formatter = JsonFormatter()
63 | handler.setFormatter(formatter)
64 |
65 | def main_1():
66 | print("========== MAIN 1 ==========")
67 | for i in range(3):
68 | request_id = uuid.uuid4()
69 | logger.info("loop start", extra={"request_id": request_id})
70 | logger.info(f"loop {i}", extra={"request_id": request_id})
71 | logger.info("loop end", extra={"request_id": request_id})
72 | return
73 |
74 | main_1()
75 | ```
76 |
77 | Another method would be to use a filter to modify the `LogRecord` attributes. This would also allow us to use it in any other standard logging machinery. For this example I've manually set a `REQUEST_ID` global and some helper functions, but you might already have stuff available to you; for example, if you're using a web-framework with baked in request IDs.
78 |
79 | This is based on the [logging cookbook filter recipe](https://docs.python.org/3/howto/logging-cookbook.html#using-filters-to-impart-contextual-information).
80 |
81 | ```python
82 | ## Solution 2
83 | ## -----------------------------------------------------------------------------
84 | REQUEST_ID: str | None = None
85 |
86 | def get_request_id() -> str:
87 | return REQUEST_ID
88 |
89 | def generate_request_id():
90 | global REQUEST_ID
91 | REQUEST_ID = str(uuid.uuid4())
92 |
93 | class RequestIdFilter(logging.Filter):
94 | def filter(self, record):
95 | record.request_id = get_request_id() # Add request_id to the LogRecord
96 | return True
97 |
98 | request_id_filter = RequestIdFilter()
99 | logger.addFilter(request_id_filter)
100 |
101 | def main_2():
102 | print("========== MAIN 2 ==========")
103 | for i in range(3):
104 | generate_request_id()
105 | logger.info("loop start")
106 | logger.info(f"loop {i}")
107 | logger.info("loop end")
108 | return
109 |
110 | main_2()
111 |
112 | logger.removeFilter(request_id_filter)
113 | ```
114 |
115 | Another method would be to create a custom formatter class and override the `process_log_record` method. This allows us to inject fields into the record before we log it without modifying the original `LogRecord`.
116 |
117 | ```python
118 | ## Solution 3
119 | ## -----------------------------------------------------------------------------
120 | # Reuse REQUEST_ID stuff from solution 2
121 | class MyFormatter(JsonFormatter):
122 | def process_log_record(self, log_data):
123 | log_data["request_id"] = get_request_id()
124 | return log_data
125 |
126 | handler.setFormatter(MyFormatter())
127 |
128 | def main_3():
129 | print("========== MAIN 3 ==========")
130 | for i in range(3):
131 | generate_request_id()
132 | logger.info("loop start")
133 | logger.info(f"loop {i}")
134 | logger.info("loop end")
135 | return
136 |
137 | main_3()
138 | ```
139 |
140 | ## Using `fileConfig`
141 |
142 | To use the module with a yaml config file using the [`fileConfig` function](https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig), use the class `pythonjsonlogger.json.JsonFormatter`. Here is a sample config file:
143 |
144 | ```yaml title="example_config.yaml"
145 | version: 1
146 | disable_existing_loggers: False
147 | formatters:
148 | default:
149 | "()": pythonjsonlogger.json.JsonFormatter
150 | format: "%(asctime)s %(levelname)s %(name)s %(module)s %(funcName)s %(lineno)s %(message)s"
151 | json_default: ext://logging_config.my_json_default
152 | rename_fields:
153 | "asctime": "timestamp"
154 | "levelname": "status"
155 | static_fields:
156 | "service": ext://logging_config.PROJECT_NAME
157 | "env": ext://logging_config.ENVIRONMENT
158 | "version": ext://logging_config.PROJECT_VERSION
159 | "app_log": "true"
160 | handlers:
161 | default:
162 | formatter: default
163 | class: logging.StreamHandler
164 | stream: ext://sys.stderr
165 | access:
166 | formatter: default
167 | class: logging.StreamHandler
168 | stream: ext://sys.stdout
169 | loggers:
170 | uvicorn.error:
171 | level: INFO
172 | handlers:
173 | - default
174 | propagate: no
175 | uvicorn.access:
176 | level: INFO
177 | handlers:
178 | - access
179 | propagate: no
180 | ```
181 |
182 | You'll notice that we are using `ext://...` for `json_default` and `static_fields`. This will load data from other modules such as the one below.
183 |
184 | ```python title="logging_config.py"
185 | import importlib.metadata
186 | import os
187 | from typing import Any
187 |
188 |
189 | class Dummy:
190 | pass
191 |
192 |
193 | def my_json_default(obj: Any) -> Any:
194 | if isinstance(obj, Dummy):
195 | return "DUMMY"
196 | return obj
197 |
198 |
199 | def get_version_metadata():
200 | # https://stackoverflow.com/a/78082532
201 | version = importlib.metadata.version(PROJECT_NAME)
202 | return version
203 |
204 |
205 | PROJECT_NAME = 'test-api'
206 | PROJECT_VERSION = get_version_metadata()
207 | ENVIRONMENT = os.environ.get('ENVIRONMENT', 'dev')
208 | ```
209 |
210 | ## Logging Expensive to Compute Data
211 |
212 | By the nature of Python's logging library, the JSON formatters will only ever run in handlers which are enabled for the given log level. This saves the performance hit of constructing JSON that is never used - but what about the data we pass into the logger? There are two options available to us: using if statements to avoid the call altogether, or using lazy string evaluation libraries.
213 |
214 | !!! note
215 | The below strategies will work for data passed in the `msg` and `extra` arguments.
216 |
217 | To avoid the logging calls we use `logger.isEnabledFor` to ensure that we only start constructing our log messages if the logger is enabled:
218 |
219 | ```python
220 | import logging
221 | import time
222 |
223 | from pythonjsonlogger.json import JsonFormatter
224 |
225 | def expensive_to_compute():
226 | time.sleep(5)
227 | return "world"
228 |
229 | ## Setup
230 | ## -------------------------------------
231 | logger = logging.getLogger()
232 | handler = logging.StreamHandler()
233 | formatter = JsonFormatter()
234 | handler.setFormatter(formatter)
235 | logger.addHandler(handler)
236 | logger.setLevel(logging.INFO)
237 |
238 | ## Log Using isEnabledFor
239 | ## -------------------------------------
240 | start = time.time()
241 | if logger.isEnabledFor(logging.INFO):
242 | logger.info(
243 | {
244 | "data": "hello {}".format(expensive_to_compute())
245 | }
246 | )
247 | print(f"Logging INFO using isEnabledFor took: {int(time.time() - start)}s")
248 |
249 | start = time.time()
250 | if logger.isEnabledFor(logging.DEBUG):
251 | logger.debug(
252 | {
253 | "data": "hello {}".format(expensive_to_compute())
254 | }
255 | )
256 | print(f"Logging DEBUG using isEnabledFor took: {int(time.time() - start)}s")
257 | ```
258 |
259 | For lazy string evaluation we can take advantage of the fact that the default JSON encoders included in this package will call `str` on unknown objects. We can use this to build our own lazy string evaluators, or we can use an existing external package. Pre-existing solutions include: [`lazy-string`](https://pypi.org/project/lazy-string/)'s `LazyString` or [`stringlike`](https://pypi.org/project/stringlike/)'s `CachedLazyString`.
260 |
261 | ```python
262 | ## Log Using lazy-string
263 | ## -------------------------------------
264 | from lazy_string import LazyString as L
265 |
266 | start = time.time()
267 | logger.info(
268 | {
269 | "data": L("hello {}".format, L(expensive_to_compute))
270 | }
271 | )
272 | print(f"Logging INFO using LazyString took: {int(time.time() - start)}s")
273 |
274 | start = time.time()
275 | logger.debug(
276 | {
277 | "data": L("hello {}".format, L(expensive_to_compute))
278 | }
279 | )
280 | print(f"Logging DEBUG using LazyString took: {int(time.time() - start)}s")
281 | ```
282 |
--------------------------------------------------------------------------------
/docs/changelog.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 | All notable changes to this project will be documented in this file.
3 |
4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6 |
7 | ## [4.1.0](https://github.com/nhairs/python-json-logger/compare/v4.0.0...v4.1.0) - UNRELEASED
8 |
9 | ### Added
10 | - Add support for Python 3.14, PyPy 3.11
11 |
12 | ## [4.0.0](https://github.com/nhairs/python-json-logger/compare/v3.3.3...v4.0.0) - 2025-10-06
13 |
14 | ### Added
15 | - Support `DictConfigurator` prefixes for `rename_fields` and `static_fields`. [#45](https://github.com/nhairs/python-json-logger/pull/45)
16 | - Allows using values like `ext://sys.stderr` in `fileConfig`/`dictConfig` value fields.
17 | - Support comma separated lists for Formatter `fmt` (`style=","`) e.g. `"asctime,message,levelname"` [#15](https://github.com/nhairs/python-json-logger/issues/15)
18 | - Note that this style is specific to `python-json-logger` and thus care should be taken not to pass this format to other logging Formatter implementations.
19 | - Supports sequences of strings (e.g. lists and tuples) of field names for Formatter `fmt`. [#16](https://github.com/nhairs/python-json-logger/issues/16)
20 |
21 | ### Changed
22 | - Rename `pythonjsonlogger.core.LogRecord` and `log_record` arguments to avoid confusion / overlapping with `logging.LogRecord`. [#38](https://github.com/nhairs/python-json-logger/issues/38)
23 | - Affects arguments to `pythonjsonlogger.core.BaseJsonFormatter` (and any child classes).
24 | - `serialize_log_record`
25 | - `add_fields`
26 | - `jsonify_log_record`
27 | - `process_log_record`
28 | - Note: functions referring to `log_record` have **not** had their function name changed.
29 |
30 | ### Removed
31 | - Remove support for providing strings instead of objects when instantiating formatters. Instead use the `DictConfigurator` `ext://` prefix format when using `fileConfig`/`dictConfig`. [#47](https://github.com/nhairs/python-json-logger/issues/47)
32 | - Affects `pythonjsonlogger.json.JsonFormatter`: `json_default`, `json_encoder`, `json_serializer`.
33 | - Affects `pythonjsonlogger.orjson.OrjsonFormatter`: `json_default`.
34 | - Affects `pythonjsonlogger.msgspec.MsgspecFormatter`: `json_default`.
35 |
36 | Thanks @rubensa
37 |
38 | ## [3.3.0](https://github.com/nhairs/python-json-logger/compare/v3.2.1...v3.3.0) - 2025-03-06
39 |
40 | ### Added
41 | - `exc_info_as_array` and `stack_info_as_array` options are added to `pythonjsonlogger.core.BaseJsonFormatter` allowing both to be encoded as list of lines instead of a single multi-line string. [#35](https://github.com/nhairs/python-json-logger/issues/35)
42 |
43 | ### Security
44 | - Remove `msgspec-python313-pre` from `dev` dependencies preventing potential RCE. Details: [GHSA-wmxh-pxcx-9w24](https://github.com/nhairs/python-json-logger/security/advisories/GHSA-wmxh-pxcx-9w24#advisory-comment-121307)
45 |
46 | Thanks @1hakusai1 and @omnigodz
47 |
48 | ## [3.2.1](https://github.com/nhairs/python-json-logger/compare/v3.2.0...v3.2.1) - 2024-12-16
49 |
50 | ### Fixed
51 | - Import error on `import pythonjsonlogger.jsonlogger` [#29](https://github.com/nhairs/python-json-logger/issues/29)
52 |
53 |
54 | ## [3.2.0](https://github.com/nhairs/python-json-logger/compare/v3.1.0...v3.2.0) - 2024-12-11
55 |
56 | ### Changed
57 | - `pythonjsonlogger.[ORJSON,MSGSPEC]_AVAILABLE` no longer imports the respective package when determining availability.
58 | - `pythonjsonlogger.[orjson,msgspec]` now throws a `pythonjsonlogger.exception.MissingPackageError` when required libraries are not available. These contain more information about what is missing whilst still being an `ImportError`.
59 | - `defaults` parameter is no longer ignored and now conforms to the standard library. Setting a defaults dictionary will add the specified keys if those keys do not exist in a record or weren't passed by the `extra` parameter when logging a message.
60 | - `typing_extensions` is only installed on Python version < 3.10.
61 | - Support Python 3.13
62 | - `msgspec` has only been tested against pre-release versions.
63 |
64 | Thanks @cjwatson and @bharel
65 |
66 | ## [3.1.0](https://github.com/nhairs/python-json-logger/compare/v3.0.1...v3.1.0) - 2023-05-28
67 |
68 | This splits common functionality out to allow supporting other JSON encoders. Although this is a large refactor, backwards compatibility has been maintained.
69 |
70 | ### Added
71 | - `pythonjsonlogger.core` - more details below.
72 | - `pythonjsonlogger.defaults` module that provides many functions for handling unsupported types.
73 | - Orjson encoder support via `pythonjsonlogger.orjson.OrjsonFormatter` with the following additions:
74 | - bytes are URL safe base64 encoded.
75 | - Exceptions are "pretty printed" using the exception name and message e.g. `"ValueError: bad value passed"`
76 | - Enum values use their value, Enum classes now return all values as a list.
77 | - Tracebacks are supported
78 |   - Classes (aka types) are supported
79 | - Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
80 | - MsgSpec encoder support via `pythonjsonlogger.msgspec.MsgspecFormatter` with the following additions:
81 | - Exceptions are "pretty printed" using the exception name and message e.g. `"ValueError: bad value passed"`
82 | - Enum classes now return all values as a list.
83 | - Tracebacks are supported
84 |   - Classes (aka types) are supported
85 | - Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
86 |   - Note: msgspec only supports enum values of type `int` or `str` [jcrist/msgspec#680](https://github.com/jcrist/msgspec/issues/680)
87 |
88 | ### Changed
89 | - `pythonjsonlogger.jsonlogger` has been moved to `pythonjsonlogger.json` with core functionality moved to `pythonjsonlogger.core`.
90 | - `pythonjsonlogger.core.BaseJsonFormatter` properly supports all `logging.Formatter` arguments:
91 | - `fmt` is unchanged.
92 | - `datefmt` is unchanged.
93 | - `style` can now support non-standard arguments by setting `validate` to `False`
94 | - `validate` allows non-standard `style` arguments or prevents calling `validate` on standard `style` arguments.
95 | - `default` is ignored.
96 | - `pythonjsonlogger.json.JsonFormatter` default encodings changed:
97 | - bytes are URL safe base64 encoded.
98 | - Exception formatting detected using `BaseException` instead of `Exception`. Now "pretty prints" the exception using the exception name and message e.g. `"ValueError: bad value passed"`
99 | - Dataclasses are now supported
100 | - Enum values now use their value, Enum classes now return all values as a list.
101 | - Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
102 | - Renaming fields now preserves order ([#7](https://github.com/nhairs/python-json-logger/issues/7)) and ignores missing fields ([#6](https://github.com/nhairs/python-json-logger/issues/6)).
103 | - Documentation
104 | - Generated documentation using `mkdocs` is stored in `docs/`
105 |   - Documentation within `README.md` has been moved to `docs/index.md` and `docs/quickstart.md`.
106 | - `CHANGELOG.md` has been moved to `docs/change-log.md`
107 | - `SECURITY.md` has been moved and replaced with a symbolic link to `docs/security.md`.
108 |
109 | ### Deprecated
110 | - `pythonjsonlogger.jsonlogger` is now `pythonjsonlogger.json`
111 | - `pythonjsonlogger.jsonlogger.RESERVED_ATTRS` is now `pythonjsonlogger.core.RESERVED_ATTRS`.
112 | - `pythonjsonlogger.jsonlogger.merge_record_extra` is now `pythonjsonlogger.core.merge_record_extra`.
113 |
114 | ### Removed
115 | - Python 3.7 support dropped
116 | - `pythonjsonlogger.jsonlogger.JsonFormatter._str_to_fn` replaced with `pythonjsonlogger.core.str_to_object`.
117 |
118 | ## [3.0.1](https://github.com/nhairs/python-json-logger/compare/v3.0.0...v3.0.1) - 2023-04-01
119 |
120 | ### Fixes
121 |
122 | - Fix spelling of parameter `json_serialiser` -> `json_serializer` ([#8](https://github.com/nhairs/python-json-logger/issues/8)) - @juliangilbey
123 |
124 | ## [3.0.0](https://github.com/nhairs/python-json-logger/compare/v2.0.7...v3.0.0) - 2024-03-25
125 |
126 | Note: using new major version to separate changes from this fork and the original (upstream). See [#1](https://github.com/nhairs/python-json-logger/issues/1) for details.
127 |
128 | ### Changes
129 | - Update supported Python versions - @nhairs
130 | - Drop 3.6
131 | - The following versions are supported and tested:
132 | - CPython 3.7-3.12 (ubuntu, windows, mac)
133 |     - PyPy 3.7-3.10 (ubuntu, windows, mac)
134 | - `RESERVED_ATTRS` is now a list and version dependent
135 | - Fix `JsonFormatter.__init__` return type (`None`) - @nhairs
136 | - Moved to `pyproject.toml` - @nhairs
137 | - Update linting and testing - @nhairs
138 | - Split lint and test steps in GHA
139 | - Use validate-pyproject, black, pylint, mypy
140 |
141 | ## [2.0.7](https://github.com/nhairs/python-json-logger/compare/v2.0.6...v2.0.7) - 2023-02-21
142 | ### Changed
143 | - Fix inclusion of py.typed in pip packages - @sth
144 | - Added pytest support with test file rename. Migrated to assertEqual
145 |
146 | ## [2.0.6](https://github.com/nhairs/python-json-logger/compare/v2.0.5...v2.0.6) - 2023-02-14
147 | ### Changed
148 | - Parameter `rename_fields` in merge_record_extra is now optional - @afallou
149 |
150 | ## [2.0.5](https://github.com/nhairs/python-json-logger/compare/v2.0.4...v2.0.5) - 2023-02-12
151 | ### Added
152 | - Allow reserved attrs to be renamed - @henkhogan
153 | - Support added for Python 3.11
154 | - Now verifying builds in Pypy 3.9 as well
155 | - Type annotations are now in the package - @louis-jaris
156 | ### Changed
157 | - Fix rename_fields for exc_info - @guilhermeferrari
158 | - Cleaned up test file for PEP8 - @lopagela
159 | - Cleaned up old Python 2 artifacts - @louis-jaris
160 | - Dropped Python 3.5 support - @idomozes
161 | - Moved type check via tox into 3.11 run only
162 | - Added test run in Python3.6 (will keep for a little while longer, but it's EOL so upgrade)
163 |
164 | ## [2.0.4](https://github.com/nhairs/python-json-logger/compare/v2.0.3...v2.0.4) - 2022-07-11
165 | ### Changed
166 | - Fix too strict regex for percentage style logging - @aberres
167 |
168 | ## [2.0.3](https://github.com/nhairs/python-json-logger/compare/v2.0.2...v2.0.3) - 2022-07-08
169 | ### Added
170 | - Add PEP 561 marker/basic mypy configuration. - @bringhurst
171 | - Workaround logging.LogRecord.msg type of string. - @bringhurst
172 | ### Changed
173 | - Changed a link archive of the reference page in case it's down. - @ahonnecke
174 | - Removed unnecessary try-except around OrderedDict usage - @sozofaan
175 | - Update documentation link to json module + use https - @deronnax
176 | - Dropped 3.5 support. - @bringhurst
177 |
178 | ## [2.0.2](https://github.com/nhairs/python-json-logger/compare/v2.0.1...v2.0.2) - 2021-07-27
179 | ### Added
180 | - Officially supporting 3.9 - @felixonmars.
181 | - You can now add static fields to log objects - @cosimomeli.
182 | ### Changed
183 | - Dropped 3.4 support.
184 | - Dropped Travis CI for Github Actions.
185 | - Wheel should build for python 3 instead of just 3.4 now.
186 |
187 | ## [2.0.1](https://github.com/nhairs/python-json-logger/compare/v2.0.0...v2.0.1) - 2020-10-12
188 | ### Added
189 | - Support PyPI long description - @ereli-cb
190 | ### Changed
191 | - You can now rename output fields - @schlitzered
192 |
193 | ## [2.0.0](https://github.com/nhairs/python-json-logger/compare/v0.1.11...v2.0.0) - 2020-09-26
194 | ### Added
195 | - New Changelog
196 | - Added timezone support to timestamps - @lalten
197 | - Refactored log record to function - @georgysavva
198 | - Add python 3.8 support - @tommilligan
199 | ### Removed
200 | - Support for Python 2.7
201 | - Debian directory
202 |
203 | ## [0.1.11](https://github.com/nhairs/python-json-logger/compare/v0.1.10...v0.1.11) - 2019-03-29
204 | ### Added
205 | - Support for Python 3.7
206 | ### Changed
207 | - 'stack_info' flag in logging calls is now respected in JsonFormatter by [@ghShu](https://github.com/ghShu)
208 |
--------------------------------------------------------------------------------
/src/pythonjsonlogger/core.py:
--------------------------------------------------------------------------------
1 | """Core functionality shared by all JSON loggers"""
2 |
3 | ### IMPORTS
4 | ### ============================================================================
5 | ## Future
6 | from __future__ import annotations
7 |
8 | ## Standard Library
9 | from datetime import datetime, timezone
10 | import logging
11 | import re
12 | import sys
13 | from typing import Optional, Union, List, Dict, Container, Any, Sequence
14 |
15 | if sys.version_info >= (3, 10):
16 | from typing import TypeAlias
17 | else:
18 | from typing_extensions import TypeAlias
19 |
20 | ## Installed
21 |
22 | ## Application
23 |
24 |
25 | ### CONSTANTS
26 | ### ============================================================================
RESERVED_ATTRS: List[str] = [
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "module",
    "msecs",
    "message",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "stack_info",
    "thread",
    "threadName",
]
"""Default reserved attributes.

These come from the [default attributes of `LogRecord` objects](http://docs.python.org/library/logging.html#logrecord-attributes).

Note:
    Although considered a constant, this list is dependent on the Python version due to
    different `LogRecord` objects having different attributes in different Python versions.

*Changed in 3.0*: `RESERVED_ATTRS` is now `list[str]` instead of `tuple[str, ...]`.
"""

if sys.version_info >= (3, 12):
    # taskName added in python 3.12
    RESERVED_ATTRS.append("taskName")
    RESERVED_ATTRS.sort()


# Pre-compiled regexes used to extract field names from a format string,
# one per supported logging style.
STYLE_STRING_TEMPLATE_REGEX = re.compile(r"\$\{(.+?)\}", re.IGNORECASE)  # $ style
STYLE_STRING_FORMAT_REGEX = re.compile(r"\{(.+?)\}", re.IGNORECASE)  # { style
STYLE_PERCENT_REGEX = re.compile(r"%\((.+?)\)", re.IGNORECASE)  # % style

## Type Aliases
## -----------------------------------------------------------------------------
LogData: TypeAlias = Dict[str, Any]
"""Type alias

*Changed in 4.0*: renamed from `LogRecord` to `LogData`
"""
79 |
80 |
81 | ### FUNCTIONS
82 | ### ============================================================================
def merge_record_extra(
    record: logging.LogRecord,
    target: Dict,
    reserved: Container[str],
    rename_fields: Optional[Dict[str, str]] = None,
) -> Dict:
    """Copy non-reserved, non-private attributes of a LogRecord into a dict.

    Args:
        record: logging.LogRecord
        target: dict to update
        reserved: dict or list with reserved keys to skip
        rename_fields: an optional dict, used to rename field names in the output.
            e.g. Rename `levelname` to `log.level`: `{'levelname': 'log.level'}`

    *Changed in 3.1*: `reserved` is now `Container[str]`.
    """
    renames = rename_fields if rename_fields is not None else {}
    for attr_name, attr_value in record.__dict__.items():
        if attr_name in reserved:
            continue
        # Attribute keys are not guaranteed to be strings (numeric keys are
        # allowed), hence the hasattr guard before the underscore check.
        if hasattr(attr_name, "startswith") and attr_name.startswith("_"):
            continue
        target[renames.get(attr_name, attr_name)] = attr_value
    return target
108 |
109 |
110 | ### CLASSES
111 | ### ============================================================================
112 | class BaseJsonFormatter(logging.Formatter):
113 | """Base class for all formatters
114 |
115 | Must not be used directly.
116 |
117 | *New in 3.1*
118 |
119 | *Changed in 3.2*: `defaults` argument is no longer ignored.
120 |
121 | *Added in 3.3*: `exc_info_as_array` and `stack_info_as_array` options are added.
122 | """
123 |
124 | _style: Union[logging.PercentStyle, str] # type: ignore[assignment]
125 |
126 | ## Parent Methods
127 | ## -------------------------------------------------------------------------
128 | # pylint: disable=too-many-arguments,super-init-not-called
129 | def __init__(
130 | self,
131 | fmt: Optional[Union[str, Sequence[str]]] = None,
132 | datefmt: Optional[str] = None,
133 | style: str = "%",
134 | validate: bool = True,
135 | *,
136 | prefix: str = "",
137 | rename_fields: Optional[Dict[str, str]] = None,
138 | rename_fields_keep_missing: bool = False,
139 | static_fields: Optional[Dict[str, Any]] = None,
140 | reserved_attrs: Optional[Sequence[str]] = None,
141 | timestamp: Union[bool, str] = False,
142 | defaults: Optional[Dict[str, Any]] = None,
143 | exc_info_as_array: bool = False,
144 | stack_info_as_array: bool = False,
145 | ) -> None:
146 | """
147 | Args:
148 | fmt: String format or `Sequence` of field names of fields to log.
149 | datefmt: format to use when formatting `asctime` field
150 | style: how to extract log fields from `fmt`. Ignored if `fmt` is a `Sequence[str]`.
151 | validate: validate `fmt` against style, if implementing a custom `style` you
152 | must set this to `False`. Ignored if `fmt` is a `Sequence[str]`.
153 | defaults: a dictionary containing default fields that are added before all other fields and
154 | may be overridden. The supplied fields are still subject to `rename_fields`.
155 | prefix: an optional string prefix added at the beginning of
156 | the formatted string
157 | rename_fields: an optional dict, used to rename field names in the output.
158 | Rename `message` to `@message`: `{'message': '@message'}`
159 | rename_fields_keep_missing: When renaming fields, include missing fields in the output.
160 | static_fields: an optional dict, used to add fields with static values to all logs
161 | reserved_attrs: an optional list of fields that will be skipped when
162 | outputting json log record. Defaults to [all log record attributes][pythonjsonlogger.core.RESERVED_ATTRS].
163 | timestamp: an optional string/boolean field to add a timestamp when
164 | outputting the json log record. If string is passed, timestamp will be added
165 | to log record using string as key. If True boolean is passed, timestamp key
166 | will be "timestamp". Defaults to False/off.
167 | exc_info_as_array: break the exc_info into a list of lines based on line breaks.
168 | stack_info_as_array: break the stack_info into a list of lines based on line breaks.
169 |
170 | *Changed in 3.1*:
171 |
172 | - you can now use custom values for style by setting validate to `False`.
173 | The value is stored in `self._style` as a string. The `parse` method will need to be
174 | overridden in order to support the new style.
175 | - Renaming fields now preserves the order that fields were added in and avoids adding
176 | missing fields. The original behaviour, missing fields have a value of `None`, is still
177 | available by setting `rename_fields_keep_missing` to `True`.
178 |
179 | *Added in 4.0*:
180 |
181 | - `fmt` now supports comma seperated lists (`style=","`). Note that this style is specific
182 | to `python-json-logger` and thus care should be taken to not to pass this format to other
183 | logging Formatter implementations.
184 | - `fmt` now supports sequences of strings (e.g. lists and tuples) of field names.
185 | """
186 | ## logging.Formatter compatibility
187 | ## ---------------------------------------------------------------------
188 | # Note: validate added in python 3.8, defaults added in 3.10
189 | if fmt is None or isinstance(fmt, str):
190 | if style in logging._STYLES:
191 | _style = logging._STYLES[style][0](fmt) # type: ignore[operator]
192 | if validate:
193 | _style.validate()
194 | self._style = _style
195 | self._fmt = _style._fmt
196 |
197 | elif style == "," or not validate:
198 | self._style = style
199 | self._fmt = fmt
200 | # TODO: Validate comma format
201 |
202 | else:
203 | raise ValueError("Style must be one of: '%{$,'")
204 |
205 | self._required_fields = self.parse()
206 |
207 | # Note: we do this check second as string is still a Sequence[str]
208 | elif isinstance(fmt, Sequence):
209 | self._style = "__sequence__"
210 | self._fmt = str(fmt)
211 | self._required_fields = list(fmt)
212 |
213 | self.datefmt = datefmt
214 |
215 | ## JSON Logging specific
216 | ## ---------------------------------------------------------------------
217 | self.prefix = prefix
218 |
219 | # We recreate the dict in rename_fields and static_fields to support internal/external
220 | # references which require getting the item to do the conversion.
221 | # For more details see: https://github.com/nhairs/python-json-logger/pull/45
222 | self.rename_fields = (
223 | {key: rename_fields[key] for key in rename_fields} if rename_fields is not None else {}
224 | )
225 | self.static_fields = (
226 | {key: static_fields[key] for key in static_fields} if static_fields is not None else {}
227 | )
228 |
229 | self.rename_fields_keep_missing = rename_fields_keep_missing
230 | self.reserved_attrs = set(reserved_attrs if reserved_attrs is not None else RESERVED_ATTRS)
231 | self.timestamp = timestamp
232 |
233 | self._skip_fields = set(self._required_fields)
234 | self._skip_fields.update(self.reserved_attrs)
235 | self.defaults = defaults if defaults is not None else {}
236 | self.exc_info_as_array = exc_info_as_array
237 | self.stack_info_as_array = stack_info_as_array
238 | return
239 |
240 | def format(self, record: logging.LogRecord) -> str:
241 | """Formats a log record and serializes to json
242 |
243 | Args:
244 | record: the record to format
245 | """
246 | message_dict: Dict[str, Any] = {}
247 | # TODO: logging.LogRecord.msg and logging.LogRecord.message in typeshed
248 | # are always type of str. We shouldn't need to override that.
249 | if isinstance(record.msg, dict):
250 | message_dict = record.msg
251 | record.message = ""
252 | else:
253 | record.message = record.getMessage()
254 |
255 | # only format time if needed
256 | if "asctime" in self._required_fields:
257 | record.asctime = self.formatTime(record, self.datefmt)
258 |
259 | # Display formatted exception, but allow overriding it in the
260 | # user-supplied dict.
261 | if record.exc_info and not message_dict.get("exc_info"):
262 | message_dict["exc_info"] = self.formatException(record.exc_info)
263 | if not message_dict.get("exc_info") and record.exc_text:
264 | message_dict["exc_info"] = record.exc_text
265 |
266 | # Display formatted record of stack frames
267 | # default format is a string returned from :func:`traceback.print_stack`
268 | if record.stack_info and not message_dict.get("stack_info"):
269 | message_dict["stack_info"] = self.formatStack(record.stack_info)
270 |
271 | log_data: LogData = {}
272 | self.add_fields(log_data, record, message_dict)
273 | log_data = self.process_log_record(log_data)
274 |
275 | return self.serialize_log_record(log_data)
276 |
277 | ## JSON Formatter Specific Methods
278 | ## -------------------------------------------------------------------------
279 | def parse(self) -> List[str]:
280 | """Parses format string looking for substitutions
281 |
282 | This method is responsible for returning a list of fields (as strings)
283 | to include in all log messages.
284 |
285 | You can support custom styles by overriding this method.
286 |
287 | Returns:
288 | list of fields to be extracted and serialized
289 | """
290 | if self._fmt is None:
291 | return []
292 |
293 | if isinstance(self._style, str):
294 | if self._style == "__sequence__":
295 | raise RuntimeError("Must not call parse when fmt is a sequence of strings")
296 |
297 | if self._style == ",":
298 | return [field.strip() for field in self._fmt.split(",") if field.strip()]
299 |
300 | raise ValueError(f"Style {self._style!r} is not supported")
301 |
302 | if isinstance(self._style, logging.StringTemplateStyle):
303 | formatter_style_pattern = STYLE_STRING_TEMPLATE_REGEX
304 |
305 | elif isinstance(self._style, logging.StrFormatStyle):
306 | formatter_style_pattern = STYLE_STRING_FORMAT_REGEX
307 |
308 | elif isinstance(self._style, logging.PercentStyle):
309 | # PercentStyle is parent class of StringTemplateStyle and StrFormatStyle
310 | # so it must be checked last.
311 | formatter_style_pattern = STYLE_PERCENT_REGEX
312 |
313 | else:
314 | raise ValueError(f"Style {self._style!r} is not supported")
315 |
316 | return formatter_style_pattern.findall(self._fmt)
317 |
318 | def serialize_log_record(self, log_data: LogData) -> str:
319 | """Returns the final representation of the data to be logged
320 |
321 | Args:
322 | log_data: the data
323 |
324 | *Changed in 4.0*: `log_record` renamed to `log_data`
325 | """
326 | return self.prefix + self.jsonify_log_record(log_data)
327 |
328 | def add_fields(
329 | self,
330 | log_data: Dict[str, Any],
331 | record: logging.LogRecord,
332 | message_dict: Dict[str, Any],
333 | ) -> None:
334 | """Extract fields from a LogRecord for logging
335 |
336 | This method can be overridden to implement custom logic for adding fields.
337 |
338 | Args:
339 | log_data: data that will be logged
340 | record: the record to extract data from
341 | message_dict: dictionary that was logged instead of a message. e.g
342 | `logger.info({"is_this_message_dict": True})`
343 |
344 | *Changed in 4.0*: `log_record` renamed to `log_data`
345 | """
346 | for field in self.defaults:
347 | log_data[self._get_rename(field)] = self.defaults[field]
348 |
349 | for field in self._required_fields:
350 | log_data[self._get_rename(field)] = record.__dict__.get(field)
351 |
352 | for data_dict in [self.static_fields, message_dict]:
353 | for key, value in data_dict.items():
354 | log_data[self._get_rename(key)] = value
355 |
356 | merge_record_extra(
357 | record,
358 | log_data,
359 | reserved=self._skip_fields,
360 | rename_fields=self.rename_fields,
361 | )
362 |
363 | if self.timestamp:
364 | key = self.timestamp if isinstance(self.timestamp, str) else "timestamp"
365 | log_data[self._get_rename(key)] = datetime.fromtimestamp(
366 | record.created, tz=timezone.utc
367 | )
368 |
369 | if self.rename_fields_keep_missing:
370 | for field in self.rename_fields.values():
371 | if field not in log_data:
372 | log_data[field] = None
373 | return
374 |
375 | def _get_rename(self, key: str) -> str:
376 | return self.rename_fields.get(key, key)
377 |
378 | # Child Methods
379 | # ..........................................................................
380 | def jsonify_log_record(self, log_data: LogData) -> str:
381 | """Convert the log data into a JSON string.
382 |
383 | Child classes MUST override this method.
384 |
385 | Args:
386 | log_data: the data to serialize
387 |
388 | *Changed in 4.0*: `log_record` renamed to `log_data`
389 | """
390 | raise NotImplementedError()
391 |
392 | def process_log_record(self, log_data: LogData) -> LogData:
393 | """Custom processing of the data to be logged.
394 |
395 | Child classes can override this method to alter the log record before it
396 | is serialized.
397 |
398 | Args:
399 | log_data: incoming data
400 |
401 | *Changed in 4.0*: `log_record` renamed to `log_data`
402 | """
403 | return log_data
404 |
405 | def formatException(self, ei) -> Union[str, list[str]]: # type: ignore
406 | """Format and return the specified exception information.
407 |
408 | If exc_info_as_array is set to True, This method returns an array of strings.
409 | """
410 | exception_info_str = super().formatException(ei)
411 | return exception_info_str.splitlines() if self.exc_info_as_array else exception_info_str
412 |
413 | def formatStack(self, stack_info) -> Union[str, list[str]]: # type: ignore
414 | """Format and return the specified stack information.
415 |
416 | If stack_info_as_array is set to True, This method returns an array of strings.
417 | """
418 | stack_info_str = super().formatStack(stack_info)
419 | return stack_info_str.splitlines() if self.stack_info_as_array else stack_info_str
420 |
--------------------------------------------------------------------------------
/pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 |
3 | # A comma-separated list of package or module names from where C extensions may
4 | # be loaded. Extensions are loading into the active Python interpreter and may
5 | # run arbitrary code.
6 | extension-pkg-whitelist=orjson
7 |
8 | # Add files or directories to the blacklist. They should be base names, not
9 | # paths.
10 | ignore=CVS
11 |
12 | # Add files or directories matching the regex patterns to the blacklist. The
13 | # regex matches against base names, not paths.
14 | ignore-patterns=
15 |
16 | # Python code to execute, usually for sys.path manipulation such as
17 | # pygtk.require().
18 | #init-hook=
19 |
20 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
21 | # number of processors available to use.
22 | jobs=0
23 |
24 | # Control the amount of potential inferred values when inferring a single
25 | # object. This can help the performance when dealing with large functions or
26 | # complex, nested conditions.
27 | limit-inference-results=100
28 |
29 | # List of plugins (as comma separated values of python module names) to load,
30 | # usually to register additional checkers.
31 | load-plugins=
32 |
33 | # Pickle collected data for later comparisons.
34 | persistent=yes
35 |
36 | # Specify a configuration file.
37 | #rcfile=
38 |
39 | # When enabled, pylint would attempt to guess common misconfiguration and emit
40 | # user-friendly hints instead of false-positive error messages.
41 | suggestion-mode=yes
42 |
43 | # Allow loading of arbitrary C extensions. Extensions are imported into the
44 | # active Python interpreter and may run arbitrary code.
45 | unsafe-load-any-extension=no
46 |
47 |
48 | [MESSAGES CONTROL]
49 |
50 | # Only show warnings with the listed confidence levels. Leave empty to show
51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
52 | confidence=
53 |
54 | # Disable the message, report, category or checker with the given id(s). You
55 | # can either give multiple identifiers separated by comma (,) or put this
56 | # option multiple times (only on the command line, not in the configuration
57 | # file where it should appear only once). You can also use "--disable=all" to
58 | # disable everything first and then reenable specific checks. For example, if
59 | # you want to run only the similarities checker, you can use "--disable=all
60 | # --enable=similarities". If you want to run only the classes checker, but have
61 | # no Warning level messages displayed, use "--disable=all --enable=classes
62 | # --disable=W".
63 | disable=raw-checker-failed,
64 | bad-inline-option,
65 | locally-disabled,
66 | file-ignored,
67 | suppressed-message,
68 | useless-suppression,
69 | deprecated-pragma,
70 | use-symbolic-message-instead,
71 | ## General Changes
72 | # Explicit is better than implicit so allow bare returns
73 | useless-return,
74 | # pylint and black sometimes disagree - we always prefer black in these
75 | # cases. Disable rules that can cause conflicts
76 | line-too-long,
77 | # Module docstrings are not required
78 | missing-module-docstring,
79 | ## Project Disables
80 | duplicate-code
81 |
82 | # Enable the message, report, category or checker with the given id(s). You can
83 | # either give multiple identifier separated by comma (,) or put this option
84 | # multiple time (only on the command line, not in the configuration file where
85 | # it should appear only once). See also the "--disable" option for examples.
86 | enable=c-extension-no-member
87 |
88 |
89 | [REPORTS]
90 |
91 | # Python expression which should return a score less than or equal to 10. You
92 | # have access to the variables 'error', 'warning', 'refactor', and 'convention'
93 | # which contain the number of messages in each category, as well as 'statement'
94 | # which is the total number of statements analyzed. This score is used by the
95 | # global evaluation report (RP0004).
96 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
97 |
98 | # Template used to display messages. This is a python new-style format string
99 | # used to format the message information. See doc for all details.
100 | #msg-template=
101 |
102 | # Set the output format. Available formats are text, parseable, colorized, json
103 | # and msvs (visual studio). You can also give a reporter class, e.g.
104 | # mypackage.mymodule.MyReporterClass.
105 | output-format=text
106 |
107 | # Tells whether to display a full report or only the messages.
108 | reports=no
109 |
110 | # Activate the evaluation score.
111 | score=yes
112 |
113 |
114 | [REFACTORING]
115 |
116 | # Maximum number of nested blocks for function / method body
117 | max-nested-blocks=5
118 |
119 | # Complete name of functions that never returns. When checking for
120 | # inconsistent-return-statements if a never returning function is called then
121 | # it will be considered as an explicit return statement and no message will be
122 | # printed.
123 | never-returning-functions=sys.exit
124 |
125 |
126 | [LOGGING]
127 |
128 | # Format style used to check logging format string. `old` means using %
129 | # formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
130 | logging-format-style=old
131 |
132 | # Logging modules to check that the string format arguments are in logging
133 | # function parameter format.
134 | logging-modules=logging
135 |
136 |
137 | [VARIABLES]
138 |
139 | # List of additional names supposed to be defined in builtins. Remember that
140 | # you should avoid defining new builtins when possible.
141 | additional-builtins=
142 |
143 | # Tells whether unused global variables should be treated as a violation.
144 | allow-global-unused-variables=yes
145 |
146 | # List of strings which can identify a callback function by name. A callback
147 | # name must start or end with one of those strings.
148 | callbacks=cb_,
149 | _cb
150 |
151 | # A regular expression matching the name of dummy variables (i.e. expected to
152 | # not be used).
153 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
154 |
155 | # Argument names that match this expression will be ignored. Default to name
156 | # with leading underscore.
157 | ignored-argument-names=_.*|^ignored_|^unused_
158 |
159 | # Tells whether we should check for unused import in __init__ files.
160 | init-import=no
161 |
162 | # List of qualified module names which can have objects that can redefine
163 | # builtins.
164 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
165 |
166 |
167 | [BASIC]
168 |
169 | # Naming style matching correct argument names.
170 | argument-naming-style=snake_case
171 |
172 | # Regular expression matching correct argument names. Overrides argument-
173 | # naming-style.
174 | #argument-rgx=
175 |
176 | # Naming style matching correct attribute names.
177 | attr-naming-style=snake_case
178 |
179 | # Regular expression matching correct attribute names. Overrides attr-naming-
180 | # style.
181 | #attr-rgx=
182 |
183 | # Bad variable names which should always be refused, separated by a comma.
184 | bad-names=foo,
185 | bar,
186 | baz,
187 | toto,
188 | tutu,
189 | tata
190 |
191 | # Naming style matching correct class attribute names.
192 | class-attribute-naming-style=any
193 |
194 | # Regular expression matching correct class attribute names. Overrides class-
195 | # attribute-naming-style.
196 | #class-attribute-rgx=
197 |
198 | # Naming style matching correct class names.
199 | class-naming-style=PascalCase
200 |
201 | # Regular expression matching correct class names. Overrides class-naming-
202 | # style.
203 | #class-rgx=
204 |
205 | # Naming style matching correct constant names.
206 | const-naming-style=UPPER_CASE
207 |
208 | # Regular expression matching correct constant names. Overrides const-naming-
209 | # style.
210 | #const-rgx=
211 |
212 | # Minimum line length for functions/classes that require docstrings, shorter
213 | # ones are exempt.
214 | docstring-min-length=-1
215 |
216 | # Naming style matching correct function names.
217 | function-naming-style=snake_case
218 |
219 | # Regular expression matching correct function names. Overrides function-
220 | # naming-style.
221 | #function-rgx=
222 |
223 | # Good variable names which should always be accepted, separated by a comma.
224 | good-names=i,
225 | j,
226 | k,
227 | ex,
228 | Run,
229 | _,
230 | e,
231 | r,
232 | id,
233 | f,
234 |
235 | # Include a hint for the correct naming format with invalid-name.
236 | include-naming-hint=no
237 |
238 | # Naming style matching correct inline iteration names.
239 | inlinevar-naming-style=any
240 |
241 | # Regular expression matching correct inline iteration names. Overrides
242 | # inlinevar-naming-style.
243 | #inlinevar-rgx=
244 |
245 | # Naming style matching correct method names.
246 | method-naming-style=snake_case
247 |
248 | # Regular expression matching correct method names. Overrides method-naming-
249 | # style.
250 | #method-rgx=
251 |
252 | # Naming style matching correct module names.
253 | module-naming-style=snake_case
254 |
255 | # Regular expression matching correct module names. Overrides module-naming-
256 | # style.
257 | #module-rgx=
258 |
259 | # Colon-delimited sets of names that determine each other's naming style when
260 | # the name regexes allow several styles.
261 | name-group=
262 |
263 | # Regular expression which should only match function or class names that do
264 | # not require a docstring.
265 | no-docstring-rgx=^_
266 |
267 | # List of decorators that produce properties, such as abc.abstractproperty. Add
268 | # to this list to register other decorators that produce valid properties.
269 | # These decorators are taken in consideration only for invalid-name.
270 | property-classes=abc.abstractproperty
271 |
272 | # Naming style matching correct variable names.
273 | variable-naming-style=snake_case
274 |
275 | # Regular expression matching correct variable names. Overrides variable-
276 | # naming-style.
277 | #variable-rgx=
278 |
279 |
280 | [TYPECHECK]
281 |
282 | # List of decorators that produce context managers, such as
283 | # contextlib.contextmanager. Add to this list to register other decorators that
284 | # produce valid context managers.
285 | contextmanager-decorators=contextlib.contextmanager
286 |
287 | # List of members which are set dynamically and missed by pylint inference
288 | # system, and so shouldn't trigger E1101 when accessed. Python regular
289 | # expressions are accepted.
290 | generated-members=
291 |
292 | # Tells whether missing members accessed in mixin class should be ignored. A
293 | # mixin class is detected if its name ends with "mixin" (case insensitive).
294 | ignore-mixin-members=yes
295 |
296 | # Tells whether to warn about missing members when the owner of the attribute
297 | # is inferred to be None.
298 | ignore-none=yes
299 |
300 | # This flag controls whether pylint should warn about no-member and similar
301 | # checks whenever an opaque object is returned when inferring. The inference
302 | # can return multiple potential results while evaluating a Python object, but
303 | # some branches might not be evaluated, which results in partial inference. In
304 | # that case, it might be useful to still emit no-member and other checks for
305 | # the rest of the inferred objects.
306 | ignore-on-opaque-inference=yes
307 |
308 | # List of class names for which member attributes should not be checked (useful
309 | # for classes with dynamically set attributes). This supports the use of
310 | # qualified names.
311 | ignored-classes=optparse.Values,thread._local,_thread._local
312 |
313 | # List of module names for which member attributes should not be checked
314 | # (useful for modules/projects where namespaces are manipulated during runtime
315 | # and thus existing member attributes cannot be deduced by static analysis). It
316 | # supports qualified module names, as well as Unix pattern matching.
317 | ignored-modules=
318 |
319 | # Show a hint with possible names when a member name was not found. The aspect
320 | # of finding the hint is based on edit distance.
321 | missing-member-hint=yes
322 |
323 | # The minimum edit distance a name should have in order to be considered a
324 | # similar match for a missing member name.
325 | missing-member-hint-distance=1
326 |
327 | # The total number of similar names that should be taken in consideration when
328 | # showing a hint for a missing member.
329 | missing-member-max-choices=1
330 |
331 | # List of decorators that change the signature of a decorated function.
332 | signature-mutators=
333 |
334 |
335 | [STRING]
336 |
337 | # This flag controls whether the implicit-str-concat-in-sequence should
338 | # generate a warning on implicit string concatenation in sequences defined over
339 | # several lines.
340 | check-str-concat-over-line-jumps=no
341 |
342 |
343 | [SIMILARITIES]
344 |
345 | # Ignore comments when computing similarities.
346 | ignore-comments=yes
347 |
348 | # Ignore docstrings when computing similarities.
349 | ignore-docstrings=yes
350 |
351 | # Ignore imports when computing similarities.
352 | ignore-imports=no
353 |
354 | # Minimum lines number of a similarity.
355 | min-similarity-lines=4
356 |
357 |
358 | [SPELLING]
359 |
360 | # Limits count of emitted suggestions for spelling mistakes.
361 | max-spelling-suggestions=4
362 |
363 | # Spelling dictionary name. Available dictionaries: none. To make it work,
364 | # install the python-enchant package.
365 | spelling-dict=
366 |
367 | # List of comma separated words that should not be checked.
368 | spelling-ignore-words=
369 |
370 | # A path to a file that contains the private dictionary; one word per line.
371 | spelling-private-dict-file=
372 |
373 | # Tells whether to store unknown words to the private dictionary (see the
374 | # --spelling-private-dict-file option) instead of raising a message.
375 | spelling-store-unknown-words=no
376 |
377 |
378 | [FORMAT]
379 |
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
# Force UNIX style new lines.
expected-line-ending-format=LF
382 |
383 | # Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
385 |
386 | # Number of spaces of indent required inside a hanging or continued line.
387 | indent-after-paren=4
388 |
389 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
390 | # tab).
391 | indent-string=' '
392 |
393 | # Maximum number of characters on a single line.
394 | max-line-length=100
395 |
396 | # Maximum number of lines in a module.
397 | max-module-lines=1000
398 |
399 | # Allow the body of a class to be on the same line as the declaration if body
400 | # contains single statement.
401 | single-line-class-stmt=no
402 |
403 | # Allow the body of an if to be on the same line as the test if there is no
404 | # else.
405 | single-line-if-stmt=no
406 |
407 |
408 | [MISCELLANEOUS]
409 |
410 | # List of note tags to take in consideration, separated by a comma.
411 | notes=FIXME,
412 | XXX
413 |
414 |
415 | [IMPORTS]
416 |
417 | # List of modules that can be imported at any level, not just the top level
418 | # one.
419 | allow-any-import-level=
420 |
421 | # Allow wildcard imports from modules that define __all__.
422 | allow-wildcard-with-all=no
423 |
424 | # Analyse import fallback blocks. This can be used to support both Python 2 and
425 | # 3 compatible code, which means that the block might have code that exists
426 | # only in one or another interpreter, leading to false positives when analysed.
427 | analyse-fallback-blocks=no
428 |
429 | # Deprecated modules which should not be used, separated by a comma.
430 | deprecated-modules=optparse,tkinter.tix
431 |
432 | # Create a graph of external dependencies in the given file (report RP0402 must
433 | # not be disabled).
434 | ext-import-graph=
435 |
436 | # Create a graph of every (i.e. internal and external) dependencies in the
437 | # given file (report RP0402 must not be disabled).
438 | import-graph=
439 |
440 | # Create a graph of internal dependencies in the given file (report RP0402 must
441 | # not be disabled).
442 | int-import-graph=
443 |
444 | # Force import order to recognize a module as part of the standard
445 | # compatibility libraries.
446 | known-standard-library=
447 |
448 | # Force import order to recognize a module as part of a third party library.
449 | known-third-party=enchant
450 |
451 | # Couples of modules and preferred modules, separated by a comma.
452 | preferred-modules=
453 |
454 |
455 | [CLASSES]
456 |
457 | # List of method names used to declare (i.e. assign) instance attributes.
458 | defining-attr-methods=__init__,
459 | __new__,
460 | setUp,
461 | __post_init__
462 |
463 | # List of member names, which should be excluded from the protected access
464 | # warning.
465 | exclude-protected=_asdict,
466 | _fields,
467 | _replace,
468 | _source,
469 | _make
470 |
471 | # List of valid names for the first argument in a class method.
472 | valid-classmethod-first-arg=cls
473 |
474 | # List of valid names for the first argument in a metaclass class method.
475 | valid-metaclass-classmethod-first-arg=cls
476 |
477 |
478 | [DESIGN]
479 |
480 | # Maximum number of arguments for function / method.
481 | max-args=10
482 |
483 | # Maximum number of attributes for a class (see R0902).
484 | max-attributes=15
485 |
486 | # Maximum number of boolean expressions in an if statement (see R0916).
487 | max-bool-expr=5
488 |
489 | # Maximum number of branch for function / method body.
490 | max-branches=12
491 |
492 | # Maximum number of locals for function / method body.
493 | max-locals=15
494 |
495 | # Maximum number of parents for a class (see R0901).
496 | max-parents=7
497 |
498 | # Maximum number of public methods for a class (see R0904).
499 | max-public-methods=20
500 |
501 | # Maximum number of return / yield for function / method body.
502 | max-returns=10
503 |
504 | # Maximum number of statements in function / method body.
505 | max-statements=50
506 |
507 | # Minimum number of public methods for a class (see R0903).
508 | min-public-methods=1
509 |
510 |
511 | [EXCEPTIONS]
512 |
513 | # Exceptions that will emit a warning when being caught. Defaults to
514 | # "BaseException, Exception".
515 | overgeneral-exceptions=builtins.BaseException,
516 | builtins.Exception
517 |
--------------------------------------------------------------------------------
/tests/test_formatters.py:
--------------------------------------------------------------------------------
1 | ### IMPORTS
2 | ### ============================================================================
3 | ## Future
4 | from __future__ import annotations
5 |
6 | ## Standard Library
7 | from dataclasses import dataclass
8 | import datetime
9 | import enum
10 | import io
11 | import json
12 | import logging
13 | import sys
14 | import traceback
15 | from types import TracebackType
16 | from typing import Any, Generator
17 | import uuid
18 |
19 | if sys.version_info >= (3, 9):
20 | import zoneinfo
21 | else:
22 | from backports import zoneinfo
23 |
24 | ## Installed
25 | import freezegun
26 | import pytest
27 |
28 | ## Application
29 | import pythonjsonlogger
30 | import pythonjsonlogger.defaults
31 | from pythonjsonlogger.core import RESERVED_ATTRS, BaseJsonFormatter, merge_record_extra
32 | from pythonjsonlogger.json import JsonFormatter
33 |
34 | if pythonjsonlogger.ORJSON_AVAILABLE:
35 | from pythonjsonlogger.orjson import OrjsonFormatter
36 |
37 | if pythonjsonlogger.MSGSPEC_AVAILABLE:
38 | from pythonjsonlogger.msgspec import MsgspecFormatter
39 |
40 | ### SETUP
41 | ### ============================================================================
42 | ALL_FORMATTERS: list[type[BaseJsonFormatter]] = [JsonFormatter]
43 | if pythonjsonlogger.ORJSON_AVAILABLE:
44 | ALL_FORMATTERS.append(OrjsonFormatter)
45 | if pythonjsonlogger.MSGSPEC_AVAILABLE:
46 | ALL_FORMATTERS.append(MsgspecFormatter)
47 |
48 | _LOGGER_COUNT = 0
49 |
50 |
@dataclass
class LoggingEnvironment:
    """Bundle of logger, in-memory buffer, and handler used by the ``env`` fixture.

    Records logged via ``logger`` pass through ``handler`` and are captured in
    ``buffer`` so tests can inspect the emitted output.
    """

    # Logger unique to the current test (see the ``env`` fixture).
    logger: logging.Logger
    # In-memory stream that captures the handler's output.
    buffer: io.StringIO
    # Stream handler connecting ``logger`` to ``buffer``.
    handler: logging.Handler

    def set_formatter(self, formatter: BaseJsonFormatter) -> None:
        """Attach ``formatter`` to the captured handler."""
        self.handler.setFormatter(formatter)
        return

    def load_json(self) -> Any:
        """Parse the captured buffer contents as JSON and return the result."""
        return json.loads(self.buffer.getvalue())
63 |
64 |
@pytest.fixture
def env() -> Generator[LoggingEnvironment, None, None]:
    """Provide a per-test logger wired to an in-memory buffer."""
    global _LOGGER_COUNT  # pylint: disable=global-statement
    _LOGGER_COUNT += 1

    test_logger = logging.getLogger(f"pythonjsonlogger.tests.{_LOGGER_COUNT}")
    test_logger.setLevel(logging.DEBUG)

    stream = io.StringIO()
    stream_handler = logging.StreamHandler(stream)
    test_logger.addHandler(stream_handler)

    yield LoggingEnvironment(logger=test_logger, buffer=stream, handler=stream_handler)

    # Teardown: detach the handler and reset the logger before closing the stream.
    test_logger.removeHandler(stream_handler)
    test_logger.setLevel(logging.NOTSET)
    stream.close()
    return
79 |
80 |
def get_traceback_from_exception_followed_by_log_call(env_: LoggingEnvironment) -> str:
    """Raise a test exception, log it via ``env_.logger.exception``, and return
    the traceback text the formatter is expected to have captured.

    Returns:
        the traceback string with any trailing newline removed (the formatter
        strips the trailing newline).
    """
    try:
        raise Exception("test")
    except Exception:  # Fix: the exception was previously bound to an unused variable
        env_.logger.exception("hello")
        str_traceback = traceback.format_exc()
        # Formatter removes trailing new line
        if str_traceback.endswith("\n"):
            str_traceback = str_traceback[:-1]
    return str_traceback
91 |
92 |
class SomeClass:
    """Plain object holding a single attribute; used as arbitrary test data."""

    def __init__(self, thing: int):
        self.thing = thing
        return
97 |
98 |
class BrokenClass:
    """Object whose string conversion always raises, for exercising error paths."""

    def __str__(self) -> str:
        # Intentionally unstringifiable.
        raise ValueError("hahah sucker")

    def __repr__(self) -> str:
        # Delegates to __str__ so repr() raises as well.
        return self.__str__()
105 |
106 |
@dataclass
class SomeDataclass:
    """Simple dataclass with mixed field types; used as arbitrary test data."""

    things: str
    stuff: int
    junk: bool
112 |
113 |
# Capture a real traceback object at import time so tests can reuse it
# without raising an exception themselves.
try:
    raise ValueError
except ValueError as e:
    STATIC_TRACEBACK = e.__traceback__
    # Drop the exception reference; we only keep the traceback object.
    del e
119 |
120 |
class MultiEnum(enum.Enum):
    """Enum whose members span several value types (None, bool, str, int, bytes)."""

    NONE = None
    BOOL = False
    STR = "somestring"
    INT = 99
    BYTES = b"some-bytes"
127 |
128 |
129 | NO_TEST = object() # Sentinal
130 |
131 |
132 | ### TESTS
133 | ### ============================================================================
def test_merge_record_extra():
    """merge_record_extra keeps existing target keys and adds record attributes."""
    record = logging.LogRecord(
        "name", level=1, pathname="", lineno=1, msg="Some message", args=None, exc_info=None
    )
    merged = merge_record_extra(record, target={"foo": "bar"}, reserved=[])

    assert merged["foo"] == "bar"
    assert merged["msg"] == "Some message"
    return
142 |
143 |
144 | ## Common Formatter Tests
145 | ## -----------------------------------------------------------------------------
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_default_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A formatter constructed with no arguments emits the message field."""
    env.set_formatter(class_())

    log_msg = "testing logging format"
    env.logger.info(log_msg)

    assert env.load_json()["message"] == log_msg
    return
157 |
158 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_percentage_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Percent-style format strings select exactly the named fields."""
    # Note: We use different %s styles in the format to check the regex correctly collects them
    env.set_formatter(class_("[%(levelname)8s] %(message)s %(filename)s:%(lineno)d %(asctime)"))

    log_msg = "testing logging format"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["message"] == log_msg
    assert set(output.keys()) == {"levelname", "message", "filename", "lineno", "asctime"}
    return
171 |
172 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_comma_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Comma-style format strings select fields; empty names are ignored."""
    # Note: we have double comma `,,` to test handling "empty" names
    env.set_formatter(class_("levelname,,message,filename,lineno,asctime,", style=","))

    log_msg = "testing logging format"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["message"] == log_msg
    assert set(output.keys()) == {"levelname", "message", "filename", "lineno", "asctime"}
    return
185 |
186 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_sequence_list_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A list of field names is accepted as the format specification."""
    env.set_formatter(class_(["levelname", "message", "filename", "lineno", "asctime"]))

    log_msg = "testing logging format"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["message"] == log_msg
    assert set(output.keys()) == {"levelname", "message", "filename", "lineno", "asctime"}
    return
198 |
199 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_sequence_tuple_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A tuple of field names is accepted as the format specification."""
    env.set_formatter(class_(("levelname", "message", "filename", "lineno", "asctime")))

    log_msg = "testing logging format"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["message"] == log_msg
    assert set(output.keys()) == {"levelname", "message", "filename", "lineno", "asctime"}
    return
211 |
212 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_defaults_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``defaults`` supplies baseline values which per-call ``extra`` overrides."""
    env.set_formatter(class_(defaults={"first": 1, "second": 2}))

    env.logger.info("testing defaults field", extra={"first": 1234})
    output = env.load_json()

    assert output["first"] == 1234
    assert output["second"] == 2
    return
223 |
224 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_base_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``rename_fields`` can rename built-in fields such as ``message``."""
    env.set_formatter(class_(rename_fields={"message": "@message"}))

    log_msg = "testing logging format"
    env.logger.info(log_msg)

    assert env.load_json()["@message"] == log_msg
    return
235 |
236 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_with_defaults(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Make sure that the default fields are also renamed."""
    env.set_formatter(class_(rename_fields={"custom": "@custom"}, defaults={"custom": 1234}))

    env.logger.info("testing rename with defaults")
    output = env.load_json()

    assert output["@custom"] == 1234
    assert "custom" not in output
    return
249 |
250 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_missing(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Renaming a field absent from the record emits neither old nor new key."""
    env.set_formatter(class_(rename_fields={"missing_field": "new_field"}))

    log_msg = "test rename missing field"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["message"] == log_msg
    assert "missing_field" not in output
    assert "new_field" not in output
    return
263 |
264 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_keep_missing(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """With ``rename_fields_keep_missing`` the target key appears with a null value."""
    env.set_formatter(
        class_(rename_fields={"missing_field": "new_field"}, rename_fields_keep_missing=True)
    )

    log_msg = "test keep rename missing field"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["message"] == log_msg
    assert "missing_field" not in output
    assert output["new_field"] is None
    return
279 |
280 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_preserve_order(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A renamed field keeps its position in the output key order."""
    env.set_formatter(
        class_("{levelname}{message}{asctime}", style="{", rename_fields={"levelname": "LEVEL"})
    )

    env.logger.info("testing logging rename order")
    output = env.load_json()

    assert next(iter(output)) == "LEVEL"
    return
292 |
293 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_once(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Renames apply once and do not cascade (message -> levelname is safe)."""
    env.set_formatter(
        class_(
            "{levelname}{message}{asctime}",
            style="{",
            rename_fields={"levelname": "LEVEL", "message": "levelname"},
        )
    )

    log_msg = "something"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["LEVEL"] == "INFO"
    assert output["levelname"] == log_msg
    return
311 |
312 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_add_static_fields(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``static_fields`` are injected into every emitted record."""
    env.set_formatter(class_(static_fields={"log_stream": "kafka"}))

    log_msg = "testing static fields"
    env.logger.info(log_msg)
    output = env.load_json()

    assert output["log_stream"] == "kafka"
    assert output["message"] == log_msg
    return
324 |
325 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_format_keys(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Every standard LogRecord attribute can be requested via the format string."""
    supported_keys = [
        "asctime",
        "created",
        "filename",
        "funcName",
        "levelname",
        "levelno",
        "lineno",
        "module",
        "msecs",
        "message",
        "name",
        "pathname",
        "process",
        "processName",
        "relativeCreated",
        "thread",
        "threadName",
    ]

    # Build a percent-style format string requesting every supported key.
    custom_format = " ".join(f"%({key:s})s" for key in supported_keys)

    env.set_formatter(class_(custom_format))

    env.logger.info("testing logging format")
    output = env.load_json()

    for key in supported_keys:
        assert key in output
    return
360 |
361 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_unknown_format_key(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Requesting a key that does not exist on the record must not raise."""
    env.set_formatter(class_("%(unknown_key)s %(message)s"))
    env.logger.info("testing unknown logging format")
    return
368 |
369 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_log_dict(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Logging a dict merges its items into the output; message stays empty."""
    env.set_formatter(class_())

    payload = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
    env.logger.info(payload)
    output = env.load_json()

    # Non-string keys (5) are stringified by the JSON encoder.
    assert output["text"] == payload["text"]
    assert output["num"] == payload["num"]
    assert output["5"] == payload[5]
    assert output["nested"] == payload["nested"]
    assert output["message"] == ""
    return
384 |
385 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_log_dict_defaults(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A logged dict overrides colliding keys from ``defaults``."""
    env.set_formatter(class_(defaults={"d1": 1234, "d2": "hello"}))

    env.logger.info({"d2": "world"})
    output = env.load_json()

    assert output["d1"] == 1234
    assert output["d2"] == "world"
    return
397 |
398 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_log_extra(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``extra`` items appear in the output alongside the message."""
    env.set_formatter(class_())

    extra = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
    env.logger.info("hello", extra=extra)  # type: ignore[arg-type]
    output = env.load_json()

    # Non-string keys (5) are stringified by the JSON encoder.
    assert output["text"] == extra["text"]
    assert output["num"] == extra["num"]
    assert output["5"] == extra[5]
    assert output["nested"] == extra["nested"]
    assert output["message"] == "hello"
    return
413 |
414 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_custom_logic_adds_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Subclasses can inject fields by overriding ``process_log_record``."""

    class CustomJsonFormatter(class_):  # type: ignore[valid-type,misc]

        def process_log_record(self, log_data):
            # Inject a field, then defer to the parent implementation.
            log_data["custom"] = "value"
            return super().process_log_record(log_data)

    env.set_formatter(CustomJsonFormatter())
    env.logger.info("message")

    assert env.load_json()["custom"] == "value"
    return
429 |
430 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``logger.exception`` produces the formatted traceback in ``exc_info``."""
    env.set_formatter(class_())

    expected = get_traceback_from_exception_followed_by_log_call(env)

    assert env.load_json()["exc_info"] == expected
    return
440 |
441 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_renamed(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``exc_info`` named in the format string can be renamed in the output."""
    env.set_formatter(class_("%(exc_info)s", rename_fields={"exc_info": "stack_trace"}))

    expected = get_traceback_from_exception_followed_by_log_call(env)
    output = env.load_json()

    assert output["stack_trace"] == expected
    assert "exc_info" not in output
    return
452 |
453 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_renamed_not_required(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``exc_info`` is renamed even when not listed in the format string."""
    env.set_formatter(class_(rename_fields={"exc_info": "stack_trace"}))

    expected = get_traceback_from_exception_followed_by_log_call(env)
    output = env.load_json()

    assert output["stack_trace"] == expected
    assert "exc_info" not in output
    return
464 |
465 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_renamed_no_error(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Renaming ``exc_info`` emits nothing when no exception was logged."""
    env.set_formatter(class_(rename_fields={"exc_info": "stack_trace"}))

    env.logger.info("message")
    output = env.load_json()

    assert "stack_trace" not in output
    assert "exc_info" not in output
    return
476 |
477 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_custom_object_serialization(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A user-supplied ``json_default`` is used to encode unsupported objects.

    Fix: corrected the grammar of the TypeError message ("is no" -> "is not").
    """

    def encode_complex(z):
        # Encode complex numbers as a (real, imag) pair; reject anything else.
        if isinstance(z, complex):
            return (z.real, z.imag)
        raise TypeError(f"Object of type {type(z)} is not JSON serializable")

    env.set_formatter(class_(json_default=encode_complex))  # type: ignore[call-arg]

    env.logger.info("foo", extra={"special": complex(3, 8)})
    log_json = env.load_json()

    assert log_json["special"] == [3.0, 8.0]
    return
492 |
493 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_rename_reserved_attrs(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """Attributes removed from ``reserved_attrs`` can be renamed in the output."""
    reserved_attrs_map = {
        "exc_info": "error.type",
        "exc_text": "error.message",
        "funcName": "log.origin.function",
        "levelname": "log.level",
        "module": "log.origin.file.name",
        "processName": "process.name",
        "threadName": "process.thread.name",
        "msg": "log.message",
    }

    # Request every attribute we intend to rename, and un-reserve them so the
    # formatter is willing to emit them.
    custom_format = " ".join(f"%({attr:s})s" for attr in reserved_attrs_map)
    reserved_attrs = [attr for attr in RESERVED_ATTRS if attr not in reserved_attrs_map]
    env.set_formatter(
        class_(custom_format, reserved_attrs=reserved_attrs, rename_fields=reserved_attrs_map)
    )

    env.logger.info("message")
    output = env.load_json()

    for old_name, new_name in reserved_attrs_map.items():
        assert new_name in output
        assert old_name not in output
    return
523 |
524 |
@freezegun.freeze_time(datetime.datetime(2017, 7, 14, 2, 40))
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_default_encoder_with_timestamp(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``timestamp=True`` emits the (frozen) record time as an ISO-8601 UTC string.

    orjson and msgspec cannot natively encode freezegun's FakeDatetime, so for
    those formatters a ``json_default`` shim converts it to its ISO string.
    """
    if (pythonjsonlogger.ORJSON_AVAILABLE and class_ is OrjsonFormatter) or (
        pythonjsonlogger.MSGSPEC_AVAILABLE and class_ is MsgspecFormatter
    ):
        # FakeDatetime not supported
        # https://github.com/ijl/orjson/issues/481
        # https://github.com/jcrist/msgspec/issues/678
        def json_default(obj: Any) -> Any:
            if isinstance(obj, freezegun.api.FakeDate):
                return obj.isoformat()
            raise ValueError(f"Unexpected object: {obj!r}")

        env.set_formatter(class_(timestamp=True, json_default=json_default))  # type: ignore[call-arg]
    else:
        env.set_formatter(class_(timestamp=True))

    env.logger.info("Hello")
    log_json = env.load_json()

    assert log_json["timestamp"] == "2017-07-14T02:40:00+00:00"
    return
548 |
549 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
@pytest.mark.parametrize(
    ["obj", "type_", "expected"],
    [
        ("somestring", str, "somestring"),
        ("some unicode Привет", str, "some unicode Привет"),
        (1234, int, 1234),
        (1234.5, float, 1234.5),
        (False, bool, False),
        (None, type(None), None),
        # bytes are emitted base64-encoded
        (b"some-bytes", str, "c29tZS1ieXRlcw=="),
        (datetime.time(16, 45, 30, 100), str, "16:45:30.000100"),
        (datetime.date(2024, 5, 5), str, "2024-05-05"),
        (datetime.datetime(2024, 5, 5, 16, 45, 30, 100), str, "2024-05-05T16:45:30.000100"),
        (
            datetime.datetime(2024, 5, 5, 16, 45, 30, 100, zoneinfo.ZoneInfo("Australia/Sydney")),
            str,
            "2024-05-05T16:45:30.000100+10:00",
        ),
        (
            uuid.UUID("urn:uuid:12345678-1234-5678-1234-567812345678"),
            str,
            "12345678-1234-5678-1234-567812345678",
        ),
        (Exception, str, "Exception"),
        (Exception("Foo occurred"), str, "Exception: Foo occurred"),
        (BaseException, str, "BaseException"),
        (BaseException("BaseFoo occurred"), str, "BaseException: BaseFoo occurred"),
        (STATIC_TRACEBACK, str, pythonjsonlogger.defaults.traceback_default(STATIC_TRACEBACK)),  # type: ignore[arg-type]
        (
            SomeDataclass(things="le_things", stuff=99, junk=False),
            dict,
            {"things": "le_things", "stuff": 99, "junk": False},
        ),
        (SomeDataclass, str, "SomeDataclass"),
        (SomeClass, str, "SomeClass"),
        # NO_TEST sentinel: only the output type is checked, not the value
        (SomeClass(1234), str, NO_TEST),
        (BrokenClass(), str, "__could_not_encode__"),
        (MultiEnum.NONE, type(None), None),
        (MultiEnum.BOOL, bool, MultiEnum.BOOL.value),
        (MultiEnum.STR, str, MultiEnum.STR.value),
        (MultiEnum.INT, int, MultiEnum.INT.value),
        (MultiEnum.BYTES, str, "c29tZS1ieXRlcw=="),
        (MultiEnum, list, [None, False, "somestring", 99, "c29tZS1ieXRlcw=="]),
    ],
)
def test_common_types_encoded(
    env: LoggingEnvironment,
    class_: type[BaseJsonFormatter],
    obj: object,
    type_: type,
    expected: Any,
):
    """Check that ``obj`` is encoded as ``expected`` (of type ``type_``) whether
    it appears directly in ``extra``, nested in a dict, or nested in a list.
    """
    ## Known bad cases
    if pythonjsonlogger.MSGSPEC_AVAILABLE and class_ is MsgspecFormatter:
        # Dataclass: https://github.com/jcrist/msgspec/issues/681
        # Enum: https://github.com/jcrist/msgspec/issues/680
        # These have been fixed in msgspec 0.19.0, however they also dropped python 3.8 support.
        # https://github.com/jcrist/msgspec/releases/tag/0.19.0
        if sys.version_info < (3, 9) and (
            obj is SomeDataclass
            or (
                isinstance(obj, enum.Enum)
                and obj in {MultiEnum.BYTES, MultiEnum.NONE, MultiEnum.BOOL}
            )
        ):
            pytest.xfail()

    ## Test
    env.set_formatter(class_())
    extra = {
        "extra": obj,
        "extra_dict": {"item": obj},
        "extra_list": [obj],
    }
    env.logger.info("hello", extra=extra)
    log_json = env.load_json()

    assert isinstance(log_json["extra"], type_)
    assert isinstance(log_json["extra_dict"]["item"], type_)
    assert isinstance(log_json["extra_list"][0], type_)

    if expected is NO_TEST:
        return

    # Use identity checks for None/bool so e.g. 0 does not pass as False.
    if expected is None or isinstance(expected, bool):
        assert log_json["extra"] is expected
        assert log_json["extra_dict"]["item"] is expected
        assert log_json["extra_list"][0] is expected
    else:
        assert log_json["extra"] == expected
        assert log_json["extra_dict"]["item"] == expected
        assert log_json["extra_list"][0] == expected
    return
644 |
645 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_custom_default(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """A ``json_default`` hook controls how unknown objects are encoded."""

    def custom_default(obj):
        # Encode SomeClass instances as a small dict; anything else as null.
        return {"TYPE": obj.thing} if isinstance(obj, SomeClass) else None

    env.set_formatter(class_(json_default=custom_default))  # type: ignore[call-arg]
    env.logger.info("hello", extra={"extra": SomeClass(999)})

    assert env.load_json()["extra"] == {"TYPE": 999}
    return
659 |
660 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_as_array(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """With ``exc_info_as_array`` the traceback is emitted as a list of lines."""
    env.set_formatter(class_(exc_info_as_array=True))

    try:
        raise Exception("Error")
    except BaseException:
        env.logger.exception("Error occurs")

    assert isinstance(env.load_json()["exc_info"], list)
    return
673 |
674 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_exc_info_as_array_no_exc_info(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """``exc_info_as_array`` emits nothing when no exception was logged."""
    env.set_formatter(class_(exc_info_as_array=True))

    env.logger.info("hello")

    assert "exc_info" not in env.load_json()
    return
684 |
685 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_stack_info_as_array(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
    """With ``stack_info_as_array`` the stack is emitted as a list of lines."""
    env.set_formatter(class_(stack_info_as_array=True))

    env.logger.info("hello", stack_info=True)

    assert isinstance(env.load_json()["stack_info"], list)
    return
695 |
696 |
@pytest.mark.parametrize("class_", ALL_FORMATTERS)
def test_stack_info_as_array_no_stack_info(
    env: LoggingEnvironment, class_: type[BaseJsonFormatter]
):
    """``stack_info_as_array`` emits nothing when stack capture is disabled."""
    env.set_formatter(class_(stack_info_as_array=True))

    env.logger.info("hello", stack_info=False)

    assert "stack_info" not in env.load_json()
    return
708 |
709 |
710 | ## JsonFormatter Specific
711 | ## -----------------------------------------------------------------------------
def test_json_ensure_ascii_true(env: LoggingEnvironment):
    """By default non-ASCII characters are escaped in the raw output."""
    env.set_formatter(JsonFormatter())
    env.logger.info("Привет")

    # Note: we don't use env.load_json as we want to know the raw output
    raw = env.buffer.getvalue()
    emitted = raw.split('"message": "', 1)[1].split('"', 1)[0]
    assert emitted == r"\u041f\u0440\u0438\u0432\u0435\u0442"
    return
720 |
721 |
def test_json_ensure_ascii_false(env: LoggingEnvironment):
    """With ``json_ensure_ascii=False`` non-ASCII characters pass through raw."""
    env.set_formatter(JsonFormatter(json_ensure_ascii=False))
    env.logger.info("Привет")

    # Note: we don't use env.load_json as we want to know the raw output
    raw = env.buffer.getvalue()
    emitted = raw.split('"message": "', 1)[1].split('"', 1)[0]
    assert emitted == "Привет"
    return
730 |
--------------------------------------------------------------------------------