├── .ci ├── scripts │ ├── lint.sh │ └── test.sh └── updatecli │ └── values.d │ ├── ecs-logging-specs.yml │ ├── scm.yml │ └── update-compose.yml ├── .flake8 ├── .github ├── community-label.yml ├── dependabot.yml └── workflows │ ├── addToProject.yml │ ├── docs-build.yml │ ├── docs-cleanup.yml │ ├── labeler.yml │ ├── periodic.yml │ ├── release.yml │ ├── test-docs.yml │ ├── test.yml │ └── update-specs.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── LICENSE.txt ├── NOTICE.txt ├── README.md ├── RELEASING.md ├── docs ├── docset.yml └── reference │ ├── index.md │ ├── installation.md │ └── toc.yml ├── ecs_logging ├── __init__.py ├── _meta.py ├── _stdlib.py ├── _structlog.py ├── _utils.py └── py.typed ├── mypy.ini ├── noxfile.py ├── pyproject.toml ├── pytest.ini ├── tests ├── __init__.py ├── conftest.py ├── resources │ └── spec.json ├── test_apm.py ├── test_meta.py ├── test_stdlib_formatter.py ├── test_structlog_formatter.py └── test_utils.py ├── updatecli-compose.yaml └── utils ├── check-license-headers.sh └── license-header.txt /.ci/scripts/lint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | ## When running in a docker container in the CI then it's required to set the location 5 | ## for the tools to be installed. 6 | export PATH=${HOME}/.local/bin:${PATH} 7 | 8 | python -m pip install -U nox 9 | nox -s lint 10 | -------------------------------------------------------------------------------- /.ci/scripts/test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | VERSION=${1:?Please specify the python version} 5 | 6 | ## When running in a docker container in the CI then it's required to set the location 7 | ## for the tools to be installed. 
8 | export PATH=${HOME}/.local/bin:${PATH} 9 | 10 | python -m pip install -U nox 11 | nox -s test-"${VERSION}" -------------------------------------------------------------------------------- /.ci/updatecli/values.d/ecs-logging-specs.yml: -------------------------------------------------------------------------------- 1 | spec_path: tests/resources/spec.json -------------------------------------------------------------------------------- /.ci/updatecli/values.d/scm.yml: -------------------------------------------------------------------------------- 1 | scm: 2 | enabled: true 3 | owner: elastic 4 | repository: ecs-logging-python 5 | branch: main 6 | commitusingapi: true 7 | # begin update-compose policy values 8 | user: obltmachine 9 | email: obltmachine@users.noreply.github.com 10 | # end update-compose policy values -------------------------------------------------------------------------------- /.ci/updatecli/values.d/update-compose.yml: -------------------------------------------------------------------------------- 1 | spec: 2 | files: 3 | - "updatecli-compose.yaml" -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude= 3 | tests/**, 4 | conftest.py, 5 | setup.py 6 | max-line-length=120 7 | ignore=E731,W503,E203,BLK100,B301 -------------------------------------------------------------------------------- /.github/community-label.yml: -------------------------------------------------------------------------------- 1 | 2 | # add 'community' label to all new issues and PRs created by the community 3 | community: 4 | - '.*' 5 | triage: 6 | - '.*' 7 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | updates: 4 | 5 | # Maintain dependencies for GitHub Actions 
(/.github/workflows) 6 | - package-ecosystem: "github-actions" 7 | directory: "/" 8 | schedule: 9 | interval: "weekly" 10 | day: "sunday" 11 | time: "22:00" 12 | reviewers: 13 | - "elastic/observablt-ci" 14 | labels: 15 | - dependencies 16 | groups: 17 | github-actions: 18 | patterns: 19 | - "*" 20 | -------------------------------------------------------------------------------- /.github/workflows/addToProject.yml: -------------------------------------------------------------------------------- 1 | 2 | name: Auto Assign to Project(s) 3 | 4 | on: 5 | issues: 6 | types: [opened, edited, milestoned] 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | assign_one_project: 13 | runs-on: ubuntu-latest 14 | name: Assign milestoned to Project 15 | steps: 16 | - name: Get token 17 | id: get_token 18 | uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a # v2.1.0 19 | with: 20 | app_id: ${{ secrets.OBS_AUTOMATION_APP_ID }} 21 | private_key: ${{ secrets.OBS_AUTOMATION_APP_PEM }} 22 | permissions: >- 23 | { 24 | "organization_projects": "write", 25 | "issues": "read" 26 | } 27 | - name: Assign issues with milestones to project 28 | uses: elastic/assign-one-project-github-action@1.2.2 29 | if: github.event.issue && github.event.issue.milestone 30 | with: 31 | project: 'https://github.com/orgs/elastic/projects/454' 32 | project_id: '5882982' 33 | column_name: 'Planned' 34 | env: 35 | MY_GITHUB_TOKEN: ${{ steps.get_token.outputs.token }} 36 | -------------------------------------------------------------------------------- /.github/workflows/docs-build.yml: -------------------------------------------------------------------------------- 1 | name: docs-build 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request_target: ~ 8 | merge_group: ~ 9 | 10 | jobs: 11 | docs-preview: 12 | uses: elastic/docs-builder/.github/workflows/preview-build.yml@main 13 | with: 14 | path-pattern: docs/** 15 | permissions: 16 | deployments: write 17 | id-token: write 
18 | contents: read 19 | pull-requests: read 20 | -------------------------------------------------------------------------------- /.github/workflows/docs-cleanup.yml: -------------------------------------------------------------------------------- 1 | name: docs-cleanup 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - closed 7 | 8 | jobs: 9 | docs-preview: 10 | uses: elastic/docs-builder/.github/workflows/preview-cleanup.yml@main 11 | permissions: 12 | contents: none 13 | id-token: write 14 | deployments: write 15 | -------------------------------------------------------------------------------- /.github/workflows/labeler.yml: -------------------------------------------------------------------------------- 1 | name: "Issue Labeler" 2 | on: 3 | issues: 4 | types: [opened] 5 | pull_request_target: 6 | types: [opened] 7 | 8 | # '*: write' permissions for https://docs.github.com/en/rest/issues/labels?apiVersion=2022-11-28#add-labels-to-an-issue 9 | permissions: 10 | contents: read 11 | issues: write 12 | pull-requests: write 13 | 14 | jobs: 15 | triage: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Add agent-python label 19 | uses: actions-ecosystem/action-add-labels@v1 20 | with: 21 | labels: agent-python 22 | 23 | - name: Get token 24 | id: get_token 25 | uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a # v2.1.0 26 | with: 27 | app_id: ${{ secrets.OBS_AUTOMATION_APP_ID }} 28 | private_key: ${{ secrets.OBS_AUTOMATION_APP_PEM }} 29 | permissions: >- 30 | { 31 | "members": "read" 32 | } 33 | 34 | - id: is_elastic_member 35 | uses: elastic/oblt-actions/github/is-member-of@v1 36 | with: 37 | github-org: "elastic" 38 | github-user: ${{ github.actor }} 39 | github-token: ${{ steps.get_token.outputs.token }} 40 | - name: Add community and triage labels 41 | if: contains(steps.is_elastic_member.outputs.result, 'false') && github.actor != 'dependabot[bot]' && github.actor != 'apmmachine' 42 | uses: actions-ecosystem/action-add-labels@v1 43 | 
with: 44 | labels: | 45 | community 46 | triage 47 | -------------------------------------------------------------------------------- /.github/workflows/periodic.yml: -------------------------------------------------------------------------------- 1 | name: periodic 2 | 3 | on: 4 | # Run daily at midnight 5 | schedule: 6 | - cron: "0 0 * * *" 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | 13 | test: 14 | runs-on: ubuntu-latest 15 | timeout-minutes: 10 16 | strategy: 17 | matrix: 18 | python: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13' ] 19 | fail-fast: false 20 | steps: 21 | - uses: actions/checkout@v4 22 | - uses: actions/setup-python@v5 23 | with: 24 | python-version: ${{ matrix.python }} 25 | - run: .ci/scripts/test.sh ${{ matrix.python }} 26 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - "[0-9]+.[0-9]+.[0-9]+" 7 | branches: 8 | - main 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | packages: 15 | permissions: 16 | attestations: write 17 | id-token: write 18 | contents: read 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | 23 | - run: pip install build==1.2.1 24 | 25 | - run: python -m build 26 | 27 | - name: generate build provenance 28 | uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0 29 | with: 30 | subject-path: "${{ github.workspace }}/dist/*" 31 | 32 | - name: Upload Packages 33 | uses: actions/upload-artifact@v4 34 | with: 35 | name: packages 36 | path: | 37 | dist/*.whl 38 | dist/*tar.gz 39 | 40 | publish-pypi: 41 | needs: 42 | - packages 43 | runs-on: ubuntu-latest 44 | environment: release 45 | permissions: 46 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing 47 | steps: 48 | - uses: actions/download-artifact@v4 49 | 
with: 50 | name: packages 51 | path: dist 52 | 53 | - name: Upload pypi.org 54 | if: startsWith(github.ref, 'refs/tags') 55 | uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 56 | with: 57 | repository-url: https://upload.pypi.org/legacy/ 58 | -------------------------------------------------------------------------------- /.github/workflows/test-docs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # This workflow sets the test-docs status check to success in case it's a docs only PR and test.yml is not triggered 3 | # https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks 4 | name: test # The name must be the same as in ci.yml 5 | 6 | on: 7 | pull_request: 8 | paths-ignore: # This expression needs to match the paths ignored on test.yml. 9 | - '**' 10 | - '!**/*.md' 11 | - '!**/*.asciidoc' 12 | 13 | permissions: 14 | contents: read 15 | 16 | jobs: 17 | lint: 18 | runs-on: ubuntu-latest 19 | steps: 20 | - run: 'echo "No build required"' 21 | test: 22 | runs-on: ubuntu-latest 23 | timeout-minutes: 5 24 | strategy: 25 | matrix: 26 | python: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13' ] 27 | fail-fast: false 28 | steps: 29 | - run: 'echo "No build required"' 30 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | paths-ignore: [ '*.md', '*.asciidoc' ] 7 | pull_request: 8 | branches: [ "main" ] 9 | paths-ignore: [ '*.md', '*.asciidoc' ] 10 | 11 | permissions: 12 | contents: read 13 | 14 | 15 | ## Concurrency is only allowed in the main branch. 
16 | ## So old builds, running for old commits within the same Pull Request, are cancelled 17 | concurrency: 18 | group: ${{ github.workflow }}-${{ github.ref }} 19 | cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} 20 | 21 | jobs: 22 | 23 | pre-commit: 24 | name: Run pre-commit 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: elastic/oblt-actions/pre-commit@v1 28 | 29 | lint: 30 | runs-on: ubuntu-latest 31 | timeout-minutes: 5 32 | steps: 33 | - uses: actions/checkout@v4 34 | - uses: actions/setup-python@v5 35 | with: 36 | python-version: '3.10' 37 | - run: .ci/scripts/lint.sh 38 | 39 | test: 40 | runs-on: ubuntu-latest 41 | timeout-minutes: 10 42 | strategy: 43 | matrix: 44 | python: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13' ] 45 | fail-fast: false 46 | steps: 47 | - uses: actions/checkout@v4 48 | - uses: actions/setup-python@v5 49 | with: 50 | python-version: ${{ matrix.python }} 51 | - run: .ci/scripts/test.sh ${{ matrix.python }} 52 | -------------------------------------------------------------------------------- /.github/workflows/update-specs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Send PRs to the subscribed ECS Agents if the spec files (JSON) are modified 3 | name: update-specs 4 | 5 | on: 6 | workflow_dispatch: 7 | schedule: 8 | - cron: '0 6 * * *' 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | compose: 15 | runs-on: ubuntu-latest 16 | permissions: 17 | contents: read 18 | packages: read 19 | steps: 20 | - uses: actions/checkout@v4 21 | 22 | - name: Get token 23 | id: get_token 24 | uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a # v2.1.0 25 | with: 26 | app_id: ${{ secrets.OBS_AUTOMATION_APP_ID }} 27 | private_key: ${{ secrets.OBS_AUTOMATION_APP_PEM }} 28 | permissions: >- 29 | { 30 | "contents": "write", 31 | "pull_requests": "write" 32 | } 33 | 34 | - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 35 | with: 
36 | registry: ghcr.io 37 | username: ${{ github.actor }} 38 | password: ${{ secrets.GITHUB_TOKEN }} 39 | 40 | - uses: elastic/oblt-actions/updatecli/run@v1 41 | with: 42 | command: --experimental compose diff 43 | env: 44 | GITHUB_TOKEN: ${{ steps.get_token.outputs.token }} 45 | 46 | - uses: elastic/oblt-actions/updatecli/run@v1 47 | with: 48 | command: --experimental compose apply 49 | env: 50 | GITHUB_TOKEN: ${{ steps.get_token.outputs.token }} 51 | 52 | - if: failure() 53 | uses: elastic/oblt-actions/slack/send@v1 54 | with: 55 | bot-token: ${{ secrets.SLACK_BOT_TOKEN }} 56 | channel-id: "#apm-agent-python" 57 | message: ":traffic_cone: updatecli failed for `${{ github.repository }}@${{ github.ref_name }}`, @robots-ci please look what's going on " 58 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # JUnit file 132 | junit-test.xml 133 | 134 | # VSCode 135 | .vscode/ 136 | 137 | # Doc build 138 | html_docs 139 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.0.1 4 | hooks: 5 | - id: check-case-conflict 6 | - id: check-executables-have-shebangs 7 | - id: check-merge-conflict 8 | - repo: https://github.com/elastic/apm-pipeline-library 9 | rev: current 10 | hooks: 11 | - id: check-bash-syntax 12 | - repo: https://github.com/pre-commit/mirrors-mypy 13 | rev: v0.910 14 | hooks: 15 | - id: mypy 16 | args: 17 | [ 18 | --strict, 19 | --show-error-codes, 20 | --no-warn-unused-ignores, 21 | --implicit-reexport, 22 | ] 23 | - repo: https://github.com/psf/black 24 | rev: 22.12.0 25 | hooks: 26 | - id: black 27 | language_version: python3 28 | - repo: https://github.com/pycqa/flake8 29 | rev: 3.9.2 30 | hooks: 31 | - id: flake8 32 | exclude: tests|conftest.py|setup.py 33 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 2.2.0 (2024-06-28) 4 | 5 | - Rewrite type annotations 
([#119](https://github.com/elastic/ecs-logging-python/pull/119)) 6 | - Don't de-dot `ecs.version` ([#118](https://github.com/elastic/ecs-logging-python/pull/118)) 7 | - Make it possible to override the JSON serializer in `StructlogFormatter` ([#114](https://github.com/elastic/ecs-logging-python/pull/114)) 8 | - Use `fromtimestamp` instead of deprecated `utcfromtimestamp` ([#105](https://github.com/elastic/ecs-logging-python/pull/105)) 9 | - Remove unused imports and fix an undefined name ([#101](https://github.com/elastic/ecs-logging-python/pull/101)) 10 | 11 | ## 2.1.0 (2023-08-16) 12 | 13 | - Add support for `service.environment` from APM log correlation ([#96](https://github.com/elastic/ecs-logging-python/pull/96)) 14 | - Fix stack trace handling in StructLog for ECS compliance ([#97](https://github.com/elastic/ecs-logging-python/pull/97)) 15 | 16 | ## 2.0.2 (2023-05-17) 17 | 18 | - Allow flit-core 3+ ([#94](https://github.com/elastic/ecs-logging-python/pull/94)) 19 | - Remove python2 leftovers ([#94](https://github.com/elastic/ecs-logging-python/pull/94)) 20 | 21 | ## 2.0.0 (2022-05-18) 22 | 23 | - Remove python 2 support ([#78](https://github.com/elastic/ecs-logging-python/pull/78)) 24 | - Add global `extra` context fields to `StdLibFormatter` ([#65](https://github.com/elastic/ecs-logging-python/pull/65)) 25 | 26 | ## 1.1.0 (2021-10-18) 27 | 28 | - Remove python 3.5 support ([#69](https://github.com/elastic/ecs-logging-python/pull/69)) 29 | - Fix an issue where APM fields would override user-provided fields even when 30 | APM wasn't installed ([#67](https://github.com/elastic/ecs-logging-python/pull/67)) 31 | - Removed `event.dataset` field handling to match 32 | [`elastic-apm` v6.6.0](https://github.com/elastic/apm-agent-python/releases/tag/v6.6.0) 33 | ([#69](https://github.com/elastic/ecs-logging-python/pull/69)) 34 | 35 | ## 1.0.2 (2021-09-22) 36 | 37 | - Fix a signature mismatch between `StdLibFormatter` and `logging.Formatter`, 38 | which could cause 
issues in Django and Gunicorn 39 | ([#54](https://github.com/elastic/ecs-logging-python/pull/54)) 40 | 41 | ## 1.0.1 (2021-07-06) 42 | 43 | - Fixed an issue in `StructlogFormatter` caused by a conflict with `event` 44 | (used for the log `message`) and `event.dataset` (a field provided by the 45 | `elasticapm` integration) ([#46](https://github.com/elastic/ecs-logging-python/pull/46)) 46 | - Add default/fallback handling for json.dumps ([#47](https://github.com/elastic/ecs-logging-python/pull/47)) 47 | - Fixed an issue in `StdLibFormatter` when `exc_info=False` ([#42](https://github.com/elastic/ecs-logging-python/pull/42)) 48 | 49 | ## 1.0.0 (2021-02-08) 50 | 51 | - Remove "beta" designation 52 | 53 | ## 0.6.0 (2021-01-21) 54 | 55 | - Add validation against the ecs-logging [spec](https://github.com/elastic/ecs-logging/blob/main/spec/spec.json) ([#31](https://github.com/elastic/ecs-logging-python/pull/31)) 56 | - Add support for `service.name` from APM log correlation ([#32](https://github.com/elastic/ecs-logging-python/pull/32)) 57 | - Correctly order `@timestamp`, `log.level`, and `message` fields ([#28](https://github.com/elastic/ecs-logging-python/pull/28)) 58 | 59 | ## 0.5.0 (2020-08-27) 60 | 61 | - Updated supported ECS version to 1.6.0 ([#24](https://github.com/elastic/ecs-logging-python/pull/24)) 62 | - Added support for `LogRecord.stack_info` ([#23](https://github.com/elastic/ecs-logging-python/pull/23)) 63 | - Fixed normalizing of items in `list` that aren't of type 64 | `dict` ([#22](https://github.com/elastic/ecs-logging-python/pull/22), contributed by [`@camerondavison`](https://github.com/camerondavison)) 65 | 66 | ## 0.4 (2020-08-04) 67 | 68 | - Added automatic collection of ECS fields `trace.id`, `span.id`, and `transaction.id` for 69 | [Log Correlation](https://www.elastic.co/guide/en/apm/agent/python/master/log-correlation.html) with 70 | the Python Elastic APM agent ([#17](https://github.com/elastic/ecs-logging-python/pull/17)) 71 | 72 | ## 0.3 
(2020-07-27) 73 | 74 | - Added collecting `LogRecord.exc_info` into `error.*` fields 75 | automatically for `StdlibFormatter` ([#16](https://github.com/elastic/ecs-logging-python/pull/16)) 76 | - Added collecting process and thread info from `LogRecord` into `process.*` fields 77 | automatically for `StdlibFormatter` ([#16](https://github.com/elastic/ecs-logging-python/pull/16)) 78 | - Added `exclude_fields` parameter to `StdlibFormatter` to 79 | exclude fields from being formatted to JSON ([#16](https://github.com/elastic/ecs-logging-python/pull/16)) 80 | - Added `stack_trace_limit` parameter to `StdlibFormatter` 81 | to control the number of stack trace frames being 82 | formatted in `error.stack_trace` ([#16](https://github.com/elastic/ecs-logging-python/pull/16)) 83 | 84 | Thanks to community contributor Jon Moore ([@comcast-jonm](https://github.com/comcast-jonm)) 85 | for their contributions to this release. 86 | 87 | ## 0.2 (2020-04-28) 88 | 89 | - Added support for using `log(..., extra={...})` on standard library 90 | loggers to use extended and custom fields ([#8](https://github.com/elastic/ecs-logging-python/pull/8)) 91 | 92 | ## 0.1 (2020-03-26) 93 | 94 | - Added `StdlibFormatter` for use with the standard library `logging` module 95 | - Added `StructlogFormatter` for use with the `structlog` package 96 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /NOTICE.txt: -------------------------------------------------------------------------------- 1 | ecs-logging-python 2 | Copyright 2020-2021 Elasticsearch B.V. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ecs-logging-python 2 | 3 | [![Build Status](https://github.com/elastic/ecs-logging-python/actions/workflows/test.yml/badge.svg)](https://github.com/elastic/ecs-logging-pythonactions/workflows/test.yml) 4 | [![PyPI](https://img.shields.io/pypi/v/ecs-logging)](https://pypi.org/project/ecs-logging) 5 | [![Versions Supported](https://img.shields.io/pypi/pyversions/ecs-logging)](https://pypi.org/project/ecs-logging) 6 | 7 | Check out the [Elastic Common Schema (ECS) reference](https://www.elastic.co/guide/en/ecs/current/index.html) 8 | for more information. 9 | 10 | The library currently implements ECS 1.6. 
11 | 12 | ## Installation 13 | 14 | ```console 15 | $ python -m pip install ecs-logging 16 | ``` 17 | 18 | ## Documentation 19 | 20 | See the [ECS Logging Python reference](https://www.elastic.co/guide/en/ecs-logging/python/current/index.html) on elastic.co to get started. 21 | 22 | ## Elastic APM Log Correlation 23 | 24 | `ecs-logging-python` supports automatically collecting [ECS tracing fields](https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html) 25 | from the [Elastic APM Python agent](https://github.com/elastic/apm-agent-python) in order to 26 | [correlate logs to spans, transactions and traces](https://www.elastic.co/guide/en/apm/agent/python/current/log-correlation.html) in Elastic APM. 27 | 28 | ## License 29 | 30 | Apache-2.0 31 | -------------------------------------------------------------------------------- /RELEASING.md: -------------------------------------------------------------------------------- 1 | ### Releasing 2 | 3 | Releases tags are signed so you need to have a PGP key set up, you can follow Github documentation on [creating a key](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key) and 4 | on [telling git about it](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key). Alternatively you can sign with a SSH key, remember you have to upload your key 5 | again even if you want to use the same key you are using for authorization. 6 | Then make sure you have SSO figured out for the key you are using to push to github, see [Github documentation](https://docs.github.com/articles/authenticating-to-a-github-organization-with-saml-single-sign-on/). 7 | 8 | If you have commit access, the process is as follows: 9 | 10 | 1. Update the version in `ecs_logging/__init__.py` according to the scale of the change. (major, minor or patch) 11 | 1. Update `CHANGELOG.md`. 12 | 1. 
Commit changes with message `update CHANGELOG and rev to vX.Y.Z` 13 | where `X.Y.Z` is the version in `ecs_logging/__init__.py` 14 | 1. Open a PR against `main` with these changes leaving the body empty 15 | 1. Once the PR is merged, fetch and checkout `upstream/main` 16 | 1. Tag the commit with `git tag -s X.Y.Z`, for example `git tag -s 2.2.0`. 17 | Copy the changelog for the release to the tag message, removing any leading `#`. 18 | 1. Push tag upstream with `git push upstream --tags` (and optionally to your own fork as well) 19 | 1. After tests pass, Github Actions will automatically build and push the new release to PyPI. 20 | 1. Edit and publish the [draft Github release](https://github.com/elastic/ecs-logging-python/releases). 21 | Substitute the generated changelog with one hand written into the body of the release. 22 | -------------------------------------------------------------------------------- /docs/docset.yml: -------------------------------------------------------------------------------- 1 | project: 'ECS Logging Python' 2 | products: 3 | - id: ecs-logging 4 | cross_links: 5 | - apm-agent-python 6 | - beats 7 | - docs-content 8 | - ecs 9 | - ecs-logging 10 | toc: 11 | - toc: reference 12 | subs: 13 | filebeat: "Filebeat" 14 | -------------------------------------------------------------------------------- /docs/reference/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | mapped_pages: 3 | - https://www.elastic.co/guide/en/ecs-logging/python/current/intro.html 4 | - https://www.elastic.co/guide/en/ecs-logging/python/current/index.html 5 | --- 6 | 7 | # ECS Logging Python [intro] 8 | 9 | ECS loggers are formatter/encoder plugins for your favorite logging libraries. They make it easy to format your logs into ECS-compatible JSON. 10 | 11 | ::::{tip} 12 | Want to learn more about ECS, ECS logging, and other available language plugins? See the [ECS logging guide](ecs-logging://reference/intro.md). 
13 | :::: 14 | 15 | 16 | Ready to jump into `ecs-logging-python`? [Get started](/reference/installation.md). 17 | 18 | If you’d like to try out a tutorial using Python ECS logging, see [Ingest logs from a Python application using Filebeat](docs-content://manage-data/ingest/ingesting-data-from-applications/ingest-logs-from-python-application-using-filebeat.md). 19 | 20 | -------------------------------------------------------------------------------- /docs/reference/installation.md: -------------------------------------------------------------------------------- 1 | --- 2 | mapped_pages: 3 | - https://www.elastic.co/guide/en/ecs-logging/python/current/installation.html 4 | navigation_title: Installation 5 | --- 6 | 7 | # ECS Logging Python installation [installation] 8 | 9 | ```cmd 10 | $ python -m pip install ecs-logging 11 | ``` 12 | 13 | 14 | ## Getting started [gettingstarted] 15 | 16 | `ecs-logging-python` has formatters for the standard library [`logging`](https://docs.python.org/3/library/logging.html) module and the [`structlog`](https://www.structlog.org/en/stable/) package. 17 | 18 | 19 | ### Standard library logging module [logging] 20 | 21 | ```python 22 | import logging 23 | import ecs_logging 24 | 25 | # Get the Logger 26 | logger = logging.getLogger("app") 27 | logger.setLevel(logging.DEBUG) 28 | 29 | # Add an ECS formatter to the Handler 30 | handler = logging.StreamHandler() 31 | handler.setFormatter(ecs_logging.StdlibFormatter()) 32 | logger.addHandler(handler) 33 | 34 | # Emit a log! 
35 | logger.debug("Example message!", extra={"http.request.method": "get"}) 36 | ``` 37 | 38 | ```json 39 | { 40 | "@timestamp": "2020-03-20T18:11:37.895Z", 41 | "log.level": "debug", 42 | "message": "Example message!", 43 | "ecs": { 44 | "version": "1.6.0" 45 | }, 46 | "http": { 47 | "request": { 48 | "method": "get" 49 | } 50 | }, 51 | "log": { 52 | "logger": "app", 53 | "origin": { 54 | "file": { 55 | "line": 14, 56 | "name": "test.py" 57 | }, 58 | "function": "func" 59 | }, 60 | "original": "Example message!" 61 | } 62 | } 63 | ``` 64 | 65 | 66 | #### Excluding fields [_excluding_fields] 67 | 68 | You can exclude fields from being collected by using the `exclude_fields` option in the `StdlibFormatter` constructor: 69 | 70 | ```python 71 | from ecs_logging import StdlibFormatter 72 | 73 | formatter = StdlibFormatter( 74 | exclude_fields=[ 75 | # You can specify individual fields to ignore: 76 | "log.original", 77 | # or you can also use prefixes to ignore 78 | # whole categories of fields: 79 | "process", 80 | "log.origin", 81 | ] 82 | ) 83 | ``` 84 | 85 | 86 | #### Limiting stack traces [_limiting_stack_traces] 87 | 88 | The `StdlibFormatter` automatically gathers `exc_info` into ECS `error.*` fields. If you’d like to control the number of stack frames that are included in `error.stack_trace` you can use the `stack_trace_limit` parameter (by default all frames are collected): 89 | 90 | ```python 91 | from ecs_logging import StdlibFormatter 92 | 93 | formatter = StdlibFormatter( 94 | # Only collects 3 stack frames 95 | stack_trace_limit=3, 96 | ) 97 | formatter = StdlibFormatter( 98 | # Disable stack trace collection 99 | stack_trace_limit=0, 100 | ) 101 | ``` 102 | 103 | 104 | ### Structlog Example [structlog] 105 | 106 | Note that the structlog processor should be the last processor in the list, as it handles the conversion to JSON as well as the ECS field enrichment. 
107 | 108 | ```python 109 | import structlog 110 | import ecs_logging 111 | 112 | # Configure Structlog 113 | structlog.configure( 114 | processors=[ecs_logging.StructlogFormatter()], 115 | wrapper_class=structlog.BoundLogger, 116 | context_class=dict, 117 | logger_factory=structlog.PrintLoggerFactory(), 118 | ) 119 | 120 | # Get the Logger 121 | logger = structlog.get_logger("app") 122 | 123 | # Add additional context 124 | logger = logger.bind(**{ 125 | "http": { 126 | "version": "2", 127 | "request": { 128 | "method": "get", 129 | "bytes": 1337, 130 | }, 131 | }, 132 | "url": { 133 | "domain": "example.com", 134 | "path": "/", 135 | "port": 443, 136 | "scheme": "https", 137 | "registered_domain": "example.com", 138 | "top_level_domain": "com", 139 | "original": "https://example.com", 140 | } 141 | }) 142 | 143 | # Emit a log! 144 | logger.debug("Example message!") 145 | ``` 146 | 147 | ```json 148 | { 149 | "@timestamp": "2020-03-26T13:08:11.728Z", 150 | "ecs": { 151 | "version": "1.6.0" 152 | }, 153 | "http": { 154 | "request": { 155 | "bytes": 1337, 156 | "method": "get" 157 | }, 158 | "version": "2" 159 | }, 160 | "log": { 161 | "level": "debug" 162 | }, 163 | "message": "Example message!", 164 | "url": { 165 | "domain": "example.com", 166 | "original": "https://example.com", 167 | "path": "/", 168 | "port": 443, 169 | "registered_domain": "example.com", 170 | "scheme": "https", 171 | "top_level_domain": "com" 172 | } 173 | } 174 | ``` 175 | 176 | 177 | ## Elastic APM log correlation [correlation] 178 | 179 | `ecs-logging-python` supports automatically collecting [ECS tracing fields](ecs://reference/ecs-tracing.md) from the [Elastic APM Python agent](https://github.com/elastic/apm-agent-python) in order to [correlate logs to spans, transactions and traces](apm-agent-python://reference/logs.md) in Elastic APM. 
180 | 181 | You can also quickly turn on ECS-formatted logs in your python app by setting [`LOG_ECS_REFORMATTING=override`](apm-agent-python://reference/configuration.md#config-log_ecs_reformatting) in the Elastic APM Python agent. 182 | 183 | 184 | ## Install Filebeat [filebeat] 185 | 186 | The best way to collect the logs once they are ECS-formatted is with [Filebeat](https://www.elastic.co/beats/filebeat): 187 | 188 | :::::::{tab-set} 189 | 190 | ::::::{tab-item} Log file 191 | 1. Follow the [Filebeat quick start](beats://reference/filebeat/filebeat-installation-configuration.md) 192 | 2. Add the following configuration to your `filebeat.yaml` file. 193 | 194 | For Filebeat 7.16+ 195 | 196 | ```yaml 197 | filebeat.inputs: 198 | - type: filestream <1> 199 | paths: /path/to/logs.json 200 | parsers: 201 | - ndjson: 202 | overwrite_keys: true <2> 203 | add_error_key: true <3> 204 | expand_keys: true <4> 205 | 206 | processors: <5> 207 | - add_host_metadata: ~ 208 | - add_cloud_metadata: ~ 209 | - add_docker_metadata: ~ 210 | - add_kubernetes_metadata: ~ 211 | ``` 212 | 213 | 1. Use the filestream input to read lines from active log files. 214 | 2. Values from the decoded JSON object overwrite the fields that {{filebeat}} normally adds (type, source, offset, etc.) in case of conflicts. 215 | 3. {{filebeat}} adds an "error.message" and "error.type: json" key in case of JSON unmarshalling errors. 216 | 4. {{filebeat}} will recursively de-dot keys in the decoded JSON, and expand them into a hierarchical object structure. 217 | 5. Processors enhance your data. See [processors](beats://reference/filebeat/filtering-enhancing-data.md) to learn more. 
218 | 219 | 220 | For Filebeat < 7.16 221 | 222 | ```yaml 223 | filebeat.inputs: 224 | - type: log 225 | paths: /path/to/logs.json 226 | json.keys_under_root: true 227 | json.overwrite_keys: true 228 | json.add_error_key: true 229 | json.expand_keys: true 230 | 231 | processors: 232 | - add_host_metadata: ~ 233 | - add_cloud_metadata: ~ 234 | - add_docker_metadata: ~ 235 | - add_kubernetes_metadata: ~ 236 | ``` 237 | :::::: 238 | 239 | ::::::{tab-item} Kubernetes 240 | 1. Make sure your application logs to stdout/stderr. 241 | 2. Follow the [Run Filebeat on Kubernetes](beats://reference/filebeat/running-on-kubernetes.md) guide. 242 | 3. Enable [hints-based autodiscover](beats://reference/filebeat/configuration-autodiscover-hints.md) (uncomment the corresponding section in `filebeat-kubernetes.yaml`). 243 | 4. Add these annotations to your pods that log using ECS loggers. This will make sure the logs are parsed appropriately. 244 | 245 | ```yaml 246 | annotations: 247 | co.elastic.logs/json.overwrite_keys: true <1> 248 | co.elastic.logs/json.add_error_key: true <2> 249 | co.elastic.logs/json.expand_keys: true <3> 250 | ``` 251 | 252 | 1. Values from the decoded JSON object overwrite the fields that {{filebeat}} normally adds (type, source, offset, etc.) in case of conflicts. 253 | 2. {{filebeat}} adds an "error.message" and "error.type: json" key in case of JSON unmarshalling errors. 254 | 3. {{filebeat}} will recursively de-dot keys in the decoded JSON, and expand them into a hierarchical object structure. 255 | :::::: 256 | 257 | ::::::{tab-item} Docker 258 | 1. Make sure your application logs to stdout/stderr. 259 | 2. Follow the [Run Filebeat on Docker](beats://reference/filebeat/running-on-docker.md) guide. 260 | 3. Enable [hints-based autodiscover](beats://reference/filebeat/configuration-autodiscover-hints.md). 261 | 4. Add these labels to your containers that log using ECS loggers. This will make sure the logs are parsed appropriately. 
262 | 263 | ```yaml 264 | labels: 265 | co.elastic.logs/json.overwrite_keys: true <1> 266 | co.elastic.logs/json.add_error_key: true <2> 267 | co.elastic.logs/json.expand_keys: true <3> 268 | ``` 269 | 270 | 1. Values from the decoded JSON object overwrite the fields that {{filebeat}} normally adds (type, source, offset, etc.) in case of conflicts. 271 | 2. {{filebeat}} adds an "error.message" and "error.type: json" key in case of JSON unmarshalling errors. 272 | 3. {{filebeat}} will recursively de-dot keys in the decoded JSON, and expand them into a hierarchical object structure. 273 | :::::: 274 | 275 | ::::::: 276 | For more information, see the [Filebeat reference](beats://reference/filebeat/configuring-howto-filebeat.md). 277 | 278 | -------------------------------------------------------------------------------- /docs/reference/toc.yml: -------------------------------------------------------------------------------- 1 | toc: 2 | - file: index.md 3 | - file: installation.md -------------------------------------------------------------------------------- /ecs_logging/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. 
# See the License for the
# specific language governing permissions and limitations
# under the License.
"""Logging formatters for ECS (Elastic Common Schema) in Python"""

from ._meta import ECS_VERSION
from ._stdlib import StdlibFormatter
from ._structlog import StructlogFormatter

# Package version; bumped per RELEASING.md and kept in sync with the git tag.
__version__ = "2.2.0"
# Public API re-exported by the package.
__all__ = [
    "ECS_VERSION",
    "StdlibFormatter",
    "StructlogFormatter",
]

# --------------------------------------------------------------------------
# /ecs_logging/_meta.py:
# --------------------------------------------------------------------------
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Version of the Elastic Common Schema emitted by the formatters;
# reported in the "ecs.version" field of every log record.
ECS_VERSION = "1.6.0"

# --------------------------------------------------------------------------
# /ecs_logging/_stdlib.py:
# --------------------------------------------------------------------------
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V.
# licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import collections.abc
import logging
import sys
import time
from functools import lru_cache
from traceback import format_tb

from ._meta import ECS_VERSION
from ._utils import (
    de_dot,
    flatten_dict,
    json_dumps,
    merge_dicts,
)

from typing import Any, Callable, Dict, Optional, Sequence, Union

try:
    from typing import Literal  # type: ignore
except ImportError:
    from typing_extensions import Literal  # type: ignore


# Load the attributes of a LogRecord so if some are
# added in the future we won't mistake them for 'extra=...'
try:
    _LOGRECORD_DIR = set(dir(logging.LogRecord("", 0, "", 0, "", (), None)))
except Exception:  # LogRecord signature changed?
    _LOGRECORD_DIR = set()


class StdlibFormatter(logging.Formatter):
    """ECS Formatter for the standard library ``logging`` module"""

    # Known LogRecord attribute names; any other key found on a record's
    # ``__dict__`` is treated as user-supplied ``extra={...}`` data.
    # Unioned with the dynamically discovered attributes above so future
    # stdlib additions are not mistaken for extras.
    _LOGRECORD_DICT = {
        "name",
        "msg",
        "args",
        "asctime",
        "levelname",
        "levelno",
        "pathname",
        "filename",
        "module",
        "exc_info",
        "exc_text",
        "stack_info",
        "lineno",
        "funcName",
        "created",
        "msecs",
        "relativeCreated",
        "thread",
        "threadName",
        "processName",
        "process",
        "message",
    } | _LOGRECORD_DIR
    # Render timestamps in UTC (used by logging.Formatter.formatTime).
    converter = time.gmtime

    def __init__(
        self,
        fmt: Optional[str] = None,
        datefmt: Optional[str] = None,
        style: Union[Literal["%"], Literal["{"], Literal["$"]] = "%",
        validate: Optional[bool] = None,
        stack_trace_limit: Optional[int] = None,
        extra: Optional[Dict[str, Any]] = None,
        exclude_fields: Sequence[str] = (),
    ) -> None:
        """Initialize the ECS formatter.

        :param int stack_trace_limit:
            Specifies the maximum number of frames to include for stack
            traces. Defaults to ``None`` which includes all available frames.
            Setting this to zero will suppress stack traces.
            This setting doesn't affect ``LogRecord.stack_info`` because
            this attribute is typically already pre-formatted.
        :param Optional[Dict[str, Any]] extra:
            Specifies the collection of meta-data fields to add to all records.
        :param Sequence[str] exclude_fields:
            Specifies any fields that should be suppressed from the resulting
            fields, expressed with dot notation::

                exclude_fields=["error.stack_trace"]

            You can also use field prefixes to exclude whole groups of fields::

                exclude_fields=["error"]
        """
        _kwargs = {}
        if validate is not None:
            # validate was introduced in py3.8 so we need to only provide it if the user provided it
            _kwargs["validate"] = validate
        super().__init__(  # type: ignore[call-arg]
            fmt=fmt, datefmt=datefmt, style=style, **_kwargs  # type: ignore[arg-type]
        )

        if stack_trace_limit is not None:
            if not isinstance(stack_trace_limit, int):
                raise TypeError(
                    "'stack_trace_limit' must be None, or a non-negative integer"
                )
            elif stack_trace_limit < 0:
                raise ValueError(
                    "'stack_trace_limit' must be None, or a non-negative integer"
                )

        if (
            not isinstance(exclude_fields, collections.abc.Sequence)
            or isinstance(exclude_fields, str)
            or any(not isinstance(item, str) for item in exclude_fields)
        ):
            raise TypeError("'exclude_fields' must be a sequence of strings")

        self._extra = extra
        self._exclude_fields = frozenset(exclude_fields)
        self._stack_trace_limit = stack_trace_limit

    def _record_error_type(self, record: logging.LogRecord) -> Optional[str]:
        # Returns the exception class name for ECS 'error.type', or None.
        exc_info = record.exc_info
        if not exc_info:
            # exc_info is either an iterable or bool. If it doesn't
            # evaluate to True, then no error type is used.
            return None
        if isinstance(exc_info, bool):
            # if it is a bool, then look at sys.exc_info
            exc_info = sys.exc_info()
        if isinstance(exc_info, (list, tuple)) and exc_info[0] is not None:
            return exc_info[0].__name__
        return None

    def _record_error_message(self, record: logging.LogRecord) -> Optional[str]:
        # Returns str(exception) for ECS 'error.message', or None.
        exc_info = record.exc_info
        if not exc_info:
            # exc_info is either an iterable or bool. If it doesn't
            # evaluate to True, then no error message is used.
            return None
        if isinstance(exc_info, bool):
            # if it is a bool, then look at sys.exc_info
            exc_info = sys.exc_info()
        if isinstance(exc_info, (list, tuple)) and exc_info[1]:
            return str(exc_info[1])
        return None

    def format(self, record: logging.LogRecord) -> str:
        """Serialize ``record`` into an ECS JSON string via ``json_dumps``."""
        result = self.format_to_ecs(record)
        return json_dumps(result)

    def format_to_ecs(self, record: logging.LogRecord) -> Dict[str, Any]:
        """Function that can be overridden to add additional fields to
        (or remove fields from) the JSON before being dumped into a string.

        .. code-block:: python

            class MyFormatter(StdlibFormatter):
                def format_to_ecs(self, record):
                    result = super().format_to_ecs(record)
                    del result["log"]["original"]  # remove unwanted field(s)
                    result["my_field"] = "my_value"  # add custom field
                    return result
        """

        # Dotted-ECS-field-name -> extractor callable; each extractor returns
        # None when the field should be omitted.
        extractors: Dict[str, Callable[[logging.LogRecord], Any]] = {
            "@timestamp": self._record_timestamp,
            "ecs.version": lambda _: ECS_VERSION,
            "log.level": lambda r: (r.levelname.lower() if r.levelname else None),
            "log.origin.function": self._record_attribute("funcName"),
            "log.origin.file.line": self._record_attribute("lineno"),
            "log.origin.file.name": self._record_attribute("filename"),
            "log.original": lambda r: r.getMessage(),
            "log.logger": self._record_attribute("name"),
            "process.pid": self._record_attribute("process"),
            "process.name": self._record_attribute("processName"),
            "process.thread.id": self._record_attribute("thread"),
            "process.thread.name": self._record_attribute("threadName"),
            "error.type": self._record_error_type,
            "error.message": self._record_error_message,
            "error.stack_trace": self._record_error_stack_trace,
        }

        result: Dict[str, Any] = {}
        for field in set(extractors.keys()).difference(self._exclude_fields):
            if self._is_field_excluded(field):
                continue
            value = extractors[field](record)
            if value is not None:
                # special case ecs.version that should not be de-dotted
                if field == "ecs.version":
                    field_dict = {field: value}
                else:
                    field_dict = de_dot(field, value)
                merge_dicts(field_dict, result)

        available = record.__dict__

        # This is cleverness because 'message' is NOT a member
        # key of ``record.__dict__`` the ``getMessage()`` method
        # is effectively ``msg % args`` (actual keys) By manually
        # adding 'message' to ``available``, it simplifies the code
        available["message"] = record.getMessage()

        # Pull all extras and flatten them to be sent into '_is_field_excluded'
        # since they can be defined as 'extras={"http": {"method": "GET"}}'
        extra_keys = set(available).difference(self._LOGRECORD_DICT)
        extras = flatten_dict({key: available[key] for key in extra_keys})
        # Merge in any global extra's
        if self._extra is not None:
            for field, value in self._extra.items():
                merge_dicts(de_dot(field, value), extras)

        # Pop all Elastic APM extras and add them
        # to standard tracing ECS fields.
        extras.setdefault("span.id", extras.pop("elasticapm_span_id", None))
        extras.setdefault(
            "transaction.id", extras.pop("elasticapm_transaction_id", None)
        )
        extras.setdefault("trace.id", extras.pop("elasticapm_trace_id", None))
        extras.setdefault("service.name", extras.pop("elasticapm_service_name", None))
        extras.setdefault(
            "service.environment", extras.pop("elasticapm_service_environment", None)
        )

        # Merge in any keys that were set within 'extra={...}'
        for field, value in extras.items():
            if field.startswith("elasticapm_labels."):
                continue  # Unconditionally remove, we don't need this info.
            if value is None or self._is_field_excluded(field):
                continue
            merge_dicts(de_dot(field, value), result)

        # The following is mostly for the ecs format. You can't have 2x
        # 'message' keys in _WANTED_ATTRS, so we set the value to
        # 'log.original' in ecs, and this code block guarantees it
        # still appears as 'message' too.
        if not self._is_field_excluded("message"):
            result.setdefault("message", available["message"])
        return result

    @lru_cache()
    def _is_field_excluded(self, field: str) -> bool:
        # True if the dotted field name, or any dotted prefix of it, is in
        # self._exclude_fields.
        # NOTE(review): lru_cache on an instance method caches per
        # (self, field) and keeps a reference to the formatter for the
        # life of the process — acceptable for long-lived formatters.
        field_path = []
        for path in field.split("."):
            field_path.append(path)
            if ".".join(field_path) in self._exclude_fields:
                return True
        return False

    def _record_timestamp(self, record: logging.LogRecord) -> str:
        # formatTime honors the class-level ``converter`` (gmtime), so this
        # is UTC; milliseconds are appended manually with a trailing 'Z'.
        return "%s.%03dZ" % (
            self.formatTime(record, datefmt="%Y-%m-%dT%H:%M:%S"),
            record.msecs,
        )

    def _record_attribute(
        self, attribute: str
    ) -> Callable[[logging.LogRecord], Optional[Any]]:
        # Builds an extractor that reads ``attribute`` off the record,
        # returning None when the attribute is absent.
        return lambda r: getattr(r, attribute, None)

    def _record_error_stack_trace(self, record: logging.LogRecord) -> Optional[str]:
        # Using stack_info=True will add 'error.stack_trace' even
        # if the type is not 'error', exc_info=True only gathers
        # when there's an active exception.
        if (
            record.exc_info
            and record.exc_info[2] is not None
            and (self._stack_trace_limit is None or self._stack_trace_limit > 0)
        ):
            return (
                "".join(format_tb(record.exc_info[2], limit=self._stack_trace_limit))
                or None
            )
        # LogRecord only has 'stack_info' if it's passed via .log(..., stack_info=True)
        stack_info = getattr(record, "stack_info", None)
        if stack_info:
            return str(stack_info)
        return None

# --------------------------------------------------------------------------
# /ecs_logging/_structlog.py:
# --------------------------------------------------------------------------
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V.
licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import time 19 | import datetime 20 | from typing import Any, Dict 21 | 22 | from ._meta import ECS_VERSION 23 | from ._utils import json_dumps, normalize_dict 24 | 25 | 26 | class StructlogFormatter: 27 | """ECS formatter for the ``structlog`` module""" 28 | 29 | def __call__(self, _: Any, name: str, event_dict: Dict[str, Any]) -> str: 30 | 31 | # Handle event -> message now so that stuff like `event.dataset` doesn't 32 | # cause problems down the line 33 | event_dict["message"] = str(event_dict.pop("event")) 34 | event_dict = normalize_dict(event_dict) 35 | event_dict.setdefault("log", {}).setdefault("level", name.lower()) 36 | event_dict = self.format_to_ecs(event_dict) 37 | return self._json_dumps(event_dict) 38 | 39 | def format_to_ecs(self, event_dict: Dict[str, Any]) -> Dict[str, Any]: 40 | if "@timestamp" not in event_dict: 41 | event_dict["@timestamp"] = ( 42 | datetime.datetime.fromtimestamp( 43 | time.time(), tz=datetime.timezone.utc 44 | ).strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] 45 | + "Z" 46 | ) 47 | 48 | if "exception" in event_dict: 49 | stack_trace = event_dict.pop("exception") 50 | if "error" in event_dict: 51 | event_dict["error"]["stack_trace"] = stack_trace 52 | else: 53 | event_dict["error"] = {"stack_trace": stack_trace} 54 | 55 | event_dict.setdefault("ecs.version", ECS_VERSION) 56 | return event_dict 57 | 58 | def _json_dumps(self, 
value: Dict[str, Any]) -> str: 59 | return json_dumps(value=value) 60 | -------------------------------------------------------------------------------- /ecs_logging/_utils.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import collections.abc 19 | import json 20 | import functools 21 | from typing import Any, Dict, Mapping 22 | 23 | 24 | __all__ = [ 25 | "normalize_dict", 26 | "de_dot", 27 | "merge_dicts", 28 | "json_dumps", 29 | ] 30 | 31 | 32 | def flatten_dict(value: Mapping[str, Any]) -> Dict[str, Any]: 33 | """Adds dots to all nested fields in dictionaries. 34 | Raises an error if there are entries which are represented 35 | with different forms of nesting. 
(ie {"a": {"b": 1}, "a.b": 2}) 36 | """ 37 | top_level = {} 38 | for key, val in value.items(): 39 | if not isinstance(val, collections.abc.Mapping): 40 | if key in top_level: 41 | raise ValueError(f"Duplicate entry for '{key}' with different nesting") 42 | top_level[key] = val 43 | else: 44 | val = flatten_dict(val) 45 | for vkey, vval in val.items(): 46 | vkey = f"{key}.{vkey}" 47 | if vkey in top_level: 48 | raise ValueError( 49 | f"Duplicate entry for '{vkey}' with different nesting" 50 | ) 51 | top_level[vkey] = vval 52 | 53 | return top_level 54 | 55 | 56 | def normalize_dict(value: Dict[str, Any]) -> Dict[str, Any]: 57 | """Expands all dotted names to nested dictionaries""" 58 | if not isinstance(value, dict): 59 | return value 60 | keys = list(value.keys()) 61 | for key in keys: 62 | if "." in key: 63 | merge_dicts(de_dot(key, value.pop(key)), value) 64 | for key, val in value.items(): 65 | if isinstance(val, dict): 66 | normalize_dict(val) 67 | elif isinstance(val, list): 68 | val[:] = [normalize_dict(x) for x in val] 69 | return value 70 | 71 | 72 | def de_dot(dot_string: str, msg: Any) -> Dict[str, Any]: 73 | """Turn value and dotted string key into a nested dictionary""" 74 | arr = dot_string.split(".") 75 | ret = {arr[-1]: msg} 76 | for i in range(len(arr) - 2, -1, -1): 77 | ret = {arr[i]: ret} 78 | return ret 79 | 80 | 81 | def merge_dicts(from_: Dict[Any, Any], into: Dict[Any, Any]) -> Dict[Any, Any]: 82 | """Merge deeply nested dictionary structures. 83 | When called has side-effects within 'destination'. 84 | """ 85 | for key, value in from_.items(): 86 | into.setdefault(key, {}) 87 | if isinstance(value, dict) and isinstance(into[key], dict): 88 | merge_dicts(value, into[key]) 89 | elif into[key] != {}: 90 | raise TypeError( 91 | "Type mismatch at key `{}`: merging dicts would replace value `{}` with `{}`. 
This is likely due to " 92 | "dotted keys in the event dict being turned into nested dictionaries, causing a conflict.".format( 93 | key, into[key], value 94 | ) 95 | ) 96 | else: 97 | into[key] = value 98 | return into 99 | 100 | 101 | def json_dumps(value: Dict[str, Any]) -> str: 102 | 103 | # Ensure that the first three fields are '@timestamp', 104 | # 'log.level', and 'message' per ECS spec 105 | ordered_fields = [] 106 | try: 107 | ordered_fields.append(("@timestamp", value.pop("@timestamp"))) 108 | except KeyError: 109 | pass 110 | 111 | # log.level can either be nested or not nested so we have to try both 112 | try: 113 | ordered_fields.append(("log.level", value["log"].pop("level"))) 114 | if not value["log"]: # Remove the 'log' dictionary if it's now empty 115 | value.pop("log", None) 116 | except KeyError: 117 | try: 118 | ordered_fields.append(("log.level", value.pop("log.level"))) 119 | except KeyError: 120 | pass 121 | try: 122 | ordered_fields.append(("message", value.pop("message"))) 123 | except KeyError: 124 | pass 125 | 126 | json_dumps = functools.partial( 127 | json.dumps, sort_keys=True, separators=(",", ":"), default=_json_dumps_fallback 128 | ) 129 | 130 | # Because we want to use 'sorted_keys=True' we manually build 131 | # the first three keys and then build the rest with json.dumps() 132 | if ordered_fields: 133 | # Need to call json.dumps() on values just in 134 | # case the given values aren't strings (even though 135 | # they should be according to the spec) 136 | ordered_json = ",".join(f'"{k}":{json_dumps(v)}' for k, v in ordered_fields) 137 | if value: 138 | return "{{{},{}".format( 139 | ordered_json, 140 | json_dumps(value)[1:], 141 | ) 142 | else: 143 | return "{%s}" % ordered_json 144 | # If there are no fields with ordering requirements we 145 | # pass everything into json.dumps() 146 | else: 147 | return json_dumps(value) 148 | 149 | 150 | def _json_dumps_fallback(value: Any) -> Any: 151 | """ 152 | Fallback handler for 
json.dumps to handle objects json doesn't know how to 153 | serialize. 154 | """ 155 | try: 156 | # This is what structlog's json fallback does 157 | return value.__structlog__() 158 | except AttributeError: 159 | return repr(value) 160 | -------------------------------------------------------------------------------- /ecs_logging/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elastic/ecs-logging-python/4da1fda397737c53a433306432140ec46dc4e276/ecs_logging/py.typed -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | exclude = "/tests/" 3 | 4 | [mypy-tests.*] 5 | ignore_errors = true 6 | 7 | [mypy-noxfile] 8 | ignore_errors = true -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import nox 19 | 20 | 21 | SOURCE_FILES = ("noxfile.py", "tests/", "ecs_logging/") 22 | 23 | 24 | def tests_impl(session): 25 | session.install(".[develop]") 26 | # Install `elastic-apm` from master branch 27 | session.install( 28 | "elastic-apm @ https://github.com/elastic/apm-agent-python/archive/master.zip" 29 | ) 30 | session.run( 31 | "pytest", 32 | "--junitxml=junit-test.xml", 33 | "--cov=ecs_logging", 34 | *(session.posargs or ("tests/",)), 35 | env={"PYTHONWARNINGS": "always::DeprecationWarning"}, 36 | ) 37 | 38 | 39 | @nox.session(python=["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]) 40 | def test(session): 41 | tests_impl(session) 42 | 43 | 44 | @nox.session() 45 | def blacken(session): 46 | session.install("black") 47 | session.run("black", "--target-version=py36", *SOURCE_FILES) 48 | 49 | lint(session) 50 | 51 | 52 | @nox.session 53 | def lint(session): 54 | session.install("flake8", "black", "mypy") 55 | session.run("black", "--check", "--target-version=py36", *SOURCE_FILES) 56 | session.run("flake8", "--ignore=E501,W503", *SOURCE_FILES) 57 | session.run( 58 | "mypy", 59 | "--strict", 60 | "--show-error-codes", 61 | "--no-warn-unused-ignores", 62 | "ecs_logging/", 63 | ) 64 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["flit_core >=2,<4"] 3 | build-backend = "flit_core.buildapi" 4 | 5 | [tool.flit.metadata] 6 | dist-name = "ecs-logging" 7 | module = "ecs_logging" 8 | description-file = "README.md" 9 | author = "Seth Michael Larson" 10 | author-email = "seth.larson@elastic.co" 11 | home-page = "https://github.com/elastic/ecs-logging-python" 12 | classifiers = [ 13 | "Development Status :: 5 - Production/Stable", 14 | "Intended Audience :: Developers", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.8", 17 | "Programming Language :: Python 
:: 3.9", 18 | "Programming Language :: Python :: 3.10", 19 | "Programming Language :: Python :: 3.11", 20 | "Programming Language :: Python :: 3.12", 21 | "Programming Language :: Python :: 3.13", 22 | "Topic :: System :: Logging", 23 | "License :: OSI Approved :: Apache Software License" 24 | ] 25 | requires = [] 26 | requires-python = ">=3.8" 27 | 28 | [tool.flit.metadata.requires-extra] 29 | develop = [ 30 | "pytest", 31 | "pytest-cov", 32 | "mock", 33 | "structlog", 34 | "elastic-apm", 35 | ] 36 | 37 | [tool.flit.metadata.urls] 38 | "Source" = "https://github.com/elastic/ecs-logging-python" 39 | "Download" = "https://github.com/elastic/ecs-logging-python/releases" 40 | "Documentation" = "https://github.com/elastic/ecs-logging-python" 41 | "Issue Tracker" = "https://github.com/elastic/ecs-logging-python/issues" 42 | "Changelog" = "https://github.com/elastic/ecs-logging-python/blob/main/CHANGELOG.md" 43 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | junit_logging = system-out 3 | junit_log_passing_tests = True 4 | junit_duration_report = call 5 | junit_family=xunit1 -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import collections 19 | import datetime 20 | import json 21 | import logging 22 | import os 23 | 24 | import elasticapm 25 | import pytest 26 | 27 | 28 | class ValidationError(Exception): 29 | pass 30 | 31 | 32 | @pytest.fixture 33 | def spec_validator(): 34 | with open(os.path.join(os.path.dirname(__file__), "resources", "spec.json")) as fh: 35 | spec = json.load(fh) 36 | 37 | def validator(data_json): 38 | """ 39 | Throws a ValidationError if anything doesn't match the spec. 
40 | 41 | Returns the original json (pass-through) 42 | """ 43 | fields = spec["fields"] 44 | data = json.loads(data_json, object_pairs_hook=collections.OrderedDict) 45 | for k, v in fields.items(): 46 | if v.get("required"): 47 | found = False 48 | if k in data: 49 | found = True 50 | elif "." in k: 51 | # Dotted keys could be nested, like ecs.version 52 | subkeys = k.split(".") 53 | subval = data 54 | for subkey in subkeys: 55 | subval = subval.get(subkey, {}) 56 | if subval: 57 | found = True 58 | if not found: 59 | raise ValidationError(f"Missing required key {k}") 60 | if k in data: 61 | if v["type"] == "string" and not isinstance(data[k], str): 62 | raise ValidationError( 63 | "Value {} for key {} should be string, is {}".format( 64 | data[k], k, type(data[k]) 65 | ) 66 | ) 67 | if v["type"] == "datetime": 68 | try: 69 | datetime.datetime.strptime(data[k], "%Y-%m-%dT%H:%M:%S.%fZ") 70 | except ValueError: 71 | raise ValidationError( 72 | "Value {} for key {} doesn't parse as an ISO datetime".format( 73 | data[k], k 74 | ) 75 | ) 76 | if v.get("index") and list(data.keys())[v.get("index")] != k: 77 | raise ValidationError(f"Key {k} is not at index {v.get('index')}") 78 | 79 | return data_json 80 | 81 | return validator 82 | 83 | 84 | @pytest.fixture 85 | def apm(): 86 | record_factory = logging.getLogRecordFactory() 87 | apm = elasticapm.Client( 88 | {"SERVICE_NAME": "apm-service", "ENVIRONMENT": "dev", "DISABLE_SEND": True} 89 | ) 90 | yield apm 91 | apm.close() 92 | logging.setLogRecordFactory(record_factory) 93 | -------------------------------------------------------------------------------- /tests/resources/spec.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1.0, 3 | "url": "https://www.elastic.co/guide/en/ecs/current/index.html", 4 | "ecs": { 5 | "version": "1.x" 6 | }, 7 | "fields": { 8 | "@timestamp": { 9 | "type": "datetime", 10 | "required": true, 11 | "index": 0, 12 | "url": 
"https://www.elastic.co/guide/en/ecs/current/ecs-base.html", 13 | "comment": [ 14 | "Field order, as specified by 'index', is RECOMMENDED.", 15 | "ECS loggers must implement field order unless the logging framework makes that impossible." 16 | ] 17 | }, 18 | "log.level": { 19 | "type": "string", 20 | "required": true, 21 | "index": 1, 22 | "top_level_field": true, 23 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-log.html", 24 | "comment": [ 25 | "This field SHOULD NOT be a nested object field but at the top level with a dot in the property name.", 26 | "This is to make the JSON logs more human-readable.", 27 | "Loggers MAY indent the log level so that the `message` field always starts at the exact same offset,", 28 | "no matter the number of characters the log level has.", 29 | "For example: `'DEBUG'` (5 chars) will not be indented, whereas ` 'WARN'` (4 chars) will be indented by one space character." 30 | ] 31 | }, 32 | "message": { 33 | "type": "string", 34 | "required": false, 35 | "index": 2, 36 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-base.html", 37 | "comment": [ 38 | "A message field is typically included in all log records, but some logging libraries allow records with no message.", 39 | "That's typically the case for libraries that allow for structured logging." 40 | ] 41 | }, 42 | "ecs.version": { 43 | "type": "string", 44 | "required": true, 45 | "top_level_field": true, 46 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-ecs.html", 47 | "comment": [ 48 | "This field SHOULD NOT be a nested object field but at the top level with a dot in the property name.", 49 | "This is to make the JSON logs more human-readable." 
50 | ] 51 | }, 52 | "labels": { 53 | "type": "object", 54 | "required": false, 55 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-base.html", 56 | "sanitization": { 57 | "key": { 58 | "replacements": [".", "*", "\\"], 59 | "substitute": "_" 60 | } 61 | } 62 | }, 63 | "trace.id": { 64 | "type": "string", 65 | "required": false, 66 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html", 67 | "comment": "When APM agents add this field to the context, ecs loggers should pick it up and add it to the log event." 68 | }, 69 | "transaction.id": { 70 | "type": "string", 71 | "required": false, 72 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html", 73 | "comment": "When APM agents add this field to the context, ecs loggers should pick it up and add it to the log event." 74 | }, 75 | "service.name": { 76 | "type": "string", 77 | "required": false, 78 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-service.html", 79 | "comment": [ 80 | "Configurable by users.", 81 | "When an APM agent is active, it should auto-configure this field if not already set." 82 | ] 83 | }, 84 | "service.node.name": { 85 | "type": "string", 86 | "required": false, 87 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-service.html", 88 | "comment": [ 89 | "Configurable by users.", 90 | "When an APM agent is active and `service_node_name` is manually configured, the agent should auto-configure this field if not already set." 91 | ] 92 | }, 93 | "service.version": { 94 | "type": "string", 95 | "required": false, 96 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-service.html#field-service-version", 97 | "comment": [ 98 | "Configurable by users.", 99 | "When an APM agent is active, it should auto-configure it if not already set." 
100 | ] 101 | }, 102 | "event.dataset": { 103 | "type": "string", 104 | "required": false, 105 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-event.html", 106 | "default": "${service.name} OR ${service.name}.${appender.name}", 107 | "comment": [ 108 | "Configurable by users.", 109 | "If the user manually configures the service name,", 110 | "the logging library should set `event.dataset=${service.name}` if not explicitly configured otherwise.", 111 | "", 112 | "When agents auto-configure the app to use an ECS logger,", 113 | "they should set `event.dataset=${service.name}.${appender.name}` if the appender name is available in the logging library.", 114 | "Otherwise, agents should also set `event.dataset=${service.name}`", 115 | "", 116 | "The field helps to filter for different log streams from the same pod, for example and is required for log anomaly detection." 117 | ] 118 | }, 119 | "service.environment": { 120 | "type": "string", 121 | "required": false, 122 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-service.html#field-service-environment", 123 | "comment": [ 124 | "Configurable by users.", 125 | "When an APM agent is active, it should auto-configure it if not already set." 126 | ] 127 | }, 128 | "process.thread.name": { 129 | "type": "string", 130 | "required": false, 131 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-process.html" 132 | }, 133 | "log.logger": { 134 | "type": "string", 135 | "required": false, 136 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-log.html" 137 | }, 138 | "log.origin.file.line": { 139 | "type": "integer", 140 | "required": false, 141 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-log.html", 142 | "comment": "Should be opt-in as it requires the logging library to capture a stack trace for each log event." 
143 | }, 144 | "log.origin.file.name": { 145 | "type": "string", 146 | "required": false, 147 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-log.html", 148 | "comment": "Should be opt-in as it requires the logging library to capture a stack trace for each log event." 149 | }, 150 | "log.origin.function": { 151 | "type": "string", 152 | "required": false, 153 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-log.html", 154 | "comment": "Should be opt-in as it requires the logging library to capture a stack trace for each log event." 155 | }, 156 | "error.type": { 157 | "type": "string", 158 | "required": false, 159 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-error.html", 160 | "comment": "The exception type or class, such as `java.lang.IllegalArgumentException`." 161 | }, 162 | "error.message": { 163 | "type": "string", 164 | "required": false, 165 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-error.html", 166 | "comment": "The message of the exception." 167 | }, 168 | "error.stack_trace": { 169 | "type": "string", 170 | "required": false, 171 | "url": "https://www.elastic.co/guide/en/ecs/current/ecs-error.html", 172 | "comment": "The stack trace of the exception as plain text." 173 | } 174 | } 175 | } 176 | -------------------------------------------------------------------------------- /tests/test_apm.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import json 19 | import logging 20 | from io import StringIO 21 | 22 | import elasticapm 23 | import structlog 24 | from elasticapm.handlers.logging import LoggingFilter 25 | from elasticapm.handlers.structlog import structlog_processor 26 | 27 | import ecs_logging 28 | 29 | 30 | def test_elasticapm_structlog_log_correlation_ecs_fields(spec_validator, apm): 31 | stream = StringIO() 32 | logger = structlog.PrintLogger(stream) 33 | logger = structlog.wrap_logger( 34 | logger, processors=[structlog_processor, ecs_logging.StructlogFormatter()] 35 | ) 36 | log = logger.new() 37 | 38 | apm.begin_transaction("test-transaction") 39 | try: 40 | with elasticapm.capture_span("test-span"): 41 | span_id = elasticapm.get_span_id() 42 | trace_id = elasticapm.get_trace_id() 43 | transaction_id = elasticapm.get_transaction_id() 44 | 45 | log.info("test message") 46 | finally: 47 | apm.end_transaction("test-transaction") 48 | 49 | ecs = json.loads(spec_validator(stream.getvalue().rstrip())) 50 | ecs.pop("@timestamp") 51 | assert ecs == { 52 | "ecs.version": "1.6.0", 53 | "log.level": "info", 54 | "message": "test message", 55 | "span": {"id": span_id}, 56 | "trace": {"id": trace_id}, 57 | "transaction": {"id": transaction_id}, 58 | "service": {"name": "apm-service", "environment": "dev"}, 59 | } 60 | 61 | 62 | def test_elastic_apm_stdlib_no_filter_log_correlation_ecs_fields(apm): 63 | stream = StringIO() 64 | logger = logging.getLogger("apm-logger") 65 | handler = logging.StreamHandler(stream) 66 | handler.setFormatter( 67 | 
ecs_logging.StdlibFormatter( 68 | exclude_fields=["@timestamp", "process", "log.origin.file.line"] 69 | ) 70 | ) 71 | logger.addHandler(handler) 72 | logger.setLevel(logging.DEBUG) 73 | 74 | apm.begin_transaction("test-transaction") 75 | try: 76 | with elasticapm.capture_span("test-span"): 77 | span_id = elasticapm.get_span_id() 78 | trace_id = elasticapm.get_trace_id() 79 | transaction_id = elasticapm.get_transaction_id() 80 | 81 | logger.info("test message") 82 | finally: 83 | apm.end_transaction("test-transaction") 84 | 85 | ecs = json.loads(stream.getvalue().rstrip()) 86 | assert ecs == { 87 | "ecs.version": "1.6.0", 88 | "log.level": "info", 89 | "log": { 90 | "logger": "apm-logger", 91 | "origin": { 92 | "file": {"name": "test_apm.py"}, 93 | "function": "test_elastic_apm_stdlib_no_filter_log_correlation_ecs_fields", 94 | }, 95 | "original": "test message", 96 | }, 97 | "message": "test message", 98 | "span": {"id": span_id}, 99 | "trace": {"id": trace_id}, 100 | "transaction": {"id": transaction_id}, 101 | "service": {"name": "apm-service", "environment": "dev"}, 102 | } 103 | 104 | 105 | def test_elastic_apm_stdlib_with_filter_log_correlation_ecs_fields(apm): 106 | stream = StringIO() 107 | logger = logging.getLogger("apm-logger") 108 | handler = logging.StreamHandler(stream) 109 | handler.setFormatter( 110 | ecs_logging.StdlibFormatter( 111 | exclude_fields=["@timestamp", "process", "log.origin.file.line"] 112 | ) 113 | ) 114 | handler.addFilter(LoggingFilter()) 115 | logger.addHandler(handler) 116 | logger.setLevel(logging.DEBUG) 117 | 118 | apm.begin_transaction("test-transaction") 119 | try: 120 | with elasticapm.capture_span("test-span"): 121 | span_id = elasticapm.get_span_id() 122 | trace_id = elasticapm.get_trace_id() 123 | transaction_id = elasticapm.get_transaction_id() 124 | 125 | logger.info("test message") 126 | finally: 127 | apm.end_transaction("test-transaction") 128 | 129 | ecs = json.loads(stream.getvalue().rstrip()) 130 | assert ecs == { 
131 | "ecs.version": "1.6.0", 132 | "log.level": "info", 133 | "log": { 134 | "logger": "apm-logger", 135 | "origin": { 136 | "file": {"name": "test_apm.py"}, 137 | "function": "test_elastic_apm_stdlib_with_filter_log_correlation_ecs_fields", 138 | }, 139 | "original": "test message", 140 | }, 141 | "message": "test message", 142 | "span": {"id": span_id}, 143 | "trace": {"id": trace_id}, 144 | "transaction": {"id": transaction_id}, 145 | "service": {"name": "apm-service", "environment": "dev"}, 146 | } 147 | 148 | 149 | def test_elastic_apm_stdlib_exclude_fields(apm): 150 | stream = StringIO() 151 | logger = logging.getLogger("apm-logger") 152 | handler = logging.StreamHandler(stream) 153 | handler.setFormatter( 154 | ecs_logging.StdlibFormatter( 155 | exclude_fields=[ 156 | "@timestamp", 157 | "process", 158 | "log.origin.file.line", 159 | "span", 160 | "transaction.id", 161 | ] 162 | ) 163 | ) 164 | logger.addHandler(handler) 165 | logger.setLevel(logging.DEBUG) 166 | 167 | apm.begin_transaction("test-transaction") 168 | try: 169 | with elasticapm.capture_span("test-span"): 170 | trace_id = elasticapm.get_trace_id() 171 | 172 | logger.info("test message") 173 | finally: 174 | apm.end_transaction("test-transaction") 175 | 176 | ecs = json.loads(stream.getvalue().rstrip()) 177 | assert ecs == { 178 | "ecs.version": "1.6.0", 179 | "log.level": "info", 180 | "log": { 181 | "logger": "apm-logger", 182 | "origin": { 183 | "file": {"name": "test_apm.py"}, 184 | "function": "test_elastic_apm_stdlib_exclude_fields", 185 | }, 186 | "original": "test message", 187 | }, 188 | "message": "test message", 189 | "trace": {"id": trace_id}, 190 | "service": {"name": "apm-service", "environment": "dev"}, 191 | } 192 | -------------------------------------------------------------------------------- /tests/test_meta.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. 
under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import re 19 | from ecs_logging import ECS_VERSION 20 | 21 | 22 | def test_ecs_version_format(): 23 | assert re.match(r"[0-9](?:[.0-9]*[0-9])?", ECS_VERSION) 24 | -------------------------------------------------------------------------------- /tests/test_stdlib_formatter.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. 
See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import logging 19 | import logging.config 20 | from unittest import mock 21 | import pytest 22 | import json 23 | import time 24 | import random 25 | import ecs_logging 26 | from io import StringIO 27 | 28 | 29 | @pytest.fixture(scope="function") 30 | def logger(): 31 | return logging.getLogger(f"test-logger-{time.time():f}-{random.random():f}") 32 | 33 | 34 | def make_record(): 35 | record = logging.LogRecord( 36 | name="logger-name", 37 | level=logging.DEBUG, 38 | pathname="/path/file.py", 39 | lineno=10, 40 | msg="%d: %s", 41 | args=(1, "hello"), 42 | func="test_function", 43 | exc_info=None, 44 | ) 45 | record.created = 1584713566 46 | record.msecs = 123 47 | return record 48 | 49 | 50 | def test_record_formatted(spec_validator): 51 | formatter = ecs_logging.StdlibFormatter(exclude_fields=["process"]) 52 | 53 | assert spec_validator(formatter.format(make_record())) == ( 54 | '{"@timestamp":"2020-03-20T14:12:46.123Z","log.level":"debug","message":"1: hello","ecs.version":"1.6.0",' 55 | '"log":{"logger":"logger-name","origin":{"file":{"line":10,"name":"file.py"},"function":"test_function"},' 56 | '"original":"1: hello"}}' 57 | ) 58 | 59 | 60 | def test_extra_global_is_merged(spec_validator): 61 | formatter = ecs_logging.StdlibFormatter( 62 | exclude_fields=["process"], extra={"environment": "dev"} 63 | ) 64 | 65 | assert spec_validator(formatter.format(make_record())) == ( 66 | '{"@timestamp":"2020-03-20T14:12:46.123Z","log.level":"debug","message":"1: hello","ecs.version":"1.6.0",' 67 | '"environment":"dev",' 68 | '"log":{"logger":"logger-name","origin":{"file":{"line":10,"name":"file.py"},"function":"test_function"},' 69 | '"original":"1: hello"}}' 70 | ) 71 | 72 | 73 | def test_can_be_overridden(spec_validator): 74 | class CustomFormatter(ecs_logging.StdlibFormatter): 75 | def format_to_ecs(self, record): 76 | ecs_dict = 
super().format_to_ecs(record) 77 | ecs_dict["custom"] = "field" 78 | return ecs_dict 79 | 80 | formatter = CustomFormatter(exclude_fields=["process"]) 81 | assert spec_validator(formatter.format(make_record())) == ( 82 | '{"@timestamp":"2020-03-20T14:12:46.123Z","log.level":"debug","message":"1: hello",' 83 | '"custom":"field","ecs.version":"1.6.0","log":{"logger":"logger-name","origin":' 84 | '{"file":{"line":10,"name":"file.py"},"function":"test_function"},"original":"1: hello"}}' 85 | ) 86 | 87 | 88 | def test_can_be_set_on_handler(): 89 | stream = StringIO() 90 | handler = logging.StreamHandler(stream) 91 | handler.setFormatter(ecs_logging.StdlibFormatter(exclude_fields=["process"])) 92 | 93 | handler.handle(make_record()) 94 | 95 | assert stream.getvalue() == ( 96 | '{"@timestamp":"2020-03-20T14:12:46.123Z","log.level":"debug","message":"1: hello",' 97 | '"ecs.version":"1.6.0","log":{"logger":"logger-name","origin":{"file":{"line":10,' 98 | '"name":"file.py"},"function":"test_function"},"original":"1: hello"}}\n' 99 | ) 100 | 101 | 102 | @mock.patch("time.time_ns") 103 | @mock.patch("time.time") 104 | def test_extra_is_merged(time, time_ns, logger): 105 | time.return_value = 1584720997.187709 106 | time_ns.return_value = time.return_value * 1_000_000_000 107 | 108 | stream = StringIO() 109 | handler = logging.StreamHandler(stream) 110 | handler.setFormatter( 111 | ecs_logging.StdlibFormatter(exclude_fields=["process", "tls.client"]) 112 | ) 113 | logger.addHandler(handler) 114 | logger.setLevel(logging.INFO) 115 | 116 | logger.info( 117 | "hey world", 118 | extra={ 119 | "tls": { 120 | "cipher": "AES", 121 | "client": {"hash": {"md5": "0F76C7F2C55BFD7D8E8B8F4BFBF0C9EC"}}, 122 | }, 123 | "tls.established": True, 124 | "tls.client.certificate": "cert", 125 | }, 126 | ) 127 | 128 | ecs = json.loads(stream.getvalue().rstrip()) 129 | assert isinstance(ecs["log"]["origin"]["file"].pop("line"), int) 130 | assert ecs == { 131 | "@timestamp": 
"2020-03-20T16:16:37.187Z", 132 | "ecs.version": "1.6.0", 133 | "log.level": "info", 134 | "log": { 135 | "logger": logger.name, 136 | "origin": { 137 | "file": {"name": "test_stdlib_formatter.py"}, 138 | "function": "test_extra_is_merged", 139 | }, 140 | "original": "hey world", 141 | }, 142 | "message": "hey world", 143 | "tls": {"cipher": "AES", "established": True}, 144 | } 145 | 146 | 147 | @pytest.mark.parametrize("kwargs", [{}, {"stack_trace_limit": None}]) 148 | def test_stack_trace_limit_default(kwargs, logger): 149 | def f(): 150 | g() 151 | 152 | def g(): 153 | h() 154 | 155 | def h(): 156 | raise ValueError("error!") 157 | 158 | stream = StringIO() 159 | handler = logging.StreamHandler(stream) 160 | handler.setFormatter(ecs_logging.StdlibFormatter(**kwargs)) 161 | logger.addHandler(handler) 162 | logger.setLevel(logging.DEBUG) 163 | 164 | try: 165 | f() 166 | except ValueError: 167 | logger.info("there was an error", exc_info=True) 168 | 169 | ecs = json.loads(stream.getvalue().rstrip()) 170 | error_stack_trace = ecs["error"].pop("stack_trace") 171 | assert all(x in error_stack_trace for x in ("f()", "g()", "h()")) 172 | 173 | 174 | @pytest.mark.parametrize("stack_trace_limit", [0, False]) 175 | def test_stack_trace_limit_disabled(stack_trace_limit, logger): 176 | stream = StringIO() 177 | handler = logging.StreamHandler(stream) 178 | handler.setFormatter( 179 | ecs_logging.StdlibFormatter(stack_trace_limit=stack_trace_limit) 180 | ) 181 | logger.addHandler(handler) 182 | logger.setLevel(logging.DEBUG) 183 | 184 | try: 185 | raise ValueError("error!") 186 | except ValueError: 187 | logger.info("there was an error", exc_info=True) 188 | 189 | ecs = json.loads(stream.getvalue().rstrip()) 190 | assert ecs["error"] == {"message": "error!", "type": "ValueError"} 191 | assert ecs["log.level"] == "info" 192 | assert ecs["message"] == "there was an error" 193 | assert ecs["log"]["original"] == "there was an error" 194 | 195 | 196 | def 
test_exc_info_false_does_not_raise(logger): 197 | stream = StringIO() 198 | handler = logging.StreamHandler(stream) 199 | handler.setFormatter(ecs_logging.StdlibFormatter()) 200 | logger.addHandler(handler) 201 | logger.setLevel(logging.DEBUG) 202 | 203 | logger.info("there was %serror", "no ", exc_info=False) 204 | 205 | ecs = json.loads(stream.getvalue().rstrip()) 206 | assert ecs["log.level"] == "info" 207 | assert ecs["message"] == "there was no error" 208 | assert "error" not in ecs 209 | 210 | 211 | def test_stack_trace_limit_traceback(logger): 212 | def f(): 213 | g() 214 | 215 | def g(): 216 | h() 217 | 218 | def h(): 219 | raise ValueError("error!") 220 | 221 | stream = StringIO() 222 | handler = logging.StreamHandler(stream) 223 | handler.setFormatter(ecs_logging.StdlibFormatter(stack_trace_limit=2)) 224 | logger.addHandler(handler) 225 | logger.setLevel(logging.DEBUG) 226 | 227 | try: 228 | f() 229 | except ValueError: 230 | logger.info("there was an error", exc_info=True) 231 | 232 | ecs = json.loads(stream.getvalue().rstrip()) 233 | error_stack_trace = ecs["error"].pop("stack_trace") 234 | assert all(x in error_stack_trace for x in ("f()", "g()")) 235 | assert "h()" not in error_stack_trace 236 | assert ecs["error"] == { 237 | "message": "error!", 238 | "type": "ValueError", 239 | } 240 | assert ecs["log.level"] == "info" 241 | assert ecs["message"] == "there was an error" 242 | assert ecs["log"]["original"] == "there was an error" 243 | 244 | 245 | def test_stack_trace_limit_types_and_values(): 246 | with pytest.raises(TypeError) as e: 247 | ecs_logging.StdlibFormatter(stack_trace_limit="a") 248 | assert str(e.value) == "'stack_trace_limit' must be None, or a non-negative integer" 249 | 250 | with pytest.raises(ValueError) as e: 251 | ecs_logging.StdlibFormatter(stack_trace_limit=-1) 252 | assert str(e.value) == "'stack_trace_limit' must be None, or a non-negative integer" 253 | 254 | 255 | @pytest.mark.parametrize( 256 | "exclude_fields", 257 | [ 258 
| "process", 259 | "log", 260 | "log.level", 261 | "message", 262 | ["log.origin", "log.origin.file", "log.origin.file.line"], 263 | ], 264 | ) 265 | def test_exclude_fields(exclude_fields): 266 | if isinstance(exclude_fields, str): 267 | exclude_fields = [exclude_fields] 268 | formatter = ecs_logging.StdlibFormatter(exclude_fields=exclude_fields) 269 | ecs = formatter.format_to_ecs(make_record()) 270 | 271 | for entry in exclude_fields: 272 | field_path = entry.split(".") 273 | try: 274 | obj = ecs 275 | for path in field_path[:-1]: 276 | obj = obj[path] 277 | except KeyError: 278 | continue 279 | assert field_path[-1] not in obj 280 | 281 | 282 | @pytest.mark.parametrize( 283 | "exclude_fields", 284 | [ 285 | "ecs.version", 286 | ], 287 | ) 288 | def test_exclude_fields_not_dedotted(exclude_fields): 289 | formatter = ecs_logging.StdlibFormatter(exclude_fields=[exclude_fields]) 290 | ecs = formatter.format_to_ecs(make_record()) 291 | for entry in exclude_fields: 292 | assert entry not in ecs 293 | 294 | 295 | def test_exclude_fields_empty_json_object(): 296 | """Assert that if all JSON objects attributes are excluded then the object doesn't appear.""" 297 | formatter = ecs_logging.StdlibFormatter( 298 | exclude_fields=["process.pid", "process.name", "process.thread"] 299 | ) 300 | ecs = formatter.format_to_ecs(make_record()) 301 | assert "process" not in ecs 302 | 303 | formatter = ecs_logging.StdlibFormatter(exclude_fields=["ecs.version"]) 304 | ecs = formatter.format_to_ecs(make_record()) 305 | assert "ecs" not in ecs 306 | 307 | 308 | def test_exclude_fields_type_and_values(): 309 | with pytest.raises(TypeError) as e: 310 | ecs_logging.StdlibFormatter(exclude_fields="a") 311 | assert str(e.value) == "'exclude_fields' must be a sequence of strings" 312 | 313 | with pytest.raises(TypeError) as e: 314 | ecs_logging.StdlibFormatter(exclude_fields={"a"}) 315 | assert str(e.value) == "'exclude_fields' must be a sequence of strings" 316 | 317 | with 
pytest.raises(TypeError) as e: 318 | ecs_logging.StdlibFormatter(exclude_fields=[1]) 319 | assert str(e.value) == "'exclude_fields' must be a sequence of strings" 320 | 321 | 322 | def test_stack_info(logger): 323 | stream = StringIO() 324 | handler = logging.StreamHandler(stream) 325 | handler.setFormatter(ecs_logging.StdlibFormatter()) 326 | logger.addHandler(handler) 327 | logger.setLevel(logging.DEBUG) 328 | 329 | logger.info("stack info!", stack_info=True) 330 | 331 | ecs = json.loads(stream.getvalue().rstrip()) 332 | assert list(ecs["error"].keys()) == ["stack_trace"] 333 | error_stack_trace = ecs["error"].pop("stack_trace") 334 | assert "test_stack_info" in error_stack_trace and __file__ in error_stack_trace 335 | 336 | 337 | @pytest.mark.parametrize("exclude_fields", [["error"], ["error.stack_trace"]]) 338 | def test_stack_info_excluded(logger, exclude_fields): 339 | stream = StringIO() 340 | handler = logging.StreamHandler(stream) 341 | handler.setFormatter(ecs_logging.StdlibFormatter(exclude_fields=exclude_fields)) 342 | logger.addHandler(handler) 343 | logger.setLevel(logging.DEBUG) 344 | 345 | logger.info("stack info!", stack_info=True) 346 | 347 | ecs = json.loads(stream.getvalue().rstrip()) 348 | assert "error" not in ecs 349 | 350 | 351 | def test_stdlibformatter_signature(): 352 | logging.config.dictConfig( 353 | { 354 | "version": 1, 355 | "formatters": {"my_formatter": {"class": "ecs_logging.StdlibFormatter"}}, 356 | } 357 | ) 358 | 359 | 360 | def test_apm_data_conflicts(spec_validator): 361 | record = make_record() 362 | record.service = {"version": "1.0.0", "name": "myapp", "environment": "dev"} 363 | formatter = ecs_logging.StdlibFormatter(exclude_fields=["process"]) 364 | 365 | assert spec_validator(formatter.format(record)) == ( 366 | '{"@timestamp":"2020-03-20T14:12:46.123Z","log.level":"debug","message":"1: hello","ecs.version":"1.6.0",' 367 | 
'"log":{"logger":"logger-name","origin":{"file":{"line":10,"name":"file.py"},"function":"test_function"},' 368 | '"original":"1: hello"},"service":{"environment":"dev","name":"myapp","version":"1.0.0"}}' 369 | ) 370 | -------------------------------------------------------------------------------- /tests/test_structlog_formatter.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import json 19 | from io import StringIO 20 | from unittest import mock 21 | 22 | import pytest 23 | import structlog 24 | 25 | import ecs_logging 26 | 27 | 28 | class NotSerializable: 29 | def __repr__(self): 30 | return "" 31 | 32 | 33 | @pytest.fixture 34 | def event_dict(): 35 | return { 36 | "event": "test message", 37 | "log.logger": "logger-name", 38 | "foo": "bar", 39 | "baz": NotSerializable(), 40 | } 41 | 42 | 43 | @pytest.fixture 44 | def event_dict_with_exception(): 45 | return { 46 | "event": "test message", 47 | "log.logger": "logger-name", 48 | "foo": "bar", 49 | "exception": "", 50 | } 51 | 52 | 53 | def test_conflicting_event_dict(event_dict): 54 | formatter = ecs_logging.StructlogFormatter() 55 | event_dict["foo.bar"] = "baz" 56 | with pytest.raises(TypeError): 57 | formatter(None, "debug", event_dict) 58 | 59 | 60 | @mock.patch("time.time") 61 | def test_event_dict_formatted(time, spec_validator, event_dict): 62 | time.return_value = 1584720997.187709 63 | 64 | formatter = ecs_logging.StructlogFormatter() 65 | assert spec_validator(formatter(None, "debug", event_dict)) == ( 66 | '{"@timestamp":"2020-03-20T16:16:37.187Z","log.level":"debug",' 67 | '"message":"test message",' 68 | '"baz":"",' 69 | '"ecs.version":"1.6.0",' 70 | '"foo":"bar",' 71 | '"log":{"logger":"logger-name"}}' 72 | ) 73 | 74 | 75 | @mock.patch("time.time") 76 | def test_can_be_set_as_processor(time, spec_validator): 77 | time.return_value = 1584720997.187709 78 | 79 | stream = StringIO() 80 | structlog.configure( 81 | processors=[ecs_logging.StructlogFormatter()], 82 | wrapper_class=structlog.BoundLogger, 83 | context_class=dict, 84 | logger_factory=structlog.PrintLoggerFactory(stream), 85 | ) 86 | 87 | logger = structlog.get_logger("logger-name") 88 | logger.debug("test message", custom="key", **{"dot.ted": 1}) 89 | 90 | assert spec_validator(stream.getvalue()) == ( 91 | '{"@timestamp":"2020-03-20T16:16:37.187Z","log.level":"debug",' 92 | '"message":"test 
message","custom":"key","dot":{"ted":1},' 93 | '"ecs.version":"1.6.0"}\n' 94 | ) 95 | 96 | 97 | def test_exception_log_is_ecs_compliant_when_used_with_format_exc_info( 98 | event_dict_with_exception, 99 | ): 100 | formatter = ecs_logging.StructlogFormatter() 101 | formatted_event_dict = json.loads( 102 | formatter(None, "debug", event_dict_with_exception) 103 | ) 104 | 105 | assert ( 106 | "exception" not in formatted_event_dict 107 | ), "The key 'exception' at the root of a log is not ECS-compliant" 108 | assert "error" in formatted_event_dict 109 | assert "stack_trace" in formatted_event_dict["error"] 110 | assert "" in formatted_event_dict["error"]["stack_trace"] 111 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import pytest 19 | from ecs_logging._utils import flatten_dict, de_dot, normalize_dict, json_dumps 20 | 21 | 22 | def test_flatten_dict(): 23 | assert flatten_dict( 24 | {"a": {"b": 1}, "a.c": {"d.e": {"f": 1}, "d.e.g": [{"f.c": 2}]}} 25 | ) == {"a.b": 1, "a.c.d.e.f": 1, "a.c.d.e.g": [{"f.c": 2}]} 26 | 27 | with pytest.raises(ValueError) as e: 28 | flatten_dict({"a": {"b": 1}, "a.b": 2}) 29 | 30 | assert str(e.value) == "Duplicate entry for 'a.b' with different nesting" 31 | 32 | with pytest.raises(ValueError) as e: 33 | flatten_dict({"a": {"b": {"c": 1}}, "a.b": {"c": 2}, "a.b.c": 1}) 34 | 35 | assert str(e.value) == "Duplicate entry for 'a.b.c' with different nesting" 36 | 37 | 38 | def test_de_dot(): 39 | assert de_dot("x.y.z", {"a": {"b": 1}}) == {"x": {"y": {"z": {"a": {"b": 1}}}}} 40 | 41 | 42 | def test_normalize_dict(): 43 | assert normalize_dict( 44 | {"a": {"b": 1}, "a.c": {"d.e": {"f": 1}, "d.e.g": [{"f.c": 2}]}} 45 | ) == {"a": {"b": 1, "c": {"d": {"e": {"f": 1, "g": [{"f": {"c": 2}}]}}}}} 46 | 47 | 48 | def test_normalize_dict_with_array(): 49 | assert normalize_dict({"a": ["1", "2"]}) == {"a": ["1", "2"]} 50 | 51 | 52 | @pytest.mark.parametrize( 53 | ["value", "expected"], 54 | [ 55 | ({}, "{}"), 56 | ({"log": {"level": "info"}}, '{"log.level":"info"}'), 57 | ({"log.level": "info"}, '{"log.level":"info"}'), 58 | ( 59 | {"log": {"level": "info", "message": "hello"}}, 60 | '{"log.level":"info","log":{"message":"hello"}}', 61 | ), 62 | ({"@timestamp": "2021-01-01..."}, '{"@timestamp":"2021-01-01..."}'), 63 | ({"message": "hello"}, '{"message":"hello"}'), 64 | ({"message": 1}, '{"message":1}'), 65 | ({"message": ["hello"]}, '{"message":["hello"]}'), 66 | ({"message": {"key": "val"}}, '{"message":{"key":"val"}}'), 67 | ({"custom": "value"}, '{"custom":"value"}'), 68 | ({"log.level": "info"}, '{"log.level":"info"}'), 69 | ( 70 | {"log": {"message": "hello"}, "message": "hello"}, 71 | '{"message":"hello","log":{"message":"hello"}}', 72 | ), 73 | ( 
74 | { 75 | "log": {"message": "hello", "level": "info"}, 76 | "message": "hello", 77 | "@timestamp": "2021-01-01...", 78 | }, 79 | '{"@timestamp":"2021-01-01...","log.level":"info","message":"hello","log":{"message":"hello"}}', 80 | ), 81 | ( 82 | { 83 | "log": {"level": "info"}, 84 | "message": "hello", 85 | "@timestamp": "2021-01-01...", 86 | }, 87 | '{"@timestamp":"2021-01-01...","log.level":"info","message":"hello"}', 88 | ), 89 | ], 90 | ) 91 | def test_json_dumps(value, expected): 92 | assert json_dumps(value) == expected 93 | -------------------------------------------------------------------------------- /updatecli-compose.yaml: -------------------------------------------------------------------------------- 1 | # Config file for `updatecli compose ...`. 2 | # https://www.updatecli.io/docs/core/compose/ 3 | policies: 4 | - name: Handle ecs-logging specs 5 | policy: ghcr.io/elastic/oblt-updatecli-policies/apm/ecs-logging-specs:0.5.0@sha256:fbe1697bb32f0d5222ce5267cb8ed9b8e079bf17bee06fb16b4e5c2c3351549e 6 | values: 7 | - .ci/updatecli/values.d/scm.yml 8 | - .ci/updatecli/values.d/ecs-logging-specs.yml 9 | - name: Update Updatecli policies 10 | policy: ghcr.io/updatecli/policies/autodiscovery/updatecli:0.8.0@sha256:99e9e61b501575c2c176c39f2275998d198b590a3f6b1fe829f7315f8d457e7f 11 | values: 12 | - .ci/updatecli/values.d/scm.yml 13 | - .ci/updatecli/values.d/update-compose.yml 14 | -------------------------------------------------------------------------------- /utils/check-license-headers.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Check that source code files in this repo have the appropriate license 4 | # header. 5 | 6 | if [ "$TRACE" != "" ]; then 7 | export PS4='${BASH_SOURCE}:${LINENO}: ${FUNCNAME[0]:+${FUNCNAME[0]}(): }' 8 | set -o xtrace 9 | fi 10 | set -o errexit 11 | set -o pipefail 12 | 13 | TOP=$(cd "$(dirname "$0")/.." 
>/dev/null && pwd) 14 | NLINES=$(wc -l utils/license-header.txt | awk '{print $1}') 15 | 16 | function check_license_header { 17 | local f 18 | f=$1 19 | if ! diff utils/license-header.txt <(head -$NLINES "$f") >/dev/null; then 20 | echo "check-license-headers: error: '$f' does not have required license header, see 'diff -u utils/license-header.txt <(head -$NLINES $f)'" 21 | return 1 22 | else 23 | return 0 24 | fi 25 | } 26 | 27 | 28 | cd "$TOP" 29 | nErrors=0 30 | for f in $(git ls-files | grep '\.py$'); do 31 | if ! check_license_header $f; then 32 | nErrors=$((nErrors+1)) 33 | fi 34 | done 35 | 36 | if [[ $nErrors -eq 0 ]]; then 37 | exit 0 38 | else 39 | exit 1 40 | fi 41 | -------------------------------------------------------------------------------- /utils/license-header.txt: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | --------------------------------------------------------------------------------