├── .bumpversion.cfg ├── .circleci └── config.yml ├── .editorconfig ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ ├── question-discussion.md │ └── security-vulnerability-report.md ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml ├── release.yml └── workflows │ ├── add-to-project-v2.yml │ ├── apply-labels.yml │ ├── stale.yml │ └── validate-pr-title.yml ├── .gitignore ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── COPYRIGHT ├── LICENSE ├── NOTICE ├── OSSMETADATA ├── README.md ├── RELEASING.md ├── SECURITY.md ├── SUPPORT.md ├── beeline ├── __init__.py ├── aiotrace.py ├── internal.py ├── middleware │ ├── __init__.py │ ├── awslambda │ │ ├── __init__.py │ │ └── test_awslambda.py │ ├── bottle │ │ ├── __init__.py │ │ └── test_bottle.py │ ├── django │ │ ├── __init__.py │ │ └── test_django.py │ ├── flask │ │ ├── __init__.py │ │ └── test_flask.py │ ├── werkzeug │ │ ├── __init__.py │ │ └── test_werkzeug.py │ └── wsgi.py ├── patch │ ├── __init__.py │ ├── jinja2.py │ ├── requests.py │ ├── test_jinja2.py │ ├── test_requests.py │ ├── test_urllib.py │ ├── tornado.py │ └── urllib.py ├── propagation │ ├── __init__.py │ ├── default.py │ ├── honeycomb.py │ ├── test_honeycomb.py │ ├── test_propagation.py │ ├── test_w3c.py │ └── w3c.py ├── test_async.py ├── test_beeline.py ├── test_internal.py ├── test_suite.py ├── test_trace.py ├── trace.py └── version.py ├── examples ├── django │ ├── README.md │ ├── app │ │ ├── app │ │ │ ├── __init__.py │ │ │ ├── settings.py │ │ │ ├── urls.py │ │ │ └── wsgi.py │ │ ├── hello │ │ │ ├── __init__.py │ │ │ ├── admin.py │ │ │ ├── apps.py │ │ │ ├── migrations │ │ │ │ └── __init__.py │ │ │ ├── models.py │ │ │ ├── tests.py │ │ │ ├── urls.py │ │ │ └── views.py │ │ └── manage.py │ └── pyproject.toml ├── flask │ ├── README.md │ ├── app.py │ └── pyproject.toml └── hello-world │ ├── README.md │ ├── app.py │ └── pyproject.toml ├── poetry.lock ├── push_docs.sh ├── pylint.rc └── pyproject.toml /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | commit = False 3 | tag = False 4 | current_version = 3.6.0 5 | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-(?P<release>[a-z]+)(?P<build>\d+))?
6 | serialize = 7 | {major}.{minor}.{patch}-{release}{build} 8 | {major}.{minor}.{patch} 9 | 10 | [bumpversion:part:release] 11 | optional_value = prod 12 | first_value = dev 13 | values = 14 | dev 15 | prod 16 | 17 | [bumpversion:part:build] 18 | 19 | [bumpversion:file:beeline/version.py] 20 | 21 | [bumpversion:file:pyproject.toml] 22 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | orbs: 4 | python: circleci/python@2.1.1 5 | 6 | jobs: 7 | lint: 8 | parameters: 9 | python-version: 10 | type: string 11 | executor: 12 | name: python/default 13 | tag: << parameters.python-version >> 14 | steps: 15 | - checkout 16 | - python/install-packages: 17 | pkg-manager: poetry 18 | - run: poetry run pylint --rcfile=pylint.rc beeline 19 | - run: poetry run pycodestyle beeline --max-line-length=140 20 | 21 | test: 22 | parameters: 23 | python-version: 24 | type: string 25 | executor: 26 | name: python/default 27 | tag: << parameters.python-version >> 28 | steps: 29 | - checkout 30 | - python/install-packages: 31 | pkg-manager: poetry 32 | - run: poetry run coverage run -m beeline.test_suite 33 | - run: poetry run coverage report --include="beeline/*" 34 | - run: poetry run coverage html --include="beeline/*" 35 | - store_artifacts: 36 | path: htmlcov 37 | 38 | build: 39 | executor: 40 | name: python/default 41 | tag: "3.8" 42 | steps: 43 | - checkout 44 | - python/install-packages: 45 | pkg-manager: poetry 46 | - run: mkdir -p ~/artifacts 47 | - run: poetry build 48 | - run: cp dist/* ~/artifacts 49 | - persist_to_workspace: 50 | root: ~/ 51 | paths: 52 | - artifacts 53 | - store_artifacts: 54 | path: ~/artifacts 55 | 56 | publish_pypi: 57 | executor: 58 | name: python/default 59 | tag: "3.8" 60 | steps: 61 | - checkout 62 | - python/install-packages: 63 | pkg-manager: poetry 64 | - run: poetry publish --build -u '__token__' -p ${PYPI_TOKEN} 65 | 66 | publish_github: 67 | docker: 68 | - image: cibuilds/github:0.13.0 69 | steps: 70 | - attach_workspace: 71 | at: ~/ 72 | - run: 73 | name: "Artifacts being published" 74 | command: | 75 | echo "about to publish to tag ${CIRCLE_TAG}" 76 | ls -l ~/artifacts/* 77 | - run: ghr -draft -n ${CIRCLE_TAG} -t ${GITHUB_TOKEN} -u ${CIRCLE_PROJECT_USERNAME} -r ${CIRCLE_PROJECT_REPONAME} -c ${CIRCLE_SHA1} ${CIRCLE_TAG} ~/artifacts 78 | workflows: 79 | weekly: 80 | triggers: 81 | - schedule: 82 | cron: "0 0 * * 0" 83 | filters: 84 | branches: 85 | only: 86 | - main 87 | jobs: 88 | - lint: &lint 89 | matrix: &matrix 90 | parameters: 91 | python-version: 92 | - "3.7" 93 | - "3.8" 94 | - "3.9" 95 | - "3.10" 96 | - test: &test 97 | matrix: *matrix 98 | - build: &build 99 | requires: 100 | - lint 101 | - test 102 | 103 | build_beeline: 104 | jobs: 105 | - lint: 106 | <<: *lint 107 | filters: 108 | tags: 109 | only: /.*/ 110 | - test: 111 | <<: *test 112 | filters: 113 | tags: 114 | only: /.*/ 115 | - build: 116 | <<: *build 117 | filters: 118 | tags: 119 | only: /.*/ 120 | - publish_pypi: 121 | context: Honeycomb Secrets for Public Repos 122 | requires: 123 | - build 124 | filters: 125 | tags: 126 | only: /v[0-9].*/ 127 | branches: 128 | ignore: /.*/ 129 | - publish_github: 130 | context: Honeycomb Secrets for Public Repos 131 | requires: 132 | - build 133 | filters: 134 | tags: 135 | only: /v[0-9].*/ 136 | branches: 137 | ignore: /.*/ 138 | 
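(Editor's aside, not a file in this repository.) The `parse` and `serialize` settings in `.bumpversion.cfg` above work as a pair: the regular expression splits a version string into named parts, and the serialize templates rebuild it, preferring the pre-release form when `release`/`build` parts are present. A minimal Python sketch of that behavior, using arbitrary sample version strings:

```python
# Illustrative sketch only: mimics the parse/serialize settings in .bumpversion.cfg.
import re

PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(\-(?P<release>[a-z]+)(?P<build>\d+))?"
)


def serialize(parts):
    # Prefer the pre-release form when release/build are present, otherwise
    # fall back to plain major.minor.patch -- mirroring the two entries
    # under `serialize =` in the config.
    if parts.get("release") and parts.get("build"):
        return "{major}.{minor}.{patch}-{release}{build}".format(**parts)
    return "{major}.{minor}.{patch}".format(**parts)


for version in ("3.6.0", "3.7.0-dev1"):  # arbitrary sample values
    parts = PARSE.match(version).groupdict()
    print(parts, "->", serialize(parts))
```

Round-tripping `3.7.0-dev1` through the pre-release template mirrors how bump2version moves between the `dev` and `prod` values defined under `[bumpversion:part:release]`.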
-------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*.{py}] 4 | indent_style = space 5 | indent_size = 4 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = false 9 | 10 | [*.md] 11 | trim_trailing_whitespace = false -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owners file. 2 | # This file controls who is tagged for review for any given pull request. 3 | 4 | # For anything not explicitly taken by someone else: 5 | * @honeycombio/pipeline-team 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Let us know if something is not working as expected 4 | title: '' 5 | labels: 'type: bug' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 17 | 18 | **Versions** 19 | 20 | - Python: 21 | - Beeline: 22 | 23 | 24 | **Steps to reproduce** 25 | 26 | 1. 27 | 28 | **Additional context** 29 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: 'type: enhancement' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 15 | 16 | **Is your feature request related to a problem? Please describe.** 17 | 18 | 19 | **Describe the solution you'd like** 20 | 21 | 22 | **Describe alternatives you've considered** 23 | 24 | 25 | **Additional context** 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question-discussion.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question/Discussion 3 | about: General question about how things work or a discussion 4 | title: '' 5 | labels: 'type: discussion' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 15 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/security-vulnerability-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Security vulnerability report 3 | about: Let us know if you discover a security vulnerability 4 | title: '' 5 | labels: 'type: security' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 15 | **Versions** 16 | 17 | - Python: 18 | - Beeline: 19 | 20 | **Description** 21 | 22 | (Please include any relevant CVE advisory links) 23 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 12 | 13 | ## Which problem is this PR solving? 14 | 15 | - 16 | 17 | ## Short description of the changes 18 | 19 | - 20 | 21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "monthly" 12 | labels: 13 | - "type: dependencies" 14 | reviewers: 15 | - "honeycombio/telemetry-team" 16 | commit-message: 17 | prefix: "maint" 18 | include: "scope" 19 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | # .github/release.yml 2 | 3 | changelog: 4 | exclude: 5 | labels: 6 | - no-changelog 7 | categories: 8 | - title: 💥 Breaking Changes 💥 9 | labels: 10 | - "version: bump major" 11 | - breaking-change 12 | - title: 💡 Enhancements 13 | labels: 14 | - "type: enhancement" 15 | - title: 🐛 Fixes 16 | labels: 17 | - "type: bug" 18 | - title: 🛠 Maintenance 19 | labels: 20 | - "type: maintenance" 21 | - "type: dependencies" 22 | - "type: documentation" 23 | - title: 🤷 Other Changes 24 | labels: 25 | - "*" 26 | -------------------------------------------------------------------------------- /.github/workflows/add-to-project-v2.yml: -------------------------------------------------------------------------------- 1 | name: Add to project 2 | on: 3 | issues: 4 | types: [opened] 5 | pull_request_target: 6 | types: [opened] 7 | jobs: 8 | add-to-project: 9 | runs-on: ubuntu-latest 10 | name: Add issues and PRs to project 11 | steps: 12 | - uses: actions/add-to-project@main 13 | with: 14 | project-url: https://github.com/orgs/honeycombio/projects/27 15 | github-token: ${{ secrets.GHPROJECTS_TOKEN }} 16 | -------------------------------------------------------------------------------- /.github/workflows/apply-labels.yml: -------------------------------------------------------------------------------- 1 | name: Apply project labels 2 | on: [issues, pull_request_target, label] 3 | jobs: 4 | apply-labels: 5 | runs-on: ubuntu-latest 6 | name: Apply common project labels 7 | steps: 8 | - uses: honeycombio/oss-management-actions/labels@v1 9 | with: 10 | github-token: ${{ secrets.GITHUB_TOKEN }} 11 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: 'Close stale issues and PRs' 2 | on: 3 | schedule: 4 | - cron: '30 1 * * *' 5 | 6 | jobs: 7 | stale: 8 | name: 'Close stale issues and PRs' 9 | runs-on: ubuntu-latest 10 | permissions: 11 | issues: write 12 | pull-requests: write 13 | 14 | steps: 15 | - uses: actions/stale@v4 16 | with: 17 | start-date: '2021-09-01T00:00:00Z' 18 | stale-issue-message: 'Marking this issue as stale because it has been open 14 days with no activity. Please add a comment if this is still an ongoing issue; otherwise this issue will be automatically closed in 7 days.' 19 | stale-pr-message: 'Marking this PR as stale because it has been open 30 days with no activity. Please add a comment if this PR is still relevant; otherwise this PR will be automatically closed in 7 days.' 20 | close-issue-message: 'Closing this issue due to inactivity. Please see our [Honeycomb OSS Lifecyle and Practices](https://github.com/honeycombio/home/blob/main/honeycomb-oss-lifecycle-and-practices.md).' 21 | close-pr-message: 'Closing this PR due to inactivity. 
Please see our [Honeycomb OSS Lifecyle and Practices](https://github.com/honeycombio/home/blob/main/honeycomb-oss-lifecycle-and-practices.md).' 22 | days-before-issue-stale: 14 23 | days-before-pr-stale: 30 24 | days-before-issue-close: 7 25 | days-before-pr-close: 7 26 | any-of-labels: 'status: info needed,status: revision needed' 27 | -------------------------------------------------------------------------------- /.github/workflows/validate-pr-title.yml: -------------------------------------------------------------------------------- 1 | name: "Validate PR Title" 2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | jobs: 11 | main: 12 | name: Validate PR title 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: amannn/action-semantic-pull-request@v5 16 | id: lint_pr_title 17 | name: "🤖 Check PR title follows conventional commit spec" 18 | env: 19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 20 | with: 21 | # Have to specify all types because `maint` and `rel` aren't defaults 22 | types: | 23 | maint 24 | rel 25 | fix 26 | feat 27 | chore 28 | ci 29 | docs 30 | style 31 | refactor 32 | perf 33 | test 34 | ignoreLabels: | 35 | "type: dependencies" 36 | # When the previous steps fails, the workflow would stop. By adding this 37 | # condition you can continue the execution with the populated error message. 38 | - if: always() && (steps.lint_pr_title.outputs.error_message != null) 39 | name: "📝 Add PR comment about using conventional commit spec" 40 | uses: marocchino/sticky-pull-request-comment@v2 41 | with: 42 | header: pr-title-lint-error 43 | message: | 44 | Thank you for contributing to the project! 🎉 45 | 46 | We require pull request titles to follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) and it looks like your proposed title needs to be adjusted. 47 | 48 | Make sure to prepend with `feat:`, `fix:`, or another option in the list below. 49 | 50 | Once you update the title, this workflow will re-run automatically and validate the updated title. 
51 | 52 | Details: 53 | 54 | ``` 55 | ${{ steps.lint_pr_title.outputs.error_message }} 56 | ``` 57 | 58 | # Delete a previous comment when the issue has been resolved 59 | - if: ${{ steps.lint_pr_title.outputs.error_message == null }} 60 | name: "❌ Delete PR comment after title has been updated" 61 | uses: marocchino/sticky-pull-request-comment@v2 62 | with: 63 | header: pr-title-lint-error 64 | delete: true 65 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc 3 | *egg-info 4 | dist 5 | build 6 | env 7 | venv 8 | pyvenv.cfg 9 | .eggs 10 | lib 11 | bin 12 | 13 | .vscode 14 | .coverage 15 | htmlcov 16 | examples/**/poetry.lock 17 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # beeline-python changelog 2 | 3 | ## 3.6.0 2024-03-07 4 | 5 | ### Enhancements 6 | 7 | - feat: add Classic Ingest Key support (#295) | [@jharley](https://github.com/jharley) 8 | 9 | ### Maintenance 10 | 11 | - maint: add labels to release.yml for auto-generated grouping (#296) | [@JamieDanielson](https://github.com/JamieDanielson) 12 | - maint: Update poetry publish to use API token (#294) | [@MikeGoldsmith](https://github.com/MikeGoldsmith) 13 | - maint: update codeowners to pipeline-team (#293) | [@JamieDanielson](https://github.com/JamieDanielson) 14 | - maint: update codeowners to pipeline (#292) | [@JamieDanielson](https://github.com/JamieDanielson) 15 | - ci: move nightly to weekly (#287) | [@vreynolds](https://github.com/vreynolds) 16 | - maint(deps-dev): bump django from 3.2.19 to 3.2.20 (#286) | [@dependabot](https://github.com/dependabot) 17 | - maint(deps-dev): bump coverage from 7.2.5 to 7.2.7 (#284) | [@dependabot](https://github.com/dependabot) 18 | - docs: update readme (#285) | [@vreynolds](https://github.com/vreynolds) 19 | 20 | ## 3.5.2 2023-05-25 21 | 22 | ### Fixes 23 | 24 | - fix: fix error when cursor.lastrowid doesnt exist (#281) | [@JamieDanielson](https://github.com/JamieDanielson) 25 | 26 | ### Maintenance 27 | 28 | - maint(deps): bump wrapt from 1.14.1 to 1.15.0 (#264) 29 | - maint: Bump requests from 2.28.1 to 2.31.0 in /examples/flask (#279) 30 | - maint(deps): bump requests from 2.28.1 to 2.31.0 (#280) 31 | - maint(deps-dev): bump mock from 5.0.1 to 5.0.2 (#275) 32 | - maint(deps-dev): bump coverage from 7.2.3 to 7.2.5 (#273) 33 | - maint(deps-dev): bump django from 3.2.18 to 3.2.19 (#277) 34 | - maint(deps-dev): bump flask from 2.2.3 to 2.2.5 (#276) 35 | - maint(deps): bump sqlparse from 0.4.2 to 0.4.4 (#272) 36 | - maint(deps-dev): bump coverage from 7.0.5 to 7.2.3 (#269) 37 | - maint(deps-dev): bump flask from 2.2.2 to 2.2.3 (#266) 38 | - maint(deps-dev): bump mock from 5.0.0 to 5.0.1 (#260) 39 | - maint(deps-dev): bump django from 3.2.17 to 3.2.18 (#262) 40 | - maint(deps): bump werkzeug from 2.2.2 to 2.2.3 (#263) 41 | - maint(deps-dev): bump django from 3.2.16 to 3.2.17 (#261) 42 | 43 | ## 3.5.1 2023-01-19 44 | 45 | ### Fixes 46 | 47 | - Only try to calculate query duration when we have start time (#255) | [@MikeGoldsmith](https://github.com/MikeGoldsmith) 48 | - Parse version number for beeline-python/{VERSION} addition (#257) | [@emilyashley](https://github.com/emilyashley) 49 | 50 | ### Maintenance 51 | 52 | - Give dependabot PRs better title (#250) | [@vreynolds](https://github.com/vreynolds) 53 | 
- Update validate PR title workflow (#245) | [@pkanal](https://github.com/pkanal) 54 | - Validate PR title (#244) | [@pkanal](https://github.com/pkanal) 55 | - Add release file (#240) | [@vreynolds](https://github.com/vreynolds) 56 | - Add new project workflow (#239) | [@vreynolds](https://github.com/vreynolds) 57 | - Fix ci build (#249) | [@vreynolds](https://github.com/vreynolds) 58 | - Delete workflows for old board (#241) | [@vreynolds](https://github.com/vreynolds) 59 | - Update releasing notes (#236) | [@emilyashley](https://github.com/emilyashley) 60 | 61 | - Bump pycodestyle from 2.9.1 to 2.10.0 (#246) 62 | - bump coverage from 6.5.0 to 7.0.5 (#256) 63 | - bump mock from 4.0.3 to 5.0.0 (#254) 64 | - bump setuptools from 65.4.1 to 65.5.1 (#252) 65 | - Bump certifi from 2022.6.15 to 2022.12.7 in /examples/flask (#247) 66 | - Bump certifi from 2022.6.15.1 to 2022.12.7 (#248) 67 | - Bump django from 3.2.15 to 3.2.16 (#242) 68 | - Bump flask from 2.1.0 to 2.2.2 (#238) 69 | - Bump coverage from 6.4.4 to 6.5.0 (#237) 70 | 71 | ## 3.5.0 2022-09-09 72 | 73 | ⚠️ Minimum supported Python version is now 3.7 ⚠️ 74 | ### Maintenance 75 | 76 | - Drop Python 3.5, 3.6 Support (#233) | [@emilyashley](https://github.com/emilyashley) 77 | - Bump minimum libhoney version to 2.3 (for python >=3.7)(#234) | [@emilyashley](https://github.com/emilyashley) 78 | 79 | ## 3.4.2 2022-09-06 80 | 81 | ### Fixes 82 | 83 | - Django < 2.2 does not have request.headers (#231) | [@vreynolds](https://github.com/vreynolds) 84 | 85 | ## 3.4.1 2022-07-15 86 | 87 | ### Maintenance 88 | 89 | - drop unnecessary backwards compatibility (remove six) (#228) | [@robbkidd](https://github.com/robbkidd) 90 | - Bump minimum libhoney from 1.7.0 to 2.1.1 (#226) | [@danvendia](https://github.com/danvendia) 91 | - Bump wrapt from 1.14.0 to 1.14.1 (#224) | [dependabot](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/configuring-dependabot-security-updates) 92 | 93 | ## 3.4.0 2022-05-04 94 | 95 | ### Enhancements 96 | 97 | - Add meta.span_type to root/subroot spans (#219) | [@MikeGoldsmith](https://github.com/MikeGoldsmith) 98 | 99 | ### Maintenance 100 | 101 | - docs: using FileTransmission with classic requires 32 character key (#220) | [@JamieDanielson](https://github.com/JamieDanielson) 102 | - Bump django from 2.2.27 to 2.2.28 (#217) 103 | - Bump libhoney from 2.0.0 to 2.1.0 (#221) 104 | 105 | ## 3.3.1 2022-04-12 106 | 107 | ### Maintenance 108 | 109 | - Bump libhoney from 1.11.2 to 2.0.0 (#209) 110 | - Bump wrapt from 1.13.3 to 1.14.0 (#215) 111 | - Bump django from 2.2.26 to 2.2.27 (#210) 112 | 113 | ## 3.3.0 2022-03-21 114 | 115 | ### Enhancements 116 | 117 | **NOTE** If you are using the [FileTransmission](https://github.com/honeycombio/libhoney-py/blob/main/libhoney/transmission.py#L448) method and setting a false API key - and still working in Classic mode - you must update the key to be 32 characters in length to keep the same behavior. 118 | 119 | - feat: Add Environment & Services support (#213) | [@JamieDanielson](https://github.com/JamieDanielson) 120 | 121 | ## 3.2.0 2022-02-10 122 | 123 | ### Enhancements 124 | 125 | - add rollup SQL duration time to Django traces (#207) | [@jmhodges-color](https://github.com/jmhodges-color) 126 | 127 | ## 3.1.0 2022-01-20 128 | 129 | ### Improvements 130 | 131 | - add a count of SQL queries to root span in django (#205) | [@jmhodges](https://github.com/jmhodges) 132 | 133 | ## 3.0.0 2022-01-13 134 | 135 | ### !!! Breaking Changes !!! 
136 | 137 | Minimum supported Python version is now 3.5 138 | 139 | ### Maintenance 140 | 141 | - drop python < 3.5 support, update vulnerable deps (#202) | | [@vreynolds](https://github.com/vreynolds) 142 | - gh: add re-triage workflow (#201) | | [@vreynolds](https://github.com/vreynolds) 143 | 144 | ## 2.18.0 2021-12-23 145 | 146 | ### Improvements 147 | 148 | - accept both w3c and honeycomb propagation headers by default (#199) | [@vreynolds](https://github.com/vreynolds) 149 | 150 | ## 2.17.3 2021-12-01 151 | 152 | ### Fixes 153 | 154 | - Remove condition on status code (#191) | [@JamieDanielson](https://github.com/JamieDanielson) 155 | - Close trace regardless of exception (#190) | [@vreynolds](https://github.com/vreynolds) 156 | 157 | ### Maintenance 158 | 159 | - Update dependabot to monthly (#194) | [@vreynolds](https://github.com/vreynolds) 160 | - Add python 3.9 and 3.10 to test matrix (#192) | [@vreynolds](https://github.com/vreynolds) 161 | - Add example app using Flask (#189) | [@JamieDanielson](https://github.com/JamieDanielson) 162 | - Empower apply-labels action to apply labels (#187) | [robbkidd](https://github.com/robbkidd) 163 | 164 | ## 2.17.2 2021-10-19 165 | 166 | ### Fixes 167 | 168 | - Always return True for sampling when sample rate is 1 (#185) | [@MikeGoldsmith](https://github.com/MikeGoldsmith) 169 | 170 | ### Maintenance 171 | 172 | - Change maintenance badge to maintained (#182) | [@JamieDanielson](https://github.com/JamieDanielson) 173 | - Add Stalebot (#183) | [@JamieDanielson](https://github.com/JamieDanielson) 174 | 175 | ## 2.17.1 2021-09-01 176 | 177 | ### Fixes: 178 | 179 | - Fix stringifying tuple parameters in flask DB middleware (#177) [@anselm-helbig](https://github.com/anselm-helbig) & [@mnemonikk](https://github.com/mnemonikk) 180 | 181 | ## 2.17.0 2021-05-28 182 | 183 | ## Improvements: 184 | 185 | - Added support for Django streaming responses (#166) 186 | 187 | ## Fixes: 188 | 189 | - Avoid using deprecated Django request.is_ajax() (#160) 190 | 191 | ## 2.16.2 2021-01-22 192 | 193 | ### Fixes 194 | 195 | - Capture exception details in AWS Lambda middleware (#154) 196 | - Default w3c sampled flag to 01 (#152) 197 | 198 | ## 2.16.1 2021-01-08 199 | 200 | ### Fixed 201 | - Fix missing content_type, content_length, and status_code in spans from error responses (#145) [@vbarua](https://github.com/vbarua) 202 | 203 | ## 2.16.0 204 | 205 | ### Improvements: 206 | 207 | - Add app.exception_stacktrace to context when an exception is thrown (#135) 208 | 209 | ### Fixes: 210 | 211 | - Fix requests patch to correctly build span name (#142) 212 | - Fix deprecations related to unittest usage (#141) 213 | 214 | ## 2.15.0 215 | 216 | - Update Lambda wrapper to allow omission of input/output fields #130 (thank you, @fitzoh!) 217 | - Add "request.route" field for Django middleware (thank you, @sjoerdjob!) 218 | 219 | ## 2.14.0 220 | 221 | Improvements: 222 | 223 | - Adds support for dataset when parsing honeycomb propagation headers #133 224 | 225 | ## 2.13.1 226 | 227 | This is a maintenace release to fix a bug in the django middleware that can happen in testing environments when a beeling has 228 | not been initialised. 229 | 230 | - Don't attempt to use a non-initialised beeline instance in django middleware #126. 231 | - Adds a .editorconfig to help apply consistent styling across IDEs #127. 232 | 233 | ## 2.13.0 234 | 235 | ### Features 236 | 237 | We have added new functionality for `http_trace_parse_hook` and `http_trace_propagation_hook`. 
These hooks allow beeline users 238 | to parse incoming headers, as well as add outgoing headers, allowing for interoperability between Honeycomb, 239 | OpenTelemetry (W3C) and other tracing formats. 240 | 241 | - New `beeline` configuration parameters for `http_trace_parse_hook` and `http_trace_propagation_hook` 242 | - New `propagate_and_start_trace` function for use by middleware to invoke the `http_trace_parse_hook` 243 | - New `beeline.propagation` package to centralize propagation-related classes and functions. 244 | - `beeline.propagation.honeycomb` package contains hooks to support parsing and propagation using honeycomb headers. 245 | - `beeline.propagation.w3c` package contains hooks to support parsing and propagation using w3c headers. 246 | 247 | ### Deprecation Notice 248 | 249 | - Deprecated the existing `beeline.marshal_trace_context`, and migrated all usage to new 250 | `beeline.propagation.honeycomb` functions. `beeline.marshal_trace_context` will be removed when the next major version of the beeline is released. 251 | 252 | ### Implementation details 253 | 254 | - Implemented `beeline.propagation.Request` classes for middleware to aid in support of header and propagation hooks. 255 | - Migrateed existing middleware to use new `beeline.propagation` classes and functions to support `http_trace_parse_hooks`. 256 | - Centralized duplicated code for WSGI variants (Flask, Bottle, Werkzeug) into a single location. 257 | - Added `http_trace_propagation_hook` support to requests and urllib. 258 | 259 | ### Fixes 260 | 261 | - Fixed a bug where `urllib.request.urlopen` would fail if given a string URL as an argument. 262 | 263 | ## 2.12.2 264 | 265 | Improvements 266 | 267 | - Trace IDs and Span IDs now correspond to W3C trace context specification. See https://www.w3.org/TR/trace-context/ 268 | - Now using [poetry](https://python-poetry.org/) for packaging and dependency management. 269 | - Tests now exclude `test_async` on Python versions which don't support async instead of requiring maintenance of an includelist of tests. 270 | - No longer use `pyflask` in tests as `pylint` covers all issues checked by `pyflask` 271 | 272 | - Misc 273 | - Files have been reformatted to pass pycodestyle (PEP8) 274 | - Now enforce passing pycodestyle in CI. 275 | - Now do CI testing against Python 3.8. 276 | 277 | ## 2.12.1 2020-03-24 278 | 279 | Fixes 280 | 281 | - Fixes `traced` decorator behavior when working with generators. [#106](https://github.com/honeycombio/beeline-python/pull/106) 282 | - Fixes method for detection of asyncio. [#107](https://github.com/honeycombio/beeline-python/pull/107) 283 | 284 | ## 2.12.0 2020-03-19 285 | 286 | Features 287 | 288 | - urllib auto-instrumentation via patch.[#102](https://github.com/honeycombio/beeline-python/pull/102) 289 | - jinja2 auto-instrumentation via patch. [#103](https://github.com/honeycombio/beeline-python/pull/103) 290 | 291 | Improvements 292 | 293 | - flask auto-instrumentation now includes the route as `request.route` field on the root span. [#104](https://github.com/honeycombio/beeline-python/pull/104) 294 | 295 | ## 2.11.4 2020-01-27 296 | 297 | Fixes 298 | 299 | - Trace context headers injected with the `requests` middleware now reference the correct parent span. Previously, the trace context was generated prior to the wrapping span around the request call, anchoring spans generated with this trace context to the wrong span. 
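(Editor's aside, not part of CHANGELOG.md.) The `http_trace_parse_hook` and `http_trace_propagation_hook` configuration parameters introduced in 2.13.0 above are easiest to see wired up in a small sketch. The exact hook function names in `beeline.propagation.w3c` are an assumption based on the notes above, and the write key and service name are placeholders:

```python
# Illustrative sketch: wiring the propagation hooks described in the 2.13.0 notes.
import beeline
from beeline.propagation import w3c

beeline.init(
    writekey="PLACEHOLDER_API_KEY",  # placeholder, not a real key
    dataset="my-service",            # placeholder dataset name
    service_name="my-service",
    # Parse incoming W3C trace headers and emit W3C headers on outgoing
    # requests, instead of the Honeycomb-native header format.
    http_trace_parse_hook=w3c.http_trace_parse_hook,
    http_trace_propagation_hook=w3c.http_trace_propagation_hook,
)
```

Swapping in the `beeline.propagation.honeycomb` module instead keeps the Honeycomb-native header format.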
300 | 301 | ## 2.11.3 2020-01-23 302 | 303 | Fixes 304 | 305 | - Prevent duplicate `app.` prefixes in trace fields. [#96](https://github.com/honeycombio/beeline-python/pull/96) 306 | 307 | ## 2.11.2 2019-11-26 308 | 309 | Fixes 310 | 311 | - Allows less than three fields in trace context headers. 312 | 313 | ## 2.11.1 2019-11-19 314 | 315 | Fixes 316 | 317 | - Flask Middleware: AttributeError in DB instrumentation when cursor.lastrowid doesn't exist [#91](https://github.com/honeycombio/beeline-python/pull/91). 318 | 319 | ## 2.11.0 2019-11-18 320 | 321 | Features 322 | 323 | - Asyncio support! The new `AsyncioTracer` is used instead of `SynchronousTracer` when the beeline is initialized from within an asyncio event loop. [#87](https://github.com/honeycombio/beeline-python/pull/87) 324 | 325 | ## 2.10.1 2019-11-12 326 | 327 | - Traces propagated from other beelines (nodejs, go) which supply the "dataset" field in the trace context can now be handled by `unmarshal_trace_context`. The dataset is discarded - honoring this override will come in a later version. 328 | 329 | ## 2.10.0 2019-11-07 330 | 331 | Features 332 | 333 | - `awslambda` middleware can now extract Honeycomb Trace context from single SNS/SQS messages. [#77](https://github.com/honeycombio/beeline-python/pull/77) 334 | 335 | ## 2.9.1 2019-09-10 336 | 337 | Fixes 338 | 339 | - Don't try to access self.state.span in handle_error of Flask DB middleware if there is no current_app [#81](https://github.com/honeycombio/beeline-python/pull/81). 340 | 341 | ## 2.9.0 2019-09-09 342 | 343 | Improvements 344 | 345 | - Django middleware now supports instrumentation of multiple database connections. See [#80](https://github.com/honeycombio/beeline-python/pull/80). 346 | 347 | ## 2.8.0 2019-08-06 348 | 349 | Features 350 | 351 | - Django, Flask, Bottle, and Werkzeug middleware can now be subclassed to provide alternative implementations of `get_context_from_request` (Django) `get_context_from_environ` (Flask, Bottle, Werkzeug) methods. This allows customization of the request fields that are automatically instrumented at the start of a trace. Thanks to sjoerdjob's initial contribution in [#73](https://github.com/honeycombio/beeline-python/pull/73). 352 | 353 | Fixes 354 | 355 | - Django's `HoneyMiddleware` no longer adds a `request.post` field by default. This was removed for two reasons. First, calling `request.POST.dict()` could break other middleware by exhausting the request stream prematurely. See issue [#74](https://github.com/honeycombio/beeline-python/issues/74). Second, POST bodies can contain arbitrary values and potentially sensitive data, and the decision to instrument these values should be a deliberate choice by the user. If you currently rely on this behavior currently, you can swap out `HoneyMiddleware` with `HoneyMiddlewareWithPOST` to maintain the same functionality. 356 | - The `awslambda` middleware no longer crashes if the `context` object is missing certain attributes. See [#76](https://github.com/honeycombio/beeline-python/pull/76). 357 | 358 | ## 2.7.0 2019-07-26 359 | 360 | Features 361 | 362 | - Implements `add_rollup_field` API used in other Beelines. See the official [API reference docs](https://honeycombio.github.io/beeline-python/) for full details. 363 | 364 | ## 2.6.1 2019-07-02 365 | 366 | Fixes 367 | 368 | - Python Beeline now uses the same method to compute deterministic sampling decisions as other beelines (Go, NodeJS, Ruby). 
Prior to the fix, Beeline-generated traces spanning multiple services implemented in Python and other languages would have sometimes arrived incomplete due to inconsistent sampling behavior. 369 | 370 | ## 2.6.0 2019-06-05 - Update recommended 371 | 372 | Features 373 | 374 | - Adds new `traced_thread` decorator to copy over trace state to new threads. Read more in the official docs [here](https://docs.honeycomb.io/getting-data-in/python/beeline/#threading-and-traces). 375 | - Adds initial support for [Werkzeug](https://werkzeug.palletsprojects.com/en/0.15.x/). Read about how to use it [here](https://docs.honeycomb.io/getting-data-in/python/beeline/#using-automatic-instrumentation). 376 | 377 | Fixes 378 | 379 | - `init` now works after a process fork. If the beeline has already been initialized prior to the fork, it will be reinitialized if called again. Prior to this change, calling `init` before fork would render the beeline inoperable in the forked process(es). 380 | 381 | ## 2.5.1 2019-05-13 382 | 383 | Fixes 384 | 385 | - Support parameters of type `dict` in the flask-sqlachemy middleware. Addresses [#62](https://github.com/honeycombio/beeline-python/issues/62). 386 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | This project has adopted the Honeycomb User Community Code of Conduct to clarify expected behavior in our community. 4 | 5 | https://www.honeycomb.io/honeycomb-user-community-code-of-conduct/ -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guide 2 | 3 | Please see our [general guide for OSS lifecycle and practices.](https://github.com/honeycombio/home/blob/main/honeycomb-oss-lifecycle-and-practices.md) 4 | 5 | Features, bug fixes and other changes to this project are gladly accepted. 6 | Please open issues or a pull request with your change. Remember to add your name 7 | to the CONTRIBUTORS file! 8 | 9 | All contributions will be released under the Apache License 2.0. 10 | 11 | ## Developing 12 | 13 | beeline-python uses [poetry](https://python-poetry.org/) for packaging and dependency management. Our normal development workflow also uses [pyenv](https://github.com/pyenv/pyenv) to manage python versions. 14 | 15 | If you haven't used pyenv or poetry before, see https://blog.jayway.com/2019/12/28/pyenv-poetry-saviours-in-the-python-chaos/ for a quick guide to getting started using them both. 16 | 17 | ### Setting up on Mac 18 | 19 | - Install [pyenv](https://github.com/pyenv/pyenv) to install python version management 20 | 21 | - `brew install pyenv` 22 | 23 | - Follow https://python-poetry.org/ install instructions - it may be possible to use brew to install `poetry`, but it's not offically supported. 24 | 25 | * Install dependencies: 26 | - `poetry install --no-root` to install dependencies. 27 | - Use `poetry`'s commands for managing dependencies, including adding and updating them.. 28 | * Run tests - the below command is configured to run all tests: 29 | - `poetry run tests` 30 | * Get a shell 31 | 32 | - `poetry shell` will get you a shell with the current virtualenv. 
33 | 34 | * Switch python version by using the PYENV_VERSION environment variable to toggle between multiple python versions for testing 35 | - `export PYENV_VERSION=3.10.1` to set the Python virtualenv to 3.10.1 36 | -------------------------------------------------------------------------------- /COPYRIGHT: -------------------------------------------------------------------------------- 1 | Copyright 2016 Honeycomb, Hound Technology, Inc. All rights reserved. 2 | 3 | Use of this source code is governed by the Apache License 2.0 4 | license that can be found in the LICENSE file. 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016-Present Honeycomb, Hound Technology, Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /OSSMETADATA: -------------------------------------------------------------------------------- 1 | osslifecycle=beingsunset -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Honeycomb Beeline for Python 2 | 3 | [![OSS Lifecycle](https://img.shields.io/osslifecycle/honeycombio/beeline-python?color=pink)](https://github.com/honeycombio/home/blob/main/honeycomb-oss-lifecycle-and-practices.md) 4 | [![Build Status](https://circleci.com/gh/honeycombio/beeline-python.svg?style=svg)](https://app.circleci.com/pipelines/github/honeycombio/beeline-python) 5 | 6 | ⚠️**STATUS**: This project is being sunset. See [this issue](https://github.com/honeycombio/beeline-python/issues/302) for more details. 7 | 8 | ⚠️**Note**: Beelines are Honeycomb's legacy instrumentation libraries. We embrace OpenTelemetry as the effective way to instrument applications. For any new observability efforts, we recommend [instrumenting with OpenTelemetry](https://docs.honeycomb.io/send-data/python/opentelemetry-sdk/). 9 | 10 | This package makes it easy to instrument your Python web application to send useful events to [Honeycomb](https://honeycomb.io), a service for debugging your software in production. 11 | 12 | - [Usage and Examples](https://docs.honeycomb.io/getting-data-in/beelines/beeline-python/) 13 | - [API Reference](https://honeycombio.github.io/beeline-python/) 14 | 15 | ## Compatible with 16 | 17 | Currently supports Django (>3.2), Flask (<2.4), Bottle, and Tornado. 18 | 19 | Compatible with Python >3.7. 20 | 21 | ## Updating to 3.3.0 22 | 23 | Version 3.3.0 added support for Environment & Services, which changes sending behavior based on API key. 24 | 25 | If you are using the [FileTransmission](https://github.com/honeycombio/libhoney-py/blob/main/libhoney/transmission.py#L448) method and setting a false API key - and still working in Classic mode - you must update the key to be 32 characters in length to keep the same behavior. 26 | 27 | ## Contributions 28 | 29 | Features, bug fixes and other changes to `beeline-python` are gladly accepted. 30 | 31 | If you add a new test module, be sure to update `beeline.test_suite` to pick up the new tests. 32 | 33 | All contributions will be released under the Apache License 2.0. 34 | -------------------------------------------------------------------------------- /RELEASING.md: -------------------------------------------------------------------------------- 1 | # Releasing 2 | 3 | Use `poetry run bump2version --new-version <new version number> <major|minor|patch>` to update the version 4 | number in all of the places it appears. 5 | 6 | For example, to release version 3.5.2 as a patch release: 7 | 8 | ```shell 9 | poetry run bump2version --new-version 3.5.2 patch 10 | ``` 11 | 12 | - Confirm the version number update appears in `.bumpversion.cfg`, `pyproject.toml`, and `version.py` 13 | - Update `CHANGELOG.md` with the changes since the last release.
Consider automating with a command such as these two: 14 | - `git log $(git describe --tags --abbrev=0)..HEAD --no-merges --oneline > new-in-this-release.log` 15 | - `git log --pretty='%C(green)%d%Creset- %s | [%an](https://github.com/)'` 16 | - Commit changes, push, and open a release preparation pull request for review. 17 | - Once the pull request is merged, fetch the updated `main` branch. 18 | - Apply a tag for the new version on the merged commit (e.g. `git tag -a v3.5.2 -m "v3.5.2"`) 19 | - Push the tag upstream (this will kick off the release pipeline in CI) e.g. `git push origin v3.5.2` 20 | - Ensure that there is a draft GitHub release created as part of CI publish steps (this will also publish to PyPi). 21 | - Click "generate release notes" in GitHub for full changelog notes and any new contributors 22 | - Publish the GitHub draft release 23 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | This security policy applies to public projects under the [honeycombio organization][gh-organization] on GitHub. 4 | For security reports involving the services provided at `(ui|ui-eu|api|api-eu).honeycomb.io`, refer to the [Honeycomb Bug Bounty Program][bugbounty] for scope, expectations, and reporting procedures. 5 | 6 | ## Security/Bugfix Versions 7 | 8 | Security and bug fixes are generally provided only for the last minor version. 9 | Fixes are released either as part of the next minor version or as an on-demand patch version. 10 | 11 | Security fixes are given priority and might be enough to cause a new version to be released. 12 | 13 | ## Reporting a Vulnerability 14 | 15 | We encourage responsible disclosure of security vulnerabilities. 16 | If you find something suspicious, we encourage and appreciate your report! 17 | 18 | ### Ways to report 19 | 20 | In order for the vulnerability reports to reach maintainers as soon as possible, the preferred way is to use the "Report a vulnerability" button under the "Security" tab of the associated GitHub project. 21 | This creates a private communication channel between the reporter and the maintainers. 22 | 23 | If you are absolutely unable to or have strong reasons not to use GitHub's vulnerability reporting workflow, please reach out to the Honeycomb security team at [security@honeycomb.io](mailto:security@honeycomb.io). 24 | 25 | [gh-organization]: https://github.com/honeycombio 26 | [bugbounty]: https://www.honeycomb.io/bugbountyprogram 27 | -------------------------------------------------------------------------------- /SUPPORT.md: -------------------------------------------------------------------------------- 1 | # How to Get Help 2 | 3 | This project uses GitHub issues to track bugs, feature requests, and questions about using the project. Please search for existing issues before filing a new one. 4 | -------------------------------------------------------------------------------- /beeline/aiotrace.py: -------------------------------------------------------------------------------- 1 | """Asynchronous tracer implementation. 2 | 3 | This requires Python 3.7, because it uses the contextvars module. 
4 | 5 | """ 6 | import asyncio 7 | import contextvars # pylint: disable=import-error 8 | import functools 9 | import inspect 10 | 11 | from beeline.trace import Tracer 12 | 13 | current_trace_var = contextvars.ContextVar("current_trace") 14 | 15 | 16 | def create_task_factory(parent_factory): 17 | """Create a task factory that makes a copy of the current trace. 18 | 19 | New tasks have their own context variables, but the current_trace 20 | context variable still refers to the same Trace object as the one 21 | in the parent task. This task factory replaces the Trace object 22 | with a copy of itself. 23 | 24 | """ 25 | def task_factory_impl(loop, coro): 26 | async def wrapper(): 27 | current_trace = current_trace_var.get(None) 28 | if current_trace is not None: 29 | current_trace_var.set(current_trace.copy()) 30 | return await coro 31 | 32 | if parent_factory is None: 33 | task = asyncio.tasks.Task(wrapper(), loop=loop) 34 | else: 35 | task = parent_factory(wrapper()) 36 | 37 | return task 38 | 39 | task_factory_impl.__trace_task_factory__ = True 40 | return task_factory_impl 41 | 42 | 43 | class AsyncioTracer(Tracer): 44 | def __init__(self, client): 45 | """Initialize, and ensure that our task factory is set up.""" 46 | super().__init__(client) 47 | 48 | loop = asyncio.get_running_loop() # pylint: disable=no-member 49 | 50 | task_factory = loop.get_task_factory() 51 | if task_factory is None or not task_factory.__trace_task_factory__: 52 | new_task_factory = create_task_factory(task_factory) 53 | loop.set_task_factory(new_task_factory) 54 | 55 | @property 56 | def _trace(self): 57 | return current_trace_var.get(None) 58 | 59 | @_trace.setter 60 | def _trace(self, new_trace): 61 | current_trace_var.set(new_trace) 62 | 63 | 64 | def traced_impl(tracer_fn, name, trace_id, parent_id): 65 | """Implementation of the traced decorator including async support. 66 | 67 | The async version needs to be different, because the trace should 68 | cover the execution of the whole decorated function. If using the 69 | synchronous version, the trace would only cover the time it takes 70 | to return the coroutine object. 71 | 72 | """ 73 | def wrapped(fn): 74 | if asyncio.iscoroutinefunction(fn): 75 | @functools.wraps(fn) 76 | async def async_inner(*args, **kwargs): 77 | with tracer_fn(name=name, trace_id=trace_id, parent_id=parent_id): 78 | return await fn(*args, **kwargs) 79 | 80 | return async_inner 81 | elif inspect.isgeneratorfunction(fn): 82 | @functools.wraps(fn) 83 | def inner(*args, **kwargs): 84 | inner_generator = fn(*args, **kwargs) 85 | with tracer_fn(name=name, trace_id=trace_id, parent_id=parent_id): 86 | yield from inner_generator 87 | 88 | return inner 89 | else: 90 | @functools.wraps(fn) 91 | def inner(*args, **kwargs): 92 | with tracer_fn(name=name, trace_id=trace_id, parent_id=parent_id): 93 | return fn(*args, **kwargs) 94 | 95 | return inner 96 | 97 | return wrapped 98 | 99 | 100 | def untraced(fn): 101 | """Async function decorator detaching from any ongoing trace. 102 | 103 | This decorator is necessary for starting independent async tasks 104 | from within a trace, since async tasks inherit trace state by 105 | default. 106 | 107 | """ 108 | 109 | # Both synchronous and asynchronous functions may create tasks. 
110 | if asyncio.iscoroutinefunction(fn): 111 | @functools.wraps(fn) 112 | async def wrapped(*args, **kwargs): 113 | token = None 114 | try: 115 | current_trace = current_trace_var.get(None) 116 | if current_trace is not None: 117 | token = current_trace_var.set(None) 118 | 119 | return await fn(*args, **kwargs) 120 | finally: 121 | if token is not None: 122 | current_trace_var.reset(token) 123 | 124 | return wrapped 125 | 126 | else: 127 | @functools.wraps(fn) 128 | def wrapped(*args, **kwargs): 129 | token = None 130 | try: 131 | current_trace = current_trace_var.get(None) 132 | if current_trace is not None: 133 | token = current_trace_var.set(None) 134 | 135 | return fn(*args, **kwargs) 136 | finally: 137 | if token is not None: 138 | current_trace_var.reset(token) 139 | 140 | return wrapped 141 | -------------------------------------------------------------------------------- /beeline/internal.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | 3 | # these are mostly convenience methods for safely calling beeline methods 4 | # even if the beeline hasn't been initialized 5 | 6 | 7 | def send_event(): 8 | bl = beeline.get_beeline() 9 | if bl: 10 | return bl.send_event() 11 | 12 | 13 | def send_all(): 14 | bl = beeline.get_beeline() 15 | if bl: 16 | return bl.send_all() 17 | 18 | 19 | def log(msg, *args, **kwargs): 20 | bl = beeline.get_beeline() 21 | if bl: 22 | bl.log(msg, *args, **kwargs) 23 | 24 | 25 | def stringify_exception(e): 26 | try: 27 | return str(e) 28 | except Exception: 29 | return "unable to decode exception" 30 | -------------------------------------------------------------------------------- /beeline/middleware/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/honeycombio/beeline-python/b8c96386964749c60bcd865d10f31148f970fec1/beeline/middleware/__init__.py -------------------------------------------------------------------------------- /beeline/middleware/awslambda/__init__.py: -------------------------------------------------------------------------------- 1 | import traceback 2 | 3 | import beeline 4 | from beeline.propagation import Request 5 | # In Lambda, a cold start is when Lambda has to spin up a new instance of a 6 | # function to satisfy a request, rather than re-use an existing instance. 7 | # This usually has a non-trivial effect on latency for the request and is 8 | # worth instrumenting. 
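# As a rough illustration (the handler name is hypothetical), two consecutive
# invocations served by the same sandbox would emit events whose fields include:
#
#     {"name": "my_handler", "meta.cold_start": True, ...}   # first invocation
#     {"name": "my_handler", "meta.cold_start": False, ...}  # warm invocations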
9 | COLD_START = True 10 | 11 | 12 | class LambdaRequest(Request): 13 | ''' 14 | Look for header values in SNS/SQS Message Attributes 15 | ''' 16 | 17 | def __init__(self, event): 18 | # Look for headers (or equivalents) in common places 19 | self._type = None 20 | self._event = event 21 | if isinstance(event, dict): 22 | # If API gateway is triggering the Lambda, the event will have headers 23 | # and we can look for our trace headers 24 | # https://docs.aws.amazon.com/lambda/latest/dg/with-on-demand-https.html 25 | if 'headers' in event: 26 | if isinstance(event['headers'], dict): 27 | self._attributes = event['headers'] 28 | self._type = 'headers' 29 | 30 | # If a message source is triggering the Lambda, the event may have 31 | # our trace data in the message attributes 32 | elif 'Records' in event: 33 | # Only process batches of exactly 1 34 | # Higher batch sizes would have multiple messages thus 35 | # generating multiple traces and requiring manual instrumentation 36 | if len(event['Records']) == 1: 37 | # If SNS is triggering the Lambda 38 | # https://docs.aws.amazon.com/lambda/latest/dg/with-sns.html 39 | if 'EventSource' in event['Records'][0]: 40 | if event['Records'][0]['EventSource'] == 'aws:sns': 41 | self._attributes = event['Records'][0]['Sns']['MessageAttributes'] 42 | self._type = 'sns' 43 | # If SQS is triggering the Lambda 44 | # https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html 45 | elif 'eventSource' in event['Records'][0]: 46 | if event['Records'][0]['eventSource'] == 'aws:sqs': 47 | self._attributes = event['Records'][0]['messageAttributes'] 48 | self._type = 'sqs' 49 | if self._type: 50 | self._keymap = {k.lower(): k for k in self._attributes.keys()} 51 | 52 | def header(self, key): 53 | if not self._type: 54 | return None 55 | lookup_key = key.lower() 56 | if lookup_key not in self._keymap: 57 | return None 58 | lookup_key = self._keymap[lookup_key] 59 | if self._type == 'headers': 60 | return self._attributes[lookup_key] 61 | elif self._type == 'sns': 62 | return self._attributes[lookup_key]['Value'] 63 | elif self._type == 'sqs': 64 | return self._attributes[lookup_key]['stringValue'] 65 | return None 66 | 67 | def method(self): 68 | ''' 69 | For a lambda request, method is an irrelevant parameter. 70 | ''' 71 | return None 72 | 73 | def scheme(self): 74 | ''' 75 | For a lambda request, scheme is an irrelevant parameter. 76 | ''' 77 | return None 78 | 79 | def host(self): 80 | ''' 81 | For a lambda request, host is an irrelevant parameter. 82 | ''' 83 | return None 84 | 85 | def path(self): 86 | ''' 87 | For a lambda request, path is an irrelevant parameter. 88 | ''' 89 | return None 90 | 91 | def query(self): 92 | ''' 93 | For a lambda request, query is an irrelevant parameter. 94 | ''' 95 | return None 96 | 97 | def middleware_request(self): 98 | return self._event 99 | 100 | 101 | def beeline_wrapper(handler=None, record_input=True, record_output=True): 102 | ''' Honeycomb Beeline decorator for Lambda functions. Expects a handler 103 | function with the signature: 104 | 105 | `def handler(event, context)` 106 | 107 | Example use: 108 | 109 | ``` 110 | @beeline_wrapper 111 | def my_handler(event, context): 112 | # ... 113 | 114 | @beeline_wrapper(record_input=False, record_output=False) 115 | def my_handler_with_large_inputs_and_outputs(event, context): 116 | # ... 
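    # Assumed setup (not shown above): beeline.init(writekey=..., dataset=...) should
    # already have run, typically at module import time outside the handler. If the
    # beeline has not been initialized, the wrapper simply calls the handler without
    # tracing.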
117 | ``` 118 | 119 | ''' 120 | 121 | def _beeline_wrapper(event, context): 122 | global COLD_START 123 | 124 | # don't blow up the world if the beeline has not been initialized 125 | if not beeline.get_beeline(): 126 | return handler(event, context) 127 | 128 | root_span = None 129 | try: 130 | # Create request context 131 | request_context = { 132 | "app.function_name": getattr(context, 'function_name', ""), 133 | "app.function_version": getattr(context, 'function_version', ""), 134 | "app.request_id": getattr(context, 'aws_request_id', ""), 135 | "meta.cold_start": COLD_START, 136 | "name": handler.__name__ 137 | } 138 | if record_input: 139 | request_context["app.event"] = event 140 | 141 | lr = LambdaRequest(event) 142 | root_span = beeline.propagate_and_start_trace(request_context, lr) 143 | 144 | # Actually run the handler 145 | resp = handler(event, context) 146 | 147 | if resp is not None and record_output: 148 | beeline.add_context_field('app.response', resp) 149 | 150 | return resp 151 | except Exception as e: 152 | beeline.add_context({ 153 | "app.exception_type": str(type(e)), 154 | "app.exception_string": beeline.internal.stringify_exception(e), 155 | "app.exception_stacktrace": traceback.format_exc(), 156 | }) 157 | raise e 158 | finally: 159 | # This remains false for the lifetime of the module 160 | COLD_START = False 161 | beeline.finish_trace(root_span) 162 | # we have to flush events before the lambda returns 163 | beeline.get_beeline().client.flush() 164 | 165 | def outer_wrapper(*args, **kwargs): 166 | return beeline_wrapper(*args, record_input=record_input, record_output=record_output, **kwargs) 167 | 168 | if handler: 169 | return _beeline_wrapper 170 | return outer_wrapper 171 | -------------------------------------------------------------------------------- /beeline/middleware/awslambda/test_awslambda.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from mock import Mock, patch, ANY 3 | 4 | from beeline.middleware import awslambda 5 | 6 | header_value = '1;trace_id=bloop,parent_id=scoop,context=e30K' 7 | 8 | 9 | class TestLambdaRequest(unittest.TestCase): 10 | def test_get_header_from_headers(self): 11 | ''' 12 | Test that if headers is set, we have case-insensitive match. 
13 | ''' 14 | event = { 15 | 'headers': { 16 | # case shouldn't matter 17 | 'X-HoNEyComb-TrACE': header_value, 18 | }, 19 | } 20 | 21 | lr = awslambda.LambdaRequest(event) 22 | self.assertIsNotNone(lr) 23 | self.assertEqual(lr.header('X-Honeycomb-Trace'), header_value) 24 | 25 | def test_handle_no_headers(self): 26 | ''' ensure that we handle events with no header key ''' 27 | event = { 28 | 'foo': 1, 29 | } 30 | 31 | lr = awslambda.LambdaRequest(event) 32 | self.assertIsNotNone(lr) 33 | self.assertIsNone(lr.header('X-Honeycomb-Trace')) 34 | 35 | def test_handle_sns_none(self): 36 | ''' ensure that we handle SNS events with no honeycomb key ''' 37 | event = { 38 | "Records": 39 | [ 40 | { 41 | "EventSource": "aws:sns", 42 | "Sns": { 43 | "Message": "Hello from SNS!", 44 | "MessageAttributes": {} 45 | } 46 | } 47 | ], 48 | } 49 | 50 | lr = awslambda.LambdaRequest(event) 51 | self.assertIsNotNone(lr) 52 | self.assertIsNone(lr.header('X-Honeycomb-Trace')) 53 | 54 | def test_handle_sns_attribute(self): 55 | ''' ensure that we extract SNS data from message attributes''' 56 | event = { 57 | "Records": 58 | [ 59 | { 60 | "EventSource": "aws:sns", 61 | "Sns": { 62 | "Message": "Hello from SNS!", 63 | "MessageAttributes": { 64 | 'X-HoNEyComb-TrACE': { 65 | "Type": "String", 66 | "Value": header_value, 67 | } 68 | } 69 | } 70 | } 71 | ], 72 | } 73 | 74 | lr = awslambda.LambdaRequest(event) 75 | self.assertIsNotNone(lr) 76 | self.assertEqual(lr.header('X-Honeycomb-Trace'), header_value) 77 | 78 | def test_handle_sqs_none(self): 79 | ''' ensure that we handle SQS events with no honeycomb key ''' 80 | event = { 81 | "Records": 82 | [ 83 | { 84 | "body": "Hello from SQS!", 85 | "messageAttributes": {}, 86 | "eventSource": "aws:sqs", 87 | }, 88 | ], 89 | } 90 | 91 | lr = awslambda.LambdaRequest(event) 92 | self.assertIsNotNone(lr) 93 | self.assertIsNone(lr.header('X-Honeycomb-Trace')) 94 | 95 | def test_handle_sqs_attributes(self): 96 | ''' ensure that we extract SQS data from message attributes''' 97 | event = { 98 | "Records": 99 | [ 100 | { 101 | "body": "Hello from SQS!", 102 | "messageAttributes": { 103 | 'X-HoNEyComb-TrACE': { 104 | "Type": "String", 105 | "stringValue": header_value, 106 | }, 107 | 'foo': { 108 | "Type": "String", 109 | "stringValue": "bar", 110 | }, 111 | }, 112 | "eventSource": "aws:sqs", 113 | }, 114 | ], 115 | } 116 | 117 | lr = awslambda.LambdaRequest(event) 118 | self.assertIsNotNone(lr) 119 | self.assertEqual(lr.header('X-Honeycomb-Trace'), header_value) 120 | 121 | def test_message_batch_is_ignored(self): 122 | ''' ensure that we don't process batches''' 123 | event = { 124 | "Records": 125 | [ 126 | { 127 | "body": "Hello from SQS!", 128 | "messageAttributes": { 129 | 'X-HoNEyComb-TrACE': { 130 | "Type": "String", 131 | "stringValue": "1;trace_id=beep,parent_id=moop,context=e29K", 132 | }, 133 | 'foo': { 134 | "Type": "String", 135 | "stringValue": "bar", 136 | }, 137 | }, 138 | "eventSource": "aws:sqs", 139 | }, 140 | { 141 | "body": "Another hello from SQS!", 142 | "messageAttributes": { 143 | 'X-HoNEyComb-TrACE': { 144 | "Type": "String", 145 | "stringValue": "1;trace_id=bloop,parent_id=scoop,context=e30K", 146 | }, 147 | 'foo': { 148 | "Type": "String", 149 | "stringValue": "baz", 150 | }, 151 | }, 152 | "eventSource": "aws:sqs", 153 | }, 154 | ], 155 | } 156 | 157 | lr = awslambda.LambdaRequest(event) 158 | self.assertIsNotNone(lr) 159 | self.assertIsNone(lr.header('X-Honeycomb-Trace')) 160 | 161 | 162 | class TestLambdaWrapper(unittest.TestCase): 163 | def 
test_wrapper_works_no_init(self): 164 | ''' ensure that the wrapper doesn't break anything if used before 165 | beeline.init is called 166 | ''' 167 | with patch('beeline.get_beeline') as p: 168 | p.return_value = None 169 | 170 | @awslambda.beeline_wrapper 171 | def foo(event, context): 172 | return 1 173 | 174 | self.assertEqual(foo(None, None), 1) 175 | 176 | @awslambda.beeline_wrapper() 177 | def bar(event, context): 178 | return 1 179 | 180 | self.assertEqual(bar(None, None), 1) 181 | 182 | def test_basic_instrumentation(self): 183 | ''' ensure basic event fields get instrumented ''' 184 | with patch('beeline.propagate_and_start_trace') as m_propagate, \ 185 | patch('beeline.add_context_field') as m_add_context_field, \ 186 | patch('beeline.middleware.awslambda.beeline._GBL'), \ 187 | patch('beeline.middleware.awslambda.COLD_START') as m_cold_start: 188 | m_event = Mock() 189 | m_context = Mock(function_name='fn', function_version="1.1.1", 190 | aws_request_id='12345') 191 | 192 | @awslambda.beeline_wrapper 193 | def handler(event, context): 194 | return 1 195 | 196 | self.assertEqual(handler(m_event, m_context), 1) 197 | m_propagate.assert_called_once_with({ 198 | 'app.function_name': 'fn', 199 | 'app.function_version': '1.1.1', 200 | 'app.request_id': '12345', 201 | 'app.event': ANY, # 'app.event' is included by default 202 | 'meta.cold_start': ANY, 203 | 'name': 'handler'}, ANY) 204 | m_add_context_field.assert_called_once_with('app.response', 1) 205 | 206 | def test_handle_exceptions(self): 207 | ''' ensure instrumentation occurs when the handler raises an exception ''' 208 | with patch('beeline.propagate_and_start_trace') as m_propagate, \ 209 | patch('beeline.add_context_field') as m_add_context_field, \ 210 | patch('beeline.add_context') as m_add_context, \ 211 | patch('beeline.middleware.awslambda.beeline._GBL'), \ 212 | patch('beeline.middleware.awslambda.COLD_START') as m_cold_start: 213 | m_event = Mock() 214 | m_context = Mock(function_name='fn', function_version="1.1.1", 215 | aws_request_id='12345') 216 | 217 | @awslambda.beeline_wrapper 218 | def handler(event, context): 219 | raise ValueError('something went wrong') 220 | 221 | with self.assertRaises(ValueError): 222 | handler(m_event, m_context) 223 | 224 | m_propagate.assert_called_once_with({ 225 | 'app.function_name': 'fn', 226 | 'app.function_version': '1.1.1', 227 | 'app.request_id': '12345', 228 | 'app.event': ANY, # 'app.event' is included by default 229 | 'meta.cold_start': ANY, 230 | 'name': 'handler'}, ANY) 231 | m_add_context.assert_called_once_with({ 232 | 'app.exception_type': ANY, # representation changes between 2.7 and 3.x 233 | 'app.exception_string': 'something went wrong', 234 | 'app.exception_stacktrace': ANY 235 | }) 236 | 237 | def test_can_omit_input(self): 238 | ''' ensure input event field can be omitted ''' 239 | with patch('beeline.propagate_and_start_trace') as m_propagate, \ 240 | patch('beeline.add_context_field') as m_add_context_field, \ 241 | patch('beeline.middleware.awslambda.beeline._GBL'), \ 242 | patch('beeline.middleware.awslambda.COLD_START') as m_cold_start: 243 | m_event = Mock() 244 | m_context = Mock(function_name='fn', function_version="1.1.1", 245 | aws_request_id='12345') 246 | 247 | @awslambda.beeline_wrapper(record_input=False) 248 | def handler(event, context): 249 | return 1 250 | 251 | self.assertEqual(handler(m_event, m_context), 1) 252 | m_propagate.assert_called_once_with({ 253 | 'app.function_name': 'fn', 254 | 'app.function_version': '1.1.1', 255 | 
'app.request_id': '12345', 256 | 'meta.cold_start': ANY, 257 | 'name': 'handler'}, ANY) # note the lack of an 'app.event' field 258 | m_add_context_field.assert_called_once_with('app.response', 1) 259 | 260 | def test_can_omit_output(self): 261 | ''' ensure output event fields can be omitted ''' 262 | with patch('beeline.propagate_and_start_trace') as m_propagate, \ 263 | patch('beeline.add_context_field') as m_add_context_field, \ 264 | patch('beeline.middleware.awslambda.beeline._GBL'), \ 265 | patch('beeline.middleware.awslambda.COLD_START') as m_cold_start: 266 | m_event = Mock() 267 | m_context = Mock(function_name='fn', function_version="1.1.1", 268 | aws_request_id='12345') 269 | 270 | @awslambda.beeline_wrapper(record_output=False) 271 | def handler(event, context): 272 | return 1 273 | 274 | self.assertEqual(handler(m_event, m_context), 1) 275 | m_propagate.assert_called_once_with({ 276 | 'app.function_name': 'fn', 277 | 'app.function_version': '1.1.1', 278 | 'app.request_id': '12345', 279 | 'app.event': ANY, # 'app.event' is included by default 280 | 'meta.cold_start': ANY, 281 | 'name': 'handler'}, ANY) 282 | m_add_context_field.not_called_with('app.response', 1) 283 | -------------------------------------------------------------------------------- /beeline/middleware/bottle/__init__.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | from beeline.propagation import Request 3 | from beeline.middleware.wsgi import WSGIRequest 4 | 5 | 6 | class HoneyWSGIMiddleware(object): 7 | 8 | def __init__(self, app): 9 | self.app = app 10 | 11 | def __call__(self, environ, start_response): 12 | wr = WSGIRequest("bottle", environ) 13 | 14 | root_span = beeline.propagate_and_start_trace(wr.request_context(), wr) 15 | 16 | def _start_response(status, headers, *args): 17 | beeline.add_context_field("response.status_code", status) 18 | beeline.finish_trace(root_span) 19 | 20 | return start_response(status, headers, *args) 21 | 22 | return self.app(environ, _start_response) 23 | -------------------------------------------------------------------------------- /beeline/middleware/bottle/test_bottle.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from mock import Mock, patch, ANY 3 | 4 | from beeline.middleware.bottle import HoneyWSGIMiddleware 5 | 6 | 7 | class SimpleWSGITest(unittest.TestCase): 8 | def setUp(self): 9 | self.addCleanup(patch.stopall) 10 | self.m_gbl = patch('beeline.middleware.bottle.beeline').start() 11 | 12 | def test_call_middleware(self): 13 | ''' Just call the middleware and ensure that the code runs ''' 14 | mock_app = Mock() 15 | mock_resp = Mock() 16 | mock_trace = Mock() 17 | mock_environ = {} 18 | self.m_gbl.propagate_and_start_trace.return_value = mock_trace 19 | 20 | mw = HoneyWSGIMiddleware(mock_app) 21 | mw({}, mock_resp) 22 | self.m_gbl.propagate_and_start_trace.assert_called_once() 23 | 24 | mock_app.assert_called_once_with(mock_environ, ANY) 25 | 26 | # get the response function passed to the app 27 | resp_func = mock_app.mock_calls[0][1][1] 28 | # call it to make sure it does what we want 29 | # the values here don't really matter 30 | resp_func(1, 2) 31 | 32 | mock_resp.assert_called_once_with(1, 2) 33 | self.m_gbl.finish_trace.assert_called_once_with(mock_trace) 34 | -------------------------------------------------------------------------------- /beeline/middleware/django/__init__.py: 
-------------------------------------------------------------------------------- 1 | import contextlib 2 | import datetime 3 | import beeline 4 | from beeline.propagation import Request 5 | from django.db import connections 6 | 7 | 8 | class DjangoRequest(Request): 9 | def __init__(self, request): 10 | self._request = request 11 | self._META = request.META 12 | 13 | # only log if beeline has been initialised 14 | if beeline.get_beeline(): 15 | beeline.get_beeline().log(request.META) 16 | 17 | def header(self, key): 18 | lookup_key = "HTTP_" + key.upper().replace('-', '_') 19 | return self._request.META.get(lookup_key) 20 | 21 | def method(self): 22 | return self._request.method 23 | 24 | def scheme(self): 25 | return self._request.scheme 26 | 27 | def host(self): 28 | return self._request.get_host() 29 | 30 | def path(self): 31 | return self._request.path 32 | 33 | def query(self): 34 | return self._request.META.get('QUERY_STRING') 35 | 36 | def middleware_request(self): 37 | return self._request 38 | 39 | 40 | class HoneyDBWrapper(object): 41 | 42 | def __call__(self, execute, sql, params, many, context): 43 | # if beeline has not been initialised, just execute query 44 | if not beeline.get_beeline(): 45 | return execute(sql, params, many, context) 46 | 47 | vendor = context['connection'].vendor 48 | trace_name = f"django_{vendor}_query" 49 | 50 | with beeline.tracer(trace_name): 51 | beeline.add_context({ 52 | "type": "db", 53 | "db.query": sql, 54 | "db.query_args": params, 55 | }) 56 | beeline.add_rollup_field("db.call_count", 1) 57 | 58 | try: 59 | db_call_start = datetime.datetime.now() 60 | result = execute(sql, params, many, context) 61 | db_call_diff = datetime.datetime.now() - db_call_start 62 | duration = db_call_diff.total_seconds() * 1000 63 | beeline.add_context_field("db.duration", duration) 64 | beeline.add_rollup_field("db.total_duration", duration) 65 | except Exception as e: 66 | beeline.add_context_field("db.error", str(type(e))) 67 | beeline.add_context_field( 68 | "db.error_detail", beeline.internal.stringify_exception(e)) 69 | raise 70 | else: 71 | return result 72 | finally: 73 | if vendor in ('postgresql', 'mysql'): 74 | beeline.add_context({ 75 | "db.last_insert_id": getattr(context['cursor'].cursor, 'lastrowid', None), 76 | "db.rows_affected": context['cursor'].cursor.rowcount, 77 | }) 78 | 79 | 80 | class HoneyMiddlewareBase(object): 81 | def __init__(self, get_response): 82 | self.get_response = get_response 83 | 84 | def __call__(self, request): 85 | response = self.create_http_event(request) 86 | return response 87 | 88 | def get_context_from_request(self, request): 89 | trace_name = f"django_http_{request.method.lower()}" 90 | return { 91 | "name": trace_name, 92 | "type": "http_server", 93 | "request.host": request.get_host(), 94 | "request.method": request.method, 95 | "request.path": request.path, 96 | "request.remote_addr": request.META.get('REMOTE_ADDR'), 97 | "request.content_length": request.META.get('CONTENT_LENGTH', 0), 98 | "request.user_agent": request.META.get('HTTP_USER_AGENT'), 99 | "request.scheme": request.scheme, 100 | "request.secure": request.is_secure(), 101 | "request.query": request.GET.dict(), 102 | "request.xhr": request.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest', 103 | } 104 | 105 | def get_context_from_response(self, request, response): 106 | return { 107 | "response.status_code": response.status_code, 108 | } 109 | 110 | def create_http_event(self, request): 111 | # if beeline has not been initialised, just execute
request 112 | if not beeline.get_beeline(): 113 | return self.get_response(request) 114 | 115 | # Code to be executed for each request before 116 | # the view (and later middleware) are called. 117 | dr = DjangoRequest(request) 118 | 119 | request_context = self.get_context_from_request(request) 120 | root_span = beeline.propagate_and_start_trace(request_context, dr) 121 | 122 | response = self.get_response(request) 123 | 124 | # Code to be executed for each request/response after 125 | # the view is called. 126 | response_context = self.get_context_from_response(request, response) 127 | beeline.add_context(response_context) 128 | 129 | # Streaming responses return immediately, but iterate over 130 | # their `streaming_content` until it's empty; only close the 131 | # trace then, not now. 132 | def wrap_streaming_content(content): 133 | for chunk in content: 134 | yield chunk 135 | beeline.finish_trace(root_span) 136 | 137 | if response.streaming: 138 | response.streaming_content = wrap_streaming_content( 139 | response.streaming_content 140 | ) 141 | else: 142 | beeline.finish_trace(root_span) 143 | 144 | return response 145 | 146 | def process_exception(self, request, exception): 147 | if beeline.get_beeline(): 148 | beeline.add_context_field( 149 | "request.error_detail", beeline.internal.stringify_exception(exception)) 150 | 151 | def process_view(self, request, view_func, view_args, view_kwargs): 152 | if beeline.get_beeline(): 153 | try: 154 | beeline.add_context_field("django.view_func", view_func.__name__) 155 | except AttributeError: 156 | pass 157 | 158 | try: 159 | beeline.add_context_field("request.route", request.resolver_match.route) 160 | except AttributeError: 161 | pass 162 | 163 | 164 | class HoneyMiddlewareHttp(HoneyMiddlewareBase): 165 | pass 166 | 167 | 168 | class HoneyMiddleware(HoneyMiddlewareBase): 169 | def __call__(self, request): 170 | try: 171 | db_wrapper = HoneyDBWrapper() 172 | # db instrumentation is only present in Django > 2.0 173 | with contextlib.ExitStack() as stack: 174 | for connection in connections.all(): 175 | stack.enter_context(connection.execute_wrapper(db_wrapper)) 176 | response = self.create_http_event(request) 177 | 178 | # If the response is streaming, then _this_ context 179 | # will exit now, but we want to set up the same sort 180 | # of context stack when that streaming content gets 181 | # iterated over. 182 | def wrap_streaming_content(content): 183 | with contextlib.ExitStack() as stack: 184 | for connection in connections.all(): 185 | stack.enter_context(connection.execute_wrapper(db_wrapper)) 186 | for chunk in content: 187 | yield chunk 188 | if response.streaming: 189 | response.streaming_content = wrap_streaming_content( 190 | response.streaming_content 191 | ) 192 | except AttributeError: 193 | response = self.create_http_event(request) 194 | 195 | return response 196 | 197 | 198 | class HoneyMiddlewareWithPOST(HoneyMiddleware): 199 | ''' HoneyMiddlewareWithPOST is a subclass of HoneyMiddleware. The only difference is that 200 | the `request.post` field is instrumented. This was removed from the base implementation in 2.8.0 201 | due to conflicts with other middleware.
See https://github.com/honeycombio/beeline-python/issues/74.''' 202 | 203 | def get_context_from_request(self, request): 204 | trace_name = f"django_http_{request.method.lower()}" 205 | return { 206 | "name": trace_name, 207 | "type": "http_server", 208 | "request.host": request.get_host(), 209 | "request.method": request.method, 210 | "request.path": request.path, 211 | "request.remote_addr": request.META.get('REMOTE_ADDR'), 212 | "request.content_length": request.META.get('CONTENT_LENGTH', 0), 213 | "request.user_agent": request.META.get('HTTP_USER_AGENT'), 214 | "request.scheme": request.scheme, 215 | "request.secure": request.is_secure(), 216 | "request.query": request.GET.dict(), 217 | "request.xhr": request.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest', 218 | "request.post": request.POST.dict(), 219 | } 220 | -------------------------------------------------------------------------------- /beeline/middleware/django/test_django.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from mock import Mock, call, patch 3 | 4 | import django 5 | from django.http import HttpResponse, StreamingHttpResponse 6 | from django.test.client import Client 7 | from django.conf.urls import url 8 | 9 | from beeline.middleware.django import HoneyMiddlewareBase 10 | 11 | 12 | class SimpleWSGITest(unittest.TestCase): 13 | def setUp(self): 14 | self.addCleanup(patch.stopall) 15 | self.m_gbl = patch('beeline.middleware.django.beeline').start() 16 | 17 | def test_call_middleware(self): 18 | ''' Just call the middleware and ensure that the code runs ''' 19 | mock_req = Mock() 20 | mock_resp = Mock() 21 | mock_resp.return_value.streaming = False 22 | mock_trace = Mock() 23 | self.m_gbl.propagate_and_start_trace.return_value = mock_trace 24 | 25 | mw = HoneyMiddlewareBase(mock_resp) 26 | resp = mw(mock_req) 27 | self.m_gbl.propagate_and_start_trace.assert_called_once() 28 | 29 | mock_resp.assert_called_once_with(mock_req) 30 | 31 | self.m_gbl.finish_trace.assert_called_once_with(mock_trace) 32 | self.assertEqual(resp, mock_resp.return_value) 33 | 34 | 35 | @unittest.skipIf(django.VERSION < (2, 2), "Routes are only supported on Django 2.2 and higher") 36 | class FullViewTestCase(unittest.TestCase): 37 | def setUp(self): 38 | self.addCleanup(patch.stopall) 39 | self.m_gbl = patch('beeline.middleware.django.beeline').start() 40 | 41 | # Unfortunately we need to import these quite late, because if we use a 42 | # top-level import, the test discovery procedure checks if `settings` 43 | # is a subclass of `TestCase`, causing the settings to be initialized, 44 | # which isn't possible. 
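        # The settings.configure(...) call below stands in for what a real project
        # would put in settings.py; a minimal sketch (names are hypothetical) is:
        #
        #     MIDDLEWARE = [
        #         'beeline.middleware.django.HoneyMiddleware',
        #         # ... other middleware ...
        #     ]
        #
        # with beeline.init(writekey='...', dataset='...', service_name='...') run
        # once at startup (e.g. from wsgi.py).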
45 | from django.conf import settings # pylint: disable=bad-option-value,import-outside-toplevel 46 | from django.utils.functional import empty # pylint: disable=bad-option-value,import-outside-toplevel 47 | assert not settings.configured 48 | # On shutdown: 49 | self.addCleanup(lambda: setattr(settings, "_wrapped", empty)) 50 | settings.configure( 51 | MIDDLEWARE=['beeline.middleware.django.HoneyMiddlewareHttp'], 52 | ALLOWED_HOSTS=['testserver'], 53 | ROOT_URLCONF=( 54 | url("^hello/(?P[^/]+)/$", self._view, name="greet"), 55 | url("^stream/hello/(?P[^/]+)/$", self._streaming_view, name="stream_greet"), 56 | ), 57 | ) 58 | 59 | def _view(self, request, *args, **kwargs): 60 | return HttpResponse(kwargs["greetee"], status=200) 61 | 62 | def _streaming_view(self, request, *args, **kwargs): 63 | def stream(): 64 | yield b"hello " 65 | yield kwargs["greetee"] 66 | return StreamingHttpResponse(stream(), status=200) 67 | 68 | def test_middleware(self): 69 | mock_trace = Mock() 70 | self.m_gbl.propagate_and_start_trace.return_value = mock_trace 71 | 72 | response = Client().get('/hello/world/') 73 | self.assertEqual(response.content, b"world") 74 | 75 | self.m_gbl.add_context_field.assert_has_calls([ 76 | call("django.view_func", "_view"), 77 | call("request.route", "^hello/(?P[^/]+)/$"), 78 | ]) 79 | self.m_gbl.finish_trace.assert_called_once_with(mock_trace) 80 | 81 | def test_streaming_middleware(self): 82 | mock_trace = Mock() 83 | self.m_gbl.propagate_and_start_trace.return_value = mock_trace 84 | 85 | response = Client().get('/stream/hello/world/') 86 | self.m_gbl.finish_trace.assert_not_called() 87 | content = b"".join(response.streaming_content) 88 | self.m_gbl.finish_trace.assert_called_once_with(mock_trace) 89 | self.assertEqual(content, b"hello world") 90 | -------------------------------------------------------------------------------- /beeline/middleware/flask/__init__.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import threading 3 | 4 | import beeline 5 | import flask # to avoid namespace collision with request vs Request 6 | from beeline.propagation import Request 7 | from flask import current_app, signals 8 | # needed to build a request object from environ in the middleware 9 | from werkzeug.wrappers import Request 10 | from beeline.middleware.wsgi import WSGIRequest 11 | 12 | 13 | class HoneyMiddleware(object): 14 | 15 | def __init__(self, app, db_events=True): 16 | self.app = app 17 | self.app.before_request(self._before_request) 18 | if signals.signals_available: 19 | self.app.teardown_request(self._teardown_request) 20 | app.wsgi_app = HoneyWSGIMiddleware(app.wsgi_app) 21 | if db_events: 22 | app = HoneyDBMiddleware(app) 23 | 24 | def _before_request(self): 25 | beeline.add_field("request.route", flask.request.endpoint) 26 | 27 | def _teardown_request(self, exception): 28 | if exception: 29 | beeline.add_field('request.error_detail', 30 | beeline.internal.stringify_exception(exception)) 31 | beeline.add_field('request.error', str(type(exception))) 32 | 33 | beeline.internal.send_event() 34 | 35 | 36 | class HoneyWSGIMiddleware(object): 37 | 38 | def __init__(self, app): 39 | self.app = app 40 | 41 | def __call__(self, environ, start_response): 42 | req = Request(environ, shallow=True) 43 | wr = WSGIRequest("flask", environ) 44 | 45 | root_span = beeline.propagate_and_start_trace(wr.request_context(), wr) 46 | 47 | def _start_response(status, headers, *args): 48 | status_code = int(status[0:4]) 49 | 
beeline.add_context_field("response.status_code", status_code) 50 | if not signals.signals_available: 51 | beeline.finish_trace(root_span) 52 | 53 | return start_response(status, headers, *args) 54 | 55 | return self.app(environ, _start_response) 56 | 57 | 58 | class HoneyDBMiddleware(object): 59 | 60 | def __init__(self, app=None): 61 | self.app = app 62 | if app is not None: 63 | self.init_app(app) 64 | 65 | self.state = threading.local() 66 | self.state.span = None 67 | 68 | def init_app(self, app): 69 | try: 70 | from sqlalchemy.engine import Engine # pylint: disable=bad-option-value,import-outside-toplevel 71 | from sqlalchemy.event import listen # pylint: disable=bad-option-value,import-outside-toplevel 72 | 73 | listen(Engine, 'before_cursor_execute', self.before_cursor_execute) 74 | listen(Engine, 'after_cursor_execute', self.after_cursor_execute) 75 | listen(Engine, 'handle_error', self.handle_error) 76 | except ImportError: 77 | pass 78 | 79 | def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): 80 | if not current_app: 81 | return 82 | 83 | params = [] 84 | 85 | # the type of parameters passed in varies depending on DB - handle list, dict, and tuple 86 | if type(parameters) == tuple or type(parameters) == list: 87 | for param in parameters: 88 | if type(param) == datetime.datetime: 89 | param = param.isoformat() 90 | params.append(param) 91 | elif type(parameters) == dict: 92 | for k, v in parameters.items(): 93 | param = f"{k}=" 94 | if type(v) == datetime.datetime: 95 | v = v.isoformat() 96 | param += str(v) 97 | params.append(param) 98 | 99 | self.state.span = beeline.start_span(context={ 100 | "name": "flask_db_query", 101 | "type": "db", 102 | "db.query": statement, 103 | "db.query_args": params, 104 | }) 105 | 106 | self.query_start_time = datetime.datetime.now() 107 | 108 | def after_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): 109 | if not current_app: 110 | return 111 | 112 | fields = { 113 | "db.last_insert_id": getattr(cursor, 'lastrowid', None), 114 | "db.rows_affected": cursor.rowcount, 115 | } 116 | 117 | # only try to calculate query duration if we have a start time 118 | if self.query_start_time: 119 | query_duration = datetime.datetime.now() - self.query_start_time 120 | fields["db.duration"] = query_duration.total_seconds() * 1000 121 | 122 | beeline.add_context(fields) 123 | if self.state.span: 124 | beeline.finish_span(self.state.span) 125 | self.state.span = None 126 | 127 | def handle_error(self, context): 128 | if not current_app: 129 | return 130 | 131 | beeline.add_context_field( 132 | "db.error", beeline.internal.stringify_exception(context.original_exception)) 133 | if self.state.span: 134 | beeline.finish_span(self.state.span) 135 | self.state.span = None 136 | -------------------------------------------------------------------------------- /beeline/middleware/flask/test_flask.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from flask import Flask 3 | from mock import Mock, patch, ANY 4 | 5 | from beeline.middleware.flask import HoneyWSGIMiddleware, HoneyDBMiddleware 6 | 7 | 8 | class SimpleWSGITest(unittest.TestCase): 9 | def setUp(self): 10 | self.addCleanup(patch.stopall) 11 | self.m_gbl = patch('beeline.middleware.flask.beeline').start() 12 | 13 | def test_call_middleware(self): 14 | ''' Just call the middleware and ensure that the code runs ''' 15 | mock_app = Mock() 16 | mock_resp = Mock() 17 | mock_trace = 
Mock() 18 | mock_environ = {} 19 | self.m_gbl.propagate_and_start_trace.return_value = mock_trace 20 | 21 | mw = HoneyWSGIMiddleware(mock_app) 22 | mw({}, mock_resp) 23 | self.m_gbl.propagate_and_start_trace.assert_called_once() 24 | 25 | mock_app.assert_called_once_with(mock_environ, ANY) 26 | 27 | # get the response function passed to the app 28 | resp_func = mock_app.mock_calls[0][1][1] 29 | # call it to make sure it does what we want 30 | # the values here don't really matter 31 | resp_func("200", 2) 32 | 33 | mock_resp.assert_called_once_with("200", 2) 34 | self.m_gbl.finish_trace.assert_called_once_with(mock_trace) 35 | 36 | 37 | class HoneyDBMiddlewareTest(unittest.TestCase): 38 | def setUp(self): 39 | self.app = Flask('test') 40 | 41 | def test_before_cursor_execute(self): 42 | with self.app.app_context(): 43 | with patch("beeline.middleware.flask.beeline") as beeline: 44 | mw = HoneyDBMiddleware(self.app.app_context) 45 | mw.before_cursor_execute( 46 | conn=Mock(name="conn"), 47 | cursor=Mock(name="cursor"), 48 | statement="SELECT * FROM widgets WHERE ID IN :widget_ids", 49 | parameters={'widget_ids': (1, 2)}, 50 | context=Mock(name="context"), 51 | executemany=False 52 | ) 53 | beeline.start_span.assert_called_with( 54 | context={ 55 | 'name': 'flask_db_query', 56 | 'type': 'db', 57 | 'db.query': 'SELECT * FROM widgets WHERE ID IN :widget_ids', 58 | 'db.query_args': ['widget_ids=(1, 2)'] 59 | } 60 | ) 61 | -------------------------------------------------------------------------------- /beeline/middleware/werkzeug/__init__.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | from beeline.propagation import Request 3 | from beeline.middleware.wsgi import WSGIRequest 4 | 5 | 6 | class HoneyWSGIMiddleware(object): 7 | 8 | def __init__(self, app): 9 | self.app = app 10 | 11 | def __call__(self, environ, start_response): 12 | wr = WSGIRequest("werkzeug", environ) 13 | 14 | root_span = beeline.propagate_and_start_trace(wr.request_context(), wr) 15 | 16 | def _start_response(status, headers, *args): 17 | beeline.add_context_field("response.status_code", status) 18 | beeline.finish_trace(root_span) 19 | 20 | return start_response(status, headers, *args) 21 | 22 | return self.app(environ, _start_response) 23 | -------------------------------------------------------------------------------- /beeline/middleware/werkzeug/test_werkzeug.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from mock import Mock, patch, ANY 3 | 4 | from beeline.middleware.werkzeug import HoneyWSGIMiddleware 5 | 6 | 7 | class SimpleWSGITest(unittest.TestCase): 8 | def setUp(self): 9 | self.addCleanup(patch.stopall) 10 | self.m_gbl = patch('beeline.middleware.werkzeug.beeline').start() 11 | 12 | def test_call_middleware(self): 13 | ''' Just call the middleware and ensure that the code runs ''' 14 | mock_app = Mock() 15 | mock_resp = Mock() 16 | mock_trace = Mock() 17 | mock_environ = {} 18 | self.m_gbl.propagate_and_start_trace.return_value = mock_trace 19 | 20 | mw = HoneyWSGIMiddleware(mock_app) 21 | mw({}, mock_resp) 22 | self.m_gbl.propagate_and_start_trace.assert_called_once() 23 | 24 | mock_app.assert_called_once_with(mock_environ, ANY) 25 | 26 | # get the response function passed to the app 27 | resp_func = mock_app.mock_calls[0][1][1] 28 | # call it to make sure it does what we want 29 | # the values here don't really matter 30 | resp_func(1, 2) 31 | 32 | mock_resp.assert_called_once_with(1, 2) 33 
| self.m_gbl.finish_trace.assert_called_once_with(mock_trace) 34 | -------------------------------------------------------------------------------- /beeline/middleware/wsgi.py: -------------------------------------------------------------------------------- 1 | from beeline.propagation import Request 2 | 3 | 4 | class WSGIRequest(Request): 5 | def __init__(self, mwname, environ): 6 | self._mwname = mwname 7 | self._environ = environ 8 | 9 | def header(self, key): 10 | # FIXME: Is this .upper strictly necessary? Does environ already do it for us? 11 | lookup_key = "HTTP_" + key.upper().replace('-', '_') 12 | return self._environ.get(lookup_key) 13 | 14 | def method(self): 15 | return self._environ.get('REQUEST_METHOD') 16 | 17 | def scheme(self): 18 | return self._environ.get('wsgi.url_scheme') 19 | 20 | def host(self): 21 | return self._environ.get('HTTP_HOST') 22 | 23 | def path(self): 24 | return self._environ.get('PATH_INFO') 25 | 26 | def query(self): 27 | return self._environ.get('QUERY_STRING') 28 | 29 | def middleware_request(self): 30 | return self._environ 31 | 32 | def request_context(self): 33 | request_method = self._environ.get('REQUEST_METHOD') 34 | if request_method: 35 | trace_name = f"{self._mwname}_http_{request_method.lower()}" 36 | else: 37 | trace_name = f"{self._mwname}_http" 38 | 39 | return { 40 | "name": trace_name, 41 | "type": "http_server", 42 | "request.host": self._environ.get('HTTP_HOST'), 43 | "request.method": request_method, 44 | "request.path": self._environ.get('PATH_INFO'), 45 | "request.remote_addr": self._environ.get('REMOTE_ADDR'), 46 | "request.content_length": self._environ.get('CONTENT_LENGTH', 0), 47 | "request.user_agent": self._environ.get('HTTP_USER_AGENT'), 48 | "request.scheme": self._environ.get('wsgi.url_scheme'), 49 | "request.query": self._environ.get('QUERY_STRING') 50 | } 51 | -------------------------------------------------------------------------------- /beeline/patch/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/honeycombio/beeline-python/b8c96386964749c60bcd865d10f31148f970fec1/beeline/patch/__init__.py -------------------------------------------------------------------------------- /beeline/patch/jinja2.py: -------------------------------------------------------------------------------- 1 | from wrapt import wrap_function_wrapper 2 | import beeline 3 | 4 | 5 | def _render_template(fn, instance, args, kwargs): 6 | span = beeline.start_span(context={ 7 | "name": "jinja2_render_template", 8 | "template.name": instance.name or "[string]", 9 | }) 10 | 11 | try: 12 | return fn(*args, **kwargs) 13 | finally: 14 | beeline.finish_span(span) 15 | 16 | 17 | wrap_function_wrapper('jinja2', 'Template.render', _render_template) 18 | -------------------------------------------------------------------------------- /beeline/patch/requests.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | from wrapt import wrap_function_wrapper 3 | import requests 4 | # needed for pyflakes 5 | assert requests 6 | 7 | 8 | def request(_request, instance, args, kwargs): 9 | span = beeline.start_span(context={"meta.type": "http_client"}) 10 | 11 | b = beeline.get_beeline() 12 | if b and b.http_trace_propagation_hook is not None: 13 | new_headers = beeline.http_trace_propagation_hook() 14 | if new_headers: 15 | b.log( 16 | "requests lib - adding trace context to outbound request: %s", new_headers) 17 | 
instance.headers.update(new_headers) 18 | else: 19 | b.log("requests lib - no trace context found") 20 | 21 | try: 22 | resp = None 23 | 24 | # Required as Python treats the `or` keyword differently in string 25 | # interpolation vs. when assigning a variable. 26 | method = kwargs.get('method') or args[0] 27 | 28 | beeline.add_context({ 29 | "name": f"requests_{method}", 30 | "request.method": method, 31 | "request.url": kwargs.get('url') or args[1], 32 | }) 33 | resp = _request(*args, **kwargs) 34 | return resp 35 | except Exception as e: 36 | beeline.add_context({ 37 | "request.error_type": str(type(e)), 38 | "request.error": beeline.internal.stringify_exception(e), 39 | }) 40 | raise 41 | finally: 42 | if resp is not None: 43 | content_type = resp.headers.get('content-type') 44 | if content_type: 45 | beeline.add_context_field( 46 | "response.content_type", content_type) 47 | content_length = resp.headers.get('content-length') 48 | if content_length: 49 | beeline.add_context_field( 50 | "response.content_length", content_length) 51 | if hasattr(resp, 'status_code'): 52 | beeline.add_context_field( 53 | "response.status_code", resp.status_code) 54 | beeline.finish_span(span) 55 | 56 | 57 | wrap_function_wrapper('requests.sessions', 'Session.request', request) 58 | -------------------------------------------------------------------------------- /beeline/patch/test_jinja2.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import unittest 3 | from mock import Mock, patch 4 | 5 | import jinja2 6 | import beeline.patch.jinja2 7 | assert beeline.patch.jinja2 # make pyflake stop complainings 8 | 9 | 10 | class TestJinja2Patch(unittest.TestCase): 11 | def test_wrapper_executes(self): 12 | 13 | with patch('beeline.patch.jinja2.beeline') as m_beeline: 14 | m_span = Mock() 15 | m_beeline.start_span.return_value = m_span 16 | 17 | t = jinja2.Template("my template") 18 | t.render() 19 | m_beeline.start_span.assert_called_once_with(context={ 20 | "name": "jinja2_render_template", 21 | "template.name": "[string]", 22 | }) 23 | m_beeline.finish_span.assert_called_once_with(m_span) 24 | 25 | m_beeline.reset_mock() 26 | m_beeline.start_span.return_value = m_span 27 | 28 | t = jinja2.Template("my template") 29 | t.name = 'foo' 30 | t.render() 31 | m_beeline.start_span.assert_called_once_with(context={ 32 | "name": "jinja2_render_template", 33 | "template.name": "foo", 34 | }) 35 | m_beeline.finish_span.assert_called_once_with(m_span) 36 | -------------------------------------------------------------------------------- /beeline/patch/test_requests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from mock import Mock, patch 3 | 4 | import beeline 5 | 6 | 7 | class TestRequestsPatch(unittest.TestCase): 8 | def test_request_fn_injects_headers_and_returns(self): 9 | from beeline.patch.requests import request # pylint: disable=bad-option-value,import-outside-toplevel 10 | 11 | with patch('beeline.get_beeline') as m_bl: 12 | bl = Mock() 13 | m_bl.return_value = bl 14 | 15 | trace_context = "1;trace_id=foo,parent_id=bar,context=base64value==" 16 | 17 | bl.tracer_impl.http_trace_propagation_hook.return_value = { 18 | 'X-Honeycomb-Trace': trace_context 19 | } 20 | 21 | # this is the class instance (Session object) that is passed to our request function 22 | # by wrapt 23 | m_session = Mock() 24 | m_session.headers = {} 25 | 26 | # this is our request call that's being wrapped 
27 | m_request = Mock() 28 | m_request.return_value = Mock( 29 | headers={'content-type': 'application/json', 'content-length': 23}, status_code=500) 30 | args = ['get'] 31 | kwargs = {'url': 'http://example.com'} 32 | ret = request(m_request, m_session, args, kwargs) 33 | 34 | m_request.assert_called_once_with(*args, **kwargs) 35 | self.assertEqual(ret, m_request.return_value) 36 | self.assertEqual( 37 | m_session.headers['X-Honeycomb-Trace'], trace_context) 38 | 39 | def test_request_fn_injects_correct_context(self): 40 | ''' confirm that the injected trace context references the child span wrapping the request call ''' 41 | from beeline.patch.requests import request # pylint: disable=bad-option-value,import-outside-toplevel 42 | 43 | _beeline = beeline.Beeline(transmission_impl=Mock()) 44 | # prevent spans from being sent/closed so we can get at the span generated by the request patch 45 | _beeline.tracer_impl.finish_span = Mock() 46 | 47 | with patch('beeline.get_beeline') as m_bl: 48 | m_bl.return_value = _beeline 49 | 50 | with _beeline.tracer("parent", trace_id="abc"): 51 | parent_span = _beeline.tracer_impl.get_active_span() 52 | 53 | # this is the class instance (Session object) that is passed to our request function by wrapt 54 | m_session = Mock() 55 | m_session.headers = {} 56 | 57 | # this is our request call that's being wrapped 58 | m_request = Mock() 59 | m_request.return_value = Mock( 60 | headers={'content-type': 'application/json', 'content-length': 23}, status_code=500) 61 | args = ['get'] 62 | kwargs = {'url': 'http://example.com'} 63 | ret = request(m_request, m_session, args, kwargs) 64 | 65 | m_request.assert_called_once_with(*args, **kwargs) 66 | self.assertEqual(ret, m_request.return_value) 67 | 68 | self.assertEqual(len(_beeline.tracer_impl._trace.stack), 2) 69 | child_span = _beeline.tracer_impl.get_active_span() 70 | # we should have two distinct spans in this trace 71 | self.assertNotEqual(child_span.id, parent_span.id) 72 | self.assertEqual(child_span.parent_id, parent_span.id) 73 | 74 | trace_context = m_session.headers['X-Honeycomb-Trace'] 75 | trace_id, parent_id, _ = beeline.trace.unmarshal_trace_context( 76 | trace_context) 77 | # confirm the trace context parent is the child span, not the parent span 78 | self.assertEqual(child_span.trace_id, trace_id) 79 | self.assertEqual(child_span.id, parent_id) 80 | # should be the same trace 81 | self.assertEqual(parent_span.trace_id, trace_id) 82 | 83 | def test_request_fn_works_without_init(self): 84 | ''' verify that the requests wrapper works even if the beeline is not initialized ''' 85 | from beeline.patch.requests import request # pylint: disable=bad-option-value,import-outside-toplevel 86 | 87 | # this is the class instance (Session object) that is passed to our request function by wrapt 88 | m_session = Mock() 89 | m_session.headers = {} 90 | 91 | m_request = Mock() 92 | m_request.return_value = Mock( 93 | headers={'content-type': 'application/json', 'content-length': 23}, status_code=500) 94 | args = ['get'] 95 | kwargs = {'url': 'http://example.com'} 96 | ret = request(m_request, m_session, args, kwargs) 97 | 98 | m_request.assert_called_once_with(*args, **kwargs) 99 | self.assertEqual(ret, m_request.return_value) 100 | -------------------------------------------------------------------------------- /beeline/patch/test_urllib.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import unittest 3 | from mock import Mock, patch 
4 | 5 | import urllib 6 | 7 | 8 | class TestUrllibPatch(unittest.TestCase): 9 | def test_request_fn_injects_headers_and_returns(self): 10 | from beeline.patch.urllib import _urllibopen # pylint: disable=bad-option-value,import-outside-toplevel 11 | 12 | with patch('beeline.get_beeline') as m_bl: 13 | bl = Mock() 14 | m_bl.return_value = bl 15 | 16 | trace_context = "1;trace_id=foo,parent_id=bar,context=base64value==" 17 | 18 | bl.tracer_impl.http_trace_propagation_hook.return_value = { 19 | 'X-Honeycomb-Trace': trace_context 20 | } 21 | # this is our request call that's being wrapped 22 | m_urlopen = Mock() 23 | m_urlopen.return_value = Mock( 24 | headers={'content-type': 'application/json', 'content-length': 23}, status_code=500) 25 | args = ('https://example.com',) 26 | kwargs = {} 27 | ret = _urllibopen(m_urlopen, None, args, kwargs) 28 | 29 | # ensure our arg gets modified and header set before the real function is called 30 | self.assertEqual( 31 | type(m_urlopen.call_args.args[0]), urllib.request.Request) 32 | self.assertEqual( 33 | m_urlopen.call_args.args[0].headers['X-Honeycomb-Trace'], trace_context) 34 | m_urlopen.asset_called_once() 35 | m_urlopen.reset_mock() 36 | 37 | # ensure we return a response 38 | self.assertEqual(ret, m_urlopen.return_value) 39 | 40 | # test case with Request object 41 | m_urlopen.return_value = Mock( 42 | headers={'content-type': 'application/json', 'content-length': 23}, status_code=500) 43 | req = urllib.request.Request('https://example.com/2') 44 | args = [req] 45 | ret = _urllibopen(m_urlopen, None, args, kwargs) 46 | self.assertEqual(type(args[0]), urllib.request.Request) 47 | self.assertEqual(args[0].full_url, 'https://example.com/2') 48 | self.assertEqual( 49 | args[0].headers['X-Honeycomb-Trace'], trace_context) 50 | self.assertEqual(ret, m_urlopen.return_value) 51 | m_urlopen.assert_called_once_with(*args, **kwargs) 52 | -------------------------------------------------------------------------------- /beeline/patch/tornado.py: -------------------------------------------------------------------------------- 1 | ''' patches base tornado classes to add honeycomb instrumentation ''' 2 | 3 | from wrapt import wrap_function_wrapper 4 | import beeline 5 | import tornado 6 | assert tornado # for pyflakes 7 | 8 | 9 | def log_request(_log_request, instance, args, kwargs): 10 | try: 11 | # expecting signature `log_request(self, handler)` 12 | if len(args) == 1: 13 | handler = args[0] 14 | beeline.send_now({ 15 | "duration_ms": handler.request.request_time() * 1000.0, 16 | "request.method": handler.request.method, 17 | "request.remote_addr": handler.request.remote_ip, 18 | "request.path": handler.request.uri, 19 | "request.query": handler.request.query, 20 | "request.host": handler.request.headers.get('Host'), 21 | "response.status_code": handler.get_status(), 22 | }) 23 | except Exception: 24 | pass 25 | finally: 26 | _log_request(*args, **kwargs) 27 | 28 | 29 | def log_exception(_log_exception, instance, args, kwargs): 30 | try: 31 | # expecting signature `log_exception(self, typ, value, tb)`` 32 | if len(args) == 3: 33 | value = args[2] 34 | beeline.send_now({ 35 | "request.method": instance.request.method, 36 | "request.path": instance.request.uri, 37 | "request.remote_addr": instance.request.remote_ip, 38 | "request.query": instance.request.query, 39 | "request.host": instance.request.get('Host'), 40 | "request.error": type(value).__name__, 41 | "request.error_detail": beeline.internal.stringify_exception(value), 42 | }) 43 | except Exception: 44 | 
pass 45 | finally: 46 | _log_exception(*args, **kwargs) 47 | 48 | 49 | wrap_function_wrapper('tornado.web', 'Application.log_request', log_request) 50 | wrap_function_wrapper( 51 | 'tornado.web', 'RequestHandler.log_exception', log_exception) 52 | -------------------------------------------------------------------------------- /beeline/patch/urllib.py: -------------------------------------------------------------------------------- 1 | from wrapt import wrap_function_wrapper 2 | import beeline 3 | import beeline.propagation 4 | import urllib.request 5 | 6 | 7 | def _urllibopen(_urlopen, instance, args, kwargs): 8 | # urlopen accepts either a string URL or a Request object as its first arg 9 | # It's easier to process the info contained in the request and modify it 10 | # by converting the URL string into a Request 11 | if type(args[0]) != urllib.request.Request: 12 | args = (urllib.request.Request(args[0]),) + tuple(args[1:]) 13 | 14 | span = beeline.start_span(context={"meta.type": "http_client"}) 15 | 16 | b = beeline.get_beeline() 17 | if b and b.http_trace_propagation_hook is not None: 18 | new_headers = beeline.http_trace_propagation_hook() 19 | if new_headers: 20 | # Merge the new headers into the existing headers for the outbound request 21 | b.log( 22 | "urllib lib - adding trace context to outbound request: %s", new_headers) 23 | args[0].headers.update(new_headers) 24 | 25 | try: 26 | resp = None 27 | beeline.add_context({ 28 | "name": f"urllib_{args[0].get_method()}", 29 | "request.method": args[0].get_method(), 30 | "request.uri": args[0].full_url 31 | }) 32 | resp = _urlopen(*args, **kwargs) 33 | return resp 34 | except Exception as e: 35 | beeline.add_context({ 36 | "request.error_type": str(type(e)), 37 | "request.error": beeline.internal.stringify_exception(e), 38 | }) 39 | raise 40 | finally: 41 | if resp: 42 | beeline.add_context_field("response.status_code", resp.status) 43 | content_type = resp.getheader('content-type') 44 | if content_type: 45 | beeline.add_context_field( 46 | "response.content_type", content_type) 47 | content_length = resp.getheader('content-length') 48 | if content_length: 49 | beeline.add_context_field( 50 | "response.content_length", content_length) 51 | 52 | beeline.finish_span(span) 53 | 54 | 55 | # Note that this only patches urllib.request.urlopen, not 56 | # http.client.HTTPConnection. The latter is a lot more of a pain to figure 57 | # out what, exactly, the lifetime of the span ought to be -- but most people 58 | # who plan to block and do nothing else use urlopen, anyway. 59 | wrap_function_wrapper('urllib.request', 'urlopen', _urllibopen) 60 | -------------------------------------------------------------------------------- /beeline/propagation/__init__.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | import beeline 3 | 4 | 5 | def propagate_dataset(): 6 | return bool 7 | 8 | 9 | class PropagationContext(object): 10 | ''' 11 | PropagationContext represents information that can either be read from, or 12 | propagated via standard trace propagation headers such as X-Honeycomb-Trace 13 | and W3C headers. http_trace_parser_hooks generate this from requests, and 14 | http_trace_propagation_hooks use this to generate a set of headers for 15 | outbound HTTP requests. 
16 | ''' 17 | 18 | def __init__(self, trace_id, parent_id, trace_fields={}, dataset=None): 19 | self.trace_id = trace_id 20 | self.parent_id = parent_id 21 | self.trace_fields = trace_fields 22 | self.dataset = dataset 23 | 24 | 25 | class Request(object, metaclass=ABCMeta): 26 | ''' 27 | beeline.propagation.Request is an abstract class that defines the interface that should 28 | be used by middleware to pass request information into http_trace_parser_hooks. It should 29 | at a minimum contain the equivalent of HTTP headers, and optionally include other HTTP 30 | information such as method, schema, host, path, and query. The `middleware_request` method 31 | returns a middleware-specific request object or equivalent that can be used by custom hooks 32 | to extract additional information to be added to the trace or propagated. 33 | ''' 34 | 35 | @abstractmethod 36 | def header(self, key): 37 | """ 38 | Get the value associated with the specified key, transformed as necessary for the 39 | transport and middleware. 40 | """ 41 | pass 42 | 43 | @abstractmethod 44 | def method(self): 45 | """ 46 | For HTTP requests, the HTTP method (GET, PUT, etc.) of the request. 47 | """ 48 | pass 49 | 50 | @abstractmethod 51 | def scheme(self): 52 | """ 53 | For HTTP requests, the scheme (http, https, etc.) 54 | """ 55 | pass 56 | 57 | @abstractmethod 58 | def host(self): 59 | """ 60 | For HTTP requests, the host part of the URL (e.g. www.honeycomb.io, api.honeycomb.io:80) 61 | """ 62 | pass 63 | 64 | @abstractmethod 65 | def path(self): 66 | """ 67 | For HTTP requests, the path part of the URL (e.g. /1/event/) 68 | """ 69 | pass 70 | 71 | @abstractmethod 72 | def query(self): 73 | """ 74 | For HTTP requests, the query part of the URL (e.g. key1=value1&key2=value2) 75 | """ 76 | pass 77 | 78 | @abstractmethod 79 | def middleware_request(self): 80 | """ 81 | The middleware-specific source of request data (for middleware-specific custom hooks) 82 | """ 83 | pass 84 | 85 | 86 | class DictRequest(Request): 87 | ''' 88 | Basic dictionary request that just takes in a dictionary of headers, and a dictionary 89 | of request properties. Primarily for testing. 90 | ''' 91 | 92 | def __init__(self, headers, props={}): 93 | self._headers = headers 94 | self._props = props 95 | self._keymap = {k.lower(): k for k in self._headers.keys()} 96 | 97 | def header(self, key): 98 | lookup_key = key.lower() 99 | if lookup_key not in self._keymap: 100 | return None 101 | lookup_key = self._keymap[lookup_key] 102 | return self._headers[lookup_key] 103 | 104 | def method(self): 105 | return self._props['method'] 106 | 107 | def scheme(self): 108 | return self._props['scheme'] 109 | 110 | def host(self): 111 | return self._props['host'] 112 | 113 | def path(self): 114 | return self._props['path'] 115 | 116 | def query(self): 117 | return self._props['query'] 118 | 119 | def middleware_request(self): 120 | return { 121 | 'headers': self._headers, 122 | 'props': self._props 123 | } 124 | -------------------------------------------------------------------------------- /beeline/propagation/default.py: -------------------------------------------------------------------------------- 1 | from beeline.propagation import honeycomb, w3c 2 | 3 | 4 | def http_trace_parser_hook(request): 5 | """ 6 | Retrieves the propagation context out of the request. Uses the honeycomb header, with W3C header as fallback. 
7 | """ 8 | honeycomb_header_value = honeycomb.http_trace_parser_hook(request) 9 | w3c_header_value = w3c.http_trace_parser_hook(request) 10 | if honeycomb_header_value: 11 | return honeycomb_header_value 12 | else: 13 | return w3c_header_value 14 | 15 | 16 | def http_trace_propagation_hook(propagation_context): 17 | """ 18 | Given a propagation context, returns a dictionary of key value pairs that should be 19 | added to outbound requests (usually HTTP headers). Uses the honeycomb format. 20 | """ 21 | return honeycomb.http_trace_propagation_hook(propagation_context) 22 | -------------------------------------------------------------------------------- /beeline/propagation/honeycomb.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | from beeline.propagation import PropagationContext 3 | import base64 4 | import json 5 | from urllib.parse import quote, unquote 6 | 7 | 8 | def http_trace_parser_hook(request): 9 | ''' 10 | Retrieves the honeycomb propagation context out of the request. 11 | request must implement the beeline.propagation.Request abstract base class 12 | ''' 13 | trace_header = request.header('X-Honeycomb-Trace') 14 | if trace_header: 15 | try: 16 | trace_id, parent_id, context, dataset = unmarshal_propagation_context_with_dataset( 17 | trace_header) 18 | return PropagationContext(trace_id, parent_id, context, dataset) 19 | except Exception as e: 20 | beeline.internal.log( 21 | 'error attempting to extract trace context: %s', beeline.internal.stringify_exception(e)) 22 | return None 23 | 24 | 25 | def http_trace_propagation_hook(propagation_context): 26 | ''' 27 | Given a propagation context, returns a dictionary of key value pairs that should be 28 | added to outbound requests (usually HTTP headers) 29 | ''' 30 | if not propagation_context: 31 | return None 32 | 33 | return {"X-Honeycomb-Trace": marshal_propagation_context(propagation_context)} 34 | 35 | 36 | def marshal_propagation_context(propagation_context): 37 | ''' 38 | Given a propagation context, returns the contents of a trace header to be 39 | injected by middleware. 40 | ''' 41 | if not propagation_context: 42 | return None 43 | 44 | # FIXME: Since ALL trace fields are in propagation_context, we may want to strip 45 | # some automatically added trace fields that we DON'T want to propagate - e.g. request.* 46 | version = 1 47 | trace_fields = base64.b64encode(json.dumps( 48 | propagation_context.trace_fields).encode()).decode() 49 | 50 | components = [f"trace_id={propagation_context.trace_id}", 51 | f"parent_id={propagation_context.parent_id}", 52 | f"context={trace_fields}"] 53 | 54 | if beeline.propagation.propagate_dataset and propagation_context.dataset: 55 | components.insert(0, f"dataset={quote(propagation_context.dataset)}") 56 | 57 | join_components = ",".join(components) 58 | trace_header = f"{version};{join_components}" 59 | 60 | return trace_header 61 | 62 | 63 | def unmarshal_propagation_context(trace_header): 64 | """ 65 | Deprecated: Use beeline.propagation.honeycomb.unmarshal_propagation_context_with_dataset instead 66 | """ 67 | trace_id, parent_id, context, _dataset = unmarshal_propagation_context_with_dataset(trace_header) 68 | 69 | return trace_id, parent_id, context 70 | 71 | 72 | def unmarshal_propagation_context_with_dataset(trace_header): 73 | ''' 74 | Given the body of the `X-Honeycomb-Trace` header, returns the trace_id, 75 | parent_id, "context" and dataset. 
76 | ''' 77 | # the first value is the trace payload version 78 | # at this time there is only one version, but we should warn 79 | # if another version comes through 80 | version, data = trace_header.split(';', 1) 81 | if version != "1": 82 | beeline.internal.log( 83 | 'warning: trace_header version %s is unsupported', version) 84 | return None, None, None, None 85 | 86 | kv_pairs = data.split(',') 87 | 88 | trace_id, parent_id, context, dataset = None, None, None, None 89 | # Some beelines send "dataset" but we do not handle that yet 90 | for pair in kv_pairs: 91 | k, v = pair.split('=', 1) 92 | if k == 'trace_id': 93 | trace_id = v 94 | elif k == 'parent_id': 95 | parent_id = v 96 | elif k == 'context': 97 | context = json.loads(base64.b64decode(v.encode()).decode()) 98 | elif k == 'dataset' and beeline.propagation.propagate_dataset: 99 | dataset = unquote(v) 100 | 101 | # context should be a dict 102 | if context is None: 103 | context = {} 104 | 105 | return trace_id, parent_id, context, dataset 106 | -------------------------------------------------------------------------------- /beeline/propagation/test_honeycomb.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import beeline.propagation 3 | from beeline.propagation import DictRequest, PropagationContext 4 | import beeline.propagation.honeycomb as hc 5 | 6 | header_value = '1;trace_id=bloop,parent_id=scoop,context=e30K' 7 | 8 | 9 | class TestMarshalUnmarshal(unittest.TestCase): 10 | def test_roundtrip(self): 11 | '''Verify that we can successfully roundtrip (marshal and unmarshal)''' 12 | trace_id = "bloop" 13 | parent_id = "scoop" 14 | trace_fields = {"key": "value"} 15 | pc = PropagationContext(trace_id, parent_id, trace_fields) 16 | header = hc.marshal_propagation_context(pc) 17 | new_trace_id, new_parent_id, new_trace_fields = hc.unmarshal_propagation_context( 18 | header) 19 | self.assertEqual(trace_id, new_trace_id) 20 | self.assertEqual(parent_id, new_parent_id) 21 | self.assertEqual(trace_fields, new_trace_fields) 22 | 23 | def test_roundtrip_with_dataset(self): 24 | '''Verify that we can successfully roundtrip (marshal and unmarshal)''' 25 | beeline.propagation.propagate_dataset = True 26 | dataset = "blorp blorp" 27 | trace_id = "bloop" 28 | parent_id = "scoop" 29 | trace_fields = {"key": "value"} 30 | pc = PropagationContext(trace_id, parent_id, trace_fields, dataset) 31 | header = hc.marshal_propagation_context(pc) 32 | new_trace_id, new_parent_id, new_trace_fields, new_dataset = hc.unmarshal_propagation_context_with_dataset( 33 | header) 34 | self.assertEqual(dataset, new_dataset) 35 | self.assertEqual(trace_id, new_trace_id) 36 | self.assertEqual(parent_id, new_parent_id) 37 | self.assertEqual(trace_fields, new_trace_fields) 38 | 39 | def test_roundtrip_with_dataset_propagation_disabled(self): 40 | '''Verify that we can successfully roundtrip (marshal and unmarshal) without dataset propagation''' 41 | beeline.propagation.propagate_dataset = False 42 | dataset = "blorp blorp" 43 | trace_id = "bloop" 44 | parent_id = "scoop" 45 | trace_fields = {"key": "value"} 46 | pc = PropagationContext(trace_id, parent_id, trace_fields, dataset) 47 | header = hc.marshal_propagation_context(pc) 48 | new_trace_id, new_parent_id, new_trace_fields, new_dataset = hc.unmarshal_propagation_context_with_dataset( 49 | header) 50 | self.assertIsNone(new_dataset) 51 | self.assertEqual(trace_id, new_trace_id) 52 | self.assertEqual(parent_id, new_parent_id) 53 | 
self.assertEqual(trace_fields, new_trace_fields) 54 | 55 | 56 | class TestHoneycombHTTPTraceParserHook(unittest.TestCase): 57 | def test_has_header(self): 58 | '''Test that the hook properly parses honeycomb trace headers''' 59 | req = DictRequest({ 60 | # case shouldn't matter 61 | 'X-HoNEyComb-TrACE': header_value, 62 | }) 63 | pc = hc.http_trace_parser_hook(req) 64 | self.assertEqual(pc.trace_id, "bloop") 65 | self.assertEqual(pc.parent_id, "scoop") 66 | # FIXME: We should have a legitimate header with trace_field and dataset_id set 67 | 68 | def test_no_header(self): 69 | req = DictRequest({}) 70 | pc = hc.http_trace_parser_hook(req) 71 | self.assertIsNone(pc) 72 | 73 | 74 | class TestHoneycombHTTPTracePropagationHook(unittest.TestCase): 75 | def test_generates_correct_header(self): 76 | beeline.propagation.propagate_dataset = True 77 | dataset = "blorp blorp" 78 | trace_id = "bloop" 79 | parent_id = "scoop" 80 | trace_fields = {"key": "value"} 81 | pc = PropagationContext( 82 | trace_id, parent_id, trace_fields, dataset) 83 | headers = hc.http_trace_propagation_hook(pc) 84 | self.assertIn('X-Honeycomb-Trace', headers) 85 | self.assertEqual(headers['X-Honeycomb-Trace'], 86 | "1;dataset=blorp%20blorp,trace_id=bloop,parent_id=scoop,context=eyJrZXkiOiAidmFsdWUifQ==") 87 | 88 | def test_generates_correct_header_with_dataset_propagation_disabled(self): 89 | beeline.propagation.propagate_dataset = False 90 | dataset = "blorp blorp" 91 | trace_id = "bloop" 92 | parent_id = "scoop" 93 | trace_fields = {"key": "value"} 94 | pc = PropagationContext( 95 | trace_id, parent_id, trace_fields, dataset) 96 | headers = hc.http_trace_propagation_hook(pc) 97 | self.assertIn('X-Honeycomb-Trace', headers) 98 | self.assertEqual(headers['X-Honeycomb-Trace'], 99 | "1;trace_id=bloop,parent_id=scoop,context=eyJrZXkiOiAidmFsdWUifQ==") 100 | -------------------------------------------------------------------------------- /beeline/propagation/test_propagation.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import beeline.propagation 3 | 4 | header_value = '1;trace_id=bloop,parent_id=scoop,context=e30K' 5 | 6 | 7 | class TestDictRequest(unittest.TestCase): 8 | def test_headers(self): 9 | '''Test that we correctly deal with case sensitivity''' 10 | request = beeline.propagation.DictRequest({ 11 | # case shouldn't matter 12 | 'MixedCaseHeader': "value", 13 | 'UPPERCASEHEADER': "value" 14 | }, 15 | {}) 16 | value = request.header('mixedcaseheader') 17 | self.assertEqual(value, "value") 18 | value = request.header('upperCaseHeader') 19 | self.assertEqual(value, "value") 20 | 21 | def test_request_props(self): 22 | '''Test we correctly return request props''' 23 | request = beeline.propagation.DictRequest({ 24 | # case shouldn't matter 25 | 'MixedCaseHeader': "value", 26 | 'UPPERCASEHEADER': "value" 27 | }, 28 | { 29 | 'method': "GET", 30 | 'scheme': "http", 31 | 'host': "api.honeycomb.io", 32 | 'path': "/1/event", 33 | 'query': "key=value" 34 | }) 35 | 36 | self.assertEqual(request.method(), "GET") 37 | self.assertEqual(request.scheme(), "http") 38 | self.assertEqual(request.host(), "api.honeycomb.io") 39 | self.assertEqual(request.path(), "/1/event") 40 | self.assertEqual(request.query(), "key=value") 41 | -------------------------------------------------------------------------------- /beeline/propagation/test_w3c.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from beeline.propagation import 
DictRequest, PropagationContext 3 | from beeline.propagation import w3c 4 | 5 | _TEST_TRACEPARENT_HEADER = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00" 6 | _TEST_TRACEPARENT_HEADER_DEFAULT_TRACEFLAGS = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01" 7 | _TEST_TRACESTATE_HEADER = "foo=bar,bar=baz" 8 | 9 | _TEST_HEADERS = { 10 | "traceparent": _TEST_TRACEPARENT_HEADER, 11 | "tracestate": _TEST_TRACESTATE_HEADER 12 | } 13 | _TEST_TRACE_ID = "0af7651916cd43dd8448eb211c80319c" 14 | _TEST_PARENT_ID = "b7ad6b7169203331" 15 | _TEST_TRACE_FLAGS = "00" 16 | _TEST_TRACESTATE = "foo=bar,bar=baz" 17 | 18 | 19 | class TestW3CMarshalUnmarshal(unittest.TestCase): 20 | def test_roundtrip(self): 21 | '''Verify that we can successfully roundtrip (marshal and unmarshal)''' 22 | trace_fields = {"traceflags": _TEST_TRACE_FLAGS, 23 | "tracestate": _TEST_TRACESTATE} 24 | 25 | pc = PropagationContext(_TEST_TRACE_ID, _TEST_PARENT_ID, trace_fields) 26 | 27 | traceparent_header = w3c.marshal_traceparent(pc) 28 | tracestate_header = w3c.marshal_tracestate(pc) 29 | 30 | # Make sure marshalled headers are as we expect. 31 | self.assertEqual(_TEST_TRACEPARENT_HEADER, traceparent_header) 32 | self.assertEqual(_TEST_TRACESTATE_HEADER, tracestate_header) 33 | 34 | new_trace_id, new_parent_id, new_trace_flags = w3c.unmarshal_traceparent( 35 | traceparent_header) 36 | new_tracestate = w3c.unmarshal_tracestate(tracestate_header) 37 | 38 | # Check round-trip values are the same as start values. 39 | self.assertEqual(_TEST_TRACE_ID, new_trace_id) 40 | self.assertEqual(_TEST_PARENT_ID, new_parent_id) 41 | self.assertEqual(_TEST_TRACE_FLAGS, new_trace_flags) 42 | self.assertEqual(_TEST_TRACESTATE, new_tracestate) 43 | 44 | 45 | class TestW3CHTTPTraceParserHook(unittest.TestCase): 46 | def test_has_header(self): 47 | '''Test that the hook properly parses W3C trace headers''' 48 | req = DictRequest(_TEST_HEADERS) 49 | pc = w3c.http_trace_parser_hook(req) 50 | self.assertEqual(pc.trace_id, _TEST_TRACE_ID) 51 | self.assertEqual(pc.parent_id, _TEST_PARENT_ID) 52 | self.assertEqual(pc.trace_fields, { 53 | "tracestate": _TEST_TRACESTATE, 54 | "traceflags": _TEST_TRACE_FLAGS 55 | }) 56 | 57 | def test_no_header(self): 58 | req = DictRequest({}) 59 | pc = w3c.http_trace_parser_hook(req) 60 | self.assertIsNone(pc) 61 | 62 | 63 | class TestW3CHTTPTracePropagationHook(unittest.TestCase): 64 | def test_generates_correct_headers(self): 65 | pc = PropagationContext( 66 | _TEST_TRACE_ID, _TEST_PARENT_ID, {"traceflags": _TEST_TRACE_FLAGS, 67 | "tracestate": _TEST_TRACESTATE} 68 | ) 69 | headers = w3c.http_trace_propagation_hook(pc) 70 | self.assertIn('traceparent', headers) 71 | self.assertIn('tracestate', headers) 72 | self.assertEqual(headers['traceparent'], 73 | _TEST_TRACEPARENT_HEADER) 74 | self.assertEqual(headers['tracestate'], 75 | _TEST_TRACESTATE_HEADER) 76 | 77 | def test_defaults_trace_flags_to_sampled(self): 78 | pc = PropagationContext( 79 | _TEST_TRACE_ID, _TEST_PARENT_ID 80 | ) 81 | headers = w3c.http_trace_propagation_hook(pc) 82 | self.assertIn('traceparent', headers) 83 | self.assertEqual(headers['traceparent'], 84 | _TEST_TRACEPARENT_HEADER_DEFAULT_TRACEFLAGS) 85 | -------------------------------------------------------------------------------- /beeline/propagation/w3c.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | from beeline.propagation import PropagationContext 3 | import re 4 | 5 | # Cribbed from OpenTelemetry python implementation. 
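The traceparent value exercised by the tests above follows the W3C Trace Context layout that the regular expression below encodes: a 2-hex-digit version, a 32-hex-digit trace id, a 16-hex-digit parent (span) id, and 2 hex digits of flags, joined by dashes. A sketch using the functions defined later in this module:

```python
from beeline.propagation import PropagationContext, w3c

header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
trace_id, parent_id, trace_flags = w3c.unmarshal_traceparent(header)
assert trace_id == "0af7651916cd43dd8448eb211c80319c"
assert parent_id == "b7ad6b7169203331"
assert trace_flags == "01"

# Going the other way, the propagation hook re-emits traceparent (and tracestate,
# when present in the trace fields) for an outbound request.
pc = PropagationContext(trace_id, parent_id, {"traceflags": trace_flags})
print(w3c.http_trace_propagation_hook(pc))
# {'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01'}
```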
6 | _TRACEPARENT_HEADER_FORMAT = ( 7 | "^[ \t]*([0-9a-f]{2})-([0-9a-f]{32})-([0-9a-f]{16})-([0-9a-f]{2})" 8 | + "(-.*)?[ \t]*$" 9 | ) 10 | _TRACEPARENT_HEADER_FORMAT_RE = re.compile(_TRACEPARENT_HEADER_FORMAT) 11 | _EMPTY_TRACE_ID = "0" * 32 12 | _EMPTY_PARENT_ID = "0" * 16 13 | 14 | 15 | def http_trace_parser_hook(request): 16 | ''' 17 | Retrieves the w3c propagation context out of the request. 18 | request must implement the beeline.propagation.Request abstract base class 19 | ''' 20 | traceparent_header = request.header('traceparent') 21 | if not traceparent_header: 22 | return None 23 | tracestate_header = request.header('tracestate') 24 | 25 | trace_id = None 26 | parent_id = None 27 | try: 28 | trace_id, parent_id, trace_flags = unmarshal_traceparent( 29 | traceparent_header) 30 | tracestate = unmarshal_tracestate(tracestate_header) 31 | trace_fields = {} 32 | trace_fields['traceflags'] = trace_flags 33 | trace_fields['tracestate'] = tracestate 34 | return PropagationContext(trace_id, parent_id, trace_fields) 35 | except Exception as e: 36 | beeline.internal.log( 37 | 'error attempting to extract w3c trace: %s', beeline.internal.stringify_exception(e)) 38 | return None 39 | 40 | 41 | def http_trace_propagation_hook(propagation_context): 42 | ''' 43 | Given a propagation context, returns a dictionary of key value pairs that should be 44 | added to outbound requests (usually HTTP headers) 45 | ''' 46 | if not propagation_context: 47 | return None 48 | 49 | traceparent_header = marshal_traceparent(propagation_context) 50 | if not traceparent_header: 51 | return {} 52 | 53 | headers = {} 54 | headers["traceparent"] = traceparent_header 55 | 56 | tracestate_header = marshal_tracestate(propagation_context) 57 | 58 | if tracestate_header: 59 | headers['tracestate'] = tracestate_header 60 | return headers 61 | 62 | 63 | def marshal_traceparent(propagation_context): 64 | ''' 65 | Given a propagation context, returns the contents of a trace header to be 66 | injected by middleware. 67 | ''' 68 | if not propagation_context: 69 | return None 70 | 71 | trace_flags = propagation_context.trace_fields.get('traceflags') 72 | if not trace_flags: 73 | trace_flags = "01" 74 | 75 | traceparent_header = f"00-{propagation_context.trace_id}-{propagation_context.parent_id}-{trace_flags}" 76 | 77 | return traceparent_header 78 | 79 | 80 | def marshal_tracestate(propagation_context): 81 | ''' 82 | ''' 83 | if not propagation_context: 84 | return None 85 | 86 | tracestate_header = propagation_context.trace_fields.get('tracestate') 87 | return tracestate_header 88 | 89 | 90 | def unmarshal_traceparent(header): 91 | match = re.search(_TRACEPARENT_HEADER_FORMAT_RE, header) 92 | if not match: 93 | # Raise exception? 94 | return None 95 | version = match.group(1) 96 | trace_id = match.group(2) 97 | parent_id = match.group(3) 98 | trace_flags = match.group(4) 99 | 100 | if trace_id == _EMPTY_TRACE_ID or parent_id == _EMPTY_PARENT_ID: 101 | return None 102 | 103 | if version == "00": 104 | if match.group(5): 105 | return None 106 | if version == "ff": 107 | return None 108 | 109 | return trace_id, parent_id, trace_flags 110 | 111 | 112 | def unmarshal_tracestate(header): 113 | # We treat the tracestate header as an opaque blob, and don't parse it at all. 
114 | return header 115 | -------------------------------------------------------------------------------- /beeline/test_async.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | try: 3 | # The async functionality uses the contextvars module, added in 4 | # Python 3.7 5 | import contextvars 6 | except ImportError: 7 | contextvars = None 8 | if not contextvars: 9 | raise unittest.SkipTest("No contextvars failed. Skipping test_async") 10 | 11 | import asyncio 12 | import concurrent.futures 13 | import datetime 14 | import time 15 | import sys 16 | 17 | import beeline 18 | import beeline.aiotrace 19 | import beeline.trace 20 | 21 | 22 | def async_test(fn): 23 | """Decorator for making async methods usable as tests. 24 | 25 | This decorator also runs the async_setup method before each test, 26 | if it exists. 27 | 28 | """ 29 | 30 | def wrapper(self, *args, **kwargs): 31 | async def sequence(): 32 | if hasattr(self, "async_setup"): 33 | await self.async_setup() 34 | return await fn(self, *args, **kwargs) 35 | 36 | return asyncio.run(sequence(*args, **kwargs)) # pylint: disable=no-member 37 | 38 | return wrapper 39 | 40 | 41 | def span_data(span): 42 | """Utility for converting Span objects to dicts.""" 43 | name = span.event.fields().get("name") 44 | start = span.event.start_time 45 | duration = datetime.timedelta( 46 | milliseconds=span.event.fields()["duration_ms"] 47 | ) 48 | end = start + duration 49 | return { 50 | "name": name, 51 | "start": start, 52 | "end": end, 53 | "trace_id": span.trace_id, 54 | "span": span, 55 | } 56 | 57 | 58 | class TestTracerImplChoice(unittest.TestCase): 59 | def test_synchronous_tracer_should_be_used_by_default(self): 60 | """Verify that the SynchronousTracer implementation is chosen when a 61 | Beeline object is initialised outside of an asyncio loop. 62 | 63 | """ 64 | _beeline = beeline.Beeline() 65 | self.assertIsInstance( 66 | _beeline.tracer_impl, beeline.trace.SynchronousTracer 67 | ) 68 | 69 | @async_test 70 | async def test_asyncio_tracer_should_be_used_in_async_code(self): 71 | """Verify that the AsyncioTracer implementation is chosen when a 72 | Beeline object is initialised while running inside an asyncio 73 | loop. 74 | 75 | """ 76 | _beeline = beeline.Beeline() 77 | self.assertIsInstance( 78 | _beeline.tracer_impl, beeline.aiotrace.AsyncioTracer 79 | ) 80 | 81 | 82 | class TestAsynchronousTracer(unittest.TestCase): 83 | async def async_setup(self): 84 | self.finished_spans = [] 85 | 86 | def add_span(span): 87 | self.finished_spans.append(span_data(span)) 88 | 89 | self.beeline = beeline.Beeline() 90 | self.tracer = self.beeline.tracer_impl 91 | self.tracer._run_hooks_and_send = add_span 92 | 93 | @async_test 94 | async def test_tracing_in_new_tasks_should_work(self): 95 | """Test that basic AsyncioTracer functionality is present.""" 96 | trace = self.tracer.start_trace() 97 | self.tracer.finish_trace(trace) 98 | 99 | self.assertEqual(len(self.finished_spans), 1) 100 | 101 | @async_test 102 | async def test_traces_started_in_different_tasks_should_be_independent(self): 103 | """Fork off two tasks, each calling start_trace. 104 | 105 | The traces run simultaneously. This is expected to produce two 106 | independent traces without raising any exceptions. 
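A standalone sketch of the behaviour these tests rely on: constructing a Beeline inside a running event loop selects the asyncio-aware tracer, so concurrent tasks can carry independent trace state. Names and timings here are illustrative.

```python
import asyncio

import beeline
import beeline.aiotrace


async def main():
    bee = beeline.Beeline()
    # Created inside an event loop, the Beeline picks the asyncio tracer.
    assert isinstance(bee.tracer_impl, beeline.aiotrace.AsyncioTracer)

    trace = bee.tracer_impl.start_trace(context={"name": "root"})
    await asyncio.sleep(0.1)
    bee.tracer_impl.finish_trace(trace)


asyncio.run(main())
```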
107 | 108 | """ 109 | async def task0(): 110 | trace0 = self.tracer.start_trace(context={"name": "task0"}) 111 | await asyncio.sleep(0.2) 112 | self.tracer.finish_trace(trace0) 113 | 114 | async def task1(): 115 | await asyncio.sleep(0.1) 116 | trace1 = self.tracer.start_trace(context={"name": "task1"}) 117 | await asyncio.sleep(0.2) 118 | self.tracer.finish_trace(trace1) 119 | 120 | await asyncio.gather(task0(), task1()) 121 | 122 | self.assertEqual(len(self.finished_spans), 2) 123 | task0_span, task1_span = self.finished_spans # pylint: disable=unbalanced-tuple-unpacking 124 | 125 | # Check that the spans finished in the expected order. 126 | self.assertEqual(task0_span["name"], "task0") 127 | self.assertEqual(task1_span["name"], "task1") 128 | self.assertLess(task0_span["end"], task1_span["end"]) 129 | 130 | # Check that the task0 started before task1 131 | self.assertLess(task0_span["start"], task1_span["start"]) 132 | 133 | # Check that the task1 span started during the task0 span 134 | self.assertLess(task1_span["start"], task0_span["end"]) 135 | 136 | # Check that the task spans are both root spans 137 | self.assertTrue(task0_span["span"].is_root()) 138 | self.assertTrue(task1_span["span"].is_root()) 139 | 140 | @async_test 141 | async def test_new_tasks_should_trace_in_parallel(self): 142 | """Fork off two tasks after starting a trace. 143 | 144 | Both tasks record a span, overlapping with each other. Both 145 | spans are expected to have the root span as their parent. 146 | 147 | """ 148 | 149 | trace = self.tracer.start_trace(context={"name": "root"}) 150 | 151 | async def task0(): 152 | span0 = self.tracer.start_span(context={"name": "task0"}) 153 | await asyncio.sleep(0.2) 154 | self.tracer.finish_span(span0) 155 | 156 | async def task1(): 157 | await asyncio.sleep(0.1) 158 | span1 = self.tracer.start_span(context={"name": "task1"}) 159 | await asyncio.sleep(0.2) 160 | self.tracer.finish_span(span1) 161 | 162 | await asyncio.gather(task0(), task1()) 163 | 164 | self.tracer.finish_trace(trace) 165 | 166 | self.assertEqual(len(self.finished_spans), 3) 167 | task0_span, task1_span, root_span = self.finished_spans # pylint: disable=unbalanced-tuple-unpacking 168 | 169 | # Check that the spans finished in the expected order, with 170 | # the root span last. 171 | self.assertEqual(task0_span["name"], "task0") 172 | self.assertEqual(task1_span["name"], "task1") 173 | self.assertLess(task0_span["end"], task1_span["end"]) 174 | self.assertEqual(root_span["name"], "root") 175 | self.assertLessEqual(task1_span["end"], root_span["end"]) 176 | 177 | # Check that the root span was started before the others. 178 | self.assertLess(root_span["start"], task0_span["start"]) 179 | self.assertLess(root_span["start"], task1_span["start"]) 180 | 181 | # Check that the task0 started before task1 182 | self.assertLess(task0_span["start"], task1_span["start"]) 183 | 184 | # Check that the task1 span started during the task0 span 185 | self.assertLess(task1_span["start"], task0_span["end"]) 186 | 187 | # Check that the task spans are both children of the root span 188 | self.assertEqual(root_span["span"].id, task0_span["span"].parent_id) 189 | self.assertEqual(root_span["span"].id, task1_span["span"].parent_id) 190 | 191 | @async_test 192 | async def test_traced_decorators(self): 193 | """Fork off two tasks after starting a trace. 194 | 195 | This is the same as test_new_tasks_should_trace_in_parallel, 196 | except it uses the traced decorator to record the sub-spans. 
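The traced decorator used in this test also accepts coroutine functions; each awaited call records a child span of whatever trace is active in the calling task. A minimal sketch, assuming a beeline has already been initialised and the names are placeholders:

```python
import asyncio

import beeline


@beeline.traced(name="fetch_user")
async def fetch_user(user_id):
    await asyncio.sleep(0.1)
    return {"id": user_id}


async def handle_request():
    with beeline.tracer(name="handle_request"):
        users = await asyncio.gather(fetch_user(1), fetch_user(2))
        beeline.add_context_field("user_count", len(users))
```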
197 | 198 | """ 199 | trace = self.tracer.start_trace(context={"name": "root"}) 200 | 201 | @self.beeline.traced("task0") 202 | async def task0(): 203 | await asyncio.sleep(0.2) 204 | 205 | async def task1(): 206 | await asyncio.sleep(0.1) 207 | 208 | @self.beeline.traced("task1") 209 | async def decorated_fn(): 210 | await asyncio.sleep(0.2) 211 | 212 | await decorated_fn() 213 | 214 | await asyncio.gather(task0(), task1()) 215 | 216 | self.tracer.finish_trace(trace) 217 | 218 | self.assertEqual(len(self.finished_spans), 3) 219 | 220 | task0_span, task1_span, root_span = self.finished_spans # pylint: disable=unbalanced-tuple-unpacking 221 | 222 | # Check that the spans finished in the expected order, with 223 | # the root span last. 224 | self.assertEqual(task0_span["name"], "task0") 225 | self.assertEqual(task1_span["name"], "task1") 226 | self.assertLess(task0_span["end"], task1_span["end"]) 227 | self.assertEqual(root_span["name"], "root") 228 | self.assertLessEqual(task1_span["end"], root_span["end"]) 229 | 230 | # Check that the root span was started before the others. 231 | self.assertLess(root_span["start"], task0_span["start"]) 232 | self.assertLess(root_span["start"], task1_span["start"]) 233 | 234 | # Check that the task0 started before task1 235 | self.assertLess(task0_span["start"], task1_span["start"]) 236 | 237 | # Check that the task1 span started during the task0 span 238 | self.assertLess(task1_span["start"], task0_span["end"]) 239 | 240 | # Check that the task spans are both children of the root span 241 | self.assertEqual(root_span["span"].id, task0_span["span"].parent_id) 242 | self.assertEqual(root_span["span"].id, task1_span["span"].parent_id) 243 | 244 | @async_test 245 | async def test_traceless_spans_in_other_tasks_should_be_ignored(self): 246 | """Start a span without first starting a trace in the same task. 247 | 248 | This span is started while there is a trace started in another 249 | task. This span should be independent from that trace, and is 250 | expected to be ignored. 251 | 252 | """ 253 | async def task0(): 254 | await asyncio.sleep(0.2) 255 | trace = self.tracer.start_trace(context={"name": "task0"}) 256 | await asyncio.sleep(0.2) 257 | self.tracer.finish_trace(trace) 258 | 259 | async def task1(): 260 | await asyncio.sleep(0.1) 261 | span = self.tracer.start_span(context={"name": "task1"}) 262 | await asyncio.sleep(0.2) 263 | self.tracer.finish_span(span) 264 | 265 | await asyncio.gather(task0(), task1()) 266 | 267 | self.assertEqual(len(self.finished_spans), 1) 268 | task0_span = self.finished_spans[0] 269 | 270 | # Check that only the trace produced a span. 271 | self.assertEqual(task0_span["name"], "task0") 272 | 273 | @async_test 274 | async def test_untraced_async_functions_should_work(self): 275 | """Call functions with the untraced decorator from within a trace. 276 | 277 | A trace is started and untraced async functions are called. 278 | They start spans, but the spans are not expected to be 279 | recorded since they should be considered started outside of 280 | any trace. 
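For comparison, the untraced decorator exercised in the tests that follow does the opposite of traced: the wrapped function runs detached from the caller's trace, so background work does not add spans to it. A minimal sketch with an illustrative function name:

```python
import asyncio

import beeline


@beeline.untraced
async def poll_side_channel():
    # Runs inside the event loop, but outside the surrounding trace: any spans
    # started here are ignored rather than attached to the caller's trace.
    await asyncio.sleep(0.5)
```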
281 | 282 | """ 283 | 284 | calls = set() 285 | 286 | trace = self.tracer.start_trace(context={"name": "root"}) 287 | 288 | @beeline.untraced 289 | async def fn0(): 290 | span0 = self.tracer.start_span(context={"name": "fn0"}) 291 | await asyncio.sleep(0.2) 292 | self.tracer.finish_span(span0) 293 | calls.add("fn0") 294 | 295 | @beeline.untraced 296 | async def fn1(): 297 | await asyncio.sleep(0.1) 298 | span1 = self.tracer.start_span(context={"name": "fn1"}) 299 | await asyncio.sleep(0.2) 300 | self.tracer.finish_span(span1) 301 | calls.add("fn1") 302 | 303 | # Use the decorated function as a plain coroutine 304 | await fn0() 305 | 306 | # Use the decorated function as a task 307 | task = asyncio.create_task(fn1()) # pylint: disable=no-member 308 | await task 309 | 310 | self.tracer.finish_trace(trace) 311 | 312 | self.assertEqual(len(self.finished_spans), 1) 313 | root_span = self.finished_spans[0] 314 | 315 | self.assertEqual(root_span["name"], "root") 316 | 317 | # Verify that the untraced functions were actually called 318 | self.assertTrue("fn0" in calls) 319 | self.assertTrue("fn1" in calls) 320 | 321 | @async_test 322 | async def test_untraced_synchronous_functions_should_work(self): 323 | """Call synchronous functions with the untraced decorator. 324 | 325 | A trace is started and untraced synchronous functions are 326 | called. They start spans, but the spans are not expected to be 327 | recorded since they should be considered started outside of 328 | any trace. 329 | 330 | """ 331 | 332 | calls = set() 333 | 334 | trace = self.tracer.start_trace(context={"name": "root"}) 335 | 336 | @beeline.untraced 337 | def fn0(): 338 | span0 = self.tracer.start_span(context={"name": "fn0"}) 339 | self.tracer.finish_span(span0) 340 | calls.add("fn0") 341 | 342 | @beeline.untraced 343 | def fn1(): 344 | async def task1(): 345 | await asyncio.sleep(0.1) 346 | span1 = self.tracer.start_span(context={"name": "fn1"}) 347 | await asyncio.sleep(0.2) 348 | self.tracer.finish_span(span1) 349 | calls.add("fn1") 350 | 351 | return asyncio.create_task(task1()) # pylint: disable=no-member 352 | 353 | # Call one synchronous function 354 | fn0() 355 | # Spawn a task within another synchronous function 356 | task = fn1() 357 | 358 | await task 359 | 360 | self.tracer.finish_trace(trace) 361 | 362 | self.assertEqual(len(self.finished_spans), 1) 363 | root_span = self.finished_spans[0] 364 | 365 | self.assertEqual(root_span["name"], "root") 366 | 367 | # Verify that the untraced functions were actually called 368 | self.assertTrue("fn0" in calls) 369 | self.assertTrue("fn1" in calls) 370 | 371 | @async_test 372 | async def test_traced_thread_should_work_with_async_tracer(self): 373 | """Run traced code in threads from within async code. 374 | 375 | A trace is started and two functions are run in threads via a 376 | ThreadPoolExecutor. Both functions start spans, but only one 377 | of them is decorated with the traced_thread decorator. Only 378 | the span from the decorated function is expected to show up in 379 | the trace. 
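The traced_thread decorator tested below copies the currently active trace state into the wrapped function, which is what lets thread-pool work show up as children of an async trace. A sketch with illustrative names; as in the tests, the decorator is applied while the trace is active:

```python
import concurrent.futures

import beeline


def resize_batch(paths):
    with beeline.tracer(name="resize_batch"):

        @beeline.traced_thread
        def resize_image(path):
            # traced_thread captured the resize_batch trace state, so this span
            # becomes a child of that trace even though it runs in a worker thread.
            with beeline.tracer(name="resize_image"):
                beeline.add_context_field("path", path)

        with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
            list(pool.map(resize_image, paths))
```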
380 | 381 | """ 382 | loop = asyncio.get_running_loop() # pylint: disable=no-member 383 | executor = concurrent.futures.ThreadPoolExecutor(max_workers=2) 384 | calls = set() 385 | 386 | trace = self.tracer.start_trace(context={"name": "root"}) 387 | 388 | @self.beeline.traced_thread 389 | def traced_worker(): 390 | span = self.tracer.start_span(context={"name": "traced_worker"}) 391 | time.sleep(0.2) 392 | self.tracer.finish_span(span) 393 | 394 | def untraced_worker(): 395 | span = self.tracer.start_span(context={"name": "untraced_worker"}) 396 | time.sleep(0.2) 397 | self.tracer.finish_span(span) 398 | calls.add("untraced_worker") 399 | 400 | future0 = loop.run_in_executor(executor, traced_worker) 401 | future1 = loop.run_in_executor(executor, untraced_worker) 402 | 403 | await asyncio.gather(future0, future1) 404 | 405 | self.tracer.finish_trace(trace) 406 | 407 | self.assertEqual(len(self.finished_spans), 2) 408 | 409 | worker_span, root_span = self.finished_spans # pylint: disable=unbalanced-tuple-unpacking 410 | 411 | # Check that the spans finished in the expected order, with 412 | # the root span last. 413 | self.assertEqual(worker_span["name"], "traced_worker") 414 | self.assertEqual(root_span["name"], "root") 415 | self.assertLessEqual(worker_span["end"], root_span["end"]) 416 | 417 | # Check that the root span was started before the worker span. 418 | self.assertLess(root_span["start"], worker_span["start"]) 419 | 420 | # Check that the worker span is a child of the root span. 421 | self.assertEqual(root_span["span"].id, worker_span["span"].parent_id) 422 | 423 | # Verify that the untraced function was actually called 424 | self.assertTrue("untraced_worker" in calls) 425 | -------------------------------------------------------------------------------- /beeline/test_beeline.py: -------------------------------------------------------------------------------- 1 | import threading 2 | import unittest 3 | from mock import Mock, patch, call 4 | 5 | import beeline 6 | import libhoney 7 | assert libhoney 8 | 9 | 10 | class TestBeeline(unittest.TestCase): 11 | def setUp(self): 12 | self.addCleanup(patch.stopall) 13 | self.m_gbl = patch('beeline._GBL').start() 14 | 15 | def test_send_event(self): 16 | ''' test correct behavior for send_event ''' 17 | _beeline = beeline.Beeline() 18 | _beeline.tracer_impl = Mock() 19 | m_span = Mock() 20 | _beeline.tracer_impl.get_active_span.return_value = m_span 21 | _beeline.send_event() 22 | _beeline.tracer_impl.get_active_span.assert_called_once_with() 23 | _beeline.tracer_impl.finish_trace.assert_called_once_with(m_span) 24 | 25 | def test_send_no_events(self): 26 | ''' ensure nothing crashes when we try to send with no events in the 27 | stack ''' 28 | _beeline = beeline.Beeline() 29 | _beeline.tracer_impl = Mock() 30 | _beeline.tracer_impl.get_active_span.return_value = None 31 | _beeline.send_event() 32 | _beeline.tracer_impl.get_active_span.assert_called_once_with() 33 | 34 | def test_send_all(self): 35 | ''' ensure events are flushed, and that the root span is handled with 36 | finish_trace ''' 37 | s1, s2, s3 = Mock(), Mock(), Mock() 38 | s1.is_root.return_value = False 39 | s2.is_root.return_value = False 40 | s3.is_root.return_value = True 41 | _beeline = beeline.Beeline() 42 | _beeline.tracer_impl = Mock() 43 | _beeline.tracer_impl.get_active_span.side_effect = [s1, s2, s3, None] 44 | 45 | _beeline.send_all() 46 | 47 | _beeline.tracer_impl.finish_span.assert_has_calls([ 48 | call(s1), 49 | call(s2), 50 | ]) 51 | 
_beeline.tracer_impl.finish_trace.assert_called_once_with(s3) 52 | 53 | def test_run_hooks_and_send_no_hooks(self): 54 | ''' ensure send works when no hooks defined ''' 55 | ev = Mock() 56 | _beeline = beeline.Beeline() 57 | _beeline.tracer_impl = Mock() 58 | _beeline._run_hooks_and_send(ev) 59 | 60 | # no hooks, not a traced event - call send 61 | ev.send.assert_called_once_with() 62 | ev.send_presampled.assert_not_called() 63 | 64 | def test_run_hooks_and_send_sampler(self): 65 | ''' ensure send works with a sampler hook defined ''' 66 | def _sampler_drop_all(fields): 67 | return False, 0 68 | m_sampler_hook = Mock() 69 | m_sampler_hook.side_effect = _sampler_drop_all 70 | _beeline = beeline.Beeline(sampler_hook=m_sampler_hook) 71 | _beeline.tracer_impl = Mock() 72 | ev = Mock() 73 | 74 | _beeline._run_hooks_and_send(ev) 75 | m_sampler_hook.assert_called_once_with(ev.fields()) 76 | ev.send_presampled.assert_not_called() 77 | ev.send.assert_not_called() 78 | 79 | def _sampler_drop_none(fields): 80 | return True, 100 81 | 82 | ev = Mock() 83 | m_sampler_hook.reset_mock() 84 | 85 | m_sampler_hook.side_effect = _sampler_drop_none 86 | 87 | _beeline._run_hooks_and_send(ev) 88 | m_sampler_hook.assert_called_once_with(ev.fields()) 89 | self.assertEqual(ev.sample_rate, 100) 90 | ev.send_presampled.assert_called_once_with() 91 | ev.send.assert_not_called() 92 | 93 | def test_run_hooks_and_send_presend_hook(self): 94 | ''' ensure send works when presend hook is defined ''' 95 | def _presend_hook(fields): 96 | fields["thing i want"] = "put it there" 97 | del fields["thing i don't want"] 98 | m_presend_hook = Mock() 99 | m_presend_hook.side_effect = _presend_hook 100 | _beeline = beeline.Beeline(presend_hook=m_presend_hook) 101 | _beeline.tracer_impl = Mock() 102 | 103 | ev = Mock() 104 | ev.fields.return_value = { 105 | "thing i don't want": "get it out of here", 106 | "happy data": "so happy", 107 | } 108 | 109 | _beeline._run_hooks_and_send(ev) 110 | ev.send_presampled.assert_not_called() 111 | ev.send.assert_called_once_with() 112 | self.assertDictEqual( 113 | ev.fields(), 114 | { 115 | "thing i want": "put it there", 116 | "happy data": "so happy", 117 | }, 118 | ) 119 | 120 | def test_start_trace_returns_value(self): 121 | ''' ensure the top-level start_span and start_trace APIs return the value 122 | form their calls to the tracer ''' 123 | self.m_gbl.tracer_impl.start_span.return_value = 'wooimaspan' 124 | val = beeline.start_span() 125 | self.assertEqual(val, 'wooimaspan') 126 | 127 | self.m_gbl.tracer_impl.start_trace.return_value = 'wooimatrace' 128 | val = beeline.start_trace() 129 | self.assertEqual(val, 'wooimatrace') 130 | 131 | def test_marshal_trace_context_returns_value(self): 132 | ''' ensure the top-level definition of marshal_trace_context returns a value ''' 133 | self.m_gbl.tracer_impl.marshal_trace_context.return_value = 'asdf' 134 | val = beeline.marshal_trace_context() 135 | self.assertEqual(val, 'asdf') 136 | 137 | def test_trace_wrapper(self): 138 | ''' ensure that the trace wrapper decorates a function and starts a trace ''' 139 | _beeline = beeline.Beeline() 140 | with patch('beeline.get_beeline') as m_gbl: 141 | m_gbl.return_value = _beeline 142 | _beeline.tracer_impl._run_hooks_and_send = Mock() 143 | 144 | @beeline.traced(name="my_sum") 145 | def my_sum(a, b): 146 | return a + b 147 | 148 | # this should accept the function's arguments normally and return the function's value 149 | # if there is one 150 | self.assertEqual(my_sum(1, 2), 3) 151 | # check that an event 
was sent, from which we can infer that the function was wrapped 152 | self.assertTrue(_beeline.tracer_impl._run_hooks_and_send.called) 153 | 154 | def test_generator_wrapper(self): 155 | ''' ensure that the trace wrapper decorates a generator function and starts a trace 156 | also ensure that child traces get the parent trace correctly set 157 | ''' 158 | 159 | _beeline = beeline.Beeline() 160 | 161 | with patch('beeline.get_beeline') as m_gbl: 162 | m_gbl.return_value = _beeline 163 | _beeline.tracer_impl._run_hooks_and_send = Mock() 164 | 165 | @beeline.traced(name="return_integer_n") 166 | def return_integer(n): 167 | return n 168 | 169 | @beeline.traced(name="output_integers_to") 170 | def output_integers_to(n): 171 | for i in range(n): 172 | yield return_integer(i) 173 | 174 | # should accept the function's arguments normally and yield the items from the 175 | # generator 176 | self.assertEqual(list(output_integers_to(3)), [0, 1, 2]) 177 | 178 | self.assertTrue(_beeline.tracer_impl._run_hooks_and_send.called) 179 | 180 | spans = [x[0][0] 181 | for x in _beeline.tracer_impl._run_hooks_and_send.call_args_list] 182 | 183 | # check the child spans now 184 | parent_span = spans[-1] 185 | child_spans = spans[:-1] 186 | 187 | for child_span in child_spans: 188 | self.assertEqual(child_span.parent_id, parent_span.id) 189 | 190 | @staticmethod 191 | def raising_run_in_thread(target): 192 | closure_dict = {} 193 | 194 | def wrapper(): 195 | try: 196 | target() 197 | except Exception as exc: 198 | closure_dict["thread_exception"] = exc 199 | 200 | thread = threading.Thread(target=wrapper) 201 | thread.start() 202 | thread.join() 203 | 204 | if "thread_exception" in closure_dict: 205 | raise closure_dict["thread_exception"] 206 | 207 | def test_threaded_trace(self): 208 | _beeline = beeline.Beeline() 209 | 210 | with patch('beeline.get_beeline') as m_gbl: 211 | m_gbl.return_value = _beeline 212 | _beeline.tracer_impl._run_hooks_and_send = Mock() 213 | 214 | _beeline.tracer_impl.start_trace(trace_id="asdf") 215 | self.assertEqual(_beeline.tracer_impl._trace.id, "asdf") 216 | 217 | def thread_func(): 218 | # confirm no trace state in new thread 219 | self.assertIsNone(_beeline.tracer_impl._trace) 220 | 221 | self.raising_run_in_thread(target=thread_func) 222 | 223 | @beeline.traced_thread 224 | def traced_thread_func(): 225 | self.assertEqual(_beeline.tracer_impl._trace.id, "asdf") 226 | 227 | with _beeline.tracer(name="foo") as span: 228 | self.assertEqual(span.trace_id, "asdf") 229 | self.assertEqual( 230 | span.parent_id, _beeline.tracer_impl._trace.stack[0].id) 231 | 232 | self.raising_run_in_thread(target=traced_thread_func) 233 | 234 | # test use of beeline client 235 | @_beeline.traced_thread 236 | def traced_thread_func_2(): 237 | self.assertEqual(_beeline.tracer_impl._trace.id, "asdf") 238 | 239 | with _beeline.tracer(name="foo2") as span: 240 | self.assertEqual(span.trace_id, "asdf") 241 | self.assertEqual( 242 | span.parent_id, _beeline.tracer_impl._trace.stack[0].id) 243 | 244 | self.raising_run_in_thread(target=traced_thread_func_2) 245 | 246 | def test_finish_span_none(self): 247 | ''' ensure finish_span does not crash if an empty span is passed to it ''' 248 | _beeline = beeline.Beeline() 249 | # should not crash 250 | _beeline.tracer_impl.finish_span(None) 251 | 252 | 253 | class TestBeelineNotInitialized(unittest.TestCase): 254 | def setUp(self): 255 | self.addCleanup(patch.stopall) 256 | self.m_gbl = patch('beeline.get_beeline').start() 257 | self.m_gbl.return_value = None 258 | 
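Outside of tests, the sampler and presend hooks exercised earlier in this file are normally supplied at initialisation time. The field names and rates below are illustrative; the contract is that a sampler hook returns a (keep, sample_rate) pair and a presend hook mutates the field dictionary in place before the event is sent:

```python
import beeline


def sampler_hook(fields):
    # Drop health checks entirely; sample everything else at 1 in 10.
    if fields.get("request.path") == "/healthz":
        return False, 0
    return True, 10


def presend_hook(fields):
    # Scrub values that should never leave the process.
    fields.pop("app.password", None)


beeline.init(
    writekey="YOUR_API_KEY",
    dataset="my-app",
    service_name="my-service",
    sampler_hook=sampler_hook,
    presend_hook=presend_hook,
)
```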
259 | def test_trace_wrapper(self): 260 | ''' ensure the trace wrapper doesn't break if the beeline is not initialized ''' 261 | self.assertIsNone(beeline.get_beeline()) 262 | 263 | @beeline.traced(name="my_sum") 264 | def my_sum(a, b): 265 | return a + b 266 | 267 | # this should not crash if the beeline isn't initialized 268 | # it should also accept arguments normally and return the function's value 269 | self.assertEqual(my_sum(1, 2), 3) 270 | 271 | def test_tracer_context_manager(self): 272 | ''' ensure the tracer context manager doesn't break if the beeline is not initialized ''' 273 | self.assertIsNone(beeline.get_beeline()) 274 | 275 | def my_sum(a, b): 276 | with beeline.tracer(name="my_sum"): 277 | return a + b 278 | 279 | # this should not crash if the beeline isn't initialized 280 | # it should also accept arguments normally and return the function's value 281 | self.assertEqual(my_sum(1, 2), 3) 282 | 283 | def test_traced_thread(self): 284 | self.assertIsNone(beeline.get_beeline()) 285 | 286 | @beeline.traced_thread 287 | def my_sum(a, b): 288 | return a + b 289 | 290 | # this should not crash if the beeline isn't initialized 291 | # it should also accept arguments normally and return the function's value 292 | self.assertEqual(my_sum(1, 2), 3) 293 | -------------------------------------------------------------------------------- /beeline/test_internal.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from beeline.internal import stringify_exception 4 | 5 | 6 | class TestInternal(unittest.TestCase): 7 | def test_stringify_exception(self): 8 | '''ensure we don't crash handling utf-8 exceptions''' 9 | e = Exception("foo") 10 | self.assertEqual('foo', stringify_exception(e)) 11 | 12 | e = Exception("\u1024abcdef") 13 | self.assertEqual('\u1024abcdef', stringify_exception(e)) 14 | -------------------------------------------------------------------------------- /beeline/test_suite.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import unittest 3 | import unittest.loader 4 | import sys 5 | 6 | 7 | def get_test_suite(): 8 | """Return the set of tests suitable for the current Python version""" 9 | # FIXME: Automatically discover tests and strip out async 10 | 11 | test_suite = unittest.defaultTestLoader.discover(".") 12 | 13 | filtered_test_suite = unittest.TestSuite() 14 | for test_group in test_suite: 15 | ts = unittest.TestSuite() 16 | for inner_test_group in test_group: 17 | # Skip the async tests which fail to import on old versions of python 18 | if inner_test_group.__class__.__name__ == "ModuleImportFailure" and inner_test_group._testMethodName == "beeline.test_async": 19 | print( 20 | "Skipping beeline.test_async module tests due to old Python version") 21 | else: 22 | ts.addTest(inner_test_group) 23 | filtered_test_suite.addTest(ts) 24 | 25 | return filtered_test_suite 26 | 27 | 28 | def run_tests(): 29 | runner = unittest.TextTestRunner() 30 | return runner.run(get_test_suite()) 31 | 32 | 33 | if __name__ == "__main__": 34 | result = run_tests() 35 | sys.exit(not result.wasSuccessful()) 36 | -------------------------------------------------------------------------------- /beeline/trace.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import copy 3 | import datetime 4 | import functools 5 | import hashlib 6 | import json 7 | import math 8 | import random 9 | import struct 10 
| import sys 11 | import threading 12 | import traceback 13 | import inspect 14 | from collections import defaultdict 15 | 16 | from contextlib import contextmanager 17 | 18 | from beeline.internal import log, stringify_exception 19 | 20 | import beeline.propagation 21 | import beeline.propagation.default 22 | import beeline.propagation.honeycomb 23 | 24 | MAX_INT32 = math.pow(2, 32) - 1 25 | SPAN_ID_BYTES = 8 26 | TRACE_ID_BYTES = 16 27 | 28 | 29 | class Trace(object): 30 | '''Object encapsulating all state of an ongoing trace.''' 31 | 32 | def __init__(self, trace_id, dataset=None): 33 | self.id = trace_id 34 | self.dataset = dataset 35 | self.stack = [] 36 | self.fields = {} 37 | self.rollup_fields = defaultdict(float) 38 | 39 | def copy(self): 40 | '''Copy the trace state for use in another thread or context.''' 41 | result = Trace(self.id) 42 | result.stack = copy.copy(self.stack) 43 | result.fields = copy.copy(self.fields) 44 | return result 45 | 46 | 47 | class Tracer(object): 48 | def __init__(self, client): 49 | self._client = client 50 | 51 | self.presend_hook = None 52 | self.sampler_hook = None 53 | self.http_trace_parser_hook = beeline.propagation.default.http_trace_parser_hook 54 | self.http_trace_propagation_hook = beeline.propagation.default.http_trace_propagation_hook 55 | 56 | @contextmanager 57 | def __call__(self, name, trace_id=None, parent_id=None): 58 | try: 59 | span = None 60 | if self.get_active_trace_id() and trace_id is None: 61 | span = self.start_span( 62 | context={'name': name}, parent_id=parent_id) 63 | if span: 64 | log('tracer context manager started new span, id = %s', 65 | span.id) 66 | else: 67 | span = self.start_trace( 68 | context={'name': name}, trace_id=trace_id, parent_span_id=parent_id) 69 | if span: 70 | log('tracer context manager started new trace, id = %s', 71 | span.trace_id) 72 | yield span 73 | except Exception as e: 74 | if span: 75 | span.add_context({ 76 | "app.exception_type": str(type(e)), 77 | "app.exception_string": stringify_exception(e), 78 | "app.exception_stacktrace": traceback.format_exc(), 79 | }) 80 | raise 81 | finally: 82 | if span: 83 | if span.is_root(): 84 | log('tracer context manager ending trace, id = %s', 85 | span.trace_id) 86 | self.finish_trace(span) 87 | else: 88 | log('tracer context manager ending span, id = %s', 89 | span.id) 90 | self.finish_span(span) 91 | else: 92 | log('tracer context manager span for %s was unexpectedly None', name) 93 | 94 | def start_trace(self, context=None, trace_id=None, parent_span_id=None, dataset=None): 95 | if trace_id: 96 | if self._trace: 97 | log('warning: start_trace got explicit trace_id but we are already in a trace. 
' 98 | 'starting new trace with id = %s', trace_id) 99 | self._trace = Trace(trace_id, dataset) 100 | else: 101 | self._trace = Trace(generate_trace_id(), dataset) 102 | 103 | # start the root span 104 | return self.start_span(context=context, parent_id=parent_span_id, is_root_span=True) 105 | 106 | def start_span(self, context=None, parent_id=None, is_root_span=False): 107 | if not self._trace: 108 | log('start_span called but no trace is active') 109 | return None 110 | 111 | span_id = generate_span_id() 112 | if parent_id: 113 | parent_span_id = parent_id 114 | else: 115 | parent_span_id = self._trace.stack[-1].id if self._trace.stack else None 116 | ev = self._client.new_event(data=self._trace.fields) 117 | if context: 118 | ev.add(data=context) 119 | 120 | fields = { 121 | 'trace.trace_id': self._trace.id, 122 | 'trace.parent_id': parent_span_id, 123 | 'trace.span_id': span_id, 124 | } 125 | if is_root_span: 126 | spanType = "root" 127 | if parent_span_id: 128 | spanType = "subroot" 129 | fields['meta.span_type'] = spanType 130 | ev.add(data=fields) 131 | 132 | is_root = len(self._trace.stack) == 0 133 | span = Span(trace_id=self._trace.id, parent_id=parent_span_id, 134 | id=span_id, event=ev, is_root=is_root) 135 | self._trace.stack.append(span) 136 | 137 | return span 138 | 139 | def finish_span(self, span): 140 | # avoid exception if called with None 141 | if span is None: 142 | return 143 | 144 | # send the span's event. Even if the stack is in an unhealthy state, 145 | # it's probably better to send event data than not 146 | if span.event: 147 | if self._trace: 148 | if self._trace.dataset: 149 | span.event.dataset = self._trace.dataset 150 | 151 | # add the trace's rollup fields to the root span 152 | if span.is_root(): 153 | for k, v in self._trace.rollup_fields.items(): 154 | span.event.add_field(k, v) 155 | 156 | for k, v in span.rollup_fields.items(): 157 | span.event.add_field(k, v) 158 | 159 | # propagate trace fields that may have been added in later spans 160 | for k, v in self._trace.fields.items(): 161 | # don't overwrite existing values because they may be different 162 | if k not in span.event.fields(): 163 | span.event.add_field(k, v) 164 | 165 | duration = datetime.datetime.now() - span.event.start_time 166 | duration_ms = duration.total_seconds() * 1000.0 167 | span.event.add_field('duration_ms', duration_ms) 168 | 169 | self._run_hooks_and_send(span) 170 | else: 171 | log('warning: span has no event, was it initialized correctly?') 172 | 173 | if not self._trace: 174 | log('warning: span finished without an active trace') 175 | return 176 | 177 | if span.trace_id != self._trace.id: 178 | log('warning: finished span called for span in inactive trace. 
' 179 | 'current trace_id = %s, span trace_id = %s', self._trace.id, span.trace_id) 180 | return 181 | 182 | if not self._trace.stack: 183 | log('warning: finish span called but stack is empty') 184 | return 185 | 186 | if self._trace.stack[-1].id != span.id: 187 | log('warning: finished span is not the currently active span') 188 | return 189 | 190 | self._trace.stack.pop() 191 | 192 | def finish_trace(self, span): 193 | self.finish_span(span) 194 | self._trace = None 195 | 196 | def parse_http_trace(self, request): 197 | if not self.http_trace_parser_hook: 198 | return None 199 | return self.http_trace_parser_hook(request) 200 | 201 | def propagate_and_start_trace(self, context, request): 202 | err = None 203 | propagation_context = None 204 | try: 205 | propagation_context = self.parse_http_trace(request) 206 | except Exception: 207 | err = sys.exc_info()[0] 208 | log('error: http_trace_parser_hook returned exception: %s', 209 | sys.exc_info()[0]) 210 | 211 | if propagation_context: 212 | return self.start_trace(context=context, trace_id=propagation_context.trace_id, 213 | parent_span_id=propagation_context.parent_id, 214 | dataset=propagation_context.dataset) 215 | for k, v in propagation_context.trace_fields: 216 | self.add_trace_field(k, v) 217 | else: 218 | # Initialize a new trace from scratch 219 | if err is not None: 220 | context['parser_hook_error'] = repr(err) 221 | return self.start_trace(context, trace_id=None, parent_span_id=None) 222 | pass 223 | 224 | def get_propagation_context(self): 225 | if not self._trace: 226 | return None 227 | 228 | return beeline.propagation.PropagationContext( 229 | self.get_active_trace_id(), 230 | self.get_active_span().id, 231 | self._trace.fields) 232 | 233 | def get_active_trace_id(self): 234 | if self._trace: 235 | return self._trace.id 236 | return None 237 | 238 | def get_active_span(self): 239 | if self._trace and self._trace.stack: 240 | return self._trace.stack[-1] 241 | return None 242 | 243 | def add_context_field(self, name, value): 244 | span = self.get_active_span() 245 | if span: 246 | span.add_context_field(name=name, value=value) 247 | 248 | def add_context(self, data): 249 | span = self.get_active_span() 250 | if span: 251 | span.add_context(data=data) 252 | 253 | def remove_context_field(self, name): 254 | span = self.get_active_span() 255 | if span: 256 | span.remove_context_field(name=name) 257 | 258 | def add_rollup_field(self, name, value): 259 | value = float(value) 260 | 261 | span = self.get_active_span() 262 | if span: 263 | span.rollup_fields[name] += value 264 | 265 | if not self._trace: 266 | log('warning: adding rollup field without an active trace') 267 | return 268 | 269 | self._trace.rollup_fields[f"rollup.{name}"] += value 270 | 271 | def add_trace_field(self, name, value): 272 | # prefix with app to avoid key conflicts 273 | # add the app prefix if it's missing 274 | 275 | if (type(name) == str and not name.startswith("app.")) or type(name) != str: 276 | key = f"app.{name}" 277 | else: 278 | key = name 279 | 280 | # also add to current span 281 | self.add_context_field(key, value) 282 | 283 | if not self._trace: 284 | log('warning: adding trace field without an active trace') 285 | return 286 | self._trace.fields[key] = value 287 | 288 | def remove_trace_field(self, name): 289 | key = f"app.{name}" 290 | self.remove_context_field(key) 291 | if not self._trace: 292 | log('warning: removing trace field without an active trace') 293 | return 294 | self._trace.fields.pop(key) 295 | 296 | def 
marshal_trace_context(self): 297 | if not self._trace: 298 | log('warning: marshal_trace_context called, but no active trace') 299 | return 300 | 301 | return marshal_trace_context( 302 | self._trace.id, 303 | self._trace.stack[-1].id, 304 | self._trace.fields 305 | ) 306 | 307 | def register_hooks(self, presend=None, sampler=None, http_trace_parser=None, http_trace_propagation=None): 308 | self.presend_hook = presend 309 | self.sampler_hook = sampler 310 | self.http_trace_parser_hook = http_trace_parser 311 | self.http_trace_propagation_hook = http_trace_propagation 312 | 313 | def _run_hooks_and_send(self, span): 314 | ''' internal - run any defined hooks on the event and send 315 | 316 | kind of hacky: we fetch the hooks from the beeline, but they are only 317 | used here. Pass them to the tracer implementation? 318 | ''' 319 | presampled = False 320 | if self.sampler_hook: 321 | log("executing sampler hook on event ev = %s", span.event.fields()) 322 | keep, new_rate = self.sampler_hook(span.event.fields()) 323 | if not keep: 324 | log("skipping event due to sampler hook sampling ev = %s", 325 | span.event.fields()) 326 | return 327 | span.event.sample_rate = new_rate 328 | presampled = True 329 | 330 | if self.presend_hook: 331 | log("executing presend hook on event ev = %s", span.event.fields()) 332 | self.presend_hook(span.event.fields()) 333 | 334 | if presampled: 335 | log("enqueuing presampled event ev = %s", span.event.fields()) 336 | span.event.send_presampled() 337 | elif _should_sample(span.trace_id, span.event.sample_rate): 338 | # if our sampler hook wasn't used, use deterministic sampling 339 | span.event.send_presampled() 340 | 341 | 342 | class SynchronousTracer(Tracer): 343 | def __init__(self, client): 344 | super().__init__(client) 345 | self._state = threading.local() 346 | 347 | @property 348 | def _trace(self): 349 | return getattr(self._state, 'trace', None) 350 | 351 | @_trace.setter 352 | def _trace(self, new_trace): 353 | self._state.trace = new_trace 354 | 355 | 356 | class Span(object): 357 | ''' Span represents an active span. Should not be initialized directly, but 358 | through a Tracer object's `start_span` method. 
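Putting the Tracer pieces above together, the module-level helpers drive this machinery: tracer() opens root and child spans, add_trace_field stores an app.-prefixed field on the whole trace, and add_rollup_field accumulates a numeric total that lands on the root span under a rollup. prefix. A sketch with placeholder names and values:

```python
import beeline

beeline.init(writekey="YOUR_API_KEY", dataset="my-app", service_name="worker")

with beeline.tracer(name="process_batch"):
    beeline.add_trace_field("batch_id", "2024-01-15")      # sent as app.batch_id
    for item in ("a", "b", "c"):
        with beeline.tracer(name="process_item"):
            beeline.add_context_field("item", item)
            # Root span ends up with rollup.items_processed = 3
            beeline.add_rollup_field("items_processed", 1)

beeline.close()
```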
''' 359 | 360 | def __init__(self, trace_id, parent_id, id, event, is_root=False): 361 | self.trace_id = trace_id 362 | self.parent_id = parent_id 363 | self.id = id 364 | self.event = event 365 | self.event.start_time = datetime.datetime.now() 366 | self.rollup_fields = defaultdict(float) 367 | self._is_root = is_root 368 | 369 | def add_context_field(self, name, value): 370 | self.event.add_field(name, value) 371 | 372 | def add_context(self, data): 373 | self.event.add(data) 374 | 375 | def remove_context_field(self, name): 376 | if name in self.event.fields(): 377 | del self.event.fields()[name] 378 | 379 | def is_root(self): 380 | return self._is_root 381 | 382 | 383 | def _should_sample(trace_id, sample_rate): 384 | # Always return True if sample rate is 1 385 | if sample_rate == 1: 386 | return True 387 | 388 | sample_upper_bound = MAX_INT32 / sample_rate 389 | # compute a sha1 390 | sha1 = hashlib.sha1() 391 | sha1.update(trace_id.encode('utf-8')) 392 | # convert first 4 digits to int 393 | value, = struct.unpack('>I', sha1.digest()[:4]) 394 | if value < sample_upper_bound: 395 | return True 396 | return False 397 | 398 | 399 | def marshal_trace_context(trace_id, parent_id, context): 400 | """Deprecated: Use beeline.propagation.honeycomb.marshal_trace_context instead""" 401 | version = 1 402 | trace_fields = base64.b64encode(json.dumps(context).encode()).decode() 403 | trace_context = f"{version};trace_id={trace_id},parent_id={parent_id},context={trace_fields}" 404 | 405 | return trace_context 406 | 407 | 408 | def unmarshal_trace_context(trace_header): 409 | """ 410 | Deprecated: Use beeline.propagation.honeycomb.unmarshal_trace_context instead 411 | """ 412 | return beeline.propagation.honeycomb.unmarshal_propagation_context(trace_header) 413 | 414 | 415 | def traced_impl(tracer_fn, name, trace_id, parent_id): 416 | """Implementation of the traced decorator without async support.""" 417 | def wrapped(fn): 418 | if inspect.isgeneratorfunction(fn): 419 | @functools.wraps(fn) 420 | def inner(*args, **kwargs): 421 | inner_generator = fn(*args, **kwargs) 422 | with tracer_fn(name=name, trace_id=trace_id, parent_id=parent_id): 423 | for value in inner_generator: 424 | yield value 425 | return inner 426 | else: 427 | @functools.wraps(fn) 428 | def inner(*args, **kwargs): 429 | with tracer_fn(name=name, trace_id=trace_id, parent_id=parent_id): 430 | return fn(*args, **kwargs) 431 | return inner 432 | return wrapped 433 | 434 | 435 | # Use system random instead of default psuedorandom generator 436 | system_random = random.SystemRandom() 437 | 438 | 439 | def generate_span_id(): 440 | """Generate span ID compatible with w3c tracing spec.""" 441 | format_str = "{{:0{:d}x}}".format(SPAN_ID_BYTES*2) # pylint: disable=C0209 442 | return format_str.format(system_random.getrandbits(SPAN_ID_BYTES*8)) 443 | 444 | 445 | def generate_trace_id(): 446 | """Generate trace ID compatible with w3c tracing spec.""" 447 | format_str = "{{:0{:d}x}}".format(TRACE_ID_BYTES*2) # pylint: disable=C0209 448 | return format_str.format(system_random.getrandbits(TRACE_ID_BYTES*8)) 449 | -------------------------------------------------------------------------------- /beeline/version.py: -------------------------------------------------------------------------------- 1 | VERSION = '3.6.0' # Update using bump2version 2 | -------------------------------------------------------------------------------- /examples/django/README.md: -------------------------------------------------------------------------------- 1 | # 
example django app 2 | 3 | This simple django app uses auto-instrumentation and adds a manual span with trace context including the message of "Hello World". 4 | 5 | ## Prerequisites 6 | 7 | First set an environment variable `HONEYCOMB_API_KEY`, available from your account page. 8 | This will configure the server to send instrumentation events to Honeycomb in a dataset called my-django-app. 9 | 10 | You'll also need [Poetry](https://python-poetry.org/) installed to run the example. 11 | Poetry automatically creates a virtual environment to run the example in so you don't need to manage one yourself. 12 | 13 | ## Running the example 14 | 15 | Install the dependencies: 16 | 17 | ```bash 18 | poetry install 19 | ``` 20 | 21 | Navigate into the app directory: 22 | 23 | ```bash 24 | cd app 25 | ``` 26 | 27 | Run the application: 28 | 29 | ```bash 30 | poetry run python3 manage.py runserver 31 | ``` 32 | 33 | Now you can `curl` the app: 34 | 35 | ```bash 36 | $ curl localhost:8000/hello/ 37 | Hello World 38 | ``` 39 | 40 | Check out the results in Honeycomb! 41 | -------------------------------------------------------------------------------- /examples/django/app/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/honeycombio/beeline-python/b8c96386964749c60bcd865d10f31148f970fec1/examples/django/app/app/__init__.py -------------------------------------------------------------------------------- /examples/django/app/app/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for app project. 3 | 4 | Generated by 'django-admin startproject' using Django 2.1. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/2.1/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/2.1/ref/settings/ 11 | """ 12 | 13 | import os 14 | 15 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 16 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 17 | 18 | 19 | # Quick-start development settings - unsuitable for production 20 | # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ 21 | 22 | # SECURITY WARNING: keep the secret key used in production secret! 23 | SECRET_KEY = 'd)3c35#-ay3a(6=lss#em#!t91x#7m+4h8m-uza6csew&9%ajn' 24 | 25 | # SECURITY WARNING: don't run with debug turned on in production! 
26 | DEBUG = True 27 | 28 | ALLOWED_HOSTS = [] 29 | 30 | 31 | # Application definition 32 | 33 | INSTALLED_APPS = [ 34 | 'django.contrib.admin', 35 | 'django.contrib.auth', 36 | 'django.contrib.contenttypes', 37 | 'django.contrib.sessions', 38 | 'django.contrib.messages', 39 | 'django.contrib.staticfiles', 40 | 'hello' 41 | ] 42 | 43 | MIDDLEWARE = [ 44 | 'django.middleware.security.SecurityMiddleware', 45 | 'django.contrib.sessions.middleware.SessionMiddleware', 46 | 'django.middleware.common.CommonMiddleware', 47 | 'django.middleware.csrf.CsrfViewMiddleware', 48 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 49 | 'django.contrib.messages.middleware.MessageMiddleware', 50 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 51 | 'beeline.middleware.django.HoneyMiddleware', 52 | ] 53 | 54 | ROOT_URLCONF = 'app.urls' 55 | 56 | TEMPLATES = [ 57 | { 58 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 59 | 'DIRS': [], 60 | 'APP_DIRS': True, 61 | 'OPTIONS': { 62 | 'context_processors': [ 63 | 'django.template.context_processors.debug', 64 | 'django.template.context_processors.request', 65 | 'django.contrib.auth.context_processors.auth', 66 | 'django.contrib.messages.context_processors.messages', 67 | ], 68 | }, 69 | }, 70 | ] 71 | 72 | WSGI_APPLICATION = 'app.wsgi.application' 73 | 74 | 75 | # Database 76 | # https://docs.djangoproject.com/en/2.1/ref/settings/#databases 77 | 78 | DATABASES = { 79 | # 'default': { 80 | # 'ENGINE': 'django.db.backends.sqlite3', 81 | # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), 82 | # } 83 | } 84 | 85 | 86 | # Password validation 87 | # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators 88 | 89 | AUTH_PASSWORD_VALIDATORS = [ 90 | { 91 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', 92 | }, 93 | { 94 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 95 | }, 96 | { 97 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', 98 | }, 99 | { 100 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', 101 | }, 102 | ] 103 | 104 | 105 | # Internationalization 106 | # https://docs.djangoproject.com/en/2.1/topics/i18n/ 107 | 108 | LANGUAGE_CODE = 'en-us' 109 | 110 | TIME_ZONE = 'UTC' 111 | 112 | USE_I18N = True 113 | 114 | USE_L10N = True 115 | 116 | USE_TZ = True 117 | 118 | 119 | # Static files (CSS, JavaScript, Images) 120 | # https://docs.djangoproject.com/en/2.1/howto/static-files/ 121 | 122 | STATIC_URL = '/static/' 123 | -------------------------------------------------------------------------------- /examples/django/app/app/urls.py: -------------------------------------------------------------------------------- 1 | """app URL Configuration 2 | 3 | The `urlpatterns` list routes URLs to views. For more information please see: 4 | https://docs.djangoproject.com/en/2.1/topics/http/urls/ 5 | Examples: 6 | Function views 7 | 1. Add an import: from my_app import views 8 | 2. Add a URL to urlpatterns: path('', views.home, name='home') 9 | Class-based views 10 | 1. Add an import: from other_app.views import Home 11 | 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') 12 | Including another URLconf 13 | 1. Import the include() function: from django.urls import include, path 14 | 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) 15 | """ 16 | from django.contrib import admin 17 | from django.urls import include, path 18 | 19 | urlpatterns = [ 20 | path("hello/", include("hello.urls")), 21 | path("admin/", admin.site.urls), 22 | ] -------------------------------------------------------------------------------- /examples/django/app/app/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for app project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings') 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /examples/django/app/hello/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/honeycombio/beeline-python/b8c96386964749c60bcd865d10f31148f970fec1/examples/django/app/hello/__init__.py -------------------------------------------------------------------------------- /examples/django/app/hello/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 4 | -------------------------------------------------------------------------------- /examples/django/app/hello/apps.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | import os 3 | 4 | from django.apps import AppConfig 5 | 6 | class HelloConfig(AppConfig): 7 | name = 'hello' 8 | 9 | def ready(self): 10 | beeline.init( 11 | # Get this via https://ui.honeycomb.io/account after signing up for Honeycomb 12 | writekey=os.environ.get("HONEYCOMB_API_KEY"), 13 | api_host=os.environ.get('HONEYCOMB_API_ENDPOINT', 'https://api.honeycomb.io:443'), 14 | # The name of your app is a good choice to start with 15 | # dataset='my-django-app', # only needed for classic 16 | service_name=os.environ.get('SERVICE_NAME', 'my-django-app'), 17 | debug=True, # enable to see telemetry in console 18 | ) -------------------------------------------------------------------------------- /examples/django/app/hello/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/honeycombio/beeline-python/b8c96386964749c60bcd865d10f31148f970fec1/examples/django/app/hello/migrations/__init__.py -------------------------------------------------------------------------------- /examples/django/app/hello/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | # Create your models here. 4 | -------------------------------------------------------------------------------- /examples/django/app/hello/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /examples/django/app/hello/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | 3 | from . 
import views 4 | 5 | urlpatterns = [ 6 | path("", views.index, name="index"), 7 | ] -------------------------------------------------------------------------------- /examples/django/app/hello/views.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | from django.http import HttpResponse 3 | 4 | 5 | @beeline.traced(name="hello_world") 6 | def index(request): 7 | message = "Hello World\n" 8 | beeline.add_trace_field('message', message) 9 | return HttpResponse(message) -------------------------------------------------------------------------------- /examples/django/app/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == '__main__': 6 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings') 7 | try: 8 | from django.core.management import execute_from_command_line 9 | except ImportError as exc: 10 | raise ImportError( 11 | "Couldn't import Django. Are you sure it's installed and " 12 | "available on your PYTHONPATH environment variable? Did you " 13 | "forget to activate a virtual environment?" 14 | ) from exc 15 | execute_from_command_line(sys.argv) 16 | -------------------------------------------------------------------------------- /examples/django/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "beeline-django-example" 3 | version = "0.1.0" 4 | description = "django example app using beeline-python" 5 | authors = ["honeycombio"] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.8" 9 | honeycomb-beeline = {path = "../..", develop = true} 10 | Django = "4.2.1" 11 | -------------------------------------------------------------------------------- /examples/flask/README.md: -------------------------------------------------------------------------------- 1 | # example flask app 2 | 3 | This simple Flask app uses auto-instrumentation and adds a manual span with trace context including the message of "Hello World". 4 | 5 | ## Prerequisites 6 | 7 | First set an environment variable `HONEYCOMB_API_KEY`, available from your account page. 8 | This will configure the server to send instrumentation events to Honeycomb in a dataset called my-flask-app. 9 | 10 | You'll also need [Poetry](https://python-poetry.org/) installed to run the example. Poetry automatically creates a virtual environment to run the example in so you don't need to manage one yourself. 11 | 12 | ## Running the example 13 | 14 | Install the dependencies: 15 | 16 | ```bash 17 | poetry install 18 | ``` 19 | 20 | Run the application: 21 | 22 | ```bash 23 | poetry run flask run 24 | ``` 25 | 26 | Now you can `curl` the app: 27 | 28 | ```bash 29 | $ curl localhost:5000 30 | Hello World 31 | ``` 32 | 33 | Check out the results in Honeycomb! 
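If the `HONEYCOMB_API_KEY` variable from the prerequisites isn't already exported in your shell, a minimal session might look like this sketch (the key value is a placeholder; substitute your own):

```bash
# placeholder key -- copy the real one from your Honeycomb account page
export HONEYCOMB_API_KEY=your-api-key-here
poetry run flask run
```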
34 | -------------------------------------------------------------------------------- /examples/flask/app.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | import os 3 | 4 | from beeline.middleware.flask import HoneyMiddleware 5 | from flask import Flask 6 | 7 | # Get this via https://ui.honeycomb.io/account after signing up for Honeycomb 8 | honeycomb_writekey=os.environ.get("HONEYCOMB_API_KEY") 9 | 10 | beeline.init( 11 | writekey=honeycomb_writekey, 12 | api_host=os.environ.get('HONEYCOMB_API_ENDPOINT', 'https://api.honeycomb.io:443'), 13 | # The name of your app is a good choice to start with 14 | # dataset='my-flask-app', # only needed for classic 15 | service_name=os.environ.get('SERVICE_NAME', 'my-flask-app'), 16 | debug=True, # enable to see telemetry in console 17 | ) 18 | 19 | # Pass your Flask app to HoneyMiddleware 20 | app = Flask(__name__) 21 | HoneyMiddleware(app, db_events=False) 22 | 23 | @app.route("/") 24 | def hello_world(): 25 | span = beeline.start_span(context={"name": "Preparing to greet the world"}) 26 | message = "Hello World" 27 | beeline.add_trace_field('message', message) 28 | beeline.finish_span(span) 29 | return message -------------------------------------------------------------------------------- /examples/flask/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "beeline-flask-example" 3 | version = "0.1.0" 4 | description = "flask example app using beeline-python" 5 | authors = ["honeycombio"] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.8" 9 | honeycomb-beeline = {path = "../..", develop = true} 10 | python-dotenv = "^0.14.0" 11 | MarkupSafe = "2.1" 12 | Flask = "2.1" 13 | -------------------------------------------------------------------------------- /examples/hello-world/README.md: -------------------------------------------------------------------------------- 1 | # hello world example 2 | 3 | This simple Python app adds a manual span with trace context including the message of "Hello World". 4 | 5 | ## Prerequisites 6 | 7 | First set an environment variable `HONEYCOMB_API_KEY`, available from your account page. 8 | This will configure the app to send instrumentation events to Honeycomb in a dataset called my-python-app. 9 | 10 | You'll also need [Poetry](https://python-poetry.org/) installed to run the example. Poetry automatically creates a virtual environment to run the example in so you don't need to manage one yourself. 11 | 12 | ## Running the example 13 | 14 | Install the dependencies: 15 | 16 | ```bash 17 | poetry install 18 | ``` 19 | 20 | Run the application: 21 | 22 | ```bash 23 | poetry run python3 app.py 24 | ``` 25 | 26 | Check out the results in Honeycomb!
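Beyond `HONEYCOMB_API_KEY`, the script honors the optional `SERVICE_NAME` and `HONEYCOMB_API_ENDPOINT` variables read in `app.py` below; one possible invocation (the values here are placeholders) is:

```bash
# only HONEYCOMB_API_KEY is required; SERVICE_NAME just overrides the default
export HONEYCOMB_API_KEY=your-api-key-here
SERVICE_NAME=my-python-app poetry run python3 app.py
```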
27 | -------------------------------------------------------------------------------- /examples/hello-world/app.py: -------------------------------------------------------------------------------- 1 | import beeline 2 | import os 3 | 4 | beeline.init( 5 | # Get this via https://ui.honeycomb.io/account after signing up for Honeycomb 6 | writekey=os.environ.get('HONEYCOMB_API_KEY'), 7 | api_host=os.environ.get('HONEYCOMB_API_ENDPOINT', 'https://api.honeycomb.io:443'), 8 | # The name of your app is a good choice to start with 9 | # dataset='my-python-app', # only needed for classic 10 | service_name=os.environ.get('SERVICE_NAME', 'my-python-app'), 11 | debug=True, # enable to see telemetry in console 12 | ) 13 | 14 | @beeline.traced(name='hello_world') 15 | def hello_world(): 16 | message = "Hello World\n" 17 | beeline.add_trace_field('message', message) 18 | print('hello world') 19 | 20 | hello_world() 21 | 22 | beeline.close() -------------------------------------------------------------------------------- /examples/hello-world/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "hello-world" 3 | version = "0.1.0" 4 | description = "print hello world" 5 | authors = ["honeycombio"] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.9" 9 | honeycomb-beeline = {path = "../..", develop = true} 10 | 11 | [build-system] 12 | requires = ["poetry-core>=1.0.0"] 13 | build-backend = "poetry.core.masonry.api" 14 | -------------------------------------------------------------------------------- /push_docs.sh: -------------------------------------------------------------------------------- 1 | # Unfortunately not as nice as godoc.org/rubydoc.info for now: https://www.pydoc.io/about/ 2 | set -e 3 | 4 | # set up git 5 | git config --global user.email "accounts+circleci@honeycomb.io" 6 | git config --global user.name "Honeycomb CI" 7 | 8 | # build and commit website files 9 | python setup.py install 10 | pip install pdoc 11 | PYTHONPATH=. 
pdoc beeline --html --html-dir=./docs 12 | 13 | # Check out orphan gh-pages branch, get it set up correctly 14 | git checkout --orphan gh-pages 15 | git reset 16 | git add docs/ 17 | git mv docs/beeline/*.html ./ 18 | git add .gitignore 19 | git clean -fd 20 | git commit -m "CircleCI build: $CIRCLE_BUILD_NUM" 21 | 22 | # Pushing via secure GITHUB_TOKEN in CircleCI project 23 | git remote add origin-pages https://${GITHUB_TOKEN}@github.com/honeycombio/beeline-python.git > /dev/null 2>&1 24 | git push --force --quiet --set-upstream origin-pages gh-pages 25 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "honeycomb-beeline" 3 | version = "3.6.0" # Update using bump2version 4 | description = "Honeycomb library for easy instrumentation" 5 | authors = ["Honeycomb.io "] 6 | license = "Apache-2.0" 7 | packages = [ 8 | { include = "beeline" } 9 | ] 10 | readme = "README.md" 11 | homepage = "https://github.com/honeycombio/beeline-python" 12 | repository = "https://github.com/honeycombio/beeline-python" 13 | 14 | [tool.poetry.dependencies] 15 | python = ">=3.7, <4" 16 | libhoney = "^2.4.0" 17 | wrapt = "^1.12.1" 18 | [tool.poetry.dev-dependencies] 19 | mock = "^5.0.2" 20 | coverage = "^7.2.7" 21 | pylint = [{version = "^2.13", python = ">=3.7,<4"}] 22 | django = [{version = "^3.2", python = ">= 3.7,<4"}] 23 | tornado = "^6.2" 24 | pycodestyle = "^2.10.0" 25 | bump2version = "^1.0.1" 26 | Flask = "2.2.5" 27 | 28 | [tool.poetry.scripts] 29 | tests = "beeline.test_suite:run_tests" 30 | --------------------------------------------------------------------------------
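Given the `tool.poetry.scripts` entry above, running the beeline's own test suite locally presumably comes down to something like this sketch (assumes Poetry and a supported Python version are installed):

```bash
# install the package plus dev dependencies, then invoke the `tests` console script
poetry install
poetry run tests
```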