├── .github ├── ISSUE_TEMPLATE │ ├── bug.yml │ ├── config.yml │ ├── feature-request.yml │ └── question.yml └── workflows │ ├── check_changelog.yml │ ├── daily_deps_test.yml │ └── main.yml ├── .gitignore ├── .hyperlint ├── .vale.ini └── styles │ ├── config │ └── vocabularies │ │ └── hyperlint │ │ └── accept.txt │ └── hyperlint │ └── repeatedWords.yml ├── .pre-commit-config.yaml ├── .vscode └── settings.json ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── README.md ├── docs ├── compliance.md ├── concepts.md ├── enterprise.md ├── extra │ └── tweaks.css ├── favicon.ico ├── favicon.png ├── guides │ ├── onboarding-checklist │ │ ├── add-auto-tracing.md │ │ ├── add-manual-tracing.md │ │ ├── add-metrics.md │ │ ├── index.md │ │ └── integrate.md │ └── web-ui │ │ ├── alerts.md │ │ ├── dashboards.md │ │ ├── explore.md │ │ └── live.md ├── help.md ├── how-to-guides │ ├── alternative-backends.md │ ├── alternative-clients.md │ ├── create-write-tokens.md │ ├── detect-service-is-down.md │ ├── distributed-tracing.md │ ├── environments.md │ ├── link-to-code-source.md │ ├── mcp-server.md │ ├── otel-collector.md │ ├── query-api.md │ ├── sampling.md │ ├── scrubbing.md │ └── suppress.md ├── images │ ├── cli │ │ ├── browser-screenshot-auth.png │ │ ├── terminal-screenshot-auth-1.png │ │ ├── terminal-screenshot-auth-2.png │ │ └── terminal-screenshot-inspect.png │ ├── compliance │ │ └── soc2_logo.png │ ├── guide │ │ ├── browser-alerts-create.png │ │ ├── browser-alerts-edit.png │ │ ├── browser-alerts-error.png │ │ ├── browser-alerts-full.png │ │ ├── browser-alerts-no-error.png │ │ ├── browser-create-project-button.png │ │ ├── browser-create-project-details.png │ │ ├── browser-create-project.png │ │ ├── browser-dashboard-chart-sql-query.png │ │ ├── browser-dashboard-chart-types.png │ │ ├── browser-dashboard.png │ │ ├── browser-explore-full.png │ │ ├── browser-explore-run-query.png │ │ ├── browser-integrate.png │ │ ├── direct-connect-credentials.png │ │ ├── environments.png │ │ ├── generator-break.png │ │ ├── generator-fine.png │ │ ├── link-to-github.gif │ │ ├── live-view-collapsed-annotated.png │ │ ├── live-view-details-panel-open-annotated.png │ │ ├── live-view-natural-language.png │ │ ├── live-view-reference.png │ │ ├── live-view-search.png │ │ ├── live-view-sql-box.png │ │ ├── live-view-start-here.png │ │ ├── manual-tracing-attribute-hello-world.png │ │ ├── manual-tracing-basic-closed-span.png │ │ ├── manual-tracing-basic.png │ │ ├── manual-tracing-default-levels.png │ │ ├── manual-tracing-explore-basic.png │ │ ├── manual-tracing-level-colors.png │ │ ├── manual-tracing-span-names.png │ │ ├── manual-tracing-traceback.png │ │ ├── terminal-create-project-full.png │ │ ├── terminal-create-project.png │ │ └── terminal-integrate-logging.png │ ├── index │ │ ├── logfire-screenshot-explore-query.png │ │ ├── logfire-screenshot-fastapi-200.png │ │ ├── logfire-screenshot-fastapi-422.png │ │ ├── logfire-screenshot-hello-world-age.png │ │ ├── logfire-screenshot-pydantic-manual.png │ │ ├── logfire-screenshot-pydantic-plugin.png │ │ └── logfire-screenshot-search-query.png │ ├── integrations │ │ ├── pydantic-ai │ │ │ └── pydanticai-instrumentation-screenshot.png │ │ └── use-cases │ │ │ └── web-frameworks │ │ │ └── logfire-screenshot-chart-percentiles.png │ ├── logfire-screenshot-anthropic-arguments.png │ ├── logfire-screenshot-anthropic-stream.png │ ├── logfire-screenshot-anthropic.png │ ├── logfire-screenshot-autotracing.png │ ├── logfire-screenshot-details-panel-variant.png │ ├── 
logfire-screenshot-details-panel.png │ ├── logfire-screenshot-examples-flask-sqlalchemy.png │ ├── logfire-screenshot-fastapi-arguments.png │ ├── logfire-screenshot-first-steps-example-trace.png │ ├── logfire-screenshot-first-steps-first-project.png │ ├── logfire-screenshot-first-steps-hello-world.png │ ├── logfire-screenshot-first-steps-load-files.png │ ├── logfire-screenshot-live-view.png │ ├── logfire-screenshot-magentic-create-superhero.png │ ├── logfire-screenshot-mirascope-anthropic-call.png │ ├── logfire-screenshot-mirascope-openai-extractor.png │ ├── logfire-screenshot-openai-agents-tools.png │ ├── logfire-screenshot-openai-agents.png │ ├── logfire-screenshot-openai-arguments.png │ ├── logfire-screenshot-openai-image-gen.png │ ├── logfire-screenshot-openai-stream.png │ ├── logfire-screenshot-openai.png │ ├── logfire-screenshot-spans.png │ └── logfire-screenshot-web-app.png ├── index.md ├── integrations │ ├── aws-lambda.md │ ├── databases │ │ ├── asyncpg.md │ │ ├── bigquery.md │ │ ├── mysql.md │ │ ├── psycopg.md │ │ ├── pymongo.md │ │ ├── redis.md │ │ ├── sqlalchemy.md │ │ └── sqlite3.md │ ├── event-streams │ │ ├── airflow.md │ │ ├── celery.md │ │ └── faststream.md │ ├── http-clients │ │ ├── aiohttp.md │ │ ├── httpx.md │ │ └── requests.md │ ├── index.md │ ├── llms │ │ ├── anthropic.md │ │ ├── litellm.md │ │ ├── llamaindex.md │ │ ├── magentic.md │ │ ├── mirascope.md │ │ ├── openai.md │ │ └── pydanticai.md │ ├── logging.md │ ├── loguru.md │ ├── pydantic.md │ ├── stripe.md │ ├── structlog.md │ ├── system-metrics.md │ └── web-frameworks │ │ ├── asgi.md │ │ ├── django.md │ │ ├── fastapi.md │ │ ├── flask.md │ │ ├── index.md │ │ ├── starlette.md │ │ └── wsgi.md ├── javascripts │ └── algolia-search.js ├── join-slack │ └── index.html ├── languages.md ├── logo-white.svg ├── overrides │ ├── main.html │ └── partials │ │ ├── search.html │ │ └── source.html ├── plugins │ ├── algolia.py │ └── main.py ├── reference │ ├── advanced │ │ ├── generators.md │ │ └── testing.md │ ├── api │ │ ├── exceptions.md │ │ ├── logfire.md │ │ ├── propagate.md │ │ ├── pydantic.md │ │ ├── sampling.md │ │ └── testing.md │ ├── cli.md │ ├── configuration.md │ ├── data-regions.md │ ├── examples.md │ └── organization-structure.md ├── release-notes.md ├── roadmap.md └── why.md ├── examples ├── javascript │ └── README.md └── python │ └── flask-sqlalchemy │ ├── README.MD │ ├── app │ ├── __init__.py │ ├── static │ │ └── styles.css │ └── templates │ │ ├── history.html │ │ └── index.html │ └── main.py ├── logfire-api ├── .gitignore ├── README.md ├── logfire_api │ ├── __init__.py │ ├── __init__.pyi │ ├── _internal │ │ ├── __init__.pyi │ │ ├── ast_utils.pyi │ │ ├── async_.pyi │ │ ├── auth.pyi │ │ ├── auto_trace │ │ │ ├── __init__.pyi │ │ │ ├── import_hook.pyi │ │ │ ├── rewrite_ast.pyi │ │ │ └── types.pyi │ │ ├── cli.pyi │ │ ├── collect_system_info.pyi │ │ ├── config.pyi │ │ ├── config_params.pyi │ │ ├── constants.pyi │ │ ├── db_statement_summary.pyi │ │ ├── exporters │ │ │ ├── __init__.pyi │ │ │ ├── console.pyi │ │ │ ├── dynamic_batch.pyi │ │ │ ├── logs.pyi │ │ │ ├── otlp.pyi │ │ │ ├── processor_wrapper.pyi │ │ │ ├── quiet_metrics.pyi │ │ │ ├── remove_pending.pyi │ │ │ ├── tail_sampling.pyi │ │ │ └── wrapper.pyi │ │ ├── formatter.pyi │ │ ├── instrument.pyi │ │ ├── integrations │ │ │ ├── __init__.pyi │ │ │ ├── aiohttp_client.pyi │ │ │ ├── asgi.pyi │ │ │ ├── asyncpg.pyi │ │ │ ├── aws_lambda.pyi │ │ │ ├── celery.pyi │ │ │ ├── django.pyi │ │ │ ├── executors.pyi │ │ │ ├── fastapi.pyi │ │ │ ├── flask.pyi │ │ │ ├── httpx.pyi │ │ │ ├── 
mcp.pyi │ │ │ ├── mysql.pyi │ │ │ ├── openai_agents.pyi │ │ │ ├── psycopg.pyi │ │ │ ├── pydantic_ai.pyi │ │ │ ├── pymongo.pyi │ │ │ ├── redis.pyi │ │ │ ├── requests.pyi │ │ │ ├── sqlalchemy.pyi │ │ │ ├── sqlite3.pyi │ │ │ ├── starlette.pyi │ │ │ ├── system_metrics.pyi │ │ │ └── wsgi.pyi │ │ ├── json_encoder.pyi │ │ ├── json_formatter.pyi │ │ ├── json_schema.pyi │ │ ├── json_types.pyi │ │ ├── logs.pyi │ │ ├── main.pyi │ │ ├── metrics.pyi │ │ ├── scrubbing.pyi │ │ ├── stack_info.pyi │ │ ├── tracer.pyi │ │ ├── ulid.pyi │ │ └── utils.pyi │ ├── cli.pyi │ ├── exceptions.pyi │ ├── experimental │ │ ├── __init__.pyi │ │ ├── annotations.pyi │ │ └── query_client.pyi │ ├── integrations │ │ ├── __init__.pyi │ │ ├── flask.pyi │ │ ├── httpx.pyi │ │ ├── logging.pyi │ │ ├── loguru.pyi │ │ ├── psycopg.pyi │ │ ├── pydantic.pyi │ │ ├── redis.pyi │ │ ├── sqlalchemy.pyi │ │ ├── structlog.pyi │ │ └── wsgi.pyi │ ├── propagate.pyi │ ├── py.typed │ ├── sampling │ │ ├── __init__.pyi │ │ └── _tail_sampling.pyi │ └── version.pyi └── pyproject.toml ├── logfire ├── __init__.py ├── __main__.py ├── _internal │ ├── __init__.py │ ├── ast_utils.py │ ├── async_.py │ ├── auth.py │ ├── auto_trace │ │ ├── __init__.py │ │ ├── import_hook.py │ │ ├── rewrite_ast.py │ │ └── types.py │ ├── cli.py │ ├── collect_system_info.py │ ├── config.py │ ├── config_params.py │ ├── constants.py │ ├── db_statement_summary.py │ ├── exporters │ │ ├── __init__.py │ │ ├── console.py │ │ ├── dynamic_batch.py │ │ ├── logs.py │ │ ├── otlp.py │ │ ├── processor_wrapper.py │ │ ├── quiet_metrics.py │ │ ├── remove_pending.py │ │ ├── test.py │ │ └── wrapper.py │ ├── formatter.py │ ├── instrument.py │ ├── integrations │ │ ├── __init__.py │ │ ├── aiohttp_client.py │ │ ├── asgi.py │ │ ├── asyncpg.py │ │ ├── aws_lambda.py │ │ ├── celery.py │ │ ├── django.py │ │ ├── executors.py │ │ ├── fastapi.py │ │ ├── flask.py │ │ ├── httpx.py │ │ ├── llm_providers │ │ │ ├── anthropic.py │ │ │ ├── llm_provider.py │ │ │ ├── openai.py │ │ │ └── types.py │ │ ├── mcp.py │ │ ├── mysql.py │ │ ├── openai_agents.py │ │ ├── psycopg.py │ │ ├── pydantic_ai.py │ │ ├── pymongo.py │ │ ├── redis.py │ │ ├── requests.py │ │ ├── sqlalchemy.py │ │ ├── sqlite3.py │ │ ├── starlette.py │ │ ├── system_metrics.py │ │ └── wsgi.py │ ├── json_encoder.py │ ├── json_formatter.py │ ├── json_schema.py │ ├── json_types.py │ ├── logs.py │ ├── main.py │ ├── metrics.py │ ├── scrubbing.py │ ├── stack_info.py │ ├── tracer.py │ ├── ulid.py │ └── utils.py ├── cli.py ├── exceptions.py ├── experimental │ ├── __init__.py │ ├── annotations.py │ └── query_client.py ├── integrations │ ├── __init__.py │ ├── flask.py │ ├── httpx.py │ ├── logging.py │ ├── loguru.py │ ├── psycopg.py │ ├── pydantic.py │ ├── redis.py │ ├── sqlalchemy.py │ ├── structlog.py │ └── wsgi.py ├── propagate.py ├── py.typed ├── sampling │ ├── __init__.py │ └── _tail_sampling.py ├── testing.py └── version.py ├── mkdocs.yml ├── pyodide_test ├── package-lock.json ├── package.json └── test.mjs ├── pyproject.toml ├── release ├── README.md ├── prepare.py ├── push.py └── shared.py ├── tests ├── __init__.py ├── aaa_query_client │ ├── README.md │ ├── cassettes │ │ └── test_query_client │ │ │ ├── test_query_params_async.yaml │ │ │ ├── test_query_params_sync.yaml │ │ │ ├── test_read_async.yaml │ │ │ └── test_read_sync.yaml │ └── test_query_client.py ├── auto_trace_samples │ ├── __init__.py │ ├── __main__.py │ ├── foo.py │ └── simple_nesting.py ├── conftest.py ├── exporters │ ├── test_dynamic_batch_span_processor.py │ ├── test_otlp_session.py │ ├── 
test_remove_pending.py │ └── test_retry_fewer_spans.py ├── import_used_for_tests │ ├── __init__.py │ ├── a │ │ ├── __init__.py │ │ └── b.py │ ├── internal_error_handling │ │ ├── __init__.py │ │ ├── internal_logfire_code_example.py │ │ └── user_code_example.py │ ├── module_with_getattr.py │ └── slow_async_callbacks_example.py ├── module_used_for_tests.py ├── otel_integrations │ ├── __init__.py │ ├── cassettes │ │ ├── test_openai │ │ │ └── test_responses_api.yaml │ │ ├── test_openai_agents │ │ │ ├── test_chat_completions.yaml │ │ │ ├── test_file_search.yaml │ │ │ ├── test_function_tool_exception.yaml │ │ │ ├── test_input_guardrails.yaml │ │ │ ├── test_responses.yaml │ │ │ ├── test_responses_simple.yaml │ │ │ └── test_voice_pipeline.yaml │ │ └── test_openai_agents_mcp │ │ │ └── test_mcp.yaml │ ├── django_test_project │ │ ├── __init__.py │ │ ├── django_test_app │ │ │ ├── __init__.py │ │ │ ├── admin.py │ │ │ ├── apps.py │ │ │ ├── migrations │ │ │ │ └── __init__.py │ │ │ ├── models.py │ │ │ ├── urls.py │ │ │ └── views.py │ │ ├── django_test_site │ │ │ ├── __init__.py │ │ │ ├── settings.py │ │ │ ├── urls.py │ │ │ └── wsgi.py │ │ └── manage.py │ ├── test_aiohttp_client.py │ ├── test_anthropic.py │ ├── test_anthropic_bedrock.py │ ├── test_asgi.py │ ├── test_asyncpg.py │ ├── test_aws_lambda.py │ ├── test_celery.py │ ├── test_django.py │ ├── test_fastapi.py │ ├── test_flask.py │ ├── test_httpx.py │ ├── test_mysql.py │ ├── test_openai.py │ ├── test_openai_agents.py │ ├── test_openai_agents_mcp.py │ ├── test_psycopg.py │ ├── test_pydantic_ai.py │ ├── test_pymongo.py │ ├── test_redis.py │ ├── test_requests.py │ ├── test_sqlalchemy.py │ ├── test_sqlite3.py │ ├── test_starlette.py │ ├── test_system_metrics.py │ └── test_wsgi.py ├── test_annotations.py ├── test_auto_trace.py ├── test_cli.py ├── test_collect_package_resources.py ├── test_configure.py ├── test_console_exporter.py ├── test_db_statement_summary.py ├── test_formatter.py ├── test_json_args.py ├── test_json_args_formatting.py ├── test_logfire.py ├── test_logfire_api.py ├── test_loguru.py ├── test_metrics.py ├── test_no_production.py ├── test_otel_logs.py ├── test_pydantic_plugin.py ├── test_sampling.py ├── test_secret_scrubbing.py ├── test_slow_async_callbacks.py ├── test_source_code_extraction.py ├── test_stdlib_logging.py ├── test_structlog.py ├── test_tail_sampling.py ├── test_testing.py ├── test_utils.py └── utils.py └── uv.lock /.github/ISSUE_TEMPLATE/bug.yml: -------------------------------------------------------------------------------- 1 | name: 🐛 Bug 2 | description: Report any issue with Logfire — SDK or the Platform 3 | labels: [Bug] 4 | 5 | body: 6 | - type: textarea 7 | id: description 8 | attributes: 9 | label: Description 10 | description: Please explain what you're seeing and what you would expect to see. 11 | validations: 12 | required: true 13 | 14 | - type: textarea 15 | id: version 16 | attributes: 17 | label: Python, Logfire & OS Versions, related packages (not required) 18 | description: | 19 | Which version of Python and Logfire are you using, which Operating System and with which OpenTelemetry packages? 
20 | 21 | **Don't worry if you can't run this command or don't have this information, we'll help you if we can.** 22 | 23 | Please run the following command in your terminal: 24 | 25 | ```bash 26 | logfire info 27 | ``` 28 | 29 | Or in Python run: 30 | 31 | ```python 32 | import logfire; print(logfire.logfire_info()) 33 | ``` 34 | 35 | render: TOML 36 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: 💬 Join Slack 4 | url: 'https://logfire.pydantic.dev/docs/help/#slack' 5 | about: Join the Logfire Slack to ask questions, get help and chat about Logfire 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.yml: -------------------------------------------------------------------------------- 1 | name: 💡 Feature Request 2 | description: Suggest a feature or new functionality for Logfire 3 | labels: [Feature Request] 4 | 5 | body: 6 | - type: textarea 7 | id: description 8 | attributes: 9 | label: Description 10 | description: "Tell us what you're thinking..." 11 | validations: 12 | required: true 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.yml: -------------------------------------------------------------------------------- 1 | name: ❓ Question 2 | description: Ask a question about anything related to Logfire 3 | labels: [Question] 4 | 5 | body: 6 | - type: textarea 7 | id: description 8 | attributes: 9 | label: Question 10 | description: How can we help? 11 | validations: 12 | required: true 13 | -------------------------------------------------------------------------------- /.github/workflows/check_changelog.yml: -------------------------------------------------------------------------------- 1 | name: Check Changelog 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - 'pyproject.toml' 7 | - 'CHANGELOG.md' 8 | 9 | jobs: 10 | check-changelog: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v4 16 | with: 17 | fetch-depth: 2 18 | 19 | - name: Setup Python 20 | uses: actions/setup-python@v5 21 | with: 22 | python-version: "3.12" 23 | 24 | - name: Install dependencies and check for file changes 25 | run: | 26 | for file in pyproject.toml CHANGELOG.md; do 27 | base_name=$(basename $file .${file##*.} | tr '[:upper:]' '[:lower:]') 28 | if git diff --name-only HEAD^ | grep -q "^$file$"; then 29 | echo "${base_name}_changed=true" >> $GITHUB_ENV 30 | else 31 | echo "${base_name}_changed=false" >> $GITHUB_ENV 32 | fi 33 | done 34 | 35 | - name: Verify version change and CHANGELOG.md update 36 | if: env.pyproject_changed == 'true' 37 | run: | 38 | VERSION_BEFORE=$(git show HEAD^:pyproject.toml | python -c "import tomllib, sys; print(tomllib.loads(sys.stdin.read())['project']['version'])") 39 | VERSION_AFTER=$(python -c "import tomllib; print(tomllib.load(open('pyproject.toml', 'rb'))['project']['version'])") 40 | if [ "$VERSION_BEFORE" != "$VERSION_AFTER" ] && [ "$changelog_changed" == "false" ]; then 41 | echo "Version changed in pyproject.toml but no changes in CHANGELOG.md" 42 | exit 1 43 | elif [ "$VERSION_BEFORE" != "$VERSION_AFTER" ] && [ "$changelog_changed" == "true" ]; then 44 | echo "Version changed and CHANGELOG.md updated." 45 | else 46 | echo "Version did not change. Changes to CHANGELOG.md not required." 47 | fi 48 | 49 | - name: No changes detected 50 | if: env.pyproject_changed == 'false' && env.changelog_changed == 'false' 51 | run: echo "No changes detected in pyproject.toml or CHANGELOG.md. Everything is fine." 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.env 2 | .DS_Store 3 | .logfire 4 | .python-version 5 | .venv 6 | /.coverage 7 | /scratch/ 8 | __pycache__ 9 | dist 10 | logfire_credentials.json 11 | site 12 | venv 13 | 14 | # stubgen 15 | out 16 | 17 | # Node/Cloudflare 18 | node_modules 19 | .wrangler 20 | -------------------------------------------------------------------------------- /.hyperlint/.vale.ini: -------------------------------------------------------------------------------- 1 | StylesPath = styles 2 | MinAlertLevel = suggestion 3 | Vocab = hyperlint 4 | SkippedScopes = script, style, pre, figure, code, code-block 5 | 6 | [*] 7 | BasedOnStyles = Vale, hyperlint 8 | -------------------------------------------------------------------------------- /.hyperlint/styles/config/vocabularies/hyperlint/accept.txt: -------------------------------------------------------------------------------- 1 | validator 2 | [Pp]ydantic 3 | validators 4 | [Mm]agentic 5 | namespace 6 | Hyperlint 7 | preprocess 8 | tokenization 9 | tokenizer 10 | API 11 | APIs 12 | SDKs 13 | SDK 14 | [Aa]sync 15 | [Ss]ync 16 | [Ll]ogfire 17 | Superset 18 | Grafana 19 | SQLAlchemy 20 | Conda 21 | uvicorn 22 | OTel 23 | DataFrames 24 | dataclasses 25 | Onboarding 26 | -------------------------------------------------------------------------------- /.hyperlint/styles/hyperlint/repeatedWords.yml: -------------------------------------------------------------------------------- 1 | extends: repetition 2 | message: "'%s' is repeated, did you mean to repeat this word?"
3 | level: error 4 | alpha: true 5 | tokens: 6 | - '[^\s]+' 7 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.3.0 4 | hooks: 5 | - id: no-commit-to-branch # prevent direct commits to main branch 6 | - id: check-yaml 7 | args: ["--unsafe"] 8 | - id: check-toml 9 | - id: end-of-file-fixer 10 | - id: trailing-whitespace 11 | 12 | - repo: local 13 | hooks: 14 | - id: ruff 15 | name: Ruff 16 | entry: make 17 | args: [lint] 18 | types: [python] 19 | language: system 20 | - id: ruff format 21 | name: Ruff Format 22 | entry: make 23 | args: [format] 24 | language: system 25 | types: [python] 26 | - id: pyright 27 | name: pyright 28 | entry: make 29 | language: system 30 | args: [typecheck] 31 | types: [python] 32 | pass_filenames: false 33 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true 7 | } 8 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to the Logfire SDK and docs 2 | 3 | We'd love anyone interested to contribute to the Logfire SDK and documentation. 4 | 5 | ## How to contribute 6 | 7 | 1. Fork and clone the repository 8 | 2. [Install uv](https://docs.astral.sh/uv/getting-started/installation/) 9 | 3. [Install pre-commit](https://pre-commit.com/#install) 10 | 4. Run `make install` to install dependencies 11 | 5. Run `make test` to run unit tests 12 | 6. Run `make format` to format code 13 | 7. Run `make lint` to lint code 14 | 8. run `make docs` to build docs and `make docs-serve` to serve docs locally 15 | 16 | You're now set up to start contributing! 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2023 - present Pydantic Services inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /docs/compliance.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | If you have any questions about compliance, feel free to [get in touch](help.md). 4 | 5 | ## SOC2 💡 6 | 7 | Logfire is SOC2 Type II certified. We did not receive any exceptions in our report. You can request a copy of our SOC2 8 | report by emailing: [legal@pydantic.dev](mailto:legal@pydantic.dev) 9 | 10 | ## HIPAA 11 | 12 | Logfire is [HIPAA](https://www.hhs.gov/hipaa/for-professionals/privacy/laws-regulations/index.html) compliant. We are able to offer Business Associate Agreements (BAAs) to customers 13 | on our enterprise plan. For details, please email: [sales@pydantic.dev](mailto:sales@pydantic.dev) 14 | -------------------------------------------------------------------------------- /docs/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/favicon.ico -------------------------------------------------------------------------------- /docs/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/favicon.png -------------------------------------------------------------------------------- /docs/guides/onboarding-checklist/index.md: -------------------------------------------------------------------------------- 1 | Welcome to the **Logfire** Onboarding Checklist! Whether you're starting a new project or integrating Logfire with an 2 | existing application, this guide is intended to help you quickly instrument your code and start sending as much data as possible to 3 | Logfire with as little development effort as possible. 4 | 5 | Once you've completed the checklist, you'll be collecting all the data necessary to monitor performance, identify and 6 | fix bugs, analyze user behavior, and make data-driven decisions. 7 | 8 | !!! note 9 | 10 | If you aren't familiar with traces and spans, start with the 11 | [Tracing with Spans](../../concepts.md) page. 12 | 13 | #### Logfire Onboarding Checklist 14 | 15 | * [ ] **[Integrate Logfire](integrate.md)**: Fully integrate Logfire with your logging system and the packages you are 16 | using. 17 | 18 | * [ ] **[Add Logfire manual tracing](add-manual-tracing.md)**: Enhance your tracing data by manually adding custom 19 | spans and logs to your code for more targeted data collection (a short sketch appears below). 20 | 21 | * [ ] **[Add Logfire auto-tracing](add-auto-tracing.md)**: Discover how to leverage Logfire's auto-tracing 22 | capabilities to automatically instrument your application with minimal code changes. 23 | 24 | * [ ] **[Add Logfire metrics](add-metrics.md)**: Learn how to create and use metrics to track and measure important 25 | aspects of your application's performance and behavior. 26 | 27 | We'll walk you through the checklist step by step, introducing relevant features and concepts as we go. While the main 28 | focus of this guide is on getting data into Logfire so you can leverage it in the future, we'll also provide an 29 | introduction to the Logfire Web UI and show you how to interact with the data you're generating. 30 | 31 | !!! note 32 | 33 | For a more comprehensive walkthrough of the Logfire Web UI and its features, you may be interested in our 34 | [Logfire Web UI Guide](../web-ui/live.md).
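As a small taste of the manual tracing step referenced in the checklist above, here is a minimal sketch using only the core `logfire.span` and `logfire.info` calls; the manual tracing guide covers these properly:

```python
import logfire

logfire.configure()

# A custom span wrapping a unit of work, with a log emitted inside it.
with logfire.span('process order {order_id}', order_id=123):
    logfire.info('order processed')
```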
35 | 36 | Let's get started! :rocket: 37 | -------------------------------------------------------------------------------- /docs/help.md: -------------------------------------------------------------------------------- 1 | --- 2 | hide: 3 | - toc 4 | --- 5 | 6 | # Getting help with Logfire 7 | 8 | If you need help getting started with **Pydantic Logfire** or with advanced usage, the following sources may be useful. 9 | 10 | ## :simple-slack: Slack 11 | 12 | The [Pydantic Logfire Slack][slack] is a great place to ask questions, get help, and chat about Logfire. 13 | 14 | ## :simple-github: GitHub Issues 15 | 16 | The [Logfire GitHub Issues][github-issues] are a great place to ask questions and give us feedback. 17 | 18 | ## :material-help: Documentation 19 | 20 | The [usage documentation](index.md) is the most complete guide on how to get started with **Pydantic Logfire**. 21 | 22 | ## :material-api: SDK API Documentation 23 | 24 | The [SDK API documentation](reference/api/logfire.md) gives reference docs for the **Logfire** SDK. 25 | 26 | ## :material-email: Email 27 | 28 | You can also email us at [engineering@pydantic.dev](mailto:engineering@pydantic.dev). 29 | 30 | [slack]: join-slack/index.html 31 | [github-issues]: https://github.com/pydantic/logfire/issues 32 | -------------------------------------------------------------------------------- /docs/how-to-guides/create-write-tokens.md: -------------------------------------------------------------------------------- 1 | To send data to **Logfire**, you need to create a write token. 2 | A write token is a unique identifier that allows you to send data to a specific **Logfire** project. 3 | If you set up Logfire according to the [getting started guide](../index.md), you already have a write token locally tied to the project you created. 4 | But if you want to configure other computers to write to that project, for example in a deployed application, you need to create a new write token. 5 | 6 | You can create a write token by following these steps: 7 | 8 | 1. Open the **Logfire** web interface at [logfire.pydantic.dev](https://logfire.pydantic.dev). 9 | 2. Select your project from the **Projects** section on the left-hand side of the page. 10 | 3. Click on the ⚙️ **Settings** tab in the top right corner of the page. 11 | 4. Select the **{} Write tokens** tab from the left-hand menu. 12 | 5. Click on the **Create write token** button. 13 | 14 | After creating the write token, you'll see a dialog with the token value. 15 | **Copy this value and store it securely; it will not be shown again.** 16 | 17 | Now you can use this write token to send data to your **Logfire** project from any computer or application. 18 | 19 | We recommend you inject your write token via environment variables in your deployed application. 20 | Set the token as the value of the `LOGFIRE_TOKEN` environment variable, and Logfire will automatically use it to send data to your project. 21 | 22 | ## Setting `send_to_logfire='if-token-present'` 23 | 24 | You may not want to send data to Logfire during local development, but still want the option to send it in production without changing your code. 25 | To do this, we provide the `send_to_logfire='if-token-present'` parameter in the `logfire.configure()` function. 26 | If you set it to `'if-token-present'`, the SDK will only send data to Logfire if a write token is present in the `LOGFIRE_TOKEN` environment variable or saved locally. 27 | If you run tests in CI, no data will be sent.
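For example, a minimal sketch of this setup (nothing beyond the documented `send_to_logfire` parameter is assumed) might look like:

```python
import logfire

# Data is only sent when LOGFIRE_TOKEN is set (e.g. in production)
# or a locally saved token exists; otherwise nothing is exported.
logfire.configure(send_to_logfire='if-token-present')

logfire.info('service started')
```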
28 | 29 | You can also set the environment variable `LOGFIRE_SEND_TO_LOGFIRE` to configure this option. 30 | For example, you can set it to `LOGFIRE_SEND_TO_LOGFIRE=true` in your deployed application and `LOGFIRE_SEND_TO_LOGFIRE=false` in your tests setup. 31 | -------------------------------------------------------------------------------- /docs/how-to-guides/link-to-code-source.md: -------------------------------------------------------------------------------- 1 | We support linking to the source code on GitHub, GitLab, and any other VCS provider that uses the same URL format. 2 | 3 | ![Link to GitHub](../images/guide/link-to-github.gif) 4 | 5 | ## Usage 6 | 7 | Here's an example: 8 | 9 | ```python 10 | import logfire 11 | 12 | logfire.configure( 13 | code_source=logfire.CodeSource( 14 | repository='https://github.com/pydantic/logfire', #(1)! 15 | revision='', #(2)! 16 | root_path='path/within/repo', #(3)! 17 | ) 18 | ) 19 | ``` 20 | 21 | 1. The URL of the repository e.g. `https://github.com/pydantic/logfire`. 22 | 2. The specific branch, tag, or commit hash to link to e.g. `main`. 23 | 3. The path from the root of the repository to the current working directory of the process. If your code is in a 24 | subdirectory of your repo, you can specify it here. Otherwise you can probably omit this. 25 | 26 | You can learn more in our [`logfire.CodeSource`][logfire.CodeSource] API reference. 27 | 28 | ## Alternative Configuration 29 | 30 | For other OpenTelemetry SDKs, you can configure these settings using resource attributes, e.g. by setting the 31 | [`OTEL_RESOURCE_ATTRIBUTES`][otel-resource-attributes] environment variable: 32 | 33 | ``` 34 | OTEL_RESOURCE_ATTRIBUTES=vcs.repository.url.full=https://github.com/pydantic/platform 35 | OTEL_RESOURCE_ATTRIBUTES=${OTEL_RESOURCE_ATTRIBUTES},vcs.repository.ref.revision=main 36 | OTEL_RESOURCE_ATTRIBUTES=${OTEL_RESOURCE_ATTRIBUTES},vcs.root.path=path/within/repo 37 | ``` 38 | 39 | [otel-resource-attributes]: https://opentelemetry.io/docs/specs/otel/configuration/sdk-environment-variables/#general-sdk-configuration 40 | -------------------------------------------------------------------------------- /docs/images/cli/browser-screenshot-auth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/cli/browser-screenshot-auth.png -------------------------------------------------------------------------------- /docs/images/cli/terminal-screenshot-auth-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/cli/terminal-screenshot-auth-1.png -------------------------------------------------------------------------------- /docs/images/cli/terminal-screenshot-auth-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/cli/terminal-screenshot-auth-2.png -------------------------------------------------------------------------------- /docs/images/cli/terminal-screenshot-inspect.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/cli/terminal-screenshot-inspect.png 
-------------------------------------------------------------------------------- /docs/images/compliance/soc2_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/compliance/soc2_logo.png -------------------------------------------------------------------------------- /docs/images/guide/browser-alerts-create.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-alerts-create.png -------------------------------------------------------------------------------- /docs/images/guide/browser-alerts-edit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-alerts-edit.png -------------------------------------------------------------------------------- /docs/images/guide/browser-alerts-error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-alerts-error.png -------------------------------------------------------------------------------- /docs/images/guide/browser-alerts-full.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-alerts-full.png -------------------------------------------------------------------------------- /docs/images/guide/browser-alerts-no-error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-alerts-no-error.png -------------------------------------------------------------------------------- /docs/images/guide/browser-create-project-button.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-create-project-button.png -------------------------------------------------------------------------------- /docs/images/guide/browser-create-project-details.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-create-project-details.png -------------------------------------------------------------------------------- /docs/images/guide/browser-create-project.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-create-project.png -------------------------------------------------------------------------------- /docs/images/guide/browser-dashboard-chart-sql-query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-dashboard-chart-sql-query.png 
-------------------------------------------------------------------------------- /docs/images/guide/browser-dashboard-chart-types.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-dashboard-chart-types.png -------------------------------------------------------------------------------- /docs/images/guide/browser-dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-dashboard.png -------------------------------------------------------------------------------- /docs/images/guide/browser-explore-full.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-explore-full.png -------------------------------------------------------------------------------- /docs/images/guide/browser-explore-run-query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-explore-run-query.png -------------------------------------------------------------------------------- /docs/images/guide/browser-integrate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/browser-integrate.png -------------------------------------------------------------------------------- /docs/images/guide/direct-connect-credentials.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/direct-connect-credentials.png -------------------------------------------------------------------------------- /docs/images/guide/environments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/environments.png -------------------------------------------------------------------------------- /docs/images/guide/generator-break.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/generator-break.png -------------------------------------------------------------------------------- /docs/images/guide/generator-fine.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/generator-fine.png -------------------------------------------------------------------------------- /docs/images/guide/link-to-github.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/link-to-github.gif -------------------------------------------------------------------------------- /docs/images/guide/live-view-collapsed-annotated.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/live-view-collapsed-annotated.png -------------------------------------------------------------------------------- /docs/images/guide/live-view-details-panel-open-annotated.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/live-view-details-panel-open-annotated.png -------------------------------------------------------------------------------- /docs/images/guide/live-view-natural-language.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/live-view-natural-language.png -------------------------------------------------------------------------------- /docs/images/guide/live-view-reference.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/live-view-reference.png -------------------------------------------------------------------------------- /docs/images/guide/live-view-search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/live-view-search.png -------------------------------------------------------------------------------- /docs/images/guide/live-view-sql-box.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/live-view-sql-box.png -------------------------------------------------------------------------------- /docs/images/guide/live-view-start-here.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/live-view-start-here.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-attribute-hello-world.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-attribute-hello-world.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-basic-closed-span.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-basic-closed-span.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-basic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-basic.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-default-levels.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-default-levels.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-explore-basic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-explore-basic.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-level-colors.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-level-colors.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-span-names.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-span-names.png -------------------------------------------------------------------------------- /docs/images/guide/manual-tracing-traceback.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/manual-tracing-traceback.png -------------------------------------------------------------------------------- /docs/images/guide/terminal-create-project-full.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/terminal-create-project-full.png -------------------------------------------------------------------------------- /docs/images/guide/terminal-create-project.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/terminal-create-project.png -------------------------------------------------------------------------------- /docs/images/guide/terminal-integrate-logging.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/guide/terminal-integrate-logging.png -------------------------------------------------------------------------------- /docs/images/index/logfire-screenshot-explore-query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/index/logfire-screenshot-explore-query.png -------------------------------------------------------------------------------- /docs/images/index/logfire-screenshot-fastapi-200.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/index/logfire-screenshot-fastapi-200.png -------------------------------------------------------------------------------- 
/docs/images/index/logfire-screenshot-fastapi-422.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/index/logfire-screenshot-fastapi-422.png -------------------------------------------------------------------------------- /docs/images/index/logfire-screenshot-hello-world-age.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/index/logfire-screenshot-hello-world-age.png -------------------------------------------------------------------------------- /docs/images/index/logfire-screenshot-pydantic-manual.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/index/logfire-screenshot-pydantic-manual.png -------------------------------------------------------------------------------- /docs/images/index/logfire-screenshot-pydantic-plugin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/index/logfire-screenshot-pydantic-plugin.png -------------------------------------------------------------------------------- /docs/images/index/logfire-screenshot-search-query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/index/logfire-screenshot-search-query.png -------------------------------------------------------------------------------- /docs/images/integrations/pydantic-ai/pydanticai-instrumentation-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/integrations/pydantic-ai/pydanticai-instrumentation-screenshot.png -------------------------------------------------------------------------------- /docs/images/integrations/use-cases/web-frameworks/logfire-screenshot-chart-percentiles.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/integrations/use-cases/web-frameworks/logfire-screenshot-chart-percentiles.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-anthropic-arguments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-anthropic-arguments.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-anthropic-stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-anthropic-stream.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-anthropic.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-anthropic.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-autotracing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-autotracing.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-details-panel-variant.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-details-panel-variant.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-details-panel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-details-panel.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-examples-flask-sqlalchemy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-examples-flask-sqlalchemy.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-fastapi-arguments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-fastapi-arguments.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-first-steps-example-trace.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-first-steps-example-trace.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-first-steps-first-project.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-first-steps-first-project.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-first-steps-hello-world.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-first-steps-hello-world.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-first-steps-load-files.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-first-steps-load-files.png -------------------------------------------------------------------------------- 
/docs/images/logfire-screenshot-live-view.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-live-view.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-magentic-create-superhero.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-magentic-create-superhero.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-mirascope-anthropic-call.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-mirascope-anthropic-call.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-mirascope-openai-extractor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-mirascope-openai-extractor.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-openai-agents-tools.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-openai-agents-tools.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-openai-agents.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-openai-agents.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-openai-arguments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-openai-arguments.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-openai-image-gen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-openai-image-gen.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-openai-stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-openai-stream.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-openai.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-openai.png 
-------------------------------------------------------------------------------- /docs/images/logfire-screenshot-spans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-spans.png -------------------------------------------------------------------------------- /docs/images/logfire-screenshot-web-app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/docs/images/logfire-screenshot-web-app.png -------------------------------------------------------------------------------- /docs/integrations/aws-lambda.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # AWS Lambda 6 | 7 | The [`logfire.instrument_aws_lambda`][logfire.Logfire.instrument_aws_lambda] function can be used to 8 | instrument AWS Lambda functions to automatically send traces to **Logfire**. 9 | 10 | ## Installation 11 | 12 | Install `logfire` with the `aws-lambda` extra: 13 | 14 | {{ install_logfire(extras=['aws-lambda']) }} 15 | 16 | ## Usage 17 | 18 | To instrument an AWS Lambda function, call the `logfire.instrument_aws_lambda` function after defining 19 | the handler function: 20 | 21 | ```python 22 | import logfire 23 | 24 | logfire.configure() # (1)! 25 | 26 | 27 | def handler(event, context): 28 | return 'Hello from Lambda' 29 | 30 | logfire.instrument_aws_lambda(handler) 31 | ``` 32 | 33 | 1. Remember to set the `LOGFIRE_TOKEN` environment variable on your Lambda function configuration. 34 | 35 | [`logfire.instrument_aws_lambda`][logfire.Logfire.instrument_aws_lambda] uses the **OpenTelemetry AWS Lambda Instrumentation** package, 36 | which you can find more information about [here][opentelemetry-aws-lambda]. 37 | 38 | [opentelemetry-aws-lambda]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/aws_lambda/aws_lambda.html 39 | -------------------------------------------------------------------------------- /docs/integrations/databases/bigquery.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: built-in 3 | --- 4 | 5 | # BigQuery 6 | 7 | The [Google Cloud BigQuery Python client library][bigquery-pypi] is instrumented with OpenTelemetry out of the box, 8 | and all the extra dependencies are already included with **Logfire** by default, so you only need to call `logfire.configure()`. 9 | 10 | ??? question "What if I don't want to instrument BigQuery?" 11 | Since BigQuery automatically instruments itself, you need to opt-out of instrumentation 12 | if you don't want to use it. 13 | 14 | To do it, you'll need to call [`logfire.suppress_scopes()`][logfire.Logfire.suppress_scopes] 15 | with the scope `google.cloud.bigquery.opentelemetry_tracing`. 
16 | 17 | ```python 18 | import logfire 19 | 20 | logfire.configure() 21 | logfire.suppress_scopes("google.cloud.bigquery.opentelemetry_tracing") 22 | ``` 23 | 24 | 25 | Let's see an example: 26 | 27 | ```python 28 | from google.cloud import bigquery 29 | 30 | import logfire 31 | 32 | logfire.configure() 33 | 34 | client = bigquery.Client() 35 | query = """ 36 | SELECT name 37 | FROM `bigquery-public-data.usa_names.usa_1910_2013` 38 | WHERE state = "TX" 39 | LIMIT 100 40 | """ 41 | query_job = client.query(query) 42 | print(list(query_job.result())) 43 | ``` 44 | 45 | You can find more information about the BigQuery Python client library in the [official documentation][bigquery]. 46 | 47 | [bigquery]: https://cloud.google.com/python/docs/reference/bigquery/latest 48 | [bigquery-pypi]: https://pypi.org/project/google-cloud-bigquery/ 49 | -------------------------------------------------------------------------------- /docs/integrations/databases/sqlalchemy.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | The [`logfire.instrument_sqlalchemy()`][logfire.Logfire.instrument_sqlalchemy] method will create a span for every query executed by a [SQLAlchemy][sqlalchemy] engine. 6 | 7 | ## Installation 8 | 9 | Install `logfire` with the `sqlalchemy` extra: 10 | 11 | {{ install_logfire(extras=['sqlalchemy']) }} 12 | 13 | ## Usage 14 | 15 | Let's see a minimal example below. You can run it with `python main.py`: 16 | 17 | ```py title="main.py" 18 | import logfire 19 | from sqlalchemy import create_engine 20 | 21 | logfire.configure() 22 | 23 | engine = create_engine("sqlite:///:memory:") 24 | logfire.instrument_sqlalchemy(engine=engine) 25 | ``` 26 | 27 | The keyword arguments of `logfire.instrument_sqlalchemy()` are passed to the `SQLAlchemyInstrumentor().instrument()` method of the OpenTelemetry SQLAlchemy Instrumentation package, read more about it [here][opentelemetry-sqlalchemy]. 28 | 29 | !!! tip 30 | If you use [SQLModel][sqlmodel], you can use the same `SQLAlchemyInstrumentor` to instrument it. 31 | 32 | [opentelemetry-sqlalchemy]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/sqlalchemy/sqlalchemy.html 33 | [sqlalchemy]: https://www.sqlalchemy.org/ 34 | [sqlmodel]: https://sqlmodel.tiangolo.com/ 35 | -------------------------------------------------------------------------------- /docs/integrations/event-streams/faststream.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: built-in 3 | --- 4 | 5 | # FastStream 6 | 7 | To instrument [FastStream][faststream] with OpenTelemetry, you need to: 8 | 9 | 1. Call `logfire.configure()`. 10 | 2. Add the needed middleware according to your broker. 11 | 12 | Let's see an example: 13 | 14 | ```python title="main.py" 15 | from faststream import FastStream 16 | from faststream.redis import RedisBroker 17 | from faststream.redis.opentelemetry import RedisTelemetryMiddleware 18 | 19 | import logfire 20 | 21 | logfire.configure() 22 | 23 | broker = RedisBroker(middlewares=(RedisTelemetryMiddleware(),)) 24 | 25 | app = FastStream(broker) 26 | 27 | 28 | @broker.subscriber("test-channel") 29 | @broker.publisher("another-channel") 30 | async def handle(): 31 | return "Hi!" 32 | 33 | 34 | @broker.subscriber("another-channel") 35 | async def handle_next(msg: str): 36 | assert msg == "Hi!" 
37 | 38 | 39 | @app.after_startup 40 | async def test(): 41 | await broker.publish("", channel="test-channel") 42 | ``` 43 | 44 | Since we are using Redis, we added the [`RedisTelemetryMiddleware`][faststream.redis.opentelemetry.RedisTelemetryMiddleware] 45 | to the broker. In case you use a different broker, you need to add the corresponding middleware. 46 | 47 | See more about FastStream OpenTelemetry integration in [their documentation][faststream-otel]. 48 | 49 | [faststream]: https://faststream.airt.ai/latest/ 50 | [faststream-otel]: https://faststream.airt.ai/latest/getting-started/opentelemetry/#faststream-tracing 51 | -------------------------------------------------------------------------------- /docs/integrations/http-clients/aiohttp.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # AIOHTTP Client 6 | 7 | [AIOHTTP][aiohttp] is an asynchronous HTTP client/server framework for asyncio and Python. 8 | 9 | The [`logfire.instrument_aiohttp_client()`][logfire.Logfire.instrument_aiohttp_client] method will create a span for every request made by your AIOHTTP clients. 10 | 11 | !!! question "What about AIOHTTP Server?" 12 | The AIOHTTP server instrumentation is not supported yet. You can track the progress [here][aiohttp-server]. 13 | 14 | ## Installation 15 | 16 | Install `logfire` with the `aiohttp` extra: 17 | 18 | {{ install_logfire(extras=['aiohttp']) }} 19 | 20 | ## Usage 21 | 22 | Let's see a minimal example below. You can run it with `python main.py`: 23 | 24 | ```py title="main.py" 25 | import logfire 26 | import aiohttp 27 | 28 | 29 | logfire.configure() 30 | logfire.instrument_aiohttp_client() 31 | 32 | 33 | async def main(): 34 | async with aiohttp.ClientSession() as session: 35 | await session.get("https://httpbin.org/get") 36 | 37 | 38 | if __name__ == "__main__": 39 | import asyncio 40 | 41 | asyncio.run(main()) 42 | ``` 43 | 44 | The keyword arguments of `logfire.instrument_aiohttp_client()` are passed to the `AioHttpClientInstrumentor().instrument()` method of the OpenTelemetry aiohttp client Instrumentation package, read more about it [here][opentelemetry-aiohttp]. 45 | 46 | ## Hiding sensitive URL parameters 47 | 48 | The `url_filter` keyword argument can be used to modify the URL that's recorded in spans. Here's an example of how to use this to redact query parameters: 49 | 50 | ```python 51 | from yarl import URL 52 | 53 | def mask_url(url: URL) -> str: 54 | sensitive_keys = {"username", "password", "token", "api_key", "api_secret", "apikey"} 55 | masked_query = {key: "*****" if key in sensitive_keys else value for key, value in url.query.items()} 56 | return str(url.with_query(masked_query)) 57 | 58 | logfire.instrument_aiohttp_client(url_filter=mask_url) 59 | ``` 60 | 61 | [aiohttp]: https://docs.aiohttp.org/en/stable/ 62 | [aiohttp-server]: https://github.com/open-telemetry/opentelemetry-python-contrib/issues/501 63 | [opentelemetry-aiohttp]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/aiohttp_client/aiohttp_client.html 64 | -------------------------------------------------------------------------------- /docs/integrations/http-clients/requests.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # Requests 6 | 7 | The [`logfire.instrument_requests()`][logfire.Logfire.instrument_requests] method can be used to 8 | instrument [`requests`][requests] with **Logfire**. 
9 | 10 | ## Installation 11 | 12 | Install `logfire` with the `requests` extra: 13 | 14 | {{ install_logfire(extras=['requests']) }} 15 | 16 | ## Usage 17 | 18 | ```py title="main.py" 19 | import logfire 20 | import requests 21 | 22 | logfire.configure() 23 | logfire.instrument_requests() 24 | 25 | requests.get("https://httpbin.org/get") 26 | ``` 27 | 28 | [`logfire.instrument_requests()`][logfire.Logfire.instrument_requests] uses the 29 | **OpenTelemetry requests Instrumentation** package, 30 | which you can find more information about [here][opentelemetry-requests]. 31 | 32 | [opentelemetry-requests]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/requests/requests.html 33 | [requests]: https://docs.python-requests.org/en/master/ 34 | -------------------------------------------------------------------------------- /docs/integrations/llms/litellm.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: third-party 3 | --- 4 | 5 | LiteLLM allows you to call over 100 Large Language Models (LLMs) using the same input/output format. It also supports Logfire for logging and monitoring. 6 | 7 | To integrate Logfire with LiteLLM: 8 | 9 | 1. Set the `LOGFIRE_TOKEN` environment variable. 10 | 2. Add `logfire` to the callbacks of LiteLLM. 11 | 12 | For more details, [check the official LiteLLM documentation.](https://docs.litellm.ai/docs/observability/logfire_integration) 13 | -------------------------------------------------------------------------------- /docs/integrations/llms/pydanticai.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: logfire 3 | --- 4 | 5 | **Logfire** supports instrumenting [PydanticAI](https://ai.pydantic.dev/) with the 6 | [`logfire.instrument_pydantic_ai()`][logfire.Logfire.instrument_pydantic_ai] method: 7 | 8 | ```python hl_lines="5" 9 | import logfire 10 | from pydantic_ai import Agent, RunContext 11 | 12 | logfire.configure() 13 | logfire.instrument_pydantic_ai() 14 | 15 | roulette_agent = Agent( 16 | 'openai:gpt-4o', 17 | deps_type=int, 18 | result_type=bool, 19 | system_prompt=( 20 | 'Use the `roulette_wheel` function to see if the ' 21 | 'customer has won based on the number they provide.' 22 | ), 23 | ) 24 | 25 | 26 | @roulette_agent.tool 27 | async def roulette_wheel(ctx: RunContext[int], square: int) -> str: 28 | """check if the square is a winner""" 29 | return 'winner' if square == ctx.deps else 'loser' 30 | 31 | 32 | # Run the agent 33 | success_number = 18 34 | result = roulette_agent.run_sync('Put my money on square eighteen', deps=success_number) 35 | print(result.data) 36 | #> True 37 | 38 | result = roulette_agent.run_sync('I bet five is the winner', deps=success_number) 39 | print(result.data) 40 | #> False 41 | ``` 42 | 43 | The above example displays like this in **Logfire**: 44 | 45 | ![Logfire PydanticAI Instrumentation](../../images/integrations/pydantic-ai/pydanticai-instrumentation-screenshot.png) 46 | 47 | 48 | You can use PydanticAI with a [large variety of LLMs](https://ai.pydantic.dev/api/models/base/#pydantic_ai.models.KnownModelName); the example 49 | just happens to use `gpt-4o`. 50 | 51 | You can also instrument a specific agent with `logfire.instrument_pydantic_ai(agent)`. 52 | 53 | For more information, see the [`logfire.instrument_pydantic_ai()`][logfire.Logfire.instrument_pydantic_ai] 54 | reference or the [PydanticAI docs on instrumenting](https://ai.pydantic.dev/logfire/) with **Logfire**.
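As noted just above, instrumentation can also be scoped to a single agent rather than enabled globally. A minimal sketch of that variant (the agent definition is only illustrative; it uses the same `Agent` class as the example above):

```python
import logfire
from pydantic_ai import Agent

logfire.configure()

agent = Agent('openai:gpt-4o')

# Only this agent is instrumented; other agents are left untouched.
logfire.instrument_pydantic_ai(agent)
```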
55 | -------------------------------------------------------------------------------- /docs/integrations/logging.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: logfire 3 | --- 4 | 5 | # Standard Library Logging 6 | 7 | **Logfire** can act as a sink for [standard library logging][logging] by emitting a **Logfire** log for 8 | every standard library log record. 9 | 10 | ```py title="main.py" 11 | from logging import basicConfig, getLogger 12 | 13 | import logfire 14 | 15 | logfire.configure() 16 | basicConfig(handlers=[logfire.LogfireLoggingHandler()]) 17 | 18 | logger = getLogger(__name__) 19 | 20 | logger.error("Hello %s!", "Fred") 21 | # 10:05:06.855 Hello Fred! 22 | ``` 23 | 24 | ## Oh no! Too many logs from... 25 | 26 | A common issue with logging is that it can be **too verbose**... Right? :sweat_smile: 27 | 28 | Don't worry! We are here to help you. 29 | 30 | In those cases, you can set the log level to a higher value to suppress logs that are less important. 31 | Let's see an example with the [`apscheduler`](https://apscheduler.readthedocs.io/en/3.x/) logger: 32 | 33 | ```py title="main.py" 34 | import logging 35 | 36 | logger = logging.getLogger("apscheduler") 37 | logger.setLevel(logging.WARNING) 38 | ``` 39 | 40 | In this example, we set the log level of the `apscheduler` logger to `WARNING`, which means that 41 | only logs with a level of `WARNING` or higher will be emitted. 42 | 43 | [logging]: https://docs.python.org/3/library/logging.html 44 | -------------------------------------------------------------------------------- /docs/integrations/loguru.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: logfire 3 | --- 4 | 5 | # Loguru 6 | 7 | **Logfire** can act as a sink for [Loguru][loguru] by emitting a **Logfire** log for every log record. For example: 8 | 9 | ```py title="main.py" 10 | import logfire 11 | from loguru import logger 12 | 13 | logfire.configure() 14 | 15 | logger.configure(handlers=[logfire.loguru_handler()]) 16 | logger.info('Hello, {name}!', name='World') 17 | ``` 18 | 19 | !!! note 20 | Currently, **Logfire** will not scrub sensitive data from the message formatted by Loguru, e.g: 21 | 22 | ```python 23 | logger.info('Foo: {bar}', bar='secret_value') 24 | # > 14:58:26.085 Foo: secret_value 25 | ``` 26 | 27 | [loguru]: https://github.com/Delgan/loguru 28 | -------------------------------------------------------------------------------- /docs/integrations/structlog.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: logfire 3 | --- 4 | 5 | # Structlog 6 | 7 | **Logfire** has a built-in [structlog][structlog] processor that can be used to emit Logfire logs for every structlog event. 
8 | 9 | ```py title="main.py" hl_lines="6 15" 10 | from dataclasses import dataclass 11 | 12 | import structlog 13 | import logfire 14 | 15 | logfire.configure() 16 | 17 | structlog.configure( 18 | processors=[ 19 | structlog.contextvars.merge_contextvars, 20 | structlog.processors.add_log_level, 21 | structlog.processors.StackInfoRenderer(), 22 | structlog.dev.set_exc_info, 23 | structlog.processors.TimeStamper(fmt='%Y-%m-%d %H:%M:%S', utc=False), 24 | logfire.StructlogProcessor(), 25 | structlog.dev.ConsoleRenderer(), 26 | ], 27 | ) 28 | logger = structlog.get_logger() 29 | 30 | 31 | @dataclass 32 | class User: 33 | id: int 34 | name: str 35 | 36 | 37 | logger.info('Login', user=User(id=42, name='Fred')) 38 | #> 2024-03-22 12:57:33 [info ] Login user=User(id=42, name='Fred') 39 | ``` 40 | 41 | The **Logfire** processor **MUST** come before the last processor that renders the logs in the structlog configuration. 42 | 43 | By default, [`LogfireProcessor`][logfire.integrations.structlog.LogfireProcessor] shown above 44 | disables console logging by logfire so you can use the existing logger you have configured for structlog, if you 45 | want to log with logfire, use [`LogfireProcessor(console_log=True)`][logfire.integrations.structlog.LogfireProcessor]. 46 | 47 | !!! note 48 | Positional arguments aren't collected as attributes by the processor, since they are already part of the event 49 | message when the processor is called. 50 | 51 | If you have the following: 52 | 53 | ```py 54 | logger.error('Hello %s!', 'Fred') 55 | #> 2024-03-22 13:39:26 [error ] Hello Fred! 56 | ``` 57 | 58 | The string `'Fred'` will not be collected by the processor as an attribute, just formatted with the message. 59 | 60 | [structlog]: https://www.structlog.org/en/stable/ 61 | -------------------------------------------------------------------------------- /docs/integrations/web-frameworks/asgi.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # ASGI 6 | 7 | If the [ASGI][asgi] web framework you're using doesn't have a dedicated integration, you can use the 8 | [`logfire.instrument_asgi()`][logfire.Logfire.instrument_asgi] method to instrument it. 9 | 10 | ## Installation 11 | 12 | Install `logfire` with the `asgi` extra: 13 | 14 | {{ install_logfire(extras=['asgi']) }} 15 | 16 | ## Usage 17 | 18 | Below we have a minimal example using [Uvicorn][uvicorn]. You can run it with `python main.py`: 19 | 20 | ```py title="main.py" 21 | import logfire 22 | 23 | 24 | logfire.configure() 25 | 26 | 27 | async def app(scope, receive, send): 28 | assert scope["type"] == "http" 29 | await send( 30 | { 31 | "type": "http.response.start", 32 | "status": 200, 33 | "headers": [(b"content-type", b"text/plain"), (b"content-length", b"13")], 34 | } 35 | ) 36 | await send({"type": "http.response.body", "body": b"Hello, world!"}) 37 | 38 | app = logfire.instrument_asgi(app) 39 | 40 | if __name__ == "__main__": 41 | import uvicorn 42 | 43 | uvicorn.run(app) 44 | ``` 45 | 46 | The keyword arguments of [`logfire.instrument_asgi()`][logfire.Logfire.instrument_asgi] are passed to the 47 | [`OpenTelemetryMiddleware`][opentelemetry.instrumentation.asgi.OpenTelemetryMiddleware] class 48 | of the OpenTelemetry ASGI Instrumentation package. 
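For instance, because the keyword arguments are forwarded, the middleware's `excluded_urls` option can be used to keep health-check endpoints out of your traces. This is only a sketch of one forwarded option (the URL patterns here are made up); see the OpenTelemetry ASGI documentation for everything the middleware accepts:

```py
import logfire

logfire.configure()


async def app(scope, receive, send):
    assert scope["type"] == "http"
    await send({"type": "http.response.start", "status": 200, "headers": []})
    await send({"type": "http.response.body", "body": b"ok"})


# `excluded_urls` is forwarded to OpenTelemetryMiddleware: a comma-separated
# list of regexes; matching request URLs are not traced.
app = logfire.instrument_asgi(app, excluded_urls="/healthz,/metrics")
```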
49 | 50 | [asgi]: https://asgi.readthedocs.io/en/latest/ 51 | [uvicorn]: https://www.uvicorn.org/ 52 | -------------------------------------------------------------------------------- /docs/integrations/web-frameworks/django.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # Django 6 | 7 | The [`logfire.instrument_django()`][logfire.Logfire.instrument_django] method can be used to instrument the [Django][django] web framework with **Logfire**. 8 | 9 | ## Installation 10 | 11 | Install `logfire` with the `django` extra: 12 | 13 | {{ install_logfire(extras=['django']) }} 14 | 15 | ## Usage 16 | 17 | In the `settings.py` file, add the following lines: 18 | 19 | ```py 20 | import logfire 21 | 22 | # ...All the other settings... 23 | 24 | # Add the following lines at the end of the file 25 | logfire.configure() 26 | logfire.instrument_django() 27 | ``` 28 | 29 | [`logfire.instrument_django()`][logfire.Logfire.instrument_django] uses the 30 | **OpenTelemetry Django Instrumentation** package, 31 | which you can find more information about [here][opentelemetry-django]. 32 | 33 | ## Database 34 | 35 | By default, the **Django** configuration [uses SQLite as the database engine]. 36 | To instrument it, you need to call [`logfire.instrument_sqlite3()`][logfire.Logfire.instrument_sqlite3]. 37 | 38 | If you are using a different database, check the available instrumentation methods in our [Integrations section]. 39 | 40 | [django]: https://www.djangoproject.com/ 41 | [opentelemetry-django]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/django/django.html 42 | [django-instrumentor]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/django/django.html#opentelemetry.instrumentation.django.DjangoInstrumentor 43 | [uses SQLite as the database engine]: https://docs.djangoproject.com/en/dev/ref/settings/#databases 44 | [Integrations section]: ../index.md 45 | -------------------------------------------------------------------------------- /docs/integrations/web-frameworks/flask.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # Flask 6 | 7 | The [`logfire.instrument_flask()`][logfire.Logfire.instrument_flask] method 8 | will create a span for every request to your [Flask][flask] application. 9 | 10 | ## Install 11 | 12 | Install `logfire` with the `flask` extra: 13 | 14 | {{ install_logfire(extras=['flask']) }} 15 | 16 | ## Usage 17 | 18 | Let's see a minimal example below. You can run it with `python main.py`: 19 | 20 | ```py title="main.py" 21 | import logfire 22 | from flask import Flask 23 | 24 | 25 | logfire.configure() 26 | 27 | app = Flask(__name__) 28 | logfire.instrument_flask(app) 29 | 30 | 31 | @app.route("/") 32 | def hello(): 33 | return "Hello!" 34 | 35 | 36 | if __name__ == "__main__": 37 | app.run(debug=True) 38 | ``` 39 | 40 | The keyword arguments of `logfire.instrument_flask()` are passed to the `FlaskInstrumentor().instrument_app()` method 41 | of the OpenTelemetry Flask Instrumentation package, read more about it [here][opentelemetry-flask]. 
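Similarly, because the keyword arguments are forwarded to `FlaskInstrumentor().instrument_app()`, its `excluded_urls` option can be passed through to skip endpoints such as health checks. A sketch (the endpoint pattern is only an example):

```py
import logfire
from flask import Flask

logfire.configure()

app = Flask(__name__)

# `excluded_urls` is forwarded to FlaskInstrumentor().instrument_app():
# requests whose URL matches one of these regexes are not traced.
logfire.instrument_flask(app, excluded_urls="/healthz")
```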
42 | 43 | [flask]: https://flask.palletsprojects.com/en/2.0.x/ 44 | [opentelemetry-flask]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/flask/flask.html 45 | -------------------------------------------------------------------------------- /docs/integrations/web-frameworks/starlette.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # Starlette 6 | 7 | The [`logfire.instrument_starlette()`][logfire.Logfire.instrument_starlette] method will create a span for every request to your [Starlette][starlette] application. 8 | 9 | ## Installation 10 | 11 | Install `logfire` with the `starlette` extra: 12 | 13 | {{ install_logfire(extras=['starlette']) }} 14 | 15 | ## Usage 16 | 17 | We have a minimal example below. Please install [Uvicorn][uvicorn] to run it: 18 | 19 | ```bash 20 | pip install uvicorn 21 | ``` 22 | 23 | You can run it with `python main.py`: 24 | 25 | ```py title="main.py" 26 | import logfire 27 | from starlette.applications import Starlette 28 | from starlette.responses import PlainTextResponse 29 | from starlette.requests import Request 30 | from starlette.routing import Route 31 | 32 | logfire.configure() 33 | 34 | 35 | async def home(request: Request) -> PlainTextResponse: 36 | return PlainTextResponse("Hello, world!") 37 | 38 | 39 | app = Starlette(routes=[Route("/", home)]) 40 | logfire.instrument_starlette(app) 41 | 42 | if __name__ == "__main__": 43 | import uvicorn 44 | 45 | uvicorn.run(app) 46 | ``` 47 | 48 | The keyword arguments of `logfire.instrument_starlette()` are passed to the `StarletteInstrumentor.instrument_app()` method of the OpenTelemetry Starlette Instrumentation package, read more about it [here][opentelemetry-starlette]. 49 | 50 | !!! question "What about the OpenTelemetry ASGI middleware?" 51 | If you are a more experienced user, you might be wondering why we are not using 52 | the [OpenTelemetry ASGI middleware][opentelemetry-asgi]. The reason is that the 53 | `StarletteInstrumentor` actually wraps the ASGI middleware and adds some additional 54 | information related to the routes. 55 | 56 | [starlette]: https://www.starlette.io/ 57 | [opentelemetry-asgi]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/asgi/asgi.html 58 | [opentelemetry-starlette]: https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/starlette/starlette.html 59 | [uvicorn]: https://www.uvicorn.org/ 60 | -------------------------------------------------------------------------------- /docs/integrations/web-frameworks/wsgi.md: -------------------------------------------------------------------------------- 1 | --- 2 | integration: otel 3 | --- 4 | 5 | # WSGI 6 | 7 | If the [WSGI][wsgi] web framework you're using doesn't have a dedicated integration, you can use the 8 | [`logfire.instrument_wsgi()`][logfire.Logfire.instrument_wsgi] method to instrument it. 9 | 10 | ## Installation 11 | 12 | Install `logfire` with the `wsgi` extra: 13 | 14 | {{ install_logfire(extras=['wsgi']) }} 15 | 16 | ## Usage 17 | 18 | Below we have a minimal example using the standard library [`wsgiref`][wsgiref]. 
You can run it with `python main.py`: 19 | 20 | ```py title="main.py" 21 | from wsgiref.simple_server import make_server 22 | 23 | import logfire 24 | 25 | 26 | logfire.configure() 27 | 28 | def app(env, start_response): 29 | start_response('200 OK', [('Content-Type','text/html')]) 30 | return [b"Hello World"] 31 | 32 | app = logfire.instrument_wsgi(app) 33 | 34 | with make_server("", 8000, app) as httpd: 35 | print("Serving on port 8000...") 36 | 37 | # Serve until process is killed 38 | httpd.serve_forever() 39 | ``` 40 | 41 | The keyword arguments of [`logfire.instrument_wsgi()`][logfire.Logfire.instrument_wsgi] are passed to the 42 | [`OpenTelemetryMiddleware`][opentelemetry.instrumentation.wsgi.OpenTelemetryMiddleware] class of 43 | the OpenTelemetry WSGI Instrumentation package. 44 | 45 | 46 | [wsgi]: https://wsgi.readthedocs.io/en/latest/ 47 | [wsgiref]: https://docs.python.org/3/library/wsgiref.html 48 | -------------------------------------------------------------------------------- /docs/join-slack/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Redirecting to slack... 7 | 10 | 11 | 12 | 13 | You're being redirected to 14 | 15 | a slack invitation link 16 | . 17 | 18 | 19 | -------------------------------------------------------------------------------- /docs/languages.md: -------------------------------------------------------------------------------- 1 | Logfire is built on top of OpenTelemetry, which means that it supports all the languages that OpenTelemetry supports. 2 | 3 | In addition, we currently have custom SDKs for: 4 | 5 | - [Python](https://github.com/pydantic/logfire) 6 | - [JavaScript/TypeScript](https://github.com/pydantic/logfire-js) 7 | - [Rust](https://github.com/pydantic/logfire-rust) 8 | 9 | These SDKs offer a streamlined developer experience. 10 | 11 | You can check our [Alternative Clients](how-to-guides/alternative-clients.md) section to see how 12 | you can send data to Logfire from other languages. 13 | -------------------------------------------------------------------------------- /docs/logo-white.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /docs/overrides/partials/search.html: -------------------------------------------------------------------------------- 1 | 33 | -------------------------------------------------------------------------------- /docs/overrides/partials/source.html: -------------------------------------------------------------------------------- 1 |
2 | 8 |
9 | {% set icon = config.theme.icon.repo or "fontawesome/brands/git-alt" %} 10 | {% include ".icons/" ~ icon ~ ".svg" %} 11 |
12 |
13 | {{ config.repo_name }} 14 |
15 |
16 | 19 |
20 | -------------------------------------------------------------------------------- /docs/reference/api/exceptions.md: -------------------------------------------------------------------------------- 1 | ::: logfire.exceptions 2 | -------------------------------------------------------------------------------- /docs/reference/api/logfire.md: -------------------------------------------------------------------------------- 1 | ::: logfire 2 | options: 3 | show_root_toc_entry: false 4 | members: false 5 | 6 | 7 | ::: logfire.Logfire 8 | options: 9 | show_root_heading: true 10 | show_root_full_path: false 11 | exclude: 12 | filters: 13 | - "!instrument_redis" 14 | - "!instrument_pymongo" 15 | - "!instrument_psycopg" 16 | - "!^with_trace_sample_rate$" 17 | - "!^_[^_]" 18 | 19 | 20 | ::: logfire 21 | options: 22 | show_root_toc_entry: false 23 | show_docstring_description: true 24 | filters: ["!^Logfire$", "!^_[^_]"] 25 | -------------------------------------------------------------------------------- /docs/reference/api/propagate.md: -------------------------------------------------------------------------------- 1 | ::: logfire.propagate 2 | -------------------------------------------------------------------------------- /docs/reference/api/pydantic.md: -------------------------------------------------------------------------------- 1 | ::: logfire.integrations.pydantic 2 | -------------------------------------------------------------------------------- /docs/reference/api/sampling.md: -------------------------------------------------------------------------------- 1 | ::: logfire.sampling 2 | -------------------------------------------------------------------------------- /docs/reference/api/testing.md: -------------------------------------------------------------------------------- 1 | ::: logfire.testing 2 | -------------------------------------------------------------------------------- /docs/reference/configuration.md: -------------------------------------------------------------------------------- 1 | You can use the following ways to configure Logfire: 2 | 3 | 1. Programmatically via [`logfire.configure()`][logfire.configure] 4 | 2. Using [environment variables](#using-environment-variables) 5 | 3. Using a [configuration file](#using-a-configuration-file-pyprojecttoml) (`pyproject.toml`) 6 | 7 | The order of precedence is as above. 8 | 9 | ## Programmatically via `configure` 10 | 11 | For more details, please refer to our [API documentation][logfire.configure]. 12 | 13 | ## Using environment variables 14 | 15 | You can use the following environment variables to configure **Logfire**: 16 | 17 | {{ env_var_table }} 18 | 19 | When using environment variables, you still need to call [`logfire.configure()`][logfire.configure], 20 | but you can leave out the arguments. 21 | 22 | ## Using a configuration file (`pyproject.toml`) 23 | 24 | You can use the `pyproject.toml` to configure **Logfire**. 25 | 26 | Here's an example: 27 | 28 | ```toml 29 | [tool.logfire] 30 | project_name = "My Project" 31 | console_colors = "never" 32 | ``` 33 | 34 | The keys are the same as the parameters of [`logfire.configure()`][logfire.configure]. 
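To make the precedence concrete: all three mechanisms can set the write token, and an explicit argument to `configure()` wins over the `LOGFIRE_TOKEN` environment variable, which in turn takes precedence over a matching key in `pyproject.toml`. A minimal sketch (the token value is a placeholder):

```python
import logfire

# An explicit argument beats LOGFIRE_TOKEN in the environment,
# which beats the corresponding key under [tool.logfire] in pyproject.toml.
logfire.configure(token='my-write-token')
```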
35 | -------------------------------------------------------------------------------- /docs/reference/examples.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | These are working, stand-alone apps and projects that you can clone, spin up locally and play around with to get a feel for the different capabilities of Logfire. 4 | 5 | **Got a suggestion?** 6 | 7 | If you want to see an example of a particular language or library, [get in touch](../help.md). 8 | 9 | ## Python 10 | 11 | ### Flask and SQLAlchemy example 12 | 13 | This example is a simple Python financial calculator app using Flask and SQLAlchemy which is instrumented using the appropriate integrations as well as [auto-tracing](../guides/onboarding-checklist/add-auto-tracing.md). If you spin up the server locally and interact with the calculator app, you'll be able to see traces come in automatically: 14 | 15 | ![Flask and SQLAlchemy example](../images/logfire-screenshot-examples-flask-sqlalchemy.png) 16 | 17 | [See it on GitHub :material-open-in-new:](https://github.com/pydantic/logfire/tree/main/examples/python/flask-sqlalchemy/){:target="_blank"} 18 | 19 | ## JavaScript 20 | 21 | Currently we only have a Python SDK, but the Logfire backend and UI support data sent by any OpenTelemetry client. See the [alternative clients guide](../how-to-guides/alternative-clients.md) for details on setting up OpenTelemetry in any language. We're working on a JavaScript SDK, but in the meantime here are some examples of using plain OpenTelemetry in JavaScript: 22 | 23 | ### Cloudflare worker example 24 | 25 | This example is based on the scaffolding created from `npm create cloudflare@latest`, and uses the [otel-cf-workers package](https://github.com/evanderkoogh/otel-cf-workers) to instrument a Cloudflare Worker and send traces and metrics to Logfire. 26 | 27 | [See it on GitHub :material-open-in-new:](https://github.com/pydantic/logfire/tree/main/examples/javascript/cloudflare-worker/){:target="_blank"} 28 | 29 | ### Express example 30 | 31 | This example demonstrates how to use OpenTelemetry to instrument an Express application and send traces and metrics to Logfire. 32 | 33 | [See it on GitHub :material-open-in-new:](https://github.com/pydantic/logfire/tree/main/examples/javascript/express/){:target="_blank"} 34 | -------------------------------------------------------------------------------- /docs/reference/organization-structure.md: -------------------------------------------------------------------------------- 1 | The following diagram shows the structure of an organization in **Logfire**: 2 | 3 | ```mermaid 4 | classDiagram 5 | Organization <-- OrganizationMember 6 | User <-- OrganizationMember 7 | User <-- ProjectMember 8 | Organization <-- Project 9 | Project <-- ProjectMember 10 | 11 | class Organization { 12 | UUID id 13 | string name 14 | } 15 | 16 | class User { 17 | UUID id 18 | string name 19 | } 20 | 21 | class OrganizationMember { 22 | UUID user_id 23 | UUID organization_id 24 | string role ['admin', 'member', 'guest'] 25 | } 26 | 27 | class Project { 28 | UUID id 29 | UUID organization_id 30 | string name 31 | } 32 | 33 | class ProjectMember { 34 | UUID user_id 35 | UUID project_id 36 | string role ['admin', 'member'] 37 | } 38 | ``` 39 | 40 | As a **user**, you can be a member of multiple **organizations**. On each **organization**, you can either be: 41 | 42 | - [X] An **admin**: who can manage the organization and its projects. 
43 | - [X] A **member**: who can only view the organization and the projects that are shared with them. 44 | - [X] A **guest**: who can only view the projects that are shared with them. 45 | 46 | An **admin** can invite other users to join the organization. 47 | When a user accepts the invitation, they become a **member** of the organization. 48 | 49 | Each **organization** can have multiple **projects**. On each **project**, you can either be: 50 | 51 | - [X] An **admin**: who can manage the project. 52 | - [X] A **member**: who can only view the project. 53 | 54 | If a user is invited to join a project, they become a **member** of the project, but they are a **guest** in the organization. 55 | -------------------------------------------------------------------------------- /docs/release-notes.md: -------------------------------------------------------------------------------- 1 | --8<-- "CHANGELOG.md" 2 | -------------------------------------------------------------------------------- /docs/roadmap.md: -------------------------------------------------------------------------------- 1 | This [GitHub issue][roadmap] is the 2 | best place to check for updates to the Pydantic Logfire roadmap. 3 | 4 | If you have any questions, or a feature request, **please join our [Slack][slack]**. 5 | 6 | [slack]: https://logfire.pydantic.dev/docs/join-slack/ 7 | [roadmap]: https://github.com/pydantic/logfire/issues/1004 8 | -------------------------------------------------------------------------------- /examples/javascript/README.md: -------------------------------------------------------------------------------- 1 | Use the [Logfire JavaScript SDK](https://github.com/pydantic/logfire-js) to send telemetry from your JavaScript code. 2 | -------------------------------------------------------------------------------- /examples/python/flask-sqlalchemy/README.MD: -------------------------------------------------------------------------------- 1 | # Logfire + Flask + SQLAlchemy Example 2 | 3 | This example is a simple Python financial calculator app which is instrumented with Logfire. If you spin up the server locally and interact with the calculator app, you'll be able to see traces come in automatically. The Logfire instrumentation is done with just a few lines of code in `main.py`. 4 | 5 | ## Run the project 6 | 7 | Clone the repository, navigate to the example directory, set up a virtual environment, and install the dependencies: 8 | 9 | ```bash 10 | git clone https://github.com/pydantic/logfire.git 11 | cd logfire/examples/python/flask-sqlalchemy 12 | python -m venv venv 13 | source venv/bin/activate 14 | pip install 'logfire[sqlalchemy,flask]' flask-sqlalchemy 15 | ``` 16 | 17 | If you don't have a Logfire account, create a free one [in Logfire](https://logfire.pydantic.dev/).
Then authenticate Logfire locally: 18 | 19 | ```bash 20 | logfire auth 21 | ``` 22 | 23 | Run the `main.py` file: 24 | 25 | ```bash 26 | python main.py 27 | ``` 28 | 29 | Now you should see 30 | 31 | - Python app running locally at [http://127.0.0.1:5000/](http://127.0.0.1:5000/) 32 | - Logfire app running live at `https://logfire.pydantic.dev//` 33 | 34 | In order to see traces arrive in the Logfire app, interact with the local calculator app (click around, enter values into the fields, click calculate) 35 | -------------------------------------------------------------------------------- /examples/python/flask-sqlalchemy/app/__init__.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | from flask import Flask, jsonify, render_template, request 4 | from flask_sqlalchemy import SQLAlchemy 5 | 6 | db = SQLAlchemy() 7 | 8 | 9 | class Calculation(db.Model): 10 | """Store calculation information.""" 11 | 12 | id = db.Column(db.Integer, primary_key=True) 13 | type = db.Column(db.String(50), nullable=False) 14 | input_data = db.Column(db.String(200), nullable=False) 15 | result = db.Column(db.Float, nullable=False) 16 | timestamp = db.Column(db.DateTime, default=datetime.utcnow) 17 | 18 | 19 | app = Flask(__name__) 20 | 21 | 22 | @app.route('/') 23 | def index(): 24 | """Render the calculator view.""" 25 | return render_template('index.html') 26 | 27 | 28 | @app.route('/calculate', methods=['POST']) 29 | def calculate(): 30 | """Calculate the value.""" 31 | data = request.json 32 | calculation_type = data['type'] 33 | result = 0 34 | 35 | if calculation_type == 'compound_interest': 36 | principal = float(data['principal']) 37 | rate = float(data['rate']) 38 | time = float(data['time']) 39 | compounds_per_year = int(data['compounds_per_year']) 40 | result = principal * (1 + rate / compounds_per_year) ** (compounds_per_year * time) 41 | elif calculation_type == 'loan_payment': 42 | principal = float(data['principal']) 43 | rate = float(data['rate']) 44 | time = float(data['time']) 45 | monthly_rate = rate / 12 46 | num_payments = time * 12 47 | result = ( 48 | principal * (monthly_rate * (1 + monthly_rate) ** num_payments) / ((1 + monthly_rate) ** num_payments - 1) 49 | ) 50 | 51 | new_calculation = Calculation(type=calculation_type, input_data=str(data), result=result) 52 | db.session.add(new_calculation) 53 | db.session.commit() 54 | 55 | return jsonify({'result': result}) 56 | 57 | 58 | @app.route('/history') 59 | def history(): 60 | """Render the history view.""" 61 | calculations = Calculation.query.order_by(Calculation.timestamp.desc()).limit(10).all() 62 | return render_template('history.html', calculations=calculations) 63 | -------------------------------------------------------------------------------- /examples/python/flask-sqlalchemy/app/static/styles.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: Arial, sans-serif; 3 | max-width: 800px; 4 | margin: 0 auto; 5 | padding: 20px; 6 | } 7 | 8 | h1 { 9 | text-align: center; 10 | } 11 | 12 | #calculator { 13 | display: flex; 14 | flex-direction: column; 15 | gap: 10px; 16 | margin-bottom: 20px; 17 | } 18 | 19 | input, select, button { 20 | padding: 5px; 21 | font-size: 16px; 22 | } 23 | 24 | #result { 25 | font-weight: bold; 26 | margin-top: 10px; 27 | } 28 | 29 | table { 30 | width: 100%; 31 | border-collapse: collapse; 32 | } 33 | 34 | th, td { 35 | border: 1px solid #ddd; 36 | padding: 8px; 37 | 
text-align: left; 38 | } 39 | 40 | th { 41 | background-color: #f2f2f2; 42 | } 43 | -------------------------------------------------------------------------------- /examples/python/flask-sqlalchemy/app/templates/history.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Calculation History 7 | 8 | 9 | 10 |

Calculation History

11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | {% for calc in calculations %} 22 | 23 | 24 | 25 | 26 | 27 | 28 | {% endfor %} 29 | 30 |
TypeInput DataResultTimestamp
{{ calc.type }}{{ calc.input_data }}${{ "%.2f"|format(calc.result) }}{{ calc.timestamp.strftime('%Y-%m-%d %H:%M:%S') }}
31 | Back to Calculator 32 | 33 | 34 | -------------------------------------------------------------------------------- /examples/python/flask-sqlalchemy/main.py: -------------------------------------------------------------------------------- 1 | import logfire 2 | 3 | logfire.install_auto_tracing(modules=['app'], min_duration=0) 4 | 5 | from app import app, db # noqa # needs to be imported after install_auto_tracing 6 | 7 | logfire.configure() 8 | logfire.instrument_flask(app) 9 | 10 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' # in-memory database 11 | 12 | db.init_app(app) 13 | 14 | with app.app_context(): 15 | logfire.instrument_sqlalchemy(engine=db.engine) 16 | db.create_all() 17 | 18 | app.run(debug=True) 19 | -------------------------------------------------------------------------------- /logfire-api/.gitignore: -------------------------------------------------------------------------------- 1 | # python generated files 2 | __pycache__/ 3 | *.py[oc] 4 | build/ 5 | dist/ 6 | wheels/ 7 | *.egg-info 8 | 9 | # venv 10 | .venv 11 | -------------------------------------------------------------------------------- /logfire-api/README.md: -------------------------------------------------------------------------------- 1 | # logfire-api 2 | 3 | Shim for the logfire SDK Python API which does nothing unless logfire is installed. 4 | 5 | This package is designed to be used by packages that want to provide opt-in integration with [Logfire](https://github.com/pydantic/logfire). 6 | 7 | The package provides a clone of the Python API exposed by the `logfire` package which does nothing if the `logfire` package is not installed, but makes real calls when it is. 8 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire-api/logfire_api/_internal/__init__.pyi -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/ast_utils.pyi: -------------------------------------------------------------------------------- 1 | import ast 2 | from .constants import ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_SAMPLE_RATE_KEY as ATTRIBUTES_SAMPLE_RATE_KEY, ATTRIBUTES_TAGS_KEY as ATTRIBUTES_TAGS_KEY 3 | from .stack_info import StackInfo as StackInfo, get_filepath_attribute as get_filepath_attribute 4 | from .utils import uniquify_sequence as uniquify_sequence 5 | from dataclasses import dataclass 6 | from opentelemetry.util import types as otel_types 7 | 8 | @dataclass(frozen=True) 9 | class LogfireArgs: 10 | """Values passed to `logfire.instrument` and/or values stored in a logfire instance as basic configuration. 11 | 12 | These determine the arguments passed to the method calls added by the AST transformer. 13 | """ 14 | tags: tuple[str, ...] 15 | sample_rate: float | None 16 | msg_template: str | None = ... 17 | span_name: str | None = ... 18 | 19 | @dataclass 20 | class BaseTransformer(ast.NodeTransformer): 21 | """Helper for rewriting ASTs to wrap function bodies in `with {logfire_method_name}(...):`.""" 22 | logfire_args: LogfireArgs 23 | logfire_method_name: str 24 | filename: str 25 | module_name: str 26 | qualname_stack: list[str] = ... 27 | def __post_init__(self) -> None: ... 28 | def visit_ClassDef(self, node: ast.ClassDef): ... 
29 | def visit_FunctionDef(self, node: ast.FunctionDef | ast.AsyncFunctionDef): ... 30 | def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef): ... 31 | def rewrite_function(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> ast.AST: ... 32 | def logfire_method_call_node(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> ast.Call: ... 33 | def logfire_method_arg_values(self, qualname: str, lineno: int) -> tuple[str, dict[str, otel_types.AttributeValue]]: ... 34 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/async_.pyi: -------------------------------------------------------------------------------- 1 | from .constants import ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS 2 | from .main import Logfire as Logfire 3 | from .stack_info import StackInfo as StackInfo, get_code_object_info as get_code_object_info, get_stack_info_from_frame as get_stack_info_from_frame 4 | from .utils import safe_repr as safe_repr 5 | from _typeshed import Incomplete 6 | from types import CoroutineType 7 | from typing import Any, ContextManager 8 | 9 | ASYNCIO_PATH: Incomplete 10 | 11 | def log_slow_callbacks(logfire: Logfire, slow_duration: float) -> ContextManager[None]: 12 | """Log a warning whenever a function running in the asyncio event loop blocks for too long. 13 | 14 | See Logfire.log_slow_async_callbacks. 15 | Inspired by https://gitlab.com/quantlane/libs/aiodebug. 16 | """ 17 | 18 | class _CallbackAttributes(StackInfo, total=False): 19 | name: str 20 | stack: list[StackInfo] 21 | 22 | def stack_info_from_coroutine(coro: CoroutineType[Any, Any, Any]) -> StackInfo: ... 23 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/auth.pyi: -------------------------------------------------------------------------------- 1 | import requests 2 | from .utils import UnexpectedResponse as UnexpectedResponse 3 | from _typeshed import Incomplete 4 | from logfire.exceptions import LogfireConfigError as LogfireConfigError 5 | from typing import TypedDict 6 | 7 | HOME_LOGFIRE: Incomplete 8 | DEFAULT_FILE: Incomplete 9 | 10 | class UserTokenData(TypedDict): 11 | """User token data.""" 12 | token: str 13 | expiration: str 14 | 15 | class DefaultFile(TypedDict): 16 | """Content of the default.toml file.""" 17 | tokens: dict[str, UserTokenData] 18 | 19 | class NewDeviceFlow(TypedDict): 20 | """Matches model of the same name in the backend.""" 21 | device_code: str 22 | frontend_auth_url: str 23 | 24 | def request_device_code(session: requests.Session, base_api_url: str) -> tuple[str, str]: 25 | """Request a device code from the Logfire API. 26 | 27 | Args: 28 | session: The `requests` session to use. 29 | base_api_url: The base URL of the Logfire instance. 30 | 31 | Returns: 32 | return data['device_code'], data['frontend_auth_url'] 33 | The device code and the frontend URL to authenticate the device at, as a `NewDeviceFlow` dict. 34 | """ 35 | def poll_for_token(session: requests.Session, device_code: str, base_api_url: str) -> UserTokenData: 36 | """Poll the Logfire API for the user token. 37 | 38 | This function will keep polling the API until it receives a user token, not that 39 | each request should take ~10 seconds as the API endpoint will block waiting for the user to 40 | complete authentication. 41 | 42 | Args: 43 | session: The `requests` session to use. 44 | device_code: The device code to poll for. 
45 | base_api_url: The base URL of the Logfire instance. 46 | 47 | Returns: 48 | The user token. 49 | """ 50 | def is_logged_in(data: DefaultFile, logfire_url: str) -> bool: 51 | """Check if the user is logged in. 52 | 53 | Returns: 54 | True if the user is logged in, False otherwise. 55 | """ 56 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/auto_trace/__init__.pyi: -------------------------------------------------------------------------------- 1 | from ..constants import ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS 2 | from ..main import Logfire as Logfire 3 | from .import_hook import LogfireFinder as LogfireFinder 4 | from .types import AutoTraceModule as AutoTraceModule 5 | from typing import Callable, Literal, Sequence 6 | 7 | def install_auto_tracing(logfire: Logfire, modules: Sequence[str] | Callable[[AutoTraceModule], bool], *, min_duration: float, check_imported_modules: Literal['error', 'warn', 'ignore'] = 'error') -> None: 8 | """Install automatic tracing. 9 | 10 | See `Logfire.install_auto_tracing` for more information. 11 | """ 12 | def modules_func_from_sequence(modules: Sequence[str]) -> Callable[[AutoTraceModule], bool]: ... 13 | 14 | class AutoTraceModuleAlreadyImportedException(Exception): ... 15 | class AutoTraceModuleAlreadyImportedWarning(Warning): ... 16 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/auto_trace/import_hook.pyi: -------------------------------------------------------------------------------- 1 | from ..main import Logfire as Logfire 2 | from ..utils import log_internal_error as log_internal_error 3 | from .rewrite_ast import compile_source as compile_source 4 | from .types import AutoTraceModule as AutoTraceModule 5 | from dataclasses import dataclass 6 | from importlib.abc import Loader, MetaPathFinder 7 | from importlib.machinery import ModuleSpec 8 | from types import ModuleType 9 | from typing import Any, Callable, Sequence 10 | 11 | @dataclass 12 | class LogfireFinder(MetaPathFinder): 13 | """The import hook entry point, inserted into `sys.meta_path` to apply AST rewriting to matching modules.""" 14 | logfire: Logfire 15 | modules_filter: Callable[[AutoTraceModule], bool] 16 | min_duration: int 17 | def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = None) -> ModuleSpec | None: 18 | """This is the method that is called by the import system. 19 | 20 | It uses the other existing meta path finders to do most of the standard work, 21 | particularly finding the module's source code and filename. 22 | If it finds a module spec that matches the filter, it returns a new spec that uses the LogfireLoader. 23 | """ 24 | 25 | @dataclass 26 | class LogfireLoader(Loader): 27 | """An import loader produced by LogfireFinder which executes a modified AST of the module's source code.""" 28 | plain_spec: ModuleSpec 29 | execute: Callable[[dict[str, Any]], None] 30 | def exec_module(self, module: ModuleType): 31 | """Execute a modified AST of the module's source code in the module's namespace. 32 | 33 | This is called by the import system. 34 | """ 35 | def create_module(self, spec: ModuleSpec): ... 36 | def get_code(self, _name: str): ... 
37 | def __getattr__(self, item: str): 38 | """Forward some methods to the plain spec's loader (likely a `SourceFileLoader`) if they exist.""" 39 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/auto_trace/types.pyi: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Sequence 3 | 4 | @dataclass 5 | class AutoTraceModule: 6 | """Information about a module being imported that should maybe be traced automatically. 7 | 8 | This object will be passed to a function that should return True if the module should be traced. 9 | In particular it'll be passed to a function that's passed to `install_auto_tracing` as the `modules` argument. 10 | """ 11 | name: str 12 | filename: str | None 13 | def parts_start_with(self, prefix: str | Sequence[str]) -> bool: 14 | """Return True if the module name starts with any of the given prefixes, using dots as boundaries. 15 | 16 | For example, if the module name is `foo.bar.spam`, then `parts_start_with('foo')` will return True, 17 | but `parts_start_with('bar')` or `parts_start_with('foo_bar')` will return False. 18 | In other words, this will match the module itself or any submodules. 19 | 20 | If a prefix contains any characters other than letters, numbers, and dots, 21 | then it will be treated as a regular expression. 22 | """ 23 | 24 | def get_module_pattern(module: str): ... 25 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/cli.pyi: -------------------------------------------------------------------------------- 1 | import argparse 2 | from typing import Any, Sequence 3 | 4 | __all__ = ['main', 'logfire_info'] 5 | 6 | def logfire_info() -> str: 7 | """Show versions of logfire, OS and related packages.""" 8 | 9 | class SplitArgs(argparse.Action): 10 | def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace, values: str | Sequence[Any] | None, option_string: str | None = None): ... 11 | 12 | def main(args: list[str] | None = None) -> None: 13 | """Run the CLI.""" 14 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/collect_system_info.pyi: -------------------------------------------------------------------------------- 1 | def collect_package_info() -> dict[str, str]: 2 | """Retrieve the package information for all installed packages. 3 | 4 | Returns: 5 | A dicts with the package name and version. 6 | """ 7 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/constants.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from opentelemetry.util import types as otel_types 3 | 4 | LOGFIRE_ATTRIBUTES_NAMESPACE: str 5 | LevelName: Incomplete 6 | LEVEL_NUMBERS: dict[LevelName, int] 7 | NUMBER_TO_LEVEL: dict[int, LevelName] 8 | LOGGING_TO_OTEL_LEVEL_NUMBERS: dict[int, int] 9 | ATTRIBUTES_LOG_LEVEL_NAME_KEY: Incomplete 10 | ATTRIBUTES_LOG_LEVEL_NUM_KEY: Incomplete 11 | 12 | def log_level_attributes(level: LevelName | int) -> dict[str, otel_types.AttributeValue]: ... 
13 | 14 | SpanTypeType: Incomplete 15 | ATTRIBUTES_SPAN_TYPE_KEY: Incomplete 16 | ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY: Incomplete 17 | ATTRIBUTES_TAGS_KEY: Incomplete 18 | ATTRIBUTES_MESSAGE_TEMPLATE_KEY: Incomplete 19 | ATTRIBUTES_MESSAGE_KEY: Incomplete 20 | DISABLE_CONSOLE_KEY: Incomplete 21 | ATTRIBUTES_JSON_SCHEMA_KEY: Incomplete 22 | ATTRIBUTES_LOGGING_ARGS_KEY: Incomplete 23 | ATTRIBUTES_LOGGING_NAME: Incomplete 24 | ATTRIBUTES_VALIDATION_ERROR_KEY: str 25 | ATTRIBUTES_SCRUBBED_KEY: Incomplete 26 | RESOURCE_ATTRIBUTES_PACKAGE_VERSIONS: str 27 | RESOURCE_ATTRIBUTES_DEPLOYMENT_ENVIRONMENT_NAME: str 28 | RESOURCE_ATTRIBUTES_VCS_REPOSITORY_REF_REVISION: str 29 | RESOURCE_ATTRIBUTES_VCS_REPOSITORY_URL: str 30 | RESOURCE_ATTRIBUTES_CODE_ROOT_PATH: str 31 | RESOURCE_ATTRIBUTES_CODE_WORK_DIR: str 32 | OTLP_MAX_INT_SIZE: Incomplete 33 | ATTRIBUTES_SAMPLE_RATE_KEY: str 34 | CONTEXT_ATTRIBUTES_KEY: Incomplete 35 | CONTEXT_SAMPLE_RATE_KEY: Incomplete 36 | MESSAGE_FORMATTED_VALUE_LENGTH_LIMIT: int 37 | ONE_SECOND_IN_NANOSECONDS: int 38 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/db_statement_summary.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from typing import Any, Mapping 3 | 4 | MAX_QUERY_MESSAGE_LENGTH: int 5 | 6 | def message_from_db_statement(attributes: Mapping[str, Any], message: str | None, span_name: str) -> str | None: 7 | """Try to construct a useful span message from OTel db statement. 8 | 9 | Returns: A new string to use as span message or None to keep the original message. 10 | """ 11 | 12 | TABLE_RE: str 13 | SELECT_RE: Incomplete 14 | SELECT_CTE_RE: Incomplete 15 | SELECT_SUBQUERY_RE: Incomplete 16 | INSERT_RE: Incomplete 17 | 18 | def summarize_query(db_statement: str) -> str | None: 19 | """Summarize a database statement, specifically SQL queries. 20 | 21 | Args: 22 | db_statement: The database statement to summarize. 23 | 24 | Returns: A new string to use as span message or None to keep the original message. 25 | 26 | """ 27 | def select(expr: str, table: str, *, match_end: int, db_statement: str, ctes: str | None = None, sub_query: str | None = None) -> str: ... 28 | def truncate(s: str, length: int) -> str: ... 29 | 30 | FALLBACK_HALF: Incomplete 31 | 32 | def fallback(db_statement: str): ... 33 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/exporters/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire-api/logfire_api/_internal/exporters/__init__.pyi -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/exporters/dynamic_batch.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from logfire._internal.exporters.wrapper import WrapperSpanProcessor as WrapperSpanProcessor 3 | from opentelemetry.sdk.trace import ReadableSpan 4 | from opentelemetry.sdk.trace.export import SpanExporter 5 | 6 | class DynamicBatchSpanProcessor(WrapperSpanProcessor): 7 | """A wrapper around a BatchSpanProcessor that dynamically adjusts the schedule delay. 
8 | 9 | The initial schedule delay is set to 100ms, and after processing 10 spans, it is set to the value of 10 | the `OTEL_BSP_SCHEDULE_DELAY` environment variable (default: 500ms). 11 | This makes the initial experience of the SDK more responsive. 12 | """ 13 | final_delay: Incomplete 14 | num_processed: int 15 | def __init__(self, exporter: SpanExporter) -> None: ... 16 | def on_end(self, span: ReadableSpan) -> None: ... 17 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/exporters/logs.pyi: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from logfire._internal.exporters.wrapper import WrapperLogProcessor as WrapperLogProcessor 3 | from logfire._internal.scrubbing import BaseScrubber as BaseScrubber 4 | from logfire._internal.utils import is_instrumentation_suppressed as is_instrumentation_suppressed 5 | from opentelemetry.sdk._logs import LogData 6 | 7 | class CheckSuppressInstrumentationLogProcessorWrapper(WrapperLogProcessor): 8 | """Checks if instrumentation is suppressed, then suppresses instrumentation itself. 9 | 10 | Placed at the root of the tree of processors. 11 | """ 12 | def emit(self, log_data: LogData): ... 13 | 14 | @dataclass 15 | class MainLogProcessorWrapper(WrapperLogProcessor): 16 | scrubber: BaseScrubber 17 | def emit(self, log_data: LogData): ... 18 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/exporters/processor_wrapper.pyi: -------------------------------------------------------------------------------- 1 | from ..constants import ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, LEVEL_NUMBERS as LEVEL_NUMBERS, log_level_attributes as log_level_attributes 2 | from ..db_statement_summary import message_from_db_statement as message_from_db_statement 3 | from ..scrubbing import BaseScrubber as BaseScrubber 4 | from ..utils import ReadableSpanDict as ReadableSpanDict, is_asgi_send_receive_span_name as is_asgi_send_receive_span_name, is_instrumentation_suppressed as is_instrumentation_suppressed, span_to_dict as span_to_dict, truncate_string as truncate_string 5 | from .wrapper import WrapperSpanProcessor as WrapperSpanProcessor 6 | from dataclasses import dataclass 7 | from opentelemetry import context 8 | from opentelemetry.sdk.trace import ReadableSpan, Span 9 | 10 | class CheckSuppressInstrumentationProcessorWrapper(WrapperSpanProcessor): 11 | """Checks if instrumentation is suppressed, then suppresses instrumentation itself. 12 | 13 | Placed at the root of the tree of processors. 14 | """ 15 | def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ... 16 | def on_end(self, span: ReadableSpan) -> None: ... 17 | 18 | @dataclass 19 | class MainSpanProcessorWrapper(WrapperSpanProcessor): 20 | """Wrapper around other processors to intercept starting and ending spans with our own global logic. 21 | 22 | Suppresses starting/ending if the current context has a `suppress_instrumentation` value. 23 | Tweaks the send/receive span names generated by the ASGI middleware. 24 | """ 25 | scrubber: BaseScrubber 26 | def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ... 27 | def on_end(self, span: ReadableSpan) -> None: ... 
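# A short sketch of the behaviour these wrappers react to: the public
# `logfire.suppress_instrumentation()` context manager sets the flag that
# `is_instrumentation_suppressed()` checks, so anything emitted inside it is dropped
# rather than exported (assumes `logfire.configure()` has already been called).
import logfire

with logfire.suppress_instrumentation():
    logfire.info('dropped by the suppression check')

logfire.info('exported normally')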
28 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/exporters/quiet_metrics.pyi: -------------------------------------------------------------------------------- 1 | from .wrapper import WrapperMetricExporter as WrapperMetricExporter 2 | from opentelemetry.sdk.metrics.export import MetricExportResult, MetricsData 3 | from typing import Any 4 | 5 | class QuietMetricExporter(WrapperMetricExporter): 6 | """A MetricExporter that catches request exceptions to prevent OTEL from logging a huge traceback.""" 7 | def export(self, metrics_data: MetricsData, timeout_millis: float = 10000, **kwargs: Any) -> MetricExportResult: ... 8 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/exporters/remove_pending.pyi: -------------------------------------------------------------------------------- 1 | from ..constants import ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY 2 | from .wrapper import WrapperSpanExporter as WrapperSpanExporter 3 | from opentelemetry.sdk.trace import ReadableSpan as ReadableSpan 4 | from opentelemetry.sdk.trace.export import SpanExportResult 5 | from typing import Sequence 6 | 7 | class RemovePendingSpansExporter(WrapperSpanExporter): 8 | """An exporter that filters out pending spans if the corresponding final span is already in the same batch.""" 9 | def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: ... 10 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/exporters/tail_sampling.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from dataclasses import dataclass 3 | from functools import cached_property 4 | from logfire._internal.constants import ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, LEVEL_NUMBERS as LEVEL_NUMBERS, LevelName as LevelName, ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS 5 | from logfire._internal.exporters.wrapper import WrapperSpanProcessor as WrapperSpanProcessor 6 | from opentelemetry import context 7 | from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor 8 | 9 | @dataclass 10 | class TailSamplingOptions: 11 | level: LevelName | None = ... 12 | duration: float | None = ... 13 | 14 | @dataclass 15 | class TraceBuffer: 16 | """Arguments of `on_start` and `on_end` for spans in a single trace.""" 17 | started: list[tuple[Span, context.Context | None]] 18 | ended: list[ReadableSpan] 19 | @cached_property 20 | def first_span(self) -> Span: ... 21 | 22 | class TailSamplingProcessor(WrapperSpanProcessor): 23 | """Passes spans to the wrapped processor if any span in a trace meets the sampling criteria.""" 24 | duration: Incomplete 25 | level: Incomplete 26 | random_rate: Incomplete 27 | traces: Incomplete 28 | lock: Incomplete 29 | def __init__(self, processor: SpanProcessor, options: TailSamplingOptions, random_rate: float) -> None: ... 30 | def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ... 31 | def on_end(self, span: ReadableSpan) -> None: ... 32 | def check_span(self, span: ReadableSpan, buffer: TraceBuffer) -> bool: 33 | """If the span meets the sampling criteria, drop the buffer and return True. Otherwise, return False.""" 34 | def drop_buffer(self, buffer: TraceBuffer) -> None: ... 35 | def push_buffer(self, buffer: TraceBuffer) -> None: ... 
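# A rough sketch of how the internal pieces above fit together. Normally tail sampling is
# configured via `logfire.configure(sampling=...)` rather than built by hand; the console
# exporter here is purely illustrative.
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter

inner = BatchSpanProcessor(ConsoleSpanExporter())
processor = TailSamplingProcessor(
    processor=inner,
    options=TailSamplingOptions(level='error', duration=5.0),  # keep traces containing an error or a span over 5s
    random_rate=0.1,                                           # plus roughly 10% of the remaining traces
)
# on_start/on_end buffer spans per trace; check_span decides when a buffer is pushed to `inner`.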
36 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/instrument.pyi: -------------------------------------------------------------------------------- 1 | from .constants import ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_TAGS_KEY as ATTRIBUTES_TAGS_KEY 2 | from .main import Logfire as Logfire 3 | from .stack_info import get_filepath_attribute as get_filepath_attribute 4 | from .utils import safe_repr as safe_repr, uniquify_sequence as uniquify_sequence 5 | from _typeshed import Incomplete 6 | from collections.abc import Sequence 7 | from opentelemetry.util import types as otel_types 8 | from typing import Any, Callable, ContextManager, Iterable, TypeVar 9 | from typing_extensions import LiteralString, ParamSpec 10 | 11 | P = ParamSpec('P') 12 | R = TypeVar('R') 13 | CONTEXTMANAGER_HELPER_CODE: Incomplete 14 | ASYNCCONTEXTMANAGER_HELPER_CODE: Incomplete 15 | GENERATOR_WARNING_MESSAGE: str 16 | 17 | def instrument(logfire: Logfire, tags: Sequence[str], msg_template: LiteralString | None, span_name: str | None, extract_args: bool | Iterable[str], record_return: bool, allow_generator: bool) -> Callable[[Callable[P, R]], Callable[P, R]]: ... 18 | def get_open_span(logfire: Logfire, attributes: dict[str, otel_types.AttributeValue], span_name: str | None, extract_args: bool | Iterable[str], func: Callable[P, R]) -> Callable[P, ContextManager[Any]]: ... 19 | def get_attributes(func: Any, msg_template: str | None, tags: Sequence[str] | None) -> dict[str, otel_types.AttributeValue]: ... 20 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire-api/logfire_api/_internal/integrations/__init__.pyi -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/aiohttp_client.pyi: -------------------------------------------------------------------------------- 1 | from logfire import Logfire as Logfire 2 | from typing import Any 3 | 4 | def instrument_aiohttp_client(logfire_instance: Logfire, **kwargs: Any): 5 | """Instrument the `aiohttp` module so that spans are automatically created for each client request. 6 | 7 | See the `Logfire.instrument_aiohttp_client` method for details. 8 | """ 9 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/asgi.pyi: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from logfire import Logfire as Logfire 3 | from logfire._internal.utils import is_asgi_send_receive_span_name as is_asgi_send_receive_span_name, maybe_capture_server_headers as maybe_capture_server_headers 4 | from opentelemetry.context import Context 5 | from opentelemetry.trace import Span, Tracer, TracerProvider 6 | from typing import Any, Awaitable, Callable, Protocol, TypedDict 7 | from typing_extensions import Unpack 8 | 9 | Scope = dict[str, Any] 10 | Receive = Callable[[], Awaitable[dict[str, Any]]] 11 | Send = Callable[[dict[str, Any]], Awaitable[None]] 12 | 13 | class ASGIApp(Protocol): 14 | def __call__(self, scope: Scope, receive: Receive, send: Send) -> Awaitable[None]: ... 
15 | Hook = Callable[[Span, dict[str, Any]], None] 16 | 17 | class ASGIInstrumentKwargs(TypedDict, total=False): 18 | excluded_urls: str | None 19 | default_span_details: Callable[[Scope], tuple[str, dict[str, Any]]] 20 | server_request_hook: Hook | None 21 | client_request_hook: Hook | None 22 | client_response_hook: Hook | None 23 | http_capture_headers_server_request: list[str] | None 24 | http_capture_headers_server_response: list[str] | None 25 | http_capture_headers_sanitize_fields: list[str] | None 26 | 27 | def tweak_asgi_spans_tracer_provider(logfire_instance: Logfire, record_send_receive: bool) -> TracerProvider: 28 | """If record_send_receive is False, return a TracerProvider that skips spans for ASGI send and receive events.""" 29 | 30 | @dataclass 31 | class TweakAsgiTracerProvider(TracerProvider): 32 | tracer_provider: TracerProvider 33 | def get_tracer(self, *args: Any, **kwargs: Any) -> Tracer: ... 34 | 35 | @dataclass 36 | class TweakAsgiSpansTracer(Tracer): 37 | tracer: Tracer 38 | def start_span(self, name: str, context: Context | None = None, *args: Any, **kwargs: Any) -> Span: ... 39 | start_as_current_span = ... 40 | 41 | def instrument_asgi(logfire_instance: Logfire, app: ASGIApp, *, record_send_receive: bool = False, capture_headers: bool = False, **kwargs: Unpack[ASGIInstrumentKwargs]) -> ASGIApp: 42 | """Instrument `app` so that spans are automatically created for each request. 43 | 44 | See the `Logfire.instrument_asgi` method for details. 45 | """ 46 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/asyncpg.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | def instrument_asyncpg(**kwargs: Any) -> None: 4 | """Instrument the `asyncpg` module so that spans are automatically created for each query. 5 | 6 | See the `Logfire.instrument_asyncpg` method for details. 7 | """ 8 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/aws_lambda.pyi: -------------------------------------------------------------------------------- 1 | from opentelemetry.context import Context as Context 2 | from opentelemetry.metrics import MeterProvider 3 | from opentelemetry.trace import TracerProvider 4 | from typing import Any, Callable 5 | 6 | LambdaEvent = Any 7 | LambdaHandler = Callable[[LambdaEvent, Any], Any] 8 | 9 | def instrument_aws_lambda(lambda_handler: LambdaHandler, *, tracer_provider: TracerProvider, meter_provider: MeterProvider, event_context_extractor: Callable[[LambdaEvent], Context] | None = None, **kwargs: Any) -> None: 10 | """Instrument the AWS Lambda runtime so that spans are automatically created for each invocation. 11 | 12 | See the `Logfire.instrument_aws_lambda` method for details. 13 | """ 14 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/celery.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | def instrument_celery(**kwargs: Any) -> None: 4 | """Instrument the `celery` module so that spans are automatically created for each task. 5 | 6 | See the `Logfire.instrument_celery` method for details. 
7 | """ 8 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/django.pyi: -------------------------------------------------------------------------------- 1 | from django.http import HttpRequest as HttpRequest, HttpResponse as HttpResponse 2 | from logfire._internal.utils import maybe_capture_server_headers as maybe_capture_server_headers 3 | from opentelemetry.trace import Span as Span 4 | from typing import Any, Callable 5 | 6 | def instrument_django(*, capture_headers: bool, is_sql_commentor_enabled: bool | None, excluded_urls: str | None, request_hook: Callable[[Span, HttpRequest], None] | None, response_hook: Callable[[Span, HttpRequest, HttpResponse], None] | None, **kwargs: Any) -> None: 7 | """Instrument the `django` module so that spans are automatically created for each web request. 8 | 9 | See the `Logfire.instrument_django` method for details. 10 | """ 11 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/executors.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor 3 | from logfire.propagate import ContextCarrier as ContextCarrier, attach_context as attach_context, get_context as get_context 4 | from typing import Any, Callable 5 | 6 | submit_t_orig: Incomplete 7 | submit_p_orig: Incomplete 8 | 9 | def instrument_executors() -> None: 10 | """Monkey-patch `submit()` methods of `ThreadPoolExecutor` and `ProcessPoolExecutor` 11 | to carry over OTEL context across threads and processes. 12 | """ 13 | def submit_t(s: ThreadPoolExecutor, fn: Callable[..., Any], /, *args: Any, **kwargs: Any): 14 | """A wrapper around ThreadPoolExecutor.submit() that carries over OTEL context across threads.""" 15 | def submit_p(s: ProcessPoolExecutor, fn: Callable[..., Any], /, *args: Any, **kwargs: Any): 16 | """A wrapper around ProcessPoolExecutor.submit() that carries over OTEL context across processes.""" 17 | def serialize_config() -> dict[str, Any]: ... 18 | def deserialize_config(config: dict[str, Any]) -> None: ... 
19 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/fastapi.pyi: -------------------------------------------------------------------------------- 1 | from ..main import Logfire as Logfire, set_user_attributes_on_raw_span as set_user_attributes_on_raw_span 2 | from ..stack_info import StackInfo as StackInfo, get_code_object_info as get_code_object_info 3 | from ..utils import handle_internal_errors as handle_internal_errors, maybe_capture_server_headers as maybe_capture_server_headers 4 | from .asgi import tweak_asgi_spans_tracer_provider as tweak_asgi_spans_tracer_provider 5 | from _typeshed import Incomplete 6 | from fastapi import FastAPI 7 | from starlette.requests import Request 8 | from starlette.websockets import WebSocket 9 | from typing import Any, Awaitable, Callable, ContextManager, Iterable 10 | 11 | def find_mounted_apps(app: FastAPI) -> list[FastAPI]: 12 | """Fetch all sub-apps mounted to a FastAPI app, including nested sub-apps.""" 13 | def instrument_fastapi(logfire_instance: Logfire, app: FastAPI, *, capture_headers: bool = False, request_attributes_mapper: Callable[[Request | WebSocket, dict[str, Any]], dict[str, Any] | None] | None = None, excluded_urls: str | Iterable[str] | None = None, record_send_receive: bool = False, **opentelemetry_kwargs: Any) -> ContextManager[None]: 14 | """Instrument a FastAPI app so that spans and logs are automatically created for each request. 15 | 16 | See `Logfire.instrument_fastapi` for more details. 17 | """ 18 | def patch_fastapi(): 19 | """Globally monkeypatch fastapi functions and return a dictionary for recording instrumentation config per app.""" 20 | 21 | class FastAPIInstrumentation: 22 | logfire_instance: Incomplete 23 | request_attributes_mapper: Incomplete 24 | def __init__(self, logfire_instance: Logfire, request_attributes_mapper: Callable[[Request | WebSocket, dict[str, Any]], dict[str, Any] | None]) -> None: ... 25 | async def solve_dependencies(self, request: Request | WebSocket, original: Awaitable[Any]) -> Any: ... 26 | async def run_endpoint_function(self, original_run_endpoint_function: Any, request: Request, dependant: Any, values: dict[str, Any], **kwargs: Any) -> Any: ... 27 | 28 | class _InstrumentedValues(dict): 29 | request: Request 30 | 31 | LOGFIRE_SPAN_SCOPE_KEY: str 32 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/flask.pyi: -------------------------------------------------------------------------------- 1 | from flask.app import Flask 2 | from logfire._internal.stack_info import warn_at_user_stacklevel as warn_at_user_stacklevel 3 | from logfire._internal.utils import maybe_capture_server_headers as maybe_capture_server_headers 4 | from logfire.integrations.flask import CommenterOptions as CommenterOptions, RequestHook as RequestHook, ResponseHook as ResponseHook 5 | from typing import Any 6 | 7 | def instrument_flask(app: Flask, *, capture_headers: bool, enable_commenter: bool, commenter_options: CommenterOptions | None, excluded_urls: str | None = None, request_hook: RequestHook | None = None, response_hook: ResponseHook | None = None, **kwargs: Any): 8 | """Instrument `app` so that spans are automatically created for each request. 9 | 10 | See the `Logfire.instrument_flask` method for details. 
11 | """ 12 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/mcp.pyi: -------------------------------------------------------------------------------- 1 | from logfire import LevelName as LevelName, Logfire as Logfire 2 | from logfire._internal.utils import handle_internal_errors as handle_internal_errors 3 | 4 | def instrument_mcp(logfire_instance: Logfire): ... 5 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/mysql.pyi: -------------------------------------------------------------------------------- 1 | from mysql.connector.abstracts import MySQLConnectionAbstract as MySQLConnectionAbstract 2 | from mysql.connector.pooling import PooledMySQLConnection as PooledMySQLConnection 3 | from opentelemetry.trace import TracerProvider 4 | from typing import Any, TypeVar 5 | 6 | MySQLConnection = TypeVar('MySQLConnection', 'PooledMySQLConnection | MySQLConnectionAbstract', None) 7 | 8 | def instrument_mysql(*, conn: MySQLConnection = None, tracer_provider: TracerProvider, **kwargs: Any) -> MySQLConnection: 9 | """Instrument the `mysql` module or a specific MySQL connection so that spans are automatically created for each operation. 10 | 11 | See the `Logfire.instrument_mysql` method for details. 12 | """ 13 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/psycopg.pyi: -------------------------------------------------------------------------------- 1 | from logfire import Logfire as Logfire 2 | from opentelemetry.instrumentation.psycopg import PsycopgInstrumentor 3 | from opentelemetry.instrumentation.psycopg2 import Psycopg2Instrumentor 4 | from psycopg import AsyncConnection, Connection 5 | from psycopg2._psycopg import connection as Psycopg2Connection 6 | from types import ModuleType 7 | from typing import Any, Literal 8 | from typing_extensions import TypeVar 9 | 10 | PsycopgConnection = TypeVar('PsycopgConnection', Connection[Any], AsyncConnection[Any], Psycopg2Connection) 11 | Instrumentor = PsycopgInstrumentor | Psycopg2Instrumentor 12 | PACKAGE_NAMES: tuple[Literal['psycopg'], Literal['psycopg2']] 13 | 14 | def instrument_psycopg(logfire_instance: Logfire, conn_or_module: ModuleType | Literal['psycopg', 'psycopg2'] | None | PsycopgConnection | Psycopg2Connection, **kwargs: Any) -> None: 15 | """Instrument a `psycopg` connection or module so that spans are automatically created for each query. 16 | 17 | See the `Logfire.instrument_psycopg` method for details. 18 | """ 19 | def check_version(name: str, version: str, instrumentor: Instrumentor) -> bool: ... 20 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/pydantic_ai.pyi: -------------------------------------------------------------------------------- 1 | from logfire import Logfire as Logfire 2 | from pydantic_ai import Agent 3 | from pydantic_ai.models import Model 4 | from pydantic_ai.models.instrumented import InstrumentedModel 5 | from typing import Any, Literal 6 | 7 | def instrument_pydantic_ai(logfire_instance: Logfire, obj: Agent | Model | None, event_mode: Literal['attributes', 'logs'] | None, **kwargs: Any) -> None | InstrumentedModel: ... 
8 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/pymongo.pyi: -------------------------------------------------------------------------------- 1 | from opentelemetry.sdk.trace import Span as Span 2 | from pymongo.monitoring import CommandFailedEvent as CommandFailedEvent, CommandStartedEvent as CommandStartedEvent, CommandSucceededEvent as CommandSucceededEvent 3 | from typing import Any, Callable 4 | 5 | def instrument_pymongo(*, capture_statement: bool, request_hook: Callable[[Span, CommandStartedEvent], None] | None = None, response_hook: Callable[[Span, CommandSucceededEvent], None] | None = None, failed_hook: Callable[[Span, CommandFailedEvent], None] | None = None, **kwargs: Any) -> None: 6 | """Instrument the `pymongo` module so that spans are automatically created for each operation. 7 | 8 | See the `Logfire.instrument_pymongo` method for details. 9 | """ 10 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/redis.pyi: -------------------------------------------------------------------------------- 1 | from logfire._internal.constants import ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY 2 | from logfire._internal.utils import truncate_string as truncate_string 3 | from logfire.integrations.redis import RequestHook as RequestHook, ResponseHook as ResponseHook 4 | from typing import Any 5 | 6 | def instrument_redis(*, capture_statement: bool, request_hook: RequestHook | None, response_hook: ResponseHook | None, **kwargs: Any) -> None: 7 | """Instrument the `redis` module so that spans are automatically created for each operation. 8 | 9 | See the `Logfire.instrument_redis` method for details. 10 | """ 11 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/requests.pyi: -------------------------------------------------------------------------------- 1 | import requests 2 | from opentelemetry.sdk.trace import Span as Span 3 | from typing import Any, Callable 4 | 5 | def instrument_requests(excluded_urls: str | None = None, request_hook: Callable[[Span, requests.PreparedRequest], None] | None = None, response_hook: Callable[[Span, requests.PreparedRequest, requests.Response], None] | None = None, **kwargs: Any) -> None: 6 | """Instrument the `requests` module so that spans are automatically created for each request. 7 | 8 | See the `Logfire.instrument_requests` method for details. 9 | """ 10 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/sqlalchemy.pyi: -------------------------------------------------------------------------------- 1 | from logfire.integrations.sqlalchemy import CommenterOptions as CommenterOptions 2 | from sqlalchemy import Engine 3 | from sqlalchemy.ext.asyncio import AsyncEngine 4 | from typing import Any 5 | 6 | def instrument_sqlalchemy(engine: AsyncEngine | Engine | None, enable_commenter: bool, commenter_options: CommenterOptions, **kwargs: Any) -> None: 7 | """Instrument the `sqlalchemy` module so that spans are automatically created for each query. 8 | 9 | See the `Logfire.instrument_sqlalchemy` method for details. 
10 | """ 11 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/sqlite3.pyi: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | from opentelemetry.trace import TracerProvider 3 | from typing import Any, TypeVar 4 | 5 | SQLite3Connection = TypeVar('SQLite3Connection', sqlite3.Connection, None) 6 | 7 | def instrument_sqlite3(*, conn: SQLite3Connection, tracer_provider: TracerProvider, **kwargs: Any) -> SQLite3Connection: 8 | """Instrument the `sqlite3` module so that spans are automatically created for each query. 9 | 10 | See the `Logfire.instrument_sqlite3` method for details. 11 | """ 12 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/starlette.pyi: -------------------------------------------------------------------------------- 1 | from logfire import Logfire as Logfire 2 | from logfire._internal.integrations.asgi import tweak_asgi_spans_tracer_provider as tweak_asgi_spans_tracer_provider 3 | from logfire._internal.utils import maybe_capture_server_headers as maybe_capture_server_headers 4 | from opentelemetry.instrumentation.asgi.types import ClientRequestHook, ClientResponseHook, ServerRequestHook 5 | from starlette.applications import Starlette 6 | from typing import Any 7 | 8 | def instrument_starlette(logfire_instance: Logfire, app: Starlette, *, record_send_receive: bool = False, capture_headers: bool = False, server_request_hook: ServerRequestHook | None = None, client_request_hook: ClientRequestHook | None = None, client_response_hook: ClientResponseHook | None = None, **kwargs: Any): 9 | """Instrument `app` so that spans are automatically created for each request. 10 | 11 | See the `Logfire.instrument_starlette` method for details. 12 | """ 13 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/system_metrics.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from logfire import Logfire as Logfire 3 | from typing import Iterable, Literal 4 | from typing_extensions import LiteralString 5 | 6 | MetricName: type[Literal['system.cpu.simple_utilization', 'system.cpu.time', 'system.cpu.utilization', 'system.memory.usage', 'system.memory.utilization', 'system.swap.usage', 'system.swap.utilization', 'system.disk.io', 'system.disk.operations', 'system.disk.time', 'system.network.dropped.packets', 'system.network.packets', 'system.network.errors', 'system.network.io', 'system.network.connections', 'system.thread_count', 'process.open_file_descriptor.count', 'process.context_switches', 'process.cpu.time', 'process.cpu.utilization', 'process.cpu.core_utilization', 'process.memory.usage', 'process.memory.virtual', 'process.thread.count', 'process.runtime.gc_count', 'process.runtime.memory', 'process.runtime.cpu.time', 'process.runtime.thread_count', 'process.runtime.cpu.utilization', 'process.runtime.context_switches']] 7 | Config = dict[MetricName, Iterable[str] | None] 8 | CPU_FIELDS: list[LiteralString] 9 | MEMORY_FIELDS: list[LiteralString] 10 | FULL_CONFIG: Config 11 | BASIC_CONFIG: Config 12 | Base: Incomplete 13 | 14 | def get_base_config(base: Base) -> Config: ... 15 | def instrument_system_metrics(logfire_instance: Logfire, config: Config | None = None, base: Base = 'basic'): ... 
16 | def measure_simple_cpu_utilization(logfire_instance: Logfire): ... 17 | def measure_process_runtime_cpu_utilization(logfire_instance: Logfire): ... 18 | def measure_process_cpu_utilization(logfire_instance: Logfire): ... 19 | def measure_process_cpu_core_utilization(logfire_instance: Logfire): 20 | """Same as process.cpu.utilization, but not divided by the number of available cores.""" 21 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/integrations/wsgi.pyi: -------------------------------------------------------------------------------- 1 | from logfire._internal.utils import maybe_capture_server_headers as maybe_capture_server_headers 2 | from logfire.integrations.wsgi import RequestHook as RequestHook, ResponseHook as ResponseHook 3 | from typing import Any 4 | from wsgiref.types import WSGIApplication 5 | 6 | def instrument_wsgi(app: WSGIApplication, *, capture_headers: bool = False, request_hook: RequestHook | None = None, response_hook: ResponseHook | None = None, **kwargs: Any) -> WSGIApplication: 7 | """Instrument `app` so that spans are automatically created for each request. 8 | 9 | See the `Logfire.instrument_wsgi` method for details. 10 | """ 11 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/json_encoder.pyi: -------------------------------------------------------------------------------- 1 | from .utils import JsonValue as JsonValue, safe_repr as safe_repr 2 | from _typeshed import Incomplete 3 | from typing import Any 4 | 5 | NUMPY_DIMENSION_MAX_SIZE: int 6 | EncoderFunction: Incomplete 7 | 8 | def encoder_by_type() -> dict[type[Any], EncoderFunction]: ... 9 | def to_json_value(o: Any, seen: set[int]) -> JsonValue: ... 10 | def logfire_json_dumps(obj: Any) -> str: ... 11 | def is_sqlalchemy(obj: Any) -> bool: ... 12 | def is_attrs(cls) -> bool: ... 13 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/json_formatter.pyi: -------------------------------------------------------------------------------- 1 | from .json_types import ArraySchema as ArraySchema, DataType as DataType, JSONSchema as JSONSchema 2 | from .utils import safe_repr as safe_repr 3 | from _typeshed import Incomplete 4 | from typing import Any 5 | 6 | class JsonArgsValueFormatter: 7 | """Format values recursively based on the information provided in value dict. 8 | 9 | When a custom format is identified, the `$__datatype__` key is always present. 10 | """ 11 | def __init__(self, *, indent: int) -> None: ... 12 | def __call__(self, value: Any, *, schema: JSONSchema | None = None, indent_current: int = 0): ... 13 | 14 | json_args_value_formatter: Incomplete 15 | json_args_value_formatter_compact: Incomplete 16 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/json_schema.pyi: -------------------------------------------------------------------------------- 1 | from .utils import JsonDict 2 | from _typeshed import Incomplete 3 | from typing import Any 4 | 5 | __all__ = ['create_json_schema', 'attributes_json_schema_properties', 'attributes_json_schema', 'JsonSchemaProperties'] 6 | 7 | def create_json_schema(obj: Any, seen: set[int]) -> JsonDict: 8 | """Create a JSON Schema from the given object. 9 | 10 | Args: 11 | obj: The object to create the JSON Schema from. 12 | seen: A set of object IDs that have already been processed. 
13 | 14 | Returns: 15 | The JSON Schema. 16 | """ 17 | 18 | JsonSchemaProperties: Incomplete 19 | 20 | def attributes_json_schema(properties: JsonSchemaProperties) -> str: ... 21 | def attributes_json_schema_properties(attributes: dict[str, Any]) -> JsonSchemaProperties: ... 22 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/json_types.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from typing import Any, Literal, TypeVar, TypedDict 3 | from typing_extensions import NotRequired 4 | 5 | T = TypeVar('T') 6 | DataType: Incomplete 7 | DateFormat: Incomplete 8 | IPFormat: Incomplete 9 | Format: Incomplete 10 | _EnumBase = TypedDict('_EnumBase', {'x-python-datatype': Literal['Enum']}) 11 | 12 | class _EnumAny(_EnumBase): 13 | type: Literal['object'] 14 | enum: list[Any] 15 | 16 | class _EnumString(_EnumBase): 17 | type: Literal['string'] 18 | enum: list[str] 19 | 20 | class _EnumInt(_EnumBase): 21 | type: Literal['integer'] 22 | enum: list[int] 23 | 24 | class _EnumFloat(_EnumBase): 25 | type: Literal['number'] 26 | enum: list[float] 27 | 28 | class _EnumBool(_EnumBase): 29 | type: Literal['boolean'] 30 | enum: list[bool] 31 | 32 | EnumSchema: Incomplete 33 | 34 | class _Items(TypedDict): 35 | items: JSONSchema 36 | 37 | class _PrefixItems(TypedDict): 38 | prefixItems: list[JSONSchema] 39 | 40 | _ArrayBase = TypedDict('_ArrayBase', {'type': Literal['array'], 'x-python-datatype': Literal['tuple', 'deque', 'set', 'frozenset', 'ndarray'], 'x-columns': NotRequired[list[str]], 'x-indices': NotRequired[list[Any]], 'x-shape': NotRequired[list[int]], 'x-dtype': NotRequired[str]}) 41 | 42 | class _ArrayItems(_ArrayBase, _Items): ... 43 | class _ArrayPrefixItems(_ArrayBase, _PrefixItems): ... 44 | 45 | ArraySchema: Incomplete 46 | _PropertyDataType = TypedDict('_PropertyDataType', {'x-python-datatype': DataType}, total=False) 47 | Type: Incomplete 48 | 49 | class _Property(_PropertyDataType, total=False): 50 | type: Type 51 | title: str 52 | format: Format 53 | properties: dict[str, JSONSchema] 54 | 55 | JSONSchema: Incomplete 56 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/logs.pyi: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | from dataclasses import dataclass 3 | from opentelemetry._logs import LogRecord, Logger, LoggerProvider 4 | from opentelemetry.util.types import _ExtendedAttributes 5 | from threading import Lock 6 | from typing import Any 7 | from weakref import WeakSet 8 | 9 | @dataclass 10 | class ProxyLoggerProvider(LoggerProvider): 11 | """A logger provider that wraps another internal logger provider allowing it to be re-assigned.""" 12 | provider: LoggerProvider 13 | loggers: WeakSet[ProxyLogger] = dataclasses.field(default_factory=WeakSet) 14 | lock: Lock = dataclasses.field(default_factory=Lock) 15 | suppressed_scopes: set[str] = dataclasses.field(default_factory=set) 16 | def get_logger(self, name: str, version: str | None = None, schema_url: str | None = None, attributes: _ExtendedAttributes | None = None) -> Logger: ... 17 | def suppress_scopes(self, *scopes: str) -> None: ... 18 | def set_provider(self, logger_provider: LoggerProvider) -> None: ... 19 | def __getattr__(self, item: str) -> Any: ... 
20 | 21 | @dataclass(eq=False) 22 | class ProxyLogger(Logger): 23 | logger: Logger 24 | name: str 25 | version: str | None = ... 26 | schema_url: str | None = ... 27 | attributes: _ExtendedAttributes | None = ... 28 | def emit(self, record: LogRecord) -> None: ... 29 | def set_logger(self, provider: LoggerProvider) -> None: ... 30 | def __getattr__(self, item: str): ... 31 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/_internal/ulid.pyi: -------------------------------------------------------------------------------- 1 | from random import Random 2 | from typing import Callable 3 | 4 | def ulid(random: Random, ms_timestamp_generator: Callable[[], int]) -> int: 5 | """Generate an integer ULID compatible with UUID v4. 6 | 7 | ULIDs as defined by the [spec](https://github.com/ulid/spec) look like this: 8 | 9 | 01AN4Z07BY 79KA1307SR9X4MV3 10 | |----------| |----------------| 11 | Timestamp Randomness 12 | 48bits 80bits 13 | 14 | In the future it would be nice to make this compatible with a UUID, 15 | e.g. v4 UUIDs by setting the version and variant bits correctly. 16 | We can't currently do this because setting these bits would leave us with only 7 bytes of randomness, 17 | which isn't enough for the Python SDK's sampler that currently expects 8 bytes of randomness. 18 | In the future OTEL will probably adopt https://www.w3.org/TR/trace-context-2/#random-trace-id-flag 19 | which relies only on the lower 7 bytes of the trace ID, then all SDKs and tooling should be updated 20 | and leaving only 7 bytes of randomness should be fine. 21 | 22 | Right now we only care about: 23 | - Our SDK / Python SDK's in general. 24 | - The OTEL collector. 25 | 26 | And both behave properly with 8 bytes of randomness because trace IDs were originally 64 bits 27 | so to be compatible with old trace IDs nothing in OTEL can assume >8 bytes of randomness in trace IDs 28 | unless they generated the trace ID themselves (e.g. the Go SDK _does_ expect >8 bytes of randomness internally). 
29 | """ 30 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/cli.pyi: -------------------------------------------------------------------------------- 1 | from ._internal.cli import main as main 2 | 3 | __all__ = ['main'] 4 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/exceptions.pyi: -------------------------------------------------------------------------------- 1 | class LogfireConfigError(ValueError): 2 | """Error raised when there is a problem with the Logfire configuration.""" 3 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/experimental/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire-api/logfire_api/experimental/__init__.pyi -------------------------------------------------------------------------------- /logfire-api/logfire_api/experimental/annotations.pyi: -------------------------------------------------------------------------------- 1 | import logfire 2 | from _typeshed import Incomplete 3 | from logfire._internal.constants import ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY 4 | from logfire.propagate import attach_context as attach_context 5 | from opentelemetry.trace import Span 6 | from typing import Any 7 | 8 | TRACEPARENT_PROPAGATOR: Incomplete 9 | TRACEPARENT_NAME: str 10 | feedback_logfire: Incomplete 11 | 12 | def get_traceparent(span: Span | logfire.LogfireSpan) -> str: 13 | """Get a string representing the span context to use for annotating spans.""" 14 | def raw_annotate_span(traceparent: str, span_name: str, message: str, attributes: dict[str, Any]) -> None: 15 | """Create a span of kind 'annotation' as a child of the span with the given traceparent.""" 16 | def record_feedback(traceparent: str, name: str, value: int | float | bool | str, comment: str | None = None, extra: dict[str, Any] | None = None) -> None: 17 | """Attach feedback to a span. 18 | 19 | This is a more structured version of `raw_annotate_span` 20 | with special attributes recognized by the Logfire UI. 21 | 22 | Args: 23 | traceparent: The traceparent string. 24 | name: The name of the evaluation. 25 | value: The value of the evaluation. 26 | Numbers are interpreted as scores, strings as labels, and booleans as assertions. 27 | comment: An optional reason for the evaluation. 28 | extra: Optional additional attributes to include in the span. 
29 | """ 30 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire-api/logfire_api/integrations/__init__.pyi -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/flask.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from typing import TypedDict 3 | from wsgiref.types import WSGIEnvironment as WSGIEnvironment 4 | 5 | RequestHook: Incomplete 6 | ResponseHook: Incomplete 7 | 8 | class CommenterOptions(TypedDict, total=False): 9 | """The `commenter_options` parameter for `instrument_flask`.""" 10 | framework: bool 11 | route: bool 12 | controller: bool 13 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/httpx.pyi: -------------------------------------------------------------------------------- 1 | import httpx 2 | from opentelemetry.trace import Span 3 | from typing import Any, Awaitable, Callable, NamedTuple 4 | 5 | class RequestInfo(NamedTuple): 6 | """Information about an HTTP request. 7 | 8 | This is the second parameter passed to the `RequestHook` function. 9 | """ 10 | method: bytes 11 | url: httpx.URL 12 | headers: httpx.Headers 13 | stream: httpx.SyncByteStream | httpx.AsyncByteStream | None 14 | extensions: dict[str, Any] | None 15 | 16 | class ResponseInfo(NamedTuple): 17 | """Information about an HTTP response. 18 | 19 | This is the second parameter passed to the `ResponseHook` function. 20 | """ 21 | status_code: int 22 | headers: httpx.Headers 23 | stream: httpx.SyncByteStream | httpx.AsyncByteStream | None 24 | extensions: dict[str, Any] | None 25 | RequestHook = Callable[[Span, RequestInfo], None] 26 | ResponseHook = Callable[[Span, RequestInfo, ResponseInfo], None] 27 | AsyncRequestHook = Callable[[Span, RequestInfo], Awaitable[None]] 28 | AsyncResponseHook = Callable[[Span, RequestInfo, ResponseInfo], Awaitable[None]] 29 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/logging.pyi: -------------------------------------------------------------------------------- 1 | from .. import Logfire as Logfire 2 | from .._internal.constants import ATTRIBUTES_LOGGING_ARGS_KEY as ATTRIBUTES_LOGGING_ARGS_KEY, ATTRIBUTES_LOGGING_NAME as ATTRIBUTES_LOGGING_NAME, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, LOGGING_TO_OTEL_LEVEL_NUMBERS as LOGGING_TO_OTEL_LEVEL_NUMBERS 3 | from .._internal.utils import is_instrumentation_suppressed as is_instrumentation_suppressed 4 | from _typeshed import Incomplete 5 | from logging import Handler as LoggingHandler, LogRecord 6 | from typing import Any, ClassVar 7 | 8 | RESERVED_ATTRS: frozenset[str] 9 | 10 | class LogfireLoggingHandler(LoggingHandler): 11 | """A [logging](../../integrations/logging.md) handler that sends logs to **Logfire**.""" 12 | custom_scope_suffix: ClassVar[str] 13 | fallback: Incomplete 14 | logfire_instance: Incomplete 15 | def __init__(self, level: int | str = ..., fallback: LoggingHandler = ..., logfire_instance: Logfire | None = None) -> None: ... 
16 | def emit(self, record: LogRecord) -> None: 17 | """Send the log to Logfire. 18 | 19 | Args: 20 | record: The log record to send. 21 | """ 22 | def fill_attributes(self, record: LogRecord) -> dict[str, Any]: 23 | """Fill the attributes to send to Logfire. 24 | 25 | This method can be overridden to add more attributes. 26 | 27 | Args: 28 | record: The log record. 29 | 30 | Returns: 31 | The attributes for the log record. 32 | """ 33 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/loguru.pyi: -------------------------------------------------------------------------------- 1 | from .._internal.constants import ATTRIBUTES_LOGGING_ARGS_KEY as ATTRIBUTES_LOGGING_ARGS_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY 2 | from .._internal.stack_info import warn_at_user_stacklevel as warn_at_user_stacklevel 3 | from .logging import LogfireLoggingHandler as LogfireLoggingHandler 4 | from _typeshed import Incomplete 5 | from logging import LogRecord 6 | from typing import Any 7 | 8 | LOGURU_PATH: Incomplete 9 | 10 | class LoguruInspectionFailed(RuntimeWarning): 11 | """Warning raised when magic introspection of loguru stack frames fails. 12 | 13 | This may happen if the loguru library changes in a way that breaks the introspection. 14 | """ 15 | 16 | class LogfireHandler(LogfireLoggingHandler): 17 | """A loguru handler that sends logs to **Logfire**.""" 18 | custom_scope_suffix: str 19 | def fill_attributes(self, record: LogRecord) -> dict[str, Any]: 20 | """Fill attributes from a log record. 21 | 22 | It filters out the 'extra' attribute and adds its content to the attributes. 23 | 24 | Args: 25 | record: The log record. 26 | 27 | Returns: 28 | The attributes for the log record. 29 | """ 30 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/psycopg.pyi: -------------------------------------------------------------------------------- 1 | from typing import TypedDict 2 | 3 | class CommenterOptions(TypedDict, total=False): 4 | """The `commenter_options` parameter for `instrument_psycopg`.""" 5 | db_driver: bool 6 | dbapi_threadsafety: bool 7 | dbapi_level: bool 8 | libpq_version: bool 9 | driver_paramstyle: bool 10 | opentelemetry_values: bool 11 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/redis.pyi: -------------------------------------------------------------------------------- 1 | from opentelemetry.trace import Span 2 | from redis import Connection 3 | from typing import Any, Protocol 4 | 5 | class RequestHook(Protocol): 6 | """A hook that is called before the request is sent.""" 7 | def __call__(self, span: Span, instance: Connection, *args: Any, **kwargs: Any) -> None: 8 | """Call the hook. 9 | 10 | Args: 11 | span: The span that is being created. 12 | instance: The connection instance. 13 | *args: The arguments that are passed to the command. 14 | **kwargs: The keyword arguments that are passed to the command. 15 | """ 16 | 17 | class ResponseHook(Protocol): 18 | """A hook that is called after the response is received.""" 19 | def __call__(self, span: Span, instance: Connection, response: Any) -> None: 20 | """Call the hook. 21 | 22 | Args: 23 | span: The span that is being created. 24 | instance: The connection instance. 25 | response: The response that is received.
26 | """ 27 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/sqlalchemy.pyi: -------------------------------------------------------------------------------- 1 | from typing import TypedDict 2 | 3 | class CommenterOptions(TypedDict, total=False): 4 | """The `commenter_options` parameter for `instrument_sqlalchemy`.""" 5 | db_driver: bool 6 | db_framework: bool 7 | opentelemetry_values: bool 8 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/structlog.pyi: -------------------------------------------------------------------------------- 1 | from .. import Logfire as Logfire 2 | from .._internal.constants import ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY 3 | from _typeshed import Incomplete 4 | from structlog.types import EventDict, WrappedLogger 5 | 6 | RESERVED_ATTRS: Incomplete 7 | 8 | class LogfireProcessor: 9 | """Logfire processor for [structlog](../../integrations/structlog.md).""" 10 | console_log: Incomplete 11 | logfire_instance: Incomplete 12 | def __init__(self, *, console_log: bool = False, logfire_instance: Logfire | None = None) -> None: ... 13 | def __call__(self, logger: WrappedLogger, name: str, event_dict: EventDict) -> EventDict: 14 | """A middleware to process structlog event, and send it to **Logfire**.""" 15 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/integrations/wsgi.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | from wsgiref.types import WSGIEnvironment as WSGIEnvironment 3 | 4 | ResponseHook: Incomplete 5 | RequestHook: Incomplete 6 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire-api/logfire_api/py.typed -------------------------------------------------------------------------------- /logfire-api/logfire_api/sampling/__init__.pyi: -------------------------------------------------------------------------------- 1 | from ._tail_sampling import SamplingOptions as SamplingOptions, SpanLevel as SpanLevel, TailSamplingSpanInfo as TailSamplingSpanInfo 2 | 3 | __all__ = ['SamplingOptions', 'SpanLevel', 'TailSamplingSpanInfo'] 4 | -------------------------------------------------------------------------------- /logfire-api/logfire_api/version.pyi: -------------------------------------------------------------------------------- 1 | from _typeshed import Incomplete 2 | 3 | VERSION: Incomplete 4 | -------------------------------------------------------------------------------- /logfire-api/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "logfire-api" 7 | version = "3.16.1" 8 | description = "Shim for the Logfire SDK which does nothing unless Logfire is installed" 9 | authors = [ 10 | { name = "Pydantic Team", email = "engineering@pydantic.dev" }, 11 | { name = "Samuel Colvin", email = "samuel@pydantic.dev" }, 12 | { name = "Hasan Ramezani", email = "hasan@pydantic.dev" }, 13 | { name = "Adrian Garcia Badaracco", email = "adrian@pydantic.dev" }, 14 | { name = "David Montague", email 
= "david@pydantic.dev" }, 15 | { name = "Marcelo Trylesinski", email = "marcelo@pydantic.dev" }, 16 | { name = "David Hewitt", email = "david.hewitt@pydantic.dev" }, 17 | { name = "Alex Hall", email = "alex@pydantic.dev" }, 18 | ] 19 | dependencies = [] 20 | readme = "README.md" 21 | license = "MIT" 22 | requires-python = ">= 3.8" 23 | 24 | [tool.uv] 25 | managed = true 26 | dev-dependencies = [] 27 | 28 | [tool.hatch.metadata] 29 | allow-direct-references = true 30 | 31 | [tool.hatch.build.targets.wheel] 32 | packages = ["logfire_api"] 33 | -------------------------------------------------------------------------------- /logfire/__main__.py: -------------------------------------------------------------------------------- 1 | """Logfire entry point.""" 2 | 3 | from .cli import main 4 | 5 | if __name__ == '__main__': 6 | main() 7 | -------------------------------------------------------------------------------- /logfire/_internal/__init__.py: -------------------------------------------------------------------------------- 1 | """Internal Logfire logic. 2 | 3 | We use the `_internal` module to discourage imports from outside the package, 4 | and thereby avoid causing breaking changes when refactoring the package. 5 | """ 6 | -------------------------------------------------------------------------------- /logfire/_internal/auto_trace/types.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | from dataclasses import dataclass 5 | from typing import Sequence 6 | 7 | 8 | @dataclass 9 | class AutoTraceModule: 10 | """Information about a module being imported that should maybe be traced automatically. 11 | 12 | This object will be passed to a function that should return True if the module should be traced. 13 | In particular it'll be passed to a function that's passed to `install_auto_tracing` as the `modules` argument. 14 | """ 15 | 16 | name: str 17 | """Fully qualified absolute name of the module being imported.""" 18 | 19 | filename: str | None 20 | """Filename of the module being imported.""" 21 | 22 | # The argument is meant to match `str.startswith`. 23 | # The method name is different to avoid confusion with `.name.startswith` which behaves slightly differently. 24 | def parts_start_with(self, prefix: str | Sequence[str]) -> bool: 25 | """Return True if the module name starts with any of the given prefixes, using dots as boundaries. 26 | 27 | For example, if the module name is `foo.bar.spam`, then `parts_start_with('foo')` will return True, 28 | but `parts_start_with('bar')` or `parts_start_with('foo_bar')` will return False. 29 | In other words, this will match the module itself or any submodules. 30 | 31 | If a prefix contains any characters other than letters, numbers, and dots, 32 | then it will be treated as a regular expression. 
33 | """ 34 | if isinstance(prefix, str): 35 | prefix = (prefix,) 36 | pattern = '|'.join([get_module_pattern(p) for p in prefix]) 37 | return bool(re.match(pattern, self.name)) 38 | 39 | 40 | def get_module_pattern(module: str): 41 | if not re.match(r'[\w.]+$', module, re.UNICODE): 42 | return module # treat as regex 43 | module = re.escape(module) # escape dots 44 | return rf'{module}($|\.)' 45 | -------------------------------------------------------------------------------- /logfire/_internal/collect_system_info.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import importlib.metadata as metadata 4 | from functools import lru_cache 5 | 6 | 7 | @lru_cache 8 | def collect_package_info() -> dict[str, str]: 9 | """Retrieve the package information for all installed packages. 10 | 11 | Returns: 12 | A dicts with the package name and version. 13 | """ 14 | try: 15 | distributions = list(metadata.distributions()) 16 | try: 17 | metas = [dist.metadata for dist in distributions] 18 | pairs = [(meta['Name'], meta.get('Version', 'UNKNOWN')) for meta in metas if meta.get('Name')] 19 | except Exception: # pragma: no cover 20 | # Just in case `dist.metadata['Name']` stops working but `dist.name` still works, 21 | # not that this is expected. 22 | # Currently this is about 2x slower because `dist.name` and `dist.version` each call `dist.metadata`, 23 | # which reads and parses a file and is not cached. 24 | pairs = [(dist.name, dist.version) for dist in distributions] 25 | except Exception: # pragma: no cover 26 | # Don't crash for this. 27 | pairs = [] 28 | 29 | return dict(sorted(pairs)) 30 | -------------------------------------------------------------------------------- /logfire/_internal/exporters/__init__.py: -------------------------------------------------------------------------------- 1 | """The span exporters for Logfire.""" 2 | -------------------------------------------------------------------------------- /logfire/_internal/exporters/dynamic_batch.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from typing import cast 5 | 6 | from opentelemetry.sdk.environment_variables import OTEL_BSP_SCHEDULE_DELAY 7 | from opentelemetry.sdk.trace import ReadableSpan 8 | from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter 9 | 10 | from logfire._internal.exporters.wrapper import WrapperSpanProcessor 11 | 12 | 13 | class DynamicBatchSpanProcessor(WrapperSpanProcessor): 14 | """A wrapper around a BatchSpanProcessor that dynamically adjusts the schedule delay. 15 | 16 | The initial schedule delay is set to 100ms, and after processing 10 spans, it is set to the value of 17 | the `OTEL_BSP_SCHEDULE_DELAY` environment variable (default: 500ms). 18 | This makes the initial experience of the SDK more responsive. 19 | """ 20 | 21 | def __init__(self, exporter: SpanExporter) -> None: 22 | self.final_delay = float(os.environ.get(OTEL_BSP_SCHEDULE_DELAY) or 500) 23 | # Start with the configured value immediately if it's less than 100ms. 
24 | initial_delay = min(self.final_delay, 100) 25 | super().__init__(BatchSpanProcessor(exporter, schedule_delay_millis=initial_delay)) 26 | self.num_processed = 0 27 | 28 | def on_end(self, span: ReadableSpan) -> None: 29 | self.num_processed += 1 30 | if self.num_processed == 10: 31 | cast(BatchSpanProcessor, self.processor).schedule_delay_millis = self.final_delay 32 | super().on_end(span) 33 | -------------------------------------------------------------------------------- /logfire/_internal/exporters/logs.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from opentelemetry.sdk._logs import LogData 4 | 5 | import logfire 6 | from logfire._internal.exporters.wrapper import WrapperLogProcessor 7 | from logfire._internal.scrubbing import BaseScrubber 8 | from logfire._internal.utils import is_instrumentation_suppressed 9 | 10 | 11 | class CheckSuppressInstrumentationLogProcessorWrapper(WrapperLogProcessor): 12 | """Checks if instrumentation is suppressed, then suppresses instrumentation itself. 13 | 14 | Placed at the root of the tree of processors. 15 | """ 16 | 17 | def emit(self, log_data: LogData): 18 | if is_instrumentation_suppressed(): 19 | return 20 | with logfire.suppress_instrumentation(): 21 | return super().emit(log_data) 22 | 23 | 24 | @dataclass 25 | class MainLogProcessorWrapper(WrapperLogProcessor): 26 | scrubber: BaseScrubber 27 | 28 | def emit(self, log_data: LogData): 29 | log_data.log_record = self.scrubber.scrub_log(log_data.log_record) 30 | return super().emit(log_data) 31 | -------------------------------------------------------------------------------- /logfire/_internal/exporters/quiet_metrics.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | import requests 4 | from opentelemetry.sdk.metrics.export import MetricExportResult, MetricsData 5 | 6 | from .wrapper import WrapperMetricExporter 7 | 8 | 9 | class QuietMetricExporter(WrapperMetricExporter): 10 | """A MetricExporter that catches request exceptions to prevent OTEL from logging a huge traceback.""" 11 | 12 | def export(self, metrics_data: MetricsData, timeout_millis: float = 10_000, **kwargs: Any) -> MetricExportResult: 13 | try: 14 | return super().export(metrics_data, timeout_millis, **kwargs) 15 | except requests.exceptions.RequestException: 16 | # Rely on OTLPExporterHttpSession to log this kind of error periodically. 17 | return MetricExportResult.FAILURE 18 | -------------------------------------------------------------------------------- /logfire/_internal/exporters/remove_pending.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Sequence 4 | 5 | from opentelemetry.sdk.trace import ReadableSpan 6 | from opentelemetry.sdk.trace.export import SpanExportResult 7 | 8 | from ..constants import ATTRIBUTES_SPAN_TYPE_KEY 9 | from .wrapper import WrapperSpanExporter 10 | 11 | 12 | class RemovePendingSpansExporter(WrapperSpanExporter): 13 | """An exporter that filters out pending spans if the corresponding final span is already in the same batch.""" 14 | 15 | def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: 16 | result: list[ReadableSpan] = [] 17 | 18 | # Mapping of (trace_id, span_id) to either a pending or final span, whichever is found first. 19 | # We avoid assuming that pending spans appear first in the `spans` list. 
20 | # After all, `result` is likely to be in a different order than `spans`. 21 | spans_by_id: dict[tuple[int, int], ReadableSpan] = {} 22 | 23 | for span in spans: 24 | attributes = span.attributes or {} 25 | span_type = attributes.get(ATTRIBUTES_SPAN_TYPE_KEY) 26 | 27 | if span_type == 'pending_span': 28 | context = span.parent 29 | if context: # pragma: no branch 30 | key = (context.trace_id, context.span_id) 31 | spans_by_id.setdefault(key, span) 32 | continue 33 | 34 | elif span_type == 'span': 35 | context = span.context # note that this context is different from the pending span case 36 | if context: # pragma: no branch 37 | key = (context.trace_id, context.span_id) 38 | spans_by_id[key] = span 39 | continue 40 | 41 | # In particular this includes logs. 42 | result.append(span) 43 | 44 | result.extend(spans_by_id.values()) 45 | return super().export(result) 46 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/__init__.py: -------------------------------------------------------------------------------- 1 | """Private logic for Logfire integrations.""" 2 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/aiohttp_client.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | try: 4 | from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor 5 | except ImportError: 6 | raise RuntimeError( 7 | '`logfire.instrument_aiohttp_client()` requires the `opentelemetry-instrumentation-aiohttp-client` package.\n' 8 | 'You can install this with:\n' 9 | " pip install 'logfire[aiohttp]'" 10 | ) 11 | from logfire import Logfire 12 | 13 | 14 | def instrument_aiohttp_client(logfire_instance: Logfire, **kwargs: Any): 15 | """Instrument the `aiohttp` module so that spans are automatically created for each client request. 16 | 17 | See the `Logfire.instrument_aiohttp_client` method for details. 18 | """ 19 | AioHttpClientInstrumentor().instrument( 20 | **{ 21 | 'tracer_provider': logfire_instance.config.get_tracer_provider(), 22 | 'meter_provider': logfire_instance.config.get_meter_provider(), 23 | **kwargs, 24 | }, 25 | ) 26 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/asyncpg.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | try: 6 | from opentelemetry.instrumentation.asyncpg import AsyncPGInstrumentor 7 | except ImportError: 8 | raise RuntimeError( 9 | '`logfire.instrument_asyncpg()` requires the `opentelemetry-instrumentation-asyncpg` package.\n' 10 | 'You can install this with:\n' 11 | " pip install 'logfire[asyncpg]'" 12 | ) 13 | 14 | 15 | def instrument_asyncpg(**kwargs: Any) -> None: 16 | """Instrument the `asyncpg` module so that spans are automatically created for each query. 17 | 18 | See the `Logfire.instrument_asyncpg` method for details. 
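    A rough sketch of the intended usage via the public API (this internal helper only wires up the
    OpenTelemetry instrumentor; the sketch assumes the SDK has already been configured):

        import logfire

        logfire.configure()
        logfire.instrument_asyncpg()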
19 | """ 20 | AsyncPGInstrumentor().instrument(**kwargs) 21 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/aws_lambda.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | try: 4 | from opentelemetry.context import Context 5 | from opentelemetry.instrumentation.aws_lambda import AwsLambdaInstrumentor 6 | from opentelemetry.metrics import MeterProvider 7 | from opentelemetry.trace import TracerProvider 8 | except ImportError: 9 | raise RuntimeError( 10 | '`logfire.instrument_aws_lambda()` requires the `opentelemetry-instrumentation-aws-lambda` package.\n' 11 | 'You can install this with:\n' 12 | " pip install 'logfire[aws-lambda]'" 13 | ) 14 | 15 | from typing import Any, Callable 16 | 17 | LambdaEvent = Any 18 | LambdaHandler = Callable[[LambdaEvent, Any], Any] 19 | 20 | 21 | def instrument_aws_lambda( 22 | lambda_handler: LambdaHandler, 23 | *, 24 | tracer_provider: TracerProvider, 25 | meter_provider: MeterProvider, 26 | event_context_extractor: Callable[[LambdaEvent], Context] | None = None, 27 | **kwargs: Any, 28 | ) -> None: 29 | """Instrument the AWS Lambda runtime so that spans are automatically created for each invocation. 30 | 31 | See the `Logfire.instrument_aws_lambda` method for details. 32 | """ 33 | if event_context_extractor is not None: 34 | kwargs['event_context_extractor'] = event_context_extractor 35 | return AwsLambdaInstrumentor().instrument(tracer_provider=tracer_provider, meter_provider=meter_provider, **kwargs) 36 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/celery.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | try: 6 | from opentelemetry.instrumentation.celery import CeleryInstrumentor 7 | except ImportError: 8 | raise RuntimeError( 9 | '`logfire.instrument_celery()` requires the `opentelemetry-instrumentation-celery` package.\n' 10 | 'You can install this with:\n' 11 | " pip install 'logfire[celery]'" 12 | ) 13 | 14 | 15 | def instrument_celery(**kwargs: Any) -> None: 16 | """Instrument the `celery` module so that spans are automatically created for each task. 17 | 18 | See the `Logfire.instrument_celery` method for details. 
19 | """ 20 | return CeleryInstrumentor().instrument(**kwargs) 21 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/django.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Callable 4 | 5 | from django.http import HttpRequest, HttpResponse 6 | from opentelemetry.trace import Span 7 | 8 | from logfire._internal.utils import maybe_capture_server_headers 9 | 10 | try: 11 | from opentelemetry.instrumentation.django import DjangoInstrumentor 12 | except ImportError: 13 | raise RuntimeError( 14 | '`logfire.instrument_django()` requires the `opentelemetry-instrumentation-django` package.\n' 15 | 'You can install this with:\n' 16 | " pip install 'logfire[django]'" 17 | ) 18 | 19 | 20 | def instrument_django( 21 | *, 22 | capture_headers: bool, 23 | is_sql_commentor_enabled: bool | None, 24 | excluded_urls: str | None, 25 | request_hook: Callable[[Span, HttpRequest], None] | None, 26 | response_hook: Callable[[Span, HttpRequest, HttpResponse], None] | None, 27 | **kwargs: Any, 28 | ) -> None: 29 | """Instrument the `django` module so that spans are automatically created for each web request. 30 | 31 | See the `Logfire.instrument_django` method for details. 32 | """ 33 | maybe_capture_server_headers(capture_headers) 34 | DjangoInstrumentor().instrument( 35 | excluded_urls=excluded_urls, 36 | is_sql_commentor_enabled=is_sql_commentor_enabled, 37 | request_hook=request_hook, 38 | response_hook=response_hook, 39 | **kwargs, 40 | ) 41 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/flask.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | from flask.app import Flask 6 | 7 | from logfire._internal.stack_info import warn_at_user_stacklevel 8 | 9 | try: 10 | from opentelemetry.instrumentation.flask import FlaskInstrumentor 11 | except ImportError: 12 | raise RuntimeError( 13 | '`logfire.instrument_flask()` requires the `opentelemetry-instrumentation-flask` package.\n' 14 | 'You can install this with:\n' 15 | " pip install 'logfire[flask]'" 16 | ) 17 | 18 | from logfire._internal.utils import maybe_capture_server_headers 19 | from logfire.integrations.flask import CommenterOptions, RequestHook, ResponseHook 20 | 21 | 22 | def instrument_flask( 23 | app: Flask, 24 | *, 25 | capture_headers: bool, 26 | enable_commenter: bool, 27 | commenter_options: CommenterOptions | None, 28 | excluded_urls: str | None = None, 29 | request_hook: RequestHook | None = None, 30 | response_hook: ResponseHook | None = None, 31 | **kwargs: Any, 32 | ): 33 | """Instrument `app` so that spans are automatically created for each request. 34 | 35 | See the `Logfire.instrument_flask` method for details. 36 | """ 37 | maybe_capture_server_headers(capture_headers) 38 | 39 | # Previously the parameter was accidentally called exclude_urls, so we support both. 
40 | if 'exclude_urls' in kwargs: # pragma: no cover 41 | warn_at_user_stacklevel('exclude_urls is deprecated; use excluded_urls instead', DeprecationWarning) 42 | excluded_urls = excluded_urls or kwargs.pop('exclude_urls', None) 43 | 44 | FlaskInstrumentor().instrument_app( # type: ignore[reportUnknownMemberType] 45 | app, 46 | enable_commenter=enable_commenter, 47 | commenter_options=commenter_options, 48 | excluded_urls=excluded_urls, 49 | request_hook=request_hook, 50 | response_hook=response_hook, 51 | **kwargs, 52 | ) 53 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/llm_providers/types.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from abc import ABC, abstractmethod 4 | from typing import Any, NamedTuple 5 | 6 | from typing_extensions import LiteralString 7 | 8 | 9 | class StreamState(ABC): 10 | """Keeps track of the state of a streamed response.""" 11 | 12 | @abstractmethod 13 | def record_chunk(self, chunk: Any) -> None: 14 | """Update the state based on a chunk from the streamed response.""" 15 | 16 | @abstractmethod 17 | def get_response_data(self) -> Any: 18 | """Returns the response data for including in the log.""" 19 | 20 | 21 | class EndpointConfig(NamedTuple): 22 | """The configuration for the endpoint of a provider based on request url.""" 23 | 24 | message_template: LiteralString 25 | span_data: dict[str, Any] 26 | stream_state_cls: type[StreamState] | None = None 27 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/mysql.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, TypeVar 4 | 5 | from mysql.connector.abstracts import MySQLConnectionAbstract 6 | from mysql.connector.pooling import PooledMySQLConnection 7 | from opentelemetry.trace import TracerProvider 8 | 9 | try: 10 | from opentelemetry.instrumentation.mysql import MySQLInstrumentor 11 | except ImportError: 12 | raise RuntimeError( 13 | '`logfire.instrument_mysql()` requires the `opentelemetry-instrumentation-mysql` package.\n' 14 | 'You can install this with:\n' 15 | " pip install 'logfire[mysql]'" 16 | ) 17 | 18 | 19 | MySQLConnection = TypeVar('MySQLConnection', 'PooledMySQLConnection | MySQLConnectionAbstract', None) 20 | 21 | 22 | def instrument_mysql( 23 | *, 24 | conn: MySQLConnection = None, 25 | tracer_provider: TracerProvider, 26 | **kwargs: Any, 27 | ) -> MySQLConnection: 28 | """Instrument the `mysql` module or a specific MySQL connection so that spans are automatically created for each operation. 29 | 30 | See the `Logfire.instrument_mysql` method for details. 
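    If `conn` is provided, only that connection is instrumented and the instrumented connection is returned;
    if `conn` is None, the `mysql` connector module is instrumented globally instead.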
31 | """ 32 | if conn is not None: 33 | return MySQLInstrumentor().instrument_connection(conn, tracer_provider=tracer_provider) # type: ignore[reportUnknownMemberType] 34 | return MySQLInstrumentor().instrument(**kwargs, tracer_provider=tracer_provider) # type: ignore[reportUnknownMemberType] 35 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/pydantic_ai.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Literal 4 | 5 | from pydantic_ai import Agent 6 | from pydantic_ai.agent import InstrumentationSettings 7 | from pydantic_ai.models import Model 8 | from pydantic_ai.models.instrumented import InstrumentedModel 9 | 10 | from logfire import Logfire 11 | 12 | 13 | def instrument_pydantic_ai( 14 | logfire_instance: Logfire, 15 | obj: Agent | Model | None, 16 | event_mode: Literal['attributes', 'logs'] | None, 17 | **kwargs: Any, 18 | ) -> None | InstrumentedModel: 19 | if event_mode is None: 20 | event_mode = InstrumentationSettings.event_mode 21 | settings = InstrumentationSettings( 22 | tracer_provider=logfire_instance.config.get_tracer_provider(), 23 | event_logger_provider=logfire_instance.config.get_event_logger_provider(), 24 | event_mode=event_mode, 25 | **kwargs, 26 | ) 27 | if isinstance(obj, Agent): 28 | obj.instrument = settings 29 | elif isinstance(obj, Model): 30 | return InstrumentedModel(obj, settings) 31 | elif obj is None: 32 | Agent.instrument_all(settings) 33 | else: 34 | raise TypeError(f'Cannot instrument object of type {type(obj)}') 35 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/pymongo.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Callable 4 | 5 | from opentelemetry.sdk.trace import Span 6 | from pymongo.monitoring import CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent 7 | 8 | try: 9 | from opentelemetry.instrumentation.pymongo import ( 10 | PymongoInstrumentor, 11 | dummy_callback, 12 | ) 13 | except ImportError: 14 | raise RuntimeError( 15 | '`logfire.instrument_pymongo()` requires the `opentelemetry-instrumentation-pymongo` package.\n' 16 | 'You can install this with:\n' 17 | " pip install 'logfire[pymongo]'" 18 | ) 19 | 20 | 21 | def instrument_pymongo( 22 | *, 23 | capture_statement: bool, 24 | request_hook: Callable[[Span, CommandStartedEvent], None] | None = None, 25 | response_hook: Callable[[Span, CommandSucceededEvent], None] | None = None, 26 | failed_hook: Callable[[Span, CommandFailedEvent], None] | None = None, 27 | **kwargs: Any, 28 | ) -> None: 29 | """Instrument the `pymongo` module so that spans are automatically created for each operation. 30 | 31 | See the `Logfire.instrument_pymongo` method for details. 
32 | """ 33 | PymongoInstrumentor().instrument( 34 | request_hook=request_hook or dummy_callback, 35 | response_hook=response_hook or dummy_callback, 36 | failed_hook=failed_hook or dummy_callback, 37 | capture_statement=capture_statement, 38 | **kwargs, 39 | ) 40 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/redis.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import functools 4 | from typing import TYPE_CHECKING, Any 5 | 6 | try: 7 | from opentelemetry.instrumentation.redis import RedisInstrumentor 8 | 9 | from logfire.integrations.redis import RequestHook, ResponseHook 10 | except ImportError: 11 | raise RuntimeError( 12 | '`logfire.instrument_redis()` requires the `opentelemetry-instrumentation-redis` package.\n' 13 | 'You can install this with:\n' 14 | " pip install 'logfire[redis]'" 15 | ) 16 | 17 | from logfire._internal.constants import ATTRIBUTES_MESSAGE_KEY 18 | from logfire._internal.utils import truncate_string 19 | 20 | if TYPE_CHECKING: 21 | from opentelemetry.trace import Span 22 | from redis import Connection 23 | 24 | 25 | def instrument_redis( 26 | *, 27 | capture_statement: bool, 28 | request_hook: RequestHook | None, 29 | response_hook: ResponseHook | None, 30 | **kwargs: Any, 31 | ) -> None: 32 | """Instrument the `redis` module so that spans are automatically created for each operation. 33 | 34 | See the `Logfire.instrument_redis` method for details. 35 | """ 36 | if capture_statement: 37 | request_hook = _capture_statement_hook(request_hook) 38 | 39 | RedisInstrumentor().instrument(request_hook=request_hook, response_hook=response_hook, **kwargs) # type: ignore[reportUnknownMemberType] 40 | 41 | 42 | def _capture_statement_hook(request_hook: RequestHook | None = None) -> RequestHook: 43 | truncate_value = functools.partial(truncate_string, max_length=20, middle='...') 44 | 45 | def _capture_statement( 46 | span: Span, instance: Connection, command: tuple[object, ...], *args: Any, **kwargs: Any 47 | ) -> None: 48 | str_command = list(map(str, command)) 49 | span.set_attribute('db.statement', ' '.join(str_command)) 50 | span.set_attribute(ATTRIBUTES_MESSAGE_KEY, ' '.join(map(truncate_value, str_command))) 51 | if request_hook is not None: 52 | request_hook(span, instance, command, *args, **kwargs) 53 | 54 | return _capture_statement 55 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/requests.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Callable 4 | 5 | import requests 6 | from opentelemetry.sdk.trace import Span 7 | 8 | try: 9 | from opentelemetry.instrumentation.requests import RequestsInstrumentor 10 | except ImportError: 11 | raise RuntimeError( 12 | '`logfire.instrument_requests()` requires the `opentelemetry-instrumentation-requests` package.\n' 13 | 'You can install this with:\n' 14 | " pip install 'logfire[requests]'" 15 | ) 16 | 17 | 18 | def instrument_requests( 19 | excluded_urls: str | None = None, 20 | request_hook: Callable[[Span, requests.PreparedRequest], None] | None = None, 21 | response_hook: Callable[[Span, requests.PreparedRequest, requests.Response], None] | None = None, 22 | **kwargs: Any, 23 | ) -> None: 24 | """Instrument the `requests` module so that spans are automatically created for each request. 
25 | 26 | See the `Logfire.instrument_requests` method for details. 27 | """ 28 | RequestsInstrumentor().instrument( 29 | excluded_urls=excluded_urls, 30 | request_hook=request_hook, 31 | response_hook=response_hook, 32 | **kwargs, 33 | ) 34 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/sqlalchemy.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | from typing import TYPE_CHECKING, Any 5 | 6 | try: 7 | from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor 8 | 9 | from logfire.integrations.sqlalchemy import CommenterOptions 10 | except ImportError: 11 | raise RuntimeError( 12 | '`logfire.instrument_sqlalchemy()` requires the `opentelemetry-instrumentation-sqlalchemy` package.\n' 13 | 'You can install this with:\n' 14 | " pip install 'logfire[sqlalchemy]'" 15 | ) 16 | 17 | if TYPE_CHECKING: 18 | from sqlalchemy import Engine 19 | from sqlalchemy.ext.asyncio import AsyncEngine 20 | 21 | 22 | def instrument_sqlalchemy( 23 | engine: AsyncEngine | Engine | None, 24 | enable_commenter: bool, 25 | commenter_options: CommenterOptions, 26 | **kwargs: Any, 27 | ) -> None: 28 | """Instrument the `sqlalchemy` module so that spans are automatically created for each query. 29 | 30 | See the `Logfire.instrument_sqlalchemy` method for details. 31 | """ 32 | with contextlib.suppress(ImportError): 33 | from sqlalchemy.ext.asyncio import AsyncEngine 34 | 35 | if isinstance(engine, AsyncEngine): 36 | engine = engine.sync_engine 37 | return SQLAlchemyInstrumentor().instrument(engine=engine, **kwargs) 38 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/sqlite3.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sqlite3 4 | from typing import Any, TypeVar 5 | 6 | from opentelemetry.trace import TracerProvider 7 | 8 | try: 9 | from opentelemetry.instrumentation.sqlite3 import SQLite3Instrumentor 10 | except ImportError: 11 | raise RuntimeError( 12 | '`logfire.instrument_sqlite3()` requires the `opentelemetry-instrumentation-sqlite3` package.\n' 13 | 'You can install this with:\n' 14 | " pip install 'logfire[sqlite3]'" 15 | ) 16 | 17 | 18 | SQLite3Connection = TypeVar('SQLite3Connection', sqlite3.Connection, None) 19 | 20 | 21 | def instrument_sqlite3(*, conn: SQLite3Connection, tracer_provider: TracerProvider, **kwargs: Any) -> SQLite3Connection: 22 | """Instrument the `sqlite3` module so that spans are automatically created for each query. 23 | 24 | See the `Logfire.instrument_sqlite3` method for details. 
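    A sketch of the connection-level form, assuming the public `logfire.instrument_sqlite3` wrapper forwards
    `conn` the same way this helper does (the in-memory database is purely illustrative):

        import sqlite3

        import logfire

        logfire.configure()
        conn = logfire.instrument_sqlite3(conn=sqlite3.connect(':memory:'))
        conn.execute('CREATE TABLE example (id INTEGER PRIMARY KEY)')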
25 | """ 26 | if conn is not None: 27 | return SQLite3Instrumentor().instrument_connection(conn, tracer_provider=tracer_provider) 28 | else: 29 | return SQLite3Instrumentor().instrument(tracer_provider=tracer_provider, **kwargs) # type: ignore 30 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/starlette.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | from starlette.applications import Starlette 6 | 7 | try: 8 | from opentelemetry.instrumentation.asgi.types import ClientRequestHook, ClientResponseHook, ServerRequestHook 9 | from opentelemetry.instrumentation.starlette import StarletteInstrumentor 10 | except ImportError: 11 | raise RuntimeError( 12 | '`logfire.instrument_starlette()` requires the `opentelemetry-instrumentation-starlette` package.\n' 13 | 'You can install this with:\n' 14 | " pip install 'logfire[starlette]'" 15 | ) 16 | 17 | from logfire import Logfire 18 | from logfire._internal.integrations.asgi import tweak_asgi_spans_tracer_provider 19 | from logfire._internal.utils import maybe_capture_server_headers 20 | 21 | 22 | def instrument_starlette( 23 | logfire_instance: Logfire, 24 | app: Starlette, 25 | *, 26 | record_send_receive: bool = False, 27 | capture_headers: bool = False, 28 | server_request_hook: ServerRequestHook | None = None, 29 | client_request_hook: ClientRequestHook | None = None, 30 | client_response_hook: ClientResponseHook | None = None, 31 | **kwargs: Any, 32 | ): 33 | """Instrument `app` so that spans are automatically created for each request. 34 | 35 | See the `Logfire.instrument_starlette` method for details. 36 | """ 37 | maybe_capture_server_headers(capture_headers) 38 | StarletteInstrumentor().instrument_app( 39 | app, 40 | server_request_hook=server_request_hook, 41 | client_request_hook=client_request_hook, 42 | client_response_hook=client_response_hook, 43 | **{ # type: ignore 44 | 'tracer_provider': tweak_asgi_spans_tracer_provider(logfire_instance, record_send_receive), 45 | 'meter_provider': logfire_instance.config.get_meter_provider(), 46 | **kwargs, 47 | }, 48 | ) 49 | -------------------------------------------------------------------------------- /logfire/_internal/integrations/wsgi.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING, Any 4 | 5 | try: 6 | from opentelemetry.instrumentation.wsgi import OpenTelemetryMiddleware 7 | except ImportError: 8 | raise RuntimeError( 9 | '`logfire.instrument_wsgi()` requires the `opentelemetry-instrumentation-wsgi` package.\n' 10 | 'You can install this with:\n' 11 | " pip install 'logfire[wsgi]'" 12 | ) 13 | 14 | if TYPE_CHECKING: 15 | from wsgiref.types import WSGIApplication 16 | 17 | from logfire._internal.utils import maybe_capture_server_headers 18 | from logfire.integrations.wsgi import RequestHook, ResponseHook 19 | 20 | 21 | def instrument_wsgi( 22 | app: WSGIApplication, 23 | *, 24 | capture_headers: bool = False, 25 | request_hook: RequestHook | None = None, 26 | response_hook: ResponseHook | None = None, 27 | **kwargs: Any, 28 | ) -> WSGIApplication: 29 | """Instrument `app` so that spans are automatically created for each request. 30 | 31 | See the `Logfire.instrument_wsgi` method for details. 
32 | """ 33 | maybe_capture_server_headers(capture_headers) 34 | return OpenTelemetryMiddleware(app, request_hook=request_hook, response_hook=response_hook, **kwargs) 35 | -------------------------------------------------------------------------------- /logfire/_internal/ulid.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from random import Random 4 | from typing import Callable 5 | 6 | 7 | def ulid(random: Random, ms_timestamp_generator: Callable[[], int]) -> int: 8 | """Generate an integer ULID compatible with UUID v4. 9 | 10 | ULIDs as defined by the [spec](https://github.com/ulid/spec) look like this: 11 | 12 | 01AN4Z07BY 79KA1307SR9X4MV3 13 | |----------| |----------------| 14 | Timestamp Randomness 15 | 48bits 80bits 16 | 17 | In the future it would be nice to make this compatible with a UUID, 18 | e.g. v4 UUIDs by setting the version and variant bits correctly. 19 | We can't currently do this because setting these bits would leave us with only 7 bytes of randomness, 20 | which isn't enough for the Python SDK's sampler that currently expects 8 bytes of randomness. 21 | In the future OTEL will probably adopt https://www.w3.org/TR/trace-context-2/#random-trace-id-flag 22 | which relies only on the lower 7 bytes of the trace ID, then all SDKs and tooling should be updated 23 | and leaving only 7 bytes of randomness should be fine. 24 | 25 | Right now we only care about: 26 | - Our SDK / Python SDK's in general. 27 | - The OTEL collector. 28 | 29 | And both behave properly with 8 bytes of randomness because trace IDs were originally 64 bits 30 | so to be compatible with old trace IDs nothing in OTEL can assume >8 bytes of randomness in trace IDs 31 | unless they generated the trace ID themselves (e.g. the Go SDK _does_ expect >8 bytes of randomness internally). 32 | """ 33 | # Timestamp: first 6 bytes of the ULID (48 bits) 34 | # Note that it's not important that this timestamp is super precise or unique. 35 | # It just needs to be roughly monotonically increasing so that the ULID is sortable, at least for our purposes. 
36 | timestamp = ms_timestamp_generator().to_bytes(6, byteorder='big') 37 | # Randomness: next 10 bytes of the ULID (80 bits) 38 | randomness = random.getrandbits(80).to_bytes(10, byteorder='big') 39 | # Convert to int and return 40 | return int.from_bytes(timestamp + randomness, byteorder='big') 41 | -------------------------------------------------------------------------------- /logfire/cli.py: -------------------------------------------------------------------------------- 1 | from ._internal.cli import main 2 | 3 | __all__ = ('main',) 4 | -------------------------------------------------------------------------------- /logfire/exceptions.py: -------------------------------------------------------------------------------- 1 | """Logfire exceptions.""" 2 | 3 | 4 | class LogfireConfigError(ValueError): 5 | """Error raised when there is a problem with the Logfire configuration.""" 6 | -------------------------------------------------------------------------------- /logfire/experimental/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire/experimental/__init__.py -------------------------------------------------------------------------------- /logfire/integrations/__init__.py: -------------------------------------------------------------------------------- 1 | """Integrations for Logfire. 2 | 3 | Specifically, public objects are defined here for direct import. 4 | """ 5 | -------------------------------------------------------------------------------- /logfire/integrations/flask.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING, Callable, TypedDict 4 | 5 | from opentelemetry.trace import Span 6 | 7 | if TYPE_CHECKING: 8 | from wsgiref.types import WSGIEnvironment 9 | 10 | 11 | RequestHook = Callable[[Span, 'WSGIEnvironment'], None] 12 | """A hook that is called before a request is processed.""" 13 | ResponseHook = Callable[[Span, str, 'list[tuple[str, str]]'], None] 14 | """A hook that is called after a response is processed.""" 15 | 16 | 17 | class CommenterOptions(TypedDict, total=False): 18 | """The `commenter_options` parameter for `instrument_flask`.""" 19 | 20 | framework: bool 21 | """Include the framework name and version in the comment.""" 22 | route: bool 23 | """Include the route name in the comment.""" 24 | controller: bool 25 | """Include the controller name in the comment.""" 26 | -------------------------------------------------------------------------------- /logfire/integrations/httpx.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Awaitable, Callable, NamedTuple 4 | 5 | import httpx 6 | from opentelemetry.trace import Span 7 | 8 | # TODO(Marcelo): When https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3098/ gets merged, 9 | # and the next version of `opentelemetry-instrumentation-httpx` is released, we can just do a reimport: 10 | # from opentelemetry.instrumentation.httpx import RequestInfo as RequestInfo 11 | # from opentelemetry.instrumentation.httpx import ResponseInfo as ResponseInfo 12 | # from opentelemetry.instrumentation.httpx import RequestHook as RequestHook 13 | # from opentelemetry.instrumentation.httpx import ResponseHook as ResponseHook 14 | 15 | 16 | class RequestInfo(NamedTuple): 
17 | """Information about an HTTP request. 18 | 19 | This is the second parameter passed to the `RequestHook` function. 20 | """ 21 | 22 | method: bytes 23 | url: httpx.URL 24 | headers: httpx.Headers 25 | stream: httpx.SyncByteStream | httpx.AsyncByteStream | None 26 | extensions: dict[str, Any] | None 27 | 28 | 29 | class ResponseInfo(NamedTuple): 30 | """Information about an HTTP response. 31 | 32 | This is the second parameter passed to the `ResponseHook` function. 33 | """ 34 | 35 | status_code: int 36 | headers: httpx.Headers 37 | stream: httpx.SyncByteStream | httpx.AsyncByteStream | None 38 | extensions: dict[str, Any] | None 39 | 40 | 41 | RequestHook = Callable[[Span, RequestInfo], None] 42 | ResponseHook = Callable[[Span, RequestInfo, ResponseInfo], None] 43 | AsyncRequestHook = Callable[[Span, RequestInfo], Awaitable[None]] 44 | AsyncResponseHook = Callable[[Span, RequestInfo, ResponseInfo], Awaitable[None]] 45 | -------------------------------------------------------------------------------- /logfire/integrations/psycopg.py: -------------------------------------------------------------------------------- 1 | from typing import TypedDict 2 | 3 | 4 | class CommenterOptions(TypedDict, total=False): 5 | """The `commenter_options` parameter for `instrument_psycopg`.""" 6 | 7 | db_driver: bool 8 | """Include the database driver name in the comment e.g. 'psycopg2'.""" 9 | 10 | dbapi_threadsafety: bool 11 | """Include the DB-API threadsafety value in the comment.""" 12 | 13 | dbapi_level: bool 14 | """Include the DB-API level in the comment.""" 15 | 16 | libpq_version: bool 17 | """Include the libpq version in the comment.""" 18 | 19 | driver_paramstyle: bool 20 | """Include the driver paramstyle in the comment e.g. 'driver_paramstyle=pyformat'""" 21 | 22 | opentelemetry_values: bool 23 | """Enabling this flag will add traceparent values to the comment.""" 24 | -------------------------------------------------------------------------------- /logfire/integrations/redis.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Protocol 2 | 3 | from opentelemetry.trace import Span 4 | from redis import Connection 5 | 6 | 7 | class RequestHook(Protocol): 8 | """A hook that is called before the request is sent.""" 9 | 10 | def __call__(self, span: Span, instance: Connection, *args: Any, **kwargs: Any) -> None: 11 | """Call the hook. 12 | 13 | Args: 14 | span: The span that is being created. 15 | instance: The connection instance. 16 | *args: The arguments that are passed to the command. 17 | **kwargs: The keyword arguments that are passed to the command. 18 | """ 19 | 20 | 21 | class ResponseHook(Protocol): 22 | """A hook that is called after the response is received.""" 23 | 24 | def __call__(self, span: Span, instance: Connection, response: Any) -> None: 25 | """Call the hook. 26 | 27 | Args: 28 | span: The span that is being created. 29 | instance: The connection instance. 30 | response: The response that is received. 31 | """ 32 | -------------------------------------------------------------------------------- /logfire/integrations/sqlalchemy.py: -------------------------------------------------------------------------------- 1 | from typing import TypedDict 2 | 3 | 4 | class CommenterOptions(TypedDict, total=False): 5 | """The `commenter_options` parameter for `instrument_sqlalchemy`.""" 6 | 7 | db_driver: bool 8 | """Include the database driver name in the comment e.g. 
'psycopg2'.""" 9 | db_framework: bool 10 | """Enabling this flag will add the database framework name and version to the comment e.g. 'sqlalchemy:1.4.0'.""" 11 | opentelemetry_values: bool 12 | """Enabling this flag will add traceparent values to the comment.""" 13 | -------------------------------------------------------------------------------- /logfire/integrations/structlog.py: -------------------------------------------------------------------------------- 1 | """Logfire processor for [structlog](https://www.structlog.org/en/stable/).""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TYPE_CHECKING 6 | 7 | import logfire 8 | 9 | from .._internal.constants import ATTRIBUTES_MESSAGE_KEY 10 | from .logging import RESERVED_ATTRS as LOGGING_RESERVED_ATTRS 11 | 12 | # This file is currently imported eagerly from __init__.py, so it shouldn't import structlog directly 13 | # since that's not a required dependency. 14 | if TYPE_CHECKING: 15 | from structlog.types import EventDict, WrappedLogger 16 | 17 | from .. import Logfire 18 | 19 | RESERVED_ATTRS = LOGGING_RESERVED_ATTRS | {'level', 'event', 'timestamp'} 20 | """Attributes to strip from the event before sending to Logfire.""" 21 | 22 | 23 | class LogfireProcessor: 24 | """Logfire processor for [structlog](../../integrations/structlog.md).""" 25 | 26 | def __init__( 27 | self, 28 | *, 29 | console_log: bool = False, 30 | logfire_instance: Logfire | None = None, 31 | ) -> None: 32 | self.console_log = console_log 33 | self.logfire_instance = (logfire_instance or logfire.DEFAULT_LOGFIRE_INSTANCE).with_settings( 34 | custom_scope_suffix='structlog' 35 | ) 36 | 37 | def __call__(self, logger: WrappedLogger, name: str, event_dict: EventDict) -> EventDict: 38 | """A middleware to process structlog event, and send it to **Logfire**.""" 39 | attributes = {k: v for k, v in event_dict.items() if k not in RESERVED_ATTRS} 40 | level = event_dict.get('level', 'info').lower() 41 | # NOTE: An event can be `None` in structlog. We may want to create a default msg in those cases. 
42 | attributes[ATTRIBUTES_MESSAGE_KEY] = message = event_dict.get('event') or 'structlog event' 43 | self.logfire_instance.log( 44 | level=level, # type: ignore 45 | msg_template=message, 46 | attributes=attributes, 47 | console_log=self.console_log, 48 | exc_info=event_dict.get('exc_info', False), 49 | ) 50 | return event_dict 51 | -------------------------------------------------------------------------------- /logfire/integrations/wsgi.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING, Callable 4 | 5 | from opentelemetry.trace import Span 6 | 7 | if TYPE_CHECKING: 8 | from wsgiref.types import WSGIEnvironment 9 | 10 | ResponseHook = Callable[[Span, 'WSGIEnvironment', str, 'list[tuple[str, str]]'], None] 11 | """A callback called when a response is sent by the server.""" 12 | 13 | RequestHook = Callable[[Span, 'WSGIEnvironment'], None] 14 | """A callback called when a request is received by the server.""" 15 | -------------------------------------------------------------------------------- /logfire/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/logfire/py.typed -------------------------------------------------------------------------------- /logfire/sampling/__init__.py: -------------------------------------------------------------------------------- 1 | """Types for configuring sampling. See the [sampling guide](https://logfire.pydantic.dev/docs/guides/advanced/sampling/).""" 2 | 3 | from ._tail_sampling import SamplingOptions, SpanLevel, TailSamplingSpanInfo 4 | 5 | __all__ = [ 6 | 'SamplingOptions', 7 | 'SpanLevel', 8 | 'TailSamplingSpanInfo', 9 | ] 10 | -------------------------------------------------------------------------------- /logfire/version.py: -------------------------------------------------------------------------------- 1 | """Version information for logfire package.""" 2 | 3 | import importlib_metadata 4 | 5 | VERSION = importlib_metadata.version('logfire') 6 | -------------------------------------------------------------------------------- /pyodide_test/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pyodide_test", 3 | "version": "0.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "pyodide_test", 9 | "version": "0.0.0", 10 | "license": "MIT", 11 | "dependencies": { 12 | "pyodide": "^0.27.2" 13 | } 14 | }, 15 | "node_modules/pyodide": { 16 | "version": "0.27.2", 17 | "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.27.2.tgz", 18 | "integrity": "sha512-sfA2kiUuQVRpWI4BYnU3sX5PaTTt/xrcVEmRzRcId8DzZXGGtPgCBC0gCqjUTUYSa8ofPaSjXmzESc86yvvCHg==", 19 | "license": "Apache-2.0", 20 | "dependencies": { 21 | "ws": "^8.5.0" 22 | }, 23 | "engines": { 24 | "node": ">=18.0.0" 25 | } 26 | }, 27 | "node_modules/ws": { 28 | "version": "8.18.0", 29 | "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", 30 | "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", 31 | "license": "MIT", 32 | "engines": { 33 | "node": ">=10.0.0" 34 | }, 35 | "peerDependencies": { 36 | "bufferutil": "^4.0.1", 37 | "utf-8-validate": ">=5.0.2" 38 | }, 39 | "peerDependenciesMeta": { 40 | "bufferutil": { 41 | "optional": true 42 | }, 43 | "utf-8-validate": { 44 | "optional": 
true 45 | } 46 | } 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /pyodide_test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pyodide_test", 3 | "version": "0.0.0", 4 | "main": "test.js", 5 | "scripts": { 6 | "test": "node --experimental-wasm-stack-switching test.mjs" 7 | }, 8 | "author": "", 9 | "license": "MIT", 10 | "description": "", 11 | "dependencies": { 12 | "pyodide": "^0.27.2" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /pyodide_test/test.mjs: -------------------------------------------------------------------------------- 1 | import {opendir} from 'node:fs/promises' 2 | import path from 'path' 3 | import assert from 'assert' 4 | import { loadPyodide } from 'pyodide' 5 | 6 | 7 | async function runTest() { 8 | const wheelPath = await findWheel(path.join(path.resolve(import.meta.dirname, '..'), 'dist')); 9 | const stdout = [] 10 | const stderr = [] 11 | const pyodide = await loadPyodide({ 12 | 13 | stdout: (msg) => { 14 | stdout.push(msg) 15 | }, 16 | stderr: (msg) => { 17 | stderr.push(msg) 18 | } 19 | }) 20 | await pyodide.loadPackage(['micropip', 'pygments']) 21 | console.log('Running Pyodide test...\n') 22 | await pyodide.runPythonAsync(` 23 | import sys 24 | import micropip 25 | 26 | await micropip.install(['file:${wheelPath}']) 27 | import logfire 28 | logfire.configure(token='unknown', inspect_arguments=False) 29 | logfire.info('hello {name}', name='world') 30 | sys.stdout.flush() 31 | sys.stderr.flush() 32 | `) 33 | let out = stdout.join('') 34 | let err = stderr.join('') 35 | console.log('stdout:', out) 36 | console.log('stderr:', err) 37 | assert.ok(out.includes('hello world')) 38 | 39 | assert.ok( 40 | err.includes( 41 | 'UserWarning: Logfire API returned status code 401.' 42 | ), 43 | ) 44 | console.log('\n\nLogfire Pyodide tests passed 🎉') 45 | } 46 | 47 | 48 | async function findWheel(dist_dir) { 49 | const dir = await opendir(dist_dir); 50 | for await (const dirent of dir) { 51 | if (dirent.name.endsWith('.whl')) { 52 | return path.join(dist_dir, dirent.name); 53 | } 54 | } 55 | } 56 | 57 | runTest() 58 | -------------------------------------------------------------------------------- /release/shared.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | 4 | def run_command(*args: str) -> str: 5 | """Run a shell command and return the output.""" 6 | p = subprocess.run(args, stdout=subprocess.PIPE, check=True, encoding='utf-8') 7 | return p.stdout.strip() 8 | 9 | 10 | REPO = 'pydantic/logfire' 11 | CHANGELOG_FILE = 'CHANGELOG.md' 12 | ROOT_PYPROJECT = 'pyproject.toml' 13 | API_PYPROJECT = 'logfire-api/pyproject.toml' 14 | GITHUB_TOKEN = run_command('gh', 'auth', 'token') 15 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/__init__.py -------------------------------------------------------------------------------- /tests/aaa_query_client/README.md: -------------------------------------------------------------------------------- 1 | This folder starts with `aaa_` in order to make sure these tests run first. 2 | 3 | I don't know why, but they don't pass when they aren't the first tests to run. 
4 | Maybe it's related to instrumenting httpx in some of the tests? 5 | -------------------------------------------------------------------------------- /tests/auto_trace_samples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/auto_trace_samples/__init__.py -------------------------------------------------------------------------------- /tests/auto_trace_samples/__main__.py: -------------------------------------------------------------------------------- 1 | def main(): 2 | return 1 3 | 4 | 5 | main() 6 | -------------------------------------------------------------------------------- /tests/auto_trace_samples/foo.py: -------------------------------------------------------------------------------- 1 | from typing import Iterator 2 | 3 | 4 | async def bar(): 5 | lst = [x async for x in async_gen()] 6 | return lst[10] 7 | 8 | 9 | def gen() -> Iterator[int]: 10 | yield from range(3) 11 | 12 | 13 | async def async_gen(): 14 | def inner(): 15 | return 1 16 | 17 | inner() 18 | 19 | for x in gen(): # pragma: no branch 20 | yield x 21 | -------------------------------------------------------------------------------- /tests/auto_trace_samples/simple_nesting.py: -------------------------------------------------------------------------------- 1 | def func1(): 2 | return func2() 3 | 4 | 5 | def func2(): 6 | return func3() 7 | 8 | 9 | def func3(): 10 | return func4() 11 | 12 | 13 | def func4(): 14 | return 42 15 | -------------------------------------------------------------------------------- /tests/exporters/test_dynamic_batch_span_processor.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | from opentelemetry.sdk.trace.export import BatchSpanProcessor 6 | 7 | import logfire 8 | from logfire._internal.exporters.dynamic_batch import DynamicBatchSpanProcessor 9 | from logfire.testing import TestExporter 10 | 11 | 12 | def test_dynamic_batch_span_processor(exporter: TestExporter, config_kwargs: dict[str, Any]): 13 | processor = DynamicBatchSpanProcessor(exporter) 14 | config_kwargs['additional_span_processors'] = [processor] 15 | logfire.configure(**config_kwargs) 16 | for _ in range(9): 17 | logfire.info('test') 18 | assert processor.num_processed == 9 19 | assert isinstance(processor.processor, BatchSpanProcessor) 20 | assert processor.processor.schedule_delay_millis == 100 21 | logfire.info('test') 22 | assert processor.num_processed == 10 23 | assert processor.processor.schedule_delay_millis == 500 24 | logfire.force_flush() 25 | assert len(exporter.exported_spans) == 10 26 | -------------------------------------------------------------------------------- /tests/exporters/test_remove_pending.py: -------------------------------------------------------------------------------- 1 | from inline_snapshot import snapshot 2 | 3 | import logfire 4 | from logfire._internal.exporters.remove_pending import RemovePendingSpansExporter 5 | from logfire.testing import TestExporter 6 | 7 | 8 | def test_remove_pending_spans(exporter: TestExporter): 9 | with logfire.span('span1'): 10 | logfire.info('log1') 11 | 12 | with logfire.span('span2'): 13 | logfire.info('log2') 14 | 15 | # Simulate the batch span processor exporting at this point. 
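        # Everything exported so far, including the pending span for `span2` (which is still open), goes into this batch.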
16 | batch1 = exporter.exported_spans 17 | exporter.clear() 18 | 19 | logfire.info('log3') 20 | 21 | with logfire.span('span3'): 22 | logfire.info('log4') 23 | 24 | batch2 = exporter.exported_spans 25 | 26 | assert [ 27 | [(span.name, (span.attributes or {}).get('logfire.span_type')) for span in batch] for batch in [batch1, batch2] 28 | ] == snapshot( 29 | [ 30 | [('span1', 'pending_span'), ('log1', 'log'), ('span1', 'span'), ('span2', 'pending_span'), ('log2', 'log')], 31 | [('log3', 'log'), ('span2', 'span'), ('span3', 'pending_span'), ('log4', 'log'), ('span3', 'span')], 32 | ] 33 | ) 34 | 35 | inner_exporter = TestExporter() 36 | remove_exporter = RemovePendingSpansExporter(inner_exporter) 37 | remove_exporter.export(batch1) 38 | assert [ 39 | (span.name, (span.attributes or {}).get('logfire.span_type')) for span in inner_exporter.exported_spans 40 | ] == snapshot([('log1', 'log'), ('log2', 'log'), ('span1', 'span'), ('span2', 'pending_span')]) 41 | inner_exporter.clear() 42 | remove_exporter.export(batch2) 43 | assert [ 44 | (span.name, (span.attributes or {}).get('logfire.span_type')) for span in inner_exporter.exported_spans 45 | ] == snapshot([('log3', 'log'), ('log4', 'log'), ('span2', 'span'), ('span3', 'span')]) 46 | -------------------------------------------------------------------------------- /tests/import_used_for_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/import_used_for_tests/__init__.py -------------------------------------------------------------------------------- /tests/import_used_for_tests/a/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/import_used_for_tests/a/__init__.py -------------------------------------------------------------------------------- /tests/import_used_for_tests/a/b.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | from typing import Callable, TypeVar 3 | 4 | from typing_extensions import ParamSpec 5 | 6 | P = ParamSpec('P') 7 | T = TypeVar('T') 8 | 9 | 10 | def wrap(f: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: 11 | sleep(0.05) 12 | return f(*args, **kwargs) 13 | -------------------------------------------------------------------------------- /tests/import_used_for_tests/internal_error_handling/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/import_used_for_tests/internal_error_handling/__init__.py -------------------------------------------------------------------------------- /tests/import_used_for_tests/internal_error_handling/internal_logfire_code_example.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from logfire._internal.utils import handle_internal_errors, log_internal_error 4 | 5 | 6 | def inner1(): 7 | raise ValueError('inner1') 8 | 9 | 10 | def inner2(): 11 | inner1() 12 | 13 | 14 | @handle_internal_errors 15 | def using_decorator(): 16 | inner2() 17 | 18 | 19 | def using_context_manager(): 20 | with handle_internal_errors: 21 | inner2() 22 | 23 | 24 | def using_try_except(): 25 | try: 26 | inner2() 27 | except Exception: 28 | log_internal_error() 29 
| 30 | 31 | def outer1(func: Any): 32 | func() 33 | 34 | 35 | def outer2(func: Any): 36 | outer1(func) 37 | -------------------------------------------------------------------------------- /tests/import_used_for_tests/internal_error_handling/user_code_example.py: -------------------------------------------------------------------------------- 1 | from tests.import_used_for_tests.internal_error_handling.internal_logfire_code_example import ( 2 | outer2, 3 | using_context_manager, 4 | using_decorator, 5 | using_try_except, 6 | ) 7 | 8 | 9 | def user1(): 10 | user2() 11 | 12 | 13 | def user2(): 14 | user3() 15 | 16 | 17 | def user3(): 18 | user4() 19 | 20 | 21 | def user4(): 22 | user5() 23 | 24 | 25 | def user5(): 26 | user6() 27 | 28 | 29 | def user6(): 30 | outer2(using_decorator) 31 | outer2(using_context_manager) 32 | outer2(using_try_except) 33 | -------------------------------------------------------------------------------- /tests/import_used_for_tests/module_with_getattr.py: -------------------------------------------------------------------------------- 1 | # See test_dynamic_module_ignored_in_ensure_flush_after_aws_lambda 2 | 3 | 4 | def __getattr__(name: str) -> str: 5 | return name 6 | -------------------------------------------------------------------------------- /tests/import_used_for_tests/slow_async_callbacks_example.py: -------------------------------------------------------------------------------- 1 | # The purpose of this file is to keep line numbers stable in tests, so make changes with care. 2 | 3 | import asyncio 4 | 5 | import logfire 6 | 7 | 8 | async def main(): 9 | asyncio.get_running_loop().call_soon(mock_block) 10 | await asyncio.create_task(foo(), name='foo 1') 11 | await asyncio.create_task(bar(), name='bar 1') 12 | 13 | 14 | async def bar(): 15 | await foo() 16 | mock_block() 17 | mock_block() 18 | await asyncio.create_task(foo(), name='foo 2') 19 | mock_block() 20 | mock_block() 21 | mock_block() 22 | raise RuntimeError('bar') 23 | 24 | 25 | async def foo(): 26 | await asyncio.sleep(0) 27 | mock_block() 28 | await asyncio.sleep(0) 29 | 30 | 31 | def mock_block(): 32 | # Simulate time advancing in a synchronous function. 
33 | logfire.DEFAULT_LOGFIRE_INSTANCE.config.advanced.ns_timestamp_generator() 34 | -------------------------------------------------------------------------------- /tests/module_used_for_tests.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | from typing import Callable, TypeVar 3 | 4 | from typing_extensions import ParamSpec 5 | 6 | P = ParamSpec('P') 7 | T = TypeVar('T') 8 | 9 | 10 | def wrap(f: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: 11 | sleep(0.05) 12 | return f(*args, **kwargs) 13 | -------------------------------------------------------------------------------- /tests/otel_integrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/otel_integrations/__init__.py -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from pathlib import Path 3 | 4 | sys.path.append(str(Path(__file__).parent)) 5 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/otel_integrations/django_test_project/django_test_app/__init__.py -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_app/admin.py: -------------------------------------------------------------------------------- 1 | # Register your models here. 2 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_app/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class TestAppConfig(AppConfig): 5 | default_auto_field = 'django.db.models.BigAutoField' # type: ignore 6 | name = 'django_test_app' 7 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_app/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/otel_integrations/django_test_project/django_test_app/migrations/__init__.py -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_app/models.py: -------------------------------------------------------------------------------- 1 | # Create your models here. 2 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_app/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | 3 | from . 
import views 4 | 5 | urlpatterns = [ 6 | path('<int:item_id>/', views.detail, name='detail'), 7 | path('bad/', views.bad, name='bad'), 8 | ] 9 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_app/views.py: -------------------------------------------------------------------------------- 1 | from django.core.exceptions import BadRequest 2 | from django.http import HttpRequest, HttpResponse 3 | 4 | 5 | def detail(_request: HttpRequest, item_id: int) -> HttpResponse: 6 | return HttpResponse(f'item_id: {item_id}') # type: ignore 7 | 8 | 9 | def bad(_request: HttpRequest) -> HttpResponse: 10 | raise BadRequest('bad request') 11 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_site/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pydantic/logfire/68c34e778a45a2832ce07709502beead5917acf9/tests/otel_integrations/django_test_project/django_test_site/__init__.py -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_site/urls.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | from django.urls import include, path # type: ignore 3 | 4 | urlpatterns = [ 5 | path('django_test_app/', include('django_test_app.urls')), 6 | path('admin/', admin.site.urls), 7 | ] 8 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/django_test_site/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for django_test_site project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/5.0/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_test_site.settings') 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /tests/otel_integrations/django_test_project/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Django's command-line utility for administrative tasks.""" 3 | 4 | import os 5 | import sys 6 | 7 | 8 | def main(): 9 | """Run administrative tasks.""" 10 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_test_site.settings') 11 | 12 | try: 13 | from django.core.management import execute_from_command_line # type: ignore 14 | except ImportError as exc: 15 | raise ImportError( 16 | "Couldn't import Django. Are you sure it's installed and " 17 | 'available on your PYTHONPATH environment variable? Did you ' 18 | 'forget to activate a virtual environment?'
19 | ) from exc 20 | execute_from_command_line(sys.argv) 21 | 22 | 23 | if __name__ == '__main__': 24 | main() 25 | -------------------------------------------------------------------------------- /tests/otel_integrations/test_aiohttp_client.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | from unittest import mock 3 | 4 | import aiohttp 5 | import pytest 6 | from inline_snapshot import snapshot 7 | 8 | import logfire 9 | import logfire._internal.integrations.aiohttp_client 10 | 11 | 12 | # TODO real test 13 | @pytest.mark.anyio 14 | async def test_instrument_aiohttp(): 15 | cls = aiohttp.ClientSession 16 | original_init = cls.__init__ 17 | assert cls.__init__ is original_init 18 | logfire.instrument_aiohttp_client() 19 | assert cls.__init__ is not original_init 20 | 21 | 22 | def test_missing_opentelemetry_dependency() -> None: 23 | with mock.patch.dict('sys.modules', {'opentelemetry.instrumentation.aiohttp_client': None}): 24 | with pytest.raises(RuntimeError) as exc_info: 25 | importlib.reload(logfire._internal.integrations.aiohttp_client) 26 | assert str(exc_info.value) == snapshot("""\ 27 | `logfire.instrument_aiohttp_client()` requires the `opentelemetry-instrumentation-aiohttp-client` package. 28 | You can install this with: 29 | pip install 'logfire[aiohttp]'\ 30 | """) 31 | -------------------------------------------------------------------------------- /tests/otel_integrations/test_asyncpg.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | from unittest import mock 3 | 4 | import asyncpg 5 | import pytest 6 | from inline_snapshot import snapshot 7 | from opentelemetry.instrumentation.asyncpg import AsyncPGInstrumentor 8 | 9 | import logfire 10 | import logfire._internal.integrations.asyncpg 11 | 12 | 13 | def test_asyncpg() -> None: 14 | original_execute = asyncpg.Connection.execute # type: ignore[reportUnknownMemberType] 15 | logfire.instrument_asyncpg() 16 | assert original_execute is not asyncpg.Connection.execute # type: ignore[reportUnknownMemberType] 17 | AsyncPGInstrumentor().uninstrument() 18 | assert original_execute is asyncpg.Connection.execute # type: ignore[reportUnknownMemberType] 19 | 20 | 21 | def test_missing_opentelemetry_dependency() -> None: 22 | with mock.patch.dict('sys.modules', {'opentelemetry.instrumentation.asyncpg': None}): 23 | with pytest.raises(RuntimeError) as exc_info: 24 | importlib.reload(logfire._internal.integrations.asyncpg) 25 | assert str(exc_info.value) == snapshot("""\ 26 | `logfire.instrument_asyncpg()` requires the `opentelemetry-instrumentation-asyncpg` package. 
27 | You can install this with: 28 | pip install 'logfire[asyncpg]'\ 29 | """) 30 | -------------------------------------------------------------------------------- /tests/otel_integrations/test_pymongo.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import importlib 4 | from unittest import mock 5 | 6 | import pytest 7 | from inline_snapshot import snapshot 8 | from pymongo import monitoring 9 | 10 | import logfire 11 | import logfire._internal.integrations.pymongo 12 | 13 | 14 | # TODO real test 15 | def test_instrument_pymongo(): 16 | command_listeners = monitoring._LISTENERS.command_listeners # type: ignore 17 | assert len(command_listeners) == 0 # type: ignore 18 | logfire.instrument_pymongo() 19 | assert len(command_listeners) == 1 # type: ignore 20 | 21 | 22 | def test_missing_opentelemetry_dependency() -> None: 23 | with mock.patch.dict('sys.modules', {'opentelemetry.instrumentation.pymongo': None}): 24 | with pytest.raises(RuntimeError) as exc_info: 25 | importlib.reload(logfire._internal.integrations.pymongo) 26 | assert str(exc_info.value) == snapshot("""\ 27 | `logfire.instrument_pymongo()` requires the `opentelemetry-instrumentation-pymongo` package. 28 | You can install this with: 29 | pip install 'logfire[pymongo]'\ 30 | """) 31 | -------------------------------------------------------------------------------- /tests/test_collect_package_resources.py: -------------------------------------------------------------------------------- 1 | from dirty_equals import IsPartialDict 2 | 3 | from logfire import VERSION 4 | from logfire._internal.collect_system_info import collect_package_info 5 | 6 | 7 | def test_collect_package_info() -> None: 8 | assert collect_package_info() == IsPartialDict({'logfire': VERSION}) 9 | -------------------------------------------------------------------------------- /tests/test_no_production.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import httpx 4 | import pytest 5 | from httpx import ConnectTimeout 6 | 7 | 8 | @pytest.mark.skipif(os.environ.get('CI') != 'true', reason='Only run in CI') 9 | def test_cant_hit_production(): # pragma: no cover 10 | # In CI, we modify /etc/hosts to point logfire-api.pydantic.dev and related hostnames to an unreachable IP. 11 | # This won't prevent us from hitting production while testing during local development, but it at least 12 | # ensures that CI will not pass if we accidentally introduce logic that causes us to hit production while 13 | # running the test suite. 14 | with pytest.raises(ConnectTimeout): 15 | # Checking just one endpoint should be sufficient to verify the change to /etc/hosts is working. 
16 | httpx.get('http://logfire-api.pydantic.dev', timeout=1) 17 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | from pydantic import BaseModel 6 | 7 | from logfire.testing import TestExporter 8 | 9 | 10 | def exported_spans_as_models( 11 | exporter: TestExporter, 12 | fixed_line_number: int | None = 123, 13 | strip_filepaths: bool = True, 14 | include_resources: bool = False, 15 | _include_pending_spans: bool = False, 16 | _strip_function_qualname: bool = True, 17 | ) -> list[ReadableSpanModel]: 18 | """Same as exported_spans_as_dict but converts the dicts to pydantic models. 19 | 20 | This allows using the result in exporters that expect `ReadableSpan`s, not dicts. 21 | """ 22 | return [ 23 | ReadableSpanModel(**span) 24 | for span in exporter.exported_spans_as_dict( 25 | fixed_line_number=fixed_line_number, 26 | strip_filepaths=strip_filepaths, 27 | include_resources=include_resources, 28 | _include_pending_spans=_include_pending_spans, 29 | _strip_function_qualname=_strip_function_qualname, 30 | ) 31 | ] 32 | 33 | 34 | class SpanContextModel(BaseModel): 35 | """A pydantic model similar to an opentelemetry SpanContext.""" 36 | 37 | trace_id: int 38 | span_id: int 39 | is_remote: bool 40 | 41 | 42 | class ReadableSpanModel(BaseModel): 43 | """A pydantic model similar to an opentelemetry ReadableSpan.""" 44 | 45 | name: str 46 | context: SpanContextModel 47 | parent: SpanContextModel | None 48 | start_time: int 49 | end_time: int 50 | attributes: dict[str, Any] | None 51 | events: list[dict[str, Any]] | None = None 52 | resource: dict[str, Any] | None = None 53 | --------------------------------------------------------------------------------