├── .craft.yml ├── .dockerignore ├── .envrc ├── .gitattributes ├── .github ├── PULL_REQUEST_TEMPLATE.md ├── codeql │ └── codeql-config.yml ├── dependabot.yml ├── file-filters.yml ├── labeler.yml └── workflows │ ├── admin-sourcemaps.yml │ ├── bump-version.yml │ ├── ci.yml │ ├── codeql-analysis.yml │ ├── ddl-changes.yml │ ├── dependency-review.yml │ ├── docs-pr.yml │ ├── docs.yml │ ├── enforce-license-compliance.yml │ ├── fast-revert.yml │ ├── image.yml │ ├── labeler.yml │ ├── release-ghcr-version-tag.yml │ ├── release.yml │ └── validate-pipelines.yml ├── .gitignore ├── .isort.cfg ├── .pre-commit-config.yaml ├── .python-version ├── .vscode ├── extensions.json ├── launch.json └── settings.json ├── Brewfile ├── CHANGELOG.md ├── CODEOWNERS ├── Dockerfile ├── LICENSE.md ├── MANIFEST.in ├── MIGRATIONS.md ├── Makefile ├── README.rst ├── bin ├── api ├── cleanup ├── consumer ├── mocks │ └── mock-subscriptions ├── optimize └── wrapper ├── cloudbuild.yaml ├── codecov.yml ├── config ├── clickhouse │ ├── macros.xml │ ├── remote_servers.xml │ └── zookeeper.xml └── hooks │ └── post-merge ├── conftest.py ├── devenv ├── config.ini └── sync.py ├── devservices ├── clickhouse │ └── config.xml ├── config.yml └── programs.conf ├── docker-compose.gcb.yml ├── docker-compose.yml ├── docker_entrypoint.sh ├── docs-requirements.txt ├── docs ├── .gitignore ├── clickhouse-copier.md └── source │ ├── _static │ ├── architecture │ │ ├── clickhouse_nodes.png │ │ ├── compositeprocessing.png │ │ ├── datamodel.png │ │ ├── deployment_legend.png │ │ ├── errors_transactions_deployment.png │ │ ├── joins.png │ │ ├── multientity.png │ │ ├── outcomes_deployment.png │ │ ├── overview.png │ │ ├── queryprocessing.png │ │ ├── sessions_deployment.png │ │ ├── singleentity.png │ │ └── snuba_deployment.png │ ├── query │ │ └── snubaUI.png │ └── snuba.svg │ ├── architecture │ ├── consumer.rst │ ├── datamodel.rst │ ├── overview.rst │ ├── queryprocessing.rst │ └── slicing.rst │ ├── clickhouse │ ├── death_queries.rst │ ├── 
schema_design.rst │ ├── supported_versions.rst │ └── topology.rst │ ├── conf.py │ ├── configuration │ ├── dataset.md │ ├── entity.md │ ├── entity_subscription.md │ ├── intro.rst │ ├── migration_group.md │ ├── overview.rst │ ├── readable_storage.md │ └── writable_storage.md │ ├── contributing │ └── environment.rst │ ├── getstarted.rst │ ├── index.rst │ ├── intro.rst │ ├── language │ ├── mql.rst │ └── snql.rst │ ├── migrations │ └── modes.rst │ ├── profiler.rst │ └── query │ └── overview.rst ├── gocd ├── README.md └── templates │ ├── bash │ ├── canary-ddog-health-check.sh │ ├── check-cloud-build.sh │ ├── check-github.sh │ ├── check-migrations.sh │ ├── deploy-st.sh │ ├── deploy.sh │ ├── migrate-reverse.sh │ ├── migrate-st.sh │ ├── migrate.sh │ ├── s4s-clickhouse-queries.sh │ ├── s4s-ddog-health-check.sh │ ├── s4s-sentry-health-check.sh │ ├── saas-ddog-health-check.sh │ ├── saas-sentry-error-check.sh │ ├── saas-sentry-health-check.sh │ ├── sentry-release-canary.sh │ └── sentry-release-primary.sh │ ├── jsonnetfile.json │ ├── jsonnetfile.lock.json │ ├── pipelines │ └── snuba.libsonnet │ └── snuba.jsonnet ├── mypy.ini ├── requirements-build.txt ├── requirements-test.txt ├── requirements.txt ├── rust_snuba ├── .cargo │ └── config.toml ├── .gitignore ├── ARCHITECTURE.md ├── Cargo.lock ├── Cargo.toml ├── README.md ├── benches │ └── processors.rs ├── bin │ └── python_processor_infinite.rs ├── pyproject.toml ├── rust-toolchain.toml └── src │ ├── config.rs │ ├── consumer.rs │ ├── factory.rs │ ├── lib.rs │ ├── logging.rs │ ├── metrics │ ├── global_tags.rs │ ├── mod.rs │ └── statsd.rs │ ├── processors │ ├── eap_items.rs │ ├── eap_items_span.rs │ ├── errors.rs │ ├── functions.rs │ ├── generic_metrics.rs │ ├── mod.rs │ ├── outcomes.rs │ ├── profile_chunks.rs │ ├── profiles.rs │ ├── querylog.rs │ ├── release_health_metrics.rs │ ├── replays.rs │ ├── snapshots │ │ ├── rust_snuba__processors__eap_items__tests__serialization.snap │ │ ├── 
rust_snuba__processors__eap_items_span__tests__serialization.snap │ │ ├── rust_snuba__processors__tests__events-.snap │ │ ├── rust_snuba__processors__tests__profiles-call-tree-.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__error-with-null-threads.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__error-with-null-values-threads.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__error-with-threads.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__errors1.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__null-tag-keys.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__null-values.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__sdk-info-java.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-ErrorsProcessor-events__1__weird-transaction-source.json.snap │ │ ├── rust_snuba__processors__tests__schemas@events-errors-events__1__null-tag-keys.json.snap │ │ ├── rust_snuba__processors__tests__schemas@ingest-replay-events-ReplaysProcessor-ingest-replay-events__1__archive.json.snap │ │ ├── rust_snuba__processors__tests__schemas@ingest-replay-events-ReplaysProcessor-ingest-replay-events__1__click-serialized.json.snap │ │ ├── rust_snuba__processors__tests__schemas@ingest-replay-events-ReplaysProcessor-ingest-replay-events__1__click.json.snap │ │ ├── rust_snuba__processors__tests__schemas@ingest-replay-events-ReplaysProcessor-ingest-replay-events__1__event-link.json.snap │ │ ├── rust_snuba__processors__tests__schemas@ingest-replay-events-ReplaysProcessor-ingest-replay-events__1__segment.json.snap │ │ ├── rust_snuba__processors__tests__schemas@ingest-replay-events-ReplaysProcessor-ingest-replay-events__1__viewed.json.snap │ │ ├── 
rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-discarded-hash.json.snap │ │ ├── rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-lb.json.snap │ │ ├── rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-null-values.json.snap │ │ ├── rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-pop-us.json.snap │ │ ├── rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-relay-internal.json.snap │ │ ├── rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes2-missing-key-id.json.snap │ │ ├── rust_snuba__processors__tests__schemas@processed-profiles-ProfilesMessageProcessor-profile-metadata__1__basic.json.snap │ │ ├── rust_snuba__processors__tests__schemas@profiles-call-tree-FunctionsMessageProcessor-profile-functions__1__basic.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-base64.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-gauge.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-set-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-base64.json.snap │ 
│ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics1.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-base64.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-gauge.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-set-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-base64.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics1.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-base64.json.snap │ │ ├── 
rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-gauge.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-set-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-base64.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics1.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-base64.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-gauge.json.snap │ │ ├── 
rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-set-encoded-plain-array.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-base64.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-zstd.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics1.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-items-EAPItemsProcessor-snuba-items__1__basic.protobuf.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-metrics-PolymorphicMetricsProcessor-snuba-metrics__1__snuba-metrics-sampled.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-metrics-PolymorphicMetricsProcessor-snuba-metrics__1__snuba-metrics-unsampled.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-metrics-PolymorphicMetricsProcessor-snuba-metrics__1__snuba-metrics1.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-metrics-summaries-MetricsSummariesMessageProcessor-snuba-metrics-summaries__1__metrics_summary.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-metrics-summaries-MetricsSummariesMessageProcessor-snuba-metrics-summaries__1__only_count.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-ourlogs-OurlogsMessageProcessor-snuba-ourlogs__1__maximal_log.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-ourlogs-OurlogsMessageProcessor-snuba-ourlogs__1__minimal_log.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-profile-chunks-ProfileChunksProcessor-snuba-profile-chunks__1__valid.json.snap │ │ ├── 
rust_snuba__processors__tests__schemas@snuba-queries-QuerylogProcessor-snuba-queries__1__rate-limited-real.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-queries-QuerylogProcessor-snuba-queries__1__snuba-queries-empty-trace-id.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-queries-QuerylogProcessor-snuba-queries__1__snuba-queries1.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-queries-QuerylogProcessor-snuba-queries__1__with-organization-id.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-spans-EAPItemsProcessor-snuba-spans__1__basic_span.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-spans-EAPItemsSpanProcessor-snuba-spans__1__basic_span.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-spans-EAPSpansMessageProcessor-snuba-spans__1__basic_span.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-spans-SpansMessageProcessor-snuba-spans__1__basic_span.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-uptime-results-UptimeMonitorChecksProcessor-snuba-uptime-results__1__failure.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-uptime-results-UptimeMonitorChecksProcessor-snuba-uptime-results__1__success.json.snap │ │ ├── rust_snuba__processors__tests__schemas@snuba-uptime-results-UptimeMonitorChecksProcessor-snuba-uptime-results__1__timeout.json.snap │ │ ├── rust_snuba__processors__tests__snuba-metrics-summaries-.snap │ │ └── rust_snuba__processors__tests__snuba-profile-chunks-.snap │ ├── spans.rs │ ├── uptime_monitor_checks.rs │ └── utils.rs │ ├── rebalancing.rs │ ├── runtime_config.rs │ ├── strategies │ ├── accountant.rs │ ├── clickhouse │ │ ├── batch.rs │ │ └── mod.rs │ ├── commit_log.rs │ ├── healthcheck.rs │ ├── join_timeout.rs │ ├── mod.rs │ ├── noop.rs │ ├── processor.rs │ ├── python.rs │ └── replacements.rs │ ├── testutils.rs │ └── types.rs ├── scripts ├── __init__.py ├── bump-version.sh ├── check-migrations.py ├── copy_tables.py ├── 
ddl-changes.py ├── fetch_service_refs.py ├── lib.sh ├── load_uptime_checks.py ├── post-release.sh ├── reset-kafka-offsets.py ├── rust-dummy-build.sh ├── rust-envvars ├── send_spans.py └── spans_oom_reproduce.sh ├── setup.cfg ├── setup.py ├── snuba ├── __init__.py ├── __main__.py ├── admin │ ├── README.md │ ├── __init__.py │ ├── audit_log │ │ ├── __init__.py │ │ ├── action.py │ │ ├── base.py │ │ └── query.py │ ├── auth.py │ ├── auth_roles.py │ ├── build.mjs │ ├── cardinality_analyzer │ │ ├── __init__.py │ │ └── cardinality_analyzer.py │ ├── clickhouse │ │ ├── __init__.py │ │ ├── capacity_management.py │ │ ├── common.py │ │ ├── database_clusters.py │ │ ├── migration_checks.py │ │ ├── nodes.py │ │ ├── predefined_cardinality_analyzer_queries.py │ │ ├── predefined_querylog_queries.py │ │ ├── predefined_system_queries.py │ │ ├── profile_events.py │ │ ├── querylog.py │ │ ├── system_queries.py │ │ ├── trace_log_parsing.py │ │ └── tracing.py │ ├── dead_letter_queue │ │ └── __init__.py │ ├── dist │ │ ├── index.html │ │ └── snuba.svg │ ├── google.py │ ├── iam_policy │ │ └── iam_policy.json │ ├── jest.config.js │ ├── jwt.py │ ├── kafka │ │ ├── __init__.py │ │ └── topics.py │ ├── migrations_policies.py │ ├── notifications │ │ ├── __init__.py │ │ └── slack │ │ │ ├── __init__.py │ │ │ ├── client.py │ │ │ └── utils.py │ ├── package.json │ ├── production_queries │ │ ├── __init__.py │ │ └── prod_queries.py │ ├── rpc │ │ ├── __init__.py │ │ └── rpc_queries.py │ ├── runtime_config │ │ └── __init__.py │ ├── static │ │ ├── api_client.tsx │ │ ├── auto_replacements_bypass_projects │ │ │ ├── index.tsx │ │ │ └── types.tsx │ │ ├── body.tsx │ │ ├── capacity_management │ │ │ ├── add_config_modal.tsx │ │ │ ├── allocation_policy.tsx │ │ │ ├── edit_config_modal.tsx │ │ │ ├── index.tsx │ │ │ ├── row_data.tsx │ │ │ ├── styles.tsx │ │ │ └── types.tsx │ │ ├── cardinality_analyzer │ │ │ ├── CSV.ts │ │ │ ├── index.tsx │ │ │ ├── query_display.tsx │ │ │ └── types.tsx │ │ ├── clickhouse_migrations │ │ │ 
├── index.tsx │ │ │ └── types.tsx │ │ ├── clickhouse_queries │ │ │ ├── index.tsx │ │ │ ├── query_display.tsx │ │ │ └── types.tsx │ │ ├── collapse.tsx │ │ ├── common │ │ │ └── components │ │ │ │ ├── __mocks__ │ │ │ │ └── sql_editor.tsx │ │ │ │ ├── sql_editor.tsx │ │ │ │ └── theme.tsx │ │ ├── data.tsx │ │ ├── database_clusters │ │ │ ├── index.tsx │ │ │ └── types.tsx │ │ ├── dead_letter_queue │ │ │ ├── index.tsx │ │ │ └── types.tsx │ │ ├── delete_tool │ │ │ └── index.tsx │ │ ├── header.tsx │ │ ├── index.tsx │ │ ├── kafka │ │ │ ├── index.tsx │ │ │ └── types.tsx │ │ ├── manual_jobs │ │ │ ├── index.tsx │ │ │ └── types.tsx │ │ ├── mql_queries │ │ │ ├── index.tsx │ │ │ ├── styles.tsx │ │ │ └── types.tsx │ │ ├── nav.tsx │ │ ├── production_queries │ │ │ ├── index.tsx │ │ │ ├── styles.tsx │ │ │ └── types.tsx │ │ ├── query_editor.tsx │ │ ├── query_history.tsx │ │ ├── querylog │ │ │ ├── index.tsx │ │ │ ├── query_display.tsx │ │ │ └── types.tsx │ │ ├── rpc_endpoints │ │ │ ├── endpoint_selector.tsx │ │ │ ├── exampleRequestTemplates.json │ │ │ ├── example_request_accordion.tsx │ │ │ ├── index.tsx │ │ │ ├── metadata_table.tsx │ │ │ ├── profile_events_table.tsx │ │ │ ├── request_input.tsx │ │ │ ├── response_display.tsx │ │ │ ├── styles.ts │ │ │ ├── summarized_trace.tsx │ │ │ ├── trace_formatter.tsx │ │ │ ├── types.tsx │ │ │ └── utils.ts │ │ ├── runtime_config │ │ │ ├── auditlog.tsx │ │ │ ├── descriptions.tsx │ │ │ ├── index.tsx │ │ │ ├── row_data.tsx │ │ │ ├── styles.tsx │ │ │ └── types.tsx │ │ ├── select.tsx │ │ ├── snql_to_sql │ │ │ ├── index.tsx │ │ │ ├── styles.tsx │ │ │ ├── types.tsx │ │ │ └── utils.tsx │ │ ├── snuba_explain │ │ │ ├── index.tsx │ │ │ ├── step_render.tsx │ │ │ ├── styles.tsx │ │ │ ├── types.tsx │ │ │ └── utils.tsx │ │ ├── table.tsx │ │ ├── tests │ │ │ ├── capacity_management │ │ │ │ ├── add_config_modal.spec.tsx │ │ │ │ ├── allocation_policies.spec.tsx │ │ │ │ ├── edit_config_modal.spec.tsx │ │ │ │ ├── index.spec.tsx │ │ │ │ └── table_color.spec.tsx │ │ │ ├── 
cardinality_analyzer │ │ │ │ └── CSV.spec.ts │ │ │ ├── kafka │ │ │ │ └── index.spec.tsx │ │ │ ├── nav_items │ │ │ │ └── index.spec.tsx │ │ │ ├── query_editor.spec.tsx │ │ │ ├── query_history.spec.tsx │ │ │ ├── rpc_enpoints │ │ │ │ └── trace_formatter.test.tsx │ │ │ ├── tracing │ │ │ │ ├── fixture.tsx │ │ │ │ └── index.spec.tsx │ │ │ └── utils │ │ │ │ └── execute_button.spec.tsx │ │ ├── theme.tsx │ │ ├── tracing │ │ │ ├── index.tsx │ │ │ ├── query_display.tsx │ │ │ ├── types.tsx │ │ │ └── util.tsx │ │ ├── types.tsx │ │ ├── utils │ │ │ ├── execute_button.tsx │ │ │ └── query_result_copier.tsx │ │ └── welcome │ │ │ └── index.tsx │ ├── tool_policies.py │ ├── tsconfig.json │ ├── user.py │ ├── views.py │ ├── wsgi.py │ └── yarn.lock ├── attribution │ ├── __init__.py │ ├── appid.py │ └── attribution_info.py ├── cleanup.py ├── cli │ ├── __init__.py │ ├── admin.py │ ├── api.py │ ├── bootstrap.py │ ├── bulk_load.py │ ├── cleanup.py │ ├── config.py │ ├── consumer.py │ ├── devserver.py │ ├── dlq_consumer.py │ ├── entities.py │ ├── health.py │ ├── jobs.py │ ├── lw_deletions_consumer.py │ ├── migrations.py │ ├── offline_replacer.py │ ├── optimize.py │ ├── querylog_to_csv.py │ ├── replacer.py │ ├── rust_consumer.py │ ├── spans_cardinality_analyzer.py │ ├── subscriptions_executor.py │ ├── subscriptions_scheduler.py │ └── subscriptions_scheduler_executor.py ├── clickhouse │ ├── __init__.py │ ├── columns.py │ ├── errors.py │ ├── escaping.py │ ├── formatter │ │ ├── __init__.py │ │ ├── expression.py │ │ ├── nodes.py │ │ └── query.py │ ├── http.py │ ├── native.py │ ├── optimize │ │ ├── __init__.py │ │ ├── optimize.py │ │ ├── optimize_scheduler.py │ │ ├── optimize_tracker.py │ │ └── util.py │ ├── query.py │ ├── query_dsl │ │ ├── __init__.py │ │ └── accessors.py │ ├── query_inspector.py │ ├── query_profiler.py │ ├── span_cardinality_analyzer.py │ └── translators │ │ ├── __init__.py │ │ └── snuba │ │ ├── __init__.py │ │ ├── allowed.py │ │ ├── defaults.py │ │ ├── function_call_mappers.py │ │ 
├── mappers.py │ │ └── mapping.py ├── clusters │ ├── __init__.py │ ├── cluster.py │ └── storage_sets.py ├── cogs │ └── accountant.py ├── consumers │ ├── __init__.py │ ├── codecs │ │ └── __init__.py │ ├── consumer.py │ ├── consumer_builder.py │ ├── consumer_config.py │ ├── dlq.py │ ├── rust_processor.py │ ├── schemas.py │ ├── strategy_factory.py │ ├── types.py │ └── utils.py ├── core │ ├── __init__.py │ └── initialize.py ├── datasets │ ├── __init__.py │ ├── cdc │ │ ├── __init__.py │ │ ├── cdcprocessors.py │ │ ├── cdcstorage.py │ │ ├── groupassignee_processor.py │ │ ├── groupedmessage_processor.py │ │ ├── row_processors.py │ │ └── types.py │ ├── common │ │ └── __init__.py │ ├── configuration │ │ ├── __init__.py │ │ ├── dataset_builder.py │ │ ├── discover │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ ├── discover.yaml │ │ │ │ ├── discover_events.yaml │ │ │ │ └── discover_transactions.yaml │ │ │ └── storages │ │ │ │ └── discover.yaml │ │ ├── entity_builder.py │ │ ├── events │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── events.yaml │ │ │ └── storages │ │ │ │ ├── errors.yaml │ │ │ │ └── errors_ro.yaml │ │ ├── events_analytics_platform │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ ├── eap_items.yaml │ │ │ │ ├── eap_items_span.yaml │ │ │ │ ├── spans_num_attrs.yaml │ │ │ │ ├── spans_str_attrs.yaml │ │ │ │ └── uptime_checks.yaml │ │ │ └── storages │ │ │ │ ├── eap_item_co_occurring_attrs.yaml │ │ │ │ ├── eap_items.yaml │ │ │ │ ├── eap_items_downsample_512.yaml │ │ │ │ ├── eap_items_downsample_64.yaml │ │ │ │ ├── eap_items_downsample_8.yaml │ │ │ │ ├── eap_items_span.yaml │ │ │ │ ├── items_attrs.yaml │ │ │ │ ├── spans_num_attrs.yaml │ │ │ │ ├── spans_str_attrs.yaml │ │ │ │ └── uptime_monitor_checks.yaml │ │ ├── functions │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── functions.yaml │ │ │ └── storages │ │ │ │ ├── functions.yaml │ │ │ │ └── functions_raw.yaml │ │ ├── generate_config_docs.py │ │ ├── generic_metrics │ │ │ ├── dataset.yaml │ │ │ ├── entities 
│ │ │ │ ├── counters.yaml │ │ │ │ ├── counters_meta.yaml │ │ │ │ ├── counters_meta_tag_values.yaml │ │ │ │ ├── distributions.yaml │ │ │ │ ├── distributions_meta.yaml │ │ │ │ ├── gauges.yaml │ │ │ │ ├── gauges_meta.yaml │ │ │ │ ├── gauges_meta_tag_values.yaml │ │ │ │ ├── org_counters.yaml │ │ │ │ ├── org_distributions.yaml │ │ │ │ ├── org_sets.yaml │ │ │ │ ├── sets.yaml │ │ │ │ ├── sets_meta.yaml │ │ │ │ └── sets_meta_tag_values.yaml │ │ │ └── storages │ │ │ │ ├── counters.yaml │ │ │ │ ├── counters_bucket.yaml │ │ │ │ ├── counters_meta.yaml │ │ │ │ ├── counters_meta_tag_values.yaml │ │ │ │ ├── distributions.yaml │ │ │ │ ├── distributions_bucket.yaml │ │ │ │ ├── distributions_meta.yaml │ │ │ │ ├── gauges.yaml │ │ │ │ ├── gauges_bucket.yaml │ │ │ │ ├── gauges_meta.yaml │ │ │ │ ├── gauges_meta_tag_values.yaml │ │ │ │ ├── org_counters.yaml │ │ │ │ ├── sets.yaml │ │ │ │ ├── sets_bucket.yaml │ │ │ │ ├── sets_meta.yaml │ │ │ │ └── sets_meta_tag_values.yaml │ │ ├── group_attributes │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── group_attributes.yaml │ │ │ └── storages │ │ │ │ └── group_attributes.yaml │ │ ├── groupassignee │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── groupassignee.yaml │ │ │ └── storages │ │ │ │ └── group_assignees.yaml │ │ ├── groupedmessage │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── groupedmessage.yaml │ │ │ └── storages │ │ │ │ └── grouped_messages.yaml │ │ ├── issues │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── search_issues.yaml │ │ │ └── storages │ │ │ │ └── search_issues.yaml │ │ ├── json_schema.py │ │ ├── loader.py │ │ ├── metrics │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ ├── metrics_counters.yaml │ │ │ │ ├── metrics_distributions.yaml │ │ │ │ ├── metrics_sets.yaml │ │ │ │ ├── org_counters.yaml │ │ │ │ ├── org_distributions.yaml │ │ │ │ └── org_sets.yaml │ │ │ └── storages │ │ │ │ ├── counters.yaml │ │ │ │ ├── distributions.yaml │ │ │ │ ├── org_counters.yaml │ │ │ │ ├── raw.yaml │ │ │ │ └── sets.yaml │ │ 
├── outcomes │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── outcomes.yaml │ │ │ └── storages │ │ │ │ └── hourly.yaml │ │ ├── outcomes_raw │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── outcomes_raw.yaml │ │ │ └── storages │ │ │ │ └── raw.yaml │ │ ├── profiles │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── profiles.yaml │ │ │ └── storages │ │ │ │ ├── chunks.yaml │ │ │ │ └── profiles.yaml │ │ ├── querylog │ │ │ └── storages │ │ │ │ └── querylog.yaml │ │ ├── replays │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ ├── aggregated.yaml │ │ │ │ └── replays.yaml │ │ │ └── storages │ │ │ │ ├── aggregated.yaml │ │ │ │ └── replays.yaml │ │ ├── spans │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── spans.yaml │ │ │ └── storages │ │ │ │ └── spans.yaml │ │ ├── storage_builder.py │ │ ├── transactions │ │ │ ├── dataset.yaml │ │ │ ├── entities │ │ │ │ └── transactions.yaml │ │ │ └── storages │ │ │ │ └── transactions.yaml │ │ └── utils.py │ ├── dataset.py │ ├── deletion_settings.py │ ├── entities │ │ ├── __init__.py │ │ ├── entity_data_model.py │ │ ├── entity_key.py │ │ ├── factory.py │ │ └── storage_selectors │ │ │ ├── __init__.py │ │ │ ├── eap_items.py │ │ │ ├── errors.py │ │ │ └── selector.py │ ├── entity.py │ ├── entity_subscriptions │ │ ├── __init__.py │ │ ├── processors.py │ │ └── validators.py │ ├── events_format.py │ ├── factory.py │ ├── message_filters.py │ ├── metrics_messages.py │ ├── plans │ │ ├── __init__.py │ │ ├── cluster_selector.py │ │ ├── entity_processing.py │ │ ├── entity_validation.py │ │ ├── query_plan.py │ │ ├── storage_processing.py │ │ └── translator │ │ │ ├── __init__.py │ │ │ ├── mapper.py │ │ │ └── query.py │ ├── pluggable_dataset.py │ ├── pluggable_entity.py │ ├── processors │ │ ├── __init__.py │ │ ├── eap_items_processor.py │ │ ├── eap_items_span_processor.py │ │ ├── errors_processor.py │ │ ├── functions_processor.py │ │ ├── generic_metrics_processor.py │ │ ├── group_attributes_processor.py │ │ ├── metrics_bucket_processor.py 
│ │ ├── ourlogs_processor.py │ │ ├── outcomes_processor.py │ │ ├── profile_chunks_processor.py │ │ ├── profiles_processor.py │ │ ├── querylog_processor.py │ │ ├── replays_processor.py │ │ ├── rust_compat_processor.py │ │ ├── search_issues_processor.py │ │ ├── spans_processor.py │ │ ├── spans_v2_processor.py │ │ ├── transactions_processor.py │ │ └── uptime_monitors_processor.py │ ├── readiness_state.py │ ├── schemas │ │ ├── __init__.py │ │ └── tables.py │ ├── slicing.py │ ├── storage.py │ ├── storages │ │ ├── __init__.py │ │ ├── factory.py │ │ ├── storage_key.py │ │ ├── tags_hash_map.py │ │ └── validator.py │ └── table_storage.py ├── downsampled_storage_tiers.py ├── env │ └── job_manifest.json ├── environment.py ├── lw_deletions │ ├── __init__.py │ ├── batching.py │ ├── formatters.py │ └── strategy.py ├── manual_jobs │ ├── __init__.py │ ├── extract_span_data.py │ ├── job_loader.py │ ├── job_logging.py │ ├── job_status.py │ ├── recreate_eap_dist_tables.py │ ├── recreate_missing_eap_spans_materialized_views.py │ ├── redis.py │ ├── runner.py │ ├── scrub_ips_from_eap_spans.py │ ├── scrub_ips_from_spans.py │ ├── scrub_ips_from_spans_dictionary.py │ ├── scrub_user_from_spans.py │ ├── scrub_users_from_eap_spans.py │ ├── scrub_users_from_eap_spans_str_attrs.py │ ├── toy_job.py │ ├── truncate_eap_spans.py │ └── update_migration_status.py ├── migrations │ ├── __init__.py │ ├── autogeneration │ │ ├── __init__.py │ │ ├── diff.py │ │ └── main.py │ ├── check_dangerous.py │ ├── clickhouse.py │ ├── columns.py │ ├── connect.py │ ├── context.py │ ├── errors.py │ ├── group_loader.py │ ├── groups.py │ ├── migration.py │ ├── migration_utilities.py │ ├── operations.py │ ├── parse_schema.py │ ├── policies.py │ ├── runner.py │ ├── status.py │ ├── system_migrations │ │ ├── 0001_migrations.py │ │ └── __init__.py │ ├── table_engines.py │ └── validator.py ├── pipeline │ ├── __init__.py │ ├── composite_entity_processing.py │ ├── composite_storage_processing.py │ ├── processors.py │ ├── 
query_pipeline.py │ ├── stages │ │ ├── __init__.py │ │ ├── query_execution.py │ │ └── query_processing.py │ ├── storage_query_identity_translate.py │ └── utils │ │ └── storage_finder.py ├── processor.py ├── query │ ├── __init__.py │ ├── accessors.py │ ├── allocation_policies │ │ ├── __init__.py │ │ ├── bytes_scanned_rejecting_policy.py │ │ ├── bytes_scanned_window_policy.py │ │ ├── concurrent_rate_limit.py │ │ ├── cross_org.py │ │ └── per_referrer.py │ ├── composite.py │ ├── conditions.py │ ├── data_source │ │ ├── __init__.py │ │ ├── join.py │ │ ├── projects_finder.py │ │ ├── simple.py │ │ └── visitor.py │ ├── deletions │ │ └── max_rows_enforcer.py │ ├── dsl.py │ ├── dsl_mapper.py │ ├── exceptions.py │ ├── expressions.py │ ├── formatters │ │ ├── __init__.py │ │ └── tracing.py │ ├── functions.py │ ├── indexer │ │ └── resolver.py │ ├── joins │ │ ├── __init__.py │ │ ├── classifier.py │ │ ├── equivalence_adder.py │ │ ├── metrics_subquery_generator.py │ │ ├── pre_processor.py │ │ ├── semi_joins.py │ │ └── subquery_generator.py │ ├── logical.py │ ├── matchers.py │ ├── mql │ │ ├── context_population.py │ │ ├── exceptions.py │ │ ├── mql_context.py │ │ └── parser.py │ ├── parser │ │ ├── README.md │ │ ├── __init__.py │ │ ├── exceptions.py │ │ ├── expressions.py │ │ └── validation │ │ │ ├── __init__.py │ │ │ └── functions.py │ ├── parsing.py │ ├── processors │ │ ├── __init__.py │ │ ├── condition_checkers │ │ │ ├── __init__.py │ │ │ └── checkers.py │ │ ├── logical │ │ │ ├── __init__.py │ │ │ ├── basic_functions.py │ │ │ ├── calculated_average_processor.py │ │ │ ├── curried_function_bucket_transformer.py │ │ │ ├── custom_function.py │ │ │ ├── filter_in_select_optimizer.py │ │ │ ├── granularity_processor.py │ │ │ ├── handled_functions.py │ │ │ ├── hash_bucket_functions.py │ │ │ ├── low_cardinality_processor.py │ │ │ ├── optional_attribute_aggregation.py │ │ │ ├── tags_type_transformer.py │ │ │ └── timeseries_processor.py │ │ └── physical │ │ │ ├── __init__.py │ │ │ ├── 
abstract_array_join_optimizer.py │ │ │ ├── array_has_optimizer.py │ │ │ ├── arrayjoin_keyvalue_optimizer.py │ │ │ ├── arrayjoin_optimizer.py │ │ │ ├── bloom_filter_optimizer.py │ │ │ ├── clickhouse_settings_override.py │ │ │ ├── column_filter_processor.py │ │ │ ├── conditions_enforcer.py │ │ │ ├── consistency_enforcer.py │ │ │ ├── empty_tag_condition_processor.py │ │ │ ├── events_bool_contexts.py │ │ │ ├── fixedstring_array_column_processor.py │ │ │ ├── group_id_column_processor.py │ │ │ ├── hexint_column_processor.py │ │ │ ├── mandatory_condition_applier.py │ │ │ ├── mapping_optimizer.py │ │ │ ├── mapping_promoter.py │ │ │ ├── minute_resolution_processor.py │ │ │ ├── null_column_caster.py │ │ │ ├── prewhere.py │ │ │ ├── replaced_groups.py │ │ │ ├── slice_of_map_optimizer.py │ │ │ ├── tuple_unaliaser.py │ │ │ ├── type_condition_optimizer.py │ │ │ ├── type_converters.py │ │ │ ├── uniq_in_select_and_having.py │ │ │ ├── user_column_processor.py │ │ │ ├── uuid_array_column_processor.py │ │ │ └── uuid_column_processor.py │ ├── query_settings.py │ ├── schema.py │ ├── snql │ │ ├── __init__.py │ │ ├── discover_entity_selection.py │ │ ├── expression_visitor.py │ │ ├── joins.py │ │ └── parser.py │ ├── subscripts.py │ └── validation │ │ ├── __init__.py │ │ ├── functions.py │ │ ├── signature.py │ │ └── validators.py ├── querylog │ ├── __init__.py │ └── query_metadata.py ├── reader.py ├── redis.py ├── replacer.py ├── replacers │ ├── __init__.py │ ├── errors_replacer.py │ ├── projects_query_flags.py │ ├── replacements_and_expiry.py │ └── replacer_processor.py ├── request │ ├── __init__.py │ ├── exceptions.py │ ├── schema.py │ └── validation.py ├── schemas.py ├── settings │ ├── __init__.py │ ├── settings_distributed.py │ ├── settings_docker.py │ ├── settings_self_hosted.py │ ├── settings_test.py │ ├── settings_test_distributed.py │ ├── settings_test_distributed_migrations.py │ ├── settings_test_initialization.py │ └── validation.py ├── snapshots │ ├── __init__.py │ ├── loaders │ 
│ ├── __init__.py │ │ └── single_table.py │ └── postgres_snapshot.py ├── snuba_migrations │ ├── README.md │ ├── __init__.py │ ├── discover │ │ ├── 0001_discover_merge_table.py │ │ ├── 0002_discover_add_deleted_tags_hash_map.py │ │ ├── 0003_discover_fix_user_column.py │ │ ├── 0004_discover_fix_title_and_message.py │ │ ├── 0005_discover_fix_transaction_name.py │ │ ├── 0006_discover_add_trace_id.py │ │ ├── 0007_discover_add_span_id.py │ │ ├── 0008_discover_fix_add_local_table.py │ │ └── 0009_discover_add_replay_id.py │ ├── events │ │ ├── 0001_events_initial.py │ │ ├── 0002_events_onpremise_compatibility.py │ │ ├── 0003_errors.py │ │ ├── 0004_errors_onpremise_compatibility.py │ │ ├── 0005_events_tags_hash_map.py │ │ ├── 0006_errors_tags_hash_map.py │ │ ├── 0007_groupedmessages.py │ │ ├── 0008_groupassignees.py │ │ ├── 0009_errors_add_http_fields.py │ │ ├── 0010_groupedmessages_onpremise_compatibility.py │ │ ├── 0011_rebuild_errors.py │ │ ├── 0012_errors_make_level_nullable.py │ │ ├── 0013_errors_add_hierarchical_hashes.py │ │ ├── 0014_backfill_errors.py │ │ ├── 0015_truncate_events.py │ │ ├── 0016_drop_legacy_events.py │ │ ├── 0017_errors_add_indexes.py │ │ ├── 0018_errors_ro_add_tags_hash_map.py │ │ ├── 0019_add_replay_id_column.py │ │ ├── 0020_add_main_thread_column.py │ │ ├── 0021_add_replay_id_errors_ro.py │ │ ├── 0022_add_main_thread_column_errors_ro.py │ │ ├── 0023_add_trace_sampled_num_processing_errors_columns.py │ │ ├── 0024_add_trace_sampled_num_processing_errors_columns_ro.py │ │ ├── 0025_add_flags_column.py │ │ ├── 0026_add_symbolicated_in_app_column.py │ │ ├── 0027_add_symbolicated_in_app_column_ro.py │ │ ├── 0028_add_timestamp_ms_column_errors.py │ │ ├── 0029_add_sample_weight_column_to_errors.py │ │ └── __init__.py │ ├── events_analytics_platform │ │ ├── 0001_spans.py │ │ ├── 0002_spans_attributes_mv.py │ │ ├── 0003_eap_spans_project_id_index.py │ │ ├── 0004_modify_sampling_weight.py │ │ ├── 0005_remove_attribute_mv.py │ │ ├── 
0006_drop_attribute_key_project_id_indexes.py │ │ ├── 0007_drop_project_id_index.py │ │ ├── 0008_drop_index_attribute_key_bucket_0.py │ │ ├── 0009_drop_index_attribute_key_buckets_1_19.py │ │ ├── 0010_drop_indexes_on_attribute_values.py │ │ ├── 0011_span_attribute_table.py │ │ ├── 0012_span_attribute_table_numeric.py │ │ ├── 0013_span_attribute_table_shard_keys.py │ │ ├── 0014_span_attribute_table_smaller.py │ │ ├── 0015_span_attribute_table_namespaced.py │ │ ├── 0016_spans_v2.py │ │ ├── 0017_span_attribute_table_v3.py │ │ ├── 0018_drop_unused_span_tables.py │ │ ├── 0019_uptime_monitors_init.py │ │ ├── 0020_ourlogs_init.py │ │ ├── 0021_ourlogs_attrs.py │ │ ├── 0022_uptime_monitors_init_v2.py │ │ ├── 0023_smart_autocomplete_mv.py │ │ ├── 0024_items.py │ │ ├── 0025_smart_autocomplete_index.py │ │ ├── 0026_items_add_attributes_hash_map.py │ │ ├── 0027_uptime_checks_add_column_in_incident.py │ │ ├── 0028_ourlogs_v3.py │ │ ├── 0029_remove_smart_autocomplete_experimental.py │ │ ├── 0030_smart_autocomplete_items.py │ │ ├── 0032_sampled_storage_views.py │ │ ├── 0033_items_attribute_table_v1.py │ │ ├── 0034_materialize_sampled_storage_views.py │ │ ├── 0035_drop_item_attrs.py │ │ ├── 0036_items_attribute_table_v1.py │ │ ├── 0037_remove_items_attribute_mv_v1.py │ │ ├── 0038_eap_items_add_sampling_factor.py │ │ ├── 0039_update_mv_with_sampling_factor.py │ │ ├── 0040_eap_items_downsampled_dist_add_sampling_factor.py │ │ ├── 0041_hashed_attributes_index.py │ │ ├── 0042_remove_hashed_columns.py │ │ ├── 0043_remove_hashed_keys_column.py │ │ └── 0044_remove_hash_map_columns_0_4.py │ ├── functions │ │ ├── 0001_functions.py │ │ ├── 0002_add_new_columns_to_raw_functions.py │ │ ├── 0003_add_new_columns_to_raw_functions.py │ │ └── 0004_functions_v2.py │ ├── generic_metrics │ │ ├── 0001_sets_aggregate_table.py │ │ ├── 0002_sets_raw_table.py │ │ ├── 0003_sets_mv.py │ │ ├── 0004_sets_raw_add_granularities.py │ │ ├── 0005_sets_replace_mv.py │ │ ├── 
0006_sets_raw_add_granularities_dist_table.py │ │ ├── 0007_distributions_aggregate_table.py │ │ ├── 0008_distributions_raw_table.py │ │ ├── 0009_distributions_mv.py │ │ ├── 0010_counters_aggregate_table.py │ │ ├── 0011_counters_raw_table.py │ │ ├── 0012_counters_mv.py │ │ ├── 0013_distributions_dist_tags_hash.py │ │ ├── 0014_distribution_add_options.py │ │ ├── 0015_sets_add_options.py │ │ ├── 0016_counters_add_options.py │ │ ├── 0017_distributions_mv2.py │ │ ├── 0018_sets_update_opt_default.py │ │ ├── 0019_counters_update_opt_default.py │ │ ├── 0020_sets_mv2.py │ │ ├── 0021_counters_mv2.py │ │ ├── 0022_gauges_aggregate_table.py │ │ ├── 0023_gauges_raw_table.py │ │ ├── 0024_gauges_mv.py │ │ ├── 0025_counters_add_raw_tags_hash_column.py │ │ ├── 0026_gauges_add_raw_tags_hash_column.py │ │ ├── 0027_sets_add_raw_tags_column.py │ │ ├── 0028_distributions_add_indexed_tags_column.py │ │ ├── 0029_add_use_case_id_index.py │ │ ├── 0030_add_record_meta_column.py │ │ ├── 0031_counters_meta_table.py │ │ ├── 0032_counters_meta_table_mv.py │ │ ├── 0033_counters_meta_tag_values_table.py │ │ ├── 0034_counters_meta_tag_values_table_mv.py │ │ ├── 0035_recreate_counters_meta_tag_value_table_mv.py │ │ ├── 0036_counters_meta_tables_final.py │ │ ├── 0037_add_record_meta_column_sets.py │ │ ├── 0038_add_record_meta_column_distributions.py │ │ ├── 0039_add_record_meta_column_gauges.py │ │ ├── 0040_remove_counters_meta_tables.py │ │ ├── 0041_adjust_partitioning_meta_tables.py │ │ ├── 0042_rename_counters_meta_tables.py │ │ ├── 0043_sets_meta_tables.py │ │ ├── 0044_gauges_meta_tables.py │ │ ├── 0045_distributions_meta_tables.py │ │ ├── 0046_distributions_add_disable_percentiles.py │ │ ├── 0047_distributions_mv3.py │ │ ├── 0048_counters_meta_tables_support_empty_tags.py │ │ ├── 0049_sets_meta_tables_support_empty_tags.py │ │ ├── 0050_distributions_meta_tables_support_empty_tags.py │ │ ├── 0051_gauges_meta_tables_support_empty_tags.py │ │ ├── 0052_counters_raw_add_sampling_weight.py │ │ ├── 
0053_counters_aggregated_add_sampling_weight.py │ │ ├── 0054_counters_mv3.py │ │ ├── 0055_gauges_raw_add_sampling_weight.py │ │ ├── 0056_gauges_aggregated_add_weighted_columns.py │ │ ├── 0057_gauges_mv3.py │ │ ├── 0058_distributions_raw_add_sampling_weight.py │ │ ├── 0059_distributions_aggregated_add_weighted_columns.py │ │ ├── 0060_distributions_mv4.py │ │ └── 0061_remove_distribution_meta_tag_values_mv.py │ ├── group_attributes │ │ ├── 0001_group_attributes.py │ │ ├── 0002_add_priority_to_group_attributes.py │ │ ├── 0003_add_first_release_id_to_group_attributes.py │ │ └── 0004_add_new_first_release_column_to_group_attributes.py │ ├── metrics │ │ ├── 0001_metrics_buckets.py │ │ ├── 0002_metrics_sets.py │ │ ├── 0003_counters_to_buckets.py │ │ ├── 0004_metrics_counters.py │ │ ├── 0005_metrics_distributions_buckets.py │ │ ├── 0006_metrics_distributions.py │ │ ├── 0007_metrics_sets_granularity_10.py │ │ ├── 0008_metrics_counters_granularity_10.py │ │ ├── 0009_metrics_distributions_granularity_10.py │ │ ├── 0010_metrics_sets_granularity_1h.py │ │ ├── 0011_metrics_counters_granularity_1h.py │ │ ├── 0012_metrics_distributions_granularity_1h.py │ │ ├── 0013_metrics_sets_granularity_1d.py │ │ ├── 0014_metrics_counters_granularity_1d.py │ │ ├── 0015_metrics_distributions_granularity_1d.py │ │ ├── 0016_metrics_sets_consolidated_granularity.py │ │ ├── 0017_metrics_counters_consolidated_granularity.py │ │ ├── 0018_metrics_distributions_consolidated_granularity.py │ │ ├── 0019_aggregate_tables_add_ttl.py │ │ ├── 0020_polymorphic_buckets_table.py │ │ ├── 0021_polymorphic_bucket_materialized_views.py │ │ ├── 0022_repartition_polymorphic_table.py │ │ ├── 0023_polymorphic_repartitioned_bucket_matview.py │ │ ├── 0024_metrics_distributions_add_histogram.py │ │ ├── 0025_metrics_counters_aggregate_v2.py │ │ ├── 0026_metrics_counters_v2_writing_matview.py │ │ ├── 0027_fix_migration_0026.py │ │ ├── 0028_metrics_sets_aggregate_v2.py │ │ ├── 0029_metrics_distributions_aggregate_v2.py │ │ 
├── 0030_metrics_distributions_v2_writing_mv.py │ │ ├── 0031_metrics_sets_v2_writing_mv.py │ │ ├── 0032_redo_0030_and_0031_without_timestamps.py │ │ ├── 0033_metrics_cleanup_old_views.py │ │ ├── 0034_metrics_cleanup_old_tables.py │ │ ├── 0035_metrics_raw_timeseries_id.py │ │ ├── __init__.py │ │ └── templates.py │ ├── outcomes │ │ ├── 0001_outcomes.py │ │ ├── 0002_outcomes_remove_size_and_bytes.py │ │ ├── 0003_outcomes_add_category_and_quantity.py │ │ ├── 0004_outcomes_matview_additions.py │ │ ├── 0005_outcomes_ttl.py │ │ ├── 0006_outcomes_add_size_col.py │ │ ├── 0007_outcomes_add_event_id_ttl_codec.py │ │ ├── 0008_outcomes_add_indexes.py │ │ └── __init__.py │ ├── profile_chunks │ │ ├── 0001_create_profile_chunks_table.py │ │ └── 0002_add_environment_column.py │ ├── profiles │ │ ├── 0001_profiles.py │ │ ├── 0002_disable_vertical_merge_algorithm.py │ │ ├── 0003_add_device_architecture.py │ │ └── 0004_drop_profile_column.py │ ├── querylog │ │ ├── 0001_querylog.py │ │ ├── 0002_status_type_change.py │ │ ├── 0003_add_profile_fields.py │ │ ├── 0004_add_bytes_scanned.py │ │ ├── 0005_add_codec_update_settings.py │ │ ├── 0006_sorting_key_change.py │ │ ├── 0007_add_offset_column.py │ │ └── __init__.py │ ├── replays │ │ ├── 0001_replays.py │ │ ├── 0002_add_url.py │ │ ├── 0003_alter_url_allow_null.py │ │ ├── 0004_add_error_ids_column.py │ │ ├── 0005_add_urls_user_agent_replay_start_timestamp.py │ │ ├── 0006_add_is_archived_column.py │ │ ├── 0007_add_replay_type_column.py │ │ ├── 0008_add_sample_rate.py │ │ ├── 0009_add_dom_index_columns.py │ │ ├── 0010_add_nullable_columns.py │ │ ├── 0011_add_is_dead_rage.py │ │ ├── 0012_materialize_counts.py │ │ ├── 0013_add_low_cardinality_codecs.py │ │ ├── 0014_add_id_event_columns.py │ │ ├── 0015_index_frequently_accessed_columns.py │ │ ├── 0016_materialize_new_event_counts.py │ │ ├── 0017_add_component_name_column.py │ │ ├── 0018_add_viewed_by_id_column.py │ │ ├── 0019_add_materialization.py │ │ ├── 
0020_add_dist_migration_for_materialization.py │ │ ├── 0021_index_tags.py │ │ ├── 0022_add_context_ota_updates.py │ │ ├── 0023_add_geo_columns.py │ │ └── __init__.py │ ├── search_issues │ │ ├── 0001_search_issues.py │ │ ├── 0002_search_issues_add_tags_hash_map.py │ │ ├── 0003_search_issues_modify_occurrence_type_id_size.py │ │ ├── 0004_rebuild_search_issues_with_version.py │ │ ├── 0005_search_issues_v2.py │ │ ├── 0006_add_subtitle_culprit_level_resource_id.py │ │ ├── 0007_add_transaction_duration.py │ │ ├── 0008_add_profile_id_replay_id.py │ │ ├── 0009_add_message.py │ │ └── __init__.py │ ├── sessions │ │ ├── 0001_sessions.py │ │ ├── 0002_sessions_aggregates.py │ │ ├── 0003_sessions_matview.py │ │ ├── 0004_sessions_ttl.py │ │ ├── 0005_drop_sessions_tables.py │ │ ├── __init__.py │ │ └── matview.py │ ├── spans │ │ ├── 0001_spans_v1.py │ │ ├── 0002_spans_add_tags_hashmap.py │ │ ├── 0003_spans_add_ms_columns.py │ │ ├── 0004_spans_group_raw_col.py │ │ ├── 0005_spans_add_sentry_tags.py │ │ ├── 0006_spans_add_profile_id.py │ │ ├── 0007_spans_add_metrics_summary.py │ │ ├── 0008_spans_add_index_on_span_id.py │ │ ├── 0009_spans_add_measure_hashmap.py │ │ ├── 0010_spans_add_compression.py │ │ ├── 0011_spans_add_index_on_trace_id.py │ │ ├── 0012_spans_add_index_on_transaction_name.py │ │ ├── 0013_spans_add_indexes_for_tag_columns.py │ │ └── 0014_spans_add_microsecond_precision_timestamps.py │ ├── test_migration │ │ ├── 0001_create_test_table.py │ │ ├── 0002_add_test_col.py │ │ └── __init__.py │ └── transactions │ │ ├── 0001_transactions.py │ │ ├── 0002_transactions_onpremise_fix_orderby_and_partitionby.py │ │ ├── 0003_transactions_onpremise_fix_columns.py │ │ ├── 0004_transactions_add_tags_hash_map.py │ │ ├── 0005_transactions_add_measurements.py │ │ ├── 0006_transactions_add_http_fields.py │ │ ├── 0007_transactions_add_discover_cols.py │ │ ├── 0008_transactions_add_timestamp_index.py │ │ ├── 0009_transactions_fix_title_and_message.py │ │ ├── 
0010_transactions_nullable_trace_id.py │ │ ├── 0011_transactions_add_span_op_breakdowns.py │ │ ├── 0012_transactions_add_spans.py │ │ ├── 0013_transactions_reduce_spans_exclusive_time.py │ │ ├── 0014_transactions_remove_flattened_columns.py │ │ ├── 0015_transactions_add_source_column.py │ │ ├── 0016_transactions_add_group_ids_column.py │ │ ├── 0017_transactions_add_app_start_type_column.py │ │ ├── 0018_transactions_add_profile_id.py │ │ ├── 0019_transactions_add_indexes_and_context_hash.py │ │ ├── 0020_transactions_add_codecs.py │ │ ├── 0021_transactions_add_replay_id.py │ │ ├── 0022_transactions_add_index_on_trace_id.py │ │ └── 0023_add_profiler_id_column.py ├── state │ ├── __init__.py │ ├── cache │ │ ├── __init__.py │ │ ├── abstract.py │ │ └── redis │ │ │ ├── __init__.py │ │ │ └── backend.py │ ├── explain_meta.py │ ├── quota.py │ └── rate_limit.py ├── subscriptions │ ├── __init__.py │ ├── codecs.py │ ├── combined_scheduler_executor.py │ ├── data.py │ ├── executor_consumer.py │ ├── partitioner.py │ ├── scheduler.py │ ├── scheduler_consumer.py │ ├── scheduler_processing_strategy.py │ ├── store.py │ ├── subscription.py │ ├── types.py │ └── utils.py ├── util.py ├── utils │ ├── __init__.py │ ├── bucket_timer.py │ ├── clock.py │ ├── codecs.py │ ├── config_component_factory.py │ ├── constants.py │ ├── describer.py │ ├── gcs.py │ ├── hashes.py │ ├── health_info.py │ ├── iterators.py │ ├── manage_topics.py │ ├── metrics │ │ ├── __init__.py │ │ ├── backends │ │ │ ├── __init__.py │ │ │ ├── abstract.py │ │ │ ├── datadog.py │ │ │ ├── dualwrite.py │ │ │ ├── dummy.py │ │ │ ├── sentry.py │ │ │ └── testing.py │ │ ├── gauge.py │ │ ├── timer.py │ │ ├── types.py │ │ ├── util.py │ │ └── wrapper.py │ ├── profiler.py │ ├── rate_limiter.py │ ├── registered_class.py │ ├── schemas.py │ ├── serializable_exception.py │ ├── streams │ │ ├── __init__.py │ │ ├── configuration_builder.py │ │ ├── metrics_adapter.py │ │ ├── topics.py │ │ └── types.py │ ├── threaded_function_delegator.py │ ├── 
types.py │ └── uwsgi.py ├── validate_configs.py ├── web │ ├── __init__.py │ ├── bulk_delete_query.py │ ├── constants.py │ ├── converters.py │ ├── db_query.py │ ├── delete_query.py │ ├── query.py │ ├── rpc │ │ ├── __init__.py │ │ ├── common │ │ │ ├── __init__.py │ │ │ ├── common.py │ │ │ ├── debug_info.py │ │ │ └── exceptions.py │ │ ├── proto_visitor.py │ │ └── v1 │ │ │ ├── __init__.py │ │ │ ├── create_subscription.py │ │ │ ├── endpoint_get_trace.py │ │ │ ├── endpoint_get_traces.py │ │ │ ├── endpoint_time_series.py │ │ │ ├── endpoint_trace_item_attribute_names.py │ │ │ ├── endpoint_trace_item_details.py │ │ │ ├── endpoint_trace_item_stats.py │ │ │ ├── endpoint_trace_item_table.py │ │ │ ├── resolvers │ │ │ ├── R_eap_items │ │ │ │ ├── common │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── common.py │ │ │ │ ├── resolver_time_series.py │ │ │ │ ├── resolver_trace_item_stats.py │ │ │ │ ├── resolver_trace_item_table.py │ │ │ │ └── storage_routing │ │ │ │ │ ├── load_retriever.py │ │ │ │ │ ├── routing_strategies │ │ │ │ │ ├── linear_bytes_scanned_storage_routing.py │ │ │ │ │ ├── normal_mode_linear_bytes_scanned.py │ │ │ │ │ ├── outcomes_based.py │ │ │ │ │ └── storage_routing.py │ │ │ │ │ ├── routing_strategy_selector.py │ │ │ │ │ └── sampling_in_storage_util.py │ │ │ ├── R_uptime_checks │ │ │ │ ├── __init__.py │ │ │ │ ├── common │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── common.py │ │ │ │ ├── resolver_time_series.py │ │ │ │ └── resolver_trace_item_table.py │ │ │ ├── __init__.py │ │ │ └── common │ │ │ │ ├── __init__.py │ │ │ │ ├── aggregation.py │ │ │ │ └── trace_item_table.py │ │ │ ├── trace_item_attribute_values.py │ │ │ └── visitors │ │ │ ├── sparse_aggregate_attribute_transformer.py │ │ │ └── visitor_v2.py │ ├── templates │ │ ├── base.html │ │ ├── index.html │ │ └── query.html │ ├── views.py │ └── wsgi.py └── writer.py ├── test_distributed_migrations ├── config │ └── clickhouse │ │ ├── macros-01.xml │ │ ├── macros-02.xml │ │ ├── macros-03.xml │ │ ├── macros-04.xml │ │ ├── macros.xml 
│ │ ├── remote_servers.xml │ │ └── zookeeper.xml ├── conftest.py └── test_get_nodes.py ├── test_initialization ├── __init__.py └── test_initialize.py └── tests ├── __init__.py ├── admin ├── __init__.py ├── cardinality_analyzer │ └── test_metrics_query.py ├── clickhouse │ ├── test_nodes.py │ ├── test_query_validation.py │ ├── test_querylog.py │ └── test_tracing.py ├── clickhouse_migrations │ ├── __init__.py │ ├── test_api.py │ └── test_migration_checks.py ├── data │ └── mock_responses │ │ ├── check_transitive_membership_200.json │ │ ├── group_lookup_200.json │ │ └── group_lookup_403.json ├── dead_letter_queue │ └── __init__.py ├── notifications │ ├── slack │ │ └── test_client.py │ └── test_build_blocks.py ├── test_api.py ├── test_auditlog.py ├── test_authorization.py ├── test_google.py ├── test_migration_policies.py ├── test_production_queries.py ├── test_querylog_audit_log.py ├── test_system_queries.py ├── test_trace_log_format.py └── tracing │ └── example_raw_trace.txt ├── assertions.py ├── backends ├── __init__.py └── metrics.py ├── base.py ├── cli ├── __init__.py ├── test_consumer.py ├── test_health.py ├── test_jobs.py ├── test_migrations.py ├── test_optimize.py └── test_subscriptions.py ├── clickhouse ├── __init__.py ├── optimize │ ├── test_optimize.py │ ├── test_optimize_scheduler.py │ └── test_optimize_tracker.py ├── query_dsl │ ├── __init__.py │ ├── test_accessors.py │ ├── test_project_id.py │ └── test_time_range.py ├── test_columns.py ├── test_formatted_node.py ├── test_formatter.py ├── test_http.py ├── test_native.py ├── test_profile_events.py ├── test_query_data.py ├── test_query_format.py ├── test_query_profiler.py └── translators │ ├── __init__.py │ ├── snuba │ ├── __init__.py │ ├── test_translation.py │ └── test_visitor.py │ └── test_auto_import.py ├── clusters ├── __init__.py ├── fake_cluster.py ├── test_cluster.py └── test_storage_sets.py ├── conftest.py ├── consumers ├── __init__.py ├── test_consumer_builder.py ├── test_consumer_config.py ├── 
test_dlq.py ├── test_dlq_consumer.py ├── test_message_processors.py ├── test_schemas.py └── test_utils.py ├── datasets ├── __init__.py ├── cdc │ ├── __init__.py │ ├── test_groupassignee.py │ ├── test_groupedmessage.py │ └── test_message_filters.py ├── configuration │ ├── __init__.py │ ├── broken_entity_bad_query_processor.yaml │ ├── broken_entity_positional_validator_args.yaml │ ├── column_validation_entity.yaml │ ├── entity_join_relationships.yaml │ ├── entity_no_custom_validators.yaml │ ├── entity_with_column_mappers.yaml │ ├── entity_with_fixed_string.yaml │ ├── test_entity_loader.py │ ├── test_storage_loader.py │ ├── test_utils.py │ └── utils.py ├── entities │ ├── storage_selectors │ │ ├── test_eap_items.py │ │ ├── test_errors.py │ │ └── test_selector.py │ ├── test_entity_data_model.py │ ├── test_entity_describer.py │ ├── test_entity_key.py │ ├── test_pluggable_entity.py │ └── test_tags_transformer.py ├── plans │ ├── __init__.py │ ├── test_cluster_selector.py │ └── translator │ │ ├── __init__.py │ │ └── test_mapping.py ├── storages │ ├── processors │ │ ├── __init__.py │ │ └── test_replaced_groups.py │ ├── test_storage_factory.py │ └── test_storages.py ├── test_cdc_events.py ├── test_context_promotion.py ├── test_dataset_factory.py ├── test_discover.py ├── test_entity_factory.py ├── test_errors_processor.py ├── test_errors_replacer.py ├── test_events.py ├── test_events_processing.py ├── test_factory.py ├── test_fast_bulk_load.py ├── test_functions_processor.py ├── test_generic_metrics_processor.py ├── test_group_attributes_join.py ├── test_group_attributes_processor.py ├── test_message_filters.py ├── test_metrics_processing.py ├── test_metrics_processor.py ├── test_nullable_field_casting.py ├── test_processors_idempotency.py ├── test_profiles_processor.py ├── test_querylog_processor.py ├── test_search_issues_processor.py ├── test_spans_payloads.py ├── test_spans_processor.py ├── test_table_storage.py ├── test_tags_hashmap.py ├── test_transaction_processor.py ├── 
test_transaction_translations.py └── validation │ ├── test_datetime_condition_validator.py │ ├── test_entity_contains_columns_validator.py │ ├── test_entity_validation.py │ ├── test_illegal_aggregate_conditions_validation.py │ ├── test_no_time_condition_validator.py │ ├── test_subscription_clauses_validator.py │ └── test_tag_condition_checker.py ├── fixtures.py ├── helpers.py ├── lw_deletions ├── __init__.py ├── test_formatters.py └── test_lw_deletions.py ├── manual_jobs ├── recreate_missing_eap_spans_materialized_views.py ├── test_extract_span_data.py ├── test_job_lock.py ├── test_job_statuses.py └── test_record_job_start.py ├── migrations ├── __init__.py ├── autogeneration │ ├── test_generate_python_migration.py │ └── test_ui.py ├── test_check_dangerous.py ├── test_connect.py ├── test_groups.py ├── test_legacy_use.py ├── test_operations.py ├── test_parse_schema.py ├── test_policies.py ├── test_runner.py ├── test_runner_individual.py ├── test_table_engines.py └── test_validator.py ├── pipeline ├── __init__.py ├── conftest.py ├── test_entity_processing_stage.py ├── test_entity_processing_stage_composite.py ├── test_execution_stage.py ├── test_pipeline_stage.py ├── test_storage_processing_stage.py ├── test_storage_processing_stage_composite.py └── test_storage_query_identity_translate.py ├── query ├── __init__.py ├── allocation_policies │ ├── test_allocation_policy_base.py │ ├── test_bytes_scanned_rejecting_policy.py │ ├── test_bytes_scanned_window_allocation_policy.py │ ├── test_concurrent_rate_limit_policy.py │ ├── test_cross_org_policy.py │ ├── test_per_referrer.py │ └── test_pickleability.py ├── data_source │ └── test_join.py ├── formatters │ ├── __init__.py │ └── test_query.py ├── indexer │ └── test_resolver.py ├── joins │ ├── __init__.py │ ├── equivalence_schema.py │ ├── join_structures.py │ ├── test_branch_cutter.py │ ├── test_equivalence_adder.py │ ├── test_equivalences.py │ ├── test_metrics_subqueries.py │ ├── test_semi_join.py │ └── test_subqueries.py ├── 
parser │ ├── __init__.py │ ├── test_formula_mql_query.py │ ├── test_invalid_legacy_query.py │ ├── test_parser.py │ ├── unit_tests │ │ ├── test_parse_populate_resolve_mql.py │ │ ├── test_parse_snql_query_initial.py │ │ ├── test_post_process_and_validate_mql_query.py │ │ ├── test_post_process_and_validate_query.py │ │ ├── test_resolver_visitor.py │ │ └── test_treeify_or_and_conditions.py │ └── validation │ │ └── test_functions.py ├── processors │ ├── __init__.py │ ├── logical │ │ ├── test_calculated_average_processor.py │ │ └── test_optional_attribute_aggregation.py │ ├── query_builders.py │ ├── test_apdex.py │ ├── test_array_has_optimizer.py │ ├── test_arrayjoin_optimizer.py │ ├── test_arrayjoin_spans_optimizer.py │ ├── test_bool_context.py │ ├── test_clickhouse_settings_override.py │ ├── test_column_filter_processor.py │ ├── test_custom_function.py │ ├── test_empty_tag_condition_processor.py │ ├── test_events_column_processor.py │ ├── test_failure_rate.py │ ├── test_filter_in_select_optimizer.py │ ├── test_fixedstring_array_column_processor.py │ ├── test_functions_processor.py │ ├── test_granularity_processor.py │ ├── test_handled_functions.py │ ├── test_hash_bucket_functions_processor.py │ ├── test_hexint_column_processor.py │ ├── test_low_cardinality_processor.py │ ├── test_mandatory_condition_applier.py │ ├── test_mandatory_condition_enforcer.py │ ├── test_mapping_optimizer.py │ ├── test_mapping_optimizer_no_useless_conditions.py │ ├── test_mapping_promoter.py │ ├── test_null_column_caster.py │ ├── test_prewhere.py │ ├── test_slice_of_map_optimizer.py │ ├── test_timeseries_processor.py │ ├── test_transaction_column_processor.py │ ├── test_tuple_unaliaser.py │ ├── test_type_condition_optimizer.py │ ├── test_uniq_in_select_and_having.py │ ├── test_uuid_array_column_processor.py │ └── test_uuid_column_processor.py ├── snql │ ├── test_invalid_queries.py │ ├── test_joins.py │ ├── test_query.py │ ├── test_query_column_validation.py │ └── test_storage_query.py ├── 
test_conditions.py ├── test_dsl.py ├── test_dsl_mapper.py ├── test_expressions.py ├── test_logical_query.py ├── test_matcher.py ├── test_nested.py ├── test_query.py ├── test_query_ast.py ├── test_query_validation.py ├── test_visitor.py └── validation │ ├── __init__.py │ └── test_signature.py ├── replacer ├── test_cluster_replacements.py ├── test_load_balancer.py └── test_replacements_and_expiry.py ├── request ├── test_build_request.py └── test_schema.py ├── settings └── test_settings.py ├── snapshots ├── __init__.py └── test_postgres_snapshot.py ├── state ├── __init__.py ├── test_cache.py ├── test_rate_limit.py ├── test_record.py └── test_state.py ├── stateful_consumer └── __init__.py ├── subscriptions ├── __init__.py ├── entity_subscriptions │ ├── test_entity_subscriptions.py │ └── test_entity_subscriptions_data.py ├── subscriptions_utils.py ├── test_builder_mode_state.py ├── test_codecs.py ├── test_combined_scheduler_executor.py ├── test_data.py ├── test_executor_consumer.py ├── test_filter_subscriptions.py ├── test_partitioner.py ├── test_scheduler.py ├── test_scheduler_consumer.py ├── test_scheduler_processing_strategy.py ├── test_store.py ├── test_subscription.py ├── test_task_builder.py └── test_types.py ├── test_api.py ├── test_api_status.py ├── test_cleanup.py ├── test_cli.py ├── test_clickhouse.py ├── test_consumer.py ├── test_copy_tables.py ├── test_discover_api.py ├── test_generic_metrics_api.py ├── test_group_attributes_api.py ├── test_metrics_api.py ├── test_metrics_meta_api.py ├── test_metrics_mql_api.py ├── test_metrics_sdk_api.py ├── test_outcomes_api.py ├── test_processor.py ├── test_redis.py ├── test_replacer.py ├── test_replays_api.py ├── test_search_issues_api.py ├── test_snql_api.py ├── test_snql_sdk_api.py ├── test_spans_api.py ├── test_transactions_api.py ├── test_util.py ├── test_writer.py ├── utils ├── __init__.py ├── conftest.py ├── metrics │ ├── __init__.py │ ├── test_gauge.py │ └── test_timer.py ├── streams │ ├── __init__.py │ ├── 
test_kafka_config.py │ └── test_topics.py ├── test_bucket_timer.py ├── test_check_clickhouse.py ├── test_columns_validator.py ├── test_describer.py ├── test_import_submodules.py ├── test_iterators.py ├── test_package_auto_import │ ├── __init__.py │ ├── a.py │ ├── b.py │ ├── c.py │ └── garbage ├── test_package_no_import │ ├── __init__.py │ ├── a.py │ ├── b.py │ ├── c.py │ └── garbage ├── test_rate_limiter.py ├── test_registered_class.py ├── test_serializable_exception.py └── test_threaded_function_delegator.py └── web ├── rpc ├── test_aggregation.py ├── test_base.py ├── test_common.py ├── test_get_expression_aggregations_visitor.py ├── test_rpc_handler.py └── v1 │ ├── routing_strategies │ ├── test_cluster_loadinfo.py │ ├── test_linear_bytes_scanned.py │ ├── test_normal_mode_linear_bytes_scanned.py │ ├── test_outcomes_based.py │ └── test_strategy_selector.py │ ├── smart_autocomplete │ └── __init__.py │ ├── test_conditional_aggregation.py │ ├── test_create_subscription.py │ ├── test_debug_info.py │ ├── test_endpoint_get_trace.py │ ├── test_endpoint_get_traces.py │ ├── test_endpoint_time_series │ ├── test_endpoint_time_series.py │ ├── test_endpoint_time_series_extrapolation.py │ ├── test_endpoint_time_series_logs.py │ └── test_endpoint_time_series_uptime_checks.py │ ├── test_endpoint_trace_item_attribute_names.py │ ├── test_endpoint_trace_item_details.py │ ├── test_endpoint_trace_item_stats.py │ ├── test_endpoint_trace_item_stats_logs.py │ ├── test_endpoint_trace_item_table │ ├── test_endpoint_trace_item_table.py │ ├── test_endpoint_trace_item_table_extrapolation.py │ ├── test_endpoint_trace_item_table_logs.py │ └── test_endpoint_trace_item_table_uptime_checks.py │ ├── test_storage_routing.py │ ├── test_trace_item_attribute_values_v1.py │ ├── test_utils.py │ └── visitors │ └── test_sparse_aggregate_attribute_transformer.py ├── test__get_allocation_policy.py ├── test_bulk_delete_query.py ├── test_cache_partitions.py ├── test_db_query.py ├── test_delete_query.py ├── 
test_max_rows_enforcer.py ├── test_parse_and_run_query.py ├── test_project_finder.py ├── test_query_exception.py ├── test_query_size.py ├── test_result_cache_codec.py ├── test_results.py ├── test_tables_collector.py ├── test_transform_names.py └── test_views.py /.craft.yml: -------------------------------------------------------------------------------- 1 | minVersion: "0.27.2" 2 | changelogPolicy: auto 3 | artifactProvider: 4 | name: none 5 | statusProvider: 6 | name: github 7 | config: 8 | contexts: 9 | - 'build-on-branch-push (sentryio)' 10 | targets: 11 | - id: release 12 | name: docker 13 | source: us-central1-docker.pkg.dev/sentryio/snuba/image 14 | target: getsentry/snuba 15 | - id: latest 16 | name: docker 17 | source: us-central1-docker.pkg.dev/sentryio/snuba/image 18 | target: getsentry/snuba 19 | targetFormat: '{{{target}}}:latest' 20 | - name: github 21 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | */.gitignore 3 | docs 4 | cloudbuild.* 5 | docker-compose.* 6 | CODEOWNERS 7 | conftest.py 8 | *.pyc 9 | *.swp 10 | .artifacts 11 | .coverage 12 | .mypy_cache/ 13 | .pytest_cache/ 14 | .venv 15 | htmlcov/ 16 | snuba.egg-info/ 17 | .github/ 18 | snuba/admin/dist/bundle.js* 19 | */node_modules 20 | rust_snuba/target/ 21 | -------------------------------------------------------------------------------- /.envrc: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # shellcheck disable=SC1091 3 | 4 | if [[ -f "${PWD}/.env" ]]; then 5 | dotenv 6 | fi 7 | 8 | PATH_add "${HOME}/.local/share/sentry-devenv/bin" 9 | 10 | if ! command -v devenv >/dev/null; then 11 | echo "install devenv: https://github.com/getsentry/devenv#install" 12 | return 1 13 | fi 14 | 15 | PATH_add "${PWD}/.devenv/bin" 16 | 17 | if [ ! 
-d .venv ]; then 18 | devenv sync 19 | fi 20 | 21 | export VIRTUAL_ENV="${PWD}/.venv" 22 | PATH_add "${PWD}/.venv/bin" 23 | 24 | . scripts/rust-envvars 25 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | snuba/admin/dist/bundle.js binary 2 | snuba/protobufs/**/*.py linguist-generated=true 3 | snuba/protobufs/**/*.pyi linguist-generated=true 4 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 13 | 14 | ### Legal Boilerplate 15 | 16 | Look, I get it. The entity doing business as "Sentry" was incorporated in the State of Delaware in 2015 as Functional Software, Inc. and is gonna need some rights from me in order to utilize my contributions in this here PR. So here's the deal: I retain all rights, title and interest in and to my contributions, and by keeping this boilerplate intact I confirm that Sentry can use, modify, copy, and redistribute my contributions, under Sentry's choice of terms. 
17 | -------------------------------------------------------------------------------- /.github/codeql/codeql-config.yml: -------------------------------------------------------------------------------- 1 | name: "Snuba CodeQL Config" 2 | 3 | paths-ignore: 4 | - "**/tests/**" 5 | -------------------------------------------------------------------------------- /.github/file-filters.yml: -------------------------------------------------------------------------------- 1 | # Based off https://github.com/getsentry/sentry/blob/master/.github/file-filters.yml 2 | 3 | api_changes: 4 | - "snuba/datasets/configuration/**/*.yaml" 5 | - "snuba/web/**/*.py" 6 | - "snuba/query/**/*.py" 7 | - "snuba/snuba_migrations/**/*" 8 | - "snuba/cli/devserver.py" 9 | - "rust_snuba/src/processors/*" 10 | - "snuba/datasets/processors/*" 11 | 12 | devservices_changes: 13 | - 'devservices/**' 14 | - '.github/workflows/ci.yml' 15 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | migrations: 2 | - any: 3 | - changed-files: 4 | - any-glob-to-any-file: 5 | - snuba/migrations/group_loader.py 6 | - snuba/migrations/groups.py 7 | - snuba/snuba_migrations/**/* 8 | - snuba/migrations/system_migrations/* 9 | -------------------------------------------------------------------------------- /.github/workflows/admin-sourcemaps.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: 3 | push: 4 | branches: [master] 5 | jobs: 6 | build: 7 | name: "build sourcemaps" 8 | runs-on: ubuntu-latest 9 | env: 10 | SENTRY_AUTH_TOKEN: ${{ secrets.SNUBA_SENTRY_SOURCEMAP_KEY }} 11 | NODE_VERSION: 20.8.1 12 | steps: 13 | - uses: actions/checkout@v4 14 | name: Checkout code 15 | - uses: actions/setup-python@v5 16 | with: 17 | python-version: 3.8 18 | - uses: actions/setup-node@v4 19 | with: 20 | node-version: ${{env.NODE_VERSION}} 21 
| - name: Build admin sourcemaps 22 | run: make build-admin 23 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | name: "Dependency Review" 2 | on: 3 | pull_request: 4 | branches: ["master"] 5 | 6 | permissions: 7 | contents: read 8 | 9 | jobs: 10 | dependency-review: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: "Checkout Repository" 14 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 15 | - name: Dependency Review 16 | uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1 17 | with: 18 | # Possible values: "critical", "high", "moderate", "low" 19 | fail-on-severity: high 20 | -------------------------------------------------------------------------------- /.github/workflows/docs-pr.yml: -------------------------------------------------------------------------------- 1 | name: Snuba Docs on PR's 2 | 3 | on: 4 | pull_request: 5 | 6 | jobs: 7 | docs: 8 | name: Sphinx 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - name: Setup Python 13 | uses: actions/setup-python@v5 14 | with: 15 | python-version: '3.8' 16 | - name: Generate config schema docs 17 | run: | 18 | pip install virtualenv 19 | make generate-config-docs 20 | - name: Build docs 21 | run: | 22 | make snubadocs 23 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Snuba Docs 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | docs: 10 | name: Sphinx 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Setup Python 15 | uses: actions/setup-python@v5 16 | with: 17 | python-version: '3.8' 18 | - name: Generate config schema docs 19 | run: | 20 | pip install 
virtualenv 21 | make generate-config-docs 22 | - name: Build docs 23 | run: | 24 | make snubadocs 25 | - uses: peaceiris/actions-gh-pages@v4.0.0 26 | name: Publish to GitHub Pages 27 | with: 28 | github_token: ${{ secrets.GITHUB_TOKEN }} 29 | publish_dir: docs/build 30 | force_orphan: true 31 | 32 | - name: Archive Docs 33 | uses: actions/upload-artifact@v4 34 | with: 35 | name: docs 36 | path: docs/build 37 | -------------------------------------------------------------------------------- /.github/workflows/enforce-license-compliance.yml: -------------------------------------------------------------------------------- 1 | name: Enforce License Compliance 2 | 3 | on: 4 | push: 5 | branches: [master, main, release/*] 6 | pull_request: 7 | branches: [master, main] 8 | 9 | jobs: 10 | enforce-license-compliance: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: 'Enforce License Compliance' 14 | uses: getsentry/action-enforce-license-compliance@6599a041195852debba3417e069829060d671e76 15 | with: 16 | fossa_api_key: ${{ secrets.FOSSA_API_KEY }} 17 | -------------------------------------------------------------------------------- /.github/workflows/labeler.yml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Labeler" 2 | on: 3 | - pull_request_target 4 | 5 | jobs: 6 | triage: 7 | permissions: 8 | contents: read 9 | pull-requests: write 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/labeler@v5 13 | with: 14 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 15 | -------------------------------------------------------------------------------- /.github/workflows/release-ghcr-version-tag.yml: -------------------------------------------------------------------------------- 1 | name: Release GHCR Versioned Image 2 | 3 | on: 4 | release: 5 | types: [prereleased, released] 6 | 7 | jobs: 8 | release-ghcr-version-tag: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Log in to GitHub Container Registry 12 | uses: 
docker/login-action@v3 13 | with: 14 | registry: ghcr.io 15 | username: ${{ github.actor }} 16 | password: ${{ secrets.GITHUB_TOKEN }} 17 | 18 | - name: Tag release version 19 | run: | 20 | docker buildx imagetools create --tag \ 21 | ghcr.io/getsentry/snuba:${{ github.ref_name }} \ 22 | ghcr.io/getsentry/snuba:${{ github.sha }} 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.swp 3 | .artifacts 4 | .coverage 5 | .mypy_cache/ 6 | .pytest_cache/ 7 | .venv 8 | htmlcov/ 9 | snuba.egg-info/ 10 | .DS_Store 11 | .idea/ 12 | node_modules 13 | .vscode/*.log 14 | snuba/admin/dist/bundle.js* 15 | snuba/admin/dist/bundle.css* 16 | tmp/ 17 | gocd/templates/vendor/ 18 | gocd/generated-pipelines/ 19 | Brewfile.lock.json 20 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | profile=black 3 | src_paths=snuba,tests 4 | atomic=true 5 | -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 3.11.11 2 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "ms-python.black-formatter", 4 | "ms-python.mypy-type-checker", 5 | "ms-python.flake8" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Python Debugger: Current File", 6 | "type": "debugpy", 7 | "request": "launch", 8 | "program": 
"${file}", 9 | "console": "integratedTerminal" 10 | }, 11 | { 12 | "name": "Pytest Current File", 13 | "type": "debugpy", 14 | "request": "launch", 15 | "program": "${workspaceFolder}/.venv/bin/pytest", 16 | "args": [ 17 | "${file}" 18 | ], 19 | "console": "integratedTerminal", 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /Brewfile: -------------------------------------------------------------------------------- 1 | brew 'cmake' # for rust-snuba 2 | brew 'protobuf' # for rust-snuba > sentry_protos 3 | brew 'rustup' 4 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include setup.py MANIFEST.in README.rst 2 | recursive-include snuba * 3 | -------------------------------------------------------------------------------- /bin/api: -------------------------------------------------------------------------------- 1 | wrapper -------------------------------------------------------------------------------- /bin/cleanup: -------------------------------------------------------------------------------- 1 | wrapper -------------------------------------------------------------------------------- /bin/consumer: -------------------------------------------------------------------------------- 1 | wrapper -------------------------------------------------------------------------------- /bin/optimize: -------------------------------------------------------------------------------- 1 | wrapper -------------------------------------------------------------------------------- /bin/wrapper: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | exec snuba "${0##*/}" "$@" 4 | -------------------------------------------------------------------------------- /cloudbuild.yaml: 
-------------------------------------------------------------------------------- 1 | steps: 2 | - name: "gcr.io/kaniko-project/executor:v1.16.0" 3 | id: runtime-image 4 | waitFor: ["-"] 5 | args: 6 | [ 7 | "--cache=true", 8 | "--use-new-run", 9 | "--build-arg", 10 | "SOURCE_COMMIT=$COMMIT_SHA", 11 | "--destination=us-central1-docker.pkg.dev/$PROJECT_ID/snuba/image:$COMMIT_SHA", 12 | "--target=application", 13 | "-f", 14 | "./Dockerfile", 15 | ] 16 | timeout: 2400s 17 | 18 | # Pull docker image again, so we can get build info. 19 | # https://github.com/GoogleCloudPlatform/cloud-builders-community/issues/212#issuecomment-1478828752 20 | - name: docker 21 | args: [pull, "us-central1-docker.pkg.dev/$PROJECT_ID/snuba/image:$COMMIT_SHA"] 22 | 23 | # This is needed for Freight to find matching builds 24 | images: [ 25 | 'us-central1-docker.pkg.dev/$PROJECT_ID/snuba/image:$COMMIT_SHA', 26 | ] 27 | timeout: 2640s 28 | options: 29 | # We need more memory for Webpack builds & e2e self-hosted tests 30 | machineType: "E2_HIGHCPU_32" 31 | -------------------------------------------------------------------------------- /config/clickhouse/macros.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 1 4 | 1 5 | 6 | 7 | -------------------------------------------------------------------------------- /config/clickhouse/remote_servers.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | clickhouse 7 | 9000 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /config/clickhouse/zookeeper.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | zookeeper 5 | 2181 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 
os.environ.setdefault("SNUBA_SETTINGS", "test") 4 | os.environ.setdefault("CLICKHOUSE_DATABASE", "snuba_test") 5 | -------------------------------------------------------------------------------- /devenv/config.ini: -------------------------------------------------------------------------------- 1 | [devenv] 2 | minimum_version = 1.16.0 3 | 4 | [venv.venv] 5 | python = 3.11.11 6 | path = .venv 7 | # TODO: need to combine requirements files into one 8 | requirements = requirements.txt 9 | editable = 10 | . 11 | 12 | [python3.11.11] 13 | darwin_x86_64 = https://github.com/astral-sh/python-build-standalone/releases/download/20250212/cpython-3.11.11+20250212-x86_64-apple-darwin-install_only.tar.gz 14 | darwin_x86_64_sha256 = 1f1afc064b523b67c06e6d4b024a8dbb8df5fa12f0f0018b218c855491833451 15 | darwin_arm64 = https://github.com/astral-sh/python-build-standalone/releases/download/20250212/cpython-3.11.11+20250212-aarch64-apple-darwin-install_only.tar.gz 16 | darwin_arm64_sha256 = 6dd5603e5570b8c4e1abf1ef2a329f195dd052890b1fa3b01b7406552a4f45a1 17 | -------------------------------------------------------------------------------- /devenv/sync.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from devenv import constants 4 | from devenv.lib import brew, colima, config, proc, venv 5 | 6 | 7 | def main(context: dict[str, str]) -> int: 8 | reporoot = context["reporoot"] 9 | 10 | brew.install() 11 | 12 | proc.run( 13 | (f"{constants.homebrew_bin}/brew", "bundle"), 14 | cwd=reporoot, 15 | ) 16 | 17 | venv_dir, python_version, requirements, editable_paths, bins = venv.get( 18 | reporoot, "venv" 19 | ) 20 | url, sha256 = config.get_python(reporoot, python_version) 21 | print(f"ensuring venv at {venv_dir}...") 22 | venv.ensure(venv_dir, python_version, url, sha256) 23 | 24 | print(f"syncing venv with {requirements}...") 25 | venv.sync(reporoot, venv_dir, requirements, editable_paths, bins) 26 | 27 | print("running make 
develop...") 28 | os.system("make develop") 29 | 30 | # start colima if it's not already running 31 | colima.start(reporoot) 32 | 33 | return 0 34 | -------------------------------------------------------------------------------- /devservices/clickhouse/config.xml: -------------------------------------------------------------------------------- 1 | 2 | 0.3 3 | 4 | 1 5 | 6 | 7 | -------------------------------------------------------------------------------- /devservices/programs.conf: -------------------------------------------------------------------------------- 1 | [program:devserver] 2 | command=snuba devserver 3 | autostart=false 4 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: '3' 3 | services: 4 | snuba-api: 5 | build: ./ 6 | ports: 7 | - "1218:1218" 8 | volumes: 9 | - ".:/usr/src/snuba/" 10 | command: 11 | - "api" 12 | - "--http" 13 | - "0.0.0.0:1218" 14 | - "--py-autoreload" 15 | - "1" 16 | - "--http-keepalive" 17 | environment: &snuba_env 18 | CLICKHOUSE_HOST: 'clickhouse' 19 | REDIS_HOST: 'redis' 20 | # Uncomment this to run sentry's snuba testsuite 21 | #SNUBA_SETTINGS: test 22 | clickhouse: 23 | image: altinity/clickhouse-server:21.8.13.1.altinitystable 24 | ports: 25 | - "9000:9000" 26 | - "9009:9009" 27 | - "8123:8123" 28 | volumes: 29 | - "clickhouse:/var/lib/clickhouse" 30 | ulimits: 31 | nofile: 262144 32 | redis: 33 | image: redis:5.0-alpine 34 | 35 | volumes: 36 | clickhouse: 37 | -------------------------------------------------------------------------------- /docker_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # first check if we're passing flags, if so 5 | # prepend with snuba 6 | if [ "${1:0:1}" = '-' ]; then 7 | set -- snuba "$@" 8 | fi 9 | 10 | help_result=$(snuba "${1}" --help) 11 | help_return=$? 
12 | 13 | if [[ "${help_return}" -eq 0 ]]; then 14 | set -- snuba "$@" 15 | else 16 | # Print the error message if it returns non-zero, to help with troubleshooting. 17 | printf "Error running snuba ${1} --help, passing command to exec directly." 18 | printf "\n${help_result}" 19 | fi 20 | 21 | if [ -n "${ENABLE_HEAPTRACK:-}" ]; then 22 | file_path="./profiler_data/profile_$(date '+%Y%m%d_%H%M%S')" 23 | set -- heaptrack -o "${file_path}" "$@" 24 | fi 25 | 26 | exec "$@" 27 | -------------------------------------------------------------------------------- /docs-requirements.txt: -------------------------------------------------------------------------------- 1 | jsonschema2md==0.4.0 2 | fastjsonschema==2.16.2 3 | sentry-sdk==2.18.0 4 | myst-parser==0.18.0 5 | sphinx==5.1.1 6 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | build/* 2 | -------------------------------------------------------------------------------- /docs/source/_static/architecture/clickhouse_nodes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/clickhouse_nodes.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/compositeprocessing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/compositeprocessing.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/datamodel.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/datamodel.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/deployment_legend.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/deployment_legend.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/errors_transactions_deployment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/errors_transactions_deployment.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/joins.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/joins.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/multientity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/multientity.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/outcomes_deployment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/outcomes_deployment.png 
-------------------------------------------------------------------------------- /docs/source/_static/architecture/overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/overview.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/queryprocessing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/queryprocessing.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/sessions_deployment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/sessions_deployment.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/singleentity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/singleentity.png -------------------------------------------------------------------------------- /docs/source/_static/architecture/snuba_deployment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/architecture/snuba_deployment.png -------------------------------------------------------------------------------- /docs/source/_static/query/snubaUI.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/docs/source/_static/query/snubaUI.png -------------------------------------------------------------------------------- /docs/source/clickhouse/death_queries.rst: -------------------------------------------------------------------------------- 1 | Clickhouse Queries Of Death 2 | =========================== 3 | 4 | 5 | The following queries have been shown to segfault ClickHouse on 20.7 (which is the minimum Clickhouse version of Snuba). Do not run these queries in the tracing tool, unless you really want to take ClickHouse down. 6 | 7 | countif("DOOM") 8 | --------------- 9 | 10 | Query :: 11 | 12 | SELECT countIf(environment='production') 13 | FROM ... 14 | PREWHERE environment = 'production' 15 | 16 | A ``countif`` in the ``SELECT`` with that same condition in the ``PREWHERE`` will segfault ClickHouse. This will be fixed in 21.8 when the upgrade is complete. 17 | -------------------------------------------------------------------------------- /docs/source/clickhouse/supported_versions.rst: -------------------------------------------------------------------------------- 1 | ============================= 2 | ClickHouse supported versions 3 | ============================= 4 | The following version(s) of Clickhouse have been tested and are known to work 5 | with Snuba: 6 | 7 | - 23.8.11.29 (Altinity Stable Build) 8 | 9 | Any version of ClickHouse used outside of this list could potentially work, 10 | but is not guaranteed to work. Some functionality might be broken. Use a 11 | different version at your own risk. There are plans to support more recent 12 | versions of Clickhouse in the future. When Snuba has been validated to work 13 | with the new versions of Clickhouse, this list will be updated.
14 | -------------------------------------------------------------------------------- /docs/source/configuration/dataset.md: -------------------------------------------------------------------------------- 1 | # Dataset Schema 2 | 3 | ## Properties 4 | 5 | - **version**: Version of schema. 6 | - **kind**: Component kind. 7 | - **name** *(string)*: Name of the dataset. 8 | - **entities** *(object)*: 9 | - **all** *(array)*: Names of entities associated with this dataset. 10 | -------------------------------------------------------------------------------- /docs/source/configuration/entity_subscription.md: -------------------------------------------------------------------------------- 1 | # Entity Subscription Schema 2 | 3 | ## Properties 4 | 5 | - **version**: Version of schema. 6 | - **kind**: Component kind. 7 | - **name** *(string)*: Name of the entity subscription. 8 | - **max_allowed_aggregations** *(['integer', 'null'])*: Maximum number of allowed aggregations. 9 | - **disallowed_aggregations** *(['array', 'null'])*: Name of aggregation clauses that are not allowed. 10 | -------------------------------------------------------------------------------- /docs/source/configuration/intro.rst: -------------------------------------------------------------------------------- 1 | ===================== 2 | Dataset Configuration 3 | ===================== 4 | 5 | Snuba Datasets are defined through YAML configuration files. These are then loaded and validated by the Snuba application. 6 | -------------------------------------------------------------------------------- /docs/source/configuration/migration_group.md: -------------------------------------------------------------------------------- 1 | # Migration Group Schema 2 | 3 | ## Properties 4 | 5 | - **version**: Version of schema. 6 | - **kind**: Component kind. 7 | - **name** *(string)*: Name of the migration group. 8 | - **optional** *(boolean)*: Flag to determine if migration group is optional. 
9 | - **migrations** *(array)*: Names of migrations to be applied in group. 10 | -------------------------------------------------------------------------------- /docs/source/configuration/overview.rst: -------------------------------------------------------------------------------- 1 | .. include:: intro.rst 2 | 3 | Schemas: 4 | --------- 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | 9 | dataset 10 | 11 | entity 12 | 13 | entity_subscription 14 | 15 | migration_group 16 | 17 | readable_storage 18 | 19 | writable_storage 20 | -------------------------------------------------------------------------------- /docs/source/configuration/readable_storage.md: -------------------------------------------------------------------------------- 1 | # Readable Storage Schema 2 | 3 | ## Properties 4 | 5 | - **version**: Version of schema. 6 | - **kind**: Component kind. 7 | - **name** *(string)*: Name of the readable storage. 8 | - **storage** *(object)*: 9 | - **key** *(string)*: A unique key identifier for the storage. 10 | - **set_key** *(string)*: A unique key identifier for a collection of storages located in the same cluster. 11 | - **schema** *(object)*: 12 | - **columns** *(array)*: Objects (or nested objects) representing columns containing a name, type and args. 13 | - **local_table_name** *(string)*: The local table name in a single-node ClickHouse. 14 | - **dist_table_name** *(string)*: The distributed table name in distributed ClickHouse. 15 | - **query_processors** *(array)*: Names of QueryProcessor classes which represent a transformation applied to the ClickHouse query. 16 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. include:: intro.rst 2 | 3 | Contents: 4 | --------- 5 | 6 | ..
toctree:: 7 | :maxdepth: 1 8 | 9 | getstarted 10 | architecture/overview 11 | architecture/datamodel 12 | architecture/slicing 13 | architecture/queryprocessing 14 | architecture/consumer 15 | configuration/overview 16 | query/overview 17 | language/snql 18 | language/mql 19 | migrations/modes 20 | contributing/environment 21 | clickhouse/death_queries 22 | clickhouse/topology 23 | clickhouse/schema_design 24 | clickhouse/supported_versions 25 | profiler 26 | -------------------------------------------------------------------------------- /gocd/templates/bash/canary-ddog-health-check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /devinfra/scripts/checks/datadog/monitor_status.py \ 4 | 140973101 5 | 6 | 7 | # Above monitor IDs 140973101 map to following monitors: 8 | # Snuba API Health Check is Failing 9 | -------------------------------------------------------------------------------- /gocd/templates/bash/check-cloud-build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /devinfra/scripts/checks/googlecloud/check_cloudbuild.py \ 4 | sentryio \ 5 | snuba \ 6 | build-on-branch-push \ 7 | ${GO_REVISION_SNUBA_REPO} \ 8 | master 9 | -------------------------------------------------------------------------------- /gocd/templates/bash/check-github.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /devinfra/scripts/checks/githubactions/checkruns.py \ 4 | --timeout-mins 60 \ 5 | getsentry/snuba \ 6 | ${GO_REVISION_SNUBA_REPO} \ 7 | "Tests and code coverage (test)" \ 8 | "Tests and code coverage (test_distributed)" \ 9 | "Tests and code coverage (test_distributed_migrations)" \ 10 | "Dataset Config Validation" \ 11 | "sentry (0)" \ 12 | "sentry (1)" \ 13 | "sentry (2)" \ 14 | "sentry (3)" 15 | -------------------------------------------------------------------------------- 
/gocd/templates/bash/check-migrations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | deploy_sha=`snuba/scripts/fetch_service_refs.py --pipeline "deploy-snuba-s4s"` 4 | snuba/scripts/check-migrations.py --to $deploy_sha --workdir snuba 5 | -------------------------------------------------------------------------------- /gocd/templates/bash/deploy-st.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}") 4 | 5 | /devinfra/scripts/k8s/k8stunnel 6 | 7 | /devinfra/scripts/k8s/k8s-deploy.py \ 8 | --label-selector="${LABEL_SELECTOR}" \ 9 | --image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \ 10 | --container-name="snuba" \ 11 | --container-name="snuba-admin" 12 | 13 | /devinfra/scripts/k8s/k8s-deploy.py \ 14 | --label-selector="${LABEL_SELECTOR}" \ 15 | --image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \ 16 | --type="cronjob" \ 17 | --container-name="cleanup" 18 | -------------------------------------------------------------------------------- /gocd/templates/bash/migrate-reverse.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}") 4 | /devinfra/scripts/k8s/k8stunnel 5 | 6 | /devinfra/scripts/k8s/k8s-spawn-job.py \ 7 | --label-selector="service=${SNUBA_SERVICE_NAME}" \ 8 | --container-name="${SNUBA_SERVICE_NAME}" \ 9 | "snuba-migrate-reverse" \ 10 | "us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \ 11 | -- \ 12 | snuba migrations reverse-in-progress 13 | -------------------------------------------------------------------------------- /gocd/templates/bash/migrate-st.sh: -------------------------------------------------------------------------------- 
1 | #!/bin/bash 2 | 3 | # At the time of writing (2023-06-28) the single tenant deployments 4 | # have been using a different migration process compared to the 5 | # US deployment of snuba. 6 | # This script should be merged with migrate.sh if we can figure 7 | # out a common migration script for all regions. 8 | 9 | eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}") 10 | /devinfra/scripts/k8s/k8stunnel 11 | 12 | /devinfra/scripts/k8s/k8s-spawn-job.py \ 13 | --label-selector="service=${SNUBA_SERVICE_NAME}" \ 14 | --container-name="${SNUBA_SERVICE_NAME}" \ 15 | "snuba-migrate" \ 16 | "us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \ 17 | -- \ 18 | snuba migrations migrate --check-dangerous 19 | -------------------------------------------------------------------------------- /gocd/templates/bash/migrate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}") 4 | /devinfra/scripts/k8s/k8stunnel 5 | 6 | /devinfra/scripts/k8s/k8s-spawn-job.py \ 7 | --label-selector="service=${SNUBA_SERVICE_NAME}" \ 8 | --container-name="${SNUBA_SERVICE_NAME}" \ 9 | "snuba-migrate" \ 10 | "us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \ 11 | -- \ 12 | snuba migrations migrate --check-dangerous -r complete -r partial 13 | -------------------------------------------------------------------------------- /gocd/templates/bash/s4s-ddog-health-check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | -------------------------------------------------------------------------------- /gocd/templates/bash/s4s-sentry-health-check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "S4S Health Check!" 
4 | -------------------------------------------------------------------------------- /gocd/templates/bash/saas-ddog-health-check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /devinfra/scripts/checks/datadog/monitor_status.py --dry-run=true \ 4 | 113296727 \ 5 | 42722121 6 | 7 | 8 | # Above monitor IDs map to following monitors respectively: 9 | # Snuba - SLO - High API error rate 10 | # Snuba - Too many restarts on Snuba pods 11 | -------------------------------------------------------------------------------- /gocd/templates/bash/saas-sentry-error-check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /devinfra/scripts/checks/sentry/release_error_events.py \ 4 | --project-id=300688 \ 5 | --project-slug=snuba \ 6 | --release="${GO_REVISION_SNUBA_REPO}" \ 7 | --sentry-environment="${SENTRY_ENVIRONMENT}" \ 8 | --duration=5 \ 9 | --error-events-limit=500 \ 10 | --skip-warnings=true \ 11 | 12 | 13 | # --skip-check=${SKIP_CANARY_CHECKS} 14 | -------------------------------------------------------------------------------- /gocd/templates/bash/saas-sentry-health-check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /devinfra/scripts/checks/sentry/release_new_issues.py \ 4 | --project-id=300688 \ 5 | --project-slug=snuba \ 6 | --release="${GO_REVISION_SNUBA_REPO}" \ 7 | --new-issues-limit=0 \ 8 | --additional-query="issue.type:error !level:info !server_name:*eap*" \ 9 | 10 | 11 | # --skip-check=${SKIP_CANARY_CHECKS} 12 | -------------------------------------------------------------------------------- /gocd/templates/bash/sentry-release-canary.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sentry-cli releases new "${GO_REVISION_SNUBA_REPO}" 4 | sentry-cli releases set-commits "${GO_REVISION_SNUBA_REPO}" --commit 
"getsentry/snuba@${GO_REVISION_SNUBA_REPO}" 5 | sentry-cli releases deploys "${GO_REVISION_SNUBA_REPO}" new -e canary 6 | -------------------------------------------------------------------------------- /gocd/templates/bash/sentry-release-primary.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sentry-cli releases deploys "${GO_REVISION_SNUBA_REPO}" new -e production 4 | sentry-cli releases finalize "${GO_REVISION_SNUBA_REPO}" 5 | -------------------------------------------------------------------------------- /gocd/templates/jsonnetfile.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "dependencies": [ 4 | { 5 | "source": { 6 | "git": { 7 | "remote": "https://github.com/getsentry/gocd-jsonnet.git", 8 | "subdir": "libs" 9 | } 10 | }, 11 | "version": "v2.13.0" 12 | } 13 | ], 14 | "legacyImports": true 15 | } 16 | -------------------------------------------------------------------------------- /gocd/templates/jsonnetfile.lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "dependencies": [ 4 | { 5 | "source": { 6 | "git": { 7 | "remote": "https://github.com/getsentry/gocd-jsonnet.git", 8 | "subdir": "libs" 9 | } 10 | }, 11 | "version": "6ddc943ae87444b48e16995639dfe89f33a0f444", 12 | "sum": "NH9U5jQ8oCSPXLuBw27OqAaPLBUDqMGHvRLxfo84hNQ=" 13 | } 14 | ], 15 | "legacyImports": false 16 | } 17 | -------------------------------------------------------------------------------- /gocd/templates/snuba.jsonnet: -------------------------------------------------------------------------------- 1 | local snuba = import './pipelines/snuba.libsonnet'; 2 | local pipedream = import 'github.com/getsentry/gocd-jsonnet/libs/pipedream.libsonnet'; 3 | 4 | local pipedream_config = { 5 | name: 'snuba', 6 | materials: { 7 | snuba_repo: { 8 | git: 'git@github.com:getsentry/snuba.git', 9 | shallow_clone: 
true, 10 | branch: 'master', 11 | destination: 'snuba', 12 | }, 13 | }, 14 | rollback: { 15 | material_name: 'snuba_repo', 16 | stage: 'deploy-primary', 17 | elastic_profile_id: 'snuba', 18 | }, 19 | 20 | // Set to true to auto-deploy changes (defaults to true) 21 | auto_deploy: true, 22 | }; 23 | 24 | pipedream.render(pipedream_config, snuba) 25 | -------------------------------------------------------------------------------- /requirements-build.txt: -------------------------------------------------------------------------------- 1 | --index-url https://pypi.devinfra.sentry.io/simple 2 | 3 | maturin==1.4.0 4 | types-protobuf==5.27.0.20240626 5 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | --index-url https://pypi.devinfra.sentry.io/simple 2 | 3 | time-machine==2.13.0 4 | 5 | # for typing 6 | mypy==1.1.1 7 | types-python-dateutil==2.8.19.14 8 | types-python-jose==3.3.0 9 | types-pyyaml==6.0.12.20240808 10 | types-requests==2.32.0.20240907 11 | types-setuptools==74.1.0.20240907 12 | types-simplejson==3.17.7 13 | types-google-cloud-ndb==2.2.0 14 | -------------------------------------------------------------------------------- /rust_snuba/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [env] 2 | # Workaround for https://github.com/confluentinc/librdkafka/pull/5012 3 | CMAKE_POLICY_VERSION_MINIMUM = "3.10" 4 | -------------------------------------------------------------------------------- /rust_snuba/.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | rust_arroyo/target/ 3 | rust_arroyo/Cargo.lock 4 | 5 | # insta snapshots 6 | **/*.snap.new 7 | 8 | # These are backup files generated by rustfmt 9 | **/*.rs.bk 10 | -------------------------------------------------------------------------------- 
/rust_snuba/README.md: -------------------------------------------------------------------------------- 1 | # Rust impl of consumers (Experimental) 2 | 3 | Rust consumers are an experimental project in Snuba. Most users should not be running this code! 4 | 5 | The goal of this project is to provide a feature complete Rust equivalent to the `snuba consumer` and `snuba multistorage-consumer` functionality that is currently written in Python code. 6 | 7 | ## How to run 8 | 9 | 1. Run `make watch-rust-snuba`. 10 | 2. `snuba rust-consumer` can now be used to run a simple Rust consumer that currently does not insert into clickhouse. 11 | -------------------------------------------------------------------------------- /rust_snuba/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["maturin>=1.4.0,<1.4.1"] 3 | build-backend = "maturin" 4 | 5 | [tool.maturin] 6 | features = ["pyo3/extension-module"] 7 | 8 | [project] 9 | name = "rust_snuba" 10 | requires-python = ">=3.7" 11 | classifiers = [ 12 | "Programming Language :: Rust", 13 | "Programming Language :: Python :: Implementation :: CPython", 14 | "Programming Language :: Python :: Implementation :: PyPy", 15 | ] 16 | -------------------------------------------------------------------------------- /rust_snuba/rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.83.0" 3 | -------------------------------------------------------------------------------- /rust_snuba/src/metrics/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod global_tags; 2 | pub mod statsd; 3 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__events-.snap: -------------------------------------------------------------------------------- 1 | --- 2 | 
source: src/processors/mod.rs 3 | expression: diff 4 | --- 5 | [ 6 | Change { 7 | path: "..2.data.contexts.", 8 | change: PropertyAdd { 9 | lhs_additional_properties: true, 10 | added: "flags", 11 | }, 12 | }, 13 | Change { 14 | path: "..2.data.contexts..trace.", 15 | change: PropertyAdd { 16 | lhs_additional_properties: true, 17 | added: "parent_span_id", 18 | }, 19 | }, 20 | Change { 21 | path: "..0", 22 | change: TypeRemove { 23 | removed: Number, 24 | }, 25 | }, 26 | Change { 27 | path: "..0", 28 | change: RangeAdd { 29 | added: Minimum( 30 | 0.0, 31 | ), 32 | }, 33 | }, 34 | ] 35 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-discarded-hash.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"timestamp\": \"2023-03-28T18:50:44.000000Z\",\n \"org_id\": 1,\n \"project_id\": 1,\n \"key_id\": 1,\n \"outcome\": 1,\n \"reason\": \"discarded-hash\",\n \"event_id\": \"4ff942d62f3f4d5db9f53b5a015b5fd9\",\n \"category\": 1,\n \"quantity\": 1\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [ 7 | { 8 | "category": 1, 9 | "event_id": "4ff942d6-2f3f-4d5d-b9f5-3b5a015b5fd9", 10 | "key_id": 1, 11 | "org_id": 1, 12 | "outcome": 1, 13 | "project_id": 1, 14 | "quantity": 1, 15 | "reason": "discarded-hash", 16 | "timestamp": 1680029444 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-lb.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"project_id\": 1,\n \"logging.googleapis.com/labels\": {\n \"host\": \"lb-6\"\n },\n \"org_id\": 0,\n 
\"outcome\": 4,\n \"timestamp\": \"2023-03-28T18:50:39.463685Z\"\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [ 7 | { 8 | "category": 1, 9 | "event_id": null, 10 | "key_id": null, 11 | "org_id": 0, 12 | "outcome": 4, 13 | "project_id": 1, 14 | "quantity": 1, 15 | "reason": null, 16 | "timestamp": 1680029439 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-null-values.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"timestamp\": \"2023-03-24T19:28:20.605851Z\",\n \"org_id\": 1,\n \"project_id\": 1,\n \"key_id\": null,\n \"outcome\": 0,\n \"reason\": null,\n \"event_id\": \"1410e6d2ea534dcb9d6d15a51c9962f8\",\n \"category\": 1,\n \"quantity\": 1\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [ 7 | { 8 | "category": 1, 9 | "event_id": "1410e6d2-ea53-4dcb-9d6d-15a51c9962f8", 10 | "key_id": null, 11 | "org_id": 1, 12 | "outcome": 0, 13 | "project_id": 1, 14 | "quantity": 1, 15 | "reason": null, 16 | "timestamp": 1679686100 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-pop-us.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"timestamp\": \"2023-03-28T22:51:00.000000Z\",\n \"project_id\": 1,\n \"outcome\": 3,\n \"reason\": \"project_id\",\n \"source\": \"pop-us\",\n \"category\": 1,\n \"quantity\": 1\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [ 7 | { 8 | "category": 1, 9 | "event_id": null, 10 | "key_id": null, 11 | "org_id": 0, 12 | "outcome": 3, 13 | "project_id": 1, 14 | "quantity": 1, 15 | 
"reason": "project_id", 16 | "timestamp": 1680043860 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes-relay-internal.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"timestamp\": \"2023-03-28T22:53:00.000000Z\",\n \"project_id\": 1,\n \"outcome\": 3,\n \"reason\": \"project_id\",\n \"source\": \"relay-internal\",\n \"category\": 1,\n \"quantity\": 1\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [ 7 | { 8 | "category": 1, 9 | "event_id": null, 10 | "key_id": null, 11 | "org_id": 0, 12 | "outcome": 3, 13 | "project_id": 1, 14 | "quantity": 1, 15 | "reason": "project_id", 16 | "timestamp": 1680043980 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@outcomes-OutcomesProcessor-outcomes__1__outcomes2-missing-key-id.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"org_id\": 1,\n \"outcome\": 4,\n \"project_id\": 1,\n \"quantity\": 3,\n \"timestamp\": \"2023-03-28T18:50:49.442341621Z\"\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [ 7 | { 8 | "category": 1, 9 | "event_id": null, 10 | "key_id": null, 11 | "org_id": 1, 12 | "outcome": 4, 13 | "project_id": 1, 14 | "quantity": 3, 15 | "reason": null, 16 | "timestamp": 1680029449 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-encoded-plain-array.json.snap: 
-------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65563,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"healthy\",\n \"65690\": \"metric_e2e_spans_dist_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"d\": { \"65560\": \"d:spans/duration@second\" },\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": { \"65691\": \"metric_e2e_spans_dist_k_VUW93LMS\" }\n },\n \"type\": \"d\",\n \"value\": { \"format\": \"array\", \"data\": [0.0, 1.1, 2.2, 3.3, 4.4, 5.5] }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-gauge.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"mapping_meta\": {\n \"c\": {\n \"1\": \"g:transactions/alerts@none\",\n \"3\": \"environment\",\n \"5\": \"session.status\"\n }\n },\n \"metric_id\": 1,\n \"org_id\": 1,\n \"project_id\": 3,\n \"retention_days\": 90,\n \"tags\": {\n \"3\": \"production\",\n \"5\": \"init\"\n },\n \"timestamp\": 1677512412,\n \"sentry_received_timestamp\": 1677519000.456,\n \"type\": \"g\",\n \"use_case_id\": \"transactions\",\n \"value\": {\n \"min\": 1.0,\n \"max\": 2.0,\n \"sum\": 3.0,\n \"count\": 2,\n \"last\": 1.0\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | 
-------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-set-encoded-plain-array.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"array\",\n \"data\": [0, 1, 2, 3, 4, 5]\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-base64.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n 
\"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"base64\",\n \"data\": \"AQAAAAcAAAA=\"\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericCountersMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-zstd.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"zstd\",\n \"data\": \"KLUv/QBYQQAAAQAAAAcAAAA\"\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-gauge.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | 
description: "{\n \"version\": 2,\n \"mapping_meta\": {\n \"c\": {\n \"1\": \"g:transactions/alerts@none\",\n \"3\": \"environment\",\n \"5\": \"session.status\"\n }\n },\n \"metric_id\": 1,\n \"org_id\": 1,\n \"project_id\": 3,\n \"retention_days\": 90,\n \"tags\": {\n \"3\": \"production\",\n \"5\": \"init\"\n },\n \"timestamp\": 1677512412,\n \"sentry_received_timestamp\": 1677519000.456,\n \"type\": \"g\",\n \"use_case_id\": \"transactions\",\n \"value\": {\n \"min\": 1.0,\n \"max\": 2.0,\n \"sum\": 3.0,\n \"count\": 2,\n \"last\": 1.0\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-set-encoded-plain-array.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"array\",\n \"data\": [0, 1, 2, 3, 4, 5]\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- 
/rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-base64.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"base64\",\n \"data\": \"AQAAAAcAAAA=\"\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-zstd.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n 
\"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"zstd\",\n \"data\": \"KLUv/QBYQQAAAQAAAAcAAAA\"\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericDistributionsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics1.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"mapping_meta\": {\n \"c\": {\n \"1\": \"c:sessions/session@none\",\n \"3\": \"environment\",\n \"5\": \"session.status\"\n }\n },\n \"metric_id\": 1,\n \"org_id\": 1,\n \"project_id\": 3,\n \"retention_days\": 90,\n \"tags\": {\n \"3\": \"production\",\n \"5\": \"init\"\n },\n \"timestamp\": 1677512412,\n \"sentry_received_timestamp\": 1677519000.456,\n \"type\": \"c\",\n \"use_case_id\": \"performance\",\n \"value\": 1\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-encoded-plain-array.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65563,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"healthy\",\n \"65690\": 
\"metric_e2e_spans_dist_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"d\": { \"65560\": \"d:spans/duration@second\" },\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": { \"65691\": \"metric_e2e_spans_dist_k_VUW93LMS\" }\n },\n \"type\": \"d\",\n \"value\": { \"format\": \"array\", \"data\": [0.0, 1.1, 2.2, 3.3, 4.4, 5.5] }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-set-encoded-plain-array.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"array\",\n \"data\": [0, 1, 2, 3, 4, 5]\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-base64.json.snap: 
-------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"base64\",\n \"data\": \"AQAAAAcAAAA=\"\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-sets-zstd.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65562,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"errored\",\n \"65690\": \"metric_e2e_spans_set_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n \"f\": {\n \"65690\": \"metric_e2e_spans_set_k_VUW93LMS\"\n },\n \"d\": {\n \"65562\": \"s:spans/error@none\"\n }\n },\n \"type\": \"s\",\n \"value\": {\n \"format\": \"zstd\",\n 
\"data\": \"KLUv/QBYQQAAAQAAAAcAAAA\"\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericGaugesMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics1.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"mapping_meta\": {\n \"c\": {\n \"1\": \"c:sessions/session@none\",\n \"3\": \"environment\",\n \"5\": \"session.status\"\n }\n },\n \"metric_id\": 1,\n \"org_id\": 1,\n \"project_id\": 3,\n \"retention_days\": 90,\n \"tags\": {\n \"3\": \"production\",\n \"5\": \"init\"\n },\n \"timestamp\": 1677512412,\n \"sentry_received_timestamp\": 1677519000.456,\n \"type\": \"c\",\n \"use_case_id\": \"performance\",\n \"value\": 1\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-dist-encoded-plain-array.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"use_case_id\": \"spans\",\n \"org_id\": 1,\n \"project_id\": 3,\n \"metric_id\": 65563,\n \"timestamp\": 1704614940,\n \"sentry_received_timestamp\": 1704614940,\n \"tags\": {\n \"9223372036854776010\": \"production\",\n \"9223372036854776017\": \"healthy\",\n \"65690\": \"metric_e2e_spans_dist_v_VUW93LMS\"\n },\n \"retention_days\": 90,\n \"mapping_meta\": {\n \"d\": { \"65560\": \"d:spans/duration@second\" },\n \"h\": {\n \"9223372036854776017\": \"session.status\",\n \"9223372036854776010\": \"environment\"\n },\n 
\"f\": { \"65691\": \"metric_e2e_spans_dist_k_VUW93LMS\" }\n },\n \"type\": \"d\",\n \"value\": { \"format\": \"array\", \"data\": [0.0, 1.1, 2.2, 3.3, 4.4, 5.5] }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics-gauge.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"mapping_meta\": {\n \"c\": {\n \"1\": \"g:transactions/alerts@none\",\n \"3\": \"environment\",\n \"5\": \"session.status\"\n }\n },\n \"metric_id\": 1,\n \"org_id\": 1,\n \"project_id\": 3,\n \"retention_days\": 90,\n \"tags\": {\n \"3\": \"production\",\n \"5\": \"init\"\n },\n \"timestamp\": 1677512412,\n \"sentry_received_timestamp\": 1677519000.456,\n \"type\": \"g\",\n \"use_case_id\": \"transactions\",\n \"value\": {\n \"min\": 1.0,\n \"max\": 2.0,\n \"sum\": 3.0,\n \"count\": 2,\n \"last\": 1.0\n }\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-generic-metrics-GenericSetsMetricsProcessor-snuba-generic-metrics__1__snuba-generic-metrics1.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"version\": 2,\n \"mapping_meta\": {\n \"c\": {\n \"1\": \"c:sessions/session@none\",\n \"3\": \"environment\",\n \"5\": \"session.status\"\n }\n },\n \"metric_id\": 1,\n \"org_id\": 1,\n \"project_id\": 3,\n \"retention_days\": 90,\n \"tags\": {\n \"3\": \"production\",\n \"5\": \"init\"\n },\n \"timestamp\": 1677512412,\n \"sentry_received_timestamp\": 
1677519000.456,\n \"type\": \"c\",\n \"use_case_id\": \"performance\",\n \"value\": 1\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [] 7 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-items-EAPItemsProcessor-snuba-items__1__basic.protobuf.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | expression: snapshot_payload 4 | --- 5 | [ 6 | { 7 | "attributes_bool": { 8 | "some_bool": true 9 | }, 10 | "attributes_float_3": { 11 | "another_attribute": 1238.0 12 | }, 13 | "attributes_float_6": { 14 | "some_bool": 1.0 15 | }, 16 | "attributes_float_8": { 17 | "with a float": 123.123 18 | }, 19 | "attributes_int": { 20 | "another_attribute": 1238 21 | }, 22 | "attributes_string_32": { 23 | "one_attribute": "blah" 24 | }, 25 | "item_id": 140754042156678400000000000000000000000.0, 26 | "item_type": 1, 27 | "organization_id": 1, 28 | "project_id": 1, 29 | "retention_days": 90, 30 | "sampling_factor": 1.0, 31 | "sampling_weight": 1, 32 | "timestamp": 1744063104, 33 | "trace_id": "e6453c8a-11e8-4ffc-a6f7-827c30b550d4" 34 | } 35 | ] 36 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__schemas@snuba-profile-chunks-ProfileChunksProcessor-snuba-profile-chunks__1__valid.json.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | description: "{\n \"project_id\": 1,\n \"profiler_id\": \"4d229f1d3807421ba62a5f8bc295d836\",\n \"chunk_id\": \"0432a0a4c25f4697bf9f0a2fcbe6a814\",\n \"start_timestamp\": 1710805688.1234567,\n \"end_timestamp\": 1710805689.1234567,\n \"received\": 1694357860,\n \"retention_days\": 30\n}\n" 4 | expression: snapshot_payload 5 | --- 6 | [ 7 | { 8 | "chunk_id": 
"0432a0a4-c25f-4697-bf9f-0a2fcbe6a814", 9 | "end_timestamp": 1710805689123456, 10 | "environment": null, 11 | "offset": 1, 12 | "partition": 0, 13 | "profiler_id": "4d229f1d-3807-421b-a62a-5f8bc295d836", 14 | "project_id": 1, 15 | "retention_days": 30, 16 | "start_timestamp": 1710805688123456 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__snuba-metrics-summaries-.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | expression: diff 4 | --- 5 | [ 6 | Change { 7 | path: "", 8 | change: RequiredAdd { 9 | property: "count", 10 | }, 11 | }, 12 | ] 13 | -------------------------------------------------------------------------------- /rust_snuba/src/processors/snapshots/rust_snuba__processors__tests__snuba-profile-chunks-.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: src/processors/mod.rs 3 | expression: diff 4 | --- 5 | [ 6 | Change { 7 | path: "", 8 | change: PropertyAdd { 9 | lhs_additional_properties: true, 10 | added: "environment", 11 | }, 12 | }, 13 | ] 14 | -------------------------------------------------------------------------------- /rust_snuba/src/strategies/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod accountant; 2 | pub mod clickhouse; 3 | pub mod commit_log; 4 | pub mod healthcheck; 5 | pub mod join_timeout; 6 | pub mod noop; 7 | pub mod processor; 8 | pub mod python; 9 | pub mod replacements; 10 | -------------------------------------------------------------------------------- /rust_snuba/src/strategies/noop.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use sentry_arroyo::processing::strategies::{ 4 | CommitRequest, ProcessingStrategy, StrategyError, SubmitError, 5 | 
}; 6 | use sentry_arroyo::types::Message; 7 | 8 | pub struct Noop; 9 | 10 | impl ProcessingStrategy for Noop { 11 | fn poll(&mut self) -> Result, StrategyError> { 12 | Ok(None) 13 | } 14 | 15 | fn submit(&mut self, _message: Message) -> Result<(), SubmitError> { 16 | Ok(()) 17 | } 18 | 19 | fn terminate(&mut self) {} 20 | 21 | fn join(&mut self, _timeout: Option) -> Result, StrategyError> { 22 | Ok(None) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/scripts/__init__.py -------------------------------------------------------------------------------- /scripts/bump-version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eu 3 | 4 | SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 5 | cd $SCRIPT_DIR/.. 6 | 7 | OLD_VERSION="$1" 8 | NEW_VERSION="$2" 9 | 10 | sed -i -e "s/^VERSION = "'".*"'"\$/VERSION = "'"'"$NEW_VERSION"'"'"/" setup.py 11 | sed -i -e "s/^release = "'".*"'"\$/release = "'"'"$NEW_VERSION"'"'"/" ./docs/source/conf.py 12 | 13 | echo "New version: $NEW_VERSION" 14 | -------------------------------------------------------------------------------- /scripts/lib.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Module containing code shared across various shell scripts 3 | # Execute functions from this module via the script do.sh 4 | 5 | # Check if a command is available 6 | require() { 7 | command -v "$1" >/dev/null 2>&1 8 | } 9 | 10 | query_big_sur() { 11 | if require sw_vers && sw_vers -productVersion | grep -E "11\." 
>/dev/null; then 12 | return 0 13 | fi 14 | return 1 15 | } 16 | -------------------------------------------------------------------------------- /scripts/post-release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eu 3 | 4 | git checkout master && git pull 5 | ./scripts/bump-version.sh '' $(date -d "$(echo $CRAFT_NEW_VERSION | sed -e 's/^\([0-9]\{2\}\)\.\([0-9]\{1,2\}\)\.[0-9]\+$/20\1-\2-1/') 1 month" +%y.%-m.0.dev0) 6 | git diff --quiet || git commit -anm 'meta: Bump new development version' && git pull --rebase && git push 7 | -------------------------------------------------------------------------------- /scripts/rust-dummy-build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -ex 3 | 4 | cd ./rust_snuba/ 5 | mkdir -p ./target/wheels/ 6 | for f in ./src/lib.rs ./benches/processors.rs; do 7 | mkdir -p "`dirname $f`" 8 | touch "$f" 9 | done 10 | -------------------------------------------------------------------------------- /scripts/rust-envvars: -------------------------------------------------------------------------------- 1 | # Related thread: https://github.com/PyO3/pyo3/issues/1741 2 | 3 | # This is required for the tests in python.rs 4 | export SNUBA_TEST_PYTHONPATH="$(python -c 'import sys; print(":".join(sys.path))')" 5 | export SNUBA_TEST_PYTHONEXECUTABLE="$(python -c 'import sys; print(sys.executable)')" 6 | 7 | # load cargo envvars explicitly in case user forgot 8 | . 
"${CARGO_HOME:-$HOME/.cargo}/env" 9 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | python_files = test*.py 3 | addopts = --tb=native -p no:doctest -p no:warnings 4 | norecursedirs = bin dist docs htmlcov script hooks node_modules .* {args} 5 | looponfailroots = snuba tests 6 | markers = 7 | clickhouse_db: Use clickhouse 8 | redis_db: Use redis 9 | ci_only: Only run in CI 10 | 11 | [flake8] 12 | # tests/state/test_state.py:19:36: E712 comparison to True should be 'if cond is True:' or 'if cond:' 13 | ignore = E203,E266,E501,W503,W504,W605,E402,E302,E712 14 | max-line-length = 100 15 | select = B,E,F,W,T4,B9 16 | exclude = .git 17 | 18 | [bdist_wheel] 19 | python-tag = py37 20 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from typing import Sequence 2 | 3 | from setuptools import find_packages, setup 4 | 5 | VERSION = "25.6.0.dev0" 6 | 7 | 8 | def get_requirements() -> Sequence[str]: 9 | with open("requirements.txt") as fp: 10 | return [ 11 | x.strip() for x in fp.read().split("\n") if not x.startswith(("#", "--")) 12 | ] 13 | 14 | 15 | setup( 16 | name="snuba", 17 | version=VERSION, 18 | packages=find_packages(exclude=["tests"]), 19 | zip_safe=False, 20 | include_package_data=True, 21 | install_requires=get_requirements(), 22 | entry_points={"console_scripts": ["snuba=snuba.cli:main"]}, 23 | ) 24 | -------------------------------------------------------------------------------- /snuba/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/__init__.py -------------------------------------------------------------------------------- /snuba/__main__.py: 
-------------------------------------------------------------------------------- 1 | if __name__ == "__main__": 2 | from snuba.cli import main 3 | 4 | main() 5 | -------------------------------------------------------------------------------- /snuba/admin/README.md: -------------------------------------------------------------------------------- 1 | # Running 2 | You must have node and yarn installed. To do so: 3 | ``` 4 | volta install node 5 | volta install yarn 6 | make build-admin 7 | ``` 8 | 9 | To run the app locally: 10 | ```bash 11 | # assuming you're in the venv 12 | snuba admin 13 | ``` 14 | 15 | The server should be running on http://127.0.0.1:1219 16 | 17 | note: please ensure that sentry devservices are up via `devservices up --exclude=snuba` from within the sentry repository 18 | 19 | # Developing the Javascript 20 | 21 | To start the yarn debug server and live reload your javascript changes. 22 | ``` 23 | make watch-admin 24 | ``` 25 | 26 | If you change environment variables you'll have to restart the server 27 | 28 | # Releasing new javascript 29 | 30 | The admin tool is automatically built as part of our normal CI flow. 
31 | -------------------------------------------------------------------------------- /snuba/admin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/__init__.py -------------------------------------------------------------------------------- /snuba/admin/audit_log/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/audit_log/__init__.py -------------------------------------------------------------------------------- /snuba/admin/build.mjs: -------------------------------------------------------------------------------- 1 | import * as esbuild from 'esbuild'; 2 | import { sentryEsbuildPlugin } from '@sentry/esbuild-plugin'; 3 | 4 | await esbuild.build({ 5 | entryPoints: ['./static/index.tsx'], 6 | bundle: true, 7 | outfile: 'dist/bundle.js', 8 | sourcemap: true, // Source map generation must be turned on 9 | plugins: [ 10 | // Put the Sentry esbuild plugin after all other plugins 11 | sentryEsbuildPlugin({ 12 | org: process.env.SENTRY_ORGANIZATION, 13 | project: process.env.SENTRY_PROJECT, 14 | 15 | // Auth tokens can be obtained from https://sentry.io/orgredirect/organizations/:orgslug/settings/auth-tokens/ 16 | authToken: process.env.SENTRY_AUTH_TOKEN, 17 | telemetry: false, 18 | }), 19 | ], 20 | }); 21 | -------------------------------------------------------------------------------- /snuba/admin/cardinality_analyzer/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/cardinality_analyzer/__init__.py -------------------------------------------------------------------------------- /snuba/admin/clickhouse/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/clickhouse/__init__.py -------------------------------------------------------------------------------- /snuba/admin/clickhouse/capacity_management.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from snuba import settings 4 | from snuba.clusters.storage_sets import DEV_STORAGE_SETS 5 | from snuba.datasets.storages.factory import get_all_storage_keys, get_storage 6 | from snuba.query.allocation_policies import PassthroughPolicy 7 | 8 | 9 | def get_storages_with_allocation_policies() -> list[str]: 10 | 11 | storages = [ 12 | storage 13 | for storage_key in sorted( 14 | get_all_storage_keys(), key=lambda storage_key: storage_key.value 15 | ) 16 | if (storage := get_storage(storage_key)).get_storage_set_key() 17 | not in DEV_STORAGE_SETS 18 | or settings.ENABLE_DEV_FEATURES 19 | ] 20 | 21 | return [ 22 | storage.get_storage_key().value 23 | for storage in storages 24 | if not isinstance(storage.get_allocation_policies()[0], PassthroughPolicy) 25 | ] 26 | -------------------------------------------------------------------------------- /snuba/admin/jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest').JestConfigWithTsJest} */ 2 | module.exports = { 3 | preset: 'ts-jest', 4 | testEnvironment: 'jsdom', 5 | moduleNameMapper: { 6 | "^SnubaAdmin/(.*)$": "/static/$1" 7 | } 8 | }; 9 | -------------------------------------------------------------------------------- /snuba/admin/kafka/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/kafka/__init__.py 
-------------------------------------------------------------------------------- /snuba/admin/kafka/topics.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Any, Sequence 3 | 4 | from confluent_kafka.admin import AdminClient 5 | 6 | from snuba.utils.streams.configuration_builder import get_default_kafka_configuration 7 | from snuba.utils.streams.topics import Topic 8 | 9 | 10 | def get_broker_data() -> Sequence[Any]: 11 | data = [] 12 | 13 | broker_configs = [get_default_kafka_configuration(topic=topic) for topic in Topic] 14 | seen_broker_configs = set() 15 | 16 | for broker_config in broker_configs: 17 | broker_config_str = str(broker_config) 18 | if broker_config_str in seen_broker_configs: 19 | continue 20 | seen_broker_configs.add(broker_config_str) 21 | client = AdminClient(broker_config) 22 | data.append(json.dumps(list(client.list_topics().topics.values()), default=str)) 23 | return data 24 | -------------------------------------------------------------------------------- /snuba/admin/notifications/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/notifications/__init__.py -------------------------------------------------------------------------------- /snuba/admin/notifications/slack/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/notifications/slack/__init__.py -------------------------------------------------------------------------------- /snuba/admin/production_queries/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/production_queries/__init__.py -------------------------------------------------------------------------------- /snuba/admin/rpc/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/admin/rpc/__init__.py -------------------------------------------------------------------------------- /snuba/admin/runtime_config/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, TypedDict, Union 2 | 3 | ConfigType = Union[str, int, float] 4 | 5 | ConfigChange = TypedDict( 6 | "ConfigChange", 7 | { 8 | "key": str, 9 | "timestamp": float, 10 | "user": Optional[str], 11 | "before": Optional[str], 12 | "beforeType": Optional[str], 13 | "after": Optional[str], 14 | "afterType": Optional[str], 15 | }, 16 | ) 17 | 18 | 19 | def get_config_type_from_value( 20 | value: Optional[Union[str, int, float]] 21 | ) -> Optional[str]: 22 | if value is None: 23 | return None 24 | 25 | if isinstance(value, str): 26 | return "string" 27 | if isinstance(value, int): 28 | return "int" 29 | if isinstance(value, float): 30 | return "float" 31 | raise ValueError("Unexpected config type") 32 | -------------------------------------------------------------------------------- /snuba/admin/static/auto_replacements_bypass_projects/types.tsx: -------------------------------------------------------------------------------- 1 | import { ReactNode } from "react"; 2 | 3 | type AutoReplacementsBypassProjectsData = { 4 | projectID: number; 5 | expiry: string; 6 | }; 7 | 8 | export { AutoReplacementsBypassProjectsData }; 9 | -------------------------------------------------------------------------------- /snuba/admin/static/body.tsx: 
-------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | import { NAV_ITEMS } from "SnubaAdmin/data"; 4 | import Client from "SnubaAdmin/api_client"; 5 | 6 | type Props = { 7 | active: string; 8 | api: Client; 9 | }; 10 | 11 | function Body(props: Props) { 12 | const { active, ...rest } = props; 13 | const activeItem = NAV_ITEMS.find((item) => item.id === active)!; 14 | 15 | return ( 16 | 17 | {activeItem.display} 18 | 19 | 20 | ); 21 | } 22 | 23 | const bodyStyle = { 24 | width: "100%", 25 | maxWidth: "calc(100% - 290px)", 26 | margin: 20, 27 | fontSize: 20, 28 | }; 29 | 30 | export default Body; 31 | -------------------------------------------------------------------------------- /snuba/admin/static/capacity_management/styles.tsx: -------------------------------------------------------------------------------- 1 | import { COLORS } from "SnubaAdmin/theme"; 2 | 3 | const selectStyle = { 4 | marginRight: 8, 5 | height: 30, 6 | marginLeft: 8, 7 | }; 8 | 9 | const containerStyle = { 10 | width: 1200, 11 | maxWidth: "100%", 12 | }; 13 | 14 | const linkStyle = { 15 | cursor: "pointer", 16 | fontSize: 13, 17 | color: COLORS.TEXT_LIGHTER, 18 | textDecoration: "underline", 19 | }; 20 | 21 | const paragraphStyle = { 22 | fontSize: 15, 23 | color: COLORS.TEXT_LIGHTER, 24 | }; 25 | 26 | export { containerStyle, linkStyle, paragraphStyle, selectStyle }; 27 | -------------------------------------------------------------------------------- /snuba/admin/static/cardinality_analyzer/CSV.ts: -------------------------------------------------------------------------------- 1 | export class CSV { 2 | static sheet(rows: Array>) { 3 | return rows.map(CSV.row).join("\n"); 4 | } 5 | 6 | static row(values: unknown[]): string { 7 | return values.map(CSV.cell).join(","); 8 | } 9 | 10 | static cell(value: unknown): string { 11 | if (!value) return ""; 12 | 13 | if (typeof value === "string") { 14 | let sanitizedValue: string = ""; 
15 | sanitizedValue = value.replace(/"/g, '""'); 16 | 17 | if (value.includes(",")) { 18 | return `"${sanitizedValue}"`; 19 | } 20 | 21 | return sanitizedValue; 22 | } 23 | 24 | return value.toString(); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /snuba/admin/static/cardinality_analyzer/types.tsx: -------------------------------------------------------------------------------- 1 | type QueryResultColumnMetadata = [string]; 2 | type QueryResultRow = [string]; 3 | 4 | type CardinalityQueryRequest= { 5 | sql: string; 6 | }; 7 | 8 | type CardinalityQueryResult = { 9 | input_query: string; 10 | timestamp: number; 11 | column_names: QueryResultColumnMetadata; 12 | rows: [QueryResultRow]; 13 | error?: string; 14 | }; 15 | 16 | type PredefinedQuery = { 17 | name: string; 18 | sql: string; 19 | description: string; 20 | }; 21 | 22 | export { CardinalityQueryRequest, CardinalityQueryResult, PredefinedQuery }; 23 | -------------------------------------------------------------------------------- /snuba/admin/static/clickhouse_migrations/types.tsx: -------------------------------------------------------------------------------- 1 | export type MigrationGroupResult = { 2 | group: string; 3 | migration_ids: MigrationData[]; 4 | }; 5 | 6 | export type MigrationData = { 7 | can_run: boolean; 8 | can_reverse: boolean; 9 | run_reason: string; 10 | reverse_reason: string; 11 | blocking: boolean; 12 | status: string; 13 | migration_id: string; 14 | }; 15 | 16 | export type GroupOptions = { 17 | [key: string]: MigrationGroupResult; 18 | } 19 | 20 | export enum Action { 21 | Run = "run", 22 | Reverse = "reverse", 23 | } 24 | 25 | export type RunMigrationRequest = { 26 | group: string; 27 | action: Action; 28 | migration_id: string; 29 | force?: boolean; 30 | fake?: boolean; 31 | dry_run?: boolean; 32 | }; 33 | 34 | export type RunMigrationResult = { 35 | stdout: string; 36 | error?: string; 37 | }; 38 | 
-------------------------------------------------------------------------------- /snuba/admin/static/clickhouse_queries/types.tsx: -------------------------------------------------------------------------------- 1 | type ClickhouseNode = { 2 | host: string; 3 | port: number; 4 | }; 5 | 6 | type ClickhouseNodeData = { 7 | storage_name: string; 8 | local_table_name: string; 9 | local_nodes: ClickhouseNode[]; 10 | dist_nodes: ClickhouseNode[]; 11 | query_node: ClickhouseNode; 12 | }; 13 | 14 | type QueryRequest = { 15 | storage: string; 16 | host: string; 17 | port: number; 18 | sql: string; 19 | sudo: boolean; 20 | }; 21 | 22 | type QueryResultColumnMetadata = [string]; 23 | type QueryResultRow = [string]; 24 | 25 | type QueryResult = { 26 | input_query: string; 27 | timestamp: number; 28 | column_names: QueryResultColumnMetadata; 29 | rows: [QueryResultRow]; 30 | trace_output?: string; 31 | error?: string; 32 | }; 33 | 34 | type PredefinedQuery = { 35 | name: string; 36 | sql: string; 37 | description: string; 38 | }; 39 | 40 | export { ClickhouseNodeData, QueryRequest, QueryResult, PredefinedQuery }; 41 | -------------------------------------------------------------------------------- /snuba/admin/static/common/components/__mocks__/sql_editor.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | import { Props } from "../sql_editor"; 4 | 5 | export function SQLEditor({ value, onChange }: Props) { 6 | return ( 7 | onChange(event.target.value)} 11 | > 12 | ); 13 | } 14 | -------------------------------------------------------------------------------- /snuba/admin/static/database_clusters/types.tsx: -------------------------------------------------------------------------------- 1 | type ClickhouseNodeInfo = { 2 | cluster: string, 3 | host_name: string, 4 | host_address: string, 5 | port: number, 6 | shard: number, 7 | replica: number, 8 | version: string, 9 | storage_name: string, 10 | 
is_distributed: boolean, 11 | }; 12 | 13 | type ClickhouseSystemSetting = { 14 | name: string, 15 | value: string, 16 | default: string, 17 | changed: number, 18 | description: string, 19 | type: string, 20 | }; 21 | 22 | export { ClickhouseNodeInfo, ClickhouseSystemSetting }; 23 | -------------------------------------------------------------------------------- /snuba/admin/static/dead_letter_queue/types.tsx: -------------------------------------------------------------------------------- 1 | type Topic = { 2 | logicalName: string; 3 | physicalName: string; 4 | slice: number | null; 5 | storage: string; 6 | }; 7 | 8 | type Policy = "reinsert-dlq" | "stop-on-error" | "drop-invalid-messages"; 9 | 10 | type ReplayInstruction = { 11 | messagesToProcess: number; 12 | policy: Policy; 13 | }; 14 | 15 | export { Policy, Topic, ReplayInstruction }; 16 | -------------------------------------------------------------------------------- /snuba/admin/static/kafka/index.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect } from "react"; 2 | import { KafkaTopicData } from "SnubaAdmin/kafka/types"; 3 | 4 | import Client from "SnubaAdmin/api_client"; 5 | 6 | function TopicData(props: { api: Client }) { 7 | const [data, setData] = useState(null); 8 | 9 | useEffect(() => { 10 | props.api.getKafkaData().then((res) => { 11 | setData(res); 12 | }); 13 | }, []); 14 | 15 | return {JSON.stringify(data)}; 16 | } 17 | 18 | export default TopicData; 19 | -------------------------------------------------------------------------------- /snuba/admin/static/kafka/types.tsx: -------------------------------------------------------------------------------- 1 | export type KafkaTopicData = any; 2 | -------------------------------------------------------------------------------- /snuba/admin/static/manual_jobs/types.tsx: -------------------------------------------------------------------------------- 1 | type JobSpec = { 2 | 
job_id: string; 3 | job_type: string; 4 | params: { [key: string]: string }; 5 | }; 6 | 7 | type JobSpecMap = { 8 | [key: string]: { 9 | spec: JobSpec; 10 | status: string; 11 | }; 12 | }; 13 | -------------------------------------------------------------------------------- /snuba/admin/static/mql_queries/styles.tsx: -------------------------------------------------------------------------------- 1 | const executeActionsStyle = { 2 | display: "flex", 3 | justifyContent: "space-between", 4 | marginTop: 8, 5 | }; 6 | 7 | const spacing = { 8 | marginTop: 8, 9 | }; 10 | 11 | const executeButtonStyle = { 12 | height: 30, 13 | border: 0, 14 | padding: "4px 20px", 15 | }; 16 | 17 | const selectStyle = { 18 | marginRight: 8, 19 | height: 30, 20 | }; 21 | 22 | let collapsibleStyle = { listStyleType: "none", fontFamily: "Monaco" }; 23 | 24 | export { 25 | executeActionsStyle, 26 | executeButtonStyle, 27 | selectStyle, 28 | collapsibleStyle, 29 | spacing, 30 | }; 31 | -------------------------------------------------------------------------------- /snuba/admin/static/mql_queries/types.tsx: -------------------------------------------------------------------------------- 1 | type MQLRequest = { 2 | dataset: string; 3 | query: string; 4 | mql_context: object; 5 | }; 6 | 7 | type QueryResult = { 8 | input_query?: string; 9 | input_mql_context?: object; 10 | columns: [string]; 11 | rows: [[string]]; 12 | duration_ms: number; 13 | quota_allowance?: QuotaAllowance; 14 | }; 15 | 16 | type QuotaAllowancePolicy = { 17 | can_run: boolean; 18 | max_threads: number; 19 | explanation: { 20 | reason?: string; 21 | overrides?: Record; 22 | storage_key?: string; 23 | policy?: string; 24 | referrer?: string; 25 | }; 26 | }; 27 | 28 | type QuotaAllowance = { 29 | [policy: string]: QuotaAllowancePolicy; 30 | }; 31 | 32 | type QueryResultColumnMeta = { 33 | name: string; 34 | type: string; 35 | }; 36 | 37 | export { MQLRequest, QueryResult, QueryResultColumnMeta }; 38 | 
-------------------------------------------------------------------------------- /snuba/admin/static/production_queries/styles.tsx: -------------------------------------------------------------------------------- 1 | const executeActionsStyle = { 2 | display: "flex", 3 | justifyContent: "space-between", 4 | marginTop: 8, 5 | }; 6 | 7 | const executeButtonStyle = { 8 | height: 30, 9 | border: 0, 10 | padding: "4px 20px", 11 | }; 12 | 13 | const selectStyle = { 14 | marginRight: 8, 15 | height: 30, 16 | }; 17 | 18 | let collapsibleStyle = { listStyleType: "none", fontFamily: "Monaco" }; 19 | 20 | export { 21 | executeActionsStyle, 22 | executeButtonStyle, 23 | selectStyle, 24 | collapsibleStyle, 25 | }; 26 | -------------------------------------------------------------------------------- /snuba/admin/static/production_queries/types.tsx: -------------------------------------------------------------------------------- 1 | type SnQLRequest = { 2 | dataset: string; 3 | query: string; 4 | }; 5 | 6 | type QueryResult = { 7 | input_query?: string; 8 | columns: [string]; 9 | rows: [[string]]; 10 | duration_ms: number; 11 | quota_allowance?: QuotaAllowance; 12 | }; 13 | 14 | type QuotaAllowancePolicy = { 15 | can_run: boolean; 16 | max_threads: number; 17 | explanation: { 18 | reason?: string; 19 | overrides?: Record; 20 | storage_key?: string; 21 | policy?: string; 22 | referrer?: string; 23 | }; 24 | }; 25 | 26 | type QuotaAllowance = { 27 | [policy: string]: QuotaAllowancePolicy; 28 | }; 29 | 30 | type QueryResultColumnMeta = { 31 | name: string; 32 | type: string; 33 | }; 34 | 35 | export { SnQLRequest, QueryResult, QueryResultColumnMeta }; 36 | -------------------------------------------------------------------------------- /snuba/admin/static/querylog/types.tsx: -------------------------------------------------------------------------------- 1 | type QueryResultColumnMetadata = [string]; 2 | type QueryResultRow = [string]; 3 | 4 | type QuerylogRequest = { 5 | sql: 
string; 6 | }; 7 | 8 | type QuerylogResult = { 9 | input_query: string; 10 | timestamp: number; 11 | column_names: QueryResultColumnMetadata; 12 | rows: [QueryResultRow]; 13 | error?: string; 14 | }; 15 | 16 | type PredefinedQuery = { 17 | name: string; 18 | sql: string; 19 | description: string; 20 | }; 21 | 22 | export { QuerylogRequest, QuerylogResult, PredefinedQuery }; 23 | -------------------------------------------------------------------------------- /snuba/admin/static/rpc_endpoints/endpoint_selector.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Select } from '@mantine/core'; 3 | import { EndpointSelectorProps } from 'SnubaAdmin/rpc_endpoints/types'; 4 | 5 | export const EndpointSelector = ({ 6 | endpoints, 7 | selectedEndpoint, 8 | selectedVersion, 9 | handleEndpointSelect 10 | }: EndpointSelectorProps) => ( 11 | <> 12 | RPC Endpoints 13 | ({ 17 | value: `${endpoint.name}_${endpoint.version}`, 18 | label: `${endpoint.name} (${endpoint.version})` 19 | }))} 20 | value={selectedEndpoint + "_" + selectedVersion} 21 | onChange={handleEndpointSelect} 22 | style={{ width: '100%', marginBottom: '1rem' }} 23 | /> 24 | > 25 | ); 26 | -------------------------------------------------------------------------------- /snuba/admin/static/rpc_endpoints/metadata_table.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Table, Code } from '@mantine/core'; 3 | import { MetadataTableProps } from 'SnubaAdmin/rpc_endpoints/types'; 4 | 5 | export const MetadataTable = ({ queryInfo, classes }: MetadataTableProps) => ( 6 | 7 | 8 | 9 | Attribute 10 | Value 11 | 12 | 13 | 14 | {Object.entries({ ...queryInfo.stats, ...queryInfo.metadata }).map(([key, value]) => ( 15 | 16 | {key} 17 | 18 | {typeof value === 'object' ? 
( 19 | {JSON.stringify(value, null, 2)} 20 | ) : ( 21 | String(value) 22 | )} 23 | 24 | 25 | ))} 26 | 27 | 28 | ); 29 | -------------------------------------------------------------------------------- /snuba/admin/static/runtime_config/styles.tsx: -------------------------------------------------------------------------------- 1 | import { COLORS } from "SnubaAdmin/theme"; 2 | 3 | const containerStyle = { 4 | width: 1200, 5 | maxWidth: "100%", 6 | }; 7 | 8 | const linkStyle = { 9 | cursor: "pointer", 10 | fontSize: 13, 11 | color: COLORS.TEXT_LIGHTER, 12 | textDecoration: "underline", 13 | }; 14 | 15 | const paragraphStyle = { 16 | fontSize: 15, 17 | color: COLORS.TEXT_LIGHTER, 18 | }; 19 | 20 | export { containerStyle, linkStyle, paragraphStyle }; 21 | -------------------------------------------------------------------------------- /snuba/admin/static/runtime_config/types.tsx: -------------------------------------------------------------------------------- 1 | import { ReactNode } from "react"; 2 | 3 | type ConfigKey = string; 4 | type ConfigValue = string; 5 | type ConfigDescription = string; 6 | type ConfigType = "string" | "int" | "float"; 7 | type ConfigDescriptions = { [key: string]: string }; 8 | 9 | type Config = { 10 | key: ConfigKey; 11 | value: ConfigValue; 12 | description: ConfigDescription; 13 | type: ConfigType; 14 | }; 15 | 16 | type RowData = [ReactNode, ReactNode, ReactNode, ReactNode, ReactNode]; 17 | 18 | type ConfigChange = { 19 | key: ConfigKey; 20 | user: string | null; 21 | timestamp: number; 22 | before: ConfigValue | null; 23 | beforeType: ConfigType | null; 24 | after: ConfigValue | null; 25 | afterType: ConfigType | null; 26 | }; 27 | 28 | export { 29 | Config, 30 | ConfigKey, 31 | ConfigValue, 32 | ConfigDescription, 33 | ConfigDescriptions, 34 | ConfigType, 35 | RowData, 36 | ConfigChange, 37 | }; 38 | -------------------------------------------------------------------------------- /snuba/admin/static/snql_to_sql/styles.tsx: 
-------------------------------------------------------------------------------- 1 | const executeActionsStyle = { 2 | display: "flex", 3 | justifyContent: "space-between", 4 | marginTop: 8, 5 | }; 6 | 7 | const executeButtonStyle = { 8 | height: 30, 9 | border: 0, 10 | padding: "4px 20px", 11 | }; 12 | 13 | const selectStyle = { 14 | marginRight: 8, 15 | height: 30, 16 | }; 17 | 18 | let collapsibleStyle = { listStyleType: "none", fontFamily: "Monaco" }; 19 | 20 | export { 21 | executeActionsStyle, 22 | executeButtonStyle, 23 | selectStyle, 24 | collapsibleStyle, 25 | }; 26 | -------------------------------------------------------------------------------- /snuba/admin/static/snql_to_sql/types.tsx: -------------------------------------------------------------------------------- 1 | type SnubaDatasetName = string; 2 | type SnQLQueryState = Partial; 3 | 4 | type SnQLRequest = { 5 | dataset: string; 6 | query: string; 7 | }; 8 | 9 | type SnQLResult = { 10 | input_query?: string; 11 | sql: string; 12 | explain?: object; 13 | }; 14 | 15 | export { SnubaDatasetName, SnQLRequest, SnQLResult, SnQLQueryState }; 16 | -------------------------------------------------------------------------------- /snuba/admin/static/snql_to_sql/utils.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | function TextArea(props: { 4 | value: string; 5 | onChange: (nextValue: string) => void; 6 | }) { 7 | const { value, onChange } = props; 8 | return ( 9 | onChange(evt.target.value)} 13 | style={{ width: "100%", height: 100 }} 14 | placeholder={"Write your query here"} 15 | /> 16 | ); 17 | } 18 | 19 | function copyText(text: string) { 20 | window.navigator.clipboard.writeText(text); 21 | } 22 | 23 | export { TextArea, copyText }; 24 | -------------------------------------------------------------------------------- /snuba/admin/static/snuba_explain/styles.tsx: 
-------------------------------------------------------------------------------- 1 | const executeActionsStyle = { 2 | display: "flex", 3 | justifyContent: "space-between", 4 | marginTop: 8, 5 | }; 6 | 7 | const executeButtonStyle = { 8 | height: 30, 9 | border: 0, 10 | padding: "4px 20px", 11 | }; 12 | 13 | const selectStyle = { 14 | marginRight: 8, 15 | height: 30, 16 | }; 17 | 18 | let collapsibleStyle = { listStyleType: "none", fontFamily: "Monaco" }; 19 | 20 | const nonCollapsibleStyle = { 21 | fontSize: 16, 22 | lineHeight: 1, 23 | height: 20, 24 | display: "flex", 25 | }; 26 | 27 | export { 28 | executeActionsStyle, 29 | executeButtonStyle, 30 | selectStyle, 31 | collapsibleStyle, 32 | nonCollapsibleStyle, 33 | }; 34 | -------------------------------------------------------------------------------- /snuba/admin/static/snuba_explain/types.tsx: -------------------------------------------------------------------------------- 1 | type SnubaDatasetName = string; 2 | type SnQLQueryState = Partial; 3 | 4 | type SnQLRequest = { 5 | dataset: string; 6 | query: string; 7 | }; 8 | 9 | type QueryTransformData = { 10 | original: string; 11 | transformed: string; 12 | diff: string[]; 13 | }; 14 | 15 | type ExplainStep = { 16 | category: string; 17 | type: string; 18 | name: string; 19 | data: object; 20 | }; 21 | 22 | type ExplainResult = { 23 | original_ast: string; 24 | steps: ExplainStep[]; 25 | }; 26 | 27 | type SnQLResult = { 28 | input_query?: string; 29 | sql: string; 30 | explain?: ExplainResult; 31 | }; 32 | 33 | export { 34 | SnubaDatasetName, 35 | SnQLRequest, 36 | SnQLResult, 37 | SnQLQueryState, 38 | ExplainResult, 39 | ExplainStep, 40 | QueryTransformData, 41 | }; 42 | -------------------------------------------------------------------------------- /snuba/admin/static/snuba_explain/utils.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | function TextArea(props: { 4 | value: string; 5 
| onChange: (nextValue: string) => void; 6 | }) { 7 | const { value, onChange } = props; 8 | return ( 9 | onChange(evt.target.value)} 13 | style={{ width: "100%", height: 100 }} 14 | placeholder={"Write your query here"} 15 | /> 16 | ); 17 | } 18 | 19 | function copyText(text: string) { 20 | window.navigator.clipboard.writeText(text); 21 | } 22 | 23 | export { TextArea, copyText }; 24 | -------------------------------------------------------------------------------- /snuba/admin/static/theme.tsx: -------------------------------------------------------------------------------- 1 | const COLORS = { 2 | HEADER_BG: "#303030", 3 | HEADER_TEXT: "white", 4 | NAV_BORDER: "#CBCBCB", 5 | TABLE_BORDER: "#CBCBCB", 6 | SNUBA_BLUE: "#4681e0", 7 | TEXT_DEFAULT: "black", 8 | TEXT_LIGHTER: "#464646", 9 | TEXT_INACTIVE: "#858585", 10 | BG_LIGHT: "#E3E3E3", 11 | RED: "#c42e2b", 12 | }; 13 | 14 | export { COLORS }; 15 | -------------------------------------------------------------------------------- /snuba/admin/static/types.tsx: -------------------------------------------------------------------------------- 1 | type AllowedTools = { 2 | tools: string[]; 3 | }; 4 | 5 | type Settings = { 6 | dsn: string; 7 | tracesSampleRate: number; 8 | profilesSampleRate: number; 9 | tracePropagationTargets: string[] | null; 10 | replaysSessionSampleRate: number; 11 | replaysOnErrorSampleRate: number; 12 | userEmail: string; 13 | }; 14 | 15 | export { AllowedTools, Settings }; 16 | -------------------------------------------------------------------------------- /snuba/admin/static/utils/query_result_copier.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Button, Group } from "@mantine/core"; 3 | 4 | function QueryResultCopier(props: { 5 | rawInput?: string; 6 | jsonInput?: string; 7 | csvInput?: string; 8 | }) { 9 | function copyText(text: string) { 10 | window.navigator.clipboard.writeText(text); 11 | } 12 | 13 | 
return 14 | {props.rawInput && ( 15 | copyText(props.rawInput || "")} 17 | > 18 | Copy to clipboard (Raw) 19 | 20 | )} 21 | {props.jsonInput && ( 22 | copyText(props.jsonInput || "")}> 23 | Copy to clipboard (JSON) 24 | 25 | )} 26 | {props.csvInput && ( 27 | copyText(props.csvInput || "")}> 28 | Copy to clipboard (CSV) 29 | 30 | )} 31 | ; 32 | } 33 | 34 | export default QueryResultCopier; 35 | -------------------------------------------------------------------------------- /snuba/admin/user.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from typing import Sequence 3 | 4 | from snuba.admin.auth_roles import Role 5 | 6 | 7 | @dataclass 8 | class AdminUser: 9 | """ 10 | Basic encapsulation of a user of the admin panel. In the future, 11 | should be extended to contain permissions among other things 12 | """ 13 | 14 | email: str 15 | id: str 16 | roles: Sequence[Role] = field(default_factory=list) 17 | -------------------------------------------------------------------------------- /snuba/admin/wsgi.py: -------------------------------------------------------------------------------- 1 | from snuba.environment import setup_logging, setup_sentry 2 | 3 | setup_logging() 4 | setup_sentry() 5 | 6 | from snuba.core.initialize import initialize_snuba 7 | 8 | initialize_snuba() 9 | from snuba.admin.views import application # noqa 10 | -------------------------------------------------------------------------------- /snuba/attribution/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | from datetime import datetime 5 | 6 | from snuba import environment 7 | from snuba.utils.metrics.wrapper import MetricsWrapper 8 | 9 | from .appid import AppID 10 | 11 | metrics = MetricsWrapper(environment.metrics, "snuba.attribution") 12 | logger = logging.getLogger("snuba.attribution") 13 | 14 | 
INVALID_APPID = AppID("invalid", "sns", datetime(2022, 3, 25)) 15 | 16 | 17 | def get_app_id(app_id: str) -> AppID: 18 | return AppID(app_id) if app_id else INVALID_APPID 19 | 20 | 21 | __all__ = ( 22 | "AppID", 23 | "get_app_id", 24 | ) 25 | -------------------------------------------------------------------------------- /snuba/attribution/appid.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass 4 | from datetime import datetime 5 | from typing import Any 6 | 7 | 8 | @dataclass(frozen=True) 9 | class AppID: 10 | key: str 11 | created_by: str = "unknown" 12 | date_created: datetime = datetime.utcnow() 13 | 14 | @staticmethod 15 | def from_dict(obj: dict[str, Any]) -> AppID: 16 | return AppID( 17 | key=str(obj["key"]), 18 | created_by=str(obj["created_by"]), 19 | date_created=obj["date_created"], 20 | ) 21 | -------------------------------------------------------------------------------- /snuba/attribution/attribution_info.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass 4 | 5 | from snuba.attribution import AppID 6 | 7 | 8 | @dataclass(frozen=True) 9 | class AttributionInfo: 10 | """The settings for a attribution of a query + quota enforcement 11 | should be immutable 12 | """ 13 | 14 | app_id: AppID 15 | tenant_ids: dict[str, str | int] 16 | referrer: str 17 | team: str | None 18 | feature: str | None 19 | parent_api: str | None 20 | -------------------------------------------------------------------------------- /snuba/cli/health.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | 5 | import click 6 | 7 | from snuba.utils.health_info import get_health_info 8 | 9 | 10 | @click.command() 11 | @click.option( 12 | "--thorough", 13 | help="Whether 
to run a thorough health check.", 14 | is_flag=True, 15 | default=False, 16 | ) 17 | def health( 18 | *, 19 | thorough: bool, 20 | ) -> int: 21 | 22 | health_info = get_health_info(thorough) 23 | if health_info.status == 200: 24 | sys.exit(0) 25 | else: 26 | sys.exit(1) 27 | -------------------------------------------------------------------------------- /snuba/clickhouse/__init__.py: -------------------------------------------------------------------------------- 1 | DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" 2 | -------------------------------------------------------------------------------- /snuba/clickhouse/errors.py: -------------------------------------------------------------------------------- 1 | from typing import cast 2 | 3 | from snuba.utils.serializable_exception import SerializableException 4 | 5 | 6 | class ClickhouseError(SerializableException): 7 | @property 8 | def code(self) -> int: 9 | return cast(int, self.extra_data.get("code", -1)) 10 | 11 | 12 | class ClickhouseWriterError(ClickhouseError): 13 | @property 14 | def row(self) -> int: 15 | return cast(int, self.extra_data.get("row", -1)) 16 | -------------------------------------------------------------------------------- /snuba/clickhouse/formatter/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/clickhouse/formatter/__init__.py -------------------------------------------------------------------------------- /snuba/clickhouse/optimize/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/clickhouse/optimize/__init__.py -------------------------------------------------------------------------------- /snuba/clickhouse/optimize/util.py: -------------------------------------------------------------------------------- 1 | import 
typing 2 | from dataclasses import dataclass 3 | 4 | from snuba.state import get_config 5 | 6 | _OPTIMIZE_PARALLEL_THREADS_KEY = "optimize_parallel_threads" 7 | 8 | 9 | @dataclass 10 | class MergeInfo: 11 | result_part_name: str 12 | elapsed: float 13 | progress: float 14 | size: int 15 | 16 | @property 17 | # estimated time remaining in seconds 18 | def estimated_time(self) -> float: 19 | return self.elapsed / (self.progress + 0.0001) 20 | 21 | 22 | def get_num_threads(default_parallel_threads: int) -> int: 23 | return typing.cast( 24 | int, get_config(_OPTIMIZE_PARALLEL_THREADS_KEY, default_parallel_threads) 25 | ) 26 | -------------------------------------------------------------------------------- /snuba/clickhouse/query_dsl/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/clickhouse/query_dsl/__init__.py -------------------------------------------------------------------------------- /snuba/clickhouse/translators/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/clickhouse/translators/__init__.py -------------------------------------------------------------------------------- /snuba/clusters/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/clusters/__init__.py -------------------------------------------------------------------------------- /snuba/consumers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/consumers/__init__.py 
-------------------------------------------------------------------------------- /snuba/consumers/codecs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/consumers/codecs/__init__.py -------------------------------------------------------------------------------- /snuba/consumers/types.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import NamedTuple 3 | 4 | 5 | class KafkaMessageMetadata(NamedTuple): 6 | offset: int 7 | partition: int 8 | timestamp: datetime 9 | -------------------------------------------------------------------------------- /snuba/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/core/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/cdc/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/cdc/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/cdc/cdcstorage.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from snuba.datasets.cdc.row_processors import CdcRowProcessor 4 | from snuba.datasets.storage import WritableTableStorage 5 | 6 | 7 | class 
CdcStorage(WritableTableStorage): 8 | def __init__( 9 | self, 10 | *, 11 | default_control_topic: str, 12 | postgres_table: str, 13 | row_processor: CdcRowProcessor, 14 | **kwargs: Any, 15 | ): 16 | super().__init__(**kwargs) 17 | self.__default_control_topic = default_control_topic 18 | self.__postgres_table = postgres_table 19 | self.__row_processor = row_processor 20 | 21 | def get_row_processor(self) -> CdcRowProcessor: 22 | return self.__row_processor 23 | 24 | def get_postgres_table(self) -> str: 25 | return self.__postgres_table 26 | -------------------------------------------------------------------------------- /snuba/datasets/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/common/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/configuration/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/configuration/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/configuration/dataset_builder.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from snuba.datasets.configuration.json_schema import DATASET_VALIDATORS 4 | from snuba.datasets.configuration.loader import load_configuration_data 5 | from snuba.datasets.entities.entity_key import EntityKey 6 | from snuba.datasets.pluggable_dataset import PluggableDataset 7 | 8 | 9 | def build_dataset_from_config(config_file_path: str) -> PluggableDataset: 10 | config = load_configuration_data(config_file_path, DATASET_VALIDATORS) 11 | return PluggableDataset( 12 | name=config["name"], 13 | all_entities=[EntityKey(key) for 
key in config.get("entities", [])], 14 | ) 15 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/discover/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: discover 4 | 5 | entities: 6 | - discover 7 | - discover_events 8 | - discover_transactions 9 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/events/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: events 4 | 5 | entities: 6 | - events 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/events_analytics_platform/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: events_analytics_platform 4 | 5 | entities: 6 | - spans_num_attrs 7 | - spans_str_attrs 8 | - uptime_checks 9 | - eap_items 10 | - eap_items_span 11 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/functions/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: functions 4 | 5 | entities: 6 | - functions 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/generic_metrics/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: generic_metrics 4 | 5 | entities: 6 | - generic_metrics_sets 7 | - generic_metrics_distributions 8 | - generic_metrics_counters 9 | - generic_org_metrics_counters 10 | - generic_metrics_gauges 11 | -------------------------------------------------------------------------------- 
/snuba/datasets/configuration/group_attributes/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: group_attributes 4 | 5 | entities: 6 | - group_attributes 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/groupassignee/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: groupassignee 4 | 5 | entities: 6 | - groupassignee 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/groupedmessage/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: groupedmessage 4 | 5 | entities: 6 | - groupedmessage 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/issues/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: search_issues 4 | 5 | entities: 6 | - search_issues 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/loader.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | import sentry_sdk 6 | from yaml import safe_load 7 | 8 | from snuba import settings 9 | 10 | 11 | def load_configuration_data(path: str, validators: dict[str, Any]) -> dict[str, Any]: 12 | """ 13 | Loads a configuration file from the given path 14 | Returns an untyped dict of dicts 15 | """ 16 | with sentry_sdk.start_span(op="load_and_validate") as span: 17 | span.set_tag("file", path) 18 | with open(path) as file: 19 | config = safe_load(file) 20 | assert isinstance(config, dict) 21 | if 
settings.VALIDATE_DATASET_YAMLS_ON_STARTUP: 22 | validators[config["kind"]](config) 23 | span.description = config["name"] 24 | return config 25 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/metrics/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: metrics 4 | 5 | entities: 6 | - metrics_counters 7 | - metrics_distributions 8 | - metrics_sets 9 | - org_metrics_counters 10 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/metrics/storages/org_counters.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: readable_storage 3 | name: org_metrics_counters 4 | storage: 5 | key: org_metrics_counters 6 | set_key: metrics 7 | readiness_state: complete 8 | schema: 9 | columns: 10 | [ 11 | { name: org_id, type: UInt, args: { size: 64 } }, 12 | { name: project_id, type: UInt, args: { size: 64 } }, 13 | { name: metric_id, type: UInt, args: { size: 64 } }, 14 | { name: granularity, type: UInt, args: { size: 32 } }, 15 | { name: timestamp, type: DateTime }, 16 | ] 17 | local_table_name: metrics_counters_v2_local 18 | dist_table_name: metrics_counters_v2_dist 19 | allocation_policies: 20 | - name: ReferrerGuardRailPolicy 21 | args: 22 | required_tenant_types: 23 | - referrer 24 | default_config_overrides: 25 | is_enforced: 0 26 | is_active: 0 27 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/outcomes/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: outcomes 4 | 5 | entities: 6 | - outcomes 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/outcomes_raw/dataset.yaml: 
-------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: outcomes_raw 4 | 5 | entities: 6 | - outcomes_raw 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/profiles/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: profiles 4 | 5 | entities: 6 | - profiles 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/replays/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: replays 4 | 5 | entities: 6 | - replays 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/spans/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: spans 4 | 5 | entities: 6 | - spans 7 | -------------------------------------------------------------------------------- /snuba/datasets/configuration/transactions/dataset.yaml: -------------------------------------------------------------------------------- 1 | version: v1 2 | kind: dataset 3 | name: transactions 4 | 5 | entities: 6 | - transactions 7 | -------------------------------------------------------------------------------- /snuba/datasets/deletion_settings.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass, field 4 | from typing import Sequence 5 | 6 | MAX_ROWS_TO_DELETE_DEFAULT = 100000 7 | 8 | 9 | @dataclass 10 | class DeletionSettings: 11 | is_enabled: int 12 | tables: Sequence[str] 13 | allowed_columns: Sequence[str] = field(default_factory=list) 14 | max_rows_to_delete: int = MAX_ROWS_TO_DELETE_DEFAULT 15 | 
-------------------------------------------------------------------------------- /snuba/datasets/entities/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/entities/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/entities/entity_data_model.py: -------------------------------------------------------------------------------- 1 | from snuba.utils.schemas import ColumnSet 2 | 3 | 4 | class EntityColumnSet(ColumnSet): 5 | """ 6 | Entity data model supports wildcard columns as well as the other types 7 | """ 8 | 9 | def __repr__(self) -> str: 10 | return "EntityColumnSet({})".format(repr(self.columns)) 11 | -------------------------------------------------------------------------------- /snuba/datasets/entity_subscriptions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/entity_subscriptions/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/plans/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/plans/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/plans/translator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/plans/translator/__init__.py -------------------------------------------------------------------------------- /snuba/datasets/pluggable_dataset.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | 5 | from snuba.datasets.dataset import Dataset 6 | from snuba.datasets.entities.entity_key import EntityKey 7 | 8 | 9 | class PluggableDataset(Dataset): 10 | """ 11 | PluggableDataset is a version of Dataset that is designed to be populated by 12 | static YAML-based configuration files. It is intentionally less flexible 13 | than Dataset. See the documentation of Dataset for explanation about how 14 | overridden methods are supposed to behave. 15 | """ 16 | 17 | def __init__( 18 | self, 19 | *, 20 | name: str, 21 | all_entities: list[EntityKey], 22 | ) -> None: 23 | super().__init__(all_entities=all_entities) 24 | self.name = name 25 | 26 | def __eq__(self, other: Any) -> bool: 27 | return isinstance(other, PluggableDataset) and self.name == other.name 28 | 29 | def __hash__(self) -> int: 30 | return hash(self.name) 31 | -------------------------------------------------------------------------------- /snuba/datasets/processors/eap_items_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class EAPItemsProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("EAPItemsProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/eap_items_span_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class EAPItemsSpanProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("EAPItemsSpanProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/errors_processor.py: 
-------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class ErrorsProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("ErrorsProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/functions_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class FunctionsMessageProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("FunctionsMessageProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/generic_metrics_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class GenericSetsMetricsProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("GenericSetsMetricsProcessor") 7 | 8 | 9 | class GenericDistributionsMetricsProcessor(RustCompatProcessor): 10 | def __init__(self) -> None: 11 | super().__init__("GenericDistributionsMetricsProcessor") 12 | 13 | 14 | class GenericCountersMetricsProcessor(RustCompatProcessor): 15 | def __init__(self) -> None: 16 | super().__init__("GenericCountersMetricsProcessor") 17 | 18 | 19 | class GenericGaugesMetricsProcessor(RustCompatProcessor): 20 | def __init__(self) -> None: 21 | super().__init__("GenericGaugesMetricsProcessor") 22 | -------------------------------------------------------------------------------- /snuba/datasets/processors/metrics_bucket_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import 
RustCompatProcessor 2 | 3 | 4 | class PolymorphicMetricsProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("PolymorphicMetricsProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/ourlogs_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class OurlogsMessageProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("OurlogsMessageProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/outcomes_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class OutcomesProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("OutcomesProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/profile_chunks_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class ProfileChunksProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("ProfileChunksProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/profiles_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class ProfilesMessageProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("ProfilesMessageProcessor") 7 | 
-------------------------------------------------------------------------------- /snuba/datasets/processors/querylog_processor.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 4 | 5 | 6 | class QuerylogProcessor(RustCompatProcessor): 7 | def __init__(self) -> None: 8 | super().__init__("QuerylogProcessor") 9 | -------------------------------------------------------------------------------- /snuba/datasets/processors/replays_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class ReplaysProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("ReplaysProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/spans_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class SpansMessageProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("SpansMessageProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/spans_v2_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 2 | 3 | 4 | class EAPSpansMessageProcessor(RustCompatProcessor): 5 | def __init__(self) -> None: 6 | super().__init__("EAPSpansMessageProcessor") 7 | -------------------------------------------------------------------------------- /snuba/datasets/processors/uptime_monitors_processor.py: 
-------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from snuba import environment 4 | from snuba.datasets.processors.rust_compat_processor import RustCompatProcessor 5 | from snuba.utils.metrics.wrapper import MetricsWrapper 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | metrics = MetricsWrapper(environment.metrics, "uptime_monitor_checks.processor") 10 | 11 | 12 | class UptimeMonitorChecksProcessor(RustCompatProcessor): 13 | def __init__(self) -> None: 14 | super().__init__("UptimeMonitorChecksProcessor") 15 | -------------------------------------------------------------------------------- /snuba/datasets/storages/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/datasets/storages/__init__.py -------------------------------------------------------------------------------- /snuba/downsampled_storage_tiers.py: -------------------------------------------------------------------------------- 1 | from enum import IntEnum 2 | 3 | 4 | class Tier(IntEnum): 5 | TIER_NO_TIER = -1 6 | TIER_1 = 1 7 | TIER_8 = 8 8 | TIER_64 = 64 9 | TIER_512 = 512 10 | -------------------------------------------------------------------------------- /snuba/env/job_manifest.json: -------------------------------------------------------------------------------- 1 | [{ "id": "abc1234", "job_type": "ToyJob", "params": { "p1": "value1" } }] 2 | -------------------------------------------------------------------------------- /snuba/lw_deletions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/lw_deletions/__init__.py -------------------------------------------------------------------------------- /snuba/manual_jobs/job_loader.py: 
-------------------------------------------------------------------------------- 1 | from typing import cast 2 | 3 | from snuba.manual_jobs import Job, JobSpec 4 | from snuba.utils.serializable_exception import SerializableException 5 | 6 | 7 | class NonexistentJobException(SerializableException): 8 | def __init__(self, job_type: str): 9 | super().__init__(f"Job does not exist. Did you make a file {job_type}.py yet?") 10 | 11 | 12 | class _JobLoader: 13 | @staticmethod 14 | def get_job_instance(job_spec: JobSpec) -> "Job": 15 | job_type_class = Job.class_from_name(job_spec.job_type) 16 | if job_type_class is None: 17 | raise NonexistentJobException(job_spec.job_type) 18 | 19 | return cast("Job", job_type_class(job_spec)) 20 | -------------------------------------------------------------------------------- /snuba/manual_jobs/job_status.py: -------------------------------------------------------------------------------- 1 | from enum import StrEnum 2 | 3 | 4 | class JobStatus(StrEnum): 5 | RUNNING = "running" 6 | FINISHED = "finished" 7 | NOT_STARTED = "not_started" 8 | ASYNC_RUNNING_BACKGROUND = "async_running_background" 9 | FAILED = "failed" 10 | -------------------------------------------------------------------------------- /snuba/manual_jobs/toy_job.py: -------------------------------------------------------------------------------- 1 | from snuba.manual_jobs import Job, JobLogger, JobSpec 2 | 3 | 4 | class ToyJob(Job): 5 | def __init__( 6 | self, 7 | job_spec: JobSpec, 8 | ): 9 | super().__init__(job_spec) 10 | 11 | def _build_query(self) -> str: 12 | return "query" 13 | 14 | def execute(self, logger: JobLogger) -> None: 15 | logger.info( 16 | "executing job " 17 | + self.job_spec.job_id 18 | + " with query `" 19 | + self._build_query() 20 | + "`" 21 | ) 22 | 23 | if not self.job_spec.params: 24 | return 25 | 26 | if self.job_spec.params.get("fail"): 27 | raise Exception("failed as requested") 28 | 
-------------------------------------------------------------------------------- /snuba/manual_jobs/truncate_eap_spans.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from snuba.clusters.cluster import ClickhouseClientSettings, get_cluster 4 | from snuba.clusters.storage_sets import StorageSetKey 5 | from snuba.manual_jobs import Job, JobLogger, JobSpec 6 | 7 | STATEMENT = ( 8 | "TRUNCATE TABLE eap_spans_2_local ON CLUSTER 'snuba-events-analytics-platform'" 9 | ) 10 | 11 | 12 | class TruncateEAPSpans(Job): 13 | def __init__(self, job_spec: JobSpec) -> None: 14 | super().__init__(job_spec) 15 | 16 | def execute(self, logger: JobLogger) -> None: 17 | cluster = get_cluster(StorageSetKey.EVENTS_ANALYTICS_PLATFORM) 18 | 19 | storage_node = cluster.get_local_nodes()[0] 20 | connection = cluster.get_node_connection( 21 | ClickhouseClientSettings.MIGRATE, 22 | storage_node, 23 | ) 24 | logger.info(f"Run truncate table statement: {STATEMENT}") 25 | connection.execute(query=STATEMENT) 26 | 27 | logger.info("TRUNCATE completed") 28 | -------------------------------------------------------------------------------- /snuba/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/migrations/__init__.py -------------------------------------------------------------------------------- /snuba/migrations/autogeneration/__init__.py: -------------------------------------------------------------------------------- 1 | from .main import generate 2 | 3 | __all__ = ["generate"] 4 | -------------------------------------------------------------------------------- /snuba/migrations/clickhouse.py: -------------------------------------------------------------------------------- 1 | CLICKHOUSE_SERVER_MIN_VERSION = "23.8.11.29" 2 | # Note: SaaS, self-hosted, and sentry dev 3 | # 
environements should all be on 23.8.11.29 4 | CLICKHOUSE_SERVER_MAX_VERSION = "24.8.14.10459" 5 | -------------------------------------------------------------------------------- /snuba/migrations/context.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Callable, NamedTuple 3 | 4 | from snuba.migrations.status import Status 5 | 6 | 7 | class Context(NamedTuple): 8 | migration_id: str 9 | logger: logging.Logger 10 | update_status: Callable[[Status], None] 11 | -------------------------------------------------------------------------------- /snuba/migrations/errors.py: -------------------------------------------------------------------------------- 1 | from snuba.utils.serializable_exception import SerializableException 2 | 3 | 4 | class InvalidMigrationState(SerializableException): 5 | pass 6 | 7 | 8 | class MigrationDoesNotExist(SerializableException): 9 | pass 10 | 11 | 12 | class MigrationError(SerializableException): 13 | pass 14 | 15 | 16 | class MigrationInProgress(SerializableException): 17 | pass 18 | 19 | 20 | class InvalidClickhouseVersion(SerializableException): 21 | pass 22 | 23 | 24 | class InactiveClickhouseReplica(SerializableException): 25 | pass 26 | -------------------------------------------------------------------------------- /snuba/migrations/status.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class Status(Enum): 5 | NOT_STARTED = "not_started" 6 | IN_PROGRESS = "in_progress" 7 | COMPLETED = "completed" 8 | -------------------------------------------------------------------------------- /snuba/migrations/system_migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/migrations/system_migrations/__init__.py 
-------------------------------------------------------------------------------- /snuba/pipeline/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/pipeline/__init__.py -------------------------------------------------------------------------------- /snuba/pipeline/processors.py: -------------------------------------------------------------------------------- 1 | import sentry_sdk 2 | 3 | from snuba.datasets.entities.factory import get_entity 4 | from snuba.query.logical import EntityQuery 5 | from snuba.query.query_settings import QuerySettings 6 | from snuba.state import explain_meta 7 | 8 | 9 | def execute_entity_processors(query: EntityQuery, settings: QuerySettings) -> None: 10 | """ 11 | Executes the entity query processors for the query. These are taken 12 | from the entity. 13 | """ 14 | entity = get_entity(query.get_from_clause().key) 15 | 16 | for processor in entity.get_query_processors(): 17 | with sentry_sdk.start_span( 18 | description=type(processor).__name__, op="processor" 19 | ): 20 | if settings.get_dry_run(): 21 | with explain_meta.with_query_differ( 22 | "entity_processor", type(processor).__name__, query 23 | ): 24 | processor.process_query(query, settings) 25 | else: 26 | processor.process_query(query, settings) 27 | -------------------------------------------------------------------------------- /snuba/pipeline/stages/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/pipeline/stages/__init__.py -------------------------------------------------------------------------------- /snuba/query/accessors.py: -------------------------------------------------------------------------------- 1 | from typing import Set 2 | 3 | from snuba.query.expressions import Column, 
Expression 4 | 5 | 6 | def get_columns_in_expression(exp: Expression) -> Set[Column]: 7 | """ 8 | Returns all the columns referenced in an arbitrary AST expression. 9 | """ 10 | return set(e for e in exp if isinstance(e, Column)) 11 | -------------------------------------------------------------------------------- /snuba/query/data_source/__init__.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | from snuba.utils.schemas import ColumnSet 4 | 5 | 6 | class DataSource(ABC): 7 | """ 8 | Represents the source of the records a query (or a portion of it) 9 | acts upon. 10 | In the most common case this is the FROM clause but it can be used 11 | in other sections of the query for subqueries. 12 | """ 13 | 14 | @abstractmethod 15 | def get_columns(self) -> ColumnSet: 16 | raise NotImplementedError 17 | -------------------------------------------------------------------------------- /snuba/query/deletions/max_rows_enforcer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/query/deletions/max_rows_enforcer.py -------------------------------------------------------------------------------- /snuba/query/formatters/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/query/formatters/__init__.py -------------------------------------------------------------------------------- /snuba/query/joins/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/query/joins/__init__.py -------------------------------------------------------------------------------- /snuba/query/mql/exceptions.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | 4 | class InvalidMQLContextError(Exception): 5 | pass 6 | 7 | 8 | class InvalidExpressionError(Exception): 9 | pass 10 | -------------------------------------------------------------------------------- /snuba/query/parser/exceptions.py: -------------------------------------------------------------------------------- 1 | from snuba.query.composite import CompositeQuery 2 | from snuba.query.data_source.simple import LogicalDataSource 3 | from snuba.query.exceptions import InvalidQueryException, ValidationException 4 | from snuba.query.logical import Query 5 | 6 | 7 | class ParsingException(InvalidQueryException): 8 | pass 9 | 10 | 11 | class CyclicAliasException(ValidationException): 12 | pass 13 | 14 | 15 | class AliasShadowingException(ValidationException): 16 | pass 17 | 18 | 19 | class PostProcessingError(Exception): 20 | """ 21 | Class for exceptions that happen during post processing of a query, 22 | after the original query has been created 23 | """ 24 | 25 | def __init__( 26 | self, 27 | query: Query | CompositeQuery[LogicalDataSource], 28 | message: str | None = None, 29 | ): 30 | super().__init__(message) 31 | self.query = query 32 | -------------------------------------------------------------------------------- /snuba/query/parser/validation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/query/parser/validation/__init__.py -------------------------------------------------------------------------------- /snuba/query/parsing.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | 4 | class ParsingContext: 5 | """ 6 | This class is passed around during the query parsing process 7 | to keep any state needed during the process 
itself (like the 8 | alias cache). 9 | """ 10 | 11 | def __init__(self) -> None: 12 | self.__alias_cache: List[str] = [] 13 | 14 | def add_alias(self, alias: str) -> None: 15 | self.__alias_cache.append(alias) 16 | 17 | def is_alias_present(self, alias: str) -> bool: 18 | return alias in self.__alias_cache 19 | -------------------------------------------------------------------------------- /snuba/query/processors/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/query/processors/__init__.py -------------------------------------------------------------------------------- /snuba/query/processors/physical/consistency_enforcer.py: -------------------------------------------------------------------------------- 1 | from dataclasses import replace 2 | 3 | from snuba.clickhouse.query import Query 4 | from snuba.query.processors.physical import ClickhouseQueryProcessor 5 | from snuba.query.query_settings import QuerySettings 6 | 7 | 8 | class ConsistencyEnforcerProcessor(ClickhouseQueryProcessor): 9 | """ 10 | This processor modifies the query to ensure that deduplication/merge happens when the query 11 | is run. This is done by setting the FINAL mode in clickhouse query. 12 | 13 | This should only be used for tables whose data is mutable and have less amount of data entries 14 | like the CDC tables. 
15 | """ 16 | 17 | def process_query(self, query: Query, query_settings: QuerySettings) -> None: 18 | query.set_from_clause(replace(query.get_from_clause(), final=True)) 19 | -------------------------------------------------------------------------------- /snuba/query/processors/physical/group_id_column_processor.py: -------------------------------------------------------------------------------- 1 | from snuba.clickhouse.query import Query 2 | from snuba.query.expressions import Column, Expression, FunctionCall, Literal 3 | from snuba.query.processors.physical import ClickhouseQueryProcessor 4 | from snuba.query.query_settings import QuerySettings 5 | 6 | 7 | class GroupIdColumnProcessor(ClickhouseQueryProcessor): 8 | def process_query(self, query: Query, query_settings: QuerySettings) -> None: 9 | def process_column(exp: Expression) -> Expression: 10 | if isinstance(exp, Column): 11 | if exp.column_name == "group_id": 12 | return FunctionCall( 13 | exp.alias, 14 | "nullIf", 15 | ( 16 | Column(None, exp.table_name, exp.column_name), 17 | Literal(None, 0), 18 | ), 19 | ) 20 | 21 | return exp 22 | 23 | query.transform_expressions(process_column) 24 | -------------------------------------------------------------------------------- /snuba/query/processors/physical/mandatory_condition_applier.py: -------------------------------------------------------------------------------- 1 | from snuba.clickhouse.query import Query 2 | from snuba.query.conditions import combine_and_conditions 3 | from snuba.query.processors.physical import ClickhouseQueryProcessor 4 | from snuba.query.query_settings import QuerySettings 5 | 6 | 7 | class MandatoryConditionApplier(ClickhouseQueryProcessor): 8 | 9 | """ 10 | Obtains mandatory conditions from a Query object’s underlying storage 11 | and applies them to the query. 
12 | """ 13 | 14 | def process_query(self, query: Query, query_settings: QuerySettings) -> None: 15 | 16 | mandatory_conditions = query.get_from_clause().mandatory_conditions 17 | 18 | if len(mandatory_conditions) > 0: 19 | query.add_condition_to_ast(combine_and_conditions(mandatory_conditions)) 20 | -------------------------------------------------------------------------------- /snuba/query/processors/physical/minute_resolution_processor.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from snuba.clickhouse.query import Query 4 | from snuba.clickhouse.query_dsl.accessors import get_time_range 5 | from snuba.query.exceptions import ValidationException 6 | from snuba.query.processors.physical import ClickhouseQueryProcessor 7 | from snuba.query.query_settings import QuerySettings 8 | 9 | 10 | class MinuteResolutionProcessor(ClickhouseQueryProcessor): 11 | def process_query(self, query: Query, query_settings: QuerySettings) -> None: 12 | # NOTE: the product side is restricted to a 6h window, however it rounds 13 | # outwards, which extends the window to 7h. 14 | from_date, to_date = get_time_range(query, "started") 15 | if not from_date or not to_date or (to_date - from_date) > timedelta(hours=7): 16 | raise ValidationException( 17 | "Minute-resolution queries are restricted to a 7-hour time window." 
18 | ) 19 | -------------------------------------------------------------------------------- /snuba/query/snql/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/query/snql/__init__.py -------------------------------------------------------------------------------- /snuba/query/subscripts.py: -------------------------------------------------------------------------------- 1 | from snuba.query.expressions import SubscriptableReference 2 | 3 | # Contains functions to derive the expanded column names in the ColumnSet 4 | # that correspond to a subscriptable nested column like tags and contexts. 5 | # Example: the tags nested column is actually represented as two columns 6 | # tags.key and tags.value in ColumnSet objects. 7 | 8 | SUBSCRIPT_KEY = "key" 9 | 10 | 11 | def _subscript_col_name(expression: SubscriptableReference, col_name: str) -> str: 12 | table_name = expression.column.table_name 13 | table = f"{table_name}." 
if table_name is not None else "" 14 | return f"{table}{expression.column.column_name}.{col_name}" 15 | 16 | 17 | def subscript_key_column_name(expression: SubscriptableReference) -> str: 18 | return _subscript_col_name(expression, SUBSCRIPT_KEY) 19 | -------------------------------------------------------------------------------- /snuba/query/validation/__init__.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Sequence 3 | 4 | from snuba.query.data_source import DataSource 5 | from snuba.query.expressions import Expression 6 | from snuba.utils.serializable_exception import SerializableException 7 | 8 | 9 | class InvalidFunctionCall(SerializableException): 10 | pass 11 | 12 | 13 | class FunctionCallValidator(ABC): 14 | """ 15 | Validates the signature of a function call given the parameters 16 | and the schema of the function call. 17 | Raise InvalidFunctionCall to signal an invalid call. 18 | """ 19 | 20 | @abstractmethod 21 | def validate( 22 | self, func_name: str, parameters: Sequence[Expression], data_source: DataSource 23 | ) -> None: 24 | raise NotImplementedError 25 | -------------------------------------------------------------------------------- /snuba/replacers/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from snuba.utils.registered_class import import_submodules_in_directory 4 | 5 | import_submodules_in_directory( 6 | os.path.dirname(os.path.realpath(__file__)), "snuba.replacers" 7 | ) 8 | -------------------------------------------------------------------------------- /snuba/request/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import uuid 4 | from dataclasses import dataclass 5 | from typing import Any, Dict, Union 6 | 7 | from snuba.attribution.attribution_info import AttributionInfo 8 | 
from snuba.query import ProcessableQuery 9 | from snuba.query.composite import CompositeQuery 10 | from snuba.query.data_source.simple import LogicalDataSource, Table 11 | from snuba.query.logical import Query 12 | from snuba.query.query_settings import QuerySettings 13 | 14 | 15 | @dataclass(frozen=True) 16 | class Request: 17 | id: uuid.UUID 18 | original_body: Dict[str, Any] 19 | query: Union[Query, CompositeQuery[LogicalDataSource]] 20 | query_settings: QuerySettings 21 | attribution_info: AttributionInfo 22 | 23 | @property 24 | def referrer(self) -> str: 25 | return self.attribution_info.referrer 26 | 27 | 28 | @dataclass(frozen=True) 29 | class DeleteRequest: 30 | id: str 31 | query: ProcessableQuery[Table] 32 | storage: str 33 | where_clause: str 34 | -------------------------------------------------------------------------------- /snuba/request/exceptions.py: -------------------------------------------------------------------------------- 1 | from snuba.utils.serializable_exception import SerializableException 2 | 3 | 4 | class InvalidJsonRequestException(SerializableException): 5 | """ 6 | Common parent class for exceptions signaling the json payload 7 | of the request was not valid. 
8 | """ 9 | 10 | pass 11 | 12 | 13 | class JsonDecodeException(InvalidJsonRequestException): 14 | pass 15 | 16 | 17 | class JsonSchemaValidationException(InvalidJsonRequestException): 18 | pass 19 | -------------------------------------------------------------------------------- /snuba/settings/settings_docker.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | env = os.environ.get 4 | 5 | DEBUG = env("DEBUG", "0").lower() in ("1", "true") 6 | 7 | DEFAULT_RETENTION_DAYS = env("SENTRY_EVENT_RETENTION_DAYS", 90) 8 | 9 | REDIS_HOST = env("REDIS_HOST", "127.0.0.1") 10 | REDIS_PORT = int(env("REDIS_PORT", 6379)) 11 | REDIS_PASSWORD = env("REDIS_PASSWORD") 12 | REDIS_DB = int(env("REDIS_DB", 1)) 13 | USE_REDIS_CLUSTER = False 14 | 15 | # Dogstatsd Options 16 | DOGSTATSD_HOST = env("DOGSTATSD_HOST") 17 | DOGSTATSD_PORT = env("DOGSTATSD_PORT") 18 | 19 | SENTRY_DSN = env("SENTRY_DSN") 20 | 21 | USE_EAP_ITEMS_TABLE = True 22 | -------------------------------------------------------------------------------- /snuba/settings/settings_self_hosted.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Set 3 | 4 | env = os.environ.get 5 | 6 | ALLOCATION_POLICY_ENABLED = False 7 | DEBUG = env("DEBUG", "0").lower() in ("1", "true") 8 | 9 | DEFAULT_RETENTION_DAYS = int(env("SENTRY_EVENT_RETENTION_DAYS", 90)) 10 | VALID_RETENTION_DAYS = set([int(env("SENTRY_EVENT_RETENTION_DAYS", 90)), 30, 60]) 11 | LOWER_RETENTION_DAYS = min(DEFAULT_RETENTION_DAYS, 30) 12 | 13 | REDIS_HOST = env("REDIS_HOST", "127.0.0.1") 14 | REDIS_PORT = int(env("REDIS_PORT", 6379)) 15 | REDIS_PASSWORD = env("REDIS_PASSWORD") 16 | REDIS_DB = int(env("REDIS_DB", 1)) 17 | USE_REDIS_CLUSTER = False 18 | 19 | # Dogstatsd Options 20 | DOGSTATSD_HOST = env("DOGSTATSD_HOST") 21 | DOGSTATSD_PORT = env("DOGSTATSD_PORT") 22 | 23 | # Dataset readiness states supported in this environment 24 | 
SUPPORTED_STATES: Set[str] = {"deprecate", "complete"} 25 | READINESS_STATE_FAIL_QUERIES: bool = False 26 | 27 | 28 | SENTRY_DSN = env("SENTRY_DSN") 29 | 30 | VALIDATE_DATASET_YAMLS_ON_STARTUP = True 31 | -------------------------------------------------------------------------------- /snuba/settings/settings_test_distributed.py: -------------------------------------------------------------------------------- 1 | from snuba.settings.settings_distributed import * # noqa 2 | from snuba.settings.settings_test import * # noqa 3 | -------------------------------------------------------------------------------- /snuba/settings/settings_test_initialization.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/settings/settings_test_initialization.py -------------------------------------------------------------------------------- /snuba/snuba_migrations/README.md: -------------------------------------------------------------------------------- 1 | If you don't know anything about snuba migrations see `MIGRATIONS.md` in the root folder 2 | 3 | Each folder in here represents a migration group. see `snuba/migrations/group_loader.py` 4 | 5 | Each migration (ex. `events/0001_events_initial.py`) needs to follow the naming scheme `xxxx_migration_name.py` 6 | where `xxxx` is the 0 padded migration number. Migrations are applied in order of migration number. See `snuba/migrations/group_loader.py` for more info. 7 | 8 | ## Migration Auto-Generation 9 | Who wants to write their own migrations by hand? Certainly not me! See `MIGRATIONS.md` to learn how you can have them generated for you. 
10 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/snuba_migrations/__init__.py -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0001_events_initial.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0002_events_onpremise_compatibility.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0003_errors.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0004_errors_onpremise_compatibility.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0005_events_tags_hash_map.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class 
Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0006_errors_tags_hash_map.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0009_errors_add_http_fields.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0014_backfill_errors.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0015_truncate_events.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/0016_drop_legacy_events.py: -------------------------------------------------------------------------------- 1 | from snuba.migrations import migration 2 | 3 | 4 | class Migration(migration.SquashedMigration): 5 | pass 6 | -------------------------------------------------------------------------------- /snuba/snuba_migrations/events/__init__.py: -------------------------------------------------------------------------------- 
from typing import Sequence

from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations
from snuba.migrations.operations import SqlOperation

storage_set_name = StorageSetKey.EVENTS_ANALYTICS_PLATFORM
local_table_name = "eap_spans_local"


class Migration(migration.ClickhouseNodeMigration):
    """Drop the ``bf_project_id`` index from the local EAP spans table."""

    blocking = False

    def forwards_ops(self) -> Sequence[SqlOperation]:
        # Use the module-level constants (previously defined but unused) so the
        # storage set and table name are spelled in exactly one place.
        return [
            operations.DropIndex(
                storage_set=storage_set_name,
                table_name=local_table_name,
                index_name="bf_project_id",
                target=operations.OperationTarget.LOCAL,
            ),
        ]

    def backwards_ops(self) -> Sequence[SqlOperation]:
        # No backwards operation: the dropped index is not recreated.
        return []
from typing import Sequence

from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations


class Migration(migration.ClickhouseNodeMigration):
    """Drop the bucket-0 attribute-key indexes (``bf_attr_num_0`` and
    ``bf_attr_str_0``) from ``eap_spans_local``."""

    blocking = False

    def forwards_ops(self) -> Sequence[operations.SqlOperation]:
        # Tuples instead of the original set literals: string-keyed sets
        # iterate in hash order, which is randomized across interpreter runs
        # (PYTHONHASHSEED), so the two DropIndex operations could previously be
        # emitted in either order. A migration should produce a deterministic
        # operation sequence.
        return [
            operations.DropIndex(
                storage_set=StorageSetKey.EVENTS_ANALYTICS_PLATFORM,
                table_name="eap_spans_local",
                index_name=index_name,
                target=operations.OperationTarget.LOCAL,
                run_async=True,
            )
            for bucket in (0,)
            for index_name in (f"bf_attr_num_{bucket}", f"bf_attr_str_{bucket}")
        ]

    def backwards_ops(self) -> Sequence[operations.SqlOperation]:
        # No backwards operation: the dropped indexes are not recreated.
        return []
from typing import Sequence

from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations


class Migration(migration.ClickhouseNodeMigration):
    """Drop the per-bucket ``bf_attr_str_val_{0..19}`` indexes from
    ``eap_spans_local`` in a single asynchronous operation."""

    blocking = False

    def forwards_ops(self) -> Sequence[operations.SqlOperation]:
        # One DropIndices operation covering every string-value index bucket.
        index_names = [f"bf_attr_str_val_{bucket}" for bucket in range(20)]
        return [
            operations.DropIndices(
                storage_set=StorageSetKey.EVENTS_ANALYTICS_PLATFORM,
                table_name="eap_spans_local",
                indices=index_names,
                target=operations.OperationTarget.LOCAL,
                run_async=True,
            )
        ]

    def backwards_ops(self) -> Sequence[operations.SqlOperation]:
        # No backwards operation: the dropped indexes are not recreated.
        return []
-------------------------------------------------------------------------------- /snuba/snuba_migrations/replays/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/snuba_migrations/replays/__init__.py -------------------------------------------------------------------------------- /snuba/snuba_migrations/search_issues/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/snuba_migrations/search_issues/__init__.py -------------------------------------------------------------------------------- /snuba/snuba_migrations/sessions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/snuba_migrations/sessions/__init__.py -------------------------------------------------------------------------------- /snuba/snuba_migrations/test_migration/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/snuba_migrations/test_migration/__init__.py -------------------------------------------------------------------------------- /snuba/state/cache/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/state/cache/__init__.py -------------------------------------------------------------------------------- /snuba/state/cache/redis/__init__.py: -------------------------------------------------------------------------------- 
@dataclass(frozen=True)
class ResourceQuota:
    """
    Tracks the quota a client can use when running a query.

    As of now we only represent that in threads.
    """

    # Upper bound on the number of threads the query may use.
    max_threads: int
class Tick(NamedTuple):
    """A unit of progress on one partition: an interval of offsets together
    with the matching interval of timestamps."""

    partition: int
    offsets: Interval[int]
    timestamps: Interval[float]

    def time_shift(self, delta: float) -> Tick:
        """
        Return a new ``Tick`` identical to this one except that both bounds
        of its timestamp interval are shifted by ``delta``.
        """
        shifted = Interval(
            self.timestamps.lower + delta,
            self.timestamps.upper + delta,
        )
        return self._replace(timestamps=shifted)
def fnv_1a(b: bytes) -> int:
    """Return the 32-bit FNV-1a hash of *b*.

    Pure-Python implementation of the standard Fowler-Noll-Vo 1a variant:
    xor each octet into the accumulator, then multiply by the FNV prime,
    truncating to 32 bits after every step.
    """
    FNV_32_PRIME = 0x01000193  # 16777619
    FNV_32_OFFSET_BASIS = 0x811C9DC5  # 2166136261

    digest = FNV_32_OFFSET_BASIS
    for octet in b:
        digest = ((digest ^ octet) * FNV_32_PRIME) & 0xFFFFFFFF  # keep 32 bits
    return digest
iterable: 10 | chunk.append(value) 11 | if len(chunk) == size: 12 | yield chunk 13 | chunk = [] 14 | 15 | if chunk: 16 | yield chunk 17 | -------------------------------------------------------------------------------- /snuba/utils/metrics/__init__.py: -------------------------------------------------------------------------------- 1 | from snuba.utils.metrics.backends.abstract import MetricsBackend 2 | 3 | __all__ = ["MetricsBackend"] 4 | -------------------------------------------------------------------------------- /snuba/utils/metrics/backends/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/utils/metrics/backends/__init__.py -------------------------------------------------------------------------------- /snuba/utils/metrics/types.py: -------------------------------------------------------------------------------- 1 | from typing import Mapping 2 | 3 | Tags = Mapping[str, str] 4 | -------------------------------------------------------------------------------- /snuba/utils/streams/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/utils/streams/__init__.py -------------------------------------------------------------------------------- /snuba/utils/streams/metrics_adapter.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union 2 | 3 | from snuba.utils.metrics import MetricsBackend 4 | from snuba.utils.metrics.types import Tags 5 | 6 | 7 | class StreamMetricsAdapter: 8 | def __init__(self, metrics: MetricsBackend) -> None: 9 | self.__wrapper = metrics 10 | 11 | def increment( 12 | self, name: str, value: Union[int, float] = 1, tags: Optional[Tags] = None 13 | ) -> None: 14 | self.__wrapper.increment(name, value, 
class RPCRequestException(Exception):
    """Base exception for RPC request failures; carries an HTTP-style
    status code alongside the message."""

    status_code: int

    def __init__(self, status_code: int, message: str):
        super().__init__(message)
        self.status_code = status_code


class BadSnubaRPCRequestException(RPCRequestException):
    """Raised for malformed RPC requests; always reported with status 400."""

    def __init__(self, message: str):
        super().__init__(400, message)
Union[RPCRequestException, QueryException] 23 | ) -> ErrorProto: 24 | if isinstance(exc, RPCRequestException): 25 | return ErrorProto(code=exc.status_code, message=str(exc)) 26 | 27 | inferred_status = 500 28 | if exc.exception_type == "RateLimitExceeded": 29 | inferred_status = 429 30 | 31 | return ErrorProto(code=inferred_status, message=str(exc)) 32 | -------------------------------------------------------------------------------- /snuba/web/rpc/v1/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/web/rpc/v1/__init__.py -------------------------------------------------------------------------------- /snuba/web/rpc/v1/resolvers/R_eap_items/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/web/rpc/v1/resolvers/R_eap_items/common/__init__.py -------------------------------------------------------------------------------- /snuba/web/rpc/v1/resolvers/R_uptime_checks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/web/rpc/v1/resolvers/R_uptime_checks/__init__.py -------------------------------------------------------------------------------- /snuba/web/rpc/v1/resolvers/R_uptime_checks/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/snuba/web/rpc/v1/resolvers/R_uptime_checks/common/__init__.py -------------------------------------------------------------------------------- /snuba/web/rpc/v1/resolvers/common/__init__.py: -------------------------------------------------------------------------------- 
from snuba.core.initialize import initialize_snuba
from snuba.environment import setup_logging, setup_sentry

# WSGI entry-point module. Logging, Sentry and Snuba initialization run as
# module-level side effects BEFORE the application import below — keep this
# ordering; the import is deliberately placed after the setup calls.
setup_logging()
setup_sentry()
initialize_snuba()

from snuba.web.views import application  # noqa
def test_get_local_and_distributed_nodes() -> None:
    """Local and distributed node discovery report different hosts for the
    ``errors`` storage in the distributed test topology."""
    errors = StorageKey("errors")

    local_nodes = sorted(_get_local_nodes(errors), key=lambda node: node["host"])
    assert local_nodes == [
        {"host": "clickhouse-02", "port": 9000},
        {"host": "clickhouse-03", "port": 9000},
    ]

    assert _get_dist_nodes(errors) == [{"host": "clickhouse-query", "port": 9000}]
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/test_initialization/__init__.py -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/tests/__init__.py -------------------------------------------------------------------------------- /tests/admin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/tests/admin/__init__.py -------------------------------------------------------------------------------- /tests/admin/clickhouse/test_nodes.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | 5 | from snuba.admin.clickhouse.nodes import _get_local_nodes 6 | from snuba.clusters.cluster import _get_storage_set_cluster_map 7 | from snuba.clusters.storage_sets import StorageSetKey 8 | from snuba.datasets.storages.storage_key import StorageKey 9 | 10 | _OG_CLUSTER_MAP = _get_storage_set_cluster_map() 11 | 12 | 13 | @mock.patch( 14 | "snuba.clusters.cluster._get_storage_set_cluster_map", 15 | return_value={StorageSetKey.EVENTS: _OG_CLUSTER_MAP[StorageSetKey.EVENTS]}, 16 | ) 17 | @pytest.mark.clickhouse_db 18 | def test_get_local_nodes(map_mock: mock.MagicMock) -> None: 19 | """ 20 | This test is checking that requesting a storage key not in the map doesn't cause any errors. 
def test_scrub() -> None:
    """scrub_row replaces the trailing string field with an empty string."""
    row = (1, 2, 3, "release name")
    assert scrub_row(row) == (1, 2, 3, "")
class BaseApiTest:
    # Base class for API tests: exposes a test client as ``self.app``.
    def setup_method(self, test_method: Callable[..., Any]) -> None:
        # NOTE(review): imports are local, presumably so application setup
        # happens per-test rather than at collection time — keep them here.
        from snuba.datasets.factory import reset_dataset_factory
        from snuba.web.views import application

        # Guard against accidentally running against a non-test app config.
        assert application.testing is True
        application.config["PROPAGATE_EXCEPTIONS"] = False
        self.app = application.test_client()

        reset_dataset_factory()
runner.invoke( 16 | optimize, 17 | [ 18 | "--clickhouse-host", 19 | host, 20 | "--clickhouse-port", 21 | port, 22 | "--parallel", 23 | "2", 24 | "--storage", 25 | "errors", 26 | "--divide-partitions", 27 | "2", 28 | ], 29 | ) 30 | assert result.exit_code == 0, result.output 31 | -------------------------------------------------------------------------------- /tests/clickhouse/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/tests/clickhouse/__init__.py -------------------------------------------------------------------------------- /tests/clickhouse/query_dsl/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/tests/clickhouse/query_dsl/__init__.py -------------------------------------------------------------------------------- /tests/clickhouse/translators/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/tests/clickhouse/translators/__init__.py -------------------------------------------------------------------------------- /tests/clickhouse/translators/snuba/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/snuba/0f7a17cfb16530260789f3ef473442722fea6162/tests/clickhouse/translators/snuba/__init__.py -------------------------------------------------------------------------------- /tests/clickhouse/translators/test_auto_import.py: -------------------------------------------------------------------------------- 1 | def test_auto_import() -> None: 2 | from snuba.clickhouse.translators.snuba.allowed import FunctionCallMapper 3 | 4 | assert 
def test_storage_set_combination() -> None:
    """EVENTS and SESSIONS cannot be combined (they live in separate storage sets)."""
    combination_allowed = is_valid_storage_set_combination(
        StorageSetKey.EVENTS, StorageSetKey.SESSIONS
    )
    assert combination_allowed is False
# Test fixture: an entity definition that declares no custom validators
# (validators: []), used to exercise entity-config loading.
version: v1
kind: entity
name: generic_metrics_sets

# Logical column schema exposed by the entity.
schema:
  [
    { name: org_id, type: UInt, args: { size: 64 } },
    { name: project_id, type: UInt, args: { size: 64 } },
    { name: metric_id, type: UInt, args: { size: 64 } },
    { name: timestamp, type: DateTime },
    { name: bucketed_time, type: DateTime },
  ]

# Readable storage plus a writable raw storage; the selector always routes
# queries to generic_metrics_sets.
storages:
  - storage: generic_metrics_sets
  - storage: generic_metrics_sets_raw
    is_writable: true
storage_selector:
  selector: SimpleQueryStorageSelector
  args:
    storage: generic_metrics_sets
query_processors: []
validators: []
required_time_column: timestamp
partition_key_column_name: org_id
def test_entity_key() -> None:
    """Entity keys are registered at factory init; unknown keys raise AttributeError."""
    initialize_entity_factory()

    # Looking up an entity that was never registered must fail loudly.
    with pytest.raises(AttributeError):
        EntityKey.NON_EXISTENT_ENTITY

    # A sample of well-known entities must be present in the registry.
    expected_registrations = {
        "GENERIC_METRICS_DISTRIBUTIONS": "generic_metrics_distributions",
        "GENERIC_METRICS_SETS": "generic_metrics_sets",
        "TRANSACTIONS": "transactions",
        "SEARCH_ISSUES": "search_issues",
    }
    for attribute_name, entity_name in expected_registrations.items():
        assert REGISTERED_ENTITY_KEYS[attribute_name] == entity_name
@pytest.mark.parametrize("dataset_name", DATASETS)
def test_dataset_load(dataset_name: str) -> None:
    """
    Tests that every declared dataset can be fully loaded, including all of
    its entities and storages (and that each storage's cluster resolves).
    """

    dataset = get_dataset(dataset_name)
    for entity in dataset.get_all_entities():
        for storage in entity.get_all_storages():
            # Resolving the cluster raises if the storage set is misconfigured.
            get_cluster(storage.get_storage_set_key())
@pytest.mark.redis_db
def test_record_job_start_time_correctly() -> None:
    """run_job must persist the (mocked) UTC start time under the job's Redis key."""
    frozen_timestamp = "2024-10-23T01:12:23.456789"

    with patch("snuba.manual_jobs.redis.datetime") as mock_datetime:
        # Freeze datetime.utcnow().isoformat() so the stored value is predictable.
        mock_datetime.utcnow.return_value.isoformat.return_value = frozen_timestamp
        run_job(job_spec)

    stored_value = _redis_client.get(name=_build_start_time_key(JOB_ID))
    assert stored_value.decode() == frozen_timestamp
def test_policies_are_pickleable() -> None:
    """
    Allocation policies will eventually live on the Table object that queries
    run against, and the query object must be deepcopy-able. Deepcopy requires
    every member to be pickleable, so every policy defined on any storage must
    survive pickle.dumps.
    """
    for storage_key in get_all_storage_keys():
        policies = get_storage(storage_key).get_allocation_policies()
        # pickle.dumps raises if any policy is not pickleable.
        pickle.dumps(policies)
def test_function_syntax() -> None:
    """``f.<name>(args, alias=...)`` builds the equivalent FunctionCall AST node."""
    expected = FunctionCall(
        "eq", "equals", parameters=(Literal(None, 1), Literal(None, 1))
    )
    assert f.equals(1, 1, alias="eq") == expected
def test_interval_validation() -> None:
    """Intervals require lower <= upper; violations raise InvalidRangeError."""
    # Degenerate and ordinary valid ranges construct without error.
    Interval(1, 1)
    Interval(1, 10)

    # A reversed range is rejected and the error carries both offending bounds.
    with pytest.raises(InvalidRangeError) as exc_info:
        Interval(10, 1)
    assert exc_info.value.lower == 10
    assert exc_info.value.upper == 1

    # A missing upper bound is rejected as well.
    with pytest.raises(InvalidRangeError):
        Interval(1, None)  # type: ignore
def test_retry_init() -> None:
    """_retry re-invokes the wrapped callable until it stops raising (within the limit)."""
    attempts_remaining = 2

    @redis._retry(2)
    def flaky() -> int:
        # Fails on the first call, succeeds on the second.
        nonlocal attempts_remaining
        attempts_remaining -= 1
        if attempts_remaining > 0:
            raise RedisClusterException(
                "All slots are not covered after query all startup_nodes."
            )
        return 1

    assert flaky() == 1
def test_escape_alias() -> None:
    """Aliases are backtick-quoted whenever they contain punctuation."""
    cases = [
        (None, None),
        ("", ""),
        ("foo", "foo"),
        ("foo.bar", "`foo.bar`"),
        ("foo:bar", "`foo:bar`"),
    ]
    for raw_alias, expected in cases:
        assert escape_alias(raw_alias) == expected
def test_chunked() -> None:
    """chunked splits an iterable into lists of at most the requested size."""
    assert list(chunked([], 3)) == []
    assert list(chunked(range(3), 3)) == [[0, 1, 2]]
    # A trailing partial chunk is kept, not dropped.
    assert list(chunked(range(10), 3)) == [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
class SomeBase(metaclass=RegisteredClass):
    """Registry root used to verify that submodules are NOT imported automatically."""

    @classmethod
    def config_key(cls) -> str:
        # Subclasses register themselves under their own class name.
        return cls.__name__
@pytest.mark.redis_db
def test_cache_partition() -> None:
    """Readers with the same cache-partition id must share one cache object."""
    pool = ClickhousePool("127.0.0.1", 9000, "", "", "")

    # Two readers with the default (None) partition share a single cache.
    default_cache_a = _get_cache_partition(NativeDriverReader(None, pool, None))
    default_cache_b = _get_cache_partition(NativeDriverReader(None, pool, None))
    assert default_cache_a is default_cache_b

    # Two readers naming the same partition also share a cache...
    named_cache_a = _get_cache_partition(NativeDriverReader("non_default", pool, None))
    named_cache_b = _get_cache_partition(NativeDriverReader("non_default", pool, None))
    assert named_cache_a is named_cache_b

    # ...but that cache is distinct from the default partition's cache.
    assert default_cache_a is not named_cache_a
def test_printable() -> None:
    """QueryException is repr-able and round-trips through to_dict/from_dict via JSON."""
    exc = QueryException.from_args(
        "generic exception type",
        "the cause was coming from inside the house!",
        {"stats": {}, "sql": "fdsfsdaf", "experiments": {}},
    )
    assert isinstance(repr(exc), str)
    assert isinstance(exc.extra, dict)

    # Serialize to JSON and back, then rebuild: the type must be preserved.
    round_tripped = json.loads(json.dumps(exc.to_dict()))
    assert isinstance(SerializableException.from_dict(round_tripped), QueryException)
def test_encode_decode() -> None:
    """Encoding then decoding a Result payload must be the identity."""
    codec = ResultCacheCodec()
    payload: Result = {
        "meta": [{"name": "foo", "type": "bar"}],
        "data": [{"foo": "bar"}],
        "totals": {"foo": 1},
    }
    assert codec.decode(codec.encode(payload)) == payload
{JSON.stringify(value, null, 2)}