├── cmd └── oteltestbedcol │ └── .gitkeep ├── internal ├── tools │ ├── Makefile │ └── tools.go ├── mockeec │ ├── Makefile │ ├── go.mod │ ├── README.md │ ├── config.yaml │ └── main.go ├── testbed │ ├── .gitignore │ ├── Makefile │ ├── integration │ │ ├── kafka │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector-kafka │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ └── deployment.yaml │ │ │ │ ├── collector-exporter │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── service.yaml │ │ │ │ └── deployment.yaml │ │ │ │ ├── collector-receiver │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ └── deployment.yaml │ │ │ │ ├── testobjects │ │ │ │ ├── kafka-service.yaml │ │ │ │ ├── kafka-deployment.yaml │ │ │ │ └── telemetrygen.yaml │ │ │ │ └── config-overlays │ │ │ │ ├── kafkametrics-receiver-local.yaml │ │ │ │ ├── kafkametrics-receiver-env.yaml │ │ │ │ ├── receiver-env.yaml │ │ │ │ └── receiver-local.yaml │ │ ├── netflow │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── service.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── netflow │ │ │ │ └── job.yaml │ │ ├── statsd │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── service.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── statsd │ │ │ │ └── job.yaml │ │ ├── zipkin │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── service.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── zipkin │ │ │ │ └── deployment.yaml │ │ ├── k8scluster │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ └── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ └── deployment.yaml │ │ ├── k8sobjects │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── testobjects │ │ │ │ └── deployment.yaml │ │ ├── k8spodlogs │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ └── clusterrole.yaml │ │ │ │ └── testobjects │ │ │ │ └── deployment.yaml │ │ ├── prometheus │ │ │ ├── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ │ ├── serviceaccount.yaml │ │ │ │ │ ├── configmap.yaml │ │ │ │ │ ├── service.yaml │ │ │ │ │ └── deployment.yaml │ │ │ │ └── prometheus │ │ │ │ │ └── install.sh │ │ │ └── README.md │ │ ├── redaction │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── telemetrygen │ │ │ │ ├── job.yaml │ │ │ │ ├── daemonset.yaml │ │ │ │ ├── deployment.yaml │ │ │ │ └── statefulset.yaml │ │ ├── combinedload │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── metrics-server │ │ │ │ ├── serviceaccount.yaml │ 
│ │ │ ├── service.yaml │ │ │ │ ├── apiservice.yaml │ │ │ │ ├── clusterrolebinding-metricserver.yaml │ │ │ │ ├── clusterrole-metricserver.yaml │ │ │ │ ├── clusterrolebinding-authdelegator.yaml │ │ │ │ ├── rolebinding-authreader.yaml │ │ │ │ └── clusterrole-aggregated-metrics.yaml │ │ ├── hostmetrics │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ └── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ └── daemonset.yaml │ │ ├── k8scombined │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ └── clusterrole.yaml │ │ │ │ └── testobjects │ │ │ │ └── deployment.yaml │ │ ├── k8senrichment │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── telemetrygen │ │ │ │ ├── job.yaml │ │ │ │ ├── daemonset.yaml │ │ │ │ ├── deployment.yaml │ │ │ │ └── statefulset.yaml │ │ ├── kubeletstats │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ └── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ └── daemonset.yaml │ │ ├── loadbalancing │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── clusterrole.yaml │ │ │ │ └── deployment.yaml │ │ │ │ └── telemetrygen │ │ │ │ ├── telemetrygen-deployment-logs.yaml │ │ │ │ ├── telemetrygen-deployment-traces.yaml │ │ │ │ ├── telemetrygen-deployment-metrics1.yaml │ │ │ │ └── telemetrygen-deployment-metrics2.yaml │ │ ├── resource-detection │ │ │ └── testdata │ │ │ │ ├── namespace.yaml │ │ │ │ ├── collector │ │ │ │ ├── serviceaccount.yaml │ │ │ │ ├── configmap.yaml │ │ │ │ ├── clusterrolebinding.yaml │ │ │ │ ├── service.yaml │ │ │ │ ├── enrichment-file.yaml │ │ │ │ └── clusterrole.yaml │ │ │ │ └── telemetrygen │ │ │ │ ├── job.yaml │ │ │ │ ├── daemonset.yaml │ │ │ │ ├── deployment.yaml │ │ │ │ └── statefulset.yaml │ │ └── self-monitoring │ │ │ └── testdata │ │ │ ├── namespace │ │ │ └── namespace.yaml │ │ │ ├── collector │ │ │ ├── serviceaccount.yaml │ │ │ ├── configmap.yaml │ │ │ ├── clusterrolebinding.yaml │ │ │ ├── clusterrole.yaml │ │ │ └── service.yaml │ │ │ └── telemetrygen │ │ │ └── deployment.yaml │ ├── load │ │ ├── Makefile │ │ ├── tests │ │ │ └── package_test.go │ │ └── runtests.sh │ ├── linux-services │ │ ├── Dockerfile.test.rpm │ │ ├── config.test.yaml │ │ ├── Dockerfile.test.deb │ │ ├── README.md │ │ └── common.sh │ ├── testdata │ │ └── config-smoke.yaml │ └── scripts │ │ └── install_dt_collector_helm.sh ├── testcommon │ ├── Makefile │ ├── testutil │ │ └── testutil_test.go │ └── k8stest │ │ └── metrics_server.go ├── data-ingest-cli │ ├── Makefile │ ├── commands │ │ ├── syslog │ │ │ └── testdata │ │ │ │ ├── RFC3164 │ │ │ │ ├── rfc3164-skip-pri-absent │ │ │ │ ├── rfc6587-octet-counting-preserve-space │ │ │ │ ├── RFC5424 │ │ │ │ ├── rfc5424-skip-pri-present │ │ │ │ ├── rfc6587-octet-counting │ │ │ │ ├── rfc6587-non-transparent-framing │ │ │ │ └── README.md │ │ ├── fluent │ │ │ └── testdata │ │ │ │ └── msg.json │ │ ├── filelog │ │ │ ├── testdata │ │ │ │ ├── file_simple.log │ 
│ │ │ ├── file_simple.json │ │ │ │ └── file_nested_attr.json │ │ │ └── config.yaml │ │ ├── otlpjson │ │ │ └── testdata │ │ │ │ ├── metrics.json │ │ │ │ ├── logs.json │ │ │ │ └── traces.json │ │ ├── statsd │ │ │ └── testdata │ │ │ │ └── metrics.txt │ │ ├── netflow │ │ │ └── config.yaml │ │ ├── jaeger │ │ │ └── config.yaml │ │ ├── prometheus │ │ │ └── config.yaml │ │ ├── zipkin │ │ │ └── testdata │ │ │ │ ├── sample_err_v2.json │ │ │ │ ├── sample_v1.json │ │ │ │ └── sample_err_v1.json │ │ └── receive │ │ │ └── command.go │ ├── sender │ │ ├── fluent │ │ │ └── sender.go │ │ └── zipkin │ │ │ └── sender.go │ └── receiver │ │ └── common.go ├── testing-setups │ └── kafka │ │ ├── Makefile │ │ ├── down.sh │ │ ├── kind-config.yaml │ │ ├── send-data.sh │ │ ├── up.sh │ │ ├── README.md │ │ └── otelcol-values.yaml ├── confmap │ └── provider │ │ └── eecprovider │ │ ├── Makefile │ │ ├── package_test.go │ │ ├── testdata │ │ ├── otel-config.yaml │ │ └── otel-config-updated.yaml │ │ ├── yaml.go │ │ ├── go.mod │ │ ├── yaml_test.go │ │ └── watcher.go └── release │ ├── dynatrace-otel-collector.conf │ ├── dynatrace-otel-collector.service │ ├── preinstall.sh │ ├── preremove.sh │ └── postinstall.sh ├── .github ├── CODEOWNERS ├── actions │ └── create-cluster │ │ ├── single-node.yaml │ │ └── multi-node.yaml └── workflows │ ├── check-links.yml │ ├── ci.yaml │ └── security.yml ├── docs ├── dashboards │ ├── img │ │ └── host-dashboard_1.png │ └── README.md └── hotfix.md ├── .gitignore ├── THIRD_PARTY_NOTICES.txt ├── config_examples ├── jaeger.yaml ├── zipkin.yaml ├── netflow.yaml ├── k8scluster.yaml ├── resource-detection.yaml ├── kafka-receiver.yaml ├── kafka-metrics-receiver.yaml ├── redaction_iban.yaml ├── redaction_api_token.yaml ├── kafka-exporter.yaml ├── prometheus.yaml ├── statsd.yaml ├── redaction.yaml ├── masking_api_token.yaml ├── redaction_creditcards.yaml ├── masking_iban.yaml ├── tail_sampling.yaml ├── masking_logbody.yaml ├── syslog.yaml ├── split_histogram.yaml ├── collector-helm-values.yaml ├── masking_creditcards.yaml ├── filtering_user_data.yaml ├── self-monitoring.yaml ├── load-balancing.yaml ├── host-metrics.yaml ├── k8sobjects.yaml ├── kubeletstats.yaml ├── k8s_attribute_redaction.yaml └── self-monitoring-check-metrics.yaml ├── lychee.toml ├── Dockerfile ├── .chloggen ├── TEMPLATE.yaml ├── config.yaml └── summary.tmpl └── renovate.json /cmd/oteltestbedcol/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /internal/tools/Makefile: -------------------------------------------------------------------------------- 1 | include ../../Makefile.Common 2 | -------------------------------------------------------------------------------- /internal/mockeec/Makefile: -------------------------------------------------------------------------------- 1 | include ../../Makefile.Common 2 | -------------------------------------------------------------------------------- /internal/testbed/.gitignore: -------------------------------------------------------------------------------- 1 | *.log 2 | 3 | **/results/** 4 | -------------------------------------------------------------------------------- /internal/testbed/Makefile: -------------------------------------------------------------------------------- 1 | include ../../Makefile.Common 2 | -------------------------------------------------------------------------------- /internal/testcommon/Makefile: 
-------------------------------------------------------------------------------- 1 | include ../../Makefile.Common 2 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/Makefile: -------------------------------------------------------------------------------- 1 | include ../../Makefile.Common 2 | -------------------------------------------------------------------------------- /internal/testing-setups/kafka/Makefile: -------------------------------------------------------------------------------- 1 | include ../../../Makefile.Common 2 | -------------------------------------------------------------------------------- /internal/confmap/provider/eecprovider/Makefile: -------------------------------------------------------------------------------- 1 | include ../../../../Makefile.Common 2 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/syslog/testdata/RFC3164: -------------------------------------------------------------------------------- 1 | <34>Feb 4 11:14:27 1.2.3.4 apache_server: test message -------------------------------------------------------------------------------- /internal/mockeec/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/Dynatrace/dynatrace-otel-collector/internal/mockeec 2 | 3 | go 1.25.5 4 | -------------------------------------------------------------------------------- /internal/testing-setups/kafka/down.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euo pipefail 4 | 5 | kind delete cluster 6 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/syslog/testdata/rfc3164-skip-pri-absent: -------------------------------------------------------------------------------- 1 | Feb 4 11:14:27 1.2.3.4 apache_server: test message -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ekafka 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/netflow/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2enetflow 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/statsd/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2estatsd 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/zipkin/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ezipkin 5 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/en/articles/about-code-owners for syntax and more. 
2 | 3 | * @Dynatrace/team-ospo-otel 4 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scluster/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ek8scluster 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8sobjects/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ek8sobjects 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8spodlogs/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ek8spodlogs 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/prometheus/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2eprometheus 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/redaction/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2eredaction 5 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/fluent/testdata/msg.json: -------------------------------------------------------------------------------- 1 | { 2 | "tag": "my-tag", 3 | "message": { 4 | "content": "a message" 5 | } 6 | } -------------------------------------------------------------------------------- /internal/testbed/integration/combinedload/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: load-combined 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/hostmetrics/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ehostmetrics 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scombined/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ek8scombined 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8senrichment/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ek8senrichment 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/kubeletstats/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2ekubeletstats 5 | -------------------------------------------------------------------------------- 
/internal/testbed/integration/loadbalancing/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2eloadbalancing 5 | -------------------------------------------------------------------------------- /internal/testbed/load/Makefile: -------------------------------------------------------------------------------- 1 | include ../../../Makefile.Common 2 | 3 | .PHONY: run-tests 4 | run-tests: 5 | GOJUNIT=$(GOJUNIT) ./runtests.sh 6 | -------------------------------------------------------------------------------- /docs/dashboards/img/host-dashboard_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dynatrace/dynatrace-otel-collector/HEAD/docs/dashboards/img/host-dashboard_1.png -------------------------------------------------------------------------------- /.github/actions/create-cluster/single-node.yaml: -------------------------------------------------------------------------------- 1 | kind: Cluster 2 | apiVersion: kind.x-k8s.io/v1alpha4 3 | name: kind 4 | nodes: 5 | - role: control-plane 6 | -------------------------------------------------------------------------------- /internal/testbed/integration/resource-detection/testdata/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: e2eresourcedetection 5 | -------------------------------------------------------------------------------- /internal/testbed/integration/self-monitoring/testdata/namespace/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: {{ .Namespace }} 5 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/syslog/testdata/rfc6587-octet-counting-preserve-space: -------------------------------------------------------------------------------- 1 | 77 <86>1 2015-08-05T21:58:59.693Z 192.168.2.132 inactive - - - partition is p2 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | bin/ 2 | build/ 3 | dist/ 4 | lib/ 5 | .tools 6 | .idea 7 | cmd/oteltestbedcol/* 8 | !cmd/oteltestbedcol/.gitkeep 9 | release-notes.md 10 | .DS_Store 11 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/filelog/testdata/file_simple.log: -------------------------------------------------------------------------------- 1 | 1971-01-01 INFO Something routine 2 | 1972-01-01 ERROR Some error occurred! 3 | 1973-01-01 DEBUG Some details... 
4 | -------------------------------------------------------------------------------- /.github/actions/create-cluster/multi-node.yaml: -------------------------------------------------------------------------------- 1 | kind: Cluster 2 | apiVersion: kind.x-k8s.io/v1alpha4 3 | name: kind 4 | nodes: 5 | - role: control-plane 6 | - role: worker 7 | - role: worker 8 | -------------------------------------------------------------------------------- /internal/testbed/linux-services/Dockerfile.test.rpm: -------------------------------------------------------------------------------- 1 | # A rockylinux image with systemd enabled 2 | FROM rockylinux:9.3 3 | 4 | ENV container docker 5 | 6 | RUN dnf install -y initscripts 7 | 8 | CMD ["/usr/sbin/init"] 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/collector-kafka/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ekafka 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/netflow/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2enetflow 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/statsd/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2estatsd 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/zipkin/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ezipkin 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scluster/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ek8scluster 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8sobjects/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ek8sobjects 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8spodlogs/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ek8spodlogs 8 | -------------------------------------------------------------------------------- 
/internal/testbed/integration/kafka/testdata/collector-exporter/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ekafka 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/collector-receiver/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ekafka 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/prometheus/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2eprometheus 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/redaction/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2eredaction 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/statsd/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2estatsd 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/zipkin/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ezipkin 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/combinedload/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: load-combined 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/hostmetrics/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ehostmetrics 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scombined/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ek8scombined 8 | -------------------------------------------------------------------------------- 
/internal/testbed/integration/k8senrichment/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ek8senrichment 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/collector-kafka/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ekafka 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/kubeletstats/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2ekubeletstats 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/loadbalancing/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2eloadbalancing 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/netflow/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2enetflow 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/redaction/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2eredaction 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/hostmetrics/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ehostmetrics 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scluster/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ek8scluster 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scombined/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ek8scombined 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | 
-------------------------------------------------------------------------------- /internal/testbed/integration/k8sobjects/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ek8sobjects 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8spodlogs/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ek8spodlogs 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/collector-exporter/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ekafka 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/collector-receiver/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ekafka 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/prometheus/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2eprometheus 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/self-monitoring/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: {{ .Namespace }} 8 | -------------------------------------------------------------------------------- /internal/testing-setups/kafka/kind-config.yaml: -------------------------------------------------------------------------------- 1 | kind: Cluster 2 | apiVersion: kind.x-k8s.io/v1alpha4 3 | nodes: 4 | - role: control-plane 5 | extraPortMappings: 6 | - containerPort: 30000 7 | hostPort: 30000 8 | protocol: TCP 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/combinedload/testdata/metrics-server/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | k8s-app: metrics-server 6 | name: metrics-server 7 | namespace: kube-system 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8senrichment/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: 
e2ek8senrichment 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/kubeletstats/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2ekubeletstats 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/loadbalancing/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2eloadbalancing 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testbed/integration/resource-detection/testdata/collector/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app: {{ .Name }} 6 | name: {{ .Name }} 7 | namespace: e2eresourcedetection 8 | -------------------------------------------------------------------------------- /internal/testbed/integration/self-monitoring/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: {{ .Namespace }} 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /internal/testing-setups/kafka/send-data.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euo pipefail 4 | 5 | go tool telemetrygen metrics --rate 1000 --interval 1s --duration 6000s --metrics 1000 --otlp-insecure --otlp-http --otlp-endpoint localhost:30000 6 | -------------------------------------------------------------------------------- /internal/testbed/integration/resource-detection/testdata/collector/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ .Name }}-config 5 | namespace: e2eresourcedetection 6 | data: 7 | relay: | 8 | {{ .CollectorConfig }} 9 | -------------------------------------------------------------------------------- /THIRD_PARTY_NOTICES.txt: -------------------------------------------------------------------------------- 1 | ### github.com/hashicorp/go-uuid v1.0.3 2 | Upstream repository: https://github.com/hashicorp/go-uuid 3 | 4 | Copyright © 2015-2022 HashiCorp, Inc. 5 | 6 | License: Mozilla Public License 2.0 (MPL-2.0) — see LICENSES/MPL-2.0.txt 7 | -------------------------------------------------------------------------------- /internal/tools/tools.go: -------------------------------------------------------------------------------- 1 | package tools 2 | 3 | import ( 4 | _ "go.opentelemetry.io/build-tools/chloggen" 5 | _ "go.opentelemetry.io/collector/cmd/builder" 6 | _ "github.com/sigstore/cosign/v3/cmd/cosign" 7 | _ "github.com/jstemmer/go-junit-report/v2" 8 | ) 9 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/syslog/testdata/RFC5424:
-------------------------------------------------------------------------------- 1 | <86>1 2015-08-05T21:58:59.693Z 192.168.2.132 SecureAuth0 23108 ID52020 [SecureAuth@27389 UserHostAddress="192.168.2.132" Realm="SecureAuth0" UserID="Tester2" PEN="27389"] Found the user for retrieving user's profile -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/otlpjson/testdata/metrics.json: -------------------------------------------------------------------------------- 1 | {"resourceMetrics":[{"resource":{},"scopeMetrics":[{"scope":{},"metrics":[{"name":"gen","gauge":{"dataPoints":[{"timeUnixNano":"1737962958727640000","asInt":"0"}]}}]}],"schemaUrl":"https://opentelemetry.io/schemas/1.13.0"}]} 2 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/syslog/testdata/rfc5424-skip-pri-present: -------------------------------------------------------------------------------- 1 | <123>1 2015-08-05T21:58:59.693Z 192.168.2.132 SecureAuth0 23108 ID52020 [SecureAuth@27389 UserHostAddress="192.168.2.132" Realm="SecureAuth0" UserID="Tester2" PEN="27389"] Found the user for retrieving user's profile -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/syslog/testdata/rfc6587-octet-counting: -------------------------------------------------------------------------------- 1 | 215 <86>1 2015-08-05T21:58:59.693Z 192.168.2.132 SecureAuth0 23108 ID52020 [SecureAuth@27389 UserHostAddress="192.168.2.132" Realm="SecureAuth0" UserID="Tester2" PEN="27389"] Found the user for retrieving user's profile -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/syslog/testdata/rfc6587-non-transparent-framing: -------------------------------------------------------------------------------- 1 | <86>1 2015-08-05T21:58:59.693Z 192.168.2.132 SecureAuth0 23108 ID52020 [SecureAuth@27389 UserHostAddress="192.168.2.132" Realm="SecureAuth0" UserID="Tester2" PEN="27389"] Found the user for retrieving user's profile -------------------------------------------------------------------------------- /internal/release/dynatrace-otel-collector.conf: -------------------------------------------------------------------------------- 1 | # Systemd environment file for the otelcol service 2 | 3 | # Command-line options for the otelcol service. 4 | # Run `/usr/bin/dynatrace-otel-collector --help` to see all available options. 
5 | OTELCOL_OPTIONS="--config=/etc/dynatrace-otel-collector/config.yaml" 6 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/testobjects/kafka-service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: redpanda 5 | namespace: e2ekafka 6 | spec: 7 | ports: 8 | - name: kafka 9 | port: 9092 10 | targetPort: 9092 11 | selector: 12 | app: redpanda 13 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/statsd/testdata/metrics.txt: -------------------------------------------------------------------------------- 1 | test.metric:42|c|#myKey:myVal 2 | test.metric:42|c|#myKey:myVal 3 | test.metric:42|c|#myKey:myVal 4 | test.metric:42|c|#myKey:myVal 5 | statsdTestMetric1:3000|c|#mykey:myvalue 6 | statsdTestMetric1:4000|c|#mykey:myvalue 7 | test.metric:42|g|#myKey:myVal 8 | -------------------------------------------------------------------------------- /internal/confmap/provider/eecprovider/package_test.go: -------------------------------------------------------------------------------- 1 | // Copyright The OpenTelemetry Authors 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | package eecprovider 5 | 6 | import ( 7 | "testing" 8 | 9 | "go.uber.org/goleak" 10 | ) 11 | 12 | func TestMain(m *testing.M) { 13 | goleak.VerifyTestMain(m) 14 | } 15 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/filelog/testdata/file_simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "timestamp": "2025-02-21T10:40:28Z", 3 | "severity": "INFO", 4 | "body": "User login successful", 5 | "attributes": { 6 | "user_id": "12345", 7 | "ip_address": "192.168.1.1", 8 | "session_id": "abcde12345" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /internal/testbed/linux-services/config.test.yaml: -------------------------------------------------------------------------------- 1 | receivers: 2 | otlp: 3 | protocols: 4 | http: 5 | endpoint: 0.0.0.0:4318 6 | 7 | exporters: 8 | debug: 9 | verbosity: basic 10 | 11 | service: 12 | pipelines: 13 | metrics: 14 | receivers: [otlp] 15 | processors: [] 16 | exporters: [debug] 17 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/otlpjson/testdata/logs.json: -------------------------------------------------------------------------------- 1 | {"resourceLogs":[{"resource":{},"scopeLogs":[{"scope":{},"logRecords":[{"timeUnixNano":"1737962987660456000","severityNumber":9,"severityText":"Info","body":{"stringValue":"the message"},"attributes":[{"key":"app","value":{"stringValue":"server"}}],"droppedAttributesCount":1,"traceId":"","spanId":""}]}]}]} 2 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/config-overlays/kafkametrics-receiver-local.yaml: -------------------------------------------------------------------------------- 1 | exporters: 2 | otlphttp/second: 3 | endpoint: http://%s:4322 4 | 5 | service: 6 | extensions: [health_check] 7 | pipelines: 8 | metrics: 9 | receivers: [kafkametrics] 10 | processors: [cumulativetodelta] 11 | exporters: [otlphttp/second] 12 | -------------------------------------------------------------------------------- 
/config_examples/jaeger.yaml: -------------------------------------------------------------------------------- 1 | receivers: 2 | jaeger: 3 | protocols: 4 | grpc: 5 | endpoint: 0.0.0.0:14250 6 | 7 | exporters: 8 | otlphttp: 9 | endpoint: ${env:DT_ENDPOINT} 10 | headers: 11 | Authorization: "Api-Token ${env:API_TOKEN}" 12 | 13 | service: 14 | pipelines: 15 | traces: 16 | receivers: [jaeger] 17 | exporters: [otlphttp] 18 | -------------------------------------------------------------------------------- /lychee.toml: -------------------------------------------------------------------------------- 1 | include-fragments = true 2 | 3 | accept = ["200", "429"] 4 | 5 | exclude = [ 6 | "http(s)?://localhost", 7 | "http(s)?://example.com", 8 | "https://github.com/open-telemetry/opentelemetry-collector(-contrib)?/releases/tag/vXX.YY.ZZ", 9 | "http(s)?://\\d+\\.\\d+\\.\\d+\\.\\d+", 10 | ] 11 | 12 | # better to be safe and avoid failures 13 | max-retries = 6 14 | -------------------------------------------------------------------------------- /internal/testbed/integration/combinedload/testdata/metrics-server/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | labels: 5 | k8s-app: metrics-server 6 | name: metrics-server 7 | namespace: kube-system 8 | spec: 9 | ports: 10 | - name: https 11 | port: 443 12 | protocol: TCP 13 | targetPort: https 14 | selector: 15 | k8s-app: metrics-server 16 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scluster/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: {{ .Name }} 9 | subjects: 10 | - kind: ServiceAccount 11 | name: {{ .Name }} 12 | namespace: e2ek8scluster 13 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8sobjects/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: {{ .Name }} 9 | subjects: 10 | - kind: ServiceAccount 11 | name: {{ .Name }} 12 | namespace: e2ek8sobjects 13 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8spodlogs/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: {{ .Name }} 9 | subjects: 10 | - kind: ServiceAccount 11 | name: {{ .Name }} 12 | namespace: e2ek8spodlogs 13 | -------------------------------------------------------------------------------- /internal/testbed/integration/hostmetrics/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | roleRef: 6 | apiGroup: 
rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: {{ .Name }} 9 | subjects: 10 | - kind: ServiceAccount 11 | name: {{ .Name }} 12 | namespace: e2ehostmetrics 13 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scombined/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: {{ .Name }} 9 | subjects: 10 | - kind: ServiceAccount 11 | name: {{ .Name }} 12 | namespace: e2ek8scombined 13 | -------------------------------------------------------------------------------- /internal/testbed/integration/kubeletstats/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: {{ .Name }} 9 | subjects: 10 | - kind: ServiceAccount 11 | name: {{ .Name }} 12 | namespace: e2ekubeletstats 13 | -------------------------------------------------------------------------------- /internal/testbed/integration/loadbalancing/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: {{ .Name }} 9 | subjects: 10 | - kind: ServiceAccount 11 | name: {{ .Name }} 12 | namespace: e2eloadbalancing 13 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8scombined/testdata/collector/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: otelcolsvc 5 | namespace: e2ek8scombined 6 | spec: 7 | type: ClusterIP 8 | ports: 9 | - name: otlp 10 | port: 4317 11 | targetPort: 4317 12 | protocol: TCP 13 | appProtocol: grpc 14 | selector: 15 | app.kubernetes.io/name: opentelemetry-collector 16 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/collector-exporter/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: otelcolsvc 5 | namespace: e2ekafka 6 | spec: 7 | type: ClusterIP 8 | ports: 9 | - name: otlp 10 | port: 4317 11 | targetPort: 4317 12 | protocol: TCP 13 | appProtocol: grpc 14 | selector: 15 | app.kubernetes.io/name: opentelemetry-collector 16 | -------------------------------------------------------------------------------- /internal/mockeec/README.md: -------------------------------------------------------------------------------- 1 | # Mock EEC server 2 | 3 | ## How to use 4 | 5 | 1. Start the server in this directory: `go run .` 6 | 2. Run the Collector: 7 | 8 | ```text 9 | ./bin/dynatrace-otel-collector --config=eec://localhost:8000/config.yaml#insecure=true&refresh-interval=100ms 10 | ``` 11 | 12 | 3. Update `config.yaml` 13 | 4. See the Collector restart the service. 14 | 5. Shut down the Collector and server. 
15 | -------------------------------------------------------------------------------- /internal/testbed/integration/kafka/testdata/config-overlays/kafkametrics-receiver-env.yaml: -------------------------------------------------------------------------------- 1 | exporters: 2 | otlphttp: 3 | endpoint: ${env:DT_ENDPOINT} 4 | headers: 5 | Authorization: "Api-Token ${env:DT_API_TOKEN}" 6 | 7 | service: 8 | extensions: [health_check] 9 | pipelines: 10 | metrics: 11 | receivers: [kafkametrics] 12 | processors: [cumulativetodelta] 13 | exporters: [otlphttp] 14 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/filelog/testdata/file_nested_attr.json: -------------------------------------------------------------------------------- 1 | { 2 | "timestamp": "2025-02-21T10:43:30Z", 3 | "severity": "DEBUG", 4 | "body": "API request received", 5 | "attributes": { 6 | "endpoint": "/api/v1/resource", 7 | "method": "GET", 8 | "headers": { 9 | "User-Agent": "Mozilla/5.0", 10 | "Accept": "application/json" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /internal/testbed/integration/redaction/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | labels: 6 | app: {{ .Name }} 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: ClusterRole 10 | name: {{ .Name }} 11 | subjects: 12 | - kind: ServiceAccount 13 | name: {{ .Name }} 14 | namespace: e2eredaction 15 | -------------------------------------------------------------------------------- /internal/mockeec/config.yaml: -------------------------------------------------------------------------------- 1 | extensions: 2 | zpages: 3 | endpoint: 0.0.0.0:55679 4 | 5 | receivers: 6 | otlp: 7 | protocols: 8 | http: 9 | 10 | exporters: 11 | debug: 12 | verbosity: detailed 13 | 14 | service: 15 | pipelines: 16 | traces: 17 | receivers: [otlp] 18 | exporters: [debug] 19 | metrics: 20 | receivers: [otlp] 21 | exporters: [debug] 22 | 23 | extensions: [zpages] 24 | -------------------------------------------------------------------------------- /internal/testbed/integration/combinedload/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | labels: 6 | app: {{ .Name }} 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: ClusterRole 10 | name: {{ .Name }} 11 | subjects: 12 | - kind: ServiceAccount 13 | name: {{ .Name }} 14 | namespace: load-combined 15 | -------------------------------------------------------------------------------- /internal/testbed/integration/k8senrichment/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | labels: 6 | app: {{ .Name }} 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: ClusterRole 10 | name: {{ .Name }} 11 | subjects: 12 | - kind: ServiceAccount 13 | name: {{ .Name }} 14 | namespace: e2ek8senrichment 15 | --------------------------------------------------------------------------------
/internal/testbed/integration/self-monitoring/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | labels: 6 | app: {{ .Name }} 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: ClusterRole 10 | name: {{ .Name }} 11 | subjects: 12 | - kind: ServiceAccount 13 | name: {{ .Name }} 14 | namespace: {{ .Namespace }} 15 | -------------------------------------------------------------------------------- /config_examples/zipkin.yaml: -------------------------------------------------------------------------------- 1 | extensions: 2 | health_check: 3 | endpoint: 0.0.0.0:13133 4 | receivers: 5 | zipkin: 6 | endpoint: 0.0.0.0:9411 7 | 8 | exporters: 9 | otlphttp: 10 | endpoint: ${env:DT_ENDPOINT} 11 | headers: 12 | Authorization: "Api-Token ${env:API_TOKEN}" 13 | 14 | service: 15 | extensions: [health_check] 16 | pipelines: 17 | traces: 18 | receivers: [zipkin] 19 | exporters: [otlphttp] 20 | -------------------------------------------------------------------------------- /internal/testbed/integration/resource-detection/testdata/collector/clusterrolebinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: {{ .Name }} 5 | labels: 6 | app: {{ .Name }} 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: ClusterRole 10 | name: {{ .Name }} 11 | subjects: 12 | - kind: ServiceAccount 13 | name: {{ .Name }} 14 | namespace: e2eresourcedetection 15 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.23@sha256:51183f2cfa6320055da30872f211093f9ff1d3cf06f39a0bdb212314c5dc7375 AS certs 2 | RUN apk --update add ca-certificates 3 | 4 | FROM scratch 5 | 6 | USER 10001 7 | 8 | COPY --from=certs /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt 9 | COPY --chmod=755 dynatrace-otel-collector /dynatrace-otel-collector 10 | ENTRYPOINT ["/dynatrace-otel-collector"] 11 | CMD ["--config", "/etc/otelcol/config.yaml"] 12 | -------------------------------------------------------------------------------- /internal/testbed/integration/prometheus/testdata/prometheus/install.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | helm repo add prometheus-community https://prometheus-community.github.io/helm-charts 4 | helm repo update 5 | helm install prometheus-node-exporter prometheus-community/prometheus-node-exporter --namespace e2eprometheus 6 | 7 | # Wait until the node exporter is up and running 8 | kubectl rollout --timeout 120s status daemonset/prometheus-node-exporter -n e2eprometheus 9 | -------------------------------------------------------------------------------- /internal/data-ingest-cli/commands/netflow/config.yaml: -------------------------------------------------------------------------------- 1 | receivers: 2 | netflow: 3 | scheme: netflow 4 | hostname: 0.0.0.0 5 | port: 2055 6 | 7 | exporters: 8 | otlphttp: 9 | endpoint: http://0.0.0.0:4319 10 | tls: 11 | insecure: true 12 | compression: none 13 | 14 | debug: 15 | verbosity: detailed 16 | 17 | service: 18 | pipelines: 19 | logs: 20 | receivers: [netflow] 21 | exporters: [otlphttp, debug] 22 |
--------------------------------------------------------------------------------
/internal/testbed/integration/redaction/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: e2eredaction
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: load-combined
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/k8scluster/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: otelcolsvc
  namespace: e2ek8scluster
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/k8sobjects/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: otelcolsvc
  namespace: e2ek8sobjects
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/k8spodlogs/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: otelcolsvc
  namespace: e2ek8spodlogs
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/kubeletstats/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: otelcolsvc
  namespace: e2ekubeletstats
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/netflow/testdata/netflow/job.yaml:
--------------------------------------------------------------------------------
apiVersion: batch/v1
kind: Job
metadata:
  name: nflow-generator-job
  namespace: e2enetflow
spec:
  template:
    spec:
      containers:
        - name: nflow-generator
          image: networkstatic/nflow-generator
          args: ["-c", "16", "-t", "otelcolsvc.e2enetflow.svc.cluster.local", "-p", "2055"]
      restartPolicy: Never
  backoffLimit: 4

--------------------------------------------------------------------------------
/internal/testbed/integration/prometheus/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: e2eprometheus
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/data-ingest-cli/commands/jaeger/config.yaml:
--------------------------------------------------------------------------------
receivers:
  jaeger:
    protocols:
      thrift_http:
        endpoint: 0.0.0.0:14268

exporters:
  otlphttp:
    endpoint: http://0.0.0.0:4319
    tls:
      insecure: true
    compression: none

  debug:
    verbosity: detailed

service:
  pipelines:
    traces:
      receivers: [jaeger]
      exporters: [debug, otlphttp]

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/metrics-server/apiservice.yaml:
--------------------------------------------------------------------------------
apiVersion: apiregistration.k8s.io/v1
kind: APIService
metadata:
  labels:
    k8s-app: metrics-server
  name: v1beta1.metrics.k8s.io
spec:
  group: metrics.k8s.io
  groupPriorityMinimum: 100
  insecureSkipTLSVerify: true
  service:
    name: metrics-server
    namespace: kube-system
  version: v1beta1
  versionPriority: 100

--------------------------------------------------------------------------------
/internal/testbed/integration/k8senrichment/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: e2ek8senrichment
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/loadbalancing/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: e2eloadbalancing
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/metrics-server/clusterrolebinding-metricserver.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  labels:
    k8s-app: metrics-server
  name: system:metrics-server
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: system:metrics-server
subjects:
  - kind: ServiceAccount
    name: metrics-server
    namespace: kube-system

--------------------------------------------------------------------------------
/internal/testbed/integration/resource-detection/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: e2eresourcedetection
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/linux-services/Dockerfile.test.deb:
--------------------------------------------------------------------------------
# A debian image with systemd enabled
FROM debian:13@sha256:0d01188e8dd0ac63bf155900fad49279131a876a1ea7fac917c62e87ccb2732d

ENV container docker
ENV DEBIAN_FRONTEND noninteractive

# Enable systemd.
RUN apt-get update ; \
    apt-get install -y systemd systemd-sysv procps; \
    apt-get clean ; \
    rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

CMD ["/lib/systemd/systemd"]

--------------------------------------------------------------------------------
/internal/testbed/load/tests/package_test.go:
--------------------------------------------------------------------------------
package loadtest

import (
	"testing"

	"github.com/open-telemetry/opentelemetry-collector-contrib/testbed/testbed"
)

var (
	performanceResultsSummary testbed.TestResultsSummary = &testbed.PerformanceResults{}
)

// TestMain is used to initiate setup, execution and tear down of testbed.
func TestMain(m *testing.M) {
	testbed.DoTestMain(m, performanceResultsSummary)
}

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/metrics-server/clusterrole-metricserver.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  labels:
    k8s-app: metrics-server
  name: system:metrics-server
rules:
  - apiGroups:
      - ""
    resources:
      - nodes/metrics
    verbs:
      - get
  - apiGroups:
      - ""
    resources:
      - pods
      - nodes
    verbs:
      - get
      - list
      - watch

--------------------------------------------------------------------------------
/internal/testbed/integration/resource-detection/testdata/collector/enrichment-file.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ .Name }}-enrichment-file
  namespace: e2eresourcedetection
data:
  dt_host_metadata.properties: |
    dt.entity.host=my-host-from-properties
    host.name=my-host-from-properties
    dt.entity.host_group=my-host-group-from-properties
    dt.foo=bar
    dt.smartscape.host=my-smartscape-host-from-properties

--------------------------------------------------------------------------------
/config_examples/netflow.yaml:
--------------------------------------------------------------------------------
extensions:
  health_check:
    endpoint: 0.0.0.0:13133

receivers:
  netflow:
    scheme: netflow
    hostname: 0.0.0.0
    port: 2055

exporters:
  otlphttp:
    endpoint: ${env:DT_ENDPOINT}
    headers:
      Authorization: "Api-Token ${env:API_TOKEN}"

service:
  extensions: [health_check]
  pipelines:
    logs:
      receivers: [netflow]
      exporters: [otlphttp]

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/metrics-server/clusterrolebinding-authdelegator.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  labels:
    k8s-app: metrics-server
  name: metrics-server:system:auth-delegator
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: system:auth-delegator
subjects:
  - kind: ServiceAccount
    name: metrics-server
    namespace: kube-system

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/metrics-server/rolebinding-authreader.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  labels:
    k8s-app: metrics-server
  name: metrics-server-auth-reader
  namespace: kube-system
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: extension-apiserver-authentication-reader
subjects:
  - kind: ServiceAccount
    name: metrics-server
    namespace: kube-system

--------------------------------------------------------------------------------
/internal/data-ingest-cli/commands/prometheus/config.yaml:
--------------------------------------------------------------------------------
receivers:
  prometheus:
    config:
      scrape_configs:
        - job_name: 'localhost'
          scrape_interval: 5s
          static_configs:
            - targets: ['localhost:9100']

processors:
  cumulativetodelta:

exporters:
  debug:

service:
  pipelines:
    metrics:
      receivers: [prometheus]
      processors: [cumulativetodelta]
      exporters: [debug]

--------------------------------------------------------------------------------
/internal/testbed/integration/hostmetrics/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{.Name}}
rules:
  - apiGroups: [""]
    resources: ["pods", "namespaces"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["apps"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["extensions"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/integration/kafka/testdata/config-overlays/receiver-env.yaml:
--------------------------------------------------------------------------------
exporters:
  otlphttp:
    endpoint: ${env:DT_ENDPOINT}
    headers:
      Authorization: "Api-Token ${env:DT_API_TOKEN}"

service:
  extensions: [health_check]
  pipelines:
    traces:
      receivers: [kafka]
      exporters: [otlphttp]
    metrics:
      receivers: [kafka]
      exporters: [otlphttp]
    logs:
      receivers: [kafka]
      exporters: [otlphttp]

--------------------------------------------------------------------------------
/internal/testbed/integration/k8sobjects/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
rules:
  - apiGroups: [""]
    resources: ["namespaces", "events", "pods", "nodes"]
    verbs: ["get", "watch", "list"]
  - apiGroups: ["apps"]
    resources: ["deployments"]
    verbs: ["get", "watch", "list"]
  - apiGroups: ["events.k8s.io"]
    resources: ["events"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/integration/k8sobjects/testdata/testobjects/deployment.yaml:
--------------------------------------------------------------------------------

apiVersion: apps/v1
kind: Deployment
metadata:
  name: busybox-deployment
  namespace: e2ek8sobjects
  labels:
    app: busybox
spec:
  replicas: 1
  selector:
    matchLabels:
      app: busybox
  template:
    metadata:
      labels:
        app: busybox
    spec:
      containers:
        - name: busybox
          image: busybox
          command: ["sleep", "3600"]

--------------------------------------------------------------------------------
/internal/testbed/integration/k8spodlogs/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
rules:
  - apiGroups: [""]
    resources: ["namespaces", "events", "pods", "nodes"]
    verbs: ["get", "watch", "list"]
  - apiGroups: ["apps"]
    resources: ["deployments", "replicasets"]
    verbs: ["get", "watch", "list"]
  - apiGroups: ["events.k8s.io"]
    resources: ["events"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/testdata/config-smoke.yaml:
--------------------------------------------------------------------------------
receivers:
  otlp:
    protocols:
      http:
        endpoint: localhost:4318

exporters:
  debug:
    verbosity: detailed

service:
  pipelines:
    metrics:
      receivers: [otlp]
      exporters: [debug]
  telemetry:
    metrics:
      level: normal
      readers:
        - pull:
            exporter:
              prometheus:
                host: '0.0.0.0'
                port: 9090

--------------------------------------------------------------------------------
/internal/release/dynatrace-otel-collector.service:
--------------------------------------------------------------------------------
[Unit]
Description=Dynatrace OpenTelemetry Collector
After=network.target
Documentation=https://docs.dynatrace.com/docs/shortlink/otel-collector

[Service]
EnvironmentFile=/etc/dynatrace-otel-collector/dynatrace-otel-collector.conf
ExecStart=/usr/bin/dynatrace-otel-collector $OTELCOL_OPTIONS
ExecReload=/bin/kill -HUP $MAINPID
KillMode=mixed
Restart=on-failure
Type=simple
User=otel
Group=otel

[Install]
WantedBy=multi-user.target

--------------------------------------------------------------------------------
/internal/testbed/integration/netflow/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: otelcolsvc
  namespace: e2enetflow
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
    - name: netflow
      port: 2055
      targetPort: 2055
      protocol: UDP
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/statsd/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: otelcolsvc
  namespace: e2estatsd
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
    - name: statsd
      port: 8125
      targetPort: 8125
      protocol: UDP
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/zipkin/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: e2ezipkin
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
    - name: zipkin
      port: 9411
      targetPort: 9411
      protocol: TCP
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/redaction/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
  labels:
    app: {{ .Name }}
rules:
  - apiGroups: [""]
    resources: ["pods", "namespaces"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["apps"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["extensions"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
  labels:
    app: {{ .Name }}
rules:
  - apiGroups: [""]
    resources: ["pods", "namespaces"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["apps"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["extensions"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/integration/k8senrichment/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
  labels:
    app: {{ .Name }}
rules:
  - apiGroups: [""]
    resources: ["pods", "namespaces"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["apps"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["extensions"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/integration/self-monitoring/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
  labels:
    app: {{ .Name }}
rules:
  - apiGroups: [""]
    resources: ["pods", "namespaces"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["apps"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["extensions"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/integration/k8spodlogs/testdata/testobjects/deployment.yaml:
--------------------------------------------------------------------------------

apiVersion: apps/v1
kind: Deployment
metadata:
  name: busybox-deployment
  namespace: e2ek8spodlogs
  labels:
    app: busybox
spec:
  replicas: 1
  selector:
    matchLabels:
      app: busybox
  template:
    metadata:
      labels:
        app: busybox
    spec:
      containers:
        - name: logger
          image: busybox
          command: ["sh", "-c", "echo 'Hello from logger container!'"]

--------------------------------------------------------------------------------
/internal/testbed/integration/resource-detection/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
  labels:
    app: {{ .Name }}
rules:
  - apiGroups: [""]
    resources: ["pods", "namespaces"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["apps"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

  - apiGroups: ["extensions"]
    resources: ["replicasets"]
    verbs: ["get", "watch", "list"]

--------------------------------------------------------------------------------
/internal/testbed/integration/loadbalancing/testdata/collector/clusterrole.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Name }}
  labels:
    app: {{ .Name }}
rules:
  - apiGroups: ["apps"]
    resources: ["deployments"]
    verbs: ["get", "watch", "list"]
  - apiGroups: [""]
    resources: ["pods", "nodes", "endpoints", "services"]
    verbs: ["get", "list", "watch"]
  - apiGroups: ["discovery.k8s.io"]
    resources: ["endpointslices"]
    verbs: ["get", "list", "watch"]

--------------------------------------------------------------------------------
/internal/testbed/integration/kafka/testdata/config-overlays/receiver-local.yaml:
--------------------------------------------------------------------------------
exporters:
  otlphttp/traces:
    endpoint: http://%[1]s:4321
  otlphttp/metrics:
    endpoint: http://%[1]s:4320
  otlphttp/logs:
    endpoint: http://%[1]s:4319

service:
  extensions: [health_check]
  pipelines:
    traces:
      receivers: [kafka]
      exporters: [otlphttp/traces]
    metrics:
      receivers: [kafka]
      exporters: [otlphttp/metrics]
    logs:
      receivers: [kafka]
      exporters: [otlphttp/logs]

--------------------------------------------------------------------------------
/internal/testbed/integration/self-monitoring/testdata/collector/service.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  name: {{ .Name }}
  namespace: {{ .Namespace }}
spec:
  type: ClusterIP
  ports:
    - name: otlp
      port: 4318
      targetPort: 4318
      protocol: TCP
      appProtocol: grpc
    - name: otlpgrpc
      port: 4317
      targetPort: 4317
      protocol: TCP
      appProtocol: grpc
  selector:
    app.kubernetes.io/name: opentelemetry-collector
    app.kubernetes.io/instance: {{ .Name }}

--------------------------------------------------------------------------------
/internal/testbed/integration/combinedload/testdata/metrics-server/clusterrole-aggregated-metrics.yaml:
--------------------------------------------------------------------------------
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  labels:
    k8s-app: metrics-server
    rbac.authorization.k8s.io/aggregate-to-admin: "true"
    rbac.authorization.k8s.io/aggregate-to-edit: "true"
    rbac.authorization.k8s.io/aggregate-to-view: "true"
  name: system:aggregated-metrics-reader
rules:
  - apiGroups:
      - metrics.k8s.io
    resources:
      - pods
      - nodes
    verbs:
      - get
      - list
      - watch

--------------------------------------------------------------------------------
/config_examples/k8scluster.yaml:
--------------------------------------------------------------------------------
exporters:
  otlphttp:
    endpoint: ${env:DT_ENDPOINT}
    headers:
      Authorization: "Api-Token ${env:DT_API_TOKEN}"

extensions:
  health_check:
    endpoint: 0.0.0.0:13133

receivers:
  k8s_cluster:
    auth_type: "serviceAccount"
    allocatable_types_to_report:
      - cpu
      - memory
      - pods

processors: {}

service:
  extensions: [health_check]
  pipelines:
    metrics:
      receivers: [k8s_cluster]
      processors: []
      exporters: [otlphttp]

--------------------------------------------------------------------------------
/internal/data-ingest-cli/commands/syslog/testdata/README.md:
--------------------------------------------------------------------------------
# Syslog test data

All files in this directory can be used as the basis for custom syslog payloads
to be sent to the syslog receiver. The payloads were taken from examples used to
test the syslog receiver's parser, so they should be sufficiently comprehensive
as a starting point.

## Attribution

All payloads in this directory were derived from test cases provided in the `syslogtest` package [here](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/pkg/stanza/operator/parser/syslog/syslogtest/data.go#L46).

--------------------------------------------------------------------------------
/internal/testbed/integration/statsd/testdata/statsd/job.yaml:
--------------------------------------------------------------------------------
apiVersion: batch/v1
kind: Job
metadata:
  name: statsd-generator-job
  namespace: e2estatsd
spec:
  template:
    spec:
      containers:
        - name: netcat
          image: alpine
          command: ["/bin/sh", "-c"]
          args: [
            "echo 'test.metric:42|g|#myKey:myVal' | nc -u -w 1 -v otelcolsvc.e2estatsd.svc.cluster.local 8125 && \
            sleep 1 && \
            echo 'timerMetric:320|ms|@0.1|#timerKey:timerVal' | nc -u -w 1 -v otelcolsvc.e2estatsd.svc.cluster.local 8125"
          ]
      restartPolicy: Never
  backoffLimit: 4

--------------------------------------------------------------------------------
/config_examples/resource-detection.yaml:
--------------------------------------------------------------------------------
extensions:
  health_check:
    endpoint: 0.0.0.0:13133
receivers:
  otlp:
    protocols:
      grpc:
        endpoint: ${env:MY_POD_IP}:4317
processors:
  resourcedetection/dynatrace:
    override: false
    detectors: [dynatrace]
exporters:
  otlphttp:
    endpoint: ${env:DT_ENDPOINT}
    headers:
      Authorization: "Api-Token ${env:API_TOKEN}"
service:
  extensions:
    - health_check
  pipelines:
    traces:
      receivers:
        - otlp
      processors:
        - resourcedetection/dynatrace
      exporters:
        - otlphttp

--------------------------------------------------------------------------------
/config_examples/kafka-receiver.yaml:
--------------------------------------------------------------------------------
extensions:
  health_check:
    endpoint: 0.0.0.0:13133

receivers:
  kafka:
    tls:
      insecure: true
    traces:
    metrics:
    logs:
    brokers: ["${env:BROKER_ADDRESS}"]

exporters:
  otlphttp:
    endpoint: ${env:DT_ENDPOINT}
    headers:
      Authorization: "Api-Token ${env:DT_API_TOKEN}"

service:
  extensions: [health_check]
  pipelines:
    traces:
      receivers: [kafka]
      exporters: [otlphttp]
    metrics:
      receivers: [kafka]
      exporters: [otlphttp]
    logs:
      receivers: [kafka]
      exporters: [otlphttp]

--------------------------------------------------------------------------------
/config_examples/kafka-metrics-receiver.yaml:
--------------------------------------------------------------------------------
extensions:
  health_check:
    endpoint: 0.0.0.0:13133

receivers:
  kafkametrics:
    brokers: ["${env:BROKER_ADDRESS}"]
    scrapers:
      - brokers
      - topics
      - consumers
    collection_interval: 5s

processors:
  cumulativetodelta:
    max_staleness: 25h

exporters:
  otlphttp:
    endpoint: ${env:DT_ENDPOINT}
    headers:
      Authorization: "Api-Token ${env:DT_API_TOKEN}"

service:
  extensions: [health_check]
  pipelines:
    metrics:
      receivers: [kafkametrics]
      processors: [cumulativetodelta]
      exporters: [otlphttp]

--------------------------------------------------------------------------------
/internal/testbed/integration/prometheus/README.md:
--------------------------------------------------------------------------------
# Scrape data from Prometheus

This is the e2e test for the Collector use-case:
[Scrape data from Prometheus](https://docs.dynatrace.com/docs/shortlink/otel-collector-cases-prometheus).

## Requirements to run the tests

- Docker
- Kind

The tests require a running Kind k8s cluster. During the tests,
a Dynatrace distribution of the OpenTelemetry Collector and a
Prometheus node exporter are deployed on the k8s cluster with
configurations as per the Dynatrace documentation page.

The Prometheus receiver scrapes the metrics, which the collector then exports
to the test, where a few metrics are asserted.

--------------------------------------------------------------------------------
/internal/data-ingest-cli/commands/filelog/config.yaml:
--------------------------------------------------------------------------------
receivers:
  filelog:
    include: [ ./commands/filelog/testdata/file.log ]
    start_at: beginning
    operators:
      - type: regex_parser
        regex: '^(?P