├── .gitlab-ci ├── aws ├── logs_monitoring │ ├── tests │ │ ├── __init__.py │ │ ├── run_unit_tests.sh │ │ ├── test_logs.py │ │ └── test_cloudtrail_s3.py │ ├── .gitignore │ ├── trace_forwarder │ │ ├── .gitignore │ │ ├── internal │ │ │ └── apm │ │ │ │ ├── testdata │ │ │ │ ├── xray-parent.json~snapshot │ │ │ │ ├── xray-parent.json │ │ │ │ ├── xray_reparent.json~snapshot │ │ │ │ ├── xray_reparent.json │ │ │ │ ├── basic.json │ │ │ │ └── basic.json~snapshot │ │ │ │ ├── stats.go │ │ │ │ └── model_test.go │ │ ├── Makefile │ │ ├── Gopkg.toml │ │ ├── __init__.py │ │ ├── Dockerfile │ │ ├── scripts │ │ │ ├── run_tests.sh │ │ │ └── build_linux_go_bin.sh │ │ ├── cmd │ │ │ └── trace │ │ │ │ ├── testdata │ │ │ │ └── xray-parent.json │ │ │ │ ├── main_test.go │ │ │ │ └── main.go │ │ ├── connection.py │ │ └── README.md │ ├── tools │ │ ├── integration_tests │ │ │ ├── tester │ │ │ │ └── Dockerfile │ │ │ ├── recorder │ │ │ │ ├── Dockerfile │ │ │ │ ├── pb │ │ │ │ │ ├── compile_protobufs.sh │ │ │ │ │ ├── trace.proto │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── trace_payload.proto │ │ │ │ │ ├── span.proto │ │ │ │ │ ├── trace_pb2.py │ │ │ │ │ ├── trace_payload_pb2.py │ │ │ │ │ └── span_pb2.py │ │ │ │ └── recorder.py │ │ │ ├── cache_test_lambda │ │ │ │ ├── handler.py │ │ │ │ ├── serverless.yml │ │ │ │ └── .gitignore │ │ │ ├── forwarder │ │ │ │ └── Dockerfile │ │ │ ├── external_lambda │ │ │ │ ├── serverless.yml │ │ │ │ ├── handler.py │ │ │ │ └── .gitignore │ │ │ ├── snapshots │ │ │ │ ├── cloudwatch_log_custom_tags.json │ │ │ │ ├── cloudwatch_log.json │ │ │ │ ├── cloudwatch_log_route53.json │ │ │ │ ├── cloudwatch_log_coldstart.json │ │ │ │ ├── cloudwatch_log_timeout.json │ │ │ │ ├── cloudwatch_log_apigateway.json │ │ │ │ ├── cloudwatch_log_fsx_windows.json │ │ │ │ ├── cloudwatch_log_apigateway.json~snapshot │ │ │ │ ├── cloudwatch_log_coldstart.json~snapshot │ │ │ │ ├── cloudwatch_log_custom_tags.json~snapshot │ │ │ │ ├── cloudwatch_log_route53.json~snapshot │ │ │ │ ├── cloudwatch_log.json~snapshot │ │ │ │ ├── cloudwatch_log_cloudtrail.json │ │ │ │ └── cloudwatch_log_fsx_windows.json~snapshot │ │ │ ├── snapshots-cache-test │ │ │ │ ├── cloudwatch_log_custom_tags.json │ │ │ │ ├── cloudwatch_log.json │ │ │ │ ├── cloudwatch_log_coldstart.json │ │ │ │ ├── cloudwatch_log_timeout.json │ │ │ │ ├── cloudwatch_log_custom_tags.json~snapshot │ │ │ │ ├── cloudwatch_log.json~snapshot │ │ │ │ └── cloudwatch_log_cloudtrail.json │ │ │ └── docker-compose.yml │ │ ├── publish_sandbox.sh │ │ ├── Dockerfile_bundle │ │ ├── publish_prod.sh │ │ ├── semver.sh │ │ ├── list_layers.sh │ │ ├── build_bundle.sh │ │ ├── add_new_region.sh │ │ ├── installation_test.sh │ │ ├── sign_bundle.sh │ │ └── publish_layers.sh │ ├── .dockerignore │ ├── proxy_conf │ │ ├── nginx.txt │ │ └── haproxy.txt │ ├── setup.py │ └── telemetry.py ├── rds_enhanced_monitoring.zip ├── vpc_flow_log_monitoring.zip ├── README.md ├── rds_enhanced_monitoring │ ├── rds-enhanced-sam-template.yaml │ └── examples │ │ ├── generate_CWLogTestEvent.sh │ │ ├── README.md │ │ └── tmp_message.json ├── vpc_flow_log_monitoring │ ├── vpc-flow-log-sam-template.yaml │ └── README.md └── .gitignore ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE.md ├── workflows │ ├── pr.yaml │ ├── trace_forwarder.yml │ ├── integration_test.yml │ ├── test.yml │ ├── lint.yml │ └── codeql-analysis.yml ├── labeler.yml ├── stale.yaml └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── azure ├── prettier.config.js ├── deploy-to-azure │ ├── README.md │ ├── event_hub.json │ ├── activity_log_diagnostic_settings.json │ └── 
parent_template.json ├── README.md ├── blobs_logs_monitoring │ ├── function.json │ └── README.md ├── activity_logs_monitoring │ ├── function.json │ └── README.md ├── eventhub_log_forwarder │ ├── README.md │ ├── event_hub.json │ ├── resource_deploy.ps1 │ ├── activity_logs_deploy.ps1 │ ├── activity_log_diagnostic_settings.json │ ├── parent_template.json │ └── function_template.json └── package.json ├── NOTICE ├── NOTICE.TXT ├── LICENSE-3rdparty.csv ├── README.md └── CONTRIBUTING.md /.gitlab-ci: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /aws/logs_monitoring/.gitignore: -------------------------------------------------------------------------------- 1 | *.zip 2 | tools/layers 3 | .forwarder -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/.gitignore: -------------------------------------------------------------------------------- 1 | bin 2 | .layers 3 | *.pyc 4 | !scripts 5 | vendor 6 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Azure Integrations 2 | azure/ @DataDog/azure-integrations 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | env 3 | base.zip 4 | */gen 5 | */env 6 | .vscode 7 | **/node_modules 8 | .idea 9 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/testdata/xray-parent.json~snapshot: -------------------------------------------------------------------------------- 1 | ([]*pb.TracePayload) { 2 | } 3 | -------------------------------------------------------------------------------- /azure/prettier.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | printWidth: 80, 3 | tabWidth: 4, 4 | singleQuote: true 5 | }; 6 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/tester/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8 2 | 3 | COPY . . 4 | RUN pip install "deepdiff<6" 5 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8 2 | 3 | RUN pip install protobuf 4 | 5 | COPY . . 
6 | 7 | -------------------------------------------------------------------------------- /aws/rds_enhanced_monitoring.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mikebryant/datadog-serverless-functions/master/aws/rds_enhanced_monitoring.zip -------------------------------------------------------------------------------- /aws/vpc_flow_log_monitoring.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mikebryant/datadog-serverless-functions/master/aws/vpc_flow_log_monitoring.zip -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/Makefile: -------------------------------------------------------------------------------- 1 | target: 2 | go build -o bin/trace-intake.so -gcflags="-e" -buildmode=c-shared cmd/trace/main.go 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | **Describe what happened:** 2 | 3 | 4 | **Describe what you expected:** 5 | 6 | 7 | **Steps to reproduce the issue:** 8 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Datadog aws-serverless-apps 2 | Copyright 2021 Datadog, Inc. 3 | 4 | This product includes software developed at Datadog (https://www.datadoghq.com/). -------------------------------------------------------------------------------- /NOTICE.TXT: -------------------------------------------------------------------------------- 1 | Datadog datadog-serverless-functions 2 | Copyright 2021 Datadog, Inc. 3 | 4 | This product includes software developed at Datadog (https://www.datadoghq.com/). 5 | -------------------------------------------------------------------------------- /LICENSE-3rdparty.csv: -------------------------------------------------------------------------------- 1 | Component,Origin,License,Copyright 2 | boto3,github.com/boto/boto3,Apache-2.0,"Copyright 2013-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved." 3 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/cache_test_lambda/handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | 4 | def handle(event, context): 5 | print(json.dumps(event)) 6 | 7 | return {"statusCode": 200} 8 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/forwarder/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG image 2 | FROM $image 3 | 4 | ARG forwarder 5 | 6 | # Add the code into /var/task (will unzip files) 7 | ADD $forwarder /var/task/ 8 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tests/run_unit_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | export DD_API_KEY=11111111111111111111111111111111 4 | export DD_ADDITIONAL_TARGET_LAMBDAS=ironmaiden,megadeth 5 | python3 -m unittest discover . 
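# Editorial note: the values exported above are presumably dummies (a
# 32-character placeholder API key and placeholder Lambda names) so the
# forwarder settings can load during unit tests; they are not real credentials
# and are not expected to reach Datadog or AWS.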
-------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/Gopkg.toml: -------------------------------------------------------------------------------- 1 | [[constraint]] 2 | name = "github.com/DataDog/datadog-agent" 3 | branch = "master" 4 | 5 | [[constraint]] 6 | name = "github.com/gogo/protobuf" 7 | version = "~v1.0.0" 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # datadog-serverless-functions 2 | 3 | This repository contains our serverless functions that process streams and send data to datadog 4 | 5 | - For AWS, [go here](./aws/README.md) 6 | - For Azure, [go here](./azure/README.md) 7 | -------------------------------------------------------------------------------- /.github/workflows/pr.yaml: -------------------------------------------------------------------------------- 1 | name: pull-request 2 | on: [pull_request] 3 | 4 | jobs: 5 | label: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/labeler@v2 9 | with: 10 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 11 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | # this module configures the GitHub labeler to apply the e.g. `azure-integrations` label to azure changes automatically. 2 | 3 | 4 | azure-integrations: 5 | - azure/**/* 6 | 7 | azure: 8 | - azure/**/* 9 | 10 | aws: 11 | - aws/**/* 12 | -------------------------------------------------------------------------------- /azure/deploy-to-azure/README.md: -------------------------------------------------------------------------------- 1 | [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FDataDog%2Fdatadog-serverless-functions%2Fmaster%2Fazure%2Fdeploy-to-azure%2Fparent_template.json) 2 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/__init__.py: -------------------------------------------------------------------------------- 1 | # Unless explicitly stated otherwise all files in this repository are licensed 2 | # under the Apache License Version 2.0. 3 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 4 | # Copyright 2021 Datadog, Inc. 
5 | -------------------------------------------------------------------------------- /aws/logs_monitoring/.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !lambda_function.py 3 | !enhanced_lambda_metrics.py 4 | !logs.py 5 | !parsing.py 6 | !cache.py 7 | !telemetry.py 8 | !settings.py 9 | !setup.py 10 | !template.yaml 11 | !trace_forwarder/bin 12 | !trace_forwarder/__init__.py 13 | !trace_forwarder/connection.py -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/compile_protobufs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Compile .py files from .proto files 4 | # You must run this after updating a .proto file 5 | 6 | # Requires protoc, which can be installed with `brew install protobuf` 7 | 8 | protoc *.proto --python_out=./ -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/external_lambda/serverless.yml: -------------------------------------------------------------------------------- 1 | service: forwarder-tests-external-lambda 2 | provider: 3 | name: aws 4 | runtime: python2.7 5 | 6 | functions: 7 | ironmaiden: 8 | handler: handler.ironmaiden 9 | megadeth: 10 | handler: handler.megadeth 11 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/cache_test_lambda/serverless.yml: -------------------------------------------------------------------------------- 1 | service: integration-tests 2 | provider: 3 | name: aws 4 | runtime: python3.8 5 | 6 | functions: 7 | cache_test_lambda: 8 | handler: handler.handle 9 | tags: 10 | TAG1: Tag Value 11 | TAG2: Tag2 Value 12 | -------------------------------------------------------------------------------- /azure/README.md: -------------------------------------------------------------------------------- 1 | # Azure functions 2 | 3 | * [EventHub triggered function](activity_logs_monitoring/README.md) 4 | * [Blob triggered function](blobs_logs_monitoring/README.md) 5 | 6 | 7 | ## Log forwarding pipeline 8 | * [Powershell scripts to create the Eventhub log forwarding pipeline](eventhub_log_forwarder/README.md) -------------------------------------------------------------------------------- /azure/blobs_logs_monitoring/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "name": "blobContent", 5 | "type": "blobTrigger", 6 | "direction": "in", 7 | "path": "/{name}", 8 | "connection": "" 9 | } 10 | ], 11 | "disabled": false 12 | } 13 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/external_lambda/handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | 4 | def ironmaiden(event, context): 5 | print(json.dumps(event)) 6 | 7 | return {"statusCode": 200} 8 | 9 | 10 | def megadeth(event, context): 11 | print(json.dumps(event)) 12 | 13 | return {"statusCode": 200} 14 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/trace.proto: -------------------------------------------------------------------------------- 1 | // copied from datadog-agent/pkg/trace/pb 2 | 3 | syntax = "proto3"; 4 | 5 | package pb; 6 | 7 | 
import "span.proto"; 8 | 9 | message APITrace { 10 | uint64 traceID = 1; 11 | repeated Span spans = 2; 12 | int64 startTime = 6; 13 | int64 endTime = 7; 14 | } 15 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 4 | # hack to add this dir to the python path, which fixes an issue with 5 | # generated python code having incorrect import paths 6 | # see https://github.com/google/protobuf/issues/881 7 | sys.path.insert(0, os.path.abspath(os.path.dirname(__file__))) 8 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/cache_test_lambda/.gitignore: -------------------------------------------------------------------------------- 1 | # Distribution / packaging 2 | .Python 3 | *.pyc 4 | env/ 5 | build/ 6 | develop-eggs/ 7 | dist/ 8 | downloads/ 9 | eggs/ 10 | .eggs/ 11 | lib/ 12 | lib64/ 13 | parts/ 14 | sdist/ 15 | var/ 16 | *.egg-info/ 17 | .installed.cfg 18 | *.egg 19 | 20 | # Serverless directories 21 | .serverless 22 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/external_lambda/.gitignore: -------------------------------------------------------------------------------- 1 | # Distribution / packaging 2 | .Python 3 | *.pyc 4 | env/ 5 | build/ 6 | develop-eggs/ 7 | dist/ 8 | downloads/ 9 | eggs/ 10 | .eggs/ 11 | lib/ 12 | lib64/ 13 | parts/ 14 | sdist/ 15 | var/ 16 | *.egg-info/ 17 | .installed.cfg 18 | *.egg 19 | 20 | # Serverless directories 21 | .serverless 22 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/trace_payload.proto: -------------------------------------------------------------------------------- 1 | // copied from datadog-agent/pkg/trace/pb 2 | 3 | syntax = "proto3"; 4 | 5 | package pb; 6 | 7 | import "trace.proto"; 8 | import "span.proto"; 9 | 10 | message TracePayload { 11 | string hostName = 1; 12 | string env = 2; 13 | repeated APITrace traces = 3; 14 | } 15 | -------------------------------------------------------------------------------- /.github/workflows/trace_forwarder.yml: -------------------------------------------------------------------------------- 1 | name: Trace forwarder tests 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout source 10 | uses: actions/checkout@v3 11 | - name: Run trace forwarder tests 12 | run: | 13 | ./aws/logs_monitoring/trace_forwarder/scripts/run_tests.sh 14 | -------------------------------------------------------------------------------- /aws/README.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | Repository of Lambda functions that process AWS log streams and send data to Datadog as logs or metrics. 
4 | 5 | * [Instructions to collect and forward any AWS service logs](logs_monitoring) 6 | * [Instructions to collect and send VPC metrics from flow logs](vpc_flow_log_monitoring) 7 | * [Instructions to collect and send RDS metrics](rds_enhanced_monitoring) 8 | -------------------------------------------------------------------------------- /azure/activity_logs_monitoring/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "bindings": [ 3 | { 4 | "type": "eventHubTrigger", 5 | "name": "eventHubMessages", 6 | "direction": "in", 7 | "path": "insights-operational-logs", 8 | "connection": "DDEventHubTest_RootManageSharedAccessKey_EVENTHUB", 9 | "cardinality": "many", 10 | "consumerGroup": "$Default" 11 | } 12 | ], 13 | "disabled": false 14 | } 15 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_custom_tags.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "testLogGroup", 5 | "logStream": "testLogStream", 6 | "subscriptionFilters": ["testFilter"], 7 | "logEvents": [ 8 | { 9 | "id": "eventId1", 10 | "timestamp": 1440442987000, 11 | "message": "{\"message\": \"hello world\", \"ddtags\": \"custom_tag1:value1,custom_tag2:value2\"}\n" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang:1.12 2 | ARG runtime 3 | 4 | RUN go get -u github.com/golang/dep/cmd/dep 5 | 6 | # Install dependencies 7 | COPY . 
/go/src/github.com/DataDog/datadog-serverless-functions/aws/logs_monitoring/trace_forwarder/ 8 | WORKDIR /go/src/github.com/DataDog/datadog-serverless-functions/aws/logs_monitoring/trace_forwarder/ 9 | ENV GOOS=linux 10 | ENV GOARCH=amd64 11 | RUN dep ensure 12 | 13 | # Build the go binary 14 | 15 | RUN make 16 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots-cache-test/cloudwatch_log_custom_tags.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "testLogGroup", 5 | "logStream": "testLogStream", 6 | "subscriptionFilters": ["testFilter"], 7 | "logEvents": [ 8 | { 9 | "id": "eventId1", 10 | "timestamp": 1440442987000, 11 | "message": "{\"message\": \"hello world\", \"ddtags\": \"custom_tag1:value1,custom_tag2:value2\"}\n" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /.github/workflows/integration_test.yml: -------------------------------------------------------------------------------- 1 | name: Integration tests 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | max-parallel: 4 10 | matrix: 11 | python-version: [3.7, 3.8] 12 | steps: 13 | - name: Checkout source 14 | uses: actions/checkout@v3 15 | 16 | - name: Run integration tests 17 | run: | 18 | ./aws/logs_monitoring/tools/integration_tests/integration_tests.sh --python-version=${{ matrix.python-version }} 19 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "testLogGroup", 5 | "logStream": "testLogStream", 6 | "subscriptionFilters": ["testFilter"], 7 | "logEvents": [ 8 | { 9 | "id": "eventId1", 10 | "timestamp": 1440442987000, 11 | "message": "[ERROR] First test message" 12 | }, 13 | { 14 | "id": "eventId2", 15 | "timestamp": 1440442987001, 16 | "message": "[ERROR] Second test message" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/span.proto: -------------------------------------------------------------------------------- 1 | // copied from datadog-agent/pkg/trace/pb 2 | 3 | syntax = "proto3"; 4 | 5 | package pb; 6 | 7 | message Span { 8 | string service = 1; 9 | string name = 2; 10 | string resource = 3; 11 | uint64 traceID = 4; 12 | uint64 spanID = 5; 13 | uint64 parentID = 6; 14 | int64 start = 7; 15 | int64 duration = 8; 16 | int32 error = 9; 17 | map meta = 10; 18 | map metrics = 11; 19 | string type = 12; 20 | } 21 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots-cache-test/cloudwatch_log.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "testLogGroup", 5 | "logStream": "testLogStream", 6 | "subscriptionFilters": ["testFilter"], 7 | "logEvents": [ 8 | { 9 | "id": "eventId1", 10 | "timestamp": 1440442987000, 11 | "message": "[ERROR] First test message" 12 | }, 13 | { 14 | "id": "eventId2", 15 | "timestamp": 1440442987001, 16 
| "message": "[ERROR] Second test message" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/publish_sandbox.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | # Read the new version 4 | if [ -z "$1" ]; then 5 | echo "Must specify a desired version number" 6 | exit 1 7 | else 8 | LAYER_VERSION=$1 9 | fi 10 | 11 | # Read the new version 12 | if [ -z "$2" ]; then 13 | echo "Must specify a forwarder version" 14 | exit 1 15 | else 16 | FORWARDER_VERSION=$2 17 | fi 18 | 19 | echo "FORWARDER_VERSION=$FORWARDER_VERSION" 20 | 21 | LAYER_VERSION=$LAYER_VERSION FORWARDER_VERSION=$FORWARDER_VERSION aws-vault exec sandbox-account-admin -- ./tools/publish_layers.sh 22 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/scripts/run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 7 | 8 | # Builds Datadogpy layers for lambda functions, using Docker 9 | set -e 10 | 11 | # Change to the parent of the directory this script is in 12 | cd $(dirname "$0")/.. 13 | 14 | docker buildx build --platform linux/amd64 -t datadog-go-layer . --build-arg runtime=python:3.7 15 | docker run --rm datadog-go-layer go test -v ./... 16 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/Dockerfile_bundle: -------------------------------------------------------------------------------- 1 | ARG runtime 2 | 3 | FROM python:$runtime 4 | 5 | RUN mkdir build 6 | WORKDIR /build 7 | 8 | COPY . . 9 | RUN pip install . -t . 10 | 11 | # Remove *.pyc files 12 | RUN find . -name \*.pyc -delete 13 | 14 | # Remove botocore (40MB) to reduce package size. aws-xray-sdk 15 | # installs it, while it's already provided by the Lambda Runtime. 16 | RUN rm -rf ./botocore* 17 | 18 | # Remove the following files from ddtrace, because they contain code 19 | # like `os.execl`, which cause security scans to fail for certain customers. 20 | # These files are not directly used by the Forwarder. 
21 | RUN rm ./ddtrace/commands/ddtrace_run.py 22 | RUN rm ./decorator.py -------------------------------------------------------------------------------- /aws/logs_monitoring/proxy_conf/nginx.txt: -------------------------------------------------------------------------------- 1 | # For Datadog EU, change `datadoghq.com` to `datadoghq.eu` 2 | 3 | # HTTP Proxy for Datadog Agent 4 | http { 5 | server { 6 | listen 3834; 7 | access_log off; 8 | 9 | location /api/v1/validate { 10 | proxy_pass https://api.datadoghq.com:443/api/v1/validate; 11 | } 12 | location / { 13 | proxy_pass https://haproxy-app.agent.datadoghq.com:443/; 14 | } 15 | } 16 | } 17 | 18 | # TCP Proxy for Datadog Agent 19 | stream { 20 | server { 21 | listen 3835; 22 | proxy_ssl on; 23 | proxy_pass trace.agent.datadoghq.com:443; 24 | } 25 | server { 26 | listen 3837; 27 | proxy_ssl on; 28 | proxy_pass lambda-http-intake.logs.datadoghq.com:443; 29 | } 30 | } -------------------------------------------------------------------------------- /.github/stale.yaml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 60 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 7 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - pinned 8 | # Label to use when marking an issue as stale 9 | staleLabel: wontfix 10 | # Comment to post when marking an issue as stale. Set to `false` to disable 11 | markComment: > 12 | This issue has been automatically marked as stale and it will be closed 13 | if no further activity occurs. Thank you for your contributions! You can 14 | also find us in the \#serverless channel from the 15 | [Datadog community Slack](https://chat.datadoghq.com/). 16 | # Comment to post when closing a stale issue. Set to `false` to disable 17 | closeComment: false 18 | 19 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Python unit tests 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | max-parallel: 4 10 | matrix: 11 | python-version: [3.7, 3.8] 12 | steps: 13 | - name: Checkout source 14 | uses: actions/checkout@v3 15 | - name: Setup Python ${{ matrix.python-version }} 16 | uses: actions/setup-python@v3 17 | with: 18 | python-version: ${{ matrix.python-version }} 19 | 20 | - name: Run Python unit tests 21 | env: 22 | AWS_DEFAULT_REGION: us-east-1 23 | DD_API_KEY: "11111111111111111111111111111111" 24 | DD_ADDITIONAL_TARGET_LAMBDAS: "ironmaiden,megadeth" 25 | run: | 26 | pip install boto3 mock 27 | python -m unittest discover ./aws/logs_monitoring/ 28 | -------------------------------------------------------------------------------- /azure/eventhub_log_forwarder/README.md: -------------------------------------------------------------------------------- 1 | # Eventhub Log Forwarder Deployment 2 | 3 | Powershell scripts to automatically create a 'log forwarding pipeline' using Azure EventHub to collect Azure Platform Logs. 4 | 5 | At a high level, the scripts call Azure Powershell functions to create and deploy Azure resources, mostly defined via a URL to a json template. 
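For orientation only, a rough sketch of the same "deploy an ARM template from a URL" step using the Azure CLI instead of the PowerShell scripts in this folder — the resource group, namespace name, and template URL below are placeholders/assumptions, not values taken from these scripts:

```sh
# Hedged sketch: deploy the event hub ARM template from a URL with the Azure CLI.
# Replace the resource group, namespace, and template URL with your own values.
az deployment group create \
    --resource-group <your-resource-group> \
    --template-uri https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/eventhub_log_forwarder/event_hub.json \
    --parameters eventHubNamespace=<your-eventhub-namespace>
```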
6 | 7 | The JSON template format is a [Azure Resource Manager (ARM)](https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/overview), which can store variables, define resources to create, dependencies... etc. And can be validated via e.g. a Visual Studio Code plugin mentioned therein (under Authoring tools). 8 | 9 | The exact steps these scripts automate are found in the ['Manual installation' of Log collection documentation](https://docs.datadoghq.com/integrations/azure/?tab=manualinstallation#log-collection). 10 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_route53.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType":"DATA_MESSAGE", 3 | "owner":"601427279990", 4 | "logGroup":"/aws/route53", 5 | "logStream":"vpc-34d4ae52_20210419T0940Z_i-090384b04e4e5bfd2", 6 | "subscriptionFilters":[ 7 | "route53" 8 | ], 9 | "logEvents":[ 10 | { 11 | "id":"36101011026851289935914250459116581911799633971817938944", 12 | "timestamp":1618825322000, 13 | "message":"{\"version\":\"1.100000\",\"account_id\":\"601427279990\",\"region\":\"us-east-1\",\"vpc_id\":\"vpc-34d4ae52\",\"query_timestamp\":\"2021-04-19T09:42:02Z\",\"query_name\":\"queue.amazonaws.com.\",\"query_type\":\"A\",\"query_class\":\"IN\",\"rcode\":\"NOERROR\",\"answers\":[{\"Rdata\":\"3.236.169.0\",\"Type\":\"A\",\"Class\":\"IN\"}],\"srcaddr\":\"172.31.26.215\",\"srcport\":\"34460\",\"transport\":\"UDP\",\"srcids\":{\"instance\":\"i-090384b04e4e5bfd2\"}}" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/scripts/build_linux_go_bin.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 7 | 8 | # Builds Datadogpy layers for lambda functions, using Docker 9 | set -e 10 | 11 | # Change to the parent of the directory this script is in 12 | cd $(dirname "$0")/.. 13 | 14 | 15 | rm -rf ./bin 16 | 17 | # Install datadogpy in a docker container to avoid the mess from switching 18 | # between different python runtimes. 19 | docker buildx build --platform linux/amd64 -t datadog-go-layer . --no-cache --build-arg runtime=python:3.7 20 | 21 | id=$(docker create datadog-go-layer) 22 | docker cp $id:/go/src/github.com/DataDog/datadog-serverless-functions/aws/logs_monitoring/trace_forwarder/bin . 23 | docker rm -v $id 24 | echo "Done creating archive bin" -------------------------------------------------------------------------------- /aws/rds_enhanced_monitoring/rds-enhanced-sam-template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: AWS::Serverless-2016-10-31 3 | Description: Pushes RDS Enhanced metrics to Datadog. 4 | Parameters: 5 | KMSKeyId: 6 | Type: String 7 | Description: The id (final part of the key's ARN) of a KMS key used to encrypt and decrypt your Datadog API and App keys. 8 | Resources: 9 | rdslambdaddfunction: 10 | Type: 'AWS::Serverless::Function' 11 | Properties: 12 | Description: Pushes RDS Enhanced metrics to Datadog. 
13 | Environment: 14 | Variables: 15 | kmsEncryptedKeys: 'YOUR_KEY' 16 | Handler: lambda_function.lambda_handler 17 | MemorySize: 128 18 | Policies: 19 | KMSDecryptPolicy: 20 | KeyId: !Ref KMSKeyId 21 | Runtime: python3.7 22 | Timeout: 10 23 | KmsKeyArn: 24 | !Sub 25 | - 'arn:${AWS::Partition}:kms:${AWS::Region}:${AWS::AccountId}:key/${keyId}' 26 | - {keyId: !Ref KMSKeyId} 27 | Type: AWS::Serverless::Function 28 | -------------------------------------------------------------------------------- /aws/logs_monitoring/setup.py: -------------------------------------------------------------------------------- 1 | # Unless explicitly stated otherwise all files in this repository are licensed 2 | # under the Apache License Version 2.0. 3 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 4 | # Copyright 2021 Datadog, Inc. 5 | from setuptools import setup 6 | 7 | setup( 8 | name="aws-dd-forwarder", 9 | version="0.0.0.dev0", 10 | description="Datadog AWS Forwarder Lambda Function", 11 | url="https://github.com/DataDog/datadog-serverless-functions/tree/master/aws/logs_monitoring", 12 | author="Datadog, Inc.", 13 | author_email="dev@datadoghq.com", 14 | classifiers=[ 15 | "Programming Language :: Python :: 3.7", 16 | "Programming Language :: Python :: 3.8", 17 | ], 18 | keywords="datadog aws lambda layer", 19 | python_requires=">=3.7, <3.9", 20 | install_requires=["datadog-lambda==3.39.0", "requests-futures==1.0.0"], 21 | extras_require={ 22 | "dev": ["nose2==0.9.1", "flake8==3.7.9", "requests==2.22.0", "boto3==1.10.33"] 23 | }, 24 | ) 25 | -------------------------------------------------------------------------------- /aws/vpc_flow_log_monitoring/vpc-flow-log-sam-template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: AWS::Serverless-2016-10-31 3 | Description: Pushes VPC Flow Log metrics to Datadog. 4 | Parameters: 5 | KMSKeyId: 6 | Type: String 7 | Description: The id (final part of the key's ARN) of a KMS key used to encrypt and decrypt your Datadog API and App keys. 8 | Resources: 9 | vpcflowlambdaddfunction: 10 | Type: 'AWS::Serverless::Function' 11 | Properties: 12 | Description: Pushes VPC Flow Log metrics to Datadog. 13 | Environment: 14 | Variables: 15 | kmsEncryptedKeys: 'YOUR_KEY' 16 | Handler: lambda_function.lambda_handler 17 | MemorySize: 128 18 | Policies: 19 | KMSDecryptPolicy: 20 | KeyId: !Ref KMSKeyId 21 | Runtime: python3.7 22 | Timeout: 10 23 | KmsKeyArn: 24 | !Sub 25 | - 'arn:${AWS::Partition}:kms:${AWS::Region}:${AWS::AccountId}:key/${keyId}' 26 | - {keyId: !Ref KMSKeyId} 27 | Type: AWS::Serverless::Function 28 | -------------------------------------------------------------------------------- /aws/logs_monitoring/telemetry.py: -------------------------------------------------------------------------------- 1 | # Unless explicitly stated otherwise all files in this repository are licensed 2 | # under the Apache License Version 2.0. 3 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 4 | # Copyright 2021 Datadog, Inc. 
5 | 6 | from settings import DD_FORWARDER_VERSION 7 | 8 | DD_FORWARDER_TELEMETRY_NAMESPACE_PREFIX = "aws.dd_forwarder" 9 | DD_FORWARDER_TELEMETRY_TAGS = [] 10 | 11 | 12 | def set_forwarder_telemetry_tags(context, event_type): 13 | """Helper function to set tags on telemetry metrics 14 | Do not submit telemetry metrics before this helper function is invoked 15 | """ 16 | global DD_FORWARDER_TELEMETRY_TAGS 17 | DD_FORWARDER_TELEMETRY_TAGS = [ 18 | f"forwardername:{context.function_name.lower()}", 19 | f"forwarder_memorysize:{context.memory_limit_in_mb}", 20 | f"forwarder_version:{DD_FORWARDER_VERSION}", 21 | f"event_type:{event_type}", 22 | ] 23 | 24 | 25 | def get_forwarder_telemetry_tags(): 26 | return DD_FORWARDER_TELEMETRY_TAGS 27 | -------------------------------------------------------------------------------- /azure/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "activity_logs_monitoring", 3 | "version": "1.0.0", 4 | "description": "[Activity and Diagnostic Logs](./activity_logs_monitoring/README.md)", 5 | "main": "index.js", 6 | "directories": { 7 | "test": "tests" 8 | }, 9 | "scripts": { 10 | "test": "mocha", 11 | "lint": "prettier --write shared/*.js activity_logs_monitoring/*.js blobs_logs_monitoring/*.js test/*.js" 12 | }, 13 | "repository": { 14 | "type": "git", 15 | "url": "git+https://github.com/DataDog/datadog-serverless-functions.git" 16 | }, 17 | "author": "Datadog, Inc.", 18 | "license": "Apache-2.0", 19 | "bugs": { 20 | "url": "https://github.com/DataDog/datadog-serverless-functions/issues" 21 | }, 22 | "homepage": "https://github.com/DataDog/datadog-serverless-functions#readme", 23 | "dependencies": {}, 24 | "devDependencies": { 25 | "mocha": "^7.2.0", 26 | "prettier": "^1.19.1", 27 | "sinon": "^9.2.4", 28 | "lodash": ">=4.17.21" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout source 10 | uses: actions/checkout@v3 11 | - name: Setup Python 12 | uses: actions/setup-python@v3 13 | with: 14 | python-version: 3.7 15 | 16 | - name: Install pip 17 | run: | 18 | python -m pip install --upgrade pip 19 | 20 | - name: Lint with flake8 21 | run: | 22 | pip install flake8 23 | flake8 ./aws/logs_monitoring/ --count --select=E9,F --show-source --statistics --exclude=*_pb2.py 24 | 25 | - name: Check formatting with Black 26 | run: | 27 | pip install black 28 | black --check --diff --exclude pb2.py ./aws/logs_monitoring 29 | 30 | - name: Setup CloudFormation Linter with Latest Version 31 | uses: scottbrenner/cfn-lint-action@v2 32 | 33 | - name: Print the CloudFormation Linter Version & run Linter 34 | run: | 35 | cfn-lint --version 36 | cfn-lint -t aws/logs_monitoring/template.yaml 37 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_coldstart.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "601427279990", 4 | "logGroup": "/aws/lambda/storms-cloudwatch-event", 5 | "logStream": "2020/06/04/[$LATEST]af2b1e1843b84a2d80c67840ae3ffa72", 6 | "subscriptionFilters": [ 7 | "myevent" 8 | ], 9 | "logEvents": [{ 10 | "id": 
"35486831490800643125153606102923171443962457178576257024", 11 | "timestamp": 1591284559098, 12 | "message": "START RequestId: db275f87-a934-471a-8980-b63bf4dc1beb Version: $LATEST\\n" 13 | }, 14 | { 15 | "id": "35486831490867545360749197972347778598780402263094198273", 16 | "timestamp": 1591284559101, 17 | "message": "END RequestId: db275f87-a934-471a-8980-b63bf4dc1beb\\n" 18 | }, 19 | { 20 | "id": "35486831490867545360749197972347778598780402263094198274", 21 | "timestamp": 1591284559101, 22 | "message": "REPORT RequestId: db275f87-a934-471a-8980-b63bf4dc1beb\\tDuration: 1.76 ms\\tBilled Duration: 100 ms\\tMemory Size: 128 MB\\tMax Memory Used: 48 MB\\tInit Duration: 120.96 ms\\t\\n" 23 | } 24 | ] 25 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots-cache-test/cloudwatch_log_coldstart.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "601427279990", 4 | "logGroup": "/aws/lambda/integration-tests-dev-cache_test_lambda", 5 | "logStream": "2020/06/04/[$LATEST]af2b1e1843b84a2d80c67840ae3ffa72", 6 | "subscriptionFilters": [ 7 | "myevent" 8 | ], 9 | "logEvents": [{ 10 | "id": "35486831490800643125153606102923171443962457178576257024", 11 | "timestamp": 1591284559098, 12 | "message": "START RequestId: db275f87-a934-471a-8980-b63bf4dc1beb Version: $LATEST\\n" 13 | }, 14 | { 15 | "id": "35486831490867545360749197972347778598780402263094198273", 16 | "timestamp": 1591284559101, 17 | "message": "END RequestId: db275f87-a934-471a-8980-b63bf4dc1beb\\n" 18 | }, 19 | { 20 | "id": "35486831490867545360749197972347778598780402263094198274", 21 | "timestamp": 1591284559101, 22 | "message": "REPORT RequestId: db275f87-a934-471a-8980-b63bf4dc1beb\\tDuration: 1.76 ms\\tBilled Duration: 100 ms\\tMemory Size: 128 MB\\tMax Memory Used: 48 MB\\tInit Duration: 120.96 ms\\t\\n" 23 | } 24 | ] 25 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/cmd/trace/testdata/xray-parent.json: -------------------------------------------------------------------------------- 1 | [{"message": "{\"traces\": [[{\"trace_id\": \"0C36BE8EAF653E1E\", \"parent_id\": \"D732CAEBD27957F0\", \"span_id\": \"D79426C7432A47E7\", \"service\": \"aws.lambda\", \"resource\": \"StockBuyerFunction\", \"name\": \"aws.lambda\", \"error\": 0, \"start\": 1604618092749618660, \"duration\": 135477, \"meta\": {\"_dd.origin\": \"lambda\", \"runtime-id\": \"c3b1e7af86604182808ac323a69c8d0d\", \"cold_start\": \"false\", \"function_arn\": \"arn:aws:lambda:sa-east-1:601427279990:function:stockbuyerfunction\", \"function_version\": \"$LATEST\", \"request_id\": \"247aacf6-b69c-4b26-8c58-cc6d1da0e564\", \"resource_names\": \"StockBuyerFunction\", \"datadog_lambda\": \"2.23.0\", \"dd_trace\": \"0.41.2\", \"_dd.parent_source\": \"xray\"}, \"metrics\": {\"system.pid\": 8, \"_sampling_priority_v1\": 2}, \"type\": \"serverless\"}]]}\n", "tags": "forwardername:datadog-forwarder-prod-org-447397,forwarder_memorysize:1024,forwarder_version:3.21.2,,account_id:601427279990,aws_account:601427279990,aws_cloudformation_logical-id:stockbuyerfunction,aws_cloudformation_stack-id:arn:aws:cloudformation:sa-east-1:601427279990:stack/stock-trader/5cce0d60-fa8a-11ea-af25-02ec8a020c46,aws_cloudformation_stack-name:stock-trader,env:reference,functionname:stockbuyerfunction,lambda_createdby:sam,region:sa-east-1,service:stock-trader"}] 
-------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/connection.py: -------------------------------------------------------------------------------- 1 | # Unless explicitly stated otherwise all files in this repository are licensed 2 | # under the Apache License Version 2.0. 3 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 4 | # Copyright 2021 Datadog, Inc. 5 | from ctypes import cdll, Structure, c_char_p, c_int 6 | import json 7 | import os 8 | 9 | 10 | class GO_STRING(Structure): 11 | _fields_ = [("p", c_char_p), ("n", c_int)] 12 | 13 | 14 | def make_go_string(str): 15 | if not type(str) is bytes: 16 | str = str.encode("utf-8") 17 | return GO_STRING(str, len(str)) 18 | 19 | 20 | class TraceConnection: 21 | def __init__(self, root_url, api_key, insecure_skip_verify): 22 | dir = os.path.dirname(os.path.realpath(__file__)) 23 | self.lib = cdll.LoadLibrary("{}/bin/trace-intake.so".format(dir)) 24 | self.lib.Configure( 25 | make_go_string(root_url), 26 | make_go_string(api_key), 27 | insecure_skip_verify, 28 | ) 29 | 30 | def send_traces(self, trace_payloads): 31 | serialized_trace_paylods = json.dumps(trace_payloads) 32 | had_error = ( 33 | self.lib.ForwardTraces(make_go_string(serialized_trace_paylods)) != 0 34 | ) 35 | if had_error: 36 | raise Exception("Failed to send to trace intake") 37 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_timeout.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "601427279990", 4 | "logGroup": "/aws/lambda/storms-cloudwatch-event", 5 | "logStream": "2020/06/09/[$LATEST]b249865adaaf4fad80f95f8ad09725b8", 6 | "subscriptionFilters": [ 7 | "myevent" 8 | ], 9 | "logEvents": [{ 10 | "id": "35496429375792603298393743017356257146982675867810398208", 11 | "timestamp": 1591714943146, 12 | "message": "START RequestId: 7c9567b5-107b-4a6c-8798-0157ac21db52 Version: $LATEST\\n" 13 | }, 14 | { 15 | "id": "35496429442806342619978265557671090556291002193281548289", 16 | "timestamp": 1591714946151, 17 | "message": "END RequestId: 7c9567b5-107b-4a6c-8798-0157ac21db52\\n" 18 | }, 19 | { 20 | "id": "35496429442806342619978265557671090556291002193281548290", 21 | "timestamp": 1591714946151, 22 | "message": "REPORT RequestId: 7c9567b5-107b-4a6c-8798-0157ac21db52\\tDuration: 3003.16 ms\\tBilled Duration: 3000 ms\\tMemory Size: 128 MB\\tMax Memory Used: 48 MB\\tInit Duration: 127.02 ms\\t\\n" 23 | }, 24 | { 25 | "id": "35496429442806342619978265557671090556291002193281548291", 26 | "timestamp": 1591714946151, 27 | "message": "2020-06-09T15:02:26.150Z 7c9567b5-107b-4a6c-8798-0157ac21db52 Task timed out after 3.00 seconds\\n\\n" 28 | } 29 | ] 30 | } -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to the Datadog AWS Lambda functions 2 | 3 | :tada: First, thanks for contributing! :tada: 4 | 5 | ## Submitting Issues 6 | 7 | - First take a look at the [Troubleshooting section](https://help.datadoghq.com/hc/en-us/sections/200763635-Amazon-Web-Services) of our [Knowledge Base](https://help.datadoghq.com/hc/en-us). 8 | - If you can't find anything useful, please contact our Solutions Team for assistance. 
9 | - Finally, you can open a GitHub issue 10 | 11 | ## Pull Requests 12 | 13 | Thanks for helping this code base grow! In order to ease/speed up our review, here are some items you can check/improve when submitting your PR: 14 | 15 | - [ ] have a [proper commit history](#commits) (we advise you to rebase if needed). 16 | - [ ] write tests for the code you wrote. 17 | - [ ] preferably make sure that all tests pass locally. 18 | - [ ] summarize your PR with a descriptive title and a message describing your changes, cross-referencing any related bugs/PRs. 19 | 20 | ## Commits 21 | 22 | - Please keep each commit's changes small and focused--many changes in the same commit makes them harder to review. 23 | - Please also combine each code-change within one commit rather than many commits. Rebase liberally. 24 | - Please make all commit messages clear and communicative. 25 | 26 | Following these rules keeps history cleaner, makes it easier to revert things, and it makes developers happier too. 27 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots-cache-test/cloudwatch_log_timeout.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "601427279990", 4 | "logGroup": "/aws/lambda/integration-tests-dev-cache_test_lambda", 5 | "logStream": "2020/06/09/[$LATEST]b249865adaaf4fad80f95f8ad09725b8", 6 | "subscriptionFilters": [ 7 | "myevent" 8 | ], 9 | "logEvents": [{ 10 | "id": "35496429375792603298393743017356257146982675867810398208", 11 | "timestamp": 1591714943146, 12 | "message": "START RequestId: 7c9567b5-107b-4a6c-8798-0157ac21db52 Version: $LATEST\\n" 13 | }, 14 | { 15 | "id": "35496429442806342619978265557671090556291002193281548289", 16 | "timestamp": 1591714946151, 17 | "message": "END RequestId: 7c9567b5-107b-4a6c-8798-0157ac21db52\\n" 18 | }, 19 | { 20 | "id": "35496429442806342619978265557671090556291002193281548290", 21 | "timestamp": 1591714946151, 22 | "message": "REPORT RequestId: 7c9567b5-107b-4a6c-8798-0157ac21db52\\tDuration: 3003.16 ms\\tBilled Duration: 3000 ms\\tMemory Size: 128 MB\\tMax Memory Used: 48 MB\\tInit Duration: 127.02 ms\\t\\n" 23 | }, 24 | { 25 | "id": "35496429442806342619978265557671090556291002193281548291", 26 | "timestamp": 1591714946151, 27 | "message": "2020-06-09T15:02:26.150Z 7c9567b5-107b-4a6c-8798-0157ac21db52 Task timed out after 3.00 seconds\\n\\n" 28 | } 29 | ] 30 | } -------------------------------------------------------------------------------- /aws/rds_enhanced_monitoring/examples/generate_CWLogTestEvent.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SCRIPTPATH=$(cd `dirname $0` && pwd) 4 | 5 | if [ $(uname) == "Darwin" ]; then 6 | datecmd=gdate 7 | base64cmd=gbase64 8 | command -v ${datecmd} >/dev/null 2>&1 || { echo >&2 "${datecmd} is required but is not installed. Install with 'brew install coreutils'. Aborting."; exit 1; } 9 | command -v ${base64cmd} >/dev/null 2>&1 || { echo >&2 "${base64cmd} is required but is not installed. Install with 'brew install coreutils'. 
Aborting."; exit 1; } 10 | else 11 | datecmd=date 12 | base64cmd=base64 13 | fi 14 | 15 | ts=$(($(${datecmd} +%s%N)/1000000)) 16 | tmp_CWLogEvent=''' 17 | { 18 | "awslogs": { 19 | "data":"__b64data__" 20 | } 21 | } 22 | ''' 23 | 24 | tmp_data=''' 25 | { 26 | "logStream": "db-ABCDEFGHIJKLMNOPQRSTUVWXYZ", 27 | "messageType": "DATA_MESSAGE", 28 | "logEvents": [ 29 | { 30 | "timestamp": __timestamp__, 31 | "message": "__message__" 32 | } 33 | ], 34 | "owner": "123456789000", 35 | "subscriptionFilters": [ 36 | "dd-rdsenhanced-filter" 37 | ], 38 | "logGroup": "RDSOSMetrics" 39 | } 40 | ''' 41 | 42 | tmp_message=$(sed -e 's/"/\\"/g' $SCRIPTPATH/tmp_message.json) 43 | 44 | tmp_CWLogRaw=${tmp_data/__message__/$tmp_message} 45 | tmp_CWLogB64=$(echo ${tmp_CWLogRaw/__timestamp__/$ts} | gzip | ${base64cmd} -w0) 46 | 47 | printf "${tmp_CWLogEvent/__b64data__/$tmp_CWLogB64}" 48 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ### What does this PR do? 4 | 5 | 6 | 7 | ### Motivation 8 | 9 | 10 | 11 | ### Testing Guidelines 12 | 13 | 14 | 15 | ### Additional Notes 16 | 17 | 18 | 19 | ### Types of changes 20 | 21 | - [ ] Bug fix 22 | - [ ] New feature 23 | - [ ] Breaking change 24 | - [ ] Misc (docs, refactoring, dependency upgrade, etc.) 25 | 26 | ### Check all that apply 27 | 28 | - [ ] This PR's description is comprehensive 29 | - [ ] This PR contains breaking changes that are documented in the description 30 | - [ ] This PR introduces new APIs or parameters that are documented and unlikely to change in the foreseeable future 31 | - [ ] This PR impacts documentation, and it has been updated (or a ticket has been logged) 32 | - [ ] This PR's changes are covered by the automated tests 33 | - [ ] This PR collects user input/sensitive content into Datadog 34 | - [ ] This PR passes the integration tests (ask a Datadog member to run the tests) 35 | - [ ] This PR passes the unit tests 36 | - [ ] This PR passes the installation tests (ask a Datadog member to run the tests) 37 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tests/test_logs.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from logs import filter_logs 4 | 5 | 6 | class TestFilterLogs(unittest.TestCase): 7 | example_logs = [ 8 | "START RequestId: ...", 9 | "This is not a REPORT log", 10 | "END RequestId: ...", 11 | "REPORT RequestId: ...", 12 | ] 13 | 14 | def test_include_at_match(self): 15 | filtered_logs = filter_logs(self.example_logs, include_pattern=r"^(START|END)") 16 | 17 | self.assertEqual( 18 | filtered_logs, 19 | [ 20 | "START RequestId: ...", 21 | "END RequestId: ...", 22 | ], 23 | ) 24 | 25 | def test_exclude_at_match(self): 26 | filtered_logs = filter_logs(self.example_logs, exclude_pattern=r"^(START|END)") 27 | 28 | self.assertEqual( 29 | filtered_logs, 30 | [ 31 | "This is not a REPORT log", 32 | "REPORT RequestId: ...", 33 | ], 34 | ) 35 | 36 | def test_exclude_overrides_include(self): 37 | filtered_logs = filter_logs( 38 | self.example_logs, include_pattern=r"^(START|END)", exclude_pattern=r"^END" 39 | ) 40 | 41 | self.assertEqual( 42 | filtered_logs, 43 | [ 44 | "START RequestId: ...", 45 | ], 46 | ) 47 | 48 | def test_no_filtering_rules(self): 49 | filtered_logs = filter_logs(self.example_logs) 50 | self.assertEqual(filtered_logs, self.example_logs) 51 
| -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/trace_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: trace.proto 4 | """Generated protocol buffer code.""" 5 | from google.protobuf import descriptor as _descriptor 6 | from google.protobuf import descriptor_pool as _descriptor_pool 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | import span_pb2 as span__pb2 15 | 16 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( 17 | b'\n\x0btrace.proto\x12\x02pb\x1a\nspan.proto"X\n\x08\x41PITrace\x12\x0f\n\x07traceID\x18\x01 \x01(\x04\x12\x17\n\x05spans\x18\x02 \x03(\x0b\x32\x08.pb.Span\x12\x11\n\tstartTime\x18\x06 \x01(\x03\x12\x0f\n\x07\x65ndTime\x18\x07 \x01(\x03\x62\x06proto3' 18 | ) 19 | 20 | 21 | _APITRACE = DESCRIPTOR.message_types_by_name["APITrace"] 22 | APITrace = _reflection.GeneratedProtocolMessageType( 23 | "APITrace", 24 | (_message.Message,), 25 | { 26 | "DESCRIPTOR": _APITRACE, 27 | "__module__": "trace_pb2" 28 | # @@protoc_insertion_point(class_scope:pb.APITrace) 29 | }, 30 | ) 31 | _sym_db.RegisterMessage(APITrace) 32 | 33 | if _descriptor._USE_C_DESCRIPTORS == False: 34 | DESCRIPTOR._options = None 35 | _APITRACE._serialized_start = 31 36 | _APITRACE._serialized_end = 119 37 | # @@protoc_insertion_point(module_scope) 38 | -------------------------------------------------------------------------------- /azure/eventhub_log_forwarder/event_hub.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "eventHubNamespace": { 6 | "type": "string", 7 | "metadata": { 8 | "description": "Name of EventHub namespace" 9 | } 10 | }, 11 | "eventHubName": { 12 | "type": "string", 13 | "defaultValue": "datadog-eventhub", 14 | "metadata": { 15 | "description": "Name of Event Hub" 16 | } 17 | }, 18 | "location": { 19 | "type": "string", 20 | "defaultValue": "[resourceGroup().location]", 21 | "metadata": { 22 | "description": "Location for all resources." 
23 | } 24 | } 25 | }, 26 | "resources": [ 27 | { 28 | "apiVersion": "2018-01-01-preview", 29 | "name": "[parameters('eventHubNamespace')]", 30 | "type": "Microsoft.EventHub/namespaces", 31 | "location": "[parameters('location')]", 32 | "sku": { 33 | "name": "Standard", 34 | "tier": "Standard", 35 | "capacity": 1 36 | }, 37 | "tags": {}, 38 | "properties": {}, 39 | "resources": [ 40 | { 41 | "apiVersion": "2017-04-01", 42 | "name": "[parameters('eventHubName')]", 43 | "type": "eventhubs", 44 | "dependsOn": [ 45 | "[resourceId('Microsoft.EventHub/namespaces/', parameters('eventHubNamespace'))]" 46 | ], 47 | "properties": {} 48 | } 49 | ] 50 | } 51 | ] 52 | } 53 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/publish_prod.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Use with `./publish_prod.sh 4 | 5 | set -e 6 | 7 | # Ensure on main, and pull the latest 8 | BRANCH=$(git rev-parse --abbrev-ref HEAD) 9 | if [ $BRANCH != "master" ]; then 10 | echo "Not on master, aborting" 11 | exit 1 12 | else 13 | echo "Updating master" 14 | git pull origin master 15 | fi 16 | 17 | # Ensure no uncommitted changes 18 | if [ -n "$(git status --porcelain)" ]; then 19 | echo "Detected uncommitted changes, aborting" 20 | exit 1 21 | fi 22 | 23 | # Read the new version 24 | if [ -z "$1" ]; then 25 | echo "Must specify a layer version" 26 | exit 1 27 | else 28 | LAYER_VERSION=$1 29 | fi 30 | 31 | # Read the new version 32 | if [ -z "$2" ]; then 33 | echo "Must specify a forwarder version" 34 | exit 1 35 | else 36 | FORWARDER_VERSION=$2 37 | fi 38 | 39 | # Ensure AWS access before proceeding 40 | saml2aws login -a govcloud-us1-fed-human-engineering 41 | AWS_PROFILE=govcloud-us1-fed-human-engineering aws sts get-caller-identity 42 | aws-vault exec prod-engineering -- aws sts get-caller-identity 43 | 44 | echo "Publishing layers to GovCloud AWS regions" 45 | saml2aws login -a govcloud-us1-fed-human-engineering 46 | LAYER_VERSION=$LAYER_VERSION FORWARDER_VERSION=$FORWARDER_VERSION AWS_PROFILE=govcloud-us1-fed-human-engineering ./tools/publish_layers.sh 47 | 48 | echo 49 | echo "Publishing layers to commercial AWS regions" 50 | LAYER_VERSION=$LAYER_VERSION FORWARDER_VERSION=$FORWARDER_VERSION aws-vault exec prod-engineering -- ./tools/publish_layers.sh -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_apigateway.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "API-Gateway-Execution-Logs_xxxxxxxxxx/production", 5 | "logStream": "123456789123_API-Gateway_us-east-1", 6 | "subscriptionFilters": [ 7 | "testFilter" 8 | ], 9 | "logEvents": [ 10 | { 11 | "ingestionTime": 1607444452966, 12 | "timestamp": 1607444432899, 13 | "message": "(aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa) Extended Request Id: XXXXXX-XXXXXXXX=", 14 | "eventId": "11111111111111111111111111111111", 15 | "logStreamName": "11111111111111111111111111111111" 16 | }, 17 | { 18 | "ingestionTime": 1607444452966, 19 | "timestamp": 1607444432899, 20 | "message": "(aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa) Verifying Usage Plan for request: a5217acf-1172-4860-b374-8116dbd3e9de. 
API Key: API Stage: xxxxxxxxxx/production", 21 | "eventId": "11111111111111111111111111111111", 22 | "logStreamName": "11111111111111111111111111111111" 23 | }, 24 | { 25 | "ingestionTime": 1607444452966, 26 | "timestamp": 1607444432900, 27 | "message": "(aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa) API Key authorized because method 'POST /event' does not require API Key. Request will not contribute to throttle or quota limits", 28 | "eventId": "11111111111111111111111111111111", 29 | "logStreamName": "11111111111111111111111111111111" 30 | } 31 | ] 32 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/trace_payload_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: trace_payload.proto 4 | """Generated protocol buffer code.""" 5 | from google.protobuf import descriptor as _descriptor 6 | from google.protobuf import descriptor_pool as _descriptor_pool 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | import trace_pb2 as trace__pb2 15 | import span_pb2 as span__pb2 16 | 17 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( 18 | b'\n\x13trace_payload.proto\x12\x02pb\x1a\x0btrace.proto\x1a\nspan.proto"K\n\x0cTracePayload\x12\x10\n\x08hostName\x18\x01 \x01(\t\x12\x0b\n\x03\x65nv\x18\x02 \x01(\t\x12\x1c\n\x06traces\x18\x03 \x03(\x0b\x32\x0c.pb.APITraceb\x06proto3' 19 | ) 20 | 21 | 22 | _TRACEPAYLOAD = DESCRIPTOR.message_types_by_name["TracePayload"] 23 | TracePayload = _reflection.GeneratedProtocolMessageType( 24 | "TracePayload", 25 | (_message.Message,), 26 | { 27 | "DESCRIPTOR": _TRACEPAYLOAD, 28 | "__module__": "trace_payload_pb2" 29 | # @@protoc_insertion_point(class_scope:pb.TracePayload) 30 | }, 31 | ) 32 | _sym_db.RegisterMessage(TracePayload) 33 | 34 | if _descriptor._USE_C_DESCRIPTORS == False: 35 | 36 | DESCRIPTOR._options = None 37 | _TRACEPAYLOAD._serialized_start = 52 38 | _TRACEPAYLOAD._serialized_end = 127 39 | # @@protoc_insertion_point(module_scope) 40 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_fsx_windows.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "/aws/fsx/windows/12345", 5 | "logStream": "123456789123_us-east-1", 6 | "subscriptionFilters": [ 7 | "testFilter" 8 | ], 9 | "logEvents": [ 10 | { 11 | "id": "35689263648391837472973739781728019701390240798247944192", 12 | "timestamp": 1600361930988, 13 | "message": "4663101280000x8020000000000000294054Securityamznfsxjgnfqf2v.fsx.demo.comS-1-5-21-1387100404-3545110199-3154596375-1113Adminfsx0xbc9cfccSecurityFile\\Device\\HarddiskVolume13\\share\\My first folder0x1350%%44230x800x4S:AI" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/testdata/xray-parent.json: -------------------------------------------------------------------------------- 1 | { 2 | "comment": "edge case", 3 | "tags": 
"forwardername:datadog-forwarder-prod-org-447397,forwarder_memorysize:1024,forwarder_version:3.21.2,,account_id:601427279990,aws_account:601427279990,aws_cloudformation_logical-id:stockcheckerfunction,aws_cloudformation_stack-id:arn:aws:cloudformation:sa-east-1:601427279990:stack/stock-trader/5cce0d60-fa8a-11ea-af25-02ec8a020c46,aws_cloudformation_stack-name:stock-trader,env:reference,functionname:stockcheckerfunction,lambda_createdby:sam,region:sa-east-1,service:stock-trader", 4 | "trace": { 5 | "traces": [ 6 | [ 7 | { 8 | "trace_id": "53DCCBD73F8D0D3B", 9 | "parent_id": "D99A4DB9F549DAC0", 10 | "span_id": "F98E98917B776BAD", 11 | "service": "aws.lambda", 12 | "resource": "StockCheckerFunction", 13 | "name": "aws.lambda", 14 | "error": 0, 15 | "start": 1604619292588914051, 16 | "duration": 103808, 17 | "meta": { 18 | "_dd.origin": "lambda", 19 | "runtime-id": "2fca8125e38047e3a812ffba65924e81", 20 | "cold_start": "false", 21 | "function_arn": "arn:aws:lambda:sa-east-1:601427279990:function:stockcheckerfunction", 22 | "function_version": "$LATEST", 23 | "request_id": "acf2c587-ee79-42e7-9cf7-2a5218fa30ce", 24 | "resource_names": "StockCheckerFunction", 25 | "_dd.parent_source": "xray" 26 | }, 27 | "metrics": { 28 | "system.pid": 8, 29 | "_sampling_priority_v1": 2 30 | }, 31 | "type": "serverless" 32 | } 33 | ] 34 | ] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /azure/deploy-to-azure/event_hub.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "eventHubNamespace": { 6 | "type": "string", 7 | "metadata": { 8 | "description": "Name of EventHub namespace" 9 | } 10 | }, 11 | "eventHubName": { 12 | "type": "string", 13 | "defaultValue": "datadog-eventhub", 14 | "metadata": { 15 | "description": "Name of Event Hub" 16 | } 17 | }, 18 | "partitionCount": { 19 | "type": "int", 20 | "defaultValue": 32, 21 | "metadata": { 22 | "description": "The number of event hub partitions" 23 | } 24 | }, 25 | "location": { 26 | "type": "string", 27 | "defaultValue": "[resourceGroup().location]", 28 | "metadata": { 29 | "description": "Location for all resources." 30 | } 31 | } 32 | }, 33 | "resources": [ 34 | { 35 | "apiVersion": "2018-01-01-preview", 36 | "name": "[parameters('eventHubNamespace')]", 37 | "type": "Microsoft.EventHub/namespaces", 38 | "location": "[parameters('location')]", 39 | "sku": { 40 | "name": "Standard", 41 | "tier": "Standard", 42 | "capacity": 1 43 | }, 44 | "tags": {}, 45 | "properties": {}, 46 | "resources": [ 47 | { 48 | "apiVersion": "2017-04-01", 49 | "name": "[parameters('eventHubName')]", 50 | "type": "eventhubs", 51 | "dependsOn": [ 52 | "[resourceId('Microsoft.EventHub/namespaces/', parameters('eventHubNamespace'))]" 53 | ], 54 | "properties": { 55 | "partitionCount": "[parameters('partitionCount')]" 56 | } 57 | } 58 | ] 59 | } 60 | ] 61 | } 62 | -------------------------------------------------------------------------------- /aws/rds_enhanced_monitoring/examples/README.md: -------------------------------------------------------------------------------- 1 | # Testing the Lambda function 2 | 3 | To test the Lambda function, you can use the script `generate_CWLogTestEvent.sh` to generate test event with current timestamps. 
The script requires the following files in the same directory: 4 | 5 | * `tmp_message.json` - This contains the metrics would be parsed and sent via Datadog API. Based from the [RDS Example message](../#rds-message-example) 6 | 7 | ## Running the command 8 | ```sh 9 | $ bash generate_CWLogTestEvent.sh 10 | ``` 11 | 12 | * sample output 13 | ```json 14 | { 15 | "awslogs": { 16 | "data":"H4sIAE/E31sAA81WTXPbNhC991dweI5VAPyUb0pku27tSrHkpG3U8UAkJGEsgQwASpE9/u8FsCRFOp720EN75O7btw+L3SWePX9brGdaMrrzzz0/X56N3n8YX1xe/XT98y83t79Oph/vZvP7T59/+/0P/53n75hSdM3mx5JZ/Hg0Hz3cXsxmo6sL6zZkF3smtDLOL96z52tuIjTdlcaAoxAHQxwGQ4TQicvyPHsLn4k1F2xhPhf+qJKFpAtDufC5MAQiY9dj8PEzRPGSZEEesmgVr/uoO6aKSp7Qf3siCG01QsTDQ2t4eADEnknFC2H9eGC1L/yqtCiIwMjL6VG98/B5FJyjEIJEtfv0YXqvLIZYQ1ZW95pv+RPVNZk99royuewHqpm5/Nr9VEel2Q4saWotB8rrgCh0AfnWCRkmgyhx2hSTAIhdgC403YL4KHacmoHBJHERgmcMvknkvVjLtqD5yJzbXNAtF5Vmjd4VX2nGBKCxE7Di+zoaO7ZCNJ8pkO3YrpDHhuEguWZLmj06kPVvqjWbmkzqUjL2vfVO7fPvrbNKlq01o9mGORDGSUjiMO1j+ZPjJQgcqzoPiYYIxz3ovCkWXIagma6Ph1Oc4MShSzsDdLllcLtRihxtzqV2x4xjF72jZQmqDCCJsWuDDmFgkqM07N9RFCcIAVRt6dLawiAZgsxltVqZZnRZE4KDGAqsqXpUTX3VlrGSi7UDkcD6n4rdkp8qKyshagCGfigaoagnhhAnbrktssfaDwnVgZZNvlPt+8GoW+k6TjB9KKS7+S8umAvN5Ipm9SgxvUEwPvKbE4CbptbwHeDhIE68F+9PKLh6vJ6c2Mwey2+oZiI7dmfIdVzHbuYgaO3zjSyq9aasdNMhAwJVMmR9Z8NnPdcTNZ11jVbLx4pVbMxKvWkL4HK04GiQBo34Fd+y2VGd1Ju5dWU09x8noEHQZsnIXNkAZQPqIc8vedOBcYS6xikzK1DUmnEAvfitheOhadAUWrSohJ4WHMAL/0eTJ1/mVNfbt73NAKUkjcI2TScFIYOkPVUpi8ws9hsOWw1OtleQ1wwnqhv+dDLaWfZ6zWFewiiCQZMmCSxzfFol91ZABpM2GAbNdm2t7S7tkclahbniJB660flvxKWkIy6Br/+NOBT1xIX/LC4OkwC9kjaZeXUnMNUXiF6rQ2+psz+Pt8R1Koc6ygiJEXolKyBhmAxJX9fd+F8KIwMYnFfCkvRtYWGI44jY4fBefJgRvzgIJu2zB5MgNOs+tQ8i+3xS1VJlkpf2fWCG1WxGeEj5eX5m5pKJjX3i5Gcr5/MdmXlyXZkdZR9Y9nCT2S3TkmfKJPvhL3xnDsPfCQAA" 17 | } 18 | } 19 | ``` 20 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | forwarder: 5 | image: datadog-log-forwarder:${PYTHON_RUNTIME:-python3.8} 6 | command: lambda_function.lambda_handler 7 | environment: 8 | AWS_ACCOUNT_ID: ${AWS_ACCOUNT_ID:-0000000000} 9 | AWS_ACCESS_KEY_ID: "${AWS_ACCESS_KEY_ID}" 10 | AWS_SECRET_ACCESS_KEY: "${AWS_SECRET_ACCESS_KEY}" 11 | AWS_SECURITY_TOKEN: "${AWS_SECURITY_TOKEN}" 12 | AWS_SESSION_TOKEN: "${AWS_SESSION_TOKEN}" 13 | DOCKER_LAMBDA_STAY_OPEN: 1 14 | DD_LOG_LEVEL: ${LOG_LEVEL:-info} 15 | DD_API_KEY: abcdefghijklmnopqrstuvwxyz012345 # Must be 32 characters exactly 16 | DD_URL: recorder # Used for logs intake 17 | DD_PORT: 8080 # API port to use 18 | DD_SITE: datadog.com 19 | DD_API_URL: http://recorder:8080 20 | DD_LOGS_INTAKE_URL: recorder:8080 21 | DD_TRACE_INTAKE_URL: http://recorder:8080 22 | DD_NO_SSL: "true" 23 | DD_SKIP_SSL_VALIDATION: "true" 24 | DD_USE_TCP: "false" 25 | DD_USE_COMPRESSION: "false" 26 | DD_ADDITIONAL_TARGET_LAMBDAS: "${EXTERNAL_LAMBDAS}" 27 | DD_S3_BUCKET_NAME: "${DD_S3_BUCKET_NAME}" 28 | DD_FETCH_LAMBDA_TAGS: "true" 29 | DD_FETCH_LOG_GROUP_TAGS: "true" 30 | expose: 31 | - 9001 32 | 33 | recorder: 34 | build: recorder 35 | command: ./recorder.py 36 | environment: 37 | SERVER_PORT: 8080 38 | expose: 39 | - 8080 40 | 41 | tester: 42 | build: tester 43 | command: python -m unittest discover 44 | volumes: 45 | - ${SNAPSHOTS_DIR_NAME}:/snapshots 46 | environment: 47 | RECORDER_URL: http://recorder:8080/recording 48 | FORWARDER_URL: http://forwarder:9001/2015-03-31/functions/myfunction/invocations 49 | 
UPDATE_SNAPSHOTS: ${UPDATE_SNAPSHOTS:-false} 50 | SNAPSHOTS_DIR_NAME: ${SNAPSHOTS_DIR_NAME} 51 | 52 | depends_on: 53 | - forwarder 54 | - recorder 55 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/README.md: -------------------------------------------------------------------------------- 1 | # Datadog Trace Forwarder 2 | 3 | Shared library for submitting traces to the trace intake. 4 | Features include: 5 | * Tools for building an AWS Lambda Layer from the library 6 | * Trace obfuscation (using logic from datadog-agent) 7 | * Submits Stats/Transactions for traces 8 | * Python bindings 9 | 10 | ```python 11 | from trace_forwarder.connection import TraceConnection 12 | conn = TraceConnection("datadoghq.com", "my_api_key") 13 | conn.send(""" 14 | { 15 | "traces": [ 16 | [ 17 | { 18 | "trace_id": "75BCD15", 19 | "span_id": "248B0C57D64F053", 20 | "parent_id": "B853ABB94CFE745C", 21 | "name": "aws.lambda", 22 | "type": "web", 23 | "resource": "aws.lambda", 24 | "error": 0, 25 | "meta": { 26 | "language": "javascript", 27 | "http.url": "https://www.google.com?api_key=12AB45DEWF" 28 | }, 29 | "metrics": { 30 | "_sample_rate": 1, 31 | "_sampling_priority_v1": 2 32 | }, 33 | "start": 1565381107070475300, 34 | "duration": 539684570, 35 | "service": "node" 36 | } 37 | ] 38 | ] 39 | } 40 | """) 41 | ``` 42 | 43 | ## Requirements 44 | 45 | * dep 46 | * go 1.12 or higher 47 | * docker 48 | 49 | ## Building Go Binary 50 | 51 | ```bash 52 | dep ensure 53 | make 54 | ``` 55 | 56 | Output is saved to `bin`, and the shared library will be compatible with your local environment. If you want to build a Linux-compatible binary, you will need to use Docker (setting the GOOS/GOARCH environment variables doesn't work for shared libraries). 57 | 58 | ```bash 59 | ./scripts/build_linux_go_bin.sh 60 | ``` 61 | 62 | ## Lambda Layer 63 | ### Building Lambda Layer 64 | 65 | You can build the Lambda layer with the following command: 66 | 67 | ```bash 68 | ./scripts/build_layers.sh 69 | ``` 70 | 71 | ### Publishing to staging 72 | 73 | ```bash 74 | ./scripts/publish_staging.sh 75 | ``` 76 | 77 | ### Publishing to prod 78 | 79 | ```bash 80 | ./scripts/publish_prod.sh 81 | ``` -------------------------------------------------------------------------------- /azure/eventhub_log_forwarder/resource_deploy.ps1: -------------------------------------------------------------------------------- 1 | param ( 2 | $SubscriptionId, 3 | $ApiKey, 4 | $EventhubNamespace, 5 | $FunctionAppName, 6 | $ResourceGroupLocation = "westus2", 7 | $ResourceGroupName = "datadog-log-forwarder-rg-" + $ResourceGroupLocation, 8 | $EventhubName = "datadog-eventhub-" + $ResourceGroupLocation, 9 | $FunctionName = "datadog-function-" + $ResourceGroupLocation, 10 | $DatadogSite = "datadoghq.com", 11 | $Environment = "AzureCloud", 12 | $DatadogTags = "" 13 | ) 14 | 15 | if (-Not ($SubscriptionId -And $ApiKey)) { Throw "`SubscriptionId` and `ApiKey` are required."
} 16 | 17 | Set-AzContext -SubscriptionId $SubscriptionId 18 | 19 | $code = (New-Object System.Net.WebClient).DownloadString("https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/activity_logs_monitoring/index.js") 20 | 21 | New-AzResourceGroup -Name $ResourceGroupName -Location $ResourceGroupLocation 22 | 23 | $environment = Get-AzEnvironment -Name $Environment 24 | $endpointSuffix = $environment.StorageEndpointSuffix 25 | $secureApiKey = ConvertTo-SecureString $ApiKey -AsPlainText -Force 26 | 27 | $deploymentArgs = @{ 28 | TemplateUri = "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/eventhub_log_forwarder/parent_template.json" 29 | ResourceGroupName = $ResourceGroupName 30 | functionCode = $code 31 | apiKey = $secureApiKey 32 | location = $ResourceGroupLocation 33 | eventHubName = $EventhubName 34 | functionName = $FunctionName 35 | datadogSite = $DatadogSite 36 | endpointSuffix = $endpointSuffix 37 | datadogTags = $DatadogTags 38 | } 39 | 40 | # Use values if parameters passed, otherwise we rely on the default value generated by the ARM template 41 | if ($EventhubNamespace) { $deploymentArgs["eventhubNamespace"] = $EventhubNamespace } 42 | if ($FunctionAppName) { $deploymentArgs["functionAppName"] = $FunctionAppName } 43 | 44 | try { 45 | New-AzResourceGroupDeployment @deploymentArgs -Verbose -ErrorAction Stop 46 | } catch { 47 | Write-Error $_ 48 | Return 49 | } 50 | -------------------------------------------------------------------------------- /azure/blobs_logs_monitoring/README.md: -------------------------------------------------------------------------------- 1 | # Datadog-Azure function 2 | 3 | The Datadog-Azure function is used to forward Azure logs to Datadog from new blob files added to 4 | a storage account. The function reads each file, splits it on \n, and sends each line as 5 | a log entry to Datadog. 6 | 7 | ## Quick Start 8 | 9 | The provided Node.js script must be deployed into your Azure Functions service. Follow the tutorial below to learn how to do so: 10 | 11 | ### 1. Create a new Blob triggered function 12 | 13 | - Expand your function application and click the `+` button next to `Functions`. If this is the first function in your function application, select `Custom function`. This displays the complete set of function templates. 14 | - In the search field type `Blob` and choose `Blob Trigger`. 15 | - Select the `Javascript` language in the right menu. 16 | - Enter a name for the function. 17 | - Select the path in the storage account that you want to read files and pull logs from. 18 | - Add the desired `Storage account connection`, or create a new one if you don't have one already. 19 | 20 | ### 2. Provide the code 21 | 22 | - Copy and paste the code of the [Datadog-Azure function](./index.js). 23 | 24 | ## 3. (optional) Send logs to EU or to a proxy 25 | 26 | ### Send logs to EU 27 | 28 | Set the environment variable `DD_SITE` to `datadoghq.eu` and logs are automatically forwarded to your EU platform. 29 | 30 | ## Parameters 31 | 32 | - **API KEY**: 33 | 34 | There are two ways to set your [Datadog API key](https://app.datadoghq.com/organization-settings/api-keys): 35 | 36 | 1. Replace `` in the code with your API Key value. 37 | 2.
Set the value through the `DD_API_KEY` environment variable 38 | 39 | - **Custom Tags**: 40 | 41 | You have two options to add custom tags to your logs: 42 | 43 | - Manually by editing the function code: Replace the `''` placeholder for the `DD_TAGS` variable by a comma separated list of tags 44 | - Automatically with the `DD_TAGS` environment variable 45 | 46 | Learn more about Datadog tagging in our main [Tagging documentation](https://docs.datadoghq.com/tagging/). 47 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/testdata/xray_reparent.json~snapshot: -------------------------------------------------------------------------------- 1 | ([]*pb.TracePayload) (len=1) { 2 | (*pb.TracePayload)({ 3 | HostName: (string) "", 4 | Env: (string) (len=4) "none", 5 | Traces: ([]*pb.APITrace) (len=1) { 6 | (*pb.APITrace)({ 7 | TraceID: (uint64) 5673508998968570243, 8 | Spans: ([]*pb.Span) (len=2) { 9 | (*pb.Span)({ 10 | Service: (string) (len=10) "aws.lambda", 11 | Name: (string) (len=16) "requests.request", 12 | Resource: (string) (len=16) "requests.request", 13 | TraceID: (uint64) 5673508998968570243, 14 | SpanID: (uint64) 15300116249758373965, 15 | ParentID: (uint64) 4834147509824110201, 16 | Start: (int64) 1586269922945357000, 17 | Duration: (int64) 138997000, 18 | Error: (int32) 0, 19 | Meta: (map[string]string) (len=4) { 20 | (string) (len=10) "_dd.origin": (string) (len=6) "lambda", 21 | (string) (len=11) "http.method": (string) (len=3) "GET", 22 | (string) (len=16) "http.status_code": (string) (len=3) "200", 23 | (string) (len=8) "http.url": (string) (len=26) "https://www.datadoghq.com/" 24 | }, 25 | Metrics: (map[string]float64) (len=2) { 26 | (string) (len=12) "_dd.measured": (float64) 1, 27 | (string) (len=10) "_top_level": (float64) 1 28 | }, 29 | Type: (string) (len=4) "http" 30 | }), 31 | (*pb.Span)({ 32 | Service: (string) (len=10) "aws.lambda", 33 | Name: (string) (len=19) "handler.get_message", 34 | Resource: (string) (len=19) "handler.get_message", 35 | TraceID: (uint64) 5673508998968570243, 36 | SpanID: (uint64) 9089086961342797587, 37 | ParentID: (uint64) 4834147509824110201, 38 | Start: (int64) 1586269923086220000, 39 | Duration: (int64) 100232000, 40 | Error: (int32) 0, 41 | Meta: (map[string]string) (len=1) { 42 | (string) (len=10) "_dd.origin": (string) (len=6) "lambda" 43 | }, 44 | Metrics: (map[string]float64) (len=1) { 45 | (string) (len=10) "_top_level": (float64) 1 46 | }, 47 | Type: (string) "" 48 | }) 49 | }, 50 | StartTime: (int64) 1586269922945357000, 51 | EndTime: (int64) 0 52 | }) 53 | }, 54 | Transactions: ([]*pb.Span) 55 | }) 56 | } 57 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/cmd/trace/main_test.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Unless explicitly stated otherwise all files in this repository are licensed 3 | * under the Apache License Version 2.0. 4 | * 5 | * This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | * Copyright 2021 Datadog, Inc. 
7 | */ 8 | package main 9 | 10 | import ( 11 | "io/ioutil" 12 | "os" 13 | "testing" 14 | 15 | "github.com/DataDog/datadog-agent/pkg/trace/pb" 16 | "github.com/stretchr/testify/assert" 17 | ) 18 | 19 | func TestUnmarshalSerializedTraces(t *testing.T) { 20 | input := "[{\"message\":\"traces\",\"tags\":\"tag1:value\"},{\"message\":\"traces\",\"tags\":\"tag1:value\"}]" 21 | 22 | output, _ := unmarshalSerializedTraces(input) 23 | 24 | assert.Equal(t, output[0].Message, "traces") 25 | assert.Equal(t, output[0].Tags, "tag1:value") 26 | assert.Equal(t, output[1].Message, "traces") 27 | assert.Equal(t, output[1].Tags, "tag1:value") 28 | } 29 | 30 | func TestAggregateTracePayloadsByEnv(t *testing.T) { 31 | payload1 := pb.TracePayload{ 32 | HostName: "", 33 | Env: "none", 34 | Traces: make([]*pb.APITrace, 0), 35 | } 36 | 37 | payload2 := pb.TracePayload{ 38 | HostName: "", 39 | Env: "", 40 | Traces: make([]*pb.APITrace, 0), 41 | } 42 | 43 | payload3 := pb.TracePayload{ 44 | HostName: "", 45 | Env: "", 46 | Traces: make([]*pb.APITrace, 0), 47 | } 48 | 49 | input := []*pb.TracePayload{&payload1, &payload2, &payload3} 50 | output := aggregateTracePayloadsByEnv(input) 51 | 52 | assert.Equal(t, len(output), 2) 53 | } 54 | 55 | func TestForwardTracesWithXRayRoot(t *testing.T) { 56 | inputFile := "testdata/xray-parent.json" 57 | file, err := os.Open(inputFile) 58 | assert.NoError(t, err) 59 | defer file.Close() 60 | 61 | contents, err := ioutil.ReadAll(file) 62 | input := string(contents) 63 | 64 | assert.NoError(t, err, "Couldn't read contents of test file") 65 | 66 | // We capture stdout 67 | originalStdout := os.Stdout 68 | r, w, _ := os.Pipe() 69 | os.Stdout = w 70 | 71 | result := ForwardTraces(input) 72 | 73 | w.Close() 74 | out, _ := ioutil.ReadAll(r) 75 | os.Stdout = originalStdout 76 | 77 | outputLog := string(out) 78 | 79 | assert.Equal(t, result, 0) 80 | assert.Equal(t, outputLog, "No traces to forward") 81 | } 82 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/testdata/xray_reparent.json: -------------------------------------------------------------------------------- 1 | { 2 | "comment": "Tests reparenting root span to X-Ray trace", 3 | "tags": "", 4 | "trace": { 5 | "traces": [ 6 | [ 7 | { 8 | "trace_id": "4EBC5A600180F183", 9 | "parent_id": "4316578C3AE7BE79", 10 | "span_id": "AEC382007ED199CC", 11 | "service": "aws.lambda", 12 | "resource": "hello-dog-dev-hello36", 13 | "name": "aws.lambda", 14 | "error": 0, 15 | "start": 1586269922931758000, 16 | "duration": 254812000, 17 | "meta": { 18 | "_dd.origin": "lambda", 19 | "cold_start": "false", 20 | "function_arn": "arn:aws:lambda:us-east-1:172597598159:function:hello-dog-dev-hello36", 21 | "request_id": "148e0567-902c-46e3-9374-d9642152420a", 22 | "resource_names": "hello-dog-dev-hello36", 23 | "_dd.parent_source": "xray" 24 | }, 25 | "metrics": { "system.pid": 1, "_sampling_priority_v1": 2 }, 26 | "type": "serverless" 27 | }, 28 | { 29 | "trace_id": "4EBC5A600180F183", 30 | "parent_id": "AEC382007ED199CC", 31 | "span_id": "D454EEAA6907984D", 32 | "service": "aws.lambda", 33 | "resource": "requests.request", 34 | "name": "requests.request", 35 | "error": 0, 36 | "start": 1586269922945357000, 37 | "duration": 138997000, 38 | "meta": { 39 | "_dd.origin": "lambda", 40 | "http.method": "GET", 41 | "http.url": "https://www.datadoghq.com/", 42 | "http.status_code": "200" 43 | }, 44 | "metrics": { "_dd.measured": 1 }, 45 | "type": "http" 46 | }, 47 | { 48 | 
"trace_id": "4EBC5A600180F183", 49 | "parent_id": "AEC382007ED199CC", 50 | "span_id": "7E22EC6F6F78D713", 51 | "service": "aws.lambda", 52 | "resource": "handler.get_message", 53 | "name": "handler.get_message", 54 | "error": 0, 55 | "start": 1586269923086220000, 56 | "duration": 100232000, 57 | "meta": { "_dd.origin": "lambda" } 58 | } 59 | ] 60 | ] 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /aws/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/python 3 | 4 | ### Python ### 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # celery beat schedule file 88 | celerybeat-schedule 89 | 90 | # SageMath parsed files 91 | *.sage.py 92 | 93 | # Environments 94 | .env 95 | .venv 96 | env/ 97 | venv/ 98 | ENV/ 99 | env.bak/ 100 | venv.bak/ 101 | 102 | # Spyder project settings 103 | .spyderproject 104 | .spyproject 105 | 106 | # Rope project settings 107 | .ropeproject 108 | 109 | # mkdocs documentation 110 | /site 111 | 112 | # mypy 113 | .mypy_cache/ 114 | .dmypy.json 115 | dmypy.json 116 | 117 | ### Python Patch ### 118 | .venv/ 119 | 120 | ### Python.VirtualEnv Stack ### 121 | # Virtualenv 122 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 123 | [Bb]in 124 | [Ii]nclude 125 | [Ll]ib 126 | [Ll]ib64 127 | [Ll]ocal 128 | [Ss]cripts 129 | pyvenv.cfg 130 | pip-selfcheck.json 131 | 132 | 133 | # End of https://www.gitignore.io/api/python 134 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/semver.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Source: https://gist.github.com/Ariel-Rodriguez/9e3c2163f4644d7a389759b224bfe7f3 4 | 5 | ### 6 | # semantic version comparition using semver specification http://semver.org/ 7 | # This bash script compares pre-releases alphabetically as well 8 | # 9 | # returns 1 when A greater than B 10 | # returns 0 when A equals B 11 | # returns -1 when A lower than B 12 | # 13 | # Usage 14 | # chmod +x semver.sh 15 | # ./semver.sh 1.0.0 v1.0.0-rc.0 16 | # --> 1 17 | # 18 | # Author Ariel Rodriguez 19 | # License 
MIT 20 | ### 21 | semver_compare() { 22 | local version_a version_b pr_a pr_b 23 | # strip word "v" and extract first subset version (x.y.z from x.y.z-foo.n) 24 | version_a=$(echo "${1//v/}" | awk -F'-' '{print $1}') 25 | version_b=$(echo "${2//v/}" | awk -F'-' '{print $1}') 26 | 27 | if [ "$version_a" \= "$version_b" ] 28 | then 29 | # check for pre-release 30 | # extract pre-release (-foo.n from x.y.z-foo.n) 31 | pr_a=$(echo "$1" | awk -F'-' '{print $2}') 32 | pr_b=$(echo "$2" | awk -F'-' '{print $2}') 33 | 34 | #### 35 | # Return 0 when A is equal to B 36 | [ "$pr_a" \= "$pr_b" ] && echo 0 && return 0 37 | 38 | #### 39 | # Return 1 40 | 41 | # Case when A is not pre-release 42 | if [ -z "$pr_a" ] 43 | then 44 | echo 1 && return 0 45 | fi 46 | 47 | #### 48 | # Case when pre-release A exists and is greater than B's pre-release 49 | 50 | # extract numbers -rc.x --> x 51 | number_a=$(echo ${pr_a//[!0-9]/}) 52 | number_b=$(echo ${pr_b//[!0-9]/}) 53 | [ -z "${number_a}" ] && number_a=0 54 | [ -z "${number_b}" ] && number_b=0 55 | 56 | [ "$pr_a" \> "$pr_b" ] && [ -n "$pr_b" ] && [ "$number_a" -gt "$number_b" ] && echo 1 && return 0 57 | 58 | #### 59 | # Retrun -1 when A is lower than B 60 | echo -1 && return 0 61 | fi 62 | arr_version_a=(${version_a//./ }) 63 | arr_version_b=(${version_b//./ }) 64 | cursor=0 65 | # Iterate arrays from left to right and find the first difference 66 | while [ "$([ "${arr_version_a[$cursor]}" -eq "${arr_version_b[$cursor]}" ] && [ $cursor -lt ${#arr_version_a[@]} ] && echo true)" == true ] 67 | do 68 | cursor=$((cursor+1)) 69 | done 70 | [ "${arr_version_a[$cursor]}" -gt "${arr_version_b[$cursor]}" ] && echo 1 || echo -1 71 | } 72 | 73 | [ -n "$1" ] && echo $(semver_compare $1 $2) -------------------------------------------------------------------------------- /aws/logs_monitoring/proxy_conf/haproxy.txt: -------------------------------------------------------------------------------- 1 | # For Datadog EU, change `datadoghq.com` to `datadoghq.eu` 2 | 3 | frontend metrics-forwarder 4 | bind *:3834 5 | mode http 6 | option tcplog 7 | default_backend datadog-metrics 8 | 9 | use_backend datadog-api if { path_beg -i /api/v1/validate } 10 | 11 | frontend traces-forwarder 12 | bind *:3835 13 | mode tcp 14 | option tcplog 15 | default_backend datadog-traces 16 | 17 | frontend logs-forwarder 18 | bind *:3837 19 | mode http 20 | option tcplog 21 | default_backend datadog-logs 22 | 23 | backend datadog-metrics 24 | balance roundrobin 25 | mode http 26 | # The following configuration is for HAProxy 1.8 and newer 27 | server-template mothership 5 haproxy-app.agent.datadoghq.com:443 check port 443 ssl verify none check resolvers my-dns init-addr none resolve-prefer ipv4 28 | # Uncomment the following configuration for older HAProxy versions 29 | # server mothership haproxy-app.agent.datadoghq.com:443 check port 443 ssl verify none 30 | 31 | backend datadog-api 32 | mode http 33 | # The following configuration is for HAProxy 1.8 and newer 34 | server-template mothership 5 api.datadoghq.com:443 check port 443 ssl verify none check resolvers my-dns init-addr none resolve-prefer ipv4 35 | # Uncomment the following configuration for older HAProxy versions 36 | # server mothership api.datadoghq.com:443 check port 443 ssl verify none 37 | 38 | backend datadog-traces 39 | balance roundrobin 40 | mode tcp 41 | # The following configuration is for HAProxy 1.8 and newer 42 | server-template mothership 5 trace.agent.datadoghq.com:443 check port 443 ssl verify none check resolvers my-dns 
init-addr none resolve-prefer ipv4 43 | # Uncomment the following configuration for older HAProxy versions 44 | # server mothership trace.agent.datadoghq.com:443 check port 443 ssl verify none 45 | 46 | backend datadog-logs 47 | balance roundrobin 48 | mode http 49 | # The following configuration is for HAProxy 1.8 and newer 50 | server-template mothership 5 lambda-http-intake.logs.datadoghq.com:443 check port 443 ssl verify none check resolvers my-dns init-addr none resolve-prefer ipv4 51 | # Uncomment the following configuration for older HAProxy versions 52 | # server datadog lambda-http-intake.logs.datadoghq.com:443 check port 443 ssl verify none -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/testdata/basic.json: -------------------------------------------------------------------------------- 1 | { 2 | "comment": "Tests reparenting root span to X-Ray trace", 3 | "tags": "", 4 | "trace": { 5 | "traces": [ 6 | [ 7 | { 8 | "trace_id": "4EBC5A600180F183", 9 | "parent_id": "4316578C3AE7BE79", 10 | "span_id": "AEC382007ED199CC", 11 | "service": "aws.lambda", 12 | "resource": "hello-dog-dev-hello36", 13 | "name": "aws.lambda", 14 | "error": 0, 15 | "start": 1586269922931758000, 16 | "duration": 254812000, 17 | "meta": { 18 | "service": "aws.lambda", 19 | "_dd.origin": "lambda", 20 | "cold_start": "false", 21 | "function_arn": "arn:aws:lambda:us-east-1:172597598159:function:hello-dog-dev-hello36", 22 | "request_id": "148e0567-902c-46e3-9374-d9642152420a", 23 | "resource_names": "hello-dog-dev-hello36" 24 | }, 25 | "metrics": { "system.pid": 1, "_sampling_priority_v1": 2 }, 26 | "type": "serverless" 27 | }, 28 | { 29 | "trace_id": "4EBC5A600180F183", 30 | "parent_id": "AEC382007ED199CC", 31 | "span_id": "D454EEAA6907984D", 32 | "service": "aws.lambda", 33 | "resource": "requests.request", 34 | "name": "requests.request", 35 | "error": 0, 36 | "start": 1586269922945357000, 37 | "duration": 138997000, 38 | "meta": { 39 | "service": "aws.lambda", 40 | "_dd.origin": "lambda", 41 | "http.method": "GET", 42 | "http.url": "https://www.datadoghq.com/", 43 | "http.status_code": "200" 44 | }, 45 | "metrics": { 46 | "_dd.measured": 1, 47 | "_dd1.sr.eausr": 1 48 | }, 49 | "type": "http" 50 | }, 51 | { 52 | "trace_id": "4EBC5A600180F183", 53 | "parent_id": "AEC382007ED199CC", 54 | "span_id": "7E22EC6F6F78D713", 55 | "service": "aws.lambda", 56 | "resource": "handler.get_message", 57 | "name": "handler.get_message", 58 | "error": 0, 59 | "start": 1586269923086220000, 60 | "duration": 100232000, 61 | "meta": { "_dd.origin": "lambda" } 62 | } 63 | ] 64 | ] 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/list_layers.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 
7 | 8 | # Lists most recent layers ARNs across regions to STDOUT 9 | # Optionals args: [layer-name] [region] 10 | 11 | set -e 12 | 13 | LAYER_NAMES=("Datadog-Forwarder") 14 | AVAILABLE_REGIONS=$(aws ec2 describe-regions | jq -r '.[] | .[] | .RegionName') 15 | LAYERS_MISSING_REGIONS=() 16 | 17 | # Check region arg 18 | if [ -z "$2" ]; then 19 | >&2 echo "Region parameter not specified, running for all available regions." 20 | REGIONS=$AVAILABLE_REGIONS 21 | else 22 | 23 | >&2 echo "Region parameter specified: $2" 24 | if [[ ! "$AVAILABLE_REGIONS" == *"$2"* ]]; then 25 | >&2 echo "Could not find $2 in available regions:" $AVAILABLE_REGIONS 26 | >&2 echo "" 27 | >&2 echo "EXITING SCRIPT." 28 | exit 1 29 | fi 30 | REGIONS=($2) 31 | fi 32 | 33 | # Check region arg 34 | if [ -z "$1" ]; then 35 | >&2 echo "Layer parameter not specified, running for all layers " 36 | LAYERS=("${LAYER_NAMES[@]}") 37 | else 38 | >&2 echo "Layer parameter specified: $1" 39 | if [[ ! " ${LAYER_NAMES[@]} " =~ " ${1} " ]]; then 40 | >&2 echo "Could not find $1 in layers: ${LAYER_NAMES[@]}" 41 | >&2 echo "" 42 | >&2 echo "EXITING SCRIPT." 43 | return 1 44 | fi 45 | LAYERS=($1) 46 | fi 47 | 48 | for region in $REGIONS 49 | do 50 | for layer_name in "${LAYERS[@]}" 51 | do 52 | last_layer_arn=$(aws lambda list-layer-versions --layer-name $layer_name --region $region | jq -r ".LayerVersions | .[0] | .LayerVersionArn") 53 | if [ "$last_layer_arn" == "null" ]; then 54 | >&2 echo "No layer found for $region, $layer_name" 55 | if [[ ! " ${LAYERS_MISSING_REGIONS[@]} " =~ " ${region} " ]]; then 56 | LAYERS_MISSING_REGIONS+=( $region ) 57 | fi 58 | else 59 | echo $last_layer_arn 60 | fi 61 | done 62 | done 63 | 64 | if [ ${#LAYERS_MISSING_REGIONS[@]} -gt 0 ]; then 65 | echo "WARNING: Following regions missing layers: ${LAYERS_MISSING_REGIONS[@]}" 66 | echo "Please run ./add_new_region.sh to add layers to the missing regions" 67 | exit 1 68 | fi -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_apigateway.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "data": { 5 | "series": [ 6 | { 7 | "device": null, 8 | "host": null, 9 | "interval": 10, 10 | "metric": "aws.dd_forwarder.local_cache_expired", 11 | "points": "", 12 | "tags": [ 13 | "forwardername:test", 14 | "forwarder_memorysize:1536", 15 | "forwarder_version:", 16 | "event_type:awslogs" 17 | ], 18 | "type": "distribution" 19 | }, 20 | { 21 | "device": null, 22 | "host": null, 23 | "interval": 10, 24 | "metric": "aws.dd_forwarder.s3_cache_fetch_failure", 25 | "points": "", 26 | "tags": [ 27 | "forwardername:test", 28 | "forwarder_memorysize:1536", 29 | "forwarder_version:", 30 | "event_type:awslogs" 31 | ], 32 | "type": "distribution" 33 | }, 34 | { 35 | "device": null, 36 | "host": null, 37 | "interval": 10, 38 | "metric": "aws.dd_forwarder.s3_cache_expired", 39 | "points": "", 40 | "tags": [ 41 | "forwardername:test", 42 | "forwarder_memorysize:1536", 43 | "forwarder_version:", 44 | "event_type:awslogs" 45 | ], 46 | "type": "distribution" 47 | } 48 | ] 49 | }, 50 | "headers": { 51 | "Accept": "*/*", 52 | "Accept-Encoding": "gzip, deflate", 53 | "Connection": "keep-alive", 54 | "Content-Encoding": "deflate", 55 | "Content-Length": "", 56 | "Content-Type": "application/json", 57 | "Host": "recorder:8080", 58 | "User-Agent": "", 59 | "x-datadog-parent-id": "", 60 | "x-datadog-sampling-priority": "2", 61 
| "x-datadog-trace-id": "4842834437835386637" 62 | }, 63 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 64 | "verb": "POST" 65 | } 66 | ] 67 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/stats.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Unless explicitly stated otherwise all files in this repository are licensed 3 | * under the Apache License Version 2.0. 4 | * 5 | * This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | * Copyright 2021 Datadog, Inc. 7 | */ 8 | package apm 9 | 10 | import ( 11 | "github.com/DataDog/datadog-agent/pkg/trace/pb" 12 | "github.com/DataDog/datadog-agent/pkg/trace/stats" 13 | ) 14 | 15 | const ( 16 | statsBucketDuration int64 = 1e10 // 10 seconds 17 | ) 18 | 19 | // ComputeAPMStats calculates the stats that should be submitted to APM about a given trace 20 | func ComputeAPMStats(tracePayload *pb.TracePayload) *stats.Payload { 21 | 22 | statsRawBuckets := make(map[int64]*stats.RawBucket) 23 | 24 | for _, trace := range tracePayload.Traces { 25 | spans := GetAnalyzedSpans(trace.Spans) 26 | 27 | sublayers := stats.ComputeSublayers(trace.Spans) 28 | for _, span := range spans { 29 | 30 | // Aggregate the span to a bucket by rounding its end timestamp to the closest bucket ts. 31 | // E.g., for buckets of size 10, a span ends on 36 should be aggregated to the second bucket 32 | // with bucketTS 30 (36 - 36 % 10). Create a new bucket if needed. 33 | spanEnd := span.Start + span.Duration 34 | bucketTS := spanEnd - (spanEnd % statsBucketDuration) 35 | var statsRawBucket *stats.RawBucket 36 | if existingBucket, ok := statsRawBuckets[bucketTS]; ok { 37 | statsRawBucket = existingBucket 38 | } else { 39 | statsRawBucket = stats.NewRawBucket(bucketTS, statsBucketDuration) 40 | statsRawBuckets[bucketTS] = statsRawBucket 41 | } 42 | 43 | // Use weight 1, as xray sampling is not uniform, and its rate is unknown to us. 44 | // In fact, for "low volume" Lambda functions, the sampling rate is typically 100%. 45 | // TopLevel is always "true" since we only compute stats for top-level spans. 46 | weightedSpan := &stats.WeightedSpan{ 47 | Span: span, 48 | Weight: 1, 49 | TopLevel: true, 50 | } 51 | statsRawBucket.HandleSpan(weightedSpan, tracePayload.Env, []string{}, sublayers) 52 | } 53 | } 54 | 55 | // Export statsRawBuckets to statsBuckets 56 | statsBuckets := make([]stats.Bucket, 0) 57 | for _, statsRawBucket := range statsRawBuckets { 58 | statsBuckets = append(statsBuckets, statsRawBucket.Export()) 59 | } 60 | return &stats.Payload{ 61 | HostName: tracePayload.HostName, 62 | Env: tracePayload.Env, 63 | Stats: statsBuckets, 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 
11 | # 12 | name: CodeQL 13 | 14 | on: 15 | push: 16 | branches: [ master ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ master ] 20 | schedule: 21 | - cron: '27 5 * * 3' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | 28 | permissions: 29 | actions: read 30 | contents: read 31 | security-events: write 32 | 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | language: [ 'go', 'javascript', 'python' ] 37 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 38 | # Learn more about CodeQL language support at https://codeql.github.com/docs/codeql-overview/supported-languages-and-frameworks/ 39 | 40 | steps: 41 | - name: Checkout source 42 | uses: actions/checkout@v3 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v2 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below). 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v2 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v2 72 | -------------------------------------------------------------------------------- /aws/vpc_flow_log_monitoring/README.md: -------------------------------------------------------------------------------- 1 | # vpc_flow_log_monitoring 2 | Process a VPC Flow Log monitoring DATA_MESSAGE, coming from CLOUDWATCH LOGS 3 | 4 | # VPC Flow Log message example 5 | ``` 6 | 2 123456789010 eni-abc123de 172.31.16.139 172.31.16.21 20641 22 6 20 4249 1418530010 1418530070 ACCEPT OK 7 | ``` 8 | 9 | which correspond to the following fields: 10 | ``` 11 | version, account, eni, source, destination, srcport, destport="22", protocol="6", packets, bytes, windowstart, windowend, action="REJECT", flowlogstatus 12 | ``` 13 | 14 | # Setup 15 | 16 | 1. Create a KMS key for the datadog api key and app key 17 | - Create a KMS key - http://docs.aws.amazon.com/kms/latest/developerguide/create-keys.html 18 | - Encrypt the token using the AWS CLI.`aws kms encrypt --key-id alias/ --plaintext '{"api_key":"", "app_key":""}'` 19 | - Make sure to save the base-64 encoded, encrypted key (CiphertextBlob). This will be used for the `KMS_ENCRYPTED_KEYS` variable in all lambda functions. 20 | - Optional: set the environment variable `DD_SITE` to `datadoghq.eu` and data is automatically forwarded to your EU platform. 21 | 22 | 1. 
Create and configure a lambda function 23 | - In the AWS Console, create a `lambda_execution` policy with the following policy document: 24 | ``` 25 | { 26 | "Version": "2012-10-17", 27 | "Statement": [ 28 | { 29 | "Effect": "Allow", 30 | "Action": [ 31 | "logs:CreateLogGroup", 32 | "logs:CreateLogStream", 33 | "logs:PutLogEvents" 34 | ], 35 | "Resource": "arn:aws:logs:*:*:*" 36 | }, 37 | { 38 | "Effect": "Allow", 39 | "Action": [ 40 | "kms:Decrypt" 41 | ], 42 | "Resource": [ 43 | "" 44 | ] 45 | } 46 | ] 47 | } 48 | ``` 49 | 50 | - Create a `lambda_execution` role and attach this policy. 51 | 52 | - Create a lambda function: Skip the blueprint, name it `functionname`, set the Runtime to `Python 2.7`, the handler to `lambda_function.lambda_handler`, and the role to `lambda_execution`. 53 | 54 | - Copy the content of `functionname/lambda_function.py` into the code section, and make sure to update the `KMS_ENCRYPTED_KEYS` environment variable with the encrypted key generated in step 1. 55 | 56 | 1. Subscribe to the appropriate log stream 57 | 58 | 59 | # Deploying to production 60 | 61 | See: https://github.com/DataDog/devops/wiki/Datadog-Serverless-Applications#how-to-deploy-to-production-for-aws 62 | 63 | -------------------------------------------------------------------------------- /azure/eventhub_log_forwarder/activity_logs_deploy.ps1: -------------------------------------------------------------------------------- 1 | param ( 2 | $SubscriptionId, 3 | $ApiKey, 4 | $EventhubNamespace, 5 | $FunctionAppName, 6 | $ResourceGroupLocation = "westus2", 7 | $ResourceGroupName = "datadog-log-forwarder-rg", 8 | $EventhubName = "datadog-eventhub", 9 | $FunctionName = "datadog-function", 10 | $DiagnosticSettingName = "datadog-activity-logs-diagnostic-setting", 11 | $DatadogSite = "datadoghq.com", 12 | $Environment = "AzureCloud", 13 | $DatadogTags = "" 14 | ) 15 | 16 | if (-Not ($SubscriptionId -And $ApiKey)) { Throw "`SubscriptionId` and `ApiKey` are required."
} 17 | 18 | Set-AzContext -SubscriptionId $SubscriptionId 19 | 20 | $code = (New-Object System.Net.WebClient).DownloadString("https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/activity_logs_monitoring/index.js") 21 | 22 | New-AzResourceGroup -Name $ResourceGroupName -Location $ResourceGroupLocation 23 | 24 | $environment = Get-AzEnvironment -Name $Environment 25 | $endpointSuffix = $environment.StorageEndpointSuffix 26 | $secureApiKey = ConvertTo-SecureString $ApiKey -AsPlainText -Force 27 | 28 | $deploymentArgs = @{ 29 | TemplateUri = "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/eventhub_log_forwarder/parent_template.json" 30 | ResourceGroupName = $ResourceGroupName 31 | functionCode = $code 32 | apiKey = $secureApiKey 33 | location = $ResourceGroupLocation 34 | eventHubName = $EventhubName 35 | functionName = $FunctionName 36 | datadogSite = $DatadogSite 37 | endpointSuffix = $endpointSuffix 38 | datadogTags = $DatadogTags 39 | } 40 | 41 | # Use values if parameters passed, otherwise we rely on the default value generated by the ARM template 42 | if ($EventhubNamespace) { $deploymentArgs["eventhubNamespace"] = $EventhubNamespace } 43 | if ($FunctionAppName) { $deploymentArgs["functionAppName"] = $FunctionAppName } 44 | 45 | try { 46 | $output = New-AzResourceGroupDeployment @deploymentArgs -Verbose -ErrorAction Stop 47 | # Get the generated globally-unique eventhub namespace 48 | $EventhubNamespace = $output.Outputs.eventHubNamespace.Value 49 | } catch { 50 | Write-Error $_ 51 | Return 52 | } 53 | 54 | try { 55 | New-AzDeployment ` 56 | -TemplateUri "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/eventhub_log_forwarder/activity_log_diagnostic_settings.json" ` 57 | -eventHubNamespace $EventhubNamespace ` 58 | -eventHubName $EventhubName ` 59 | -settingName $DiagnosticSettingName ` 60 | -resourceGroup $ResourceGroupName ` 61 | -Location $ResourceGroupLocation ` 62 | -Verbose ` 63 | -ErrorAction Stop 64 | } catch { 65 | Write-Error $_ 66 | Return 67 | } 68 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/build_bundle.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc 7 | 8 | set -e 9 | 10 | # Move into the tools directory 11 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" 12 | cd $DIR 13 | 14 | # Read the desired version 15 | if [ -z "$1" ]; then 16 | echo "Must specify a desired version number" 17 | exit 1 18 | elif [[ ! 
$1 =~ [0-9]+\.[0-9]+\.[0-9]+ ]]; then 19 | echo "Must use a semantic version, e.g., 3.1.4" 20 | exit 1 21 | else 22 | VERSION=$1 23 | fi 24 | 25 | PYTHON_VERSION="${PYTHON_VERSION:-3.8}" 26 | FORWARDER_PREFIX="aws-dd-forwarder" 27 | FORWARDER_DIR="../.forwarder" 28 | 29 | function make_path_absolute { 30 | echo "$(cd "$(dirname "$1")"; pwd)/$(basename "$1")" 31 | } 32 | 33 | ../trace_forwarder/scripts/build_linux_go_bin.sh 34 | 35 | function docker_build_zip { 36 | # Args: [python version] [zip destination] [layer destination] 37 | zip_destination=$(make_path_absolute $2) 38 | layer_destination=$(make_path_absolute $3) 39 | 40 | # Install datadogpy in a docker container to avoid the mess from switching 41 | # between different python runtimes. 42 | temp_dir=$(mktemp -d) 43 | 44 | docker buildx build --platform linux/amd64 --file "${DIR}/Dockerfile_bundle" -t "datadog-bundle:$1" .. --no-cache \ 45 | --build-arg runtime=$1 46 | 47 | # Run the image by runtime tag, tar its generated `python` directory to stdout, 48 | # then extract it to a temp directory. 49 | docker run datadog-bundle:$1 tar cf - . | tar -xf - -C $temp_dir 50 | 51 | # Zip to destination, keeping the directory structure as laid out in $temp_dir 52 | (cd $temp_dir && zip -q -r $zip_destination ./) 53 | 54 | rm -rf $temp_dir 55 | echo "Done creating forwarder zip archive $zip_destination" 56 | 57 | temp_dir=$(mktemp -d) 58 | SUB_DIRECTORY=python 59 | mkdir $temp_dir/$SUB_DIRECTORY 60 | 61 | # Run the image by runtime tag, tar its generated `python` directory to stdout, 62 | # then extract it to a temp directory. 63 | docker run datadog-bundle:$1 tar cf - . | tar -xf - -C $temp_dir/$SUB_DIRECTORY 64 | 65 | # Zip to destination, keeping the directory structure as laid out in $temp_dir 66 | (cd $temp_dir && zip -q -r $layer_destination ./) 67 | echo "Done creating layer zip archive $layer_destination" 68 | } 69 | 70 | rm -rf $FORWARDER_DIR 71 | mkdir $FORWARDER_DIR 72 | 73 | docker_build_zip ${PYTHON_VERSION} ${FORWARDER_DIR}/${FORWARDER_PREFIX}-${VERSION}.zip ${FORWARDER_DIR}/${FORWARDER_PREFIX}-${VERSION}-layer.zip 74 | 75 | echo "Successfully created Forwarder bundle!"
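# The listing below shows the two artifacts produced by docker_build_zip above:
# the forwarder code bundle (${FORWARDER_PREFIX}-${VERSION}.zip) and the matching
# Lambda layer archive (${FORWARDER_PREFIX}-${VERSION}-layer.zip).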
76 | ls $FORWARDER_DIR | xargs -I _ echo "${FORWARDER_DIR}/_" 77 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots-cache-test/cloudwatch_log_custom_tags.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "content-type": "application/json", 5 | "data": [ 6 | { 7 | "aws": { 8 | "awslogs": { 9 | "logGroup": "testLogGroup", 10 | "logStream": "testLogStream", 11 | "owner": "123456789123" 12 | }, 13 | "function_version": "$LATEST", 14 | "invoked_function_arn": "arn:aws:lambda:us-east-1:601427279990:function:test" 15 | }, 16 | "ddsource": "cloudwatch", 17 | "ddsourcecategory": "aws", 18 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:,custom_tag1:value1,custom_tag2:value2", 19 | "host": "testLogGroup", 20 | "id": "eventId1", 21 | "message": "{\"message\": \"hello world\"}", 22 | "service": "cloudwatch", 23 | "timestamp": 1440442987000 24 | } 25 | ], 26 | "path": "/v1/input/abcdefghijklmnopqrstuvwxyz012345", 27 | "verb": "POST" 28 | }, 29 | { 30 | "content-type": "application/json", 31 | "data": { 32 | "series": [ 33 | { 34 | "device": null, 35 | "host": null, 36 | "interval": 10, 37 | "metric": "aws.dd_forwarder.incoming_events", 38 | "points": "", 39 | "tags": [ 40 | "forwardername:test", 41 | "forwarder_memorysize:1536", 42 | "forwarder_version:", 43 | "event_type:awslogs" 44 | ], 45 | "type": "distribution" 46 | }, 47 | { 48 | "device": null, 49 | "host": null, 50 | "interval": 10, 51 | "metric": "aws.dd_forwarder.logs_forwarded", 52 | "points": "", 53 | "tags": [ 54 | "forwardername:test", 55 | "forwarder_memorysize:1536", 56 | "forwarder_version:", 57 | "event_type:awslogs" 58 | ], 59 | "type": "distribution" 60 | }, 61 | { 62 | "device": null, 63 | "host": null, 64 | "interval": 10, 65 | "metric": "aws.dd_forwarder.metrics_forwarded", 66 | "points": "", 67 | "tags": [ 68 | "forwardername:test", 69 | "forwarder_memorysize:1536", 70 | "forwarder_version:", 71 | "event_type:awslogs" 72 | ], 73 | "type": "distribution" 74 | } 75 | ] 76 | }, 77 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 78 | "verb": "POST" 79 | } 80 | ] 81 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/recorder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from http.server import BaseHTTPRequestHandler, HTTPServer 3 | import json 4 | import os 5 | import zlib 6 | 7 | from google.protobuf.json_format import MessageToDict 8 | import pb.trace_payload_pb2 as TracePayloadProtobuf 9 | 10 | 11 | PORT_NUMBER = 8080 12 | 13 | print("Starting recorder", flush=True) 14 | 15 | events = [] 16 | 17 | 18 | class RecorderHandler(BaseHTTPRequestHandler): 19 | def __init__(self, request, client_address, server): 20 | super().__init__(request, client_address, server) 21 | 22 | def handle_request(self): 23 | global events 24 | response = '{"status":200}' 25 | response_type = "text/json" 26 | 27 | if self.path == "/recording": 28 | # Return the recent events and clear the recording 29 | response = json.dumps({"events": events}) 30 | events = [] 31 | 32 | else: 33 | print("Recorded: {} {}".format(self.command, self.path), flush=True) 34 | 35 | data = None 36 | if self.headers["Content-Length"] != None: 37 | contents = 
self.rfile.read(int(self.headers["Content-Length"])) 38 | if self.headers["Content-Type"] == "application/json": 39 | if self.headers["Content-Encoding"] == "deflate": 40 | contents = zlib.decompress(contents) 41 | try: 42 | data = json.loads(contents.decode()) 43 | except: 44 | pass 45 | elif self.headers["Content-Type"] == "application/x-protobuf": 46 | # Assume that protobuf calls contain trace payloads 47 | message = TracePayloadProtobuf.TracePayload() 48 | message.ParseFromString(contents) 49 | data = MessageToDict(message) 50 | 51 | event = { 52 | "path": self.path, 53 | "verb": self.command, 54 | "headers": {k: v for k, v in self.headers.items()}, 55 | "data": data, 56 | } 57 | 58 | events.append(event) 59 | 60 | # Send an OK response 61 | self.send_response(200) 62 | self.send_header("Content-type", response_type) 63 | self.end_headers() 64 | 65 | # Send the html message 66 | self.wfile.write(response.encode("utf-8")) 67 | return 68 | 69 | def do_GET(self): 70 | self.handle_request() 71 | 72 | def do_POST(self): 73 | self.handle_request() 74 | 75 | 76 | port = int(os.environ.get("SERVER_PORT", default=PORT_NUMBER)) 77 | 78 | try: 79 | server = HTTPServer(("", port), RecorderHandler) 80 | print("Started recorder on port {}".format(port), flush=True) 81 | server.serve_forever() 82 | finally: 83 | print("Shutting down recorder", flush=True) 84 | server.socket.close() 85 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/add_new_region.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2019 Datadog, Inc. 
7 | 8 | # Copy layers from us-east-1 to new region 9 | # args: [new-region] 10 | 11 | set -e 12 | 13 | OLD_REGION='us-east-1' 14 | 15 | PYTHON_VERSIONS_FOR_AWS_CLI=("python3.8") 16 | LAYER_NAMES=("Datadog-Forwarder") 17 | NEW_REGION=$1 18 | 19 | publish_layer() { 20 | region=$1 21 | layer_name=$2 22 | aws_version_key=$3 23 | layer_path=$4 24 | 25 | version_nbr=$(aws lambda publish-layer-version --layer-name $layer_name \ 26 | --description "Datadog Lambda Layer for Python" \ 27 | --zip-file "fileb://$layer_path" \ 28 | --region $region \ 29 | --compatible-runtimes $aws_version_key \ 30 | | jq -r '.Version') 31 | 32 | aws lambda add-layer-version-permission --layer-name $layer_name \ 33 | --version-number $version_nbr \ 34 | --statement-id "release-$version_nbr" \ 35 | --action lambda:GetLayerVersion --principal "*" \ 36 | --region $region 37 | 38 | echo "Published layer for region $region, version $aws_version_key, layer_name $layer_name, layer_version $version_nbr" 39 | } 40 | 41 | get_max_version() { 42 | layer_name=$1 43 | region=$2 44 | last_layer_version=$(aws lambda list-layer-versions --layer-name $layer_name --region $region | jq -r ".LayerVersions | .[0] | .Version") 45 | if [ "$last_layer_version" == "null" ]; then 46 | echo 0 47 | else 48 | echo $last_layer_version 49 | fi 50 | } 51 | 52 | if [ -z "$1" ]; then 53 | echo "Region parameter not specified, exiting" 54 | exit 1 55 | fi 56 | 57 | j=0 58 | for layer_name in "${LAYER_NAMES[@]}"; do 59 | # get latest version 60 | last_layer_version=$(get_max_version $layer_name $OLD_REGION) 61 | starting_version=$(get_max_version $layer_name $NEW_REGION) 62 | starting_version=$(expr $starting_version + 1) 63 | 64 | # exit if region is already all caught up 65 | if [ $starting_version -gt $last_layer_version ]; then 66 | echo "INFO: $NEW_REGION is already up to date for $layer_name" 67 | continue 68 | fi 69 | 70 | # run for each version of layer 71 | for i in $(seq 1 $last_layer_version); do 72 | layer_path=$layer_name"_"$i.zip 73 | aws_version_key="${PYTHON_VERSIONS_FOR_AWS_CLI[$j]}" 74 | 75 | # download layer versions 76 | URL=$(AWS_REGION=$OLD_REGION aws lambda get-layer-version --layer-name $layer_name --version-number $i --query Content.Location --output text) 77 | curl $URL -o $layer_path 78 | 79 | # publish layer to new region 80 | publish_layer $NEW_REGION $layer_name $aws_version_key $layer_path 81 | rm $layer_path 82 | done 83 | 84 | j=$(expr $j + 1) 85 | done -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/installation_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 7 | 8 | # Tests installation and deployment process of forwarder, and that CloudFormation template works. 9 | set -e 10 | 11 | # Deploy the stack to a less commonly used region to avoid any problems with limits 12 | AWS_REGION="us-west-2" 13 | 14 | # Limits any layer publishing to the test region 15 | export REGIONS=$AWS_REGION 16 | # Prevents the scripts from asking permission 17 | export NO_INPUT=true 18 | 19 | # Move into the root directory, so this script can be called from any directory 20 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" 21 | cd $DIR/.. 
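# RUN_ID is a random 10-character suffix used to keep the test CloudFormation
# stack names unique between runs of this installation test.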
22 | 23 | RUN_ID=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c10) 24 | 25 | # Since we never run the log forwarder, api key can be anything. 26 | DD_API_KEY=RUN_ID 27 | 28 | CURRENT_VERSION="$(grep -E -o 'Version: [0-9]+\.[0-9]+\.[0-9]+' template.yaml | cut -d' ' -f2)" 29 | 30 | function aws-login() { 31 | cfg=( "$@" ) 32 | shift 33 | if [ "$ACCOUNT" = "prod" ] ; then 34 | aws-vault exec prod-engineering -- ${cfg[@]} 35 | else 36 | aws-vault exec sandbox-account-admin -- ${cfg[@]} 37 | fi 38 | } 39 | 40 | # Run script in this process. This gives us TEMPLATE_URL, NEXT_LAYER_VERSION and FORWARDER_SOURCE_URL env vars 41 | . release.sh $CURRENT_VERSION sandbox 42 | 43 | function param { 44 | KEY=$1 45 | VALUE=$2 46 | echo "{\"ParameterKey\":\"${KEY}\",\"ParameterValue\":${VALUE}}" 47 | } 48 | echo $FORWARDER_SOURCE_URL 49 | 50 | publish_test() { 51 | ADDED_PARAMS=$1 52 | 53 | PARAM_LIST=[$(param DdApiKey \"${DD_API_KEY}\"),$(param DdSite \"datadoghq.com\"),$(param ReservedConcurrency \"1\"),$ADDED_PARAMS] 54 | echo "Setting params ${PARAM_LIST}" 55 | 56 | # Create an instance of the stack 57 | STACK_NAME="datadog-forwarder-integration-stack-${RUN_ID}" 58 | 59 | echo "Creating stack using Zip Copier Flow ${STACK_NAME}" 60 | aws-login aws cloudformation create-stack --stack-name $STACK_NAME --template-url $TEMPLATE_URL --capabilities "CAPABILITY_AUTO_EXPAND" "CAPABILITY_IAM" --on-failure "DELETE" \ 61 | --parameters=$PARAM_LIST --region $AWS_REGION 62 | 63 | echo "Waiting for stack to complete creation ${STACK_NAME}" 64 | aws-login aws cloudformation wait stack-create-complete --stack-name $STACK_NAME --region $AWS_REGION 65 | 66 | echo "Completed stack creation" 67 | 68 | echo "Cleaning up stack" 69 | aws-login aws cloudformation delete-stack --stack-name $STACK_NAME --region $AWS_REGION 70 | } 71 | 72 | echo 73 | echo "Running Publish with Zip Copier test" 74 | publish_test "$(param SourceZipUrl \"${FORWARDER_SOURCE_URL}\"),$(param InstallAsLayer \"false\")" 75 | 76 | RUN_ID=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c10) 77 | 78 | echo 79 | echo "Running Publish with Layer test" 80 | LAYER_ARN="arn:aws:lambda:${AWS_REGION}:${CURRENT_ACCOUNT}:layer:${LAYER_NAME}:${LAYER_VERSION}" 81 | publish_test $(param LayerARN \"${LAYER_ARN}\") 82 | -------------------------------------------------------------------------------- /azure/deploy-to-azure/activity_log_diagnostic_settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2018-05-01/subscriptionDeploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "settingName": { 6 | "type": "string", 7 | "defaultValue": "datadog-activity-logs-diagnostic-setting", 8 | "metadata": { 9 | "description": "The name of the diagnostic setting" 10 | } 11 | }, 12 | "resourceGroup": { 13 | "type": "string", 14 | "metadata": { 15 | "description": "Name of the Resource Group of the EventHub" 16 | } 17 | }, 18 | "eventHubNamespace": { 19 | "type": "string", 20 | "metadata": { 21 | "description": "Name of EventHub namespace, which must be globally unique." 22 | } 23 | }, 24 | "eventHubName": { 25 | "type": "string", 26 | "defaultValue": "datadog-eventhub", 27 | "metadata": { 28 | "description": "Name of the EventHub to which the Activity logs will be sent." 
29 | } 30 | } 31 | }, 32 | "variables": { 33 | "subscriptionId": "[subscription().subscriptionId]", 34 | "eventHubAuthorizationRuleId": "[concat('/subscriptions/', variables('subscriptionId'), '/resourceGroups/', parameters('resourceGroup'), '/providers/Microsoft.EventHub/namespaces/', parameters('eventHubNamespace'), '/authorizationRules/RootManageSharedAccessKey')]" 35 | }, 36 | "resources": [ 37 | { 38 | "type": "Microsoft.Insights/diagnosticSettings", 39 | "apiVersion": "2017-05-01-preview", 40 | "name": "[parameters('settingName')]", 41 | "properties": { 42 | "eventHubAuthorizationRuleId": "[variables('eventHubAuthorizationRuleId')]", 43 | "eventHubName": "[parameters('eventHubName')]", 44 | "logs": [ 45 | { 46 | "category": "Administrative", 47 | "enabled": true 48 | }, 49 | { 50 | "category": "Security", 51 | "enabled": true 52 | }, 53 | { 54 | "category": "ServiceHealth", 55 | "enabled": true 56 | }, 57 | { 58 | "category": "Alert", 59 | "enabled": true 60 | }, 61 | { 62 | "category": "Recommendation", 63 | "enabled": true 64 | }, 65 | { 66 | "category": "Policy", 67 | "enabled": true 68 | }, 69 | { 70 | "category": "Autoscale", 71 | "enabled": true 72 | }, 73 | { 74 | "category": "ResourceHealth", 75 | "enabled": true 76 | } 77 | ] 78 | } 79 | } 80 | ] 81 | } 82 | -------------------------------------------------------------------------------- /azure/eventhub_log_forwarder/activity_log_diagnostic_settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2018-05-01/subscriptionDeploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "settingName": { 6 | "type": "string", 7 | "defaultValue": "datadog-activity-logs-diagnostic-setting", 8 | "metadata": { 9 | "description": "The name of the diagnostic setting" 10 | } 11 | }, 12 | "resourceGroup": { 13 | "type": "string", 14 | "metadata": { 15 | "description": "Name of the Resource Group of the EventHub" 16 | } 17 | }, 18 | "eventHubNamespace": { 19 | "type": "string", 20 | "metadata": { 21 | "description": "Name of EventHub namespace, which must be globally unique." 22 | } 23 | }, 24 | "eventHubName": { 25 | "type": "string", 26 | "defaultValue": "datadog-eventhub", 27 | "metadata": { 28 | "description": "Name of the EventHub to which the Activity logs will be sent." 
29 | } 30 | } 31 | }, 32 | "variables": { 33 | "subscriptionId": "[subscription().subscriptionId]", 34 | "eventHubAuthorizationRuleId": "[concat('/subscriptions/', variables('subscriptionId'), '/resourceGroups/', parameters('resourceGroup'), '/providers/Microsoft.EventHub/namespaces/', parameters('eventHubNamespace'), '/authorizationRules/RootManageSharedAccessKey')]" 35 | }, 36 | "resources": [ 37 | { 38 | "type": "Microsoft.Insights/diagnosticSettings", 39 | "apiVersion": "2017-05-01-preview", 40 | "name": "[parameters('settingName')]", 41 | "properties": { 42 | "eventHubAuthorizationRuleId": "[variables('eventHubAuthorizationRuleId')]", 43 | "eventHubName": "[parameters('eventHubName')]", 44 | "logs": [ 45 | { 46 | "category": "Administrative", 47 | "enabled": true 48 | }, 49 | { 50 | "category": "Security", 51 | "enabled": true 52 | }, 53 | { 54 | "category": "ServiceHealth", 55 | "enabled": true 56 | }, 57 | { 58 | "category": "Alert", 59 | "enabled": true 60 | }, 61 | { 62 | "category": "Recommendation", 63 | "enabled": true 64 | }, 65 | { 66 | "category": "Policy", 67 | "enabled": true 68 | }, 69 | { 70 | "category": "Autoscale", 71 | "enabled": true 72 | }, 73 | { 74 | "category": "ResourceHealth", 75 | "enabled": true 76 | } 77 | ] 78 | } 79 | } 80 | ] 81 | } 82 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/recorder/pb/span_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: span.proto 4 | """Generated protocol buffer code.""" 5 | from google.protobuf import descriptor as _descriptor 6 | from google.protobuf import descriptor_pool as _descriptor_pool 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( 16 | b'\n\nspan.proto\x12\x02pb"\xcf\x02\n\x04Span\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08resource\x18\x03 \x01(\t\x12\x0f\n\x07traceID\x18\x04 \x01(\x04\x12\x0e\n\x06spanID\x18\x05 \x01(\x04\x12\x10\n\x08parentID\x18\x06 \x01(\x04\x12\r\n\x05start\x18\x07 \x01(\x03\x12\x10\n\x08\x64uration\x18\x08 \x01(\x03\x12\r\n\x05\x65rror\x18\t \x01(\x05\x12 \n\x04meta\x18\n \x03(\x0b\x32\x12.pb.Span.MetaEntry\x12&\n\x07metrics\x18\x0b \x03(\x0b\x32\x15.pb.Span.MetricsEntry\x12\x0c\n\x04type\x18\x0c \x01(\t\x1a+\n\tMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a.\n\x0cMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x62\x06proto3' 17 | ) 18 | 19 | 20 | _SPAN = DESCRIPTOR.message_types_by_name["Span"] 21 | _SPAN_METAENTRY = _SPAN.nested_types_by_name["MetaEntry"] 22 | _SPAN_METRICSENTRY = _SPAN.nested_types_by_name["MetricsEntry"] 23 | Span = _reflection.GeneratedProtocolMessageType( 24 | "Span", 25 | (_message.Message,), 26 | { 27 | "MetaEntry": _reflection.GeneratedProtocolMessageType( 28 | "MetaEntry", 29 | (_message.Message,), 30 | { 31 | "DESCRIPTOR": _SPAN_METAENTRY, 32 | "__module__": "span_pb2" 33 | # @@protoc_insertion_point(class_scope:pb.Span.MetaEntry) 34 | }, 35 | ), 36 | "MetricsEntry": _reflection.GeneratedProtocolMessageType( 37 | "MetricsEntry", 
38 | (_message.Message,), 39 | { 40 | "DESCRIPTOR": _SPAN_METRICSENTRY, 41 | "__module__": "span_pb2" 42 | # @@protoc_insertion_point(class_scope:pb.Span.MetricsEntry) 43 | }, 44 | ), 45 | "DESCRIPTOR": _SPAN, 46 | "__module__": "span_pb2" 47 | # @@protoc_insertion_point(class_scope:pb.Span) 48 | }, 49 | ) 50 | 51 | _sym_db.RegisterMessage(Span) 52 | _sym_db.RegisterMessage(Span.MetaEntry) 53 | _sym_db.RegisterMessage(Span.MetricsEntry) 54 | 55 | if _descriptor._USE_C_DESCRIPTORS == False: 56 | DESCRIPTOR._options = None 57 | _SPAN_METAENTRY._options = None 58 | _SPAN_METAENTRY._serialized_options = b"8\001" 59 | _SPAN_METRICSENTRY._options = None 60 | _SPAN_METRICSENTRY._serialized_options = b"8\001" 61 | _SPAN._serialized_start = 19 62 | _SPAN._serialized_end = 354 63 | _SPAN_METAENTRY._serialized_start = 263 64 | _SPAN_METAENTRY._serialized_end = 306 65 | _SPAN_METRICSENTRY._serialized_start = 308 66 | _SPAN_METRICSENTRY._serialized_end = 354 67 | # @@protoc_insertion_point(module_scope) 68 | -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/model_test.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Unless explicitly stated otherwise all files in this repository are licensed 3 | * under the Apache License Version 2.0. 4 | * 5 | * This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | * Copyright 2021 Datadog, Inc. 7 | */ 8 | package apm 9 | 10 | import ( 11 | "encoding/json" 12 | "fmt" 13 | "os" 14 | "strings" 15 | "testing" 16 | 17 | "github.com/DataDog/datadog-agent/pkg/trace/obfuscate" 18 | "github.com/davecgh/go-spew/spew" 19 | "github.com/stretchr/testify/assert" 20 | 21 | "io/ioutil" 22 | ) 23 | 24 | type integrationTestData struct { 25 | Comment string `json:"comment"` 26 | Tags string `json:"tags"` 27 | Trace interface{} `json:"trace"` 28 | } 29 | 30 | func CompareSnapshot(t *testing.T, inputFile, snapshotFile string, updateSnapshots bool) { 31 | 32 | // Spew is used to serialize snapshots, for 33 | sc := spew.NewDefaultConfig() 34 | sc.DisablePointerAddresses = true 35 | sc.SortKeys = true 36 | sc.DisableCapacities = true 37 | sc.DisablePointerMethods = true 38 | sc.DisableMethods = true 39 | sc.SpewKeys = true 40 | 41 | file, err := os.Open(inputFile) 42 | assert.NoError(t, err) 43 | defer file.Close() 44 | 45 | contents, err := ioutil.ReadAll(file) 46 | 47 | assert.NoError(t, err, "Couldn't read contents of test file") 48 | 49 | var td integrationTestData 50 | err = json.Unmarshal(contents, &td) 51 | assert.NoError(t, err, "Couldn't parse contents of test file") 52 | 53 | tc, _ := json.Marshal(td.Trace) 54 | traceContents := string(tc[:]) 55 | 56 | obfuscator := obfuscate.NewObfuscator(&obfuscate.Config{ 57 | ES: obfuscate.JSONSettings{ 58 | Enabled: true, 59 | }, 60 | Mongo: obfuscate.JSONSettings{ 61 | Enabled: true, 62 | }, 63 | RemoveQueryString: true, 64 | RemovePathDigits: true, 65 | RemoveStackTraces: true, 66 | Redis: true, 67 | Memcached: true, 68 | }) 69 | 70 | payload, err := ProcessTrace(traceContents, obfuscator, td.Tags) 71 | AddTagsToTracePayloads(payload, td.Tags) 72 | assert.NoError(t, err, "Couldn't parse trace") 73 | 74 | output := sc.Sdump(payload) 75 | 76 | if updateSnapshots { 77 | err = ioutil.WriteFile(snapshotFile, []byte(output), 0644) 78 | assert.NoError(t, err) 79 | fmt.Printf("Updated Snapshot %s\n", snapshotFile) 80 | } else { 81 | snapshot, err := ioutil.ReadFile(snapshotFile) 82 | 
assert.NoError(t, err, "Missing snapshot file for test") 83 | expected := string(snapshot) 84 | assert.Equal(t, expected, output, fmt.Sprintf("Snapshot's didn't match for %s. To update run `$UPDATE_SNAPSHOTS=true go test ./...`", inputFile)) 85 | } 86 | } 87 | 88 | func TestSnapshotsMatch(t *testing.T) { 89 | files, err := ioutil.ReadDir("testdata") 90 | assert.NoError(t, err) 91 | us := os.Getenv("UPDATE_SNAPSHOTS") 92 | updateSnapshots := strings.ToLower(us) == "true" 93 | 94 | for _, f := range files { 95 | 96 | if !strings.HasSuffix(f.Name(), ".json") { 97 | continue 98 | } 99 | inputFile := fmt.Sprintf("testdata/%s", f.Name()) 100 | 101 | snapshotFile := fmt.Sprintf("%s~snapshot", inputFile) 102 | CompareSnapshot(t, inputFile, snapshotFile, updateSnapshots) 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_coldstart.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "data": { 5 | "series": [ 6 | { 7 | "device": null, 8 | "host": null, 9 | "interval": 10, 10 | "metric": "aws.dd_forwarder.list_tags_log_group_api_call", 11 | "points": "", 12 | "tags": [ 13 | "forwardername:test", 14 | "forwarder_memorysize:1536", 15 | "forwarder_version:", 16 | "event_type:awslogs" 17 | ], 18 | "type": "distribution" 19 | }, 20 | { 21 | "device": null, 22 | "host": null, 23 | "interval": 10, 24 | "metric": "aws.dd_forwarder.incoming_events", 25 | "points": "", 26 | "tags": [ 27 | "forwardername:test", 28 | "forwarder_memorysize:1536", 29 | "forwarder_version:", 30 | "event_type:awslogs" 31 | ], 32 | "type": "distribution" 33 | }, 34 | { 35 | "device": null, 36 | "host": null, 37 | "interval": 10, 38 | "metric": "aws.dd_forwarder.local_cache_expired", 39 | "points": "", 40 | "tags": [ 41 | "forwardername:test", 42 | "forwarder_memorysize:1536", 43 | "forwarder_version:", 44 | "event_type:awslogs" 45 | ], 46 | "type": "distribution" 47 | }, 48 | { 49 | "device": null, 50 | "host": null, 51 | "interval": 10, 52 | "metric": "aws.dd_forwarder.s3_cache_fetch_failure", 53 | "points": "", 54 | "tags": [ 55 | "forwardername:test", 56 | "forwarder_memorysize:1536", 57 | "forwarder_version:", 58 | "event_type:awslogs" 59 | ], 60 | "type": "distribution" 61 | }, 62 | { 63 | "device": null, 64 | "host": null, 65 | "interval": 10, 66 | "metric": "aws.dd_forwarder.s3_cache_expired", 67 | "points": "", 68 | "tags": [ 69 | "forwardername:test", 70 | "forwarder_memorysize:1536", 71 | "forwarder_version:", 72 | "event_type:awslogs" 73 | ], 74 | "type": "distribution" 75 | } 76 | ] 77 | }, 78 | "headers": { 79 | "Accept": "*/*", 80 | "Accept-Encoding": "gzip, deflate", 81 | "Connection": "keep-alive", 82 | "Content-Encoding": "deflate", 83 | "Content-Length": "", 84 | "Content-Type": "application/json", 85 | "Host": "recorder:8080", 86 | "User-Agent": "", 87 | "x-datadog-parent-id": "", 88 | "x-datadog-sampling-priority": "2", 89 | "x-datadog-trace-id": "4842834437835386637" 90 | }, 91 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 92 | "verb": "POST" 93 | } 94 | ] 95 | } -------------------------------------------------------------------------------- /azure/activity_logs_monitoring/README.md: -------------------------------------------------------------------------------- 1 | # Datadog-Azure function 2 | 3 | The Datadog-Azure function is used to forward Azure logs to Datadog, including 
Activity and Diagnostic logs from EventHub. 4 | 5 | ## Quick Start 6 | 7 | The provided Node.js script must be deployed into your Azure Functions service. Follow the tutorial below to learn how to do so: 8 | 9 | ### 1. Create a new EventHub triggered function 10 | 11 | - Expand your function application and click the `+` button next to `Functions`. If this is the first function in your function application, select `Custom function`. This displays the complete set of function templates. 12 | - In the search field, type `Event Hub` and choose `Event Hub Trigger`. 13 | - Select the `Javascript` language in the right menu. 14 | - Enter a name for the function. 15 | - Add the desired `Event Hub connection`, or create a new one if you do not have one already. 16 | - Select the `Event Hub consumer group` and the `Event Hub Name` you want to pull logs from. 17 | 18 | ### 2. Provide the code 19 | 20 | - Copy and paste the code of the [Datadog-Azure function](./index.js). 21 | - In the `Integrate` part: 22 | - `Event Hub Cardinality` must be set to `Many`. 23 | - Set the `Event Parameter Name` to `eventHubMessages`. 24 | 25 | ### 3. (optional) Send logs to EU or to a proxy 26 | 27 | #### Send logs to EU 28 | 29 | Set the environment variable `DD_SITE` to `datadoghq.eu`, and logs are automatically forwarded to your EU platform. 30 | 31 | ## Parameters 32 | 33 | - **API KEY**: 34 | 35 | There are two ways to set your [Datadog API key](https://app.datadoghq.com/organization-settings/api-keys): 36 | 37 | 1. Replace the API key placeholder in the code with your API key value. 38 | 2. Set the value through the `DD_API_KEY` environment variable. 39 | 40 | - **Custom Tags**: 41 | 42 | You have two options to add custom tags to your logs: 43 | 44 | - Manually, by editing the function code: replace the placeholder value of the `DD_TAGS` variable with a comma-separated list of `key:value` tags. 45 | - Automatically, with the `DD_TAGS` environment variable. 46 | 47 | Learn more about Datadog tagging in our main [Tagging documentation](https://docs.datadoghq.com/tagging/). 48 | 49 | ## Customization 50 | 51 | - **Scrubbing PII** 52 | 53 | To scrub PII from your logs, uncomment the SCRUBBER_RULE_CONFIG code. If you'd like to scrub more than just emails and IP addresses, add your own rules to this map in the following format: 54 | ``` 55 | { 56 | NAME: { 57 | pattern: <regex pattern>, 58 | replacement: <replacement text> } 59 | } 60 | ``` 61 | 62 | - **Log Splitting** 63 | 64 | To split array-type fields in your logs into individual logs, you can add sections to the DD_LOG_SPLITTING_CONFIG map in the code or set the DD_LOG_SPLITTING_CONFIG environment variable (which must be a JSON string in the same format). 65 | This creates an attribute in your logs called "parsed_arrays", which contains the fields in the format of the original log with the split log value. 66 | 67 | An example of an azure.datafactory use case is provided in the code and commented out; an illustrative sketch is shown below.
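For illustration only, a single entry of that map might look like the sketch below. The path and flag values are hypothetical (they are not copied from `index.js`); the generic meaning of each field is described right after the sketch.
```
{
    'azure.datafactory': {
        // Hypothetical path: walk properties -> Output -> value and split that array
        paths: [['properties', 'Output', 'value']],
        // Whether to also forward the original, unsplit log
        keep_original_log: true,
        // Whether to copy the original log's fields into each split log
        preserve_fields: true
    }
}
```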
The format is as follows: 68 | ``` 69 | { 70 | source_type: 71 | paths: [list of [list of fields in the log payload to iterate through to find the one to split]], 72 | keep_original_log: bool, if you'd like to preserve the original log in addition to the split ones or not, 73 | preserve_fields: bool, whether or not to keep the original log fields in the new split logs 74 | } 75 | ``` 76 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/sign_bundle.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 7 | 8 | # Usage: ./sign_bundle.sh 9 | 10 | set -e 11 | 12 | SIGNING_PROFILE_NAME="DatadogLambdaSigningProfile" 13 | 14 | # Get bundle path from arguments 15 | if [ -z "$1" ]; then 16 | echo "ERROR: You must pass a bundle path to sign" 17 | exit 1 18 | fi 19 | BUNDLE_LOCAL_PATH=$1 20 | 21 | # Check account parameter 22 | VALID_ACCOUNTS=("sandbox" "prod") 23 | if [ -z "$2" ]; then 24 | echo "ERROR: You must pass an account parameter to sign the bundle" 25 | exit 1 26 | fi 27 | if [[ ! "${VALID_ACCOUNTS[@]}" =~ $2 ]]; then 28 | echo "ERROR: The account parameter was invalid. Please choose sandbox or prod." 29 | exit 1 30 | fi 31 | if [ "$2" = "sandbox" ]; then 32 | REGION="sa-east-1" 33 | S3_BUCKET_NAME="dd-lambda-signing-bucket-sandbox" 34 | fi 35 | if [ "$2" = "prod" ]; then 36 | REGION="us-east-1" 37 | S3_BUCKET_NAME="dd-lambda-signing-bucket" 38 | fi 39 | 40 | echo 41 | 42 | # Upload the bundle to S3 for signing 43 | echo "Uploading bundle to S3 for signing..." 44 | UUID=$(uuidgen) 45 | S3_UNSIGNED_ZIP_KEY="${UUID}.zip" 46 | S3_UNSIGNED_ZIP_URI="s3://${S3_BUCKET_NAME}/${S3_UNSIGNED_ZIP_KEY}" 47 | aws s3 cp $BUNDLE_LOCAL_PATH $S3_UNSIGNED_ZIP_URI 48 | 49 | # Start a signing job 50 | echo "Starting the signing job..." 51 | SIGNING_JOB_ID=$(aws signer start-signing-job \ 52 | --source "s3={bucketName=${S3_BUCKET_NAME},key=${S3_UNSIGNED_ZIP_KEY},version=null}" \ 53 | --destination "s3={bucketName=${S3_BUCKET_NAME}}" \ 54 | --profile-name $SIGNING_PROFILE_NAME \ 55 | --region $REGION \ 56 | | jq -r '.jobId'\ 57 | ) 58 | 59 | # Wait for the signing job to complete 60 | echo "Waiting for the signing job to complete..." 61 | SECONDS_WAITED_SO_FAR=0 62 | while : 63 | do 64 | sleep 3 65 | SECONDS_WAITED_SO_FAR=$((SECONDS_WAITED_SO_FAR + 3)) 66 | 67 | SIGNING_JOB_DESCRIPTION=$(aws signer describe-signing-job \ 68 | --job-id $SIGNING_JOB_ID \ 69 | --region $REGION\ 70 | ) 71 | SIGNING_JOB_STATUS=$(echo $SIGNING_JOB_DESCRIPTION | jq -r '.status') 72 | SIGNING_JOB_STATUS_REASON=$(echo $SIGNING_JOB_DESCRIPTION | jq -r '.statusReason') 73 | 74 | if [ $SIGNING_JOB_STATUS = "Succeeded" ]; then 75 | echo "Signing job succeeded!" 76 | break 77 | fi 78 | 79 | if [ $SIGNING_JOB_STATUS = "Failed" ]; then 80 | echo "ERROR: Signing job failed" 81 | echo $SIGNING_JOB_STATUS_REASON 82 | exit 1 83 | fi 84 | 85 | if [ $SECONDS_WAITED_SO_FAR -ge 60 ]; then 86 | echo "ERROR: Timed out waiting for the signing job to complete" 87 | exit 1 88 | fi 89 | 90 | echo "Signing job still in progress..." 91 | done 92 | 93 | # Download the signed ZIP, overwriting the original ZIP 94 | echo "Replacing the local bundle with the bundle from S3..." 
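# The signed package is expected in the destination bucket under the signing job ID
# (i.e. "<signing-job-id>.zip"), which is why the key below is derived from SIGNING_JOB_ID.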
95 | S3_SIGNED_ZIP_KEY="${SIGNING_JOB_ID}.zip" 96 | S3_SIGNED_ZIP_URI="s3://${S3_BUCKET_NAME}/${S3_SIGNED_ZIP_KEY}" 97 | aws s3 cp $S3_SIGNED_ZIP_URI $BUNDLE_LOCAL_PATH 98 | 99 | # Delete the signed and unsigned ZIPs in S3 100 | echo "Cleaning up the S3 bucket..." 101 | aws s3api delete-object --bucket $S3_BUCKET_NAME --key $S3_UNSIGNED_ZIP_KEY 102 | aws s3api delete-object --bucket $S3_BUCKET_NAME --key $S3_SIGNED_ZIP_KEY 103 | 104 | echo 105 | echo "Successfully signed the bundle!" 106 | echo -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots-cache-test/cloudwatch_log.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "content-type": "application/json", 5 | "data": [ 6 | { 7 | "aws": { 8 | "awslogs": { 9 | "logGroup": "testLogGroup", 10 | "logStream": "testLogStream", 11 | "owner": "123456789123" 12 | }, 13 | "function_version": "$LATEST", 14 | "invoked_function_arn": "arn:aws:lambda:us-east-1:601427279990:function:test" 15 | }, 16 | "ddsource": "cloudwatch", 17 | "ddsourcecategory": "aws", 18 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:", 19 | "host": "testLogGroup", 20 | "id": "eventId1", 21 | "message": "[ERROR] First test message", 22 | "service": "cloudwatch", 23 | "timestamp": 1440442987000 24 | }, 25 | { 26 | "aws": { 27 | "awslogs": { 28 | "logGroup": "testLogGroup", 29 | "logStream": "testLogStream", 30 | "owner": "123456789123" 31 | }, 32 | "function_version": "$LATEST", 33 | "invoked_function_arn": "arn:aws:lambda:us-east-1:601427279990:function:test" 34 | }, 35 | "ddsource": "cloudwatch", 36 | "ddsourcecategory": "aws", 37 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:", 38 | "host": "testLogGroup", 39 | "id": "eventId2", 40 | "message": "[ERROR] Second test message", 41 | "service": "cloudwatch", 42 | "timestamp": 1440442987001 43 | } 44 | ], 45 | "path": "/v1/input/abcdefghijklmnopqrstuvwxyz012345", 46 | "verb": "POST" 47 | }, 48 | { 49 | "content-type": "application/json", 50 | "data": { 51 | "series": [ 52 | { 53 | "device": null, 54 | "host": null, 55 | "interval": 10, 56 | "metric": "aws.dd_forwarder.incoming_events", 57 | "points": "", 58 | "tags": [ 59 | "forwardername:test", 60 | "forwarder_memorysize:1536", 61 | "forwarder_version:", 62 | "event_type:awslogs" 63 | ], 64 | "type": "distribution" 65 | }, 66 | { 67 | "device": null, 68 | "host": null, 69 | "interval": 10, 70 | "metric": "aws.dd_forwarder.logs_forwarded", 71 | "points": "", 72 | "tags": [ 73 | "forwardername:test", 74 | "forwarder_memorysize:1536", 75 | "forwarder_version:", 76 | "event_type:awslogs" 77 | ], 78 | "type": "distribution" 79 | }, 80 | { 81 | "device": null, 82 | "host": null, 83 | "interval": 10, 84 | "metric": "aws.dd_forwarder.metrics_forwarded", 85 | "points": "", 86 | "tags": [ 87 | "forwardername:test", 88 | "forwarder_memorysize:1536", 89 | "forwarder_version:", 90 | "event_type:awslogs" 91 | ], 92 | "type": "distribution" 93 | } 94 | ] 95 | }, 96 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 97 | "verb": "POST" 98 | } 99 | ] 100 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/internal/apm/testdata/basic.json~snapshot: -------------------------------------------------------------------------------- 1 | ([]*pb.TracePayload) (len=1) { 2 | 
(*pb.TracePayload)({ 3 | HostName: (string) "", 4 | Env: (string) (len=4) "none", 5 | Traces: ([]*pb.APITrace) (len=1) { 6 | (*pb.APITrace)({ 7 | TraceID: (uint64) 5673508998968570243, 8 | Spans: ([]*pb.Span) (len=3) { 9 | (*pb.Span)({ 10 | Service: (string) (len=10) "aws.lambda", 11 | Name: (string) (len=10) "aws.lambda", 12 | Resource: (string) (len=21) "hello-dog-dev-hello36", 13 | TraceID: (uint64) 5673508998968570243, 14 | SpanID: (uint64) 12593051921697315276, 15 | ParentID: (uint64) 4834147509824110201, 16 | Start: (int64) 1586269922931758000, 17 | Duration: (int64) 254812000, 18 | Error: (int32) 0, 19 | Meta: (map[string]string) (len=6) { 20 | (string) (len=10) "_dd.origin": (string) (len=6) "lambda", 21 | (string) (len=10) "cold_start": (string) (len=5) "false", 22 | (string) (len=12) "function_arn": (string) (len=68) "arn:aws:lambda:us-east-1:172597598159:function:hello-dog-dev-hello36", 23 | (string) (len=10) "request_id": (string) (len=36) "148e0567-902c-46e3-9374-d9642152420a", 24 | (string) (len=14) "resource_names": (string) (len=21) "hello-dog-dev-hello36", 25 | (string) (len=7) "service": (string) (len=10) "aws.lambda" 26 | }, 27 | Metrics: (map[string]float64) (len=7) { 28 | (string) (len=21) "_sampling_priority_v1": (float64) 2, 29 | (string) (len=58) "_sublayers.duration.by_service.sublayer_service:aws.lambda": (float64) 2.54812e+08, 30 | (string) (len=46) "_sublayers.duration.by_type.sublayer_type:http": (float64) 1.38997e+08, 31 | (string) (len=52) "_sublayers.duration.by_type.sublayer_type:serverless": (float64) 1.5583e+07, 32 | (string) (len=21) "_sublayers.span_count": (float64) 3, 33 | (string) (len=10) "_top_level": (float64) 1, 34 | (string) (len=10) "system.pid": (float64) 1 35 | }, 36 | Type: (string) (len=10) "serverless" 37 | }), 38 | (*pb.Span)({ 39 | Service: (string) (len=10) "aws.lambda", 40 | Name: (string) (len=16) "requests.request", 41 | Resource: (string) (len=16) "requests.request", 42 | TraceID: (uint64) 5673508998968570243, 43 | SpanID: (uint64) 15300116249758373965, 44 | ParentID: (uint64) 12593051921697315276, 45 | Start: (int64) 1586269922945357000, 46 | Duration: (int64) 138997000, 47 | Error: (int32) 0, 48 | Meta: (map[string]string) (len=5) { 49 | (string) (len=10) "_dd.origin": (string) (len=6) "lambda", 50 | (string) (len=11) "http.method": (string) (len=3) "GET", 51 | (string) (len=16) "http.status_code": (string) (len=3) "200", 52 | (string) (len=8) "http.url": (string) (len=26) "https://www.datadoghq.com/", 53 | (string) (len=7) "service": (string) (len=10) "aws.lambda" 54 | }, 55 | Metrics: (map[string]float64) (len=2) { 56 | (string) (len=12) "_dd.measured": (float64) 1, 57 | (string) (len=13) "_dd1.sr.eausr": (float64) 1 58 | }, 59 | Type: (string) (len=4) "http" 60 | }), 61 | (*pb.Span)({ 62 | Service: (string) (len=10) "aws.lambda", 63 | Name: (string) (len=19) "handler.get_message", 64 | Resource: (string) (len=19) "handler.get_message", 65 | TraceID: (uint64) 5673508998968570243, 66 | SpanID: (uint64) 9089086961342797587, 67 | ParentID: (uint64) 12593051921697315276, 68 | Start: (int64) 1586269923086220000, 69 | Duration: (int64) 100232000, 70 | Error: (int32) 0, 71 | Meta: (map[string]string) (len=1) { 72 | (string) (len=10) "_dd.origin": (string) (len=6) "lambda" 73 | }, 74 | Metrics: (map[string]float64) , 75 | Type: (string) "" 76 | }) 77 | }, 78 | StartTime: (int64) 1586269922931758000, 79 | EndTime: (int64) 0 80 | }) 81 | }, 82 | Transactions: ([]*pb.Span) 83 | }) 84 | } 85 | 
-------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_custom_tags.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "data": [ 5 | { 6 | "aws": { 7 | "awslogs": { 8 | "logGroup": "testLogGroup", 9 | "logStream": "testLogStream", 10 | "owner": "123456789123" 11 | }, 12 | "function_version": "$LATEST", 13 | "invoked_function_arn": "arn:aws:lambda:us-east-1:0000000000:function:test" 14 | }, 15 | "ddsource": "cloudwatch", 16 | "ddsourcecategory": "aws", 17 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:,custom_tag1:value1,custom_tag2:value2", 18 | "host": "testLogGroup", 19 | "id": "eventId1", 20 | "message": "{\"message\": \"hello world\"}", 21 | "service": "cloudwatch", 22 | "timestamp": 1440442987000 23 | } 24 | ], 25 | "headers": { 26 | "Accept": "*/*", 27 | "Accept-Encoding": "gzip, deflate", 28 | "Connection": "keep-alive", 29 | "Content-Length": "", 30 | "Content-type": "application/json", 31 | "DD-API-KEY": "abcdefghijklmnopqrstuvwxyz012345", 32 | "DD-EVP-ORIGIN": "aws_forwarder", 33 | "DD-EVP-ORIGIN-VERSION": "", 34 | "Host": "recorder:8080", 35 | "User-Agent": "", 36 | "x-datadog-parent-id": "", 37 | "x-datadog-sampling-priority": "2", 38 | "x-datadog-trace-id": "4842834437835386637" 39 | }, 40 | "path": "/api/v2/logs", 41 | "verb": "POST" 42 | }, 43 | { 44 | "data": { 45 | "series": [ 46 | { 47 | "device": null, 48 | "host": null, 49 | "interval": 10, 50 | "metric": "aws.dd_forwarder.incoming_events", 51 | "points": "", 52 | "tags": [ 53 | "forwardername:test", 54 | "forwarder_memorysize:1536", 55 | "forwarder_version:", 56 | "event_type:awslogs" 57 | ], 58 | "type": "distribution" 59 | }, 60 | { 61 | "device": null, 62 | "host": null, 63 | "interval": 10, 64 | "metric": "aws.dd_forwarder.logs_forwarded", 65 | "points": "", 66 | "tags": [ 67 | "forwardername:test", 68 | "forwarder_memorysize:1536", 69 | "forwarder_version:", 70 | "event_type:awslogs" 71 | ], 72 | "type": "distribution" 73 | }, 74 | { 75 | "device": null, 76 | "host": null, 77 | "interval": 10, 78 | "metric": "aws.dd_forwarder.metrics_forwarded", 79 | "points": "", 80 | "tags": [ 81 | "forwardername:test", 82 | "forwarder_memorysize:1536", 83 | "forwarder_version:", 84 | "event_type:awslogs" 85 | ], 86 | "type": "distribution" 87 | } 88 | ] 89 | }, 90 | "headers": { 91 | "Accept": "*/*", 92 | "Accept-Encoding": "gzip, deflate", 93 | "Connection": "keep-alive", 94 | "Content-Encoding": "deflate", 95 | "Content-Length": "", 96 | "Content-Type": "application/json", 97 | "Host": "recorder:8080", 98 | "User-Agent": "", 99 | "x-datadog-parent-id": "", 100 | "x-datadog-sampling-priority": "2", 101 | "x-datadog-trace-id": "4842834437835386637" 102 | }, 103 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 104 | "verb": "POST" 105 | } 106 | ] 107 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots-cache-test/cloudwatch_log_cloudtrail.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "aws-cloudtrail-logs-123456789123-c81b5193", 5 | "logStream": "123456789123_CloudTrail_us-east-1", 6 | "subscriptionFilters": [ 7 | "testFilter" 8 | ], 9 | "logEvents": [ 10 | { 11 | "id": 
"35689263648391837472973739781728019701390240798247944192", 12 | "timestamp": 1600361930988, 13 | "message": "{\"eventVersion\":\"1.05\",\"userIdentity\":{\"type\":\"AssumedRole\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V:DatadogAWSIntegration\",\"arn\":\"arn:aws:sts::123456789123:assumed-role/DatadogAWSIntegrationRole/DatadogAWSIntegration\",\"accountId\":\"123456789123\",\"accessKeyId\":\"ASIA55TKTI7VANF3DEVD\",\"sessionContext\":{\"sessionIssuer\":{\"type\":\"Role\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V\",\"arn\":\"arn:aws:iam::123456789123:role/DatadogAWSIntegrationRole\",\"accountId\":\"123456789123\",\"userName\":\"DatadogAWSIntegrationRole\"},\"webIdFederationData\":{},\"attributes\":{\"mfaAuthenticated\":\"false\",\"creationDate\":\"2020-09-17T16:29:28Z\"}}},\"eventTime\":\"2020-09-17T16:44:11Z\",\"eventSource\":\"logs.amazonaws.com\",\"eventName\":\"DescribeLogStreams\",\"awsRegion\":\"us-east-1\",\"sourceIPAddress\":\"54.161.67.200\",\"userAgent\":\"Datadog\",\"requestParameters\":{\"logGroupName\":\"/aws/lambda/hello-dog-node-dev-hello12x\",\"descending\":true,\"orderBy\":\"LastEventTime\"},\"responseElements\":null,\"requestID\":\"149d9263-2a7a-4e79-b97e-07642b95b0ee\",\"eventID\":\"14b20909-b46b-4f74-b725-d987888edbc7\",\"eventType\":\"AwsApiCall\",\"apiVersion\":\"20140328\",\"recipientAccountId\":\"123456789123\"}" 14 | }, 15 | { 16 | "id": "35689263648391837472973739781728019701390240798247944193", 17 | "timestamp": 1600361930988, 18 | "message": "{\"eventVersion\":\"1.05\",\"userIdentity\":{\"type\":\"Root\",\"principalId\":\"123456789123\",\"arn\":\"arn:aws:iam::123456789123:root\",\"accountId\":\"123456789123\",\"accessKeyId\":\"ASIA55TKTI7VJK52JDP3\",\"sessionContext\":{\"sessionIssuer\":{},\"webIdFederationData\":{},\"attributes\":{\"mfaAuthenticated\":\"false\",\"creationDate\":\"2020-09-17T14:19:46Z\"}}},\"eventTime\":\"2020-09-17T16:44:28Z\",\"eventSource\":\"health.amazonaws.com\",\"eventName\":\"DescribeEventAggregates\",\"awsRegion\":\"us-east-1\",\"sourceIPAddress\":\"68.194.150.7\",\"userAgent\":\"console.amazonaws.com\",\"requestParameters\":{\"aggregateField\":\"eventTypeCategory\",\"filter\":{\"eventStatusCodes\":[\"open\",\"upcoming\"],\"startTimes\":[{\"from\":\"Sep 10, 2020 4:44:28 PM\"}]}},\"responseElements\":null,\"requestID\":\"f0cd8b48-77ce-4432-b458-478424f38a38\",\"eventID\":\"303c31dd-aeee-4d5d-9311-61a1aa802c6f\",\"eventType\":\"AwsApiCall\",\"recipientAccountId\":\"123456789123\"}" 19 | }, 20 | { 21 | "id": "35689263648391837472973739781728019701390240798247944194", 22 | "timestamp": 1600361930988, 23 | "message": 
"{\"eventVersion\":\"1.05\",\"userIdentity\":{\"type\":\"AssumedRole\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V:DatadogAWSIntegration\",\"arn\":\"arn:aws:sts::123456789123:assumed-role/DatadogAWSIntegrationRole/DatadogAWSIntegration\",\"accountId\":\"123456789123\",\"accessKeyId\":\"ASIA55TKTI7VANF3DEVD\",\"sessionContext\":{\"sessionIssuer\":{\"type\":\"Role\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V\",\"arn\":\"arn:aws:iam::123456789123:role/DatadogAWSIntegrationRole\",\"accountId\":\"123456789123\",\"userName\":\"DatadogAWSIntegrationRole\"},\"webIdFederationData\":{},\"attributes\":{\"mfaAuthenticated\":\"false\",\"creationDate\":\"2020-09-17T16:29:28Z\"}}},\"eventTime\":\"2020-09-17T16:44:10Z\",\"eventSource\":\"logs.amazonaws.com\",\"eventName\":\"DescribeLogStreams\",\"awsRegion\":\"us-east-1\",\"sourceIPAddress\":\"54.161.67.200\",\"userAgent\":\"Datadog\",\"requestParameters\":{\"logGroupName\":\"/aws/lambda/hello-dog-node-dev-hello10x\",\"descending\":true,\"orderBy\":\"LastEventTime\"},\"responseElements\":null,\"requestID\":\"8fdf17f5-1887-4008-88ee-96da7b039924\",\"eventID\":\"6b574dee-bdc5-4abe-b6ec-10888ac258a2\",\"eventType\":\"AwsApiCall\",\"apiVersion\":\"20140328\",\"recipientAccountId\":\"123456789123\"}" 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_route53.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "data": [ 5 | { 6 | "aws": { 7 | "awslogs": { 8 | "logGroup": "/aws/route53", 9 | "logStream": "vpc-34d4ae52_20210419T0940Z_i-090384b04e4e5bfd2", 10 | "owner": "601427279990" 11 | }, 12 | "function_version": "$LATEST", 13 | "invoked_function_arn": "arn:aws:lambda:us-east-1:0000000000:function:test" 14 | }, 15 | "ddsource": "route53", 16 | "ddsourcecategory": "aws", 17 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:", 18 | "host": "i-090384b04e4e5bfd2", 19 | "id": "36101011026851289935914250459116581911799633971817938944", 20 | "message": "{\"version\":\"1.100000\",\"account_id\":\"601427279990\",\"region\":\"us-east-1\",\"vpc_id\":\"vpc-34d4ae52\",\"query_timestamp\":\"2021-04-19T09:42:02Z\",\"query_name\":\"queue.amazonaws.com.\",\"query_type\":\"A\",\"query_class\":\"IN\",\"rcode\":\"NOERROR\",\"answers\":[{\"Rdata\":\"3.236.169.0\",\"Type\":\"A\",\"Class\":\"IN\"}],\"srcaddr\":\"172.31.26.215\",\"srcport\":\"34460\",\"transport\":\"UDP\",\"srcids\":{\"instance\":\"i-090384b04e4e5bfd2\"}}", 21 | "service": "route53", 22 | "timestamp": 1618825322000 23 | } 24 | ], 25 | "headers": { 26 | "Accept": "*/*", 27 | "Accept-Encoding": "gzip, deflate", 28 | "Connection": "keep-alive", 29 | "Content-Length": "", 30 | "Content-type": "application/json", 31 | "DD-API-KEY": "abcdefghijklmnopqrstuvwxyz012345", 32 | "DD-EVP-ORIGIN": "aws_forwarder", 33 | "DD-EVP-ORIGIN-VERSION": "", 34 | "Host": "recorder:8080", 35 | "User-Agent": "", 36 | "x-datadog-parent-id": "", 37 | "x-datadog-sampling-priority": "2", 38 | "x-datadog-trace-id": "4842834437835386637" 39 | }, 40 | "path": "/api/v2/logs", 41 | "verb": "POST" 42 | }, 43 | { 44 | "data": { 45 | "series": [ 46 | { 47 | "device": null, 48 | "host": null, 49 | "interval": 10, 50 | "metric": "aws.dd_forwarder.list_tags_log_group_api_call", 51 | "points": "", 52 | "tags": [ 53 | "forwardername:test", 54 | "forwarder_memorysize:1536", 55 | "forwarder_version:", 56 | "event_type:awslogs" 57 | ], 58 | 
"type": "distribution" 59 | }, 60 | { 61 | "device": null, 62 | "host": null, 63 | "interval": 10, 64 | "metric": "aws.dd_forwarder.incoming_events", 65 | "points": "", 66 | "tags": [ 67 | "forwardername:test", 68 | "forwarder_memorysize:1536", 69 | "forwarder_version:", 70 | "event_type:awslogs" 71 | ], 72 | "type": "distribution" 73 | }, 74 | { 75 | "device": null, 76 | "host": null, 77 | "interval": 10, 78 | "metric": "aws.dd_forwarder.logs_forwarded", 79 | "points": "", 80 | "tags": [ 81 | "forwardername:test", 82 | "forwarder_memorysize:1536", 83 | "forwarder_version:", 84 | "event_type:awslogs" 85 | ], 86 | "type": "distribution" 87 | }, 88 | { 89 | "device": null, 90 | "host": null, 91 | "interval": 10, 92 | "metric": "aws.dd_forwarder.metrics_forwarded", 93 | "points": "", 94 | "tags": [ 95 | "forwardername:test", 96 | "forwarder_memorysize:1536", 97 | "forwarder_version:", 98 | "event_type:awslogs" 99 | ], 100 | "type": "distribution" 101 | } 102 | ] 103 | }, 104 | "headers": { 105 | "Accept": "*/*", 106 | "Accept-Encoding": "gzip, deflate", 107 | "Connection": "keep-alive", 108 | "Content-Encoding": "deflate", 109 | "Content-Length": "", 110 | "Content-Type": "application/json", 111 | "Host": "recorder:8080", 112 | "User-Agent": "", 113 | "x-datadog-parent-id": "", 114 | "x-datadog-sampling-priority": "2", 115 | "x-datadog-trace-id": "4842834437835386637" 116 | }, 117 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 118 | "verb": "POST" 119 | } 120 | ] 121 | } -------------------------------------------------------------------------------- /aws/logs_monitoring/trace_forwarder/cmd/trace/main.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Unless explicitly stated otherwise all files in this repository are licensed 3 | * under the Apache License Version 2.0. 4 | * 5 | * This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | * Copyright 2021 Datadog, Inc. 7 | */ 8 | package main 9 | 10 | import ( 11 | "C" 12 | "context" 13 | "encoding/json" 14 | "errors" 15 | "fmt" 16 | 17 | "github.com/DataDog/datadog-serverless-functions/aws/logs_monitoring/trace_forwarder/internal/apm" 18 | 19 | "github.com/DataDog/datadog-agent/pkg/trace/obfuscate" 20 | "github.com/DataDog/datadog-agent/pkg/trace/pb" 21 | ) 22 | 23 | var ( 24 | obfuscator *obfuscate.Obfuscator 25 | edgeConnection apm.TraceEdgeConnection 26 | ) 27 | 28 | type ( 29 | RawTracePayload struct { 30 | Message string `json:"message"` 31 | Tags string `json:"tags"` 32 | } 33 | ) 34 | 35 | // Configure will set up the bindings 36 | //export Configure 37 | func Configure(rootURL, apiKey string, InsecureSkipVerify bool) { 38 | // Need to make a copy of these values, otherwise the underlying memory 39 | // might be cleaned up by the runtime. 
40 | localRootURL := fmt.Sprintf("%s", rootURL) 41 | localAPIKey := fmt.Sprintf("%s", apiKey) 42 | 43 | obfuscator = obfuscate.NewObfuscator(&obfuscate.Config{ 44 | ES: obfuscate.JSONSettings{ 45 | Enabled: true, 46 | }, 47 | Mongo: obfuscate.JSONSettings{ 48 | Enabled: true, 49 | }, 50 | RemoveQueryString: true, 51 | RemovePathDigits: true, 52 | RemoveStackTraces: true, 53 | Redis: true, 54 | Memcached: true, 55 | }) 56 | edgeConnection = apm.CreateTraceEdgeConnection(localRootURL, localAPIKey, InsecureSkipVerify) 57 | } 58 | 59 | // returns 0 on success, 1 on error 60 | //export ForwardTraces 61 | func ForwardTraces(serializedTraces string) int { 62 | rawTracePayloads, err := unmarshalSerializedTraces(serializedTraces) 63 | if err != nil { 64 | fmt.Printf("Couldn't forward traces: %v", err) 65 | return 1 66 | } 67 | 68 | processedTracePayloads, err := processRawTracePayloads(rawTracePayloads) 69 | 70 | if len(processedTracePayloads) == 0 { 71 | fmt.Printf("No traces to forward") 72 | return 0 73 | } 74 | 75 | if err != nil { 76 | fmt.Printf("Couldn't forward traces: %v", err) 77 | return 1 78 | } 79 | 80 | aggregatedTracePayloads := aggregateTracePayloadsByEnv(processedTracePayloads) 81 | 82 | err = sendTracesToIntake(aggregatedTracePayloads) 83 | if err != nil { 84 | fmt.Printf("Couldn't forward traces: %v", err) 85 | return 1 86 | } 87 | 88 | return 0 89 | } 90 | 91 | func unmarshalSerializedTraces(serializedTraces string) ([]RawTracePayload, error) { 92 | var rawTracePayloads []RawTracePayload 93 | err := json.Unmarshal([]byte(serializedTraces), &rawTracePayloads) 94 | 95 | if err != nil { 96 | return rawTracePayloads, fmt.Errorf("Couldn't unmarshal serialized traces, %v", err) 97 | } 98 | 99 | return rawTracePayloads, nil 100 | } 101 | 102 | func processRawTracePayloads(rawTracePayloads []RawTracePayload) ([]*pb.TracePayload, error) { 103 | var processedTracePayloads []*pb.TracePayload 104 | for _, rawTracePayload := range rawTracePayloads { 105 | traceList, err := apm.ProcessTrace(rawTracePayload.Message, obfuscator, rawTracePayload.Tags) 106 | if err != nil { 107 | return processedTracePayloads, err 108 | } 109 | processedTracePayloads = append(processedTracePayloads, traceList...) 110 | } 111 | return processedTracePayloads, nil 112 | } 113 | 114 | func aggregateTracePayloadsByEnv(tracePayloads []*pb.TracePayload) []*pb.TracePayload { 115 | lookup := make(map[string]*pb.TracePayload) 116 | for _, tracePayload := range tracePayloads { 117 | key := fmt.Sprintf("%s|%s", tracePayload.HostName, tracePayload.Env) 118 | var existingPayload *pb.TracePayload 119 | if val, ok := lookup[key]; ok { 120 | existingPayload = val 121 | } else { 122 | existingPayload = &pb.TracePayload{ 123 | HostName: tracePayload.HostName, 124 | Env: tracePayload.Env, 125 | Traces: make([]*pb.APITrace, 0), 126 | } 127 | lookup[key] = existingPayload 128 | } 129 | existingPayload.Traces = append(existingPayload.Traces, tracePayload.Traces...) 
130 | } 131 | 132 | newPayloads := make([]*pb.TracePayload, 0) 133 | 134 | for _, tracePayload := range lookup { 135 | newPayloads = append(newPayloads, tracePayload) 136 | } 137 | return newPayloads 138 | } 139 | 140 | func sendTracesToIntake(tracePayloads []*pb.TracePayload) error { 141 | hadErr := false 142 | for _, tracePayload := range tracePayloads { 143 | err := edgeConnection.SendTraces(context.Background(), tracePayload, 3) 144 | if err != nil { 145 | fmt.Printf("Failed to send traces with error %v\n", err) 146 | hadErr = true 147 | } 148 | stats := apm.ComputeAPMStats(tracePayload) 149 | err = edgeConnection.SendStats(context.Background(), stats, 3) 150 | if err != nil { 151 | fmt.Printf("Failed to send trace stats with error %v\n", err) 152 | hadErr = true 153 | } 154 | } 155 | 156 | if hadErr { 157 | return errors.New("Failed to send traces or stats to intake") 158 | } 159 | return nil 160 | } 161 | 162 | func main() {} 163 | -------------------------------------------------------------------------------- /azure/eventhub_log_forwarder/parent_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "eventHubNamespace": { 6 | "type": "string", 7 | "defaultValue": "[concat('datadog-ns-', newGuid())]", 8 | "metadata": { 9 | "description": "Name of EventHub namespace, which must be globally unique." 10 | } 11 | }, 12 | "eventHubName": { 13 | "type": "string", 14 | "defaultValue": "datadog-eventhub", 15 | "metadata": { 16 | "description": "Name of Event Hub" 17 | } 18 | }, 19 | "functionAppName": { 20 | "type": "string", 21 | "defaultValue": "[concat('datadog-functionapp-', newGuid())]", 22 | "metadata": { 23 | "description": "The name of the function app " 24 | } 25 | }, 26 | "functionName": { 27 | "type": "string", 28 | "defaultValue": "datadog-function", 29 | "metadata": { 30 | "description": "The name of the function." 31 | } 32 | }, 33 | "functionCode": { 34 | "type": "string", 35 | "metadata": { 36 | "description": "Code for the function to run, saved into index.js" 37 | } 38 | }, 39 | "apiKey": { 40 | "type": "securestring", 41 | "metadata": { 42 | "description": "Datadog API key" 43 | } 44 | }, 45 | "datadogTags": { 46 | "type": "string", 47 | "defaultValue": "", 48 | "metadata": { 49 | "description": "Comma-separated list of tags" 50 | } 51 | }, 52 | "location": { 53 | "type": "string", 54 | "defaultValue": "[resourceGroup().location]", 55 | "metadata": { 56 | "description": "Specify a location for the resources." 
57 | } 58 | }, 59 | "datadogSite": { 60 | "type": "string", 61 | "defaultValue": "datadoghq.com", 62 | "metadata": { 63 | "description": "Datadog site to send logs" 64 | } 65 | }, 66 | "endpointSuffix": { 67 | "type": "string", 68 | "defaultValue": "core.windows.net", 69 | "metadata": { 70 | "description": "Endpoint suffix for storage account" 71 | } 72 | } 73 | }, 74 | "variables": { 75 | "eventHubTemplateLink": "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/eventhub_log_forwarder/event_hub.json", 76 | "functionAppTemplateLink": "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/eventhub_log_forwarder/function_template.json" 77 | }, 78 | "resources": [ 79 | { 80 | "name": "eventHubTemplate", 81 | "type": "Microsoft.Resources/deployments", 82 | "apiVersion": "2018-05-01", 83 | "properties": { 84 | "mode": "Incremental", 85 | "templateLink": { 86 | "uri": "[variables('eventHubTemplateLink')]", 87 | "contentVersion": "1.0.0.0" 88 | }, 89 | "parameters": { 90 | "eventHubNamespace": { 91 | "value": "[parameters('eventHubNamespace')]" 92 | }, 93 | "eventHubName": { 94 | "value": "[parameters('eventHubName')]" 95 | }, 96 | "location": { 97 | "value": "[parameters('location')]" 98 | } 99 | } 100 | } 101 | }, 102 | { 103 | "name": "functionAppTemplate", 104 | "type": "Microsoft.Resources/deployments", 105 | "apiVersion": "2018-05-01", 106 | "properties": { 107 | "mode": "Incremental", 108 | "templateLink": { 109 | "uri": "[variables('functionAppTemplateLink')]", 110 | "contentVersion": "1.0.0.0" 111 | }, 112 | "parameters": { 113 | "eventHubNamespace": { 114 | "value": "[parameters('eventHubNamespace')]" 115 | }, 116 | "eventHubName": { 117 | "value": "[parameters('eventHubName')]" 118 | }, 119 | "functionAppName": { 120 | "value": "[parameters('functionAppName')]" 121 | }, 122 | "functionName": { 123 | "value": "[parameters('functionName')]" 124 | }, 125 | "functionCode": { 126 | "value": "[parameters('functionCode')]" 127 | }, 128 | "apiKey": { 129 | "value": "[parameters('apiKey')]" 130 | }, 131 | "datadogTags": { 132 | "value": "[parameters('datadogTags')]" 133 | }, 134 | "location": { 135 | "value": "[parameters('location')]" 136 | }, 137 | "datadogSite": { 138 | "value": "[parameters('datadogSite')]" 139 | }, 140 | "endpointSuffix": { 141 | "value": "[parameters('endpointSuffix')]" 142 | } 143 | } 144 | }, 145 | "dependsOn": [ 146 | "[resourceId('Microsoft.Resources/deployments','eventHubTemplate')]" 147 | ] 148 | } 149 | ], 150 | "outputs": { 151 | "eventHubNamespace": { 152 | "type": "string", 153 | "value": "[parameters('eventHubNamespace')]" 154 | } 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "data": [ 5 | { 6 | "aws": { 7 | "awslogs": { 8 | "logGroup": "testLogGroup", 9 | "logStream": "testLogStream", 10 | "owner": "123456789123" 11 | }, 12 | "function_version": "$LATEST", 13 | "invoked_function_arn": "arn:aws:lambda:us-east-1:0000000000:function:test" 14 | }, 15 | "ddsource": "cloudwatch", 16 | "ddsourcecategory": "aws", 17 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:", 18 | "host": "testLogGroup", 19 | "id": "eventId1", 20 | "message": "[ERROR] First test message", 21 | "service": "cloudwatch", 22 | "timestamp": 1440442987000 23 | 
}, 24 | { 25 | "aws": { 26 | "awslogs": { 27 | "logGroup": "testLogGroup", 28 | "logStream": "testLogStream", 29 | "owner": "123456789123" 30 | }, 31 | "function_version": "$LATEST", 32 | "invoked_function_arn": "arn:aws:lambda:us-east-1:0000000000:function:test" 33 | }, 34 | "ddsource": "cloudwatch", 35 | "ddsourcecategory": "aws", 36 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:", 37 | "host": "testLogGroup", 38 | "id": "eventId2", 39 | "message": "[ERROR] Second test message", 40 | "service": "cloudwatch", 41 | "timestamp": 1440442987001 42 | } 43 | ], 44 | "headers": { 45 | "Accept": "*/*", 46 | "Accept-Encoding": "gzip, deflate", 47 | "Connection": "keep-alive", 48 | "Content-Length": "", 49 | "Content-type": "application/json", 50 | "DD-API-KEY": "abcdefghijklmnopqrstuvwxyz012345", 51 | "DD-EVP-ORIGIN": "aws_forwarder", 52 | "DD-EVP-ORIGIN-VERSION": "", 53 | "Host": "recorder:8080", 54 | "User-Agent": "", 55 | "x-datadog-parent-id": "", 56 | "x-datadog-sampling-priority": "2", 57 | "x-datadog-trace-id": "4842834437835386637" 58 | }, 59 | "path": "/api/v2/logs", 60 | "verb": "POST" 61 | }, 62 | { 63 | "data": { 64 | "series": [ 65 | { 66 | "device": null, 67 | "host": null, 68 | "interval": 10, 69 | "metric": "aws.dd_forwarder.list_tags_log_group_api_call", 70 | "points": "", 71 | "tags": [ 72 | "forwardername:test", 73 | "forwarder_memorysize:1536", 74 | "forwarder_version:", 75 | "event_type:awslogs" 76 | ], 77 | "type": "distribution" 78 | }, 79 | { 80 | "device": null, 81 | "host": null, 82 | "interval": 10, 83 | "metric": "aws.dd_forwarder.incoming_events", 84 | "points": "", 85 | "tags": [ 86 | "forwardername:test", 87 | "forwarder_memorysize:1536", 88 | "forwarder_version:", 89 | "event_type:awslogs" 90 | ], 91 | "type": "distribution" 92 | }, 93 | { 94 | "device": null, 95 | "host": null, 96 | "interval": 10, 97 | "metric": "aws.dd_forwarder.logs_forwarded", 98 | "points": "", 99 | "tags": [ 100 | "forwardername:test", 101 | "forwarder_memorysize:1536", 102 | "forwarder_version:", 103 | "event_type:awslogs" 104 | ], 105 | "type": "distribution" 106 | }, 107 | { 108 | "device": null, 109 | "host": null, 110 | "interval": 10, 111 | "metric": "aws.dd_forwarder.metrics_forwarded", 112 | "points": "", 113 | "tags": [ 114 | "forwardername:test", 115 | "forwarder_memorysize:1536", 116 | "forwarder_version:", 117 | "event_type:awslogs" 118 | ], 119 | "type": "distribution" 120 | } 121 | ] 122 | }, 123 | "headers": { 124 | "Accept": "*/*", 125 | "Accept-Encoding": "gzip, deflate", 126 | "Connection": "keep-alive", 127 | "Content-Encoding": "deflate", 128 | "Content-Length": "", 129 | "Content-Type": "application/json", 130 | "Host": "recorder:8080", 131 | "User-Agent": "", 132 | "x-datadog-parent-id": "", 133 | "x-datadog-sampling-priority": "2", 134 | "x-datadog-trace-id": "4842834437835386637" 135 | }, 136 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 137 | "verb": "POST" 138 | } 139 | ] 140 | } -------------------------------------------------------------------------------- /aws/rds_enhanced_monitoring/examples/tmp_message.json: -------------------------------------------------------------------------------- 1 | { 2 | "engine": "Aurora", 3 | "instanceID": "i-0a1b2c3d4e5f6g", 4 | "instanceResourceID": "db-ABCDEFGHIJKLMNOPQRSTUVWXYZ", 5 | "timestamp": "__timestamp__", 6 | "version": 1.00, 7 | "uptime": "10 days, 1:53:04", 8 | "numVCPUs": 2, 9 | "cpuUtilization": { 10 | "guest": 0.00, 11 | "irq": 0.00, 12 | "system": 
0.88, 13 | "wait": 0.54, 14 | "idle": 97.57, 15 | "user": 0.68, 16 | "total": 1.56, 17 | "steal": 0.07, 18 | "nice": 0.25 19 | }, 20 | "loadAverageMinute": { 21 | "fifteen": 0.14, 22 | "five": 0.17, 23 | "one": 0.18 24 | }, 25 | "memory": { 26 | "writeback": 0, 27 | "hugePagesFree": 0, 28 | "hugePagesRsvd": 0, 29 | "hugePagesSurp": 0, 30 | "cached": 11742648, 31 | "hugePagesSize": 2048, 32 | "free": 259016, 33 | "hugePagesTotal": 0, 34 | "inactive": 1817176, 35 | "pageTables": 25808, 36 | "dirty": 660, 37 | "mapped": 8087612, 38 | "active": 13016084, 39 | "total": 15670012, 40 | "slab": 437916, 41 | "buffers": 272136 42 | }, 43 | "tasks": { 44 | "sleeping": 223, 45 | "zombie": 0, 46 | "running": 1, 47 | "stopped": 0, 48 | "total": 224, 49 | "blocked": 0 50 | }, 51 | "swap": { 52 | "cached": 0, 53 | "total": 0, 54 | "free": 0 55 | }, 56 | "network": [ 57 | { 58 | "interface": "eth0", 59 | "rx": 217.57, 60 | "tx": 2319.67 61 | } 62 | ], 63 | "diskIO": [{ 64 | "writeKbPS": 2301.6, 65 | "readIOsPS": 0.03, 66 | "await": 4.04, 67 | "readKbPS": 0.13, 68 | "rrqmPS": 0, 69 | "util": 0.2, 70 | "avgQueueLen": 0.11, 71 | "tps": 28.27, 72 | "readKb": 4, 73 | "device": "rdsdev", 74 | "writeKb": 69048, 75 | "avgReqSz": 162.86, 76 | "wrqmPS": 0, 77 | "writeIOsPS": 28.23 78 | },{ 79 | "writeKbPS": 177.2, 80 | "readIOsPS": 0.03, 81 | "await": 1.52, 82 | "readKbPS": 0.13, 83 | "rrqmPS": 0, 84 | "util": 0.35, 85 | "avgQueueLen": 0.03, 86 | "tps": 25.67, 87 | "readKb": 4, 88 | "device": "filesystem", 89 | "writeKb": 5316, 90 | "avgReqSz": 13.82, 91 | "wrqmPS": 8.3, 92 | "writeIOsPS": 25.63 93 | } 94 | ], 95 | "fileSys": [ 96 | { 97 | "used": 7006720, 98 | "name": "rdsfilesys", 99 | "usedFiles": 2650, 100 | "usedFilePercent": 0.13, 101 | "maxFiles": 1966080, 102 | "mountPoint": "/rdsdbdata", 103 | "total": 30828540, 104 | "usedPercent": 22.73 105 | } 106 | ], 107 | "physicalDeviceIO": [ 108 | { 109 | "writeKbPS": 583.6, 110 | "readIOsPS": 0, 111 | "await": 2.32, 112 | "readKbPS": 0, 113 | "rrqmPS": 0, 114 | "util": 0.09, 115 | "avgQueueLen": 0.02, 116 | "tps": 9.9, 117 | "readKb": 0, 118 | "device": "nvme3n1", 119 | "writeKb": 17508, 120 | "avgReqSz": 117.9, 121 | "wrqmPS": 4.97, 122 | "writeIOsPS": 9.9 123 | }, { 124 | "writeKbPS": 575.07, 125 | "readIOsPS": 0, 126 | "await": 3.04, 127 | "readKbPS": 0, 128 | "rrqmPS": 0, 129 | "util": 0.09, 130 | "avgQueueLen": 0.03, 131 | "tps": 9.47, 132 | "readKb": 0, 133 | "device": "nvme1n1", 134 | "writeKb": 17252, 135 | "avgReqSz": 121.49, 136 | "wrqmPS": 3.97, 137 | "writeIOsPS": 9.47 138 | }, { 139 | "writeKbPS": 567.33, 140 | "readIOsPS": 0.03, 141 | "await": 2.69, 142 | "readKbPS": 0.13, 143 | "rrqmPS": 0, 144 | "util": 0.09, 145 | "avgQueueLen": 0.02, 146 | "tps": 9.47, 147 | "readKb": 4, 148 | "device": "nvme5n1", 149 | "writeKb": 17020, 150 | "avgReqSz": 119.89, 151 | "wrqmPS": 3.07, 152 | "writeIOsPS": 9.43 153 | }, { 154 | "writeKbPS": 576.53, 155 | "readIOsPS": 0, 156 | "await": 2.64, 157 | "readKbPS": 0, 158 | "rrqmPS": 0, 159 | "util": 0.09, 160 | "avgQueueLen": 0.02, 161 | "tps": 9.8, 162 | "readKb": 0, 163 | "device": "nvme2n1", 164 | "writeKb": 17296, 165 | "avgReqSz": 117.66, 166 | "wrqmPS": 3.9, 167 | "writeIOsPS": 9.8 168 | } 169 | ], 170 | "processList": [ 171 | { 172 | "vss": 11170084, 173 | "name": "aurora", 174 | "tgid": 8455, 175 | "parentID": 1, 176 | "memoryUsedPc": 66.93, 177 | "cpuUsedPc": 0.00, 178 | "id": 8455, 179 | "rss": 10487696 180 | }, 181 | { 182 | "vss": 11170084, 183 | "name": "aurora", 184 | "tgid": 8455, 185 | "parentID": 1, 186 | 
"memoryUsedPc": 66.93, 187 | "cpuUsedPc": 0.82, 188 | "id": 8782, 189 | "rss": 10487696 190 | }, 191 | { 192 | "vss": 11170084, 193 | "name": "aurora", 194 | "tgid": 8455, 195 | "parentID": 1, 196 | "memoryUsedPc": 66.93, 197 | "cpuUsedPc": 0.05, 198 | "id": 8784, 199 | "rss": 10487696 200 | }, 201 | { 202 | "vss": 647304, 203 | "name": "OS processes", 204 | "tgid": 0, 205 | "parentID": 0, 206 | "memoryUsedPc": 0.18, 207 | "cpuUsedPc": 0.02, 208 | "id": 0, 209 | "rss": 22600 210 | }, 211 | { 212 | "vss": 3244792, 213 | "name": "RDS processes", 214 | "tgid": 0, 215 | "parentID": 0, 216 | "memoryUsedPc": 2.80, 217 | "cpuUsedPc": 0.78, 218 | "id": 0, 219 | "rss": 441652 220 | } 221 | ] 222 | } 223 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_cloudtrail.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageType": "DATA_MESSAGE", 3 | "owner": "123456789123", 4 | "logGroup": "aws-cloudtrail-logs-123456789123-c81b5193", 5 | "logStream": "123456789123_CloudTrail_us-east-1", 6 | "subscriptionFilters": [ 7 | "testFilter" 8 | ], 9 | "logEvents": [ 10 | { 11 | "id": "35689263648391837472973739781728019701390240798247944192", 12 | "timestamp": 1600361930988, 13 | "message": "{\"eventVersion\":\"1.05\",\"userIdentity\":{\"type\":\"AssumedRole\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V:DatadogAWSIntegration\",\"arn\":\"arn:aws:sts::123456789123:assumed-role/DatadogAWSIntegrationRole/DatadogAWSIntegration\",\"accountId\":\"123456789123\",\"accessKeyId\":\"ASIA55TKTI7VANF3DEVD\",\"sessionContext\":{\"sessionIssuer\":{\"type\":\"Role\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V\",\"arn\":\"arn:aws:iam::123456789123:role/DatadogAWSIntegrationRole\",\"accountId\":\"123456789123\",\"userName\":\"DatadogAWSIntegrationRole\"},\"webIdFederationData\":{},\"attributes\":{\"mfaAuthenticated\":\"false\",\"creationDate\":\"2020-09-17T16:29:28Z\"}}},\"eventTime\":\"2020-09-17T16:44:11Z\",\"eventSource\":\"logs.amazonaws.com\",\"eventName\":\"DescribeLogStreams\",\"awsRegion\":\"us-east-1\",\"sourceIPAddress\":\"54.161.67.200\",\"userAgent\":\"Datadog\",\"requestParameters\":{\"logGroupName\":\"/aws/lambda/hello-dog-node-dev-hello12x\",\"descending\":true,\"orderBy\":\"LastEventTime\"},\"responseElements\":null,\"requestID\":\"149d9263-2a7a-4e79-b97e-07642b95b0ee\",\"eventID\":\"14b20909-b46b-4f74-b725-d987888edbc7\",\"eventType\":\"AwsApiCall\",\"apiVersion\":\"20140328\",\"recipientAccountId\":\"123456789123\"}" 14 | }, 15 | { 16 | "id": "35689263648391837472973739781728019701390240798247944193", 17 | "timestamp": 1600361930988, 18 | "message": "{\"eventVersion\":\"1.05\",\"userIdentity\":{\"type\":\"Root\",\"principalId\":\"123456789123\",\"arn\":\"arn:aws:iam::123456789123:root\",\"accountId\":\"123456789123\",\"accessKeyId\":\"ASIA55TKTI7VJK52JDP3\",\"sessionContext\":{\"sessionIssuer\":{},\"webIdFederationData\":{},\"attributes\":{\"mfaAuthenticated\":\"false\",\"creationDate\":\"2020-09-17T14:19:46Z\"}}},\"eventTime\":\"2020-09-17T16:44:28Z\",\"eventSource\":\"health.amazonaws.com\",\"eventName\":\"DescribeEventAggregates\",\"awsRegion\":\"us-east-1\",\"sourceIPAddress\":\"68.194.150.7\",\"userAgent\":\"console.amazonaws.com\",\"requestParameters\":{\"aggregateField\":\"eventTypeCategory\",\"filter\":{\"eventStatusCodes\":[\"open\",\"upcoming\"],\"startTimes\":[{\"from\":\"Sep 10, 2020 4:44:28 
PM\"}]}},\"responseElements\":null,\"requestID\":\"f0cd8b48-77ce-4432-b458-478424f38a38\",\"eventID\":\"303c31dd-aeee-4d5d-9311-61a1aa802c6f\",\"eventType\":\"AwsApiCall\",\"recipientAccountId\":\"123456789123\"}" 19 | }, 20 | { 21 | "id": "35689263648391837472973739781728019701390240798247944194", 22 | "timestamp": 1600361930988, 23 | "message": "{\"eventVersion\":\"1.05\",\"userIdentity\":{\"type\":\"AssumedRole\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V:DatadogAWSIntegration\",\"arn\":\"arn:aws:sts::123456789123:assumed-role/DatadogAWSIntegrationRole/DatadogAWSIntegration\",\"accountId\":\"123456789123\",\"accessKeyId\":\"ASIA55TKTI7VANF3DEVD\",\"sessionContext\":{\"sessionIssuer\":{\"type\":\"Role\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V\",\"arn\":\"arn:aws:iam::123456789123:role/DatadogAWSIntegrationRole\",\"accountId\":\"123456789123\",\"userName\":\"DatadogAWSIntegrationRole\"},\"webIdFederationData\":{},\"attributes\":{\"mfaAuthenticated\":\"false\",\"creationDate\":\"2020-09-17T16:29:28Z\"}}},\"eventTime\":\"2020-09-17T16:44:10Z\",\"eventSource\":\"logs.amazonaws.com\",\"eventName\":\"DescribeLogStreams\",\"awsRegion\":\"us-east-1\",\"sourceIPAddress\":\"54.161.67.200\",\"userAgent\":\"Datadog\",\"requestParameters\":{\"logGroupName\":\"/aws/lambda/hello-dog-node-dev-hello10x\",\"descending\":true,\"orderBy\":\"LastEventTime\"},\"responseElements\":null,\"requestID\":\"8fdf17f5-1887-4008-88ee-96da7b039924\",\"eventID\":\"6b574dee-bdc5-4abe-b6ec-10888ac258a2\",\"eventType\":\"AwsApiCall\",\"apiVersion\":\"20140328\",\"recipientAccountId\":\"123456789123\"}" 24 | }, 25 | { 26 | "id": "35689263648391837472973739781728019701390240798247944195", 27 | "timestamp": 1600361930988, 28 | "message": "{\"eventVersion\":\"1.05\",\"userIdentity\":{\"type\":\"AssumedRole\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V:DatadogAWSIntegration\",\"arn\":\"arn:aws:sts::123456789123:assumed-role/DatadogAWSIntegrationRole/i-08014e4f62ccf762d\",\"accountId\":\"123456789123\",\"accessKeyId\":\"ASIA55TKTI7VANF3DEVD\",\"sessionContext\":{\"sessionIssuer\":{\"type\":\"Role\",\"principalId\":\"AROA55TKTI7VKCG3ZB64V\",\"arn\":\"arn:aws:iam::123456789123:role/DatadogAWSIntegrationRole\",\"accountId\":\"123456789123\",\"userName\":\"DatadogAWSIntegrationRole\"},\"webIdFederationData\":{},\"attributes\":{\"mfaAuthenticated\":\"false\",\"creationDate\":\"2020-09-17T16:29:28Z\"}}},\"eventTime\":\"2020-09-17T16:44:10Z\",\"eventSource\":\"logs.amazonaws.com\",\"eventName\":\"DescribeLogStreams\",\"awsRegion\":\"us-east-1\",\"sourceIPAddress\":\"54.161.67.200\",\"userAgent\":\"Datadog\",\"requestParameters\":{\"logGroupName\":\"/aws/lambda/hello-dog-node-dev-hello10x\",\"descending\":true,\"orderBy\":\"LastEventTime\"},\"responseElements\":null,\"requestID\":\"8fdf17f5-1887-4008-88ee-96da7b039924\",\"eventID\":\"6b574dee-bdc5-4abe-b6ec-10888ac258a2\",\"eventType\":\"AwsApiCall\",\"apiVersion\":\"20140328\",\"recipientAccountId\":\"123456789123\"}" 29 | } 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tests/test_cloudtrail_s3.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | import os 3 | import sys 4 | import unittest 5 | import json 6 | import copy 7 | import io 8 | import gzip 9 | 10 | sys.modules["trace_forwarder.connection"] = MagicMock() 11 | sys.modules["datadog_lambda.wrapper"] = MagicMock() 12 | sys.modules["datadog_lambda.metric"] = MagicMock() 13 | 
sys.modules["datadog"] = MagicMock() 14 | sys.modules["requests"] = MagicMock() 15 | sys.modules["requests_futures.sessions"] = MagicMock() 16 | 17 | env_patch = patch.dict( 18 | os.environ, 19 | { 20 | "DD_API_KEY": "11111111111111111111111111111111", 21 | "DD_ADDITIONAL_TARGET_LAMBDAS": "ironmaiden,megadeth", 22 | }, 23 | ) 24 | env_patch.start() 25 | 26 | import lambda_function 27 | import parsing 28 | 29 | env_patch.stop() 30 | 31 | 32 | class Context: 33 | function_version = 0 34 | invoked_function_arn = "invoked_function_arn" 35 | function_name = "function_name" 36 | memory_limit_in_mb = "10" 37 | 38 | 39 | test_data = { 40 | "Records": [ 41 | { 42 | "eventVersion": "1.08", 43 | "userIdentity": { 44 | "type": "AssumedRole", 45 | "principalId": "AROAYYB64AB3HGPQO2EPR:DatadogAWSIntegration", 46 | "arn": "arn:aws:sts::601427279990:assumed-role/Siti_DatadogAWSIntegrationRole/i-08014e4f62ccf762d", 47 | "accountId": "601427279990", 48 | "accessKeyId": "ASIAYYB64AB3DWOY7JNT", 49 | "sessionContext": { 50 | "sessionIssuer": { 51 | "type": "Role", 52 | "principalId": "AROAYYB64AB3HGPQO2EPR", 53 | "arn": "arn:aws:iam::601427279990:role/Siti_DatadogAWSIntegrationRole", 54 | "accountId": "601427279990", 55 | "userName": "Siti_DatadogAWSIntegrationRole", 56 | }, 57 | "attributes": { 58 | "creationDate": "2021-05-02T23:49:01Z", 59 | "mfaAuthenticated": "false", 60 | }, 61 | }, 62 | }, 63 | "eventTime": "2021-05-02T23:53:28Z", 64 | "eventSource": "dynamodb.amazonaws.com", 65 | "eventName": "DescribeTable", 66 | "awsRegion": "us-east-1", 67 | "sourceIPAddress": "54.162.201.161", 68 | "userAgent": "Datadog", 69 | "requestParameters": {"tableName": "KinesisClientLibraryLocal"}, 70 | "responseElements": None, 71 | "requestID": "A9K7562IBO4MPDQE4O5G9QETRFVV4KQNSO5AEMVJF66Q9ASUAAJG", 72 | "eventID": "a5dd11f9-f616-4ea8-8030-0b3eef554352", 73 | "readOnly": True, 74 | "resources": [ 75 | { 76 | "accountId": "601427279990", 77 | "type": "AWS::DynamoDB::Table", 78 | "ARN": "arn:aws:dynamodb:us-east-1:601427279990:table/KinesisClientLibraryLocal", 79 | } 80 | ], 81 | "eventType": "AwsApiCall", 82 | "apiVersion": "2012-08-10", 83 | "managementEvent": True, 84 | "recipientAccountId": "601427279990", 85 | "eventCategory": "Management", 86 | } 87 | ] 88 | } 89 | 90 | 91 | def test_data_gzipped() -> io.BytesIO: 92 | return io.BytesIO( 93 | gzip.compress(json.dumps(copy.deepcopy(test_data)).encode("utf-8")) 94 | ) 95 | 96 | 97 | class TestS3CloudwatchParsing(unittest.TestCase): 98 | def setUp(self): 99 | self.maxDiff = 9000 100 | 101 | @patch("cache.boto3") 102 | @patch("parsing.boto3") 103 | @patch("lambda_function.boto3") 104 | def test_s3_cloudtrail_pasing_and_enrichment( 105 | self, lambda_boto3, parsing_boto3, cache_boto3 106 | ): 107 | context = Context() 108 | 109 | boto3 = parsing_boto3.client() 110 | boto3.get_object.return_value = {"Body": test_data_gzipped()} 111 | 112 | payload = { 113 | "s3": { 114 | "bucket": { 115 | "name": "test-bucket", 116 | }, 117 | "object": { 118 | "key": "601427279990_CloudTrail_us-east-1_20210503T0000Z_QrttGEk4ZcBTLwj5.json.gz" 119 | }, 120 | } 121 | } 122 | 123 | result = parsing.parse({"Records": [payload]}, context) 124 | 125 | expected = copy.deepcopy([test_data["Records"][0]]) 126 | expected[0].update( 127 | { 128 | "ddsource": "cloudtrail", 129 | "ddsourcecategory": "aws", 130 | "service": "cloudtrail", 131 | "aws": { 132 | "s3": { 133 | "bucket": payload["s3"]["bucket"]["name"], 134 | "key": payload["s3"]["object"]["key"], 135 | }, 136 | "function_version": 
context.function_version, 137 | "invoked_function_arn": context.invoked_function_arn, 138 | }, 139 | } 140 | ) 141 | 142 | # yeah, there are tags, but we don't care to compare them 143 | result[0].pop("ddtags") 144 | 145 | # expected parsed result, now testing enrichment 146 | self.assertEqual(expected[0], result[0]) 147 | 148 | expected[0]["host"] = "i-08014e4f62ccf762d" 149 | self.assertEqual(expected[0], lambda_function.enrich(result)[0]) 150 | 151 | 152 | if __name__ == "__main__": 153 | unittest.main() 154 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/publish_layers.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2019 Datadog, Inc. 7 | 8 | # Publish the datadog forwarder layer across regions, using the AWS CLI 9 | # Usage: VERSION=5 REGIONS=us-east-1 LAYERS=Datadog-Python27 publish_layers.sh 10 | # VERSION is required. 11 | set -e 12 | 13 | # Makes sure any subprocesses will be terminated with this process 14 | trap "pkill -P $$; exit 1;" INT 15 | 16 | PYTHON_VERSIONS_FOR_AWS_CLI=("python3.8") 17 | LAYER_PATHS=(".forwarder/aws-dd-forwarder-${FORWARDER_VERSION}-layer.zip") 18 | AVAILABLE_LAYERS=("Datadog-Forwarder") 19 | AVAILABLE_REGIONS=$(aws ec2 describe-regions | jq -r '.[] | .[] | .RegionName') 20 | 21 | # Check that the layer files exist 22 | for layer_file in "${LAYER_PATHS[@]}" 23 | do 24 | if [ ! -f $layer_file ]; then 25 | echo "Could not find $layer_file." 26 | exit 1 27 | fi 28 | done 29 | 30 | # Determine the target regions 31 | if [ -z "$REGIONS" ]; then 32 | echo "Region not specified, running for all available regions." 33 | REGIONS=$AVAILABLE_REGIONS 34 | else 35 | echo "Region specified: $REGIONS" 36 | if [[ ! "$AVAILABLE_REGIONS" == *"$REGIONS"* ]]; then 37 | echo "Could not find $REGIONS in available regions: $AVAILABLE_REGIONS" 38 | echo "" 39 | echo "EXITING SCRIPT." 40 | exit 1 41 | fi 42 | fi 43 | 44 | # Determine the target layers 45 | if [ -z "$LAYERS" ]; then 46 | echo "Layer not specified, running for all layers." 47 | LAYERS=("${AVAILABLE_LAYERS[@]}") 48 | else 49 | echo "Layer specified: $LAYERS" 50 | if [[ ! " ${AVAILABLE_LAYERS[@]} " =~ " ${LAYERS} " ]]; then 51 | echo "Could not find $LAYERS in available layers: ${AVAILABLE_LAYERS[@]}" 52 | echo "" 53 | echo "EXITING SCRIPT." 54 | exit 1 55 | fi 56 | fi 57 | 58 | # Determine the target layer version 59 | if [ -z "$LAYER_VERSION" ]; then 60 | echo "Layer version not specified" 61 | echo "" 62 | echo "EXITING SCRIPT." 63 | exit 1 64 | else 65 | echo "Layer version specified: $LAYER_VERSION" 66 | fi 67 | 68 | if [ "$NO_INPUT" = true ] ; then 69 | echo "Publishing version $LAYER_VERSION of layers ${LAYERS[*]} to regions ${REGIONS[*]}" 70 | else 71 | read -p "Ready to publish version $LAYER_VERSION of layers ${LAYERS[*]} to regions ${REGIONS[*]} (y/n)?" 
CONT 72 | if [ "$CONT" != "y" ]; then 73 | echo "Exiting" 74 | exit 1 75 | fi 76 | fi 77 | 78 | index_of_layer() { 79 | layer_name=$1 80 | for i in "${!AVAILABLE_LAYERS[@]}"; do 81 | if [[ "${AVAILABLE_LAYERS[$i]}" = "${layer_name}" ]]; then 82 | echo "${i}"; 83 | fi 84 | done 85 | } 86 | 87 | publish_layer() { 88 | region=$1 89 | layer_name=$2 90 | aws_version_key=$3 91 | layer_path=$4 92 | version_nbr=$(aws lambda publish-layer-version --layer-name $layer_name \ 93 | --description "Datadog Forwarder Layer Package" \ 94 | --zip-file "fileb://$layer_path" \ 95 | --region $region \ 96 | --compatible-runtimes $aws_version_key \ 97 | --cli-read-timeout 300 \ 98 | | jq -r '.Version') 99 | 100 | permission=$(aws lambda add-layer-version-permission --layer-name $layer_name \ 101 | --version-number $version_nbr \ 102 | --statement-id "release-$version_nbr" \ 103 | --action lambda:GetLayerVersion --principal "*" \ 104 | --region $region) 105 | 106 | echo $version_nbr 107 | } 108 | 109 | for region in $REGIONS 110 | do 111 | echo "Starting publishing layer for region $region..." 112 | # Publish each of the configured layers 113 | for layer_name in "${LAYERS[@]}"; do 114 | latest_version=$(aws lambda list-layer-versions --region $region --layer-name $layer_name --query 'LayerVersions[0].Version || `0`') 115 | if [ $latest_version -ge $LAYER_VERSION ]; then 116 | echo "Layer $layer_name version $LAYER_VERSION already exists in region $region, skipping..." 117 | continue 118 | elif [ $latest_version -lt $((LAYER_VERSION-1)) ]; then 119 | read -p "WARNING: The latest version of layer $layer_name in region $region is $latest_version, publish all the missing versions including $LAYER_VERSION or EXIT the script (y/n)?" CONT 120 | if [ "$CONT" != "y" ]; then 121 | echo "Exiting" 122 | exit 1 123 | fi 124 | fi 125 | 126 | index=$(index_of_layer $layer_name) 127 | aws_version_key="${PYTHON_VERSIONS_FOR_AWS_CLI[$index]}" 128 | layer_path="${LAYER_PATHS[$index]}" 129 | 130 | while [ $latest_version -lt $LAYER_VERSION ]; do 131 | latest_version=$(publish_layer $region $layer_name $aws_version_key $layer_path) 132 | echo "Published version $latest_version for layer $layer_name in region $region" 133 | 134 | # This shouldn't happen unless someone manually deleted the latest version, say 28, 135 | # and then tried to republish it. The published version will actually be 29, because 136 | # Lambda layers are immutable and AWS skips the deleted version number and uses the next one. 137 | if [ $latest_version -gt $LAYER_VERSION ]; then 138 | echo "ERROR: Published version $latest_version is greater than the desired version $LAYER_VERSION!" 139 | echo "Exiting" 140 | exit 1 141 | fi 142 | done 143 | done 144 | done 145 | 146 | echo "Done !"
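# A hedged usage sketch for this script; the version numbers, region, and layer name below are illustrative assumptions, not defaults:
#   FORWARDER_VERSION=3.50.0 LAYER_VERSION=50 REGIONS=us-east-1 LAYERS=Datadog-Forwarder ./publish_layers.sh
# REGIONS and LAYERS may be omitted to target every available region and layer; set NO_INPUT=true to skip the confirmation prompt.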
147 | -------------------------------------------------------------------------------- /aws/logs_monitoring/tools/integration_tests/snapshots/cloudwatch_log_fsx_windows.json~snapshot: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | { 4 | "data": [ 5 | { 6 | "aws": { 7 | "awslogs": { 8 | "logGroup": "/aws/fsx/windows/12345", 9 | "logStream": "123456789123_us-east-1", 10 | "owner": "123456789123" 11 | }, 12 | "function_version": "$LATEST", 13 | "invoked_function_arn": "arn:aws:lambda:us-east-1:0000000000:function:test" 14 | }, 15 | "ddsource": "aws.fsx", 16 | "ddsourcecategory": "aws", 17 | "ddtags": "forwardername:test,forwarder_memorysize:1536,forwarder_version:", 18 | "host": "/aws/fsx/windows/12345", 19 | "id": "35689263648391837472973739781728019701390240798247944192", 20 | "message": "4663101280000x8020000000000000294054Securityamznfsxjgnfqf2v.fsx.demo.comS-1-5-21-1387100404-3545110199-3154596375-1113Adminfsx0xbc9cfccSecurityFile\\Device\\HarddiskVolume13\\share\\My first folder0x1350%%44230x800x4S:AI", 21 | "service": "aws.fsx", 22 | "timestamp": 1600361930988 23 | } 24 | ], 25 | "headers": { 26 | "Accept": "*/*", 27 | "Accept-Encoding": "gzip, deflate", 28 | "Connection": "keep-alive", 29 | "Content-Length": "", 30 | "Content-type": "application/json", 31 | "DD-API-KEY": "abcdefghijklmnopqrstuvwxyz012345", 32 | "DD-EVP-ORIGIN": "aws_forwarder", 33 | "DD-EVP-ORIGIN-VERSION": "", 34 | "Host": "recorder:8080", 35 | "User-Agent": "", 36 | "x-datadog-parent-id": "", 37 | "x-datadog-sampling-priority": "2", 38 | "x-datadog-trace-id": "4842834437835386637" 39 | }, 40 | "path": "/api/v2/logs", 41 | "verb": "POST" 42 | }, 43 | { 44 | "data": { 45 | "series": [ 46 | { 47 | "device": null, 48 | "host": null, 49 | "interval": 10, 50 | "metric": "aws.dd_forwarder.list_tags_log_group_api_call", 51 | "points": "", 52 | "tags": [ 53 | "forwardername:test", 54 | "forwarder_memorysize:1536", 55 | "forwarder_version:", 56 | "event_type:awslogs" 57 | ], 58 | "type": "distribution" 59 | }, 60 | { 61 | "device": null, 62 | "host": null, 63 | "interval": 10, 64 | "metric": "aws.dd_forwarder.incoming_events", 65 | "points": "", 66 | "tags": [ 67 | "forwardername:test", 68 | "forwarder_memorysize:1536", 69 | "forwarder_version:", 70 | "event_type:awslogs" 71 | ], 72 | "type": "distribution" 73 | }, 74 | { 75 | "device": null, 76 | "host": null, 77 | "interval": 10, 78 | "metric": "aws.dd_forwarder.logs_forwarded", 79 | "points": "", 80 | "tags": [ 81 | "forwardername:test", 82 | "forwarder_memorysize:1536", 83 | "forwarder_version:", 84 | "event_type:awslogs" 85 | ], 86 | "type": "distribution" 87 | }, 88 | { 89 | "device": null, 90 | "host": null, 91 | "interval": 10, 92 | "metric": "aws.dd_forwarder.metrics_forwarded", 93 | "points": "", 94 | "tags": [ 95 | "forwardername:test", 96 | "forwarder_memorysize:1536", 97 | "forwarder_version:", 98 | "event_type:awslogs" 99 | ], 100 | "type": "distribution" 101 | } 102 | ] 103 | }, 104 | "headers": { 105 | "Accept": "*/*", 106 | "Accept-Encoding": "gzip, deflate", 107 | "Connection": "keep-alive", 108 | "Content-Encoding": "deflate", 109 | "Content-Length": "", 110 | "Content-Type": "application/json", 111 | "Host": "recorder:8080", 112 | "User-Agent": "", 113 | "x-datadog-parent-id": "", 114 | "x-datadog-sampling-priority": "2", 115 | "x-datadog-trace-id": "4842834437835386637" 116 | }, 117 | "path": "/api/v1/distribution_points?api_key=abcdefghijklmnopqrstuvwxyz012345", 118 | "verb": "POST" 119 | } 120 
| ] 121 | } -------------------------------------------------------------------------------- /azure/eventhub_log_forwarder/function_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "functionAppName": { 6 | "type": "string", 7 | "defaultValue": "[concat('datadog-functionapp-', newGuid())]", 8 | "metadata": { 9 | "description": "The name of the function app " 10 | } 11 | }, 12 | "functionName": { 13 | "type": "string", 14 | "defaultValue": "datadog-function", 15 | "metadata": { 16 | "description": "The name of the function." 17 | } 18 | }, 19 | "eventhubName": { 20 | "type": "string", 21 | "defaultValue": "datadog-eventhub", 22 | "metadata": { 23 | "description": "The name of the eventhub." 24 | } 25 | }, 26 | "eventhubNamespace": { 27 | "type": "string", 28 | "metadata": { 29 | "description": "The name of the eventhub namespace." 30 | } 31 | }, 32 | "functionCode": { 33 | "type": "string", 34 | "metadata": { 35 | "description": "Code for the function to run, saved into index.js" 36 | } 37 | }, 38 | "apiKey": { 39 | "type": "securestring", 40 | "metadata": { 41 | "description": "Datadog API key" 42 | } 43 | }, 44 | "datadogTags": { 45 | "type": "string", 46 | "defaultValue": "", 47 | "metadata": { 48 | "description": "Comma-separated list of tags" 49 | } 50 | }, 51 | "location": { 52 | "type": "string", 53 | "defaultValue": "[resourceGroup().location]", 54 | "metadata": { 55 | "description": "Location for all resources." 56 | } 57 | }, 58 | "datadogSite": { 59 | "type": "string", 60 | "defaultValue": "datadoghq.com", 61 | "metadata": { 62 | "description": "Datadog site to send logs" 63 | } 64 | }, 65 | "endpointSuffix": { 66 | "type": "string", 67 | "defaultValue": "core.windows.net", 68 | "metadata": { 69 | "description": "Endpoint suffix for storage account" 70 | } 71 | } 72 | }, 73 | "variables": { 74 | "storageAccountName": "[concat(uniquestring(resourceGroup().id), 'storageacct')]", 75 | "connectionStringKey": "[concat('Datadog-',parameters('eventhubNamespace'),'-AccessKey')]", 76 | "authRule": "[resourceId('Microsoft.EventHub/namespaces/authorizationRules', parameters('eventhubNamespace'),'RootManageSharedAccessKey')]" 77 | }, 78 | "resources": [ 79 | { 80 | "type": "Microsoft.Storage/storageAccounts", 81 | "apiVersion": "2019-06-01", 82 | "name": "[variables('storageAccountName')]", 83 | "location": "[parameters('location')]", 84 | "kind": "StorageV2", 85 | "sku": { 86 | "name": "Standard_LRS" 87 | } 88 | }, 89 | { 90 | "apiVersion": "2018-11-01", 91 | "type": "Microsoft.Web/sites", 92 | "name": "[parameters('functionAppName')]", 93 | "location": "[parameters('location')]", 94 | "kind": "functionapp", 95 | "dependsOn": [ 96 | "[resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName'))]" 97 | ], 98 | "properties": { 99 | "name": "[parameters('functionAppName')]", 100 | "clientAffinityEnabled": false, 101 | "siteConfig": { 102 | "cors": { 103 | "allowedOrigins": [ 104 | "*" 105 | ] 106 | }, 107 | "appSettings": [ 108 | { 109 | "name": "FUNCTIONS_EXTENSION_VERSION", 110 | "value": "~4" 111 | }, 112 | { 113 | "name": "DD_API_KEY", 114 | "value": "[parameters('apiKey')]" 115 | }, 116 | { 117 | "name": "DD_SITE", 118 | "value": "[parameters('datadogSite')]" 119 | }, 120 | { 121 | "name": "DD_TAGS", 122 | "value": "[parameters('datadogTags')]" 123 | }, 124 | { 125 
| "name": "AzureWebJobsStorage", 126 | "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('storageAccountName'),';AccountKey=',listkeys(resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName')), '2018-11-01').keys[0].value,';','EndpointSuffix=',parameters('endpointSuffix'),';')]" 127 | }, 128 | { 129 | "name": "FUNCTIONS_WORKER_RUNTIME", 130 | "value": "node" 131 | }, 132 | { 133 | "name": "[variables('connectionStringKey')]", 134 | "value": "[listKeys(variables('authRule'),'2017-04-01').primaryConnectionString]" 135 | }, 136 | { 137 | "name": "WEBSITE_CONTENTAZUREFILECONNECTIONSTRING", 138 | "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('storageAccountName'),';AccountKey=',listkeys(resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName')), '2018-11-01').keys[0].value,';','EndpointSuffix=',parameters('endpointSuffix'),';')]" 139 | }, 140 | { 141 | "name": "WEBSITE_CONTENTSHARE", 142 | "value": "[toLower(parameters('functionAppName'))]" 143 | }, 144 | { 145 | "name": "WEBSITE_NODE_DEFAULT_VERSION", 146 | "value": "~16" 147 | } 148 | ] 149 | } 150 | } 151 | }, 152 | { 153 | "name": "[concat(parameters('functionAppName'), '/', parameters('functionName'))]", 154 | "type": "Microsoft.Web/sites/functions", 155 | "apiVersion": "2020-06-01", 156 | "properties": { 157 | "config": { 158 | "bindings": [ 159 | { 160 | "name": "eventHubMessages", 161 | "type": "eventHubTrigger", 162 | "direction": "in", 163 | "eventHubName": "[parameters('eventhubName')]", 164 | "connection": "[variables('connectionStringKey')]", 165 | "cardinality": "many", 166 | "dataType": "", 167 | "consumerGroup": "$Default" 168 | } 169 | ], 170 | "disabled": false 171 | }, 172 | "files": { 173 | "index.js": "[parameters('functionCode')]" 174 | } 175 | }, 176 | "dependsOn": [ 177 | "[resourceId('Microsoft.Web/sites', parameters('functionAppName'))]" 178 | ] 179 | } 180 | ] 181 | } 182 | -------------------------------------------------------------------------------- /azure/deploy-to-azure/parent_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "datadogSite": { 6 | "type": "string", 7 | "defaultValue": "datadoghq.com", 8 | "metadata": { 9 | "description": "Datadog site to send logs" 10 | } 11 | }, 12 | "sendActivityLogs": { 13 | "type": "bool", 14 | "metadata": { 15 | "description": "Enable Activity Logs forwarding and create subscription diagnostic settings" 16 | }, 17 | "defaultValue": false 18 | }, 19 | "apiKey": { 20 | "type": "securestring", 21 | "metadata": { 22 | "description": "Datadog API key" 23 | } 24 | }, 25 | "eventHubNamespace": { 26 | "type": "string", 27 | "defaultValue": "[concat('datadog-ns-', newGuid())]", 28 | "metadata": { 29 | "description": "Name of EventHub namespace, which must be globally unique." 
30 | } 31 | }, 32 | "eventHubName": { 33 | "type": "string", 34 | "defaultValue": "datadog-eventhub", 35 | "metadata": { 36 | "description": "Name of Event Hub" 37 | } 38 | }, 39 | "partitionCount": { 40 | "type": "int", 41 | "defaultValue": 32, 42 | "metadata": { 43 | "description": "The number of event hub partitions" 44 | } 45 | }, 46 | "functionAppName": { 47 | "type": "string", 48 | "defaultValue": "[concat('datadog-functionapp-', newGuid())]", 49 | "metadata": { 50 | "description": "The name of the function app " 51 | } 52 | }, 53 | "functionName": { 54 | "type": "string", 55 | "defaultValue": "datadog-function", 56 | "metadata": { 57 | "description": "The name of the function." 58 | } 59 | }, 60 | "resourcesLocation": { 61 | "type": "string", 62 | "defaultValue": "[resourceGroup().location]", 63 | "metadata": { 64 | "description": "Specify a location for the Azure resources." 65 | } 66 | }, 67 | "storageEndpointSuffix": { 68 | "type": "string", 69 | "defaultValue": "[environment().suffixes.storage]", 70 | "metadata": { 71 | "description": "Endpoint suffix for storage account" 72 | } 73 | }, 74 | "diagnosticSettingName": { 75 | "type": "string", 76 | "defaultValue": "datadog-activity-logs-diagnostic-setting", 77 | "metadata": { 78 | "description": "The name of the diagnostic setting if sending Activity Logs" 79 | } 80 | } 81 | }, 82 | "variables": { 83 | "eventHubTemplateLink": "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/deploy-to-azure/event_hub.json", 84 | "functionAppTemplateLink": "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/deploy-to-azure/function_template.json", 85 | "activityLogDiagnosticSettingsTemplateLink": "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/master/azure/deploy-to-azure/activity_log_diagnostic_settings.json" 86 | }, 87 | "resources": [ 88 | { 89 | "name": "eventHubTemplate", 90 | "type": "Microsoft.Resources/deployments", 91 | "apiVersion": "2018-05-01", 92 | "properties": { 93 | "mode": "Incremental", 94 | "templateLink": { 95 | "uri": "[variables('eventHubTemplateLink')]", 96 | "contentVersion": "1.0.0.0" 97 | }, 98 | "parameters": { 99 | "eventHubNamespace": { 100 | "value": "[parameters('eventHubNamespace')]" 101 | }, 102 | "eventHubName": { 103 | "value": "[parameters('eventHubName')]" 104 | }, 105 | "partitionCount": { 106 | "value": "[parameters('partitionCount')]" 107 | }, 108 | "location": { 109 | "value": "[parameters('resourcesLocation')]" 110 | } 111 | } 112 | } 113 | }, 114 | { 115 | "name": "functionAppTemplate", 116 | "type": "Microsoft.Resources/deployments", 117 | "apiVersion": "2018-05-01", 118 | "properties": { 119 | "mode": "Incremental", 120 | "templateLink": { 121 | "uri": "[variables('functionAppTemplateLink')]", 122 | "contentVersion": "1.0.0.0" 123 | }, 124 | "parameters": { 125 | "eventHubNamespace": { 126 | "value": "[parameters('eventHubNamespace')]" 127 | }, 128 | "eventHubName": { 129 | "value": "[parameters('eventHubName')]" 130 | }, 131 | "functionAppName": { 132 | "value": "[parameters('functionAppName')]" 133 | }, 134 | "functionName": { 135 | "value": "[parameters('functionName')]" 136 | }, 137 | "apiKey": { 138 | "value": "[parameters('apiKey')]" 139 | }, 140 | "location": { 141 | "value": "[parameters('resourcesLocation')]" 142 | }, 143 | "datadogSite": { 144 | "value": "[parameters('datadogSite')]" 145 | }, 146 | "endpointSuffix": { 147 | "value": "[parameters('storageEndpointSuffix')]" 148 | } 149 | } 150 | }, 151 | 
"dependsOn": [ 152 | "[resourceId('Microsoft.Resources/deployments','eventHubTemplate')]" 153 | ] 154 | }, 155 | { 156 | "condition": "[parameters('sendActivityLogs')]", 157 | "type": "Microsoft.Resources/deployments", 158 | "name": "activityLogDiagnosticSettingsTemplate", 159 | "apiVersion": "2018-05-01", 160 | "properties": { 161 | "mode": "Incremental", 162 | "templateLink": { 163 | "uri": "[variables('activityLogDiagnosticSettingsTemplateLink')]", 164 | "contentVersion": "1.0.0.0" 165 | }, 166 | "parameters": { 167 | "settingName": { 168 | "value": "[parameters('diagnosticSettingName')]" 169 | }, 170 | "resourceGroup": { 171 | "value": "[resourceGroup().name]" 172 | }, 173 | "eventHubNamespace": { 174 | "value": "[parameters('eventHubNamespace')]" 175 | }, 176 | "eventHubName": { 177 | "value": "[parameters('eventHubName')]" 178 | } 179 | } 180 | }, 181 | "dependsOn": [ 182 | "[resourceId('Microsoft.Resources/deployments','functionAppTemplate')]" 183 | ], 184 | "subscriptionId": "[subscription().subscriptionId]", 185 | "location": "[parameters('resourcesLocation')]" 186 | } 187 | ], 188 | "outputs": { 189 | "eventHubNamespace": { 190 | "type": "string", 191 | "value": "[parameters('eventHubNamespace')]" 192 | } 193 | } 194 | } --------------------------------------------------------------------------------