├── .gitignore ├── eslint.config.js ├── commitlint.config.js ├── tsconfig.json ├── examples ├── typescript │ ├── README.md │ └── example.ts ├── minimalistic │ ├── minimalistic.js │ └── README.md ├── grafana-loki │ ├── otel-collector-config.yaml │ ├── README.md │ └── docker-compose.yaml ├── using-multiple-record-processors │ ├── README.md │ └── multiple-processors.js └── trace-context │ ├── http-server.js │ ├── trace-instrumentation.js │ └── README.md ├── docker-compose.yaml ├── .github ├── workflows │ ├── release-please.yml │ ├── ci.yml │ └── publish.yml └── dependabot.yml ├── otel-collector-config.yaml ├── test ├── types │ └── pino-opentelemetry-transport.test-d.ts └── lib │ ├── opentelemetry-mapper.test.js │ └── pino-opentelemetry-transport.test.js ├── LICENSE ├── lib ├── pino-opentelemetry-transport.js └── opentelemetry-mapper.js ├── package.json ├── CHANGELOG.md └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | *.log 3 | *.nyc_output/ 4 | *.d.ts 5 | *.d.ts.map 6 | coverage 7 | package-lock.json 8 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const neostandard = require('neostandard') 4 | 5 | module.exports = neostandard({ 6 | ts: true, 7 | ignores: ['node_modules/', 'coverage/', '.nyc_output/', 'types/'] 8 | }) 9 | -------------------------------------------------------------------------------- /commitlint.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: ['@commitlint/config-conventional'], 3 | // We need this until https://github.com/dependabot/dependabot-core/issues/2445 4 | // is resolved. 5 | ignores: [msg => /Signed-off-by: dependabot\[bot]/m.test(msg)] 6 | } 7 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "include": ["lib/pino-opentelemetry-transport.js"], 3 | "compilerOptions": { 4 | "skipLibCheck": true, 5 | "allowJs": true, 6 | "declaration": true, 7 | "emitDeclarationOnly": true, 8 | "outDir": "types/", 9 | "declarationMap": true 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /examples/typescript/README.md: -------------------------------------------------------------------------------- 1 | # TypeScript 2 | 3 | ## Running the example 4 | 5 | 1. Generate types 6 | ``` 7 | npm run generate-types 8 | ``` 9 | 10 | 2. Run the [docker compose file](/docker-compose.yaml) in the root of the repo, which will boot the OTLP collector: 11 | `docker compose up` 12 | 13 | 3. 
Run the service 14 | 15 | ``` 16 | npx ts-node example.ts 17 | ``` 18 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | services: 3 | otel-collector: 4 | image: otel/opentelemetry-collector-contrib:0.112.0 5 | command: ["--config=/etc/otel-collector-config.yaml"] 6 | volumes: 7 | - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml 8 | - /tmp/test-logs:/etc/test-logs 9 | ports: 10 | - "4317:4317" # OTLP gRPC receiver 11 | - "4318:4318" # OTLP HTTP receiver 12 | -------------------------------------------------------------------------------- /examples/minimalistic/minimalistic.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const path = require('path') 4 | const pino = require('pino') 5 | 6 | const transport = pino.transport({ 7 | target: path.join(__dirname, '..', '..', 'lib', 'pino-opentelemetry-transport') 8 | }) 9 | 10 | const logger = pino(transport) 11 | 12 | transport.on('ready', () => { 13 | setInterval(() => { 14 | logger.info('test log') 15 | }, 1000) 16 | }) 17 | -------------------------------------------------------------------------------- /.github/workflows/release-please.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | 6 | permissions: 7 | contents: write 8 | pull-requests: write 9 | 10 | name: release-please 11 | 12 | jobs: 13 | release-please: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: google-github-actions/release-please-action@v4 17 | with: 18 | release-type: node 19 | package-name: automatic-octo-rotary-phone 20 | -------------------------------------------------------------------------------- /examples/grafana-loki/otel-collector-config.yaml: -------------------------------------------------------------------------------- 1 | receivers: 2 | otlp: 3 | protocols: 4 | grpc: 5 | endpoint: 0.0.0.0:4317 6 | http: 7 | endpoint: 0.0.0.0:4318 8 | 9 | exporters: 10 | loki: 11 | endpoint: http://loki:3100/loki/api/v1/push 12 | 13 | processors: 14 | batch: 15 | 16 | service: 17 | pipelines: 18 | logs: 19 | receivers: [otlp] 20 | processors: [] 21 | exporters: [loki] 22 | 23 | -------------------------------------------------------------------------------- /otel-collector-config.yaml: -------------------------------------------------------------------------------- 1 | receivers: 2 | otlp: 3 | protocols: 4 | grpc: 5 | endpoint: 0.0.0.0:4317 6 | http: 7 | endpoint: 0.0.0.0:4318 8 | 9 | exporters: 10 | file: 11 | path: ./etc/test-logs/otlp-logs.log 12 | flush_interval: 1 13 | 14 | debug: 15 | verbosity: detailed 16 | 17 | processors: 18 | batch: 19 | 20 | service: 21 | pipelines: 22 | logs: 23 | receivers: [otlp] 24 | processors: [] 25 | exporters: [debug, file] 26 | -------------------------------------------------------------------------------- /examples/minimalistic/README.md: -------------------------------------------------------------------------------- 1 | ### Minimalistic example 2 | 3 | ## Running the example 4 | 5 | 1. Run the [docker compose file](/docker-compose.yaml) in the root of the repo, which will boot the OTLP collector: 6 | `docker compose up` 7 | 8 | 2. 
Run the service setting the `OTEL_EXPORTER_OTLP_LOGS_PROTOCOL`, `OTEL_EXPORTER_OTLP_LOGS_ENDPOINT`, and `OTEL_RESOURCE_ATTRIBUTES` env vars 9 | 10 | ``` 11 | OTEL_EXPORTER_OTLP_LOGS_PROTOCOL='grpc' OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=http://localhost:4317 OTEL_RESOURCE_ATTRIBUTES="service.name=my-service,service.version=1.2.3" node minimalistic.js 12 | ``` 13 | 14 | -------------------------------------------------------------------------------- /examples/using-multiple-record-processors/README.md: -------------------------------------------------------------------------------- 1 | # Using Multiple Record Processors 2 | 3 | Logs can be exported with multiple record processors, each using different exporter. Instead of providing `logRecordProcessorOptions` as a single object, an array of `logRecordProcessorOptions` must be provided instead. 4 | 5 | Supported processors: `batch`, `simple` 6 | 7 | Supported exporters: `console`, `grpc`, `http`, `http/protobuf` 8 | 9 | ## Running the example 10 | 11 | 1. Run the [docker compose file](/docker-compose.yaml) in the root of the repo, which will boot the OTLP collector: 12 | `docker compose up` 13 | 14 | 2. Run the service 15 | 16 | ``` 17 | node multiple-processors.js 18 | ``` 19 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | open-pull-requests-limit: 10 8 | commit-message: 9 | prefix: fix 10 | prefix-development: chore 11 | include: scope 12 | 13 | - package-ecosystem: "npm" 14 | directory: "/" 15 | schedule: 16 | interval: "daily" 17 | open-pull-requests-limit: 10 18 | commit-message: 19 | prefix: fix 20 | prefix-development: chore 21 | include: scope 22 | groups: 23 | production-dependencies: 24 | dependency-type: "production" 25 | development-dependencies: 26 | dependency-type: "development" 27 | -------------------------------------------------------------------------------- /examples/trace-context/http-server.js: -------------------------------------------------------------------------------- 1 | // https://nodejs.dev/en/learn/#an-example-nodejs-application 2 | const http = require('http') 3 | const pino = require('pino') 4 | const path = require('path') 5 | const transport = pino.transport({ 6 | target: path.join(__dirname, '..', '..', 'lib', 'pino-opentelemetry-transport') 7 | }) 8 | 9 | const logger = pino(transport) 10 | 11 | const hostname = '127.0.0.1' 12 | const port = 8080 13 | 14 | const server = http.createServer((req, res) => { 15 | res.statusCode = 200 16 | res.setHeader('Content-Type', 'text/plain') 17 | logger.info({ msg: 'test log', foo: 'bar' }) 18 | res.end('Hello World\n') 19 | }) 20 | 21 | server.listen(port, hostname, () => { 22 | logger.info(`Server running at http://${hostname}:${port}/`) 23 | }) 24 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | paths-ignore: 6 | - 'docs/**' 7 | - '*.md' 8 | pull_request: 9 | paths-ignore: 10 | - 'docs/**' 11 | - '*.md' 12 | 13 | jobs: 14 | build: 15 | runs-on: ${{ matrix.os }} 16 | strategy: 17 | matrix: 18 | node-version: [20, 22, 24] 19 | pino-version: [^10.0.0] 20 | os: [ubuntu-latest] 21 | steps: 22 | - name: Checkout 23 | uses: 
actions/checkout@v6 24 | 25 | - name: Use Node.js 26 | uses: actions/setup-node@v6 27 | with: 28 | node-version: ${{ matrix.node-version }} 29 | 30 | - name: Install dependencies 31 | run: npm install 32 | 33 | - name: Install pino 34 | run: npm install --no-save pino@${{ matrix.pino-version }} 35 | 36 | - name: Run Tests 37 | run: npm run test 38 | -------------------------------------------------------------------------------- /test/types/pino-opentelemetry-transport.test-d.ts: -------------------------------------------------------------------------------- 1 | import { expectType } from 'tsd' 2 | import { OnUnknown } from 'pino-abstract-transport' 3 | import { Transform } from 'stream' 4 | 5 | import transport from '../../lib/pino-opentelemetry-transport' 6 | 7 | expectType>( 8 | transport({ 9 | loggerName: 'test', 10 | serviceVersion: '1.0.0' 11 | }) 12 | ) 13 | expectType>( 14 | transport({ 15 | loggerName: 'test', 16 | serviceVersion: '1.0.0', 17 | resourceAttributes: { 'service.name': 'test' } 18 | }) 19 | ) 20 | expectType>( 21 | transport({ 22 | loggerName: 'test', 23 | serviceVersion: '1.0.0' 24 | }) 25 | ) 26 | expectType>( 27 | transport({ 28 | loggerName: 'test', 29 | serviceVersion: '1.0.0', 30 | severityNumberMap: { 31 | 35: 10 32 | } 33 | }) 34 | ) 35 | -------------------------------------------------------------------------------- /examples/using-multiple-record-processors/multiple-processors.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = require('pino') 4 | const path = require('path') 5 | 6 | const transport = pino.transport({ 7 | target: path.join(__dirname, '..', '..', 'lib', 'pino-opentelemetry-transport'), 8 | options: { 9 | logRecordProcessorOptions: [ 10 | { recordProcessorType: 'batch', exporterOptions: { protocol: 'http' } }, 11 | { 12 | recordProcessorType: 'batch', 13 | exporterOptions: { 14 | protocol: 'grpc', 15 | grpcExporterOptions: { 16 | headers: { foo: 'some custom header' } 17 | } 18 | } 19 | }, 20 | { 21 | recordProcessorType: 'simple', 22 | exporterOptions: { protocol: 'console' } 23 | } 24 | ], 25 | loggerName: 'test-logger', 26 | serviceVersion: '1.0.0' 27 | } 28 | }) 29 | 30 | const logger = pino(transport) 31 | 32 | transport.on('ready', () => { 33 | setInterval(() => { 34 | logger.info('test log') 35 | }, 1000) 36 | }) 37 | -------------------------------------------------------------------------------- /examples/trace-context/trace-instrumentation.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const process = require('process') 4 | const opentelemetry = require('@opentelemetry/sdk-node') 5 | const { HttpInstrumentation } = require('@opentelemetry/instrumentation-http') 6 | const { PinoInstrumentation } = require('@opentelemetry/instrumentation-pino') 7 | const { resourceFromAttributes } = require('@opentelemetry/resources') 8 | const { ConsoleSpanExporter } = require('@opentelemetry/sdk-trace-node') 9 | 10 | const traceExporter = new ConsoleSpanExporter() 11 | 12 | const instrumentations = [new HttpInstrumentation(), new PinoInstrumentation()] 13 | 14 | const sdk = new opentelemetry.NodeSDK({ 15 | resource: resourceFromAttributes({ 16 | 'service.name': 'Pino OpenTelemetry Example' 17 | }), 18 | traceExporter, 19 | instrumentations 20 | }) 21 | 22 | sdk.start() 23 | 24 | process.on('SIGTERM', () => { 25 | sdk 26 | .shutdown() 27 | .then(() => console.log('Tracing terminated')) 28 | .catch(error => 
console.log('Error terminating tracing', error)) 29 | .finally(() => process.exit(0)) 30 | }) 31 | -------------------------------------------------------------------------------- /examples/grafana-loki/README.md: -------------------------------------------------------------------------------- 1 | # Sending logs to Grafana Loki 2 | 3 | ```mermaid 4 | graph LR; 5 | pino["Pino Opentelemetry Transport"] 6 | otel["OTEL Collector"] 7 | loki["Loki"] 8 | grafana["Grafana"] 9 | pino-->otel; 10 | otel-->loki; 11 | loki-->grafana; 12 | ``` 13 | 14 | ## Running the example 15 | 16 | ### Local infrastructure 17 | 18 | Run the required infra locally with 19 | `docker compose up` 20 | 21 | It will boot [Grafana](https://grafana.com/docs/grafana/latest/), [Loki](https://grafana.com/docs/loki/latest/) and an [Opentelemetry Collector](https://opentelemetry.io/docs/collector/). 22 | 23 | Loki ingester readiness can be checked at 24 | [http://localhost:3100/ready](http://localhost:3100/ready). 25 | 26 | The logs can be inspected in Grafana UI at 27 | [http://localhost:3000/explore](http://localhost:3000/explore). 28 | 29 | [This article](https://grafana.com/docs/opentelemetry/visualization/loki-data/) explains how to inspect OTLP data in the Grafana UI. 30 | 31 | ### Generating logs 32 | Run the [trace-context](../trace-context) example, but skip the `docker compose up` part 33 | 34 | 35 | -------------------------------------------------------------------------------- /examples/typescript/example.ts: -------------------------------------------------------------------------------- 1 | import pino from 'pino' 2 | import { join } from 'path' 3 | // import type { Options } from 'pino-opentelemetry-transport' 4 | import type { Options } from '../../' 5 | 6 | const transport = pino.transport({ 7 | // target: 'pino-opentelemetry-transport', 8 | target: join(__dirname, '..', '..', 'lib', 'pino-opentelemetry-transport'), 9 | options: { 10 | logRecordProcessorOptions: [ 11 | { recordProcessorType: 'batch', exporterOptions: { protocol: 'http' } }, 12 | { 13 | recordProcessorType: 'batch', 14 | exporterOptions: { 15 | protocol: 'grpc', 16 | grpcExporterOptions: { 17 | headers: { foo: 'some custom header' } 18 | } 19 | } 20 | }, 21 | { 22 | recordProcessorType: 'simple', 23 | exporterOptions: { protocol: 'console' } 24 | } 25 | ], 26 | loggerName: 'test-logger', 27 | serviceVersion: '1.0.0' 28 | } 29 | }) 30 | 31 | const logger = pino(transport) 32 | 33 | transport.on('ready', () => { 34 | setInterval(() => { 35 | logger.info('test log') 36 | }, 1000) 37 | }) 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Vladimir Adamić 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lib/pino-opentelemetry-transport.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const build = require('pino-abstract-transport') 4 | const { getOtlpLogger } = require('otlp-logger') 5 | const { toOpenTelemetry } = require('./opentelemetry-mapper') 6 | 7 | /** 8 | * Pino OpenTelemetry transport 9 | * 10 | * @typedef {Object} PinoOptions 11 | * @property {Object.} [severityNumberMap] 12 | * 13 | * @typedef {PinoOptions & import('otlp-logger').Options} Options 14 | * 15 | * @param { Options } opts 16 | */ 17 | module.exports = async function ({ severityNumberMap, ...loggerOpts } = {}) { 18 | const logger = getOtlpLogger(loggerOpts) 19 | 20 | return build( 21 | async function (/** @type { AsyncIterable } */ source) { 22 | const mapperOptions = { 23 | messageKey: source.messageKey, 24 | levels: source.levels, 25 | severityNumberMap 26 | } 27 | for await (const obj of source) { 28 | logger.emit(toOpenTelemetry(obj, mapperOptions)) 29 | } 30 | }, 31 | { 32 | async close () { 33 | return logger.shutdown() 34 | }, 35 | expectPinoConfig: true 36 | } 37 | ) 38 | } 39 | -------------------------------------------------------------------------------- /examples/trace-context/README.md: -------------------------------------------------------------------------------- 1 | # HTTP Server with trace context propagation 2 | 3 | ## Running the example 4 | 5 | 1. Run the [docker compose file](/docker-compose.yaml) in the root of the repo, which will boot the OTLP collector: 6 | `docker compose up` 7 | 8 | 2. Run the http server by [preloading](https://opentelemetry.io/docs/instrumentation/js/getting-started/nodejs/#run-the-instrumented-app) the instrumentation code. 9 | ``` 10 | node -r "./trace-instrumentation.js" http-server.js 11 | ``` 12 | 13 | 3. Access the app at [http://localhost:8080](http://localhost:8080) 14 | 15 | 16 | ### Explanation 17 | 18 | The request handler function in http-server will create a log entry. Since pino is instrumented with [@opentelemetry/instrumentation-pino](https://www.npmjs.com/package/@opentelemetry/instrumentation-pino) in `trace-instrumentation.js`, it will also add the span context values as attributes [(`trace_id`, `span_id`, `trace_flags`)](https://github.com/open-telemetry/opentelemetry-js-contrib/tree/main/plugins/node/opentelemetry-instrumentation-pino#fields-added-to-pino-log-objects). 19 | 20 | [pino-opentelemetry-transport](https://www.npmjs.com/package/pino-opentelemetry-transport) will read those attributes and add them to the current context. The resulting LogRecord will have those fields populated. Fields `trace_id`, `span_id`, `trace_flags` will not be visible in the LogRecord attributes. 
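If the instrumentation preload is skipped, the same correlation can be sketched by passing the span context fields explicitly on a log call (`logger` is the pino instance created in `http-server.js`; the hex IDs are illustrative placeholders):

```js
// Sketch: span context fields passed directly on the log object.
// Per the note above, the transport moves them into the emitted LogRecord's
// trace context rather than keeping them as plain attributes.
logger.info({
  trace_id: '12345678901234567890123456789012', // placeholder trace id
  span_id: '1234567890123456',                  // placeholder span id
  trace_flags: '01',
  foo: 'bar'
}, 'manually correlated log')
```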
21 | 22 | 23 | Observe the logs with: 24 | 25 | ```tail -f /tmp/test-logs/otlp-logs.log``` 26 | 27 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: publish-on-release-pr-merge 2 | 3 | on: 4 | pull_request: 5 | types: [closed] 6 | 7 | jobs: 8 | publish-npm: 9 | if: github.event.pull_request.merged == true && github.event.pull_request.user.login == 'github-actions[bot]' && contains(github.event.pull_request.head.ref, 'release-please') 10 | runs-on: ubuntu-latest 11 | permissions: 12 | contents: read 13 | id-token: write 14 | steps: 15 | - uses: actions/checkout@v6 16 | - name: Wait for release-please workflow to complete 17 | run: | 18 | merged_at=$(date -d"${{ github.event.pull_request.merged_at }}" +%s) 19 | while : ; do 20 | status=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ 21 | https://api.github.com/repos/${{ github.repository }}/actions/runs | \ 22 | jq -r --arg merged_at "$merged_at" \ 23 | '.workflow_runs[] | select(.name=="release-please" and (.created_at | sub("\\.[0-9]+Z$"; "Z") | fromdate | . > ($merged_at | tonumber))) | .status' | head -1) 24 | if [ "$status" = "completed" ]; then 25 | break 26 | fi 27 | echo "Waiting for release-please workflow to complete" 28 | sleep 20 29 | done 30 | - uses: actions/setup-node@v6 31 | with: 32 | node-version: 20 33 | registry-url: https://registry.npmjs.org/ 34 | - run: npm install 35 | - run: npm publish --provenance --access public 36 | env: 37 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 38 | -------------------------------------------------------------------------------- /examples/grafana-loki/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | 3 | networks: 4 | loki: 5 | 6 | services: 7 | loki: 8 | image: grafana/loki:2.9.0 9 | ports: 10 | - "3100:3100" 11 | command: -config.file=/etc/loki/local-config.yaml 12 | networks: 13 | - loki 14 | 15 | grafana: 16 | environment: 17 | - GF_PATHS_PROVISIONING=/etc/grafana/provisioning 18 | - GF_AUTH_ANONYMOUS_ENABLED=true 19 | - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin 20 | entrypoint: 21 | - sh 22 | - -euc 23 | - | 24 | mkdir -p /etc/grafana/provisioning/datasources 25 | cat < /etc/grafana/provisioning/datasources/ds.yaml 26 | apiVersion: 1 27 | datasources: 28 | - name: Loki 29 | type: loki 30 | access: proxy 31 | orgId: 1 32 | url: http://loki:3100 33 | basicAuth: false 34 | isDefault: true 35 | version: 1 36 | editable: false 37 | EOF 38 | /run.sh 39 | image: grafana/grafana:latest 40 | ports: 41 | - "3000:3000" 42 | networks: 43 | - loki 44 | depends_on: 45 | - loki 46 | 47 | otel-collector: 48 | image: otel/opentelemetry-collector-contrib:latest 49 | command: ["--config=/etc/otel-collector-config.yaml"] 50 | volumes: 51 | - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml 52 | - /tmp/test-logs:/etc/test-logs 53 | ports: 54 | - "4317:4317" # OTLP gRPC receiver 55 | - "4318:4318" # OTLP HTTP receiver 56 | networks: 57 | - loki 58 | depends_on: 59 | - loki 60 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pino-opentelemetry-transport", 3 | "version": "2.0.0", 4 | "description": "OpenTelemetry transport for Pino", 5 | "main": "lib/pino-opentelemetry-transport.js", 6 | "scripts": { 7 | 
"validate-and-test": "eslint && borp -T && tsd", 8 | "test": "npm run validate-and-test", 9 | "docker-run": "docker compose up", 10 | "generate-types": "tsc", 11 | "pretest-ci": "npm run generate-types", 12 | "pretest": "npm run generate-types", 13 | "prepack": "npm run generate-types" 14 | }, 15 | "author": "Vladimir Adamic ", 16 | "repository": "github:pinojs/pino-opentelemetry-transport", 17 | "license": "MIT", 18 | "dependencies": { 19 | "otlp-logger": "^1.1.4", 20 | "pino-abstract-transport": "^3.0.0" 21 | }, 22 | "types": "./types/pino-opentelemetry-transport.d.ts", 23 | "peerDependencies": { 24 | "pino": "^10.0.0" 25 | }, 26 | "devDependencies": { 27 | "@commitlint/cli": "^20.0.0", 28 | "@commitlint/config-conventional": "^20.0.0", 29 | "@fastify/pre-commit": "^2.0.2", 30 | "@opentelemetry/api": "^1.4.1", 31 | "@opentelemetry/api-logs": "^0.208.0", 32 | "@opentelemetry/instrumentation-http": "^0.208.0", 33 | "@opentelemetry/instrumentation-pino": "^0.55.0", 34 | "@opentelemetry/sdk-node": "^0.208.0", 35 | "@types/node": "^24.0.12", 36 | "borp": "^0.21.0", 37 | "eslint": "^9.39.1", 38 | "neostandard": "^0.12.2", 39 | "pino": "^10.1.0", 40 | "require-inject": "^1.4.4", 41 | "tar-stream": "^3.1.6", 42 | "testcontainers": "^11.0.0", 43 | "tsd": "^0.33.0", 44 | "typescript": "^5.2.2" 45 | }, 46 | "tsd": { 47 | "directory": "./test/types" 48 | }, 49 | "pre-commit": [ 50 | "test" 51 | ], 52 | "files": [ 53 | "types", 54 | "*.js" 55 | ] 56 | } 57 | -------------------------------------------------------------------------------- /lib/opentelemetry-mapper.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | /** 4 | * If the source format has only a single severity that matches the meaning of the range 5 | * then it is recommended to assign that severity the smallest value of the range. 6 | * https://github.com/open-telemetry/opentelemetry-specification/blob/fc8289b8879f3a37e1eba5b4e445c94e74b20359/specification/logs/data-model.md#mapping-of-severitynumber 7 | */ 8 | const DEFAULT_SEVERITY_NUMBER_MAP = { 9 | 10: 1, // TRACE 10 | 20: 5, // DEBUG 11 | 30: 9, // INFO 12 | 40: 13, // WARN 13 | 50: 17, // ERROR 14 | 60: 21 // FATAL 15 | } 16 | 17 | /** 18 | * @typedef {Object} CommonBindings 19 | * @property {string=} msg 20 | * @property {number=} level 21 | * @property {number=} time 22 | * @property {string=} hostname 23 | * @property {number=} pid 24 | * 25 | * @typedef {Record & CommonBindings} Bindings 26 | * 27 | */ 28 | 29 | /** 30 | * Converts a pino log object to an OpenTelemetry log object. 31 | * 32 | * @typedef {Object} MapperOptions 33 | * @property {string} messageKey 34 | * @property {import('pino').LevelMapping} levels 35 | * @property {Object.} [severityNumberMap] 36 | * 37 | * @param {Bindings} sourceObject 38 | * @param {MapperOptions} mapperOptions 39 | * @returns {import('@opentelemetry/api-logs').LogRecord} 40 | */ 41 | function toOpenTelemetry (sourceObject, { messageKey, levels, severityNumberMap = {} }) { 42 | const { 43 | time, 44 | level, 45 | hostname, 46 | pid, 47 | [messageKey]: msg, 48 | ...attributes 49 | } = sourceObject 50 | 51 | const severityNumber = 52 | severityNumberMap[sourceObject.level] ?? DEFAULT_SEVERITY_NUMBER_MAP[sourceObject.level] ?? 
0 53 | const severityText = levels.labels[sourceObject.level] 54 | 55 | return { 56 | timestamp: time, 57 | body: msg, 58 | severityNumber, 59 | attributes, 60 | severityText 61 | } 62 | } 63 | 64 | module.exports = { 65 | DEFAULT_SEVERITY_NUMBER_MAP, 66 | toOpenTelemetry 67 | } 68 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [2.0.0](https://github.com/pinojs/pino-opentelemetry-transport/compare/v1.1.0...v2.0.0) (2025-12-03) 4 | 5 | 6 | ### ⚠ BREAKING CHANGES 7 | 8 | * requires pino ^10.0.0 and Node.js 20+ 9 | 10 | ### Bug Fixes 11 | 12 | * **deps:** bump actions/checkout from 5 to 6 ([#236](https://github.com/pinojs/pino-opentelemetry-transport/issues/236)) ([1c8620c](https://github.com/pinojs/pino-opentelemetry-transport/commit/1c8620cbfeae67a7a26737941b07cee3aaf89d2d)) 13 | * **deps:** bump actions/setup-node from 4 to 5 ([#226](https://github.com/pinojs/pino-opentelemetry-transport/issues/226)) ([13349d6](https://github.com/pinojs/pino-opentelemetry-transport/commit/13349d686a5a1f55191f9b8c97cd8b2c6ff8733d)) 14 | * **deps:** bump pino-abstract-transport ([#228](https://github.com/pinojs/pino-opentelemetry-transport/issues/228)) ([ce179b0](https://github.com/pinojs/pino-opentelemetry-transport/commit/ce179b0d3b1a8bbc12a9863a3467064bcfaccbe9)) 15 | 16 | 17 | ### Miscellaneous Chores 18 | 19 | * drop Node 18 and pino <10 support ([#237](https://github.com/pinojs/pino-opentelemetry-transport/issues/237)) ([3fc10f7](https://github.com/pinojs/pino-opentelemetry-transport/commit/3fc10f7a7d445704921dc5714591d1c0a57841c4)) 20 | 21 | ## [1.1.0](https://github.com/pinojs/pino-opentelemetry-transport/compare/v1.0.1...v1.1.0) (2025-09-09) 22 | 23 | 24 | ### Features 25 | 26 | * update trace-instrumentation example ([#209](https://github.com/pinojs/pino-opentelemetry-transport/issues/209)) ([49d64c1](https://github.com/pinojs/pino-opentelemetry-transport/commit/49d64c112a02441ecbd13e3cc91fae9cf85760e6)) 27 | 28 | 29 | ### Bug Fixes 30 | 31 | * **deps:** bump actions/checkout from 4 to 5 ([#220](https://github.com/pinojs/pino-opentelemetry-transport/issues/220)) ([fa61ff6](https://github.com/pinojs/pino-opentelemetry-transport/commit/fa61ff6c88a24936b451aeb45adc204b28913ac3)) 32 | * handle undefined options in multiple targets configuration ([#218](https://github.com/pinojs/pino-opentelemetry-transport/issues/218)) ([9152e7a](https://github.com/pinojs/pino-opentelemetry-transport/commit/9152e7a57dda15945c35fed33f4d387322efaf89)), closes [#216](https://github.com/pinojs/pino-opentelemetry-transport/issues/216) 33 | 34 | ## [1.0.1](https://github.com/pinojs/pino-opentelemetry-transport/compare/v1.0.0...v1.0.1) (2024-09-10) 35 | 36 | 37 | ### Bug Fixes 38 | 39 | * **deps:** bump pino-abstract-transport ([#185](https://github.com/pinojs/pino-opentelemetry-transport/issues/185)) ([d11a46e](https://github.com/pinojs/pino-opentelemetry-transport/commit/d11a46e57bfdb953405f399b9c3bd5c6afa01a7c)) 40 | 41 | ## [1.0.0](https://github.com/pinojs/pino-opentelemetry-transport/compare/v0.6.0...v1.0.0) (2024-04-26) 42 | 43 | 44 | ### ⚠ BREAKING CHANGES 45 | 46 | * 47 | 48 | ### Features 49 | 50 | * use config sent by pino to support custom message key and log levels ([#165](https://github.com/pinojs/pino-opentelemetry-transport/issues/165)) ([2bf9bc8](https://github.com/pinojs/pino-opentelemetry-transport/commit/2bf9bc879bb0ff6e902b04ebbc89612d755ce927)) 51 
| 52 | ## [0.6.0](https://github.com/Vunovati/pino-opentelemetry-transport/compare/v0.5.0...v0.6.0) (2024-03-08) 53 | 54 | 55 | ### Features 56 | 57 | * support for custom log levels ([#146](https://github.com/Vunovati/pino-opentelemetry-transport/issues/146)) ([d3e0c82](https://github.com/Vunovati/pino-opentelemetry-transport/commit/d3e0c82c2810ed1cd48bd892d54d1627fc92f2b6)) 58 | 59 | 60 | ### Bug Fixes 61 | 62 | * **deps:** bump actions/setup-node from 3 to 4 ([#133](https://github.com/Vunovati/pino-opentelemetry-transport/issues/133)) ([c3735e7](https://github.com/Vunovati/pino-opentelemetry-transport/commit/c3735e7ebd38c5d1893238e893490d4b353d5662)) 63 | 64 | ## [0.5.0](https://github.com/Vunovati/pino-opentelemetry-transport/compare/v0.4.1...v0.5.0) (2024-02-09) 65 | 66 | 67 | ### Features 68 | 69 | * extract otlp-logger ([#130](https://github.com/Vunovati/pino-opentelemetry-transport/issues/130)) ([a2a2666](https://github.com/Vunovati/pino-opentelemetry-transport/commit/a2a2666f75befda0e4b25a9c4e8e53c6f94953e7)) 70 | 71 | 72 | ### Bug Fixes 73 | 74 | * **deps:** bump the production-dependencies group with 1 update ([#137](https://github.com/Vunovati/pino-opentelemetry-transport/issues/137)) ([86bba58](https://github.com/Vunovati/pino-opentelemetry-transport/commit/86bba58d558382571878bd4b91fa322bf850e853)) 75 | -------------------------------------------------------------------------------- /test/lib/opentelemetry-mapper.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('node:test') 4 | const assert = require('node:assert') 5 | const { toOpenTelemetry, DEFAULT_SEVERITY_NUMBER_MAP } = require('../../lib/opentelemetry-mapper') 6 | const { SeverityNumber } = require('@opentelemetry/api-logs') 7 | const pino = require('pino') 8 | 9 | const pinoLogLevels = pino.levels.values 10 | 11 | test('default severity number map', async () => { 12 | assert.deepStrictEqual(DEFAULT_SEVERITY_NUMBER_MAP, { 13 | [pinoLogLevels.trace]: SeverityNumber.TRACE, 14 | [pinoLogLevels.debug]: SeverityNumber.DEBUG, 15 | [pinoLogLevels.info]: SeverityNumber.INFO, 16 | [pinoLogLevels.warn]: SeverityNumber.WARN, 17 | [pinoLogLevels.error]: SeverityNumber.ERROR, 18 | [pinoLogLevels.fatal]: SeverityNumber.FATAL 19 | }) 20 | }) 21 | 22 | test('toOpenTelemetry maps all log levels correctly', async () => { 23 | const mapperOptions = { messageKey: 'msg', levels: pino.levels } 24 | const testStart = Date.now() 25 | const testLogEntryBase = { 26 | msg: 'test message', 27 | pid: 123, 28 | time: testStart, 29 | hostname: 'test-hostname' 30 | } 31 | 32 | const testTraceId = '12345678901234567890123456789012' 33 | const testSpanId = '1234567890123456' 34 | const testTraceFlags = '01' 35 | 36 | assert.deepStrictEqual( 37 | toOpenTelemetry( 38 | { 39 | ...testLogEntryBase, 40 | level: pinoLogLevels.trace, 41 | trace_id: testTraceId, 42 | span_id: testSpanId, 43 | trace_flags: testTraceFlags, 44 | testAttribute: 'test' 45 | }, 46 | mapperOptions 47 | ), 48 | { 49 | severityNumber: SeverityNumber.TRACE, 50 | severityText: 'trace', 51 | timestamp: testStart, 52 | body: 'test message', 53 | attributes: { 54 | testAttribute: 'test', 55 | span_id: testSpanId, 56 | trace_id: testTraceId, 57 | trace_flags: testTraceFlags 58 | } 59 | } 60 | ) 61 | 62 | const debugResult = toOpenTelemetry( 63 | { 64 | ...testLogEntryBase, 65 | level: pinoLogLevels.debug 66 | }, 67 | mapperOptions 68 | ) 69 | assert.strictEqual(debugResult.severityNumber, 
SeverityNumber.DEBUG) 70 | assert.strictEqual(debugResult.severityText, 'debug') 71 | 72 | const infoResult = toOpenTelemetry( 73 | { 74 | ...testLogEntryBase, 75 | level: pinoLogLevels.info 76 | }, 77 | mapperOptions 78 | ) 79 | assert.strictEqual(infoResult.severityNumber, SeverityNumber.INFO) 80 | assert.strictEqual(infoResult.severityText, 'info') 81 | 82 | const warnResult = toOpenTelemetry( 83 | { 84 | ...testLogEntryBase, 85 | level: pinoLogLevels.warn 86 | }, 87 | mapperOptions 88 | ) 89 | assert.strictEqual(warnResult.severityNumber, SeverityNumber.WARN) 90 | assert.strictEqual(warnResult.severityText, 'warn') 91 | 92 | const errorResult = toOpenTelemetry( 93 | { 94 | ...testLogEntryBase, 95 | level: pinoLogLevels.error 96 | }, 97 | mapperOptions 98 | ) 99 | assert.strictEqual(errorResult.severityNumber, SeverityNumber.ERROR) 100 | assert.strictEqual(errorResult.severityText, 'error') 101 | 102 | const fatalResult = toOpenTelemetry( 103 | { 104 | ...testLogEntryBase, 105 | level: pinoLogLevels.fatal 106 | }, 107 | { 108 | ...mapperOptions, 109 | severityNumberMap: { 110 | 35: SeverityNumber.INFO2 111 | } 112 | } 113 | ) 114 | assert.strictEqual(fatalResult.severityNumber, SeverityNumber.FATAL, 'use default severity numbers when level does not exist in severityNumberMap') 115 | assert.strictEqual(fatalResult.severityText, 'fatal') 116 | 117 | const customInfoResult = toOpenTelemetry( 118 | { 119 | ...testLogEntryBase, 120 | level: pinoLogLevels.info 121 | }, 122 | { 123 | ...mapperOptions, 124 | severityNumberMap: { 125 | [pinoLogLevels.info]: SeverityNumber.INFO3 126 | } 127 | } 128 | ) 129 | assert.strictEqual(customInfoResult.severityNumber, SeverityNumber.INFO3, 'use configured severity numbers for built-in levels') 130 | assert.strictEqual(customInfoResult.severityText, 'info') 131 | 132 | const customLevelResult = toOpenTelemetry( 133 | { 134 | ...testLogEntryBase, 135 | level: 35 136 | }, 137 | { 138 | ...mapperOptions, 139 | levels: { 140 | labels: { 141 | 35: 'custom' 142 | } 143 | }, 144 | severityNumberMap: { 145 | 35: SeverityNumber.INFO2 146 | } 147 | } 148 | ) 149 | assert.strictEqual(customLevelResult.severityNumber, SeverityNumber.INFO2, 'use configured severity numbers for custom levels') 150 | assert.strictEqual(customLevelResult.severityText, 'custom') 151 | 152 | const unmappedCustomLevelResult = toOpenTelemetry( 153 | { 154 | ...testLogEntryBase, 155 | level: 35 156 | }, 157 | { 158 | ...mapperOptions, 159 | levels: { 160 | labels: { 161 | 35: 'custom' 162 | } 163 | } 164 | } 165 | ) 166 | assert.strictEqual(unmappedCustomLevelResult.severityNumber, SeverityNumber.UNSPECIFIED, 'use UNSPECIFIED severity number when there is no match for the level') 167 | assert.strictEqual(unmappedCustomLevelResult.severityText, 'custom') 168 | }) 169 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pino-opentelemetry-transport 2 | 3 | [![npm version](https://img.shields.io/npm/v/pino-opentelemetry-transport)](https://www.npmjs.com/package/pino-opentelemetry-transport) 4 | [![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino-opentelemetry-transport/ci.yml)](https://github.com/pinojs/pino-opentelemetry-transport/actions) 5 | 6 | Pino transport for OpenTelemetry. 
Outputs logs in the [OpenTelemetry Log Data Model](https://github.com/open-telemetry/opentelemetry-specification/blob/fc8289b8879f3a37e1eba5b4e445c94e74b20359/specification/logs/data-model.md) and sends them to an OTLP logs collector. 7 | 8 | ## Install 9 | 10 | ```bash 11 | npm i pino-opentelemetry-transport 12 | ``` 13 | 14 | ## Configuration 15 | 16 | ### Protocol 17 | 18 | can be set to `http/protobuf`, `grpc`, `http` or `console` by using 19 | 20 | * env var `OTEL_EXPORTER_OTLP_PROTOCOL` 21 | * env var `OTEL_EXPORTER_OTLP_LOGS_PROTOCOL` 22 | * setting the exporterProtocol option 23 | 24 | Settings configured programmatically take precedence over environment variables. Per-signal environment variables take precedence over non-per-signal environment variables. This principle applies to all the configurations in this module. 25 | 26 | If no protocol is specified, `http/protobuf` is used as a default. 27 | 28 | ### Exporter settings 29 | 30 | #### Collector URL 31 | 32 | Set either of the following environment variables: 33 | `OTEL_EXPORTER_OTLP_LOGS_ENDPOINT`, 34 | `OTEL_EXPORTER_OTLP_ENDPOINT` 35 | 36 | #### Protocol-specific exporter configuration 37 | 38 | #### `http/protobuf` 39 | 40 | [Env vars in README](https://github.com/open-telemetry/opentelemetry-js/blob/d4a41bd815dd50703f692000a70c59235ad71959/experimental/packages/exporter-trace-otlp-proto/README.md#exporter-timeout-configuration) 41 | 42 | #### `grpc` 43 | 44 | [Environment Variable Configuration](https://github.com/open-telemetry/opentelemetry-js/blob/d4a41bd815dd50703f692000a70c59235ad71959/experimental/packages/exporter-logs-otlp-grpc/README.md#environment-variable-configuration) 45 | 46 | #### `http` 47 | 48 | [Env vars in README](https://github.com/open-telemetry/opentelemetry-js/blob/d4a41bd815dd50703f692000a70c59235ad71959/experimental/packages/exporter-trace-otlp-http/README.md#configuration-options-as-environment-variables) 49 | 50 | #### Processor-specific configuration 51 | 52 | If batch log processor is selected (is default), it can be configured using env vars described in the [OpenTelemetry specification](https://opentelemetry.io/docs/specs/otel/configuration/sdk-environment-variables/#batch-logrecord-processor) 53 | 54 | ### Options 55 | 56 | When using the transport, the following options can be used to configure the transport programmatically: 57 | 58 | * `loggerName`: name to be used by the OpenTelemetry logger 59 | * `serviceVersion`: version to be used by the OpenTelemetry logger 60 | * `severityNumberMap`: Object mapping Pino log level numbers to OpenTelemetry log severity numbers. This is an override for adding custom log levels and changing default log levels. Undefined default Pino log levels will still be mapped to their default OpenTelemetry log severity. Optional 61 | * `resourceAttributes`: Object containing [resource attributes](https://opentelemetry.io/docs/instrumentation/js/resources/). Optional 62 | * `logRecordProcessorOptions`: a single object or an array of objects specifying the LogProcessor and LogExporter types and constructor params. Optional 63 | 64 | ## Usage 65 | 66 | ### Minimalistic example 67 | 68 | Make sure you have access to an OTEL collector. 
69 | 70 | To start quickly, create a minimal configuration for OTEL collector in the `otel-collector-config.yaml` file: 71 | 72 | ```yaml 73 | receivers: 74 | otlp: 75 | protocols: 76 | grpc: 77 | endpoint: 0.0.0.0:4317 78 | http: 79 | endpoint: 0.0.0.0:4318 80 | 81 | exporters: 82 | file: 83 | path: ./etc/test-logs/otlp-logs.log 84 | flush_interval: 1 85 | 86 | debug: 87 | verbosity: basic 88 | 89 | processors: 90 | batch: 91 | 92 | service: 93 | pipelines: 94 | logs: 95 | receivers: [otlp] 96 | processors: [] 97 | exporters: [debug, file] 98 | ``` 99 | 100 | The collector can then be ran with: 101 | 102 | ```bash 103 | docker run --volume=$(pwd)/otel-collector-config.yaml:/etc/otel-collector-config.yaml:rw --volume=/tmp/test-logs:/etc/test-logs:rw -p 4317:4317 -d otel/opentelemetry-collector-contrib:latest --config=/etc/otel-collector-config.yaml 104 | ``` 105 | 106 | Create an index.js file containing 107 | 108 | ```js 109 | const pino = require('pino') 110 | 111 | const transport = pino.transport({ 112 | target: 'pino-opentelemetry-transport' 113 | }) 114 | 115 | const logger = pino(transport) 116 | 117 | transport.on('ready', () => { 118 | setInterval(() => { 119 | logger.info('test log') 120 | }, 1000) 121 | }) 122 | ``` 123 | 124 | Install Pino and pino-opentelemetry-transport 125 | 126 | ```bash 127 | npm install pino pino-opentelemetry-transport 128 | ``` 129 | 130 | Run the service setting the `OTEL_EXPORTER_OTLP_LOGS_ENDPOINT` and `OTEL_RESOURCE_ATTRIBUTES` env vars 131 | 132 | ```bash 133 | OTEL_EXPORTER_OTLP_LOGS_PROTOCOL='grpc' OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=http://localhost:4317 OTEL_RESOURCE_ATTRIBUTES="service.name=my-service,service.version=1.2.3" node index.js 134 | ``` 135 | 136 | ## Examples 137 | 138 | * [Minimalistic](./examples/minimalistic) 139 | * [HTTP Server with trace context propagation](./examples/trace-context) 140 | * [Sending logs to Grafana Loki](./examples/grafana-loki) 141 | * [Using Multiple Record Processors](./examples/using-multiple-record-processors) 142 | * [TypeScript](./examples/typescript) 143 | 144 | ## Test the repo locally 145 | 146 | Run the OTLP collector in a container 147 | 148 | ```npm run docker-run``` 149 | 150 | Run an example 151 | 152 | ```node examples/minimalistic/minimalistic.js``` 153 | 154 | Observe the logs 155 | 156 | ```tail -f /tmp/test-logs/otlp-logs.log``` 157 | 158 | Note that not all log entries will immediately be written to the `otlp-logs.log` file. The collector will flush to the disk eventually. The flush will be forced if the collector receives a kill signal. 
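For reference, here is a sketch that combines the programmatic options listed in the Options section above in a single transport configuration (the custom `notice` level and the OpenTelemetry severity chosen for it are illustrative assumptions, not required values):

```js
const pino = require('pino')

const transport = pino.transport({
  target: 'pino-opentelemetry-transport',
  options: {
    loggerName: 'my-logger',
    serviceVersion: '1.2.3',
    resourceAttributes: { 'service.name': 'my-service' },
    // Map a hypothetical custom Pino level 35 to OpenTelemetry INFO2 (10)
    severityNumberMap: { 35: 10 },
    logRecordProcessorOptions: [
      { recordProcessorType: 'batch', exporterOptions: { protocol: 'grpc' } },
      { recordProcessorType: 'simple', exporterOptions: { protocol: 'console' } }
    ]
  }
})

const logger = pino({ customLevels: { notice: 35 } }, transport)

transport.on('ready', () => {
  logger.notice('notice-level log')
})
```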
159 | 160 | ## Acknowledgements 161 | 162 | This project is kindly sponsored by: 163 | - [NearForm](https://nearform.com) 164 | 165 | ## License 166 | 167 | MIT 168 | -------------------------------------------------------------------------------- /test/lib/pino-opentelemetry-transport.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { join } = require('path') 4 | const { test, before } = require('node:test') 5 | const assert = require('node:assert') 6 | const requireInject = require('require-inject') 7 | const { Wait, GenericContainer } = require('testcontainers') 8 | const { extract } = require('tar-stream') 9 | const { SeverityNumber } = require('@opentelemetry/api-logs') 10 | const { text } = require('node:stream/consumers') 11 | const { setInterval } = require('node:timers/promises') 12 | 13 | const LOG_FILE_PATH = '/etc/test-logs/otlp-logs.log' 14 | 15 | let container 16 | 17 | before(async () => { 18 | container = await new GenericContainer( 19 | 'otel/opentelemetry-collector-contrib:0.112.0' 20 | ) 21 | .withCopyFilesToContainer([ 22 | { 23 | source: join(__dirname, '..', '..', 'otel-collector-config.yaml'), 24 | target: '/etc/otel-collector-config.yaml' 25 | } 26 | ]) 27 | .withExposedPorts({ 28 | container: 4317, 29 | host: 4317 30 | }) 31 | .withExposedPorts({ 32 | container: 4318, 33 | host: 4318 34 | }) 35 | .withCommand(['--config=/etc/otel-collector-config.yaml']) 36 | .withWaitStrategy(Wait.forLogMessage('Everything is ready')) 37 | .withCopyContentToContainer([ 38 | { 39 | content: '', 40 | target: LOG_FILE_PATH, 41 | mode: parseInt('0777', 8) 42 | } 43 | ]) 44 | .start() 45 | }) 46 | 47 | const MOCK_HOSTNAME = 'hostname' 48 | 49 | test('translate Pino log format to Open Telemetry data format for each log level', async () => { 50 | const pino = requireInject.withEmptyCache('pino', { 51 | os: { 52 | hostname: () => MOCK_HOSTNAME 53 | } 54 | }) 55 | 56 | const transport = pino.transport({ 57 | target: '../..', 58 | options: { 59 | loggerName: 'test-logger-name', 60 | resourceAttributes: { 61 | 'service.name': 'test-service', 62 | 'service.version': 'test-service-version' 63 | }, 64 | serviceVersion: 'test-service-version', 65 | logRecordProcessorOptions: { 66 | recordProcessorType: 'simple', 67 | exporterOptions: { 68 | protocol: 'grpc' 69 | } 70 | }, 71 | severityNumberMap: { 72 | 35: SeverityNumber.INFO2 73 | } 74 | } 75 | }) 76 | 77 | const logger = pino({ 78 | level: 'trace', 79 | customLevels: { 80 | custom: 35 81 | } 82 | }, transport) 83 | 84 | const testTraceId = '12345678901234567890123456789012' 85 | const testSpanId = '1234567890123456' 86 | const testTraceFlags = '01' 87 | 88 | const extra = { 89 | foo: 'bar', 90 | baz: 'qux', 91 | trace_id: testTraceId, 92 | span_id: testSpanId, 93 | trace_flags: testTraceFlags 94 | } 95 | 96 | logger.trace(extra, 'test trace') 97 | logger.debug('test debug') 98 | logger.info('test info') 99 | logger.custom('test custom') 100 | logger.warn('test warn') 101 | logger.error('test error') 102 | logger.fatal('test fatal') 103 | 104 | const expectedResourceAttributes = [ 105 | { 106 | key: 'service.name', 107 | value: { 108 | stringValue: 'test-service' 109 | } 110 | }, 111 | { 112 | key: 'service.version', 113 | value: { 114 | stringValue: 'test-service-version' 115 | } 116 | } 117 | ] 118 | 119 | const scope = { 120 | name: 'test-logger-name', 121 | version: 'test-service-version' 122 | } 123 | 124 | const expectedLines = [ 125 | { 126 | severityNumber: 
SeverityNumber.TRACE, 127 | severityText: 'trace', 128 | body: { stringValue: 'test trace' }, 129 | traceId: testTraceId, 130 | spanId: testSpanId, 131 | attributes: [ 132 | { key: 'foo', value: { stringValue: 'bar' } }, 133 | { key: 'baz', value: { stringValue: 'qux' } } 134 | ] 135 | }, 136 | { 137 | severityNumber: SeverityNumber.DEBUG, 138 | severityText: 'debug', 139 | body: { stringValue: 'test debug' }, 140 | traceId: '', 141 | spanId: '' 142 | }, 143 | { 144 | severityNumber: SeverityNumber.INFO, 145 | severityText: 'info', 146 | body: { stringValue: 'test info' }, 147 | traceId: '', 148 | spanId: '' 149 | }, 150 | { 151 | severityNumber: SeverityNumber.INFO2, 152 | severityText: 'custom', 153 | body: { stringValue: 'test custom' }, 154 | traceId: '', 155 | spanId: '' 156 | }, 157 | { 158 | severityNumber: SeverityNumber.WARN, 159 | severityText: 'warn', 160 | body: { stringValue: 'test warn' }, 161 | traceId: '', 162 | spanId: '' 163 | }, 164 | { 165 | severityNumber: SeverityNumber.ERROR, 166 | severityText: 'error', 167 | body: { stringValue: 'test error' }, 168 | traceId: '', 169 | spanId: '' 170 | }, 171 | { 172 | severityNumber: SeverityNumber.FATAL, 173 | severityText: 'fatal', 174 | body: { stringValue: 'test fatal' }, 175 | traceId: '', 176 | spanId: '' 177 | } 178 | ] 179 | 180 | const logs = await container.logs() 181 | let logRecordReceivedOnCollectorCount = 0 182 | 183 | logs 184 | .on('data', line => { 185 | if (line.includes('LogRecord')) { 186 | logRecordReceivedOnCollectorCount++ 187 | } 188 | }) 189 | .on('err', line => console.error(line)) 190 | 191 | // eslint-disable-next-line 192 | for await (const _ of setInterval(0)) { 193 | if (logRecordReceivedOnCollectorCount >= expectedLines.length) { 194 | break 195 | } 196 | } 197 | 198 | const stoppedContainer = await container.stop({ remove: false }) 199 | 200 | const tarArchiveStream = await stoppedContainer.copyArchiveFromContainer( 201 | LOG_FILE_PATH 202 | ) 203 | 204 | const extractedArchiveStream = extract() 205 | 206 | tarArchiveStream.pipe(extractedArchiveStream) 207 | 208 | const archivedFileContents = [] 209 | 210 | for await (const entry of extractedArchiveStream) { 211 | const fileContent = await text(entry) 212 | archivedFileContents.push(fileContent) 213 | } 214 | 215 | const content = archivedFileContents.join('\n') 216 | 217 | const lines = content.split('\n').filter(Boolean) 218 | 219 | assert.strictEqual(lines.length, expectedLines.length, 'correct number of lines') 220 | 221 | lines.forEach(line => { 222 | const foundAttributes = JSON.parse( 223 | line 224 | ).resourceLogs?.[0]?.resource.attributes.filter( 225 | attribute => 226 | attribute.key === 'service.name' || attribute.key === 'service.version' 227 | ) 228 | assert.deepStrictEqual(foundAttributes, expectedResourceAttributes) 229 | }) 230 | 231 | lines.forEach(line => { 232 | assert.deepStrictEqual(JSON.parse(line).resourceLogs?.[0]?.scopeLogs?.[0]?.scope, scope) 233 | }) 234 | 235 | const logRecords = [...lines.entries()] 236 | .map(([_lineNumber, logLine]) => { 237 | return JSON.parse(logLine).resourceLogs?.[0]?.scopeLogs?.[0] 238 | ?.logRecords?.[0] 239 | }) 240 | .sort((a, b) => { 241 | return a.severityNumber - b.severityNumber 242 | }) 243 | 244 | for (let i = 0; i < logRecords.length; i++) { 245 | const logRecord = logRecords[i] 246 | const expectedLine = expectedLines[i] 247 | // Only check the fields we care about, ignore extra fields like timeUnixNano, observedTimeUnixNano, flags 248 | assert.strictEqual(logRecord.severityNumber, 
expectedLine.severityNumber, `line ${i} severityNumber matches`) 249 | assert.strictEqual(logRecord.severityText, expectedLine.severityText, `line ${i} severityText matches`) 250 | assert.deepStrictEqual(logRecord.body, expectedLine.body, `line ${i} body matches`) 251 | assert.strictEqual(logRecord.traceId, expectedLine.traceId, `line ${i} traceId matches`) 252 | assert.strictEqual(logRecord.spanId, expectedLine.spanId, `line ${i} spanId matches`) 253 | if (expectedLine.attributes) { 254 | assert.deepStrictEqual(logRecord.attributes, expectedLine.attributes, `line ${i} attributes match`) 255 | } 256 | } 257 | }) 258 | 259 | test('works without explicit options parameter', async () => { 260 | const pino = requireInject.withEmptyCache('pino', { 261 | os: { 262 | hostname: () => MOCK_HOSTNAME 263 | } 264 | }) 265 | 266 | // Test both undefined options and empty options object 267 | const transport1 = pino.transport({ 268 | target: '../..' 269 | }) 270 | 271 | const transport2 = pino.transport({ 272 | target: '../..', 273 | options: {} 274 | }) 275 | 276 | const logger1 = pino({ 277 | level: 'info' 278 | }, transport1) 279 | 280 | const logger2 = pino({ 281 | level: 'info' 282 | }, transport2) 283 | 284 | logger1.info('test message without options') 285 | logger2.info('test message with empty options') 286 | 287 | // If we get here without errors, the test passes 288 | assert.ok(true, 'Transport works without explicit options') 289 | }) 290 | 291 | test('module function handles undefined options parameter', async () => { 292 | const transportModule = require('../../lib/pino-opentelemetry-transport') 293 | 294 | const transport = await transportModule() 295 | assert.ok(transport, 'Transport created successfully') 296 | }) 297 | --------------------------------------------------------------------------------