├── .eslintignore ├── .eslintrc ├── .github ├── dependabot.yml └── workflows │ ├── bench.yml │ ├── ci.yml │ ├── lock-threads.yml │ └── target-main.yml ├── .gitignore ├── .nojekyll ├── .npmrc ├── .prettierignore ├── .taprc.yaml ├── CNAME ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── benchmarks ├── basic.bench.js ├── child-child.bench.js ├── child-creation.bench.js ├── child.bench.js ├── deep-object.bench.js ├── formatters.bench.js ├── internal │ ├── custom-levels.js │ ├── just-pino-heavy.bench.js │ ├── just-pino.bench.js │ ├── parent-vs-child.bench.js │ └── redact.bench.js ├── long-string.bench.js ├── multi-arg.bench.js ├── multistream.js ├── object.bench.js └── utils │ ├── generate-benchmark-doc.js │ ├── runbench.js │ └── wrap-log-level.js ├── bin.js ├── browser.js ├── build └── sync-version.js ├── docs ├── api.md ├── asynchronous.md ├── benchmarks.md ├── browser.md ├── bundling.md ├── child-loggers.md ├── ecosystem.md ├── help.md ├── lts.md ├── pretty.md ├── redaction.md ├── transports.md └── web.md ├── docsify └── sidebar.md ├── examples ├── basic.js └── transport.js ├── favicon-16x16.png ├── favicon-32x32.png ├── favicon.ico ├── file.js ├── inc-version.sh ├── index.html ├── lib ├── caller.js ├── constants.js ├── deprecations.js ├── levels.js ├── meta.js ├── multistream.js ├── proto.js ├── redaction.js ├── symbols.js ├── time.js ├── tools.js ├── transport-stream.js ├── transport.js └── worker.js ├── package.json ├── pino-banner.png ├── pino-logo-hire.png ├── pino-tree.png ├── pino.d.ts ├── pino.js ├── pretty-demo.png ├── test ├── basic.test.js ├── broken-pipe.test.js ├── browser-child.test.js ├── browser-disabled.test.js ├── browser-early-console-freeze.test.js ├── browser-is-level-enabled.test.js ├── browser-levels.test.js ├── browser-serializers.test.js ├── browser-timestamp.test.js ├── browser-transmit.test.js ├── browser.test.js ├── complex-objects.test.js ├── crlf.test.js ├── custom-levels.test.js ├── error.test.js ├── errorKey.test.js ├── escaping.test.js ├── esm │ ├── esm.mjs │ ├── index.test.js │ └── named-exports.mjs ├── exit.test.js ├── fixtures │ ├── broken-pipe │ │ ├── basic.js │ │ ├── destination.js │ │ └── syncfalse.js │ ├── console-transport.js │ ├── crashing-transport.js │ ├── default-exit.js │ ├── destination-exit.js │ ├── eval │ │ ├── index.js │ │ └── node_modules │ │ │ ├── 14-files.js │ │ │ ├── 2-files.js │ │ │ ├── file1.js │ │ │ ├── file10.js │ │ │ ├── file11.js │ │ │ ├── file12.js │ │ │ ├── file13.js │ │ │ ├── file14.js │ │ │ ├── file2.js │ │ │ ├── file3.js │ │ │ ├── file4.js │ │ │ ├── file5.js │ │ │ ├── file6.js │ │ │ ├── file7.js │ │ │ ├── file8.js │ │ │ └── file9.js │ ├── noop-transport.js │ ├── pretty │ │ └── null-prototype.js │ ├── stdout-hack-protection.js │ ├── syncfalse-child.js │ ├── syncfalse-exit.js │ ├── syncfalse-flush-exit.js │ ├── syncfalse.js │ ├── syntax-error-esm.mjs │ ├── to-file-transport-with-transform.js │ ├── to-file-transport.js │ ├── to-file-transport.mjs │ ├── transport-exit-immediately-with-async-dest.js │ ├── transport-exit-immediately.js │ ├── transport-exit-on-ready.js │ ├── transport-main.js │ ├── transport-many-lines.js │ ├── transport-string-stdout.js │ ├── transport-transform.js │ ├── transport-uses-pino-config.js │ ├── transport-with-on-exit.js │ ├── transport-worker-data.js │ ├── transport-worker.js │ ├── transport-wrong-export-type.js │ ├── transport │ │ ├── index.js │ │ └── package.json │ └── ts │ │ ├── to-file-transport-with-transform.ts │ │ ├── to-file-transport.ts │ │ ├── transpile.cjs │ │ ├── 
transport-exit-immediately-with-async-dest.ts │ │ ├── transport-exit-immediately.ts │ │ ├── transport-exit-on-ready.ts │ │ ├── transport-main.ts │ │ ├── transport-string-stdout.ts │ │ └── transport-worker.ts ├── formatters.test.js ├── helper.d.ts ├── helper.js ├── hooks.test.js ├── http.test.js ├── internals │ └── version.test.js ├── is-level-enabled.test.js ├── jest │ └── basic.spec.js ├── levels.test.js ├── metadata.test.js ├── mixin-merge-strategy.test.js ├── mixin.test.js ├── multistream.test.js ├── pkg │ ├── index.js │ ├── pkg.config.json │ └── pkg.test.js ├── redact.test.js ├── serializers.test.js ├── stdout-protection.test.js ├── syncfalse.test.js ├── timestamp.test.js ├── transport-stream.test.js ├── transport │ ├── big.test.js │ ├── bundlers-support.test.js │ ├── caller.test.js │ ├── core.test.js │ ├── core.test.ts │ ├── core.transpiled.test.ts │ ├── crash.test.js │ ├── module-link.test.js │ ├── pipeline.test.js │ ├── repl.test.js │ ├── syncTrue.test.js │ ├── syncfalse.test.js │ ├── targets.test.js │ └── uses-pino-config.test.js └── types │ ├── pino-import.test-d.ts │ ├── pino-multistream.test-d.ts │ ├── pino-top-export.test-d.ts │ ├── pino-transport.test-d.ts │ ├── pino-type-only.test-d.ts │ ├── pino.test-d.ts │ └── pino.ts └── tsconfig.json /.eslintignore: -------------------------------------------------------------------------------- 1 | pino.d.ts 2 | test/types/pino.test-d.ts 3 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "standard" 4 | ], 5 | "rules": { 6 | "no-var": "off" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | open-pull-requests-limit: 10 8 | 9 | - package-ecosystem: "npm" 10 | directory: "/" 11 | schedule: 12 | interval: "monthly" 13 | open-pull-requests-limit: 10 14 | -------------------------------------------------------------------------------- /.github/workflows/bench.yml: -------------------------------------------------------------------------------- 1 | name: Benchmarks 2 | on: 3 | push: 4 | branches: 5 | - main 6 | paths-ignore: 7 | - 'docs/**' 8 | - '*.md' 9 | pull_request: 10 | paths-ignore: 11 | - 'docs/**' 12 | - '*.md' 13 | 14 | permissions: 15 | contents: read 16 | 17 | jobs: 18 | benchmark_current: 19 | name: benchmark current 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: Checkout Code 23 | uses: actions/checkout@v4 24 | with: 25 | ref: ${{ github.base_ref }} 26 | persist-credentials: false 27 | - name: Setup Node 28 | uses: actions/setup-node@v4 29 | with: 30 | node-version: lts/* 31 | - name: Install Modules 32 | run: npm i --ignore-scripts 33 | - name: Run Benchmark 34 | run: npm run bench | tee current.txt 35 | - name: Upload Current Results 36 | uses: actions/upload-artifact@v4 37 | with: 38 | name: current 39 | path: current.txt 40 | 41 | benchmark_branch: 42 | name: benchmark branch 43 | runs-on: ubuntu-latest 44 | steps: 45 | - name: Checkout Code 46 | uses: actions/checkout@v4 47 | with: 48 | persist-credentials: false 49 | - name: Setup Node 50 | uses: actions/setup-node@v4 51 | with: 52 | node-version: lts/* 53 | - name: Install Modules 54 | run: npm i --ignore-scripts 55 | - name: Run Benchmark 
56 | run: npm run bench | tee branch.txt 57 | - name: Upload Branch Results 58 | uses: actions/upload-artifact@v4 59 | with: 60 | name: branch 61 | path: branch.txt 62 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - 'v*' 8 | paths-ignore: 9 | - 'docs/**' 10 | - '*.md' 11 | pull_request: 12 | paths-ignore: 13 | - 'docs/**' 14 | - '*.md' 15 | 16 | # This allows a subsequently queued workflow run to interrupt previous runs 17 | concurrency: 18 | group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}" 19 | cancel-in-progress: true 20 | 21 | jobs: 22 | dependency-review: 23 | name: Dependency Review 24 | if: github.event_name == 'pull_request' 25 | runs-on: ubuntu-latest 26 | permissions: 27 | contents: read 28 | steps: 29 | - name: Check out repo 30 | uses: actions/checkout@v4 31 | with: 32 | persist-credentials: false 33 | 34 | - name: Dependency review 35 | uses: actions/dependency-review-action@v4 36 | 37 | test: 38 | name: ${{ matrix.node-version }} ${{ matrix.os }} 39 | runs-on: ${{ matrix.os }} 40 | permissions: 41 | contents: read 42 | strategy: 43 | fail-fast: false 44 | matrix: 45 | os: [macOS-latest, windows-latest, ubuntu-latest] 46 | node-version: [18, 20, 22] 47 | exclude: 48 | - os: windows-latest 49 | node-version: 22 50 | 51 | steps: 52 | - name: Check out repo 53 | uses: actions/checkout@v4 54 | with: 55 | persist-credentials: false 56 | 57 | - name: Setup Node ${{ matrix.node-version }} 58 | uses: actions/setup-node@v4 59 | with: 60 | node-version: ${{ matrix.node-version }} 61 | 62 | - name: Install dependencies 63 | run: npm i --ignore-scripts 64 | 65 | - name: Run tests 66 | run: npm run test-ci 67 | 68 | - name: Run smoke test 69 | if: > 70 | matrix.os != 'windows-latest' && 71 | matrix.node-version > 14 72 | run: npm run test:smoke 73 | 74 | automerge: 75 | name: Automerge Dependabot PRs 76 | if: > 77 | github.event_name == 'pull_request' && 78 | github.event.pull_request.user.login == 'dependabot[bot]' 79 | needs: test 80 | permissions: 81 | pull-requests: write 82 | contents: write 83 | runs-on: ubuntu-latest 84 | steps: 85 | - uses: fastify/github-action-merge-dependabot@v3 86 | with: 87 | github-token: ${{ secrets.GITHUB_TOKEN }} 88 | exclude: 'sonic-boom,pino-std-serializers,quick-format-unescaped,fast-redact' 89 | -------------------------------------------------------------------------------- /.github/workflows/lock-threads.yml: -------------------------------------------------------------------------------- 1 | name: 'Lock Threads' 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | workflow_dispatch: 7 | 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | 12 | concurrency: 13 | group: lock 14 | 15 | jobs: 16 | action: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: jsumners/lock-threads@b27edac0ac998d42b2815e122b6c24b32b568321 20 | with: 21 | log-output: true 22 | issue-inactive-days: '30' 23 | issue-comment: > 24 | This issue has been automatically locked since there 25 | has not been any recent activity after it was closed. 26 | Please open a new issue for related bugs. 27 | pr-comment: > 28 | This pull request has been automatically locked since there 29 | has not been any recent activity after it was closed. 30 | Please open a new issue for related bugs. 
31 | -------------------------------------------------------------------------------- /.github/workflows/target-main.yml: -------------------------------------------------------------------------------- 1 | name: PR Target Check 2 | 3 | on: 4 | pull_request_target: 5 | types: [opened] 6 | 7 | permissions: 8 | pull-requests: write 9 | 10 | jobs: 11 | comment: 12 | if: ${{ github.base_ref != 'main' }} 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/github-script@v7 16 | with: 17 | script: | 18 | github.rest.issues.createComment({ 19 | issue_number: context.issue.number, 20 | owner: context.repo.owner, 21 | repo: context.repo.repo, 22 | body: '⚠️ This pull request does not target the main branch.' 23 | }) 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml
129 | .yarn/install-state.gz 130 | .pnp.* 131 | 132 | # Vim swap files 133 | *.swp 134 | 135 | # macOS files 136 | .DS_Store 137 | 138 | # editor files 139 | .vscode 140 | .idea 141 | 142 | # lock files 143 | package-lock.json 144 | pnpm-lock.yaml 145 | yarn.lock 146 | 147 | # 0x 148 | .__browserify_string_empty.js 149 | profile-* 150 | 151 | # Generated files 152 | test/fixtures/ts/*js 153 | !test/fixtures/eval/node_modules 154 | !test/fixtures/ts/transpile.cjs 155 | -------------------------------------------------------------------------------- /.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/.nojekyll -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | package-lock=false 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | * 2 | -------------------------------------------------------------------------------- /.taprc.yaml: -------------------------------------------------------------------------------- 1 | coverage: true 2 | timeout: 480 3 | check-coverage: false 4 | 5 | reporter: terse 6 | 7 | files: 8 | - 'test/**/*.test.js' 9 | -------------------------------------------------------------------------------- /CNAME: -------------------------------------------------------------------------------- 1 | getpino.io -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Pino is an OPEN Open Source Project 2 | 3 | ## What? 4 | 5 | Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project. 6 | 7 | ## Rules 8 | 9 | Before you start coding, please read [Contributing to projects with git](https://jrfom.com/posts/2017/03/08/a-primer-on-contributing-to-projects-with-git/). 10 | 11 | Notice that as long as you don't have commit-access to the project, you have to fork the project and open PRs from the feature branches of the forked project. 12 | 13 | There are a few basic ground-rules for contributors: 14 | 15 | 1. **No `--force` pushes** on `master` or modifying the Git history in any way after a PR has been merged. 16 | 1. **Non-master branches** ought to be used for ongoing work. 17 | 1. **Non-trivial changes** ought to be subject to an **internal pull-request** to solicit feedback from other contributors. 18 | 1. All pull-requests for new features **must** target the `master` branch. PRs to fix bugs in LTS releases are also allowed. 19 | 1. Contributors should attempt to adhere to the prevailing code-style. 20 | 1. 100% code coverage 21 | 22 | ## Releases 23 | 24 | Declaring formal releases remains the prerogative of the project maintainer. 25 | 26 | ## Changes to this arrangement 27 | 28 | This is an experiment and feedback is welcome! This document may also be subject to pull-requests or changes by contributors where you believe you have something valuable to add or change. 
29 | 30 | ----------------------------------------- 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016-2025 Matteo Collina, David Mark Clements and the Pino contributors listed at and in the README file. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![banner](pino-banner.png) 2 | 3 | # pino 4 | [![npm version](https://img.shields.io/npm/v/pino)](https://www.npmjs.com/package/pino) 5 | [![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino/ci.yml)](https://github.com/pinojs/pino/actions) 6 | [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) 7 | 8 | [Very low overhead](#low-overhead) Node.js logger. 9 | 10 | ## Documentation 11 | 12 | * [Benchmarks ⇗](/docs/benchmarks.md) 13 | * [API ⇗](/docs/api.md) 14 | * [Browser API ⇗](/docs/browser.md) 15 | * [Redaction ⇗](/docs/redaction.md) 16 | * [Child Loggers ⇗](/docs/child-loggers.md) 17 | * [Transports ⇗](/docs/transports.md) 18 | * [Web Frameworks ⇗](/docs/web.md) 19 | * [Pretty Printing ⇗](/docs/pretty.md) 20 | * [Asynchronous Logging ⇗](/docs/asynchronous.md) 21 | * [Ecosystem ⇗](/docs/ecosystem.md) 22 | * [Help ⇗](/docs/help.md) 23 | * [Long Term Support Policy ⇗](/docs/lts.md) 24 | 25 | ## Install 26 | 27 | Using NPM: 28 | ``` 29 | $ npm install pino 30 | ``` 31 | 32 | Using YARN: 33 | ``` 34 | $ yarn add pino 35 | ``` 36 | 37 | If you would like to install pino v6, refer to https://github.com/pinojs/pino/tree/v6.x. 
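Pino can also be consumed as an ES module (the `test/esm` suite in this repository exercises both the default and named exports). A minimal sketch of the import form, equivalent to the CommonJS example in the Usage section below:

```js
// ESM form of the CommonJS usage shown below
import pino from 'pino'

const logger = pino()
logger.info('hello world')
```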
38 | 39 | ## Usage 40 | 41 | ```js 42 | const logger = require('pino')() 43 | 44 | logger.info('hello world') 45 | 46 | const child = logger.child({ a: 'property' }) 47 | child.info('hello child!') 48 | ``` 49 | 50 | This produces: 51 | 52 | ``` 53 | {"level":30,"time":1531171074631,"msg":"hello world","pid":657,"hostname":"Davids-MBP-3.fritz.box"} 54 | {"level":30,"time":1531171082399,"msg":"hello child!","pid":657,"hostname":"Davids-MBP-3.fritz.box","a":"property"} 55 | ``` 56 | 57 | For using Pino with a web framework see: 58 | 59 | * [Pino with Fastify](docs/web.md#fastify) 60 | * [Pino with Express](docs/web.md#express) 61 | * [Pino with Hapi](docs/web.md#hapi) 62 | * [Pino with Restify](docs/web.md#restify) 63 | * [Pino with Koa](docs/web.md#koa) 64 | * [Pino with Node core `http`](docs/web.md#http) 65 | * [Pino with Nest](docs/web.md#nest) 66 | 67 | 68 | 69 | ## Essentials 70 | 71 | ### Development Formatting 72 | 73 | The [`pino-pretty`](https://github.com/pinojs/pino-pretty) module can be used to 74 | format logs during development: 75 | 76 | ![pretty demo](pretty-demo.png) 77 | 78 | ### Transports & Log Processing 79 | 80 | Due to Node's single-threaded event-loop, it's highly recommended that sending, 81 | alert triggering, reformatting, and all forms of log processing 82 | are conducted in a separate process or thread. 83 | 84 | In Pino terminology, we call all log processors "transports" and recommend that the 85 | transports be run in a worker thread using our `pino.transport` API. 86 | 87 | For more details see our [Transports⇗](docs/transports.md) document. 88 | 89 | ### Low overhead 90 | 91 | Using minimum resources for logging is very important. Log messages 92 | tend to get added over time and this can lead to a throttling effect 93 | on applications – such as reduced requests per second. 94 | 95 | In many cases, Pino is over 5x faster than alternatives. 96 | 97 | See the [Benchmarks](docs/benchmarks.md) document for comparisons. 98 | 99 | ### Bundling support 100 | 101 | Pino supports being bundled using tools like webpack or esbuild. 102 | 103 | See [Bundling](docs/bundling.md) document for more information. 104 | 105 | 106 | ## The Team 107 | 108 | ### Matteo Collina 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | ### David Mark Clements 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | ### James Sumners 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | ### Thomas Watson Steen 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | ## Contributing 141 | 142 | Pino is an **OPEN Open Source Project**. This means that: 143 | 144 | > Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project. 145 | 146 | See the [CONTRIBUTING.md](https://github.com/pinojs/pino/blob/main/CONTRIBUTING.md) file for more details. 147 | 148 | 149 | ## Acknowledgments 150 | 151 | This project was kindly sponsored by [nearForm](https://nearform.com). 152 | This project is kindly sponsored by [Platformatic](https://platformatic.dev). 153 | 154 | Logo and identity designed by Cosmic Fox Design: https://www.behance.net/cosmicfox. 155 | 156 | ## License 157 | 158 | Licensed under [MIT](./LICENSE). 
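As a closing example, here is a minimal sketch of the worker-thread approach described in the Transports & Log Processing section above. The `pino/file` target ships with Pino; the `./app.log` destination path is only an illustrative value.

```js
const pino = require('pino')

// pino.transport() starts a worker thread, so writing and any further
// log processing happen off the main event loop.
const transport = pino.transport({
  target: 'pino/file',                  // built-in target that writes to a file or file descriptor
  options: { destination: './app.log' } // example path; use 1 for stdout
})

const logger = pino(transport)
logger.info('handled by the worker thread, not the main thread')
```

During development, pointing the same mechanism at `pino-pretty` (for example `target: 'pino-pretty'`) produces the human-readable output shown in the Development Formatting section.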
159 | 160 | [elasticsearch]: https://www.elastic.co/products/elasticsearch 161 | [kibana]: https://www.elastic.co/products/kibana 162 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | This document describes the management of vulnerabilities for the 4 | Pino project and all modules within the Pino organization. 5 | 6 | ## Reporting vulnerabilities 7 | 8 | Individuals who find potential vulnerabilities in Pino are invited 9 | to report them via email at matteo.collina@gmail.com. 10 | 11 | ### Strict measures when reporting vulnerabilities 12 | 13 | Avoid creating new "informative" reports. Only create a new 14 | report for a potential vulnerability if you are absolutely sure it 15 | should be tagged as an actual vulnerability. Be mindful of the maintainers' time. 16 | 17 | ## Handling vulnerability reports 18 | 19 | When a potential vulnerability is reported, the following actions are taken: 20 | 21 | ### Triage 22 | 23 | **Delay:** 5 business days 24 | 25 | Within 5 business days, a member of the security team provides a first answer to the 26 | individual who submitted the potential vulnerability. The possible responses 27 | can be: 28 | 29 | * Acceptance: what was reported is considered a new vulnerability 30 | * Rejection: what was reported is not considered a new vulnerability 31 | * Need more information: the security team needs more information in order to evaluate what was reported. 32 | 33 | Triaging should include updating issue fields: 34 | * Asset - set/create the module affected by the report 35 | * Severity - TBD, currently left empty 36 | 37 | ### Correction follow-up 38 | 39 | **Delay:** 90 days 40 | 41 | When a vulnerability is confirmed, a member of the security team volunteers to follow 42 | up on this report. 43 | 44 | With the help of the individual who reported the vulnerability, they contact 45 | the maintainers of the vulnerable package to make them aware of the 46 | vulnerability. The maintainers can be invited as participants to the reported issue. 47 | 48 | With the package maintainer, they define a release date for the publication 49 | of the vulnerability. Ideally, this release date should not happen before 50 | the package has been patched. 51 | 52 | The report's vulnerable versions upper limit should be set to: 53 | * `*` if there is no fixed version available by the time of publishing the report. 54 | * the last vulnerable version. For example: `<=1.2.3` if a fix exists in `1.2.4` 55 | 56 | ### Publication 57 | 58 | **Delay:** 90 days 59 | 60 | Within 90 days after the triage date, the vulnerability must be made public. 61 | 62 | **Severity**: Vulnerability severity is assessed using [CVSS v.3](https://www.first.org/cvss/user-guide). 63 | 64 | If the package maintainer is actively developing a patch, an additional delay 65 | can be added with the approval of the security team and the individual who 66 | reported the vulnerability. 67 | 68 | At this point, a CVE will be requested by the team.
69 | -------------------------------------------------------------------------------- /benchmarks/basic.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../') 5 | const bunyan = require('bunyan') 6 | const bole = require('bole')('bench') 7 | const winston = require('winston') 8 | const fs = require('node:fs') 9 | const dest = fs.createWriteStream('/dev/null') 10 | const loglevel = require('./utils/wrap-log-level')(dest) 11 | const plogNodeStream = pino(dest) 12 | delete require.cache[require.resolve('../')] 13 | const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', minLength: 4096 })) 14 | delete require.cache[require.resolve('../')] 15 | const plogDest = require('../')(pino.destination('/dev/null')) 16 | 17 | process.env.DEBUG = 'dlog' 18 | const debug = require('debug') 19 | const dlog = debug('dlog') 20 | dlog.log = function (s) { dest.write(s) } 21 | 22 | const max = 10 23 | const blog = bunyan.createLogger({ 24 | name: 'myapp', 25 | streams: [{ 26 | level: 'trace', 27 | stream: dest 28 | }] 29 | }) 30 | 31 | require('bole').output({ 32 | level: 'info', 33 | stream: dest 34 | }).setFastTime(true) 35 | 36 | const chill = winston.createLogger({ 37 | transports: [ 38 | new winston.transports.Stream({ 39 | stream: fs.createWriteStream('/dev/null') 40 | }) 41 | ] 42 | }) 43 | 44 | const run = bench([ 45 | function benchBunyan (cb) { 46 | for (var i = 0; i < max; i++) { 47 | blog.info('hello world') 48 | } 49 | setImmediate(cb) 50 | }, 51 | function benchWinston (cb) { 52 | for (var i = 0; i < max; i++) { 53 | chill.log('info', 'hello world') 54 | } 55 | setImmediate(cb) 56 | }, 57 | function benchBole (cb) { 58 | for (var i = 0; i < max; i++) { 59 | bole.info('hello world') 60 | } 61 | setImmediate(cb) 62 | }, 63 | function benchDebug (cb) { 64 | for (var i = 0; i < max; i++) { 65 | dlog('hello world') 66 | } 67 | setImmediate(cb) 68 | }, 69 | function benchLogLevel (cb) { 70 | for (var i = 0; i < max; i++) { 71 | loglevel.info('hello world') 72 | } 73 | setImmediate(cb) 74 | }, 75 | function benchPino (cb) { 76 | for (var i = 0; i < max; i++) { 77 | plogDest.info('hello world') 78 | } 79 | setImmediate(cb) 80 | }, 81 | function benchPinoMinLength (cb) { 82 | for (var i = 0; i < max; i++) { 83 | plogMinLength.info('hello world') 84 | } 85 | setImmediate(cb) 86 | }, 87 | function benchPinoNodeStream (cb) { 88 | for (var i = 0; i < max; i++) { 89 | plogNodeStream.info('hello world') 90 | } 91 | setImmediate(cb) 92 | } 93 | ], 10000) 94 | 95 | run(run) 96 | -------------------------------------------------------------------------------- /benchmarks/child-child.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../') 5 | const bunyan = require('bunyan') 6 | const fs = require('node:fs') 7 | const dest = fs.createWriteStream('/dev/null') 8 | const plogNodeStream = pino(dest).child({ a: 'property' }).child({ sub: 'child' }) 9 | delete require.cache[require.resolve('../')] 10 | const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' }).child({ sub: 'child' }) 11 | delete require.cache[require.resolve('../')] 12 | const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) 13 | .child({ a: 'property' }) 14 | .child({ sub: 'child' }) 15 | 16 | const max 
= 10 17 | const blog = bunyan.createLogger({ 18 | name: 'myapp', 19 | streams: [{ 20 | level: 'trace', 21 | stream: dest 22 | }] 23 | }).child({ a: 'property' }).child({ sub: 'child' }) 24 | 25 | const run = bench([ 26 | function benchBunyanChildChild (cb) { 27 | for (var i = 0; i < max; i++) { 28 | blog.info({ hello: 'world' }) 29 | } 30 | setImmediate(cb) 31 | }, 32 | function benchPinoChildChild (cb) { 33 | for (var i = 0; i < max; i++) { 34 | plogDest.info({ hello: 'world' }) 35 | } 36 | setImmediate(cb) 37 | }, 38 | function benchPinoMinLengthChildChild (cb) { 39 | for (var i = 0; i < max; i++) { 40 | plogMinLength.info({ hello: 'world' }) 41 | } 42 | setImmediate(cb) 43 | }, 44 | function benchPinoNodeStreamChildChild (cb) { 45 | for (var i = 0; i < max; i++) { 46 | plogNodeStream.info({ hello: 'world' }) 47 | } 48 | setImmediate(cb) 49 | } 50 | ], 10000) 51 | 52 | run(run) 53 | -------------------------------------------------------------------------------- /benchmarks/child-creation.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../') 5 | const bunyan = require('bunyan') 6 | const bole = require('bole')('bench') 7 | const fs = require('node:fs') 8 | const dest = fs.createWriteStream('/dev/null') 9 | const plogNodeStream = pino(dest) 10 | const plogDest = pino(pino.destination(('/dev/null'))) 11 | delete require.cache[require.resolve('../')] 12 | const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) 13 | 14 | const max = 10 15 | const blog = bunyan.createLogger({ 16 | name: 'myapp', 17 | streams: [{ 18 | level: 'trace', 19 | stream: dest 20 | }] 21 | }) 22 | 23 | require('bole').output({ 24 | level: 'info', 25 | stream: dest 26 | }).setFastTime(true) 27 | 28 | const run = bench([ 29 | function benchBunyanCreation (cb) { 30 | const child = blog.child({ a: 'property' }) 31 | for (var i = 0; i < max; i++) { 32 | child.info({ hello: 'world' }) 33 | } 34 | setImmediate(cb) 35 | }, 36 | function benchBoleCreation (cb) { 37 | const child = bole('child') 38 | for (var i = 0; i < max; i++) { 39 | child.info({ hello: 'world' }) 40 | } 41 | setImmediate(cb) 42 | }, 43 | function benchPinoCreation (cb) { 44 | const child = plogDest.child({ a: 'property' }) 45 | for (var i = 0; i < max; i++) { 46 | child.info({ hello: 'world' }) 47 | } 48 | setImmediate(cb) 49 | }, 50 | function benchPinoMinLengthCreation (cb) { 51 | const child = plogMinLength.child({ a: 'property' }) 52 | for (var i = 0; i < max; i++) { 53 | child.info({ hello: 'world' }) 54 | } 55 | setImmediate(cb) 56 | }, 57 | function benchPinoNodeStreamCreation (cb) { 58 | const child = plogNodeStream.child({ a: 'property' }) 59 | for (var i = 0; i < max; i++) { 60 | child.info({ hello: 'world' }) 61 | } 62 | setImmediate(cb) 63 | }, 64 | function benchPinoCreationWithOption (cb) { 65 | const child = plogDest.child({ a: 'property' }, { redact: [] }) 66 | for (var i = 0; i < max; i++) { 67 | child.info({ hello: 'world' }) 68 | } 69 | setImmediate(cb) 70 | } 71 | ], 10000) 72 | 73 | run(run) 74 | -------------------------------------------------------------------------------- /benchmarks/child.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../') 5 | const bunyan = require('bunyan') 6 | const bole = require('bole')('bench')('child') 
7 | const fs = require('node:fs') 8 | const dest = fs.createWriteStream('/dev/null') 9 | const plogNodeStream = pino(dest).child({ a: 'property' }) 10 | delete require.cache[require.resolve('../')] 11 | const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' }) 12 | delete require.cache[require.resolve('../')] 13 | const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) 14 | 15 | const max = 10 16 | const blog = bunyan.createLogger({ 17 | name: 'myapp', 18 | streams: [{ 19 | level: 'trace', 20 | stream: dest 21 | }] 22 | }).child({ a: 'property' }) 23 | 24 | require('bole').output({ 25 | level: 'info', 26 | stream: dest 27 | }).setFastTime(true) 28 | 29 | const run = bench([ 30 | function benchBunyanChild (cb) { 31 | for (var i = 0; i < max; i++) { 32 | blog.info({ hello: 'world' }) 33 | } 34 | setImmediate(cb) 35 | }, 36 | function benchBoleChild (cb) { 37 | for (var i = 0; i < max; i++) { 38 | bole.info({ hello: 'world' }) 39 | } 40 | setImmediate(cb) 41 | }, 42 | function benchPinoChild (cb) { 43 | for (var i = 0; i < max; i++) { 44 | plogDest.info({ hello: 'world' }) 45 | } 46 | setImmediate(cb) 47 | }, 48 | function benchPinoMinLengthChild (cb) { 49 | for (var i = 0; i < max; i++) { 50 | plogMinLength.info({ hello: 'world' }) 51 | } 52 | setImmediate(cb) 53 | }, 54 | function benchPinoNodeStreamChild (cb) { 55 | for (var i = 0; i < max; i++) { 56 | plogNodeStream.info({ hello: 'world' }) 57 | } 58 | setImmediate(cb) 59 | } 60 | ], 10000) 61 | 62 | run(run) 63 | -------------------------------------------------------------------------------- /benchmarks/deep-object.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../') 5 | const bunyan = require('bunyan') 6 | const bole = require('bole')('bench') 7 | const winston = require('winston') 8 | const fs = require('node:fs') 9 | const dest = fs.createWriteStream('/dev/null') 10 | const plogNodeStream = pino(dest) 11 | delete require.cache[require.resolve('../')] 12 | const plogDest = require('../')(pino.destination('/dev/null')) 13 | delete require.cache[require.resolve('../')] 14 | const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) 15 | delete require.cache[require.resolve('../')] 16 | 17 | const loglevel = require('./utils/wrap-log-level')(dest) 18 | 19 | const deep = Object.assign({}, require('../package.json'), { level: 'info' }) 20 | 21 | const max = 10 22 | const blog = bunyan.createLogger({ 23 | name: 'myapp', 24 | streams: [{ 25 | level: 'trace', 26 | stream: dest 27 | }] 28 | }) 29 | 30 | require('bole').output({ 31 | level: 'info', 32 | stream: dest 33 | }).setFastTime(true) 34 | 35 | const chill = winston.createLogger({ 36 | transports: [ 37 | new winston.transports.Stream({ 38 | stream: fs.createWriteStream('/dev/null') 39 | }) 40 | ] 41 | }) 42 | 43 | const run = bench([ 44 | function benchBunyanDeepObj (cb) { 45 | for (var i = 0; i < max; i++) { 46 | blog.info(deep) 47 | } 48 | setImmediate(cb) 49 | }, 50 | function benchWinstonDeepObj (cb) { 51 | for (var i = 0; i < max; i++) { 52 | chill.log(deep) 53 | } 54 | setImmediate(cb) 55 | }, 56 | function benchBoleDeepObj (cb) { 57 | for (var i = 0; i < max; i++) { 58 | bole.info(deep) 59 | } 60 | setImmediate(cb) 61 | }, 62 | function benchLogLevelDeepObj (cb) { 63 | for (var i = 0; i < max; i++) { 64 | loglevel.info(deep) 
65 | } 66 | setImmediate(cb) 67 | }, 68 | function benchPinoDeepObj (cb) { 69 | for (var i = 0; i < max; i++) { 70 | plogDest.info(deep) 71 | } 72 | setImmediate(cb) 73 | }, 74 | function benchPinoMinLengthDeepObj (cb) { 75 | for (var i = 0; i < max; i++) { 76 | plogMinLength.info(deep) 77 | } 78 | setImmediate(cb) 79 | }, 80 | function benchPinoNodeStreamDeepObj (cb) { 81 | for (var i = 0; i < max; i++) { 82 | plogNodeStream.info(deep) 83 | } 84 | setImmediate(cb) 85 | } 86 | ], 10000) 87 | 88 | run(run) 89 | -------------------------------------------------------------------------------- /benchmarks/formatters.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const formatters = { 4 | level (label, number) { 5 | return { 6 | log: { 7 | level: label 8 | } 9 | } 10 | }, 11 | bindings (bindings) { 12 | return { 13 | process: { 14 | pid: bindings.pid 15 | }, 16 | host: { 17 | name: bindings.hostname 18 | } 19 | } 20 | }, 21 | log (obj) { 22 | return { foo: 'bar', ...obj } 23 | } 24 | } 25 | 26 | const bench = require('fastbench') 27 | const pino = require('../') 28 | delete require.cache[require.resolve('../')] 29 | const pinoNoFormatters = require('../')(pino.destination('/dev/null')) 30 | delete require.cache[require.resolve('../')] 31 | const pinoFormatters = require('../')({ formatters }, pino.destination('/dev/null')) 32 | 33 | const max = 10 34 | 35 | const run = bench([ 36 | function benchPinoNoFormatters (cb) { 37 | for (var i = 0; i < max; i++) { 38 | pinoNoFormatters.info({ hello: 'world' }) 39 | } 40 | setImmediate(cb) 41 | }, 42 | function benchPinoFormatters (cb) { 43 | for (var i = 0; i < max; i++) { 44 | pinoFormatters.info({ hello: 'world' }) 45 | } 46 | setImmediate(cb) 47 | } 48 | ], 10000) 49 | 50 | run(run) 51 | -------------------------------------------------------------------------------- /benchmarks/internal/custom-levels.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../../') 5 | 6 | const base = pino(pino.destination('/dev/null')) 7 | const baseCl = pino({ 8 | customLevels: { foo: 31 } 9 | }, pino.destination('/dev/null')) 10 | const child = base.child({}) 11 | const childCl = base.child({ 12 | customLevels: { foo: 31 } 13 | }) 14 | const childOfBaseCl = baseCl.child({}) 15 | 16 | const max = 100 17 | 18 | const run = bench([ 19 | function benchPinoNoCustomLevel (cb) { 20 | for (var i = 0; i < max; i++) { 21 | base.info({ hello: 'world' }) 22 | } 23 | setImmediate(cb) 24 | }, 25 | function benchPinoCustomLevel (cb) { 26 | for (var i = 0; i < max; i++) { 27 | baseCl.foo({ hello: 'world' }) 28 | } 29 | setImmediate(cb) 30 | }, 31 | function benchChildNoCustomLevel (cb) { 32 | for (var i = 0; i < max; i++) { 33 | child.info({ hello: 'world' }) 34 | } 35 | setImmediate(cb) 36 | }, 37 | function benchPinoChildCustomLevel (cb) { 38 | for (var i = 0; i < max; i++) { 39 | childCl.foo({ hello: 'world' }) 40 | } 41 | setImmediate(cb) 42 | }, 43 | function benchPinoChildInheritedCustomLevel (cb) { 44 | for (var i = 0; i < max; i++) { 45 | childOfBaseCl.foo({ hello: 'world' }) 46 | } 47 | setImmediate(cb) 48 | }, 49 | function benchPinoChildCreation (cb) { 50 | const child = base.child({}) 51 | for (var i = 0; i < max; i++) { 52 | child.info({ hello: 'world' }) 53 | } 54 | setImmediate(cb) 55 | }, 56 | function benchPinoChildCreationCustomLevel (cb) { 57 | const child = base.child({ 58 | 
customLevels: { foo: 31 } 59 | }) 60 | for (var i = 0; i < max; i++) { 61 | child.foo({ hello: 'world' }) 62 | } 63 | setImmediate(cb) 64 | } 65 | ], 10000) 66 | 67 | run(run) 68 | -------------------------------------------------------------------------------- /benchmarks/internal/just-pino-heavy.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../../') 5 | const fs = require('node:fs') 6 | const dest = fs.createWriteStream('/dev/null') 7 | const plog = pino(dest) 8 | delete require.cache[require.resolve('../../')] 9 | const plogDest = require('../../')(pino.destination('/dev/null')) 10 | delete require.cache[require.resolve('../../')] 11 | const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false })) 12 | const deep = require('../../package.json') 13 | deep.deep = JSON.parse(JSON.stringify(deep)) 14 | deep.deep.deep = JSON.parse(JSON.stringify(deep)) 15 | const longStr = JSON.stringify(deep) 16 | 17 | const max = 10 18 | 19 | const run = bench([ 20 | function benchPinoLongString (cb) { 21 | for (var i = 0; i < max; i++) { 22 | plog.info(longStr) 23 | } 24 | setImmediate(cb) 25 | }, 26 | function benchPinoDestLongString (cb) { 27 | for (var i = 0; i < max; i++) { 28 | plogDest.info(longStr) 29 | } 30 | setImmediate(cb) 31 | }, 32 | function benchPinoAsyncLongString (cb) { 33 | for (var i = 0; i < max; i++) { 34 | plogAsync.info(longStr) 35 | } 36 | setImmediate(cb) 37 | }, 38 | function benchPinoDeepObj (cb) { 39 | for (var i = 0; i < max; i++) { 40 | plog.info(deep) 41 | } 42 | setImmediate(cb) 43 | }, 44 | function benchPinoDestDeepObj (cb) { 45 | for (var i = 0; i < max; i++) { 46 | plogDest.info(deep) 47 | } 48 | setImmediate(cb) 49 | }, 50 | function benchPinoAsyncDeepObj (cb) { 51 | for (var i = 0; i < max; i++) { 52 | plogAsync.info(deep) 53 | } 54 | setImmediate(cb) 55 | }, 56 | function benchPinoInterpolateDeep (cb) { 57 | for (var i = 0; i < max; i++) { 58 | plog.info('hello %j', deep) 59 | } 60 | setImmediate(cb) 61 | }, 62 | function benchPinoDestInterpolateDeep (cb) { 63 | for (var i = 0; i < max; i++) { 64 | plogDest.info('hello %j', deep) 65 | } 66 | setImmediate(cb) 67 | }, 68 | function benchPinoAsyncInterpolateDeep (cb) { 69 | for (var i = 0; i < max; i++) { 70 | plogAsync.info('hello %j', deep) 71 | } 72 | setImmediate(cb) 73 | } 74 | ], 1000) 75 | 76 | run(run) 77 | -------------------------------------------------------------------------------- /benchmarks/internal/parent-vs-child.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../../') 5 | 6 | const base = pino(pino.destination('/dev/null')) 7 | const child = base.child({}) 8 | const childChild = child.child({}) 9 | const childChildChild = childChild.child({}) 10 | const childChildChildChild = childChildChild.child({}) 11 | const child2 = base.child({}) 12 | const baseSerializers = pino(pino.destination('/dev/null')) 13 | const baseSerializersChild = baseSerializers.child({}) 14 | const baseSerializersChildSerializers = baseSerializers.child({}) 15 | 16 | const max = 100 17 | 18 | const run = bench([ 19 | function benchPinoBase (cb) { 20 | for (var i = 0; i < max; i++) { 21 | base.info({ hello: 'world' }) 22 | } 23 | setImmediate(cb) 24 | }, 25 | function benchPinoChild (cb) { 26 | for (var i = 0; i < max; i++) { 27 | child.info({ hello: 
'world' }) 28 | } 29 | setImmediate(cb) 30 | }, 31 | function benchPinoChildChild (cb) { 32 | for (var i = 0; i < max; i++) { 33 | childChild.info({ hello: 'world' }) 34 | } 35 | setImmediate(cb) 36 | }, 37 | function benchPinoChildChildChild (cb) { 38 | for (var i = 0; i < max; i++) { 39 | childChildChild.info({ hello: 'world' }) 40 | } 41 | setImmediate(cb) 42 | }, 43 | function benchPinoChildChildChildChild (cb) { 44 | for (var i = 0; i < max; i++) { 45 | childChildChildChild.info({ hello: 'world' }) 46 | } 47 | setImmediate(cb) 48 | }, 49 | function benchPinoChild2 (cb) { 50 | for (var i = 0; i < max; i++) { 51 | child2.info({ hello: 'world' }) 52 | } 53 | setImmediate(cb) 54 | }, 55 | function benchPinoBaseSerializers (cb) { 56 | for (var i = 0; i < max; i++) { 57 | baseSerializers.info({ hello: 'world' }) 58 | } 59 | setImmediate(cb) 60 | }, 61 | function benchPinoBaseSerializersChild (cb) { 62 | for (var i = 0; i < max; i++) { 63 | baseSerializersChild.info({ hello: 'world' }) 64 | } 65 | setImmediate(cb) 66 | }, 67 | function benchPinoBaseSerializersChildSerializers (cb) { 68 | for (var i = 0; i < max; i++) { 69 | baseSerializersChildSerializers.info({ hello: 'world' }) 70 | } 71 | setImmediate(cb) 72 | } 73 | ], 10000) 74 | 75 | run(run) 76 | -------------------------------------------------------------------------------- /benchmarks/internal/redact.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../../') 5 | const fs = require('node:fs') 6 | const dest = fs.createWriteStream('/dev/null') 7 | const plog = pino(dest) 8 | delete require.cache[require.resolve('../../')] 9 | const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false })) 10 | delete require.cache[require.resolve('../../')] 11 | const plogUnsafe = require('../../')({ safe: false }, dest) 12 | delete require.cache[require.resolve('../../')] 13 | const plogUnsafeAsync = require('../../')( 14 | { safe: false }, 15 | pino.destination({ dest: '/dev/null', sync: false }) 16 | ) 17 | const plogRedact = pino({ redact: ['a.b.c'] }, dest) 18 | delete require.cache[require.resolve('../../')] 19 | const plogAsyncRedact = require('../../')( 20 | { redact: ['a.b.c'] }, 21 | pino.destination({ dest: '/dev/null', sync: false }) 22 | ) 23 | delete require.cache[require.resolve('../../')] 24 | const plogUnsafeRedact = require('../../')({ redact: ['a.b.c'], safe: false }, dest) 25 | delete require.cache[require.resolve('../../')] 26 | const plogUnsafeAsyncRedact = require('../../')( 27 | { redact: ['a.b.c'], safe: false }, 28 | pino.destination({ dest: '/dev/null', sync: false }) 29 | ) 30 | 31 | const max = 10 32 | 33 | // note that "redact me." 
is the same amount of bytes as the censor: "[Redacted]" 34 | 35 | const run = bench([ 36 | function benchPinoNoRedact (cb) { 37 | for (var i = 0; i < max; i++) { 38 | plog.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 39 | } 40 | setImmediate(cb) 41 | }, 42 | function benchPinoRedact (cb) { 43 | for (var i = 0; i < max; i++) { 44 | plogRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 45 | } 46 | setImmediate(cb) 47 | }, 48 | function benchPinoUnsafeNoRedact (cb) { 49 | for (var i = 0; i < max; i++) { 50 | plogUnsafe.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 51 | } 52 | setImmediate(cb) 53 | }, 54 | function benchPinoUnsafeRedact (cb) { 55 | for (var i = 0; i < max; i++) { 56 | plogUnsafeRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 57 | } 58 | setImmediate(cb) 59 | }, 60 | function benchPinoAsyncNoRedact (cb) { 61 | for (var i = 0; i < max; i++) { 62 | plogAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 63 | } 64 | setImmediate(cb) 65 | }, 66 | function benchPinoAsyncRedact (cb) { 67 | for (var i = 0; i < max; i++) { 68 | plogAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 69 | } 70 | setImmediate(cb) 71 | }, 72 | function benchPinoUnsafeAsyncNoRedact (cb) { 73 | for (var i = 0; i < max; i++) { 74 | plogUnsafeAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 75 | } 76 | setImmediate(cb) 77 | }, 78 | function benchPinoUnsafeAsyncRedact (cb) { 79 | for (var i = 0; i < max; i++) { 80 | plogUnsafeAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) 81 | } 82 | setImmediate(cb) 83 | } 84 | ], 10000) 85 | 86 | run(run) 87 | -------------------------------------------------------------------------------- /benchmarks/long-string.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../') 5 | const bunyan = require('bunyan') 6 | const bole = require('bole')('bench') 7 | const winston = require('winston') 8 | const fs = require('node:fs') 9 | const dest = fs.createWriteStream('/dev/null') 10 | const plogNodeStream = pino(dest) 11 | delete require.cache[require.resolve('../')] 12 | const plogDest = require('../')(pino.destination('/dev/null')) 13 | delete require.cache[require.resolve('../')] 14 | const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) 15 | 16 | const crypto = require('crypto') 17 | 18 | const longStr = crypto.randomBytes(2000).toString() 19 | 20 | const max = 10 21 | const blog = bunyan.createLogger({ 22 | name: 'myapp', 23 | streams: [{ 24 | level: 'trace', 25 | stream: dest 26 | }] 27 | }) 28 | 29 | require('bole').output({ 30 | level: 'info', 31 | stream: dest 32 | }).setFastTime(true) 33 | 34 | const chill = winston.createLogger({ 35 | transports: [ 36 | new winston.transports.Stream({ 37 | stream: fs.createWriteStream('/dev/null') 38 | }) 39 | ] 40 | }) 41 | 42 | const run = bench([ 43 | function benchBunyan (cb) { 44 | for (var i = 0; i < max; i++) { 45 | blog.info(longStr) 46 | } 47 | setImmediate(cb) 48 | }, 49 | function benchWinston (cb) { 50 | for (var i = 0; i < max; i++) { 51 | chill.info(longStr) 52 | } 53 | setImmediate(cb) 54 | }, 55 | function benchBole (cb) { 56 | for (var i = 0; i < max; i++) { 57 | bole.info(longStr) 58 | } 59 | setImmediate(cb) 60 | }, 61 | function benchPino (cb) { 62 | for (var i = 0; i < max; i++) { 63 | plogDest.info(longStr) 64 | } 65 | setImmediate(cb) 66 | }, 
67 | function benchPinoMinLength (cb) { 68 | for (var i = 0; i < max; i++) { 69 | plogMinLength.info(longStr) 70 | } 71 | setImmediate(cb) 72 | }, 73 | function benchPinoNodeStream (cb) { 74 | for (var i = 0; i < max; i++) { 75 | plogNodeStream.info(longStr) 76 | } 77 | setImmediate(cb) 78 | } 79 | ], 1000) 80 | 81 | run(run) 82 | -------------------------------------------------------------------------------- /benchmarks/multistream.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const bunyan = require('bunyan') 5 | const pino = require('../') 6 | const fs = require('node:fs') 7 | const dest = fs.createWriteStream('/dev/null') 8 | 9 | const tenStreams = [ 10 | { stream: dest }, 11 | { stream: dest }, 12 | { stream: dest }, 13 | { stream: dest }, 14 | { stream: dest }, 15 | { level: 'debug', stream: dest }, 16 | { level: 'debug', stream: dest }, 17 | { level: 'trace', stream: dest }, 18 | { level: 'warn', stream: dest }, 19 | { level: 'fatal', stream: dest } 20 | ] 21 | const pinomsTen = pino({ level: 'debug' }, pino.multistream(tenStreams)) 22 | 23 | const fourStreams = [ 24 | { stream: dest }, 25 | { stream: dest }, 26 | { level: 'debug', stream: dest }, 27 | { level: 'trace', stream: dest } 28 | ] 29 | const pinomsFour = pino({ level: 'debug' }, pino.multistream(fourStreams)) 30 | 31 | const pinomsOne = pino({ level: 'info' }, pino.multistream(dest)) 32 | const blogOne = bunyan.createLogger({ 33 | name: 'myapp', 34 | streams: [{ stream: dest }] 35 | }) 36 | 37 | const blogTen = bunyan.createLogger({ 38 | name: 'myapp', 39 | streams: tenStreams 40 | }) 41 | const blogFour = bunyan.createLogger({ 42 | name: 'myapp', 43 | streams: fourStreams 44 | }) 45 | 46 | const max = 10 47 | const run = bench([ 48 | function benchBunyanTen (cb) { 49 | for (let i = 0; i < max; i++) { 50 | blogTen.info('hello world') 51 | blogTen.debug('hello world') 52 | blogTen.trace('hello world') 53 | blogTen.warn('hello world') 54 | blogTen.fatal('hello world') 55 | } 56 | setImmediate(cb) 57 | }, 58 | function benchPinoMSTen (cb) { 59 | for (let i = 0; i < max; i++) { 60 | pinomsTen.info('hello world') 61 | pinomsTen.debug('hello world') 62 | pinomsTen.trace('hello world') 63 | pinomsTen.warn('hello world') 64 | pinomsTen.fatal('hello world') 65 | } 66 | setImmediate(cb) 67 | }, 68 | function benchBunyanFour (cb) { 69 | for (let i = 0; i < max; i++) { 70 | blogFour.info('hello world') 71 | blogFour.debug('hello world') 72 | blogFour.trace('hello world') 73 | } 74 | setImmediate(cb) 75 | }, 76 | function benchPinoMSFour (cb) { 77 | for (let i = 0; i < max; i++) { 78 | pinomsFour.info('hello world') 79 | pinomsFour.debug('hello world') 80 | pinomsFour.trace('hello world') 81 | } 82 | setImmediate(cb) 83 | }, 84 | function benchBunyanOne (cb) { 85 | for (let i = 0; i < max; i++) { 86 | blogOne.info('hello world') 87 | } 88 | setImmediate(cb) 89 | }, 90 | function benchPinoMSOne (cb) { 91 | for (let i = 0; i < max; i++) { 92 | pinomsOne.info('hello world') 93 | } 94 | setImmediate(cb) 95 | } 96 | ], 10000) 97 | 98 | run() 99 | -------------------------------------------------------------------------------- /benchmarks/object.bench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const bench = require('fastbench') 4 | const pino = require('../') 5 | const bunyan = require('bunyan') 6 | const bole = require('bole')('bench') 7 | const winston = 
require('winston') 8 | const fs = require('node:fs') 9 | const dest = fs.createWriteStream('/dev/null') 10 | const loglevel = require('./utils/wrap-log-level')(dest) 11 | const plogNodeStream = pino(dest) 12 | delete require.cache[require.resolve('../')] 13 | const plogDest = require('../')(pino.destination('/dev/null')) 14 | delete require.cache[require.resolve('../')] 15 | const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) 16 | const blog = bunyan.createLogger({ 17 | name: 'myapp', 18 | streams: [{ 19 | level: 'trace', 20 | stream: dest 21 | }] 22 | }) 23 | require('bole').output({ 24 | level: 'info', 25 | stream: dest 26 | }).setFastTime(true) 27 | const chill = winston.createLogger({ 28 | transports: [ 29 | new winston.transports.Stream({ 30 | stream: fs.createWriteStream('/dev/null') 31 | }) 32 | ] 33 | }) 34 | 35 | const max = 10 36 | 37 | const run = bench([ 38 | function benchBunyanObj (cb) { 39 | for (var i = 0; i < max; i++) { 40 | blog.info({ hello: 'world' }) 41 | } 42 | setImmediate(cb) 43 | }, 44 | function benchWinstonObj (cb) { 45 | for (var i = 0; i < max; i++) { 46 | chill.info({ hello: 'world' }) 47 | } 48 | setImmediate(cb) 49 | }, 50 | function benchBoleObj (cb) { 51 | for (var i = 0; i < max; i++) { 52 | bole.info({ hello: 'world' }) 53 | } 54 | setImmediate(cb) 55 | }, 56 | function benchLogLevelObject (cb) { 57 | for (var i = 0; i < max; i++) { 58 | loglevel.info({ hello: 'world' }) 59 | } 60 | setImmediate(cb) 61 | }, 62 | function benchPinoObj (cb) { 63 | for (var i = 0; i < max; i++) { 64 | plogDest.info({ hello: 'world' }) 65 | } 66 | setImmediate(cb) 67 | }, 68 | function benchPinoMinLengthObj (cb) { 69 | for (var i = 0; i < max; i++) { 70 | plogMinLength.info({ hello: 'world' }) 71 | } 72 | setImmediate(cb) 73 | }, 74 | function benchPinoNodeStreamObj (cb) { 75 | for (var i = 0; i < max; i++) { 76 | plogNodeStream.info({ hello: 'world' }) 77 | } 78 | setImmediate(cb) 79 | } 80 | ], 10000) 81 | 82 | run(run) 83 | -------------------------------------------------------------------------------- /benchmarks/utils/generate-benchmark-doc.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const { join } = require('node:path') 3 | const { execSync } = require('node:child_process') 4 | 5 | const run = (type) => { 6 | process.stderr.write(`benchmarking ${type}\n`) 7 | return execSync(`node ${join(__dirname, 'runbench')} ${type} -q`) 8 | } 9 | 10 | console.log(` 11 | # Benchmarks 12 | 13 | \`pino.info('hello world')\`: 14 | 15 | \`\`\` 16 | ${run('basic')} 17 | \`\`\` 18 | 19 | \`pino.info({'hello': 'world'})\`: 20 | 21 | \`\`\` 22 | ${run('object')} 23 | \`\`\` 24 | 25 | \`pino.info(aBigDeeplyNestedObject)\`: 26 | 27 | \`\`\` 28 | ${run('deep-object')} 29 | \`\`\` 30 | 31 | \`pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})\`: 32 | 33 | For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended 34 | to include a timestamp and [bole](http://npm.im/bole) had 35 | \`fastTime\` mode switched on. 
36 | `) 37 | -------------------------------------------------------------------------------- /benchmarks/utils/runbench.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { type, platform, arch, release, cpus } = require('node:os') 4 | const { resolve, join } = require('node:path') 5 | const spawn = require('node:child_process').spawn 6 | const pump = require('pump') 7 | const split = require('split2') 8 | const through = require('through2') 9 | const steed = require('steed') 10 | 11 | function usage () { 12 | console.log(` 13 | Pino Benchmarks 14 | 15 | To run a benchmark, specify which to run: 16 | 17 | ・all ⁃ run all benchmarks (takes a while) 18 | ・basic ⁃ log a simple string 19 | ・object ⁃ logging a basic object 20 | ・deep-object ⁃ logging a large object 21 | ・multi-arg ⁃ multiple log method arguments 22 | ・child ⁃ child from a parent 23 | ・child-child ⁃ child from a child 24 | ・child-creation ⁃ child constructor 25 | ・formatters ⁃ difference between with or without formatters 26 | 27 | Example: 28 | 29 | node runbench basic 30 | `) 31 | } 32 | 33 | if (!process.argv[2]) { 34 | usage() 35 | process.exit() 36 | } 37 | 38 | const quiet = process.argv[3] === '-q' 39 | 40 | const selectedBenchmark = process.argv[2].toLowerCase() 41 | const benchmarkDir = resolve(__dirname, '..') 42 | const benchmarks = { 43 | basic: 'basic.bench.js', 44 | object: 'object.bench.js', 45 | 'deep-object': 'deep-object.bench.js', 46 | 'multi-arg': 'multi-arg.bench.js', 47 | 'long-string': 'long-string.bench.js', 48 | child: 'child.bench.js', 49 | 'child-child': 'child-child.bench.js', 50 | 'child-creation': 'child-creation.bench.js', 51 | formatters: 'formatters.bench.js' 52 | } 53 | 54 | function runBenchmark (name, done) { 55 | const benchmarkResults = {} 56 | benchmarkResults[name] = {} 57 | 58 | const processor = through(function (line, enc, cb) { 59 | const [label, time] = ('' + line).split(': ') 60 | const [target, iterations] = label.split('*') 61 | const logger = target.replace('bench', '') 62 | 63 | if (!benchmarkResults[name][logger]) benchmarkResults[name][logger] = [] 64 | 65 | benchmarkResults[name][logger].push({ 66 | time: time.replace('ms', ''), 67 | iterations: iterations.replace(':', '') 68 | }) 69 | 70 | cb() 71 | }) 72 | 73 | if (quiet === false) console.log(`Running ${name.toUpperCase()} benchmark\n`) 74 | 75 | const benchmark = spawn( 76 | process.argv[0], 77 | [join(benchmarkDir, benchmarks[name])] 78 | ) 79 | 80 | if (quiet === false) { 81 | benchmark.stdout.pipe(process.stdout) 82 | } 83 | 84 | pump(benchmark.stdout, split(), processor) 85 | 86 | benchmark.on('exit', () => { 87 | console.log() 88 | if (done && typeof done === 'function') done(null, benchmarkResults) 89 | }) 90 | } 91 | 92 | function sum (arr) { 93 | let result = 0 94 | for (var i = 0; i < arr.length; i += 1) { 95 | result += Number.parseFloat(arr[i].time) 96 | } 97 | return result 98 | } 99 | 100 | function displayResults (results) { 101 | if (quiet === false) console.log('==========') 102 | const benchNames = Object.keys(results) 103 | for (var i = 0; i < benchNames.length; i += 1) { 104 | console.log(`${benchNames[i].toUpperCase()} benchmark averages`) 105 | const benchmark = results[benchNames[i]] 106 | const loggers = Object.keys(benchmark) 107 | for (var j = 0; j < loggers.length; j += 1) { 108 | const logger = benchmark[loggers[j]] 109 | const average = sum(logger) / logger.length 110 | console.log(`${loggers[j]} average: ${average.toFixed(3)}ms`) 
111 | } 112 | } 113 | if (quiet === false) { 114 | console.log('==========') 115 | console.log( 116 | `System: ${type()}/${platform()} ${arch()} ${release()}`, 117 | `~ ${cpus()[0].model} (cores/threads: ${cpus().length})` 118 | ) 119 | } 120 | } 121 | 122 | function toBench (done) { 123 | runBenchmark(this.name, done) 124 | } 125 | 126 | const benchQueue = [] 127 | if (selectedBenchmark !== 'all') { 128 | benchQueue.push(toBench.bind({ name: selectedBenchmark })) 129 | } else { 130 | const keys = Object.keys(benchmarks) 131 | for (var i = 0; i < keys.length; i += 1) { 132 | benchQueue.push(toBench.bind({ name: keys[i] })) 133 | } 134 | } 135 | steed.series(benchQueue, function (err, results) { 136 | if (err) return console.error(err.message) 137 | results.forEach(displayResults) 138 | }) 139 | -------------------------------------------------------------------------------- /benchmarks/utils/wrap-log-level.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { readFileSync } = require('node:fs') 4 | const vm = require('vm') 5 | const { join } = require('node:path') 6 | const code = readFileSync( 7 | join(__dirname, '..', '..', 'node_modules', 'loglevel', 'lib', 'loglevel.js') 8 | ) 9 | const { Console } = require('console') 10 | 11 | function build (dest) { 12 | const sandbox = { 13 | module: {}, 14 | console: new Console(dest, dest) 15 | } 16 | const context = vm.createContext(sandbox) 17 | 18 | const script = new vm.Script(code) 19 | script.runInContext(context) 20 | 21 | const loglevel = sandbox.log 22 | 23 | const originalFactory = loglevel.methodFactory 24 | loglevel.methodFactory = function (methodName, logLevel, loggerName) { 25 | const rawMethod = originalFactory(methodName, logLevel, loggerName) 26 | 27 | return function () { 28 | const time = new Date() 29 | let array 30 | if (typeof arguments[0] === 'string') { 31 | arguments[0] = '[' + time.toISOString() + '] ' + arguments[0] 32 | rawMethod.apply(null, arguments) 33 | } else { 34 | array = new Array(arguments.length + 1) 35 | array[0] = '[' + time.toISOString() + ']' 36 | for (var i = 0; i < arguments.length; i++) { 37 | array[i + 1] = arguments[i] 38 | } 39 | rawMethod.apply(null, array) 40 | } 41 | } 42 | } 43 | 44 | loglevel.setLevel(loglevel.levels.INFO) 45 | return loglevel 46 | } 47 | 48 | module.exports = build 49 | 50 | if (require.main === module) { 51 | const loglevel = build(process.stdout) 52 | loglevel.info('hello') 53 | loglevel.info({ hello: 'world' }) 54 | loglevel.info('hello %j', { hello: 'world' }) 55 | } 56 | -------------------------------------------------------------------------------- /bin.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | console.error( 3 | '`pino` cli has been removed. 
Use `pino-pretty` cli instead.\n' + 4 | '\nSee: https://github.com/pinojs/pino-pretty' 5 | ) 6 | process.exit(1) 7 | -------------------------------------------------------------------------------- /build/sync-version.js: -------------------------------------------------------------------------------- 1 | const fs = require('node:fs') 2 | const path = require('node:path') 3 | const { version } = require('../package.json') 4 | 5 | const metaContent = `'use strict' 6 | 7 | module.exports = { version: '${version}' } 8 | ` 9 | 10 | fs.writeFileSync(path.resolve('./lib/meta.js'), metaContent, { encoding: 'utf-8' }) 11 | -------------------------------------------------------------------------------- /docs/asynchronous.md: -------------------------------------------------------------------------------- 1 | # Asynchronous Logging 2 | 3 | Asynchronous logging enables the minimum overhead of Pino. 4 | Asynchronous logging works by buffering log messages and writing them in larger chunks. 5 | 6 | ```js 7 | const pino = require('pino') 8 | const logger = pino(pino.destination({ 9 | dest: './my-file', // omit for stdout 10 | minLength: 4096, // Buffer before writing 11 | sync: false // Asynchronous logging 12 | })) 13 | ``` 14 | 15 | It's always possible to turn on synchronous logging by passing `sync: true`. 16 | In this mode of operation, log messages are directly written to the 17 | output stream as the messages are generated with a _blocking_ operation. 18 | 19 | * See [`pino.destination`](/docs/api.md#pino-destination) 20 | * `pino.destination` is implemented on [`sonic-boom` ⇗](https://github.com/mcollina/sonic-boom). 21 | 22 | ### AWS Lambda 23 | 24 | Asynchronous logging is disabled by default on AWS Lambda or any other environment 25 | that modifies `process.stdout`. If forcefully turned on, we recommend calling `dest.flushSync()` at the end 26 | of each function execution to avoid losing data. 27 | 28 | ## Caveats 29 | 30 | Asynchronous logging has a couple of important caveats: 31 | 32 | * As opposed to the synchronous mode, there is not a one-to-one relationship between 33 | calls to logging methods (e.g. `logger.info`) and writes to a log file 34 | * There is a possibility of the most recently buffered log messages being lost 35 | in case of a system failure, e.g. a power cut. 
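A minimal sketch of guarding against that loss, assuming the `pino.destination` instance is kept in scope (the `beforeExit` hook below is just one illustrative place to flush):

```js
const pino = require('pino')

// Keep a reference to the destination so the buffer can be flushed explicitly.
const dest = pino.destination({ dest: './my-file', minLength: 4096, sync: false })
const logger = pino(dest)

logger.info('buffered until minLength is reached or the buffer is flushed')

// Flush synchronously before the process exits or is frozen,
// e.g. at the end of an AWS Lambda invocation as noted above.
process.on('beforeExit', () => {
  dest.flushSync()
})
```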
36 | 37 | See also: 38 | 39 | * [`pino.destination` API](/docs/api.md#pino-destination) 40 | * [`destination` parameter](/docs/api.md#destination) 41 | -------------------------------------------------------------------------------- /docs/benchmarks.md: -------------------------------------------------------------------------------- 1 | 2 | # Benchmarks 3 | 4 | `pino.info('hello world')`: 5 | 6 | ``` 7 | 8 | BASIC benchmark averages 9 | Bunyan average: 377.434ms 10 | Winston average: 270.249ms 11 | Bole average: 172.690ms 12 | Debug average: 220.527ms 13 | LogLevel average: 222.802ms 14 | Pino average: 114.801ms 15 | PinoMinLength average: 70.968ms 16 | PinoNodeStream average: 159.192ms 17 | 18 | ``` 19 | 20 | `pino.info({'hello': 'world'})`: 21 | 22 | ``` 23 | 24 | OBJECT benchmark averages 25 | BunyanObj average: 410.379ms 26 | WinstonObj average: 273.120ms 27 | BoleObj average: 185.069ms 28 | LogLevelObject average: 433.425ms 29 | PinoObj average: 119.315ms 30 | PinoMinLengthObj average: 76.968ms 31 | PinoNodeStreamObj average: 164.268ms 32 | 33 | ``` 34 | 35 | `pino.info(aBigDeeplyNestedObject)`: 36 | 37 | ``` 38 | 39 | DEEP-OBJECT benchmark averages 40 | BunyanDeepObj average: 1.839ms 41 | WinstonDeepObj average: 5.604ms 42 | BoleDeepObj average: 3.422ms 43 | LogLevelDeepObj average: 11.716ms 44 | PinoDeepObj average: 2.256ms 45 | PinoMinLengthDeepObj average: 2.240ms 46 | PinoNodeStreamDeepObj average: 2.595ms 47 | 48 | ``` 49 | 50 | `pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})`: 51 | 52 | For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended 53 | to include a timestamp and [bole](http://npm.im/bole) had 54 | `fastTime` mode switched on. 55 | 56 | -------------------------------------------------------------------------------- /docs/bundling.md: -------------------------------------------------------------------------------- 1 | # Bundling 2 | 3 | Due to its internal architecture based on Worker Threads, it is not possible to bundle Pino *without* generating additional files. 4 | 5 | In particular, a bundler must ensure that the following files are also bundled separately: 6 | 7 | * `lib/worker.js` from the `thread-stream` dependency 8 | * `file.js` 9 | * `lib/worker.js` 10 | * Any transport used by the user (like `pino-pretty`) 11 | 12 | Once the files above have been generated, the bundler must also add information about the files above by injecting code that sets `__bundlerPathsOverrides` in the `globalThis` object. 13 | 14 | The variable is an object whose keys are identifiers for the files and the values are the paths of the files relative to the currently bundled files. 15 | 16 | Example: 17 | 18 | ```javascript 19 | // Inject this using your bundle plugin 20 | globalThis.__bundlerPathsOverrides = { 21 | 'thread-stream-worker': pinoWebpackAbsolutePath('./thread-stream-worker.js'), 22 | 'pino/file': pinoWebpackAbsolutePath('./pino-file.js'), 23 | 'pino-worker': pinoWebpackAbsolutePath('./pino-worker.js'), 24 | 'pino-pretty': pinoWebpackAbsolutePath('./pino-pretty.js'), 25 | }; 26 | ``` 27 | 28 | Note that `pino/file`, `pino-worker` and `thread-stream-worker` are required identifiers. Other identifiers are possible based on the user configuration.
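For reference, a hand-rolled equivalent of the snippet above is sketched below. It assumes a CommonJS bundle with the generated files emitted next to it; the file names are illustrative only.

```javascript
// Minimal manual override, assuming the extra files sit beside the bundle.
const { join } = require('node:path')

globalThis.__bundlerPathsOverrides = {
  'thread-stream-worker': join(__dirname, 'thread-stream-worker.js'),
  'pino/file': join(__dirname, 'pino-file.js'),
  'pino-worker': join(__dirname, 'pino-worker.js'),
  'pino-pretty': join(__dirname, 'pino-pretty.js')
}
```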
29 | 30 | ## Webpack Plugin 31 | 32 | If you are a Webpack user, you can achieve this with [pino-webpack-plugin](https://github.com/pinojs/pino-webpack-plugin) without manual configuration of `__bundlerPathsOverrides`; however, you still need to configure it manually if you are using other bundlers. 33 | 34 | ## Esbuild Plugin 35 | 36 | [esbuild-plugin-pino](https://github.com/davipon/esbuild-plugin-pino) is the esbuild plugin to generate extra pino files for bundling. 37 | 38 | ## Bun Plugin 39 | 40 | [bun-plugin-pino](https://github.com/vktrl/bun-plugin-pino) is the Bun plugin to generate extra pino files for bundling. -------------------------------------------------------------------------------- /docs/child-loggers.md: -------------------------------------------------------------------------------- 1 | # Child loggers 2 | 3 | Let's assume we want to have `"module":"foo"` added to every log within a 4 | module `foo.js`. 5 | 6 | To accomplish this, simply use a child logger: 7 | 8 | ```js 9 | 'use strict' 10 | // imports a pino logger instance of `require('pino')()` 11 | const parentLogger = require('./lib/logger') 12 | const log = parentLogger.child({module: 'foo'}) 13 | 14 | function doSomething () { 15 | log.info('doSomething invoked') 16 | } 17 | 18 | module.exports = { 19 | doSomething 20 | } 21 | ``` 22 | 23 | ## Cost of child logging 24 | 25 | Child logger creation is fast: 26 | 27 | ``` 28 | benchBunyanCreation*10000: 564.514ms 29 | benchBoleCreation*10000: 283.276ms 30 | benchPinoCreation*10000: 258.745ms 31 | benchPinoExtremeCreation*10000: 150.506ms 32 | ``` 33 | 34 | Logging through a child logger has little performance penalty: 35 | 36 | ``` 37 | benchBunyanChild*10000: 556.275ms 38 | benchBoleChild*10000: 288.124ms 39 | benchPinoChild*10000: 231.695ms 40 | benchPinoExtremeChild*10000: 122.117ms 41 | ``` 42 | 43 | Logging via the child logger of a child logger also has negligible overhead: 44 | 45 | ``` 46 | benchBunyanChildChild*10000: 559.082ms 47 | benchPinoChildChild*10000: 229.264ms 48 | benchPinoExtremeChildChild*10000: 127.753ms 49 | ``` 50 | 51 | ## Duplicate keys caveat 52 | 53 | Naming conflicts can arise between child loggers and 54 | children of child loggers. 55 | 56 | This isn't as bad as it sounds, even if the same keys between 57 | parent and child loggers are used, Pino resolves the conflict in the sanest way. 58 | 59 | For example, consider the following: 60 | 61 | ```js 62 | const pino = require('pino') 63 | pino(pino.destination('./my-log')) 64 | .child({a: 'property'}) 65 | .child({a: 'prop'}) 66 | .info('howdy') 67 | ``` 68 | 69 | ```sh 70 | $ cat my-log 71 | {"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":1459534114473,"a":"property","a":"prop"} 72 | ``` 73 | 74 | Notice how there are two keys named `a` in the JSON output. The sub-child's properties 75 | appear after the parent child properties. 76 | 77 | At some point, the logs will most likely be processed (for instance with a [transport](transports.md)), 78 | and this generally involves parsing. 
`JSON.parse` will return an object where the conflicting 79 | namespace holds the final value assigned to it: 80 | 81 | ```sh 82 | $ cat my-log | node -e "process.stdin.once('data', (line) => console.log(JSON.stringify(JSON.parse(line))))" 83 | {"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":"2016-04-01T18:08:34.473Z","a":"prop"} 84 | ``` 85 | 86 | Ultimately the conflict is resolved by taking the last value, which aligns with Bunyan's child logging 87 | behavior. 88 | 89 | There may be cases where this edge case becomes problematic if a JSON parser with alternative behavior 90 | is used to process the logs. It's recommended to be conscious of namespace conflicts with child loggers, 91 | in light of an expected log processing approach. 92 | 93 | One of Pino's performance tricks is to avoid building objects and stringifying 94 | them, so we're building strings instead. This is why duplicate keys between 95 | parents and children will end up in the log output. 96 | -------------------------------------------------------------------------------- /docs/lts.md: -------------------------------------------------------------------------------- 1 | ## Long Term Support 2 | 3 | Pino's Long Term Support (LTS) is provided according to the schedule laid 4 | out in this document: 5 | 6 | 1. Major releases, "X" release of [semantic versioning][semver] X.Y.Z release 7 | versions, are supported for a minimum period of six months from their release 8 | date. The release date of any specific version can be found at 9 | [https://github.com/pinojs/pino/releases](https://github.com/pinojs/pino/releases). 10 | 11 | 1. Major releases will receive security updates for an additional six months 12 | from the release of the next major release. After this period 13 | we will still review and release security fixes as long as they are 14 | provided by the community and they do not violate other constraints, 15 | e.g. minimum supported Node.js version. 16 | 17 | 1. Major releases will be tested and verified against all Node.js 18 | release lines that are supported by the 19 | [Node.js LTS policy](https://github.com/nodejs/Release) within the 20 | LTS period of that given Pino release line. This implies that only 21 | the latest Node.js release of a given line is supported. 22 | 23 | A "month" is defined as 30 consecutive days. 24 | 25 | > ## Security Releases and Semver 26 | > 27 | > As a consequence of providing long-term support for major releases, there 28 | > are occasions where we need to release breaking changes as a _minor_ 29 | > version release. Such changes will _always_ be noted in the 30 | > [release notes](https://github.com/pinojs/pino/releases). 31 | > 32 | > To avoid automatically receiving breaking security updates it is possible to use 33 | > the tilde (`~`) range qualifier. For example, to get patches for the 6.1 34 | > release, and avoid automatically updating to the 6.1 release, specify 35 | > the dependency as `"pino": "~6.1.x"`. This will leave your application vulnerable, 36 | > so please use with caution. 
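> For example, a `package.json` dependency entry pinned this way (the version below is purely illustrative) would look like:
>
> ```json
> {
>   "dependencies": {
>     "pino": "~6.1.x"
>   }
> }
> ```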
37 | 38 | [semver]: https://semver.org/ 39 | 40 | 41 | 42 | ### Schedule 43 | 44 | | Version | Release Date | End Of LTS Date | Node.js | 45 | | :------ | :----------- | :-------------- | :------------------- | 46 | | 9.x | 2024-04-26 | TBD | 18, 20, 22 | 47 | | 8.x | 2022-06-01 | 2024-10-26 | 14, 16, 18, 20 | 48 | | 7.x | 2021-10-14 | 2023-06-01 | 12, 14, 16 | 49 | | 6.x | 2020-03-07 | 2022-04-14 | 10, 12, 14, 16 | 50 | 51 | 52 | 53 | ### CI tested operating systems 54 | 55 | Pino uses GitHub Actions for CI testing, please refer to 56 | [GitHub's documentation regarding workflow runners](https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources) 57 | for further details on what the latest virtual environment is in relation to 58 | the YAML workflow labels below: 59 | 60 | | OS | YAML Workflow Label | Node.js | 61 | |---------|------------------------|--------------| 62 | | Linux | `ubuntu-latest` | 18, 20, 22 | 63 | | Windows | `windows-latest` | 18, 20, 22 | 64 | | MacOS | `macos-latest` | 18, 20, 22 | 65 | -------------------------------------------------------------------------------- /docs/pretty.md: -------------------------------------------------------------------------------- 1 | # Pretty Printing 2 | 3 | By default, Pino log lines are newline delimited JSON (NDJSON). This is perfect 4 | for production usage and long-term storage. It's not so great for development 5 | environments. Thus, Pino logs can be prettified by using a Pino prettifier 6 | module like [`pino-pretty`][pp]: 7 | 8 | 1. Install a prettifier module as a separate dependency, e.g. `npm install pino-pretty`. 9 | 2. Instantiate the logger with the `transport.target` option set to `'pino-pretty'`: 10 | ```js 11 | const pino = require('pino') 12 | const logger = pino({ 13 | transport: { 14 | target: 'pino-pretty' 15 | }, 16 | }) 17 | 18 | logger.info('hi') 19 | ``` 20 | 3. The transport option can also have an options object containing `pino-pretty` options: 21 | ```js 22 | const pino = require('pino') 23 | const logger = pino({ 24 | transport: { 25 | target: 'pino-pretty', 26 | options: { 27 | colorize: true 28 | } 29 | } 30 | }) 31 | 32 | logger.info('hi') 33 | ``` 34 | 35 | [pp]: https://github.com/pinojs/pino-pretty 36 | -------------------------------------------------------------------------------- /docs/redaction.md: -------------------------------------------------------------------------------- 1 | # Redaction 2 | 3 | > Redaction is not supported in the browser [#670](https://github.com/pinojs/pino/issues/670) 4 | 5 | To redact sensitive information, supply paths to keys that hold sensitive data 6 | using the `redact` option. 
Note that paths that contain hyphens need to use 7 | brackets to access the hyphenated property: 8 | 9 | ```js 10 | const logger = require('.')({ 11 | redact: ['key', 'path.to.key', 'stuff.thats[*].secret', 'path["with-hyphen"]'] 12 | }) 13 | 14 | logger.info({ 15 | key: 'will be redacted', 16 | path: { 17 | to: {key: 'sensitive', another: 'thing'} 18 | }, 19 | stuff: { 20 | thats: [ 21 | {secret: 'will be redacted', logme: 'will be logged'}, 22 | {secret: 'as will this', logme: 'as will this'} 23 | ] 24 | } 25 | }) 26 | ``` 27 | 28 | This will output: 29 | 30 | ```JSON 31 | {"level":30,"time":1527777350011,"pid":3186,"hostname":"Davids-MacBook-Pro-3.local","key":"[Redacted]","path":{"to":{"key":"[Redacted]","another":"thing"}},"stuff":{"thats":[{"secret":"[Redacted]","logme":"will be logged"},{"secret":"[Redacted]","logme":"as will this"}]}} 32 | ``` 33 | 34 | The `redact` option can take an array (as shown in the above example) or 35 | an object. This allows control over *how* information is redacted. 36 | 37 | For instance, setting the censor: 38 | 39 | ```js 40 | const logger = require('.')({ 41 | redact: { 42 | paths: ['key', 'path.to.key', 'stuff.thats[*].secret'], 43 | censor: '**GDPR COMPLIANT**' 44 | } 45 | }) 46 | 47 | logger.info({ 48 | key: 'will be redacted', 49 | path: { 50 | to: {key: 'sensitive', another: 'thing'} 51 | }, 52 | stuff: { 53 | thats: [ 54 | {secret: 'will be redacted', logme: 'will be logged'}, 55 | {secret: 'as will this', logme: 'as will this'} 56 | ] 57 | } 58 | }) 59 | ``` 60 | 61 | This will output: 62 | 63 | ```JSON 64 | {"level":30,"time":1527778563934,"pid":3847,"hostname":"Davids-MacBook-Pro-3.local","key":"**GDPR COMPLIANT**","path":{"to":{"key":"**GDPR COMPLIANT**","another":"thing"}},"stuff":{"thats":[{"secret":"**GDPR COMPLIANT**","logme":"will be logged"},{"secret":"**GDPR COMPLIANT**","logme":"as will this"}]}} 65 | ``` 66 | 67 | The `redact.remove` option also allows for the key and value to be removed from output: 68 | 69 | ```js 70 | const logger = require('.')({ 71 | redact: { 72 | paths: ['key', 'path.to.key', 'stuff.thats[*].secret'], 73 | remove: true 74 | } 75 | }) 76 | 77 | logger.info({ 78 | key: 'will be redacted', 79 | path: { 80 | to: {key: 'sensitive', another: 'thing'} 81 | }, 82 | stuff: { 83 | thats: [ 84 | {secret: 'will be redacted', logme: 'will be logged'}, 85 | {secret: 'as will this', logme: 'as will this'} 86 | ] 87 | } 88 | }) 89 | ``` 90 | 91 | This will output 92 | 93 | ```JSON 94 | {"level":30,"time":1527782356751,"pid":5758,"hostname":"Davids-MacBook-Pro-3.local","path":{"to":{"another":"thing"}},"stuff":{"thats":[{"logme":"will be logged"},{"logme":"as will this"}]}} 95 | ``` 96 | 97 | See [pino options in API](/docs/api.md#redact-array-object) for `redact` API details. 98 | 99 | 100 | ## Path Syntax 101 | 102 | The syntax for paths supplied to the `redact` option conform to the syntax in path lookups 103 | in standard ECMAScript, with two additions: 104 | 105 | * paths may start with bracket notation 106 | * paths may contain the asterisk `*` to denote a wildcard 107 | * paths are **case sensitive** 108 | 109 | By way of example, the following are all valid paths: 110 | 111 | * `a.b.c` 112 | * `a["b-c"].d` 113 | * `["a-b"].c` 114 | * `a.b.*` 115 | * `a[*].b` 116 | 117 | ## Overhead 118 | 119 | Pino's redaction functionality is built on top of [`fast-redact`](https://github.com/davidmarkclements/fast-redact) 120 | which adds about 2% overhead to `JSON.stringify` when using paths without wildcards. 
121 | 122 | When used with pino logger with a single redacted path, any overhead is within noise - 123 | a way to deterministically measure its effect has not been found. This is because it is not a bottleneck. 124 | 125 | However, wildcard redaction does carry a non-trivial cost relative to explicitly declaring the keys 126 | (50% in a case where four keys are redacted across two objects). See 127 | the [`fast-redact` benchmarks](https://github.com/davidmarkclements/fast-redact#benchmarks) for details. 128 | 129 | ## Safety 130 | 131 | The `redact` option is intended as an initialization time configuration option. 132 | Path strings must not originate from user input. 133 | The `fast-redact` module uses a VM context to syntax check the paths, user input 134 | should never be combined with such an approach. See the [`fast-redact` Caveat](https://github.com/davidmarkclements/fast-redact#caveat) 135 | and the [`fast-redact` Approach](https://github.com/davidmarkclements/fast-redact#approach) for in-depth information. 136 | -------------------------------------------------------------------------------- /docsify/sidebar.md: -------------------------------------------------------------------------------- 1 | * [Readme](/) 2 | * [API](/docs/api.md) 3 | * [Browser API](/docs/browser.md) 4 | * [Redaction](/docs/redaction.md) 5 | * [Child Loggers](/docs/child-loggers.md) 6 | * [Transports](/docs/transports.md) 7 | * [Web Frameworks](/docs/web.md) 8 | * [Pretty Printing](/docs/pretty.md) 9 | * [Asynchronous Logging](/docs/asynchronous.md) 10 | * [Ecosystem](/docs/ecosystem.md) 11 | * [Benchmarks](/docs/benchmarks.md) 12 | * [Long Term Support](/docs/lts.md) 13 | * [Help](/docs/help.md) 14 | * [Log rotation](/docs/help.md#rotate) 15 | * [Reopening log files](/docs/help.md#reopening) 16 | * [Saving to multiple files](/docs/help.md#multiple) 17 | * [Log filtering](/docs/help.md#filter-logs) 18 | * [Transports and systemd](/docs/help.md#transport-systemd) 19 | * [Duplicate keys](/docs/help.md#dupe-keys) 20 | * [Log levels as labels instead of numbers](/docs/help.md#level-string) 21 | * [Pino with `debug`](/docs/help.md#debug) 22 | * [Unicode and Windows terminal](/docs/help.md#windows) 23 | * [Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels](/docs/help.md#stackdriver) 24 | * [Avoid Message Conflict](/docs/help.md#avoid-message-conflict) 25 | * [Best performance for logging to `stdout`](/docs/help.md#best-performance-for-stdout) 26 | * [Testing](/docs/help.md#testing) 27 | -------------------------------------------------------------------------------- /examples/basic.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | // Pino's primary usage writes ndjson to `stdout`: 4 | const pino = require('..')() 5 | 6 | // However, if "human readable" output is desired, 7 | // `pino-pretty` can be provided as the destination 8 | // stream by uncommenting the following line in place 9 | // of the previous declaration: 10 | // const pino = require('..')(require('pino-pretty')()) 11 | 12 | pino.info('hello world') 13 | pino.error('this is at error level') 14 | pino.info('the answer is %d', 42) 15 | pino.info({ obj: 42 }, 'hello world') 16 | pino.info({ obj: 42, b: 2 }, 'hello world') 17 | pino.info({ nested: { obj: 42 } }, 'nested') 18 | setImmediate(() => { 19 | pino.info('after setImmediate') 20 | }) 21 | pino.error(new Error('an error')) 22 | 23 | const child = pino.child({ a: 'property' }) 24 | child.info('hello child!') 25 | 
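// A child of a child inherits the bindings of both ancestors, so the line
// below logs both `a: 'property'` and `another: 'property'` (see docs/child-loggers.md).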
26 | const childsChild = child.child({ another: 'property' }) 27 | childsChild.info('hello baby..') 28 | 29 | pino.debug('this should be mute') 30 | 31 | pino.level = 'trace' 32 | 33 | pino.debug('this is a debug statement') 34 | 35 | pino.child({ another: 'property' }).debug('this is a debug statement via child') 36 | pino.trace('this is a trace statement') 37 | 38 | pino.debug('this is a "debug" statement with "') 39 | 40 | pino.info(new Error('kaboom')) 41 | pino.info(null) 42 | 43 | pino.info(new Error('kaboom'), 'with', 'a', 'message') 44 | -------------------------------------------------------------------------------- /examples/transport.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = require('..') 4 | const { tmpdir } = require('node:os') 5 | const { join } = require('node:path') 6 | 7 | const file = join(tmpdir(), `pino-${process.pid}-example`) 8 | 9 | const transport = pino.transport({ 10 | targets: [{ 11 | level: 'warn', 12 | target: 'pino/file', 13 | options: { 14 | destination: file 15 | } 16 | /* 17 | }, { 18 | level: 'info', 19 | target: 'pino-elasticsearch', 20 | options: { 21 | node: 'http://localhost:9200' 22 | } 23 | */ 24 | }, { 25 | level: 'info', 26 | target: 'pino-pretty' 27 | }] 28 | }) 29 | 30 | const logger = pino(transport) 31 | 32 | logger.info({ 33 | file 34 | }, 'logging destination') 35 | 36 | logger.info('hello world') 37 | logger.error('this is at error level') 38 | logger.info('the answer is %d', 42) 39 | logger.info({ obj: 42 }, 'hello world') 40 | logger.info({ obj: 42, b: 2 }, 'hello world') 41 | logger.info({ nested: { obj: 42 } }, 'nested') 42 | logger.warn('WARNING!') 43 | setImmediate(() => { 44 | logger.info('after setImmediate') 45 | }) 46 | logger.error(new Error('an error')) 47 | 48 | const child = logger.child({ a: 'property' }) 49 | child.info('hello child!') 50 | 51 | const childsChild = child.child({ another: 'property' }) 52 | childsChild.info('hello baby..') 53 | 54 | logger.debug('this should be mute') 55 | 56 | logger.level = 'trace' 57 | 58 | logger.debug('this is a debug statement') 59 | 60 | logger.child({ another: 'property' }).debug('this is a debug statement via child') 61 | logger.trace('this is a trace statement') 62 | 63 | logger.debug('this is a "debug" statement with "') 64 | 65 | logger.info(new Error('kaboom')) 66 | logger.info(null) 67 | 68 | logger.info(new Error('kaboom'), 'with', 'a', 'message') 69 | -------------------------------------------------------------------------------- /favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/favicon-16x16.png -------------------------------------------------------------------------------- /favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/favicon-32x32.png -------------------------------------------------------------------------------- /favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/favicon.ico -------------------------------------------------------------------------------- /file.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = 
require('./pino') 4 | const { once } = require('node:events') 5 | 6 | module.exports = async function (opts = {}) { 7 | const destOpts = Object.assign({}, opts, { dest: opts.destination || 1, sync: false }) 8 | delete destOpts.destination 9 | const destination = pino.destination(destOpts) 10 | await once(destination, 'ready') 11 | return destination 12 | } 13 | -------------------------------------------------------------------------------- /inc-version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | PATH=./node_modules/.bin:${PATH} 6 | CURRENT_VERSION=$(jq -r .version package.json) 7 | 8 | case ${1} in 9 | Major | MAJOR | major) 10 | LEVEL=major 11 | ;; 12 | 13 | Minor | MINOR | minor) 14 | LEVEL=minor 15 | ;; 16 | 17 | Patch | PATCH | patch) 18 | LEVEL=patch 19 | ;; 20 | 21 | *) 22 | LEVEL=patch 23 | ;; 24 | esac 25 | 26 | NEW_VERSION=$(semver -i ${LEVEL} ${CURRENT_VERSION}) 27 | echo "${CURRENT_VERSION} => ${NEW_VERSION}" 28 | read -n 1 -s -r -p "Press any key to continue (ctrl+c to abort)..." 29 | echo "" 30 | 31 | echo "Patching package.json..." 32 | cat package.json | \ 33 | jq --arg vers "${NEW_VERSION}" '.version = $vers' | \ 34 | tee package.json 1>/dev/null 35 | 36 | echo "Patching lib/meta.js ..." 37 | SED_SCRIPT=$(printf 's/%s/%s/' ${CURRENT_VERSION//\./\\.} ${NEW_VERSION//\./\\.}) 38 | cat ./lib/meta.js | \ 39 | sed -e ${SED_SCRIPT} | \ 40 | tee ./lib/meta.js 1>/dev/null 41 | 42 | echo "Done." 43 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Pino - Super fast, all natural JSON logger for Node.js 6 | 7 | 8 | 9 | 33 | 34 | 35 | 36 | 37 |
38 | 39 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /lib/caller.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | function noOpPrepareStackTrace (_, stack) { 4 | return stack 5 | } 6 | 7 | module.exports = function getCallers () { 8 | const originalPrepare = Error.prepareStackTrace 9 | Error.prepareStackTrace = noOpPrepareStackTrace 10 | const stack = new Error().stack 11 | Error.prepareStackTrace = originalPrepare 12 | 13 | if (!Array.isArray(stack)) { 14 | return undefined 15 | } 16 | 17 | const entries = stack.slice(2) 18 | 19 | const fileNames = [] 20 | 21 | for (const entry of entries) { 22 | if (!entry) { 23 | continue 24 | } 25 | 26 | fileNames.push(entry.getFileName()) 27 | } 28 | 29 | return fileNames 30 | } 31 | -------------------------------------------------------------------------------- /lib/constants.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Represents default log level values 3 | * 4 | * @enum {number} 5 | */ 6 | const DEFAULT_LEVELS = { 7 | trace: 10, 8 | debug: 20, 9 | info: 30, 10 | warn: 40, 11 | error: 50, 12 | fatal: 60 13 | } 14 | 15 | /** 16 | * Represents sort order direction: `ascending` or `descending` 17 | * 18 | * @enum {string} 19 | */ 20 | const SORTING_ORDER = { 21 | ASC: 'ASC', 22 | DESC: 'DESC' 23 | } 24 | 25 | module.exports = { 26 | DEFAULT_LEVELS, 27 | SORTING_ORDER 28 | } 29 | -------------------------------------------------------------------------------- /lib/deprecations.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const warning = require('process-warning')() 4 | module.exports = warning 5 | 6 | // const warnName = 'PinoWarning' 7 | 8 | // warning.create(warnName, 'PINODEP010', 'A new deprecation') 9 | -------------------------------------------------------------------------------- /lib/meta.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = { version: '9.7.0' } 4 | -------------------------------------------------------------------------------- /lib/redaction.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fastRedact = require('fast-redact') 4 | const { redactFmtSym, wildcardFirstSym } = require('./symbols') 5 | const { rx, validator } = fastRedact 6 | 7 | const validate = validator({ 8 | ERR_PATHS_MUST_BE_STRINGS: () => 'pino – redacted paths must be strings', 9 | ERR_INVALID_PATH: (s) => `pino – redact paths array contains an invalid path (${s})` 10 | }) 11 | 12 | const CENSOR = '[Redacted]' 13 | const strict = false // TODO should this be configurable? 14 | 15 | function redaction (opts, serialize) { 16 | const { paths, censor } = handle(opts) 17 | 18 | const shape = paths.reduce((o, str) => { 19 | rx.lastIndex = 0 20 | const first = rx.exec(str) 21 | const next = rx.exec(str) 22 | 23 | // ns is the top-level path segment, brackets + quoting removed. 24 | let ns = first[1] !== undefined 25 | ? 
first[1].replace(/^(?:"|'|`)(.*)(?:"|'|`)$/, '$1') 26 | : first[0] 27 | 28 | if (ns === '*') { 29 | ns = wildcardFirstSym 30 | } 31 | 32 | // top level key: 33 | if (next === null) { 34 | o[ns] = null 35 | return o 36 | } 37 | 38 | // path with at least two segments: 39 | // if ns is already redacted at the top level, ignore lower level redactions 40 | if (o[ns] === null) { 41 | return o 42 | } 43 | 44 | const { index } = next 45 | const nextPath = `${str.substr(index, str.length - 1)}` 46 | 47 | o[ns] = o[ns] || [] 48 | 49 | // shape is a mix of paths beginning with literal values and wildcard 50 | // paths [ "a.b.c", "*.b.z" ] should reduce to a shape of 51 | // { "a": [ "b.c", "b.z" ], *: [ "b.z" ] } 52 | // note: "b.z" is in both "a" and * arrays because "a" matches the wildcard. 53 | // (* entry has wildcardFirstSym as key) 54 | if (ns !== wildcardFirstSym && o[ns].length === 0) { 55 | // first time ns's get all '*' redactions so far 56 | o[ns].push(...(o[wildcardFirstSym] || [])) 57 | } 58 | 59 | if (ns === wildcardFirstSym) { 60 | // new * path gets added to all previously registered literal ns's. 61 | Object.keys(o).forEach(function (k) { 62 | if (o[k]) { 63 | o[k].push(nextPath) 64 | } 65 | }) 66 | } 67 | 68 | o[ns].push(nextPath) 69 | return o 70 | }, {}) 71 | 72 | // the redactor assigned to the format symbol key 73 | // provides top level redaction for instances where 74 | // an object is interpolated into the msg string 75 | const result = { 76 | [redactFmtSym]: fastRedact({ paths, censor, serialize, strict }) 77 | } 78 | 79 | const topCensor = (...args) => { 80 | return typeof censor === 'function' ? serialize(censor(...args)) : serialize(censor) 81 | } 82 | 83 | return [...Object.keys(shape), ...Object.getOwnPropertySymbols(shape)].reduce((o, k) => { 84 | // top level key: 85 | if (shape[k] === null) { 86 | o[k] = (value) => topCensor(value, [k]) 87 | } else { 88 | const wrappedCensor = typeof censor === 'function' 89 | ? 
(value, path) => { 90 | return censor(value, [k, ...path]) 91 | } 92 | : censor 93 | o[k] = fastRedact({ 94 | paths: shape[k], 95 | censor: wrappedCensor, 96 | serialize, 97 | strict 98 | }) 99 | } 100 | return o 101 | }, result) 102 | } 103 | 104 | function handle (opts) { 105 | if (Array.isArray(opts)) { 106 | opts = { paths: opts, censor: CENSOR } 107 | validate(opts) 108 | return opts 109 | } 110 | let { paths, censor = CENSOR, remove } = opts 111 | if (Array.isArray(paths) === false) { throw Error('pino – redact must contain an array of strings') } 112 | if (remove === true) censor = undefined 113 | validate({ paths, censor }) 114 | 115 | return { paths, censor } 116 | } 117 | 118 | module.exports = redaction 119 | -------------------------------------------------------------------------------- /lib/symbols.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const setLevelSym = Symbol('pino.setLevel') 4 | const getLevelSym = Symbol('pino.getLevel') 5 | const levelValSym = Symbol('pino.levelVal') 6 | const levelCompSym = Symbol('pino.levelComp') 7 | const useLevelLabelsSym = Symbol('pino.useLevelLabels') 8 | const useOnlyCustomLevelsSym = Symbol('pino.useOnlyCustomLevels') 9 | const mixinSym = Symbol('pino.mixin') 10 | 11 | const lsCacheSym = Symbol('pino.lsCache') 12 | const chindingsSym = Symbol('pino.chindings') 13 | 14 | const asJsonSym = Symbol('pino.asJson') 15 | const writeSym = Symbol('pino.write') 16 | const redactFmtSym = Symbol('pino.redactFmt') 17 | 18 | const timeSym = Symbol('pino.time') 19 | const timeSliceIndexSym = Symbol('pino.timeSliceIndex') 20 | const streamSym = Symbol('pino.stream') 21 | const stringifySym = Symbol('pino.stringify') 22 | const stringifySafeSym = Symbol('pino.stringifySafe') 23 | const stringifiersSym = Symbol('pino.stringifiers') 24 | const endSym = Symbol('pino.end') 25 | const formatOptsSym = Symbol('pino.formatOpts') 26 | const messageKeySym = Symbol('pino.messageKey') 27 | const errorKeySym = Symbol('pino.errorKey') 28 | const nestedKeySym = Symbol('pino.nestedKey') 29 | const nestedKeyStrSym = Symbol('pino.nestedKeyStr') 30 | const mixinMergeStrategySym = Symbol('pino.mixinMergeStrategy') 31 | const msgPrefixSym = Symbol('pino.msgPrefix') 32 | 33 | const wildcardFirstSym = Symbol('pino.wildcardFirst') 34 | 35 | // public symbols, no need to use the same pino 36 | // version for these 37 | const serializersSym = Symbol.for('pino.serializers') 38 | const formattersSym = Symbol.for('pino.formatters') 39 | const hooksSym = Symbol.for('pino.hooks') 40 | const needsMetadataGsym = Symbol.for('pino.metadata') 41 | 42 | module.exports = { 43 | setLevelSym, 44 | getLevelSym, 45 | levelValSym, 46 | levelCompSym, 47 | useLevelLabelsSym, 48 | mixinSym, 49 | lsCacheSym, 50 | chindingsSym, 51 | asJsonSym, 52 | writeSym, 53 | serializersSym, 54 | redactFmtSym, 55 | timeSym, 56 | timeSliceIndexSym, 57 | streamSym, 58 | stringifySym, 59 | stringifySafeSym, 60 | stringifiersSym, 61 | endSym, 62 | formatOptsSym, 63 | messageKeySym, 64 | errorKeySym, 65 | nestedKeySym, 66 | wildcardFirstSym, 67 | needsMetadataGsym, 68 | useOnlyCustomLevelsSym, 69 | formattersSym, 70 | hooksSym, 71 | nestedKeyStrSym, 72 | mixinMergeStrategySym, 73 | msgPrefixSym 74 | } 75 | -------------------------------------------------------------------------------- /lib/time.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const nullTime = () => '' 4 | 5 | const epochTime = 
() => `,"time":${Date.now()}` 6 | 7 | const unixTime = () => `,"time":${Math.round(Date.now() / 1000.0)}` 8 | 9 | const isoTime = () => `,"time":"${new Date(Date.now()).toISOString()}"` // using Date.now() for testability 10 | 11 | module.exports = { nullTime, epochTime, unixTime, isoTime } 12 | -------------------------------------------------------------------------------- /lib/transport-stream.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { realImport, realRequire } = require('real-require') 4 | 5 | module.exports = loadTransportStreamBuilder 6 | 7 | /** 8 | * Loads & returns a function to build transport streams 9 | * @param {string} target 10 | * @returns {Promise>} 11 | * @throws {Error} In case the target module does not export a function 12 | */ 13 | async function loadTransportStreamBuilder (target) { 14 | let fn 15 | try { 16 | const toLoad = target.startsWith('file://') ? target : 'file://' + target 17 | 18 | if (toLoad.endsWith('.ts') || toLoad.endsWith('.cts')) { 19 | // TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ). 20 | if (process[Symbol.for('ts-node.register.instance')]) { 21 | realRequire('ts-node/register') 22 | } else if (process.env && process.env.TS_NODE_DEV) { 23 | realRequire('ts-node-dev') 24 | } 25 | // TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees. 26 | fn = realRequire(decodeURIComponent(target)) 27 | } else { 28 | fn = (await realImport(toLoad)) 29 | } 30 | } catch (error) { 31 | // See this PR for details: https://github.com/pinojs/thread-stream/pull/34 32 | if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND')) { 33 | fn = realRequire(target) 34 | } else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') { 35 | // When bundled with pkg, an undefined error is thrown when called with realImport 36 | // When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport 37 | // More info at: https://github.com/pinojs/thread-stream/issues/143 38 | try { 39 | fn = realRequire(decodeURIComponent(target)) 40 | } catch { 41 | throw error 42 | } 43 | } else { 44 | throw error 45 | } 46 | } 47 | 48 | // Depending on how the default export is performed, and on how the code is 49 | // transpiled, we may find cases of two nested "default" objects. 
50 | // See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762 51 | if (typeof fn === 'object') fn = fn.default 52 | if (typeof fn === 'object') fn = fn.default 53 | if (typeof fn !== 'function') throw Error('exported worker is not a function') 54 | 55 | return fn 56 | } 57 | -------------------------------------------------------------------------------- /lib/transport.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { createRequire } = require('module') 4 | const getCallers = require('./caller') 5 | const { join, isAbsolute, sep } = require('node:path') 6 | const sleep = require('atomic-sleep') 7 | const onExit = require('on-exit-leak-free') 8 | const ThreadStream = require('thread-stream') 9 | 10 | function setupOnExit (stream) { 11 | // This is leak free, it does not leave event handlers 12 | onExit.register(stream, autoEnd) 13 | onExit.registerBeforeExit(stream, flush) 14 | 15 | stream.on('close', function () { 16 | onExit.unregister(stream) 17 | }) 18 | } 19 | 20 | function buildStream (filename, workerData, workerOpts, sync) { 21 | const stream = new ThreadStream({ 22 | filename, 23 | workerData, 24 | workerOpts, 25 | sync 26 | }) 27 | 28 | stream.on('ready', onReady) 29 | stream.on('close', function () { 30 | process.removeListener('exit', onExit) 31 | }) 32 | 33 | process.on('exit', onExit) 34 | 35 | function onReady () { 36 | process.removeListener('exit', onExit) 37 | stream.unref() 38 | 39 | if (workerOpts.autoEnd !== false) { 40 | setupOnExit(stream) 41 | } 42 | } 43 | 44 | function onExit () { 45 | /* istanbul ignore next */ 46 | if (stream.closed) { 47 | return 48 | } 49 | stream.flushSync() 50 | // Apparently there is a very sporadic race condition 51 | // that in certain OS would prevent the messages to be flushed 52 | // because the thread might not have been created still. 53 | // Unfortunately we need to sleep(100) in this case. 54 | sleep(100) 55 | stream.end() 56 | } 57 | 58 | return stream 59 | } 60 | 61 | function autoEnd (stream) { 62 | stream.ref() 63 | stream.flushSync() 64 | stream.end() 65 | stream.once('close', function () { 66 | stream.unref() 67 | }) 68 | } 69 | 70 | function flush (stream) { 71 | stream.flushSync() 72 | } 73 | 74 | function transport (fullOptions) { 75 | const { pipeline, targets, levels, dedupe, worker = {}, caller = getCallers(), sync = false } = fullOptions 76 | 77 | const options = { 78 | ...fullOptions.options 79 | } 80 | 81 | // Backwards compatibility 82 | const callers = typeof caller === 'string' ? [caller] : caller 83 | 84 | // This will be eventually modified by bundlers 85 | const bundlerOverrides = '__bundlerPathsOverrides' in globalThis ? 
globalThis.__bundlerPathsOverrides : {} 86 | 87 | let target = fullOptions.target 88 | 89 | if (target && targets) { 90 | throw new Error('only one of target or targets can be specified') 91 | } 92 | 93 | if (targets) { 94 | target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js') 95 | options.targets = targets.filter(dest => dest.target).map((dest) => { 96 | return { 97 | ...dest, 98 | target: fixTarget(dest.target) 99 | } 100 | }) 101 | options.pipelines = targets.filter(dest => dest.pipeline).map((dest) => { 102 | return dest.pipeline.map((t) => { 103 | return { 104 | ...t, 105 | level: dest.level, // duplicate the pipeline `level` property defined in the upper level 106 | target: fixTarget(t.target) 107 | } 108 | }) 109 | }) 110 | } else if (pipeline) { 111 | target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js') 112 | options.pipelines = [pipeline.map((dest) => { 113 | return { 114 | ...dest, 115 | target: fixTarget(dest.target) 116 | } 117 | })] 118 | } 119 | 120 | if (levels) { 121 | options.levels = levels 122 | } 123 | 124 | if (dedupe) { 125 | options.dedupe = dedupe 126 | } 127 | 128 | options.pinoWillSendConfig = true 129 | 130 | return buildStream(fixTarget(target), options, worker, sync) 131 | 132 | function fixTarget (origin) { 133 | origin = bundlerOverrides[origin] || origin 134 | 135 | if (isAbsolute(origin) || origin.indexOf('file://') === 0) { 136 | return origin 137 | } 138 | 139 | if (origin === 'pino/file') { 140 | return join(__dirname, '..', 'file.js') 141 | } 142 | 143 | let fixTarget 144 | 145 | for (const filePath of callers) { 146 | try { 147 | const context = filePath === 'node:repl' 148 | ? process.cwd() + sep 149 | : filePath 150 | 151 | fixTarget = createRequire(context).resolve(origin) 152 | break 153 | } catch (err) { 154 | // Silent catch 155 | continue 156 | } 157 | } 158 | 159 | if (!fixTarget) { 160 | throw new Error(`unable to determine transport target for "${origin}"`) 161 | } 162 | 163 | return fixTarget 164 | } 165 | } 166 | 167 | module.exports = transport 168 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pino", 3 | "version": "9.7.0", 4 | "description": "super fast, all natural json logger", 5 | "main": "pino.js", 6 | "type": "commonjs", 7 | "types": "pino.d.ts", 8 | "browser": "./browser.js", 9 | "scripts": { 10 | "docs": "docsify serve", 11 | "browser-test": "airtap --local 8080 test/browser*test.js", 12 | "lint": "eslint .", 13 | "prepublishOnly": "tap --no-check-coverage test/internals/version.test.js", 14 | "test": "npm run lint && npm run transpile && tap --ts && jest test/jest && npm run test-types", 15 | "test-ci": "npm run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly && npm run test-types", 16 | "test-ci-pnpm": "pnpm run lint && npm run transpile && tap --ts --no-coverage --no-check-coverage && pnpm run test-types", 17 | "test-ci-yarn-pnp": "yarn run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly", 18 | "test-types": "tsc && tsd && ts-node test/types/pino.ts", 19 | "test:smoke": "smoker smoke:pino && smoker smoke:browser && smoker smoke:file", 20 | "smoke:pino": "node ./pino.js", 21 | "smoke:browser": "node ./browser.js", 22 | "smoke:file": "node ./file.js", 23 | "transpile": "node ./test/fixtures/ts/transpile.cjs", 24 | "cov-ui": "tap --ts --coverage-report=html", 25 | 
"bench": "node benchmarks/utils/runbench all", 26 | "bench-basic": "node benchmarks/utils/runbench basic", 27 | "bench-object": "node benchmarks/utils/runbench object", 28 | "bench-deep-object": "node benchmarks/utils/runbench deep-object", 29 | "bench-multi-arg": "node benchmarks/utils/runbench multi-arg", 30 | "bench-longs-tring": "node benchmarks/utils/runbench long-string", 31 | "bench-child": "node benchmarks/utils/runbench child", 32 | "bench-child-child": "node benchmarks/utils/runbench child-child", 33 | "bench-child-creation": "node benchmarks/utils/runbench child-creation", 34 | "bench-formatters": "node benchmarks/utils/runbench formatters", 35 | "update-bench-doc": "node benchmarks/utils/generate-benchmark-doc > docs/benchmarks.md" 36 | }, 37 | "bin": { 38 | "pino": "./bin.js" 39 | }, 40 | "precommit": "test", 41 | "repository": { 42 | "type": "git", 43 | "url": "git+https://github.com/pinojs/pino.git" 44 | }, 45 | "keywords": [ 46 | "fast", 47 | "logger", 48 | "stream", 49 | "json" 50 | ], 51 | "author": "Matteo Collina ", 52 | "contributors": [ 53 | "David Mark Clements ", 54 | "James Sumners ", 55 | "Thomas Watson Steen (https://twitter.com/wa7son)" 56 | ], 57 | "license": "MIT", 58 | "bugs": { 59 | "url": "https://github.com/pinojs/pino/issues" 60 | }, 61 | "homepage": "https://getpino.io", 62 | "devDependencies": { 63 | "@types/flush-write-stream": "^1.0.0", 64 | "@types/node": "^22.0.0", 65 | "@types/tap": "^15.0.6", 66 | "@yao-pkg/pkg": "6.3.0", 67 | "airtap": "5.0.0", 68 | "benchmark": "^2.1.4", 69 | "bole": "^5.0.5", 70 | "bunyan": "^1.8.14", 71 | "debug": "^4.3.4", 72 | "docsify-cli": "^4.4.4", 73 | "eslint": "^8.17.0", 74 | "eslint-config-standard": "^17.0.0", 75 | "eslint-plugin-import": "^2.26.0", 76 | "eslint-plugin-n": "15.7.0", 77 | "eslint-plugin-node": "^11.1.0", 78 | "eslint-plugin-promise": "^6.0.0", 79 | "execa": "^5.0.0", 80 | "fastbench": "^1.0.1", 81 | "flush-write-stream": "^2.0.0", 82 | "import-fresh": "^3.2.1", 83 | "jest": "^29.0.3", 84 | "log": "^6.0.0", 85 | "loglevel": "^1.6.7", 86 | "midnight-smoker": "1.1.1", 87 | "pino-pretty": "^13.0.0", 88 | "pre-commit": "^1.2.2", 89 | "proxyquire": "^2.1.3", 90 | "pump": "^3.0.0", 91 | "rimraf": "^6.0.1", 92 | "semver": "^7.3.7", 93 | "split2": "^4.0.0", 94 | "steed": "^1.1.3", 95 | "strip-ansi": "^6.0.0", 96 | "tap": "^16.2.0", 97 | "tape": "^5.5.3", 98 | "through2": "^4.0.0", 99 | "ts-node": "^10.9.1", 100 | "tsd": "^0.32.0", 101 | "typescript": "~5.8.2", 102 | "winston": "^3.7.2" 103 | }, 104 | "dependencies": { 105 | "atomic-sleep": "^1.0.0", 106 | "fast-redact": "^3.1.1", 107 | "on-exit-leak-free": "^2.1.0", 108 | "pino-abstract-transport": "^2.0.0", 109 | "pino-std-serializers": "^7.0.0", 110 | "process-warning": "^5.0.0", 111 | "quick-format-unescaped": "^4.0.3", 112 | "real-require": "^0.2.0", 113 | "safe-stable-stringify": "^2.3.1", 114 | "sonic-boom": "^4.0.1", 115 | "thread-stream": "^3.0.0" 116 | }, 117 | "tsd": { 118 | "directory": "test/types" 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /pino-banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/pino-banner.png -------------------------------------------------------------------------------- /pino-logo-hire.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/pino-logo-hire.png -------------------------------------------------------------------------------- /pino-tree.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/pino-tree.png -------------------------------------------------------------------------------- /pretty-demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinojs/pino/427cbaf30d4717e7df5795c5ede7fdf3fa01eb5c/pretty-demo.png -------------------------------------------------------------------------------- /test/broken-pipe.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const t = require('tap') 4 | const { join } = require('node:path') 5 | const { fork } = require('node:child_process') 6 | const { once } = require('./helper') 7 | const pino = require('..') 8 | 9 | if (process.platform === 'win32') { 10 | t.skip('skipping on windows') 11 | process.exit(0) 12 | } 13 | 14 | if (process.env.CITGM) { 15 | // This looks like a some form of limitations of the CITGM test runner 16 | // or the HW/SW we run it on. This file can hang on Node.js v18.x. 17 | // The failure does not reproduce locally or on our CI. 18 | // Skipping it is the only way to keep pino in CITGM. 19 | // https://github.com/nodejs/citgm/pull/1002#issuecomment-1751942988 20 | t.skip('Skipping on Node.js core CITGM because it hangs on v18.x') 21 | process.exit(0) 22 | } 23 | 24 | function test (file) { 25 | file = join('fixtures', 'broken-pipe', file) 26 | t.test(file, { parallel: true }, async ({ equal }) => { 27 | const child = fork(join(__dirname, file), { silent: true }) 28 | child.stdout.destroy() 29 | 30 | child.stderr.pipe(process.stdout) 31 | 32 | const res = await once(child, 'close') 33 | equal(res, 0) // process exits successfully 34 | }) 35 | } 36 | 37 | t.jobs = 42 38 | 39 | test('basic.js') 40 | test('destination.js') 41 | test('syncfalse.js') 42 | 43 | t.test('let error pass through', ({ equal, plan }) => { 44 | plan(3) 45 | const stream = pino.destination({ sync: true }) 46 | 47 | // side effect of the pino constructor is that it will set an 48 | // event handler for error 49 | pino(stream) 50 | 51 | process.nextTick(() => stream.emit('error', new Error('kaboom'))) 52 | process.nextTick(() => stream.emit('error', new Error('kaboom'))) 53 | 54 | stream.on('error', (err) => { 55 | equal(err.message, 'kaboom') 56 | }) 57 | }) 58 | -------------------------------------------------------------------------------- /test/browser-child.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const test = require('tape') 3 | const pino = require('../browser') 4 | 5 | test('child has parent level', ({ end, same, is }) => { 6 | const instance = pino({ 7 | level: 'error', 8 | browser: {} 9 | }) 10 | 11 | const child = instance.child({}) 12 | 13 | same(child.level, instance.level) 14 | end() 15 | }) 16 | 17 | test('child can set level at creation time', ({ end, same, is }) => { 18 | const instance = pino({ 19 | level: 'error', 20 | browser: {} 21 | }) 22 | 23 | const child = instance.child({}, { level: 'info' }) // first bindings, then options 24 | 25 | same(child.level, 'info') 26 | end() 27 | }) 28 | 29 | test('changing child level does not affect parent', ({ end, same, is 
}) => { 30 | const instance = pino({ 31 | level: 'error', 32 | browser: {} 33 | }) 34 | 35 | const child = instance.child({}) 36 | child.level = 'info' 37 | 38 | same(instance.level, 'error') 39 | end() 40 | }) 41 | 42 | test('child should log, if its own level allows it', ({ end, same, is }) => { 43 | const expected = [ 44 | { 45 | level: 30, 46 | msg: 'this is info' 47 | }, 48 | { 49 | level: 40, 50 | msg: 'this is warn' 51 | }, 52 | { 53 | level: 50, 54 | msg: 'this is an error' 55 | } 56 | ] 57 | const instance = pino({ 58 | level: 'error', 59 | browser: { 60 | write (actual) { 61 | checkLogObjects(is, same, actual, expected.shift()) 62 | } 63 | } 64 | }) 65 | 66 | const child = instance.child({}) 67 | child.level = 'info' 68 | 69 | child.debug('this is debug') 70 | child.info('this is info') 71 | child.warn('this is warn') 72 | child.error('this is an error') 73 | 74 | same(expected.length, 0, 'not all messages were read') 75 | end() 76 | }) 77 | 78 | test('changing child log level should not affect parent log behavior', ({ end, same, is }) => { 79 | const expected = [ 80 | { 81 | level: 50, 82 | msg: 'this is an error' 83 | }, 84 | { 85 | level: 60, 86 | msg: 'this is fatal' 87 | } 88 | ] 89 | const instance = pino({ 90 | level: 'error', 91 | browser: { 92 | write (actual) { 93 | checkLogObjects(is, same, actual, expected.shift()) 94 | } 95 | } 96 | }) 97 | 98 | const child = instance.child({}) 99 | child.level = 'info' 100 | 101 | instance.warn('this is warn') 102 | instance.error('this is an error') 103 | instance.fatal('this is fatal') 104 | 105 | same(expected.length, 0, 'not all messages were read') 106 | end() 107 | }) 108 | 109 | test('onChild callback should be called when new child is created', ({ end, pass, plan }) => { 110 | plan(1) 111 | const instance = pino({ 112 | level: 'error', 113 | browser: {}, 114 | onChild: (_child) => { 115 | pass('onChild callback was called') 116 | end() 117 | } 118 | }) 119 | 120 | instance.child({}) 121 | }) 122 | 123 | function checkLogObjects (is, same, actual, expected) { 124 | is(actual.time <= Date.now(), true, 'time is greater than Date.now()') 125 | 126 | const actualCopy = Object.assign({}, actual) 127 | const expectedCopy = Object.assign({}, expected) 128 | delete actualCopy.time 129 | delete expectedCopy.time 130 | 131 | same(actualCopy, expectedCopy) 132 | } 133 | -------------------------------------------------------------------------------- /test/browser-disabled.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const test = require('tape') 3 | const pino = require('../browser') 4 | 5 | test('set browser opts disabled to true', ({ end, same }) => { 6 | const instance = pino({ 7 | browser: { 8 | disabled: true, 9 | write (actual) { 10 | checkLogObjects(same, actual, []) 11 | } 12 | } 13 | }) 14 | instance.info('hello world') 15 | instance.error('this is an error') 16 | instance.fatal('this is fatal') 17 | 18 | end() 19 | }) 20 | 21 | test('set browser opts disabled to false', ({ end, same }) => { 22 | const expected = [ 23 | { 24 | level: 30, 25 | msg: 'hello world' 26 | }, 27 | { 28 | level: 50, 29 | msg: 'this is an error' 30 | }, 31 | { 32 | level: 60, 33 | msg: 'this is fatal' 34 | } 35 | ] 36 | const instance = pino({ 37 | browser: { 38 | disabled: false, 39 | write (actual) { 40 | checkLogObjects(same, actual, expected.shift()) 41 | } 42 | } 43 | }) 44 | instance.info('hello world') 45 | instance.error('this is an error') 46 | instance.fatal('this is fatal') 47 | 
48 | end() 49 | }) 50 | 51 | test('disabled is not set in browser opts', ({ end, same }) => { 52 | const expected = [ 53 | { 54 | level: 30, 55 | msg: 'hello world' 56 | }, 57 | { 58 | level: 50, 59 | msg: 'this is an error' 60 | }, 61 | { 62 | level: 60, 63 | msg: 'this is fatal' 64 | } 65 | ] 66 | const instance = pino({ 67 | browser: { 68 | write (actual) { 69 | checkLogObjects(same, actual, expected.shift()) 70 | } 71 | } 72 | }) 73 | instance.info('hello world') 74 | instance.error('this is an error') 75 | instance.fatal('this is fatal') 76 | 77 | end() 78 | }) 79 | 80 | function checkLogObjects (same, actual, expected, is) { 81 | const actualCopy = Object.assign({}, actual) 82 | const expectedCopy = Object.assign({}, expected) 83 | delete actualCopy.time 84 | delete expectedCopy.time 85 | 86 | same(actualCopy, expectedCopy) 87 | } 88 | -------------------------------------------------------------------------------- /test/browser-early-console-freeze.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | Object.freeze(console) 3 | const test = require('tape') 4 | const pino = require('../browser') 5 | 6 | test('silent level', ({ end, fail, pass }) => { 7 | pino({ 8 | level: 'silent', 9 | browser: { } 10 | }) 11 | end() 12 | }) 13 | -------------------------------------------------------------------------------- /test/browser-is-level-enabled.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const pino = require('../browser') 5 | 6 | const customLevels = { 7 | trace: 10, 8 | debug: 20, 9 | info: 30, 10 | warn: 40, 11 | error: 50, 12 | fatal: 60 13 | } 14 | 15 | test('Default levels suite', ({ test, end }) => { 16 | test('can check if current level enabled', async ({ equal }) => { 17 | const log = pino({ level: 'debug' }) 18 | equal(true, log.isLevelEnabled('debug')) 19 | }) 20 | 21 | test('can check if current level enabled when as object', async ({ equal }) => { 22 | const log = pino({ asObject: true, level: 'debug' }) 23 | equal(true, log.isLevelEnabled('debug')) 24 | }) 25 | 26 | test('can check if level enabled after level set', async ({ equal }) => { 27 | const log = pino() 28 | equal(false, log.isLevelEnabled('debug')) 29 | log.level = 'debug' 30 | equal(true, log.isLevelEnabled('debug')) 31 | }) 32 | 33 | test('can check if higher level enabled', async ({ equal }) => { 34 | const log = pino({ level: 'debug' }) 35 | equal(true, log.isLevelEnabled('error')) 36 | }) 37 | 38 | test('can check if lower level is disabled', async ({ equal }) => { 39 | const log = pino({ level: 'error' }) 40 | equal(false, log.isLevelEnabled('trace')) 41 | }) 42 | 43 | test('ASC: can check if child has current level enabled', async ({ equal }) => { 44 | const log = pino().child({}, { level: 'debug' }) 45 | equal(true, log.isLevelEnabled('debug')) 46 | equal(true, log.isLevelEnabled('error')) 47 | equal(false, log.isLevelEnabled('trace')) 48 | }) 49 | 50 | test('can check if custom level is enabled', async ({ equal }) => { 51 | const log = pino({ 52 | customLevels: { foo: 35 }, 53 | level: 'debug' 54 | }) 55 | equal(true, log.isLevelEnabled('foo')) 56 | equal(true, log.isLevelEnabled('error')) 57 | equal(false, log.isLevelEnabled('trace')) 58 | }) 59 | 60 | end() 61 | }) 62 | 63 | test('Custom levels suite', ({ test, end }) => { 64 | test('can check if current level enabled', async ({ equal }) => { 65 | const log = pino({ level: 'debug', customLevels 
}) 66 | equal(true, log.isLevelEnabled('debug')) 67 | }) 68 | 69 | test('can check if level enabled after level set', async ({ equal }) => { 70 | const log = pino({ customLevels }) 71 | equal(false, log.isLevelEnabled('debug')) 72 | log.level = 'debug' 73 | equal(true, log.isLevelEnabled('debug')) 74 | }) 75 | 76 | test('can check if higher level enabled', async ({ equal }) => { 77 | const log = pino({ level: 'debug', customLevels }) 78 | equal(true, log.isLevelEnabled('error')) 79 | }) 80 | 81 | test('can check if lower level is disabled', async ({ equal }) => { 82 | const log = pino({ level: 'error', customLevels }) 83 | equal(false, log.isLevelEnabled('trace')) 84 | }) 85 | 86 | test('can check if child has current level enabled', async ({ equal }) => { 87 | const log = pino().child({ customLevels }, { level: 'debug' }) 88 | equal(true, log.isLevelEnabled('debug')) 89 | equal(true, log.isLevelEnabled('error')) 90 | equal(false, log.isLevelEnabled('trace')) 91 | }) 92 | 93 | test('can check if custom level is enabled', async ({ equal }) => { 94 | const log = pino({ 95 | customLevels: { foo: 35, ...customLevels }, 96 | level: 'debug' 97 | }) 98 | equal(true, log.isLevelEnabled('foo')) 99 | equal(true, log.isLevelEnabled('error')) 100 | equal(false, log.isLevelEnabled('trace')) 101 | }) 102 | 103 | end() 104 | }) 105 | -------------------------------------------------------------------------------- /test/browser-timestamp.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const test = require('tape') 3 | const pino = require('../browser') 4 | 5 | Date.now = () => 1599400603614 6 | 7 | test('null timestamp', ({ end, is }) => { 8 | const instance = pino({ 9 | timestamp: pino.stdTimeFunctions.nullTime, 10 | browser: { 11 | asObject: true, 12 | write: function (o) { 13 | is(o.time, undefined) 14 | } 15 | } 16 | }) 17 | instance.info('hello world') 18 | end() 19 | }) 20 | 21 | test('iso timestamp', ({ end, is }) => { 22 | const instance = pino({ 23 | timestamp: pino.stdTimeFunctions.isoTime, 24 | browser: { 25 | asObject: true, 26 | write: function (o) { 27 | is(o.time, '2020-09-06T13:56:43.614Z') 28 | } 29 | } 30 | }) 31 | instance.info('hello world') 32 | end() 33 | }) 34 | 35 | test('epoch timestamp', ({ end, is }) => { 36 | const instance = pino({ 37 | timestamp: pino.stdTimeFunctions.epochTime, 38 | browser: { 39 | asObject: true, 40 | write: function (o) { 41 | is(o.time, 1599400603614) 42 | } 43 | } 44 | }) 45 | instance.info('hello world') 46 | end() 47 | }) 48 | 49 | test('unix timestamp', ({ end, is }) => { 50 | const instance = pino({ 51 | timestamp: pino.stdTimeFunctions.unixTime, 52 | browser: { 53 | asObject: true, 54 | write: function (o) { 55 | is(o.time, Math.round(1599400603614 / 1000.0)) 56 | } 57 | } 58 | }) 59 | instance.info('hello world') 60 | end() 61 | }) 62 | 63 | test('epoch timestamp by default', ({ end, is }) => { 64 | const instance = pino({ 65 | browser: { 66 | asObject: true, 67 | write: function (o) { 68 | is(o.time, 1599400603614) 69 | } 70 | } 71 | }) 72 | instance.info('hello world') 73 | end() 74 | }) 75 | 76 | test('not print timestamp if the option is false', ({ end, is }) => { 77 | const instance = pino({ 78 | timestamp: false, 79 | browser: { 80 | asObject: true, 81 | write: function (o) { 82 | is(o.time, undefined) 83 | } 84 | } 85 | }) 86 | instance.info('hello world') 87 | end() 88 | }) 89 | -------------------------------------------------------------------------------- 
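A minimal usage sketch (not a file in this repository) tying the browser timestamp tests above together. It assumes the same in-repo browser build those tests load via require('../browser'), and simply switches between the bundled stdTimeFunctions; the variable names are illustrative.

'use strict'
// Hypothetical sketch, not part of the pino sources: demonstrates the
// timestamp options exercised by browser-timestamp.test.js above.
const pino = require('../browser') // the in-repo browser build, as in the tests

const logger = pino({
  // any of: nullTime (omits time), isoTime, epochTime (the default), unixTime
  timestamp: pino.stdTimeFunctions.isoTime,
  browser: {
    asObject: true,
    write (o) {
      // with isoTime, o.time is an ISO-8601 string such as '2020-09-06T13:56:43.614Z'
      console.log(o)
    }
  }
})

logger.info('hello world')

Passing timestamp: false (as the last test above does) omits the time property entirely.
--------------------------------------------------------------------------------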
/test/complex-objects.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const { sink, once } = require('./helper') 5 | const { PassThrough } = require('node:stream') 6 | const pino = require('../') 7 | 8 | test('Proxy and stream objects', async ({ equal }) => { 9 | const s = new PassThrough() 10 | s.resume() 11 | s.write('', () => {}) 12 | const obj = { s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) } 13 | const stream = sink() 14 | const instance = pino(stream) 15 | instance.info({ obj }) 16 | 17 | const result = await once(stream, 'data') 18 | 19 | equal(result.obj, '[unable to serialize, circular reference is too complex to analyze]') 20 | }) 21 | 22 | test('Proxy and stream objects', async ({ equal }) => { 23 | const s = new PassThrough() 24 | s.resume() 25 | s.write('', () => {}) 26 | const obj = { s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) } 27 | const stream = sink() 28 | const instance = pino(stream) 29 | instance.info(obj) 30 | 31 | const result = await once(stream, 'data') 32 | 33 | equal(result.p, '[unable to serialize, circular reference is too complex to analyze]') 34 | }) 35 | -------------------------------------------------------------------------------- /test/crlf.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const writer = require('flush-write-stream') 5 | const pino = require('../') 6 | 7 | function capture () { 8 | const ws = writer((chunk, enc, cb) => { 9 | ws.data += chunk.toString() 10 | cb() 11 | }) 12 | ws.data = '' 13 | return ws 14 | } 15 | 16 | test('pino uses LF by default', async ({ ok }) => { 17 | const stream = capture() 18 | const logger = pino(stream) 19 | logger.info('foo') 20 | logger.error('bar') 21 | ok(/foo[^\r\n]+\n[^\r\n]+bar[^\r\n]+\n/.test(stream.data)) 22 | }) 23 | 24 | test('pino can log CRLF', async ({ ok }) => { 25 | const stream = capture() 26 | const logger = pino({ 27 | crlf: true 28 | }, stream) 29 | logger.info('foo') 30 | logger.error('bar') 31 | ok(/foo[^\n]+\r\n[^\n]+bar[^\n]+\r\n/.test(stream.data)) 32 | }) 33 | -------------------------------------------------------------------------------- /test/errorKey.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const { test } = require('tap') 3 | const { sink, once } = require('./helper') 4 | const stdSerializers = require('pino-std-serializers') 5 | const pino = require('../') 6 | 7 | test('set the errorKey with error serializer', async ({ equal, same }) => { 8 | const stream = sink() 9 | const errorKey = 'error' 10 | const instance = pino({ 11 | errorKey, 12 | serializers: { [errorKey]: stdSerializers.err } 13 | }, stream) 14 | instance.error(new ReferenceError('test')) 15 | const o = await once(stream, 'data') 16 | equal(typeof o[errorKey], 'object') 17 | equal(o[errorKey].type, 'ReferenceError') 18 | equal(o[errorKey].message, 'test') 19 | equal(typeof o[errorKey].stack, 'string') 20 | }) 21 | 22 | test('set the errorKey without error serializer', async ({ equal, same }) => { 23 | const stream = sink() 24 | const errorKey = 'error' 25 | const instance = pino({ 26 | errorKey 27 | }, stream) 28 | instance.error(new ReferenceError('test')) 29 | const o = await once(stream, 'data') 30 | equal(typeof o[errorKey], 'object') 31 | equal(o[errorKey].type, 'ReferenceError') 32 | 
equal(o[errorKey].message, 'test') 33 | equal(typeof o[errorKey].stack, 'string') 34 | }) 35 | -------------------------------------------------------------------------------- /test/escaping.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const os = require('node:os') 4 | const { test } = require('tap') 5 | const { sink, once } = require('./helper') 6 | const pino = require('../') 7 | 8 | const { pid } = process 9 | const hostname = os.hostname() 10 | 11 | function testEscape (ch, key) { 12 | test('correctly escape ' + ch, async ({ same }) => { 13 | const stream = sink() 14 | const instance = pino({ 15 | name: 'hello' 16 | }, stream) 17 | instance.fatal('this contains ' + key) 18 | const result = await once(stream, 'data') 19 | delete result.time 20 | same(result, { 21 | pid, 22 | hostname, 23 | level: 60, 24 | name: 'hello', 25 | msg: 'this contains ' + key 26 | }) 27 | }) 28 | } 29 | 30 | testEscape('\\n', '\n') 31 | testEscape('\\/', '/') 32 | testEscape('\\\\', '\\') 33 | testEscape('\\r', '\r') 34 | testEscape('\\t', '\t') 35 | testEscape('\\b', '\b') 36 | 37 | const toEscape = [ 38 | '\u0000', // NUL Null character 39 | '\u0001', // SOH Start of Heading 40 | '\u0002', // STX Start of Text 41 | '\u0003', // ETX End-of-text character 42 | '\u0004', // EOT End-of-transmission character 43 | '\u0005', // ENQ Enquiry character 44 | '\u0006', // ACK Acknowledge character 45 | '\u0007', // BEL Bell character 46 | '\u0008', // BS Backspace 47 | '\u0009', // HT Horizontal tab 48 | '\u000A', // LF Line feed 49 | '\u000B', // VT Vertical tab 50 | '\u000C', // FF Form feed 51 | '\u000D', // CR Carriage return 52 | '\u000E', // SO Shift Out 53 | '\u000F', // SI Shift In 54 | '\u0010', // DLE Data Link Escape 55 | '\u0011', // DC1 Device Control 1 56 | '\u0012', // DC2 Device Control 2 57 | '\u0013', // DC3 Device Control 3 58 | '\u0014', // DC4 Device Control 4 59 | '\u0015', // NAK Negative-acknowledge character 60 | '\u0016', // SYN Synchronous Idle 61 | '\u0017', // ETB End of Transmission Block 62 | '\u0018', // CAN Cancel character 63 | '\u0019', // EM End of Medium 64 | '\u001A', // SUB Substitute character 65 | '\u001B', // ESC Escape character 66 | '\u001C', // FS File Separator 67 | '\u001D', // GS Group Separator 68 | '\u001E', // RS Record Separator 69 | '\u001F' // US Unit Separator 70 | ] 71 | 72 | toEscape.forEach((key) => { 73 | testEscape(JSON.stringify(key), key) 74 | }) 75 | 76 | test('correctly escape `hello \\u001F world \\n \\u0022`', async ({ same }) => { 77 | const stream = sink() 78 | const instance = pino({ 79 | name: 'hello' 80 | }, stream) 81 | instance.fatal('hello \u001F world \n \u0022') 82 | const result = await once(stream, 'data') 83 | delete result.time 84 | same(result, { 85 | pid, 86 | hostname, 87 | level: 60, 88 | name: 'hello', 89 | msg: 'hello \u001F world \n \u0022' 90 | }) 91 | }) 92 | -------------------------------------------------------------------------------- /test/esm/esm.mjs: -------------------------------------------------------------------------------- 1 | import t from 'tap' 2 | import pino from '../../pino.js' 3 | import helper from '../helper.js' 4 | 5 | const { sink, check, once } = helper 6 | 7 | t.test('esm support', async ({ equal }) => { 8 | const stream = sink() 9 | const instance = pino(stream) 10 | instance.info('hello world') 11 | check(equal, await once(stream, 'data'), 30, 'hello world') 12 | }) 13 | 
-------------------------------------------------------------------------------- /test/esm/index.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const t = require('tap') 4 | const semver = require('semver') 5 | 6 | const { isYarnPnp } = require('../helper') 7 | 8 | if (!semver.satisfies(process.versions.node, '^13.3.0 || ^12.10.0 || >= 14.0.0') || isYarnPnp) { 9 | t.skip('Skip esm because not supported by Node') 10 | } else { 11 | // Node v8 throw a `SyntaxError: Unexpected token import` 12 | // even if this branch is never touch in the code, 13 | // by using `eval` we can avoid this issue. 14 | // eslint-disable-next-line 15 | new Function('module', 'return import(module)')('./esm.mjs').catch((err) => { 16 | process.nextTick(() => { 17 | throw err 18 | }) 19 | }) 20 | } 21 | 22 | if (!semver.satisfies(process.versions.node, '>= 14.13.0 || ^12.20.0') || isYarnPnp) { 23 | t.skip('Skip named exports because not supported by Node') 24 | } else { 25 | // Node v8 throw a `SyntaxError: Unexpected token import` 26 | // even if this branch is never touch in the code, 27 | // by using `eval` we can avoid this issue. 28 | // eslint-disable-next-line 29 | new Function('module', 'return import(module)')('./named-exports.mjs').catch((err) => { 30 | process.nextTick(() => { 31 | throw err 32 | }) 33 | }) 34 | } 35 | -------------------------------------------------------------------------------- /test/esm/named-exports.mjs: -------------------------------------------------------------------------------- 1 | import { hostname } from 'node:os' 2 | import t from 'tap' 3 | import { sink, check, once, watchFileCreated, file } from '../helper.js' 4 | import { pino, destination } from '../../pino.js' 5 | import { readFileSync } from 'node:fs' 6 | 7 | t.test('named exports support', async ({ equal }) => { 8 | const stream = sink() 9 | const instance = pino(stream) 10 | instance.info('hello world') 11 | check(equal, await once(stream, 'data'), 30, 'hello world') 12 | }) 13 | 14 | t.test('destination', async ({ same }) => { 15 | const tmp = file() 16 | const instance = pino(destination(tmp)) 17 | instance.info('hello') 18 | await watchFileCreated(tmp) 19 | const result = JSON.parse(readFileSync(tmp).toString()) 20 | delete result.time 21 | same(result, { 22 | pid: process.pid, 23 | hostname, 24 | level: 30, 25 | msg: 'hello' 26 | }) 27 | }) 28 | -------------------------------------------------------------------------------- /test/exit.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const { join } = require('node:path') 5 | const execa = require('execa') 6 | const writer = require('flush-write-stream') 7 | const { once } = require('./helper') 8 | 9 | // https://github.com/pinojs/pino/issues/542 10 | test('pino.destination log everything when calling process.exit(0)', async ({ not }) => { 11 | let actual = '' 12 | const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'destination-exit.js')]) 13 | 14 | child.stdout.pipe(writer((s, enc, cb) => { 15 | actual += s 16 | cb() 17 | })) 18 | 19 | await once(child, 'close') 20 | 21 | not(actual.match(/hello/), null) 22 | not(actual.match(/world/), null) 23 | }) 24 | 25 | test('pino with no args log everything when calling process.exit(0)', async ({ not }) => { 26 | let actual = '' 27 | const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'default-exit.js')]) 28 | 29 | 
child.stdout.pipe(writer((s, enc, cb) => { 30 | actual += s 31 | cb() 32 | })) 33 | 34 | await once(child, 'close') 35 | 36 | not(actual.match(/hello/), null) 37 | not(actual.match(/world/), null) 38 | }) 39 | 40 | test('sync false logs everything when calling process.exit(0)', async ({ not }) => { 41 | let actual = '' 42 | const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'syncfalse-exit.js')]) 43 | 44 | child.stdout.pipe(writer((s, enc, cb) => { 45 | actual += s 46 | cb() 47 | })) 48 | 49 | await once(child, 'close') 50 | 51 | not(actual.match(/hello/), null) 52 | not(actual.match(/world/), null) 53 | }) 54 | 55 | test('sync false logs everything when calling flushSync', async ({ not }) => { 56 | let actual = '' 57 | const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'syncfalse-flush-exit.js')]) 58 | 59 | child.stdout.pipe(writer((s, enc, cb) => { 60 | actual += s 61 | cb() 62 | })) 63 | 64 | await once(child, 'close') 65 | 66 | not(actual.match(/hello/), null) 67 | not(actual.match(/world/), null) 68 | }) 69 | 70 | test('transports exits gracefully when logging in exit', async ({ equal }) => { 71 | const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'transport-with-on-exit.js')]) 72 | child.stdout.resume() 73 | 74 | const code = await once(child, 'close') 75 | 76 | equal(code, 0) 77 | }) 78 | -------------------------------------------------------------------------------- /test/fixtures/broken-pipe/basic.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | global.process = { __proto__: process, pid: 123456 } 4 | Date.now = function () { return 1459875739796 } 5 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 6 | 7 | const pino = require('../../..')() 8 | 9 | pino.info('hello world') 10 | -------------------------------------------------------------------------------- /test/fixtures/broken-pipe/destination.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | global.process = { __proto__: process, pid: 123456 } 4 | Date.now = function () { return 1459875739796 } 5 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 6 | 7 | const pino = require('../../..') 8 | const logger = pino(pino.destination()) 9 | 10 | logger.info('hello world') 11 | -------------------------------------------------------------------------------- /test/fixtures/broken-pipe/syncfalse.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | global.process = { __proto__: process, pid: 123456 } 4 | Date.now = function () { return 1459875739796 } 5 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 6 | 7 | const pino = require('../../..') 8 | const logger = pino(pino.destination({ sync: false })) 9 | 10 | for (var i = 0; i < 1000; i++) { 11 | logger.info('hello world') 12 | } 13 | -------------------------------------------------------------------------------- /test/fixtures/console-transport.js: -------------------------------------------------------------------------------- 1 | const { Writable } = require('node:stream') 2 | 3 | module.exports = (options) => { 4 | const myTransportStream = new Writable({ 5 | autoDestroy: true, 6 | write (chunk, enc, cb) { 7 | // apply a transform and send to stdout 8 | console.log(chunk.toString().toUpperCase()) 9 | cb() 10 | } 11 | }) 12 | return myTransportStream 13 | } 14 | 
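A minimal usage sketch (not a file in this repository) showing how a worker-transport module like the console-transport fixture above gets wired up. The target path and the require('../..') entry point are assumptions that mirror test/fixtures/transport-main.js, which loads a sibling fixture the same way.

'use strict'
// Hypothetical sketch, not part of the pino sources.
const { join } = require('node:path')
const pino = require('../..') // the in-repo pino entry point, as the fixtures use

// pino.transport() runs the target module in a worker thread; the module's
// exported function returns the Writable that receives each serialized log line.
const transport = pino.transport({
  target: join(__dirname, 'console-transport.js')
})
const logger = pino(transport)

logger.info('hello') // the worker upper-cases the JSON line and console.logs it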
-------------------------------------------------------------------------------- /test/fixtures/crashing-transport.js: -------------------------------------------------------------------------------- 1 | const { Writable } = require('node:stream') 2 | 3 | module.exports = () => 4 | new Writable({ 5 | autoDestroy: true, 6 | write (chunk, enc, cb) { 7 | setImmediate(() => { 8 | /* eslint-disable no-empty */ 9 | for (let i = 0; i < 1e3; i++) {} 10 | process.exit(0) 11 | }) 12 | } 13 | }) 14 | -------------------------------------------------------------------------------- /test/fixtures/default-exit.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: process, pid: 123456 } 2 | Date.now = function () { return 1459875739796 } 3 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 4 | const pino = require(require.resolve('./../../')) 5 | const logger = pino() 6 | logger.info('hello') 7 | logger.info('world') 8 | process.exit(0) 9 | -------------------------------------------------------------------------------- /test/fixtures/destination-exit.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: process, pid: 123456 } 2 | Date.now = function () { return 1459875739796 } 3 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 4 | const pino = require(require.resolve('./../../')) 5 | const logger = pino({}, pino.destination(1)) 6 | logger.info('hello') 7 | logger.info('world') 8 | process.exit(0) 9 | -------------------------------------------------------------------------------- /test/fixtures/eval/index.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-eval */ 2 | 3 | eval(` 4 | const pino = require('../../../') 5 | 6 | const logger = pino( 7 | pino.transport({ 8 | target: 'pino/file' 9 | }) 10 | ) 11 | 12 | logger.info('done!') 13 | `) 14 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/14-files.js: -------------------------------------------------------------------------------- 1 | const file1 = require("./file1.js") 2 | 3 | file1() 4 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/2-files.js: -------------------------------------------------------------------------------- 1 | const file12 = require("./file12.js") 2 | 3 | file12() 4 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file1.js: -------------------------------------------------------------------------------- 1 | const file2 = require("./file2.js") 2 | 3 | module.exports = function () { 4 | file2() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file10.js: -------------------------------------------------------------------------------- 1 | const file11 = require("./file11.js") 2 | 3 | module.exports = function () { 4 | file11() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file11.js: -------------------------------------------------------------------------------- 1 | const file12 = require("./file12.js") 2 | 3 | module.exports = function () { 4 | file12() 5 | } 6 | -------------------------------------------------------------------------------- 
/test/fixtures/eval/node_modules/file12.js: -------------------------------------------------------------------------------- 1 | const file13 = require("./file13.js") 2 | 3 | module.exports = function () { 4 | file13() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file13.js: -------------------------------------------------------------------------------- 1 | const file14 = require("./file14.js") 2 | 3 | module.exports = function () { 4 | file14() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file14.js: -------------------------------------------------------------------------------- 1 | const pino = require("../../../../"); 2 | 3 | module.exports = function() { 4 | const logger = pino( 5 | pino.transport({ 6 | target: 'pino/file' 7 | }) 8 | ) 9 | 10 | logger.info('done!') 11 | } 12 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file2.js: -------------------------------------------------------------------------------- 1 | const file3 = require("./file3.js") 2 | 3 | module.exports = function () { 4 | file3() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file3.js: -------------------------------------------------------------------------------- 1 | const file4 = require("./file4.js") 2 | 3 | module.exports = function () { 4 | file4() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file4.js: -------------------------------------------------------------------------------- 1 | const file5 = require("./file5.js") 2 | 3 | module.exports = function () { 4 | file5() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file5.js: -------------------------------------------------------------------------------- 1 | const file6 = require("./file6.js") 2 | 3 | module.exports = function () { 4 | file6() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file6.js: -------------------------------------------------------------------------------- 1 | const file7 = require("./file7.js") 2 | 3 | module.exports = function () { 4 | file7() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file7.js: -------------------------------------------------------------------------------- 1 | const file8 = require("./file8.js") 2 | 3 | module.exports = function () { 4 | file8() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file8.js: -------------------------------------------------------------------------------- 1 | const file9 = require("./file9.js") 2 | 3 | module.exports = function () { 4 | file9() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/eval/node_modules/file9.js: -------------------------------------------------------------------------------- 1 | const file10 = require("./file10.js") 2 | 3 | module.exports = function () { 4 | file10() 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/noop-transport.js: 
-------------------------------------------------------------------------------- 1 | const { Writable } = require('node:stream') 2 | 3 | module.exports = () => { 4 | return new Writable({ 5 | autoDestroy: true, 6 | write (chunk, enc, cb) { 7 | cb() 8 | } 9 | }) 10 | } 11 | -------------------------------------------------------------------------------- /test/fixtures/pretty/null-prototype.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: process, pid: 123456 } 2 | Date.now = function () { return 1459875739796 } 3 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 4 | const pino = require(require.resolve('./../../../')) 5 | const log = pino({ prettyPrint: true }) 6 | const obj = Object.create(null) 7 | Object.assign(obj, { foo: 'bar' }) 8 | log.info(obj, 'hello') 9 | -------------------------------------------------------------------------------- /test/fixtures/stdout-hack-protection.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: process, pid: 123456 } 2 | 3 | const write = process.stdout.write.bind(process.stdout) 4 | process.stdout.write = function (chunk) { 5 | write('hack ' + chunk) 6 | } 7 | 8 | Date.now = function () { return 1459875739796 } 9 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 10 | const pino = require(require.resolve('../../'))() 11 | pino.info('me') 12 | -------------------------------------------------------------------------------- /test/fixtures/syncfalse-child.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: process, pid: 123456 } 2 | Date.now = function () { return 1459875739796 } 3 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 4 | const pino = require(require.resolve('./../../')) 5 | const asyncLogger = pino(pino.destination({ sync: false })).child({ hello: 'world' }) 6 | asyncLogger.info('h') 7 | -------------------------------------------------------------------------------- /test/fixtures/syncfalse-exit.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: process, pid: 123456 } 2 | Date.now = function () { return 1459875739796 } 3 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 4 | const pino = require(require.resolve('./../../')) 5 | const dest = pino.destination({ dest: 1, minLength: 4096, sync: false }) 6 | const logger = pino({}, dest) 7 | logger.info('hello') 8 | logger.info('world') 9 | process.exit(0) 10 | -------------------------------------------------------------------------------- /test/fixtures/syncfalse-flush-exit.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: process, pid: 123456 } 2 | Date.now = function () { return 1459875739796 } 3 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 4 | const pino = require(require.resolve('./../../')) 5 | const dest = pino.destination({ dest: 1, minLength: 4096, sync: false }) 6 | const logger = pino({}, dest) 7 | logger.info('hello') 8 | logger.info('world') 9 | dest.flushSync() 10 | process.exit(0) 11 | -------------------------------------------------------------------------------- /test/fixtures/syncfalse.js: -------------------------------------------------------------------------------- 1 | global.process = { __proto__: 
process, pid: 123456 } 2 | Date.now = function () { return 1459875739796 } 3 | require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } 4 | const pino = require(require.resolve('./../../')) 5 | const asyncLogger = pino(pino.destination({ minLength: 4096, sync: false })) 6 | asyncLogger.info('h') 7 | -------------------------------------------------------------------------------- /test/fixtures/syntax-error-esm.mjs: -------------------------------------------------------------------------------- 1 | // This is a syntax error 2 | import 3 | -------------------------------------------------------------------------------- /test/fixtures/to-file-transport-with-transform.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('node:fs') 4 | const { once } = require('node:events') 5 | const { Transform } = require('node:stream') 6 | 7 | async function run (opts) { 8 | if (!opts.destination) throw new Error('kaboom') 9 | const stream = fs.createWriteStream(opts.destination) 10 | await once(stream, 'open') 11 | const t = new Transform({ 12 | transform (chunk, enc, cb) { 13 | setImmediate(cb, null, chunk.toString().toUpperCase()) 14 | } 15 | }) 16 | t.pipe(stream) 17 | return t 18 | } 19 | 20 | module.exports = run 21 | -------------------------------------------------------------------------------- /test/fixtures/to-file-transport.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('node:fs') 4 | const { once } = require('node:events') 5 | 6 | async function run (opts) { 7 | if (!opts.destination) throw new Error('kaboom') 8 | const stream = fs.createWriteStream(opts.destination) 9 | await once(stream, 'open') 10 | return stream 11 | } 12 | 13 | module.exports = run 14 | -------------------------------------------------------------------------------- /test/fixtures/to-file-transport.mjs: -------------------------------------------------------------------------------- 1 | import { createWriteStream } from 'node:fs' 2 | import { once } from 'node:events' 3 | 4 | export default async function run (opts) { 5 | const stream = createWriteStream(opts.destination) 6 | await once(stream, 'open') 7 | return stream 8 | } 9 | -------------------------------------------------------------------------------- /test/fixtures/transport-exit-immediately-with-async-dest.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = require('../..') 4 | const transport = pino.transport({ 5 | target: './to-file-transport-with-transform.js', 6 | options: { 7 | destination: process.argv[2] 8 | } 9 | }) 10 | const logger = pino(transport) 11 | 12 | logger.info('Hello') 13 | 14 | logger.info('World') 15 | 16 | process.exit(0) 17 | -------------------------------------------------------------------------------- /test/fixtures/transport-exit-immediately.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = require('../..') 4 | const transport = pino.transport({ 5 | target: 'pino/file' 6 | }) 7 | const logger = pino(transport) 8 | 9 | logger.info('Hello') 10 | 11 | process.exit(0) 12 | -------------------------------------------------------------------------------- /test/fixtures/transport-exit-on-ready.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = 
require('../..') 4 | const transport = pino.transport({ 5 | target: 'pino/file' 6 | }) 7 | const logger = pino(transport) 8 | 9 | transport.on('ready', function () { 10 | logger.info('Hello') 11 | process.exit(0) 12 | }) 13 | -------------------------------------------------------------------------------- /test/fixtures/transport-main.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { join } = require('node:path') 4 | const pino = require('../..') 5 | const transport = pino.transport({ 6 | target: join(__dirname, 'transport-worker.js') 7 | }) 8 | const logger = pino(transport) 9 | logger.info('Hello') 10 | -------------------------------------------------------------------------------- /test/fixtures/transport-many-lines.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = require('../..') 4 | const transport = pino.transport({ 5 | targets: [{ 6 | level: 'info', 7 | target: 'pino/file', 8 | options: { 9 | destination: process.argv[2] 10 | } 11 | }] 12 | }) 13 | const logger = pino(transport) 14 | 15 | const toWrite = 1000000 16 | transport.on('ready', run) 17 | 18 | let total = 0 19 | 20 | function run () { 21 | if (total++ === 8) { 22 | return 23 | } 24 | 25 | for (let i = 0; i < toWrite; i++) { 26 | logger.info(`hello ${i}`) 27 | } 28 | transport.once('drain', run) 29 | } 30 | -------------------------------------------------------------------------------- /test/fixtures/transport-string-stdout.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = require('../..') 4 | const transport = pino.transport({ 5 | target: 'pino/file', 6 | options: { destination: '1' } 7 | }) 8 | const logger = pino(transport) 9 | logger.info('Hello') 10 | -------------------------------------------------------------------------------- /test/fixtures/transport-transform.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const build = require('pino-abstract-transport') 4 | const { pipeline, Transform } = require('node:stream') 5 | module.exports = (options) => { 6 | return build(function (source) { 7 | const myTransportStream = new Transform({ 8 | autoDestroy: true, 9 | objectMode: true, 10 | transform (chunk, enc, cb) { 11 | chunk.service = 'pino' 12 | this.push(JSON.stringify(chunk)) 13 | cb() 14 | } 15 | }) 16 | pipeline(source, myTransportStream, () => {}) 17 | return myTransportStream 18 | }, { 19 | enablePipelining: true 20 | }) 21 | } 22 | -------------------------------------------------------------------------------- /test/fixtures/transport-uses-pino-config.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const build = require('pino-abstract-transport') 4 | const { pipeline, Transform } = require('node:stream') 5 | module.exports = () => { 6 | return build(function (source) { 7 | const myTransportStream = new Transform({ 8 | autoDestroy: true, 9 | objectMode: true, 10 | transform (chunk, enc, cb) { 11 | const { 12 | time, 13 | level, 14 | [source.messageKey]: body, 15 | [source.errorKey]: error, 16 | ...attributes 17 | } = chunk 18 | this.push(JSON.stringify({ 19 | severityText: source.levels.labels[level], 20 | body, 21 | attributes, 22 | ...(error && { error }) 23 | })) 24 | cb() 25 | } 26 | }) 27 | pipeline(source, myTransportStream, () => {}) 28 | return myTransportStream 29 | }, 
{ 30 | enablePipelining: true, 31 | expectPinoConfig: true 32 | }) 33 | } 34 | -------------------------------------------------------------------------------- /test/fixtures/transport-with-on-exit.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const pino = require('../..') 3 | const log = pino({ 4 | transport: { 5 | target: 'pino/file', 6 | options: { destination: 1 } 7 | } 8 | }) 9 | log.info('hello world!') 10 | process.on('exit', (code) => { 11 | log.info('Exiting peacefully') 12 | }) 13 | -------------------------------------------------------------------------------- /test/fixtures/transport-worker-data.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { parentPort, workerData } = require('worker_threads') 4 | const { Writable } = require('node:stream') 5 | 6 | module.exports = (options) => { 7 | const myTransportStream = new Writable({ 8 | autoDestroy: true, 9 | write (chunk, enc, cb) { 10 | parentPort.postMessage({ 11 | code: 'EVENT', 12 | name: 'workerData', 13 | args: [workerData] 14 | }) 15 | cb() 16 | } 17 | }) 18 | return myTransportStream 19 | } 20 | -------------------------------------------------------------------------------- /test/fixtures/transport-worker.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { Writable } = require('node:stream') 4 | const fs = require('node:fs') 5 | module.exports = (options) => { 6 | const myTransportStream = new Writable({ 7 | autoDestroy: true, 8 | write (chunk, enc, cb) { 9 | // Bypass console.log() to avoid flakiness 10 | fs.writeSync(1, chunk.toString()) 11 | cb() 12 | } 13 | }) 14 | return myTransportStream 15 | } 16 | -------------------------------------------------------------------------------- /test/fixtures/transport-wrong-export-type.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | completelyUnrelatedProperty: 'Just a very incorrect transport worker implementation' 3 | } 4 | -------------------------------------------------------------------------------- /test/fixtures/transport/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('node:fs') 4 | const { once } = require('node:events') 5 | 6 | async function run (opts) { 7 | const stream = fs.createWriteStream(opts.destination) 8 | await once(stream, 'open') 9 | return stream 10 | } 11 | 12 | module.exports = run 13 | -------------------------------------------------------------------------------- /test/fixtures/transport/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "transport", 3 | "version": "0.0.1", 4 | "main": "./index.js" 5 | } 6 | -------------------------------------------------------------------------------- /test/fixtures/ts/to-file-transport-with-transform.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'node:fs' 2 | import { once } from 'node:events' 3 | import { Transform } from 'node:stream' 4 | 5 | async function run (opts: { destination?: fs.PathLike }): Promise { 6 | if (!opts.destination) throw new Error('kaboom') 7 | const stream = fs.createWriteStream(opts.destination) 8 | await once(stream, 'open') 9 | const t = new Transform({ 10 | transform (chunk, enc, cb) { 11 | setImmediate(cb, null, 
chunk.toString().toUpperCase()) 12 | } 13 | }) 14 | t.pipe(stream) 15 | return t 16 | } 17 | 18 | export default run 19 | -------------------------------------------------------------------------------- /test/fixtures/ts/to-file-transport.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'node:fs' 2 | import { once } from 'node:events' 3 | 4 | async function run (opts: { destination?: fs.PathLike }): Promise { 5 | if (!opts.destination) throw new Error('kaboom') 6 | const stream = fs.createWriteStream(opts.destination, { encoding: 'utf8' }) 7 | await once(stream, 'open') 8 | return stream 9 | } 10 | 11 | export default run 12 | -------------------------------------------------------------------------------- /test/fixtures/ts/transpile.cjs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const execa = require('execa') 4 | const fs = require('node:fs') 5 | 6 | const existsSync = fs.existsSync 7 | const stat = fs.promises.stat 8 | 9 | // Hardcoded parameters 10 | const esVersions = ['es5', 'es6', 'es2017', 'esnext'] 11 | const filesToTranspile = ['to-file-transport.ts'] 12 | 13 | async function transpile () { 14 | process.chdir(__dirname) 15 | 16 | for (const sourceFileName of filesToTranspile) { 17 | const sourceStat = await stat(sourceFileName) 18 | 19 | for (const esVersion of esVersions) { 20 | const intermediateFileName = sourceFileName.replace(/\.ts$/, '.js') 21 | const targetFileName = sourceFileName.replace(/\.ts$/, `.${esVersion}.cjs`) 22 | 23 | const shouldTranspile = !existsSync(targetFileName) || (await stat(targetFileName)).mtimeMs < sourceStat.mtimeMs 24 | 25 | if (shouldTranspile) { 26 | await execa('tsc', ['--target', esVersion, '--module', 'commonjs', sourceFileName]) 27 | await execa('mv', [intermediateFileName, targetFileName]) 28 | } 29 | } 30 | } 31 | } 32 | 33 | transpile().catch(err => { 34 | process.exitCode = 1 35 | throw err 36 | }) 37 | -------------------------------------------------------------------------------- /test/fixtures/ts/transport-exit-immediately-with-async-dest.ts: -------------------------------------------------------------------------------- 1 | import pino from '../../..' 2 | import { join } from 'node:path' 3 | 4 | const transport = pino.transport({ 5 | target: join(__dirname, 'to-file-transport-with-transform.ts'), 6 | options: { 7 | destination: process.argv[2] 8 | } 9 | }) 10 | const logger = pino(transport) 11 | 12 | logger.info('Hello') 13 | logger.info('World') 14 | 15 | process.exit(0) 16 | -------------------------------------------------------------------------------- /test/fixtures/ts/transport-exit-immediately.ts: -------------------------------------------------------------------------------- 1 | import pino from '../../..' 2 | 3 | const transport = pino.transport({ 4 | target: 'pino/file' 5 | }) 6 | const logger = pino(transport) 7 | 8 | logger.info('Hello') 9 | 10 | process.exit(0) 11 | -------------------------------------------------------------------------------- /test/fixtures/ts/transport-exit-on-ready.ts: -------------------------------------------------------------------------------- 1 | import pino from '../../..' 
2 | 3 | const transport = pino.transport({ 4 | target: 'pino/file' 5 | }) 6 | const logger = pino(transport) 7 | 8 | transport.on('ready', function () { 9 | logger.info('Hello') 10 | process.exit(0) 11 | }) 12 | -------------------------------------------------------------------------------- /test/fixtures/ts/transport-main.ts: -------------------------------------------------------------------------------- 1 | import { join } from 'node:path' 2 | import pino from '../../..' 3 | 4 | const transport = pino.transport({ 5 | target: join(__dirname, 'transport-worker.ts') 6 | }) 7 | const logger = pino(transport) 8 | logger.info('Hello') 9 | -------------------------------------------------------------------------------- /test/fixtures/ts/transport-string-stdout.ts: -------------------------------------------------------------------------------- 1 | import pino from '../../..' 2 | 3 | const transport = pino.transport({ 4 | target: 'pino/file', 5 | options: { destination: '1' } 6 | }) 7 | const logger = pino(transport) 8 | logger.info('Hello') 9 | -------------------------------------------------------------------------------- /test/fixtures/ts/transport-worker.ts: -------------------------------------------------------------------------------- 1 | import { Writable } from 'node:stream' 2 | 3 | export default (): Writable => { 4 | const myTransportStream = new Writable({ 5 | autoDestroy: true, 6 | write (chunk, _enc, cb) { 7 | console.log(chunk.toString()) 8 | cb() 9 | }, 10 | defaultEncoding: 'utf8' 11 | }) 12 | 13 | return myTransportStream 14 | } 15 | -------------------------------------------------------------------------------- /test/helper.d.ts: -------------------------------------------------------------------------------- 1 | import { PathLike } from 'node:fs' 2 | 3 | export declare function watchFileCreated(filename: PathLike): Promise 4 | export declare function watchForWrite(filename: PathLike, testString: string): Promise 5 | -------------------------------------------------------------------------------- /test/helper.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const crypto = require('crypto') 4 | const os = require('node:os') 5 | const writer = require('flush-write-stream') 6 | const split = require('split2') 7 | const { existsSync, readFileSync, statSync, unlinkSync } = require('node:fs') 8 | const pid = process.pid 9 | const hostname = os.hostname() 10 | const t = require('tap') 11 | const { join } = require('node:path') 12 | const { tmpdir } = os 13 | 14 | const isWin = process.platform === 'win32' 15 | const isYarnPnp = process.versions.pnp !== undefined 16 | 17 | function getPathToNull () { 18 | return isWin ? 
'\\\\.\\NUL' : '/dev/null' 19 | } 20 | 21 | function once (emitter, name) { 22 | return new Promise((resolve, reject) => { 23 | if (name !== 'error') emitter.once('error', reject) 24 | emitter.once(name, (...args) => { 25 | emitter.removeListener('error', reject) 26 | resolve(...args) 27 | }) 28 | }) 29 | } 30 | 31 | function sink (func) { 32 | const result = split((data) => { 33 | try { 34 | return JSON.parse(data) 35 | } catch (err) { 36 | console.log(err) 37 | console.log(data) 38 | } 39 | }) 40 | if (func) result.pipe(writer.obj(func)) 41 | return result 42 | } 43 | 44 | function check (is, chunk, level, msg) { 45 | is(new Date(chunk.time) <= new Date(), true, 'time is greater than Date.now()') 46 | delete chunk.time 47 | is(chunk.pid, pid) 48 | is(chunk.hostname, hostname) 49 | is(chunk.level, level) 50 | is(chunk.msg, msg) 51 | } 52 | 53 | function sleep (ms) { 54 | return new Promise((resolve) => { 55 | setTimeout(resolve, ms) 56 | }) 57 | } 58 | 59 | function watchFileCreated (filename) { 60 | return new Promise((resolve, reject) => { 61 | const TIMEOUT = process.env.PINO_TEST_WAIT_WATCHFILE_TIMEOUT || 10000 62 | const INTERVAL = 100 63 | const threshold = TIMEOUT / INTERVAL 64 | let counter = 0 65 | const interval = setInterval(() => { 66 | const exists = existsSync(filename) 67 | // On some CI runs file is created but not filled 68 | if (exists && statSync(filename).size !== 0) { 69 | clearInterval(interval) 70 | resolve() 71 | } else if (counter <= threshold) { 72 | counter++ 73 | } else { 74 | clearInterval(interval) 75 | reject(new Error( 76 | `${filename} hasn't been created within ${TIMEOUT} ms. ` + 77 | (exists ? 'File exist, but still empty.' : 'File not yet created.') 78 | )) 79 | } 80 | }, INTERVAL) 81 | }) 82 | } 83 | 84 | function watchForWrite (filename, testString) { 85 | return new Promise((resolve, reject) => { 86 | const TIMEOUT = process.env.PINO_TEST_WAIT_WRITE_TIMEOUT || 10000 87 | const INTERVAL = 100 88 | const threshold = TIMEOUT / INTERVAL 89 | let counter = 0 90 | const interval = setInterval(() => { 91 | if (readFileSync(filename).includes(testString)) { 92 | clearInterval(interval) 93 | resolve() 94 | } else if (counter <= threshold) { 95 | counter++ 96 | } else { 97 | clearInterval(interval) 98 | reject(new Error(`'${testString}' hasn't been written to ${filename} within ${TIMEOUT} ms.`)) 99 | } 100 | }, INTERVAL) 101 | }) 102 | } 103 | 104 | let files = [] 105 | 106 | function file () { 107 | const hash = crypto.randomBytes(12).toString('hex') 108 | const file = join(tmpdir(), `pino-${pid}-${hash}`) 109 | files.push(file) 110 | return file 111 | } 112 | 113 | process.on('beforeExit', () => { 114 | if (files.length === 0) return 115 | t.comment('unlink files') 116 | for (const file of files) { 117 | try { 118 | t.comment(`unliking ${file}`) 119 | unlinkSync(file) 120 | } catch (e) { 121 | console.log(e) 122 | } 123 | } 124 | files = [] 125 | t.comment('unlink completed') 126 | }) 127 | 128 | module.exports = { getPathToNull, sink, check, once, sleep, watchFileCreated, watchForWrite, isWin, isYarnPnp, file } 129 | -------------------------------------------------------------------------------- /test/hooks.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const tap = require('tap') 4 | const { sink, once } = require('./helper') 5 | const pino = require('../') 6 | 7 | tap.test('log method hook', t => { 8 | t.test('gets invoked', async t => { 9 | t.plan(8) 10 | 11 | const stream = sink() 12 | 
const logger = pino({ 13 | hooks: { 14 | logMethod (args, method, level) { 15 | t.type(args, Array) 16 | t.type(level, 'number') 17 | t.equal(args.length, 3) 18 | t.equal(level, this.levels.values.info) 19 | t.same(args, ['a', 'b', 'c']) 20 | 21 | t.type(method, Function) 22 | t.equal(method.name, 'LOG') 23 | 24 | method.apply(this, [args.join('-')]) 25 | } 26 | } 27 | }, stream) 28 | 29 | const o = once(stream, 'data') 30 | logger.info('a', 'b', 'c') 31 | t.match(await o, { msg: 'a-b-c' }) 32 | }) 33 | 34 | t.test('fatal method invokes hook', async t => { 35 | t.plan(2) 36 | 37 | const stream = sink() 38 | const logger = pino({ 39 | hooks: { 40 | logMethod (args, method) { 41 | t.pass() 42 | method.apply(this, [args.join('-')]) 43 | } 44 | } 45 | }, stream) 46 | 47 | const o = once(stream, 'data') 48 | logger.fatal('a') 49 | t.match(await o, { msg: 'a' }) 50 | }) 51 | 52 | t.test('children get the hook', async t => { 53 | t.plan(4) 54 | 55 | const stream = sink() 56 | const root = pino({ 57 | hooks: { 58 | logMethod (args, method) { 59 | t.pass() 60 | method.apply(this, [args.join('-')]) 61 | } 62 | } 63 | }, stream) 64 | const child = root.child({ child: 'one' }) 65 | const grandchild = child.child({ child: 'two' }) 66 | 67 | let o = once(stream, 'data') 68 | child.info('a', 'b') 69 | t.match(await o, { msg: 'a-b' }) 70 | 71 | o = once(stream, 'data') 72 | grandchild.info('c', 'd') 73 | t.match(await o, { msg: 'c-d' }) 74 | }) 75 | 76 | t.test('get log level', async t => { 77 | t.plan(3) 78 | 79 | const stream = sink() 80 | const logger = pino({ 81 | hooks: { 82 | logMethod (args, method, level) { 83 | t.type(level, 'number') 84 | t.equal(level, this.levels.values.error) 85 | 86 | method.apply(this, [args.join('-')]) 87 | } 88 | } 89 | }, stream) 90 | 91 | const o = once(stream, 'data') 92 | logger.error('a') 93 | t.match(await o, { msg: 'a' }) 94 | }) 95 | 96 | t.end() 97 | }) 98 | 99 | tap.test('streamWrite hook', t => { 100 | t.test('gets invoked', async t => { 101 | t.plan(1) 102 | 103 | const stream = sink() 104 | const logger = pino({ 105 | hooks: { 106 | streamWrite (s) { 107 | return s.replaceAll('redact-me', 'XXX') 108 | } 109 | } 110 | }, stream) 111 | 112 | const o = once(stream, 'data') 113 | logger.info('hide redact-me in this string') 114 | t.match(await o, { msg: 'hide XXX in this string' }) 115 | }) 116 | 117 | t.end() 118 | }) 119 | -------------------------------------------------------------------------------- /test/internals/version.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('node:fs') 4 | const path = require('node:path') 5 | const t = require('tap') 6 | const test = t.test 7 | const pino = require('../..')() 8 | 9 | test('should be the same as package.json', t => { 10 | t.plan(1) 11 | 12 | const json = JSON.parse(fs.readFileSync(path.join(__dirname, '..', '..', 'package.json')).toString('utf8')) 13 | 14 | t.equal(pino.version, json.version) 15 | }) 16 | -------------------------------------------------------------------------------- /test/jest/basic.spec.js: -------------------------------------------------------------------------------- 1 | /* global test */ 2 | const pino = require('../../pino') 3 | 4 | test('transport should work in jest', function () { 5 | pino({ 6 | transport: { 7 | target: 'pino-pretty' 8 | } 9 | }) 10 | }) 11 | -------------------------------------------------------------------------------- /test/metadata.test.js: 
-------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const os = require('node:os') 4 | const { test } = require('tap') 5 | const pino = require('../') 6 | 7 | const { pid } = process 8 | const hostname = os.hostname() 9 | 10 | test('metadata works', async ({ ok, same, equal }) => { 11 | const now = Date.now() 12 | const instance = pino({}, { 13 | [Symbol.for('pino.metadata')]: true, 14 | write (chunk) { 15 | equal(instance, this.lastLogger) 16 | equal(30, this.lastLevel) 17 | equal('a msg', this.lastMsg) 18 | ok(Number(this.lastTime) >= now) 19 | same(this.lastObj, { hello: 'world' }) 20 | const result = JSON.parse(chunk) 21 | ok(new Date(result.time) <= new Date(), 'time is not greater than Date.now()') 22 | delete result.time 23 | same(result, { 24 | pid, 25 | hostname, 26 | level: 30, 27 | hello: 'world', 28 | msg: 'a msg' 29 | }) 30 | } 31 | }) 32 | 33 | instance.info({ hello: 'world' }, 'a msg') 34 | }) 35 | 36 | test('child loggers work', async ({ ok, same, equal }) => { 37 | const instance = pino({}, { 38 | [Symbol.for('pino.metadata')]: true, 39 | write (chunk) { 40 | equal(child, this.lastLogger) 41 | equal(30, this.lastLevel) 42 | equal('a msg', this.lastMsg) 43 | same(this.lastObj, { from: 'child' }) 44 | const result = JSON.parse(chunk) 45 | ok(new Date(result.time) <= new Date(), 'time is not greater than Date.now()') 46 | delete result.time 47 | same(result, { 48 | pid, 49 | hostname, 50 | level: 30, 51 | hello: 'world', 52 | from: 'child', 53 | msg: 'a msg' 54 | }) 55 | } 56 | }) 57 | 58 | const child = instance.child({ hello: 'world' }) 59 | child.info({ from: 'child' }, 'a msg') 60 | }) 61 | 62 | test('without object', async ({ ok, same, equal }) => { 63 | const instance = pino({}, { 64 | [Symbol.for('pino.metadata')]: true, 65 | write (chunk) { 66 | equal(instance, this.lastLogger) 67 | equal(30, this.lastLevel) 68 | equal('a msg', this.lastMsg) 69 | same({ }, this.lastObj) 70 | const result = JSON.parse(chunk) 71 | ok(new Date(result.time) <= new Date(), 'time is not greater than Date.now()') 72 | delete result.time 73 | same(result, { 74 | pid, 75 | hostname, 76 | level: 30, 77 | msg: 'a msg' 78 | }) 79 | } 80 | }) 81 | 82 | instance.info('a msg') 83 | }) 84 | 85 | test('without msg', async ({ ok, same, equal }) => { 86 | const instance = pino({}, { 87 | [Symbol.for('pino.metadata')]: true, 88 | write (chunk) { 89 | equal(instance, this.lastLogger) 90 | equal(30, this.lastLevel) 91 | equal(undefined, this.lastMsg) 92 | same({ hello: 'world' }, this.lastObj) 93 | const result = JSON.parse(chunk) 94 | ok(new Date(result.time) <= new Date(), 'time is not greater than Date.now()') 95 | delete result.time 96 | same(result, { 97 | pid, 98 | hostname, 99 | level: 30, 100 | hello: 'world' 101 | }) 102 | } 103 | }) 104 | 105 | instance.info({ hello: 'world' }) 106 | }) 107 | -------------------------------------------------------------------------------- /test/mixin-merge-strategy.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const { sink, once } = require('./helper') 5 | const pino = require('../') 6 | 7 | const level = 50 8 | const name = 'error' 9 | 10 | test('default merge strategy', async ({ ok, same }) => { 11 | const stream = sink() 12 | const instance = pino({ 13 | base: {}, 14 | mixin () { 15 | return { tag: 'k8s' } 16 | } 17 | }, stream) 18 | instance.level = name 19 | instance[name]({ 20 | tag: 'local' 21 | }, 'test') 22 |
const result = await once(stream, 'data') 23 | ok(new Date(result.time) <= new Date(), 'time is not greater than Date.now()') 24 | delete result.time 25 | same(result, { 26 | level, 27 | msg: 'test', 28 | tag: 'local' 29 | }) 30 | }) 31 | 32 | test('custom merge strategy with mixin priority', async ({ ok, same }) => { 33 | const stream = sink() 34 | const instance = pino({ 35 | base: {}, 36 | mixin () { 37 | return { tag: 'k8s' } 38 | }, 39 | mixinMergeStrategy (mergeObject, mixinObject) { 40 | return Object.assign(mergeObject, mixinObject) 41 | } 42 | }, stream) 43 | instance.level = name 44 | instance[name]({ 45 | tag: 'local' 46 | }, 'test') 47 | const result = await once(stream, 'data') 48 | ok(new Date(result.time) <= new Date(), 'time is not greater than Date.now()') 49 | delete result.time 50 | same(result, { 51 | level, 52 | msg: 'test', 53 | tag: 'k8s' 54 | }) 55 | }) 56 | -------------------------------------------------------------------------------- /test/pkg/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const os = require('node:os') 4 | const { join } = require('node:path') 5 | const { readFile } = require('node:fs').promises 6 | const { watchFileCreated, file } = require('../helper') 7 | const { test } = require('tap') 8 | const pino = require('../../pino') 9 | 10 | const { pid } = process 11 | const hostname = os.hostname() 12 | 13 | /** 14 | * This file is packaged using pkg in order to test if transport-stream.js works in that context 15 | */ 16 | 17 | test('pino.transport with worker destination overridden by bundler and mjs transport', async ({ same, teardown }) => { 18 | globalThis.__bundlerPathsOverrides = { 19 | 'pino-worker': join(__dirname, '..', '..', 'lib/worker.js') 20 | } 21 | 22 | const destination = file() 23 | const transport = pino.transport({ 24 | targets: [ 25 | { 26 | target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.es2017.cjs'), 27 | options: { destination } 28 | } 29 | ] 30 | }) 31 | 32 | teardown(transport.end.bind(transport)) 33 | const instance = pino(transport) 34 | instance.info('hello') 35 | await watchFileCreated(destination) 36 | const result = JSON.parse(await readFile(destination)) 37 | delete result.time 38 | same(result, { 39 | pid, 40 | hostname, 41 | level: 30, 42 | msg: 'hello' 43 | }) 44 | 45 | globalThis.__bundlerPathsOverrides = undefined 46 | }) 47 | -------------------------------------------------------------------------------- /test/pkg/pkg.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "pkg": { 3 | "assets": [ 4 | "../../lib/worker.js", 5 | "../../lib/transport-stream.js", 6 | "../../test/fixtures/ts/to-file-transport.es2017.cjs", 7 | "../../node_modules/pino-abstract-transport/index.js" 8 | ], 9 | "targets": [ 10 | "node14", 11 | "node16", 12 | "node18", 13 | "node20" 14 | ], 15 | "outputPath": "test/pkg" 16 | } 17 | } -------------------------------------------------------------------------------- /test/pkg/pkg.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const config = require('./pkg.config.json') 5 | const { promisify } = require('node:util') 6 | const { unlink } = require('node:fs/promises') 7 | const { join } = require('node:path') 8 | const { platform } = require('node:process') 9 | const execFile = promisify(require('node:child_process').execFile) 10 | 11 | const skip =
process.env.PNPM_CI || process.env.CITGM || process.arch === 'ppc64' 12 | 13 | /** 14 | * The following regex is for testing the deprecation warning that is thrown by the `punycode` module. 15 | * Exact text that it's matching is: 16 | * (node:1234) [DEP0040] DeprecationWarning: The `punycode` module is deprecated. 17 | Please use a userland alternative instead. 18 | 19 | (Use `node --trace-deprecation ...` to show where the warning was created) 20 | */ 21 | const deprecationWarningRegex = /^\(\w+:\d+\)\s\[[\w|\d]+\]\sDeprecationWarning: The `punycode` module is deprecated\.\s+Please use a userland alternative instead\.\s+\(Use `node --trace-deprecation \.\.\.` to show where the warning was created\)\s+$/ 22 | 23 | test('worker test when packaged into executable using pkg', { skip }, async (t) => { 24 | const packageName = 'index' 25 | 26 | // package the app into several node versions, check config for more info 27 | const filePath = `${join(__dirname, packageName)}.js` 28 | const configPath = join(__dirname, 'pkg.config.json') 29 | const { stderr } = await execFile('npx', ['pkg', filePath, '--config', configPath], { shell: true }) 30 | 31 | // there should be no error when packaging 32 | const expectedvalue = stderr === '' || deprecationWarningRegex.test(stderr) 33 | t.ok(expectedvalue) 34 | 35 | // pkg outputs files in the following format by default: {filename}-{node version} 36 | for (const target of config.pkg.targets) { 37 | // execute the packaged test 38 | let executablePath = `${join(config.pkg.outputPath, packageName)}-${target}` 39 | 40 | // when on windows, we need the .exe extension 41 | if (platform === 'win32') { 42 | executablePath = `${executablePath}.exe` 43 | } else { 44 | executablePath = `./${executablePath}` 45 | } 46 | 47 | const { stderr } = await execFile(executablePath) 48 | 49 | // check if there were no errors 50 | const expectedvalue = stderr === '' || deprecationWarningRegex.test(stderr) 51 | t.ok(expectedvalue) 52 | 53 | // clean up afterwards 54 | await unlink(executablePath) 55 | } 56 | 57 | t.end() 58 | }) 59 | -------------------------------------------------------------------------------- /test/stdout-protection.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const { join } = require('node:path') 5 | const { fork } = require('node:child_process') 6 | const { once } = require('./helper') 7 | const writer = require('flush-write-stream') 8 | const pino = require('..') 9 | 10 | test('do not use SonicBoom if someone tampered with process.stdout.write', async ({ not }) => { 11 | let actual = '' 12 | const child = fork(join(__dirname, 'fixtures', 'stdout-hack-protection.js'), { silent: true }) 13 | 14 | child.stdout.pipe(writer((s, enc, cb) => { 15 | actual += s 16 | cb() 17 | })) 18 | await once(child, 'close') 19 | not(actual.match(/^hack/), null) 20 | }) 21 | 22 | test('do not use SonicBoom if someone has passed process.stdout to pino', async ({ equal }) => { 23 | const logger = pino(process.stdout) 24 | equal(logger[pino.symbols.streamSym], process.stdout) 25 | }) 26 | 27 | test('do not crash if process.stdout has no fd', async ({ teardown }) => { 28 | const fd = process.stdout.fd 29 | delete process.stdout.fd 30 | teardown(function () { process.stdout.fd = fd }) 31 | pino() 32 | }) 33 | 34 | test('use fd=1 if process.stdout has no fd in pino.destination() (worker case)', async ({ teardown }) => { 35 | const fd = process.stdout.fd 36 | delete process.stdout.fd
37 | teardown(function () { process.stdout.fd = fd }) 38 | pino.destination() 39 | }) 40 | -------------------------------------------------------------------------------- /test/timestamp.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | /* eslint no-prototype-builtins: 0 */ 4 | 5 | const { test } = require('tap') 6 | const { sink, once } = require('./helper') 7 | const pino = require('../') 8 | 9 | test('pino exposes standard time functions', async ({ ok }) => { 10 | ok(pino.stdTimeFunctions) 11 | ok(pino.stdTimeFunctions.epochTime) 12 | ok(pino.stdTimeFunctions.unixTime) 13 | ok(pino.stdTimeFunctions.nullTime) 14 | ok(pino.stdTimeFunctions.isoTime) 15 | }) 16 | 17 | test('pino accepts external time functions', async ({ equal }) => { 18 | const opts = { 19 | timestamp: () => ',"time":"none"' 20 | } 21 | const stream = sink() 22 | const instance = pino(opts, stream) 23 | instance.info('foobar') 24 | const result = await once(stream, 'data') 25 | equal(result.hasOwnProperty('time'), true) 26 | equal(result.time, 'none') 27 | }) 28 | 29 | test('pino accepts external time functions with custom label', async ({ equal }) => { 30 | const opts = { 31 | timestamp: () => ',"custom-time-label":"none"' 32 | } 33 | const stream = sink() 34 | const instance = pino(opts, stream) 35 | instance.info('foobar') 36 | const result = await once(stream, 'data') 37 | equal(result.hasOwnProperty('custom-time-label'), true) 38 | equal(result['custom-time-label'], 'none') 39 | }) 40 | 41 | test('inserts timestamp by default', async ({ ok, equal }) => { 42 | const stream = sink() 43 | const instance = pino(stream) 44 | instance.info('foobar') 45 | const result = await once(stream, 'data') 46 | equal(result.hasOwnProperty('time'), true) 47 | ok(new Date(result.time) <= new Date(), 'time is not greater than the current timestamp') 48 | equal(result.msg, 'foobar') 49 | }) 50 | 51 | test('omits timestamp when timestamp option is false', async ({ equal }) => { 52 | const stream = sink() 53 | const instance = pino({ timestamp: false }, stream) 54 | instance.info('foobar') 55 | const result = await once(stream, 'data') 56 | equal(result.hasOwnProperty('time'), false) 57 | equal(result.msg, 'foobar') 58 | }) 59 | 60 | test('inserts timestamp when timestamp option is true', async ({ ok, equal }) => { 61 | const stream = sink() 62 | const instance = pino({ timestamp: true }, stream) 63 | instance.info('foobar') 64 | const result = await once(stream, 'data') 65 | equal(result.hasOwnProperty('time'), true) 66 | ok(new Date(result.time) <= new Date(), 'time is not greater than the current timestamp') 67 | equal(result.msg, 'foobar') 68 | }) 69 | 70 | test('child inserts timestamp by default', async ({ ok, equal }) => { 71 | const stream = sink() 72 | const logger = pino(stream) 73 | const instance = logger.child({ component: 'child' }) 74 | instance.info('foobar') 75 | const result = await once(stream, 'data') 76 | equal(result.hasOwnProperty('time'), true) 77 | ok(new Date(result.time) <= new Date(), 'time is not greater than the current timestamp') 78 | equal(result.msg, 'foobar') 79 | }) 80 | 81 | test('child omits timestamp with option', async ({ equal }) => { 82 | const stream = sink() 83 | const logger = pino({ timestamp: false }, stream) 84 | const instance = logger.child({ component: 'child' }) 85 | instance.info('foobar') 86 | const result = await once(stream, 'data') 87 | equal(result.hasOwnProperty('time'), false) 88 | equal(result.msg, 'foobar') 89 | }) 90 | 91 |
test('pino.stdTimeFunctions.unixTime returns seconds based timestamps', async ({ equal }) => { 92 | const opts = { 93 | timestamp: pino.stdTimeFunctions.unixTime 94 | } 95 | const stream = sink() 96 | const instance = pino(opts, stream) 97 | const now = Date.now 98 | Date.now = () => 1531069919686 99 | instance.info('foobar') 100 | const result = await once(stream, 'data') 101 | equal(result.hasOwnProperty('time'), true) 102 | equal(result.time, 1531069920) 103 | Date.now = now 104 | }) 105 | 106 | test('pino.stdTimeFunctions.isoTime returns ISO 8601 timestamps', async ({ equal }) => { 107 | const opts = { 108 | timestamp: pino.stdTimeFunctions.isoTime 109 | } 110 | const stream = sink() 111 | const instance = pino(opts, stream) 112 | const ms = 1531069919686 113 | const now = Date.now 114 | Date.now = () => ms 115 | const iso = new Date(ms).toISOString() 116 | instance.info('foobar') 117 | const result = await once(stream, 'data') 118 | equal(result.hasOwnProperty('time'), true) 119 | equal(result.time, iso) 120 | Date.now = now 121 | }) 122 | -------------------------------------------------------------------------------- /test/transport-stream.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | 5 | test('should import', async (t) => { 6 | t.plan(2) 7 | const mockRealRequire = (target) => { 8 | return { 9 | default: { 10 | default: () => { 11 | t.equal(target, 'pino-pretty') 12 | return Promise.resolve() 13 | } 14 | } 15 | } 16 | } 17 | const mockRealImport = async () => { await Promise.resolve(); throw Object.assign(new Error(), { code: 'ERR_MODULE_NOT_FOUND' }) } 18 | 19 | /** @type {typeof import('../lib/transport-stream.js')} */ 20 | const loadTransportStreamBuilder = t.mock('../lib/transport-stream.js', { 'real-require': { realRequire: mockRealRequire, realImport: mockRealImport } }) 21 | 22 | const fn = await loadTransportStreamBuilder('pino-pretty') 23 | 24 | t.resolves(fn()) 25 | t.end() 26 | }) 27 | -------------------------------------------------------------------------------- /test/transport/big.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const { join } = require('node:path') 5 | const { createReadStream } = require('node:fs') 6 | const { promisify } = require('node:util') 7 | const execa = require('execa') 8 | const split = require('split2') 9 | const stream = require('node:stream') 10 | const { file } = require('../helper') 11 | 12 | const pipeline = promisify(stream.pipeline) 13 | const { Writable } = stream 14 | const sleep = promisify(setTimeout) 15 | 16 | const skip = process.env.CI || process.env.CITGM 17 | 18 | test('eight million lines', { skip }, async ({ equal, comment }) => { 19 | const destination = file() 20 | await execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-many-lines.js'), destination]) 21 | 22 | if (process.platform !== 'win32') { 23 | try { 24 | await execa('sync') // Wait for the file to be written to disk 25 | } catch { 26 | // Just a fallback, this should be unreachable 27 | } 28 | } 29 | await sleep(1000) // It seems that sync is not enough (even in POSIX systems) 30 | 31 | const toWrite = 8 * 1000000 32 | let count = 0 33 | await pipeline(createReadStream(destination), split(), new Writable({ 34 | write (chunk, enc, cb) { 35 | if (count % (toWrite / 10) === 0) { 36 | comment(`read ${count}`) 37 | } 38 | count++ 39 | cb() 40 | 
} 41 | })) 42 | equal(count, toWrite) 43 | }) 44 | -------------------------------------------------------------------------------- /test/transport/bundlers-support.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const os = require('node:os') 4 | const { join } = require('node:path') 5 | const { readFile } = require('node:fs').promises 6 | const { watchFileCreated, file } = require('../helper') 7 | const { test } = require('tap') 8 | const pino = require('../../pino') 9 | 10 | const { pid } = process 11 | const hostname = os.hostname() 12 | 13 | test('pino.transport with destination overridden by bundler', async ({ same, teardown }) => { 14 | globalThis.__bundlerPathsOverrides = { 15 | foobar: join(__dirname, '..', 'fixtures', 'to-file-transport.js') 16 | } 17 | 18 | const destination = file() 19 | const transport = pino.transport({ 20 | target: 'foobar', 21 | options: { destination } 22 | }) 23 | teardown(transport.end.bind(transport)) 24 | const instance = pino(transport) 25 | instance.info('hello') 26 | await watchFileCreated(destination) 27 | const result = JSON.parse(await readFile(destination)) 28 | delete result.time 29 | same(result, { 30 | pid, 31 | hostname, 32 | level: 30, 33 | msg: 'hello' 34 | }) 35 | 36 | globalThis.__bundlerPathsOverrides = undefined 37 | }) 38 | 39 | test('pino.transport with worker destination overridden by bundler', async ({ same, teardown }) => { 40 | globalThis.__bundlerPathsOverrides = { 41 | 'pino-worker': join(__dirname, '..', '..', 'lib/worker.js') 42 | } 43 | 44 | const destination = file() 45 | const transport = pino.transport({ 46 | targets: [ 47 | { 48 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 49 | options: { destination } 50 | } 51 | ] 52 | }) 53 | teardown(transport.end.bind(transport)) 54 | const instance = pino(transport) 55 | instance.info('hello') 56 | await watchFileCreated(destination) 57 | const result = JSON.parse(await readFile(destination)) 58 | delete result.time 59 | same(result, { 60 | pid, 61 | hostname, 62 | level: 30, 63 | msg: 'hello' 64 | }) 65 | 66 | globalThis.__bundlerPathsOverrides = undefined 67 | }) 68 | 69 | test('pino.transport with worker destination overridden by bundler and mjs transport', async ({ same, teardown }) => { 70 | globalThis.__bundlerPathsOverrides = { 71 | 'pino-worker': join(__dirname, '..', '..', 'lib/worker.js') 72 | } 73 | 74 | const destination = file() 75 | const transport = pino.transport({ 76 | targets: [ 77 | { 78 | target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.es2017.cjs'), 79 | options: { destination } 80 | } 81 | ] 82 | }) 83 | teardown(transport.end.bind(transport)) 84 | const instance = pino(transport) 85 | instance.info('hello') 86 | await watchFileCreated(destination) 87 | const result = JSON.parse(await readFile(destination)) 88 | delete result.time 89 | same(result, { 90 | pid, 91 | hostname, 92 | level: 30, 93 | msg: 'hello' 94 | }) 95 | 96 | globalThis.__bundlerPathsOverrides = undefined 97 | }) 98 | -------------------------------------------------------------------------------- /test/transport/caller.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { join } = require('node:path') 4 | const { test } = require('tap') 5 | const execa = require('execa') 6 | 7 | test('when using a custom transport outside node_modules, the first file outside node_modules should be used', async function (t) { 8 | const 
evalApp = join(__dirname, '../', '/fixtures/eval/index.js') 9 | const { stdout } = await execa(process.argv[0], [evalApp]) 10 | t.match(stdout, /done!/) 11 | }) 12 | 13 | test('when using a custom transport where some files in stacktrace are in the node_modules, the first file outside node_modules should be used', async function (t) { 14 | const evalApp = join(__dirname, '../', '/fixtures/eval/node_modules/2-files.js') 15 | const { stdout } = await execa(process.argv[0], [evalApp]) 16 | t.match(stdout, /done!/) 17 | }) 18 | 19 | test('when using a custom transport where all files in stacktrace are in the node_modules, the first file inside node_modules should be used', async function (t) { 20 | const evalApp = join(__dirname, '../', '/fixtures/eval/node_modules/14-files.js') 21 | const { stdout } = await execa(process.argv[0], [evalApp]) 22 | t.match(stdout, /done!/) 23 | }) 24 | -------------------------------------------------------------------------------- /test/transport/core.transpiled.test.ts: -------------------------------------------------------------------------------- 1 | import * as os from 'node:os' 2 | import { join } from 'node:path' 3 | import fs from 'node:fs' 4 | import { watchFileCreated } from '../helper' 5 | import { test } from 'tap' 6 | import pino from '../../' 7 | import * as url from 'node:url' 8 | 9 | const readFile = fs.promises.readFile 10 | 11 | const { pid } = process 12 | const hostname = os.hostname() 13 | 14 | // A subset of the test from core.test.js, we don't need all of them to check for compatibility 15 | function runTests(esVersion: string): void { 16 | test(`(ts -> ${esVersion}) pino.transport with file`, async ({ same, teardown }) => { 17 | const destination = join( 18 | os.tmpdir(), 19 | '_' + Math.random().toString(36).substr(2, 9) 20 | ) 21 | const transport = pino.transport({ 22 | target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`), 23 | options: { destination } 24 | }) 25 | teardown(transport.end.bind(transport)) 26 | const instance = pino(transport) 27 | instance.info('hello') 28 | await watchFileCreated(destination) 29 | const result = JSON.parse(await readFile(destination, { encoding: 'utf8' })) 30 | delete result.time 31 | same(result, { 32 | pid, 33 | hostname, 34 | level: 30, 35 | msg: 'hello' 36 | }) 37 | }) 38 | 39 | test(`(ts -> ${esVersion}) pino.transport with file URL`, async ({ same, teardown }) => { 40 | const destination = join( 41 | os.tmpdir(), 42 | '_' + Math.random().toString(36).substr(2, 9) 43 | ) 44 | const transport = pino.transport({ 45 | target: url.pathToFileURL(join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`)).href, 46 | options: { destination } 47 | }) 48 | teardown(transport.end.bind(transport)) 49 | const instance = pino(transport) 50 | instance.info('hello') 51 | await watchFileCreated(destination) 52 | const result = JSON.parse(await readFile(destination, { encoding: 'utf8' })) 53 | delete result.time 54 | same(result, { 55 | pid, 56 | hostname, 57 | level: 30, 58 | msg: 'hello' 59 | }) 60 | }) 61 | 62 | test(`(ts -> ${esVersion}) pino.transport with two files`, async ({ same, teardown }) => { 63 | const dest1 = join( 64 | os.tmpdir(), 65 | '_' + Math.random().toString(36).substr(2, 9) 66 | ) 67 | const dest2 = join( 68 | os.tmpdir(), 69 | '_' + Math.random().toString(36).substr(2, 9) 70 | ) 71 | const transport = pino.transport({ 72 | targets: [{ 73 | level: 'info', 74 | target: join(__dirname, '..', 'fixtures', 'ts', 
`to-file-transport.${esVersion}.cjs`), 75 | options: { destination: dest1 } 76 | }, { 77 | level: 'info', 78 | target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`), 79 | options: { destination: dest2 } 80 | }] 81 | }) 82 | 83 | teardown(transport.end.bind(transport)) 84 | 85 | const instance = pino(transport) 86 | instance.info('hello') 87 | 88 | await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)]) 89 | 90 | const result1 = JSON.parse(await readFile(dest1, { encoding: 'utf8' })) 91 | delete result1.time 92 | same(result1, { 93 | pid, 94 | hostname, 95 | level: 30, 96 | msg: 'hello' 97 | }) 98 | const result2 = JSON.parse(await readFile(dest2, { encoding: 'utf8' })) 99 | delete result2.time 100 | same(result2, { 101 | pid, 102 | hostname, 103 | level: 30, 104 | msg: 'hello' 105 | }) 106 | }) 107 | } 108 | 109 | runTests('es5') 110 | runTests('es6') 111 | runTests('es2017') 112 | runTests('esnext') 113 | -------------------------------------------------------------------------------- /test/transport/crash.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { join } = require('node:path') 4 | const { once } = require('node:events') 5 | const { setImmediate: immediate } = require('node:timers/promises') 6 | const { test } = require('tap') 7 | const pino = require('../../') 8 | 9 | test('pino.transport emits error if the worker exits with 0 unexpectedly', async ({ same, teardown, equal }) => { 10 | // This test will take 10s, because flushSync waits for 10s 11 | const transport = pino.transport({ 12 | target: join(__dirname, '..', 'fixtures', 'crashing-transport.js'), 13 | sync: true 14 | }) 15 | teardown(transport.end.bind(transport)) 16 | 17 | await once(transport, 'ready') 18 | 19 | let maybeError 20 | transport.on('error', (err) => { 21 | maybeError = err 22 | }) 23 | 24 | const logger = pino(transport) 25 | for (let i = 0; i < 100000; i++) { 26 | logger.info('hello') 27 | } 28 | 29 | await once(transport.worker, 'exit') 30 | 31 | await immediate() 32 | 33 | same(maybeError.message, 'the worker has exited') 34 | }) 35 | -------------------------------------------------------------------------------- /test/transport/pipeline.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const os = require('node:os') 4 | const { join } = require('node:path') 5 | const { readFile } = require('node:fs').promises 6 | const { watchFileCreated, file } = require('../helper') 7 | const { test } = require('tap') 8 | const pino = require('../../') 9 | const { DEFAULT_LEVELS } = require('../../lib/constants') 10 | 11 | const { pid } = process 12 | const hostname = os.hostname() 13 | 14 | test('pino.transport with a pipeline', async ({ same, teardown }) => { 15 | const destination = file() 16 | const transport = pino.transport({ 17 | pipeline: [{ 18 | target: join(__dirname, '..', 'fixtures', 'transport-transform.js') 19 | }, { 20 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 21 | options: { destination } 22 | }] 23 | }) 24 | teardown(transport.end.bind(transport)) 25 | const instance = pino(transport) 26 | instance.info('hello') 27 | await watchFileCreated(destination) 28 | const result = JSON.parse(await readFile(destination)) 29 | delete result.time 30 | same(result, { 31 | pid, 32 | hostname, 33 | level: DEFAULT_LEVELS.info, 34 | msg: 'hello', 35 | service: 'pino' // this property was added by the transform
36 | }) 37 | }) 38 | 39 | test('pino.transport with targets containing pipelines', async ({ same, teardown }) => { 40 | const destinationA = file() 41 | const destinationB = file() 42 | const transport = pino.transport({ 43 | targets: [ 44 | { 45 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 46 | options: { destination: destinationA } 47 | }, 48 | { 49 | pipeline: [ 50 | { 51 | target: join(__dirname, '..', 'fixtures', 'transport-transform.js') 52 | }, 53 | { 54 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 55 | options: { destination: destinationB } 56 | } 57 | ] 58 | } 59 | ] 60 | }) 61 | 62 | teardown(transport.end.bind(transport)) 63 | const instance = pino(transport) 64 | instance.info('hello') 65 | await watchFileCreated(destinationA) 66 | await watchFileCreated(destinationB) 67 | const resultA = JSON.parse(await readFile(destinationA)) 68 | const resultB = JSON.parse(await readFile(destinationB)) 69 | delete resultA.time 70 | delete resultB.time 71 | same(resultA, { 72 | pid, 73 | hostname, 74 | level: DEFAULT_LEVELS.info, 75 | msg: 'hello' 76 | }) 77 | same(resultB, { 78 | pid, 79 | hostname, 80 | level: DEFAULT_LEVELS.info, 81 | msg: 'hello', 82 | service: 'pino' // this property was added by the transform 83 | }) 84 | }) 85 | 86 | test('pino.transport with targets containing pipelines with levels defined and dedupe', async ({ same, teardown }) => { 87 | const destinationA = file() 88 | const destinationB = file() 89 | const transport = pino.transport({ 90 | targets: [ 91 | { 92 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 93 | options: { destination: destinationA }, 94 | level: DEFAULT_LEVELS.info 95 | }, 96 | { 97 | pipeline: [ 98 | { 99 | target: join(__dirname, '..', 'fixtures', 'transport-transform.js') 100 | }, 101 | { 102 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 103 | options: { destination: destinationB } 104 | } 105 | ], 106 | level: DEFAULT_LEVELS.error 107 | } 108 | ], 109 | dedupe: true 110 | }) 111 | 112 | teardown(transport.end.bind(transport)) 113 | const instance = pino(transport) 114 | instance.info('hello info') 115 | instance.error('hello error') 116 | await watchFileCreated(destinationA) 117 | await watchFileCreated(destinationB) 118 | const resultA = JSON.parse(await readFile(destinationA)) 119 | const resultB = JSON.parse(await readFile(destinationB)) 120 | delete resultA.time 121 | delete resultB.time 122 | same(resultA, { 123 | pid, 124 | hostname, 125 | level: DEFAULT_LEVELS.info, 126 | msg: 'hello info' 127 | }) 128 | same(resultB, { 129 | pid, 130 | hostname, 131 | level: DEFAULT_LEVELS.error, 132 | msg: 'hello error', 133 | service: 'pino' // this property was added by the transform 134 | }) 135 | }) 136 | -------------------------------------------------------------------------------- /test/transport/repl.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { doesNotThrow, test } = require('tap') 4 | const proxyquire = require('proxyquire') 5 | 6 | test('pino.transport resolves targets in REPL', async ({ same }) => { 7 | // Arrange 8 | const transport = proxyquire('../../lib/transport', { 9 | './caller': () => ['node:repl'] 10 | }) 11 | 12 | // Act / Assert 13 | doesNotThrow(() => transport({ target: 'pino-pretty' })) 14 | }) 15 | -------------------------------------------------------------------------------- /test/transport/syncTrue.test.js: 
-------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const pino = require('../..') 4 | const { join } = require('node:path') 5 | const { readFileSync } = require('node:fs') 6 | const { test } = require('tap') 7 | const { file } = require('../helper') 8 | 9 | test('thread-stream sync true should log synchronously', async (t) => { 10 | const outputPath = file() 11 | 12 | function getOutputLogLines () { 13 | return (readFileSync(outputPath)).toString().trim().split('\n').map(JSON.parse) 14 | } 15 | 16 | const transport = pino.transport({ 17 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 18 | options: { destination: outputPath, flush: true }, 19 | sync: true 20 | }) 21 | const instance = pino(transport) 22 | 23 | var value = { message: 'sync' } 24 | instance.info(value) 25 | instance.info(value) 26 | instance.info(value) 27 | instance.info(value) 28 | instance.info(value) 29 | instance.info(value) 30 | let interrupt = false 31 | let flushData 32 | let loopCounter = 0 33 | 34 | // Start a synchronous loop 35 | while (!interrupt && loopCounter < (process.env.MAX_TEST_LOOP_ITERATION || 20000)) { 36 | try { 37 | loopCounter++ 38 | const data = getOutputLogLines() 39 | flushData = data 40 | if (data) { 41 | interrupt = true 42 | break 43 | } 44 | } catch (error) { 45 | // File may not exist yet 46 | // Wait till MAX_TEST_LOOP_ITERATION iterations 47 | } 48 | } 49 | 50 | if (!interrupt) { 51 | throw new Error('Sync loop did not get interrupt') 52 | } 53 | 54 | t.equal(flushData.length, 6) 55 | }) 56 | -------------------------------------------------------------------------------- /test/transport/syncfalse.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const os = require('node:os') 4 | const pino = require('../..') 5 | const { join } = require('node:path') 6 | const { test } = require('tap') 7 | const { readFile } = require('node:fs').promises 8 | const { watchFileCreated, file } = require('../helper') 9 | const { promisify } = require('node:util') 10 | 11 | const { pid } = process 12 | const hostname = os.hostname() 13 | 14 | test('thread-stream async flush', async ({ equal, same }) => { 15 | const destination = file() 16 | const transport = pino.transport({ 17 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 18 | options: { destination } 19 | }) 20 | const instance = pino(transport) 21 | instance.info('hello') 22 | 23 | equal(instance.flush(), undefined) 24 | 25 | await watchFileCreated(destination) 26 | const result = JSON.parse(await readFile(destination)) 27 | delete result.time 28 | same(result, { 29 | pid, 30 | hostname, 31 | level: 30, 32 | msg: 'hello' 33 | }) 34 | }) 35 | 36 | test('thread-stream async flush should call the passed callback', async (t) => { 37 | const outputPath = file() 38 | async function getOutputLogLines () { 39 | return (await readFile(outputPath)).toString().trim().split('\n').map(JSON.parse) 40 | } 41 | const transport = pino.transport({ 42 | target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), 43 | options: { destination: outputPath } 44 | }) 45 | const instance = pino(transport) 46 | const flushPromise = promisify(instance.flush).bind(instance) 47 | 48 | instance.info('hello') 49 | await flushPromise() 50 | await watchFileCreated(outputPath) 51 | 52 | const [firstFlushData] = await getOutputLogLines() 53 | 54 | t.equal(firstFlushData.msg, 'hello') 55 | 56 | // should not flush this as no 
data accumulated that's bigger than min length 57 | instance.info('world') 58 | 59 | // Making sure data is not flushed yet 60 | const afterLogData = await getOutputLogLines() 61 | t.equal(afterLogData.length, 1) 62 | 63 | await flushPromise() 64 | 65 | // Making sure data is not flushed yet 66 | const afterSecondFlush = (await getOutputLogLines())[1] 67 | t.equal(afterSecondFlush.msg, 'world') 68 | }) 69 | -------------------------------------------------------------------------------- /test/transport/targets.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | const { join } = require('node:path') 5 | const proxyquire = require('proxyquire') 6 | const Writable = require('node:stream').Writable 7 | const pino = require('../../pino') 8 | 9 | test('file-target mocked', async function ({ equal, same, plan, pass }) { 10 | plan(1) 11 | let ret 12 | const fileTarget = proxyquire('../../file', { 13 | './pino': { 14 | destination (opts) { 15 | same(opts, { dest: 1, sync: false }) 16 | 17 | ret = new Writable() 18 | ret.fd = opts.dest 19 | 20 | process.nextTick(() => { 21 | ret.emit('ready') 22 | }) 23 | 24 | return ret 25 | } 26 | } 27 | }) 28 | 29 | await fileTarget() 30 | }) 31 | 32 | test('pino.transport with syntax error', ({ same, teardown, plan }) => { 33 | plan(1) 34 | const transport = pino.transport({ 35 | targets: [{ 36 | target: join(__dirname, '..', 'fixtures', 'syntax-error-esm.mjs') 37 | }] 38 | }) 39 | teardown(transport.end.bind(transport)) 40 | 41 | transport.on('error', (err) => { 42 | same(err, new SyntaxError('Unexpected end of input')) 43 | }) 44 | }) 45 | -------------------------------------------------------------------------------- /test/transport/uses-pino-config.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const os = require('node:os') 4 | const { join } = require('node:path') 5 | const { readFile } = require('node:fs').promises 6 | const writeStream = require('flush-write-stream') 7 | const { watchFileCreated, file } = require('../helper') 8 | const { test } = require('tap') 9 | const pino = require('../../') 10 | 11 | const { pid } = process 12 | const hostname = os.hostname() 13 | 14 | function serializeError (error) { 15 | return { 16 | type: error.name, 17 | message: error.message, 18 | stack: error.stack 19 | } 20 | } 21 | 22 | function parseLogs (buffer) { 23 | return JSON.parse(`[${buffer.toString().replace(/}{/g, '},{')}]`) 24 | } 25 | 26 | test('transport uses pino config', async ({ same, teardown, plan }) => { 27 | plan(1) 28 | const destination = file() 29 | const transport = pino.transport({ 30 | pipeline: [{ 31 | target: join(__dirname, '..', 'fixtures', 'transport-uses-pino-config.js') 32 | }, { 33 | target: 'pino/file', 34 | options: { destination } 35 | }] 36 | }) 37 | teardown(transport.end.bind(transport)) 38 | const instance = pino({ 39 | messageKey: 'customMessageKey', 40 | errorKey: 'customErrorKey', 41 | customLevels: { custom: 35 } 42 | }, transport) 43 | 44 | const error = new Error('bar') 45 | instance.custom('foo') 46 | instance.error(error) 47 | await watchFileCreated(destination) 48 | const result = parseLogs(await readFile(destination)) 49 | 50 | same(result, [{ 51 | severityText: 'custom', 52 | body: 'foo', 53 | attributes: { 54 | pid, 55 | hostname 56 | } 57 | }, { 58 | severityText: 'error', 59 | body: 'bar', 60 | attributes: { 61 | pid, 62 | hostname 63 | }, 64 | error: 
serializeError(error) 65 | }]) 66 | }) 67 | 68 | test('transport uses pino config without customizations', async ({ same, teardown, plan }) => { 69 | plan(1) 70 | const destination = file() 71 | const transport = pino.transport({ 72 | pipeline: [{ 73 | target: join(__dirname, '..', 'fixtures', 'transport-uses-pino-config.js') 74 | }, { 75 | target: 'pino/file', 76 | options: { destination } 77 | }] 78 | }) 79 | teardown(transport.end.bind(transport)) 80 | const instance = pino(transport) 81 | 82 | const error = new Error('qux') 83 | instance.info('baz') 84 | instance.error(error) 85 | await watchFileCreated(destination) 86 | const result = parseLogs(await readFile(destination)) 87 | 88 | same(result, [{ 89 | severityText: 'info', 90 | body: 'baz', 91 | attributes: { 92 | pid, 93 | hostname 94 | } 95 | }, { 96 | severityText: 'error', 97 | body: 'qux', 98 | attributes: { 99 | pid, 100 | hostname 101 | }, 102 | error: serializeError(error) 103 | }]) 104 | }) 105 | 106 | test('transport uses pino config with multistream', async ({ same, teardown, plan }) => { 107 | plan(2) 108 | const destination = file() 109 | const messages = [] 110 | const stream = writeStream(function (data, enc, cb) { 111 | const message = JSON.parse(data) 112 | delete message.time 113 | messages.push(message) 114 | cb() 115 | }) 116 | const transport = pino.transport({ 117 | pipeline: [{ 118 | target: join(__dirname, '..', 'fixtures', 'transport-uses-pino-config.js') 119 | }, { 120 | target: 'pino/file', 121 | options: { destination } 122 | }] 123 | }) 124 | teardown(transport.end.bind(transport)) 125 | const instance = pino({ 126 | messageKey: 'customMessageKey', 127 | errorKey: 'customErrorKey', 128 | customLevels: { custom: 35 } 129 | }, pino.multistream([transport, { stream }])) 130 | 131 | const error = new Error('buzz') 132 | const serializedError = serializeError(error) 133 | instance.custom('fizz') 134 | instance.error(error) 135 | await watchFileCreated(destination) 136 | const result = parseLogs(await readFile(destination)) 137 | 138 | same(result, [{ 139 | severityText: 'custom', 140 | body: 'fizz', 141 | attributes: { 142 | pid, 143 | hostname 144 | } 145 | }, { 146 | severityText: 'error', 147 | body: 'buzz', 148 | attributes: { 149 | pid, 150 | hostname 151 | }, 152 | error: serializedError 153 | }]) 154 | 155 | same(messages, [{ 156 | level: 35, 157 | pid, 158 | hostname, 159 | customMessageKey: 'fizz' 160 | }, { 161 | level: 50, 162 | pid, 163 | hostname, 164 | customErrorKey: serializedError, 165 | customMessageKey: 'buzz' 166 | }]) 167 | }) 168 | -------------------------------------------------------------------------------- /test/types/pino-import.test-d.ts: -------------------------------------------------------------------------------- 1 | import { expectType } from "tsd"; 2 | 3 | import pino from '../../pino'; 4 | import { pino as pinoNamed, P } from "../../pino"; 5 | import * as pinoStar from "../../pino"; 6 | import pinoCjsImport = require ("../../pino"); 7 | const pinoCjs = require("../../pino"); 8 | const { P: pinoCjsNamed } = require('pino') 9 | 10 | const log = pino(); 11 | expectType(log.info); 12 | expectType(log.error); 13 | 14 | expectType(pinoNamed()); 15 | expectType(pinoNamed()); 16 | expectType(pinoStar.default()); 17 | expectType(pinoStar.pino()); 18 | expectType(pinoCjsImport.default()); 19 | expectType(pinoCjsImport.pino()); 20 | expectType(pinoCjsNamed()); 21 | expectType(pinoCjs()); 22 | 23 | const levelChangeEventListener: P.LevelChangeEventListener = ( 24 | lvl: 
P.LevelWithSilent | string, 25 | val: number, 26 | prevLvl: P.LevelWithSilent | string, 27 | prevVal: number, 28 | ) => {} 29 | expectType(levelChangeEventListener) 30 | -------------------------------------------------------------------------------- /test/types/pino-multistream.test-d.ts: -------------------------------------------------------------------------------- 1 | import { expectType } from 'tsd' 2 | 3 | import { createWriteStream } from 'node:fs' 4 | 5 | import pino, { multistream } from '../../pino' 6 | 7 | const streams = [ 8 | { stream: process.stdout }, 9 | { stream: createWriteStream('') }, 10 | { level: 'error' as const, stream: process.stderr }, 11 | { level: 'fatal' as const, stream: process.stderr }, 12 | ] 13 | 14 | expectType(pino.multistream(process.stdout)) 15 | expectType(pino.multistream([createWriteStream('')])) 16 | expectType>(pino.multistream({ level: 'error' as const, stream: process.stderr })) 17 | expectType>(pino.multistream([{ level: 'fatal' as const, stream: createWriteStream('') }])) 18 | 19 | expectType>(pino.multistream(streams)) 20 | expectType>(pino.multistream(streams, {})) 21 | expectType>(pino.multistream(streams, { levels: { 'info': 30 } })) 22 | expectType>(pino.multistream(streams, { dedupe: true })) 23 | expectType>(pino.multistream(streams[0]).add(streams[1])) 24 | expectType>(multistream(streams)) 25 | expectType>(multistream(streams).clone('error')) 26 | 27 | 28 | expectType(multistream(process.stdout)); 29 | -------------------------------------------------------------------------------- /test/types/pino-top-export.test-d.ts: -------------------------------------------------------------------------------- 1 | import { expectType, expectAssignable } from 'tsd' 2 | import type { SonicBoom } from "sonic-boom"; 3 | 4 | import { 5 | destination, 6 | LevelMapping, 7 | levels, 8 | Logger, 9 | multistream, 10 | MultiStreamRes, 11 | SerializedError, 12 | stdSerializers, 13 | stdTimeFunctions, 14 | symbols, 15 | transport, 16 | version, 17 | } from "../../pino"; 18 | import pino from "../../pino"; 19 | 20 | expectType(destination("")); 21 | expectType(levels); 22 | expectType(multistream(process.stdout)); 23 | expectType(stdSerializers.err({} as any)); 24 | expectType(stdTimeFunctions.isoTime()); 25 | expectType(version); 26 | 27 | // Can't test against `unique symbol`, see https://github.com/SamVerschueren/tsd/issues/49 28 | expectAssignable(symbols.endSym); 29 | 30 | // TODO: currently returns (aliased) `any`, waiting for strong typed `thread-stream` 31 | transport({ 32 | target: '#pino/pretty', 33 | options: { some: 'options for', the: 'transport' } 34 | }); 35 | 36 | -------------------------------------------------------------------------------- /test/types/pino-transport.test-d.ts: -------------------------------------------------------------------------------- 1 | import { pino } from '../../pino' 2 | import { expectType } from "tsd"; 3 | 4 | // Single 5 | const transport = pino.transport({ 6 | target: '#pino/pretty', 7 | options: { some: 'options for', the: 'transport' } 8 | }) 9 | pino(transport) 10 | 11 | expectType(pino({ 12 | transport: { 13 | target: 'pino-pretty' 14 | }, 15 | })) 16 | 17 | // Multiple 18 | const transports = pino.transport({targets: [ 19 | { 20 | level: 'info', 21 | target: '#pino/pretty', 22 | options: { some: 'options for', the: 'transport' } 23 | }, 24 | { 25 | level: 'trace', 26 | target: '#pino/file', 27 | options: { destination: './test.log' } 28 | } 29 | ]}) 30 | pino(transports) 31 | 32 | expectType(pino({ 33 | 
transport: {targets: [ 34 | { 35 | level: 'info', 36 | target: '#pino/pretty', 37 | options: { some: 'options for', the: 'transport' } 38 | }, 39 | { 40 | level: 'trace', 41 | target: '#pino/file', 42 | options: { destination: './test.log' } 43 | } 44 | ]}, 45 | })) 46 | 47 | const transportsWithCustomLevels = pino.transport({targets: [ 48 | { 49 | level: 'info', 50 | target: '#pino/pretty', 51 | options: { some: 'options for', the: 'transport' } 52 | }, 53 | { 54 | level: 'foo', 55 | target: '#pino/file', 56 | options: { destination: './test.log' } 57 | } 58 | ], levels: { foo: 35 }}) 59 | pino(transports) 60 | 61 | expectType(pino({ 62 | transport: {targets: [ 63 | { 64 | level: 'info', 65 | target: '#pino/pretty', 66 | options: { some: 'options for', the: 'transport' } 67 | }, 68 | { 69 | level: 'trace', 70 | target: '#pino/file', 71 | options: { destination: './test.log' } 72 | } 73 | ], levels: { foo: 35 } 74 | }, 75 | })) 76 | 77 | const transportsWithoutOptions = pino.transport({ 78 | targets: [ 79 | { target: '#pino/pretty' }, 80 | { target: '#pino/file' } 81 | ], levels: { foo: 35 } 82 | }) 83 | pino(transports) 84 | 85 | expectType(pino({ 86 | transport: { 87 | targets: [ 88 | { target: '#pino/pretty' }, 89 | { target: '#pino/file' } 90 | ], levels: { foo: 35 } 91 | }, 92 | })) 93 | 94 | const pipelineTransport = pino.transport({ 95 | pipeline: [{ 96 | target: './my-transform.js' 97 | }, { 98 | // Use target: 'pino/file' to write to stdout 99 | // without any change. 100 | target: 'pino-pretty' 101 | }] 102 | }) 103 | pino(pipelineTransport) 104 | 105 | expectType(pino({ 106 | transport: { 107 | pipeline: [{ 108 | target: './my-transform.js' 109 | }, { 110 | // Use target: 'pino/file' to write to stdout 111 | // without any change. 112 | target: 'pino-pretty' 113 | }] 114 | } 115 | })) 116 | 117 | type TransportConfig = { 118 | id: string 119 | } 120 | 121 | // Custom transport params 122 | const customTransport = pino.transport({ 123 | target: 'custom', 124 | options: { id: 'abc' } 125 | }) 126 | pino(customTransport) 127 | 128 | // Worker 129 | pino.transport({ 130 | target: 'custom', 131 | worker: { 132 | argv: ['a', 'b'], 133 | stdin: false, 134 | stderr: true, 135 | stdout: false, 136 | autoEnd: true, 137 | }, 138 | options: { id: 'abc' } 139 | }) 140 | 141 | // Dedupe 142 | pino.transport({ 143 | targets: [], 144 | dedupe: true, 145 | }) 146 | -------------------------------------------------------------------------------- /test/types/pino-type-only.test-d.ts: -------------------------------------------------------------------------------- 1 | import { expectAssignable, expectType, expectNotAssignable } from "tsd"; 2 | 3 | import pino from "../../"; 4 | import type {LevelWithSilent, Logger, LogFn, P, DestinationStreamWithMetadata, Level, LevelOrString, LevelWithSilentOrString, LoggerExtras, LoggerOptions } from "../../pino"; 5 | 6 | // NB: can also use `import * as pino`, but that form is callable as `pino()` 7 | // under `esModuleInterop: false` or `pino.default()` under `esModuleInterop: true`. 
8 | const log = pino(); 9 | expectAssignable(log); 10 | expectType(log); 11 | expectType(log.info); 12 | 13 | expectType(log); 14 | expectType(log.info); 15 | 16 | expectType>([log.level]); 17 | 18 | const level: Level = 'debug'; 19 | expectAssignable(level); 20 | expectAssignable(level); 21 | 22 | const levelWithSilent: LevelWithSilent = 'silent'; 23 | expectAssignable(levelWithSilent); 24 | expectAssignable(levelWithSilent); 25 | 26 | const levelOrString: LevelOrString = "myCustomLevel"; 27 | expectAssignable(levelOrString); 28 | expectNotAssignable(levelOrString); 29 | expectNotAssignable(levelOrString); 30 | expectAssignable(levelOrString); 31 | 32 | const levelWithSilentOrString: LevelWithSilentOrString = "myCustomLevel"; 33 | expectAssignable(levelWithSilentOrString); 34 | expectNotAssignable(levelWithSilentOrString); 35 | expectNotAssignable(levelWithSilentOrString); 36 | expectAssignable(levelWithSilentOrString); 37 | 38 | function createStream(): DestinationStreamWithMetadata { 39 | return { write() {} }; 40 | } 41 | 42 | const stream = createStream(); 43 | // Argh. TypeScript doesn't seem to narrow unless we assign the symbol like so, and tsd seems to 44 | // break without annotating the type explicitly 45 | const needsMetadata: typeof pino.symbols.needsMetadataGsym = pino.symbols.needsMetadataGsym; 46 | if (stream[needsMetadata]) { 47 | expectType(stream.lastLevel); 48 | } 49 | 50 | const loggerOptions:LoggerOptions = { 51 | browser: { 52 | formatters: { 53 | log(obj) { 54 | return obj 55 | }, 56 | level(label, number) { 57 | return { label, number} 58 | } 59 | 60 | } 61 | } 62 | } 63 | 64 | expectType(loggerOptions) 65 | -------------------------------------------------------------------------------- /test/types/pino.ts: -------------------------------------------------------------------------------- 1 | import { join } from 'node:path' 2 | import { tmpdir } from 'node:os' 3 | import pinoPretty from 'pino-pretty' 4 | import { LoggerOptions, StreamEntry, pino } from '../../pino' 5 | 6 | const destination = join( 7 | tmpdir(), 8 | '_' + Math.random().toString(36).substr(2, 9) 9 | ) 10 | 11 | // Single 12 | const transport = pino.transport({ 13 | target: 'pino-pretty', 14 | options: { some: 'options for', the: 'transport' } 15 | }) 16 | const logger = pino(transport) 17 | logger.setBindings({ some: 'bindings' }) 18 | logger.info('test2') 19 | logger.flush() 20 | 21 | const transport2 = pino.transport({ 22 | target: 'pino-pretty', 23 | }) 24 | const logger2 = pino(transport2) 25 | logger2.info('test2') 26 | 27 | 28 | // Multiple 29 | 30 | const transports = pino.transport({targets: [ 31 | { 32 | level: 'info', 33 | target: 'pino-pretty', 34 | options: { some: 'options for', the: 'transport' } 35 | }, 36 | { 37 | level: 'trace', 38 | target: 'pino/file', 39 | options: { destination } 40 | } 41 | ]}) 42 | const loggerMulti = pino(transports) 43 | loggerMulti.info('test2') 44 | 45 | // custom levels 46 | 47 | const customLevels = { 48 | customDebug : 1, 49 | info : 2, 50 | customNetwork : 3, 51 | customError : 4, 52 | }; 53 | 54 | type CustomLevels = keyof typeof customLevels; 55 | 56 | const pinoOpts = { 57 | useOnlyCustomLevels: true, 58 | customLevels: customLevels, 59 | level: 'customDebug', 60 | } satisfies LoggerOptions; 61 | 62 | const multistreamOpts = { 63 | dedupe: true, 64 | levels: customLevels 65 | }; 66 | 67 | const streams: StreamEntry[] = [ 68 | { level : 'customDebug', stream : pinoPretty() }, 69 | { level : 'info', stream : pinoPretty() }, 70 | { level : 
'customNetwork', stream : pinoPretty() }, 71 | { level : 'customError', stream : pinoPretty() }, 72 | ]; 73 | 74 | const loggerCustomLevel = pino(pinoOpts, pino.multistream(streams, multistreamOpts)); 75 | loggerCustomLevel.customDebug('test3') 76 | loggerCustomLevel.info('test4') 77 | loggerCustomLevel.customError('test5') 78 | loggerCustomLevel.customNetwork('test6') 79 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", 4 | "lib": [ "es2015", "dom" ], 5 | "module": "commonjs", 6 | "noEmit": true, 7 | "strict": true, 8 | "esModuleInterop": true, 9 | }, 10 | "exclude": [ 11 | "./test/types/*.test-d.ts", 12 | "./*.d.ts" 13 | ] 14 | } 15 | --------------------------------------------------------------------------------