├── .eslintrc ├── .github └── workflows │ ├── codeql-analysis.yml │ ├── node.js.yml │ ├── npm-audit.yml │ └── npm-publish.yml ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── LICENSE ├── README.md ├── lib ├── DateRollingFileStream.js ├── RollingFileStream.js ├── RollingFileWriteStream.js ├── fileNameFormatter.js ├── fileNameParser.js ├── index.js ├── moveAndMaybeCompressFile.js └── now.js ├── package-lock.json ├── package.json └── test ├── DateRollingFileStream-test.js ├── RollingFileStream-test.js ├── RollingFileWriteStream-test.js ├── fileNameFormatter-test.js ├── fileNameParser-test.js └── moveAndMaybeCompressFile-test.js /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "eslint:recommended" 4 | ], 5 | "env": { 6 | "node": true, 7 | "es6": true, 8 | "mocha": true 9 | }, 10 | "parserOptions": { 11 | "ecmaVersion": 2018 12 | }, 13 | "rules": { 14 | "indent": [2, 2], 15 | "max-len": [1, 120, 2], 16 | "no-use-before-define": ["warn"] 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 
11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ "master" ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ "master" ] 20 | 21 | jobs: 22 | analyze: 23 | name: Analyze 24 | runs-on: ubuntu-latest 25 | permissions: 26 | actions: read 27 | contents: read 28 | security-events: write 29 | 30 | strategy: 31 | fail-fast: false 32 | matrix: 33 | language: [ 'javascript' ] 34 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 35 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 36 | 37 | steps: 38 | - name: Checkout repository 39 | uses: actions/checkout@v3 40 | 41 | # Initializes the CodeQL tools for scanning. 42 | - name: Initialize CodeQL 43 | uses: github/codeql-action/init@v2 44 | with: 45 | languages: ${{ matrix.language }} 46 | # If you wish to specify custom queries, you can do so here or in a config file. 47 | # By default, queries listed here will override any specified in a config file. 48 | # Prefix the list here with "+" to use these queries and those in the config file. 49 | 50 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 51 | # queries: security-extended,security-and-quality 52 | 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v2 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 60 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 61 | 62 | # If the Autobuild fails above, remove it and uncomment the following three lines. 
63 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 64 | 65 | # - run: | 66 | # echo "Run, Build Application using script" 67 | # ./location_of_script_within_repo/buildscript.sh 68 | 69 | - name: Perform CodeQL Analysis 70 | uses: github/codeql-action/analyze@v2 71 | with: 72 | category: "/language:${{matrix.language}}" 73 | -------------------------------------------------------------------------------- /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Node.js CI 5 | 6 | on: 7 | push: 8 | branches: [ "master" ] 9 | pull_request: 10 | branches: [ "master" ] 11 | workflow_dispatch: {} 12 | 13 | jobs: 14 | build: 15 | 16 | runs-on: ${{ matrix.os }} 17 | 18 | defaults: 19 | run: 20 | shell: bash 21 | 22 | strategy: 23 | matrix: 24 | os: [ubuntu-latest, windows-latest] 25 | node-version: [8.x, 10.x, 12.x, 14.x, 16.x, 18.x] 26 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ 27 | include: 28 | - node-version: 8.x 29 | npm-i: "eslint@6.x mocha@7.x" 30 | 31 | - node-version: 10.x 32 | npm-i: "eslint@7.x mocha@8.x" 33 | 34 | - node-version: 12.x 35 | npm-i: "mocha@9.x" 36 | 37 | steps: 38 | - uses: actions/checkout@v3 39 | - name: Use Node.js ${{ matrix.node-version }} 40 | uses: actions/setup-node@v3 41 | with: 42 | node-version: ${{ matrix.node-version }} 43 | cache: 'npm' 44 | 45 | - name: Configure npm 46 | run: npm config set loglevel error 47 | 48 | - name: Get npm version 49 | id: npm-version 50 | run: | 51 | npm -v 52 | npmMajorVer=$(npm -v | cut -d. 
-f1) 53 | echo "major=$npmMajorVer" >> $GITHUB_OUTPUT 54 | 55 | - name: Install downgraded modules ${{ matrix.npm-i }} 56 | run: | 57 | npm install --save-dev ${{ matrix.npm-i }} 58 | if [ ${{ steps.npm-version.outputs.major }} -le 5 ]; then 59 | npm install 60 | fi 61 | if: matrix.npm-i != '' 62 | 63 | - run: npm install 64 | if: matrix.npm-i == '' && steps.npm-version.outputs.major <= 5 65 | 66 | - run: npm ci 67 | if: matrix.npm-i == '' && steps.npm-version.outputs.major > 5 68 | 69 | - name: List dependencies 70 | run: npm ls --depth=0 --dev && npm ls --depth=0 --prod 71 | 72 | - run: npm run build --if-present 73 | - run: npm test 74 | -------------------------------------------------------------------------------- /.github/workflows/npm-audit.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: npm audit 5 | 6 | on: 7 | push: 8 | branches: [ "master" ] 9 | pull_request: 10 | branches: [ "master" ] 11 | workflow_dispatch: {} 12 | schedule: 13 | - cron: '0 0 * * 3' 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | strategy: 21 | matrix: 22 | node-version: [latest] 23 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ 24 | 25 | steps: 26 | - uses: actions/checkout@v3 27 | - name: Use Node.js ${{ matrix.node-version }} 28 | uses: actions/setup-node@v3 29 | with: 30 | node-version: ${{ matrix.node-version }} 31 | cache: 'npm' 32 | 33 | - run: npm audit 34 | env: 35 | NODE_ENV: production 36 | -------------------------------------------------------------------------------- /.github/workflows/npm-publish.yml: 
-------------------------------------------------------------------------------- 1 | # This workflow will run tests using node and then publish a package to GitHub Packages when a milestone is closed 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages 3 | 4 | name: Node.js Package 5 | 6 | on: 7 | milestone: 8 | types: [closed] 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v3 15 | - uses: actions/setup-node@v3 16 | with: 17 | node-version: 16 18 | - run: npm ci 19 | - run: npm test 20 | 21 | publish-npm: 22 | needs: build 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v3 26 | - uses: actions/setup-node@v3 27 | with: 28 | node-version: 16 29 | registry-url: https://registry.npmjs.org/ 30 | - run: npm ci 31 | - run: | 32 | git config user.name github-actions 33 | git config user.email github-actions@github.com 34 | - run: npm version ${{ github.event.milestone.title }} 35 | - run: git push && git push --tags 36 | - run: npm publish 37 | env: 38 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 39 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | lib-cov 2 | *.seed 3 | *.log 4 | *.csv 5 | *.dat 6 | *.out 7 | *.pid 8 | *.gz 9 | .DS_Store 10 | .nyc_output 11 | coverage 12 | 13 | pids 14 | logs 15 | results 16 | 17 | npm-debug.log 18 | node_modules 19 | .nyc_output/ 20 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: false 3 | node_js: 4 | - "12" 5 | - "10" 6 | - "8" 7 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # 
streamroller Changelog 2 | 3 | ## [3.1.5](https://github.com/log4js-node/streamroller/milestone/30) 4 | 5 | - [fix: tilde expansion for windows](https://github.com/log4js-node/streamroller/pull/165) - thanks [@lamweili](https://github.com/lamweili) 6 | - [chore(deps-dev): updated dependencies](https://github.com/log4js-node/streamroller/pull/166) - thanks [@lamweili](https://github.com/lamweili) 7 | - chore(deps-dev): bump @commitlint/cli from 17.3.0 to 17.4.3 8 | - chore(deps-dev): bump @commitlint/config-conventional from 17.3.0 to 17.4.3 9 | - chore(deps-dev): bump @types/node from 18.11.18 to 18.13.0 10 | - chore(deps-dev): bump eslint from 8.30.0 to 8.34.0 11 | - chore(deps-dev): bump husky from 8.0.2 to 8.0.3 12 | - chore(deps-dev): updated package-lock.json 13 | 14 | ## [3.1.4](https://github.com/log4js-node/streamroller/milestone/29) 15 | 16 | - [fix: addressed unhandled promise rejection when a file gets deleted in midst of rolling](https://github.com/log4js-node/streamroller/pull/160) - thanks [@lamweili](https://github.com/lamweili) 17 | - [docs: updated repository url](https://github.com/log4js-node/streamroller/pull/158) - thanks [@lamweili](https://github.com/lamweili) 18 | - [ci: replaced deprecated github set-output](https://github.com/log4js-node/streamroller/pull/159) - thanks [@lamweili](https://github.com/lamweili) 19 | - [ci: added quotes](https://github.com/log4js-node/streamroller/pull/157) - thanks [@lamweili](https://github.com/lamweili) 20 | - [chore(deps-dev): updated dependencies](https://github.com/log4js-node/streamroller/pull/161) - thanks [@lamweili](https://github.com/lamweili) 21 | - chore(deps-dev): bump @types/node from 18.11.9 to 18.11.18 22 | - chore(deps-dev): bump eslint from 8.28.0 to 8.30.0 23 | - chore(deps-dev): bump mocha from 10.1.0 to 10.2.0 24 | - chore(deps-dev): updated package-lock.json 25 | - [chore(deps-dev): updated dependencies](https://github.com/log4js-node/streamroller/pull/156) - thanks 
[@lamweili](https://github.com/lamweili) 26 | - chore(deps-dev): bump @commitlint/cli from 17.1.2 to 17.3.0 27 | - chore(deps-dev): bump @commitlint/config-conventional from 17.1.0 to 17.3.0 28 | - chore(deps-dev): bump @types/node from 18.7.23 to 18.11.9 29 | - chore(deps-dev): bump eslint from 8.24.0 to 8.28.0 30 | - chore(deps-dev): bump husky from 8.0.1 to 8.0.2 31 | - chore(deps-dev): bump mocha from 10.0.0 to 10.1.0 32 | - chore(deps-dev): updated package-lock.json 33 | 34 | ## [3.1.3](https://github.com/log4js-node/streamroller/milestone/28) 35 | 36 | - [ci: manually downgrade dev dependencies for older versions](https://github.com/log4js-node/streamroller/pull/153) - thanks [@lamweili](https://github.com/lamweili) 37 | - [ci: removed scheduled job from codeql and separated npm audit](https://github.com/log4js-node/streamroller/pull/152) - thanks [@lamweili](https://github.com/lamweili) 38 | - [ci: updated codeql from v1 to v2](https://github.com/log4js-node/streamroller/pull/151) - thanks [@lamweili](https://github.com/lamweili) 39 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/154) - thanks [@lamweili](https://github.com/lamweili) 40 | - chore(deps-dev): bump @commitlint/cli from 17.0.3 to 17.1.2 41 | - chore(deps-dev): bump @commitlint/config-conventional from 17.0.3 to 17.1.0 42 | - chore(deps-dev): bump @types/node from 18.0.6 to 18.7.23 43 | - chore(deps-dev): bump eslint from 6.8.0 to 8.24.0 44 | - chore(deps-dev): bump mocha from 7.2.0 to 10.0.0 45 | - chore(deps): bump date-format from 4.0.13 to 4.0.14 46 | - chore(deps): updated package-lock.json 47 | 48 | ## [3.1.2](https://github.com/log4js-node/streamroller/milestone/27) 49 | 50 | - [refactor: support older Node.js versions](https://github.com/log4js-node/streamroller/pull/147) - thanks [@lamweili](https://github.com/lamweili) 51 | - [docs: renamed peteriman to lamweili](https://github.com/log4js-node/streamroller/pull/144) - thanks 
[@lamweili](https://github.com/lamweili) 52 | - [ci: added tests for Node.js 8.x, 10.x, 18.x](https://github.com/log4js-node/streamroller/pull/148) - thanks [@lamweili](https://github.com/lamweili) 53 | - [chore(deps): bump date-format from 4.0.11 to 4.0.13](https://github.com/log4js-node/streamroller/pull/150) - thanks [@lamweili](https://github.com/lamweili) 54 | - [chore(deps-dev): updated dependencies](https://github.com/log4js-node/streamroller/pull/146) - thanks [@lamweili](https://github.com/lamweili) 55 | - chore(deps-dev): bump @commitlint/cli from 17.0.1 to 17.0.3 56 | - chore(deps-dev): bump @commitlint/config-conventional from 17.0.2 to 17.0.3 57 | - chore(deps-dev): bump @types/node from 17.0.38 to 18.0.6 58 | - chore(deps-dev): bump eslint from 8.16.0 to 8.20.0 59 | - chore(deps-dev): updated package-lock.json 60 | - [chore(deps-dev): updated dependencies](https://github.com/log4js-node/streamroller/pull/143) - thanks [@lamweili](https://github.com/lamweili) 61 | - chore(deps-dev): bump @commitlint/cli from 17.0.0 to 17.0.1 62 | - chore(deps-dev): bump @commitlint/config-conventional 17.0.0 to 17.0.2 63 | - chore(deps-dev): bump @types/node from 17.0.35 to 17.0.38 64 | - chore(deps): bump date-format 4.0.10 to 4.0.11 65 | - chore(deps): updated package-lock.json 66 | 67 | ## [3.1.1](https://github.com/log4js-node/streamroller/milestone/26) 68 | 69 | - [fix: fs.appendFileSync should use flag instead of flags](https://github.com/log4js-node/streamroller/pull/141) - thanks [@lamweili](https://github.com/lamweili) 70 | 71 | ## [3.1.0](https://github.com/log4js-node/streamroller/milestone/25) 72 | 73 | - [feat: tilde expansion for filename](https://github.com/log4js-node/streamroller/pull/135) - thanks [@lamweili](https://github.com/lamweili) 74 | - [fix: better file validation](https://github.com/log4js-node/streamroller/pull/134) - thanks [@lamweili](https://github.com/lamweili) 75 | - [chore(deps-dev): updated 
dependencies](https://github.com/log4js-node/streamroller/pull/140) - thanks [@lamweili](https://github.com/lamweili) 76 | - chore(deps-dev): bump @commitlint/cli from 16.3.0 to 17.0.0 77 | - chore(deps-dev): bump @commitlint/config-conventional from 16.2.4 to 17.0.0 78 | - chore(deps-dev): bump @types/node from 17.0.33 to 17.0.35 79 | - chore(deps-dev): bump eslint from 8.15.0 to 8.16.0 80 | - chore(deps): updated package-lock.json 81 | 82 | ## [3.0.9](https://github.com/log4js-node/streamroller/milestone/24) 83 | 84 | - [fix: maxSize=0 means no rolling](https://github.com/log4js-node/streamroller/pull/131) - thanks [@lamweili](https://github.com/lamweili) 85 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/132) - thanks [@lamweili](https://github.com/lamweili) 86 | - chore(deps-dev): bump @commitlint/cli from 16.2.3 to 16.3.0 87 | - chore(deps-dev): bump @commitlint/config-conventional from 16.2.1 to 16.2.4 88 | - chore(deps-dev): bump @types/node from 17.0.26 to 17.0.33 89 | - chore(deps-dev): bump eslint from 8.14.0 to 8.15.0 90 | - chore(deps-dev): bump husky from 7.0.4 to 8.0.1 91 | - chore(deps-dev): bump mocha from 9.2.2 to 10.0.0 92 | - chore(deps): bump date-format from 4.0.9 to 4.0.10 93 | - chore(deps): updated package-lock.json 94 | 95 | ## [3.0.8](https://github.com/log4js-node/streamroller/milestone/23) 96 | 97 | - [fix: concurrency issues when forked processes trying to roll same file](https://github.com/log4js-node/streamroller/pull/124) - thanks [@lamweili](https://github.com/lamweili) 98 | - [refactor: use writeStream.destroy() instead](https://github.com/log4js-node/streamroller/pull/125) 99 | - [refactor: use isCreated variable instead of e.code='EEXIST'](https://github.com/log4js-node/streamroller/pull/126) 100 | - [chore(lint): added .eslintrc and fixed linting issues](https://github.com/log4js-node/streamroller/pull/123) - thanks [@lamweili](https://github.com/lamweili) 101 | - [chore(deps): updated 
dependencies](https://github.com/log4js-node/streamroller/pull/127) - thanks [@lamweili](https://github.com/lamweili) 102 | - chore(deps-dev): bump @types/node from 17.0.24 to 17.0.26 103 | - chore(deps-dev): bump eslint from 8.13.0 to 8.14.0 104 | - chore(deps): bump date-format from 4.0.7 to 4.0.9 105 | - chore(deps): updated package-lock.json 106 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/119) - thanks [@lamweili](https://github.com/lamweili) 107 | - chore(deps): bump fs-extra from 10.0.1 to 10.1.0 108 | - chore(deps): updated package-lock.json 109 | - revert: "[chore(dep): temporary fix for fs-extra issue (to be reverted when fs-extra patches it)](https://github.com/log4js-node/streamroller/pull/116)" 110 | 111 | ## [3.0.7](https://github.com/log4js-node/streamroller/milestone/22) 112 | 113 | - [chore(deps): temporary fix for fs-extra issue (to be reverted when fs-extra patches it)](https://github.com/log4js-node/streamroller/pull/116) - thanks [@lamweili](https://github.com/lamweili) 114 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/117) - thanks [@lamweili](https://github.com/lamweili) 115 | - chore(deps): bump date-format from 4.0.6 to 4.0.7 116 | - chore(deps): updated package-lock.json 117 | - [chore(deps-dev): updated dependencies](https://github.com/log4js-node/streamroller/pull/113) - thanks [@lamweili](https://github.com/lamweili) 118 | - chore(deps-dev): bump @types/node from 17.0.23 to 17.0.24 119 | - chore(deps-dev): updated package-lock.json 120 | - [chore(deps-dev): updated dependencies](https://github.com/log4js-node/streamroller/pull/112) - thanks [@lamweili](https://github.com/lamweili) 121 | - chore(deps-dev): bump @types/node from 17.0.22 to 17.0.23 122 | - chore(deps-dev): bump eslint from 8.11.0 to 8.13.0 123 | - chore(deps-dev): updated package-lock.json 124 | 125 | ## [3.0.6](https://github.com/log4js-node/streamroller/milestone/21) 126 | 127 | - 
[chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/110) - thanks [@lamweili](https://github.com/lamweili) 128 | - chore(deps): bump debug from 4.3.3 to 4.3.4 129 | - chore(deps): bump date-format from 4.0.5 to 4.0.6 130 | - chore(deps-dev): bump @types/node from 17.0.21 to 17.0.22 131 | - chore(deps-dev): bump @commitlint/cli from 16.2.1 to 16.2.3 132 | - chore(deps): updated package-lock.json 133 | 134 | ## [3.0.5](https://github.com/log4js-node/streamroller/milestone/20) 135 | 136 | - [fix: added filename validation](https://github.com/log4js-node/streamroller/pull/101) - thanks [@lamweili](https://github.com/lamweili) 137 | - [docs: updated README.md with badges](https://github.com/log4js-node/streamroller/pull/105) - thanks [@lamweili](https://github.com/lamweili) 138 | - [docs: updated README.md for DateRollingFileStream](https://github.com/log4js-node/streamroller/pull/106) - thanks [@lamweili](https://github.com/lamweili) 139 | - [docs: added docs for istanbul ignore](https://github.com/log4js-node/streamroller/pull/107) - thanks [@lamweili](https://github.com/lamweili) 140 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/109) - thanks [@lamweili](https://github.com/lamweili) 141 | - chore(deps-dev): eslint from 8.10.0 to 8.11.0 142 | - chore(deps-dev): mocha from 9.2.1 to 9.2.2 143 | - chore(deps): date-format from 4.0.4 to 4.0.5 144 | - chore(deps): updated package-lock.json 145 | 146 | ## [3.0.4](https://github.com/log4js-node/streamroller/milestone/19) 147 | 148 | - [test: remove test file/folder remnants](https://github.com/log4js-node/streamroller/pull/99) - thanks [@lamweili](https://github.com/lamweili) 149 | 150 | ## [3.0.3](https://github.com/log4js-node/streamroller/milestone/18) 151 | 152 | - [fix: backward compatibility for RollingFileWriteStream to recursively create directory](https://github.com/log4js-node/streamroller/pull/96) - thanks 
[@lamweili](https://github.com/lamweili) 153 | - [test: 100% test coverage](https://github.com/log4js-node/streamroller/pull/94) - thanks [@lamweili](https://github.com/lamweili) 154 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/97) - thanks [@lamweili](https://github.com/lamweili) 155 | - chore(deps-dev): bump @commitlint/cli from 16.1.0 to 16.2.1 156 | - chore(deps-dev): bump @commitlint/config-conventional from 16.0.0 to 16.2.1 157 | - chore(deps-dev): bump @types/node from 17.0.16 to 17.0.21 158 | - chore(deps-dev): bump eslint from 8.8.0 to 8.10.0 159 | - chore(deps-dev): bump mocha from 9.2.0 to 9.2.1 160 | - chore(deps): bump date-format from 4.0.3 to 4.0.4 161 | - chore(deps): bump fs-extra from 10.0.0 to 10.0.1 162 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/95) - thanks [@lamweili](https://github.com/lamweili) 163 | - chore(deps-dev): bump @commitlint/cli from 16.0.2 to 16.1.0 164 | - chore(deps-dev): bump @types/node from 17.0.9 to 17.0.16 165 | - chore(deps-dev): bump eslint from 8.7.0 to 8.8.0 166 | - chore(deps-dev): bump proxyquire from 2.1.1 to 2.1.3 167 | - chore(deps): bump debug from 4.1.1 to 4.3.3 168 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/92) - thanks [@lamweili](https://github.com/lamweili) 169 | - updated package-lock.json 170 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/91) - thanks [@lamweili](https://github.com/lamweili) 171 | - chore(deps-dev): bump mocha from 9.1.4 to 9.2.0 172 | 173 | ## [3.0.2](https://github.com/log4js-node/streamroller/milestone/17) 174 | 175 | - [fix: changed default file modes from 0o644 to 0o600 for better security](https://github.com/log4js-node/streamroller/pull/87) - thanks [@lamweili](https://github.com/lamweili) 176 | - [refactor: housekeeping for comments and .gitignore](https://github.com/log4js-node/streamroller/pull/89) - 
thanks [@lamweili](https://github.com/lamweili) 177 | - [chore(deps): updated dependencies](https://github.com/log4js-node/streamroller/pull/88) - thanks [@lamweili](https://github.com/lamweili) 178 | - chore(deps-dev): bump caniuse-lite from 1.0.30001299 to 1.0.30001300 179 | - chore(deps-dev): bump electron-to-chromium from 1.4.45 to 1.4.47 180 | - chore(deps-dev): bump @types/node from 17.0.8 to 17.0.9 181 | - chore(deps-dev): bump eslint from 8.6.0 to 8.7.0 182 | - chore(deps-dev): bump mocha from 9.1.3 to 9.1.4 183 | - chore(deps): bump date-format from 4.0.2 to 4.0.3 184 | 185 | ## [3.0.1](https://github.com/log4js-node/streamroller/milestone/16) 186 | 187 | - [build: not to publish misc files to NPM](https://github.com/log4js-node/streamroller/pull/82) - thanks [@lamweili](https://github.com/lamweili) 188 | - chore(deps): updated dependencies - thanks [@lamweili](https://github.com/lamweili) 189 | - [chore(deps): bump date-format from 4.0.1 to 4.0.2](https://github.com/log4js-node/streamroller/pull/86) 190 | - [chore(deps-dev): bump electron-to-chromium from 1.4.44 to 1.4.45](https://github.com/log4js-node/streamroller/pull/81) 191 | 192 | ## [3.0.0](https://github.com/log4js-node/streamroller/milestone/15) 193 | 194 | - [feat: allow for 0 backups (only hot file)](https://github.com/log4js-node/streamroller/pull/74) - thanks [@lamweili](https://github.com/lamweili) 195 | - [feat: exposed fileNameSep to be configurable](https://github.com/log4js-node/streamroller/pull/67) - thanks [@laidaxian](https://github.com/laidaxian) 196 | - [fix: for fileNameSep affecting globally](https://github.com/log4js-node/streamroller/pull/79) - thanks [@lamweili](https://github.com/lamweili) 197 | - [fix: for mode to apply to compressed file](https://github.com/log4js-node/streamroller/pull/65) - thanks [@rnd-debug](https://github.com/rnd-debug) 198 | - [fix: for extra separator in filenames](https://github.com/log4js-node/streamroller/pull/75) - thanks 
[@lamweili](https://github.com/lamweili) 199 | - [refactor: moved to options.numBackups instead of options.daysToKeep](https://github.com/log4js-node/streamroller/pull/78) - thanks [@lamweili](https://github.com/lamweili) 200 | - [test: improved test case performance for fakeNow](https://github.com/log4js-node/streamroller/pull/76) - thanks [@lamweili](https://github.com/lamweili) 201 | - chore(deps-dev): updated dependencies - thanks @lamweili 202 | - [chore(deps-dev): updated package.json](https://github.com/log4js-node/streamroller/pull/70) 203 | - chore(deps-dev): bump @commitlint/cli from 8.1.0 to 16.0.2 204 | - chore(deps-dev): bump @commitlint/config-conventional from 8.1.0 to 16.0.0 205 | - chore(deps-dev): bump @type/nodes 17.0.8 206 | - chore(deps-dev): bump eslint from 6.0.1 to 8.6.0 207 | - chore(deps-dev): bump mocha from 6.1.4 to 9.1.3 208 | - chore(deps-dev): bump nyc from 14.1.1 to 15.1.0 209 | - [chore(deps-dev): updated package-lock.json](https://github.com/log4js-node/streamroller/pull/71) 210 | - chore(deps-dev): bump @babel/compat-data from 7.16.4 to 7.16.8 211 | - chore(deps-dev): bump @babel/generator from 7.16.7 to 7.16.8 212 | - chore(deps-dev): bump @babel/parser from 7.16.7 to 7.16.8 213 | - chore(deps-dev): bump @babel/travers from 7.16.7 to 7.16.8 214 | - chore(deps-dev): bump @babel/types from 7.16.7 to 7.16.8 215 | - [chore(deps-dev): updated package-lock.json](https://github.com/log4js-node/streamroller/pull/77) 216 | - chore(deps-dev): bump caniuse-lite from 1.0.30001298 to 1.0.30001299 217 | - chore(deps-dev): bump electron-to-chromium from 1.4.39 to 1.4.44 218 | - [chore(deps-dev): updated package.json](https://github.com/log4js-node/streamroller/pull/80) 219 | - chore(deps): bump date-format from 3.0.0 to 4.0.1 220 | - chore(deps-dev): bump husky from 3.0.0 to 7.0.4 221 | - chore(deps): bump fs-extra from 8.1.0 to 10.0.0 222 | 223 | ## [2.2.4](https://github.com/log4js-node/streamroller/milestone/14) 224 | 225 | - [Fix for 
incorrect filename matching](https://github.com/log4js-node/streamroller/pull/61) - thanks [@rnd-debug](https://github.com/rnd-debug) 226 | 227 | ## [2.2.3](https://github.com/log4js-node/streamroller/milestone/13) 228 | 229 | - [Fix for unhandled promise rejection during cleanup](https://github.com/log4js-node/streamroller/pull/56) 230 | 231 | ## [2.2.2](https://github.com/log4js-node/streamroller/milestone/12) 232 | 233 | - [Fix for overwriting current file when using date rotation](https://github.com/log4js-node/streamroller/pull/54) 234 | 235 | ## 2.2.1 236 | 237 | - Fix for num to keep not working when date pattern is all digits (forgot to do a PR for this one) 238 | 239 | ## [2.2.0](https://github.com/log4js-node/streamroller/milestone/11) 240 | 241 | - [Fallback to copy and truncate when file is busy](https://github.com/log4js-node/streamroller/pull/53) 242 | 243 | ## [2.1.0](https://github.com/log4js-node/streamroller/milestone/10) 244 | 245 | - [Improve Windows support (closing streams)](https://github.com/log4js-node/streamroller/pull/52) 246 | 247 | ## [2.0.0](https://github.com/log4js-node/streamroller/milestone/9) 248 | 249 | - [Remove support for node v6](https://github.com/log4js-node/streamroller/pull/44) 250 | - [Replace lodash with native alternatives](https://github.com/log4js-node/streamroller/pull/45) - thanks [@devoto13](https://github.com/devoto13) 251 | - [Simplify filename formatting and parsing](https://github.com/log4js-node/streamroller/pull/46) 252 | - [Removed async lib from main code](https://github.com/log4js-node/streamroller/pull/47) 253 | - [Fix timezone issues in tests](https://github.com/log4js-node/streamroller/pull/48) - thanks [@devoto13](https://github.com/devoto13) 254 | - [Fix for flag values that need existing file size](https://github.com/log4js-node/streamroller/pull/49) 255 | - [Refactor for better readability](https://github.com/log4js-node/streamroller/pull/50) 256 | - [Removed async lib from test 
code](https://github.com/log4js-node/streamroller/pull/51) 257 | 258 | ## [1.0.6](https://github.com/log4js-node/streamroller/milestone/8) 259 | 260 | - [Fix for overwriting old backup files](https://github.com/log4js-node/streamroller/pull/43) 261 | - Updated lodash to 4.17.14 262 | 263 | ## [1.0.5](https://github.com/log4js-node/streamroller/milestone/7) 264 | 265 | - [Updated dependencies](https://github.com/log4js-node/streamroller/pull/38) 266 | - [Fix for initial file date when appending to existing file](https://github.com/log4js-node/streamroller/pull/40) 267 | 268 | ## [1.0.4](https://github.com/log4js-node/streamroller/milestone/6) 269 | 270 | - [Fix for initial size when appending to existing file](https://github.com/log4js-node/streamroller/pull/35) 271 | 272 | ## [1.0.3](https://github.com/log4js-node/streamroller/milestone/5) 273 | 274 | - [Fix for crash when pattern is all digits](https://github.com/log4js-node/streamroller/pull/33) 275 | 276 | ## 1.0.2 277 | 278 | - is exactly the same as 1.0.1, due to me being an idiot and not pulling before I pushed 279 | 280 | ## Previous versions 281 | 282 | Previous release details are available by browsing the [milestones](https://github.com/log4js-node/streamroller/milestones) in github. 
283 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Gareth Jones 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | streamroller [![CodeQL](https://github.com/log4js-node/streamroller/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/log4js-node/streamroller/actions/workflows/codeql-analysis.yml) [![Node.js CI](https://github.com/log4js-node/streamroller/actions/workflows/node.js.yml/badge.svg)](https://github.com/log4js-node/streamroller/actions/workflows/node.js.yml) 2 | ============ 3 | 4 | [![NPM](https://nodei.co/npm/streamroller.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/streamroller/) 5 | 6 | node.js file streams that roll over when they reach a maximum size, or a date/time. 7 | 8 | ```sh 9 | npm install streamroller 10 | ``` 11 | 12 | ## usage 13 | 14 | ```javascript 15 | var rollers = require('streamroller'); 16 | var stream = new rollers.RollingFileStream('myfile', 1024, 3); 17 | stream.write("stuff"); 18 | stream.end(); 19 | ``` 20 | 21 | The streams behave the same as standard node.js streams, except that when certain conditions are met they will rename the current file to a backup and start writing to a new file. 22 | 23 | ### new RollingFileStream(filename [, maxSize, numBackups, options]) 24 | * `filename` \<string\> 25 | * `maxSize` \<integer\> - defaults to `0` - the size in bytes to trigger a rollover. If not specified or 0, then no log rolling will happen. 
26 | * `numBackups` \ - defaults to `1` - the number of old files to keep (excluding the hot file) 27 | * `options` \ 28 | * `encoding` \ - defaults to `'utf8'` 29 | * `mode` \ - defaults to `0o600` (see [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes)) 30 | * `flags` \ - defaults to `'a'` (see [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags)) 31 | * `compress` \ - defaults to `false` - compress the backup files using gzip (backup files will have `.gz` extension) 32 | * `keepFileExt` \ - defaults to `false` - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`). 33 | * `fileNameSep` \ - defaults to `'.'` - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt) 34 | 35 | This returns a `WritableStream`. When the current file being written to (given by `filename`) gets up to or larger than `maxSize`, then the current file will be renamed to `filename.1` and a new file will start being written to. Up to `numBackups` of old files are maintained, so if `numBackups` is 3 then there will be 4 files: 36 |
37 |      filename
38 |      filename.1
39 |      filename.2
40 |      filename.3
41 | 
42 | When filename size >= maxSize then: 43 |
44 |      filename -> filename.1
45 |      filename.1 -> filename.2
46 |      filename.2 -> filename.3
47 |      filename.3 gets overwritten
48 |      filename is a new file
49 | 
50 | 51 | ### new DateRollingFileStream(filename [, pattern, options]) 52 | * `filename` \ 53 | * `pattern` \ - defaults to `yyyy-MM-dd` - the date pattern to trigger rolling (see below) 54 | * `options` \ 55 | * `encoding` \ - defaults to `'utf8'` 56 | * `mode` \ - defaults to `0o600` (see [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes)) 57 | * `flags` \ - defaults to `'a'` (see [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags)) 58 | * `compress` \ - defaults to `false` - compress the backup files using gzip (backup files will have `.gz` extension) 59 | * `keepFileExt` \ - defaults to `false` - preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`). 60 | * `fileNameSep` \ - defaults to `'.'` - the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt) 61 | * `alwaysIncludePattern` \ - defaults to `false` - extend the initial file with the pattern 62 | * `daysToKeep` `numBackups` \ - defaults to `1` - the number of old files that matches the pattern to keep (excluding the hot file) 63 | * `maxSize` \ - defaults to `0` - the size in bytes to trigger a rollover. If not specified or 0, then no log rolling will happen. 64 | 65 | This returns a `WritableStream`. When the current time, formatted as `pattern`, changes then the current file will be renamed to `filename.formattedDate` where `formattedDate` is the result of processing the date through the pattern, and a new file will begin to be written. Streamroller uses [date-format](http://github.com/nomiddlename/date-format) to format dates, and the `pattern` should use the date-format format. e.g. with a `pattern` of `"yyyy-MM-dd"`, and assuming today is August 29, 2013 then writing to the stream today will just write to `filename`. 
At midnight (or more precisely, at the next file write after midnight), `filename` will be renamed to `filename.2013-08-29` and a new `filename` will be created. If `options.alwaysIncludePattern` is true, then the initial file will be `filename.2013-08-29` and no renaming will occur at midnight, but a new file will be written to with the name `filename.2013-08-30`. If `maxSize` is populated, when the current file being written to (given by `filename`) gets up to or larger than `maxSize`, then the current file will be renamed to `filename.pattern.1` and a new file will start being written to. Up to `numBackups` of old files are maintained, so if `numBackups` is 3 then there will be 4 files: 66 |
67 |      filename
68 |      filename.20220131.1
69 |      filename.20220131.2
70 |      filename.20220131.3
71 | 
72 | -------------------------------------------------------------------------------- /lib/DateRollingFileStream.js: -------------------------------------------------------------------------------- 1 | const RollingFileWriteStream = require('./RollingFileWriteStream'); 2 | 3 | // just to adapt the previous version 4 | class DateRollingFileStream extends RollingFileWriteStream { 5 | constructor(filename, pattern, options) { 6 | if (pattern && typeof(pattern) === 'object') { 7 | options = pattern; 8 | pattern = null; 9 | } 10 | if (!options) { 11 | options = {}; 12 | } 13 | if (!pattern) { 14 | pattern = 'yyyy-MM-dd'; 15 | } 16 | options.pattern = pattern; 17 | if (!options.numBackups && options.numBackups !== 0) { 18 | if (!options.daysToKeep && options.daysToKeep !== 0) { 19 | options.daysToKeep = 1; 20 | } else { 21 | process.emitWarning( 22 | "options.daysToKeep is deprecated due to the confusion it causes when used " + 23 | "together with file size rolling. Please use options.numBackups instead.", 24 | "DeprecationWarning", "streamroller-DEP0001" 25 | ); 26 | } 27 | options.numBackups = options.daysToKeep; 28 | } else { 29 | options.daysToKeep = options.numBackups; 30 | } 31 | super(filename, options); 32 | this.mode = this.options.mode; 33 | } 34 | 35 | get theStream() { 36 | return this.currentFileStream; 37 | } 38 | 39 | } 40 | 41 | module.exports = DateRollingFileStream; 42 | -------------------------------------------------------------------------------- /lib/RollingFileStream.js: -------------------------------------------------------------------------------- 1 | const RollingFileWriteStream = require('./RollingFileWriteStream'); 2 | 3 | // just to adapt the previous version 4 | class RollingFileStream extends RollingFileWriteStream { 5 | constructor(filename, size, backups, options) { 6 | if (!options) { 7 | options = {}; 8 | } 9 | if (size) { 10 | options.maxSize = size; 11 | } 12 | if (!options.numBackups && options.numBackups !== 0) { 13 | if (!backups 
&& backups !== 0) { 14 | backups = 1; 15 | } 16 | options.numBackups = backups; 17 | } 18 | super(filename, options); 19 | this.backups = options.numBackups; 20 | this.size = this.options.maxSize; 21 | } 22 | 23 | get theStream() { 24 | return this.currentFileStream; 25 | } 26 | 27 | } 28 | 29 | module.exports = RollingFileStream; 30 | -------------------------------------------------------------------------------- /lib/RollingFileWriteStream.js: -------------------------------------------------------------------------------- 1 | const debug = require("debug")("streamroller:RollingFileWriteStream"); 2 | const fs = require("fs-extra"); 3 | const path = require("path"); 4 | const os = require("os"); 5 | const newNow = require("./now"); 6 | const format = require("date-format"); 7 | const { Writable } = require("stream"); 8 | const fileNameFormatter = require("./fileNameFormatter"); 9 | const fileNameParser = require("./fileNameParser"); 10 | const moveAndMaybeCompressFile = require("./moveAndMaybeCompressFile"); 11 | 12 | const deleteFiles = fileNames => { 13 | debug(`deleteFiles: files to delete: ${fileNames}`); 14 | return Promise.all(fileNames.map(f => fs.unlink(f).catch((e) => { 15 | debug(`deleteFiles: error when unlinking ${f}, ignoring. Error was ${e}`); 16 | }))); 17 | }; 18 | 19 | /** 20 | * RollingFileWriteStream is mainly used when writing to a file rolling by date or size. 21 | * RollingFileWriteStream inherits from stream.Writable 22 | */ 23 | class RollingFileWriteStream extends Writable { 24 | /** 25 | * Create a RollingFileWriteStream 26 | * @constructor 27 | * @param {string} filePath - The file path to write. 28 | * @param {object} options - The extra options 29 | * @param {number} options.numToKeep - The max numbers of files to keep. 30 | * @param {number} options.maxSize - The maxSize one file can reach. Unit is Byte. 31 | * This should be more than 1024. The default is 0. 32 | * If not specified or 0, then no log rolling will happen. 
33 | * @param {string} options.mode - The mode of the files. The default is '0600'. Refer to stream.writable for more. 34 | * @param {string} options.flags - The default is 'a'. Refer to stream.flags for more. 35 | * @param {boolean} options.compress - Whether to compress backup files. 36 | * @param {boolean} options.keepFileExt - Whether to keep the file extension. 37 | * @param {string} options.pattern - The date string pattern in the file name. 38 | * @param {boolean} options.alwaysIncludePattern - Whether to add date to the name of the first file. 39 | */ 40 | constructor(filePath, options) { 41 | debug(`constructor: creating RollingFileWriteStream. path=${filePath}`); 42 | if (typeof filePath !== "string" || filePath.length === 0) { 43 | throw new Error(`Invalid filename: ${filePath}`); 44 | } else if (filePath.endsWith(path.sep)) { 45 | throw new Error(`Filename is a directory: ${filePath}`); 46 | } else if (filePath.indexOf(`~${path.sep}`) === 0) { 47 | // handle ~ expansion: https://github.com/nodejs/node/issues/684 48 | // exclude ~ and ~filename as these can be valid files 49 | filePath = filePath.replace("~", os.homedir()); 50 | } 51 | super(options); 52 | this.options = this._parseOption(options); 53 | this.fileObject = path.parse(filePath); 54 | if (this.fileObject.dir === "") { 55 | this.fileObject = path.parse(path.join(process.cwd(), filePath)); 56 | } 57 | this.fileFormatter = fileNameFormatter({ 58 | file: this.fileObject, 59 | alwaysIncludeDate: this.options.alwaysIncludePattern, 60 | needsIndex: this.options.maxSize < Number.MAX_SAFE_INTEGER, 61 | compress: this.options.compress, 62 | keepFileExt: this.options.keepFileExt, 63 | fileNameSep: this.options.fileNameSep 64 | }); 65 | 66 | this.fileNameParser = fileNameParser({ 67 | file: this.fileObject, 68 | keepFileExt: this.options.keepFileExt, 69 | pattern: this.options.pattern, 70 | fileNameSep: this.options.fileNameSep 71 | }); 72 | 73 | this.state = { 74 | currentSize: 0 75 | }; 76 | 77 | if 
(this.options.pattern) { 78 | this.state.currentDate = format(this.options.pattern, newNow()); 79 | } 80 | 81 | this.filename = this.fileFormatter({ 82 | index: 0, 83 | date: this.state.currentDate 84 | }); 85 | if (["a", "a+", "as", "as+"].includes(this.options.flags)) { 86 | this._setExistingSizeAndDate(); 87 | } 88 | 89 | debug( 90 | `constructor: create new file ${this.filename}, state=${JSON.stringify( 91 | this.state 92 | )}` 93 | ); 94 | this._renewWriteStream(); 95 | } 96 | 97 | _setExistingSizeAndDate() { 98 | try { 99 | const stats = fs.statSync(this.filename); 100 | this.state.currentSize = stats.size; 101 | if (this.options.pattern) { 102 | this.state.currentDate = format(this.options.pattern, stats.mtime); 103 | } 104 | } catch (e) { 105 | //file does not exist, that's fine - move along 106 | return; 107 | } 108 | } 109 | 110 | _parseOption(rawOptions) { 111 | const defaultOptions = { 112 | maxSize: 0, 113 | numToKeep: Number.MAX_SAFE_INTEGER, 114 | encoding: "utf8", 115 | mode: parseInt("0600", 8), 116 | flags: "a", 117 | compress: false, 118 | keepFileExt: false, 119 | alwaysIncludePattern: false 120 | }; 121 | const options = Object.assign({}, defaultOptions, rawOptions); 122 | if (!options.maxSize) { 123 | delete options.maxSize; 124 | } else if (options.maxSize <= 0) { 125 | throw new Error(`options.maxSize (${options.maxSize}) should be > 0`); 126 | } 127 | // options.numBackups will supercede options.numToKeep 128 | if (options.numBackups || options.numBackups === 0) { 129 | if (options.numBackups < 0) { 130 | throw new Error(`options.numBackups (${options.numBackups}) should be >= 0`); 131 | } else if (options.numBackups >= Number.MAX_SAFE_INTEGER) { 132 | // to cater for numToKeep (include the hot file) at Number.MAX_SAFE_INTEGER 133 | throw new Error(`options.numBackups (${options.numBackups}) should be < Number.MAX_SAFE_INTEGER`); 134 | } else { 135 | options.numToKeep = options.numBackups + 1; 136 | } 137 | } else if (options.numToKeep <= 
0) { 138 | throw new Error(`options.numToKeep (${options.numToKeep}) should be > 0`); 139 | } 140 | debug( 141 | `_parseOption: creating stream with option=${JSON.stringify(options)}` 142 | ); 143 | return options; 144 | } 145 | 146 | _final(callback) { 147 | this.currentFileStream.end("", this.options.encoding, callback); 148 | } 149 | 150 | _write(chunk, encoding, callback) { 151 | this._shouldRoll().then(() => { 152 | debug( 153 | `_write: writing chunk. ` + 154 | `file=${this.currentFileStream.path} ` + 155 | `state=${JSON.stringify(this.state)} ` + 156 | `chunk=${chunk}` 157 | ); 158 | this.currentFileStream.write(chunk, encoding, e => { 159 | this.state.currentSize += chunk.length; 160 | callback(e); 161 | }); 162 | }); 163 | } 164 | 165 | async _shouldRoll() { 166 | if (this._dateChanged() || this._tooBig()) { 167 | debug( 168 | `_shouldRoll: rolling because dateChanged? ${this._dateChanged()} or tooBig? ${this._tooBig()}` 169 | ); 170 | await this._roll(); 171 | } 172 | } 173 | 174 | _dateChanged() { 175 | return ( 176 | this.state.currentDate && 177 | this.state.currentDate !== format(this.options.pattern, newNow()) 178 | ); 179 | } 180 | 181 | _tooBig() { 182 | return this.state.currentSize >= this.options.maxSize; 183 | } 184 | 185 | _roll() { 186 | debug(`_roll: closing the current stream`); 187 | return new Promise((resolve, reject) => { 188 | this.currentFileStream.end("", this.options.encoding, () => { 189 | this._moveOldFiles() 190 | .then(resolve) 191 | .catch(reject); 192 | }); 193 | }); 194 | } 195 | 196 | async _moveOldFiles() { 197 | const files = await this._getExistingFiles(); 198 | const todaysFiles = this.state.currentDate 199 | ? 
files.filter(f => f.date === this.state.currentDate) 200 | : files; 201 | for (let i = todaysFiles.length; i >= 0; i--) { 202 | debug(`_moveOldFiles: i = ${i}`); 203 | const sourceFilePath = this.fileFormatter({ 204 | date: this.state.currentDate, 205 | index: i 206 | }); 207 | const targetFilePath = this.fileFormatter({ 208 | date: this.state.currentDate, 209 | index: i + 1 210 | }); 211 | 212 | const moveAndCompressOptions = { 213 | compress: this.options.compress && i === 0, 214 | mode: this.options.mode 215 | }; 216 | await moveAndMaybeCompressFile( 217 | sourceFilePath, 218 | targetFilePath, 219 | moveAndCompressOptions 220 | ); 221 | } 222 | 223 | this.state.currentSize = 0; 224 | this.state.currentDate = this.state.currentDate 225 | ? format(this.options.pattern, newNow()) 226 | : null; 227 | debug( 228 | `_moveOldFiles: finished rolling files. state=${JSON.stringify( 229 | this.state 230 | )}` 231 | ); 232 | this._renewWriteStream(); 233 | // wait for the file to be open before cleaning up old ones, 234 | // otherwise the daysToKeep calculations can be off 235 | await new Promise((resolve, reject) => { 236 | this.currentFileStream.write("", "utf8", () => { 237 | this._clean() 238 | .then(resolve) 239 | .catch(reject); 240 | }); 241 | }); 242 | } 243 | 244 | // Sorted from the oldest to the latest 245 | async _getExistingFiles() { 246 | const files = await fs.readdir(this.fileObject.dir) 247 | .catch( /* istanbul ignore next: will not happen on windows */ () => []); 248 | 249 | debug(`_getExistingFiles: files=${files}`); 250 | const existingFileDetails = files 251 | .map(n => this.fileNameParser(n)) 252 | .filter(n => n); 253 | 254 | const getKey = n => 255 | (n.timestamp ? 
n.timestamp : newNow().getTime()) - n.index; 256 | existingFileDetails.sort((a, b) => getKey(a) - getKey(b)); 257 | 258 | return existingFileDetails; 259 | } 260 | 261 | _renewWriteStream() { 262 | const filePath = this.fileFormatter({ 263 | date: this.state.currentDate, 264 | index: 0 265 | }); 266 | 267 | // attempt to create the directory 268 | const mkdir = (dir) => { 269 | try { 270 | return fs.mkdirSync(dir, { recursive: true }); 271 | } 272 | // backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option) 273 | catch (e) { 274 | // recursive creation of parent first 275 | if (e.code === "ENOENT") { 276 | mkdir(path.dirname(dir)); 277 | return mkdir(dir); 278 | } 279 | 280 | // throw error for all except EEXIST and EROFS (read-only filesystem) 281 | if (e.code !== "EEXIST" && e.code !== "EROFS") { 282 | throw e; 283 | } 284 | 285 | // EEXIST: throw if file and not directory 286 | // EROFS : throw if directory not found 287 | else { 288 | try { 289 | if (fs.statSync(dir).isDirectory()) { 290 | return dir; 291 | } 292 | throw e; 293 | } catch (err) { 294 | throw e; 295 | } 296 | } 297 | } 298 | }; 299 | mkdir(this.fileObject.dir); 300 | 301 | const ops = { 302 | flags: this.options.flags, 303 | encoding: this.options.encoding, 304 | mode: this.options.mode 305 | }; 306 | const renameKey = function(obj, oldKey, newKey) { 307 | obj[newKey] = obj[oldKey]; 308 | delete obj[oldKey]; 309 | return obj; 310 | }; 311 | // try to throw EISDIR, EROFS, EACCES 312 | fs.appendFileSync(filePath, "", renameKey({ ...ops }, "flags", "flag")); 313 | this.currentFileStream = fs.createWriteStream(filePath, ops); 314 | this.currentFileStream.on("error", e => { 315 | this.emit("error", e); 316 | }); 317 | } 318 | 319 | async _clean() { 320 | const existingFileDetails = await this._getExistingFiles(); 321 | debug( 322 | `_clean: numToKeep = ${this.options.numToKeep}, existingFiles = ${existingFileDetails.length}` 323 | ); 324 | debug("_clean: existing files are: 
", existingFileDetails); 325 | if (this._tooManyFiles(existingFileDetails.length)) { 326 | const fileNamesToRemove = existingFileDetails 327 | .slice(0, existingFileDetails.length - this.options.numToKeep) 328 | .map(f => path.format({ dir: this.fileObject.dir, base: f.filename })); 329 | await deleteFiles(fileNamesToRemove); 330 | } 331 | } 332 | 333 | _tooManyFiles(numFiles) { 334 | return this.options.numToKeep > 0 && numFiles > this.options.numToKeep; 335 | } 336 | } 337 | 338 | module.exports = RollingFileWriteStream; 339 | -------------------------------------------------------------------------------- /lib/fileNameFormatter.js: -------------------------------------------------------------------------------- 1 | const debug = require("debug")("streamroller:fileNameFormatter"); 2 | const path = require("path"); 3 | const ZIP_EXT = ".gz"; 4 | const DEFAULT_FILENAME_SEP = "."; 5 | 6 | module.exports = ({ 7 | file, 8 | keepFileExt, 9 | needsIndex, 10 | alwaysIncludeDate, 11 | compress, 12 | fileNameSep 13 | }) => { 14 | let FILENAME_SEP = fileNameSep || DEFAULT_FILENAME_SEP; 15 | const dirAndName = path.join(file.dir, file.name); 16 | 17 | const ext = f => f + file.ext; 18 | 19 | const index = (f, i, d) => 20 | (needsIndex || !d) && i ? f + FILENAME_SEP + i : f; 21 | 22 | const date = (f, i, d) => { 23 | return (i > 0 || alwaysIncludeDate) && d ? f + FILENAME_SEP + d : f; 24 | }; 25 | 26 | const gzip = (f, i) => (i && compress ? f + ZIP_EXT : f); 27 | 28 | const parts = keepFileExt 29 | ? 
[date, index, ext, gzip] 30 | : [ext, date, index, gzip]; 31 | 32 | return ({ date, index }) => { 33 | debug(`_formatFileName: date=${date}, index=${index}`); 34 | return parts.reduce( 35 | (filename, part) => part(filename, index, date), 36 | dirAndName 37 | ); 38 | }; 39 | }; 40 | -------------------------------------------------------------------------------- /lib/fileNameParser.js: -------------------------------------------------------------------------------- 1 | const debug = require("debug")("streamroller:fileNameParser"); 2 | const ZIP_EXT = ".gz"; 3 | const format = require("date-format"); 4 | const DEFAULT_FILENAME_SEP = "."; 5 | 6 | module.exports = ({ file, keepFileExt, pattern, fileNameSep }) => { 7 | let FILENAME_SEP = fileNameSep || DEFAULT_FILENAME_SEP; 8 | // All these functions take two arguments: f, the filename, and p, the result placeholder 9 | // They return the filename with any matching parts removed. 10 | // The "zip" function, for instance, removes the ".gz" part of the filename (if present) 11 | const zip = (f, p) => { 12 | if (f.endsWith(ZIP_EXT)) { 13 | debug("it is gzipped"); 14 | p.isCompressed = true; 15 | return f.slice(0, -1 * ZIP_EXT.length); 16 | } 17 | return f; 18 | }; 19 | 20 | const __NOT_MATCHING__ = "__NOT_MATCHING__"; 21 | 22 | const extAtEnd = f => { 23 | if (f.startsWith(file.name) && f.endsWith(file.ext)) { 24 | debug("it starts and ends with the right things"); 25 | return f.slice(file.name.length + 1, -1 * file.ext.length); 26 | } 27 | return __NOT_MATCHING__; 28 | }; 29 | 30 | const extInMiddle = f => { 31 | if (f.startsWith(file.base)) { 32 | debug("it starts with the right things"); 33 | return f.slice(file.base.length + 1); 34 | } 35 | return __NOT_MATCHING__; 36 | }; 37 | 38 | const dateAndIndex = (f, p) => { 39 | const items = f.split(FILENAME_SEP); 40 | let indexStr = items[items.length - 1]; 41 | debug("items: ", items, ", indexStr: ", indexStr); 42 | let dateStr = f; 43 | if (indexStr !== undefined && 
indexStr.match(/^\d+$/)) { 44 | dateStr = f.slice(0, -1 * (indexStr.length + 1)); 45 | debug(`dateStr is ${dateStr}`); 46 | if (pattern && !dateStr) { 47 | dateStr = indexStr; 48 | indexStr = "0"; 49 | } 50 | } else { 51 | indexStr = "0"; 52 | } 53 | 54 | try { 55 | // Two arguments for new Date() are intentional. This will set other date 56 | // components to minimal values in the current timezone instead of UTC, 57 | // as new Date(0) will do. 58 | const date = format.parse(pattern, dateStr, new Date(0, 0)); 59 | if (format.asString(pattern, date) !== dateStr) return f; 60 | p.index = parseInt(indexStr, 10); 61 | p.date = dateStr; 62 | p.timestamp = date.getTime(); 63 | return ""; 64 | } catch (e) { 65 | //not a valid date, don't panic. 66 | debug(`Problem parsing ${dateStr} as ${pattern}, error was: `, e); 67 | return f; 68 | } 69 | }; 70 | 71 | const index = (f, p) => { 72 | if (f.match(/^\d+$/)) { 73 | debug("it has an index"); 74 | p.index = parseInt(f, 10); 75 | return ""; 76 | } 77 | return f; 78 | }; 79 | 80 | let parts = [ 81 | zip, 82 | keepFileExt ? extAtEnd : extInMiddle, 83 | pattern ? dateAndIndex : index 84 | ]; 85 | 86 | return filename => { 87 | let result = { filename, index: 0, isCompressed: false }; 88 | // pass the filename through each of the file part parsers 89 | let whatsLeftOver = parts.reduce( 90 | (remains, part) => part(remains, result), 91 | filename 92 | ); 93 | // if there's anything left after parsing, then it wasn't a valid filename 94 | return whatsLeftOver ? 
null : result; 95 | }; 96 | }; 97 | -------------------------------------------------------------------------------- /lib/index.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | RollingFileWriteStream: require('./RollingFileWriteStream'), 3 | RollingFileStream: require('./RollingFileStream'), 4 | DateRollingFileStream: require('./DateRollingFileStream') 5 | }; 6 | -------------------------------------------------------------------------------- /lib/moveAndMaybeCompressFile.js: -------------------------------------------------------------------------------- 1 | const debug = require('debug')('streamroller:moveAndMaybeCompressFile'); 2 | const fs = require('fs-extra'); 3 | const zlib = require('zlib'); 4 | 5 | const _parseOption = function(rawOptions){ 6 | const defaultOptions = { 7 | mode: parseInt("0600", 8), 8 | compress: false, 9 | }; 10 | const options = Object.assign({}, defaultOptions, rawOptions); 11 | debug(`_parseOption: moveAndMaybeCompressFile called with option=${JSON.stringify(options)}`); 12 | return options; 13 | }; 14 | 15 | const moveAndMaybeCompressFile = async ( 16 | sourceFilePath, 17 | targetFilePath, 18 | options 19 | ) => { 20 | options = _parseOption(options); 21 | 22 | if (sourceFilePath === targetFilePath) { 23 | debug(`moveAndMaybeCompressFile: source and target are the same, not doing anything`); 24 | return; 25 | } 26 | 27 | if (await fs.pathExists(sourceFilePath)) { 28 | debug( 29 | `moveAndMaybeCompressFile: moving file from ${sourceFilePath} to ${targetFilePath} ${ 30 | options.compress ? 
"with" : "without" 31 | } compress` 32 | ); 33 | if (options.compress) { 34 | await new Promise((resolve, reject) => { 35 | let isCreated = false; 36 | // to avoid concurrency, the forked process which can create the file will proceed (using flags wx) 37 | const writeStream = fs.createWriteStream(targetFilePath, { mode: options.mode, flags: "wx" }) 38 | // wait until writable stream is valid before proceeding to read 39 | .on("open", () => { 40 | isCreated = true; 41 | const readStream = fs.createReadStream(sourceFilePath) 42 | // wait until readable stream is valid before piping 43 | .on("open", () => { 44 | readStream.pipe(zlib.createGzip()).pipe(writeStream); 45 | }) 46 | .on("error", (e) => { 47 | debug(`moveAndMaybeCompressFile: error reading ${sourceFilePath}`, e); 48 | // manually close writable: https://nodejs.org/api/stream.html#readablepipedestination-options 49 | writeStream.destroy(e); 50 | }); 51 | }) 52 | .on("finish", () => { 53 | debug(`moveAndMaybeCompressFile: finished compressing ${targetFilePath}, deleting ${sourceFilePath}`); 54 | // delete sourceFilePath 55 | fs.unlink(sourceFilePath) 56 | .then(resolve) 57 | .catch((e) => { 58 | debug(`moveAndMaybeCompressFile: error deleting ${sourceFilePath}, truncating instead`, e); 59 | // fallback to truncate 60 | fs.truncate(sourceFilePath) 61 | .then(resolve) 62 | .catch((e) => { 63 | debug(`moveAndMaybeCompressFile: error truncating ${sourceFilePath}`, e); 64 | reject(e); 65 | }); 66 | }); 67 | }) 68 | .on("error", (e) => { 69 | if (!isCreated) { 70 | debug(`moveAndMaybeCompressFile: error creating ${targetFilePath}`, e); 71 | // do not do anything if handled by another forked process 72 | reject(e); 73 | } else { 74 | debug(`moveAndMaybeCompressFile: error writing ${targetFilePath}, deleting`, e); 75 | // delete targetFilePath (taking as nothing happened) 76 | fs.unlink(targetFilePath) 77 | .then(() => { reject(e); }) 78 | .catch((e) => { 79 | debug(`moveAndMaybeCompressFile: error deleting 
${targetFilePath}`, e); 80 | reject(e); 81 | }); 82 | } 83 | }); 84 | }).catch(() => {}); 85 | } else { 86 | debug(`moveAndMaybeCompressFile: renaming ${sourceFilePath} to ${targetFilePath}`); 87 | try { 88 | await fs.move(sourceFilePath, targetFilePath, { overwrite: true }); 89 | } catch (e) { 90 | debug(`moveAndMaybeCompressFile: error renaming ${sourceFilePath} to ${targetFilePath}`, e); 91 | /* istanbul ignore else: no need to do anything if file does not exist */ 92 | if (e.code !== "ENOENT") { 93 | debug(`moveAndMaybeCompressFile: trying copy+truncate instead`); 94 | try { 95 | await fs.copy(sourceFilePath, targetFilePath, { overwrite: true }); 96 | await fs.truncate(sourceFilePath); 97 | } catch (e) { 98 | debug(`moveAndMaybeCompressFile: error copy+truncate`, e); 99 | } 100 | } 101 | } 102 | } 103 | } 104 | }; 105 | 106 | module.exports = moveAndMaybeCompressFile; 107 | -------------------------------------------------------------------------------- /lib/now.js: -------------------------------------------------------------------------------- 1 | // allows us to inject a mock date in tests 2 | module.exports = () => new Date(); 3 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "streamroller", 3 | "version": "3.1.5", 4 | "description": "file streams that roll over when size limits, or dates are reached", 5 | "main": "lib/index.js", 6 | "files": [ 7 | "lib", 8 | "CHANGELOG.md" 9 | ], 10 | "directories": { 11 | "test": "test" 12 | }, 13 | "scripts": { 14 | "codecheck": "eslint \"lib/*.js\" \"test/*.js\"", 15 | "prepublishOnly": "npm test", 16 | "pretest": "npm run codecheck", 17 | "clean": "rm -rf node_modules/", 18 | "test": "nyc --check-coverage mocha", 19 | "html-report": "nyc report --reporter=html" 20 | }, 21 | "repository": { 22 | "type": "git", 23 | "url": "https://github.com/log4js-node/streamroller.git" 24 
| }, 25 | "keywords": [ 26 | "stream", 27 | "rolling" 28 | ], 29 | "author": "Gareth Jones , Huang Yichao ", 30 | "license": "MIT", 31 | "readmeFilename": "README.md", 32 | "gitHead": "ece35d7d86c87c04ff09e8604accae81cf36a0ce", 33 | "devDependencies": { 34 | "@commitlint/cli": "^17.4.3", 35 | "@commitlint/config-conventional": "^17.4.3", 36 | "@types/node": "^18.13.0", 37 | "eslint": "^8.34.0", 38 | "husky": "^8.0.3", 39 | "mocha": "^10.2.0", 40 | "nyc": "^15.1.0", 41 | "proxyquire": "^2.1.3", 42 | "should": "^13.2.3" 43 | }, 44 | "dependencies": { 45 | "date-format": "^4.0.14", 46 | "debug": "^4.3.4", 47 | "fs-extra": "^8.1.0" 48 | }, 49 | "engines": { 50 | "node": ">=8.0" 51 | }, 52 | "commitlint": { 53 | "extends": [ 54 | "@commitlint/config-conventional" 55 | ] 56 | }, 57 | "eslintConfig": { 58 | "env": { 59 | "browser": false, 60 | "node": true, 61 | "es6": true, 62 | "mocha": true 63 | }, 64 | "parserOptions": { 65 | "ecmaVersion": 2018 66 | }, 67 | "extends": "eslint:recommended", 68 | "rules": { 69 | "no-console": "off" 70 | } 71 | }, 72 | "husky": { 73 | "hooks": { 74 | "commit-msg": "commitlint -e $HUSKY_GIT_PARAMS" 75 | } 76 | }, 77 | "nyc": { 78 | "include": [ 79 | "lib/**" 80 | ], 81 | "branches": 100, 82 | "lines": 100, 83 | "functions": 100 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /test/DateRollingFileStream-test.js: -------------------------------------------------------------------------------- 1 | require("should"); 2 | 3 | const fs = require("fs-extra"), 4 | path = require("path"), 5 | zlib = require("zlib"), 6 | proxyquire = require("proxyquire").noPreserveCache(), 7 | util = require("util"), 8 | streams = require("stream"); 9 | 10 | let fakeNow = new Date(2012, 8, 12, 10, 37, 11); 11 | const mockNow = () => fakeNow; 12 | const RollingFileWriteStream = proxyquire("../lib/RollingFileWriteStream", { 13 | "./now": mockNow 14 | }); 15 | const DateRollingFileStream = 
proxyquire("../lib/DateRollingFileStream", { 16 | "./RollingFileWriteStream": RollingFileWriteStream 17 | }); 18 | 19 | const gunzip = util.promisify(zlib.gunzip); 20 | const gzip = util.promisify(zlib.gzip); 21 | const remove = filename => fs.unlink(filename).catch(() => {}); 22 | const close = async (stream) => new Promise( 23 | (resolve, reject) => stream.end(e => e ? reject(e) : resolve()) 24 | ); 25 | 26 | describe("DateRollingFileStream", function() { 27 | describe("arguments", function() { 28 | let stream; 29 | 30 | before(function() { 31 | stream = new DateRollingFileStream( 32 | path.join(__dirname, "test-date-rolling-file-stream-1"), 33 | "yyyy-MM-dd.hh" 34 | ); 35 | }); 36 | 37 | after(async function() { 38 | await close(stream); 39 | await remove(path.join(__dirname, "test-date-rolling-file-stream-1")); 40 | }); 41 | 42 | it("should take a filename and a pattern and return a WritableStream", function() { 43 | stream.filename.should.eql( 44 | path.join(__dirname, "test-date-rolling-file-stream-1") 45 | ); 46 | stream.options.pattern.should.eql("yyyy-MM-dd.hh"); 47 | stream.should.be.instanceOf(streams.Writable); 48 | }); 49 | 50 | it("with default settings for the underlying stream", function() { 51 | stream.currentFileStream.mode.should.eql(0o600); 52 | stream.currentFileStream.flags.should.eql("a"); 53 | }); 54 | }); 55 | 56 | describe("default arguments", function() { 57 | var stream; 58 | 59 | before(function() { 60 | stream = new DateRollingFileStream( 61 | path.join(__dirname, "test-date-rolling-file-stream-2") 62 | ); 63 | }); 64 | 65 | after(async function() { 66 | await close(stream); 67 | await remove(path.join(__dirname, "test-date-rolling-file-stream-2")); 68 | }); 69 | 70 | it("should have pattern of yyyy-MM-dd", function() { 71 | stream.options.pattern.should.eql("yyyy-MM-dd"); 72 | }); 73 | }); 74 | 75 | describe("with stream arguments", function() { 76 | var stream; 77 | 78 | before(function() { 79 | stream = new DateRollingFileStream( 80 
| path.join(__dirname, "test-date-rolling-file-stream-3"), 81 | "yyyy-MM-dd", 82 | { mode: parseInt("0666", 8) } 83 | ); 84 | }); 85 | 86 | after(async function() { 87 | await close(stream); 88 | await remove(path.join(__dirname, "test-date-rolling-file-stream-3")); 89 | }); 90 | 91 | it("should pass them to the underlying stream", function() { 92 | stream.theStream.mode.should.eql(parseInt("0666", 8)); 93 | }); 94 | }); 95 | 96 | describe("with stream arguments but no pattern", function() { 97 | var stream; 98 | 99 | before(function() { 100 | stream = new DateRollingFileStream( 101 | path.join(__dirname, "test-date-rolling-file-stream-4"), 102 | { mode: parseInt("0666", 8) } 103 | ); 104 | }); 105 | 106 | after(async function() { 107 | await close(stream); 108 | await remove(path.join(__dirname, "test-date-rolling-file-stream-4")); 109 | }); 110 | 111 | it("should pass them to the underlying stream", function() { 112 | stream.theStream.mode.should.eql(parseInt("0666", 8)); 113 | }); 114 | 115 | it("should use default pattern", function() { 116 | stream.options.pattern.should.eql("yyyy-MM-dd"); 117 | }); 118 | }); 119 | 120 | describe("with a pattern of .yyyy-MM-dd", function() { 121 | var stream; 122 | 123 | before(function(done) { 124 | stream = new DateRollingFileStream( 125 | path.join(__dirname, "test-date-rolling-file-stream-5"), 126 | ".yyyy-MM-dd", 127 | null 128 | ); 129 | stream.write("First message\n", "utf8", done); 130 | }); 131 | 132 | after(async function() { 133 | await close(stream); 134 | await remove(path.join(__dirname, "test-date-rolling-file-stream-5")); 135 | }); 136 | 137 | it("should create a file with the base name", async function() { 138 | const contents = await fs.readFile( 139 | path.join(__dirname, "test-date-rolling-file-stream-5"), 140 | "utf8" 141 | ); 142 | contents.should.eql("First message\n"); 143 | }); 144 | 145 | describe("when the day changes", function() { 146 | before(function(done) { 147 | fakeNow = new Date(2012, 8, 13, 
0, 10, 12); 148 | stream.write("Second message\n", "utf8", done); 149 | }); 150 | 151 | after(async function() { 152 | await remove( 153 | path.join(__dirname, "test-date-rolling-file-stream-5..2012-09-12") 154 | ); 155 | }); 156 | 157 | describe("the number of files", function() { 158 | it("should be two", async function() { 159 | const files = await fs.readdir(__dirname); 160 | files 161 | .filter( 162 | file => file.indexOf("test-date-rolling-file-stream-5") > -1 163 | ) 164 | .should.have.length(2); 165 | }); 166 | }); 167 | 168 | describe("the file without a date", function() { 169 | it("should contain the second message", async function() { 170 | const contents = await fs.readFile( 171 | path.join(__dirname, "test-date-rolling-file-stream-5"), 172 | "utf8" 173 | ); 174 | contents.should.eql("Second message\n"); 175 | }); 176 | }); 177 | 178 | describe("the file with the date", function() { 179 | it("should contain the first message", async function() { 180 | const contents = await fs.readFile( 181 | path.join(__dirname, "test-date-rolling-file-stream-5..2012-09-12"), 182 | "utf8" 183 | ); 184 | contents.should.eql("First message\n"); 185 | }); 186 | }); 187 | }); 188 | }); 189 | 190 | describe("with alwaysIncludePattern", function() { 191 | var stream; 192 | 193 | before(async function() { 194 | fakeNow = new Date(2012, 8, 12, 11, 10, 12); 195 | await remove( 196 | path.join( 197 | __dirname, 198 | "test-date-rolling-file-stream-pattern.2012-09-12-11.log" 199 | ) 200 | ); 201 | stream = new DateRollingFileStream( 202 | path.join(__dirname, "test-date-rolling-file-stream-pattern"), 203 | "yyyy-MM-dd-hh.log", 204 | { alwaysIncludePattern: true } 205 | ); 206 | 207 | await new Promise(resolve => { 208 | setTimeout(function() { 209 | stream.write("First message\n", "utf8", () => resolve()); 210 | }, 50); 211 | }); 212 | }); 213 | 214 | after(async function() { 215 | await close(stream); 216 | await remove( 217 | path.join( 218 | __dirname, 219 | 
"test-date-rolling-file-stream-pattern.2012-09-12-11.log" 220 | ) 221 | ); 222 | }); 223 | 224 | it("should create a file with the pattern set", async function() { 225 | const contents = await fs.readFile( 226 | path.join( 227 | __dirname, 228 | "test-date-rolling-file-stream-pattern.2012-09-12-11.log" 229 | ), 230 | "utf8" 231 | ); 232 | contents.should.eql("First message\n"); 233 | }); 234 | 235 | describe("when the day changes", function() { 236 | before(function(done) { 237 | fakeNow = new Date(2012, 8, 12, 12, 10, 12); 238 | stream.write("Second message\n", "utf8", done); 239 | }); 240 | 241 | after(async function() { 242 | await remove( 243 | path.join( 244 | __dirname, 245 | "test-date-rolling-file-stream-pattern.2012-09-12-12.log" 246 | ) 247 | ); 248 | }); 249 | 250 | describe("the number of files", function() { 251 | it("should be two", async function() { 252 | const files = await fs.readdir(__dirname); 253 | files 254 | .filter( 255 | file => file.indexOf("test-date-rolling-file-stream-pattern") > -1 256 | ) 257 | .should.have.length(2); 258 | }); 259 | }); 260 | 261 | describe("the file with the later date", function() { 262 | it("should contain the second message", async function() { 263 | const contents = await fs.readFile( 264 | path.join( 265 | __dirname, 266 | "test-date-rolling-file-stream-pattern.2012-09-12-12.log" 267 | ), 268 | "utf8" 269 | ); 270 | contents.should.eql("Second message\n"); 271 | }); 272 | }); 273 | 274 | describe("the file with the date", function() { 275 | it("should contain the first message", async function() { 276 | const contents = await fs.readFile( 277 | path.join( 278 | __dirname, 279 | "test-date-rolling-file-stream-pattern.2012-09-12-11.log" 280 | ), 281 | "utf8" 282 | ); 283 | contents.should.eql("First message\n"); 284 | }); 285 | }); 286 | }); 287 | }); 288 | 289 | describe("with a pattern that evaluates to digits", function() { 290 | let stream; 291 | before(done => { 292 | fakeNow = new Date(2012, 8, 12, 0, 10, 
12); 293 | stream = new DateRollingFileStream( 294 | path.join(__dirname, "digits.log"), 295 | "yyyyMMdd" 296 | ); 297 | stream.write("First message\n", "utf8", done); 298 | }); 299 | 300 | describe("when the day changes", function() { 301 | before(function(done) { 302 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 303 | stream.write("Second message\n", "utf8", done); 304 | }); 305 | 306 | it("should be two files (it should not get confused by indexes)", async function() { 307 | const files = await fs.readdir(__dirname); 308 | var logFiles = files.filter(file => file.indexOf("digits.log") > -1); 309 | logFiles.should.have.length(2); 310 | 311 | const contents = await fs.readFile( 312 | path.join(__dirname, "digits.log.20120912"), 313 | "utf8" 314 | ); 315 | contents.should.eql("First message\n"); 316 | const c = await fs.readFile(path.join(__dirname, "digits.log"), "utf8"); 317 | c.should.eql("Second message\n"); 318 | }); 319 | }); 320 | 321 | after(async function() { 322 | await close(stream); 323 | await remove(path.join(__dirname, "digits.log")); 324 | await remove(path.join(__dirname, "digits.log.20120912")); 325 | }); 326 | }); 327 | 328 | describe("with compress option", function() { 329 | var stream; 330 | 331 | before(function(done) { 332 | fakeNow = new Date(2012, 8, 12, 0, 10, 12); 333 | stream = new DateRollingFileStream( 334 | path.join(__dirname, "compressed.log"), 335 | "yyyy-MM-dd", 336 | { compress: true } 337 | ); 338 | stream.write("First message\n", "utf8", done); 339 | }); 340 | 341 | describe("when the day changes", function() { 342 | before(function(done) { 343 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 344 | stream.write("Second message\n", "utf8", done); 345 | }); 346 | 347 | it("should be two files, one compressed", async function() { 348 | const files = await fs.readdir(__dirname); 349 | var logFiles = files.filter( 350 | file => file.indexOf("compressed.log") > -1 351 | ); 352 | logFiles.should.have.length(2); 353 | 354 | const gzipped 
= await fs.readFile( 355 | path.join(__dirname, "compressed.log.2012-09-12.gz") 356 | ); 357 | const contents = await gunzip(gzipped); 358 | contents.toString("utf8").should.eql("First message\n"); 359 | 360 | (await fs.readFile( 361 | path.join(__dirname, "compressed.log"), 362 | "utf8" 363 | )).should.eql("Second message\n"); 364 | }); 365 | }); 366 | 367 | after(async function() { 368 | await close(stream); 369 | await remove(path.join(__dirname, "compressed.log")); 370 | await remove(path.join(__dirname, "compressed.log.2012-09-12.gz")); 371 | }); 372 | }); 373 | 374 | describe("with keepFileExt option", function() { 375 | var stream; 376 | 377 | before(function(done) { 378 | fakeNow = new Date(2012, 8, 12, 0, 10, 12); 379 | stream = new DateRollingFileStream( 380 | path.join(__dirname, "keepFileExt.log"), 381 | "yyyy-MM-dd", 382 | { keepFileExt: true } 383 | ); 384 | stream.write("First message\n", "utf8", done); 385 | }); 386 | 387 | describe("when the day changes", function() { 388 | before(function(done) { 389 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 390 | stream.write("Second message\n", "utf8", done); 391 | }); 392 | 393 | it("should be two files", async function() { 394 | const files = await fs.readdir(__dirname); 395 | var logFiles = files.filter(file => file.indexOf("keepFileExt") > -1); 396 | logFiles.should.have.length(2); 397 | 398 | (await fs.readFile( 399 | path.join(__dirname, "keepFileExt.2012-09-12.log"), 400 | "utf8" 401 | )).should.eql("First message\n"); 402 | (await fs.readFile( 403 | path.join(__dirname, "keepFileExt.log"), 404 | "utf8" 405 | )).should.eql("Second message\n"); 406 | }); 407 | }); 408 | 409 | after(async function() { 410 | await close(stream); 411 | await remove(path.join(__dirname, "keepFileExt.log")); 412 | await remove(path.join(__dirname, "keepFileExt.2012-09-12.log")); 413 | }); 414 | }); 415 | 416 | describe("with compress option and keepFileExt option", function() { 417 | var stream; 418 | 419 | 
before(function(done) { 420 | fakeNow = new Date(2012, 8, 12, 0, 10, 12); 421 | stream = new DateRollingFileStream( 422 | path.join(__dirname, "compressedAndKeepExt.log"), 423 | "yyyy-MM-dd", 424 | { compress: true, keepFileExt: true } 425 | ); 426 | stream.write("First message\n", "utf8", done); 427 | }); 428 | 429 | describe("when the day changes", function() { 430 | before(function(done) { 431 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 432 | stream.write("Second message\n", "utf8", done); 433 | }); 434 | 435 | it("should be two files, one compressed", async function() { 436 | const files = await fs.readdir(__dirname); 437 | var logFiles = files.filter( 438 | file => file.indexOf("compressedAndKeepExt") > -1 439 | ); 440 | logFiles.should.have.length(2); 441 | 442 | const gzipped = await fs.readFile( 443 | path.join(__dirname, "compressedAndKeepExt.2012-09-12.log.gz") 444 | ); 445 | const contents = await gunzip(gzipped); 446 | contents.toString("utf8").should.eql("First message\n"); 447 | (await fs.readFile( 448 | path.join(__dirname, "compressedAndKeepExt.log"), 449 | "utf8" 450 | )).should.eql("Second message\n"); 451 | }); 452 | }); 453 | 454 | after(async function() { 455 | await close(stream); 456 | await remove(path.join(__dirname, "compressedAndKeepExt.log")); 457 | await remove( 458 | path.join(__dirname, "compressedAndKeepExt.2012-09-12.log.gz") 459 | ); 460 | }); 461 | }); 462 | 463 | describe("with fileNameSep option", function() { 464 | var stream; 465 | 466 | before(function(done) { 467 | fakeNow = new Date(2012, 8, 12, 0, 10, 12); 468 | stream = new DateRollingFileStream( 469 | path.join(__dirname, "fileNameSep.log"), 470 | "yyyy-MM-dd", 471 | { fileNameSep: "_" } 472 | ); 473 | stream.write("First message\n", "utf8", done); 474 | }); 475 | 476 | describe("when the day changes", function() { 477 | before(function(done) { 478 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 479 | stream.write("Second message\n", "utf8", done); 480 | }); 481 | 482 | 
it("should be two files", async function() { 483 | const files = await fs.readdir(__dirname); 484 | var logFiles = files.filter(file => file.indexOf("fileNameSep") > -1); 485 | logFiles.should.have.length(2); 486 | 487 | (await fs.readFile( 488 | path.join(__dirname, "fileNameSep.log_2012-09-12"), 489 | "utf8" 490 | )).should.eql("First message\n"); 491 | (await fs.readFile( 492 | path.join(__dirname, "fileNameSep.log"), 493 | "utf8" 494 | )).should.eql("Second message\n"); 495 | }); 496 | }); 497 | 498 | after(async function() { 499 | await close(stream); 500 | await remove(path.join(__dirname, "fileNameSep.log")); 501 | await remove(path.join(__dirname, "fileNameSep.log_2012-09-12")); 502 | }); 503 | }); 504 | 505 | describe("with fileNameSep option and keepFileExt option", function() { 506 | var stream; 507 | 508 | before(function(done) { 509 | fakeNow = new Date(2012, 8, 12, 0, 10, 12); 510 | stream = new DateRollingFileStream( 511 | path.join(__dirname, "fileNameSepAndKeepExt.log"), 512 | "yyyy-MM-dd", 513 | { fileNameSep: "_", keepFileExt: true } 514 | ); 515 | stream.write("First message\n", "utf8", done); 516 | }); 517 | 518 | describe("when the day changes", function() { 519 | before(function(done) { 520 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 521 | stream.write("Second message\n", "utf8", done); 522 | }); 523 | 524 | it("should be two files", async function() { 525 | const files = await fs.readdir(__dirname); 526 | var logFiles = files.filter(file => file.indexOf("fileNameSepAndKeepExt") > -1); 527 | logFiles.should.have.length(2); 528 | 529 | (await fs.readFile( 530 | path.join(__dirname, "fileNameSepAndKeepExt_2012-09-12.log"), 531 | "utf8" 532 | )).should.eql("First message\n"); 533 | (await fs.readFile( 534 | path.join(__dirname, "fileNameSepAndKeepExt.log"), 535 | "utf8" 536 | )).should.eql("Second message\n"); 537 | }); 538 | }); 539 | 540 | after(async function() { 541 | await close(stream); 542 | await remove(path.join(__dirname, 
"fileNameSepAndKeepExt.log")); 543 | await remove(path.join(__dirname, "fileNameSepAndKeepExt_2012-09-12.log")); 544 | }); 545 | }); 546 | 547 | describe("with fileNameSep option and compress option", function() { 548 | var stream; 549 | 550 | before(function(done) { 551 | fakeNow = new Date(2012, 8, 12, 0, 10, 12); 552 | stream = new DateRollingFileStream( 553 | path.join(__dirname, "fileNameSepAndCompressed.log"), 554 | "yyyy-MM-dd", 555 | { fileNameSep: "_", compress: true } 556 | ); 557 | stream.write("First message\n", "utf8", done); 558 | }); 559 | 560 | describe("when the day changes", function() { 561 | before(function(done) { 562 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 563 | stream.write("Second message\n", "utf8", done); 564 | }); 565 | 566 | it("should be two files, one compressed", async function() { 567 | const files = await fs.readdir(__dirname); 568 | var logFiles = files.filter( 569 | file => file.indexOf("fileNameSepAndCompressed") > -1 570 | ); 571 | logFiles.should.have.length(2); 572 | 573 | const gzipped = await fs.readFile( 574 | path.join(__dirname, "fileNameSepAndCompressed.log_2012-09-12.gz") 575 | ); 576 | const contents = await gunzip(gzipped); 577 | contents.toString("utf8").should.eql("First message\n"); 578 | (await fs.readFile( 579 | path.join(__dirname, "fileNameSepAndCompressed.log"), 580 | "utf8" 581 | )).should.eql("Second message\n"); 582 | }); 583 | }); 584 | 585 | after(async function() { 586 | await close(stream); 587 | await remove(path.join(__dirname, "fileNameSepAndCompressed.log")); 588 | await remove( 589 | path.join(__dirname, "fileNameSepAndCompressed.log_2012-09-12.gz") 590 | ); 591 | }); 592 | }); 593 | 594 | describe("with fileNameSep option, compress option and keepFileExt option", function() { 595 | var stream; 596 | 597 | before(function(done) { 598 | fakeNow = new Date(2012, 8, 12, 0, 10, 12); 599 | stream = new DateRollingFileStream( 600 | path.join(__dirname, "fileNameSepCompressedAndKeepExt.log"), 601 | 
"yyyy-MM-dd", 602 | { fileNameSep: "_", compress: true, keepFileExt: true } 603 | ); 604 | stream.write("First message\n", "utf8", done); 605 | }); 606 | 607 | describe("when the day changes", function() { 608 | before(function(done) { 609 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); 610 | stream.write("Second message\n", "utf8", done); 611 | }); 612 | 613 | it("should be two files, one compressed", async function() { 614 | const files = await fs.readdir(__dirname); 615 | var logFiles = files.filter( 616 | file => file.indexOf("fileNameSepCompressedAndKeepExt") > -1 617 | ); 618 | logFiles.should.have.length(2); 619 | 620 | const gzipped = await fs.readFile( 621 | path.join(__dirname, "fileNameSepCompressedAndKeepExt_2012-09-12.log.gz") 622 | ); 623 | const contents = await gunzip(gzipped); 624 | contents.toString("utf8").should.eql("First message\n"); 625 | (await fs.readFile( 626 | path.join(__dirname, "fileNameSepCompressedAndKeepExt.log"), 627 | "utf8" 628 | )).should.eql("Second message\n"); 629 | }); 630 | }); 631 | 632 | after(async function() { 633 | await close(stream); 634 | await remove(path.join(__dirname, "fileNameSepCompressedAndKeepExt.log")); 635 | await remove( 636 | path.join(__dirname, "fileNameSepCompressedAndKeepExt_2012-09-12.log.gz") 637 | ); 638 | }); 639 | }); 640 | 641 | describe("using deprecated daysToKeep", () => { 642 | const onWarning = process.listeners("warning").shift(); 643 | let wrapper; 644 | let stream; 645 | 646 | before(done => { 647 | const muteSelfDeprecation = (listener) => { 648 | return (warning) => { 649 | if (warning.name === "DeprecationWarning" && warning.code === "streamroller-DEP0001") { 650 | return; 651 | } else { 652 | listener(warning); 653 | } 654 | }; 655 | }; 656 | wrapper = muteSelfDeprecation(onWarning); 657 | process.prependListener("warning", wrapper); 658 | process.removeListener("warning", onWarning); 659 | done(); 660 | }); 661 | 662 | after(async () => { 663 | process.prependListener("warning", 
onWarning); 664 | process.removeListener("warning", wrapper); 665 | await close(stream); 666 | await remove(path.join(__dirname, "daysToKeep.log")); 667 | }); 668 | 669 | it("should have deprecated warning for daysToKeep", () => { 670 | process.on("warning", (warning) => { 671 | warning.name.should.eql("DeprecationWarning"); 672 | warning.code.should.eql("streamroller-DEP0001"); 673 | }); 674 | 675 | stream = new DateRollingFileStream( 676 | path.join(__dirname, "daysToKeep.log"), 677 | { daysToKeep: 4 } 678 | ); 679 | }); 680 | 681 | describe("with options.daysToKeep but not options.numBackups", () => { 682 | it("should be routed from options.daysToKeep to options.numBackups", () => { 683 | stream.options.numBackups.should.eql(stream.options.daysToKeep); 684 | }); 685 | 686 | it("should be generated into stream.options.numToKeep from options.numBackups", () => { 687 | stream.options.numToKeep.should.eql(stream.options.numBackups + 1); 688 | }); 689 | }); 690 | 691 | describe("with both options.daysToKeep and options.numBackups", function() { 692 | let stream; 693 | it("should take options.numBackups to supercede options.daysToKeep", function() { 694 | stream = new DateRollingFileStream( 695 | path.join(__dirname, "numBackups.log"), 696 | { 697 | daysToKeep: 3, 698 | numBackups: 9 699 | } 700 | ); 701 | stream.options.daysToKeep.should.not.eql(3); 702 | stream.options.daysToKeep.should.eql(9); 703 | stream.options.numBackups.should.eql(9); 704 | stream.options.numToKeep.should.eql(10); 705 | }); 706 | 707 | after(async function() { 708 | await close(stream); 709 | await remove("numBackups.log"); 710 | }); 711 | }); 712 | }); 713 | 714 | describe("with invalid number of numBackups", () => { 715 | it("should complain about negative numBackups", () => { 716 | const numBackups = -1; 717 | (() => { 718 | new DateRollingFileStream( 719 | path.join(__dirname, "numBackups.log"), 720 | { numBackups: numBackups } 721 | ); 722 | }).should.throw(`options.numBackups 
(${numBackups}) should be >= 0`); 723 | }); 724 | 725 | it("should complain about numBackups >= Number.MAX_SAFE_INTEGER", () => { 726 | const numBackups = Number.MAX_SAFE_INTEGER; 727 | (() => { 728 | new DateRollingFileStream( 729 | path.join(__dirname, "numBackups.log"), 730 | { numBackups: numBackups } 731 | ); 732 | }).should.throw(`options.numBackups (${numBackups}) should be < Number.MAX_SAFE_INTEGER`); 733 | }); 734 | }); 735 | 736 | describe("with numBackups option", function() { 737 | let stream; 738 | var numBackups = 4; 739 | var numOriginalLogs = 10; 740 | 741 | before(async function() { 742 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); // pre-req to trigger a date-change later 743 | for (let i = 0; i < numOriginalLogs; i += 1) { 744 | await fs.writeFile( 745 | path.join(__dirname, `numBackups.log.2012-09-${20-i}`), 746 | `Message on day ${i}\n`, 747 | { encoding: "utf-8" } 748 | ); 749 | } 750 | stream = new DateRollingFileStream( 751 | path.join(__dirname, "numBackups.log"), 752 | "yyyy-MM-dd", 753 | { 754 | alwaysIncludePattern: true, 755 | numBackups: numBackups 756 | } 757 | ); 758 | }); 759 | 760 | describe("when the day changes", function() { 761 | before(function(done) { 762 | fakeNow = new Date(2012, 8, 21, 0, 10, 12); // trigger a date-change 763 | stream.write("Test message\n", "utf8", done); 764 | }); 765 | 766 | it("should be numBackups + 1 files left from numOriginalLogs", async function() { 767 | const files = await fs.readdir(__dirname); 768 | var logFiles = files.filter( 769 | file => file.indexOf("numBackups.log") > -1 770 | ); 771 | logFiles.should.have.length(numBackups + 1); 772 | }); 773 | }); 774 | 775 | after(async function() { 776 | await close(stream); 777 | const files = await fs.readdir(__dirname); 778 | const logFiles = files 779 | .filter(file => file.indexOf("numBackups.log") > -1) 780 | .map(f => remove(path.join(__dirname, f))); 781 | await Promise.all(logFiles); 782 | }); 783 | }); 784 | 785 | describe("with numBackups 
and compress options", function() { 786 | let stream; 787 | const numBackups = 4; 788 | const numOriginalLogs = 10; 789 | 790 | before(async function() { 791 | fakeNow = new Date(2012, 8, 13, 0, 10, 12); // pre-req to trigger a date-change later 792 | for (let i = numOriginalLogs; i >= 0; i -= 1) { 793 | const contents = await gzip(`Message on day ${i}\n`); 794 | await fs.writeFile( 795 | path.join(__dirname, `compressedNumBackups.log.2012-09-${20-i}.gz`), 796 | contents 797 | ); 798 | } 799 | stream = new DateRollingFileStream( 800 | path.join(__dirname, "compressedNumBackups.log"), 801 | "yyyy-MM-dd", 802 | { 803 | alwaysIncludePattern: true, 804 | compress: true, 805 | numBackups: numBackups 806 | } 807 | ); 808 | }); 809 | 810 | describe("when the day changes", function() { 811 | before(function(done) { 812 | fakeNow = new Date(2012, 8, 21, 0, 10, 12); // trigger a date-change 813 | stream.write("New file message\n", "utf8", done); 814 | }); 815 | 816 | it("should be 5 files left from original 11", async function() { 817 | const files = await fs.readdir(__dirname); 818 | var logFiles = files.filter( 819 | file => file.indexOf("compressedNumBackups.log") > -1 820 | ); 821 | logFiles.should.have.length(numBackups + 1); 822 | }); 823 | }); 824 | 825 | after(async function() { 826 | await close(stream); 827 | const files = await fs.readdir(__dirname); 828 | const logFiles = files 829 | .filter(file => file.indexOf("compressedNumBackups.log") > -1) 830 | .map(f => remove(path.join(__dirname, f))); 831 | await Promise.all(logFiles); 832 | }); 833 | }); 834 | }); 835 | -------------------------------------------------------------------------------- /test/RollingFileStream-test.js: -------------------------------------------------------------------------------- 1 | const should = require("should"); 2 | 3 | const fs = require("fs-extra"), 4 | path = require("path"), 5 | util = require("util"), 6 | zlib = require("zlib"), 7 | streams = require("stream"), 8 | 
RollingFileStream = require("../lib").RollingFileStream; 9 | 10 | const gunzip = util.promisify(zlib.gunzip); 11 | const fullPath = f => path.join(__dirname, f); 12 | const remove = filename => fs.unlink(fullPath(filename)).catch(() => {}); 13 | const create = filename => fs.writeFile(fullPath(filename), "test file"); 14 | 15 | const write = (stream, data) => { 16 | return new Promise((resolve, reject) => { 17 | stream.write(data, "utf8", e => { 18 | if (e) { 19 | reject(e); 20 | } else { 21 | resolve(); 22 | } 23 | }); 24 | }); 25 | }; 26 | 27 | const writeInSequence = async (stream, messages) => { 28 | for (let i = 0; i < messages.length; i += 1) { 29 | await write(stream, messages[i] + "\n"); 30 | } 31 | return new Promise(resolve => { 32 | stream.end(resolve); 33 | }); 34 | }; 35 | 36 | const close = async (stream) => new Promise( 37 | (resolve, reject) => stream.end(e => e ? reject(e) : resolve()) 38 | ); 39 | 40 | describe("RollingFileStream", function() { 41 | describe("arguments", function() { 42 | let stream; 43 | 44 | before(async function() { 45 | await remove("test-rolling-file-stream"); 46 | stream = new RollingFileStream( 47 | path.join(__dirname, "test-rolling-file-stream"), 48 | 1024, 49 | 5 50 | ); 51 | }); 52 | 53 | after(async function() { 54 | await close(stream); 55 | await remove("test-rolling-file-stream"); 56 | }); 57 | 58 | it("should take a filename, file size (bytes), no. 
backups, return Writable", function() { 59 | stream.should.be.an.instanceOf(streams.Writable); 60 | stream.filename.should.eql( 61 | path.join(__dirname, "test-rolling-file-stream") 62 | ); 63 | stream.size.should.eql(1024); 64 | stream.backups.should.eql(5); 65 | }); 66 | 67 | it("should apply default settings to the underlying stream", function() { 68 | stream.theStream.mode.should.eql(0o600); 69 | stream.theStream.flags.should.eql("a"); 70 | }); 71 | }); 72 | 73 | describe("with stream arguments", function() { 74 | let stream; 75 | it("should pass them to the underlying stream", function() { 76 | stream = new RollingFileStream( 77 | path.join(__dirname, "test-rolling-file-stream"), 78 | 1024, 79 | 5, 80 | { mode: parseInt("0666", 8) } 81 | ); 82 | stream.theStream.mode.should.eql(parseInt("0666", 8)); 83 | }); 84 | 85 | after(async function() { 86 | await close(stream); 87 | await remove("test-rolling-file-stream"); 88 | }); 89 | }); 90 | 91 | describe("without size", function() { 92 | let stream; 93 | it("should default to undefined", function() { 94 | stream = new RollingFileStream( 95 | path.join(__dirname, "test-rolling-file-stream") 96 | ); 97 | should(stream.size).not.be.ok(); 98 | }); 99 | 100 | after(async function() { 101 | await close(stream); 102 | await remove("test-rolling-file-stream"); 103 | }); 104 | }); 105 | 106 | describe("with size 0", function() { 107 | let stream; 108 | it("should become undefined", function() { 109 | stream = new RollingFileStream( 110 | path.join(__dirname, "test-rolling-file-stream"), 111 | 0 112 | ); 113 | should(stream.size).not.be.ok(); 114 | }); 115 | 116 | after(async function() { 117 | await close(stream); 118 | await remove("test-rolling-file-stream"); 119 | }); 120 | }); 121 | 122 | describe("without number of backups", function() { 123 | let stream; 124 | it("should default to 1 backup", function() { 125 | stream = new RollingFileStream( 126 | path.join(__dirname, "test-rolling-file-stream"), 127 | 1024 128 | ); 
129 | stream.backups.should.eql(1); 130 | }); 131 | 132 | after(async function() { 133 | await close(stream); 134 | await remove("test-rolling-file-stream"); 135 | }); 136 | }); 137 | 138 | describe("with invalid number of backups", () => { 139 | it("should complain about negative backups", () => { 140 | const backups = -1; 141 | (() => { 142 | new RollingFileStream( 143 | path.join(__dirname, "test-rolling-file-stream"), 144 | 1024, 145 | backups 146 | ); 147 | }).should.throw(`options.numBackups (${backups}) should be >= 0`); 148 | }); 149 | 150 | it("should complain about backups >= Number.MAX_SAFE_INTEGER", () => { 151 | const backups = Number.MAX_SAFE_INTEGER; 152 | (() => { 153 | new RollingFileStream( 154 | path.join(__dirname, "test-rolling-file-stream"), 155 | 1024, 156 | backups 157 | ); 158 | }).should.throw(`options.numBackups (${backups}) should be < Number.MAX_SAFE_INTEGER`); 159 | }); 160 | }); 161 | 162 | describe("using backups", () => { 163 | describe("with backups but not options.numBackups", () => { 164 | let stream; 165 | it("should be routed from backups to options.numBackups", function() { 166 | stream = new RollingFileStream( 167 | path.join(__dirname, "test-rolling-file-stream"), 168 | 1024, 169 | 3 170 | ); 171 | stream.options.numBackups.should.eql(stream.backups); 172 | }); 173 | 174 | it("should be generated into stream.options.numToKeep from options.numBackups", function() { 175 | stream.options.numToKeep.should.eql(stream.options.numBackups + 1); 176 | }); 177 | 178 | after(async function() { 179 | await close(stream); 180 | await remove("test-rolling-file-stream"); 181 | }); 182 | }); 183 | 184 | describe("with both backups and options.numBackups", function() { 185 | let stream; 186 | it("should take options.numBackups to supercede backups", function() { 187 | stream = new RollingFileStream( 188 | path.join(__dirname, "test-rolling-file-stream"), 189 | 1024, 190 | 3, 191 | { numBackups: 9 } 192 | ); 193 | 
stream.backups.should.not.eql(3); 194 | stream.backups.should.eql(9); 195 | stream.options.numBackups.should.eql(9); 196 | stream.options.numToKeep.should.eql(10); 197 | }); 198 | 199 | after(async function() { 200 | await close(stream); 201 | await remove("test-rolling-file-stream"); 202 | }); 203 | }); 204 | }); 205 | 206 | describe("writing less than the file size", function() { 207 | before(async function() { 208 | await remove("test-rolling-file-stream-write-less"); 209 | const stream = new RollingFileStream( 210 | path.join(__dirname, "test-rolling-file-stream-write-less"), 211 | 100 212 | ); 213 | await writeInSequence(stream, ["cheese"]); 214 | }); 215 | 216 | after(async function() { 217 | await remove("test-rolling-file-stream-write-less"); 218 | }); 219 | 220 | it("should write to the file", async function() { 221 | const contents = await fs.readFile( 222 | path.join(__dirname, "test-rolling-file-stream-write-less"), 223 | "utf8" 224 | ); 225 | contents.should.eql("cheese\n"); 226 | }); 227 | 228 | it("should write one file", async function() { 229 | const files = await fs.readdir(__dirname); 230 | files 231 | .filter( 232 | file => file.indexOf("test-rolling-file-stream-write-less") > -1 233 | ) 234 | .should.have.length(1); 235 | }); 236 | }); 237 | 238 | describe("writing more than the file size", function() { 239 | before(async function() { 240 | await remove("test-rolling-file-stream-write-more"); 241 | await remove("test-rolling-file-stream-write-more.1"); 242 | const stream = new RollingFileStream( 243 | path.join(__dirname, "test-rolling-file-stream-write-more"), 244 | 45 245 | ); 246 | await writeInSequence( 247 | stream, 248 | [0, 1, 2, 3, 4, 5, 6].map(i => i + ".cheese") 249 | ); 250 | }); 251 | 252 | after(async function() { 253 | await remove("test-rolling-file-stream-write-more"); 254 | await remove("test-rolling-file-stream-write-more.1"); 255 | }); 256 | 257 | it("should write two files", async function() { 258 | const files = await 
fs.readdir(__dirname); 259 | files 260 | .filter( 261 | file => file.indexOf("test-rolling-file-stream-write-more") > -1 262 | ) 263 | .should.have.length(2); 264 | }); 265 | 266 | it("should write the last two log messages to the first file", async function() { 267 | const contents = await fs.readFile( 268 | path.join(__dirname, "test-rolling-file-stream-write-more"), 269 | "utf8" 270 | ); 271 | contents.should.eql("5.cheese\n6.cheese\n"); 272 | }); 273 | 274 | it("should write the first five log messages to the second file", async function() { 275 | const contents = await fs.readFile( 276 | path.join(__dirname, "test-rolling-file-stream-write-more.1"), 277 | "utf8" 278 | ); 279 | contents.should.eql("0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n"); 280 | }); 281 | }); 282 | 283 | describe("with options.compress = true", function() { 284 | before(async function() { 285 | const stream = new RollingFileStream( 286 | path.join(__dirname, "compressed-backups.log"), 287 | 30, //30 bytes max size 288 | 2, //two backup files to keep 289 | { compress: true } 290 | ); 291 | const messages = [ 292 | "This is the first log message.", 293 | "This is the second log message.", 294 | "This is the third log message.", 295 | "This is the fourth log message." 
296 | ]; 297 | await writeInSequence(stream, messages); 298 | }); 299 | 300 | it("should produce three files, with the backups compressed", async function() { 301 | const files = await fs.readdir(__dirname); 302 | const testFiles = files 303 | .filter(f => f.indexOf("compressed-backups.log") > -1) 304 | .sort(); 305 | 306 | testFiles.length.should.eql(3); 307 | testFiles.should.eql([ 308 | "compressed-backups.log", 309 | "compressed-backups.log.1.gz", 310 | "compressed-backups.log.2.gz" 311 | ]); 312 | 313 | let contents = await fs.readFile( 314 | path.join(__dirname, testFiles[0]), 315 | "utf8" 316 | ); 317 | contents.should.eql("This is the fourth log message.\n"); 318 | 319 | let gzipped = await fs.readFile(path.join(__dirname, testFiles[1])); 320 | contents = await gunzip(gzipped); 321 | contents.toString("utf8").should.eql("This is the third log message.\n"); 322 | 323 | gzipped = await fs.readFile(path.join(__dirname, testFiles[2])); 324 | contents = await gunzip(gzipped); 325 | contents.toString("utf8").should.eql("This is the second log message.\n"); 326 | }); 327 | 328 | after(function() { 329 | return Promise.all([ 330 | remove("compressed-backups.log"), 331 | remove("compressed-backups.log.1.gz"), 332 | remove("compressed-backups.log.2.gz") 333 | ]); 334 | }); 335 | }); 336 | 337 | describe("with options.keepFileExt = true", function() { 338 | before(async function() { 339 | const stream = new RollingFileStream( 340 | path.join(__dirname, "extKept-backups.log"), 341 | 30, //30 bytes max size 342 | 2, //two backup files to keep 343 | { keepFileExt: true } 344 | ); 345 | const messages = [ 346 | "This is the first log message.", 347 | "This is the second log message.", 348 | "This is the third log message.", 349 | "This is the fourth log message." 
350 | ]; 351 | await writeInSequence(stream, messages); 352 | }); 353 | 354 | it("should produce three files, with the file-extension kept", async function() { 355 | const files = await fs.readdir(__dirname); 356 | const testFiles = files 357 | .filter(f => f.indexOf("extKept-backups") > -1) 358 | .sort(); 359 | 360 | testFiles.length.should.eql(3); 361 | testFiles.should.eql([ 362 | "extKept-backups.1.log", 363 | "extKept-backups.2.log", 364 | "extKept-backups.log" 365 | ]); 366 | 367 | let contents = await fs.readFile( 368 | path.join(__dirname, testFiles[0]), 369 | "utf8" 370 | ); 371 | contents.should.eql("This is the third log message.\n"); 372 | 373 | contents = await fs.readFile(path.join(__dirname, testFiles[1]), "utf8"); 374 | contents.toString("utf8").should.eql("This is the second log message.\n"); 375 | contents = await fs.readFile(path.join(__dirname, testFiles[2]), "utf8"); 376 | contents.toString("utf8").should.eql("This is the fourth log message.\n"); 377 | }); 378 | 379 | after(function() { 380 | return Promise.all([ 381 | remove("extKept-backups.log"), 382 | remove("extKept-backups.1.log"), 383 | remove("extKept-backups.2.log") 384 | ]); 385 | }); 386 | }); 387 | 388 | describe("with options.compress = true and keepFileExt = true", function() { 389 | before(async function() { 390 | const stream = new RollingFileStream( 391 | path.join(__dirname, "compressed-backups.log"), 392 | 30, //30 bytes max size 393 | 2, //two backup files to keep 394 | { compress: true, keepFileExt: true } 395 | ); 396 | const messages = [ 397 | "This is the first log message.", 398 | "This is the second log message.", 399 | "This is the third log message.", 400 | "This is the fourth log message." 
401 | ]; 402 | await writeInSequence(stream, messages); 403 | }); 404 | 405 | it("should produce three files, with the backups compressed", async function() { 406 | const files = await fs.readdir(__dirname); 407 | const testFiles = files 408 | .filter(f => f.indexOf("compressed-backups") > -1) 409 | .sort(); 410 | 411 | testFiles.length.should.eql(3); 412 | testFiles.should.eql([ 413 | "compressed-backups.1.log.gz", 414 | "compressed-backups.2.log.gz", 415 | "compressed-backups.log" 416 | ]); 417 | 418 | let contents = await fs.readFile( 419 | path.join(__dirname, testFiles[2]), 420 | "utf8" 421 | ); 422 | contents.should.eql("This is the fourth log message.\n"); 423 | 424 | let gzipped = await fs.readFile(path.join(__dirname, testFiles[1])); 425 | contents = await gunzip(gzipped); 426 | contents.toString("utf8").should.eql("This is the second log message.\n"); 427 | gzipped = await fs.readFile(path.join(__dirname, testFiles[0])); 428 | contents = await gunzip(gzipped); 429 | contents.toString("utf8").should.eql("This is the third log message.\n"); 430 | }); 431 | 432 | after(function() { 433 | return Promise.all([ 434 | remove("compressed-backups.log"), 435 | remove("compressed-backups.1.log.gz"), 436 | remove("compressed-backups.2.log.gz") 437 | ]); 438 | }); 439 | }); 440 | 441 | describe("with options.fileNameSep = \"_\"", function() { 442 | before(async function() { 443 | const stream = new RollingFileStream( 444 | path.join(__dirname, "fileNameSep-backups.log"), 445 | 30, //30 bytes max size 446 | 2, //two backup files to keep 447 | { fileNameSep: "_" } 448 | ); 449 | const messages = [ 450 | "This is the first log message.", 451 | "This is the second log message.", 452 | "This is the third log message.", 453 | "This is the fourth log message." 
454 | ]; 455 | await writeInSequence(stream, messages); 456 | }); 457 | 458 | it("should produce three files, with the fileNameSep", async function() { 459 | const files = await fs.readdir(__dirname); 460 | const testFiles = files 461 | .filter(f => f.indexOf("fileNameSep-backups") > -1) 462 | .sort(); 463 | 464 | testFiles.length.should.eql(3); 465 | testFiles.should.eql([ 466 | "fileNameSep-backups.log_1", 467 | "fileNameSep-backups.log_2", 468 | "fileNameSep-backups.log" 469 | ].sort()); 470 | 471 | let contents = await fs.readFile( 472 | path.join(__dirname, testFiles[0]), 473 | "utf8" 474 | ); 475 | contents.should.eql("This is the fourth log message.\n"); 476 | 477 | contents = await fs.readFile(path.join(__dirname, testFiles[1]), "utf8"); 478 | contents.toString("utf8").should.eql("This is the third log message.\n"); 479 | contents = await fs.readFile(path.join(__dirname, testFiles[2]), "utf8"); 480 | contents.toString("utf8").should.eql("This is the second log message.\n"); 481 | }); 482 | 483 | after(function() { 484 | return Promise.all([ 485 | remove("fileNameSep-backups.log"), 486 | remove("fileNameSep-backups.log_1"), 487 | remove("fileNameSep-backups.log_2") 488 | ]); 489 | }); 490 | }); 491 | 492 | describe("with options.fileNameSep = \"_\" and keepFileExt = true", function() { 493 | before(async function() { 494 | const stream = new RollingFileStream( 495 | path.join(__dirname, "fileNameSep-extKept-backups.log"), 496 | 30, //30 bytes max size 497 | 2, //two backup files to keep 498 | { fileNameSep: "_", keepFileExt: true } 499 | ); 500 | const messages = [ 501 | "This is the first log message.", 502 | "This is the second log message.", 503 | "This is the third log message.", 504 | "This is the fourth log message." 
505 | ]; 506 | await writeInSequence(stream, messages); 507 | }); 508 | 509 | it("should produce three files, with the file-extension kept", async function() { 510 | const files = await fs.readdir(__dirname); 511 | const testFiles = files 512 | .filter(f => f.indexOf("fileNameSep-extKept-backups") > -1) 513 | .sort(); 514 | 515 | testFiles.length.should.eql(3); 516 | testFiles.should.eql([ 517 | "fileNameSep-extKept-backups_1.log", 518 | "fileNameSep-extKept-backups_2.log", 519 | "fileNameSep-extKept-backups.log" 520 | ].sort()); 521 | 522 | let contents = await fs.readFile( 523 | path.join(__dirname, testFiles[0]), 524 | "utf8" 525 | ); 526 | contents.should.eql("This is the fourth log message.\n"); 527 | 528 | contents = await fs.readFile(path.join(__dirname, testFiles[1]), "utf8"); 529 | contents.toString("utf8").should.eql("This is the third log message.\n"); 530 | contents = await fs.readFile(path.join(__dirname, testFiles[2]), "utf8"); 531 | contents.toString("utf8").should.eql("This is the second log message.\n"); 532 | }); 533 | 534 | after(function() { 535 | return Promise.all([ 536 | remove("fileNameSep-extKept-backups.log"), 537 | remove("fileNameSep-extKept-backups_1.log"), 538 | remove("fileNameSep-extKept-backups_2.log") 539 | ]); 540 | }); 541 | }); 542 | 543 | describe("with options.fileNameSep = \"_\" and options.compress = true", function() { 544 | before(async function() { 545 | const stream = new RollingFileStream( 546 | path.join(__dirname, "compressed-backups.log"), 547 | 30, //30 bytes max size 548 | 2, //two backup files to keep 549 | { fileNameSep: "_", compress: true } 550 | ); 551 | const messages = [ 552 | "This is the first log message.", 553 | "This is the second log message.", 554 | "This is the third log message.", 555 | "This is the fourth log message." 
556 | ]; 557 | await writeInSequence(stream, messages); 558 | }); 559 | 560 | it("should produce three files, with the backups compressed", async function() { 561 | const files = await fs.readdir(__dirname); 562 | const testFiles = files 563 | .filter(f => f.indexOf("compressed-backups") > -1) 564 | .sort(); 565 | 566 | testFiles.length.should.eql(3); 567 | testFiles.should.eql([ 568 | "compressed-backups.log_1.gz", 569 | "compressed-backups.log_2.gz", 570 | "compressed-backups.log" 571 | ].sort()); 572 | 573 | let contents = await fs.readFile( 574 | path.join(__dirname, testFiles[0]), 575 | "utf8" 576 | ); 577 | contents.should.eql("This is the fourth log message.\n"); 578 | 579 | let gzipped = await fs.readFile(path.join(__dirname, testFiles[1])); 580 | contents = await gunzip(gzipped); 581 | contents.toString("utf8").should.eql("This is the third log message.\n"); 582 | gzipped = await fs.readFile(path.join(__dirname, testFiles[2])); 583 | contents = await gunzip(gzipped); 584 | contents.toString("utf8").should.eql("This is the second log message.\n"); 585 | }); 586 | 587 | after(function() { 588 | return Promise.all([ 589 | remove("compressed-backups.log"), 590 | remove("compressed-backups.log_1.gz"), 591 | remove("compressed-backups.log_2.gz") 592 | ]); 593 | }); 594 | }); 595 | 596 | describe("with options.fileNameSep = \"_\", options.compress = true and keepFileExt = true", function() { 597 | before(async function() { 598 | const stream = new RollingFileStream( 599 | path.join(__dirname, "compressed-backups.log"), 600 | 30, //30 bytes max size 601 | 2, //two backup files to keep 602 | { fileNameSep: "_", compress: true, keepFileExt: true } 603 | ); 604 | const messages = [ 605 | "This is the first log message.", 606 | "This is the second log message.", 607 | "This is the third log message.", 608 | "This is the fourth log message." 
609 | ]; 610 | await writeInSequence(stream, messages); 611 | }); 612 | 613 | it("should produce three files, with the backups compressed", async function() { 614 | const files = await fs.readdir(__dirname); 615 | const testFiles = files 616 | .filter(f => f.indexOf("compressed-backups") > -1) 617 | .sort(); 618 | 619 | testFiles.length.should.eql(3); 620 | testFiles.should.eql([ 621 | "compressed-backups_1.log.gz", 622 | "compressed-backups_2.log.gz", 623 | "compressed-backups.log" 624 | ].sort()); 625 | 626 | let contents = await fs.readFile( 627 | path.join(__dirname, testFiles[0]), 628 | "utf8" 629 | ); 630 | contents.should.eql("This is the fourth log message.\n"); 631 | 632 | let gzipped = await fs.readFile(path.join(__dirname, testFiles[1])); 633 | contents = await gunzip(gzipped); 634 | contents.toString("utf8").should.eql("This is the third log message.\n"); 635 | gzipped = await fs.readFile(path.join(__dirname, testFiles[2])); 636 | contents = await gunzip(gzipped); 637 | contents.toString("utf8").should.eql("This is the second log message.\n"); 638 | }); 639 | 640 | after(function() { 641 | return Promise.all([ 642 | remove("compressed-backups.log"), 643 | remove("compressed-backups_1.log.gz"), 644 | remove("compressed-backups_2.log.gz") 645 | ]); 646 | }); 647 | }); 648 | 649 | describe("when many files already exist", function() { 650 | before(async function() { 651 | await Promise.all([ 652 | remove("test-rolling-stream-with-existing-files.11"), 653 | remove("test-rolling-stream-with-existing-files.20"), 654 | remove("test-rolling-stream-with-existing-files.-1"), 655 | remove("test-rolling-stream-with-existing-files.1.1"), 656 | remove("test-rolling-stream-with-existing-files.1") 657 | ]); 658 | await Promise.all([ 659 | create("test-rolling-stream-with-existing-files.11"), 660 | create("test-rolling-stream-with-existing-files.20"), 661 | create("test-rolling-stream-with-existing-files.-1"), 662 | create("test-rolling-stream-with-existing-files.1.1"), 
663 | create("test-rolling-stream-with-existing-files.1") 664 | ]); 665 | 666 | const stream = new RollingFileStream( 667 | path.join(__dirname, "test-rolling-stream-with-existing-files"), 668 | 18, 669 | 5 670 | ); 671 | 672 | await writeInSequence( 673 | stream, 674 | [0, 1, 2, 3, 4, 5, 6].map(i => i + ".cheese") 675 | ); 676 | }); 677 | 678 | after(function() { 679 | return Promise.all( 680 | [ 681 | "test-rolling-stream-with-existing-files.-1", 682 | "test-rolling-stream-with-existing-files", 683 | "test-rolling-stream-with-existing-files.1.1", 684 | "test-rolling-stream-with-existing-files.0", 685 | "test-rolling-stream-with-existing-files.1", 686 | "test-rolling-stream-with-existing-files.2", 687 | "test-rolling-stream-with-existing-files.3", 688 | "test-rolling-stream-with-existing-files.4", 689 | "test-rolling-stream-with-existing-files.5", 690 | "test-rolling-stream-with-existing-files.6", 691 | "test-rolling-stream-with-existing-files.11", 692 | "test-rolling-stream-with-existing-files.20" 693 | ].map(remove) 694 | ); 695 | }); 696 | 697 | it("should roll the files, removing the highest indices", async function() { 698 | const files = await fs.readdir(__dirname); 699 | files.should.containEql("test-rolling-stream-with-existing-files"); 700 | files.should.containEql("test-rolling-stream-with-existing-files.1"); 701 | files.should.containEql("test-rolling-stream-with-existing-files.2"); 702 | files.should.containEql("test-rolling-stream-with-existing-files.3"); 703 | files.should.containEql("test-rolling-stream-with-existing-files.4"); 704 | }); 705 | }); 706 | 707 | // in windows, you can't delete a directory if there is an open file handle 708 | if (process.platform !== "win32") { 709 | 710 | describe("when the directory gets deleted", function() { 711 | var stream; 712 | before(function(done) { 713 | stream = new RollingFileStream( 714 | path.join("subdir", "test-rolling-file-stream"), 715 | 5, 716 | 5 717 | ); 718 | stream.write("initial", "utf8", 
done); 719 | }); 720 | 721 | after(async () => { 722 | await fs.unlink(path.join("subdir", "test-rolling-file-stream")); 723 | await fs.rmdir("subdir"); 724 | }); 725 | 726 | it("handles directory deletion gracefully", async function() { 727 | stream.theStream.on("error", e => { 728 | throw e; 729 | }); 730 | 731 | await fs.unlink(path.join("subdir", "test-rolling-file-stream")); 732 | await fs.rmdir("subdir"); 733 | await new Promise(resolve => stream.write("rollover", "utf8", resolve)); 734 | await close(stream); 735 | (await fs.readFile( 736 | path.join("subdir", "test-rolling-file-stream"), 737 | "utf8" 738 | )).should.eql("rollover"); 739 | }); 740 | }); 741 | } 742 | 743 | }); 744 | -------------------------------------------------------------------------------- /test/RollingFileWriteStream-test.js: -------------------------------------------------------------------------------- 1 | const should = require("should"); 2 | 3 | const path = require("path"); 4 | const zlib = require("zlib"); 5 | const stream = require("stream"); 6 | const fs = require("fs-extra"); 7 | const proxyquire = require("proxyquire").noPreserveCache(); 8 | 9 | let fakeNow = new Date(2012, 8, 12, 10, 37, 11); 10 | const mockNow = () => fakeNow; 11 | const RollingFileWriteStream = proxyquire("../lib/RollingFileWriteStream", { 12 | "./now": mockNow 13 | }); 14 | let fakedFsDate = fakeNow; 15 | const mockFs = require("fs-extra"); 16 | const oldStatSync = mockFs.statSync; 17 | mockFs.statSync = fd => { 18 | const result = oldStatSync(fd); 19 | result.mtime = fakedFsDate; 20 | return result; 21 | }; 22 | 23 | function generateTestFile(fileName) { 24 | const dirName = path.join( 25 | __dirname, 26 | "tmp_" + Math.floor(Math.random() * new Date()) 27 | ); 28 | fileName = fileName || "ignored.log"; 29 | const fileNameObj = path.parse(fileName); 30 | return { 31 | dir: dirName, 32 | base: fileNameObj.base, 33 | name: fileNameObj.name, 34 | ext: fileNameObj.ext, 35 | path: path.join(dirName, 
fileName) 36 | }; 37 | } 38 | 39 | function resetTime() { 40 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 41 | fakedFsDate = fakeNow; 42 | } 43 | 44 | describe("RollingFileWriteStream", () => { 45 | beforeEach(() => { 46 | resetTime(); 47 | }); 48 | 49 | after(() => { 50 | fs.readdirSync(__dirname) 51 | .filter(f => f.startsWith("tmp_")) 52 | .forEach(f => fs.removeSync(path.join(__dirname, f))); 53 | }); 54 | 55 | describe("with no arguments", () => { 56 | it("should throw an error", () => { 57 | (() => new RollingFileWriteStream()).should.throw( 58 | /Invalid filename:/i 59 | ); 60 | }); 61 | }); 62 | 63 | describe("with directory as filename", () => { 64 | it("should throw an error", () => { 65 | (() => new RollingFileWriteStream("." + path.sep)).should.throw( 66 | /Filename is a directory:/i 67 | ); 68 | }); 69 | }); 70 | 71 | describe("with invalid options", () => { 72 | after(done => { 73 | fs.remove("filename", done); 74 | }); 75 | 76 | it("should complain about a negative maxSize", () => { 77 | (() => { 78 | new RollingFileWriteStream("filename", { maxSize: -3 }); 79 | }).should.throw("options.maxSize (-3) should be > 0"); 80 | }); 81 | 82 | it("should complain about a negative numToKeep", () => { 83 | (() => { 84 | new RollingFileWriteStream("filename", { numToKeep: -3 }); 85 | }).should.throw("options.numToKeep (-3) should be > 0"); 86 | (() => { 87 | new RollingFileWriteStream("filename", { numToKeep: 0 }); 88 | }).should.throw("options.numToKeep (0) should be > 0"); 89 | }); 90 | }); 91 | 92 | describe("with default arguments", () => { 93 | const fileObj = generateTestFile(); 94 | let s; 95 | 96 | before(() => { 97 | s = new RollingFileWriteStream(fileObj.path); 98 | }); 99 | 100 | after(() => { 101 | s.end(() => fs.removeSync(fileObj.dir)); 102 | }); 103 | 104 | it("should take a filename and options, return Writable", () => { 105 | s.should.be.an.instanceOf(stream.Writable); 106 | s.currentFileStream.path.should.eql(fileObj.path); 107 | 
s.currentFileStream.mode.should.eql(0o600); 108 | s.currentFileStream.flags.should.eql("a"); 109 | }); 110 | 111 | it("should apply default options", () => { 112 | should(s.options.maxSize).not.be.ok(); 113 | s.options.encoding.should.eql("utf8"); 114 | s.options.mode.should.eql(0o600); 115 | s.options.flags.should.eql("a"); 116 | s.options.compress.should.eql(false); 117 | s.options.keepFileExt.should.eql(false); 118 | }); 119 | }); 120 | 121 | describe("with tilde expansion in filename", () => { 122 | let s; 123 | const fileName = "tmpTilde.log"; 124 | const expandedPath = path.join(__dirname, fileName); 125 | 126 | before(() => { 127 | const RollingFileWriteStream = proxyquire("../lib/RollingFileWriteStream", { 128 | "os": { 129 | homedir() { 130 | return __dirname; 131 | } 132 | } 133 | }); 134 | s = new RollingFileWriteStream(path.join("~", fileName)); 135 | }); 136 | 137 | after(() => { 138 | s.end(() => fs.removeSync(expandedPath)); 139 | }); 140 | 141 | it("should expand tilde to create in home directory", () => { 142 | s.currentFileStream.path.should.eql(expandedPath); 143 | fs.existsSync(expandedPath).should.be.true(); 144 | }); 145 | }); 146 | 147 | describe("with 5 maxSize, rotating daily", () => { 148 | const fileObj = generateTestFile("noExtension"); 149 | let s; 150 | 151 | before(async () => { 152 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 153 | s = new RollingFileWriteStream(fileObj.path, { 154 | pattern: "yyyy-MM-dd", 155 | maxSize: 5 156 | }); 157 | const flows = Array.from(Array(38).keys()).map(i => () => { 158 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 159 | return new Promise(resolve => { 160 | s.write(i.toString(), "utf8", () => resolve()); 161 | }); 162 | }); 163 | for (let i = 0; i < flows.length; i += 1) { 164 | await flows[i](); 165 | } 166 | }); 167 | 168 | after(done => { 169 | s.end(() => { 170 | fs.removeSync(fileObj.dir); 171 | done(); 172 | }); 173 | }); 174 | 175 | it("should rotate using filename with 
no extension", () => { 176 | const files = fs.readdirSync(fileObj.dir); 177 | const expectedFileList = [ 178 | fileObj.base, //353637 179 | fileObj.base + ".2012-09-12.1", // 01234 180 | fileObj.base + ".2012-09-13.1", // 56789 181 | fileObj.base + ".2012-09-14.2", // 101112 182 | fileObj.base + ".2012-09-14.1", // 1314 183 | fileObj.base + ".2012-09-15.2", // 151617 184 | fileObj.base + ".2012-09-15.1", // 1819 185 | fileObj.base + ".2012-09-16.2", // 202122 186 | fileObj.base + ".2012-09-16.1", // 2324 187 | fileObj.base + ".2012-09-17.2", // 252627 188 | fileObj.base + ".2012-09-17.1", // 2829 189 | fileObj.base + ".2012-09-18.2", // 303132 190 | fileObj.base + ".2012-09-18.1" // 3334 191 | ]; 192 | files.should.containDeep(expectedFileList); 193 | files.length.should.equal(expectedFileList.length); 194 | fs.readFileSync(path.format(fileObj)) 195 | .toString() 196 | .should.equal("353637"); 197 | fs.readFileSync( 198 | path.format( 199 | Object.assign({}, fileObj, { 200 | base: fileObj.base + ".2012-09-12.1" 201 | }) 202 | ) 203 | ) 204 | .toString() 205 | .should.equal("01234"); 206 | fs.readFileSync( 207 | path.format( 208 | Object.assign({}, fileObj, { 209 | base: fileObj.base + ".2012-09-13.1" 210 | }) 211 | ) 212 | ) 213 | .toString() 214 | .should.equal("56789"); 215 | fs.readFileSync( 216 | path.format( 217 | Object.assign({}, fileObj, { 218 | base: fileObj.base + ".2012-09-14.2" 219 | }) 220 | ) 221 | ) 222 | .toString() 223 | .should.equal("101112"); 224 | fs.readFileSync( 225 | path.format( 226 | Object.assign({}, fileObj, { 227 | base: fileObj.base + ".2012-09-14.1" 228 | }) 229 | ) 230 | ) 231 | .toString() 232 | .should.equal("1314"); 233 | fs.readFileSync( 234 | path.format( 235 | Object.assign({}, fileObj, { 236 | base: fileObj.base + ".2012-09-15.2" 237 | }) 238 | ) 239 | ) 240 | .toString() 241 | .should.equal("151617"); 242 | fs.readFileSync( 243 | path.format( 244 | Object.assign({}, fileObj, { 245 | base: fileObj.base + ".2012-09-15.1" 246 | 
}) 247 | ) 248 | ) 249 | .toString() 250 | .should.equal("1819"); 251 | fs.readFileSync( 252 | path.format( 253 | Object.assign({}, fileObj, { 254 | base: fileObj.base + ".2012-09-16.2" 255 | }) 256 | ) 257 | ) 258 | .toString() 259 | .should.equal("202122"); 260 | fs.readFileSync( 261 | path.format( 262 | Object.assign({}, fileObj, { 263 | base: fileObj.base + ".2012-09-16.1" 264 | }) 265 | ) 266 | ) 267 | .toString() 268 | .should.equal("2324"); 269 | fs.readFileSync( 270 | path.format( 271 | Object.assign({}, fileObj, { 272 | base: fileObj.base + ".2012-09-17.2" 273 | }) 274 | ) 275 | ) 276 | .toString() 277 | .should.equal("252627"); 278 | fs.readFileSync( 279 | path.format( 280 | Object.assign({}, fileObj, { 281 | base: fileObj.base + ".2012-09-17.1" 282 | }) 283 | ) 284 | ) 285 | .toString() 286 | .should.equal("2829"); 287 | fs.readFileSync( 288 | path.format( 289 | Object.assign({}, fileObj, { 290 | base: fileObj.base + ".2012-09-18.2" 291 | }) 292 | ) 293 | ) 294 | .toString() 295 | .should.equal("303132"); 296 | fs.readFileSync( 297 | path.format( 298 | Object.assign({}, fileObj, { 299 | base: fileObj.base + ".2012-09-18.1" 300 | }) 301 | ) 302 | ) 303 | .toString() 304 | .should.equal("3334"); 305 | }); 306 | }); 307 | 308 | describe("with default arguments and recreated in the same day", () => { 309 | const fileObj = generateTestFile(); 310 | let s; 311 | 312 | before(async () => { 313 | const flows = Array.from(Array(3).keys()).map(() => () => { 314 | s = new RollingFileWriteStream(fileObj.path); 315 | return new Promise(resolve => { 316 | s.end("abc", "utf8", () => resolve()); 317 | }); 318 | }); 319 | for (let i = 0; i < flows.length; i += 1) { 320 | await flows[i](); 321 | } 322 | }); 323 | 324 | after(() => { 325 | fs.removeSync(fileObj.dir); 326 | }); 327 | 328 | it("should have only 1 file", () => { 329 | const files = fs.readdirSync(fileObj.dir); 330 | const expectedFileList = [fileObj.base]; 331 | files.should.containDeep(expectedFileList); 332 
| files.length.should.equal(expectedFileList.length); 333 | fs.readFileSync( 334 | path.format( 335 | Object.assign({}, fileObj, { 336 | base: fileObj.base 337 | }) 338 | ) 339 | ) 340 | .toString() 341 | .should.equal("abcabcabc"); 342 | }); 343 | }); 344 | 345 | describe("with 5 maxSize, using filename with extension", () => { 346 | const fileObj = generateTestFile("withExtension.log"); 347 | let s; 348 | 349 | before(async () => { 350 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 351 | s = new RollingFileWriteStream(fileObj.path, { 352 | pattern: "yyyy-MM-dd", 353 | maxSize: 5 354 | }); 355 | const flows = Array.from(Array(38).keys()).map(i => () => { 356 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 10, 10), 10, 37, 11); 357 | return new Promise(resolve => { 358 | s.write(i.toString(), "utf8", () => resolve()); 359 | }); 360 | }); 361 | for (let i = 0; i < flows.length; i += 1) { 362 | await flows[i](); 363 | } 364 | }); 365 | 366 | after(done => { 367 | s.end(() => { 368 | fs.removeSync(fileObj.dir); 369 | done(); 370 | }); 371 | }); 372 | 373 | it("should rotate files within the day, and when the day changes", () => { 374 | const files = fs.readdirSync(fileObj.dir); 375 | const expectedFileList = [ 376 | fileObj.base, //3637 377 | fileObj.base + ".2012-09-12.2", //01234 378 | fileObj.base + ".2012-09-12.1", //56789 379 | fileObj.base + ".2012-09-13.4", //101112 380 | fileObj.base + ".2012-09-13.3", //131415 381 | fileObj.base + ".2012-09-13.2", //161718 382 | fileObj.base + ".2012-09-13.1", //19 383 | fileObj.base + ".2012-09-14.4", //202122 384 | fileObj.base + ".2012-09-14.3", //232425 385 | fileObj.base + ".2012-09-14.2", //262728 386 | fileObj.base + ".2012-09-14.1", //29 387 | fileObj.base + ".2012-09-15.2", //303132 388 | fileObj.base + ".2012-09-15.1" //333435 389 | ]; 390 | files.should.containDeep(expectedFileList); 391 | files.length.should.equal(expectedFileList.length); 392 | fs.readFileSync(path.format(fileObj)) 393 | .toString() 394 | 
.should.equal("3637"); 395 | fs.readFileSync( 396 | path.format( 397 | Object.assign({}, fileObj, { 398 | base: fileObj.base + ".2012-09-12.2" 399 | }) 400 | ) 401 | ) 402 | .toString() 403 | .should.equal("01234"); 404 | fs.readFileSync( 405 | path.format( 406 | Object.assign({}, fileObj, { 407 | base: fileObj.base + ".2012-09-12.1" 408 | }) 409 | ) 410 | ) 411 | .toString() 412 | .should.equal("56789"); 413 | fs.readFileSync( 414 | path.format( 415 | Object.assign({}, fileObj, { 416 | base: fileObj.base + ".2012-09-13.4" 417 | }) 418 | ) 419 | ) 420 | .toString() 421 | .should.equal("101112"); 422 | fs.readFileSync( 423 | path.format( 424 | Object.assign({}, fileObj, { 425 | base: fileObj.base + ".2012-09-13.3" 426 | }) 427 | ) 428 | ) 429 | .toString() 430 | .should.equal("131415"); 431 | fs.readFileSync( 432 | path.format( 433 | Object.assign({}, fileObj, { 434 | base: fileObj.base + ".2012-09-13.2" 435 | }) 436 | ) 437 | ) 438 | .toString() 439 | .should.equal("161718"); 440 | fs.readFileSync( 441 | path.format( 442 | Object.assign({}, fileObj, { 443 | base: fileObj.base + ".2012-09-13.1" 444 | }) 445 | ) 446 | ) 447 | .toString() 448 | .should.equal("19"); 449 | fs.readFileSync( 450 | path.format( 451 | Object.assign({}, fileObj, { 452 | base: fileObj.base + ".2012-09-14.4" 453 | }) 454 | ) 455 | ) 456 | .toString() 457 | .should.equal("202122"); 458 | fs.readFileSync( 459 | path.format( 460 | Object.assign({}, fileObj, { 461 | base: fileObj.base + ".2012-09-14.3" 462 | }) 463 | ) 464 | ) 465 | .toString() 466 | .should.equal("232425"); 467 | fs.readFileSync( 468 | path.format( 469 | Object.assign({}, fileObj, { 470 | base: fileObj.base + ".2012-09-14.2" 471 | }) 472 | ) 473 | ) 474 | .toString() 475 | .should.equal("262728"); 476 | fs.readFileSync( 477 | path.format( 478 | Object.assign({}, fileObj, { 479 | base: fileObj.base + ".2012-09-14.1" 480 | }) 481 | ) 482 | ) 483 | .toString() 484 | .should.equal("29"); 485 | fs.readFileSync( 486 | path.format( 487 
| Object.assign({}, fileObj, { 488 | base: fileObj.base + ".2012-09-15.2" 489 | }) 490 | ) 491 | ) 492 | .toString() 493 | .should.equal("303132"); 494 | fs.readFileSync( 495 | path.format( 496 | Object.assign({}, fileObj, { 497 | base: fileObj.base + ".2012-09-15.1" 498 | }) 499 | ) 500 | ) 501 | .toString() 502 | .should.equal("333435"); 503 | }); 504 | }); 505 | 506 | describe("with 5 maxSize and 3 backups limit", () => { 507 | const fileObj = generateTestFile(); 508 | let s; 509 | 510 | before(async () => { 511 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 512 | s = new RollingFileWriteStream(fileObj.path, { 513 | maxSize: 5, 514 | numToKeep: 4 515 | }); 516 | const flows = Array.from(Array(38).keys()).map(i => () => { 517 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5), 10, 37, 11); 518 | return new Promise(resolve => { 519 | s.write(i.toString(), "utf8", () => resolve()); 520 | }); 521 | }); 522 | for (let i = 0; i < flows.length; i += 1) { 523 | await flows[i](); 524 | } 525 | }); 526 | 527 | after(done => { 528 | s.end(() => { 529 | fs.removeSync(fileObj.dir); 530 | done(); 531 | }); 532 | }); 533 | 534 | it("should rotate with at most 3 backup files not including the hot one", () => { 535 | const files = fs.readdirSync(fileObj.dir); 536 | const expectedFileList = [ 537 | fileObj.base, 538 | fileObj.base + ".1", 539 | fileObj.base + ".2", 540 | fileObj.base + ".3" 541 | ]; 542 | files.should.containDeep(expectedFileList); 543 | files.length.should.equal(expectedFileList.length); 544 | fs.readFileSync(path.format(fileObj)) 545 | .toString() 546 | .should.equal("37"); 547 | fs.readFileSync( 548 | path.format( 549 | Object.assign({}, fileObj, { 550 | base: fileObj.base + ".1" 551 | }) 552 | ) 553 | ) 554 | .toString() 555 | .should.equal("343536"); 556 | fs.readFileSync( 557 | path.format( 558 | Object.assign({}, fileObj, { 559 | base: fileObj.base + ".2" 560 | }) 561 | ) 562 | ) 563 | .toString() 564 | .should.equal("313233"); 565 | fs.readFileSync( 566 | 
path.format( 567 | Object.assign({}, fileObj, { 568 | base: fileObj.base + ".3" 569 | }) 570 | ) 571 | ) 572 | .toString() 573 | .should.equal("282930"); 574 | }); 575 | }); 576 | 577 | describe("with 5 maxSize and 3 backups limit, rotating daily", () => { 578 | const fileObj = generateTestFile(); 579 | let s; 580 | 581 | before(async () => { 582 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 583 | s = new RollingFileWriteStream(fileObj.path, { 584 | maxSize: 5, 585 | pattern: "yyyy-MM-dd", 586 | numToKeep: 4 587 | }); 588 | const flows = Array.from(Array(38).keys()).map(i => () => { 589 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 10), 10, 37, 11); 590 | return new Promise(resolve => { 591 | s.write(i.toString(), "utf8", () => resolve()); 592 | }); 593 | }); 594 | for (let i = 0; i < flows.length; i += 1) { 595 | await flows[i](); 596 | } 597 | }); 598 | 599 | after(done => { 600 | s.end(() => { 601 | fs.removeSync(fileObj.dir); 602 | done(); 603 | }); 604 | }); 605 | 606 | it("should rotate with at most 3 backup files not including the hot one", () => { 607 | const files = fs.readdirSync(fileObj.dir); 608 | const expectedFileList = [ 609 | fileObj.base, //3637 610 | fileObj.base + ".2012-09-14.1", //29 611 | fileObj.base + ".2012-09-15.2", //303132 612 | fileObj.base + ".2012-09-15.1" //333435 613 | ]; 614 | files.should.containDeep(expectedFileList); 615 | files.length.should.equal(expectedFileList.length); 616 | fs.readFileSync(path.format(fileObj)) 617 | .toString() 618 | .should.equal("3637"); 619 | fs.readFileSync( 620 | path.format( 621 | Object.assign({}, fileObj, { 622 | base: fileObj.base + ".2012-09-15.1" 623 | }) 624 | ) 625 | ) 626 | .toString() 627 | .should.equal("333435"); 628 | fs.readFileSync( 629 | path.format( 630 | Object.assign({}, fileObj, { 631 | base: fileObj.base + ".2012-09-15.2" 632 | }) 633 | ) 634 | ) 635 | .toString() 636 | .should.equal("303132"); 637 | fs.readFileSync( 638 | path.format( 639 | Object.assign({}, fileObj, { 640 | 
base: fileObj.base + ".2012-09-14.1" 641 | }) 642 | ) 643 | ) 644 | .toString() 645 | .should.equal("29"); 646 | }); 647 | }); 648 | 649 | describe("with date pattern dd-MM-yyyy", () => { 650 | const fileObj = generateTestFile(); 651 | let s; 652 | 653 | before(async () => { 654 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 655 | s = new RollingFileWriteStream(fileObj.path, { 656 | maxSize: 5, 657 | pattern: "dd-MM-yyyy" 658 | }); 659 | const flows = Array.from(Array(8).keys()).map(i => () => { 660 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 661 | return new Promise(resolve => { 662 | s.write(i.toString(), "utf8", () => resolve()); 663 | }); 664 | }); 665 | for (let i = 0; i < flows.length; i += 1) { 666 | await flows[i](); 667 | } 668 | }); 669 | 670 | after(done => { 671 | s.end(() => { 672 | fs.remove(fileObj.dir, done); 673 | }); 674 | }); 675 | 676 | it("should rotate with date pattern dd-MM-yyyy in the file name", () => { 677 | const files = fs.readdirSync(fileObj.dir); 678 | const expectedFileList = [fileObj.base, fileObj.base + ".12-09-2012.1"]; 679 | files.should.containDeep(expectedFileList); 680 | files.length.should.equal(expectedFileList.length); 681 | fs.readFileSync(path.format(fileObj)) 682 | .toString() 683 | .should.equal("567"); 684 | fs.readFileSync( 685 | path.format( 686 | Object.assign({}, fileObj, { 687 | base: fileObj.base + ".12-09-2012.1" 688 | }) 689 | ) 690 | ) 691 | .toString() 692 | .should.equal("01234"); 693 | }); 694 | }); 695 | 696 | describe("with compress true", () => { 697 | const fileObj = generateTestFile(); 698 | let s; 699 | 700 | before(async () => { 701 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 702 | s = new RollingFileWriteStream(fileObj.path, { 703 | maxSize: 5, 704 | pattern: "yyyy-MM-dd", 705 | compress: true 706 | }); 707 | const flows = Array.from(Array(8).keys()).map(i => () => { 708 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 709 | return new Promise(resolve 
=> { 710 | s.write(i.toString(), "utf8", () => resolve()); 711 | }); 712 | }); 713 | for (let i = 0; i < flows.length; i += 1) { 714 | await flows[i](); 715 | } 716 | }); 717 | 718 | after(done => { 719 | s.end(() => { 720 | fs.removeSync(fileObj.dir); 721 | done(); 722 | }); 723 | }); 724 | 725 | it("should rotate with gunzip", () => { 726 | const files = fs.readdirSync(fileObj.dir); 727 | const expectedFileList = [ 728 | fileObj.base, 729 | fileObj.base + ".2012-09-12.1.gz" 730 | ]; 731 | files.should.containDeep(expectedFileList); 732 | files.length.should.equal(expectedFileList.length); 733 | 734 | fs.readFileSync(path.format(fileObj)) 735 | .toString() 736 | .should.equal("567"); 737 | const content = fs.readFileSync( 738 | path.format( 739 | Object.assign({}, fileObj, { 740 | base: fileObj.base + ".2012-09-12.1.gz" 741 | }) 742 | ) 743 | ); 744 | zlib 745 | .gunzipSync(content) 746 | .toString() 747 | .should.equal("01234"); 748 | }); 749 | }); 750 | 751 | describe("with keepFileExt", () => { 752 | const fileObj = generateTestFile("keepFileExt.log"); 753 | let s; 754 | 755 | before(async () => { 756 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 757 | s = new RollingFileWriteStream(fileObj.path, { 758 | pattern: "yyyy-MM-dd", 759 | maxSize: 5, 760 | keepFileExt: true 761 | }); 762 | const flows = Array.from(Array(8).keys()).map(i => () => { 763 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 764 | return new Promise(resolve => { 765 | s.write(i.toString(), "utf8", () => resolve()); 766 | }); 767 | }); 768 | for (let i = 0; i < flows.length; i += 1) { 769 | await flows[i](); 770 | } 771 | }); 772 | 773 | after(done => { 774 | s.end(() => { 775 | fs.removeSync(fileObj.dir); 776 | done(); 777 | }); 778 | }); 779 | 780 | it("should rotate with the same extension", () => { 781 | const files = fs.readdirSync(fileObj.dir); 782 | const expectedFileList = [ 783 | fileObj.base, 784 | fileObj.name + ".2012-09-12.1.log" 785 | ]; 786 | 
files.should.containDeep(expectedFileList); 787 | files.length.should.equal(expectedFileList.length); 788 | 789 | fs.readFileSync(path.format(fileObj)) 790 | .toString() 791 | .should.equal("567"); 792 | fs.readFileSync( 793 | path.format({ 794 | dir: fileObj.dir, 795 | base: fileObj.name + ".2012-09-12.1" + fileObj.ext 796 | }) 797 | ) 798 | .toString() 799 | .should.equal("01234"); 800 | }); 801 | }); 802 | 803 | describe("with keepFileExt and compress", () => { 804 | const fileObj = generateTestFile("keepFileExt.log"); 805 | let s; 806 | 807 | before(async () => { 808 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 809 | s = new RollingFileWriteStream(fileObj.path, { 810 | maxSize: 5, 811 | pattern: "yyyy-MM-dd", 812 | keepFileExt: true, 813 | compress: true 814 | }); 815 | const flows = Array.from(Array(8).keys()).map(i => () => { 816 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 817 | return new Promise(resolve => { 818 | s.write(i.toString(), "utf8", () => resolve()); 819 | }); 820 | }); 821 | for (let i = 0; i < flows.length; i += 1) { 822 | await flows[i](); 823 | } 824 | }); 825 | 826 | after(done => { 827 | s.end(() => { 828 | fs.removeSync(fileObj.dir); 829 | done(); 830 | }); 831 | }); 832 | 833 | it("should rotate with the same extension", () => { 834 | const files = fs.readdirSync(fileObj.dir); 835 | const expectedFileList = [ 836 | fileObj.base, 837 | fileObj.name + ".2012-09-12.1.log.gz" 838 | ]; 839 | files.should.containDeep(expectedFileList); 840 | files.length.should.equal(expectedFileList.length); 841 | 842 | fs.readFileSync(path.format(fileObj)) 843 | .toString() 844 | .should.equal("567"); 845 | const content = fs.readFileSync( 846 | path.format( 847 | Object.assign({}, fileObj, { 848 | base: fileObj.name + ".2012-09-12.1.log.gz" 849 | }) 850 | ) 851 | ); 852 | zlib 853 | .gunzipSync(content) 854 | .toString() 855 | .should.equal("01234"); 856 | }); 857 | }); 858 | 859 | describe("with alwaysIncludePattern and 
keepFileExt", () => { 860 | const fileObj = generateTestFile("keepFileExt.log"); 861 | let s; 862 | 863 | before(async () => { 864 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 865 | s = new RollingFileWriteStream(fileObj.path, { 866 | maxSize: 5, 867 | pattern: "yyyy-MM-dd", 868 | keepFileExt: true, 869 | alwaysIncludePattern: true 870 | }); 871 | const flows = Array.from(Array(8).keys()).map(i => () => { 872 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 873 | return new Promise(resolve => { 874 | s.write(i.toString(), "utf8", () => resolve()); 875 | }); 876 | }); 877 | for (let i = 0; i < flows.length; i += 1) { 878 | await flows[i](); 879 | } 880 | }); 881 | 882 | after(done => { 883 | s.end(() => { 884 | fs.removeSync(fileObj.dir); 885 | done(); 886 | }); 887 | }); 888 | 889 | it("should rotate with the same extension and keep date in the filename", () => { 890 | const files = fs.readdirSync(fileObj.dir); 891 | const expectedFileList = [ 892 | fileObj.name + ".2012-09-12.1.log", 893 | fileObj.name + ".2012-09-13.log" 894 | ]; 895 | files.should.containDeep(expectedFileList); 896 | files.length.should.equal(expectedFileList.length); 897 | fs.readFileSync( 898 | path.format( 899 | Object.assign({}, fileObj, { 900 | base: fileObj.name + ".2012-09-13.log" 901 | }) 902 | ) 903 | ) 904 | .toString() 905 | .should.equal("567"); 906 | fs.readFileSync( 907 | path.format( 908 | Object.assign({}, fileObj, { 909 | base: fileObj.name + ".2012-09-12.1.log" 910 | }) 911 | ) 912 | ) 913 | .toString() 914 | .should.equal("01234"); 915 | }); 916 | }); 917 | 918 | describe("with 5 maxSize, compress, keepFileExt and alwaysIncludePattern", () => { 919 | const fileObj = generateTestFile("keepFileExt.log"); 920 | let s; 921 | 922 | before(async () => { 923 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 924 | s = new RollingFileWriteStream(fileObj.path, { 925 | maxSize: 5, 926 | pattern: "yyyy-MM-dd", 927 | compress: true, 928 | keepFileExt: true, 929 | 
alwaysIncludePattern: true 930 | }); 931 | const flows = Array.from(Array(38).keys()).map(i => () => { 932 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 933 | return new Promise(resolve => { 934 | s.write(i.toString(), "utf8", () => resolve()); 935 | }); 936 | }); 937 | for (let i = 0; i < flows.length; i += 1) { 938 | await flows[i](); 939 | } 940 | }); 941 | 942 | after(done => { 943 | s.end(() => { 944 | fs.removeSync(fileObj.dir); 945 | done(); 946 | }); 947 | }); 948 | 949 | it("should rotate every day", () => { 950 | const files = fs.readdirSync(fileObj.dir); 951 | const expectedFileList = [ 952 | fileObj.name + ".2012-09-12.1.log.gz", //01234 953 | fileObj.name + ".2012-09-13.1.log.gz", //56789 954 | fileObj.name + ".2012-09-14.2.log.gz", //101112 955 | fileObj.name + ".2012-09-14.1.log.gz", //1314 956 | fileObj.name + ".2012-09-15.2.log.gz", //151617 957 | fileObj.name + ".2012-09-15.1.log.gz", //1819 958 | fileObj.name + ".2012-09-16.2.log.gz", //202122 959 | fileObj.name + ".2012-09-16.1.log.gz", //2324 960 | fileObj.name + ".2012-09-17.2.log.gz", //252627 961 | fileObj.name + ".2012-09-17.1.log.gz", //2829 962 | fileObj.name + ".2012-09-18.2.log.gz", //303132 963 | fileObj.name + ".2012-09-18.1.log.gz", //3334 964 | fileObj.name + ".2012-09-19.log" //353637 965 | ]; 966 | files.should.containDeep(expectedFileList); 967 | files.length.should.equal(expectedFileList.length); 968 | fs.readFileSync( 969 | path.format( 970 | Object.assign({}, fileObj, { 971 | base: fileObj.name + ".2012-09-19.log" 972 | }) 973 | ) 974 | ) 975 | .toString() 976 | .should.equal("353637"); 977 | zlib 978 | .gunzipSync( 979 | fs.readFileSync( 980 | path.format( 981 | Object.assign({}, fileObj, { 982 | base: fileObj.name + ".2012-09-18.1.log.gz" 983 | }) 984 | ) 985 | ) 986 | ) 987 | .toString() 988 | .should.equal("3334"); 989 | zlib 990 | .gunzipSync( 991 | fs.readFileSync( 992 | path.format( 993 | Object.assign({}, fileObj, { 994 | base: fileObj.name + 
".2012-09-18.2.log.gz" 995 | }) 996 | ) 997 | ) 998 | ) 999 | .toString() 1000 | .should.equal("303132"); 1001 | zlib 1002 | .gunzipSync( 1003 | fs.readFileSync( 1004 | path.format( 1005 | Object.assign({}, fileObj, { 1006 | base: fileObj.name + ".2012-09-17.1.log.gz" 1007 | }) 1008 | ) 1009 | ) 1010 | ) 1011 | .toString() 1012 | .should.equal("2829"); 1013 | zlib 1014 | .gunzipSync( 1015 | fs.readFileSync( 1016 | path.format( 1017 | Object.assign({}, fileObj, { 1018 | base: fileObj.name + ".2012-09-17.2.log.gz" 1019 | }) 1020 | ) 1021 | ) 1022 | ) 1023 | .toString() 1024 | .should.equal("252627"); 1025 | zlib 1026 | .gunzipSync( 1027 | fs.readFileSync( 1028 | path.format( 1029 | Object.assign({}, fileObj, { 1030 | base: fileObj.name + ".2012-09-16.1.log.gz" 1031 | }) 1032 | ) 1033 | ) 1034 | ) 1035 | .toString() 1036 | .should.equal("2324"); 1037 | zlib 1038 | .gunzipSync( 1039 | fs.readFileSync( 1040 | path.format( 1041 | Object.assign({}, fileObj, { 1042 | base: fileObj.name + ".2012-09-16.2.log.gz" 1043 | }) 1044 | ) 1045 | ) 1046 | ) 1047 | .toString() 1048 | .should.equal("202122"); 1049 | zlib 1050 | .gunzipSync( 1051 | fs.readFileSync( 1052 | path.format( 1053 | Object.assign({}, fileObj, { 1054 | base: fileObj.name + ".2012-09-15.1.log.gz" 1055 | }) 1056 | ) 1057 | ) 1058 | ) 1059 | .toString() 1060 | .should.equal("1819"); 1061 | zlib 1062 | .gunzipSync( 1063 | fs.readFileSync( 1064 | path.format( 1065 | Object.assign({}, fileObj, { 1066 | base: fileObj.name + ".2012-09-15.2.log.gz" 1067 | }) 1068 | ) 1069 | ) 1070 | ) 1071 | .toString() 1072 | .should.equal("151617"); 1073 | zlib 1074 | .gunzipSync( 1075 | fs.readFileSync( 1076 | path.format( 1077 | Object.assign({}, fileObj, { 1078 | base: fileObj.name + ".2012-09-14.1.log.gz" 1079 | }) 1080 | ) 1081 | ) 1082 | ) 1083 | .toString() 1084 | .should.equal("1314"); 1085 | zlib 1086 | .gunzipSync( 1087 | fs.readFileSync( 1088 | path.format( 1089 | Object.assign({}, fileObj, { 1090 | base: fileObj.name + 
".2012-09-14.2.log.gz" 1091 | }) 1092 | ) 1093 | ) 1094 | ) 1095 | .toString() 1096 | .should.equal("101112"); 1097 | zlib 1098 | .gunzipSync( 1099 | fs.readFileSync( 1100 | path.format( 1101 | Object.assign({}, fileObj, { 1102 | base: fileObj.name + ".2012-09-13.1.log.gz" 1103 | }) 1104 | ) 1105 | ) 1106 | ) 1107 | .toString() 1108 | .should.equal("56789"); 1109 | zlib 1110 | .gunzipSync( 1111 | fs.readFileSync( 1112 | path.format( 1113 | Object.assign({}, fileObj, { 1114 | base: fileObj.name + ".2012-09-12.1.log.gz" 1115 | }) 1116 | ) 1117 | ) 1118 | ) 1119 | .toString() 1120 | .should.equal("01234"); 1121 | }); 1122 | }); 1123 | 1124 | describe("with fileNameSep", () => { 1125 | const fileObj = generateTestFile("fileNameSep.log"); 1126 | let s; 1127 | 1128 | before(async () => { 1129 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1130 | s = new RollingFileWriteStream(fileObj.path, { 1131 | pattern: "yyyy-MM-dd", 1132 | maxSize: 5, 1133 | fileNameSep: "_" 1134 | }); 1135 | const flows = Array.from(Array(8).keys()).map(i => () => { 1136 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 1137 | return new Promise(resolve => { 1138 | s.write(i.toString(), "utf8", () => resolve()); 1139 | }); 1140 | }); 1141 | for (let i = 0; i < flows.length; i += 1) { 1142 | await flows[i](); 1143 | } 1144 | }); 1145 | 1146 | after(done => { 1147 | s.end(() => { 1148 | fs.removeSync(fileObj.dir); 1149 | done(); 1150 | }); 1151 | }); 1152 | 1153 | it("should rotate with the same fileNameSep", () => { 1154 | const files = fs.readdirSync(fileObj.dir); 1155 | const expectedFileList = [ 1156 | fileObj.base, 1157 | fileObj.name + ".log_2012-09-12_1" 1158 | ]; 1159 | files.should.containDeep(expectedFileList); 1160 | files.length.should.equal(expectedFileList.length); 1161 | 1162 | fs.readFileSync(path.format(fileObj)) 1163 | .toString() 1164 | .should.equal("567"); 1165 | fs.readFileSync( 1166 | path.format({ 1167 | dir: fileObj.dir, 1168 | base: fileObj.name + 
fileObj.ext + "_2012-09-12_1" 1169 | }) 1170 | ) 1171 | .toString() 1172 | .should.equal("01234"); 1173 | }); 1174 | }); 1175 | 1176 | describe("with fileNameSep and keepFileExt", () => { 1177 | const fileObj = generateTestFile("keepFileExt.log"); 1178 | let s; 1179 | 1180 | before(async () => { 1181 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1182 | s = new RollingFileWriteStream(fileObj.path, { 1183 | pattern: "yyyy-MM-dd", 1184 | maxSize: 5, 1185 | fileNameSep: "_", 1186 | keepFileExt: true 1187 | }); 1188 | const flows = Array.from(Array(8).keys()).map(i => () => { 1189 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 1190 | return new Promise(resolve => { 1191 | s.write(i.toString(), "utf8", () => resolve()); 1192 | }); 1193 | }); 1194 | for (let i = 0; i < flows.length; i += 1) { 1195 | await flows[i](); 1196 | } 1197 | }); 1198 | 1199 | after(done => { 1200 | s.end(() => { 1201 | fs.removeSync(fileObj.dir); 1202 | done(); 1203 | }); 1204 | }); 1205 | 1206 | it("should rotate with the same fileNameSep and extension", () => { 1207 | const files = fs.readdirSync(fileObj.dir); 1208 | const expectedFileList = [ 1209 | fileObj.base, 1210 | fileObj.name + "_2012-09-12_1.log" 1211 | ]; 1212 | files.should.containDeep(expectedFileList); 1213 | files.length.should.equal(expectedFileList.length); 1214 | 1215 | fs.readFileSync(path.format(fileObj)) 1216 | .toString() 1217 | .should.equal("567"); 1218 | fs.readFileSync( 1219 | path.format({ 1220 | dir: fileObj.dir, 1221 | base: fileObj.name + "_2012-09-12_1" + fileObj.ext 1222 | }) 1223 | ) 1224 | .toString() 1225 | .should.equal("01234"); 1226 | }); 1227 | }); 1228 | 1229 | describe("with fileNameSep and compress true", () => { 1230 | const fileObj = generateTestFile(); 1231 | let s; 1232 | 1233 | before(async () => { 1234 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1235 | s = new RollingFileWriteStream(fileObj.path, { 1236 | maxSize: 5, 1237 | pattern: "yyyy-MM-dd", 1238 | fileNameSep: "_", 
1239 | compress: true 1240 | }); 1241 | const flows = Array.from(Array(8).keys()).map(i => () => { 1242 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 1243 | return new Promise(resolve => { 1244 | s.write(i.toString(), "utf8", () => resolve()); 1245 | }); 1246 | }); 1247 | for (let i = 0; i < flows.length; i += 1) { 1248 | await flows[i](); 1249 | } 1250 | }); 1251 | 1252 | after(done => { 1253 | s.end(() => { 1254 | fs.removeSync(fileObj.dir); 1255 | done(); 1256 | }); 1257 | }); 1258 | 1259 | it("should rotate with the same fileNameSep and gunzip", () => { 1260 | const files = fs.readdirSync(fileObj.dir); 1261 | const expectedFileList = [ 1262 | fileObj.base, 1263 | fileObj.base + "_2012-09-12_1.gz" 1264 | ]; 1265 | files.should.containDeep(expectedFileList); 1266 | files.length.should.equal(expectedFileList.length); 1267 | 1268 | fs.readFileSync(path.format(fileObj)) 1269 | .toString() 1270 | .should.equal("567"); 1271 | const content = fs.readFileSync( 1272 | path.format( 1273 | Object.assign({}, fileObj, { 1274 | base: fileObj.base + "_2012-09-12_1.gz" 1275 | }) 1276 | ) 1277 | ); 1278 | zlib 1279 | .gunzipSync(content) 1280 | .toString() 1281 | .should.equal("01234"); 1282 | }); 1283 | }); 1284 | 1285 | describe("with fileNameSep, keepFileExt and compress", () => { 1286 | const fileObj = generateTestFile("keepFileExt.log"); 1287 | let s; 1288 | 1289 | before(async () => { 1290 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1291 | s = new RollingFileWriteStream(fileObj.path, { 1292 | maxSize: 5, 1293 | pattern: "yyyy-MM-dd", 1294 | fileNameSep: "_", 1295 | keepFileExt: true, 1296 | compress: true 1297 | }); 1298 | const flows = Array.from(Array(8).keys()).map(i => () => { 1299 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 1300 | return new Promise(resolve => { 1301 | s.write(i.toString(), "utf8", () => resolve()); 1302 | }); 1303 | }); 1304 | for (let i = 0; i < flows.length; i += 1) { 1305 | await flows[i](); 1306 
| } 1307 | }); 1308 | 1309 | after(done => { 1310 | s.end(() => { 1311 | fs.removeSync(fileObj.dir); 1312 | done(); 1313 | }); 1314 | }); 1315 | 1316 | it("should rotate with the same fileNameSep and extension", () => { 1317 | const files = fs.readdirSync(fileObj.dir); 1318 | const expectedFileList = [ 1319 | fileObj.base, 1320 | fileObj.name + "_2012-09-12_1.log.gz" 1321 | ]; 1322 | files.should.containDeep(expectedFileList); 1323 | files.length.should.equal(expectedFileList.length); 1324 | 1325 | fs.readFileSync(path.format(fileObj)) 1326 | .toString() 1327 | .should.equal("567"); 1328 | const content = fs.readFileSync( 1329 | path.format( 1330 | Object.assign({}, fileObj, { 1331 | base: fileObj.name + "_2012-09-12_1.log.gz" 1332 | }) 1333 | ) 1334 | ); 1335 | zlib 1336 | .gunzipSync(content) 1337 | .toString() 1338 | .should.equal("01234"); 1339 | }); 1340 | }); 1341 | 1342 | describe("with fileNameSep, alwaysIncludePattern and keepFileExt", () => { 1343 | const fileObj = generateTestFile("keepFileExt.log"); 1344 | let s; 1345 | 1346 | before(async () => { 1347 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1348 | s = new RollingFileWriteStream(fileObj.path, { 1349 | maxSize: 5, 1350 | pattern: "yyyy-MM-dd", 1351 | fileNameSep: "_", 1352 | keepFileExt: true, 1353 | alwaysIncludePattern: true 1354 | }); 1355 | const flows = Array.from(Array(8).keys()).map(i => () => { 1356 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 1357 | return new Promise(resolve => { 1358 | s.write(i.toString(), "utf8", () => resolve()); 1359 | }); 1360 | }); 1361 | for (let i = 0; i < flows.length; i += 1) { 1362 | await flows[i](); 1363 | } 1364 | }); 1365 | 1366 | after(done => { 1367 | s.end(() => { 1368 | fs.removeSync(fileObj.dir); 1369 | done(); 1370 | }); 1371 | }); 1372 | 1373 | it("should rotate with the same fileNameSep, extension and keep date in the filename", () => { 1374 | const files = fs.readdirSync(fileObj.dir); 1375 | const expectedFileList = [ 1376 
| fileObj.name + "_2012-09-12_1.log", 1377 | fileObj.name + "_2012-09-13.log" 1378 | ]; 1379 | files.should.containDeep(expectedFileList); 1380 | files.length.should.equal(expectedFileList.length); 1381 | fs.readFileSync( 1382 | path.format( 1383 | Object.assign({}, fileObj, { 1384 | base: fileObj.name + "_2012-09-13.log" 1385 | }) 1386 | ) 1387 | ) 1388 | .toString() 1389 | .should.equal("567"); 1390 | fs.readFileSync( 1391 | path.format( 1392 | Object.assign({}, fileObj, { 1393 | base: fileObj.name + "_2012-09-12_1.log" 1394 | }) 1395 | ) 1396 | ) 1397 | .toString() 1398 | .should.equal("01234"); 1399 | }); 1400 | }); 1401 | 1402 | describe("with fileNameSep, 5 maxSize, compress, keepFileExt and alwaysIncludePattern", () => { 1403 | const fileObj = generateTestFile("keepFileExt.log"); 1404 | let s; 1405 | 1406 | before(async () => { 1407 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1408 | s = new RollingFileWriteStream(fileObj.path, { 1409 | maxSize: 5, 1410 | pattern: "yyyy-MM-dd", 1411 | fileNameSep: "_", 1412 | compress: true, 1413 | keepFileExt: true, 1414 | alwaysIncludePattern: true 1415 | }); 1416 | const flows = Array.from(Array(38).keys()).map(i => () => { 1417 | fakeNow = new Date(2012, 8, 12 + parseInt(i / 5, 10), 10, 37, 11); 1418 | return new Promise(resolve => { 1419 | s.write(i.toString(), "utf8", () => resolve()); 1420 | }); 1421 | }); 1422 | for (let i = 0; i < flows.length; i += 1) { 1423 | await flows[i](); 1424 | } 1425 | }); 1426 | 1427 | after(done => { 1428 | s.end(() => { 1429 | fs.removeSync(fileObj.dir); 1430 | done(); 1431 | }); 1432 | }); 1433 | 1434 | it("should rotate every day", () => { 1435 | const files = fs.readdirSync(fileObj.dir); 1436 | const expectedFileList = [ 1437 | fileObj.name + "_2012-09-12_1.log.gz", //01234 1438 | fileObj.name + "_2012-09-13_1.log.gz", //56789 1439 | fileObj.name + "_2012-09-14_2.log.gz", //101112 1440 | fileObj.name + "_2012-09-14_1.log.gz", //1314 1441 | fileObj.name + "_2012-09-15_2.log.gz", 
//151617 1442 | fileObj.name + "_2012-09-15_1.log.gz", //1819 1443 | fileObj.name + "_2012-09-16_2.log.gz", //202122 1444 | fileObj.name + "_2012-09-16_1.log.gz", //2324 1445 | fileObj.name + "_2012-09-17_2.log.gz", //252627 1446 | fileObj.name + "_2012-09-17_1.log.gz", //2829 1447 | fileObj.name + "_2012-09-18_2.log.gz", //303132 1448 | fileObj.name + "_2012-09-18_1.log.gz", //3334 1449 | fileObj.name + "_2012-09-19.log" //353637 1450 | ]; 1451 | files.should.containDeep(expectedFileList); 1452 | files.length.should.equal(expectedFileList.length); 1453 | fs.readFileSync( 1454 | path.format( 1455 | Object.assign({}, fileObj, { 1456 | base: fileObj.name + "_2012-09-19.log" 1457 | }) 1458 | ) 1459 | ) 1460 | .toString() 1461 | .should.equal("353637"); 1462 | zlib 1463 | .gunzipSync( 1464 | fs.readFileSync( 1465 | path.format( 1466 | Object.assign({}, fileObj, { 1467 | base: fileObj.name + "_2012-09-18_1.log.gz" 1468 | }) 1469 | ) 1470 | ) 1471 | ) 1472 | .toString() 1473 | .should.equal("3334"); 1474 | zlib 1475 | .gunzipSync( 1476 | fs.readFileSync( 1477 | path.format( 1478 | Object.assign({}, fileObj, { 1479 | base: fileObj.name + "_2012-09-18_2.log.gz" 1480 | }) 1481 | ) 1482 | ) 1483 | ) 1484 | .toString() 1485 | .should.equal("303132"); 1486 | zlib 1487 | .gunzipSync( 1488 | fs.readFileSync( 1489 | path.format( 1490 | Object.assign({}, fileObj, { 1491 | base: fileObj.name + "_2012-09-17_1.log.gz" 1492 | }) 1493 | ) 1494 | ) 1495 | ) 1496 | .toString() 1497 | .should.equal("2829"); 1498 | zlib 1499 | .gunzipSync( 1500 | fs.readFileSync( 1501 | path.format( 1502 | Object.assign({}, fileObj, { 1503 | base: fileObj.name + "_2012-09-17_2.log.gz" 1504 | }) 1505 | ) 1506 | ) 1507 | ) 1508 | .toString() 1509 | .should.equal("252627"); 1510 | zlib 1511 | .gunzipSync( 1512 | fs.readFileSync( 1513 | path.format( 1514 | Object.assign({}, fileObj, { 1515 | base: fileObj.name + "_2012-09-16_1.log.gz" 1516 | }) 1517 | ) 1518 | ) 1519 | ) 1520 | .toString() 1521 | 
.should.equal("2324"); 1522 | zlib 1523 | .gunzipSync( 1524 | fs.readFileSync( 1525 | path.format( 1526 | Object.assign({}, fileObj, { 1527 | base: fileObj.name + "_2012-09-16_2.log.gz" 1528 | }) 1529 | ) 1530 | ) 1531 | ) 1532 | .toString() 1533 | .should.equal("202122"); 1534 | zlib 1535 | .gunzipSync( 1536 | fs.readFileSync( 1537 | path.format( 1538 | Object.assign({}, fileObj, { 1539 | base: fileObj.name + "_2012-09-15_1.log.gz" 1540 | }) 1541 | ) 1542 | ) 1543 | ) 1544 | .toString() 1545 | .should.equal("1819"); 1546 | zlib 1547 | .gunzipSync( 1548 | fs.readFileSync( 1549 | path.format( 1550 | Object.assign({}, fileObj, { 1551 | base: fileObj.name + "_2012-09-15_2.log.gz" 1552 | }) 1553 | ) 1554 | ) 1555 | ) 1556 | .toString() 1557 | .should.equal("151617"); 1558 | zlib 1559 | .gunzipSync( 1560 | fs.readFileSync( 1561 | path.format( 1562 | Object.assign({}, fileObj, { 1563 | base: fileObj.name + "_2012-09-14_1.log.gz" 1564 | }) 1565 | ) 1566 | ) 1567 | ) 1568 | .toString() 1569 | .should.equal("1314"); 1570 | zlib 1571 | .gunzipSync( 1572 | fs.readFileSync( 1573 | path.format( 1574 | Object.assign({}, fileObj, { 1575 | base: fileObj.name + "_2012-09-14_2.log.gz" 1576 | }) 1577 | ) 1578 | ) 1579 | ) 1580 | .toString() 1581 | .should.equal("101112"); 1582 | zlib 1583 | .gunzipSync( 1584 | fs.readFileSync( 1585 | path.format( 1586 | Object.assign({}, fileObj, { 1587 | base: fileObj.name + "_2012-09-13_1.log.gz" 1588 | }) 1589 | ) 1590 | ) 1591 | ) 1592 | .toString() 1593 | .should.equal("56789"); 1594 | zlib 1595 | .gunzipSync( 1596 | fs.readFileSync( 1597 | path.format( 1598 | Object.assign({}, fileObj, { 1599 | base: fileObj.name + "_2012-09-12_1.log.gz" 1600 | }) 1601 | ) 1602 | ) 1603 | ) 1604 | .toString() 1605 | .should.equal("01234"); 1606 | }); 1607 | }); 1608 | 1609 | describe("when old files exist", () => { 1610 | const fileObj = generateTestFile(); 1611 | let s; 1612 | 1613 | before(done => { 1614 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1615 | 
fs.ensureFileSync(fileObj.path); 1616 | fs.writeFileSync(fileObj.path, "exist"); 1617 | s = new RollingFileWriteStream(fileObj.path); 1618 | s.write("now", "utf8", done); 1619 | }); 1620 | 1621 | after(done => { 1622 | s.end(() => { 1623 | fs.removeSync(fileObj.dir); 1624 | done(); 1625 | }); 1626 | }); 1627 | 1628 | it("should use write in the old file if not reach the maxSize limit", () => { 1629 | const files = fs.readdirSync(fileObj.dir); 1630 | const expectedFileList = [fileObj.base]; 1631 | files.should.containDeep(expectedFileList); 1632 | files.length.should.equal(expectedFileList.length); 1633 | 1634 | fs.readFileSync(path.format(fileObj)) 1635 | .toString() 1636 | .should.equal("existnow"); 1637 | }); 1638 | }); 1639 | 1640 | describe("when old files exist with contents", () => { 1641 | const fileObj = generateTestFile(); 1642 | let s; 1643 | 1644 | before(done => { 1645 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1646 | fs.ensureFileSync(fileObj.path); 1647 | fs.writeFileSync(fileObj.path, "This is exactly 30 bytes long\n"); 1648 | s = new RollingFileWriteStream(fileObj.path, { maxSize: 35 }); 1649 | s.write("one\n", "utf8"); //34 1650 | s.write("two\n", "utf8"); //38 - file should be rotated next time 1651 | s.write("three\n", "utf8", done); // this should be in a new file. 
1652 | }); 1653 | 1654 | after(done => { 1655 | s.end(() => { 1656 | fs.removeSync(fileObj.dir); 1657 | done(); 1658 | }); 1659 | }); 1660 | 1661 | it("should respect the existing file size", () => { 1662 | const files = fs.readdirSync(fileObj.dir); 1663 | const expectedFileList = [fileObj.base, fileObj.base + ".1"]; 1664 | files.should.containDeep(expectedFileList); 1665 | files.length.should.equal(expectedFileList.length); 1666 | 1667 | fs.readFileSync(path.format(fileObj)) 1668 | .toString() 1669 | .should.equal("three\n"); 1670 | fs.readFileSync(path.join(fileObj.dir, fileObj.base + ".1")) 1671 | .toString() 1672 | .should.equal("This is exactly 30 bytes long\none\ntwo\n"); 1673 | }); 1674 | }); 1675 | 1676 | describe("when old files exist with contents and the flag is a+", () => { 1677 | const fileObj = generateTestFile(); 1678 | let s; 1679 | 1680 | before(done => { 1681 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1682 | fs.ensureFileSync(fileObj.path); 1683 | fs.writeFileSync(fileObj.path, "This is exactly 30 bytes long\n"); 1684 | s = new RollingFileWriteStream(fileObj.path, { 1685 | maxSize: 35, 1686 | flags: "a+" 1687 | }); 1688 | s.write("one\n", "utf8"); //34 1689 | s.write("two\n", "utf8"); //38 - file should be rotated next time 1690 | s.write("three\n", "utf8", done); // this should be in a new file. 
1691 | }); 1692 | 1693 | after(done => { 1694 | s.end(() => { 1695 | fs.removeSync(fileObj.dir); 1696 | done(); 1697 | }); 1698 | }); 1699 | 1700 | it("should respect the existing file size", () => { 1701 | const files = fs.readdirSync(fileObj.dir); 1702 | const expectedFileList = [fileObj.base, fileObj.base + ".1"]; 1703 | files.should.containDeep(expectedFileList); 1704 | files.length.should.equal(expectedFileList.length); 1705 | 1706 | fs.readFileSync(path.format(fileObj)) 1707 | .toString() 1708 | .should.equal("three\n"); 1709 | fs.readFileSync(path.join(fileObj.dir, fileObj.base + ".1")) 1710 | .toString() 1711 | .should.equal("This is exactly 30 bytes long\none\ntwo\n"); 1712 | }); 1713 | }); 1714 | 1715 | describe("when old files exist with indices", () => { 1716 | const fileObj = generateTestFile(); 1717 | let s; 1718 | 1719 | before(done => { 1720 | fs.ensureFileSync(fileObj.path); 1721 | fs.writeFileSync( 1722 | fileObj.path, 1723 | "This was the base file and it should be more than 30 bytes\n" 1724 | ); // base 1725 | fs.writeFileSync(fileObj.path + ".1", "This was the first old file\n"); // base.1 1726 | s = new RollingFileWriteStream(fileObj.path, { 1727 | maxSize: 30, 1728 | numToKeep: 5 1729 | }); 1730 | s.write("This is exactly 30 bytes long\n", "utf8"); // base.1 -> base.2, base -> base.1 1731 | s.write("This is exactly 30 bytes long\n", "utf8"); // base.2 -> base.3, base.1 -> base.2, base -> base.1 1732 | s.write("three\n", "utf8", done); // base.3 -> base.4, base.2 -> base.3, base.1 -> base.2, base -> base.1 1733 | }); 1734 | 1735 | after(done => { 1736 | s.end(() => { 1737 | fs.removeSync(fileObj.dir); 1738 | done(); 1739 | }); 1740 | }); 1741 | 1742 | it("should rotate the old file indices", () => { 1743 | const files = fs.readdirSync(fileObj.dir); 1744 | const expectedFileList = [ 1745 | fileObj.base, 1746 | fileObj.base + ".1", 1747 | fileObj.base + ".2", 1748 | fileObj.base + ".3", 1749 | fileObj.base + ".4" 1750 | ]; 1751 | 
files.should.containDeep(expectedFileList); 1752 | files.length.should.equal(expectedFileList.length); 1753 | 1754 | fs.readFileSync(path.format(fileObj)) 1755 | .toString() 1756 | .should.equal("three\n"); 1757 | fs.readFileSync(path.join(fileObj.dir, fileObj.base + ".1")) 1758 | .toString() 1759 | .should.equal("This is exactly 30 bytes long\n"); 1760 | fs.readFileSync(path.join(fileObj.dir, fileObj.base + ".2")) 1761 | .toString() 1762 | .should.equal("This is exactly 30 bytes long\n"); 1763 | fs.readFileSync(path.join(fileObj.dir, fileObj.base + ".3")) 1764 | .toString() 1765 | .should.equal( 1766 | "This was the base file and it should be more than 30 bytes\n" 1767 | ); 1768 | fs.readFileSync(path.join(fileObj.dir, fileObj.base + ".4")) 1769 | .toString() 1770 | .should.equal("This was the first old file\n"); 1771 | }); 1772 | }); 1773 | 1774 | describe("when old files exist with contents and rolling by date", () => { 1775 | const fileObj = generateTestFile(); 1776 | let s; 1777 | 1778 | before(done => { 1779 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1780 | fs.ensureFileSync(fileObj.path); 1781 | fs.writeFileSync(fileObj.path, "This was created Sept 12, 2012.\n"); 1782 | fakeNow = new Date(2012, 8, 13, 10, 53, 12); 1783 | s = new RollingFileWriteStream(fileObj.path, { pattern: "yyyy-MM-dd" }); 1784 | s.write("It is now Sept 13, 2012.\n", "utf8", done); // this should be in a new file. 
1785 | }); 1786 | 1787 | after(done => { 1788 | s.end(() => { 1789 | fs.removeSync(fileObj.dir); 1790 | done(); 1791 | }); 1792 | }); 1793 | 1794 | it("should respect the existing file date", () => { 1795 | const files = fs.readdirSync(fileObj.dir); 1796 | const expectedFileList = [fileObj.base, fileObj.base + ".2012-09-12"]; 1797 | files.should.containDeep(expectedFileList); 1798 | files.length.should.equal(expectedFileList.length); 1799 | 1800 | fs.readFileSync(path.format(fileObj)) 1801 | .toString() 1802 | .should.equal("It is now Sept 13, 2012.\n"); 1803 | fs.readFileSync(path.join(fileObj.dir, fileObj.base + ".2012-09-12")) 1804 | .toString() 1805 | .should.equal("This was created Sept 12, 2012.\n"); 1806 | }); 1807 | }); 1808 | 1809 | describe("when old files exist with contents and stream created with overwrite flag", () => { 1810 | const fileObj = generateTestFile(); 1811 | let s; 1812 | 1813 | before(done => { 1814 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1815 | fs.ensureFileSync(fileObj.path); 1816 | fs.writeFileSync(fileObj.path, "This is exactly 30 bytes long\n"); 1817 | s = new RollingFileWriteStream(fileObj.path, { maxSize: 35, flags: "w" }); 1818 | s.write("there should only be this\n", "utf8", done); 1819 | }); 1820 | 1821 | after(done => { 1822 | s.end(() => { 1823 | fs.removeSync(fileObj.dir); 1824 | done(); 1825 | }); 1826 | }); 1827 | 1828 | it("should ignore the existing file size", () => { 1829 | const files = fs.readdirSync(fileObj.dir); 1830 | const expectedFileList = [fileObj.base]; 1831 | files.should.containDeep(expectedFileList); 1832 | files.length.should.equal(expectedFileList.length); 1833 | 1834 | s.state.currentSize.should.equal(26); 1835 | 1836 | fs.readFileSync(path.format(fileObj)) 1837 | .toString() 1838 | .should.equal("there should only be this\n"); 1839 | }); 1840 | }); 1841 | 1842 | describe("when dir does not exist", () => { 1843 | const fileObj = generateTestFile(); 1844 | let s; 1845 | 1846 | before(done => { 1847 
| fs.removeSync(fileObj.dir); 1848 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1849 | s = new RollingFileWriteStream(fileObj.path); 1850 | s.write("test", "utf8", done); 1851 | }); 1852 | 1853 | after(done => { 1854 | s.end(() => { 1855 | fs.removeSync(fileObj.dir); 1856 | done(); 1857 | }); 1858 | }); 1859 | 1860 | it("should create the dir", () => { 1861 | const files = fs.readdirSync(fileObj.dir); 1862 | const expectedFileList = [fileObj.base]; 1863 | files.should.containDeep(expectedFileList); 1864 | files.length.should.equal(expectedFileList.length); 1865 | 1866 | fs.readFileSync(path.format(fileObj)) 1867 | .toString() 1868 | .should.equal("test"); 1869 | }); 1870 | }); 1871 | 1872 | describe("when multi-dir does not exist (recursive, nodejs >= 10.12.0)", () => { 1873 | const testFile = path.join(__dirname, "tmp_/tmpA/tmpB/tmpC/ignored.log"); 1874 | let s; 1875 | 1876 | before(done => { 1877 | fs.removeSync(path.join(__dirname, "tmp_/tmpA")); 1878 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1879 | s = new RollingFileWriteStream(testFile); 1880 | s.write("test", "utf8", done); 1881 | }); 1882 | 1883 | after(done => { 1884 | s.end(() => { 1885 | fs.removeSync(path.join(__dirname, "tmp_/tmpA")); 1886 | done(); 1887 | }); 1888 | }); 1889 | 1890 | it("should create the dir", () => { 1891 | const files = fs.readdirSync(path.dirname(testFile)); 1892 | const expectedFileList = ["ignored.log"]; 1893 | files.should.containDeep(expectedFileList); 1894 | files.length.should.equal(expectedFileList.length); 1895 | 1896 | fs.readFileSync(testFile) 1897 | .toString() 1898 | .should.equal("test"); 1899 | }); 1900 | }); 1901 | 1902 | describe("when multi-dir does not exist (non-recursive, nodejs < 10.12.0)", () => { 1903 | const testFile = path.join(__dirname, "tmp_/tmpA/tmpB/tmpC/ignored.log"); 1904 | let s; 1905 | 1906 | before(done => { 1907 | fs.removeSync(path.join(__dirname, "tmp_/tmpA")); 1908 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1909 | const 
RollingFileWriteStream = proxyquire("../lib/RollingFileWriteStream", { 1910 | "./now": mockNow, 1911 | "fs-extra": { 1912 | mkdirSync(dirPath, options) { 1913 | return fs.mkdirSync(dirPath, { ...options, ...{ recursive: false } }); 1914 | } 1915 | } 1916 | }); 1917 | s = new RollingFileWriteStream(testFile); 1918 | s.write("test", "utf8", done); 1919 | }); 1920 | 1921 | after(done => { 1922 | s.end(() => { 1923 | fs.removeSync(path.join(__dirname, "tmp_/tmpA")); 1924 | done(); 1925 | }); 1926 | }); 1927 | 1928 | it("should create the dir", () => { 1929 | const files = fs.readdirSync(path.dirname(testFile)); 1930 | const expectedFileList = ["ignored.log"]; 1931 | files.should.containDeep(expectedFileList); 1932 | files.length.should.equal(expectedFileList.length); 1933 | 1934 | fs.readFileSync(testFile) 1935 | .toString() 1936 | .should.equal("test"); 1937 | }); 1938 | }); 1939 | 1940 | describe("when multi-dir does not exist (error handling)", () => { 1941 | const testFile = path.join(__dirname, "tmp_/tmpA/tmpB/tmpC/ignored.log"); 1942 | let s; 1943 | 1944 | before(done => { 1945 | fs.removeSync(path.join(__dirname, "tmp_/tmpA")); 1946 | fakeNow = new Date(2012, 8, 12, 10, 37, 11); 1947 | done(); 1948 | }); 1949 | 1950 | after(done => { 1951 | try { 1952 | s.end(() => { 1953 | fs.removeSync(path.join(__dirname, "tmp_/tmpA")); 1954 | done(); 1955 | }); 1956 | } catch (e) { 1957 | done(); 1958 | } 1959 | }); 1960 | 1961 | it("should throw EPERM error", () => { 1962 | const errorEPERM = new Error("EPERM"); 1963 | errorEPERM.code = "EPERM"; 1964 | (() => { 1965 | const RollingFileWriteStream = proxyquire("../lib/RollingFileWriteStream", { 1966 | "./now": mockNow, 1967 | "fs-extra": { 1968 | mkdirSync() { 1969 | throw errorEPERM; 1970 | } 1971 | } 1972 | }); 1973 | s = new RollingFileWriteStream(testFile); 1974 | }).should.throw(errorEPERM); 1975 | }); 1976 | 1977 | const errorEROFS = new Error("EROFS"); 1978 | errorEROFS.code = "EROFS"; 1979 | 1980 | it("should throw 
EROFS error", () => { 1981 | (() => { 1982 | const RollingFileWriteStream = proxyquire("../lib/RollingFileWriteStream", { 1983 | "./now": mockNow, 1984 | "fs-extra": { 1985 | mkdirSync() { 1986 | throw errorEROFS; 1987 | }, 1988 | statSync() { 1989 | return { isDirectory() { return false; } }; 1990 | } 1991 | } 1992 | }); 1993 | s = new RollingFileWriteStream(testFile); 1994 | }).should.throw(errorEROFS); 1995 | }); 1996 | 1997 | it("should not throw EROFS error", () => { 1998 | (() => { 1999 | fs.mkdirSync(path.join(__dirname, "tmp_/tmpA/tmpB/tmpC"), { recursive: true }); 2000 | const RollingFileWriteStream = proxyquire("../lib/RollingFileWriteStream", { 2001 | "./now": mockNow, 2002 | "fs-extra": { 2003 | mkdirSync() { 2004 | throw errorEROFS; 2005 | } 2006 | } 2007 | }); 2008 | s = new RollingFileWriteStream(testFile); 2009 | }).should.not.throw(errorEROFS); 2010 | }); 2011 | }); 2012 | 2013 | describe("when given just a base filename with no dir", () => { 2014 | let s; 2015 | before(done => { 2016 | s = new RollingFileWriteStream("test.log"); 2017 | s.write("this should not cause any problems", "utf8", done); 2018 | }); 2019 | 2020 | after(done => { 2021 | s.end(() => { 2022 | fs.removeSync("test.log"); 2023 | done(); 2024 | }); 2025 | }); 2026 | 2027 | it("should use process.cwd() as the dir", () => { 2028 | const files = fs.readdirSync(process.cwd()); 2029 | files.should.containDeep(["test.log"]); 2030 | 2031 | fs.readFileSync(path.join(process.cwd(), "test.log")) 2032 | .toString() 2033 | .should.equal("this should not cause any problems"); 2034 | }); 2035 | }); 2036 | 2037 | describe("with no callback to write", () => { 2038 | let s; 2039 | before(done => { 2040 | s = new RollingFileWriteStream("no-callback.log"); 2041 | s.write("this is all very nice", "utf8", done); 2042 | }); 2043 | 2044 | after(done => { 2045 | fs.remove("no-callback.log", done); 2046 | }); 2047 | 2048 | it("should not complain", done => { 2049 | s.write("I am not bothered if this 
succeeds or not"); 2050 | s.end(done); 2051 | }); 2052 | }); 2053 | 2054 | describe("events", () => { 2055 | let s; 2056 | before(done => { 2057 | s = new RollingFileWriteStream("test-events.log"); 2058 | s.write("this should not cause any problems", "utf8", done); 2059 | }); 2060 | 2061 | after(done => { 2062 | s.end(() => { 2063 | fs.removeSync("test-events.log"); 2064 | done(); 2065 | }); 2066 | }); 2067 | 2068 | it("should emit the error event of the underlying stream", done => { 2069 | s.on("error", e => { 2070 | e.message.should.equal("oh no"); 2071 | done(); 2072 | }); 2073 | s.currentFileStream.emit("error", new Error("oh no")); 2074 | }); 2075 | }); 2076 | 2077 | describe("when deleting old files and there is an error", () => { 2078 | let s; 2079 | before(done => { 2080 | // using ensureDir for test coverage for fs.unlink(f).catch(e) of deleteFiles() in lib/RollingFileWriteStream.js 2081 | fs.ensureDir(path.join(__dirname, "tmp-delete-test/logfile.log.2"), done); 2082 | }); 2083 | 2084 | it("should not let errors bubble up", done => { 2085 | s = new RollingFileWriteStream(path.join(__dirname, "tmp-delete-test/logfile.log"), { 2086 | maxSize: 10, 2087 | numToKeep: 1 2088 | }); 2089 | 2090 | s.write("length is 10", "utf8", () => { 2091 | // if there's an error during deletion, then done never gets called 2092 | s.write("length is 10", "utf8", done); 2093 | }); 2094 | }); 2095 | 2096 | after(done => { 2097 | s.end(() => { 2098 | fs.remove(path.join(__dirname, "tmp-delete-test"), done); 2099 | }); 2100 | }); 2101 | }); 2102 | }); 2103 | -------------------------------------------------------------------------------- /test/fileNameFormatter-test.js: -------------------------------------------------------------------------------- 1 | require("should"); 2 | const { normalize } = require("path"); 3 | 4 | describe("fileNameFormatter", () => { 5 | describe("without a date", () => { 6 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 7 | file: { 8 | 
dir: "/path/to/file", 9 | base: "thefile.log", 10 | ext: ".log", 11 | name: "thefile" 12 | } 13 | }); 14 | it("should take an index and return a filename", () => { 15 | fileNameFormatter({ 16 | index: 0 17 | }).should.eql(normalize("/path/to/file/thefile.log")); 18 | fileNameFormatter({ index: 1, date: "" }).should.eql( 19 | normalize("/path/to/file/thefile.log.1") 20 | ); 21 | fileNameFormatter({ index: 15, date: undefined }).should.eql( 22 | normalize("/path/to/file/thefile.log.15") 23 | ); 24 | fileNameFormatter({ index: 15 }).should.eql( 25 | normalize("/path/to/file/thefile.log.15") 26 | ); 27 | }); 28 | }); 29 | 30 | describe("with a date", () => { 31 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 32 | file: { 33 | dir: "/path/to/file", 34 | base: "thefile.log", 35 | ext: ".log", 36 | name: "thefile" 37 | } 38 | }); 39 | it("should take an index, date and return a filename", () => { 40 | fileNameFormatter({ index: 0, date: "2019-07-15" }).should.eql( 41 | normalize("/path/to/file/thefile.log") 42 | ); 43 | fileNameFormatter({ 44 | index: 2, 45 | date: "2019-07-16" 46 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-16")); 47 | }); 48 | }); 49 | 50 | describe("with the alwaysIncludeDate option", () => { 51 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 52 | file: { 53 | dir: "/path/to/file", 54 | base: "thefile.log", 55 | ext: ".log", 56 | name: "thefile" 57 | }, 58 | alwaysIncludeDate: true 59 | }); 60 | it("should take an index, date and return a filename", () => { 61 | fileNameFormatter({ 62 | index: 0, 63 | date: "2019-07-15" 64 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-15")); 65 | fileNameFormatter({ index: 0, date: "2019-07-15" }).should.eql( 66 | normalize("/path/to/file/thefile.log.2019-07-15") 67 | ); 68 | fileNameFormatter({ 69 | index: 2, 70 | date: "2019-07-16" 71 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-16")); 72 | }); 73 | }); 74 | 75 | describe("with the 
keepFileExt option", () => { 76 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 77 | file: { 78 | dir: "/path/to/file", 79 | base: "thefile.log", 80 | ext: ".log", 81 | name: "thefile" 82 | }, 83 | keepFileExt: true 84 | }); 85 | it("should take an index, date and return a filename", () => { 86 | fileNameFormatter({ 87 | index: 0, 88 | date: "2019-07-15" 89 | }).should.eql(normalize("/path/to/file/thefile.log")); 90 | fileNameFormatter({ index: 1 }).should.eql(normalize("/path/to/file/thefile.1.log")); 91 | fileNameFormatter({ index: 2 }).should.eql(normalize("/path/to/file/thefile.2.log")); 92 | fileNameFormatter({ index: 15 }).should.eql( 93 | normalize("/path/to/file/thefile.15.log") 94 | ); 95 | }); 96 | }); 97 | 98 | describe("with the keepFileExt option and a date", () => { 99 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 100 | file: { 101 | dir: "/path/to/file", 102 | base: "thefile.log", 103 | ext: ".log", 104 | name: "thefile" 105 | }, 106 | keepFileExt: true 107 | }); 108 | it("should take an index, date and return a filename", () => { 109 | fileNameFormatter({ 110 | index: 0, 111 | date: "2019-07-15" 112 | }).should.eql(normalize("/path/to/file/thefile.log")); 113 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 114 | normalize("/path/to/file/thefile.2019-07-15.log") 115 | ); 116 | fileNameFormatter({ 117 | index: 2, 118 | date: "2019-07-16" 119 | }).should.eql(normalize("/path/to/file/thefile.2019-07-16.log")); 120 | }); 121 | }); 122 | 123 | describe("with the keepFileExt, alwaysIncludeDate options", () => { 124 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 125 | file: { 126 | dir: "/path/to/file", 127 | base: "thefile.log", 128 | ext: ".log", 129 | name: "thefile" 130 | }, 131 | keepFileExt: true, 132 | alwaysIncludeDate: true 133 | }); 134 | it("should take an index, date and return a filename", () => { 135 | fileNameFormatter({ 136 | index: 0, 137 | date: "2019-07-15" 138 | 
}).should.eql(normalize("/path/to/file/thefile.2019-07-15.log")); 139 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 140 | normalize("/path/to/file/thefile.2019-07-15.log") 141 | ); 142 | fileNameFormatter({ 143 | index: 2, 144 | date: "2019-07-16" 145 | }).should.eql(normalize("/path/to/file/thefile.2019-07-16.log")); 146 | }); 147 | }); 148 | 149 | describe("with the compress option", () => { 150 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 151 | file: { 152 | dir: "/path/to/file", 153 | base: "thefile.log", 154 | ext: ".log", 155 | name: "thefile" 156 | }, 157 | compress: true 158 | }); 159 | it("should take an index and return a filename", () => { 160 | fileNameFormatter({ 161 | index: 0, 162 | date: "2019-07-15" 163 | }).should.eql(normalize("/path/to/file/thefile.log")); 164 | fileNameFormatter({ index: 1 }).should.eql( 165 | normalize("/path/to/file/thefile.log.1.gz") 166 | ); 167 | fileNameFormatter({ 168 | index: 2 169 | }).should.eql(normalize("/path/to/file/thefile.log.2.gz")); 170 | }); 171 | }); 172 | 173 | describe("with the compress option and a date", () => { 174 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 175 | file: { 176 | dir: "/path/to/file", 177 | base: "thefile.log", 178 | ext: ".log", 179 | name: "thefile" 180 | }, 181 | compress: true 182 | }); 183 | it("should take an index, date and return a filename", () => { 184 | fileNameFormatter({ 185 | index: 0, 186 | date: "2019-07-15" 187 | }).should.eql(normalize("/path/to/file/thefile.log")); 188 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 189 | normalize("/path/to/file/thefile.log.2019-07-15.gz") 190 | ); 191 | fileNameFormatter({ 192 | index: 2, 193 | date: "2019-07-16" 194 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-16.gz")); 195 | }); 196 | }); 197 | 198 | describe("with the compress, alwaysIncludeDate option and a date", () => { 199 | const fileNameFormatter = 
require("../lib/fileNameFormatter")({ 200 | file: { 201 | dir: "/path/to/file", 202 | base: "thefile.log", 203 | ext: ".log", 204 | name: "thefile" 205 | }, 206 | compress: true, 207 | alwaysIncludeDate: true 208 | }); 209 | it("should take an index, date and return a filename", () => { 210 | fileNameFormatter({ 211 | index: 0, 212 | date: "2019-07-15" 213 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-15")); 214 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 215 | normalize("/path/to/file/thefile.log.2019-07-15.gz") 216 | ); 217 | fileNameFormatter({ 218 | index: 2, 219 | date: "2019-07-16" 220 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-16.gz")); 221 | }); 222 | }); 223 | 224 | describe("with the compress, alwaysIncludeDate, keepFileExt option and a date", () => { 225 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 226 | file: { 227 | dir: "/path/to/file", 228 | base: "thefile.log", 229 | ext: ".log", 230 | name: "thefile" 231 | }, 232 | compress: true, 233 | alwaysIncludeDate: true, 234 | keepFileExt: true 235 | }); 236 | it("should take an index, date and return a filename", () => { 237 | fileNameFormatter({ 238 | index: 0, 239 | date: "2019-07-15" 240 | }).should.eql(normalize("/path/to/file/thefile.2019-07-15.log")); 241 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 242 | normalize("/path/to/file/thefile.2019-07-15.log.gz") 243 | ); 244 | fileNameFormatter({ 245 | index: 2, 246 | date: "2019-07-16" 247 | }).should.eql(normalize("/path/to/file/thefile.2019-07-16.log.gz")); 248 | }); 249 | }); 250 | 251 | describe("with the needsIndex option", () => { 252 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 253 | file: { 254 | dir: "/path/to/file", 255 | base: "thefile.log", 256 | ext: ".log", 257 | name: "thefile" 258 | }, 259 | compress: true, 260 | needsIndex: true, 261 | alwaysIncludeDate: true, 262 | keepFileExt: true 263 | }); 264 | it("should take an 
index, date and return a filename", () => { 265 | fileNameFormatter({ 266 | index: 0, 267 | date: "2019-07-15" 268 | }).should.eql(normalize("/path/to/file/thefile.2019-07-15.log")); 269 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 270 | normalize("/path/to/file/thefile.2019-07-15.1.log.gz") 271 | ); 272 | fileNameFormatter({ 273 | index: 2, 274 | date: "2019-07-16" 275 | }).should.eql(normalize("/path/to/file/thefile.2019-07-16.2.log.gz")); 276 | }); 277 | }); 278 | 279 | describe("with a date and needsIndex", () => { 280 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 281 | file: { 282 | dir: "/path/to/file", 283 | base: "thefile.log", 284 | ext: ".log", 285 | name: "thefile" 286 | }, 287 | needsIndex: true 288 | }); 289 | it("should take an index, date and return a filename", () => { 290 | fileNameFormatter({ index: 0, date: "2019-07-15" }).should.eql( 291 | normalize("/path/to/file/thefile.log") 292 | ); 293 | fileNameFormatter({ 294 | index: 2, 295 | date: "2019-07-16" 296 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-16.2")); 297 | }); 298 | }); 299 | 300 | describe("with the alwaysIncludeDate, needsIndex option", () => { 301 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 302 | file: { 303 | dir: "/path/to/file", 304 | base: "thefile.log", 305 | ext: ".log", 306 | name: "thefile" 307 | }, 308 | needsIndex: true, 309 | alwaysIncludeDate: true 310 | }); 311 | it("should take an index, date and return a filename", () => { 312 | fileNameFormatter({ 313 | index: 0, 314 | date: "2019-07-15" 315 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-15")); 316 | fileNameFormatter({ index: 0, date: "2019-07-15" }).should.eql( 317 | normalize("/path/to/file/thefile.log.2019-07-15") 318 | ); 319 | fileNameFormatter({ 320 | index: 2, 321 | date: "2019-07-16" 322 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-16.2")); 323 | }); 324 | }); 325 | 326 | describe("with the 
keepFileExt, needsIndex option", () => { 327 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 328 | file: { 329 | dir: "/path/to/file", 330 | base: "thefile.log", 331 | ext: ".log", 332 | name: "thefile" 333 | }, 334 | needsIndex: true, 335 | keepFileExt: true 336 | }); 337 | it("should take an index, date and return a filename", () => { 338 | fileNameFormatter({ 339 | index: 0, 340 | date: "2019-07-15" 341 | }).should.eql(normalize("/path/to/file/thefile.log")); 342 | fileNameFormatter({ index: 1 }).should.eql(normalize("/path/to/file/thefile.1.log")); 343 | fileNameFormatter({ index: 2 }).should.eql(normalize("/path/to/file/thefile.2.log")); 344 | fileNameFormatter({ index: 15 }).should.eql( 345 | normalize("/path/to/file/thefile.15.log") 346 | ); 347 | }); 348 | }); 349 | 350 | describe("with the keepFileExt, needsIndex option and a date", () => { 351 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 352 | file: { 353 | dir: "/path/to/file", 354 | base: "thefile.log", 355 | ext: ".log", 356 | name: "thefile" 357 | }, 358 | needsIndex: true, 359 | keepFileExt: true 360 | }); 361 | it("should take an index, date and return a filename", () => { 362 | fileNameFormatter({ 363 | index: 0, 364 | date: "2019-07-15" 365 | }).should.eql(normalize("/path/to/file/thefile.log")); 366 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 367 | normalize("/path/to/file/thefile.2019-07-15.1.log") 368 | ); 369 | fileNameFormatter({ 370 | index: 2, 371 | date: "2019-07-16" 372 | }).should.eql(normalize("/path/to/file/thefile.2019-07-16.2.log")); 373 | }); 374 | }); 375 | 376 | describe("with the keepFileExt, needsIndex, alwaysIncludeDate options", () => { 377 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 378 | file: { 379 | dir: "/path/to/file", 380 | base: "thefile.log", 381 | ext: ".log", 382 | name: "thefile" 383 | }, 384 | needsIndex: true, 385 | keepFileExt: true, 386 | alwaysIncludeDate: true 387 | }); 388 | 
it("should take an index, date and return a filename", () => { 389 | fileNameFormatter({ 390 | index: 0, 391 | date: "2019-07-15" 392 | }).should.eql(normalize("/path/to/file/thefile.2019-07-15.log")); 393 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 394 | normalize("/path/to/file/thefile.2019-07-15.1.log") 395 | ); 396 | fileNameFormatter({ 397 | index: 2, 398 | date: "2019-07-16" 399 | }).should.eql(normalize("/path/to/file/thefile.2019-07-16.2.log")); 400 | }); 401 | }); 402 | 403 | describe("with the compress, needsIndex option", () => { 404 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 405 | file: { 406 | dir: "/path/to/file", 407 | base: "thefile.log", 408 | ext: ".log", 409 | name: "thefile" 410 | }, 411 | needsIndex: true, 412 | compress: true 413 | }); 414 | it("should take an index and return a filename", () => { 415 | fileNameFormatter({ 416 | index: 0, 417 | date: "2019-07-15" 418 | }).should.eql(normalize("/path/to/file/thefile.log")); 419 | fileNameFormatter({ index: 1 }).should.eql( 420 | normalize("/path/to/file/thefile.log.1.gz") 421 | ); 422 | fileNameFormatter({ 423 | index: 2 424 | }).should.eql(normalize("/path/to/file/thefile.log.2.gz")); 425 | }); 426 | }); 427 | 428 | describe("with the compress, needsIndex option and a date", () => { 429 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 430 | file: { 431 | dir: "/path/to/file", 432 | base: "thefile.log", 433 | ext: ".log", 434 | name: "thefile" 435 | }, 436 | needsIndex: true, 437 | compress: true 438 | }); 439 | it("should take an index, date and return a filename", () => { 440 | fileNameFormatter({ 441 | index: 0, 442 | date: "2019-07-15" 443 | }).should.eql(normalize("/path/to/file/thefile.log")); 444 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 445 | normalize("/path/to/file/thefile.log.2019-07-15.1.gz") 446 | ); 447 | fileNameFormatter({ 448 | index: 2, 449 | date: "2019-07-16" 450 | 
}).should.eql(normalize("/path/to/file/thefile.log.2019-07-16.2.gz")); 451 | }); 452 | }); 453 | 454 | describe("with the compress, alwaysIncludeDate, needsIndex option and a date", () => { 455 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 456 | file: { 457 | dir: "/path/to/file", 458 | base: "thefile.log", 459 | ext: ".log", 460 | name: "thefile" 461 | }, 462 | needsIndex: true, 463 | compress: true, 464 | alwaysIncludeDate: true 465 | }); 466 | it("should take an index, date and return a filename", () => { 467 | fileNameFormatter({ 468 | index: 0, 469 | date: "2019-07-15" 470 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-15")); 471 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 472 | normalize("/path/to/file/thefile.log.2019-07-15.1.gz") 473 | ); 474 | fileNameFormatter({ 475 | index: 2, 476 | date: "2019-07-16" 477 | }).should.eql(normalize("/path/to/file/thefile.log.2019-07-16.2.gz")); 478 | }); 479 | }); 480 | 481 | describe("with the compress, alwaysIncludeDate, keepFileExt, needsIndex option and a date", () => { 482 | const fileNameFormatter = require("../lib/fileNameFormatter")({ 483 | file: { 484 | dir: "/path/to/file", 485 | base: "thefile.log", 486 | ext: ".log", 487 | name: "thefile" 488 | }, 489 | needsIndex: true, 490 | compress: true, 491 | alwaysIncludeDate: true, 492 | keepFileExt: true 493 | }); 494 | it("should take an index, date and return a filename", () => { 495 | fileNameFormatter({ 496 | index: 0, 497 | date: "2019-07-15" 498 | }).should.eql(normalize("/path/to/file/thefile.2019-07-15.log")); 499 | fileNameFormatter({ index: 1, date: "2019-07-15" }).should.eql( 500 | normalize("/path/to/file/thefile.2019-07-15.1.log.gz") 501 | ); 502 | fileNameFormatter({ 503 | index: 2, 504 | date: "2019-07-16" 505 | }).should.eql(normalize("/path/to/file/thefile.2019-07-16.2.log.gz")); 506 | }); 507 | }); 508 | }); 509 | -------------------------------------------------------------------------------- 
/test/fileNameParser-test.js:
--------------------------------------------------------------------------------
// Unit tests for lib/fileNameParser: given a rolled-log filename, the parser
// returns { filename, index, isCompressed } — plus date/timestamp when a date
// pattern is configured — or a falsy value when the name does not belong to
// the configured log file.
const should = require("should");

describe("fileNameParser", () => {
  describe("with default options", () => {
    const parser = require("../lib/fileNameParser")({
      file: {
        dir: "/path/to/file",
        base: "thefile.log",
        ext: ".log",
        name: "thefile"
      }
    });
    it("should return null for filenames that do not match", () => {
      should(parser("cheese.txt")).not.be.ok();
      should(parser("thefile.log.biscuits")).not.be.ok();
    });
    it("should take a filename and return the index", () => {
      parser("thefile.log.2").should.eql({
        filename: "thefile.log.2",
        index: 2,
        isCompressed: false
      });
      parser("thefile.log.2.gz").should.eql({
        filename: "thefile.log.2.gz",
        index: 2,
        isCompressed: true
      });
    });
  });

  describe("with pattern option", () => {
    const parser = require("../lib/fileNameParser")({
      file: {
        dir: "/path/to/file",
        base: "thefile.log",
        ext: ".log",
        name: "thefile"
      },
      pattern: "yyyy-MM-dd"
    });
    it("should return null for files that do not match", () => {
      should(parser("thefile.log.biscuits")).not.be.ok();
      should(parser("thefile.log.2019")).not.be.ok();
      should(parser("thefile.log.3.2")).not.be.ok();
      should(parser("thefile.log.04-18")).not.be.ok();
      should(parser("anotherfile.log.2020-04-18")).not.be.ok();
      should(parser("2020-05-18")).not.be.ok();
    });
    it("should take a filename and return the date", () => {
      // NOTE: new Date(2019, 6, 17) — the month argument is 0-indexed, so 6 is July.
      parser("thefile.log.2019-07-17").should.eql({
        filename: "thefile.log.2019-07-17",
        index: 0,
        date: "2019-07-17",
        timestamp: new Date(2019, 6, 17).getTime(),
        isCompressed: false
      });
      parser("thefile.log.gz").should.eql({
        filename: "thefile.log.gz",
        index: 0,
        isCompressed: true
      });
    });
    it("should take a filename and return both date and index", () => {
      parser("thefile.log.2019-07-17.2").should.eql({
        filename: "thefile.log.2019-07-17.2",
        index: 2,
        date: "2019-07-17",
        timestamp: new Date(2019, 6, 17).getTime(),
        isCompressed: false
      });
      parser("thefile.log.2019-07-17.2.gz").should.eql({
        filename: "thefile.log.2019-07-17.2.gz",
        index: 2,
        date: "2019-07-17",
        timestamp: new Date(2019, 6, 17).getTime(),
        isCompressed: true
      });
    });
  });

  describe("with keepFileExt option", () => {
    const parser = require("../lib/fileNameParser")({
      file: {
        dir: "/path/to/file",
        base: "thefile.log",
        ext: ".log",
        name: "thefile"
      },
      keepFileExt: true
    });
    it("should take a filename and return the index", () => {
      // With keepFileExt the index sits before the extension, so the
      // "suffix after .log" forms must NOT parse.
      should(parser("thefile.log.2")).not.be.ok();
      should(parser("thefile.log.2.gz")).not.be.ok();
      parser("thefile.2.log").should.eql({
        filename: "thefile.2.log",
        index: 2,
        isCompressed: false
      });
      parser("thefile.2.log.gz").should.eql({
        filename: "thefile.2.log.gz",
        index: 2,
        isCompressed: true
      });
    });
  });

  describe("with a two-digit date pattern", () => {
    const parser = require("../lib/fileNameParser")({
      file: {
        dir: "/path/to/file",
        base: "thing.log",
        ext: ".log",
        name: "thing"
      },
      pattern: "mm"
    });
    it("should take a filename and return the date", () => {
      // "mm" is a minutes pattern; the expected timestamp is built from the
      // date-format epoch new Date(0, 0) with only the minutes applied.
      const expectedTimestamp = new Date(0, 0);
      expectedTimestamp.setMinutes(34);
      parser("thing.log.34").should.eql({
        filename: "thing.log.34",
        date: "34",
        isCompressed: false,
        index: 0,
        timestamp: expectedTimestamp.getTime()
      });
    });
  })

  describe("with a four-digit date pattern", () => {
    const parser = require("../lib/fileNameParser")({
      file: {
        dir: "/path/to/file",
        base: "stuff.log",
        ext: ".log",
        name: "stuff"
      },
      pattern: "mm-ss"
    });
    it("should return null for files that do not match", () => {
      should(parser("stuff.log.2020-04-18")).not.be.ok();
      should(parser("09-18")).not.be.ok();
    });
    it("should take a filename and return the date", () => {
      const expectedTimestamp = new Date(0, 0);
      expectedTimestamp.setMinutes(34);
      expectedTimestamp.setSeconds(59);
      parser("stuff.log.34-59").should.eql({
        filename: "stuff.log.34-59",
        date: "34-59",
        isCompressed: false,
        index: 0,
        timestamp: expectedTimestamp.getTime()
      });
    });
    it("should take a filename and return both date and index", () => {
      const expectedTimestamp_1 = new Date(0, 0);
      expectedTimestamp_1.setMinutes(7);
      expectedTimestamp_1.setSeconds(17);
      parser("stuff.log.07-17.2").should.eql({
        filename: "stuff.log.07-17.2",
        index: 2,
        date: "07-17",
        timestamp: expectedTimestamp_1.getTime(),
        isCompressed: false
      });
      const expectedTimestamp_2 = new Date(0, 0);
      expectedTimestamp_2.setMinutes(17);
      expectedTimestamp_2.setSeconds(30);
      parser("stuff.log.17-30.3.gz").should.eql({
        filename: "stuff.log.17-30.3.gz",
        index: 3,
        date: "17-30",
        timestamp: expectedTimestamp_2.getTime(),
        isCompressed: true
      });
    });
  })

});
--------------------------------------------------------------------------------
/test/moveAndMaybeCompressFile-test.js:
--------------------------------------------------------------------------------
// Tests for lib/moveAndMaybeCompressFile: moves a log file to a destination,
// optionally gzip-compressing it on the way.
require("should");

const fs = require('fs-extra');
const path = require('path');
const zlib = require('zlib');
const proxyquire = require('proxyquire').noPreserveCache();
const moveAndMaybeCompressFile = require('../lib/moveAndMaybeCompressFile');
// Randomised per-run scratch directory so stale or concurrent runs cannot collide.
const TEST_DIR =
path.join(__dirname, `moveAndMaybeCompressFile_${Math.floor(Math.random()*10000)}`);

// Gunzip a buffer and resolve with its UTF-8 string contents.
// Rejects with the real zlib error on corrupt input — the previous inline
// callbacks ignored the error argument, so a bad gzip surfaced as an opaque
// "Cannot read properties of undefined" TypeError instead.
const gunzip = (buffer) =>
  new Promise((resolve, reject) => {
    zlib.gunzip(buffer, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data.toString());
      }
    });
  });

describe('moveAndMaybeCompressFile', () => {
  // Fresh, empty working directory for every test; removed once at the end.
  beforeEach(async () => {
    await fs.emptyDir(TEST_DIR);
  });

  after(async () => {
    await fs.remove(TEST_DIR);
  });

  it('should move the source file to a new destination', async () => {
    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log');
    await fs.outputFile(source, 'This is the test file.');
    await moveAndMaybeCompressFile(source, destination);

    const contents = await fs.readFile(destination, 'utf8');
    contents.should.equal('This is the test file.');

    const exists = await fs.pathExists(source);
    exists.should.be.false();
  });

  it('should compress the source file at the new destination', async () => {
    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    const moveAndCompressOptions = {compress: true};
    await moveAndMaybeCompressFile(source, destination, moveAndCompressOptions);

    const contents = await gunzip(await fs.readFile(destination));
    contents.should.equal('This is the test file.');

    const exists = await fs.pathExists(source);
    exists.should.be.false();
  });

  it('should do nothing if the source file and destination are the same', async () => {
    const source = path.join(TEST_DIR, 'pants.log');
    const destination = path.join(TEST_DIR, 'pants.log');
    await fs.outputFile(source, 'This is the test file.');
    await moveAndMaybeCompressFile(source, destination);

    (await fs.readFile(source, 'utf8')).should.equal('This is the test file.');
  });

  it('should do nothing if the source file does not exist', async () => {
    const source = path.join(TEST_DIR, 'pants.log');
    const destination = path.join(TEST_DIR, 'moved-pants.log');
    await moveAndMaybeCompressFile(source, destination);

    (await fs.pathExists(destination)).should.be.false();
  });

  it('should do nothing if compress is concurrently in progress by another process', async () => {
    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    // simulate another process has already started writing the destination file
    await fs.outputFile(destination, 'Compressed file.');
    const options = {compress: true};
    await moveAndMaybeCompressFile(source, destination, options);

    (await fs.readFile(source, 'utf8')).should.equal('This is the test file.', 'source file should remain intact');
    (await fs.readFile(destination, 'utf8')).should.equal('Compressed file.', 'destination file should remain');
  });

  it('should remove destination file if readstream error', async () => {
    const moveWithMock = proxyquire('../lib/moveAndMaybeCompressFile', {
      "fs-extra": {
        createReadStream: (...args) => {
          if (args[0]) {
            // replace test.log with a non-existent file to simulate readstream error
            // (dot escaped: the old `new RegExp('test.log$')` matched any character)
            args[0] = args[0].replace(/test\.log$/, 'non-exist.log');
          }
          return fs.createReadStream(...args);
        }
      }
    });

    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    const options = {compress: true};
    await moveWithMock(source, destination, options);

    (await fs.readFile(source, 'utf8')).should.equal('This is the test file.', 'source file should remain intact');
    (await fs.pathExists(destination)).should.be.false('destination file should be removed');
  });

  it('should have destination file if readstream error and remove fails', async () => {
    const moveWithMock = proxyquire('../lib/moveAndMaybeCompressFile', {
      "fs-extra": {
        createReadStream: (...args) => {
          if (args[0]) {
            // replace test.log with a non-existent file to simulate readstream error
            args[0] = args[0].replace(/test\.log$/, 'non-exist.log');
          }
          return fs.createReadStream(...args);
        },
        unlink: () => Promise.reject({ code: 'EBUSY', message: 'all gone wrong'}),
      }
    });

    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    const options = {compress: true};
    await moveWithMock(source, destination, options);

    (await fs.readFile(source, 'utf8')).should.equal('This is the test file.', 'source file should remain intact');
    (await fs.readFile(destination, 'utf8')).should.equal('', 'destination file should remain');
  });

  it('should use copy+truncate if source file is locked (windows)', async () => {
    const moveWithMock = proxyquire('../lib/moveAndMaybeCompressFile', {
      "fs-extra": {
        exists: () => Promise.resolve(true),
        move: () => Promise.reject({ code: 'EBUSY', message: 'all gone wrong'}),
        copy: (fs.copy.bind(fs)),
        truncate: (fs.truncate.bind(fs))
      }
    });

    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log');
    await fs.outputFile(source, 'This is the test file.');
    await moveWithMock(source, destination);

    const contents = await fs.readFile(destination, 'utf8');
    contents.should.equal('This is the test file.');

    // won't delete the source, but it will be empty
    (await fs.readFile(source, 'utf8')).should.be.empty();
  });

  it('should not throw unhandled promise rejection when doing copy+truncate', async () => {
    const moveWithMock = proxyquire('../lib/moveAndMaybeCompressFile', {
      "fs-extra": {
        exists: () => Promise.resolve(true),
        move: () => Promise.reject({ code: 'EBUSY', message: 'all gone wrong'}),
        copy: () => Promise.reject({ code: 'ENOENT', message: 'file deleted halfway'}),
        truncate: (fs.truncate.bind(fs))
      }
    });

    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log');
    await fs.outputFile(source, 'This is the test file.');
    await moveWithMock(source, destination).should.not.be.rejected();
  });

  it('should truncate file if remove fails when compressed (windows)', async () => {
    const moveWithMock = proxyquire('../lib/moveAndMaybeCompressFile', {
      "fs-extra": {
        exists: () => Promise.resolve(true),
        unlink: () => Promise.reject({ code: 'EBUSY', message: 'all gone wrong'}),
        createReadStream: fs.createReadStream.bind(fs),
        truncate: fs.truncate.bind(fs)
      }
    });

    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    const options = {compress: true};
    await moveWithMock(source, destination, options);

    const contents = await gunzip(await fs.readFile(destination));
    contents.should.equal('This is the test file.');

    // won't delete the source, but it will be empty
    (await fs.readFile(source, 'utf8')).should.be.empty();
  });

  it('should not affect source file if remove and truncate fails when compressed (windows)', async () => {
    const moveWithMock = proxyquire('../lib/moveAndMaybeCompressFile', {
      "fs-extra": {
        exists: () => Promise.resolve(true),
        unlink: () => Promise.reject({ code: 'EBUSY', message: 'all gone wrong'}),
        createReadStream: fs.createReadStream.bind(fs),
        truncate: () => Promise.reject({ code: 'EBUSY', message: 'all gone wrong'}),
      }
    });

    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    const options = {compress: true};
    await moveWithMock(source, destination, options);

    const contents = await gunzip(await fs.readFile(destination));
    contents.should.equal('This is the test file.');

    // won't delete or truncate the source
    (await fs.readFile(source, 'utf8')).should.equal('This is the test file.', 'source file should remain intact');
  });

  it('should compress the source file at the new destination with 0o744 rights', async () => {
    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    const moveAndCompressOptions = {compress: true, mode:0o744};
    await moveAndMaybeCompressFile(source, destination, moveAndCompressOptions);

    const destinationStats = await fs.stat(destination);
    const destMode = (destinationStats.mode & 0o777).toString(8);
    destMode.should.equalOneOf('744', '666'); // windows does not use unix file modes

    const contents = await gunzip(await fs.readFile(destination));
    contents.should.equal('This is the test file.');

    const exists = await fs.pathExists(source);
    exists.should.be.false();
  });

  it('should compress the source file at the new destination with 0o400 rights', async () => {
    const source = path.join(TEST_DIR, 'test.log');
    const destination = path.join(TEST_DIR, 'moved-test.log.gz');
    await fs.outputFile(source, 'This is the test file.');
    const moveAndCompressOptions = {compress: true, mode:0o400};
    await moveAndMaybeCompressFile(source, destination, moveAndCompressOptions);

    const destinationStats = await fs.stat(destination);
    const destMode = (destinationStats.mode & 0o777).toString(8);
    destMode.should.equalOneOf('400', '444'); // windows does not use unix file modes

    const contents = await gunzip(await fs.readFile(destination));
    contents.should.equal('This is the test file.');

    const exists = await fs.pathExists(source);
    exists.should.be.false();
  });
});