├── .commitlintrc.js ├── .eslintrc.js ├── .eslintrc.local.js ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug.yml │ └── config.yml ├── actions │ ├── create-check │ │ └── action.yml │ └── install-latest-npm │ │ └── action.yml ├── dependabot.yml ├── matchers │ └── tap.json ├── settings.yml └── workflows │ ├── audit.yml │ ├── ci-release.yml │ ├── ci.yml │ ├── codeql-analysis.yml │ ├── post-dependabot.yml │ ├── pull-request.yml │ ├── release-integration.yml │ └── release.yml ├── .gitignore ├── .npmrc ├── .release-please-manifest.json ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE.md ├── README.md ├── SECURITY.md ├── lib ├── content │ ├── path.js │ ├── read.js │ ├── rm.js │ └── write.js ├── entry-index.js ├── get.js ├── index.js ├── memoization.js ├── put.js ├── rm.js ├── util │ ├── glob.js │ ├── hash-to-segments.js │ └── tmp.js └── verify.js ├── package.json ├── release-please-config.json └── test ├── content ├── read.js ├── rm.js └── write.js ├── entry-index.find.js ├── entry-index.insert.js ├── entry-index.js ├── fixtures ├── cache-content.js └── cache-index.js ├── get.js ├── ls.js ├── memoization.js ├── put.js ├── rm.js ├── util └── tmp.js └── verify.js /.commitlintrc.js: -------------------------------------------------------------------------------- 1 | /* This file is automatically added by @npmcli/template-oss. Do not edit. */ 2 | 3 | module.exports = { 4 | extends: ['@commitlint/config-conventional'], 5 | rules: { 6 | 'type-enum': [2, 'always', ['feat', 'fix', 'docs', 'deps', 'chore']], 7 | 'header-max-length': [2, 'always', 80], 8 | 'subject-case': [0], 9 | 'body-max-line-length': [0], 10 | 'footer-max-line-length': [0], 11 | }, 12 | } 13 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | /* This file is automatically added by @npmcli/template-oss. Do not edit. */ 2 | 3 | 'use strict' 4 | 5 | const { readdirSync: readdir } = require('fs') 6 | 7 | const localConfigs = readdir(__dirname) 8 | .filter((file) => file.startsWith('.eslintrc.local.')) 9 | .map((file) => `./${file}`) 10 | 11 | module.exports = { 12 | root: true, 13 | ignorePatterns: [ 14 | 'tap-testdir*/', 15 | ], 16 | extends: [ 17 | '@npmcli', 18 | ...localConfigs, 19 | ], 20 | } 21 | -------------------------------------------------------------------------------- /.eslintrc.local.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = { 4 | rules: { 5 | strict: 'error', 6 | 'no-shadow': 0, // XXX: fix this later 7 | }, 8 | } 9 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | * @npm/cli-team 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: Bug 4 | description: File a bug/issue 5 | title: "[BUG] " 6 | labels: [ Bug, Needs Triage ] 7 | 8 | body: 9 | - type: checkboxes 10 | attributes: 11 | label: Is there an existing issue for this? 12 | description: Please [search here](./issues) to see if an issue already exists for your problem. 
13 | options: 14 | - label: I have searched the existing issues 15 | required: true 16 | - type: textarea 17 | attributes: 18 | label: Current Behavior 19 | description: A clear & concise description of what you're experiencing. 20 | validations: 21 | required: false 22 | - type: textarea 23 | attributes: 24 | label: Expected Behavior 25 | description: A clear & concise description of what you expected to happen. 26 | validations: 27 | required: false 28 | - type: textarea 29 | attributes: 30 | label: Steps To Reproduce 31 | description: Steps to reproduce the behavior. 32 | value: | 33 | 1. In this environment... 34 | 2. With this config... 35 | 3. Run '...' 36 | 4. See error... 37 | validations: 38 | required: false 39 | - type: textarea 40 | attributes: 41 | label: Environment 42 | description: | 43 | examples: 44 | - **npm**: 7.6.3 45 | - **Node**: 13.14.0 46 | - **OS**: Ubuntu 20.04 47 | - **platform**: Macbook Pro 48 | value: | 49 | - npm: 50 | - Node: 51 | - OS: 52 | - platform: 53 | validations: 54 | required: false 55 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | blank_issues_enabled: true 4 | -------------------------------------------------------------------------------- /.github/actions/create-check/action.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: 'Create Check' 4 | inputs: 5 | name: 6 | required: true 7 | token: 8 | required: true 9 | sha: 10 | required: true 11 | check-name: 12 | default: '' 13 | outputs: 14 | check-id: 15 | value: ${{ steps.create-check.outputs.check_id }} 16 | runs: 17 | using: "composite" 18 | steps: 19 | - name: Get Workflow Job 20 | uses: actions/github-script@v7 21 | id: workflow 22 | env: 23 | JOB_NAME: "${{ inputs.name }}" 24 | SHA: "${{ inputs.sha }}" 25 | with: 26 | result-encoding: string 27 | script: | 28 | const { repo: { owner, repo}, runId, serverUrl } = context 29 | const { JOB_NAME, SHA } = process.env 30 | 31 | const job = await github.rest.actions.listJobsForWorkflowRun({ 32 | owner, 33 | repo, 34 | run_id: runId, 35 | per_page: 100 36 | }).then(r => r.data.jobs.find(j => j.name.endsWith(JOB_NAME))) 37 | 38 | return [ 39 | `This check is assosciated with ${serverUrl}/${owner}/${repo}/commit/${SHA}.`, 40 | 'Run logs:', 41 | job?.html_url || `could not be found for a job ending with: "${JOB_NAME}"`, 42 | ].join(' ') 43 | - name: Create Check 44 | uses: LouisBrunner/checks-action@v1.6.0 45 | id: create-check 46 | with: 47 | token: ${{ inputs.token }} 48 | sha: ${{ inputs.sha }} 49 | status: in_progress 50 | name: ${{ inputs.check-name || inputs.name }} 51 | output: | 52 | {"summary":"${{ steps.workflow.outputs.result }}"} 53 | -------------------------------------------------------------------------------- /.github/actions/install-latest-npm/action.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 
2 | 3 | name: 'Install Latest npm' 4 | description: 'Install the latest version of npm compatible with the Node version' 5 | inputs: 6 | node: 7 | description: 'Current Node version' 8 | required: true 9 | runs: 10 | using: "composite" 11 | steps: 12 | # node 10/12/14 ship with npm@6, which is known to fail when updating itself in windows 13 | - name: Update Windows npm 14 | if: | 15 | runner.os == 'Windows' && ( 16 | startsWith(inputs.node, 'v10.') || 17 | startsWith(inputs.node, 'v12.') || 18 | startsWith(inputs.node, 'v14.') 19 | ) 20 | shell: cmd 21 | run: | 22 | curl -sO https://registry.npmjs.org/npm/-/npm-7.5.4.tgz 23 | tar xf npm-7.5.4.tgz 24 | cd package 25 | node lib/npm.js install --no-fund --no-audit -g ..\npm-7.5.4.tgz 26 | cd .. 27 | rmdir /s /q package 28 | - name: Install Latest npm 29 | shell: bash 30 | env: 31 | NODE_VERSION: ${{ inputs.node }} 32 | working-directory: ${{ runner.temp }} 33 | run: | 34 | MATCH="" 35 | SPECS=("latest" "next-10" "next-9" "next-8" "next-7" "next-6") 36 | 37 | echo "node@$NODE_VERSION" 38 | 39 | for SPEC in ${SPECS[@]}; do 40 | ENGINES=$(npm view npm@$SPEC --json | jq -r '.engines.node') 41 | echo "Checking if node@$NODE_VERSION satisfies npm@$SPEC ($ENGINES)" 42 | 43 | if npx semver -r "$ENGINES" "$NODE_VERSION" > /dev/null; then 44 | MATCH=$SPEC 45 | echo "Found compatible version: npm@$MATCH" 46 | break 47 | fi 48 | done 49 | 50 | if [ -z $MATCH ]; then 51 | echo "Could not find a compatible version of npm for node@$NODE_VERSION" 52 | exit 1 53 | fi 54 | 55 | npm i --prefer-online --no-fund --no-audit -g npm@$MATCH 56 | - name: npm Version 57 | shell: bash 58 | run: npm -v 59 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | version: 2 4 | 5 | updates: 6 | - package-ecosystem: npm 7 | directory: / 8 | schedule: 9 | interval: daily 10 | target-branch: "main" 11 | allow: 12 | - dependency-type: direct 13 | versioning-strategy: increase-if-necessary 14 | commit-message: 15 | prefix: deps 16 | prefix-development: chore 17 | labels: 18 | - "Dependencies" 19 | open-pull-requests-limit: 10 20 | -------------------------------------------------------------------------------- /.github/matchers/tap.json: -------------------------------------------------------------------------------- 1 | { 2 | "//@npmcli/template-oss": "This file is automatically added by @npmcli/template-oss. Do not edit.", 3 | "problemMatcher": [ 4 | { 5 | "owner": "tap", 6 | "pattern": [ 7 | { 8 | "regexp": "^\\s*not ok \\d+ - (.*)", 9 | "message": 1 10 | }, 11 | { 12 | "regexp": "^\\s*---" 13 | }, 14 | { 15 | "regexp": "^\\s*at:" 16 | }, 17 | { 18 | "regexp": "^\\s*line:\\s*(\\d+)", 19 | "line": 1 20 | }, 21 | { 22 | "regexp": "^\\s*column:\\s*(\\d+)", 23 | "column": 1 24 | }, 25 | { 26 | "regexp": "^\\s*file:\\s*(.*)", 27 | "file": 1 28 | } 29 | ] 30 | } 31 | ] 32 | } 33 | -------------------------------------------------------------------------------- /.github/settings.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 
2 | 3 | repository: 4 | allow_merge_commit: false 5 | allow_rebase_merge: true 6 | allow_squash_merge: true 7 | squash_merge_commit_title: PR_TITLE 8 | squash_merge_commit_message: PR_BODY 9 | delete_branch_on_merge: true 10 | enable_automated_security_fixes: true 11 | enable_vulnerability_alerts: true 12 | 13 | branches: 14 | - name: main 15 | protection: 16 | required_status_checks: null 17 | enforce_admins: true 18 | block_creations: true 19 | required_pull_request_reviews: 20 | required_approving_review_count: 1 21 | require_code_owner_reviews: true 22 | require_last_push_approval: true 23 | dismiss_stale_reviews: true 24 | restrictions: 25 | apps: [] 26 | users: [] 27 | teams: [ "cli-team" ] 28 | -------------------------------------------------------------------------------- /.github/workflows/audit.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: Audit 4 | 5 | on: 6 | workflow_dispatch: 7 | schedule: 8 | # "At 08:00 UTC (01:00 PT) on Monday" https://crontab.guru/#0_8_*_*_1 9 | - cron: "0 8 * * 1" 10 | 11 | jobs: 12 | audit: 13 | name: Audit Dependencies 14 | if: github.repository_owner == 'npm' 15 | runs-on: ubuntu-latest 16 | defaults: 17 | run: 18 | shell: bash 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | - name: Setup Git User 23 | run: | 24 | git config --global user.email "npm-cli+bot@github.com" 25 | git config --global user.name "npm CLI robot" 26 | - name: Setup Node 27 | uses: actions/setup-node@v4 28 | id: node 29 | with: 30 | node-version: 22.x 31 | check-latest: contains('22.x', '.x') 32 | - name: Install Latest npm 33 | uses: ./.github/actions/install-latest-npm 34 | with: 35 | node: ${{ steps.node.outputs.node-version }} 36 | - name: Install Dependencies 37 | run: npm i --ignore-scripts --no-audit --no-fund --package-lock 38 | - name: Run Production Audit 39 | run: npm audit --omit=dev 40 | - name: Run Full Audit 41 | run: npm audit --audit-level=none 42 | -------------------------------------------------------------------------------- /.github/workflows/ci-release.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 
2 | 3 | name: CI - Release 4 | 5 | on: 6 | workflow_dispatch: 7 | inputs: 8 | ref: 9 | required: true 10 | type: string 11 | default: main 12 | workflow_call: 13 | inputs: 14 | ref: 15 | required: true 16 | type: string 17 | check-sha: 18 | required: true 19 | type: string 20 | 21 | jobs: 22 | lint-all: 23 | name: Lint All 24 | if: github.repository_owner == 'npm' 25 | runs-on: ubuntu-latest 26 | defaults: 27 | run: 28 | shell: bash 29 | steps: 30 | - name: Checkout 31 | uses: actions/checkout@v4 32 | with: 33 | ref: ${{ inputs.ref }} 34 | - name: Setup Git User 35 | run: | 36 | git config --global user.email "npm-cli+bot@github.com" 37 | git config --global user.name "npm CLI robot" 38 | - name: Create Check 39 | id: create-check 40 | if: ${{ inputs.check-sha }} 41 | uses: ./.github/actions/create-check 42 | with: 43 | name: "Lint All" 44 | token: ${{ secrets.GITHUB_TOKEN }} 45 | sha: ${{ inputs.check-sha }} 46 | - name: Setup Node 47 | uses: actions/setup-node@v4 48 | id: node 49 | with: 50 | node-version: 22.x 51 | check-latest: contains('22.x', '.x') 52 | - name: Install Latest npm 53 | uses: ./.github/actions/install-latest-npm 54 | with: 55 | node: ${{ steps.node.outputs.node-version }} 56 | - name: Install Dependencies 57 | run: npm i --ignore-scripts --no-audit --no-fund 58 | - name: Lint 59 | run: npm run lint --ignore-scripts 60 | - name: Post Lint 61 | run: npm run postlint --ignore-scripts 62 | - name: Conclude Check 63 | uses: LouisBrunner/checks-action@v1.6.0 64 | if: steps.create-check.outputs.check-id && always() 65 | with: 66 | token: ${{ secrets.GITHUB_TOKEN }} 67 | conclusion: ${{ job.status }} 68 | check_id: ${{ steps.create-check.outputs.check-id }} 69 | 70 | test-all: 71 | name: Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }} 72 | if: github.repository_owner == 'npm' 73 | strategy: 74 | fail-fast: false 75 | matrix: 76 | platform: 77 | - name: Linux 78 | os: ubuntu-latest 79 | shell: bash 80 | - name: macOS 81 | os: macos-latest 82 | shell: bash 83 | - name: macOS 84 | os: macos-13 85 | shell: bash 86 | node-version: 87 | - 18.17.0 88 | - 18.x 89 | - 20.5.0 90 | - 20.x 91 | - 22.x 92 | exclude: 93 | - platform: { name: macOS, os: macos-13, shell: bash } 94 | node-version: 18.17.0 95 | - platform: { name: macOS, os: macos-13, shell: bash } 96 | node-version: 18.x 97 | - platform: { name: macOS, os: macos-13, shell: bash } 98 | node-version: 20.5.0 99 | - platform: { name: macOS, os: macos-13, shell: bash } 100 | node-version: 20.x 101 | - platform: { name: macOS, os: macos-13, shell: bash } 102 | node-version: 22.x 103 | runs-on: ${{ matrix.platform.os }} 104 | defaults: 105 | run: 106 | shell: ${{ matrix.platform.shell }} 107 | steps: 108 | - name: Checkout 109 | uses: actions/checkout@v4 110 | with: 111 | ref: ${{ inputs.ref }} 112 | - name: Setup Git User 113 | run: | 114 | git config --global user.email "npm-cli+bot@github.com" 115 | git config --global user.name "npm CLI robot" 116 | - name: Create Check 117 | id: create-check 118 | if: ${{ inputs.check-sha }} 119 | uses: ./.github/actions/create-check 120 | with: 121 | name: "Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}" 122 | token: ${{ secrets.GITHUB_TOKEN }} 123 | sha: ${{ inputs.check-sha }} 124 | - name: Setup Node 125 | uses: actions/setup-node@v4 126 | id: node 127 | with: 128 | node-version: ${{ matrix.node-version }} 129 | check-latest: contains(matrix.node-version, '.x') 130 | - name: Install Latest npm 131 | uses: ./.github/actions/install-latest-npm 132 | 
with: 133 | node: ${{ steps.node.outputs.node-version }} 134 | - name: Install Dependencies 135 | run: npm i --ignore-scripts --no-audit --no-fund 136 | - name: Add Problem Matcher 137 | run: echo "::add-matcher::.github/matchers/tap.json" 138 | - name: Test 139 | run: npm test --ignore-scripts 140 | - name: Conclude Check 141 | uses: LouisBrunner/checks-action@v1.6.0 142 | if: steps.create-check.outputs.check-id && always() 143 | with: 144 | token: ${{ secrets.GITHUB_TOKEN }} 145 | conclusion: ${{ job.status }} 146 | check_id: ${{ steps.create-check.outputs.check-id }} 147 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: CI 4 | 5 | on: 6 | workflow_dispatch: 7 | pull_request: 8 | push: 9 | branches: 10 | - main 11 | schedule: 12 | # "At 09:00 UTC (02:00 PT) on Monday" https://crontab.guru/#0_9_*_*_1 13 | - cron: "0 9 * * 1" 14 | 15 | jobs: 16 | lint: 17 | name: Lint 18 | if: github.repository_owner == 'npm' 19 | runs-on: ubuntu-latest 20 | defaults: 21 | run: 22 | shell: bash 23 | steps: 24 | - name: Checkout 25 | uses: actions/checkout@v4 26 | - name: Setup Git User 27 | run: | 28 | git config --global user.email "npm-cli+bot@github.com" 29 | git config --global user.name "npm CLI robot" 30 | - name: Setup Node 31 | uses: actions/setup-node@v4 32 | id: node 33 | with: 34 | node-version: 22.x 35 | check-latest: contains('22.x', '.x') 36 | - name: Install Latest npm 37 | uses: ./.github/actions/install-latest-npm 38 | with: 39 | node: ${{ steps.node.outputs.node-version }} 40 | - name: Install Dependencies 41 | run: npm i --ignore-scripts --no-audit --no-fund 42 | - name: Lint 43 | run: npm run lint --ignore-scripts 44 | - name: Post Lint 45 | run: npm run postlint --ignore-scripts 46 | 47 | test: 48 | name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }} 49 | if: github.repository_owner == 'npm' 50 | strategy: 51 | fail-fast: false 52 | matrix: 53 | platform: 54 | - name: Linux 55 | os: ubuntu-latest 56 | shell: bash 57 | - name: macOS 58 | os: macos-latest 59 | shell: bash 60 | - name: macOS 61 | os: macos-13 62 | shell: bash 63 | node-version: 64 | - 18.17.0 65 | - 18.x 66 | - 20.5.0 67 | - 20.x 68 | - 22.x 69 | exclude: 70 | - platform: { name: macOS, os: macos-13, shell: bash } 71 | node-version: 18.17.0 72 | - platform: { name: macOS, os: macos-13, shell: bash } 73 | node-version: 18.x 74 | - platform: { name: macOS, os: macos-13, shell: bash } 75 | node-version: 20.5.0 76 | - platform: { name: macOS, os: macos-13, shell: bash } 77 | node-version: 20.x 78 | - platform: { name: macOS, os: macos-13, shell: bash } 79 | node-version: 22.x 80 | runs-on: ${{ matrix.platform.os }} 81 | defaults: 82 | run: 83 | shell: ${{ matrix.platform.shell }} 84 | steps: 85 | - name: Checkout 86 | uses: actions/checkout@v4 87 | - name: Setup Git User 88 | run: | 89 | git config --global user.email "npm-cli+bot@github.com" 90 | git config --global user.name "npm CLI robot" 91 | - name: Setup Node 92 | uses: actions/setup-node@v4 93 | id: node 94 | with: 95 | node-version: ${{ matrix.node-version }} 96 | check-latest: contains(matrix.node-version, '.x') 97 | - name: Install Latest npm 98 | uses: ./.github/actions/install-latest-npm 99 | with: 100 | node: ${{ steps.node.outputs.node-version }} 101 | - name: Install Dependencies 102 | run: npm i --ignore-scripts 
--no-audit --no-fund 103 | - name: Add Problem Matcher 104 | run: echo "::add-matcher::.github/matchers/tap.json" 105 | - name: Test 106 | run: npm test --ignore-scripts 107 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: CodeQL 4 | 5 | on: 6 | push: 7 | branches: 8 | - main 9 | pull_request: 10 | branches: 11 | - main 12 | schedule: 13 | # "At 10:00 UTC (03:00 PT) on Monday" https://crontab.guru/#0_10_*_*_1 14 | - cron: "0 10 * * 1" 15 | 16 | jobs: 17 | analyze: 18 | name: Analyze 19 | runs-on: ubuntu-latest 20 | permissions: 21 | actions: read 22 | contents: read 23 | security-events: write 24 | steps: 25 | - name: Checkout 26 | uses: actions/checkout@v4 27 | - name: Setup Git User 28 | run: | 29 | git config --global user.email "npm-cli+bot@github.com" 30 | git config --global user.name "npm CLI robot" 31 | - name: Initialize CodeQL 32 | uses: github/codeql-action/init@v3 33 | with: 34 | languages: javascript 35 | - name: Perform CodeQL Analysis 36 | uses: github/codeql-action/analyze@v3 37 | -------------------------------------------------------------------------------- /.github/workflows/post-dependabot.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: Post Dependabot 4 | 5 | on: pull_request 6 | 7 | permissions: 8 | contents: write 9 | 10 | jobs: 11 | template-oss: 12 | name: template-oss 13 | if: github.repository_owner == 'npm' && github.actor == 'dependabot[bot]' 14 | runs-on: ubuntu-latest 15 | defaults: 16 | run: 17 | shell: bash 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v4 21 | with: 22 | ref: ${{ github.event.pull_request.head.ref }} 23 | - name: Setup Git User 24 | run: | 25 | git config --global user.email "npm-cli+bot@github.com" 26 | git config --global user.name "npm CLI robot" 27 | - name: Setup Node 28 | uses: actions/setup-node@v4 29 | id: node 30 | with: 31 | node-version: 22.x 32 | check-latest: contains('22.x', '.x') 33 | - name: Install Latest npm 34 | uses: ./.github/actions/install-latest-npm 35 | with: 36 | node: ${{ steps.node.outputs.node-version }} 37 | - name: Install Dependencies 38 | run: npm i --ignore-scripts --no-audit --no-fund 39 | - name: Fetch Dependabot Metadata 40 | id: metadata 41 | uses: dependabot/fetch-metadata@v1 42 | with: 43 | github-token: ${{ secrets.GITHUB_TOKEN }} 44 | 45 | # Dependabot can update multiple directories so we output which directory 46 | # it is acting on so we can run the command for the correct root or workspace 47 | - name: Get Dependabot Directory 48 | if: contains(steps.metadata.outputs.dependency-names, '@npmcli/template-oss') 49 | id: flags 50 | run: | 51 | dependabot_dir="${{ steps.metadata.outputs.directory }}" 52 | if [[ "$dependabot_dir" == "/" || "$dependabot_dir" == "/main" ]]; then 53 | echo "workspace=-iwr" >> $GITHUB_OUTPUT 54 | else 55 | # strip leading slash from directory so it works as a 56 | # a path to the workspace flag 57 | echo "workspace=-w ${dependabot_dir#/}" >> $GITHUB_OUTPUT 58 | fi 59 | 60 | - name: Apply Changes 61 | if: steps.flags.outputs.workspace 62 | id: apply 63 | run: | 64 | npm run template-oss-apply ${{ steps.flags.outputs.workspace }} 65 | if [[ `git status --porcelain` ]]; then 66 | echo 
"changes=true" >> $GITHUB_OUTPUT 67 | fi 68 | # This only sets the conventional commit prefix. This workflow can't reliably determine 69 | # what the breaking change is though. If a BREAKING CHANGE message is required then 70 | # this PR check will fail and the commit will be amended with stafftools 71 | if [[ "${{ steps.metadata.outputs.update-type }}" == "version-update:semver-major" ]]; then 72 | prefix='feat!' 73 | else 74 | prefix='chore' 75 | fi 76 | echo "message=$prefix: postinstall for dependabot template-oss PR" >> $GITHUB_OUTPUT 77 | 78 | # This step will fail if template-oss has made any workflow updates. It is impossible 79 | # for a workflow to update other workflows. In the case it does fail, we continue 80 | # and then try to apply only a portion of the changes in the next step 81 | - name: Push All Changes 82 | if: steps.apply.outputs.changes 83 | id: push 84 | continue-on-error: true 85 | env: 86 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 87 | run: | 88 | git commit -am "${{ steps.apply.outputs.message }}" 89 | git push 90 | 91 | # If the previous step failed, then reset the commit and remove any workflow changes 92 | # and attempt to commit and push again. This is helpful because we will have a commit 93 | # with the correct prefix that we can then --amend with @npmcli/stafftools later. 94 | - name: Push All Changes Except Workflows 95 | if: steps.apply.outputs.changes && steps.push.outcome == 'failure' 96 | env: 97 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 98 | run: | 99 | git reset HEAD~ 100 | git checkout HEAD -- .github/workflows/ 101 | git clean -fd .github/workflows/ 102 | git commit -am "${{ steps.apply.outputs.message }}" 103 | git push 104 | 105 | # Check if all the necessary template-oss changes were applied. Since we continued 106 | # on errors in one of the previous steps, this check will fail if our follow up 107 | # only applied a portion of the changes and we need to followup manually. 108 | # 109 | # Note that this used to run `lint` and `postlint` but that will fail this action 110 | # if we've also shipped any linting changes separate from template-oss. We do 111 | # linting in another action, so we want to fail this one only if there are 112 | # template-oss changes that could not be applied. 113 | - name: Check Changes 114 | if: steps.apply.outputs.changes 115 | run: | 116 | npm exec --offline ${{ steps.flags.outputs.workspace }} -- template-oss-check 117 | 118 | - name: Fail on Breaking Change 119 | if: steps.apply.outputs.changes && startsWith(steps.apply.outputs.message, 'feat!') 120 | run: | 121 | echo "This PR has a breaking change. Run 'npx -p @npmcli/stafftools gh template-oss-fix'" 122 | echo "for more information on how to fix this with a BREAKING CHANGE footer." 123 | exit 1 124 | -------------------------------------------------------------------------------- /.github/workflows/pull-request.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 
2 | 3 | name: Pull Request 4 | 5 | on: 6 | pull_request: 7 | types: 8 | - opened 9 | - reopened 10 | - edited 11 | - synchronize 12 | 13 | jobs: 14 | commitlint: 15 | name: Lint Commits 16 | if: github.repository_owner == 'npm' 17 | runs-on: ubuntu-latest 18 | defaults: 19 | run: 20 | shell: bash 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v4 24 | with: 25 | fetch-depth: 0 26 | - name: Setup Git User 27 | run: | 28 | git config --global user.email "npm-cli+bot@github.com" 29 | git config --global user.name "npm CLI robot" 30 | - name: Setup Node 31 | uses: actions/setup-node@v4 32 | id: node 33 | with: 34 | node-version: 22.x 35 | check-latest: contains('22.x', '.x') 36 | - name: Install Latest npm 37 | uses: ./.github/actions/install-latest-npm 38 | with: 39 | node: ${{ steps.node.outputs.node-version }} 40 | - name: Install Dependencies 41 | run: npm i --ignore-scripts --no-audit --no-fund 42 | - name: Run Commitlint on Commits 43 | id: commit 44 | continue-on-error: true 45 | run: npx --offline commitlint -V --from 'origin/${{ github.base_ref }}' --to ${{ github.event.pull_request.head.sha }} 46 | - name: Run Commitlint on PR Title 47 | if: steps.commit.outcome == 'failure' 48 | env: 49 | PR_TITLE: ${{ github.event.pull_request.title }} 50 | run: echo "$PR_TITLE" | npx --offline commitlint -V 51 | -------------------------------------------------------------------------------- /.github/workflows/release-integration.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: Release Integration 4 | 5 | on: 6 | workflow_dispatch: 7 | inputs: 8 | releases: 9 | required: true 10 | type: string 11 | description: 'A json array of releases. Required fields: publish: tagName, publishTag. publish check: pkgName, version' 12 | workflow_call: 13 | inputs: 14 | releases: 15 | required: true 16 | type: string 17 | description: 'A json array of releases. Required fields: publish: tagName, publishTag. publish check: pkgName, version' 18 | secrets: 19 | PUBLISH_TOKEN: 20 | required: true 21 | 22 | jobs: 23 | publish: 24 | name: Publish 25 | runs-on: ubuntu-latest 26 | defaults: 27 | run: 28 | shell: bash 29 | permissions: 30 | id-token: write 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@v4 34 | with: 35 | ref: ${{ fromJSON(inputs.releases)[0].tagName }} 36 | - name: Setup Git User 37 | run: | 38 | git config --global user.email "npm-cli+bot@github.com" 39 | git config --global user.name "npm CLI robot" 40 | - name: Setup Node 41 | uses: actions/setup-node@v4 42 | id: node 43 | with: 44 | node-version: 22.x 45 | check-latest: contains('22.x', '.x') 46 | - name: Install Latest npm 47 | uses: ./.github/actions/install-latest-npm 48 | with: 49 | node: ${{ steps.node.outputs.node-version }} 50 | - name: Install Dependencies 51 | run: npm i --ignore-scripts --no-audit --no-fund 52 | - name: Set npm authToken 53 | run: npm config set '//registry.npmjs.org/:_authToken'=\${PUBLISH_TOKEN} 54 | - name: Publish 55 | env: 56 | PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }} 57 | RELEASES: ${{ inputs.releases }} 58 | run: | 59 | EXIT_CODE=0 60 | 61 | for release in $(echo $RELEASES | jq -r '.[] | @base64'); do 62 | PUBLISH_TAG=$(echo "$release" | base64 --decode | jq -r .publishTag) 63 | npm publish --provenance --tag="$PUBLISH_TAG" 64 | STATUS=$? 
65 | if [[ "$STATUS" -eq 1 ]]; then 66 | EXIT_CODE=$STATUS 67 | fi 68 | done 69 | 70 | exit $EXIT_CODE 71 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | name: Release 4 | 5 | on: 6 | push: 7 | branches: 8 | - main 9 | 10 | permissions: 11 | contents: write 12 | pull-requests: write 13 | checks: write 14 | 15 | jobs: 16 | release: 17 | outputs: 18 | pr: ${{ steps.release.outputs.pr }} 19 | pr-branch: ${{ steps.release.outputs.pr-branch }} 20 | pr-number: ${{ steps.release.outputs.pr-number }} 21 | pr-sha: ${{ steps.release.outputs.pr-sha }} 22 | releases: ${{ steps.release.outputs.releases }} 23 | comment-id: ${{ steps.create-comment.outputs.comment-id || steps.update-comment.outputs.comment-id }} 24 | check-id: ${{ steps.create-check.outputs.check-id }} 25 | name: Release 26 | if: github.repository_owner == 'npm' 27 | runs-on: ubuntu-latest 28 | defaults: 29 | run: 30 | shell: bash 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@v4 34 | - name: Setup Git User 35 | run: | 36 | git config --global user.email "npm-cli+bot@github.com" 37 | git config --global user.name "npm CLI robot" 38 | - name: Setup Node 39 | uses: actions/setup-node@v4 40 | id: node 41 | with: 42 | node-version: 22.x 43 | check-latest: contains('22.x', '.x') 44 | - name: Install Latest npm 45 | uses: ./.github/actions/install-latest-npm 46 | with: 47 | node: ${{ steps.node.outputs.node-version }} 48 | - name: Install Dependencies 49 | run: npm i --ignore-scripts --no-audit --no-fund 50 | - name: Release Please 51 | id: release 52 | env: 53 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 54 | run: npx --offline template-oss-release-please --branch="${{ github.ref_name }}" --backport="" --defaultTag="latest" 55 | - name: Create Release Manager Comment Text 56 | if: steps.release.outputs.pr-number 57 | uses: actions/github-script@v7 58 | id: comment-text 59 | with: 60 | result-encoding: string 61 | script: | 62 | const { runId, repo: { owner, repo } } = context 63 | const { data: workflow } = await github.rest.actions.getWorkflowRun({ owner, repo, run_id: runId }) 64 | return['## Release Manager', `Release workflow run: ${workflow.html_url}`].join('\n\n') 65 | - name: Find Release Manager Comment 66 | uses: peter-evans/find-comment@v2 67 | if: steps.release.outputs.pr-number 68 | id: found-comment 69 | with: 70 | issue-number: ${{ steps.release.outputs.pr-number }} 71 | comment-author: 'github-actions[bot]' 72 | body-includes: '## Release Manager' 73 | - name: Create Release Manager Comment 74 | id: create-comment 75 | if: steps.release.outputs.pr-number && !steps.found-comment.outputs.comment-id 76 | uses: peter-evans/create-or-update-comment@v3 77 | with: 78 | issue-number: ${{ steps.release.outputs.pr-number }} 79 | body: ${{ steps.comment-text.outputs.result }} 80 | - name: Update Release Manager Comment 81 | id: update-comment 82 | if: steps.release.outputs.pr-number && steps.found-comment.outputs.comment-id 83 | uses: peter-evans/create-or-update-comment@v3 84 | with: 85 | comment-id: ${{ steps.found-comment.outputs.comment-id }} 86 | body: ${{ steps.comment-text.outputs.result }} 87 | edit-mode: 'replace' 88 | - name: Create Check 89 | id: create-check 90 | uses: ./.github/actions/create-check 91 | if: steps.release.outputs.pr-sha 92 | with: 93 | name: "Release" 94 | token: ${{ 
secrets.GITHUB_TOKEN }} 95 | sha: ${{ steps.release.outputs.pr-sha }} 96 | 97 | update: 98 | needs: release 99 | outputs: 100 | sha: ${{ steps.commit.outputs.sha }} 101 | check-id: ${{ steps.create-check.outputs.check-id }} 102 | name: Update - Release 103 | if: github.repository_owner == 'npm' && needs.release.outputs.pr 104 | runs-on: ubuntu-latest 105 | defaults: 106 | run: 107 | shell: bash 108 | steps: 109 | - name: Checkout 110 | uses: actions/checkout@v4 111 | with: 112 | fetch-depth: 0 113 | ref: ${{ needs.release.outputs.pr-branch }} 114 | - name: Setup Git User 115 | run: | 116 | git config --global user.email "npm-cli+bot@github.com" 117 | git config --global user.name "npm CLI robot" 118 | - name: Setup Node 119 | uses: actions/setup-node@v4 120 | id: node 121 | with: 122 | node-version: 22.x 123 | check-latest: contains('22.x', '.x') 124 | - name: Install Latest npm 125 | uses: ./.github/actions/install-latest-npm 126 | with: 127 | node: ${{ steps.node.outputs.node-version }} 128 | - name: Install Dependencies 129 | run: npm i --ignore-scripts --no-audit --no-fund 130 | - name: Create Release Manager Checklist Text 131 | id: comment-text 132 | env: 133 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 134 | run: npm exec --offline -- template-oss-release-manager --pr="${{ needs.release.outputs.pr-number }}" --backport="" --defaultTag="latest" --publish 135 | - name: Append Release Manager Comment 136 | uses: peter-evans/create-or-update-comment@v3 137 | with: 138 | comment-id: ${{ needs.release.outputs.comment-id }} 139 | body: ${{ steps.comment-text.outputs.result }} 140 | edit-mode: 'append' 141 | - name: Run Post Pull Request Actions 142 | env: 143 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 144 | run: npm run rp-pull-request --ignore-scripts --if-present -- --pr="${{ needs.release.outputs.pr-number }}" --commentId="${{ needs.release.outputs.comment-id }}" 145 | - name: Commit 146 | id: commit 147 | env: 148 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 149 | run: | 150 | git commit --all --amend --no-edit || true 151 | git push --force-with-lease 152 | echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT 153 | - name: Create Check 154 | id: create-check 155 | uses: ./.github/actions/create-check 156 | with: 157 | name: "Update - Release" 158 | check-name: "Release" 159 | token: ${{ secrets.GITHUB_TOKEN }} 160 | sha: ${{ steps.commit.outputs.sha }} 161 | - name: Conclude Check 162 | uses: LouisBrunner/checks-action@v1.6.0 163 | with: 164 | token: ${{ secrets.GITHUB_TOKEN }} 165 | conclusion: ${{ job.status }} 166 | check_id: ${{ needs.release.outputs.check-id }} 167 | 168 | ci: 169 | name: CI - Release 170 | needs: [ release, update ] 171 | if: needs.release.outputs.pr 172 | uses: ./.github/workflows/ci-release.yml 173 | with: 174 | ref: ${{ needs.release.outputs.pr-branch }} 175 | check-sha: ${{ needs.update.outputs.sha }} 176 | 177 | post-ci: 178 | needs: [ release, update, ci ] 179 | name: Post CI - Release 180 | if: github.repository_owner == 'npm' && needs.release.outputs.pr && always() 181 | runs-on: ubuntu-latest 182 | defaults: 183 | run: 184 | shell: bash 185 | steps: 186 | - name: Get CI Conclusion 187 | id: conclusion 188 | run: | 189 | result="" 190 | if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then 191 | result="failure" 192 | elif [[ "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then 193 | result="cancelled" 194 | else 195 | result="success" 196 | fi 197 | echo "result=$result" >> $GITHUB_OUTPUT 198 | - name: Conclude Check 199 | 
uses: LouisBrunner/checks-action@v1.6.0 200 | with: 201 | token: ${{ secrets.GITHUB_TOKEN }} 202 | conclusion: ${{ steps.conclusion.outputs.result }} 203 | check_id: ${{ needs.update.outputs.check-id }} 204 | 205 | post-release: 206 | needs: release 207 | outputs: 208 | comment-id: ${{ steps.create-comment.outputs.comment-id }} 209 | name: Post Release - Release 210 | if: github.repository_owner == 'npm' && needs.release.outputs.releases 211 | runs-on: ubuntu-latest 212 | defaults: 213 | run: 214 | shell: bash 215 | steps: 216 | - name: Create Release PR Comment Text 217 | id: comment-text 218 | uses: actions/github-script@v7 219 | env: 220 | RELEASES: ${{ needs.release.outputs.releases }} 221 | with: 222 | result-encoding: string 223 | script: | 224 | const releases = JSON.parse(process.env.RELEASES) 225 | const { runId, repo: { owner, repo } } = context 226 | const issue_number = releases[0].prNumber 227 | const runUrl = `https://github.com/${owner}/${repo}/actions/runs/${runId}` 228 | 229 | return [ 230 | '## Release Workflow\n', 231 | ...releases.map(r => `- \`${r.pkgName}@${r.version}\` ${r.url}`), 232 | `- Workflow run: :arrows_counterclockwise: ${runUrl}`, 233 | ].join('\n') 234 | - name: Create Release PR Comment 235 | id: create-comment 236 | uses: peter-evans/create-or-update-comment@v3 237 | with: 238 | issue-number: ${{ fromJSON(needs.release.outputs.releases)[0].prNumber }} 239 | body: ${{ steps.comment-text.outputs.result }} 240 | 241 | release-integration: 242 | needs: release 243 | name: Release Integration 244 | if: needs.release.outputs.releases 245 | uses: ./.github/workflows/release-integration.yml 246 | permissions: 247 | id-token: write 248 | secrets: 249 | PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }} 250 | with: 251 | releases: ${{ needs.release.outputs.releases }} 252 | 253 | post-release-integration: 254 | needs: [ release, release-integration, post-release ] 255 | name: Post Release Integration - Release 256 | if: github.repository_owner == 'npm' && needs.release.outputs.releases && always() 257 | runs-on: ubuntu-latest 258 | defaults: 259 | run: 260 | shell: bash 261 | steps: 262 | - name: Get Post Release Conclusion 263 | id: conclusion 264 | run: | 265 | if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then 266 | result="x" 267 | elif [[ "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then 268 | result="heavy_multiplication_x" 269 | else 270 | result="white_check_mark" 271 | fi 272 | echo "result=$result" >> $GITHUB_OUTPUT 273 | - name: Find Release PR Comment 274 | uses: peter-evans/find-comment@v2 275 | id: found-comment 276 | with: 277 | issue-number: ${{ fromJSON(needs.release.outputs.releases)[0].prNumber }} 278 | comment-author: 'github-actions[bot]' 279 | body-includes: '## Release Workflow' 280 | - name: Create Release PR Comment Text 281 | id: comment-text 282 | if: steps.found-comment.outputs.comment-id 283 | uses: actions/github-script@v7 284 | env: 285 | RESULT: ${{ steps.conclusion.outputs.result }} 286 | BODY: ${{ steps.found-comment.outputs.comment-body }} 287 | with: 288 | result-encoding: string 289 | script: | 290 | const { RESULT, BODY } = process.env 291 | const body = [BODY.replace(/(Workflow run: :)[a-z_]+(:)/, `$1${RESULT}$2`)] 292 | if (RESULT !== 'white_check_mark') { 293 | body.push(':rotating_light::rotating_light::rotating_light:') 294 | body.push([ 295 | '@npm/cli-team: The post-release workflow failed for this release.', 296 | 'Manual steps may need to be taken after examining the workflow output.' 
297 | ].join(' ')) 298 | body.push(':rotating_light::rotating_light::rotating_light:') 299 | } 300 | return body.join('\n\n').trim() 301 | - name: Update Release PR Comment 302 | if: steps.comment-text.outputs.result 303 | uses: peter-evans/create-or-update-comment@v3 304 | with: 305 | comment-id: ${{ steps.found-comment.outputs.comment-id }} 306 | body: ${{ steps.comment-text.outputs.result }} 307 | edit-mode: 'replace' 308 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | # ignore everything in the root 4 | /* 5 | 6 | !**/.gitignore 7 | !/.commitlintrc.js 8 | !/.eslintrc.js 9 | !/.eslintrc.local.* 10 | !/.git-blame-ignore-revs 11 | !/.github/ 12 | !/.gitignore 13 | !/.npmrc 14 | !/.prettierignore 15 | !/.prettierrc.js 16 | !/.release-please-manifest.json 17 | !/bin/ 18 | !/CHANGELOG* 19 | !/CODE_OF_CONDUCT.md 20 | !/CONTRIBUTING.md 21 | !/docs/ 22 | !/lib/ 23 | !/LICENSE* 24 | !/map.js 25 | !/package.json 26 | !/README* 27 | !/release-please-config.json 28 | !/scripts/ 29 | !/SECURITY.md 30 | !/tap-snapshots/ 31 | !/test/ 32 | !/tsconfig.json 33 | tap-testdir*/ 34 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | ; This file is automatically added by @npmcli/template-oss. Do not edit. 2 | 3 | package-lock=false 4 | -------------------------------------------------------------------------------- /.release-please-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | ".": "19.0.1" 3 | } 4 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | <!-- This file is automatically added by @npmcli/template-oss. Do not edit. --> 2 | 3 | All interactions in this repo are covered by the [npm Code of 4 | Conduct](https://docs.npmjs.com/policies/conduct) 5 | 6 | The npm cli team may, at its own discretion, moderate, remove, or edit 7 | any interactions such as pull requests, issues, and comments. 8 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | <!-- This file is automatically added by @npmcli/template-oss. Do not edit. --> 2 | 3 | # Contributing 4 | 5 | ## Code of Conduct 6 | 7 | All interactions in the **npm** organization on GitHub are considered to be covered by our standard [Code of Conduct](https://docs.npmjs.com/policies/conduct). 8 | 9 | ## Reporting Bugs 10 | 11 | Before submitting a new bug report please search for an existing or similar report. 12 | 13 | Use one of our existing issue templates if you believe you've come across a unique problem. 14 | 15 | Duplicate issues, or issues that don't use one of our templates may get closed without a response. 16 | 17 | ## Pull Request Conventions 18 | 19 | ### Commits 20 | 21 | We use [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/). 22 | 23 | When opening a pull request please be sure that either the pull request title, or each commit in the pull request, has one of the following prefixes: 24 | 25 | - `feat`: For when introducing a new feature. 
The result will be a new semver minor version of the package when it is next published. 26 | - `fix`: For bug fixes. The result will be a new semver patch version of the package when it is next published. 27 | - `docs`: For documentation updates. The result will be a new semver patch version of the package when it is next published. 28 | - `chore`: For changes that do not affect the published module. Often these are changes to tests. The result will be *no* change to the version of the package when it is next published (as the commit does not affect the published version). 29 | 30 | ### Test Coverage 31 | 32 | Pull requests made against this repo will run `npm test` automatically. Please make sure tests pass locally before submitting a PR. 33 | 34 | Every new feature or bug fix should come with a corresponding test or tests that validate the solutions. Testing also reports on code coverage and will fail if code coverage drops. 35 | 36 | ### Linting 37 | 38 | Linting is also done automatically once tests pass. `npm run lintfix` will fix most linting errors automatically. 39 | 40 | Please make sure linting passes before submitting a PR. 41 | 42 | ## What _not_ to contribute? 43 | 44 | ### Dependencies 45 | 46 | It should be noted that our team does not accept third-party dependency updates/PRs. If you submit a PR trying to update our dependencies we will close it with or without a reference to these contribution guidelines. 47 | 48 | ### Tools/Automation 49 | 50 | Our core team is responsible for the maintenance of the tooling/automation in this project and we ask contributors to not make changes to these when contributing (e.g. `.github/*`, `.eslintrc.json`, `.licensee.json`). Most of those files also have a header at the top to remind folks they are automatically generated. Pull requests that alter these will not be accepted. 51 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | ISC License 2 | 3 | Copyright (c) npm, Inc. 4 | 5 | Permission to use, copy, modify, and/or distribute this software for 6 | any purpose with or without fee is hereby granted, provided that the 7 | above copyright notice and this permission notice appear in all copies. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS 10 | ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED 11 | WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE 12 | COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR 13 | CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS 14 | OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE 15 | OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE 16 | USE OR PERFORMANCE OF THIS SOFTWARE. 
17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest) 2 | 3 | [`cacache`](https://github.com/npm/cacache) is a Node.js library for managing 4 | local key and content address caches. It's really fast, really good at 5 | concurrency, and it will never give you corrupted data, even if cache files 6 | get corrupted or manipulated. 7 | 8 | On systems that support user and group settings on files, cacache will 9 | match the `uid` and `gid` values to the folder where the cache lives, even 10 | when running as `root`. 11 | 12 | It was written to be used as [npm](https://npm.im)'s local cache, but can 13 | just as easily be used on its own. 14 | 15 | ## Install 16 | 17 | `$ npm install --save cacache` 18 | 19 | ## Table of Contents 20 | 21 | * [Example](#example) 22 | * [Features](#features) 23 | * [Contributing](#contributing) 24 | * [API](#api) 25 | * [Using localized APIs](#localized-api) 26 | * Reading 27 | * [`ls`](#ls) 28 | * [`ls.stream`](#ls-stream) 29 | * [`get`](#get-data) 30 | * [`get.stream`](#get-stream) 31 | * [`get.info`](#get-info) 32 | * [`get.hasContent`](#get-hasContent) 33 | * Writing 34 | * [`put`](#put-data) 35 | * [`put.stream`](#put-stream) 36 | * [`rm.all`](#rm-all) 37 | * [`rm.entry`](#rm-entry) 38 | * [`rm.content`](#rm-content) 39 | * [`index.compact`](#index-compact) 40 | * [`index.insert`](#index-insert) 41 | * Utilities 42 | * [`clearMemoized`](#clear-memoized) 43 | * [`tmp.mkdir`](#tmp-mkdir) 44 | * [`tmp.withTmp`](#with-tmp) 45 | * Integrity 46 | * [Subresource Integrity](#integrity) 47 | * [`verify`](#verify) 48 | * [`verify.lastRun`](#verify-last-run) 49 | 50 | ### Example 51 | 52 | ```javascript 53 | const cacache = require('cacache') 54 | const fs = require('fs') 55 | 56 | const cachePath = '/tmp/my-toy-cache' 57 | const key = 'my-unique-key-1234' 58 | 59 | // Cache it! Use `cachePath` as the root of the content cache 60 | cacache.put(cachePath, key, '10293801983029384').then(integrity => { 61 | console.log(`Saved content to ${cachePath}.`) 62 | }) 63 | 64 | const destination = '/tmp/mytar.tgz' 65 | 66 | // Copy the contents out of the cache and into their destination! 67 | // But this time, use stream instead! 68 | cacache.get.stream( 69 | cachePath, key 70 | ).pipe( 71 | fs.createWriteStream(destination) 72 | ).on('finish', () => { 73 | console.log('done extracting!') 74 | }) 75 | 76 | // The same thing, but skip the key index. 
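// (here `integrityHash` stands for the integrity value resolved by the `cacache.put` call above)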
77 | cacache.get.byDigest(cachePath, integrityHash).then(data => { 78 | fs.writeFile(destination, data, err => { 79 | console.log('tarball data fetched based on its sha512sum and written out!') 80 | }) 81 | }) 82 | ``` 83 | 84 | ### Features 85 | 86 | * Extraction by key or by content address (shasum, etc) 87 | * [Subresource Integrity](#integrity) web standard support 88 | * Multi-hash support - safely host sha1, sha512, etc, in a single cache 89 | * Automatic content deduplication 90 | * Fault tolerance (immune to corruption, partial writes, process races, etc) 91 | * Consistency guarantees on read and write (full data verification) 92 | * Lockless, high-concurrency cache access 93 | * Streaming support 94 | * Promise support 95 | * Fast -- sub-millisecond reads and writes including verification 96 | * Arbitrary metadata storage 97 | * Garbage collection and additional offline verification 98 | * Thorough test coverage 99 | * There's probably a bloom filter in there somewhere. Those are cool, right? 🤔 100 | 101 | ### Contributing 102 | 103 | The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. 104 | 105 | All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. 106 | 107 | Please refer to the [Changelog](CHANGELOG.md) for project history details, too. 108 | 109 | Happy hacking! 110 | 111 | ### API 112 | 113 | #### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>` 114 | 115 | Lists info for all entries currently in the cache as a single large object. Each 116 | entry in the object will be keyed by the unique index key, with corresponding 117 | [`get.info`](#get-info) objects as the values. 118 | 119 | ##### Example 120 | 121 | ```javascript 122 | cacache.ls(cachePath).then(console.log) 123 | // Output 124 | { 125 | 'my-thing': { 126 | key: 'my-thing', 127 | integrity: 'sha512-BaSe64/EnCoDED+HAsh==' 128 | path: '.testcache/content/deadbeef', // joined with `cachePath` 129 | time: 12345698490, 130 | size: 4023948, 131 | metadata: { 132 | name: 'blah', 133 | version: '1.2.3', 134 | description: 'this was once a package but now it is my-thing' 135 | } 136 | }, 137 | 'other-thing': { 138 | key: 'other-thing', 139 | integrity: 'sha1-ANothER+hasH=', 140 | path: '.testcache/content/bada55', 141 | time: 11992309289, 142 | size: 111112 143 | } 144 | } 145 | ``` 146 | 147 | #### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable` 148 | 149 | Lists info for all entries currently in the cache as a single large object. 150 | 151 | This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are 152 | returned as `'data'` events on the returned stream. 
153 | 154 | ##### Example 155 | 156 | ```javascript 157 | cacache.ls.stream(cachePath).on('data', console.log) 158 | // Output 159 | { 160 | key: 'my-thing', 161 | integrity: 'sha512-BaSe64HaSh', 162 | path: '.testcache/content/deadbeef', // joined with `cachePath` 163 | time: 12345698490, 164 | size: 13423, 165 | metadata: { 166 | name: 'blah', 167 | version: '1.2.3', 168 | description: 'this was once a package but now it is my-thing' 169 | } 170 | } 171 | 172 | { 173 | key: 'other-thing', 174 | integrity: 'whirlpool-WoWSoMuchSupport', 175 | path: '.testcache/content/bada55', 176 | time: 11992309289, 177 | size: 498023984029 178 | } 179 | 180 | { 181 | ... 182 | } 183 | ``` 184 | 185 | #### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})` 186 | 187 | Returns an object with the cached data, digest, and metadata identified by 188 | `key`. The `data` property of this object will be a `Buffer` instance that 189 | presumably holds some data that means something to you. I'm sure you know what 190 | to do with it! cacache just won't care. 191 | 192 | `integrity` is a [Subresource 193 | Integrity](#integrity) 194 | string. That is, a string that can be used to verify `data`, which looks like 195 | `<hash-algorithm>-<base64-integrity-hash>`. 196 | 197 | If there is no content identified by `key`, or if the locally-stored data does 198 | not pass the validity checksum, the promise will be rejected. 199 | 200 | A sub-function, `get.byDigest` may be used for identical behavior, except lookup 201 | will happen by integrity hash, bypassing the index entirely. This version of the 202 | function *only* returns `data` itself, without any wrapper. 203 | 204 | See: [options](#get-options) 205 | 206 | ##### Note 207 | 208 | This function loads the entire cache entry into memory before returning it. If 209 | you're dealing with Very Large data, consider using [`get.stream`](#get-stream) 210 | instead. 211 | 212 | ##### Example 213 | 214 | ```javascript 215 | // Look up by key 216 | cache.get(cachePath, 'my-thing').then(console.log) 217 | // Output: 218 | { 219 | metadata: { 220 | thingName: 'my' 221 | }, 222 | integrity: 'sha512-BaSe64HaSh', 223 | data: Buffer#<deadbeef>, 224 | size: 9320 225 | } 226 | 227 | // Look up by digest 228 | cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log) 229 | // Output: 230 | Buffer#<deadbeef> 231 | ``` 232 | 233 | #### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable` 234 | 235 | Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`. 236 | 237 | If there is no content identified by `key`, or if the locally-stored data does 238 | not pass the validity checksum, an error will be emitted. 239 | 240 | `metadata` and `integrity` events will be emitted before the stream closes, if 241 | you need to collect that extra data about the cached entry. 242 | 243 | A sub-function, `get.stream.byDigest` may be used for identical behavior, 244 | except lookup will happen by integrity hash, bypassing the index entirely. This 245 | version does not emit the `metadata` and `integrity` events at all. 
246 | 247 | See: [options](#get-options) 248 | 249 | ##### Example 250 | 251 | ```javascript 252 | // Look up by key 253 | cache.get.stream( 254 | cachePath, 'my-thing' 255 | ).on('metadata', metadata => { 256 | console.log('metadata:', metadata) 257 | }).on('integrity', integrity => { 258 | console.log('integrity:', integrity) 259 | }).pipe( 260 | fs.createWriteStream('./x.tgz') 261 | ) 262 | // Outputs: 263 | metadata: { ... } 264 | integrity: 'sha512-SoMeDIGest+64==' 265 | 266 | // Look up by digest 267 | cache.get.stream.byDigest( 268 | cachePath, 'sha512-SoMeDIGest+64==' 269 | ).pipe( 270 | fs.createWriteStream('./x.tgz') 271 | ) 272 | ``` 273 | 274 | #### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise` 275 | 276 | Looks up `key` in the cache index, returning information about the entry if 277 | one exists. 278 | 279 | ##### Fields 280 | 281 | * `key` - Key the entry was looked up under. Matches the `key` argument. 282 | * `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to. 283 | * `path` - Filesystem path where content is stored, joined with `cache` argument. 284 | * `time` - Timestamp the entry was first added on. 285 | * `metadata` - User-assigned metadata associated with the entry/content. 286 | 287 | ##### Example 288 | 289 | ```javascript 290 | cacache.get.info(cachePath, 'my-thing').then(console.log) 291 | 292 | // Output 293 | { 294 | key: 'my-thing', 295 | integrity: 'sha256-MUSTVERIFY+ALL/THINGS==' 296 | path: '.testcache/content/deadbeef', 297 | time: 12345698490, 298 | size: 849234, 299 | metadata: { 300 | name: 'blah', 301 | version: '1.2.3', 302 | description: 'this was once a package but now it is my-thing' 303 | } 304 | } 305 | ``` 306 | 307 | #### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise` 308 | 309 | Looks up a [Subresource Integrity hash](#integrity) in the cache. If content 310 | exists for this `integrity`, it will return an object, with the specific single integrity hash 311 | that was found in `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`. 312 | 313 | ##### Example 314 | 315 | ```javascript 316 | cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log) 317 | 318 | // Output 319 | { 320 | sri: { 321 | source: 'sha256-MUSTVERIFY+ALL/THINGS==', 322 | algorithm: 'sha256', 323 | digest: 'MUSTVERIFY+ALL/THINGS==', 324 | options: [] 325 | }, 326 | size: 9001 327 | } 328 | 329 | cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log) 330 | 331 | // Output 332 | false 333 | ``` 334 | 335 | ##### <a name="get-options"></a> Options 336 | 337 | ##### `opts.integrity` 338 | If present, the pre-calculated digest for the inserted content. If this option 339 | is provided and does not match the post-insertion digest, insertion will fail 340 | with an `EINTEGRITY` error. 341 | 342 | ##### `opts.memoize` 343 | 344 | Default: null 345 | 346 | If explicitly truthy, cacache will read from memory and memoize data on bulk read. If `false`, cacache will read from disk data. Reader functions by default read from in-memory cache. 347 | 348 | ##### `opts.size` 349 | If provided, the data stream will be verified to check that enough data was 350 | passed through. If there's more or less data than expected, insertion will fail 351 | with an `EBADSIZE` error. 
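Putting the read-side options above together, here is a minimal sketch (reusing the `cachePath` and `'my-thing'` key from the earlier examples) of a lookup that bypasses the in-memory memoization layer:

```javascript
// Force a read from disk, ignoring any memoized copy of this entry
cacache.get(cachePath, 'my-thing', { memoize: false }).then(({ data, metadata, integrity, size }) => {
  console.log(`read ${size} bytes, verified against ${integrity}`)
})
```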
352 | 353 | 354 | #### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise` 355 | 356 | Inserts data passed to it into the cache. The returned Promise resolves with a 357 | digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the 358 | cache entry has been successfully written. 359 | 360 | See: [options](#put-options) 361 | 362 | ##### Example 363 | 364 | ```javascript 365 | fetch( 366 | 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' 367 | ).then(data => { 368 | return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data) 369 | }).then(integrity => { 370 | console.log('integrity hash is', integrity) 371 | }) 372 | ``` 373 | 374 | #### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable` 375 | 376 | Returns a [Writable 377 | Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts 378 | data written to it into the cache. Emits an `integrity` event with the digest of 379 | written contents when it succeeds. 380 | 381 | See: [options](#put-options) 382 | 383 | ##### Example 384 | 385 | ```javascript 386 | request.get( 387 | 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' 388 | ).pipe( 389 | cacache.put.stream( 390 | cachePath, 'registry.npmjs.org|cacache@1.0.0' 391 | ).on('integrity', d => console.log(`integrity digest is ${d}`)) 392 | ) 393 | ``` 394 | 395 | ##### <a name="put-options"></a> Options 396 | 397 | ##### `opts.metadata` 398 | 399 | Arbitrary metadata to be attached to the inserted key. 400 | 401 | ##### `opts.size` 402 | 403 | If provided, the data stream will be verified to check that enough data was 404 | passed through. If there's more or less data than expected, insertion will fail 405 | with an `EBADSIZE` error. 406 | 407 | ##### `opts.integrity` 408 | 409 | If present, the pre-calculated digest for the inserted content. If this option 410 | is provided and does not match the post-insertion digest, insertion will fail 411 | with an `EINTEGRITY` error. 412 | 413 | `algorithms` has no effect if this option is present. 414 | 415 | ##### `opts.integrityEmitter` 416 | 417 | *Streaming only* If present, uses the provided event emitter as a source of 418 | truth for both integrity and size. This allows use cases where integrity is 419 | already being calculated outside of cacache to reuse that data instead of 420 | calculating it a second time. 421 | 422 | The emitter must emit both the `'integrity'` and `'size'` events. 423 | 424 | NOTE: If this option is provided, you must verify that you receive the correct 425 | integrity value yourself and emit an `'error'` event if there is a mismatch. 426 | [ssri Integrity Streams](https://github.com/npm/ssri#integrity-stream) do this for you when given an expected integrity. 427 | 428 | ##### `opts.algorithms` 429 | 430 | Default: ['sha512'] 431 | 432 | Hashing algorithms to use when calculating the [subresource integrity 433 | digest](#integrity) 434 | for inserted data. Can use any algorithm listed in `crypto.getHashes()` or 435 | `'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You 436 | may also use any anagram of `'modnar'` to use this feature. 437 | 438 | Currently only supports one algorithm at a time (i.e., an array length of 439 | exactly `1`). Has no effect if `opts.integrity` is present. 
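Putting a few of the options above together, a brief sketch (the key, URL, and metadata are invented for illustration):

```javascript
// Store a buffer under a key, attach some metadata, and get back the
// sha512 digest (the default algorithm) of what was written.
const body = Buffer.from('hello, cache')
cacache.put(cachePath, 'registry.example.com|hello@1.0.0', body, {
  metadata: { fetchedFrom: 'https://registry.example.com/hello' },
  algorithms: ['sha512'] // the default; only one algorithm at a time is supported
}).then(integrity => {
  console.log('stored with integrity', integrity)
})
```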
440 | 441 | ##### `opts.memoize` 442 | 443 | Default: null 444 | 445 | If provided, cacache will memoize the given cache insertion in memory, bypassing 446 | any filesystem checks for that key or digest in future cache fetches. Nothing 447 | will be written to the in-memory cache unless this option is explicitly truthy. 448 | 449 | If `opts.memoize` is an object or a `Map`-like (that is, an object with `get` 450 | and `set` methods), it will be written to instead of the global memoization 451 | cache. 452 | 453 | Reading from disk data can be forced by explicitly passing `memoize: false` to 454 | the reader functions, but their default will be to read from memory. 455 | 456 | ##### `opts.tmpPrefix` 457 | Default: null 458 | 459 | Prefix to append on the temporary directory name inside the cache's tmp dir. 460 | 461 | #### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise` 462 | 463 | Clears the entire cache. Mainly by blowing away the cache directory itself. 464 | 465 | ##### Example 466 | 467 | ```javascript 468 | cacache.rm.all(cachePath).then(() => { 469 | console.log('THE APOCALYPSE IS UPON US 😱') 470 | }) 471 | ``` 472 | 473 | #### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key, [opts]) -> Promise` 474 | 475 | Alias: `cacache.rm` 476 | 477 | Removes the index entry for `key`. Content will still be accessible if 478 | requested directly by content address ([`get.stream.byDigest`](#get-stream)). 479 | 480 | By default, this appends a new entry to the index with an integrity of `null`. 481 | If `opts.removeFully` is set to `true` then the index file itself will be 482 | physically deleted rather than appending a `null`. 483 | 484 | To remove the content itself (which might still be used by other entries), use 485 | [`rm.content`](#rm-content). Or, to safely vacuum any unused content, use 486 | [`verify`](#verify). 487 | 488 | ##### Example 489 | 490 | ```javascript 491 | cacache.rm.entry(cachePath, 'my-thing').then(() => { 492 | console.log('I did not like it anyway') 493 | }) 494 | ``` 495 | 496 | #### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise` 497 | 498 | Removes the content identified by `integrity`. Any index entries referring to it 499 | will not be usable again until the content is re-added to the cache with an 500 | identical digest. 501 | 502 | ##### Example 503 | 504 | ```javascript 505 | cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => { 506 | console.log('data for my-thing is gone!') 507 | }) 508 | ``` 509 | 510 | #### <a name="index-compact"></a> `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise` 511 | 512 | Uses `matchFn`, which must be a synchronous function that accepts two entries 513 | and returns a boolean indicating whether or not the two entries match, to 514 | deduplicate all entries in the cache for the given `key`. 515 | 516 | If `opts.validateEntry` is provided, it will be called as a function with the 517 | only parameter being a single index entry. The function must return a Boolean, 518 | if it returns `true` the entry is considered valid and will be kept in the index, 519 | if it returns `false` the entry will be removed from the index. 520 | 521 | If `opts.validateEntry` is not provided, however, every entry in the index will 522 | be deduplicated and kept until the first `null` integrity is reached, removing 523 | all entries that were written before the `null`. 
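As an illustration, here is a sketch of a hypothetical `matchFn` that treats two entries as duplicates when their metadata carries the same `version` (the key and metadata shape are invented for this example):

```javascript
// Two index entries are considered the same entry if they record the same version.
const sameVersion = (a, b) =>
  Boolean(a.metadata && b.metadata && a.metadata.version === b.metadata.version)

cacache.index.compact(cachePath, 'my-thing', sameVersion).then(entries => {
  console.log(`index for my-thing now holds ${entries.length} deduplicated entries`)
})
```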
524 | 
525 | The deduplicated list of entries is both written to the index, replacing the
526 | existing content, and returned in the Promise.
527 | 
528 | #### <a name="index-insert"></a> `> cacache.index.insert(cache, key, integrity, opts) -> Promise`
529 | 
530 | Writes an index entry to the cache for the given `key` without writing content.
531 | 
532 | It is assumed that if you are using this method, you have already stored the content
533 | by some other means and only wish to add a new index entry for that content. The `metadata`
534 | and `size` properties are read from `opts` and used as part of the index entry.
535 | 
536 | Returns a Promise resolving to the newly added entry.
537 | 
538 | #### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
539 | 
540 | Completely resets the in-memory entry cache.
541 | 
542 | #### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
543 | 
544 | Returns a unique temporary directory inside the cache's `tmp` dir. This
545 | directory will use the same safe user assignment as the rest of the cache.
546 | 
547 | Once the directory is made, it's the user's responsibility to ensure that all files
548 | within are given the appropriate `gid`/`uid` ownership settings to match
549 | the rest of the cache. If not, you can ask cacache to do it for you by
550 | calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
551 | permissions.
552 | 
553 | If you want automatic cleanup of this directory, use
554 | [`tmp.withTmp()`](#with-tmp).
555 | 
556 | See: [options](#tmp-options)
557 | 
558 | ##### Example
559 | 
560 | ```javascript
561 | cacache.tmp.mkdir(cache).then(dir => {
562 |   fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
563 | })
564 | ```
565 | 
566 | #### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise`
567 | 
568 | Sets the `uid` and `gid` properties on all files and folders within the tmp
569 | folder to match the rest of the cache.
570 | 
571 | Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
572 | [`tmp.withTmp`](#with-tmp).
573 | 
574 | ##### Example
575 | 
576 | ```javascript
577 | cacache.tmp.mkdir(cache).then(dir => {
578 |   writeFile(path.join(dir, 'file'), someData).then(() => {
579 |     // make sure we didn't just put a root-owned file in the cache
580 |     cacache.tmp.fix(cache).then(() => {
581 |       // all uids and gids match now
582 |     })
583 |   })
584 | })
585 | ```
586 | 
587 | #### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
588 | 
589 | Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
590 | with it. The created temporary directory will be removed automatically once
591 | the promise returned by `cb()` resolves, so there is no need to clean it up
592 | yourself.
593 | 
594 | The same caveats apply when it comes to managing permissions for the tmp dir's
595 | contents.
596 | 
597 | See: [options](#tmp-options)
598 | 
599 | ##### Example
600 | 
601 | ```javascript
602 | cacache.tmp.withTmp(cache, dir => {
603 |   return fs.writeFile(path.join(dir, 'blablabla'), 'blabla contents', { encoding: 'utf8' })
604 | }).then(() => {
605 |   // `dir` no longer exists
606 | })
607 | ```
608 | 
609 | ##### <a name="tmp-options"></a> Options
610 | 
611 | ##### `opts.tmpPrefix`
612 | Default: null
613 | 
614 | Prefix to append on the temporary directory name inside the cache's tmp dir.
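For instance, a minimal sketch of `tmpPrefix` in use with `withTmp` (the prefix and file name are arbitrary, and `fs` is assumed to be the promise-based `fs/promises`, as in the example above):

```javascript
// Temporary directories created for this call get names starting with
// 'unpack-' inside the cache's tmp dir, and are cleaned up afterwards.
cacache.tmp.withTmp(cache, { tmpPrefix: 'unpack-' }, async dir => {
  await fs.writeFile(path.join(dir, 'scratch.bin'), Buffer.from('temporary data'))
  // ... do whatever work needs the scratch file ...
}).then(() => {
  // the prefixed directory and everything in it are gone now
})
```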
615 | 616 | #### <a name="integrity"></a> Subresource Integrity Digests 617 | 618 | For content verification and addressing, cacache uses strings following the 619 | [Subresource 620 | Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity). 621 | That is, any time cacache expects an `integrity` argument or option, it 622 | should be in the format `<hashAlgorithm>-<base64-hash>`. 623 | 624 | One deviation from the current spec is that cacache will support any hash 625 | algorithms supported by the underlying Node.js process. You can use 626 | `crypto.getHashes()` to see which ones you can use. 627 | 628 | ##### Generating Digests Yourself 629 | 630 | If you have an existing content shasum, they are generally formatted as a 631 | hexadecimal string (that is, a sha1 would look like: 632 | `5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with 633 | cacache, you'll need to convert this to an equivalent subresource integrity 634 | string. For this example, the corresponding hash would be: 635 | `sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`. 636 | 637 | If you want to generate an integrity string yourself for existing data, you can 638 | use something like this: 639 | 640 | ```javascript 641 | const crypto = require('crypto') 642 | const hashAlgorithm = 'sha512' 643 | const data = 'foobarbaz' 644 | 645 | const integrity = ( 646 | hashAlgorithm + 647 | '-' + 648 | crypto.createHash(hashAlgorithm).update(data).digest('base64') 649 | ) 650 | ``` 651 | 652 | You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality 653 | around SRI strings, including generation, parsing, and translating from existing 654 | hex-formatted strings. 655 | 656 | #### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise` 657 | 658 | Checks out and fixes up your cache: 659 | 660 | * Cleans up corrupted or invalid index entries. 661 | * Custom entry filtering options. 662 | * Garbage collects any content entries not referenced by the index. 663 | * Checks integrity for all content entries and removes invalid content. 664 | * Fixes cache ownership. 665 | * Removes the `tmp` directory in the cache and all its contents. 666 | 667 | When it's done, it'll return an object with various stats about the verification 668 | process, including amount of storage reclaimed, number of valid entries, number 669 | of entries removed, etc. 670 | 671 | ##### <a name="verify-options"></a> Options 672 | 673 | ##### `opts.concurrency` 674 | 675 | Default: 20 676 | 677 | Number of concurrently read files in the filesystem while doing clean up. 678 | 679 | ##### `opts.filter` 680 | Receives a formatted entry. Return false to remove it. 681 | Note: might be called more than once on the same entry. 682 | 683 | ##### `opts.log` 684 | Custom logger function: 685 | ``` 686 | log: { silly () {} } 687 | log.silly('verify', 'verifying cache at', cache) 688 | ``` 689 | 690 | ##### Example 691 | 692 | ```sh 693 | echo somegarbage >> $CACHEPATH/content/deadbeef 694 | ``` 695 | 696 | ```javascript 697 | cacache.verify(cachePath).then(stats => { 698 | // deadbeef collected, because of invalid checksum. 699 | console.log('cache is much nicer now! stats:', stats) 700 | }) 701 | ``` 702 | 703 | #### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise` 704 | 705 | Returns a `Date` representing the last time `cacache.verify` was run on `cache`. 
706 | 707 | ##### Example 708 | 709 | ```javascript 710 | cacache.verify(cachePath).then(() => { 711 | cacache.verify.lastRun(cachePath).then(lastTime => { 712 | console.log('cacache.verify was last called on' + lastTime) 713 | }) 714 | }) 715 | ``` 716 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | <!-- This file is automatically added by @npmcli/template-oss. Do not edit. --> 2 | 3 | GitHub takes the security of our software products and services seriously, including the open source code repositories managed through our GitHub organizations, such as [GitHub](https://github.com/GitHub). 4 | 5 | If you believe you have found a security vulnerability in this GitHub-owned open source repository, you can report it to us in one of two ways. 6 | 7 | If the vulnerability you have found is *not* [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) or if you do not wish to be considered for a bounty reward, please report the issue to us directly through [opensource-security@github.com](mailto:opensource-security@github.com). 8 | 9 | If the vulnerability you have found is [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) and you would like for your finding to be considered for a bounty reward, please submit the vulnerability to us through [HackerOne](https://hackerone.com/github) in order to be eligible to receive a bounty award. 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.** 12 | 13 | Thanks for helping make GitHub safe for everyone. 14 | -------------------------------------------------------------------------------- /lib/content/path.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const contentVer = require('../../package.json')['cache-version'].content 4 | const hashToSegments = require('../util/hash-to-segments') 5 | const path = require('path') 6 | const ssri = require('ssri') 7 | 8 | // Current format of content file path: 9 | // 10 | // sha512-BaSE64Hex= -> 11 | // ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee 12 | // 13 | module.exports = contentPath 14 | 15 | function contentPath (cache, integrity) { 16 | const sri = ssri.parse(integrity, { single: true }) 17 | // contentPath is the *strongest* algo given 18 | return path.join( 19 | contentDir(cache), 20 | sri.algorithm, 21 | ...hashToSegments(sri.hexDigest()) 22 | ) 23 | } 24 | 25 | module.exports.contentDir = contentDir 26 | 27 | function contentDir (cache) { 28 | return path.join(cache, `content-v${contentVer}`) 29 | } 30 | -------------------------------------------------------------------------------- /lib/content/read.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs/promises') 4 | const fsm = require('fs-minipass') 5 | const ssri = require('ssri') 6 | const contentPath = require('./path') 7 | const Pipeline = require('minipass-pipeline') 8 | 9 | module.exports = read 10 | 11 | const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 12 | async function read (cache, integrity, opts = {}) { 13 | const { size } = opts 14 | const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { 15 | // get size 16 | const stat = size ? 
{ size } : await fs.stat(cpath) 17 | return { stat, cpath, sri } 18 | }) 19 | 20 | if (stat.size > MAX_SINGLE_READ_SIZE) { 21 | return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() 22 | } 23 | 24 | const data = await fs.readFile(cpath, { encoding: null }) 25 | 26 | if (stat.size !== data.length) { 27 | throw sizeError(stat.size, data.length) 28 | } 29 | 30 | if (!ssri.checkData(data, sri)) { 31 | throw integrityError(sri, cpath) 32 | } 33 | 34 | return data 35 | } 36 | 37 | const readPipeline = (cpath, size, sri, stream) => { 38 | stream.push( 39 | new fsm.ReadStream(cpath, { 40 | size, 41 | readSize: MAX_SINGLE_READ_SIZE, 42 | }), 43 | ssri.integrityStream({ 44 | integrity: sri, 45 | size, 46 | }) 47 | ) 48 | return stream 49 | } 50 | 51 | module.exports.stream = readStream 52 | module.exports.readStream = readStream 53 | 54 | function readStream (cache, integrity, opts = {}) { 55 | const { size } = opts 56 | const stream = new Pipeline() 57 | // Set all this up to run on the stream and then just return the stream 58 | Promise.resolve().then(async () => { 59 | const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { 60 | // get size 61 | const stat = size ? { size } : await fs.stat(cpath) 62 | return { stat, cpath, sri } 63 | }) 64 | 65 | return readPipeline(cpath, stat.size, sri, stream) 66 | }).catch(err => stream.emit('error', err)) 67 | 68 | return stream 69 | } 70 | 71 | module.exports.copy = copy 72 | 73 | function copy (cache, integrity, dest) { 74 | return withContentSri(cache, integrity, (cpath) => { 75 | return fs.copyFile(cpath, dest) 76 | }) 77 | } 78 | 79 | module.exports.hasContent = hasContent 80 | 81 | async function hasContent (cache, integrity) { 82 | if (!integrity) { 83 | return false 84 | } 85 | 86 | try { 87 | return await withContentSri(cache, integrity, async (cpath, sri) => { 88 | const stat = await fs.stat(cpath) 89 | return { size: stat.size, sri, stat } 90 | }) 91 | } catch (err) { 92 | if (err.code === 'ENOENT') { 93 | return false 94 | } 95 | 96 | if (err.code === 'EPERM') { 97 | /* istanbul ignore else */ 98 | if (process.platform !== 'win32') { 99 | throw err 100 | } else { 101 | return false 102 | } 103 | } 104 | } 105 | } 106 | 107 | async function withContentSri (cache, integrity, fn) { 108 | const sri = ssri.parse(integrity) 109 | // If `integrity` has multiple entries, pick the first digest 110 | // with available local data. 
111 | const algo = sri.pickAlgorithm() 112 | const digests = sri[algo] 113 | 114 | if (digests.length <= 1) { 115 | const cpath = contentPath(cache, digests[0]) 116 | return fn(cpath, digests[0]) 117 | } else { 118 | // Can't use race here because a generic error can happen before 119 | // a ENOENT error, and can happen before a valid result 120 | const results = await Promise.all(digests.map(async (meta) => { 121 | try { 122 | return await withContentSri(cache, meta, fn) 123 | } catch (err) { 124 | if (err.code === 'ENOENT') { 125 | return Object.assign( 126 | new Error('No matching content found for ' + sri.toString()), 127 | { code: 'ENOENT' } 128 | ) 129 | } 130 | return err 131 | } 132 | })) 133 | // Return the first non error if it is found 134 | const result = results.find((r) => !(r instanceof Error)) 135 | if (result) { 136 | return result 137 | } 138 | 139 | // Throw the No matching content found error 140 | const enoentError = results.find((r) => r.code === 'ENOENT') 141 | if (enoentError) { 142 | throw enoentError 143 | } 144 | 145 | // Throw generic error 146 | throw results.find((r) => r instanceof Error) 147 | } 148 | } 149 | 150 | function sizeError (expected, found) { 151 | /* eslint-disable-next-line max-len */ 152 | const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) 153 | err.expected = expected 154 | err.found = found 155 | err.code = 'EBADSIZE' 156 | return err 157 | } 158 | 159 | function integrityError (sri, path) { 160 | const err = new Error(`Integrity verification failed for ${sri} (${path})`) 161 | err.code = 'EINTEGRITY' 162 | err.sri = sri 163 | err.path = path 164 | return err 165 | } 166 | -------------------------------------------------------------------------------- /lib/content/rm.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs/promises') 4 | const contentPath = require('./path') 5 | const { hasContent } = require('./read') 6 | 7 | module.exports = rm 8 | 9 | async function rm (cache, integrity) { 10 | const content = await hasContent(cache, integrity) 11 | // ~pretty~ sure we can't end up with a content lacking sri, but be safe 12 | if (content && content.sri) { 13 | await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) 14 | return true 15 | } else { 16 | return false 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /lib/content/write.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const events = require('events') 4 | 5 | const contentPath = require('./path') 6 | const fs = require('fs/promises') 7 | const { moveFile } = require('@npmcli/fs') 8 | const { Minipass } = require('minipass') 9 | const Pipeline = require('minipass-pipeline') 10 | const Flush = require('minipass-flush') 11 | const path = require('path') 12 | const ssri = require('ssri') 13 | const uniqueFilename = require('unique-filename') 14 | const fsm = require('fs-minipass') 15 | 16 | module.exports = write 17 | 18 | // Cache of move operations in process so we don't duplicate 19 | const moveOperations = new Map() 20 | 21 | async function write (cache, data, opts = {}) { 22 | const { algorithms, size, integrity } = opts 23 | 24 | if (typeof size === 'number' && data.length !== size) { 25 | throw sizeError(size, data.length) 26 | } 27 | 28 | const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) 29 | if (integrity && !ssri.checkData(data, integrity, opts)) { 30 | throw checksumError(integrity, sri) 31 | } 32 | 33 | for (const algo in sri) { 34 | const tmp = await makeTmp(cache, opts) 35 | const hash = sri[algo].toString() 36 | try { 37 | await fs.writeFile(tmp.target, data, { flag: 'wx' }) 38 | await moveToDestination(tmp, cache, hash, opts) 39 | } finally { 40 | if (!tmp.moved) { 41 | await fs.rm(tmp.target, { recursive: true, force: true }) 42 | } 43 | } 44 | } 45 | return { integrity: sri, size: data.length } 46 | } 47 | 48 | module.exports.stream = writeStream 49 | 50 | // writes proxied to the 'inputStream' that is passed to the Promise 51 | // 'end' is deferred until content is handled. 52 | class CacacheWriteStream extends Flush { 53 | constructor (cache, opts) { 54 | super() 55 | this.opts = opts 56 | this.cache = cache 57 | this.inputStream = new Minipass() 58 | this.inputStream.on('error', er => this.emit('error', er)) 59 | this.inputStream.on('drain', () => this.emit('drain')) 60 | this.handleContentP = null 61 | } 62 | 63 | write (chunk, encoding, cb) { 64 | if (!this.handleContentP) { 65 | this.handleContentP = handleContent( 66 | this.inputStream, 67 | this.cache, 68 | this.opts 69 | ) 70 | this.handleContentP.catch(error => this.emit('error', error)) 71 | } 72 | return this.inputStream.write(chunk, encoding, cb) 73 | } 74 | 75 | flush (cb) { 76 | this.inputStream.end(() => { 77 | if (!this.handleContentP) { 78 | const e = new Error('Cache input stream was empty') 79 | e.code = 'ENODATA' 80 | // empty streams are probably emitting end right away. 81 | // defer this one tick by rejecting a promise on it. 82 | return Promise.reject(e).catch(cb) 83 | } 84 | // eslint-disable-next-line promise/catch-or-return 85 | this.handleContentP.then( 86 | (res) => { 87 | res.integrity && this.emit('integrity', res.integrity) 88 | // eslint-disable-next-line promise/always-return 89 | res.size !== null && this.emit('size', res.size) 90 | cb() 91 | }, 92 | (er) => cb(er) 93 | ) 94 | }) 95 | } 96 | } 97 | 98 | function writeStream (cache, opts = {}) { 99 | return new CacacheWriteStream(cache, opts) 100 | } 101 | 102 | async function handleContent (inputStream, cache, opts) { 103 | const tmp = await makeTmp(cache, opts) 104 | try { 105 | const res = await pipeToTmp(inputStream, cache, tmp.target, opts) 106 | await moveToDestination( 107 | tmp, 108 | cache, 109 | res.integrity, 110 | opts 111 | ) 112 | return res 113 | } finally { 114 | if (!tmp.moved) { 115 | await fs.rm(tmp.target, { recursive: true, force: true }) 116 | } 117 | } 118 | } 119 | 120 | async function pipeToTmp (inputStream, cache, tmpTarget, opts) { 121 | const outStream = new fsm.WriteStream(tmpTarget, { 122 | flags: 'wx', 123 | }) 124 | 125 | if (opts.integrityEmitter) { 126 | // we need to create these all simultaneously since they can fire in any order 127 | const [integrity, size] = await Promise.all([ 128 | events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), 129 | events.once(opts.integrityEmitter, 'size').then(res => res[0]), 130 | new Pipeline(inputStream, outStream).promise(), 131 | ]) 132 | return { integrity, size } 133 | } 134 | 135 | let integrity 136 | let size 137 | const hashStream = ssri.integrityStream({ 138 | integrity: opts.integrity, 139 | algorithms: opts.algorithms, 140 | size: opts.size, 141 | }) 142 | hashStream.on('integrity', i => { 143 | integrity = i 144 | }) 145 | hashStream.on('size', s => { 146 | size = s 147 | }) 148 | 149 | const pipeline = new 
Pipeline(inputStream, hashStream, outStream) 150 | await pipeline.promise() 151 | return { integrity, size } 152 | } 153 | 154 | async function makeTmp (cache, opts) { 155 | const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) 156 | await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) 157 | return { 158 | target: tmpTarget, 159 | moved: false, 160 | } 161 | } 162 | 163 | async function moveToDestination (tmp, cache, sri) { 164 | const destination = contentPath(cache, sri) 165 | const destDir = path.dirname(destination) 166 | if (moveOperations.has(destination)) { 167 | return moveOperations.get(destination) 168 | } 169 | moveOperations.set( 170 | destination, 171 | fs.mkdir(destDir, { recursive: true }) 172 | .then(async () => { 173 | await moveFile(tmp.target, destination, { overwrite: false }) 174 | tmp.moved = true 175 | return tmp.moved 176 | }) 177 | .catch(err => { 178 | if (!err.message.startsWith('The destination file exists')) { 179 | throw Object.assign(err, { code: 'EEXIST' }) 180 | } 181 | }).finally(() => { 182 | moveOperations.delete(destination) 183 | }) 184 | 185 | ) 186 | return moveOperations.get(destination) 187 | } 188 | 189 | function sizeError (expected, found) { 190 | /* eslint-disable-next-line max-len */ 191 | const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) 192 | err.expected = expected 193 | err.found = found 194 | err.code = 'EBADSIZE' 195 | return err 196 | } 197 | 198 | function checksumError (expected, found) { 199 | const err = new Error(`Integrity check failed: 200 | Wanted: ${expected} 201 | Found: ${found}`) 202 | err.code = 'EINTEGRITY' 203 | err.expected = expected 204 | err.found = found 205 | return err 206 | } 207 | -------------------------------------------------------------------------------- /lib/entry-index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const crypto = require('crypto') 4 | const { 5 | appendFile, 6 | mkdir, 7 | readFile, 8 | readdir, 9 | rm, 10 | writeFile, 11 | } = require('fs/promises') 12 | const { Minipass } = require('minipass') 13 | const path = require('path') 14 | const ssri = require('ssri') 15 | const uniqueFilename = require('unique-filename') 16 | 17 | const contentPath = require('./content/path') 18 | const hashToSegments = require('./util/hash-to-segments') 19 | const indexV = require('../package.json')['cache-version'].index 20 | const { moveFile } = require('@npmcli/fs') 21 | 22 | const lsStreamConcurrency = 5 23 | 24 | module.exports.NotFoundError = class NotFoundError extends Error { 25 | constructor (cache, key) { 26 | super(`No cache entry for ${key} found in ${cache}`) 27 | this.code = 'ENOENT' 28 | this.cache = cache 29 | this.key = key 30 | } 31 | } 32 | 33 | module.exports.compact = compact 34 | 35 | async function compact (cache, key, matchFn, opts = {}) { 36 | const bucket = bucketPath(cache, key) 37 | const entries = await bucketEntries(bucket) 38 | const newEntries = [] 39 | // we loop backwards because the bottom-most result is the newest 40 | // since we add new entries with appendFile 41 | for (let i = entries.length - 1; i >= 0; --i) { 42 | const entry = entries[i] 43 | // a null integrity could mean either a delete was appended 44 | // or the user has simply stored an index that does not map 45 | // to any content. we determine if the user wants to keep the 46 | // null integrity based on the validateEntry function passed in options. 
47 | // if the integrity is null and no validateEntry is provided, we break 48 | // as we consider the null integrity to be a deletion of everything 49 | // that came before it. 50 | if (entry.integrity === null && !opts.validateEntry) { 51 | break 52 | } 53 | 54 | // if this entry is valid, and it is either the first entry or 55 | // the newEntries array doesn't already include an entry that 56 | // matches this one based on the provided matchFn, then we add 57 | // it to the beginning of our list 58 | if ((!opts.validateEntry || opts.validateEntry(entry) === true) && 59 | (newEntries.length === 0 || 60 | !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { 61 | newEntries.unshift(entry) 62 | } 63 | } 64 | 65 | const newIndex = '\n' + newEntries.map((entry) => { 66 | const stringified = JSON.stringify(entry) 67 | const hash = hashEntry(stringified) 68 | return `${hash}\t${stringified}` 69 | }).join('\n') 70 | 71 | const setup = async () => { 72 | const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) 73 | await mkdir(path.dirname(target), { recursive: true }) 74 | return { 75 | target, 76 | moved: false, 77 | } 78 | } 79 | 80 | const teardown = async (tmp) => { 81 | if (!tmp.moved) { 82 | return rm(tmp.target, { recursive: true, force: true }) 83 | } 84 | } 85 | 86 | const write = async (tmp) => { 87 | await writeFile(tmp.target, newIndex, { flag: 'wx' }) 88 | await mkdir(path.dirname(bucket), { recursive: true }) 89 | // we use @npmcli/move-file directly here because we 90 | // want to overwrite the existing file 91 | await moveFile(tmp.target, bucket) 92 | tmp.moved = true 93 | } 94 | 95 | // write the file atomically 96 | const tmp = await setup() 97 | try { 98 | await write(tmp) 99 | } finally { 100 | await teardown(tmp) 101 | } 102 | 103 | // we reverse the list we generated such that the newest 104 | // entries come first in order to make looping through them easier 105 | // the true passed to formatEntry tells it to keep null 106 | // integrity values, if they made it this far it's because 107 | // validateEntry returned true, and as such we should return it 108 | return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) 109 | } 110 | 111 | module.exports.insert = insert 112 | 113 | async function insert (cache, key, integrity, opts = {}) { 114 | const { metadata, size, time } = opts 115 | const bucket = bucketPath(cache, key) 116 | const entry = { 117 | key, 118 | integrity: integrity && ssri.stringify(integrity), 119 | time: time || Date.now(), 120 | size, 121 | metadata, 122 | } 123 | try { 124 | await mkdir(path.dirname(bucket), { recursive: true }) 125 | const stringified = JSON.stringify(entry) 126 | // NOTE - Cleverness ahoy! 127 | // 128 | // This works because it's tremendously unlikely for an entry to corrupt 129 | // another while still preserving the string length of the JSON in 130 | // question. So, we just slap the length in there and verify it on read. 131 | // 132 | // Thanks to @isaacs for the whiteboarding session that ended up with 133 | // this. 
134 | await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) 135 | } catch (err) { 136 | if (err.code === 'ENOENT') { 137 | return undefined 138 | } 139 | 140 | throw err 141 | } 142 | return formatEntry(cache, entry) 143 | } 144 | 145 | module.exports.find = find 146 | 147 | async function find (cache, key) { 148 | const bucket = bucketPath(cache, key) 149 | try { 150 | const entries = await bucketEntries(bucket) 151 | return entries.reduce((latest, next) => { 152 | if (next && next.key === key) { 153 | return formatEntry(cache, next) 154 | } else { 155 | return latest 156 | } 157 | }, null) 158 | } catch (err) { 159 | if (err.code === 'ENOENT') { 160 | return null 161 | } else { 162 | throw err 163 | } 164 | } 165 | } 166 | 167 | module.exports.delete = del 168 | 169 | function del (cache, key, opts = {}) { 170 | if (!opts.removeFully) { 171 | return insert(cache, key, null, opts) 172 | } 173 | 174 | const bucket = bucketPath(cache, key) 175 | return rm(bucket, { recursive: true, force: true }) 176 | } 177 | 178 | module.exports.lsStream = lsStream 179 | 180 | function lsStream (cache) { 181 | const indexDir = bucketDir(cache) 182 | const stream = new Minipass({ objectMode: true }) 183 | 184 | // Set all this up to run on the stream and then just return the stream 185 | Promise.resolve().then(async () => { 186 | const { default: pMap } = await import('p-map') 187 | const buckets = await readdirOrEmpty(indexDir) 188 | await pMap(buckets, async (bucket) => { 189 | const bucketPath = path.join(indexDir, bucket) 190 | const subbuckets = await readdirOrEmpty(bucketPath) 191 | await pMap(subbuckets, async (subbucket) => { 192 | const subbucketPath = path.join(bucketPath, subbucket) 193 | 194 | // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*" 195 | const subbucketEntries = await readdirOrEmpty(subbucketPath) 196 | await pMap(subbucketEntries, async (entry) => { 197 | const entryPath = path.join(subbucketPath, entry) 198 | try { 199 | const entries = await bucketEntries(entryPath) 200 | // using a Map here prevents duplicate keys from showing up 201 | // twice, I guess? 
202 | const reduced = entries.reduce((acc, entry) => { 203 | acc.set(entry.key, entry) 204 | return acc 205 | }, new Map()) 206 | // reduced is a map of key => entry 207 | for (const entry of reduced.values()) { 208 | const formatted = formatEntry(cache, entry) 209 | if (formatted) { 210 | stream.write(formatted) 211 | } 212 | } 213 | } catch (err) { 214 | if (err.code === 'ENOENT') { 215 | return undefined 216 | } 217 | throw err 218 | } 219 | }, 220 | { concurrency: lsStreamConcurrency }) 221 | }, 222 | { concurrency: lsStreamConcurrency }) 223 | }, 224 | { concurrency: lsStreamConcurrency }) 225 | stream.end() 226 | return stream 227 | }).catch(err => stream.emit('error', err)) 228 | 229 | return stream 230 | } 231 | 232 | module.exports.ls = ls 233 | 234 | async function ls (cache) { 235 | const entries = await lsStream(cache).collect() 236 | return entries.reduce((acc, xs) => { 237 | acc[xs.key] = xs 238 | return acc 239 | }, {}) 240 | } 241 | 242 | module.exports.bucketEntries = bucketEntries 243 | 244 | async function bucketEntries (bucket, filter) { 245 | const data = await readFile(bucket, 'utf8') 246 | return _bucketEntries(data, filter) 247 | } 248 | 249 | function _bucketEntries (data) { 250 | const entries = [] 251 | data.split('\n').forEach((entry) => { 252 | if (!entry) { 253 | return 254 | } 255 | 256 | const pieces = entry.split('\t') 257 | if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { 258 | // Hash is no good! Corruption or malice? Doesn't matter! 259 | // EJECT EJECT 260 | return 261 | } 262 | let obj 263 | try { 264 | obj = JSON.parse(pieces[1]) 265 | } catch (_) { 266 | // eslint-ignore-next-line no-empty-block 267 | } 268 | // coverage disabled here, no need to test with an entry that parses to something falsey 269 | // istanbul ignore else 270 | if (obj) { 271 | entries.push(obj) 272 | } 273 | }) 274 | return entries 275 | } 276 | 277 | module.exports.bucketDir = bucketDir 278 | 279 | function bucketDir (cache) { 280 | return path.join(cache, `index-v${indexV}`) 281 | } 282 | 283 | module.exports.bucketPath = bucketPath 284 | 285 | function bucketPath (cache, key) { 286 | const hashed = hashKey(key) 287 | return path.join.apply( 288 | path, 289 | [bucketDir(cache)].concat(hashToSegments(hashed)) 290 | ) 291 | } 292 | 293 | module.exports.hashKey = hashKey 294 | 295 | function hashKey (key) { 296 | return hash(key, 'sha256') 297 | } 298 | 299 | module.exports.hashEntry = hashEntry 300 | 301 | function hashEntry (str) { 302 | return hash(str, 'sha1') 303 | } 304 | 305 | function hash (str, digest) { 306 | return crypto 307 | .createHash(digest) 308 | .update(str) 309 | .digest('hex') 310 | } 311 | 312 | function formatEntry (cache, entry, keepAll) { 313 | // Treat null digests as deletions. They'll shadow any previous entries. 314 | if (!entry.integrity && !keepAll) { 315 | return null 316 | } 317 | 318 | return { 319 | key: entry.key, 320 | integrity: entry.integrity, 321 | path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, 322 | size: entry.size, 323 | time: entry.time, 324 | metadata: entry.metadata, 325 | } 326 | } 327 | 328 | function readdirOrEmpty (dir) { 329 | return readdir(dir).catch((err) => { 330 | if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { 331 | return [] 332 | } 333 | 334 | throw err 335 | }) 336 | } 337 | -------------------------------------------------------------------------------- /lib/get.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const Collect = require('minipass-collect') 4 | const { Minipass } = require('minipass') 5 | const Pipeline = require('minipass-pipeline') 6 | 7 | const index = require('./entry-index') 8 | const memo = require('./memoization') 9 | const read = require('./content/read') 10 | 11 | async function getData (cache, key, opts = {}) { 12 | const { integrity, memoize, size } = opts 13 | const memoized = memo.get(cache, key, opts) 14 | if (memoized && memoize !== false) { 15 | return { 16 | metadata: memoized.entry.metadata, 17 | data: memoized.data, 18 | integrity: memoized.entry.integrity, 19 | size: memoized.entry.size, 20 | } 21 | } 22 | 23 | const entry = await index.find(cache, key, opts) 24 | if (!entry) { 25 | throw new index.NotFoundError(cache, key) 26 | } 27 | const data = await read(cache, entry.integrity, { integrity, size }) 28 | if (memoize) { 29 | memo.put(cache, entry, data, opts) 30 | } 31 | 32 | return { 33 | data, 34 | metadata: entry.metadata, 35 | size: entry.size, 36 | integrity: entry.integrity, 37 | } 38 | } 39 | module.exports = getData 40 | 41 | async function getDataByDigest (cache, key, opts = {}) { 42 | const { integrity, memoize, size } = opts 43 | const memoized = memo.get.byDigest(cache, key, opts) 44 | if (memoized && memoize !== false) { 45 | return memoized 46 | } 47 | 48 | const res = await read(cache, key, { integrity, size }) 49 | if (memoize) { 50 | memo.put.byDigest(cache, key, res, opts) 51 | } 52 | return res 53 | } 54 | module.exports.byDigest = getDataByDigest 55 | 56 | const getMemoizedStream = (memoized) => { 57 | const stream = new Minipass() 58 | stream.on('newListener', function (ev, cb) { 59 | ev === 'metadata' && cb(memoized.entry.metadata) 60 | ev === 'integrity' && cb(memoized.entry.integrity) 61 | ev === 'size' && cb(memoized.entry.size) 62 | }) 63 | stream.end(memoized.data) 64 | return stream 65 | } 66 | 67 | function getStream (cache, key, opts = {}) { 68 | const { memoize, size } = opts 69 | const memoized = memo.get(cache, key, opts) 70 | if (memoized && memoize !== false) { 71 | return getMemoizedStream(memoized) 72 | } 73 | 74 | const stream = new Pipeline() 75 | // Set all this up to run on the stream and then just return the stream 76 | Promise.resolve().then(async () => { 77 | const entry = await index.find(cache, key) 78 | if (!entry) { 79 | throw new index.NotFoundError(cache, key) 80 | } 81 | 82 | stream.emit('metadata', entry.metadata) 83 | stream.emit('integrity', entry.integrity) 84 | stream.emit('size', entry.size) 85 | stream.on('newListener', function (ev, cb) { 86 | ev === 'metadata' && cb(entry.metadata) 87 | ev === 'integrity' && cb(entry.integrity) 88 | ev === 'size' && cb(entry.size) 89 | }) 90 | 91 | const src = read.readStream( 92 | cache, 93 | entry.integrity, 94 | { ...opts, size: typeof size !== 'number' ? 
entry.size : size } 95 | ) 96 | 97 | if (memoize) { 98 | const memoStream = new Collect.PassThrough() 99 | memoStream.on('collect', data => memo.put(cache, entry, data, opts)) 100 | stream.unshift(memoStream) 101 | } 102 | stream.unshift(src) 103 | return stream 104 | }).catch((err) => stream.emit('error', err)) 105 | 106 | return stream 107 | } 108 | 109 | module.exports.stream = getStream 110 | 111 | function getStreamDigest (cache, integrity, opts = {}) { 112 | const { memoize } = opts 113 | const memoized = memo.get.byDigest(cache, integrity, opts) 114 | if (memoized && memoize !== false) { 115 | const stream = new Minipass() 116 | stream.end(memoized) 117 | return stream 118 | } else { 119 | const stream = read.readStream(cache, integrity, opts) 120 | if (!memoize) { 121 | return stream 122 | } 123 | 124 | const memoStream = new Collect.PassThrough() 125 | memoStream.on('collect', data => memo.put.byDigest( 126 | cache, 127 | integrity, 128 | data, 129 | opts 130 | )) 131 | return new Pipeline(stream, memoStream) 132 | } 133 | } 134 | 135 | module.exports.stream.byDigest = getStreamDigest 136 | 137 | function info (cache, key, opts = {}) { 138 | const { memoize } = opts 139 | const memoized = memo.get(cache, key, opts) 140 | if (memoized && memoize !== false) { 141 | return Promise.resolve(memoized.entry) 142 | } else { 143 | return index.find(cache, key) 144 | } 145 | } 146 | module.exports.info = info 147 | 148 | async function copy (cache, key, dest, opts = {}) { 149 | const entry = await index.find(cache, key, opts) 150 | if (!entry) { 151 | throw new index.NotFoundError(cache, key) 152 | } 153 | await read.copy(cache, entry.integrity, dest, opts) 154 | return { 155 | metadata: entry.metadata, 156 | size: entry.size, 157 | integrity: entry.integrity, 158 | } 159 | } 160 | 161 | module.exports.copy = copy 162 | 163 | async function copyByDigest (cache, key, dest, opts = {}) { 164 | await read.copy(cache, key, dest, opts) 165 | return key 166 | } 167 | 168 | module.exports.copy.byDigest = copyByDigest 169 | 170 | module.exports.hasContent = read.hasContent 171 | -------------------------------------------------------------------------------- /lib/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const get = require('./get.js') 4 | const put = require('./put.js') 5 | const rm = require('./rm.js') 6 | const verify = require('./verify.js') 7 | const { clearMemoized } = require('./memoization.js') 8 | const tmp = require('./util/tmp.js') 9 | const index = require('./entry-index.js') 10 | 11 | module.exports.index = {} 12 | module.exports.index.compact = index.compact 13 | module.exports.index.insert = index.insert 14 | 15 | module.exports.ls = index.ls 16 | module.exports.ls.stream = index.lsStream 17 | 18 | module.exports.get = get 19 | module.exports.get.byDigest = get.byDigest 20 | module.exports.get.stream = get.stream 21 | module.exports.get.stream.byDigest = get.stream.byDigest 22 | module.exports.get.copy = get.copy 23 | module.exports.get.copy.byDigest = get.copy.byDigest 24 | module.exports.get.info = get.info 25 | module.exports.get.hasContent = get.hasContent 26 | 27 | module.exports.put = put 28 | module.exports.put.stream = put.stream 29 | 30 | module.exports.rm = rm.entry 31 | module.exports.rm.all = rm.all 32 | module.exports.rm.entry = module.exports.rm 33 | module.exports.rm.content = rm.content 34 | 35 | module.exports.clearMemoized = clearMemoized 36 | 37 | module.exports.tmp = {} 38 | 
module.exports.tmp.mkdir = tmp.mkdir 39 | module.exports.tmp.withTmp = tmp.withTmp 40 | 41 | module.exports.verify = verify 42 | module.exports.verify.lastRun = verify.lastRun 43 | -------------------------------------------------------------------------------- /lib/memoization.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { LRUCache } = require('lru-cache') 4 | 5 | const MEMOIZED = new LRUCache({ 6 | max: 500, 7 | maxSize: 50 * 1024 * 1024, // 50MB 8 | ttl: 3 * 60 * 1000, // 3 minutes 9 | sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, 10 | }) 11 | 12 | module.exports.clearMemoized = clearMemoized 13 | 14 | function clearMemoized () { 15 | const old = {} 16 | MEMOIZED.forEach((v, k) => { 17 | old[k] = v 18 | }) 19 | MEMOIZED.clear() 20 | return old 21 | } 22 | 23 | module.exports.put = put 24 | 25 | function put (cache, entry, data, opts) { 26 | pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) 27 | putDigest(cache, entry.integrity, data, opts) 28 | } 29 | 30 | module.exports.put.byDigest = putDigest 31 | 32 | function putDigest (cache, integrity, data, opts) { 33 | pickMem(opts).set(`digest:${cache}:${integrity}`, data) 34 | } 35 | 36 | module.exports.get = get 37 | 38 | function get (cache, key, opts) { 39 | return pickMem(opts).get(`key:${cache}:${key}`) 40 | } 41 | 42 | module.exports.get.byDigest = getDigest 43 | 44 | function getDigest (cache, integrity, opts) { 45 | return pickMem(opts).get(`digest:${cache}:${integrity}`) 46 | } 47 | 48 | class ObjProxy { 49 | constructor (obj) { 50 | this.obj = obj 51 | } 52 | 53 | get (key) { 54 | return this.obj[key] 55 | } 56 | 57 | set (key, val) { 58 | this.obj[key] = val 59 | } 60 | } 61 | 62 | function pickMem (opts) { 63 | if (!opts || !opts.memoize) { 64 | return MEMOIZED 65 | } else if (opts.memoize.get && opts.memoize.set) { 66 | return opts.memoize 67 | } else if (typeof opts.memoize === 'object') { 68 | return new ObjProxy(opts.memoize) 69 | } else { 70 | return MEMOIZED 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /lib/put.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const index = require('./entry-index') 4 | const memo = require('./memoization') 5 | const write = require('./content/write') 6 | const Flush = require('minipass-flush') 7 | const { PassThrough } = require('minipass-collect') 8 | const Pipeline = require('minipass-pipeline') 9 | 10 | const putOpts = (opts) => ({ 11 | algorithms: ['sha512'], 12 | ...opts, 13 | }) 14 | 15 | module.exports = putData 16 | 17 | async function putData (cache, key, data, opts = {}) { 18 | const { memoize } = opts 19 | opts = putOpts(opts) 20 | const res = await write(cache, data, opts) 21 | const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) 22 | if (memoize) { 23 | memo.put(cache, entry, data, opts) 24 | } 25 | 26 | return res.integrity 27 | } 28 | 29 | module.exports.stream = putStream 30 | 31 | function putStream (cache, key, opts = {}) { 32 | const { memoize } = opts 33 | opts = putOpts(opts) 34 | let integrity 35 | let size 36 | let error 37 | 38 | let memoData 39 | const pipeline = new Pipeline() 40 | // first item in the pipeline is the memoizer, because we need 41 | // that to end first and get the collected data. 
42 | if (memoize) { 43 | const memoizer = new PassThrough().on('collect', data => { 44 | memoData = data 45 | }) 46 | pipeline.push(memoizer) 47 | } 48 | 49 | // contentStream is a write-only, not a passthrough 50 | // no data comes out of it. 51 | const contentStream = write.stream(cache, opts) 52 | .on('integrity', (int) => { 53 | integrity = int 54 | }) 55 | .on('size', (s) => { 56 | size = s 57 | }) 58 | .on('error', (err) => { 59 | error = err 60 | }) 61 | 62 | pipeline.push(contentStream) 63 | 64 | // last but not least, we write the index and emit hash and size, 65 | // and memoize if we're doing that 66 | pipeline.push(new Flush({ 67 | async flush () { 68 | if (!error) { 69 | const entry = await index.insert(cache, key, integrity, { ...opts, size }) 70 | if (memoize && memoData) { 71 | memo.put(cache, entry, memoData, opts) 72 | } 73 | pipeline.emit('integrity', integrity) 74 | pipeline.emit('size', size) 75 | } 76 | }, 77 | })) 78 | 79 | return pipeline 80 | } 81 | -------------------------------------------------------------------------------- /lib/rm.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { rm } = require('fs/promises') 4 | const glob = require('./util/glob.js') 5 | const index = require('./entry-index') 6 | const memo = require('./memoization') 7 | const path = require('path') 8 | const rmContent = require('./content/rm') 9 | 10 | module.exports = entry 11 | module.exports.entry = entry 12 | 13 | function entry (cache, key, opts) { 14 | memo.clearMemoized() 15 | return index.delete(cache, key, opts) 16 | } 17 | 18 | module.exports.content = content 19 | 20 | function content (cache, integrity) { 21 | memo.clearMemoized() 22 | return rmContent(cache, integrity) 23 | } 24 | 25 | module.exports.all = all 26 | 27 | async function all (cache) { 28 | memo.clearMemoized() 29 | const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) 30 | return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) 31 | } 32 | -------------------------------------------------------------------------------- /lib/util/glob.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { glob } = require('glob') 4 | const path = require('path') 5 | 6 | const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) 7 | module.exports = (path, options) => glob(globify(path), options) 8 | -------------------------------------------------------------------------------- /lib/util/hash-to-segments.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = hashToSegments 4 | 5 | function hashToSegments (hash) { 6 | return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] 7 | } 8 | -------------------------------------------------------------------------------- /lib/util/tmp.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { withTempDir } = require('@npmcli/fs') 4 | const fs = require('fs/promises') 5 | const path = require('path') 6 | 7 | module.exports.mkdir = mktmpdir 8 | 9 | async function mktmpdir (cache, opts = {}) { 10 | const { tmpPrefix } = opts 11 | const tmpDir = path.join(cache, 'tmp') 12 | await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) 13 | // do not use path.join(), it drops the trailing / if tmpPrefix is unset 14 | const target = 
`${tmpDir}${path.sep}${tmpPrefix || ''}` 15 | return fs.mkdtemp(target, { owner: 'inherit' }) 16 | } 17 | 18 | module.exports.withTmp = withTmp 19 | 20 | function withTmp (cache, opts, cb) { 21 | if (!cb) { 22 | cb = opts 23 | opts = {} 24 | } 25 | return withTempDir(path.join(cache, 'tmp'), cb, opts) 26 | } 27 | -------------------------------------------------------------------------------- /lib/verify.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { 4 | mkdir, 5 | readFile, 6 | rm, 7 | stat, 8 | truncate, 9 | writeFile, 10 | } = require('fs/promises') 11 | const contentPath = require('./content/path') 12 | const fsm = require('fs-minipass') 13 | const glob = require('./util/glob.js') 14 | const index = require('./entry-index') 15 | const path = require('path') 16 | const ssri = require('ssri') 17 | 18 | const hasOwnProperty = (obj, key) => 19 | Object.prototype.hasOwnProperty.call(obj, key) 20 | 21 | const verifyOpts = (opts) => ({ 22 | concurrency: 20, 23 | log: { silly () {} }, 24 | ...opts, 25 | }) 26 | 27 | module.exports = verify 28 | 29 | async function verify (cache, opts) { 30 | opts = verifyOpts(opts) 31 | opts.log.silly('verify', 'verifying cache at', cache) 32 | 33 | const steps = [ 34 | markStartTime, 35 | fixPerms, 36 | garbageCollect, 37 | rebuildIndex, 38 | cleanTmp, 39 | writeVerifile, 40 | markEndTime, 41 | ] 42 | 43 | const stats = {} 44 | for (const step of steps) { 45 | const label = step.name 46 | const start = new Date() 47 | const s = await step(cache, opts) 48 | if (s) { 49 | Object.keys(s).forEach((k) => { 50 | stats[k] = s[k] 51 | }) 52 | } 53 | const end = new Date() 54 | if (!stats.runTime) { 55 | stats.runTime = {} 56 | } 57 | stats.runTime[label] = end - start 58 | } 59 | stats.runTime.total = stats.endTime - stats.startTime 60 | opts.log.silly( 61 | 'verify', 62 | 'verification finished for', 63 | cache, 64 | 'in', 65 | `${stats.runTime.total}ms` 66 | ) 67 | return stats 68 | } 69 | 70 | async function markStartTime () { 71 | return { startTime: new Date() } 72 | } 73 | 74 | async function markEndTime () { 75 | return { endTime: new Date() } 76 | } 77 | 78 | async function fixPerms (cache, opts) { 79 | opts.log.silly('verify', 'fixing cache permissions') 80 | await mkdir(cache, { recursive: true }) 81 | return null 82 | } 83 | 84 | // Implements a naive mark-and-sweep tracing garbage collector. 85 | // 86 | // The algorithm is basically as follows: 87 | // 1. Read (and filter) all index entries ("pointers") 88 | // 2. Mark each integrity value as "live" 89 | // 3. Read entire filesystem tree in `content-vX/` dir 90 | // 4. If content is live, verify its checksum and delete it if it fails 91 | // 5. If content is not marked as live, rm it. 
92 | // 93 | async function garbageCollect (cache, opts) { 94 | opts.log.silly('verify', 'garbage collecting content') 95 | const { default: pMap } = await import('p-map') 96 | const indexStream = index.lsStream(cache) 97 | const liveContent = new Set() 98 | indexStream.on('data', (entry) => { 99 | if (opts.filter && !opts.filter(entry)) { 100 | return 101 | } 102 | 103 | // integrity is stringified, re-parse it so we can get each hash 104 | const integrity = ssri.parse(entry.integrity) 105 | for (const algo in integrity) { 106 | liveContent.add(integrity[algo].toString()) 107 | } 108 | }) 109 | await new Promise((resolve, reject) => { 110 | indexStream.on('end', resolve).on('error', reject) 111 | }) 112 | const contentDir = contentPath.contentDir(cache) 113 | const files = await glob(path.join(contentDir, '**'), { 114 | follow: false, 115 | nodir: true, 116 | nosort: true, 117 | }) 118 | const stats = { 119 | verifiedContent: 0, 120 | reclaimedCount: 0, 121 | reclaimedSize: 0, 122 | badContentCount: 0, 123 | keptSize: 0, 124 | } 125 | await pMap( 126 | files, 127 | async (f) => { 128 | const split = f.split(/[/\\]/) 129 | const digest = split.slice(split.length - 3).join('') 130 | const algo = split[split.length - 4] 131 | const integrity = ssri.fromHex(digest, algo) 132 | if (liveContent.has(integrity.toString())) { 133 | const info = await verifyContent(f, integrity) 134 | if (!info.valid) { 135 | stats.reclaimedCount++ 136 | stats.badContentCount++ 137 | stats.reclaimedSize += info.size 138 | } else { 139 | stats.verifiedContent++ 140 | stats.keptSize += info.size 141 | } 142 | } else { 143 | // No entries refer to this content. We can delete. 144 | stats.reclaimedCount++ 145 | const s = await stat(f) 146 | await rm(f, { recursive: true, force: true }) 147 | stats.reclaimedSize += s.size 148 | } 149 | return stats 150 | }, 151 | { concurrency: opts.concurrency } 152 | ) 153 | return stats 154 | } 155 | 156 | async function verifyContent (filepath, sri) { 157 | const contentInfo = {} 158 | try { 159 | const { size } = await stat(filepath) 160 | contentInfo.size = size 161 | contentInfo.valid = true 162 | await ssri.checkStream(new fsm.ReadStream(filepath), sri) 163 | } catch (err) { 164 | if (err.code === 'ENOENT') { 165 | return { size: 0, valid: false } 166 | } 167 | if (err.code !== 'EINTEGRITY') { 168 | throw err 169 | } 170 | 171 | await rm(filepath, { recursive: true, force: true }) 172 | contentInfo.valid = false 173 | } 174 | return contentInfo 175 | } 176 | 177 | async function rebuildIndex (cache, opts) { 178 | opts.log.silly('verify', 'rebuilding index') 179 | const { default: pMap } = await import('p-map') 180 | const entries = await index.ls(cache) 181 | const stats = { 182 | missingContent: 0, 183 | rejectedEntries: 0, 184 | totalEntries: 0, 185 | } 186 | const buckets = {} 187 | for (const k in entries) { 188 | /* istanbul ignore else */ 189 | if (hasOwnProperty(entries, k)) { 190 | const hashed = index.hashKey(k) 191 | const entry = entries[k] 192 | const excluded = opts.filter && !opts.filter(entry) 193 | excluded && stats.rejectedEntries++ 194 | if (buckets[hashed] && !excluded) { 195 | buckets[hashed].push(entry) 196 | } else if (buckets[hashed] && excluded) { 197 | // skip 198 | } else if (excluded) { 199 | buckets[hashed] = [] 200 | buckets[hashed]._path = index.bucketPath(cache, k) 201 | } else { 202 | buckets[hashed] = [entry] 203 | buckets[hashed]._path = index.bucketPath(cache, k) 204 | } 205 | } 206 | } 207 | await pMap( 208 | Object.keys(buckets), 209 | (key) 
=> { 210 | return rebuildBucket(cache, buckets[key], stats, opts) 211 | }, 212 | { concurrency: opts.concurrency } 213 | ) 214 | return stats 215 | } 216 | 217 | async function rebuildBucket (cache, bucket, stats) { 218 | await truncate(bucket._path) 219 | // This needs to be serialized because cacache explicitly 220 | // lets very racy bucket conflicts clobber each other. 221 | for (const entry of bucket) { 222 | const content = contentPath(cache, entry.integrity) 223 | try { 224 | await stat(content) 225 | await index.insert(cache, entry.key, entry.integrity, { 226 | metadata: entry.metadata, 227 | size: entry.size, 228 | time: entry.time, 229 | }) 230 | stats.totalEntries++ 231 | } catch (err) { 232 | if (err.code === 'ENOENT') { 233 | stats.rejectedEntries++ 234 | stats.missingContent++ 235 | } else { 236 | throw err 237 | } 238 | } 239 | } 240 | } 241 | 242 | function cleanTmp (cache, opts) { 243 | opts.log.silly('verify', 'cleaning tmp directory') 244 | return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) 245 | } 246 | 247 | async function writeVerifile (cache, opts) { 248 | const verifile = path.join(cache, '_lastverified') 249 | opts.log.silly('verify', 'writing verifile to ' + verifile) 250 | return writeFile(verifile, `${Date.now()}`) 251 | } 252 | 253 | module.exports.lastRun = lastRun 254 | 255 | async function lastRun (cache) { 256 | const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) 257 | return new Date(+data) 258 | } 259 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cacache", 3 | "version": "19.0.1", 4 | "cache-version": { 5 | "content": "2", 6 | "index": "5" 7 | }, 8 | "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", 9 | "main": "lib/index.js", 10 | "files": [ 11 | "bin/", 12 | "lib/" 13 | ], 14 | "scripts": { 15 | "test": "tap", 16 | "snap": "tap", 17 | "coverage": "tap", 18 | "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", 19 | "lint": "npm run eslint", 20 | "npmclilint": "npmcli-lint", 21 | "lintfix": "npm run eslint -- --fix", 22 | "postsnap": "npm run lintfix --", 23 | "postlint": "template-oss-check", 24 | "posttest": "npm run lint", 25 | "template-oss-apply": "template-oss-apply --force", 26 | "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" 27 | }, 28 | "repository": { 29 | "type": "git", 30 | "url": "git+https://github.com/npm/cacache.git" 31 | }, 32 | "keywords": [ 33 | "cache", 34 | "caching", 35 | "content-addressable", 36 | "sri", 37 | "sri hash", 38 | "subresource integrity", 39 | "cache", 40 | "storage", 41 | "store", 42 | "file store", 43 | "filesystem", 44 | "disk cache", 45 | "disk storage" 46 | ], 47 | "license": "ISC", 48 | "dependencies": { 49 | "@npmcli/fs": "^4.0.0", 50 | "fs-minipass": "^3.0.0", 51 | "glob": "^10.2.2", 52 | "lru-cache": "^10.0.1", 53 | "minipass": "^7.0.3", 54 | "minipass-collect": "^2.0.1", 55 | "minipass-flush": "^1.0.5", 56 | "minipass-pipeline": "^1.2.4", 57 | "p-map": "^7.0.2", 58 | "ssri": "^12.0.0", 59 | "tar": "^7.4.3", 60 | "unique-filename": "^4.0.0" 61 | }, 62 | "devDependencies": { 63 | "@npmcli/eslint-config": "^5.0.0", 64 | "@npmcli/template-oss": "4.23.4", 65 | "tap": "^16.0.0" 66 | }, 67 | "engines": { 68 | "node": "^18.17.0 || >=20.5.0" 69 | }, 70 | "templateOSS": { 71 | "//@npmcli/template-oss": "This 
file is partially managed by @npmcli/template-oss. Edits may be overwritten.", 72 | "windowsCI": false, 73 | "version": "4.23.4", 74 | "publish": "true" 75 | }, 76 | "author": "GitHub Inc.", 77 | "tap": { 78 | "nyc-arg": [ 79 | "--exclude", 80 | "tap-snapshots/**" 81 | ] 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /release-please-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "group-pull-request-title-pattern": "chore: release ${version}", 3 | "pull-request-title-pattern": "chore: release${component} ${version}", 4 | "changelog-sections": [ 5 | { 6 | "type": "feat", 7 | "section": "Features", 8 | "hidden": false 9 | }, 10 | { 11 | "type": "fix", 12 | "section": "Bug Fixes", 13 | "hidden": false 14 | }, 15 | { 16 | "type": "docs", 17 | "section": "Documentation", 18 | "hidden": false 19 | }, 20 | { 21 | "type": "deps", 22 | "section": "Dependencies", 23 | "hidden": false 24 | }, 25 | { 26 | "type": "chore", 27 | "section": "Chores", 28 | "hidden": true 29 | } 30 | ], 31 | "packages": { 32 | ".": { 33 | "package-name": "" 34 | } 35 | }, 36 | "prerelease-type": "pre" 37 | } 38 | -------------------------------------------------------------------------------- /test/content/read.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs') 4 | const path = require('path') 5 | const ssri = require('ssri') 6 | const t = require('tap') 7 | 8 | const CacheContent = require('../fixtures/cache-content') 9 | 10 | const read = require('../../lib/content/read') 11 | 12 | // defines reusable errors 13 | const genericError = new Error('ERR') 14 | genericError.code = 'ERR' 15 | const permissionError = new Error('EPERM') 16 | permissionError.code = 'EPERM' 17 | 18 | // helpers 19 | const getRead = (t, opts) => t.mock('../../lib/content/read', opts) 20 | const getReadStatFailure = (t, err) => getRead(t, { 21 | fs: { 22 | ...fs, 23 | statSync: () => { 24 | throw err 25 | }, 26 | }, 27 | 'fs/promises': { 28 | ...fs.promises, 29 | stat: async () => { 30 | throw err 31 | }, 32 | }, 33 | }) 34 | 35 | t.test('read: returns a Promise with cache content data', async t => { 36 | const CONTENT = Buffer.from('foobarbaz') 37 | const INTEGRITY = ssri.fromData(CONTENT) 38 | const CACHE = t.testdir( 39 | CacheContent({ 40 | [INTEGRITY]: CONTENT, 41 | }) 42 | ) 43 | const data = await read(CACHE, INTEGRITY) 44 | t.same(data, CONTENT, 'cache contents read correctly') 45 | }) 46 | 47 | t.test('read.stream: returns a stream with cache content data', async t => { 48 | const CONTENT = Buffer.from('foobarbaz') 49 | const INTEGRITY = ssri.fromData(CONTENT) 50 | const CACHE = t.testdir( 51 | CacheContent({ 52 | [INTEGRITY]: CONTENT, 53 | }) 54 | ) 55 | const stream = read.stream(CACHE, INTEGRITY) 56 | const [fromStream, fromBulk] = await Promise.all([ 57 | stream.concat(), 58 | read(CACHE, INTEGRITY, { size: CONTENT.length }), 59 | ]) 60 | t.same(fromStream, CONTENT, 'stream data checks out') 61 | t.same(fromBulk, CONTENT, 'promise data checks out') 62 | }) 63 | 64 | t.test('read: allows hashAlgorithm configuration', async t => { 65 | const CONTENT = Buffer.from('foobarbaz') 66 | const HASH = 'sha384' 67 | const INTEGRITY = ssri.fromData(CONTENT, { algorithms: [HASH] }) 68 | const CACHE = t.testdir( 69 | CacheContent({ 70 | [INTEGRITY]: CONTENT, 71 | }) 72 | ) 73 | const stream = read.stream(CACHE, INTEGRITY) 74 | const [fromStream, fromBulk] = 
await Promise.all([ 75 | stream.concat(), 76 | read(CACHE, INTEGRITY), 77 | ]) 78 | t.same(fromStream, CONTENT, 'stream used algorithm') 79 | t.same(fromBulk, CONTENT, 'promise used algorithm') 80 | }) 81 | 82 | t.test('read: errors if content missing', async t => { 83 | const CACHE = t.testdir({}) 84 | const stream = read.stream(CACHE, 'sha512-whatnot') 85 | stream.on('data', function (data) { 86 | throw new Error('unexpected data: ' + JSON.stringify(data)) 87 | }) 88 | stream.on('end', function () { 89 | throw new Error('end was emitted even though stream errored') 90 | }) 91 | await t.rejects( 92 | stream.promise(), 93 | { code: 'ENOENT' }, 94 | 'stream got the right error' 95 | ) 96 | await t.rejects( 97 | read(CACHE, 'sha512-whatnot'), 98 | { code: 'ENOENT' }, 99 | 'bulk got the right error' 100 | ) 101 | }) 102 | 103 | t.test('read: errors if content fails checksum', async t => { 104 | const CONTENT = Buffer.from('foobarbaz') 105 | const INTEGRITY = ssri.fromData(CONTENT) 106 | const CACHE = t.testdir( 107 | CacheContent({ 108 | [INTEGRITY]: CONTENT.slice(3), // invalid contents! 109 | }) 110 | ) 111 | const stream = read.readStream(CACHE, INTEGRITY) 112 | stream.on('end', function () { 113 | throw new Error('end was emitted even though stream errored') 114 | }) 115 | await t.rejects( 116 | stream.promise(), 117 | { code: 'EINTEGRITY' }, 118 | 'stream got the right error' 119 | ) 120 | await t.rejects( 121 | read(CACHE, INTEGRITY), 122 | { code: 'EINTEGRITY' }, 123 | 'bulk got the right error' 124 | ) 125 | }) 126 | 127 | t.test('read: errors if content size does not match size option', async t => { 128 | const CONTENT = Buffer.from('foobarbaz') 129 | const INTEGRITY = ssri.fromData(CONTENT) 130 | const CACHE = t.testdir( 131 | CacheContent({ 132 | [INTEGRITY]: CONTENT.slice(3), // invalid contents! 
133 | }) 134 | ) 135 | const stream = read.readStream(CACHE, INTEGRITY, { size: CONTENT.length }) 136 | stream.on('end', function () { 137 | throw new Error('end was called even though stream errored') 138 | }) 139 | await t.rejects( 140 | stream.promise(), 141 | { code: 'EBADSIZE' }, 142 | 'stream got the right error' 143 | ) 144 | await t.rejects( 145 | read(CACHE, INTEGRITY, { size: CONTENT.length }), 146 | { code: 'EBADSIZE' }, 147 | 'bulk got the right error' 148 | ) 149 | }) 150 | 151 | t.test('read: error while parsing provided integrity data', function (t) { 152 | const CACHE = t.testdir() 153 | const INTEGRITY = 'sha1-deadbeef' 154 | const mockedRead = getRead(t, { 155 | ssri: { 156 | parse () { 157 | throw genericError 158 | }, 159 | }, 160 | }) 161 | 162 | t.plan(1) 163 | return t.rejects( 164 | mockedRead(CACHE, INTEGRITY), 165 | genericError, 166 | 'should reject promise upon catching internal errors' 167 | ) 168 | }) 169 | 170 | t.test('read: unknown error parsing nested integrity data', function (t) { 171 | const CACHE = t.testdir() 172 | const INTEGRITY = 'sha1-deadbeef sha1-13371337' 173 | 174 | // patches method in order to force a last error scenario 175 | const mockedRead = getRead(t, { 176 | ssri: { 177 | parse (sri) { 178 | if (sri !== INTEGRITY) { 179 | throw genericError 180 | } 181 | 182 | return ssri.parse(sri) 183 | }, 184 | }, 185 | }) 186 | 187 | t.plan(1) 188 | return t.rejects( 189 | mockedRead(CACHE, INTEGRITY), 190 | genericError, 191 | 'should throw unknown errors' 192 | ) 193 | }) 194 | 195 | t.test('read: returns only first result if other hashes fail', function (t) { 196 | // sets up a cache that has multiple entries under the 197 | // same algorithm but then only one has real contents in the fs 198 | const CONTENT = { 199 | foo: Buffer.from('foo'), 200 | bar: Buffer.from('bar'), 201 | } 202 | const INTEGRITY = ssri.fromData(CONTENT.foo).concat( 203 | ssri.fromData(CONTENT.bar) 204 | ) 205 | const CACHE = t.testdir( 206 | CacheContent({ 207 | [INTEGRITY.sha512[1]]: CONTENT.bar, 208 | }) 209 | ) 210 | 211 | t.plan(1) 212 | return t.resolveMatch( 213 | read(CACHE, INTEGRITY), 214 | CONTENT.bar, 215 | 'should return only the first valid result' 216 | ) 217 | }) 218 | 219 | t.test('read: opening large files', function (t) { 220 | const CACHE = t.testdir() 221 | const mockedRead = getRead(t, { 222 | 'fs/promises': { 223 | ...fs.promises, 224 | stat: async () => { 225 | return { size: Number.MAX_SAFE_INTEGER } 226 | }, 227 | }, 228 | 'fs-minipass': { 229 | ReadStream: class { 230 | constructor (path, opts) { 231 | t.match( 232 | opts, 233 | { 234 | readSize: 64 * 1024 * 1024, 235 | size: Number.MAX_SAFE_INTEGER, 236 | }, 237 | 'should use fs-minipass interface' 238 | ) 239 | } 240 | }, 241 | }, 242 | 'minipass-pipeline': Array, 243 | }) 244 | 245 | t.plan(1) 246 | mockedRead(CACHE, 'sha1-deadbeef') 247 | }) 248 | 249 | t.test('hasContent: tests content existence', async t => { 250 | const CACHE = t.testdir( 251 | CacheContent({ 252 | 'sha1-deadbeef': '', 253 | }) 254 | ) 255 | const content = await read.hasContent(CACHE, 'sha1-deadbeef') 256 | t.ok(content.sri, 'returned sri for this content') 257 | t.equal(content.size, 0, 'returned the right size for this content') 258 | t.ok(content.stat.isFile(), 'returned actual stat object') 259 | await t.resolveMatch( 260 | read.hasContent(CACHE, 'sha1-not-there'), 261 | false, 262 | 'returned false for missing content' 263 | ) 264 | await t.resolveMatch( 265 | read.hasContent(CACHE, 'sha1-not-here
sha1-also-not-here'), 266 | false, 267 | 'multi-content hash failures work ok' 268 | ) 269 | }) 270 | 271 | t.test('hasContent: permission error', (t) => { 272 | const CACHE = t.testdir() 273 | // set up a synthetic permission error 274 | const mockedRead = getReadStatFailure(t, permissionError) 275 | 276 | t.plan(1) 277 | t.rejects( 278 | mockedRead.hasContent(CACHE, 'sha1-deadbeef sha1-13371337'), 279 | permissionError, 280 | 'should reject on permission errors' 281 | ) 282 | }) 283 | 284 | t.test('hasContent: generic error', (t) => { 285 | const CACHE = t.testdir() 286 | const mockedRead = getReadStatFailure(t, genericError) 287 | 288 | t.plan(1) 289 | t.resolves( 290 | mockedRead.hasContent(CACHE, 'sha1-deadbeef sha1-13371337'), 291 | 'should not reject on generic errors' 292 | ) 293 | }) 294 | 295 | t.test('hasContent: no integrity provided', (t) => { 296 | const CACHE = t.testdir() 297 | t.resolveMatch( 298 | read.hasContent(CACHE, ''), 299 | false, 300 | 'should resolve with a value of false' 301 | ) 302 | t.end() 303 | }) 304 | 305 | t.test('copy: copies content to a destination path', async t => { 306 | const CONTENT = Buffer.from('foobarbaz') 307 | const INTEGRITY = ssri.fromData(CONTENT) 308 | const CACHE = t.testdir( 309 | CacheContent({ 310 | [INTEGRITY]: CONTENT, 311 | }) 312 | ) 313 | const DEST = path.join(CACHE, 'foobar-file') 314 | await read.copy(CACHE, INTEGRITY, DEST) 315 | const data = await fs.promises.readFile(DEST) 316 | t.same(data, CONTENT, 'file successfully copied') 317 | }) 318 | -------------------------------------------------------------------------------- /test/content/rm.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const contentPath = require('../../lib/content/path') 4 | const fs = require('fs/promises') 5 | const t = require('tap') 6 | 7 | const CacheContent = require('../fixtures/cache-content') 8 | const rm = require('../../lib/content/rm') 9 | 10 | t.test('removes a content entry', function (t) { 11 | const CACHE = t.testdir( 12 | CacheContent({ 13 | 'sha1-deadbeef': '', 14 | }) 15 | ) 16 | return rm(CACHE, 'sha1-deadbeef') 17 | .then(() => fs.stat(contentPath(CACHE, 'sha1-deadbeef'))) 18 | .then(() => { 19 | throw new Error('expected an error') 20 | }) 21 | .catch((err) => { 22 | t.ok(err, 'fs.stat failed on rmed content') 23 | t.equal('ENOENT', err.code, 'file does not exist anymore') 24 | }) 25 | }) 26 | 27 | t.test('works fine if entry missing', function (t) { 28 | const CACHE = t.testdir(CacheContent({})) 29 | return rm(CACHE, 'sha1-deadbeef') 30 | .then(() => fs.stat(contentPath(CACHE, 'sha1-deadbeef'))) 31 | .then(() => { 32 | throw new Error('expected an error') 33 | }) 34 | .catch((err) => { 35 | t.ok(err, 'fs.stat failed on rmed content') 36 | t.equal('ENOENT', err.code, 'file does not exist anymore') 37 | }) 38 | }) 39 | -------------------------------------------------------------------------------- /test/content/write.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const events = require('events') 4 | const fs = require('fs') 5 | const { Minipass } = require('minipass') 6 | const path = require('path') 7 | const ssri = require('ssri') 8 | const t = require('tap') 9 | 10 | const CacheContent = require('../fixtures/cache-content') 11 | const contentPath = require('../../lib/content/path') 12 | 13 | const write = require('../../lib/content/write') 14 | 15 | t.test('basic put', async t => { 16 | const CACHE =
t.testdir() 17 | const CONTENT = 'foobarbaz' 18 | // Default is sha512 19 | const INTEGRITY = ssri.fromData(CONTENT) 20 | let integrity 21 | await write.stream(CACHE).on('integrity', (i) => { 22 | integrity = i 23 | }).end(CONTENT).promise() 24 | const cpath = contentPath(CACHE, integrity) 25 | t.same(integrity, INTEGRITY, 'calculated integrity value matches') 26 | t.ok(fs.lstatSync(cpath).isFile(), 'content inserted as a single file') 27 | t.equal(fs.readFileSync(cpath, 'utf8'), CONTENT, 'contents are identical to inserted content') 28 | }) 29 | 30 | t.test('basic put, providing external integrity emitter', async (t) => { 31 | const CACHE = t.testdir() 32 | const CONTENT = 'foobarbaz' 33 | const INTEGRITY = ssri.fromData(CONTENT) 34 | 35 | const write = t.mock('../../lib/content/write.js', { 36 | ssri: { 37 | ...ssri, 38 | integrityStream: () => { 39 | throw new Error('Should not be called') 40 | }, 41 | }, 42 | }) 43 | 44 | const source = new Minipass().end(CONTENT) 45 | 46 | const tee = new Minipass() 47 | 48 | const integrityStream = ssri.integrityStream() 49 | // since the integrityStream is not going anywhere, we need to manually resume it 50 | // otherwise it'll get stuck in paused mode and will never process any data events 51 | integrityStream.resume() 52 | const integrityStreamP = Promise.all([ 53 | events.once(integrityStream, 'integrity').then((res) => res[0]), 54 | events.once(integrityStream, 'size').then((res) => res[0]), 55 | ]) 56 | 57 | const contentStream = write.stream(CACHE, { integrityEmitter: integrityStream }) 58 | const contentStreamP = Promise.all([ 59 | events.once(contentStream, 'integrity').then((res) => res[0]), 60 | events.once(contentStream, 'size').then((res) => res[0]), 61 | contentStream.promise(), 62 | ]) 63 | 64 | tee.pipe(integrityStream) 65 | tee.pipe(contentStream) 66 | source.pipe(tee) 67 | 68 | const [ 69 | [ssriIntegrity, ssriSize], 70 | [contentIntegrity, contentSize], 71 | ] = await Promise.all([ 72 | integrityStreamP, 73 | contentStreamP, 74 | ]) 75 | 76 | t.equal(ssriSize, CONTENT.length, 'ssri got the right size') 77 | t.equal(contentSize, CONTENT.length, 'content got the right size') 78 | t.same(ssriIntegrity, INTEGRITY, 'ssri got the right integrity') 79 | t.same(contentIntegrity, INTEGRITY, 'content got the right integrity') 80 | 81 | const cpath = contentPath(CACHE, ssriIntegrity) 82 | t.ok(fs.lstatSync(cpath).isFile(), 'content inserted as a single file') 83 | t.equal(fs.readFileSync(cpath, 'utf8'), CONTENT, 'contents are identical to inserted content') 84 | }) 85 | 86 | t.test("checks input digest doesn't match data", async t => { 87 | const CONTENT = 'foobarbaz' 88 | const integrity = ssri.fromData(CONTENT) 89 | let int1 = null 90 | let int2 = null 91 | const CACHE = t.testdir() 92 | 93 | await t.rejects( 94 | write.stream(CACHE, { integrity }).on('integrity', (int) => { 95 | int1 = int 96 | }) 97 | .end('bazbarfoo').promise(), 98 | { code: 'EINTEGRITY' }, 99 | 'returns integrity error' 100 | ) 101 | t.equal(int1, null, 'no digest emitted') 102 | await write.stream(CACHE, { integrity }).on('integrity', int => { 103 | int2 = int 104 | }) 105 | .end(CONTENT).promise() 106 | t.same(int2, integrity, 'returns a matching digest') 107 | }) 108 | 109 | t.test('errors if stream ends with no data', async t => { 110 | const CACHE = t.testdir() 111 | let integrity = null 112 | await t.rejects( 113 | write.stream(CACHE).end('').on('integrity', int => { 114 | integrity = int 115 | }).promise(), 116 | { code: 'ENODATA' }, 117 | 'get an error with a 
useful code' 118 | ) 119 | t.equal(integrity, null, 'no digest returned') 120 | }) 121 | 122 | t.test('errors if input size does not match expected', async t => { 123 | let int1 = null 124 | let int2 = null 125 | 126 | const CACHE = t.testdir() 127 | await t.rejects( 128 | write.stream(CACHE, { size: 5 }).on('integrity', int => { 129 | int1 = int 130 | }).end('abc').promise(), 131 | { code: 'EBADSIZE', expected: 5, found: 3 }, 132 | 'get an error when data smaller than expected' 133 | ) 134 | t.equal(int1, null, 'no digest returned') 135 | await t.rejects( 136 | write.stream(CACHE, { size: 5 }).on('integrity', int => { 137 | int2 = int 138 | }).end('abcdefghi').promise(), 139 | { code: 'EBADSIZE', expected: 5, found: 9 }, 140 | 'get an error when data bigger than expected' 141 | ) 142 | t.equal(int2, null, 'no digest returned') 143 | }) 144 | 145 | t.test('does not overwrite content if already on disk', async t => { 146 | const CONTENT = 'foobarbaz' 147 | const INTEGRITY = ssri.fromData(CONTENT) 148 | const CACHE = t.testdir( 149 | CacheContent({ 150 | [INTEGRITY]: 'nope', 151 | }) 152 | ) 153 | 154 | let int1 155 | let int2 156 | // With a digest -- early short-circuiting 157 | await write.stream(CACHE, { integrity: INTEGRITY }).on('integrity', int => { 158 | int1 = int 159 | }) 160 | .end(CONTENT).promise() 161 | t.same(int1, INTEGRITY, 'short-circuit returns a matching digest') 162 | const d1 = fs.readFileSync(contentPath(CACHE, INTEGRITY), 'utf8') 163 | t.equal(d1, 'nope', 'process short-circuited. Data not written.') 164 | await write.stream(CACHE).on('integrity', int => { 165 | int2 = int 166 | }) 167 | .end(CONTENT).promise() 168 | t.same(int2, INTEGRITY, 'full write returns a matching digest') 169 | const d2 = fs.readFileSync(contentPath(CACHE, INTEGRITY), 'utf8') 170 | t.equal(d2, 'nope', 'previously-written data intact - no dupe write') 171 | }) 172 | 173 | t.test('errors if input stream errors', async t => { 174 | let integrity = null 175 | const CACHE = t.testdir() 176 | const putter = write.stream(CACHE) 177 | .on('integrity', (int) => { 178 | integrity = int 179 | }) 180 | setTimeout(() => putter.inputStream.emit('error', new Error('bleh'))) 181 | await t.rejects(putter.promise(), { message: 'bleh' }) 182 | t.equal(integrity, null, 'no digest returned') 183 | t.throws(() => { 184 | fs.statSync(contentPath(CACHE, ssri.fromData('foobarbaz'))) 185 | }, { 186 | code: 'ENOENT', 187 | }, 'target file missing. No files created') 188 | }) 189 | 190 | t.test('exits normally if file already open', (t) => { 191 | const CONTENT = 'foobarbaz' 192 | const INTEGRITY = ssri.fromData(CONTENT) 193 | const CACHE = t.testdir( 194 | CacheContent({ 195 | [INTEGRITY]: CONTENT, 196 | }) 197 | ) 198 | let integrity 199 | // This case would only fail on Windows, when an entry is being read. 200 | // Generally, you'd get an EBUSY back. 
201 | fs.open(contentPath(CACHE, INTEGRITY), 'r+', async function (err, fd) { 202 | if (err) { 203 | throw err 204 | } 205 | 206 | await write.stream(CACHE).on('integrity', int => { 207 | integrity = int 208 | }) 209 | .end(CONTENT) 210 | .promise() 211 | t.same(integrity, INTEGRITY, 'returns a matching digest') 212 | fs.closeSync(fd) 213 | fs.rmSync(contentPath(CACHE, INTEGRITY), { recursive: true, force: true }) 214 | t.end() 215 | }) 216 | }) 217 | 218 | t.test('cleans up tmp on successful completion', async t => { 219 | const CONTENT = 'foobarbaz' 220 | const CACHE = t.testdir() 221 | await write.stream(CACHE).end(CONTENT).promise() 222 | await new Promise((resolve, reject) => { 223 | const tmp = path.join(CACHE, 'tmp') 224 | fs.readdir(tmp, function (err, files) { 225 | if (!err || (err && err.code === 'ENOENT')) { 226 | files = files || [] 227 | t.same(files, [], 'nothing in the tmp dir!') 228 | resolve() 229 | } else { 230 | reject(err) 231 | } 232 | }) 233 | }) 234 | }) 235 | 236 | t.test('Handles moveFile error other than EEXIST', async t => { 237 | const write = t.mock('../../lib/content/write.js', { 238 | '@npmcli/fs': { 239 | moveFile: async () => { 240 | throw new Error('Unknown error') 241 | }, 242 | }, 243 | }) 244 | const CONTENT = 'foobarbaz' 245 | const CACHE = t.testdir() 246 | await t.rejects( 247 | write.stream(CACHE).end(CONTENT).promise(), 248 | { message: 'Unknown error' } 249 | ) 250 | }) 251 | 252 | t.test('cleans up tmp on streaming error', (t) => { 253 | const CONTENT = 'foobarbaz' 254 | const CACHE = t.testdir() 255 | return t.rejects( 256 | write.stream(CACHE, { size: 1 }) 257 | .end(CONTENT) 258 | .promise(), 259 | { code: 'EBADSIZE' }, 260 | 'got expected code' 261 | ) 262 | .then(() => new Promise((resolve, reject) => { 263 | const tmp = path.join(CACHE, 'tmp') 264 | fs.readdir(tmp, function (err, files) { 265 | if (!err || (err && err.code === 'ENOENT')) { 266 | files = files || [] 267 | t.same(files, [], 'nothing in the tmp dir!') 268 | resolve() 269 | } else { 270 | reject(err) 271 | } 272 | }) 273 | })) 274 | }) 275 | 276 | t.test('cleans up tmp on non streaming error', (t) => { 277 | // mock writefile and make it reject 278 | const CONTENT = 'foobarbaz' 279 | const CACHE = t.testdir({ 'content-v2': 'oh no a file' }) 280 | return t.rejects(write(CACHE, CONTENT)) 281 | .then(() => new Promise((resolve, reject) => { 282 | const tmp = path.join(CACHE, 'tmp') 283 | fs.readdir(tmp, function (err, files) { 284 | if (!err || (err && err.code === 'ENOENT')) { 285 | files = files || [] 286 | t.same(files, [], 'nothing in the tmp dir!') 287 | resolve() 288 | } else { 289 | reject(err) 290 | } 291 | }) 292 | })) 293 | }) 294 | 295 | t.test('checks the size of stream data if opts.size provided', (t) => { 296 | const CONTENT = 'foobarbaz' 297 | let int1 = null 298 | const int2 = null 299 | let int3 = null 300 | 301 | const CACHE = t.testdir() 302 | t.test('chair too small', t => { 303 | const w = write.stream(CACHE, { size: CONTENT.length }) 304 | w.write(CONTENT.slice(3)) 305 | w.on('integrity', int => { 306 | int1 = int 307 | }) 308 | setTimeout(() => w.end()) 309 | return t.rejects(w.promise(), { code: 'EBADSIZE' }, 'bad size error code') 310 | .then(() => t.equal(int1, null, 'no digest returned by first stream')) 311 | }) 312 | 313 | t.test('chair is too big', t => { 314 | const w = write.stream(CACHE, { size: CONTENT.length }) 315 | w.write(CONTENT) 316 | setTimeout(() => w.end('quux')) 317 | return t.rejects(w.promise(), { code: 'EBADSIZE' }, 'bad size error 
code') 318 | .then(() => t.equal(int2, null, 'no digest returned by second stream')) 319 | }) 320 | 321 | return t.test('chair is juuuuust right', t => { 322 | const w = write.stream(CACHE, { size: CONTENT.length }) 323 | w.write(CONTENT) 324 | w.on('integrity', int => { 325 | int3 = int 326 | }) 327 | setTimeout(() => w.end()) 328 | return w.promise().then(() => t.ok(int3, 'got a digest')) 329 | }) 330 | }) 331 | 332 | t.test('accepts multiple algorithms', async t => { 333 | const CACHE = t.testdir() 334 | const CONTENT = 'multiple algorithms!' 335 | const { integrity } = await write(CACHE, CONTENT, { algorithms: ['sha512', 'sha1'] }) 336 | const cpath512 = contentPath(CACHE, integrity.sha512.toString()) 337 | t.ok(fs.lstatSync(cpath512).isFile(), 'sha512 content written') 338 | const cpath1 = contentPath(CACHE, integrity.sha1.toString()) 339 | t.ok(fs.lstatSync(cpath1).isFile(), 'sha1 content written') 340 | t.equal(fs.readFileSync(cpath512, 'utf8'), 341 | CONTENT, 'sha512 contents are identical to inserted content') 342 | t.equal(fs.readFileSync(cpath1, 'utf8'), 343 | CONTENT, 'sha1 contents are identical to inserted content') 344 | }) 345 | 346 | t.test('writes to cache with default options', t => { 347 | const CACHE = t.testdir() 348 | return t.resolveMatch(write(CACHE, 'foo'), { 349 | size: 3, 350 | integrity: { 351 | sha512: [ 352 | { 353 | /* eslint-disable-next-line max-len */ 354 | source: 'sha512-9/u6bgY2+JDlb7vzKD5STG+jIErimDgtYkdB0NxmODJuKCxBvl5CVNiCB3LFUYosWowMf37aGVlKfrU5RT4e1w==', 355 | /* eslint-disable-next-line max-len */ 356 | digest: '9/u6bgY2+JDlb7vzKD5STG+jIErimDgtYkdB0NxmODJuKCxBvl5CVNiCB3LFUYosWowMf37aGVlKfrU5RT4e1w==', 357 | algorithm: 'sha512', 358 | options: [], 359 | }, 360 | ], 361 | }, 362 | }) 363 | }) 364 | -------------------------------------------------------------------------------- /test/entry-index.find.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const CacheIndex = require('./fixtures/cache-index') 4 | const path = require('path') 5 | const t = require('tap') 6 | 7 | const SIZE = 999 8 | const contentPath = require('../lib/content/path') 9 | const index = require('../lib/entry-index') 10 | 11 | t.test('index.find cache hit', async t => { 12 | const entry = { 13 | key: 'whatever', 14 | integrity: 'whatnot-deadbeef', 15 | time: 12345, 16 | metadata: 'omgsometa', 17 | size: 5, 18 | } 19 | const CACHE = t.testdir( 20 | CacheIndex({ 21 | whatever: entry, 22 | }) 23 | ) 24 | const info = await index.find(CACHE, entry.key) 25 | t.ok(info, 'cache hit') 26 | t.equal( 27 | info.path, 28 | contentPath(CACHE, entry.integrity), 29 | 'path added to info' 30 | ) 31 | delete info.path 32 | t.same(info, entry, 'rest of info matches entry on disk') 33 | }) 34 | 35 | t.test('index.find cache miss', async t => { 36 | const CACHE = t.testdir( 37 | CacheIndex({ 38 | foo: { key: 'foo' }, 39 | 'w/e': { key: 'w/e' }, 40 | }) 41 | ) 42 | await t.resolveMatch( 43 | index.find(CACHE, 'whatever'), 44 | null, 45 | 'cache miss when specific key not present' 46 | ) 47 | }) 48 | 49 | t.test('index.find no cache', async t => { 50 | await t.resolveMatch( 51 | index.find(path.resolve('adirectorythatdoesnotexit'), 'whatever'), 52 | null, 53 | 'if there is no cache dir, behaves like a cache miss' 54 | ) 55 | }) 56 | 57 | t.test('index.find key case-sensitivity', async t => { 58 | const CACHE = t.testdir( 59 | CacheIndex({ 60 | jsonstream: { 61 | key: 'jsonstream', 62 | integrity: 'sha1-lowercase', 63 | time: 54321, 64 | 
size: SIZE, 65 | }, 66 | JSONStream: { 67 | key: 'JSONStream', 68 | integrity: 'sha1-capitalised', 69 | time: 12345, 70 | size: SIZE, 71 | }, 72 | }) 73 | ) 74 | await t.resolveMatch( 75 | index.find(CACHE, 'JSONStream'), 76 | { key: 'JSONStream' }, 77 | 'fetched the correct entry' 78 | ) 79 | await t.resolveMatch( 80 | index.find(CACHE, 'jsonstream'), 81 | { key: 'jsonstream' }, 82 | 'fetched the correct entry' 83 | ) 84 | await t.resolveMatch( 85 | index.find(CACHE, 'jsonStream'), 86 | null, 87 | 'no entry for jsonStream' 88 | ) 89 | }) 90 | 91 | t.test('index.find path-breaking characters', async t => { 92 | const entry = { 93 | key: ';;!registry\nhttps://registry.npmjs.org/back \\ slash@Cool™?', 94 | integrity: 'sha1-deadbeef', 95 | time: 12345, 96 | metadata: 'omgsometa', 97 | size: 9, 98 | } 99 | const CACHE = t.testdir( 100 | CacheIndex({ 101 | [entry.key]: entry, 102 | }) 103 | ) 104 | const info = await index.find(CACHE, entry.key) 105 | t.ok(info, 'cache hit') 106 | delete info.path 107 | t.same( 108 | info, 109 | entry, 110 | 'info remains intact even with fs-unfriendly chars' 111 | ) 112 | }) 113 | 114 | t.test('index.find extremely long keys', async t => { 115 | let key = '' 116 | for (let i = 0; i < 10000; i++) { 117 | key += i 118 | } 119 | 120 | const entry = { 121 | key: key, 122 | integrity: 'sha1-deadbeef', 123 | time: 12345, 124 | metadata: 'woo', 125 | size: 10, 126 | } 127 | const CACHE = t.testdir( 128 | CacheIndex({ 129 | [entry.key]: entry, 130 | }) 131 | ) 132 | const info = await index.find(CACHE, entry.key) 133 | t.ok(info, 'cache hit') 134 | delete info.path 135 | t.same(info, entry, 'info remains intact even with absurdly long key') 136 | }) 137 | 138 | t.test('index.find multiple index entries for key', async t => { 139 | const key = 'whatever' 140 | const CACHE = t.testdir( 141 | CacheIndex({ 142 | whatever: [ 143 | { key: key, integrity: 'sha1-deadbeef', time: 54321 }, 144 | { key: key, integrity: 'sha1-bada55', time: 12345 }, 145 | ], 146 | }) 147 | ) 148 | const info = await index.find(CACHE, key) 149 | t.ok(info, 'cache hit') 150 | t.equal(info.integrity, 'sha1-bada55', 'most recent entry wins') 151 | }) 152 | 153 | t.test('index.find garbled data in index file', async t => { 154 | // Even though `index.insert()` is safe from direct 155 | // race conditions, it's still possible for individual 156 | // entries to become corrupted, or to be partially written, 157 | // since `index.find` does not acquire a write-preventing lock. 158 | // 159 | // Because entries are newline-prepended and only one 160 | // can be written at a time, the main possible corruption 161 | // source is if an append fails mid-write (for example, due 162 | // to the process crashing). In this case, the corrupt entry 163 | // will simply be skipped. 164 | const key = 'whatever' 165 | const stringified = JSON.stringify({ 166 | key: key, 167 | integrity: 'sha1-deadbeef', 168 | time: 54321, 169 | }) 170 | const CACHE = t.testdir( 171 | CacheIndex({ 172 | whatever: 173 | '\n' + 174 | `${index.hashEntry(stringified)}\t${stringified}` + 175 | '\n{"key": "' + 176 | key + 177 | '"\noway', 178 | }) 179 | ) 180 | const info = await index.find(CACHE, key) 181 | t.ok(info, 'cache hit in spite of crash-induced fail') 182 | t.equal(info.integrity, 'sha1-deadbeef', ' recent entry wins') 183 | }) 184 | 185 | t.test('index.find hash conflict in same bucket', async t => { 186 | // This... is very unlikely to happen. But hey. 
187 | const entry = { 188 | key: 'whatever', 189 | integrity: 'sha1-deadbeef', 190 | time: 12345, 191 | metadata: 'yay', 192 | size: 8, 193 | } 194 | const CACHE = t.testdir( 195 | CacheIndex({ 196 | whatever: [ 197 | { key: 'ohnoes', integrity: 'sha1-welp!' }, 198 | entry, 199 | { key: 'nope', integrity: 'sha1-bada55' }, 200 | ], 201 | }) 202 | ) 203 | const info = await index.find(CACHE, entry.key) 204 | t.ok(info, 'cache hit') 205 | delete info.path 206 | t.same( 207 | info, 208 | entry, 209 | 'got the right one even though different keys exist in index' 210 | ) 211 | }) 212 | -------------------------------------------------------------------------------- /test/entry-index.insert.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const CacheIndex = require('./fixtures/cache-index') 4 | const contentPath = require('../lib/content/path') 5 | const fs = require('fs/promises') 6 | const t = require('tap') 7 | 8 | const index = require('../lib/entry-index') 9 | 10 | const key = 'foo' 11 | const integrity = 'sha512-deadbeef' 12 | const size = 999 13 | 14 | t.test('basic insertion', async t => { 15 | const cache = t.testdir({}) 16 | const bucket = index.bucketPath(cache, key) 17 | const insertEntry = await index.insert(cache, key, integrity, { size, metadata: 'foo' }) 18 | t.same( 19 | insertEntry, 20 | { 21 | key, 22 | integrity, 23 | path: contentPath(cache, integrity), 24 | time: insertEntry.time, 25 | metadata: 'foo', 26 | size, 27 | }, 28 | 'formatted entry returned' 29 | ) 30 | const data = await fs.readFile(bucket, 'utf8') 31 | t.equal(data[0], '\n', 'first entry starts with a \\n') 32 | const split = data.split('\t') 33 | t.equal( 34 | split[0].slice(1), 35 | index.hashEntry(split[1]), 36 | 'consistency header correct' 37 | ) 38 | const entry = JSON.parse(split[1]) 39 | t.ok(entry.time, 'entry has a timestamp') 40 | t.same( 41 | entry, 42 | { 43 | key, 44 | integrity, 45 | time: entry.time, 46 | metadata: 'foo', 47 | size, 48 | }, 49 | 'entry matches what was inserted' 50 | ) 51 | }) 52 | 53 | t.test('inserts additional entries into existing key', async t => { 54 | const cache = t.testdir({}) 55 | const bucket = index.bucketPath(cache, key) 56 | await index.insert(cache, key, integrity, { size, metadata: 1 }) 57 | await index.insert(cache, key, integrity, { size, metadata: 2 }) 58 | const data = await fs.readFile(bucket, 'utf8') 59 | const entries = data 60 | .split('\n') 61 | .slice(1) 62 | .map((line) => { 63 | return JSON.parse(line.split('\t')[1]) 64 | }) 65 | entries.forEach(function (e) { 66 | delete e.time 67 | }) 68 | t.same( 69 | entries, 70 | [ 71 | { 72 | key, 73 | integrity, 74 | metadata: 1, 75 | size, 76 | }, 77 | { 78 | key, 79 | integrity, 80 | metadata: 2, 81 | size, 82 | }, 83 | ], 84 | 'all entries present' 85 | ) 86 | }) 87 | 88 | t.test('separates entries even if one is corrupted', async t => { 89 | // TODO - check that middle-of-string corrupted writes won't hurt. 
90 | const cache = t.testdir( 91 | CacheIndex({ 92 | foo: 93 | '\n' + 94 | JSON.stringify({ 95 | key, 96 | integrity: 'meh', 97 | time: 54321, 98 | size, 99 | }) + 100 | '\n{"key": "' + 101 | key + 102 | '"\noway', 103 | }) 104 | ) 105 | const bucket = index.bucketPath(cache, key) 106 | await index.insert(cache, key, integrity, { size }) 107 | const data = await fs.readFile(bucket, 'utf8') 108 | const entry = JSON.parse(data.split('\n')[4].split('\t')[1]) 109 | delete entry.time 110 | t.same( 111 | entry, 112 | { 113 | key, 114 | integrity, 115 | size, 116 | }, 117 | 'new entry unaffected by corruption' 118 | ) 119 | }) 120 | 121 | t.test('optional arbitrary metadata', async t => { 122 | const cache = t.testdir({}) 123 | const bucket = index.bucketPath(cache, key) 124 | const metadata = { foo: 'bar' } 125 | await index.insert(cache, key, integrity, { size, metadata: metadata }) 126 | const data = await fs.readFile(bucket, 'utf8') 127 | const entry = JSON.parse(data.split('\t')[1]) 128 | delete entry.time 129 | t.same( 130 | entry, 131 | { 132 | key, 133 | integrity, 134 | metadata: metadata, 135 | size, 136 | }, 137 | 'entry includes inserted metadata' 138 | ) 139 | }) 140 | 141 | t.test('key case-sensitivity', async t => { 142 | const cache = t.testdir({}) 143 | await Promise.all([ 144 | index.insert(cache, key, integrity, { size }), 145 | index.insert(cache, key.toUpperCase(), `${integrity}upper`, { size }), 146 | ]) 147 | const [entry, upperEntry] = await Promise.all([ 148 | index.find(cache, key), 149 | index.find(cache, key.toUpperCase()), 150 | ]) 151 | delete entry.time 152 | delete upperEntry.time 153 | t.same( 154 | { 155 | key: entry.key, 156 | integrity: entry.integrity, 157 | size, 158 | }, 159 | { 160 | key, 161 | integrity, 162 | size, 163 | }, 164 | 'regular entry exists' 165 | ) 166 | t.same( 167 | { 168 | key: upperEntry.key, 169 | integrity: upperEntry.integrity, 170 | size, 171 | }, 172 | { 173 | key: key.toUpperCase(), 174 | integrity: `${integrity}upper`, 175 | size, 176 | }, 177 | 'case-variant entry intact' 178 | ) 179 | }) 180 | 181 | t.test('path-breaking characters', async t => { 182 | const cache = t.testdir({}) 183 | const newKey = ';;!registry\nhttps://registry.npmjs.org/back \\ slash@Cool™?' 
184 | await index.insert(cache, newKey, integrity, { size }) 185 | const bucket = index.bucketPath(cache, newKey) 186 | const data = await fs.readFile(bucket, 'utf8') 187 | const entry = JSON.parse(data.split('\t')[1]) 188 | delete entry.time 189 | t.same( 190 | entry, 191 | { 192 | key: newKey, 193 | integrity, 194 | size, 195 | }, 196 | 'entry exists and matches original key with invalid chars' 197 | ) 198 | }) 199 | 200 | t.test('extremely long keys', async t => { 201 | const cache = t.testdir({}) 202 | let newKey = '' 203 | for (let i = 0; i < 10000; i++) { 204 | newKey += i 205 | } 206 | 207 | await index.insert(cache, newKey, integrity, { size }) 208 | const bucket = index.bucketPath(cache, newKey) 209 | const data = await fs.readFile(bucket, 'utf8') 210 | const entry = JSON.parse(data.split('\t')[1]) 211 | delete entry.time 212 | t.same( 213 | entry, 214 | { 215 | key: newKey, 216 | integrity, 217 | size, 218 | }, 219 | 'entry exists in spite of INCREDIBLY LONG key' 220 | ) 221 | }) 222 | 223 | t.test('ENOENT from appendFile is ignored', async (t) => { 224 | const cache = t.testdir() 225 | 226 | const indexMocked = t.mock('../lib/entry-index.js', { 227 | 'fs/promises': { 228 | ...fs, 229 | appendFile: async () => { 230 | throw Object.assign(new Error('fake enoent'), { code: 'ENOENT' }) 231 | }, 232 | }, 233 | }) 234 | 235 | await t.resolves(() => indexMocked.insert(cache, key, integrity, { size })) 236 | }) 237 | 238 | t.test('generic error from appendFile rejects', async (t) => { 239 | const cache = t.testdir() 240 | 241 | const indexMocked = t.mock('../lib/entry-index.js', { 242 | 'fs/promises': { 243 | ...fs, 244 | appendFile: async () => { 245 | throw Object.assign(new Error('fake eperm'), { code: 'EPERM' }) 246 | }, 247 | }, 248 | }) 249 | 250 | await t.rejects(() => indexMocked.insert(cache, key, integrity, { size }), { code: 'EPERM' }) 251 | }) 252 | 253 | t.test('concurrent writes') 254 | -------------------------------------------------------------------------------- /test/entry-index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs') 4 | const path = require('path') 5 | 6 | const ssri = require('ssri') 7 | const t = require('tap') 8 | 9 | const index = require('../lib/entry-index') 10 | const CacheContent = require('./fixtures/cache-content') 11 | 12 | // defines reusable errors 13 | const genericError = new Error('ERR') 14 | genericError.code = 'ERR' 15 | const missingFileError = new Error('ENOENT') 16 | missingFileError.code = 'ENOENT' 17 | 18 | const getEntryIndex = (t, opts) => t.mock('../lib/entry-index', opts) 19 | const getEntryIndexReadFileFailure = (t, err) => getEntryIndex(t, { 20 | 'fs/promises': { 21 | ...fs.promises, 22 | readFile: async () => { 23 | throw err 24 | }, 25 | }, 26 | fs: { 27 | ...fs, 28 | readFileSync: () => { 29 | throw genericError 30 | }, 31 | }, 32 | }) 33 | 34 | // helpers 35 | const CONTENT = Buffer.from('foobarbaz', 'utf8') 36 | const INTEGRITY = ssri.fromData(CONTENT).toString() 37 | const KEY = 'my-test-key' 38 | const cacheContent = CacheContent({ 39 | [INTEGRITY]: CONTENT, 40 | }) 41 | 42 | t.test('compact', async (t) => { 43 | const cache = t.testdir(cacheContent) 44 | await Promise.all([ 45 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), 46 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), 47 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), 48 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } 
}), 49 | ]) 50 | 51 | const bucket = index.bucketPath(cache, KEY) 52 | const entries = await index.bucketEntries(bucket) 53 | t.equal(entries.length, 4, 'started with 4 entries') 54 | 55 | const filter = (entryA, entryB) => entryA.metadata.rev === entryB.metadata.rev 56 | const compacted = await index.compact(cache, KEY, filter) 57 | t.equal(compacted.length, 2, 'should return only two entries') 58 | 59 | const newEntries = await index.bucketEntries(bucket) 60 | t.equal(newEntries.length, 2, 'bucket was deduplicated') 61 | }) 62 | 63 | t.test('compact: treats null integrity without validateEntry as a delete', async (t) => { 64 | const cache = t.testdir(cacheContent) 65 | // this one does not use Promise.all because we want to be certain 66 | // things are written in the right order 67 | await index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }) 68 | await index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }) 69 | // this is a delete, revs 1, 2 and 3 will be omitted 70 | await index.insert(cache, KEY, null, { metadata: { rev: 3 } }) 71 | await index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 4 } }) 72 | 73 | const bucket = index.bucketPath(cache, KEY) 74 | const entries = await index.bucketEntries(bucket) 75 | t.equal(entries.length, 4, 'started with 4 entries') 76 | 77 | const filter = (entryA, entryB) => entryA.metadata.rev === entryB.metadata.rev 78 | const compacted = await index.compact(cache, KEY, filter) 79 | t.equal(compacted.length, 1, 'should return only one entry') 80 | t.equal(compacted[0].metadata.rev, 4, 'kept rev 4') 81 | 82 | const newEntries = await index.bucketEntries(bucket) 83 | t.equal(newEntries.length, 1, 'bucket was deduplicated') 84 | }) 85 | 86 | t.test('compact: leverages validateEntry to skip invalid entries', async (t) => { 87 | const cache = t.testdir(cacheContent) 88 | await Promise.all([ 89 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), 90 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), 91 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), 92 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), 93 | ]) 94 | 95 | const bucket = index.bucketPath(cache, KEY) 96 | const entries = await index.bucketEntries(bucket) 97 | t.equal(entries.length, 4, 'started with 4 entries') 98 | 99 | const matchFn = (entryA, entryB) => 100 | entryA.metadata.rev === entryB.metadata.rev 101 | const validateEntry = (entry) => entry.metadata.rev > 1 102 | const compacted = await index.compact(cache, KEY, matchFn, { validateEntry }) 103 | t.equal(compacted.length, 1, 'should return only one entry') 104 | t.equal(compacted[0].metadata.rev, 2, 'kept the rev 2 entry') 105 | 106 | const newEntries = await index.bucketEntries(bucket) 107 | t.equal(newEntries.length, 1, 'bucket was deduplicated') 108 | }) 109 | 110 | t.test('compact: validateEntry allows for keeping null integrity', async (t) => { 111 | const cache = t.testdir(cacheContent) 112 | await Promise.all([ 113 | index.insert(cache, KEY, null, { metadata: { rev: 1 } }), 114 | index.insert(cache, KEY, null, { metadata: { rev: 2 } }), 115 | index.insert(cache, KEY, null, { metadata: { rev: 2 } }), 116 | index.insert(cache, KEY, null, { metadata: { rev: 1 } }), 117 | ]) 118 | 119 | const bucket = index.bucketPath(cache, KEY) 120 | const entries = await index.bucketEntries(bucket) 121 | t.equal(entries.length, 4, 'started with 4 entries') 122 | 123 | const matchFn = (entryA, entryB) => 124 | entryA.metadata.rev === entryB.metadata.rev 125 | const
validateEntry = (entry) => entry.metadata.rev > 1 126 | const compacted = await index.compact(cache, KEY, matchFn, { validateEntry }) 127 | t.equal(compacted.length, 1, 'should return only one entry') 128 | t.equal(compacted[0].metadata.rev, 2, 'kept the rev 2 entry') 129 | 130 | const newEntries = await index.bucketEntries(bucket) 131 | t.equal(newEntries.length, 1, 'bucket was deduplicated') 132 | }) 133 | 134 | t.test('compact: error in moveFile removes temp', async (t) => { 135 | const cache = t.testdir(cacheContent) 136 | await Promise.all([ 137 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), 138 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), 139 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), 140 | index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), 141 | ]) 142 | 143 | const { compact } = getEntryIndex(t, { 144 | '@npmcli/fs': { moveFile: () => Promise.reject(new Error('foo')) }, 145 | }) 146 | const filter = (entryA, entryB) => entryA.metadata.rev === entryB.metadata.rev 147 | await t.rejects(compact(cache, KEY, filter), { message: 'foo' }, 'promise rejected') 148 | 149 | const tmpFiles = fs.readdirSync(path.join(cache, 'tmp')) 150 | t.equal(tmpFiles.length, 0, 'temp file is gone') 151 | }) 152 | 153 | t.test('delete: removeFully deletes the index entirely', async (t) => { 154 | const cache = t.testdir(cacheContent) 155 | const bucket = index.bucketPath(cache, KEY) 156 | await index.insert(cache, KEY, INTEGRITY) 157 | const entries = await index.bucketEntries(bucket) 158 | t.equal(entries.length, 1, 'has an entry') 159 | 160 | // do a normal delete first, this appends a null integrity 161 | await index.delete(cache, KEY) 162 | const delEntries = await index.bucketEntries(bucket) 163 | t.equal(delEntries.length, 2, 'should now have 2 entries') 164 | t.equal(delEntries[1].integrity, null, 'has a null integrity last') 165 | 166 | // then a full delete 167 | await index.delete(cache, KEY, { removeFully: true }) 168 | await t.rejects( 169 | index.bucketEntries(bucket), 170 | { code: 'ENOENT' }, 171 | 'rejects with ENOENT because file is gone' 172 | ) 173 | }) 174 | 175 | t.test('find: error on parsing json data', (t) => { 176 | const cache = t.testdir(cacheContent) 177 | // mocks readFile in order to return a borked json payload 178 | const { find } = getEntryIndex(t, { 179 | '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { 180 | readFile: async () => { 181 | return '\ncec8d2e4685534ed189b563c8ee1cb1cb7c72874\t{"""// foo' 182 | }, 183 | }), 184 | }) 185 | 186 | t.plan(1) 187 | t.resolveMatch( 188 | find(cache, KEY), 189 | null, 190 | 'should resolve with null' 191 | ) 192 | }) 193 | 194 | t.test('find: unknown error on finding entries', (t) => { 195 | const cache = t.testdir(cacheContent) 196 | const { find } = getEntryIndexReadFileFailure(t, genericError) 197 | 198 | t.plan(1) 199 | t.rejects( 200 | find(cache, KEY), 201 | genericError, 202 | 'should reject with the unknown error thrown' 203 | ) 204 | }) 205 | 206 | t.test('lsStream: unknown error reading files', async (t) => { 207 | const cache = t.testdir(cacheContent) 208 | await index.insert(cache, KEY, INTEGRITY) 209 | const { lsStream } = getEntryIndexReadFileFailure(t, genericError) 210 | 211 | return new Promise((resolve) => { 212 | lsStream(cache) 213 | .on('error', err => { 214 | t.equal(err, genericError, 'should emit an error') 215 | resolve() 216 | }) 217 | }) 218 | }) 219 | 220 | t.test('lsStream: missing files error', async (t) => { 221 | const cache 
= t.testdir(cacheContent) 222 | await index.insert(cache, KEY, INTEGRITY) 223 | 224 | const { lsStream } = getEntryIndexReadFileFailure(t, missingFileError) 225 | 226 | return new Promise((resolve, reject) => { 227 | lsStream(cache) 228 | .on('error', reject) 229 | .on('end', resolve) 230 | }) 231 | }) 232 | 233 | t.test('lsStream: unknown error reading dirs', (t) => { 234 | const cache = t.testdir(cacheContent) 235 | const { lsStream } = getEntryIndex(t, { 236 | 'fs/promises': { 237 | ...fs.promises, 238 | readdir: async () => { 239 | throw genericError 240 | }, 241 | }, 242 | }) 243 | 244 | lsStream(cache) 245 | .on('error', err => { 246 | t.equal(err, genericError, 'should emit an error') 247 | t.end() 248 | }) 249 | }) 250 | -------------------------------------------------------------------------------- /test/fixtures/cache-content.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const contentPath = require('../../lib/content/path') 4 | const path = require('path') 5 | 6 | module.exports = CacheContent 7 | 8 | function CacheContent (entries) { 9 | const tree = {} 10 | Object.keys(entries).forEach(function (k) { 11 | const cpath = contentPath('', k) 12 | const content = entries[k] 13 | const parts = cpath.split(path.sep) 14 | insertContent(tree, parts, content) 15 | }) 16 | return tree 17 | } 18 | 19 | function insertContent (tree, pathTo, content) { 20 | const key = pathTo[0] 21 | if (pathTo.length <= 1) { 22 | tree[key] = content 23 | } else { 24 | tree[key] = tree[key] || {} 25 | insertContent(tree[key], pathTo.slice(1), content) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test/fixtures/cache-index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const index = require('../../lib/entry-index') 4 | const path = require('path') 5 | 6 | const bucketPath = index.bucketPath 7 | const hashEntry = index.hashEntry 8 | 9 | // Creates a simulated index using the chained lookup structure, from 10 | // an unhashed version of the index (basically `cacache.ls`). 
11 | // 12 | // The returned object is for use with Tacks 13 | module.exports = CacheIndex 14 | 15 | function CacheIndex (entries) { 16 | const tree = {} 17 | Object.keys(entries).forEach(function (k) { 18 | const bpath = bucketPath('', k) 19 | const parts = bpath.split(path.sep) 20 | let lines = entries[k] 21 | let serialised 22 | if (typeof lines === 'string') { 23 | serialised = lines 24 | } else { 25 | if (typeof lines.length !== 'number') { 26 | lines = [lines] 27 | } 28 | 29 | serialised = 30 | '\n' + 31 | lines 32 | .map((line) => { 33 | const stringified = JSON.stringify(line) 34 | return `${hashEntry(stringified)}\t${stringified}` 35 | }) 36 | .join('\n') 37 | } 38 | insertContent(tree, parts, serialised) 39 | }) 40 | return tree 41 | } 42 | 43 | function insertContent (tree, pathTo, content) { 44 | const key = pathTo[0] 45 | if (pathTo.length <= 1) { 46 | if (tree[key]) { 47 | tree[key] = [tree[key], content].join('\n') 48 | } else { 49 | tree[key] = content 50 | } 51 | } else { 52 | tree[key] = tree[key] || {} 53 | insertContent(tree[key], pathTo.slice(1), content) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /test/get.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs/promises') 4 | const index = require('../lib/entry-index') 5 | const memo = require('../lib/memoization') 6 | const path = require('path') 7 | const t = require('tap') 8 | const ssri = require('ssri') 9 | 10 | const CacheContent = require('./fixtures/cache-content') 11 | 12 | const CONTENT = Buffer.from('foobarbaz', 'utf8') 13 | const SIZE = CONTENT.length 14 | const KEY = 'my-test-key' 15 | const INTEGRITY = ssri.fromData(CONTENT).toString() 16 | const METADATA = { foo: 'bar' } 17 | 18 | const { get } = require('..') 19 | 20 | function opts (extra) { 21 | return Object.assign( 22 | { 23 | size: SIZE, 24 | metadata: METADATA, 25 | }, 26 | extra 27 | ) 28 | } 29 | 30 | // Simple wrapper util cause this gets WORDY 31 | function streamGet (byDigest) { 32 | const args = [].slice.call(arguments, 1) 33 | let integrity 34 | let metadata 35 | let size 36 | const stream = (byDigest ? 
get.stream.byDigest : get.stream).apply(null, args) 37 | return stream 38 | .on('integrity', (int) => { 39 | integrity = ssri.stringify(int) 40 | }) 41 | .on('metadata', (m) => { 42 | metadata = m 43 | }) 44 | .on('size', (s) => { 45 | size = s 46 | }) 47 | .concat() 48 | .then((data) => ({ 49 | data, 50 | integrity, 51 | metadata, 52 | size, 53 | })) 54 | } 55 | 56 | t.test('get.info index entry lookup', async t => { 57 | const CACHE = t.testdir() 58 | const indexInsert = await index.insert(CACHE, KEY, INTEGRITY, opts()) 59 | const entry = await get.info(CACHE, KEY) 60 | t.same(entry, indexInsert, 'get.info() returned the right entry') 61 | }) 62 | 63 | t.test('get will throw ENOENT if not found', (t) => { 64 | const CACHE = t.testdir() 65 | return get(CACHE, KEY) 66 | .then(() => { 67 | throw new Error('lookup should fail') 68 | }) 69 | .catch((err) => { 70 | t.ok(err, 'got an error') 71 | t.equal(err.code, 'ENOENT', 'error code is ENOENT') 72 | return get.info(CACHE, KEY) 73 | }) 74 | .catch((err) => { 75 | t.ok(err, 'got an error') 76 | t.equal(err.code, 'ENOENT', 'error code is ENOENT') 77 | }) 78 | }) 79 | 80 | t.test('basic bulk get', async t => { 81 | const CACHE = t.testdir( 82 | CacheContent({ 83 | [INTEGRITY]: CONTENT, 84 | }) 85 | ) 86 | await index.insert(CACHE, KEY, INTEGRITY, opts()) 87 | await t.resolveMatch( 88 | get(CACHE, KEY), 89 | { 90 | metadata: METADATA, 91 | data: CONTENT, 92 | integrity: INTEGRITY, 93 | size: SIZE, 94 | }, 95 | 'bulk key get returned proper data' 96 | ) 97 | await t.resolveMatch( 98 | get.byDigest(CACHE, INTEGRITY), 99 | CONTENT, 100 | 'byDigest returned proper data' 101 | ) 102 | }) 103 | 104 | t.test('get.byDigest without memoization', async t => { 105 | const CACHE = t.testdir( 106 | CacheContent({ 107 | [INTEGRITY]: CONTENT, 108 | }) 109 | ) 110 | await index.insert(CACHE, KEY, INTEGRITY, opts()) 111 | const res = await get(CACHE, KEY) 112 | t.same( 113 | res, 114 | { 115 | metadata: METADATA, 116 | data: CONTENT, 117 | integrity: INTEGRITY, 118 | size: SIZE, 119 | }, 120 | 'bulk key get returned proper data') 121 | 122 | memo.clearMemoized() 123 | t.same(memo.get.byDigest(CACHE, INTEGRITY), undefined) 124 | const resByDig = await get.byDigest(CACHE, INTEGRITY) 125 | t.same(resByDig, CONTENT, 'byDigest returned proper data') 126 | t.same(memo.get.byDigest(CACHE, INTEGRITY), undefined) 127 | 128 | const resByDigMemo = await get.byDigest(CACHE, INTEGRITY) 129 | t.same(resByDigMemo, CONTENT, 'byDigest returned proper data') 130 | }) 131 | 132 | t.test('get.byDigest with memoization', async t => { 133 | const CACHE = t.testdir( 134 | CacheContent({ 135 | [INTEGRITY]: CONTENT, 136 | }) 137 | ) 138 | await index.insert(CACHE, KEY, INTEGRITY, opts()) 139 | const res = await get(CACHE, KEY) 140 | t.same( 141 | res, 142 | { 143 | metadata: METADATA, 144 | data: CONTENT, 145 | integrity: INTEGRITY, 146 | size: SIZE, 147 | }, 148 | 'bulk key get returned proper data') 149 | 150 | memo.clearMemoized() 151 | t.same(memo.get.byDigest(CACHE, INTEGRITY), undefined) 152 | const resByDig = await get.byDigest(CACHE, INTEGRITY, { memoize: true }) 153 | t.same(resByDig, CONTENT, 'byDigest returned proper data') 154 | t.notSame(memo.get.byDigest(CACHE, INTEGRITY), undefined) 155 | 156 | const resByDigMemo = await get.byDigest(CACHE, INTEGRITY, { memoize: true }) 157 | t.same(resByDigMemo, CONTENT, 'byDigest returned proper data') 158 | }) 159 | 160 | t.test('get without memoization', async t => { 161 | const CACHE = t.testdir( 162 | CacheContent({ 163 | 
[INTEGRITY]: CONTENT, 164 | }) 165 | ) 166 | await index.insert(CACHE, KEY, INTEGRITY, opts()) 167 | const res = await get(CACHE, KEY) 168 | t.same( 169 | res, 170 | { 171 | metadata: METADATA, 172 | data: CONTENT, 173 | integrity: INTEGRITY, 174 | size: SIZE, 175 | }, 176 | 'bulk key get returned proper data') 177 | 178 | memo.clearMemoized() 179 | t.same(memo.get(CACHE, KEY), undefined) 180 | const resByDig = await get(CACHE, KEY) 181 | t.same(resByDig, { 182 | metadata: METADATA, 183 | data: CONTENT, 184 | integrity: INTEGRITY, 185 | size: SIZE, 186 | }, 'get returned proper data') 187 | t.same(memo.get(CACHE, KEY), undefined) 188 | 189 | const resByDigMemo = await get(CACHE, KEY) 190 | t.same(resByDigMemo, { 191 | metadata: METADATA, 192 | data: CONTENT, 193 | integrity: INTEGRITY, 194 | size: SIZE, 195 | }, 'get returned proper data') 196 | }) 197 | 198 | t.test('get with memoization', async t => { 199 | const CACHE = t.testdir( 200 | CacheContent({ 201 | [INTEGRITY]: CONTENT, 202 | }) 203 | ) 204 | await index.insert(CACHE, KEY, INTEGRITY, opts()) 205 | const res = await get(CACHE, KEY) 206 | t.same( 207 | res, 208 | { 209 | metadata: METADATA, 210 | data: CONTENT, 211 | integrity: INTEGRITY, 212 | size: SIZE, 213 | }, 214 | 'bulk key get returned proper data') 215 | 216 | memo.clearMemoized() 217 | t.same(memo.get(CACHE, KEY), undefined) 218 | const resByDig = await get(CACHE, KEY, { memoize: true }) 219 | t.same(resByDig, { 220 | metadata: METADATA, 221 | data: CONTENT, 222 | integrity: INTEGRITY, 223 | size: SIZE, 224 | }, 'get returned proper data') 225 | t.notSame(memo.get(CACHE, KEY), undefined) 226 | 227 | const resByDigMemo = await get(CACHE, KEY, { memoize: true }) 228 | t.same(resByDigMemo, { 229 | metadata: METADATA, 230 | data: CONTENT, 231 | integrity: INTEGRITY, 232 | size: SIZE, 233 | }, 'get returned proper data') 234 | }) 235 | 236 | t.test('basic stream get', async t => { 237 | const CACHE = t.testdir( 238 | CacheContent({ 239 | [INTEGRITY]: CONTENT, 240 | }) 241 | ) 242 | await index.insert(CACHE, KEY, INTEGRITY, opts()) 243 | const [byKey, byDigest] = await Promise.all([ 244 | streamGet(false, CACHE, KEY), 245 | streamGet(true, CACHE, INTEGRITY), 246 | ]) 247 | t.same( 248 | byKey, 249 | { 250 | data: CONTENT, 251 | integrity: INTEGRITY, 252 | metadata: METADATA, 253 | size: SIZE, 254 | }, 255 | 'got all expected data and fields from key fetch' 256 | ) 257 | t.same(byDigest.data, CONTENT, 'got correct data from digest fetch') 258 | }) 259 | 260 | t.test('get.stream add new listeners post stream creation', async (t) => { 261 | const CACHE = t.testdir( 262 | CacheContent({ 263 | [INTEGRITY]: CONTENT, 264 | }) 265 | ) 266 | 267 | return index.insert(CACHE, KEY, INTEGRITY, opts()).then(() => { 268 | const OPTS = { memoize: false, size: CONTENT.length } 269 | const stream = get.stream(CACHE, KEY, OPTS) 270 | return Promise.all([ 271 | new Promise((resolve) => stream.on('integrity', resolve)), 272 | new Promise((resolve) => stream.on('metadata', resolve)), 273 | new Promise((resolve) => stream.on('size', resolve)), 274 | ]).then(() => { 275 | [ 276 | 'integrity', 277 | 'metadata', 278 | 'size', 279 | ].forEach(ev => { 280 | stream.on(ev, () => { 281 | t.ok(`${ev} listener added`) 282 | }) 283 | }) 284 | return stream.concat() 285 | }) 286 | }) 287 | }) 288 | 289 | t.test('get.copy will throw ENOENT if not found', (t) => { 290 | const CACHE = t.testdir() 291 | const DEST = path.join(CACHE, 'not-found') 292 | return get.copy(CACHE, 'NOT-FOUND', DEST) 293 | .then(() => { 
294 | throw new Error('lookup should fail') 295 | }) 296 | .catch((err) => { 297 | t.ok(err, 'got an error') 298 | t.equal(err.code, 'ENOENT', 'error code is ENOENT') 299 | }) 300 | }) 301 | 302 | t.test('get.copy with fs.copyfile', (t) => { 303 | const CACHE = t.testdir( 304 | CacheContent({ 305 | [INTEGRITY]: CONTENT, 306 | }) 307 | ) 308 | const DEST = path.join(CACHE, 'copymehere') 309 | return index 310 | .insert(CACHE, KEY, INTEGRITY, opts()) 311 | .then(() => get.copy(CACHE, KEY, DEST)) 312 | .then((res) => { 313 | t.same( 314 | res, 315 | { 316 | metadata: METADATA, 317 | integrity: INTEGRITY, 318 | size: SIZE, 319 | }, 320 | 'copy operation returns basic metadata' 321 | ) 322 | return fs.readFile(DEST) 323 | }) 324 | .then((data) => { 325 | t.same(data, CONTENT, 'data copied by key matches') 326 | return fs.rm(DEST, { recursive: true, force: true }) 327 | }) 328 | .then(() => get.copy.byDigest(CACHE, INTEGRITY, DEST)) 329 | .then(() => fs.readFile(DEST)) 330 | .then((data) => { 331 | t.same(data, CONTENT, 'data copied by digest matches') 332 | return fs.rm(DEST, { recursive: true, force: true }) 333 | }) 334 | }) 335 | 336 | t.test('memoizes data on bulk read', (t) => { 337 | memo.clearMemoized() 338 | const CACHE = t.testdir( 339 | CacheContent({ 340 | [INTEGRITY]: CONTENT, 341 | }) 342 | ) 343 | return index.insert(CACHE, KEY, INTEGRITY, opts()).then((ENTRY) => { 344 | return get(CACHE, KEY) 345 | .then(() => { 346 | t.same(memo.get(CACHE, KEY), null, 'no memoization!') 347 | return get(CACHE, KEY, { memoize: true }) 348 | }) 349 | .then((res) => { 350 | t.same( 351 | res, 352 | { 353 | metadata: METADATA, 354 | data: CONTENT, 355 | integrity: INTEGRITY, 356 | size: SIZE, 357 | }, 358 | 'usual data returned' 359 | ) 360 | t.same( 361 | memo.get(CACHE, KEY), 362 | { 363 | entry: ENTRY, 364 | data: CONTENT, 365 | }, 366 | 'data inserted into memoization cache' 367 | ) 368 | return fs.rm(CACHE, { recursive: true, force: true }) 369 | }) 370 | .then(() => { 371 | return get(CACHE, KEY) 372 | }) 373 | .then((res) => { 374 | t.same( 375 | res, 376 | { 377 | metadata: METADATA, 378 | data: CONTENT, 379 | integrity: INTEGRITY, 380 | size: SIZE, 381 | }, 382 | 'memoized data fetched by default' 383 | ) 384 | return get(CACHE, KEY, { memoize: false }) 385 | .then(() => { 386 | throw new Error('expected get to fail') 387 | }) 388 | .catch((err) => { 389 | t.ok(err, 'got an error from unmemoized get') 390 | t.equal(err.code, 'ENOENT', 'cached content not found') 391 | t.same( 392 | memo.get(CACHE, KEY), 393 | { 394 | entry: ENTRY, 395 | data: CONTENT, 396 | }, 397 | 'data still in memoization cache' 398 | ) 399 | }) 400 | }) 401 | }) 402 | }) 403 | 404 | t.test('memoizes data on stream read', async t => { 405 | memo.clearMemoized() 406 | const CACHE = t.testdir( 407 | CacheContent({ 408 | [INTEGRITY]: CONTENT, 409 | }) 410 | ) 411 | const ENTRY = await index.insert(CACHE, KEY, INTEGRITY, opts()) 412 | await Promise.all([ 413 | streamGet(false, CACHE, KEY), 414 | streamGet(true, CACHE, INTEGRITY), 415 | ]) 416 | t.same(memo.get(CACHE, KEY), null, 'no memoization by key!') 417 | t.same( 418 | memo.get.byDigest(CACHE, INTEGRITY), 419 | null, 420 | 'no memoization by digest!' 
421 | ) 422 | memo.clearMemoized() 423 | const byDigest = await streamGet(true, CACHE, INTEGRITY, { 424 | memoize: true, 425 | }) 426 | t.same(byDigest.data, CONTENT, 'usual data returned from stream') 427 | t.same(memo.get(CACHE, KEY), null, 'digest fetch = no key entry') 428 | t.same( 429 | memo.get.byDigest(CACHE, INTEGRITY), 430 | CONTENT, 431 | 'content memoized' 432 | ) 433 | t.same( 434 | memo.get.byDigest('whatev', INTEGRITY), 435 | null, 436 | 'content memoization filtered by cache' 437 | ) 438 | memo.clearMemoized() 439 | await t.resolveMatch( 440 | streamGet(false, CACHE, KEY, { memoize: true }), 441 | { 442 | metadata: METADATA, 443 | data: CONTENT, 444 | integrity: INTEGRITY, 445 | size: SIZE, 446 | }, 447 | 'usual data returned from key fetch' 448 | ) 449 | t.same( 450 | memo.get(CACHE, KEY), 451 | { 452 | entry: ENTRY, 453 | data: CONTENT, 454 | }, 455 | 'data inserted into memoization cache' 456 | ) 457 | t.same( 458 | memo.get.byDigest(CACHE, INTEGRITY), 459 | CONTENT, 460 | 'content memoized by digest, too' 461 | ) 462 | t.same( 463 | memo.get('whatev', KEY), 464 | null, 465 | 'entry memoization filtered by cache' 466 | ) 467 | await fs.rm(CACHE, { recursive: true, force: true }) 468 | await t.resolveMatch( 469 | streamGet(false, CACHE, KEY), 470 | { 471 | metadata: METADATA, 472 | data: CONTENT, 473 | integrity: INTEGRITY, 474 | size: SIZE, 475 | }, 476 | 'key fetch fulfilled by memoization cache' 477 | ) 478 | await t.resolveMatch( 479 | streamGet(true, CACHE, INTEGRITY), 480 | { data: CONTENT }, 481 | 'digest fetch fulfilled by memoization cache' 482 | ) 483 | await t.rejects( 484 | streamGet(false, CACHE, KEY, { memoize: false }), 485 | { code: 'ENOENT' }, 486 | 'key get memoization bypassed' 487 | ) 488 | await t.rejects( 489 | streamGet(true, CACHE, INTEGRITY, { memoize: false }), 490 | { code: 'ENOENT' }, 491 | 'digest get memoization bypassed' 492 | ) 493 | }) 494 | 495 | t.test('get.info uses memoized data', async t => { 496 | memo.clearMemoized() 497 | const CACHE = t.testdir() 498 | const ENTRY = { 499 | key: KEY, 500 | integrity: INTEGRITY, 501 | time: +new Date(), 502 | size: SIZE, 503 | metadata: null, 504 | } 505 | memo.put(CACHE, ENTRY, CONTENT) 506 | const info = await get.info(CACHE, KEY) 507 | t.same(info, ENTRY, 'got the entry from memoization cache') 508 | }) 509 | 510 | t.test('identical hashes with different algorithms do not conflict') 511 | t.test('throw error if something is really wrong with bucket') 512 | -------------------------------------------------------------------------------- /test/ls.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const CacheIndex = require('./fixtures/cache-index') 4 | const contentPath = require('../lib/content/path') 5 | const index = require('../lib/entry-index.js') 6 | const t = require('tap') 7 | 8 | const { ls } = require('..') 9 | 10 | t.test('basic listing', async t => { 11 | const contents = { 12 | whatever: { 13 | key: 'whatever', 14 | integrity: 'sha512-deadbeef', 15 | time: 12345, 16 | metadata: 'omgsometa', 17 | size: 234234, 18 | }, 19 | whatnot: { 20 | key: 'whatnot', 21 | integrity: 'sha512-bada55', 22 | time: 54321, 23 | metadata: null, 24 | size: 425345345, 25 | }, 26 | } 27 | const CACHE = t.testdir(CacheIndex(contents)) 28 | contents.whatever.path = contentPath(CACHE, contents.whatever.integrity) 29 | contents.whatnot.path = contentPath(CACHE, contents.whatnot.integrity) 30 | const listing = await ls(CACHE) 31 | t.same(listing, 
contents, 'index contents correct') 32 | const newListing = {} 33 | const stream = ls.stream(CACHE) 34 | stream.on('data', (entry) => { 35 | newListing[entry.key] = entry 36 | }) 37 | await stream.promise() 38 | t.same(newListing, contents, 'ls is streamable') 39 | }) 40 | 41 | t.test('separate keys in conflicting buckets', async t => { 42 | const contents = { 43 | whatever: { 44 | key: 'whatever', 45 | integrity: 'sha512-deadbeef', 46 | time: 12345, 47 | metadata: 'omgsometa', 48 | size: 5, 49 | }, 50 | whatev: { 51 | key: 'whatev', 52 | integrity: 'sha512-bada55', 53 | time: 54321, 54 | metadata: null, 55 | size: 99234234, 56 | }, 57 | } 58 | const CACHE = t.testdir( 59 | CacheIndex({ 60 | // put both in the same bucket 61 | whatever: [contents.whatever, contents.whatev], 62 | }) 63 | ) 64 | contents.whatever.path = contentPath(CACHE, contents.whatever.integrity) 65 | contents.whatev.path = contentPath(CACHE, contents.whatev.integrity) 66 | const listing = await ls(CACHE) 67 | t.same(listing, contents, 'index contents correct') 68 | }) 69 | 70 | t.test('works fine on an empty/missing cache', async t => { 71 | const CACHE = t.testdir() 72 | const listing = await ls(CACHE) 73 | t.same(listing, {}, 'returned an empty listing') 74 | }) 75 | 76 | t.test('ignores non-dir files', async t => { 77 | const index = CacheIndex({ 78 | whatever: { 79 | key: 'whatever', 80 | integrity: 'sha512-deadbeef', 81 | time: 12345, 82 | metadata: 'omgsometa', 83 | size: 234234, 84 | }, 85 | }) 86 | index.garbage = 'hello world' 87 | const CACHE = t.testdir(index) 88 | const listing = await ls(CACHE) 89 | t.equal(Object.keys(listing).length, 1, 'only 1 item in listing') 90 | t.equal(listing.whatever.key, 'whatever', 'only the correct entry listed') 91 | }) 92 | 93 | t.test('correctly ignores deleted entries', (t) => { 94 | const contents = { 95 | whatever: { 96 | key: 'whatever', 97 | integrity: 'sha512-deadbeef', 98 | time: 12345, 99 | metadata: 'omgsometa', 100 | size: 234234, 101 | }, 102 | whatnot: { 103 | key: 'whatnot', 104 | integrity: 'sha512-bada55', 105 | time: 54321, 106 | metadata: null, 107 | size: 425345345, 108 | }, 109 | whatwhere: { 110 | key: 'whatwhere', 111 | integrity: 'sha512-bada55e5', 112 | time: 54321, 113 | metadata: null, 114 | size: 425345345, 115 | }, 116 | } 117 | const CACHE = t.testdir(CacheIndex(contents)) 118 | contents.whatever.path = contentPath(CACHE, contents.whatever.integrity) 119 | contents.whatnot.path = contentPath(CACHE, contents.whatnot.integrity) 120 | contents.whatwhere.path = contentPath(CACHE, contents.whatwhere.integrity) 121 | return index 122 | .delete(CACHE, 'whatnot') 123 | .then(() => ls(CACHE)) 124 | .then((listing) => 125 | t.same( 126 | listing, 127 | { 128 | whatever: contents.whatever, 129 | whatwhere: contents.whatwhere, 130 | }, 131 | 'index contents correct' 132 | ) 133 | ) 134 | .then(() => { 135 | const listing = {} 136 | const stream = ls.stream(CACHE) 137 | stream.on('data', (entry) => { 138 | listing[entry.key] = entry 139 | }) 140 | return stream.promise().then(() => 141 | t.same( 142 | listing, 143 | { 144 | whatever: contents.whatever, 145 | whatwhere: contents.whatwhere, 146 | }, 147 | 'ls is streamable' 148 | ) 149 | ) 150 | }) 151 | }) 152 | -------------------------------------------------------------------------------- /test/memoization.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('tap') 4 | 5 | const memo = require('../lib/memoization') 6 | 7 | 
const CACHE = 'mycache' 8 | const ENTRY = { 9 | key: 'foo', 10 | integrity: 'sha512-deadbeef', 11 | time: new Date(), 12 | metadata: null, 13 | } 14 | const DATA = 'foobarbaz' 15 | 16 | test('memoizes entry and data by key', (t) => { 17 | memo.put(CACHE, ENTRY, DATA) 18 | t.same( 19 | memo.clearMemoized(), 20 | { 21 | [`key:${CACHE}:${ENTRY.key}`]: { 22 | entry: ENTRY, 23 | data: DATA, 24 | }, 25 | [`digest:${CACHE}:${ENTRY.integrity}`]: DATA, 26 | }, 27 | 'cache has both key and digest entries' 28 | ) 29 | t.end() 30 | }) 31 | 32 | test('can fetch data by key', (t) => { 33 | memo.put(CACHE, ENTRY, DATA) 34 | t.same( 35 | memo.get(CACHE, ENTRY.key), 36 | { 37 | entry: ENTRY, 38 | data: DATA, 39 | }, 40 | 'fetched data correctly' 41 | ) 42 | t.same( 43 | memo.get(CACHE + 'meh', ENTRY.key), 44 | null, 45 | 'different caches store different keyspaces' 46 | ) 47 | memo.clearMemoized() 48 | t.end() 49 | }) 50 | 51 | test('can fetch data by digest', (t) => { 52 | memo.put(CACHE, ENTRY, DATA) 53 | t.same( 54 | memo.get.byDigest(CACHE, ENTRY.integrity), 55 | DATA, 56 | 'got raw data by digest, without an entry' 57 | ) 58 | memo.clearMemoized() 59 | t.end() 60 | }) 61 | 62 | test('can clear out the memoization cache', (t) => { 63 | memo.put(CACHE, ENTRY, DATA) 64 | memo.clearMemoized() 65 | t.same(memo.get(CACHE, ENTRY.key), null, 'entry not there anymore') 66 | t.same( 67 | memo.get.byDigest(CACHE, ENTRY.integrity), 68 | null, 69 | 'digest-based data not there anymore' 70 | ) 71 | t.end() 72 | }) 73 | 74 | test('accepts optional injected cache', (t) => { 75 | memo.clearMemoized() 76 | const MEMO = new Map() 77 | memo.put(CACHE, ENTRY, DATA, { memoize: MEMO }) 78 | t.same( 79 | memo.get(CACHE, ENTRY.key), 80 | null, 81 | 'entry not in global memo cache' 82 | ) 83 | t.same( 84 | memo.get(CACHE, ENTRY.key, { memoize: MEMO }), 85 | { entry: ENTRY, data: DATA }, 86 | 'entry fetched from injected memoizer' 87 | ) 88 | t.same( 89 | memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: MEMO }), 90 | DATA, 91 | 'content entry fetched from injected memoizer' 92 | ) 93 | t.same( 94 | MEMO.get(`key:${CACHE}:${ENTRY.key}`), 95 | { entry: ENTRY, data: DATA }, 96 | 'entry is in the injected memoizer' 97 | ) 98 | t.same( 99 | MEMO.get(`digest:${CACHE}:${ENTRY.integrity}`), 100 | DATA, 101 | 'content entry is in the injected memoizer' 102 | ) 103 | MEMO.clear() 104 | t.same( 105 | memo.get(CACHE, ENTRY.key, { memoize: MEMO }), 106 | null, 107 | 'tried to read from cleared memoizer' 108 | ) 109 | t.same( 110 | memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: MEMO }), 111 | null, 112 | 'tried to read by digest from cleared memoizer' 113 | ) 114 | memo.put.byDigest(CACHE, ENTRY.integrity, DATA, { memoize: MEMO }) 115 | t.same( 116 | MEMO.get(`digest:${CACHE}:${ENTRY.integrity}`), 117 | DATA, 118 | 'content entry is in the injected memoizer' 119 | ) 120 | const obj = {} 121 | memo.put(CACHE, ENTRY, DATA, { memoize: obj }) 122 | t.same( 123 | memo.get(CACHE, ENTRY.key, { memoize: obj }), 124 | { entry: ENTRY, data: DATA }, 125 | 'entry fetched from injected object memoizer' 126 | ) 127 | t.same( 128 | memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: MEMO }), 129 | DATA, 130 | 'content entry fetched from injected object memoizer' 131 | ) 132 | memo.clearMemoized() 133 | memo.put(CACHE, ENTRY, DATA, { memoize: 'foo' }) 134 | t.same( 135 | memo.get(CACHE, ENTRY.key, { memoize: 'foo' }), 136 | { entry: ENTRY, data: DATA }, 137 | 'entry fetched from global memoization obj on non-obj option' 138 | ) 139 | t.same( 140 | 
memo.get(CACHE, ENTRY.key, { memoize: 'foo' }), 141 | { entry: ENTRY, data: DATA }, 142 | 'entry fetched from global memoization obj on non-obj option' 143 | ) 144 | t.same( 145 | memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: 'foo' }), 146 | DATA, 147 | 'content entry fetched global memoizer obj on non-obj option' 148 | ) 149 | t.same( 150 | memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: 'foo' }), 151 | DATA, 152 | 'content entry fetched global memoizer obj on non-obj option' 153 | ) 154 | t.same( 155 | memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: false }), 156 | DATA, 157 | 'content entry fetched global memoizer obj on non-obj option' 158 | ) 159 | t.same( 160 | memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: false }), 161 | DATA, 162 | 'content entry fetched global memoizer obj on non-obj option' 163 | ) 164 | t.end() 165 | }) 166 | -------------------------------------------------------------------------------- /test/put.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs/promises') 4 | const path = require('path') 5 | const index = require('../lib/entry-index') 6 | const memo = require('../lib/memoization') 7 | const t = require('tap') 8 | const ssri = require('ssri') 9 | 10 | const CONTENT = Buffer.from('foobarbaz', 'utf8') 11 | const KEY = 'my-test-key' 12 | const INTEGRITY = ssri.fromData(CONTENT).toString() 13 | const METADATA = { foo: 'bar' } 14 | const contentPath = require('../lib/content/path') 15 | 16 | const put = require('..').put 17 | 18 | t.test('basic bulk insertion', async t => { 19 | const CACHE = t.testdir() 20 | const integrity = await put(CACHE, KEY, CONTENT) 21 | t.equal(integrity.toString(), INTEGRITY, 'returned content integrity') 22 | const dataPath = contentPath(CACHE, integrity) 23 | const data = await fs.readFile(dataPath) 24 | t.same(data, CONTENT, 'content was correctly inserted') 25 | }) 26 | 27 | t.test('basic stream insertion', async t => { 28 | const CACHE = t.testdir() 29 | let int 30 | const stream = put.stream(CACHE, KEY).on('integrity', (i) => { 31 | int = i 32 | }) 33 | await stream.end(CONTENT).promise() 34 | t.equal(int.toString(), INTEGRITY, 'returned integrity matches expected') 35 | const data = await fs.readFile(contentPath(CACHE, int)) 36 | t.same(data, CONTENT, 'contents are identical to inserted content') 37 | }) 38 | 39 | t.test('adds correct entry to index before finishing', async t => { 40 | const CACHE = t.testdir() 41 | await put(CACHE, KEY, CONTENT, { metadata: METADATA }) 42 | const entry = await index.find(CACHE, KEY) 43 | t.ok(entry, 'got an entry') 44 | t.equal(entry.key, KEY, 'entry has the right key') 45 | t.equal(entry.integrity, INTEGRITY, 'entry has the right integrity') 46 | t.same(entry.metadata, METADATA, 'metadata also inserted') 47 | }) 48 | 49 | t.test('optionally memoizes data on bulk insertion', async t => { 50 | const CACHE = t.testdir() 51 | const integrity = await put(CACHE, KEY, CONTENT, { 52 | metadata: METADATA, 53 | memoize: true, 54 | }) 55 | t.equal(integrity.toString(), INTEGRITY, 'integrity returned as usual') 56 | const entry = await index.find(CACHE, KEY) // index.find is not memoized 57 | t.same( 58 | memo.get(CACHE, KEY), 59 | { 60 | data: CONTENT, 61 | entry: entry, 62 | }, 63 | 'content inserted into memoization cache by key' 64 | ) 65 | t.same( 66 | memo.get.byDigest(CACHE, INTEGRITY), 67 | CONTENT, 68 | 'content inserted into memoization cache by integrity' 69 | ) 70 | }) 71 | 72 | t.test('optionally 
memoizes data on stream insertion', async t => { 73 | const CACHE = t.testdir() 74 | let int 75 | const stream = put 76 | .stream(CACHE, KEY, { 77 | metadata: METADATA, 78 | memoize: true, 79 | }) 80 | .on('integrity', (i) => { 81 | int = i 82 | }) 83 | await stream.end(CONTENT).promise() 84 | t.equal(int.toString(), INTEGRITY, 'integrity emitted as usual') 85 | const data = await fs.readFile(contentPath(CACHE, int)) 86 | t.same(data, CONTENT, 'contents are identical to inserted content') 87 | const entry = await index.find(CACHE, KEY) // index.find is not memoized 88 | t.same( 89 | memo.get(CACHE, KEY), 90 | { 91 | data: CONTENT, 92 | entry: entry, 93 | }, 94 | 'content inserted into memoization cache by key' 95 | ) 96 | t.same( 97 | memo.get.byDigest(CACHE, INTEGRITY), 98 | CONTENT, 99 | 'content inserted into memoization cache by integrity' 100 | ) 101 | }) 102 | 103 | t.test('errors if integrity errors', async t => { 104 | const CACHE = t.testdir() 105 | await t.rejects( 106 | put(CACHE, KEY, CONTENT, { integrity: 'sha1-BaDDigEST' }), 107 | { code: 'EINTEGRITY' }, 108 | 'got error from bad integrity' 109 | ) 110 | }) 111 | 112 | t.test('signals error if error writing to cache', async t => { 113 | const CACHE = t.testdir() 114 | const [bulkErr, streamErr] = await Promise.all([ 115 | put(CACHE, KEY, CONTENT, { 116 | size: 2, 117 | }) 118 | .then(() => { 119 | throw new Error('expected to get a bad size error') 120 | }) 121 | .catch((err) => err), 122 | 123 | put.stream(CACHE, KEY, { size: 2 }).end(CONTENT).promise() 124 | .then(() => { 125 | throw new Error('expected to get a bad size error') 126 | }) 127 | .catch((err) => err), 128 | ]) 129 | t.equal(bulkErr.code, 'EBADSIZE', 'got error from bulk write') 130 | t.equal(streamErr.code, 'EBADSIZE', 'got error from stream write') 131 | }) 132 | 133 | t.test('concurrent puts with identical content', async t => { 134 | const CACHE = t.testdir() 135 | await Promise.all([ 136 | put(CACHE, KEY, CONTENT), 137 | put(CACHE, `${KEY}2`, CONTENT), 138 | put(CACHE, KEY, CONTENT), 139 | put(CACHE, `${KEY}2`, CONTENT), 140 | put(CACHE, KEY, CONTENT), 141 | put(CACHE, `${KEY}2`, CONTENT), 142 | put(CACHE, KEY, CONTENT), 143 | put(CACHE, `${KEY}2`, CONTENT), 144 | put(CACHE, KEY, CONTENT), 145 | put(CACHE, `${KEY}2`, CONTENT), 146 | put(CACHE, KEY, CONTENT), 147 | put(CACHE, `${KEY}2`, CONTENT), 148 | ]) 149 | const tmpFiles = await fs.readdir(path.join(CACHE, 'tmp')) 150 | t.strictSame(tmpFiles, [], 'Nothing left in tmp') 151 | }) 152 | -------------------------------------------------------------------------------- /test/rm.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs/promises') 4 | const index = require('../lib/entry-index') 5 | const path = require('path') 6 | const t = require('tap') 7 | const ssri = require('ssri') 8 | 9 | const CacheContent = require('./fixtures/cache-content') 10 | const CONTENT = Buffer.from('foobarbaz') 11 | const KEY = 'my-test-key' 12 | const INTEGRITY = ssri.fromData(CONTENT) 13 | const METADATA = { foo: 'bar' } 14 | const contentPath = require('../lib/content/path') 15 | 16 | const get = require('..').get 17 | 18 | const rm = require('..').rm 19 | 20 | const cacheContent = CacheContent({ 21 | [INTEGRITY]: CONTENT, 22 | }) 23 | 24 | t.test('rm.entry removes entries, not content', async t => { 25 | const cache = t.testdir(cacheContent) 26 | await index.insert(cache, KEY, INTEGRITY, { metadata: METADATA }) 27 | t.equal(rm, rm.entry, 'rm is an 
alias for rm.entry') 28 | await rm.entry(cache, KEY) 29 | await t.rejects( 30 | get(cache, KEY), 31 | { 32 | code: 'ENOENT', 33 | message: new RegExp(KEY), 34 | }, 35 | 'entry no longer accessible' 36 | ) 37 | const data = await fs.readFile(contentPath(cache, INTEGRITY)) 38 | t.same(data, CONTENT, 'content remains in cache') 39 | }) 40 | 41 | t.test('rm.content removes content, not entries', (t) => { 42 | const cache = t.testdir(cacheContent) 43 | return index 44 | .insert(cache, KEY, INTEGRITY, { 45 | metadata: METADATA, 46 | }) 47 | .then(() => { 48 | return rm.content(cache, INTEGRITY) 49 | }) 50 | .then(() => { 51 | return get(cache, KEY) 52 | }) 53 | .then(() => { 54 | throw new Error('unexpected success') 55 | }) 56 | .catch((err) => { 57 | if (err.code === 'ENOENT') { 58 | t.match(err.message, /no such file/, 'entry no longer accessible') 59 | return 60 | } 61 | throw err 62 | }) 63 | .then(() => { 64 | return fs.readFile(contentPath(cache, INTEGRITY)) 65 | }) 66 | .then(() => { 67 | throw new Error('unexpected success') 68 | }) 69 | .catch((err) => { 70 | if (err.code === 'ENOENT') { 71 | t.match(err.message, /no such file/, 'content gone') 72 | return 73 | } 74 | throw err 75 | }) 76 | }) 77 | 78 | t.test('rm.all deletes content and index dirs', async t => { 79 | const cache = t.testdir(cacheContent) 80 | await index.insert(cache, KEY, INTEGRITY, { metadata: METADATA }) 81 | await fs.mkdir(path.join(cache, 'tmp')) 82 | await fs.writeFile(path.join(cache, 'other.js'), 'hi') 83 | await rm.all(cache) 84 | const files = await fs.readdir(cache) 85 | t.same( 86 | files.sort(), 87 | ['other.js', 'tmp'], 88 | 'removes content and index directories without touching other stuff' 89 | ) 90 | }) 91 | -------------------------------------------------------------------------------- /test/util/tmp.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs/promises') 4 | const path = require('path') 5 | const t = require('tap') 6 | 7 | const CACHE = t.testdir() 8 | const tmp = require('../../lib/util/tmp.js') 9 | 10 | t.test('creates a unique tmpdir inside the cache', async t => { 11 | const dir = await tmp.mkdir(CACHE) 12 | t.match( 13 | path.relative(CACHE, dir), 14 | /^tmp[\\/].*/, 15 | 'returns a path inside tmp' 16 | ) 17 | const s = await fs.stat(dir) 18 | t.ok(s.isDirectory(), 'path points to an existing directory') 19 | }) 20 | 21 | t.test('provides a utility that does resource disposal on tmp', async t => { 22 | const dir = await tmp.withTmp(CACHE, async (dir) => { 23 | const s = await fs.stat(dir) 24 | t.ok(s.isDirectory(), 'path points to an existing directory') 25 | return dir 26 | }) 27 | const [nope, yes] = await Promise.all([ 28 | fs.stat(dir) 29 | .then(() => { 30 | throw new Error('expected fail') 31 | }) 32 | .catch((err) => { 33 | if (err.code === 'ENOENT') { 34 | return undefined 35 | } 36 | 37 | throw err 38 | }), 39 | fs.stat(path.join(CACHE, 'tmp')), 40 | ]) 41 | t.notOk(nope, 'tmp subdir removed') 42 | t.ok(yes.isDirectory(), 'tmp parent dir left intact') 43 | }) 44 | 45 | t.test('withTmp should accept both opts and cb params', async t => { 46 | await tmp.withTmp(CACHE, { tmpPrefix: 'foo' }, dir => { 47 | t.ok(dir, 'dir should contain a valid response') 48 | }) 49 | }) 50 | -------------------------------------------------------------------------------- /test/verify.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const contentPath = 
require('../lib/content/path') 4 | const index = require('../lib/entry-index') 5 | const fs = require('fs/promises') 6 | const path = require('path') 7 | const t = require('tap') 8 | const ssri = require('ssri') 9 | 10 | const CacheContent = require('./fixtures/cache-content') 11 | 12 | const CONTENT = Buffer.from('foobarbaz', 'utf8') 13 | const KEY = 'my-test-key' 14 | const INTEGRITY = ssri.fromData(CONTENT) 15 | const METADATA = { foo: 'bar' } 16 | 17 | const cacache = require('..') 18 | const verify = cacache.verify 19 | 20 | // defines reusable errors 21 | const genericError = new Error('ERR') 22 | genericError.code = 'ERR' 23 | 24 | // helpers 25 | const getVerify = (t, opts) => t.mock('../lib/verify', opts) 26 | 27 | async function mockCache (t) { 28 | const cacheContent = CacheContent({ 29 | [INTEGRITY]: CONTENT, 30 | }) 31 | cacheContent.tmp = {} 32 | const CACHE = t.testdir(cacheContent) 33 | await index.insert(CACHE, KEY, INTEGRITY, { 34 | metadata: METADATA, 35 | }) 36 | return CACHE 37 | } 38 | 39 | t.test('removes corrupted index entries from buckets', async t => { 40 | const CACHE = await mockCache(t) 41 | const BUCKET = index.bucketPath(CACHE, KEY) 42 | const BUCKETDATA = await fs.readFile(BUCKET, 'utf8') 43 | // traaaaash 44 | await fs.appendFile(BUCKET, '\n234uhhh') 45 | const stats = await verify(CACHE) 46 | t.equal( 47 | stats.missingContent, 48 | 0, 49 | 'content valid because of good entry' 50 | ) 51 | t.equal(stats.totalEntries, 1, 'only one entry counted') 52 | const bucketData = await fs.readFile(BUCKET, 'utf8') 53 | const bucketEntry = JSON.parse(bucketData.split('\t')[1]) 54 | const targetEntry = JSON.parse(BUCKETDATA.split('\t')[1]) 55 | targetEntry.time = bucketEntry.time // different timestamps 56 | t.same( 57 | bucketEntry, 58 | targetEntry, 59 | 'bucket only contains good entry' 60 | ) 61 | }) 62 | 63 | t.test('removes shadowed index entries from buckets', async t => { 64 | const CACHE = await mockCache(t) 65 | const BUCKET = index.bucketPath(CACHE, KEY) 66 | const newEntry = await index.insert(CACHE, KEY, INTEGRITY, { metadata: 'meh' }) 67 | const stats = await verify(CACHE) 68 | t.equal( 69 | stats.missingContent, 70 | 0, 71 | 'content valid because of good entry' 72 | ) 73 | t.equal(stats.totalEntries, 1, 'only one entry counted') 74 | const bucketData = await fs.readFile(BUCKET, 'utf8') 75 | const stringified = JSON.stringify({ 76 | key: newEntry.key, 77 | integrity: newEntry.integrity.toString(), 78 | time: +bucketData.match(/"time":([0-9]+)/)[1], 79 | metadata: newEntry.metadata, 80 | }) 81 | t.equal( 82 | bucketData, 83 | `\n${index.hashEntry(stringified)}\t${stringified}`, 84 | 'only the most recent entry is still in the bucket' 85 | ) 86 | }) 87 | 88 | t.test('accepts function for custom user filtering of index entries', async t => { 89 | const KEY2 = KEY + 'aaa' 90 | const KEY3 = KEY + 'bbb' 91 | const CACHE = await mockCache(t) 92 | const [entryA, entryB] = await Promise.all([ 93 | index.insert(CACHE, KEY2, INTEGRITY, { 94 | metadata: 'haayyyy', 95 | }), 96 | index.insert(CACHE, KEY3, INTEGRITY, { 97 | metadata: 'haayyyy again', 98 | }), 99 | ]) 100 | const newEntries = { 101 | [entryA.key]: entryA, 102 | [entryB.key]: entryB, 103 | } 104 | const stats = await verify(CACHE, { 105 | filter (entry) { 106 | return entry.key.length === KEY2.length 107 | }, 108 | }) 109 | t.same( 110 | { 111 | verifiedContent: stats.verifiedContent, 112 | rejectedEntries: stats.rejectedEntries, 113 | totalEntries: stats.totalEntries, 114 | }, 115 | { 116 | 
verifiedContent: 1, 117 | rejectedEntries: 1, 118 | totalEntries: 2, 119 | }, 120 | 'reported relevant changes' 121 | ) 122 | const entries = await index.ls(CACHE) 123 | entries[KEY2].time = newEntries[KEY2].time 124 | entries[KEY3].time = newEntries[KEY3].time 125 | t.same(entries, newEntries, 'original entry not included') 126 | }) 127 | 128 | t.test('removes corrupted content', async t => { 129 | const CACHE = await mockCache(t) 130 | const cpath = contentPath(CACHE, INTEGRITY) 131 | await fs.truncate(cpath, CONTENT.length - 1) 132 | const stats = await verify(CACHE) 133 | delete stats.startTime 134 | delete stats.runTime 135 | delete stats.endTime 136 | t.same( 137 | stats, 138 | { 139 | verifiedContent: 0, 140 | reclaimedCount: 1, 141 | reclaimedSize: CONTENT.length - 1, 142 | badContentCount: 1, 143 | keptSize: 0, 144 | missingContent: 1, 145 | rejectedEntries: 1, 146 | totalEntries: 0, 147 | }, 148 | 'reported correct collection counts' 149 | ) 150 | await t.rejects( 151 | fs.stat(cpath), 152 | /no such file/, 153 | 'content no longer in cache' 154 | ) 155 | }) 156 | 157 | t.test('removes content not referenced by any entries', async t => { 158 | const CACHE = t.testdir( 159 | CacheContent({ 160 | [INTEGRITY]: CONTENT, 161 | }) 162 | ) 163 | const stats = await verify(CACHE) 164 | delete stats.startTime 165 | delete stats.runTime 166 | delete stats.endTime 167 | t.same( 168 | stats, 169 | { 170 | verifiedContent: 0, 171 | reclaimedCount: 1, 172 | reclaimedSize: CONTENT.length, 173 | badContentCount: 0, 174 | keptSize: 0, 175 | missingContent: 0, 176 | rejectedEntries: 0, 177 | totalEntries: 0, 178 | }, 179 | 'reported correct collection counts' 180 | ) 181 | }) 182 | 183 | t.test('cleans up contents of tmp dir', async t => { 184 | const CACHE = await mockCache(t) 185 | const tmpFile = path.join(CACHE, 'tmp', 'x') 186 | const misc = path.join(CACHE, 'y') 187 | await Promise.all([fs.writeFile(tmpFile, ''), fs.writeFile(misc, '')]) 188 | await verify(CACHE) 189 | const [err, stat] = await Promise.all([ 190 | fs.stat(tmpFile).catch((err) => { 191 | if (err.code === 'ENOENT') { 192 | return err 193 | } 194 | 195 | throw err 196 | }), 197 | fs.stat(misc), 198 | ]) 199 | t.equal(err.code, 'ENOENT', 'tmp file was blown away') 200 | t.ok(stat, 'misc file was not touched') 201 | }) 202 | 203 | t.test('writes a file with last verification time', async t => { 204 | const CACHE = t.testdir() 205 | await verify(CACHE) 206 | const [fromLastRun, fromFile] = await Promise.all([ 207 | verify.lastRun(CACHE), 208 | fs.readFile(path.join(CACHE, '_lastverified'), 'utf8').then((data) => { 209 | return new Date(parseInt(data)) 210 | }), 211 | ]) 212 | t.equal(+fromLastRun, +fromFile, 'last verified was written') 213 | }) 214 | 215 | t.test('missing file error when validating cache content', async t => { 216 | const missingFileError = new Error('ENOENT') 217 | missingFileError.code = 'ENOENT' 218 | const mockVerify = getVerify(t, { 219 | 'fs/promises': Object.assign({}, fs, { 220 | stat: async () => { 221 | throw missingFileError 222 | }, 223 | }), 224 | }) 225 | 226 | t.plan(1) 227 | const CACHE = await mockCache(t) 228 | await t.resolveMatch( 229 | mockVerify(CACHE), 230 | { 231 | verifiedContent: 0, 232 | rejectedEntries: 1, 233 | totalEntries: 0, 234 | }, 235 | 'should reject entry' 236 | ) 237 | }) 238 | 239 | t.test('unknown error when validating content', async t => { 240 | const mockVerify = getVerify(t, { 241 | 'fs/promises': Object.assign({}, fs, { 242 | stat: async () => { 243 | throw genericError 
244 | }, 245 | }), 246 | }) 247 | 248 | t.plan(1) 249 | const CACHE = await mockCache(t) 250 | await t.rejects( 251 | mockVerify(CACHE), 252 | genericError, 253 | 'should throw any unknown errors' 254 | ) 255 | }) 256 | 257 | t.test('unknown error when checking sri stream', async t => { 258 | const mockVerify = getVerify(t, { 259 | ssri: Object.assign({}, ssri, { 260 | checkStream: () => Promise.reject(genericError), 261 | }), 262 | }) 263 | 264 | const CACHE = await mockCache(t) 265 | await t.rejects( 266 | mockVerify(CACHE), 267 | genericError, 268 | 'should throw any unknown errors' 269 | ) 270 | }) 271 | 272 | t.test('unknown error when rebuilding bucket', async t => { 273 | // rebuild bucket uses stat after content-validation 274 | // shouldFail controls the right time to mock the error 275 | let shouldFail = false 276 | const mockVerify = getVerify(t, { 277 | 'fs/promises': Object.assign({}, fs, { 278 | stat: async (path) => { 279 | if (shouldFail) { 280 | throw genericError 281 | } 282 | shouldFail = true 283 | return fs.stat(path) 284 | }, 285 | }), 286 | }) 287 | 288 | const CACHE = await mockCache(t) 289 | await t.rejects( 290 | mockVerify(CACHE), 291 | genericError, 292 | 'should throw any unknown errors' 293 | ) 294 | }) 295 | 296 | t.test('re-builds the index with the size parameter', async t => { 297 | const KEY2 = KEY + 'aaa' 298 | const KEY3 = KEY + 'bbb' 299 | const CACHE = await mockCache(t) 300 | await Promise.all([ 301 | index.insert(CACHE, KEY2, INTEGRITY, { 302 | metadata: 'haayyyy', 303 | size: 20, 304 | }), 305 | index.insert(CACHE, KEY3, INTEGRITY, { 306 | metadata: 'haayyyy again', 307 | size: 30, 308 | }), 309 | ]) 310 | const newEntries = await index.ls(CACHE) 311 | const stats = await verify(CACHE) 312 | t.same( 313 | { 314 | verifiedContent: stats.verifiedContent, 315 | rejectedEntries: stats.rejectedEntries, 316 | totalEntries: stats.totalEntries, 317 | }, 318 | { 319 | verifiedContent: 1, 320 | rejectedEntries: 0, 321 | totalEntries: 3, 322 | }, 323 | 'reported relevant changes' 324 | ) 325 | const entries = await index.ls(CACHE) 326 | entries[KEY].time = newEntries[KEY].time 327 | entries[KEY2].time = newEntries[KEY2].time 328 | entries[KEY3].time = newEntries[KEY3].time 329 | t.same( 330 | entries, 331 | newEntries, 332 | 'original index entries not preserved' 333 | ) 334 | }) 335 | 336 | t.test('hash collisions', async t => { 337 | const mockVerify = getVerify(t, { 338 | '../lib/entry-index': Object.assign({}, index, { 339 | hashKey: () => 'aaa', 340 | }), 341 | }) 342 | 343 | t.plan(1) 344 | const CACHE = await mockCache(t) 345 | await index.insert(CACHE, 'foo', INTEGRITY, { 346 | metadata: 'foo', 347 | }) 348 | const stats = await mockVerify(CACHE) 349 | t.same( 350 | { 351 | verifiedContent: stats.verifiedContent, 352 | rejectedEntries: stats.rejectedEntries, 353 | totalEntries: stats.totalEntries, 354 | }, 355 | { 356 | verifiedContent: 1, 357 | rejectedEntries: 0, 358 | totalEntries: 2, 359 | }, 360 | 'should resolve with no errors' 361 | ) 362 | }) 363 | 364 | t.test('hash collisions excluded', async t => { 365 | const mockVerify = getVerify(t, { 366 | '../lib/entry-index': Object.assign({}, index, { 367 | hashKey: () => 'aaa', 368 | }), 369 | }) 370 | 371 | t.plan(1) 372 | const CACHE = await mockCache(t) 373 | await index.insert(CACHE, 'foo', INTEGRITY, { metadata: 'foo' }) 374 | const stats = await mockVerify(CACHE, { filter: () => null }) 375 | t.same( 376 | { 377 | verifiedContent: stats.verifiedContent, 378 | rejectedEntries: 
stats.rejectedEntries, 379 | totalEntries: stats.totalEntries, 380 | }, 381 | { 382 | verifiedContent: 0, 383 | rejectedEntries: 2, 384 | totalEntries: 0, 385 | }, 386 | 'should resolve while also excluding filtered out entries' 387 | ) 388 | }) 389 | 390 | t.test('handles multiple hashes of the same content', async t => { 391 | const cache = t.testdir() 392 | let integrity 393 | // anything other than the default (currently sha512) 394 | await cacache.put.stream(cache, 'test', { algorithms: ['sha256'] }).on('integrity', i => { 395 | integrity = i 396 | }).end('CONTENT!').promise() 397 | await cacache.put.stream(cache, 'test', { integrity }).end('CONTENT!').promise() 398 | await cacache.verify(cache) 399 | const ls = await cacache.ls(cache) 400 | t.match(ls.test.integrity, 'sha512') 401 | t.match(ls.test.integrity, 'sha256') 402 | }) 403 | 404 | t.test('does not clobber entry time', async t => { 405 | const cache = t.testdir() 406 | const content = Buffer.from('CONTENT!', 'utf8') 407 | await cacache.put(cache, 'test', content) 408 | const entryBefore = await cacache.get.info(cache, 'test') 409 | await cacache.verify(cache) 410 | const entryAfter = await cacache.get.info(cache, 'test') 411 | t.equal(entryBefore.time, entryAfter.time, 'time does not change') 412 | }) 413 | --------------------------------------------------------------------------------