├── .github ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── comment.yml │ ├── compat.yml │ └── release.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── DEVELOPMENT.md ├── LICENSE ├── README.md ├── dist-workspace.toml ├── docs ├── book.toml └── src │ ├── SUMMARY.md │ ├── build.md │ ├── contributing.md │ ├── extensions.md │ ├── index.md │ ├── installation.md │ ├── logo.svg │ ├── platforms.md │ └── test_coverage.md ├── src ├── find │ ├── main.rs │ ├── matchers │ │ ├── access.rs │ │ ├── delete.rs │ │ ├── empty.rs │ │ ├── entry.rs │ │ ├── exec.rs │ │ ├── fs.rs │ │ ├── glob.rs │ │ ├── group.rs │ │ ├── lname.rs │ │ ├── logical_matchers.rs │ │ ├── ls.rs │ │ ├── mod.rs │ │ ├── name.rs │ │ ├── path.rs │ │ ├── perm.rs │ │ ├── printer.rs │ │ ├── printf.rs │ │ ├── prune.rs │ │ ├── quit.rs │ │ ├── regex.rs │ │ ├── samefile.rs │ │ ├── size.rs │ │ ├── stat.rs │ │ ├── time.rs │ │ ├── type_matcher.rs │ │ └── user.rs │ └── mod.rs ├── lib.rs ├── testing │ └── commandline │ │ └── main.rs └── xargs │ ├── main.rs │ └── mod.rs ├── test_data ├── depth │ ├── 1 │ │ ├── 2 │ │ │ ├── 3 │ │ │ │ └── f3 │ │ │ └── f2 │ │ └── f1 │ └── f0 ├── links │ ├── abbbc │ └── subdir │ │ └── test ├── simple │ ├── abbbc │ └── subdir │ │ └── ABBBC └── size │ └── 512bytes ├── tests ├── common │ ├── mod.rs │ └── test_helpers.rs ├── exec_unit_tests.rs ├── find_cmd_tests.rs ├── find_exec_tests.rs └── xargs_tests.rs └── util ├── build-bfs.sh ├── build-gnu.sh ├── compare_bfs_result.py ├── compare_gnu_result.py ├── diff-bfs.sh └── diff-gnu.sh /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "cargo" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | open-pull-requests-limit: 5 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: daily 12 | open-pull-requests-limit: 5 13 | 
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: Basic CI 4 | 5 | jobs: 6 | check: 7 | name: cargo check 8 | runs-on: ${{ matrix.os }} 9 | strategy: 10 | matrix: 11 | os: [ubuntu-latest, macOS-latest, windows-latest] 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Install `rust` toolchain 15 | run: | 16 | ## Install `rust` toolchain 17 | rustup toolchain install stable --no-self-update -c rustfmt --profile minimal 18 | rustup default stable 19 | 20 | # For bindgen: https://github.com/rust-lang/rust-bindgen/issues/1797 21 | - uses: KyleMayes/install-llvm-action@v2 22 | if: matrix.os == 'windows-latest' 23 | with: 24 | version: "11.0" 25 | directory: ${{ runner.temp }}/llvm 26 | - run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV 27 | if: matrix.os == 'windows-latest' 28 | 29 | - name: Check 30 | run: | 31 | cargo check --all --all-features 32 | 33 | test: 34 | name: cargo test 35 | runs-on: ${{ matrix.os }} 36 | strategy: 37 | matrix: 38 | os: [ubuntu-latest, macOS-latest, windows-latest] 39 | steps: 40 | - uses: actions/checkout@v4 41 | - name: Install `rust` toolchain 42 | run: | 43 | ## Install `rust` toolchain 44 | rustup toolchain install stable --no-self-update -c rustfmt --profile minimal 45 | rustup default stable 46 | 47 | # For bindgen: https://github.com/rust-lang/rust-bindgen/issues/1797 48 | - uses: KyleMayes/install-llvm-action@v2 49 | if: matrix.os == 'windows-latest' 50 | with: 51 | version: "11.0" 52 | directory: ${{ runner.temp }}/llvm 53 | - run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV 54 | if: matrix.os == 'windows-latest' 55 | 56 | - name: Test 57 | run: | 58 | cargo test 59 | 60 | fmt: 61 | name: cargo fmt --all -- --check 62 | runs-on: ubuntu-latest 63 | steps: 64 | - uses: 
actions/checkout@v4 65 | - name: Install `rust` toolchain 66 | run: | 67 | ## Install `rust` toolchain 68 | rustup toolchain install stable --no-self-update -c rustfmt --profile minimal 69 | rustup default stable 70 | 71 | - run: rustup component add rustfmt 72 | - name: cargo fmt 73 | run: | 74 | cargo fmt --all -- --check 75 | 76 | clippy: 77 | name: cargo clippy -- -D warnings 78 | runs-on: ubuntu-latest 79 | steps: 80 | - uses: actions/checkout@v4 81 | - name: Install `rust` toolchain 82 | run: | 83 | ## Install `rust` toolchain 84 | rustup toolchain install stable --no-self-update -c rustfmt --profile minimal 85 | rustup default stable 86 | - run: rustup component add clippy 87 | - name: cargo clippy 88 | run: | 89 | cargo clippy --all-targets -- -D warnings 90 | 91 | grcov: 92 | name: Code coverage 93 | runs-on: ${{ matrix.os }} 94 | strategy: 95 | matrix: 96 | os: 97 | - ubuntu-latest 98 | toolchain: 99 | - nightly 100 | cargo_flags: 101 | - "--all-features" 102 | steps: 103 | - name: Checkout source code 104 | uses: actions/checkout@v4 105 | 106 | - name: Install `rust` toolchain 107 | run: | 108 | rustup toolchain install nightly --no-self-update -c rustfmt --profile minimal 109 | rustup default nightly 110 | 111 | - name: Install cargo-llvm-cov 112 | uses: taiki-e/install-action@cargo-llvm-cov 113 | - name: Generate code coverage 114 | run: cargo llvm-cov --all-features --lcov --branch --output-path lcov.info 115 | 116 | - name: Upload coverage as artifact 117 | uses: actions/upload-artifact@v4 118 | with: 119 | name: lcov.info 120 | path: lcov.info 121 | 122 | - name: Upload coverage to codecov.io 123 | uses: codecov/codecov-action@v5 124 | with: 125 | files: lcov.info 126 | fail_ci_if_error: true 127 | env: 128 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 129 | -------------------------------------------------------------------------------- /.github/workflows/comment.yml: -------------------------------------------------------------------------------- 
1 | on: 2 | workflow_run: 3 | workflows: [External-testsuites] 4 | types: [completed] 5 | 6 | name: Comment Test results on the PR 7 | 8 | permissions: {} 9 | jobs: 10 | upload-pr-comment: 11 | if: ${{ github.event.workflow_run.event == 'pull_request' }} 12 | 13 | name: Upload PR comment 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | pull-requests: write 18 | 19 | steps: 20 | - name: List Annotations 21 | uses: actions/github-script@v7 22 | with: 23 | script: | 24 | let artifacts = await github.rest.actions.listWorkflowRunArtifacts({ 25 | owner: context.repo.owner, 26 | repo: context.repo.repo, 27 | run_id: ${{ github.event.workflow_run.id }}, 28 | }); 29 | 30 | // List all artifacts 31 | let matchArtifact = artifacts.data.artifacts.filter((artifact) => { 32 | return artifact.name == "comment" 33 | })[0]; 34 | 35 | // Download the artifact to github.workspace 36 | let download = await github.rest.actions.downloadArtifact({ 37 | owner: context.repo.owner, 38 | repo: context.repo.repo, 39 | artifact_id: matchArtifact.id, 40 | archive_format: 'zip', 41 | }); 42 | 43 | let fs = require('fs'); 44 | fs.writeFileSync('${{ github.workspace }}/comment.zip', Buffer.from(download.data)); 45 | 46 | - run: unzip comment.zip 47 | 48 | - name: Comment on PR 49 | uses: actions/github-script@v7 50 | with: 51 | github-token: ${{ secrets.GITHUB_TOKEN }} 52 | script: | 53 | let fs = require('fs'); 54 | let annotations = JSON.parse(fs.readFileSync('./annotations.json', 'utf8')); 55 | 56 | let annotationContent = annotations 57 | .data 58 | .map(annotation => `${annotation.run}: ${annotation.annotation.message}`) 59 | .join('\n'); 60 | 61 | // check if no changes 62 | let gnuTestReport = annotationContent.includes('Run GNU findutils tests: Gnu tests No changes'); 63 | let bfsTestReport = annotationContent.includes('Run BFS tests: BFS tests No changes'); 64 | 65 | if (gnuTestReport && bfsTestReport) { 66 | console.log('No changes'); 67 | return; 68 | } 69 | 70 | // 
Comment on the PR 71 | github.rest.issues.createComment({ 72 | owner: context.repo.owner, 73 | repo: context.repo.repo, 74 | issue_number: annotations.pull_request_number, 75 | body: 'Commit ${{ github.event.workflow_run.head_sha }} has GNU testsuite comparison:\n```\n' + annotationContent + '\n```\n' 76 | }); -------------------------------------------------------------------------------- /.github/workflows/compat.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: External-testsuites 4 | 5 | jobs: 6 | gnu-tests: 7 | permissions: 8 | actions: read 9 | 10 | name: Run GNU findutils tests 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout findutils 14 | uses: actions/checkout@v4 15 | with: 16 | path: findutils 17 | - name: Checkout GNU findutils 18 | uses: actions/checkout@v4 19 | with: 20 | repository: gnu-mirror-unofficial/findutils 21 | path: findutils.gnu 22 | ref: 5768a03ddfb5e18b1682e339d6cdd24ff721c510 23 | submodules: false 24 | 25 | - name: Override submodule URL and initialize submodules 26 | # Use github instead of upstream git server 27 | run: | 28 | git submodule sync --recursive 29 | git config submodule.gnulib.url https://github.com/coreutils/gnulib.git 30 | git submodule update --init --recursive --depth 1 31 | working-directory: findutils.gnu 32 | - name: Install `rust` toolchain 33 | run: | 34 | ## Install `rust` toolchain 35 | rustup toolchain install stable --no-self-update -c rustfmt --profile minimal 36 | rustup default stable 37 | - name: Install dependencies 38 | shell: bash 39 | run: | 40 | # Enable sources & install dependencies 41 | sudo sed -i 's/^Types: deb$/Types: deb deb-src/' /etc/apt/sources.list.d/ubuntu.sources 42 | sudo apt-get update 43 | sudo apt-get build-dep findutils 44 | - name: Run GNU tests 45 | shell: bash 46 | run: | 47 | cd findutils 48 | bash util/build-gnu.sh ||: 49 | - name: Extract testing info 50 | shell: bash 51 | run: | 52 | 53 
| - name: Upload gnu-test-report 54 | uses: actions/upload-artifact@v4 55 | with: 56 | name: gnu-test-report 57 | path: | 58 | findutils.gnu/find/testsuite/*.log 59 | findutils.gnu/xargs/testsuite/*.log 60 | findutils.gnu/tests/**/*.log 61 | - name: Upload gnu-result 62 | uses: actions/upload-artifact@v4 63 | with: 64 | name: gnu-result 65 | path: gnu-result.json 66 | - name: Download artifacts (gnu-result and gnu-test-report) 67 | uses: actions/github-script@v7 68 | with: 69 | script: | 70 | let fs = require('fs'); 71 | fs.mkdirSync('${{ github.workspace }}/dl', { recursive: true }); 72 | 73 | async function downloadArtifact(artifactName) { 74 | // List all artifacts from the workflow run 75 | let artifacts = await github.rest.actions.listWorkflowRunArtifacts({ 76 | owner: context.repo.owner, 77 | repo: context.repo.repo, 78 | run_id: ${{ github.run_id }}, 79 | }); 80 | 81 | // Find the specified artifact 82 | let matchArtifact = artifacts.data.artifacts.find((artifact) => artifact.name === artifactName); 83 | if (!matchArtifact) { 84 | throw new Error(`Artifact "${artifactName}" not found.`); 85 | } 86 | 87 | // Download the artifact 88 | let download = await github.rest.actions.downloadArtifact({ 89 | owner: context.repo.owner, 90 | repo: context.repo.repo, 91 | artifact_id: matchArtifact.id, 92 | archive_format: 'zip', 93 | }); 94 | 95 | // Save the artifact to a file 96 | fs.writeFileSync(`${{ github.workspace }}/dl/${artifactName}.zip`, Buffer.from(download.data)); 97 | } 98 | 99 | // Download the required artifacts 100 | await downloadArtifact("gnu-result"); 101 | await downloadArtifact("gnu-test-report"); 102 | 103 | - name: Compare failing tests against master 104 | shell: bash 105 | run: | 106 | ./findutils/util/diff-gnu.sh ./dl ./findutils.gnu 107 | - name: Compare against main results 108 | shell: bash 109 | run: | 110 | unzip dl/gnu-result.zip -d dl/ 111 | unzip dl/gnu-test-report.zip -d dl/ 112 | mv dl/gnu-result.json latest-gnu-result.json 113 | 
python findutils/util/compare_gnu_result.py 114 | 115 | bfs-tests: 116 | name: Run BFS tests 117 | runs-on: ubuntu-latest 118 | steps: 119 | - name: Checkout findutils 120 | uses: actions/checkout@v4 121 | with: 122 | path: findutils 123 | - name: Checkout BFS 124 | uses: actions/checkout@v4 125 | with: 126 | repository: tavianator/bfs 127 | path: bfs 128 | ref: "4.0" 129 | - name: Install `rust` toolchain 130 | run: | 131 | ## Install `rust` toolchain 132 | rustup toolchain install stable --no-self-update -c rustfmt --profile minimal 133 | rustup default stable 134 | - name: Install dependencies 135 | shell: bash 136 | run: | 137 | # Enable sources & install dependencies 138 | sudo sed -i 's/^Types: deb$/Types: deb deb-src/' /etc/apt/sources.list.d/ubuntu.sources 139 | sudo apt-get update 140 | sudo apt-get build-dep bfs 141 | - name: Run BFS tests 142 | shell: bash 143 | run: | 144 | cd findutils 145 | bash util/build-bfs.sh ||: 146 | - name: Upload bfs-test-report 147 | uses: actions/upload-artifact@v4 148 | with: 149 | name: bfs-test-report 150 | path: bfs/tests.log 151 | - name: Upload bfs-result 152 | uses: actions/upload-artifact@v4 153 | with: 154 | name: bfs-result 155 | path: bfs-result.json 156 | - name: Download artifacts (gnu-result and bfs-test-report) 157 | uses: actions/github-script@v7 158 | with: 159 | script: | 160 | let fs = require('fs'); 161 | fs.mkdirSync('${{ github.workspace }}/dl', { recursive: true }); 162 | 163 | async function downloadArtifact(artifactName) { 164 | // List all artifacts from the workflow run 165 | let artifacts = await github.rest.actions.listWorkflowRunArtifacts({ 166 | owner: context.repo.owner, 167 | repo: context.repo.repo, 168 | run_id: ${{ github.run_id }}, 169 | }); 170 | 171 | // Find the specified artifact 172 | let matchArtifact = artifacts.data.artifacts.find((artifact) => artifact.name === artifactName); 173 | if (!matchArtifact) { 174 | throw new Error(`Artifact "${artifactName}" not found.`); 175 | } 176 | 
177 | // Download the artifact 178 | let download = await github.rest.actions.downloadArtifact({ 179 | owner: context.repo.owner, 180 | repo: context.repo.repo, 181 | artifact_id: matchArtifact.id, 182 | archive_format: 'zip', 183 | }); 184 | 185 | // Save the artifact to a file 186 | fs.writeFileSync(`${{ github.workspace }}/dl/${artifactName}.zip`, Buffer.from(download.data)); 187 | } 188 | 189 | // Download the required artifacts 190 | await downloadArtifact("bfs-result"); 191 | await downloadArtifact("bfs-test-report"); 192 | - name: Compare failing tests against main 193 | shell: bash 194 | run: | 195 | ./findutils/util/diff-bfs.sh dl/tests.log bfs/tests.log 196 | - name: Compare against main results 197 | shell: bash 198 | run: | 199 | unzip dl/bfs-result.zip -d dl/ 200 | unzip dl/bfs-test-report.zip -d dl/ 201 | mv dl/bfs-result.json latest-bfs-result.json 202 | python findutils/util/compare_bfs_result.py 203 | 204 | upload-annotations: 205 | name: Upload annotations 206 | runs-on: ubuntu-latest 207 | needs: [gnu-tests, bfs-tests] 208 | if: ${{ github.event_name == 'pull_request' }} 209 | 210 | steps: 211 | - name: List Annotations 212 | uses: actions/github-script@v7 213 | 214 | with: 215 | script: | 216 | let runs = await github.rest.checks.listForRef({ 217 | owner: context.repo.owner, 218 | repo: context.repo.repo, 219 | ref: '${{ github.event.pull_request.head.sha }}' 220 | }); 221 | 222 | let names = ['Run GNU findutils tests', 'Run BFS tests']; 223 | let results = []; 224 | runs.data.check_runs.filter(check => names.includes(check.name)).forEach(run => results.push(run)); 225 | 226 | let annotations = { data: [], pull_request_number: '${{ github.event.number }}' }; 227 | for (let result of results) { 228 | let run = await github.rest.checks.listAnnotations({ 229 | owner: context.repo.owner, 230 | repo: context.repo.repo, 231 | check_run_id: result.id 232 | }); 233 | 234 | run.data.forEach(data => { 235 | annotations.data.push({ 236 | run: result.name, 
237 | annotation: data 238 | }); 239 | }); 240 | } 241 | 242 | // Remove duplicate items. 243 | annotations.data = annotations.data.filter((value, index, self) => 244 | self.findIndex(v => v.annotation.message === value.annotation.message) === index); 245 | 246 | let fs = require('fs'); 247 | fs.writeFileSync('${{ github.workspace }}/annotations.json', JSON.stringify(annotations)); 248 | 249 | - name: Upload annotations 250 | uses: actions/upload-artifact@v4 251 | with: 252 | name: comment 253 | path: annotations.json 254 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/ 2 | # 3 | # Copyright 2022-2024, axodotdev 4 | # SPDX-License-Identifier: MIT or Apache-2.0 5 | # 6 | # CI that: 7 | # 8 | # * checks for a Git Tag that looks like a release 9 | # * builds artifacts with dist (archives, installers, hashes) 10 | # * uploads those artifacts to temporary workflow zip 11 | # * on success, uploads the artifacts to a GitHub Release 12 | # 13 | # Note that the GitHub Release will be created with a generated 14 | # title/body based on your changelogs. 15 | 16 | name: Release 17 | permissions: 18 | "contents": "write" 19 | 20 | # This task will run whenever you push a git tag that looks like a version 21 | # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. 22 | # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where 23 | # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION 24 | # must be a Cargo-style SemVer Version (must have at least major.minor.patch). 25 | # 26 | # If PACKAGE_NAME is specified, then the announcement will be for that 27 | # package (erroring out if it doesn't have the given version or isn't dist-able). 
28 | # 29 | # If PACKAGE_NAME isn't specified, then the announcement will be for all 30 | # (dist-able) packages in the workspace with that version (this mode is 31 | # intended for workspaces with only one dist-able package, or with all dist-able 32 | # packages versioned/released in lockstep). 33 | # 34 | # If you push multiple tags at once, separate instances of this workflow will 35 | # spin up, creating an independent announcement for each one. However, GitHub 36 | # will hard limit this to 3 tags per commit, as it will assume more tags is a 37 | # mistake. 38 | # 39 | # If there's a prerelease-style suffix to the version, then the release(s) 40 | # will be marked as a prerelease. 41 | on: 42 | pull_request: 43 | push: 44 | tags: 45 | - '**[0-9]+.[0-9]+.[0-9]+*' 46 | 47 | jobs: 48 | # Run 'dist plan' (or host) to determine what tasks we need to do 49 | plan: 50 | runs-on: "ubuntu-latest" 51 | outputs: 52 | val: ${{ steps.plan.outputs.manifest }} 53 | tag: ${{ !github.event.pull_request && github.ref_name || '' }} 54 | tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} 55 | publishing: ${{ !github.event.pull_request }} 56 | env: 57 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 58 | steps: 59 | - uses: actions/checkout@v4 60 | with: 61 | submodules: recursive 62 | - name: Install dist 63 | # we specify bash to get pipefail; it guards against the `curl` command 64 | # failing. otherwise `sh` won't catch that `curl` returned non-0 65 | shell: bash 66 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.28.0/cargo-dist-installer.sh | sh" 67 | - name: Cache dist 68 | uses: actions/upload-artifact@v4 69 | with: 70 | name: cargo-dist-cache 71 | path: ~/.cargo/bin/dist 72 | # sure would be cool if github gave us proper conditionals... 
73 | # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible 74 | # functionality based on whether this is a pull_request, and whether it's from a fork. 75 | # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* 76 | # but also really annoying to build CI around when it needs secrets to work right.) 77 | - id: plan 78 | run: | 79 | dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json 80 | echo "dist ran successfully" 81 | cat plan-dist-manifest.json 82 | echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" 83 | - name: "Upload dist-manifest.json" 84 | uses: actions/upload-artifact@v4 85 | with: 86 | name: artifacts-plan-dist-manifest 87 | path: plan-dist-manifest.json 88 | 89 | # Build and packages all the platform-specific things 90 | build-local-artifacts: 91 | name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) 92 | # Let the initial task tell us to not run (currently very blunt) 93 | needs: 94 | - plan 95 | if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} 96 | strategy: 97 | fail-fast: false 98 | # Target platforms/runners are computed by dist in create-release. 
99 | # Each member of the matrix has the following arguments: 100 | # 101 | # - runner: the github runner 102 | # - dist-args: cli flags to pass to dist 103 | # - install-dist: expression to run to install dist on the runner 104 | # 105 | # Typically there will be: 106 | # - 1 "global" task that builds universal installers 107 | # - N "local" tasks that build each platform's binaries and platform-specific installers 108 | matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} 109 | runs-on: ${{ matrix.runner }} 110 | container: ${{ matrix.container && matrix.container.image || null }} 111 | env: 112 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 113 | BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json 114 | steps: 115 | - name: enable windows longpaths 116 | run: | 117 | git config --global core.longpaths true 118 | - uses: actions/checkout@v4 119 | with: 120 | submodules: recursive 121 | - name: Install Rust non-interactively if not already installed 122 | if: ${{ matrix.container }} 123 | run: | 124 | if ! 
command -v cargo > /dev/null 2>&1; then 125 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y 126 | echo "$HOME/.cargo/bin" >> $GITHUB_PATH 127 | fi 128 | - name: Install dist 129 | run: ${{ matrix.install_dist.run }} 130 | # Get the dist-manifest 131 | - name: Fetch local artifacts 132 | uses: actions/download-artifact@v4 133 | with: 134 | pattern: artifacts-* 135 | path: target/distrib/ 136 | merge-multiple: true 137 | - name: Install dependencies 138 | run: | 139 | ${{ matrix.packages_install }} 140 | - name: Build artifacts 141 | run: | 142 | # Actually do builds and make zips and whatnot 143 | dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json 144 | echo "dist ran successfully" 145 | - id: cargo-dist 146 | name: Post-build 147 | # We force bash here just because github makes it really hard to get values up 148 | # to "real" actions without writing to env-vars, and writing to env-vars has 149 | # inconsistent syntax between shell and powershell. 
150 |         shell: bash
151 |         run: |
152 |           # Parse out what we just built and upload it to scratch storage
153 |           echo "paths<<EOF" >> "$GITHUB_OUTPUT"
154 |           dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT"
155 |           echo "EOF" >> "$GITHUB_OUTPUT"
156 | 
157 |           cp dist-manifest.json "$BUILD_MANIFEST_NAME"
158 |       - name: "Upload artifacts"
159 |         uses: actions/upload-artifact@v4
160 |         with:
161 |           name: artifacts-build-local-${{ join(matrix.targets, '_') }}
162 |           path: |
163 |             ${{ steps.cargo-dist.outputs.paths }}
164 |             ${{ env.BUILD_MANIFEST_NAME }}
165 | 
166 |   # Build and package all the platform-agnostic(ish) things
167 |   build-global-artifacts:
168 |     needs:
169 |       - plan
170 |       - build-local-artifacts
171 |     runs-on: "ubuntu-latest"
172 |     env:
173 |       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
174 |       BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
175 |     steps:
176 |       - uses: actions/checkout@v4
177 |         with:
178 |           submodules: recursive
179 |       - name: Install cached dist
180 |         uses: actions/download-artifact@v4
181 |         with:
182 |           name: cargo-dist-cache
183 |           path: ~/.cargo/bin/
184 |       - run: chmod +x ~/.cargo/bin/dist
185 |       # Get all the local artifacts for the global tasks to use (for e.g.
checksums) 186 |       - name: Fetch local artifacts 187 |         uses: actions/download-artifact@v4 188 |         with: 189 |           pattern: artifacts-* 190 |           path: target/distrib/ 191 |           merge-multiple: true 192 |       - id: cargo-dist 193 |         shell: bash 194 |         run: | 195 |           dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json 196 |           echo "dist ran successfully" 197 | 198 |           # Parse out what we just built and upload it to scratch storage 199 |           echo "paths<<EOF" >> "$GITHUB_OUTPUT" 200 |           jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" 201 |           echo "EOF" >> "$GITHUB_OUTPUT" 202 | 203 |           cp dist-manifest.json "$BUILD_MANIFEST_NAME" 204 |       - name: "Upload artifacts" 205 |         uses: actions/upload-artifact@v4 206 |         with: 207 |           name: artifacts-build-global 208 |           path: | 209 |             ${{ steps.cargo-dist.outputs.paths }} 210 |             ${{ env.BUILD_MANIFEST_NAME }} 211 |   # Determines if we should publish/announce 212 |   host: 213 |     needs: 214 |       - plan 215 |       - build-local-artifacts 216 |       - build-global-artifacts 217 |     # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) 218 |     if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} 219 |     env: 220 |       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 221 |     runs-on: "ubuntu-latest" 222 |     outputs: 223 |       val: ${{ steps.host.outputs.manifest }} 224 |     steps: 225 |       - uses: actions/checkout@v4 226 |         with: 227 |           submodules: recursive 228 |       - name: Install cached dist 229 |         uses: actions/download-artifact@v4 230 |         with: 231 |           name: cargo-dist-cache 232 |           path: ~/.cargo/bin/ 233 |       - run: chmod +x ~/.cargo/bin/dist 234 |       # Fetch artifacts from scratch-storage 235 |       - name: Fetch artifacts 236 |         uses: actions/download-artifact@v4 237 |         with: 238 |           pattern: artifacts-* 239 |           path:
target/distrib/ 240 | merge-multiple: true 241 | - id: host 242 | shell: bash 243 | run: | 244 | dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json 245 | echo "artifacts uploaded and released successfully" 246 | cat dist-manifest.json 247 | echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" 248 | - name: "Upload dist-manifest.json" 249 | uses: actions/upload-artifact@v4 250 | with: 251 | # Overwrite the previous copy 252 | name: artifacts-dist-manifest 253 | path: dist-manifest.json 254 | # Create a GitHub Release while uploading all files to it 255 | - name: "Download GitHub Artifacts" 256 | uses: actions/download-artifact@v4 257 | with: 258 | pattern: artifacts-* 259 | path: artifacts 260 | merge-multiple: true 261 | - name: Cleanup 262 | run: | 263 | # Remove the granular manifests 264 | rm -f artifacts/*-dist-manifest.json 265 | - name: Create GitHub Release 266 | env: 267 | PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" 268 | ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" 269 | ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" 270 | RELEASE_COMMIT: "${{ github.sha }}" 271 | run: | 272 | # Write and read notes from a file to avoid quoting breaking things 273 | echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt 274 | 275 | gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* 276 | 277 | announce: 278 | needs: 279 | - plan 280 | - host 281 | # use "always() && ..." to allow us to wait for all publish jobs while 282 | # still allowing individual publish jobs to skip themselves (for prereleases). 283 | # "host" however must run to completion, no skipping allowed! 
284 | if: ${{ always() && needs.host.result == 'success' }} 285 | runs-on: "ubuntu-latest" 286 | env: 287 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 288 | steps: 289 | - uses: actions/checkout@v4 290 | with: 291 | submodules: recursive 292 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .gitignore 3 | .project 4 | .cargo 5 | .settings 6 | test_data/links/link-* 7 | /public/ 8 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: rust-linting 5 | name: Rust linting 6 | description: Run cargo fmt on files included in the commit. 7 | entry: cargo +nightly fmt -- 8 | pass_filenames: true 9 | types: [file, rust] 10 | language: system 11 | - id: rust-clippy 12 | name: Rust clippy 13 | description: Run cargo clippy on files included in the commit. 14 | entry: cargo +nightly clippy --workspace --all-targets --all-features -- 15 | pass_filenames: false 16 | types: [file, rust] 17 | language: system 18 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socioeconomic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 
11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 
55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | sylvestre@debian.org. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 
99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | . 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | . Translations are available at 128 | . 129 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Contributing to findutils 4 | 5 | Hi! Welcome to uutils/findutils! 6 | 7 | Thanks for wanting to contribute to this project! This document explains 8 | everything you need to know to contribute. Before you start make sure to also 9 | check out these documents: 10 | 11 | - Our community's [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md). 12 | - [DEVELOPMENT.md](./DEVELOPMENT.md) for setting up your development 13 | environment. 
14 | 15 | Now follows a very important warning: 16 | 17 | > [!WARNING] 18 | > uutils is original code and cannot contain any code from GNU or 19 | > other implementations. This means that **we cannot accept any changes based on 20 | > the GNU source code**. To make sure that cannot happen, **you cannot link to 21 | > the GNU source code** either. It is however possible to look at other implementations 22 | > under a BSD or MIT license like [Apple's implementation](https://opensource.apple.com/source/file_cmds/) 23 | > or [OpenBSD](https://github.com/openbsd/src/tree/master/bin). 24 | 25 | Finally, feel free to join our [Discord](https://discord.gg/wQVJbvJ)! 26 | 27 | 28 | 29 | ## Design Goals 30 | 31 | We have the following goals with our development: 32 | 33 | - **Compatible**: The utilities should be a drop-in replacement for the GNU 34 | coreutils. 35 | - **Cross-platform**: All utilities should run on as many of the supported 36 | platforms as possible. 37 | - **Reliable**: The utilities should never unexpectedly fail. 38 | - **Performant**: Our utilities should be written in fast idiomatic Rust. We aim 39 | to match or exceed the performance of the GNU utilities. 40 | - **Well-tested**: We should have a lot of tests to be able to guarantee 41 | reliability and compatibility. 42 | 43 | ## How to Help 44 | 45 | There are several ways to help and writing code is just one of them. Reporting 46 | issues and writing documentation are just as important as writing code. 47 | 48 | ### Reporting Issues 49 | 50 | We can't fix bugs we don't know about, so good issues are super helpful! Here 51 | are some tips for writing good issues: 52 | 53 | - If you find a bug, make sure it's still a problem on the `main` branch. 54 | - Search through the existing issues to see whether it has already been 55 | reported. 56 | - Make sure to include all relevant information, such as: 57 | - Which version of uutils did you check? 
58 | - Which version of GNU coreutils are you comparing with? 59 | - What platform are you on? 60 | - Provide a way to reliably reproduce the issue. 61 | - Be as specific as possible! 62 | 63 | ### Writing Documentation 64 | 65 | There's never enough documentation. If you come across any documentation that 66 | could be improved, feel free to submit a PR for it! 67 | 68 | ### Writing Code 69 | 70 | If you want to submit a PR, make sure that you've discussed the solution with 71 | the maintainers beforehand. We want to avoid situations where you put a lot of 72 | work into a fix that we can't merge! If there's no issue for what you're trying 73 | to fix yet, make one _before_ you start working on the PR. 74 | 75 | Generally, we try to follow what GNU is doing in terms of options and behavior. 76 | It is recommended to look at the GNU findtils manual 77 | ([on the web](https://www.gnu.org/software/findutils/manual/html_node/index.html), 78 | or locally using `info `). It is more in depth than the man pages and 79 | provides a good description of available features and their implementation 80 | details. But remember, you cannot look at the GNU source code! 81 | 82 | Also remember that we can only merge PRs which pass our test suite, follow 83 | rustfmt, and do not have any warnings from clippy. See 84 | [DEVELOPMENT.md](./DEVELOPMENT.md) for more information. Be sure to also read 85 | about our [Rust style](#our-rust-style). 86 | 87 | ## Our Rust Style 88 | 89 | We want uutils to be written in idiomatic Rust, so here are some guidelines to 90 | follow. Some of these are aspirational, meaning that we don't do them correctly 91 | everywhere in the code. If you find violations of the advice below, feel free to 92 | submit a patch! 93 | 94 | ### Don't `panic!` 95 | 96 | The coreutils should be very reliable. This means that we should never `panic!`. 97 | Therefore, you should avoid using `.unwrap()` and `panic!`. 
Sometimes the use of 98 | `unreachable!` can be justified with a comment explaining why that code is 99 | unreachable. 100 | 101 | ### Don't `exit` 102 | 103 | We want uutils to be embeddable in other programs. This means that no function 104 | in uutils should exit the program. Doing so would also lead to code with more 105 | confusing control flow. Avoid therefore `std::process::exit` and similar 106 | functions which exit the program early. 107 | 108 | ### `unsafe` 109 | 110 | uutils cannot be entirely safe, because we have to call out to `libc` and do 111 | syscalls. However, we still want to limit our use of `unsafe`. We generally only 112 | accept `unsafe` for FFI, with very few exceptions. Note that performance is very 113 | rarely a valid argument for using `unsafe`. 114 | 115 | If you still need to write code with `unsafe`, make sure to read the 116 | [Rustonomicon](https://doc.rust-lang.org/nomicon/intro.html) and annotate the 117 | calls with `// SAFETY:` comments explaining why the use of `unsafe` is sound. 118 | 119 | ### Macros 120 | 121 | Macros can be a great tool, but they are also usually hard to understand. They 122 | should be used sparingly. Make sure to explore simpler options before you reach 123 | for a solution involving macros. 124 | 125 | ### `str`, `OsStr` & `Path` 126 | 127 | Rust has many string-like types, and sometimes it's hard to choose the right 128 | one. It's tempting to use `str` (and `String`) for everything, but that is not 129 | always the right choice for uutils, because we need to support invalid UTF-8, 130 | just like the GNU coreutils. For example, paths on Linux might not be valid 131 | UTF-8! Whenever we are dealing with paths, we should therefore stick with 132 | `OsStr` and `Path`. Make sure that you only convert to `str`/`String` if you 133 | know that something is always valid UTF-8. If you need more operations on 134 | `OsStr`, you can use the [`bstr`](https://docs.rs/bstr/latest/bstr/) crate. 
135 | 136 | ### Doc-comments 137 | 138 | We use rustdoc for our documentation, so it's best to follow 139 | [rustdoc's guidelines](https://doc.rust-lang.org/rustdoc/how-to-write-documentation.html#documenting-components). 140 | Make sure that your documentation is not just repeating the name of the 141 | function, but actually giving more useful information. Rustdoc recommends the 142 | following structure: 143 | 144 | ``` 145 | [short sentence explaining what it is] 146 | 147 | [more detailed explanation] 148 | 149 | [at least one code example that users can copy/paste to try it] 150 | 151 | [even more advanced explanations if necessary] 152 | ``` 153 | 154 | ### Other comments 155 | 156 | Comments should be written to _explain_ the code, not to _describe_ the code. 157 | Try to focus on explaining _why_ the code is the way it is. If you feel like you 158 | have to describe the code, that's usually a sign that you could improve the 159 | naming of variables and functions. 160 | 161 | If you edit a piece of code, make sure to update any comments that need to 162 | change as a result. The only thing worse than having no comments is having 163 | outdated comments! 164 | 165 | ## Git Etiquette 166 | 167 | To ensure easy collaboration, we have guidelines for using Git and GitHub. 168 | 169 | ### Commits 170 | 171 | - Make small and atomic commits. 172 | - Keep a clean history of commits. 173 | - Write informative commit messages. 174 | - Annotate your commit message with the component you're editing. For example: 175 | `cp: do not overwrite on with -i` or `uucore: add support for FreeBSD`. 176 | - Do not unnecessarily move items around in the code. This makes the changes 177 | much harder to review. If you do need to move things around, do that in a 178 | separate commit. 
179 | 180 | ### Commit messages 181 | 182 | You can read this section in the Git book to learn how to write good commit 183 | messages: https://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project. 184 | 185 | In addition, here are a few examples for a summary line when committing to 186 | uutils: 187 | 188 | - commit for a single utility 189 | 190 | ``` 191 | nohup: cleanup and refactor 192 | ``` 193 | 194 | - commit for a utility's tests 195 | 196 | ``` 197 | tests/rm: test new feature 198 | ``` 199 | 200 | Beyond changes to an individual utility or its tests, other summary lines for 201 | non-utility modules include: 202 | 203 | ``` 204 | README: add help 205 | uucore: add new modules 206 | uutils: add new utility 207 | gitignore: add temporary files 208 | ``` 209 | 210 | ### PRs 211 | 212 | - Make the titles of PRs descriptive. 213 | - This means describing the problem you solve. For example, do not write 214 | `Fix #1234`, but `ls: fix version sort order`. 215 | - You can prefix the title with the utility the PR concerns. 216 | - Keep PRs small and self-contained. A set of small PRs is much more likely to 217 | get merged quickly than one large PR. 218 | - Make sure the CI passes (up to intermittently failing tests). 219 | - You know your code best, that's why it's best if you can solve merge conflicts 220 | on your branch yourself. 221 | - It's up to you whether you want to use `git merge main` or 222 | `git rebase main`. 223 | - Feel free to ask for help with merge conflicts. 224 | - You do not need to ping maintainers to request a review, but it's fine to do 225 | so if you don't get a response within a few days. 226 | 227 | ## Platforms 228 | 229 | We take pride in supporting many operating systems and architectures. Any code 230 | you contribute must at least compile without warnings for all platforms in the 231 | CI. However, you can use `#[cfg(...)]` attributes to create platform dependent 232 | features. 
233 | 234 | **Tip:** For Windows, Microsoft provides some images (VMWare, Hyper-V, 235 | VirtualBox and Parallels) for development: 236 | 237 | 238 | ## Licensing 239 | 240 | uutils is distributed under the terms of the MIT License; see the `LICENSE` file 241 | for details. This is a permissive license, which allows the software to be used 242 | with few restrictions. 243 | 244 | Copyrights in the uutils project are retained by their contributors, and no 245 | copyright assignment is required to contribute. 246 | 247 | If you wish to add or change dependencies as part of a contribution to the 248 | project, a tool like `cargo-license` can be used to show their license details. 249 | The following types of license are acceptable: 250 | 251 | - MIT License 252 | - Dual- or tri-license with an MIT License option ("Apache-2.0 or MIT" is a 253 | popular combination) 254 | - "MIT equivalent" license (2-clause BSD, 3-clause BSD, ISC) 255 | - License less restrictive than the MIT License (CC0 1.0 Universal) 256 | - Apache License version 2.0 257 | 258 | Licenses we will not use: 259 | 260 | - An ambiguous license, or no license 261 | - Strongly reciprocal licenses (GNU GPL, GNU LGPL) 262 | 263 | If you wish to add a reference but it doesn't meet these requirements, please 264 | raise an issue to describe the dependency. 
265 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "findutils" 3 | version = "0.8.0" 4 | homepage = "https://github.com/uutils/findutils" 5 | repository = "https://github.com/uutils/findutils" 6 | edition = "2021" 7 | license = "MIT" 8 | readme = "README.md" 9 | description = "Rust implementation of GNU findutils" 10 | authors = ["uutils developers"] 11 | 12 | [dependencies] 13 | chrono = "0.4.41" 14 | clap = "4.5" 15 | faccess = "0.2.4" 16 | walkdir = "2.5" 17 | regex = "1.11" 18 | onig = { version = "6.5", default-features = false } 19 | uucore = { version = "0.0.30", features = ["entries", "fs", "fsext", "mode"] } 20 | nix = { version = "0.30", features = ["fs", "user"] } 21 | argmax = "0.3.1" 22 | 23 | [dev-dependencies] 24 | assert_cmd = "2" 25 | filetime = "0.2" 26 | nix = { version = "0.30", features = ["fs"] } 27 | predicates = "3" 28 | serial_test = "3.2" 29 | tempfile = "3" 30 | pretty_assertions = "1.4.1" 31 | 32 | [[bin]] 33 | name = "find" 34 | path = "src/find/main.rs" 35 | 36 | [[bin]] 37 | name = "xargs" 38 | path = "src/xargs/main.rs" 39 | 40 | [[bin]] 41 | name = "testing-commandline" 42 | path = "src/testing/commandline/main.rs" 43 | 44 | # The profile that 'cargo dist' will build with 45 | [profile.dist] 46 | inherits = "release" 47 | lto = "thin" 48 | 49 | 50 | [lints.clippy] 51 | multiple_crate_versions = "allow" 52 | cargo_common_metadata = "allow" 53 | uninlined_format_args = "allow" 54 | missing_panics_doc = "allow" 55 | 56 | use_self = "warn" 57 | needless_pass_by_value = "warn" 58 | semicolon_if_nothing_returned = "warn" 59 | single_char_pattern = "warn" 60 | explicit_iter_loop = "warn" 61 | if_not_else = "warn" 62 | manual_let_else = "warn" 63 | # Disable for now, we have a few occurrences 64 | # panic = "warn" 65 | 
-------------------------------------------------------------------------------- /DEVELOPMENT.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Setting up your local development environment 4 | 5 | For contributing rules and best practices please refer to [CONTRIBUTING.md](CONTRIBUTING.md) 6 | 7 | ## Before you start 8 | 9 | For this guide we assume that you already have a GitHub account and have `git` and your favorite code editor or IDE installed and configured. 10 | Before you start working on findutils, please follow these steps: 11 | 12 | 1. Fork the [findutils repository](https://github.com/uutils/findutils) to your GitHub account. 13 | ***Tip:*** See [this GitHub guide](https://docs.github.com/en/get-started/quickstart/fork-a-repo) for more information on this step. 14 | 2. Clone that fork to your local development environment: 15 | 16 | ```shell 17 | git clone https://github.com/YOUR-GITHUB-ACCOUNT/findutils 18 | cd findutils 19 | ``` 20 | 21 | ## Tools 22 | 23 | You will need the tools mentioned in this section to build and test your code changes locally. 24 | This section will explain how to install and configure these tools. 25 | We also have an extensive CI that uses these tools and will check your code before it can be merged. 26 | The next section [Testing](#testing) will explain how to run those checks locally to avoid waiting for the CI. 27 | 28 | ### Rust toolchain 29 | 30 | [Install Rust](https://www.rust-lang.org/tools/install) 31 | 32 | If you're using rustup to install and manage your Rust toolchains, `clippy` and `rustfmt` are usually already installed. If you are using one of the alternative methods, please make sure to install them manually. See following sub-sections for their usage: [clippy](#clippy) [rustfmt](#rustfmt). 
33 | 34 | ***Tip*** You might also need to add 'llvm-tools' component if you are going to [generate code coverage reports locally](#code-coverage-report): 35 | 36 | ```shell 37 | rustup component add llvm-tools-preview 38 | ``` 39 | 40 | ### pre-commit hooks 41 | 42 | A configuration for `pre-commit` is provided in the repository. It allows 43 | automatically checking every git commit you make to ensure it compiles, and 44 | passes `clippy` and `rustfmt` without warnings. 45 | 46 | To use the provided hook: 47 | 48 | 1. [Install `pre-commit`](https://pre-commit.com/#install) 49 | 1. Run `pre-commit install` while in the repository directory 50 | 51 | Your git commits will then automatically be checked. If a check fails, an error 52 | message will explain why, and your commit will be canceled. You can then make 53 | the suggested changes, and run `git commit ...` again. 54 | 55 | **NOTE: On MacOS** the pre-commit hooks are currently broken. There are workarounds involving switching to unstable nightly Rust and components. 56 | 57 | ### clippy 58 | 59 | ```shell 60 | cargo clippy --all-targets --all-features 61 | ``` 62 | 63 | The `msrv` key in the clippy configuration file `clippy.toml` is used to disable 64 | lints pertaining to newer features by specifying the minimum supported Rust 65 | version (MSRV). 66 | 67 | ### rustfmt 68 | 69 | ```shell 70 | cargo fmt --all 71 | ``` 72 | 73 | ### cargo-deny 74 | 75 | This project uses [cargo-deny](https://github.com/EmbarkStudios/cargo-deny/) to 76 | detect duplicate dependencies, checks licenses, etc. To run it locally, first 77 | install it and then run with: 78 | 79 | ```shell 80 | cargo deny --all-features check all 81 | ``` 82 | 83 | ### Markdown linter 84 | 85 | We use [markdownlint](https://github.com/DavidAnson/markdownlint) to lint the 86 | Markdown files in the repository. 87 | 88 | ### Spell checker 89 | 90 | We use `cspell` as spell checker for all files in the project. 
If you are using 91 | VS Code, you can install the 92 | [code spell checker](https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker) 93 | extension to enable spell checking within your editor. Otherwise, you can 94 | install [cspell](https://cspell.org/) separately. 95 | 96 | If you want to make the spell checker ignore a word, you can add 97 | 98 | ```rust 99 | // spell-checker:ignore word_to_ignore 100 | ``` 101 | 102 | at the top of the file. 103 | 104 | ## Testing 105 | 106 | Just like with building, we follow the standard procedure for testing using 107 | Cargo: 108 | 109 | ```shell 110 | cargo test 111 | ``` 112 | 113 | ## Code coverage report 114 | 115 | Code coverage report can be generated using [grcov](https://github.com/mozilla/grcov). 116 | 117 | ### Using Nightly Rust 118 | 119 | To generate [gcov-based](https://github.com/mozilla/grcov#example-how-to-generate-gcda-files-for-a-rust-project) coverage report 120 | 121 | ```shell 122 | export CARGO_INCREMENTAL=0 123 | export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort" 124 | export RUSTDOCFLAGS="-Cpanic=abort" 125 | cargo build 126 | cargo test 127 | grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing --ignore build.rs --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?\#\[derive\()" -o ./target/debug/coverage/ 128 | # open target/debug/coverage/index.html in browser 129 | ``` 130 | 131 | if changes are not reflected in the report then run `cargo clean` and run the above commands. 132 | 133 | ### Using Stable Rust 134 | 135 | If you are using stable version of Rust that doesn't enable code coverage instrumentation by default 136 | then add `-Z-Zinstrument-coverage` flag to `RUSTFLAGS` env variable specified above. 
137 | 138 | ## Tips for setting up on Mac 139 | 140 | ### C Compiler and linker 141 | 142 | On MacOS you'll need to install C compiler & linker: 143 | 144 | ```shell 145 | xcode-select --install 146 | ``` 147 | 148 | ## Tips for setting up on Windows 149 | 150 | ### MSVC build tools 151 | 152 | On Windows you'll need the MSVC build tools for Visual Studio 2013 or later. 153 | 154 | If you are using `rustup-init.exe` to install Rust toolchain, it will guide you through the process of downloading and installing these prerequisites. 155 | 156 | Otherwise please follow [this guide](https://learn.microsoft.com/en-us/windows/dev-environment/rust/setup). 157 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) Google Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so, 8 | subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 15 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 16 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 17 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 18 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # findutils 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/findutils.svg)](https://crates.io/crates/findutils) 4 | [![Discord](https://img.shields.io/badge/discord-join-7289DA.svg?logo=discord&longCache=true&style=flat)](https://discord.gg/wQVJbvJ) 5 | [![License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/uutils/findutils/blob/main/LICENSE) 6 | [![dependency status](https://deps.rs/repo/github/uutils/findutils/status.svg)](https://deps.rs/repo/github/uutils/findutils) 7 | [![codecov](https://codecov.io/gh/uutils/findutils/branch/master/graph/badge.svg)](https://codecov.io/gh/uutils/findutils) 8 | 9 | Rust implementation of [GNU findutils](https://www.gnu.org/software/findutils/): `xargs`, `find`, `locate` and `updatedb`. 10 | The goal is to be a full drop-in replacement of the original commands. 11 | 12 | ## Run the GNU testsuite on rust/findutils: 13 | 14 | ``` 15 | bash util/build-gnu.sh 16 | 17 | # To run a specific test: 18 | bash util/build-gnu.sh tests/misc/help-version.sh 19 | ``` 20 | 21 | ## Comparing with GNU 22 | 23 | ![Evolution over time - GNU testsuite](https://github.com/uutils/findutils-tracking/blob/main/gnu-results.svg?raw=true) 24 | ![Evolution over time - BFS testsuite](https://github.com/uutils/findutils-tracking/blob/main/bfs-results.svg?raw=true) 25 | 26 | ## Build/run with BFS 27 | 28 | [bfs](https://github.com/tavianator/bfs) is a variant of the UNIX find command that operates breadth-first rather than depth-first. 
29 | 30 | ``` 31 | bash util/build-bfs.sh 32 | 33 | # To run a specific test: 34 | bash util/build-bfs.sh posix/basic 35 | ``` 36 | 37 | For more details, see https://github.com/uutils/findutils-tracking/ 38 | -------------------------------------------------------------------------------- /dist-workspace.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cargo:."] 3 | 4 | # Config for 'dist' 5 | [dist] 6 | # The preferred dist version to use in CI (Cargo.toml SemVer syntax) 7 | cargo-dist-version = "0.28.0" 8 | # CI backends to support 9 | ci = "github" 10 | # The installers to generate for each app 11 | installers = ["shell"] 12 | # Target platforms to build apps for (Rust target-triple syntax) 13 | targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] 14 | # Which actions to run on pull requests 15 | pr-run-mode = "plan" 16 | # Path that installers should place binaries in 17 | install-path = "CARGO_HOME" 18 | # Whether to install an updater program 19 | install-updater = false 20 | # Ignore out-of-date contents 21 | allow-dirty = ["ci"] 22 | 23 | [dist.github-custom-runners] 24 | x86_64-unknown-linux-gnu = "ubuntu-latest" 25 | -------------------------------------------------------------------------------- /docs/book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | authors = ["Terts Diepraam"] 3 | language = "en" 4 | multilingual = false 5 | src = "src" 6 | title = "findutils" 7 | 8 | [preprocessor.toc] 9 | command = "mdbook-toc" 10 | renderer = ["html"] 11 | -------------------------------------------------------------------------------- /docs/src/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # Summary 2 | 3 | [Introduction](index.md) 4 | * [Installation](installation.md) 5 | * [Build from source](build.md) 6 | * [Platform 
support](platforms.md) 7 | * [Contributing](contributing.md) 8 | * [GNU test coverage](test_coverage.md) 9 | * [Extensions](extensions.md) 10 | -------------------------------------------------------------------------------- /docs/src/build.md: -------------------------------------------------------------------------------- 1 | # Build from source 2 | 3 | TODO -------------------------------------------------------------------------------- /docs/src/contributing.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | {{ #include ../../CONTRIBUTING.md }} 4 | -------------------------------------------------------------------------------- /docs/src/extensions.md: -------------------------------------------------------------------------------- 1 | # Extensions 2 | 3 | TODO -------------------------------------------------------------------------------- /docs/src/index.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | {{#include logo.svg}} 4 | 5 | 6 | 7 | 32 | 33 | # uutils findutils Documentation 34 | 35 | The uutils findutils project reimplements the GNU findutils in Rust. It is available for Linux, Windows, Mac 36 | and other platforms. 37 | 38 | uutils is licensed under the 39 | [MIT License](https://github.com/uutils/findutils/blob/main/LICENSE). 40 | 41 | ## Useful links 42 | 43 | - [Releases](https://github.com/uutils/findutils/releases) 44 | - [Source Code](https://github.com/uutils/findutils) 45 | - [Issues](https://github.com/uutils/findutils/issues) 46 | - [Discord](https://discord.gg/wQVJbvJ) 47 | 48 | > Note: This manual is automatically generated from the source code and is a 49 | > work in progress. 
50 | -------------------------------------------------------------------------------- /docs/src/installation.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Installation 4 | 5 | This is a list of uutils packages in various distributions and package managers. 6 | Note that these are packaged by third-parties and the packages might contain 7 | patches. 8 | 9 | You can also [build findutils from source](build.md). 10 | 11 | 12 | 13 | ## Cargo 14 | 15 | [![crates.io package](https://repology.org/badge/version-for-repo/crates_io/rust:findutils.svg)](https://crates.io/crates/findutils) 16 | 17 | ```shell 18 | cargo install findutils 19 | ``` 20 | 21 | ## Linux 22 | 23 | ### Debian 24 | 25 | [![Debian 13 package](https://repology.org/badge/version-for-repo/debian_13/rust:findutils.svg)](https://packages.debian.org/trixie/source/rust-findutils) 26 | 27 | [![Debian Unstable package](https://repology.org/badge/version-for-repo/debian_unstable/rust:findutils.svg)](https://packages.debian.org/sid/source/rust-findutils) 28 | 29 | ```shell 30 | apt install rust-findutils 31 | # To use it: 32 | export PATH=/usr/lib/cargo/bin/findutils:$PATH 33 | ``` 34 | 35 | ### Gentoo 36 | 37 | [![Gentoo package](https://repology.org/badge/version-for-repo/gentoo/uutils-findutils.svg)](https://packages.gentoo.org/packages/sys-apps/uutils-findutils) 38 | 39 | ```shell 40 | emerge -pv sys-apps/uutils-findutils 41 | ``` 42 | 43 | ## MacOS 44 | 45 | ### Homebrew 46 | 47 | [![Homebrew package](https://repology.org/badge/version-for-repo/homebrew/uutils-findutils.svg)](https://formulae.brew.sh/formula/uutils-findutils) 48 | 49 | ```shell 50 | brew install uutils-findutils 51 | ``` 52 | 53 | 54 | ## FreeBSD 55 | 56 | [![FreeBSD port](https://repology.org/badge/version-for-repo/freebsd/rust-findutils.svg)](https://repology.org/project/rust-findutils/versions) 57 | 58 | ```sh 59 | pkg install rust-findutils 60 | ``` 61 | 62 | ## Windows 63 | 64 | As far 
as we are aware, `findutils` has not been packaged for any package managers on Windows yet. -------------------------------------------------------------------------------- /docs/src/logo.svg: -------------------------------------------------------------------------------- 1 | 62 | -------------------------------------------------------------------------------- /docs/src/platforms.md: -------------------------------------------------------------------------------- 1 | # Platform support 2 | 3 | TODO -------------------------------------------------------------------------------- /docs/src/test_coverage.md: -------------------------------------------------------------------------------- 1 | # GNU test coverage 2 | 3 | TODO -------------------------------------------------------------------------------- /src/find/main.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | fn main() { 8 | // Ignores the SIGPIPE signal. 9 | // This is to solve the problem that when find is used with a pipe character, 10 | // the downstream software of the standard output stream closes the pipe and triggers a panic. 
11 | uucore::panic::mute_sigpipe_panic(); 12 | 13 | let args = std::env::args().collect::>(); 14 | let strs: Vec<&str> = args.iter().map(std::convert::AsRef::as_ref).collect(); 15 | let deps = findutils::find::StandardDependencies::new(); 16 | std::process::exit(findutils::find::find_main(&strs, &deps)); 17 | } 18 | -------------------------------------------------------------------------------- /src/find/matchers/access.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Tavian Barnes 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use faccess::PathExt; 8 | 9 | use super::{Matcher, MatcherIO, WalkEntry}; 10 | 11 | /// Matcher for -{read,writ,execut}able. 12 | pub enum AccessMatcher { 13 | Readable, 14 | Writable, 15 | Executable, 16 | } 17 | 18 | impl Matcher for AccessMatcher { 19 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 20 | let path = file_info.path(); 21 | 22 | match self { 23 | Self::Readable => path.readable(), 24 | Self::Writable => path.writable(), 25 | Self::Executable => path.executable(), 26 | } 27 | } 28 | } 29 | 30 | #[cfg(test)] 31 | mod tests { 32 | use super::*; 33 | 34 | use crate::find::matchers::tests::get_dir_entry_for; 35 | use crate::find::tests::FakeDependencies; 36 | 37 | #[test] 38 | fn access_matcher() { 39 | let file_info = get_dir_entry_for("test_data/simple", "abbbc"); 40 | let deps = FakeDependencies::new(); 41 | 42 | assert!( 43 | AccessMatcher::Readable.matches(&file_info, &mut deps.new_matcher_io()), 44 | "file should be readable" 45 | ); 46 | 47 | assert!( 48 | AccessMatcher::Writable.matches(&file_info, &mut deps.new_matcher_io()), 49 | "file should be writable" 50 | ); 51 | 52 | #[cfg(unix)] 53 | assert!( 54 | !AccessMatcher::Executable.matches(&file_info, &mut deps.new_matcher_io()), 55 | "file should not be 
executable" 56 | ); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/find/matchers/delete.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * This file is part of the uutils findutils package. 3 | * 4 | * (c) Arcterus 5 | * 6 | * For the full copyright and license information, please view the LICENSE 7 | * file that was distributed with this source code. 8 | */ 9 | 10 | use std::fs; 11 | use std::io::{self, stderr, Write}; 12 | 13 | use super::{Matcher, MatcherIO, WalkEntry}; 14 | 15 | pub struct DeleteMatcher; 16 | 17 | impl DeleteMatcher { 18 | pub fn new() -> Self { 19 | Self 20 | } 21 | 22 | fn delete(&self, entry: &WalkEntry) -> io::Result<()> { 23 | if entry.file_type().is_dir() && !entry.path_is_symlink() { 24 | fs::remove_dir(entry.path()) 25 | } else { 26 | fs::remove_file(entry.path()) 27 | } 28 | } 29 | } 30 | 31 | impl Matcher for DeleteMatcher { 32 | fn matches(&self, file_info: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { 33 | let path = file_info.path(); 34 | let path_str = path.to_string_lossy(); 35 | 36 | // This is a quirk in find's traditional semantics probably due to 37 | // POSIX rmdir() not accepting "." (EINVAL). std::fs::remove_dir() 38 | // inherits the same behavior, so no reason to buck tradition. 39 | if path_str == "." 
{ 40 | return true; 41 | } 42 | 43 | match self.delete(file_info) { 44 | Ok(()) => true, 45 | Err(e) => { 46 | matcher_io.set_exit_code(1); 47 | writeln!(&mut stderr(), "Failed to delete {path_str}: {e}").unwrap(); 48 | false 49 | } 50 | } 51 | } 52 | 53 | fn has_side_effects(&self) -> bool { 54 | true 55 | } 56 | } 57 | 58 | #[cfg(test)] 59 | mod tests { 60 | use std::fs::{create_dir, File}; 61 | use tempfile::Builder; 62 | 63 | use super::*; 64 | use crate::find::matchers::tests::get_dir_entry_for; 65 | use crate::find::tests::FakeDependencies; 66 | 67 | #[test] 68 | fn delete_matcher() { 69 | let matcher = DeleteMatcher::new(); 70 | let deps = FakeDependencies::new(); 71 | 72 | let temp_dir = Builder::new().prefix("test_data").tempdir().unwrap(); 73 | 74 | let temp_dir_path = temp_dir.path().to_string_lossy(); 75 | File::create(temp_dir.path().join("test")).expect("created test file"); 76 | create_dir(temp_dir.path().join("test_dir")).expect("created test directory"); 77 | let test_entry = get_dir_entry_for(&temp_dir_path, "test"); 78 | assert!( 79 | matcher.matches(&test_entry, &mut deps.new_matcher_io()), 80 | "DeleteMatcher should match a simple file", 81 | ); 82 | assert!( 83 | !temp_dir.path().join("test").exists(), 84 | "DeleteMatcher should actually delete files it matches", 85 | ); 86 | 87 | let temp_dir_entry = get_dir_entry_for(&temp_dir_path, "test_dir"); 88 | assert!( 89 | matcher.matches(&temp_dir_entry, &mut deps.new_matcher_io()), 90 | "DeleteMatcher should match directories", 91 | ); 92 | assert!( 93 | !temp_dir.path().join("test_dir").exists(), 94 | "DeleteMatcher should actually delete (empty) directories it matches", 95 | ); 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /src/find/matchers/empty.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Collabora, Ltd. 
2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use std::{ 8 | fs::read_dir, 9 | io::{stderr, Write}, 10 | }; 11 | 12 | use super::{Matcher, MatcherIO, WalkEntry}; 13 | 14 | pub struct EmptyMatcher; 15 | 16 | impl EmptyMatcher { 17 | pub fn new() -> Self { 18 | Self 19 | } 20 | } 21 | 22 | impl Matcher for EmptyMatcher { 23 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 24 | if file_info.file_type().is_file() { 25 | match file_info.metadata() { 26 | Ok(meta) => meta.len() == 0, 27 | Err(err) => { 28 | writeln!( 29 | &mut stderr(), 30 | "Error getting size for {}: {}", 31 | file_info.path().display(), 32 | err 33 | ) 34 | .unwrap(); 35 | false 36 | } 37 | } 38 | } else if file_info.file_type().is_dir() { 39 | match read_dir(file_info.path()) { 40 | Ok(mut it) => it.next().is_none(), 41 | Err(err) => { 42 | writeln!( 43 | &mut stderr(), 44 | "Error getting contents of {}: {}", 45 | file_info.path().display(), 46 | err 47 | ) 48 | .unwrap(); 49 | false 50 | } 51 | } 52 | } else { 53 | false 54 | } 55 | } 56 | } 57 | 58 | #[cfg(test)] 59 | mod tests { 60 | use tempfile::Builder; 61 | 62 | use super::*; 63 | use crate::find::matchers::tests::get_dir_entry_for; 64 | use crate::find::tests::FakeDependencies; 65 | 66 | #[test] 67 | fn empty_files() { 68 | let empty_file_info = get_dir_entry_for("test_data/simple", "abbbc"); 69 | let nonempty_file_info = get_dir_entry_for("test_data/size", "512bytes"); 70 | 71 | let matcher = EmptyMatcher::new(); 72 | let deps = FakeDependencies::new(); 73 | 74 | assert!(matcher.matches(&empty_file_info, &mut deps.new_matcher_io())); 75 | assert!(!matcher.matches(&nonempty_file_info, &mut deps.new_matcher_io())); 76 | } 77 | 78 | #[test] 79 | fn empty_directories() { 80 | let temp_dir = Builder::new() 81 | .prefix("empty_directories") 82 | .tempdir() 83 | .unwrap(); 84 | let temp_dir_path = 
temp_dir.path().to_string_lossy(); 85 | let subdir_name = "subdir"; 86 | std::fs::create_dir(temp_dir.path().join(subdir_name)).unwrap(); 87 | 88 | let matcher = EmptyMatcher::new(); 89 | let deps = FakeDependencies::new(); 90 | 91 | let file_info = get_dir_entry_for(&temp_dir_path, subdir_name); 92 | assert!(matcher.matches(&file_info, &mut deps.new_matcher_io())); 93 | 94 | std::fs::File::create(temp_dir.path().join(subdir_name).join("a")).unwrap(); 95 | 96 | let file_info = get_dir_entry_for(&temp_dir_path, subdir_name); 97 | assert!(!matcher.matches(&file_info, &mut deps.new_matcher_io())); 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/find/matchers/entry.rs: -------------------------------------------------------------------------------- 1 | //! Paths encountered during a walk. 2 | 3 | use std::cell::OnceCell; 4 | use std::error::Error; 5 | use std::ffi::OsStr; 6 | use std::fmt::{self, Display, Formatter}; 7 | use std::fs::{self, Metadata}; 8 | use std::io::{self, ErrorKind}; 9 | #[cfg(unix)] 10 | use std::os::unix::fs::FileTypeExt; 11 | use std::path::{Path, PathBuf}; 12 | 13 | use walkdir::DirEntry; 14 | 15 | use super::Follow; 16 | 17 | /// Wrapper for a directory entry. 18 | #[derive(Debug)] 19 | enum Entry { 20 | /// Wraps an explicit path and depth. 21 | Explicit(PathBuf, usize), 22 | /// Wraps a WalkDir entry. 23 | WalkDir(DirEntry), 24 | } 25 | 26 | /// File types. 
27 | #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] 28 | pub enum FileType { 29 | Unknown, 30 | Fifo, 31 | CharDevice, 32 | Directory, 33 | BlockDevice, 34 | Regular, 35 | Symlink, 36 | Socket, 37 | } 38 | 39 | impl FileType { 40 | pub fn is_dir(self) -> bool { 41 | self == Self::Directory 42 | } 43 | 44 | pub fn is_file(self) -> bool { 45 | self == Self::Regular 46 | } 47 | 48 | pub fn is_symlink(self) -> bool { 49 | self == Self::Symlink 50 | } 51 | } 52 | 53 | impl From for FileType { 54 | fn from(t: fs::FileType) -> Self { 55 | if t.is_dir() { 56 | return Self::Directory; 57 | } 58 | if t.is_file() { 59 | return Self::Regular; 60 | } 61 | if t.is_symlink() { 62 | return Self::Symlink; 63 | } 64 | 65 | #[cfg(unix)] 66 | { 67 | if t.is_fifo() { 68 | return Self::Fifo; 69 | } 70 | if t.is_char_device() { 71 | return Self::CharDevice; 72 | } 73 | if t.is_block_device() { 74 | return Self::BlockDevice; 75 | } 76 | if t.is_socket() { 77 | return Self::Socket; 78 | } 79 | } 80 | 81 | Self::Unknown 82 | } 83 | } 84 | 85 | /// An error encountered while walking a file system. 86 | #[derive(Clone, Debug)] 87 | pub struct WalkError { 88 | /// The path that caused the error, if known. 89 | path: Option, 90 | /// The depth below the root path, if known. 91 | depth: Option, 92 | /// The io::Error::raw_os_error(), if known. 93 | raw: Option, 94 | } 95 | 96 | impl WalkError { 97 | /// Get the path this error occurred on, if known. 98 | pub fn path(&self) -> Option<&Path> { 99 | self.path.as_deref() 100 | } 101 | 102 | /// Get the traversal depth when this error occurred, if known. 103 | pub fn depth(&self) -> Option { 104 | self.depth 105 | } 106 | 107 | /// Get the kind of I/O error. 108 | pub fn kind(&self) -> ErrorKind { 109 | io::Error::from(self).kind() 110 | } 111 | 112 | /// Check for ErrorKind::{NotFound,NotADirectory}. 
113 | pub fn is_not_found(&self) -> bool { 114 | if self.kind() == ErrorKind::NotFound { 115 | return true; 116 | } 117 | 118 | // NotADirectory is nightly-only 119 | #[cfg(unix)] 120 | { 121 | if self.raw == Some(uucore::libc::ENOTDIR) { 122 | return true; 123 | } 124 | } 125 | 126 | false 127 | } 128 | 129 | /// Check for ErrorKind::FilesystemLoop. 130 | pub fn is_loop(&self) -> bool { 131 | #[cfg(unix)] 132 | return self.raw == Some(uucore::libc::ELOOP); 133 | 134 | #[cfg(not(unix))] 135 | return false; 136 | } 137 | } 138 | 139 | impl Display for WalkError { 140 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { 141 | let ioe = io::Error::from(self); 142 | if let Some(path) = &self.path { 143 | write!(f, "{}: {}", path.display(), ioe) 144 | } else { 145 | write!(f, "{}", ioe) 146 | } 147 | } 148 | } 149 | 150 | impl Error for WalkError {} 151 | 152 | impl From for WalkError { 153 | fn from(e: io::Error) -> Self { 154 | Self::from(&e) 155 | } 156 | } 157 | 158 | impl From<&io::Error> for WalkError { 159 | fn from(e: &io::Error) -> Self { 160 | Self { 161 | path: None, 162 | depth: None, 163 | raw: e.raw_os_error(), 164 | } 165 | } 166 | } 167 | 168 | impl From for WalkError { 169 | fn from(e: walkdir::Error) -> Self { 170 | Self::from(&e) 171 | } 172 | } 173 | 174 | impl From<&walkdir::Error> for WalkError { 175 | fn from(e: &walkdir::Error) -> Self { 176 | Self { 177 | path: e.path().map(|p| p.to_owned()), 178 | depth: Some(e.depth()), 179 | raw: e.io_error().and_then(|e| e.raw_os_error()), 180 | } 181 | } 182 | } 183 | 184 | impl From for io::Error { 185 | fn from(e: WalkError) -> Self { 186 | Self::from(&e) 187 | } 188 | } 189 | 190 | impl From<&WalkError> for io::Error { 191 | fn from(e: &WalkError) -> Self { 192 | e.raw 193 | .map(Self::from_raw_os_error) 194 | .unwrap_or_else(|| ErrorKind::Other.into()) 195 | } 196 | } 197 | 198 | /// A path encountered while walking a file system. 
199 | #[derive(Debug)] 200 | pub struct WalkEntry { 201 | /// The wrapped path/dirent. 202 | inner: Entry, 203 | /// Whether to follow symlinks. 204 | follow: Follow, 205 | /// Cached metadata. 206 | meta: OnceCell>, 207 | } 208 | 209 | impl WalkEntry { 210 | /// Create a new WalkEntry for a specific file. 211 | pub fn new(path: impl Into, depth: usize, follow: Follow) -> Self { 212 | Self { 213 | inner: Entry::Explicit(path.into(), depth), 214 | follow, 215 | meta: OnceCell::new(), 216 | } 217 | } 218 | 219 | /// Convert a [walkdir::DirEntry] to a [WalkEntry]. Errors due to broken symbolic links will be 220 | /// converted to valid entries, but other errors will be propagated. 221 | pub fn from_walkdir( 222 | result: walkdir::Result, 223 | follow: Follow, 224 | ) -> Result { 225 | let result = result.map_err(WalkError::from); 226 | 227 | match result { 228 | Ok(entry) => { 229 | let ret = if entry.depth() == 0 && follow != Follow::Never { 230 | // DirEntry::file_type() is wrong for root symlinks when follow_root_links is set 231 | Self::new(entry.path(), 0, follow) 232 | } else { 233 | Self { 234 | inner: Entry::WalkDir(entry), 235 | follow, 236 | meta: OnceCell::new(), 237 | } 238 | }; 239 | Ok(ret) 240 | } 241 | Err(e) if e.is_not_found() => { 242 | // Detect broken symlinks and replace them with explicit entries 243 | if let (Some(path), Some(depth)) = (e.path(), e.depth()) { 244 | if let Ok(meta) = path.symlink_metadata() { 245 | return Ok(Self { 246 | inner: Entry::Explicit(path.into(), depth), 247 | follow: Follow::Never, 248 | meta: Ok(meta).into(), 249 | }); 250 | } 251 | } 252 | 253 | Err(e) 254 | } 255 | Err(e) => Err(e), 256 | } 257 | } 258 | 259 | /// Get the path to this entry. 260 | pub fn path(&self) -> &Path { 261 | match &self.inner { 262 | Entry::Explicit(path, _) => path.as_path(), 263 | Entry::WalkDir(ent) => ent.path(), 264 | } 265 | } 266 | 267 | /// Get the path to this entry. 
268 | pub fn into_path(self) -> PathBuf { 269 | match self.inner { 270 | Entry::Explicit(path, _) => path, 271 | Entry::WalkDir(ent) => ent.into_path(), 272 | } 273 | } 274 | 275 | /// Get the name of this entry. 276 | pub fn file_name(&self) -> &OsStr { 277 | match &self.inner { 278 | Entry::Explicit(path, _) => { 279 | // Path::file_name() only works if the last component is normal 280 | path.components() 281 | .next_back() 282 | .map(|c| c.as_os_str()) 283 | .unwrap_or_else(|| path.as_os_str()) 284 | } 285 | Entry::WalkDir(ent) => ent.file_name(), 286 | } 287 | } 288 | 289 | /// Get the depth of this entry below the root. 290 | pub fn depth(&self) -> usize { 291 | match &self.inner { 292 | Entry::Explicit(_, depth) => *depth, 293 | Entry::WalkDir(ent) => ent.depth(), 294 | } 295 | } 296 | 297 | /// Get whether symbolic links are followed for this entry. 298 | pub fn follow(&self) -> bool { 299 | self.follow.follow_at_depth(self.depth()) 300 | } 301 | 302 | /// Get the metadata on a cache miss. 303 | fn get_metadata(&self) -> Result { 304 | self.follow.metadata_at_depth(self.path(), self.depth()) 305 | } 306 | 307 | /// Get the [Metadata] for this entry, following symbolic links if appropriate. 308 | /// Multiple calls to this function will cache and re-use the same [Metadata]. 309 | pub fn metadata(&self) -> Result<&Metadata, WalkError> { 310 | let result = self.meta.get_or_init(|| match &self.inner { 311 | Entry::Explicit(_, _) => Ok(self.get_metadata()?), 312 | Entry::WalkDir(ent) => Ok(ent.metadata()?), 313 | }); 314 | result.as_ref().map_err(|e| e.clone()) 315 | } 316 | 317 | /// Get the file type of this entry. 
318 | pub fn file_type(&self) -> FileType { 319 | match &self.inner { 320 | Entry::Explicit(_, _) => self 321 | .metadata() 322 | .map(|m| m.file_type().into()) 323 | .unwrap_or(FileType::Unknown), 324 | Entry::WalkDir(ent) => ent.file_type().into(), 325 | } 326 | } 327 | 328 | /// Check whether this entry is a symbolic link, regardless of whether links 329 | /// are being followed. 330 | pub fn path_is_symlink(&self) -> bool { 331 | match &self.inner { 332 | Entry::Explicit(path, _) => { 333 | if self.follow() { 334 | path.symlink_metadata() 335 | .is_ok_and(|m| m.file_type().is_symlink()) 336 | } else { 337 | self.file_type().is_symlink() 338 | } 339 | } 340 | Entry::WalkDir(ent) => ent.path_is_symlink(), 341 | } 342 | } 343 | } 344 | -------------------------------------------------------------------------------- /src/find/matchers/exec.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 
6 | 7 | use std::cell::RefCell; 8 | use std::error::Error; 9 | use std::ffi::OsString; 10 | use std::io::{stderr, Write}; 11 | use std::path::Path; 12 | use std::process::Command; 13 | 14 | use super::{Matcher, MatcherIO, WalkEntry}; 15 | 16 | enum Arg { 17 | FileArg(Vec), 18 | LiteralArg(OsString), 19 | } 20 | 21 | pub struct SingleExecMatcher { 22 | executable: String, 23 | args: Vec, 24 | exec_in_parent_dir: bool, 25 | } 26 | 27 | impl SingleExecMatcher { 28 | pub fn new( 29 | executable: &str, 30 | args: &[&str], 31 | exec_in_parent_dir: bool, 32 | ) -> Result> { 33 | let transformed_args = args 34 | .iter() 35 | .map(|&a| { 36 | let parts = a.split("{}").collect::>(); 37 | if parts.len() == 1 { 38 | // No {} present 39 | Arg::LiteralArg(OsString::from(a)) 40 | } else { 41 | Arg::FileArg(parts.iter().map(OsString::from).collect()) 42 | } 43 | }) 44 | .collect(); 45 | 46 | Ok(Self { 47 | executable: executable.to_string(), 48 | args: transformed_args, 49 | exec_in_parent_dir, 50 | }) 51 | } 52 | } 53 | 54 | impl Matcher for SingleExecMatcher { 55 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 56 | let mut command = Command::new(&self.executable); 57 | let path_to_file = if self.exec_in_parent_dir { 58 | if let Some(f) = file_info.path().file_name() { 59 | Path::new(".").join(f) 60 | } else { 61 | Path::new(".").join(file_info.path()) 62 | } 63 | } else { 64 | file_info.path().to_path_buf() 65 | }; 66 | 67 | for arg in &self.args { 68 | match *arg { 69 | Arg::LiteralArg(ref a) => command.arg(a.as_os_str()), 70 | Arg::FileArg(ref parts) => command.arg(parts.join(path_to_file.as_os_str())), 71 | }; 72 | } 73 | if self.exec_in_parent_dir { 74 | match file_info.path().parent() { 75 | None => { 76 | // Root paths like "/" have no parent. Run them from the root to match GNU find. 77 | command.current_dir(file_info.path()); 78 | } 79 | Some(parent) if parent == Path::new("") => { 80 | // Paths like "foo" have a parent of "". Avoid chdir(""). 
81 | } 82 | Some(parent) => { 83 | command.current_dir(parent); 84 | } 85 | } 86 | } 87 | match command.status() { 88 | Ok(status) => status.success(), 89 | Err(e) => { 90 | writeln!(&mut stderr(), "Failed to run {}: {}", self.executable, e).unwrap(); 91 | false 92 | } 93 | } 94 | } 95 | 96 | fn has_side_effects(&self) -> bool { 97 | true 98 | } 99 | } 100 | 101 | pub struct MultiExecMatcher { 102 | executable: String, 103 | args: Vec, 104 | exec_in_parent_dir: bool, 105 | /// Command to build while matching. 106 | command: RefCell>, 107 | } 108 | 109 | impl MultiExecMatcher { 110 | pub fn new( 111 | executable: &str, 112 | args: &[&str], 113 | exec_in_parent_dir: bool, 114 | ) -> Result> { 115 | let transformed_args = args.iter().map(OsString::from).collect(); 116 | 117 | Ok(Self { 118 | executable: executable.to_string(), 119 | args: transformed_args, 120 | exec_in_parent_dir, 121 | command: RefCell::new(None), 122 | }) 123 | } 124 | 125 | fn new_command(&self) -> argmax::Command { 126 | let mut command = argmax::Command::new(&self.executable); 127 | command.try_args(&self.args).unwrap(); 128 | command 129 | } 130 | 131 | fn run_command(&self, command: &mut argmax::Command, matcher_io: &mut MatcherIO) { 132 | match command.status() { 133 | Ok(status) => { 134 | if !status.success() { 135 | matcher_io.set_exit_code(1); 136 | } 137 | } 138 | Err(e) => { 139 | writeln!(&mut stderr(), "Failed to run {}: {}", self.executable, e).unwrap(); 140 | matcher_io.set_exit_code(1); 141 | } 142 | } 143 | } 144 | } 145 | 146 | impl Matcher for MultiExecMatcher { 147 | fn matches(&self, file_info: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { 148 | let path_to_file = if self.exec_in_parent_dir { 149 | if let Some(f) = file_info.path().file_name() { 150 | Path::new(".").join(f) 151 | } else { 152 | Path::new(".").join(file_info.path()) 153 | } 154 | } else { 155 | file_info.path().to_path_buf() 156 | }; 157 | let mut command = self.command.borrow_mut(); 158 | let command = 
command.get_or_insert_with(|| self.new_command()); 159 | 160 | // Build command, or dispatch it before when it is long enough. 161 | if command.try_arg(&path_to_file).is_err() { 162 | if self.exec_in_parent_dir { 163 | match file_info.path().parent() { 164 | None => { 165 | // Root paths like "/" have no parent. Run them from the root to match GNU find. 166 | command.current_dir(file_info.path()); 167 | } 168 | Some(parent) if parent == Path::new("") => { 169 | // Paths like "foo" have a parent of "". Avoid chdir(""). 170 | } 171 | Some(parent) => { 172 | command.current_dir(parent); 173 | } 174 | } 175 | } 176 | self.run_command(command, matcher_io); 177 | 178 | // Reset command status. 179 | *command = self.new_command(); 180 | if let Err(e) = command.try_arg(&path_to_file) { 181 | writeln!( 182 | &mut stderr(), 183 | "Cannot fit a single argument {}: {}", 184 | &path_to_file.to_string_lossy(), 185 | e 186 | ) 187 | .unwrap(); 188 | matcher_io.set_exit_code(1); 189 | } 190 | } 191 | true 192 | } 193 | 194 | fn finished_dir(&self, dir: &Path, matcher_io: &mut MatcherIO) { 195 | // Dispatch command for -execdir. 196 | if self.exec_in_parent_dir { 197 | let mut command = self.command.borrow_mut(); 198 | if let Some(mut command) = command.take() { 199 | command.current_dir(Path::new(".").join(dir)); 200 | self.run_command(&mut command, matcher_io); 201 | } 202 | } 203 | } 204 | 205 | fn finished(&self, matcher_io: &mut MatcherIO) { 206 | // Dispatch command for -exec. 207 | if !self.exec_in_parent_dir { 208 | let mut command = self.command.borrow_mut(); 209 | if let Some(mut command) = command.take() { 210 | self.run_command(&mut command, matcher_io); 211 | } 212 | } 213 | } 214 | 215 | fn has_side_effects(&self) -> bool { 216 | true 217 | } 218 | } 219 | 220 | #[cfg(test)] 221 | /// No tests here, because we need to call out to an external executable. See 222 | /// `tests/exec_unit_tests.rs` instead. 
223 | mod tests {} 224 | -------------------------------------------------------------------------------- /src/find/matchers/fs.rs: -------------------------------------------------------------------------------- 1 | // This file is part of the uutils findutils package. 2 | // 3 | // For the full copyright and license information, please view the LICENSE 4 | // file that was distributed with this source code. 5 | use super::{Matcher, MatcherIO, WalkEntry}; 6 | #[cfg(unix)] 7 | use uucore::error::UResult; 8 | 9 | /// The latest mapping from dev_id to fs_type, used for saving mount info reads 10 | #[cfg(unix)] 11 | pub struct Cache { 12 | dev_id: String, 13 | fs_type: String, 14 | } 15 | 16 | /// Get the filesystem type of a file. 17 | /// 1. get the metadata of the file 18 | /// 2. get the device ID of the metadata 19 | /// 3. search the cache, then the filesystem list 20 | /// 21 | /// Returns an empty string when no file system list matches. 22 | /// 23 | /// # Errors 24 | /// Returns an error if the metadata could not be read. 25 | /// Returns an error if the filesystem list could not be read. 26 | /// 27 | /// This is only supported on Unix. 28 | #[cfg(unix)] 29 | use std::{ 30 | cell::RefCell, 31 | io::{stderr, Write}, 32 | path::Path, 33 | }; 34 | 35 | #[cfg(unix)] 36 | pub fn get_file_system_type(path: &Path, cache: &RefCell>) -> UResult { 37 | use std::os::unix::fs::MetadataExt; 38 | 39 | // use symlink_metadata (lstat under the hood) instead of metadata (stat) to make sure that it 40 | // does not return an error when there is a (broken) symlink; this is aligned with GNU find. 
41 | let metadata = match path.symlink_metadata() { 42 | Ok(metadata) => metadata, 43 | Err(err) => Err(err)?, 44 | }; 45 | let dev_id = metadata.dev().to_string(); 46 | 47 | if let Some(cache) = cache.borrow().as_ref() { 48 | if cache.dev_id == dev_id { 49 | return Ok(cache.fs_type.clone()); 50 | } 51 | } 52 | 53 | let fs_list = uucore::fsext::read_fs_list()?; 54 | let result = fs_list 55 | .into_iter() 56 | .filter(|fs| fs.dev_id == dev_id) 57 | .next_back() 58 | .map_or_else(String::new, |fs| fs.fs_type); 59 | 60 | // cache the latest query if not a match before 61 | cache.replace(Some(Cache { 62 | dev_id, 63 | fs_type: result.clone(), 64 | })); 65 | 66 | Ok(result) 67 | } 68 | 69 | /// This matcher handles the -fstype argument. 70 | /// It matches the filesystem type of the file. 71 | /// 72 | /// This is only supported on Unix. 73 | pub struct FileSystemMatcher { 74 | #[cfg(unix)] 75 | fs_text: String, 76 | #[cfg(unix)] 77 | cache: RefCell>, 78 | } 79 | 80 | impl FileSystemMatcher { 81 | #[cfg(unix)] 82 | pub fn new(fs_text: String) -> Self { 83 | Self { 84 | fs_text, 85 | cache: RefCell::new(None), 86 | } 87 | } 88 | 89 | #[cfg(not(unix))] 90 | pub fn new(_fs_text: String) -> Self { 91 | Self {} 92 | } 93 | } 94 | 95 | impl Matcher for FileSystemMatcher { 96 | #[cfg(unix)] 97 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 98 | match get_file_system_type(file_info.path(), &self.cache) { 99 | Ok(result) => result == self.fs_text, 100 | Err(_) => { 101 | writeln!( 102 | &mut stderr(), 103 | "Error getting filesystem type for {}", 104 | file_info.path().to_string_lossy() 105 | ) 106 | .unwrap(); 107 | 108 | false 109 | } 110 | } 111 | } 112 | 113 | #[cfg(not(unix))] 114 | fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 115 | false 116 | } 117 | } 118 | 119 | #[cfg(test)] 120 | mod tests { 121 | #[test] 122 | #[cfg(unix)] 123 | fn test_fs_matcher() { 124 | use crate::find::{ 125 | matchers::{ 126 | 
fs::{get_file_system_type, Cache}, 127 | tests::get_dir_entry_for, 128 | Matcher, 129 | }, 130 | tests::FakeDependencies, 131 | }; 132 | use std::cell::RefCell; 133 | use std::fs::File; 134 | use tempfile::Builder; 135 | 136 | let deps = FakeDependencies::new(); 137 | let mut matcher_io = deps.new_matcher_io(); 138 | 139 | // create temp file and get its fs type 140 | // We pass this file and the corresponding file system type into the Matcher for comparison. 141 | let temp_dir = Builder::new().prefix("fs_matcher").tempdir().unwrap(); 142 | let foo_path = temp_dir.path().join("foo"); 143 | let _ = File::create(foo_path).expect("create temp file"); 144 | let file_info = get_dir_entry_for(&temp_dir.path().to_string_lossy(), "foo"); 145 | 146 | // create an empty cache for initial fs type lookup 147 | let empty_cache = RefCell::new(None); 148 | let target_fs_type = get_file_system_type(file_info.path(), &empty_cache).unwrap(); 149 | 150 | // should work with unmatched cache, and the cache should be set to the last query result 151 | let unmatched_cache = RefCell::new(Some(Cache { 152 | dev_id: "foo".to_string(), 153 | fs_type: "bar".to_string(), 154 | })); 155 | let target_fs_type_unmatched_cache = 156 | get_file_system_type(file_info.path(), &unmatched_cache).unwrap(); 157 | assert_eq!( 158 | target_fs_type, target_fs_type_unmatched_cache, 159 | "get_file_system_type should return correct result with unmatched cache" 160 | ); 161 | assert_eq!( 162 | unmatched_cache.borrow().as_ref().unwrap().fs_type, 163 | target_fs_type, 164 | "get_file_system_type should set the cache to the last query result" 165 | ); 166 | 167 | // should match fs type 168 | let matcher = super::FileSystemMatcher::new(target_fs_type.clone()); 169 | assert!( 170 | matcher.matches(&file_info, &mut matcher_io), 171 | "{} should match {}", 172 | file_info.path().to_string_lossy(), 173 | target_fs_type 174 | ); 175 | 176 | // should not match fs type 177 | let matcher = 
super::FileSystemMatcher::new(target_fs_type.clone() + "foo"); 178 | assert!( 179 | !matcher.matches(&file_info, &mut matcher_io), 180 | "{} should not match {}", 181 | file_info.path().to_string_lossy(), 182 | target_fs_type 183 | ); 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /src/find/matchers/glob.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Tavian Barnes 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use onig::{Regex, RegexOptions, Syntax}; 8 | 9 | /// Parse a string as a POSIX Basic Regular Expression. 10 | fn parse_bre(expr: &str, options: RegexOptions) -> Result { 11 | let bre = Syntax::posix_basic(); 12 | Regex::with_options(expr, bre.options() | options, bre) 13 | } 14 | 15 | /// Push a literal character onto a regex, escaping it if necessary. 16 | fn regex_push_literal(regex: &mut String, ch: char) { 17 | // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_03 18 | if matches!(ch, '.' | '[' | '\\' | '*' | '^' | '$') { 19 | regex.push('\\'); 20 | } 21 | regex.push(ch); 22 | } 23 | 24 | /// Extracts a bracket expression from a glob. 25 | fn extract_bracket_expr(pattern: &str) -> Option<(String, &str)> { 26 | // https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_13_01 27 | // 28 | // If an open bracket introduces a bracket expression as in XBD RE Bracket Expression, 29 | // except that the character ( '!' ) shall replace the 30 | // character ( '^' ) in its role in a non-matching list in the regular expression notation, 31 | // it shall introduce a pattern bracket expression. A bracket expression starting with an 32 | // unquoted character produces unspecified results. Otherwise, '[' shall match 33 | // the character itself. 
34 | // 35 | // To check for valid bracket expressions, we scan for the closing bracket and 36 | // attempt to parse that segment as a regex. If that fails, we treat the '[' 37 | // literally. 38 | 39 | let mut expr = "[".to_string(); 40 | 41 | let mut chars = pattern.chars(); 42 | let mut next = chars.next(); 43 | 44 | // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 45 | // 46 | // 3. A non-matching list expression begins with a ( '^' ) ... 47 | // 48 | // (but in a glob, '!' is used instead of '^') 49 | if next == Some('!') { 50 | expr.push('^'); 51 | next = chars.next(); 52 | } 53 | 54 | // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 55 | // 56 | // 1. ... The ( ']' ) shall lose its special meaning and represent 57 | // itself in a bracket expression if it occurs first in the list (after an initial 58 | // ( '^' ), if any). 59 | if next == Some(']') { 60 | expr.push(']'); 61 | next = chars.next(); 62 | } 63 | 64 | while let Some(ch) = next { 65 | expr.push(ch); 66 | 67 | match ch { 68 | '[' => { 69 | // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 70 | // 71 | // 4. A collating symbol is a collating element enclosed within bracket-period 72 | // ( "[." and ".]" ) delimiters. ... 73 | // 74 | // 5. An equivalence class expression shall ... be expressed by enclosing any 75 | // one of the collating elements in the equivalence class within bracket- 76 | // equal ( "[=" and "=]" ) delimiters. 77 | // 78 | // 6. ... A character class expression is expressed as a character class name 79 | // enclosed within bracket- ( "[:" and ":]" ) delimiters. 80 | next = chars.next(); 81 | if let Some(delim) = next { 82 | expr.push(delim); 83 | 84 | if matches!(delim, '.' | '=' | ':') { 85 | let rest = chars.as_str(); 86 | let end = rest.find([delim, ']'])? 
+ 2; 87 | expr.push_str(&rest[..end]); 88 | chars = rest[end..].chars(); 89 | } 90 | } 91 | } 92 | ']' => { 93 | // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 94 | // 95 | // 1. ... The ( ']' ) shall ... terminate the bracket 96 | // expression, unless it appears in a collating symbol (such as "[.].]" ) or is 97 | // the ending for a collating symbol, equivalence class, 98 | // or character class. 99 | break; 100 | } 101 | _ => {} 102 | } 103 | 104 | next = chars.next(); 105 | } 106 | 107 | if parse_bre(&expr, RegexOptions::REGEX_OPTION_NONE).is_ok() { 108 | Some((expr, chars.as_str())) 109 | } else { 110 | None 111 | } 112 | } 113 | 114 | /// Converts a POSIX glob into a POSIX Basic Regular Expression 115 | fn glob_to_regex(pattern: &str) -> Option { 116 | let mut regex = String::new(); 117 | 118 | let mut chars = pattern.chars(); 119 | while let Some(ch) = chars.next() { 120 | // https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_13 121 | match ch { 122 | '?' => regex.push('.'), 123 | '*' => regex.push_str(".*"), 124 | '\\' => { 125 | if let Some(ch) = chars.next() { 126 | regex_push_literal(&mut regex, ch); 127 | } else { 128 | // https://pubs.opengroup.org/onlinepubs/9699919799/functions/fnmatch.html 129 | // 130 | // If pattern ends with an unescaped , fnmatch() shall return a 131 | // non-zero value (indicating either no match or an error). 132 | // 133 | // Most implementations return FNM_NOMATCH in this case, so create a pattern that 134 | // never matches. 135 | return None; 136 | } 137 | } 138 | '[' => { 139 | if let Some((expr, rest)) = extract_bracket_expr(chars.as_str()) { 140 | regex.push_str(&expr); 141 | chars = rest.chars(); 142 | } else { 143 | regex_push_literal(&mut regex, ch); 144 | } 145 | } 146 | _ => regex_push_literal(&mut regex, ch), 147 | } 148 | } 149 | 150 | Some(regex) 151 | } 152 | 153 | /// An fnmatch()-style glob matcher. 
pub struct Pattern {
    // The dump had stripped the generic parameter; `is_match` below requires
    // a compiled onig `Regex`. `None` encodes a pattern that never matches.
    regex: Option<Regex>,
}

impl Pattern {
    /// Parse an fnmatch()-style glob.
    pub fn new(pattern: &str, caseless: bool) -> Self {
        let options = if caseless {
            RegexOptions::REGEX_OPTION_IGNORECASE
        } else {
            RegexOptions::REGEX_OPTION_NONE
        };

        // As long as glob_to_regex() is correct, this should never fail
        let regex = glob_to_regex(pattern).map(|r| parse_bre(&r, options).unwrap());
        Self { regex }
    }

    /// Test if this pattern matches a string.
    pub fn matches(&self, string: &str) -> bool {
        self.regex.as_ref().is_some_and(|r| r.is_match(string))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[track_caller]
    fn assert_glob_regex(glob: &str, regex: &str) {
        assert_eq!(glob_to_regex(glob).as_deref(), Some(regex));
    }

    #[test]
    fn literals() {
        assert_glob_regex(r"foo.bar", r"foo\.bar");
    }

    #[test]
    fn regex_special() {
        assert_glob_regex(r"^foo.bar$", r"\^foo\.bar\$");
    }

    #[test]
    fn wildcards() {
        assert_glob_regex(r"foo?bar*baz", r"foo.bar.*baz");
    }

    #[test]
    fn escapes() {
        assert_glob_regex(r"fo\o\?bar\*baz\\", r"foo?bar\*baz\\");
    }

    #[test]
    fn valid_brackets() {
        assert_glob_regex(r"foo[bar][!baz]", r"foo[bar][^baz]");
    }

    #[test]
    fn complex_brackets() {
        assert_glob_regex(
            r"[!]!.*[\[.].][=]=][:space:]-]",
            r"[^]!.*[\[.].][=]=][:space:]-]",
        );
    }

    #[test]
    fn invalid_brackets() {
        assert_glob_regex(r"foo[bar[!baz", r"foo\[bar\[!baz");
    }

    #[test]
    fn incomplete_escape() {
        assert_eq!(glob_to_regex(r"foo\"), None);
    }

    #[test]
    fn pattern_matches() {
        assert!(Pattern::new(r"foo*bar", false).matches("foo--bar"));

assert!(!Pattern::new(r"foo*bar", false).matches("bar--foo")); 235 | } 236 | 237 | #[test] 238 | fn caseless_matches() { 239 | assert!(Pattern::new(r"foo*BAR", true).matches("FOO--bar")); 240 | 241 | assert!(!Pattern::new(r"foo*BAR", true).matches("BAR--foo")); 242 | } 243 | 244 | #[test] 245 | fn incomplete_escape_matches() { 246 | assert!(!Pattern::new(r"foo\", false).matches("\n")); 247 | } 248 | } 249 | -------------------------------------------------------------------------------- /src/find/matchers/group.rs: -------------------------------------------------------------------------------- 1 | // This file is part of the uutils findutils package. 2 | // 3 | // For the full copyright and license information, please view the LICENSE 4 | // file that was distributed with this source code. 5 | 6 | use super::{ComparableValue, Matcher, MatcherIO, WalkEntry}; 7 | 8 | #[cfg(unix)] 9 | use nix::unistd::Group; 10 | #[cfg(unix)] 11 | use std::os::unix::fs::MetadataExt; 12 | 13 | pub struct GroupMatcher { 14 | gid: ComparableValue, 15 | } 16 | 17 | impl GroupMatcher { 18 | #[cfg(unix)] 19 | pub fn from_group_name(group: &str) -> Option { 20 | // get gid from group name 21 | let group = Group::from_name(group).ok()??; 22 | let gid = group.gid.as_raw(); 23 | Some(Self::from_gid(gid)) 24 | } 25 | 26 | pub fn from_gid(gid: u32) -> Self { 27 | Self::from_comparable(ComparableValue::EqualTo(gid as u64)) 28 | } 29 | 30 | pub fn from_comparable(gid: ComparableValue) -> Self { 31 | Self { gid } 32 | } 33 | 34 | #[cfg(windows)] 35 | pub fn from_group_name(_group: &str) -> Option { 36 | None 37 | } 38 | } 39 | 40 | impl Matcher for GroupMatcher { 41 | #[cfg(unix)] 42 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 43 | match file_info.metadata() { 44 | Ok(metadata) => self.gid.matches(metadata.gid().into()), 45 | Err(_) => false, 46 | } 47 | } 48 | 49 | #[cfg(windows)] 50 | fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 51 | // The 
user group acquisition function for Windows systems is not implemented in MetadataExt, 52 | // so it is somewhat difficult to implement it. :( 53 | false 54 | } 55 | } 56 | 57 | pub struct NoGroupMatcher {} 58 | 59 | impl Matcher for NoGroupMatcher { 60 | #[cfg(unix)] 61 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 62 | use nix::unistd::Gid; 63 | 64 | if file_info.path().is_symlink() { 65 | return false; 66 | } 67 | 68 | let Ok(metadata) = file_info.metadata() else { 69 | return true; 70 | }; 71 | 72 | let Ok(gid) = Group::from_gid(Gid::from_raw(metadata.gid())) else { 73 | return true; 74 | }; 75 | 76 | let Some(_group) = gid else { 77 | return true; 78 | }; 79 | 80 | false 81 | } 82 | 83 | #[cfg(windows)] 84 | fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 85 | false 86 | } 87 | } 88 | 89 | #[cfg(test)] 90 | mod tests { 91 | #[test] 92 | #[cfg(unix)] 93 | fn test_group_matcher() { 94 | use crate::find::matchers::{group::GroupMatcher, tests::get_dir_entry_for, Matcher}; 95 | use crate::find::tests::FakeDependencies; 96 | use chrono::Local; 97 | use nix::unistd::{Gid, Group}; 98 | use std::fs::File; 99 | use std::os::unix::fs::MetadataExt; 100 | use tempfile::Builder; 101 | 102 | let deps = FakeDependencies::new(); 103 | let mut matcher_io = deps.new_matcher_io(); 104 | 105 | let temp_dir = Builder::new().prefix("group_matcher").tempdir().unwrap(); 106 | let foo_path = temp_dir.path().join("foo"); 107 | let _ = File::create(foo_path).expect("create temp file"); 108 | let file_info = get_dir_entry_for(&temp_dir.path().to_string_lossy(), "foo"); 109 | let file_gid = file_info.metadata().unwrap().gid(); 110 | let file_group = Group::from_gid(Gid::from_raw(file_gid)) 111 | .unwrap() 112 | .unwrap() 113 | .name; 114 | 115 | let matcher = 116 | super::GroupMatcher::from_group_name(file_group.as_str()).expect("group should exist"); 117 | assert!( 118 | matcher.matches(&file_info, &mut matcher_io), 119 | "group should 
match" 120 | ); 121 | 122 | // Testing a non-existent group name 123 | let time_string = Local::now().format("%Y%m%d%H%M%S").to_string(); 124 | let matcher = GroupMatcher::from_group_name(time_string.as_str()); 125 | assert!( 126 | matcher.is_none(), 127 | "group name {} should not exist", 128 | time_string 129 | ); 130 | 131 | // Testing group id 132 | let matcher = GroupMatcher::from_gid(file_gid); 133 | assert!( 134 | matcher.matches(&file_info, &mut matcher_io), 135 | "group id should match" 136 | ); 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/find/matchers/lname.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use std::io::{stderr, Write}; 8 | use std::path::PathBuf; 9 | 10 | use super::glob::Pattern; 11 | use super::{Matcher, MatcherIO, WalkEntry}; 12 | 13 | fn read_link_target(file_info: &WalkEntry) -> Option { 14 | match file_info.path().read_link() { 15 | Ok(target) => Some(target), 16 | Err(err) => { 17 | // If it's not a symlink, then it's not an error that should be 18 | // shown. 19 | if err.kind() != std::io::ErrorKind::InvalidInput { 20 | writeln!( 21 | &mut stderr(), 22 | "Error reading target of {}: {}", 23 | file_info.path().display(), 24 | err 25 | ) 26 | .unwrap(); 27 | } 28 | 29 | None 30 | } 31 | } 32 | } 33 | 34 | /// This matcher makes a comparison of the link target against a shell wildcard 35 | /// pattern. See `glob::Pattern` for details on the exact syntax. 
36 | pub struct LinkNameMatcher { 37 | pattern: Pattern, 38 | } 39 | 40 | impl LinkNameMatcher { 41 | pub fn new(pattern_string: &str, caseless: bool) -> Self { 42 | let pattern = Pattern::new(pattern_string, caseless); 43 | Self { pattern } 44 | } 45 | } 46 | 47 | impl Matcher for LinkNameMatcher { 48 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 49 | if let Some(target) = read_link_target(file_info) { 50 | self.pattern.matches(&target.to_string_lossy()) 51 | } else { 52 | false 53 | } 54 | } 55 | } 56 | 57 | #[cfg(test)] 58 | mod tests { 59 | use super::*; 60 | use crate::find::matchers::tests::get_dir_entry_for; 61 | use crate::find::tests::FakeDependencies; 62 | 63 | use std::io::ErrorKind; 64 | 65 | #[cfg(unix)] 66 | use std::os::unix::fs::symlink; 67 | #[cfg(windows)] 68 | use std::os::windows::fs::symlink_file; 69 | 70 | fn create_file_link() { 71 | #[cfg(unix)] 72 | if let Err(e) = symlink("abbbc", "test_data/links/link-f") { 73 | assert!( 74 | e.kind() == ErrorKind::AlreadyExists, 75 | "Failed to create sym link: {e:?}" 76 | ); 77 | } 78 | #[cfg(windows)] 79 | if let Err(e) = symlink_file("abbbc", "test_data/links/link-f") { 80 | assert!( 81 | e.kind() == ErrorKind::AlreadyExists, 82 | "Failed to create sym link: {:?}", 83 | e 84 | ); 85 | } 86 | } 87 | 88 | #[test] 89 | fn matches_against_link_target() { 90 | create_file_link(); 91 | 92 | let link_f = get_dir_entry_for("test_data/links", "link-f"); 93 | let matcher = LinkNameMatcher::new("ab?bc", false); 94 | let deps = FakeDependencies::new(); 95 | assert!(matcher.matches(&link_f, &mut deps.new_matcher_io())); 96 | } 97 | 98 | #[test] 99 | fn caseless_matches_against_link_target() { 100 | create_file_link(); 101 | 102 | let link_f = get_dir_entry_for("test_data/links", "link-f"); 103 | let matcher = LinkNameMatcher::new("AbB?c", true); 104 | let deps = FakeDependencies::new(); 105 | assert!(matcher.matches(&link_f, &mut deps.new_matcher_io())); 106 | } 107 | } 108 | 
-------------------------------------------------------------------------------- /src/find/matchers/ls.rs: -------------------------------------------------------------------------------- 1 | // This file is part of the uutils findutils package. 2 | // 3 | // For the full copyright and license information, please view the LICENSE 4 | // file that was distributed with this source code. 5 | 6 | use chrono::DateTime; 7 | use std::{ 8 | fs::File, 9 | io::{stderr, Write}, 10 | }; 11 | 12 | use super::{Matcher, MatcherIO, WalkEntry}; 13 | 14 | #[cfg(unix)] 15 | fn format_permissions(mode: uucore::libc::mode_t) -> String { 16 | let file_type = match mode & (uucore::libc::S_IFMT as uucore::libc::mode_t) { 17 | uucore::libc::S_IFDIR => "d", 18 | uucore::libc::S_IFREG => "-", 19 | _ => "?", 20 | }; 21 | 22 | // S_$$USR means "user permissions" 23 | let user_perms = format!( 24 | "{}{}{}", 25 | if mode & uucore::libc::S_IRUSR != 0 { 26 | "r" 27 | } else { 28 | "-" 29 | }, 30 | if mode & uucore::libc::S_IWUSR != 0 { 31 | "w" 32 | } else { 33 | "-" 34 | }, 35 | if mode & uucore::libc::S_IXUSR != 0 { 36 | "x" 37 | } else { 38 | "-" 39 | } 40 | ); 41 | 42 | // S_$$GRP means "group permissions" 43 | let group_perms = format!( 44 | "{}{}{}", 45 | if mode & uucore::libc::S_IRGRP != 0 { 46 | "r" 47 | } else { 48 | "-" 49 | }, 50 | if mode & uucore::libc::S_IWGRP != 0 { 51 | "w" 52 | } else { 53 | "-" 54 | }, 55 | if mode & uucore::libc::S_IXGRP != 0 { 56 | "x" 57 | } else { 58 | "-" 59 | } 60 | ); 61 | 62 | // S_$$OTH means "other permissions" 63 | let other_perms = format!( 64 | "{}{}{}", 65 | if mode & uucore::libc::S_IROTH != 0 { 66 | "r" 67 | } else { 68 | "-" 69 | }, 70 | if mode & uucore::libc::S_IWOTH != 0 { 71 | "w" 72 | } else { 73 | "-" 74 | }, 75 | if mode & uucore::libc::S_IXOTH != 0 { 76 | "x" 77 | } else { 78 | "-" 79 | } 80 | ); 81 | 82 | format!("{}{}{}{}", file_type, user_perms, group_perms, other_perms) 83 | } 84 | 85 | #[cfg(windows)] 86 | fn 
format_permissions(file_attributes: u32) -> String { 87 | let mut attributes = Vec::new(); 88 | 89 | // https://learn.microsoft.com/en-us/windows/win32/fileio/file-attribute-constants 90 | if file_attributes & 0x0001 != 0 { 91 | attributes.push("read-only"); 92 | } 93 | if file_attributes & 0x0002 != 0 { 94 | attributes.push("hidden"); 95 | } 96 | if file_attributes & 0x0004 != 0 { 97 | attributes.push("system"); 98 | } 99 | if file_attributes & 0x0020 != 0 { 100 | attributes.push("archive"); 101 | } 102 | if file_attributes & 0x0040 != 0 { 103 | attributes.push("compressed"); 104 | } 105 | if file_attributes & 0x0080 != 0 { 106 | attributes.push("offline"); 107 | } 108 | 109 | attributes.join(", ") 110 | } 111 | 112 | pub struct Ls { 113 | output_file: Option, 114 | } 115 | 116 | impl Ls { 117 | pub fn new(output_file: Option) -> Self { 118 | Self { output_file } 119 | } 120 | 121 | #[cfg(unix)] 122 | fn print( 123 | &self, 124 | file_info: &WalkEntry, 125 | matcher_io: &mut MatcherIO, 126 | mut out: impl Write, 127 | print_error_message: bool, 128 | ) { 129 | use nix::unistd::{Gid, Group, Uid, User}; 130 | use std::os::unix::fs::{MetadataExt, PermissionsExt}; 131 | 132 | let metadata = file_info.metadata().unwrap(); 133 | 134 | let inode_number = metadata.ino(); 135 | let number_of_blocks = { 136 | let size = metadata.size(); 137 | let number_of_blocks = size / 1024; 138 | let remainder = number_of_blocks % 4; 139 | 140 | if remainder == 0 { 141 | if number_of_blocks == 0 { 142 | 4 143 | } else { 144 | number_of_blocks 145 | } 146 | } else { 147 | number_of_blocks + (4 - (remainder)) 148 | } 149 | }; 150 | let permission = 151 | { format_permissions(metadata.permissions().mode() as uucore::libc::mode_t) }; 152 | let hard_links = metadata.nlink(); 153 | let user = { 154 | let uid = metadata.uid(); 155 | User::from_uid(Uid::from_raw(uid)).unwrap().unwrap().name 156 | }; 157 | let group = { 158 | let gid = metadata.gid(); 159 | 
Group::from_gid(Gid::from_raw(gid)).unwrap().unwrap().name 160 | }; 161 | let size = metadata.size(); 162 | let last_modified = { 163 | let system_time = metadata.modified().unwrap(); 164 | let now_utc: DateTime = system_time.into(); 165 | now_utc.format("%b %e %H:%M") 166 | }; 167 | let path = file_info.path().to_string_lossy(); 168 | 169 | match writeln!( 170 | out, 171 | " {:<4} {:>6} {:<10} {:>3} {:<8} {:<8} {:>8} {} {}", 172 | inode_number, 173 | number_of_blocks, 174 | permission, 175 | hard_links, 176 | user, 177 | group, 178 | size, 179 | last_modified, 180 | path, 181 | ) { 182 | Ok(_) => {} 183 | Err(e) => { 184 | if print_error_message { 185 | writeln!( 186 | &mut stderr(), 187 | "Error writing {:?} for {}", 188 | file_info.path().to_string_lossy(), 189 | e 190 | ) 191 | .unwrap(); 192 | matcher_io.set_exit_code(1); 193 | } 194 | } 195 | } 196 | } 197 | 198 | #[cfg(windows)] 199 | fn print( 200 | &self, 201 | file_info: &WalkEntry, 202 | matcher_io: &mut MatcherIO, 203 | mut out: impl Write, 204 | print_error_message: bool, 205 | ) { 206 | use std::os::windows::fs::MetadataExt; 207 | 208 | let metadata = file_info.metadata().unwrap(); 209 | 210 | let inode_number = 0; 211 | let number_of_blocks = { 212 | let size = metadata.file_size(); 213 | let number_of_blocks = size / 1024; 214 | let remainder = number_of_blocks % 4; 215 | 216 | if remainder == 0 { 217 | if number_of_blocks == 0 { 218 | 4 219 | } else { 220 | number_of_blocks 221 | } 222 | } else { 223 | number_of_blocks + (4 - (remainder)) 224 | } 225 | }; 226 | let permission = { format_permissions(metadata.file_attributes()) }; 227 | let hard_links = 0; 228 | let user = 0; 229 | let group = 0; 230 | let size = metadata.file_size(); 231 | let last_modified = { 232 | let system_time = metadata.modified().unwrap(); 233 | let now_utc: DateTime = system_time.into(); 234 | now_utc.format("%b %e %H:%M") 235 | }; 236 | let path = file_info.path().to_string_lossy(); 237 | 238 | match write!( 239 | out, 240 
| " {:<4} {:>6} {:<10} {:>3} {:<8} {:<8} {:>8} {} {}\n", 241 | inode_number, 242 | number_of_blocks, 243 | permission, 244 | hard_links, 245 | user, 246 | group, 247 | size, 248 | last_modified, 249 | path, 250 | ) { 251 | Ok(_) => {} 252 | Err(e) => { 253 | if print_error_message { 254 | writeln!( 255 | &mut stderr(), 256 | "Error writing {:?} for {}", 257 | file_info.path().to_string_lossy(), 258 | e 259 | ) 260 | .unwrap(); 261 | matcher_io.set_exit_code(1); 262 | } 263 | } 264 | } 265 | } 266 | } 267 | 268 | impl Matcher for Ls { 269 | fn matches(&self, file_info: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { 270 | if let Some(file) = &self.output_file { 271 | self.print(file_info, matcher_io, file, true); 272 | } else { 273 | self.print( 274 | file_info, 275 | matcher_io, 276 | &mut *matcher_io.deps.get_output().borrow_mut(), 277 | false, 278 | ); 279 | } 280 | true 281 | } 282 | 283 | fn has_side_effects(&self) -> bool { 284 | true 285 | } 286 | } 287 | 288 | #[cfg(test)] 289 | mod tests { 290 | #[test] 291 | #[cfg(unix)] 292 | fn test_format_permissions() { 293 | use super::format_permissions; 294 | 295 | let mode: uucore::libc::mode_t = 0o100644; 296 | let expected = "-rw-r--r--"; 297 | assert_eq!(format_permissions(mode), expected); 298 | 299 | let mode: uucore::libc::mode_t = 0o040755; 300 | let expected = "drwxr-xr-x"; 301 | assert_eq!(format_permissions(mode), expected); 302 | 303 | let mode: uucore::libc::mode_t = 0o100777; 304 | let expected = "-rwxrwxrwx"; 305 | assert_eq!(format_permissions(mode), expected); 306 | } 307 | } 308 | -------------------------------------------------------------------------------- /src/find/matchers/name.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 
6 | 7 | use super::glob::Pattern; 8 | use super::{Matcher, MatcherIO, WalkEntry}; 9 | 10 | /// This matcher makes a comparison of the name against a shell wildcard 11 | /// pattern. See `glob::Pattern` for details on the exact syntax. 12 | pub struct NameMatcher { 13 | pattern: Pattern, 14 | } 15 | 16 | impl NameMatcher { 17 | pub fn new(pattern_string: &str, caseless: bool) -> Self { 18 | let pattern = Pattern::new(pattern_string, caseless); 19 | Self { pattern } 20 | } 21 | } 22 | 23 | impl Matcher for NameMatcher { 24 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 25 | let name = file_info.file_name().to_string_lossy(); 26 | 27 | #[cfg(unix)] 28 | if name.len() > 1 && name.chars().all(|x| x == '/') { 29 | self.pattern.matches("/") 30 | } else { 31 | self.pattern.matches(&name) 32 | } 33 | 34 | #[cfg(windows)] 35 | self.pattern.matches(&name) 36 | } 37 | } 38 | 39 | #[cfg(test)] 40 | mod tests { 41 | use super::*; 42 | use crate::find::matchers::tests::get_dir_entry_for; 43 | use crate::find::tests::FakeDependencies; 44 | use std::io::ErrorKind; 45 | 46 | #[cfg(unix)] 47 | use std::os::unix::fs::symlink; 48 | 49 | #[cfg(windows)] 50 | use std::os::windows::fs::symlink_file; 51 | 52 | fn create_file_link() { 53 | #[cfg(unix)] 54 | if let Err(e) = symlink("abbbc", "test_data/links/link-f") { 55 | assert!( 56 | e.kind() == ErrorKind::AlreadyExists, 57 | "Failed to create sym link: {e:?}" 58 | ); 59 | } 60 | #[cfg(windows)] 61 | if let Err(e) = symlink_file("abbbc", "test_data/links/link-f") { 62 | assert!( 63 | e.kind() == ErrorKind::AlreadyExists, 64 | "Failed to create sym link: {:?}", 65 | e 66 | ); 67 | } 68 | } 69 | 70 | #[test] 71 | fn matching_with_wrong_case_returns_false() { 72 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 73 | let matcher = NameMatcher::new("A*C", false); 74 | let deps = FakeDependencies::new(); 75 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 76 | } 77 | 78 | #[test] 79 | fn 
matching_with_right_case_returns_true() { 80 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 81 | let matcher = NameMatcher::new("abb?c", false); 82 | let deps = FakeDependencies::new(); 83 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 84 | } 85 | 86 | #[test] 87 | fn not_matching_returns_false() { 88 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 89 | let matcher = NameMatcher::new("shouldn't match", false); 90 | let deps = FakeDependencies::new(); 91 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 92 | } 93 | 94 | #[test] 95 | fn matches_against_link_file_name() { 96 | create_file_link(); 97 | 98 | let link_f = get_dir_entry_for("test_data/links", "link-f"); 99 | let matcher = NameMatcher::new("link?f", false); 100 | let deps = FakeDependencies::new(); 101 | assert!(matcher.matches(&link_f, &mut deps.new_matcher_io())); 102 | } 103 | 104 | #[test] 105 | fn caseless_matching_with_wrong_case_returns_true() { 106 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 107 | let matcher = NameMatcher::new("A*C", true); 108 | let deps = FakeDependencies::new(); 109 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 110 | } 111 | 112 | #[test] 113 | fn caseless_matching_with_right_case_returns_true() { 114 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 115 | let matcher = NameMatcher::new("abb?c", true); 116 | let deps = FakeDependencies::new(); 117 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 118 | } 119 | 120 | #[test] 121 | fn caseless_not_matching_returns_false() { 122 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 123 | let matcher = NameMatcher::new("shouldn't match", true); 124 | let deps = FakeDependencies::new(); 125 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 126 | } 127 | 128 | #[test] 129 | fn caseless_matches_against_link_file_name() { 130 | create_file_link(); 131 | 132 | let link_f = 
get_dir_entry_for("test_data/links", "link-f"); 133 | let matcher = NameMatcher::new("linK?f", true); 134 | let deps = FakeDependencies::new(); 135 | assert!(matcher.matches(&link_f, &mut deps.new_matcher_io())); 136 | } 137 | 138 | #[test] 139 | #[cfg(unix)] 140 | fn slash_match_returns_true() { 141 | let dir_to_match = get_dir_entry_for("///", ""); 142 | let matcher = NameMatcher::new("/", true); 143 | let deps = FakeDependencies::new(); 144 | assert!(matcher.matches(&dir_to_match, &mut deps.new_matcher_io())); 145 | } 146 | 147 | #[test] 148 | #[cfg(unix)] 149 | fn only_one_slash() { 150 | let dir_to_match = get_dir_entry_for("/", ""); 151 | let matcher = NameMatcher::new("/", false); 152 | let deps = FakeDependencies::new(); 153 | assert!(matcher.matches(&dir_to_match, &mut deps.new_matcher_io())); 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /src/find/matchers/path.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use super::glob::Pattern; 8 | use super::{Matcher, MatcherIO, WalkEntry}; 9 | 10 | /// This matcher makes a comparison of the path against a shell wildcard 11 | /// pattern. See `glob::Pattern` for details on the exact syntax. 
pub struct PathMatcher {
    glob: Pattern,
}

impl PathMatcher {
    /// Builds a matcher for the given wildcard pattern; `caseless` selects
    /// case-insensitive matching.
    pub fn new(pattern_string: &str, caseless: bool) -> Self {
        Self {
            glob: Pattern::new(pattern_string, caseless),
        }
    }
}

impl Matcher for PathMatcher {
    fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool {
        // The whole path, not just the final component, is tested.
        self.glob.matches(&file_info.path().to_string_lossy())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::find::matchers::tests::get_dir_entry_for;
    use crate::find::tests::FakeDependencies;

    // Variants of fix_up_slashes that properly escape the forward slashes for
    // being in a glob.
    #[cfg(windows)]
    fn fix_up_glob_slashes(re: &str) -> String {
        re.replace("/", "\\\\")
    }

    #[cfg(not(windows))]
    fn fix_up_glob_slashes(re: &str) -> String {
        re.to_owned()
    }

    #[test]
    fn matching_against_whole_path() {
        let entry = get_dir_entry_for("test_data/simple", "abbbc");
        let deps = FakeDependencies::new();
        let matcher = PathMatcher::new(&fix_up_glob_slashes("test_*/*/a*c"), false);
        assert!(matcher.matches(&entry, &mut deps.new_matcher_io()));
    }

    #[test]
    fn not_matching_against_just_name() {
        let entry = get_dir_entry_for("test_data/simple", "abbbc");
        let deps = FakeDependencies::new();
        assert!(!PathMatcher::new("a*c", false).matches(&entry, &mut deps.new_matcher_io()));
    }

    #[test]
    fn not_matching_against_wrong_case() {
        let entry = get_dir_entry_for("test_data/simple", "abbbc");
        let deps = FakeDependencies::new();
        let matcher = PathMatcher::new(&fix_up_glob_slashes("test_*/*/A*C"), false);
        assert!(!matcher.matches(&entry, &mut deps.new_matcher_io()));
    }

    #[test]
    fn caseless_matching() {
        let abbbc = get_dir_entry_for("test_data/simple",
"abbbc"); 75 | let matcher = PathMatcher::new(&fix_up_glob_slashes("test_*/*/A*C"), true); 76 | let deps = FakeDependencies::new(); 77 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/find/matchers/perm.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | //! find's permission matching uses a very unix-centric approach, that would 8 | //! be tricky to both implement and use on a windows platform. So we don't 9 | //! even try. 10 | 11 | use std::error::Error; 12 | use std::io::{stderr, Write}; 13 | #[cfg(unix)] 14 | use uucore::mode::{parse_numeric, parse_symbolic}; 15 | 16 | use super::{Matcher, MatcherIO, WalkEntry}; 17 | 18 | #[derive(Clone, Copy, Debug, Eq, PartialEq)] 19 | #[cfg(unix)] 20 | pub enum ComparisonType { 21 | /// mode bits have to match exactly 22 | Exact, 23 | /// all specified mode bits must be set. 
Others can be as well 24 | AtLeast, 25 | /// at least one of the specified bits must be set (or if no bits are 26 | /// specified then any mode will match) 27 | AnyOf, 28 | } 29 | 30 | #[cfg(unix)] 31 | impl ComparisonType { 32 | fn mode_bits_match(self, pattern: u32, value: u32) -> bool { 33 | match self { 34 | Self::Exact => (0o7777 & value) == pattern, 35 | Self::AtLeast => (value & pattern) == pattern, 36 | Self::AnyOf => pattern == 0 || (value & pattern) > 0, 37 | } 38 | } 39 | } 40 | 41 | #[cfg(unix)] 42 | mod parsing { 43 | use super::{parse_numeric, parse_symbolic, ComparisonType, Error}; 44 | 45 | pub fn split_comparison_type(pattern: &str) -> (ComparisonType, &str) { 46 | let mut chars = pattern.chars(); 47 | 48 | match chars.next() { 49 | Some('-') => (ComparisonType::AtLeast, chars.as_str()), 50 | Some('/') => (ComparisonType::AnyOf, chars.as_str()), 51 | _ => (ComparisonType::Exact, pattern), 52 | } 53 | } 54 | 55 | pub fn parse_mode(pattern: &str, for_dir: bool) -> Result> { 56 | let mode = if pattern.contains(|c: char| c.is_ascii_digit()) { 57 | parse_numeric(0, pattern, for_dir)? 
58 | } else { 59 | let mut mode = 0; 60 | for chunk in pattern.split(',') { 61 | mode = parse_symbolic(mode, chunk, 0, for_dir)?; 62 | } 63 | mode 64 | }; 65 | Ok(mode) 66 | } 67 | } 68 | 69 | #[cfg(unix)] 70 | #[derive(Debug)] 71 | pub struct PermMatcher { 72 | comparison_type: ComparisonType, 73 | file_pattern: u32, 74 | dir_pattern: u32, 75 | } 76 | 77 | #[cfg(not(unix))] 78 | pub struct PermMatcher {} 79 | 80 | impl PermMatcher { 81 | #[cfg(unix)] 82 | pub fn new(pattern: &str) -> Result> { 83 | let (comparison_type, pattern) = parsing::split_comparison_type(pattern); 84 | let file_pattern = parsing::parse_mode(pattern, false)?; 85 | let dir_pattern = parsing::parse_mode(pattern, false)?; 86 | Ok(Self { 87 | comparison_type, 88 | file_pattern, 89 | dir_pattern, 90 | }) 91 | } 92 | 93 | #[cfg(not(unix))] 94 | pub fn new(_dummy_pattern: &str) -> Result> { 95 | Err(From::from( 96 | "Permission matching is not available on this platform", 97 | )) 98 | } 99 | } 100 | 101 | impl Matcher for PermMatcher { 102 | #[cfg(unix)] 103 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 104 | use std::os::unix::fs::PermissionsExt; 105 | match file_info.metadata() { 106 | Ok(metadata) => { 107 | let pattern = if metadata.is_dir() { 108 | self.dir_pattern 109 | } else { 110 | self.file_pattern 111 | }; 112 | self.comparison_type 113 | .mode_bits_match(pattern, metadata.permissions().mode()) 114 | } 115 | Err(e) => { 116 | writeln!( 117 | &mut stderr(), 118 | "Error getting permissions for {}: {}", 119 | file_info.path().to_string_lossy(), 120 | e 121 | ) 122 | .unwrap(); 123 | false 124 | } 125 | } 126 | } 127 | 128 | #[cfg(not(unix))] 129 | fn matches(&self, _dummy_file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 130 | writeln!( 131 | &mut stderr(), 132 | "Permission matching not available on this platform!" 
133 | ) 134 | .unwrap(); 135 | return false; 136 | } 137 | } 138 | 139 | #[cfg(test)] 140 | #[cfg(unix)] 141 | mod tests { 142 | use super::ComparisonType::*; 143 | use super::*; 144 | 145 | use crate::find::matchers::tests::get_dir_entry_for; 146 | use crate::find::tests::FakeDependencies; 147 | 148 | #[track_caller] 149 | fn assert_parse(pattern: &str, comparison_type: ComparisonType, mode: u32) { 150 | let matcher = PermMatcher::new(pattern).unwrap(); 151 | assert_eq!(matcher.comparison_type, comparison_type); 152 | assert_eq!(matcher.file_pattern, mode); 153 | assert_eq!(matcher.dir_pattern, mode); 154 | } 155 | 156 | #[test] 157 | fn parsing_prefix() { 158 | assert_parse("u=rwx", Exact, 0o700); 159 | assert_parse("-u=rwx", AtLeast, 0o700); 160 | assert_parse("/u=rwx", AnyOf, 0o700); 161 | 162 | assert_parse("700", Exact, 0o700); 163 | assert_parse("-700", AtLeast, 0o700); 164 | assert_parse("/700", AnyOf, 0o700); 165 | } 166 | 167 | #[test] 168 | fn parsing_octal() { 169 | assert_parse("/1", AnyOf, 0o001); 170 | assert_parse("/7777", AnyOf, 0o7777); 171 | } 172 | 173 | #[test] 174 | fn parsing_human_readable_individual_bits() { 175 | assert_parse("/u=r", AnyOf, 0o400); 176 | assert_parse("/u=w", AnyOf, 0o200); 177 | assert_parse("/u=x", AnyOf, 0o100); 178 | 179 | assert_parse("/g=r", AnyOf, 0o040); 180 | assert_parse("/g=w", AnyOf, 0o020); 181 | assert_parse("/g=x", AnyOf, 0o010); 182 | 183 | assert_parse("/o+r", AnyOf, 0o004); 184 | assert_parse("/o+w", AnyOf, 0o002); 185 | assert_parse("/o+x", AnyOf, 0o001); 186 | 187 | assert_parse("/a+r", AnyOf, 0o444); 188 | assert_parse("/a+w", AnyOf, 0o222); 189 | assert_parse("/a+x", AnyOf, 0o111); 190 | } 191 | 192 | #[test] 193 | fn parsing_human_readable_multiple_bits() { 194 | assert_parse("/u=rwx", AnyOf, 0o700); 195 | assert_parse("/a=rwx", AnyOf, 0o777); 196 | } 197 | 198 | #[test] 199 | fn parsing_human_readable_multiple_categories() { 200 | assert_parse("/u=rwx,g=rx,o+r", AnyOf, 0o754); 201 | 
assert_parse("/u=rwx,g=rx,o+r,a+w", AnyOf, 0o776); 202 | assert_parse("/ug=rwx,o+r", AnyOf, 0o774); 203 | } 204 | 205 | #[test] 206 | fn parsing_human_readable_set_id_bits() { 207 | assert_parse("/u=s", AnyOf, 0o4000); 208 | assert_parse("/g=s", AnyOf, 0o2000); 209 | assert_parse("/ug=s", AnyOf, 0o6000); 210 | assert_parse("/o=s", AnyOf, 0o0000); 211 | } 212 | 213 | #[test] 214 | fn parsing_human_readable_sticky_bit() { 215 | assert_parse("/o=t", AnyOf, 0o1000); 216 | } 217 | 218 | #[test] 219 | fn parsing_fails() { 220 | PermMatcher::new("urwx,g=rx,o+r").expect_err("missing equals should fail"); 221 | PermMatcher::new("d=rwx,g=rx,o+r").expect_err("invalid category should fail"); 222 | PermMatcher::new("u=dwx,g=rx,o+r").expect_err("invalid permission bit should fail"); 223 | PermMatcher::new("u_rwx,g=rx,o+r") 224 | .expect_err("invalid category/permission separator should fail"); 225 | PermMatcher::new("77777777777777").expect_err("overflowing octal value should fail"); 226 | 227 | // FIXME: uucore::mode shouldn't accept this 228 | // PermMatcher::new("u=rwxg=rx,o+r") 229 | // .expect_err("missing comma should fail"); 230 | } 231 | 232 | #[test] 233 | fn comparison_type_matching() { 234 | let c = ComparisonType::Exact; 235 | assert!( 236 | c.mode_bits_match(0, 0), 237 | "Exact: only 0 should match if pattern is 0" 238 | ); 239 | assert!( 240 | !c.mode_bits_match(0, 0o444), 241 | "Exact: only 0 should match if pattern is 0" 242 | ); 243 | assert!( 244 | c.mode_bits_match(0o444, 0o444), 245 | "Exact: identical bits should match" 246 | ); 247 | assert!( 248 | !c.mode_bits_match(0o444, 0o777), 249 | "Exact: non-identical bits should fail" 250 | ); 251 | assert!( 252 | c.mode_bits_match(0o444, 0o70444), 253 | "Exact:high-end bits should be ignored" 254 | ); 255 | 256 | let c = ComparisonType::AtLeast; 257 | assert!( 258 | c.mode_bits_match(0, 0), 259 | "AtLeast: anything should match if pattern is 0" 260 | ); 261 | assert!( 262 | c.mode_bits_match(0, 0o444), 263 | 
"AtLeast: anything should match if pattern is 0" 264 | ); 265 | assert!( 266 | c.mode_bits_match(0o444, 0o777), 267 | "AtLeast: identical bits should match" 268 | ); 269 | assert!( 270 | c.mode_bits_match(0o444, 0o777), 271 | "AtLeast: extra bits should match" 272 | ); 273 | assert!( 274 | !c.mode_bits_match(0o444, 0o700), 275 | "AtLeast: missing bits should fail" 276 | ); 277 | assert!( 278 | c.mode_bits_match(0o444, 0o70444), 279 | "AtLeast: high-end bits should be ignored" 280 | ); 281 | 282 | let c = ComparisonType::AnyOf; 283 | assert!( 284 | c.mode_bits_match(0, 0), 285 | "AnyOf: anything should match if pattern is 0" 286 | ); 287 | assert!( 288 | c.mode_bits_match(0, 0o444), 289 | "AnyOf: anything should match if pattern is 0" 290 | ); 291 | assert!( 292 | c.mode_bits_match(0o444, 0o777), 293 | "AnyOf: identical bits should match" 294 | ); 295 | assert!( 296 | c.mode_bits_match(0o444, 0o777), 297 | "AnyOf: extra bits should match" 298 | ); 299 | assert!( 300 | c.mode_bits_match(0o777, 0o001), 301 | "AnyOf: anything should match as long as it has one bit in common" 302 | ); 303 | assert!( 304 | !c.mode_bits_match(0o010, 0o001), 305 | "AnyOf: no matching bits shouldn't match" 306 | ); 307 | assert!( 308 | c.mode_bits_match(0o444, 0o70444), 309 | "AnyOf: high-end bits should be ignored" 310 | ); 311 | } 312 | 313 | #[test] 314 | fn perm_matches() { 315 | let file_info = get_dir_entry_for("test_data/simple", "abbbc"); 316 | let deps = FakeDependencies::new(); 317 | 318 | let matcher = PermMatcher::new("-u+r").unwrap(); 319 | assert!( 320 | matcher.matches(&file_info, &mut deps.new_matcher_io()), 321 | "user-readable pattern should match file" 322 | ); 323 | 324 | let matcher = PermMatcher::new("-u+x").unwrap(); 325 | assert!( 326 | !matcher.matches(&file_info, &mut deps.new_matcher_io()), 327 | "user-executable pattern should not match file" 328 | ); 329 | } 330 | } 331 | -------------------------------------------------------------------------------- 
/src/find/matchers/printer.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use std::fs::File; 8 | use std::io::{stderr, Write}; 9 | 10 | use super::{Matcher, MatcherIO, WalkEntry}; 11 | 12 | pub enum PrintDelimiter { 13 | Newline, 14 | Null, 15 | } 16 | 17 | impl std::fmt::Display for PrintDelimiter { 18 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 19 | match self { 20 | Self::Newline => writeln!(f), 21 | Self::Null => write!(f, "\0"), 22 | } 23 | } 24 | } 25 | 26 | /// This matcher just prints the name of the file to stdout. 27 | pub struct Printer { 28 | delimiter: PrintDelimiter, 29 | output_file: Option, 30 | } 31 | 32 | impl Printer { 33 | pub fn new(delimiter: PrintDelimiter, output_file: Option) -> Self { 34 | Self { 35 | delimiter, 36 | output_file, 37 | } 38 | } 39 | 40 | fn print( 41 | &self, 42 | file_info: &WalkEntry, 43 | matcher_io: &mut MatcherIO, 44 | mut out: impl Write, 45 | print_error_message: bool, 46 | ) { 47 | match write!( 48 | out, 49 | "{}{}", 50 | file_info.path().to_string_lossy(), 51 | self.delimiter 52 | ) { 53 | Ok(_) => {} 54 | Err(e) => { 55 | if print_error_message { 56 | writeln!( 57 | &mut stderr(), 58 | "Error writing {:?} for {}", 59 | file_info.path().to_string_lossy(), 60 | e 61 | ) 62 | .unwrap(); 63 | matcher_io.set_exit_code(1); 64 | } 65 | } 66 | } 67 | out.flush().unwrap(); 68 | } 69 | } 70 | 71 | impl Matcher for Printer { 72 | fn matches(&self, file_info: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { 73 | if let Some(file) = &self.output_file { 74 | self.print(file_info, matcher_io, file, true); 75 | } else { 76 | self.print( 77 | file_info, 78 | matcher_io, 79 | &mut *matcher_io.deps.get_output().borrow_mut(), 80 | false, 81 | ); 82 | } 83 | true 
84 | } 85 | 86 | fn has_side_effects(&self) -> bool { 87 | true 88 | } 89 | } 90 | 91 | #[cfg(test)] 92 | mod tests { 93 | use super::*; 94 | use crate::find::matchers::tests::get_dir_entry_for; 95 | use crate::find::tests::fix_up_slashes; 96 | use crate::find::tests::FakeDependencies; 97 | 98 | #[test] 99 | fn prints_newline() { 100 | let abbbc = get_dir_entry_for("./test_data/simple", "abbbc"); 101 | 102 | let matcher = Printer::new(PrintDelimiter::Newline, None); 103 | let deps = FakeDependencies::new(); 104 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 105 | assert_eq!( 106 | fix_up_slashes("./test_data/simple/abbbc\n"), 107 | deps.get_output_as_string() 108 | ); 109 | } 110 | 111 | #[test] 112 | fn prints_null() { 113 | let abbbc = get_dir_entry_for("./test_data/simple", "abbbc"); 114 | 115 | let matcher = Printer::new(PrintDelimiter::Null, None); 116 | let deps = FakeDependencies::new(); 117 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 118 | assert_eq!( 119 | fix_up_slashes("./test_data/simple/abbbc\0"), 120 | deps.get_output_as_string() 121 | ); 122 | } 123 | 124 | #[test] 125 | #[cfg(target_os = "linux")] 126 | fn prints_error_message() { 127 | let dev_full = File::open("/dev/full").unwrap(); 128 | let abbbc = get_dir_entry_for("./test_data/simple", "abbbc"); 129 | 130 | let matcher = Printer::new(PrintDelimiter::Newline, Some(dev_full)); 131 | let deps = FakeDependencies::new(); 132 | 133 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 134 | assert!(deps.get_output_as_string().is_empty()); 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /src/find/matchers/prune.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 
6 | 7 | use super::{Matcher, MatcherIO, WalkEntry}; 8 | 9 | /// This matcher checks the type of the file. 10 | pub struct PruneMatcher; 11 | 12 | impl PruneMatcher { 13 | pub fn new() -> Self { 14 | Self {} 15 | } 16 | } 17 | 18 | impl Matcher for PruneMatcher { 19 | fn matches(&self, file_info: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { 20 | if file_info.file_type().is_dir() { 21 | matcher_io.mark_current_dir_to_be_skipped(); 22 | } 23 | 24 | true 25 | } 26 | } 27 | 28 | #[cfg(test)] 29 | mod tests { 30 | use super::*; 31 | use crate::find::matchers::tests::get_dir_entry_for; 32 | use crate::find::tests::FakeDependencies; 33 | 34 | #[test] 35 | fn file_type_matcher() { 36 | let dir = get_dir_entry_for("test_data", "simple"); 37 | let deps = FakeDependencies::new(); 38 | 39 | let mut matcher_io = deps.new_matcher_io(); 40 | assert!(!matcher_io.should_skip_current_dir()); 41 | let matcher = PruneMatcher::new(); 42 | assert!(matcher.matches(&dir, &mut matcher_io)); 43 | assert!(matcher_io.should_skip_current_dir()); 44 | } 45 | 46 | #[test] 47 | fn only_skips_directories() { 48 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 49 | let deps = FakeDependencies::new(); 50 | 51 | let mut matcher_io = deps.new_matcher_io(); 52 | assert!(!matcher_io.should_skip_current_dir()); 53 | let matcher = PruneMatcher::new(); 54 | assert!(matcher.matches(&abbbc, &mut matcher_io)); 55 | assert!(!matcher_io.should_skip_current_dir()); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/find/matchers/quit.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Tavian Barnes 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use super::{Matcher, MatcherIO, WalkEntry}; 8 | 9 | /// This matcher quits the search immediately. 
10 | pub struct QuitMatcher; 11 | 12 | impl Matcher for QuitMatcher { 13 | fn matches(&self, _: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { 14 | matcher_io.quit(); 15 | true 16 | } 17 | } 18 | 19 | #[cfg(test)] 20 | mod tests { 21 | use super::*; 22 | use crate::find::matchers::tests::get_dir_entry_for; 23 | use crate::find::tests::FakeDependencies; 24 | 25 | #[test] 26 | fn quits_when_matched() { 27 | let dir = get_dir_entry_for("test_data", "simple"); 28 | let deps = FakeDependencies::new(); 29 | 30 | let mut matcher_io = deps.new_matcher_io(); 31 | assert!(!matcher_io.should_quit()); 32 | let matcher = QuitMatcher; 33 | assert!(matcher.matches(&dir, &mut matcher_io)); 34 | assert!(matcher_io.should_quit()); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/find/matchers/regex.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Collabora, Ltd. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 
/// Error returned when a `-regextype` argument names an unknown dialect.
#[derive(Debug)]
pub struct ParseRegexTypeError(String);

impl Error for ParseRegexTypeError {}

impl fmt::Display for ParseRegexTypeError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let valid = RegexType::VALUES
            .iter()
            .map(|t| format!("'{t}'"))
            .collect::<Vec<String>>()
            .join(", ");
        write!(f, "Invalid regex type: {} (must be one of {})", self.0, valid)
    }
}

/// The regex dialects this matcher understands.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RegexType {
    Emacs,
    Grep,
    PosixBasic,
    PosixExtended,
}

impl RegexType {
    pub const VALUES: &'static [Self] = &[
        Self::Emacs,
        Self::Grep,
        Self::PosixBasic,
        Self::PosixExtended,
    ];

    /// Canonical command-line spelling of this dialect.
    fn as_str(&self) -> &'static str {
        match self {
            Self::Emacs => "emacs",
            Self::Grep => "grep",
            Self::PosixBasic => "posix-basic",
            Self::PosixExtended => "posix-extended",
        }
    }
}

impl fmt::Display for RegexType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl FromStr for RegexType {
    type Err = ParseRegexTypeError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "emacs" => Ok(Self::Emacs),
            "grep" => Ok(Self::Grep),
            // "ed" and "sed" are accepted as aliases for posix-basic.
            "posix-basic" | "ed" | "sed" => Ok(Self::PosixBasic),
            "posix-extended" => Ok(Self::PosixExtended),
            other => Err(ParseRegexTypeError(other.to_owned())),
        }
    }
}

impl Default for RegexType {
    fn default() -> Self {
        Self::Emacs
    }
}
Result> { 93 | let syntax = match regex_type { 94 | RegexType::Emacs => Syntax::emacs(), 95 | RegexType::Grep => Syntax::grep(), 96 | RegexType::PosixBasic => Syntax::posix_basic(), 97 | RegexType::PosixExtended => Syntax::posix_extended(), 98 | }; 99 | 100 | let regex = Regex::with_options( 101 | pattern, 102 | if ignore_case { 103 | RegexOptions::REGEX_OPTION_IGNORECASE 104 | } else { 105 | RegexOptions::REGEX_OPTION_NONE 106 | }, 107 | syntax, 108 | )?; 109 | Ok(Self { regex }) 110 | } 111 | } 112 | 113 | impl Matcher for RegexMatcher { 114 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 115 | self.regex 116 | .is_match(file_info.path().to_string_lossy().as_ref()) 117 | } 118 | } 119 | 120 | #[cfg(test)] 121 | mod tests { 122 | use super::*; 123 | use crate::find::matchers::tests::get_dir_entry_for; 124 | use crate::find::tests::FakeDependencies; 125 | 126 | const POSIX_BASIC_INTERVALS_RE: &str = r".*/ab\{1,3\}c"; 127 | const POSIX_EXTENDED_INTERVALS_RE: &str = r".*/ab{1,3}c"; 128 | const EMACS_AND_POSIX_EXTENDED_KLEENE_PLUS: &str = r".*/ab+c"; 129 | 130 | // Variants of fix_up_slashes that properly escape the forward slashes for 131 | // being in a regex. 
132 | #[cfg(windows)] 133 | fn fix_up_regex_slashes(re: &str) -> String { 134 | re.replace("/", r"\\") 135 | } 136 | 137 | #[cfg(not(windows))] 138 | fn fix_up_regex_slashes(re: &str) -> String { 139 | re.to_owned() 140 | } 141 | 142 | #[test] 143 | fn case_sensitive_matching() { 144 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 145 | let matcher = 146 | RegexMatcher::new(RegexType::Emacs, &fix_up_regex_slashes(".*/ab.BC"), false).unwrap(); 147 | let deps = FakeDependencies::new(); 148 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 149 | } 150 | 151 | #[test] 152 | fn case_insensitive_matching() { 153 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 154 | let matcher = 155 | RegexMatcher::new(RegexType::Emacs, &fix_up_regex_slashes(".*/ab.BC"), true).unwrap(); 156 | let deps = FakeDependencies::new(); 157 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 158 | } 159 | 160 | #[test] 161 | fn emacs_regex() { 162 | // Emacs syntax is mostly the same as POSIX extended but with escaped 163 | // brace intervals. 
164 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 165 | 166 | let matcher = RegexMatcher::new( 167 | RegexType::Emacs, 168 | &fix_up_regex_slashes(EMACS_AND_POSIX_EXTENDED_KLEENE_PLUS), 169 | true, 170 | ) 171 | .unwrap(); 172 | let deps = FakeDependencies::new(); 173 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 174 | 175 | let matcher = RegexMatcher::new( 176 | RegexType::Emacs, 177 | &fix_up_regex_slashes(POSIX_EXTENDED_INTERVALS_RE), 178 | true, 179 | ) 180 | .unwrap(); 181 | let deps = FakeDependencies::new(); 182 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 183 | } 184 | 185 | #[test] 186 | fn posix_basic_regex() { 187 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 188 | 189 | let matcher = RegexMatcher::new( 190 | RegexType::PosixBasic, 191 | &fix_up_regex_slashes(POSIX_BASIC_INTERVALS_RE), 192 | true, 193 | ) 194 | .unwrap(); 195 | let deps = FakeDependencies::new(); 196 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 197 | 198 | let matcher = RegexMatcher::new( 199 | RegexType::PosixBasic, 200 | &fix_up_regex_slashes(POSIX_EXTENDED_INTERVALS_RE), 201 | true, 202 | ) 203 | .unwrap(); 204 | let deps = FakeDependencies::new(); 205 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 206 | } 207 | 208 | #[test] 209 | fn posix_extended_regex() { 210 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 211 | 212 | let matcher = RegexMatcher::new( 213 | RegexType::PosixExtended, 214 | &fix_up_regex_slashes(POSIX_EXTENDED_INTERVALS_RE), 215 | true, 216 | ) 217 | .unwrap(); 218 | let deps = FakeDependencies::new(); 219 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 220 | 221 | let matcher = RegexMatcher::new( 222 | RegexType::PosixExtended, 223 | &fix_up_regex_slashes(POSIX_BASIC_INTERVALS_RE), 224 | true, 225 | ) 226 | .unwrap(); 227 | let deps = FakeDependencies::new(); 228 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 229 
| } 230 | } 231 | -------------------------------------------------------------------------------- /src/find/matchers/samefile.rs: -------------------------------------------------------------------------------- 1 | // This file is part of the uutils findutils package. 2 | // 3 | // For the full copyright and license information, please view the LICENSE 4 | // file that was distributed with this source code. 5 | 6 | use super::{Follow, Matcher, MatcherIO, WalkEntry, WalkError}; 7 | use std::error::Error; 8 | use std::path::Path; 9 | use uucore::fs::FileInformation; 10 | 11 | pub struct SameFileMatcher { 12 | info: FileInformation, 13 | } 14 | 15 | /// Gets FileInformation, possibly following symlinks, but falling back on 16 | /// broken links. 17 | fn get_file_info(path: &Path, follow: bool) -> Result { 18 | if follow { 19 | let result = FileInformation::from_path(path, true).map_err(WalkError::from); 20 | 21 | match result { 22 | Ok(info) => return Ok(info), 23 | Err(e) if !e.is_not_found() => return Err(e), 24 | _ => {} 25 | } 26 | } 27 | 28 | Ok(FileInformation::from_path(path, false)?) 
29 | } 30 | 31 | impl SameFileMatcher { 32 | pub fn new(path: impl AsRef, follow: Follow) -> Result> { 33 | let info = get_file_info(path.as_ref(), follow != Follow::Never)?; 34 | Ok(Self { info }) 35 | } 36 | } 37 | 38 | impl Matcher for SameFileMatcher { 39 | fn matches(&self, file_info: &WalkEntry, _matcher_io: &mut MatcherIO) -> bool { 40 | if let Ok(info) = get_file_info(file_info.path(), file_info.follow()) { 41 | info == self.info 42 | } else { 43 | false 44 | } 45 | } 46 | } 47 | 48 | #[cfg(test)] 49 | mod tests { 50 | use super::*; 51 | 52 | use crate::find::matchers::tests::{get_dir_entry_follow, get_dir_entry_for}; 53 | use crate::find::tests::FakeDependencies; 54 | use std::fs::{self, File}; 55 | use tempfile::Builder; 56 | 57 | #[test] 58 | fn test_samefile() { 59 | let root = Builder::new().prefix("example").tempdir().unwrap(); 60 | let root_path = root.path(); 61 | 62 | let file_path = root_path.join("file"); 63 | File::create(&file_path).unwrap(); 64 | 65 | let link_path = root_path.join("link"); 66 | fs::hard_link(&file_path, &link_path).unwrap(); 67 | 68 | let other_path = root_path.join("other"); 69 | File::create(&other_path).unwrap(); 70 | 71 | let matcher = SameFileMatcher::new(&file_path, Follow::Never).unwrap(); 72 | 73 | let root_path = root_path.to_string_lossy(); 74 | let file_entry = get_dir_entry_for(&root_path, "file"); 75 | let link_entry = get_dir_entry_for(&root_path, "link"); 76 | let other_entry = get_dir_entry_for(&root_path, "other"); 77 | 78 | let deps = FakeDependencies::new(); 79 | assert!(matcher.matches(&file_entry, &mut deps.new_matcher_io())); 80 | assert!(matcher.matches(&link_entry, &mut deps.new_matcher_io())); 81 | assert!(!matcher.matches(&other_entry, &mut deps.new_matcher_io())); 82 | } 83 | 84 | #[test] 85 | fn test_follow() { 86 | let deps = FakeDependencies::new(); 87 | let matcher = SameFileMatcher::new("test_data/links/link-f", Follow::Roots).unwrap(); 88 | 89 | let entry = 
get_dir_entry_follow("test_data/links", "link-f", Follow::Never); 90 | assert!(!matcher.matches(&entry, &mut deps.new_matcher_io())); 91 | 92 | let entry = get_dir_entry_follow("test_data/links", "abbbc", Follow::Never); 93 | assert!(matcher.matches(&entry, &mut deps.new_matcher_io())); 94 | 95 | let entry = get_dir_entry_follow("test_data/links", "link-f", Follow::Roots); 96 | assert!(!matcher.matches(&entry, &mut deps.new_matcher_io())); 97 | 98 | let entry = get_dir_entry_follow("test_data/links", "abbbc", Follow::Roots); 99 | assert!(matcher.matches(&entry, &mut deps.new_matcher_io())); 100 | 101 | let entry = get_dir_entry_follow("test_data/links", "link-f", Follow::Always); 102 | assert!(matcher.matches(&entry, &mut deps.new_matcher_io())); 103 | 104 | let entry = get_dir_entry_follow("test_data/links", "abbbc", Follow::Always); 105 | assert!(matcher.matches(&entry, &mut deps.new_matcher_io())); 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /src/find/matchers/size.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 
/// Units accepted as a suffix on a `-size` argument.
#[derive(Clone, Copy, Debug)]
enum Unit {
    Byte,
    TwoByteWord,
    Block,
    KibiByte,
    MebiByte,
    GibiByte,
}

impl FromStr for Unit {
    type Err = Box<dyn Error>;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "c" => Ok(Self::Byte),
            "w" => Ok(Self::TwoByteWord),
            // No suffix means 512-byte blocks.
            "" | "b" => Ok(Self::Block),
            "k" => Ok(Self::KibiByte),
            "M" => Ok(Self::MebiByte),
            "G" => Ok(Self::GibiByte),
            _ => Err(From::from(format!(
                // NOTE(review): "<nothing>" reconstructed from upstream; the
                // scraped source lost the angle-bracketed token — confirm.
                "Invalid suffix {s} for -size. Only allowed \
                 values are <nothing>, b, c, w, k, M or G"
            ))),
        }
    }
}

/// Converts a byte count into a count of `unit`-sized chunks, rounding up
/// (e.g. 1..=1024 bytes is 1 KibiByte, 1025..=2048 bytes is 2).
fn byte_size_to_unit_size(unit: Unit, byte_size: u64) -> u64 {
    let shift = match unit {
        Unit::Byte => 0,
        Unit::TwoByteWord => 1,
        Unit::Block => 9,
        Unit::KibiByte => 10,
        Unit::MebiByte => 20,
        Unit::GibiByte => 30,
    };
    match (byte_size, shift) {
        // Zero short-circuits to avoid the underflow in (n - 1) below.
        (0, _) => 0,
        // Byte-sized units need no arithmetic at all.
        (n, 0) => n,
        // Round up: subtract one, shift, add one back.
        (n, s) => ((n - 1) >> s) + 1,
    }
}
67 | pub struct SizeMatcher { 68 | value_to_match: ComparableValue, 69 | unit: Unit, 70 | } 71 | 72 | impl SizeMatcher { 73 | pub fn new( 74 | value_to_match: ComparableValue, 75 | suffix_string: &str, 76 | ) -> Result> { 77 | Ok(Self { 78 | unit: suffix_string.parse()?, 79 | value_to_match, 80 | }) 81 | } 82 | } 83 | 84 | impl Matcher for SizeMatcher { 85 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 86 | match file_info.metadata() { 87 | Ok(metadata) => self 88 | .value_to_match 89 | .matches(byte_size_to_unit_size(self.unit, metadata.len())), 90 | Err(e) => { 91 | writeln!( 92 | &mut stderr(), 93 | "Error getting file size for {}: {}", 94 | file_info.path().to_string_lossy(), 95 | e 96 | ) 97 | .unwrap(); 98 | false 99 | } 100 | } 101 | } 102 | } 103 | 104 | #[cfg(test)] 105 | mod tests { 106 | use super::*; 107 | use crate::find::matchers::tests::get_dir_entry_for; 108 | use crate::find::tests::FakeDependencies; 109 | 110 | #[test] 111 | fn test_byte_size_to_unit_size() { 112 | assert_eq!(byte_size_to_unit_size(Unit::KibiByte, 0), 0); 113 | assert_eq!(byte_size_to_unit_size(Unit::KibiByte, 1), 1); 114 | assert_eq!(byte_size_to_unit_size(Unit::KibiByte, 1024), 1); 115 | assert_eq!(byte_size_to_unit_size(Unit::KibiByte, 1025), 2); 116 | assert_eq!(byte_size_to_unit_size(Unit::Byte, 1025), 1025); 117 | assert_eq!(byte_size_to_unit_size(Unit::TwoByteWord, 1025), 513); 118 | assert_eq!(byte_size_to_unit_size(Unit::Block, 1025), 3); 119 | assert_eq!(byte_size_to_unit_size(Unit::KibiByte, 1025), 2); 120 | assert_eq!(byte_size_to_unit_size(Unit::MebiByte, 1024 * 1024 + 1), 2); 121 | assert_eq!( 122 | byte_size_to_unit_size(Unit::GibiByte, 1024 * 1024 * 1024 + 1), 123 | 2 124 | ); 125 | } 126 | 127 | #[test] 128 | fn unit_from_string() { 129 | assert_eq!(byte_size_to_unit_size("c".parse().unwrap(), 2), 2); 130 | assert_eq!(byte_size_to_unit_size("w".parse().unwrap(), 3), 2); 131 | assert_eq!(byte_size_to_unit_size("b".parse().unwrap(), 513), 
2); 132 | assert_eq!(byte_size_to_unit_size("".parse().unwrap(), 513), 2); 133 | assert_eq!(byte_size_to_unit_size("k".parse().unwrap(), 1025), 2); 134 | assert_eq!( 135 | byte_size_to_unit_size("M".parse().unwrap(), 1024 * 1024 + 1), 136 | 2 137 | ); 138 | assert_eq!( 139 | byte_size_to_unit_size("G".parse().unwrap(), 2024 * 1024 * 1024 + 1), 140 | 2 141 | ); 142 | } 143 | 144 | #[test] 145 | fn size_matcher_bad_unit() { 146 | if let Err(e) = SizeMatcher::new(ComparableValue::EqualTo(2), "xyz") { 147 | assert!( 148 | e.to_string().contains("Invalid suffix") && e.to_string().contains("xyz"), 149 | "bad description: {e}" 150 | ); 151 | } else { 152 | panic!("parsing a unit string should fail"); 153 | } 154 | } 155 | 156 | #[test] 157 | fn size_matcher() { 158 | let file_info = get_dir_entry_for("./test_data/size", "512bytes"); 159 | 160 | let equal_to_2_blocks = SizeMatcher::new(ComparableValue::EqualTo(2), "b").unwrap(); 161 | let equal_to_1_blocks = SizeMatcher::new(ComparableValue::EqualTo(1), "b").unwrap(); 162 | let deps = FakeDependencies::new(); 163 | 164 | assert!( 165 | !equal_to_2_blocks.matches(&file_info, &mut deps.new_matcher_io()), 166 | "512-byte file should not match size of 2 blocks" 167 | ); 168 | assert!( 169 | equal_to_1_blocks.matches(&file_info, &mut deps.new_matcher_io()), 170 | "512-byte file should match size of 1 block" 171 | ); 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /src/find/matchers/stat.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Tavian Barnes 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use std::os::unix::fs::MetadataExt; 8 | 9 | use super::{ComparableValue, Matcher, MatcherIO, WalkEntry}; 10 | 11 | /// Inode number matcher. 
12 | pub struct InodeMatcher { 13 | ino: ComparableValue, 14 | } 15 | 16 | impl InodeMatcher { 17 | pub fn new(ino: ComparableValue) -> Self { 18 | Self { ino } 19 | } 20 | } 21 | 22 | impl Matcher for InodeMatcher { 23 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 24 | match file_info.metadata() { 25 | Ok(metadata) => self.ino.matches(metadata.ino()), 26 | Err(_) => false, 27 | } 28 | } 29 | } 30 | 31 | /// Link count matcher. 32 | pub struct LinksMatcher { 33 | nlink: ComparableValue, 34 | } 35 | 36 | impl LinksMatcher { 37 | pub fn new(nlink: ComparableValue) -> Self { 38 | Self { nlink } 39 | } 40 | } 41 | 42 | impl Matcher for LinksMatcher { 43 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 44 | match file_info.metadata() { 45 | Ok(metadata) => self.nlink.matches(metadata.nlink()), 46 | Err(_) => false, 47 | } 48 | } 49 | } 50 | 51 | #[cfg(test)] 52 | #[cfg(unix)] 53 | mod tests { 54 | use super::*; 55 | 56 | use crate::find::matchers::tests::get_dir_entry_for; 57 | use crate::find::tests::FakeDependencies; 58 | 59 | #[test] 60 | fn inode_matcher() { 61 | let file_info = get_dir_entry_for("test_data/simple", "abbbc"); 62 | let metadata = file_info.metadata().unwrap(); 63 | let deps = FakeDependencies::new(); 64 | 65 | let matcher = InodeMatcher::new(ComparableValue::EqualTo(metadata.ino())); 66 | assert!( 67 | matcher.matches(&file_info, &mut deps.new_matcher_io()), 68 | "inode number should match" 69 | ); 70 | 71 | let matcher = InodeMatcher::new(ComparableValue::LessThan(metadata.ino())); 72 | assert!( 73 | !matcher.matches(&file_info, &mut deps.new_matcher_io()), 74 | "inode number should not match" 75 | ); 76 | 77 | let matcher = InodeMatcher::new(ComparableValue::MoreThan(metadata.ino())); 78 | assert!( 79 | !matcher.matches(&file_info, &mut deps.new_matcher_io()), 80 | "inode number should not match" 81 | ); 82 | } 83 | 84 | #[test] 85 | fn links_matcher() { 86 | let file_info = 
get_dir_entry_for("test_data/simple", "abbbc"); 87 | let deps = FakeDependencies::new(); 88 | 89 | let matcher = LinksMatcher::new(ComparableValue::EqualTo(1)); 90 | assert!( 91 | matcher.matches(&file_info, &mut deps.new_matcher_io()), 92 | "link count should match" 93 | ); 94 | 95 | let matcher = LinksMatcher::new(ComparableValue::LessThan(1)); 96 | assert!( 97 | !matcher.matches(&file_info, &mut deps.new_matcher_io()), 98 | "link count should not match" 99 | ); 100 | 101 | let matcher = LinksMatcher::new(ComparableValue::MoreThan(1)); 102 | assert!( 103 | !matcher.matches(&file_info, &mut deps.new_matcher_io()), 104 | "link count should not match" 105 | ); 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /src/find/matchers/user.rs: -------------------------------------------------------------------------------- 1 | // This file is part of the uutils findutils package. 2 | // 3 | // For the full copyright and license information, please view the LICENSE 4 | // file that was distributed with this source code. 
5 | 6 | use super::{ComparableValue, Matcher, MatcherIO, WalkEntry}; 7 | 8 | #[cfg(unix)] 9 | use nix::unistd::User; 10 | #[cfg(unix)] 11 | use std::os::unix::fs::MetadataExt; 12 | 13 | pub struct UserMatcher { 14 | uid: ComparableValue, 15 | } 16 | 17 | impl UserMatcher { 18 | #[cfg(unix)] 19 | pub fn from_user_name(user: &str) -> Option { 20 | // get uid from user name 21 | let user = User::from_name(user).ok()??; 22 | let uid = user.uid.as_raw(); 23 | Some(Self::from_uid(uid)) 24 | } 25 | 26 | pub fn from_uid(uid: u32) -> Self { 27 | Self::from_comparable(ComparableValue::EqualTo(uid as u64)) 28 | } 29 | 30 | pub fn from_comparable(uid: ComparableValue) -> Self { 31 | Self { uid } 32 | } 33 | 34 | #[cfg(windows)] 35 | pub fn from_user_name(_user: &str) -> Option { 36 | None 37 | } 38 | } 39 | 40 | impl Matcher for UserMatcher { 41 | #[cfg(unix)] 42 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 43 | match file_info.metadata() { 44 | Ok(metadata) => self.uid.matches(metadata.uid().into()), 45 | Err(_) => false, 46 | } 47 | } 48 | 49 | #[cfg(windows)] 50 | fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 51 | false 52 | } 53 | } 54 | 55 | pub struct NoUserMatcher {} 56 | 57 | impl Matcher for NoUserMatcher { 58 | #[cfg(unix)] 59 | fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 60 | use nix::unistd::Uid; 61 | 62 | if file_info.path().is_symlink() { 63 | return false; 64 | } 65 | 66 | let Ok(metadata) = file_info.metadata() else { 67 | return true; 68 | }; 69 | 70 | let Ok(uid) = User::from_uid(Uid::from_raw(metadata.uid())) else { 71 | return true; 72 | }; 73 | 74 | let Some(_user) = uid else { 75 | return true; 76 | }; 77 | 78 | false 79 | } 80 | 81 | #[cfg(windows)] 82 | fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { 83 | false 84 | } 85 | } 86 | 87 | #[cfg(test)] 88 | mod tests { 89 | #[test] 90 | #[cfg(unix)] 91 | fn test_user_matcher() { 92 | use 
crate::find::matchers::{tests::get_dir_entry_for, user::UserMatcher, Matcher}; 93 | use crate::find::tests::FakeDependencies; 94 | use chrono::Local; 95 | use nix::unistd::{Uid, User}; 96 | use std::fs::File; 97 | use std::os::unix::fs::MetadataExt; 98 | use tempfile::Builder; 99 | 100 | let deps = FakeDependencies::new(); 101 | let mut matcher_io = deps.new_matcher_io(); 102 | 103 | let temp_dir = Builder::new().prefix("user_matcher").tempdir().unwrap(); 104 | let foo_path = temp_dir.path().join("foo"); 105 | let _ = File::create(foo_path).expect("create temp file"); 106 | let file_info = get_dir_entry_for(&temp_dir.path().to_string_lossy(), "foo"); 107 | let file_uid = file_info.metadata().unwrap().uid(); 108 | let file_user = User::from_uid(Uid::from_raw(file_uid)) 109 | .unwrap() 110 | .unwrap() 111 | .name; 112 | 113 | let matcher = UserMatcher::from_user_name(file_user.as_str()).expect("user should exist"); 114 | assert!( 115 | matcher.matches(&file_info, &mut matcher_io), 116 | "user should be the same" 117 | ); 118 | 119 | // Testing a non-existent group name 120 | let time_string = Local::now().format("%Y%m%d%H%M%S").to_string(); 121 | let matcher = UserMatcher::from_user_name(time_string.as_str()); 122 | assert!( 123 | matcher.is_none(), 124 | "user {} should not be the same", 125 | time_string 126 | ); 127 | 128 | // Testing user id 129 | let matcher = UserMatcher::from_uid(file_uid); 130 | assert!( 131 | matcher.matches(&file_info, &mut matcher_io), 132 | "user id should match" 133 | ); 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 
6 | 7 | pub mod find; 8 | pub mod xargs; 9 | -------------------------------------------------------------------------------- /src/testing/commandline/main.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use std::env; 8 | use std::fs::{self, File, OpenOptions}; 9 | use std::io::{stdin, stdout, Read, Write}; 10 | use std::path::PathBuf; 11 | 12 | fn usage() -> ! { 13 | println!("Simple command-line app just used for testing -exec flags!"); 14 | std::process::exit(2); 15 | } 16 | 17 | enum ExitWith { 18 | Failure, 19 | UrgentFailure, 20 | #[cfg(unix)] 21 | Signal, 22 | } 23 | 24 | #[derive(Default)] 25 | struct Config { 26 | exit_with: Option, 27 | print_stdin: bool, 28 | no_print_cwd: bool, 29 | sort: bool, 30 | destination_dir: Option, 31 | } 32 | 33 | fn open_file(destination_dir: &str) -> File { 34 | let mut file_number = fs::read_dir(destination_dir) 35 | .expect("failed to read destination") 36 | .count(); 37 | 38 | loop { 39 | file_number += 1; 40 | let mut file_path: PathBuf = PathBuf::from(destination_dir); 41 | file_path.push(format!("{file_number}.txt")); 42 | if let Ok(f) = OpenOptions::new() 43 | .write(true) 44 | .create_new(true) 45 | .open(file_path) 46 | { 47 | return f; 48 | } 49 | } 50 | } 51 | 52 | fn write_content(mut f: impl Write, config: &Config, args: &[String]) { 53 | if !config.no_print_cwd { 54 | writeln!(f, "cwd={}", env::current_dir().unwrap().to_string_lossy()) 55 | .expect("failed to write to file"); 56 | } 57 | 58 | if config.print_stdin { 59 | let mut s = String::new(); 60 | stdin() 61 | .read_to_string(&mut s) 62 | .expect("failed to read from stdin"); 63 | writeln!(f, "stdin={}", s.trim()).expect("failed to write to file"); 64 | } 65 | 66 | writeln!(f, "args=").expect("failed to write to 
file"); 67 | 68 | // first two args are going to be the path to this executable and 69 | // the destination_dir we want to write to. Don't write either of those 70 | // as they'll be non-deterministic. 71 | for arg in &args[2..] { 72 | writeln!(f, "{arg}").expect("failed to write to file"); 73 | } 74 | } 75 | 76 | fn main() { 77 | let mut args = env::args().collect::>(); 78 | if args.len() < 2 || args[1] == "-h" || args[1] == "--help" { 79 | usage(); 80 | } 81 | let mut config = Config { 82 | destination_dir: if args[1] == "-" { 83 | None 84 | } else { 85 | Some(args[1].clone()) 86 | }, 87 | ..Default::default() 88 | }; 89 | 90 | for arg in &args[2..] { 91 | if arg.starts_with("--") { 92 | match arg.as_ref() { 93 | "--exit_with_failure" => { 94 | config.exit_with = Some(ExitWith::Failure); 95 | } 96 | "--exit_with_urgent_failure" => { 97 | config.exit_with = Some(ExitWith::UrgentFailure); 98 | } 99 | #[cfg(unix)] 100 | "--exit_with_signal" => { 101 | config.exit_with = Some(ExitWith::Signal); 102 | } 103 | "--no_print_cwd" => { 104 | config.no_print_cwd = true; 105 | } 106 | "--print_stdin" => { 107 | config.print_stdin = true; 108 | } 109 | "--sort" => { 110 | config.sort = true; 111 | } 112 | _ => { 113 | usage(); 114 | } 115 | } 116 | } 117 | } 118 | 119 | if config.sort { 120 | args[2..].sort(); 121 | } 122 | 123 | if let Some(destination_dir) = &config.destination_dir { 124 | write_content(open_file(destination_dir), &config, &args); 125 | } else { 126 | write_content(stdout(), &config, &args); 127 | } 128 | 129 | match config.exit_with { 130 | None => std::process::exit(0), 131 | Some(ExitWith::Failure) => std::process::exit(2), 132 | Some(ExitWith::UrgentFailure) => std::process::exit(255), 133 | #[cfg(unix)] 134 | Some(ExitWith::Signal) => unsafe { 135 | uucore::libc::raise(uucore::libc::SIGINT); 136 | }, 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/xargs/main.rs: 
-------------------------------------------------------------------------------- 1 | // Copyright 2021 Collabora, Ltd. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | fn main() { 8 | let args = std::env::args().collect::>(); 9 | std::process::exit(findutils::xargs::xargs_main( 10 | &args 11 | .iter() 12 | .map(std::convert::AsRef::as_ref) 13 | .collect::>(), 14 | )) 15 | } 16 | -------------------------------------------------------------------------------- /test_data/depth/1/2/3/f3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/depth/1/2/3/f3 -------------------------------------------------------------------------------- /test_data/depth/1/2/f2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/depth/1/2/f2 -------------------------------------------------------------------------------- /test_data/depth/1/f1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/depth/1/f1 -------------------------------------------------------------------------------- /test_data/depth/f0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/depth/f0 -------------------------------------------------------------------------------- /test_data/links/abbbc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/links/abbbc -------------------------------------------------------------------------------- /test_data/links/subdir/test: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/links/subdir/test -------------------------------------------------------------------------------- /test_data/simple/abbbc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/simple/abbbc -------------------------------------------------------------------------------- /test_data/simple/subdir/ABBBC: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uutils/findutils/842648223980d67dc45647a70e61ea0ea1c45fe3/test_data/simple/subdir/ABBBC -------------------------------------------------------------------------------- /test_data/size/512bytes: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/common/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | // As this module is included by all the integration tests, any function used 8 | // in one test but not another can cause a dead code warning. 
9 | #[allow(dead_code)] 10 | pub mod test_helpers; 11 | -------------------------------------------------------------------------------- /tests/common/test_helpers.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | use std::cell::RefCell; 8 | use std::env; 9 | use std::io::{Cursor, Read, Write}; 10 | use std::path::Path; 11 | use std::time::SystemTime; 12 | 13 | use findutils::find::matchers::{Follow, MatcherIO, WalkEntry}; 14 | use findutils::find::Dependencies; 15 | 16 | /// A copy of `find::tests::FakeDependencies`. 17 | /// TODO: find out how to share #[cfg(test)] functions/structs between unit 18 | /// and integration tests. 19 | pub struct FakeDependencies { 20 | pub output: RefCell>>, 21 | now: SystemTime, 22 | } 23 | 24 | impl FakeDependencies { 25 | pub fn new() -> Self { 26 | Self { 27 | output: RefCell::new(Cursor::new(Vec::::new())), 28 | now: SystemTime::now(), 29 | } 30 | } 31 | 32 | pub fn new_matcher_io(&self) -> MatcherIO<'_> { 33 | MatcherIO::new(self) 34 | } 35 | 36 | pub fn get_output_as_string(&self) -> String { 37 | let mut cursor = self.output.borrow_mut(); 38 | cursor.set_position(0); 39 | let mut contents = String::new(); 40 | cursor.read_to_string(&mut contents).unwrap(); 41 | contents 42 | } 43 | } 44 | 45 | impl Dependencies for FakeDependencies { 46 | fn get_output(&self) -> &RefCell { 47 | &self.output 48 | } 49 | 50 | fn now(&self) -> SystemTime { 51 | self.now 52 | } 53 | } 54 | 55 | pub fn path_to_testing_commandline() -> String { 56 | let mut path_to_use = env::current_exe() 57 | // this will be something along the lines of /my/homedir/findutils/target/debug/deps/findutils-5532804878869ef1 58 | .expect("can't find path of this executable") 59 | .parent() 60 | .expect("can't find parent 
directory of this executable") 61 | .to_path_buf(); 62 | // and we want /my/homedir/findutils/target/debug/testing-commandline 63 | if path_to_use.ends_with("deps") { 64 | path_to_use.pop(); 65 | } 66 | path_to_use = path_to_use.join("testing-commandline"); 67 | path_to_use.to_string_lossy().to_string() 68 | } 69 | 70 | #[cfg(windows)] 71 | /// A copy of find::tests::fix_up_slashes. 72 | /// TODO: find out how to share #[cfg(test)] functions/structs between unit 73 | /// and integration tests. 74 | pub fn fix_up_slashes(path: &str) -> String { 75 | path.replace("/", "\\") 76 | } 77 | 78 | #[cfg(not(windows))] 79 | pub fn fix_up_slashes(path: &str) -> String { 80 | path.to_string() 81 | } 82 | 83 | /// A copy of `find::matchers::tests::get_dir_entry_for`. 84 | /// TODO: find out how to share #[cfg(test)] functions/structs between unit 85 | /// and integration tests. 86 | pub fn get_dir_entry_for(root: &str, path: &str) -> WalkEntry { 87 | let root = fix_up_slashes(root); 88 | let root = Path::new(&root); 89 | 90 | let path = fix_up_slashes(path); 91 | let path = if path.is_empty() { 92 | root.to_owned() 93 | } else { 94 | root.join(path) 95 | }; 96 | 97 | let depth = path.components().count() - root.components().count(); 98 | 99 | WalkEntry::new(path, depth, Follow::Never) 100 | } 101 | -------------------------------------------------------------------------------- /tests/exec_unit_tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | /// ! This file contains what would be normally be unit tests for `find::matchers::exec`. 8 | /// ! But as the tests require running an external executable, they need to be run 9 | /// ! as integration tests so we can ensure that our testing-commandline binary 10 | /// ! has been built. 
11 | use std::env; 12 | use std::fs::File; 13 | use std::io::Read; 14 | use std::path::Path; 15 | use tempfile::Builder; 16 | 17 | use common::test_helpers::{ 18 | fix_up_slashes, get_dir_entry_for, path_to_testing_commandline, FakeDependencies, 19 | }; 20 | use findutils::find::matchers::exec::{MultiExecMatcher, SingleExecMatcher}; 21 | use findutils::find::matchers::{Matcher, MatcherIO}; 22 | 23 | mod common; 24 | 25 | #[test] 26 | fn matching_executes_code() { 27 | let temp_dir = Builder::new() 28 | .prefix("matching_executes_code") 29 | .tempdir() 30 | .unwrap(); 31 | let temp_dir_path = temp_dir.path().to_string_lossy(); 32 | 33 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 34 | let matcher = SingleExecMatcher::new( 35 | &path_to_testing_commandline(), 36 | &[temp_dir_path.as_ref(), "abc", "{}", "xyz"], 37 | false, 38 | ) 39 | .expect("Failed to create matcher"); 40 | let deps = FakeDependencies::new(); 41 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 42 | 43 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 44 | let mut s = String::new(); 45 | f.read_to_string(&mut s) 46 | .expect("failed to read output file"); 47 | assert_eq!( 48 | s, 49 | fix_up_slashes(&format!( 50 | "cwd={}\nargs=\nabc\ntest_data/simple/abbbc\nxyz\n", 51 | env::current_dir().unwrap().to_string_lossy() 52 | )) 53 | ); 54 | } 55 | 56 | #[test] 57 | fn matching_executes_code_in_files_directory() { 58 | let temp_dir = Builder::new() 59 | .prefix("matching_executes_code_in_files_directory") 60 | .tempdir() 61 | .unwrap(); 62 | let temp_dir_path = temp_dir.path().to_string_lossy(); 63 | 64 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 65 | let matcher = SingleExecMatcher::new( 66 | &path_to_testing_commandline(), 67 | &[temp_dir_path.as_ref(), "abc", "{}", "xyz"], 68 | true, 69 | ) 70 | .expect("Failed to create matcher"); 71 | let deps = FakeDependencies::new(); 72 | assert!(matcher.matches(&abbbc, 
&mut deps.new_matcher_io())); 73 | 74 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 75 | let mut s = String::new(); 76 | f.read_to_string(&mut s) 77 | .expect("failed to read output file"); 78 | assert_eq!( 79 | s, 80 | fix_up_slashes(&format!( 81 | "cwd={}/test_data/simple\nargs=\nabc\n./abbbc\nxyz\n", 82 | env::current_dir().unwrap().to_string_lossy() 83 | )) 84 | ); 85 | } 86 | 87 | #[test] 88 | fn matching_embedded_filename() { 89 | let temp_dir = Builder::new() 90 | .prefix("matching_embedded_filename") 91 | .tempdir() 92 | .unwrap(); 93 | let temp_dir_path = temp_dir.path().to_string_lossy(); 94 | 95 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 96 | let matcher = SingleExecMatcher::new( 97 | &path_to_testing_commandline(), 98 | &[temp_dir_path.as_ref(), "abc{}x{}yz"], 99 | false, 100 | ) 101 | .expect("Failed to create matcher"); 102 | let deps = FakeDependencies::new(); 103 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 104 | 105 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 106 | let mut s = String::new(); 107 | f.read_to_string(&mut s) 108 | .expect("failed to read output file"); 109 | assert_eq!( 110 | s, 111 | fix_up_slashes(&format!( 112 | "cwd={}\nargs=\nabctest_data/simple/abbbcxtest_data/simple/abbbcyz\n", 113 | env::current_dir().unwrap().to_string_lossy() 114 | )) 115 | ); 116 | } 117 | 118 | #[test] 119 | /// Running "find . -execdir whatever \;" failed with a No such file or directory error. 120 | /// It's now fixed, and this is a regression test to check that it stays fixed. 
121 | fn execdir_in_current_directory() { 122 | let temp_dir = Builder::new() 123 | .prefix("execdir_in_current_directory") 124 | .tempdir() 125 | .unwrap(); 126 | let temp_dir_path = temp_dir.path().to_string_lossy(); 127 | 128 | let current_dir_entry = get_dir_entry_for(".", ""); 129 | let matcher = SingleExecMatcher::new( 130 | &path_to_testing_commandline(), 131 | &[temp_dir_path.as_ref(), "abc", "{}", "xyz"], 132 | true, 133 | ) 134 | .expect("Failed to create matcher"); 135 | let deps = FakeDependencies::new(); 136 | assert!(matcher.matches(¤t_dir_entry, &mut deps.new_matcher_io())); 137 | 138 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 139 | let mut s = String::new(); 140 | f.read_to_string(&mut s) 141 | .expect("failed to read output file"); 142 | assert_eq!( 143 | s, 144 | fix_up_slashes(&format!( 145 | "cwd={}\nargs=\nabc\n./.\nxyz\n", 146 | env::current_dir().unwrap().to_string_lossy() 147 | )) 148 | ); 149 | } 150 | 151 | #[test] 152 | /// Regression test for "find / -execdir whatever \;" 153 | fn execdir_in_root_directory() { 154 | let temp_dir = Builder::new() 155 | .prefix("execdir_in_root_directory") 156 | .tempdir() 157 | .unwrap(); 158 | let temp_dir_path = temp_dir.path().to_string_lossy(); 159 | 160 | let cwd = env::current_dir().expect("no current directory"); 161 | let root_dir = cwd 162 | .ancestors() 163 | .last() 164 | .expect("current directory has no root"); 165 | let root_dir_entry = get_dir_entry_for(root_dir.to_str().unwrap(), ""); 166 | 167 | let matcher = SingleExecMatcher::new( 168 | &path_to_testing_commandline(), 169 | &[temp_dir_path.as_ref(), "abc", "{}", "xyz"], 170 | true, 171 | ) 172 | .expect("Failed to create matcher"); 173 | let deps = FakeDependencies::new(); 174 | assert!(matcher.matches(&root_dir_entry, &mut deps.new_matcher_io())); 175 | 176 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 177 | let mut s = String::new(); 178 
| f.read_to_string(&mut s) 179 | .expect("failed to read output file"); 180 | assert_eq!( 181 | s, 182 | fix_up_slashes(&format!( 183 | "cwd={}\nargs=\nabc\n{}\nxyz\n", 184 | root_dir.to_string_lossy(), 185 | root_dir.to_string_lossy(), 186 | )) 187 | ); 188 | } 189 | 190 | #[test] 191 | fn matching_fails_if_executable_fails() { 192 | let temp_dir = Builder::new() 193 | .prefix("matching_fails_if_executable_fails") 194 | .tempdir() 195 | .unwrap(); 196 | let temp_dir_path = temp_dir.path().to_string_lossy(); 197 | 198 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 199 | let matcher = SingleExecMatcher::new( 200 | &path_to_testing_commandline(), 201 | &[ 202 | temp_dir_path.as_ref(), 203 | "--exit_with_failure", 204 | "abc", 205 | "{}", 206 | "xyz", 207 | ], 208 | true, 209 | ) 210 | .expect("Failed to create matcher"); 211 | let deps = FakeDependencies::new(); 212 | assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io())); 213 | 214 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 215 | let mut s = String::new(); 216 | f.read_to_string(&mut s) 217 | .expect("failed to read output file"); 218 | assert_eq!( 219 | s, 220 | fix_up_slashes(&format!( 221 | "cwd={}/test_data/simple\nargs=\n--exit_with_failure\nabc\n.\ 222 | /abbbc\nxyz\n", 223 | env::current_dir().unwrap().to_string_lossy() 224 | )) 225 | ); 226 | } 227 | 228 | #[test] 229 | fn matching_multi_executes_code() { 230 | let temp_dir = Builder::new() 231 | .prefix("matching_executes_code") 232 | .tempdir() 233 | .unwrap(); 234 | let temp_dir_path = temp_dir.path().to_string_lossy(); 235 | 236 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 237 | let matcher = MultiExecMatcher::new( 238 | &path_to_testing_commandline(), 239 | &[temp_dir_path.as_ref(), "abc"], 240 | false, 241 | ) 242 | .expect("Failed to create matcher"); 243 | let deps = FakeDependencies::new(); 244 | let mut matcher_io = MatcherIO::new(&deps); 245 | 
assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 246 | matcher.finished(&mut matcher_io); 247 | 248 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 249 | let mut s = String::new(); 250 | f.read_to_string(&mut s) 251 | .expect("failed to read output file"); 252 | assert_eq!( 253 | s, 254 | fix_up_slashes(&format!( 255 | "cwd={}\nargs=\nabc\ntest_data/simple/abbbc\n", 256 | env::current_dir().unwrap().to_string_lossy() 257 | )) 258 | ); 259 | } 260 | 261 | #[test] 262 | fn execdir_multi_in_current_directory() { 263 | let temp_dir = Builder::new() 264 | .prefix("execdir_in_current_directory") 265 | .tempdir() 266 | .unwrap(); 267 | let temp_dir_path = temp_dir.path().to_string_lossy(); 268 | 269 | let current_dir_entry = get_dir_entry_for(".", ""); 270 | let matcher = MultiExecMatcher::new( 271 | &path_to_testing_commandline(), 272 | &[temp_dir_path.as_ref(), "abc"], 273 | true, 274 | ) 275 | .expect("Failed to create matcher"); 276 | let deps = FakeDependencies::new(); 277 | let mut matcher_io = MatcherIO::new(&deps); 278 | assert!(matcher.matches(¤t_dir_entry, &mut deps.new_matcher_io())); 279 | matcher.finished_dir(Path::new(""), &mut matcher_io); 280 | matcher.finished(&mut matcher_io); 281 | 282 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 283 | let mut s = String::new(); 284 | f.read_to_string(&mut s) 285 | .expect("failed to read output file"); 286 | assert_eq!( 287 | s, 288 | fix_up_slashes(&format!( 289 | "cwd={}\nargs=\nabc\n./.\n", 290 | env::current_dir().unwrap().to_string_lossy() 291 | )) 292 | ); 293 | } 294 | 295 | #[test] 296 | fn multi_set_exit_code_if_executable_fails() { 297 | let temp_dir = Builder::new() 298 | .prefix("multi_set_exit_code_if_executable_fails") 299 | .tempdir() 300 | .unwrap(); 301 | let temp_dir_path = temp_dir.path().to_string_lossy(); 302 | 303 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 304 | let 
matcher = MultiExecMatcher::new( 305 | &path_to_testing_commandline(), 306 | &[temp_dir_path.as_ref(), "--exit_with_failure", "abc"], 307 | true, 308 | ) 309 | .expect("Failed to create matcher"); 310 | let deps = FakeDependencies::new(); 311 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 312 | let mut matcher_io = deps.new_matcher_io(); 313 | matcher.finished_dir(Path::new("test_data/simple"), &mut matcher_io); 314 | assert!(matcher_io.exit_code() == 1); 315 | 316 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 317 | let mut s = String::new(); 318 | f.read_to_string(&mut s) 319 | .expect("failed to read output file"); 320 | assert_eq!( 321 | s, 322 | fix_up_slashes(&format!( 323 | "cwd={}/test_data/simple\nargs=\n--exit_with_failure\nabc\n./abbbc\n", 324 | env::current_dir().unwrap().to_string_lossy() 325 | )) 326 | ); 327 | } 328 | 329 | #[test] 330 | fn multi_set_exit_code_if_command_fails() { 331 | let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); 332 | let matcher = MultiExecMatcher::new("1337", &["abc"], true).expect("Failed to create matcher"); 333 | let deps = FakeDependencies::new(); 334 | assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); 335 | let mut matcher_io = deps.new_matcher_io(); 336 | matcher.finished_dir(Path::new("test_data/simple"), &mut matcher_io); 337 | assert!(matcher_io.exit_code() == 1); 338 | } 339 | -------------------------------------------------------------------------------- /tests/find_exec_tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google Inc. 2 | // 3 | // Use of this source code is governed by a MIT-style 4 | // license that can be found in the LICENSE file or at 5 | // https://opensource.org/licenses/MIT. 6 | 7 | /// ! This file contains what would be normally be unit tests for `find::find_main` 8 | /// ! related to -exec[dir] and ok[dir] clauses. 9 | /// ! 
But as the tests require running an external executable, they need to be run 10 | /// ! as integration tests so we can ensure that our testing-commandline binary 11 | /// ! has been built. 12 | use std::env; 13 | use std::fs::File; 14 | use std::io::Read; 15 | use tempfile::Builder; 16 | 17 | use common::test_helpers::{fix_up_slashes, path_to_testing_commandline, FakeDependencies}; 18 | use findutils::find::find_main; 19 | 20 | mod common; 21 | #[test] 22 | fn find_exec() { 23 | let temp_dir = tempfile::Builder::new() 24 | .prefix("find_exec") 25 | .tempdir() 26 | .unwrap(); 27 | let temp_dir_path = temp_dir.path().to_string_lossy(); 28 | let deps = FakeDependencies::new(); 29 | 30 | let rc = find_main( 31 | &[ 32 | "find", 33 | &fix_up_slashes("./test_data/simple/subdir"), 34 | "-type", 35 | "f", 36 | "-exec", 37 | &path_to_testing_commandline(), 38 | temp_dir_path.as_ref(), 39 | "(", 40 | "{}", 41 | "-o", 42 | ";", 43 | ], 44 | &deps, 45 | ); 46 | 47 | assert_eq!(rc, 0); 48 | // exec has side effects, so we won't output anything unless -print is 49 | // explicitly passed in. 50 | assert_eq!(deps.get_output_as_string(), ""); 51 | 52 | // check the executable ran as expected 53 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 54 | let mut s = String::new(); 55 | f.read_to_string(&mut s) 56 | .expect("failed to read output file"); 57 | assert_eq!( 58 | s, 59 | fix_up_slashes(&format!( 60 | "cwd={}\nargs=\n(\n./test_data/simple/subdir/ABBBC\n-o\n", 61 | env::current_dir().unwrap().to_string_lossy() 62 | )) 63 | ); 64 | } 65 | 66 | #[test] 67 | fn find_execdir() { 68 | let temp_dir = Builder::new().prefix("example").tempdir().unwrap(); 69 | let temp_dir_path = temp_dir.path().to_string_lossy(); 70 | let deps = FakeDependencies::new(); 71 | // only look at files because the "size" of a directory is a system (and filesystem) 72 | // dependent thing and we want these tests to be universal. 
73 | let rc = find_main( 74 | &[ 75 | "find", 76 | &fix_up_slashes("./test_data/simple/subdir"), 77 | "-type", 78 | "f", 79 | "-execdir", 80 | &path_to_testing_commandline(), 81 | temp_dir_path.as_ref(), 82 | ")", 83 | "{}", 84 | ",", 85 | ";", 86 | ], 87 | &deps, 88 | ); 89 | 90 | assert_eq!(rc, 0); 91 | // exec has side effects, so we won't output anything unless -print is 92 | // explicitly passed in. 93 | assert_eq!(deps.get_output_as_string(), ""); 94 | 95 | // check the executable ran as expected 96 | let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file"); 97 | let mut s = String::new(); 98 | f.read_to_string(&mut s) 99 | .expect("failed to read output file"); 100 | assert_eq!( 101 | s, 102 | fix_up_slashes(&format!( 103 | "cwd={}/test_data/simple/subdir\nargs=\n)\n./ABBBC\n,\n", 104 | env::current_dir().unwrap().to_string_lossy() 105 | )) 106 | ); 107 | } 108 | 109 | #[test] 110 | fn find_exec_multi() { 111 | let temp_dir = tempfile::Builder::new() 112 | .prefix("find_exec_multi") 113 | .tempdir() 114 | .unwrap(); 115 | let temp_dir_path = temp_dir.path().to_string_lossy(); 116 | let deps = FakeDependencies::new(); 117 | 118 | let rc = find_main( 119 | &[ 120 | "find", 121 | &fix_up_slashes("./test_data/simple"), 122 | "-type", 123 | "f", 124 | "-exec", 125 | &path_to_testing_commandline(), 126 | temp_dir_path.as_ref(), 127 | "--sort", 128 | "(", 129 | "-o", 130 | "{}", 131 | "+", 132 | ], 133 | &deps, 134 | ); 135 | 136 | assert_eq!(rc, 0); 137 | // exec has side effects, so we won't output anything unless -print is 138 | // explicitly passed in. 
    // (tail of the preceding -exec test, whose header is above this chunk)
    assert_eq!(deps.get_output_as_string(), "");

    // check the executable ran as expected
    let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file");
    let mut s = String::new();
    f.read_to_string(&mut s)
        .expect("failed to read output file");
    assert_eq!(
        s,
        fix_up_slashes(&format!(
            "cwd={}\nargs=\n(\n--sort\n-o\n./test_data/simple/abbbc\n./test_data/simple/subdir/ABBBC\n",
            env::current_dir().unwrap().to_string_lossy()
        ))
    );
}

/// `-execdir ... +` must batch arguments per parent directory: the assertions
/// below show the helper recorded two runs, one with cwd `.../test_data`
/// (receiving `./simple`) and one with cwd `.../test_data/simple` (receiving
/// `./abbbc` and `./subdir`).  Each run of the test helper writes an `N.txt`
/// file into `temp_dir` recording its cwd and argv.
#[test]
fn find_execdir_multi() {
    let temp_dir = Builder::new()
        .prefix("find_execdir_multi")
        .tempdir()
        .unwrap();
    let temp_dir_path = temp_dir.path().to_string_lossy();
    let deps = FakeDependencies::new();
    // only look at files because the "size" of a directory is a system (and filesystem)
    // dependent thing and we want these tests to be universal.
    let rc = find_main(
        &[
            "find",
            &fix_up_slashes("./test_data/simple"),
            "-maxdepth",
            "1",
            "-execdir",
            &path_to_testing_commandline(),
            temp_dir_path.as_ref(),
            "--sort",
            ")",
            "{}",
            "+",
        ],
        &deps,
    );

    assert_eq!(rc, 0);
    // exec has side effects, so we won't output anything unless -print is
    // explicitly passed in.
    assert_eq!(deps.get_output_as_string(), "");

    // check the executable ran as expected
    // First helper run: cwd is the parent of the starting point; the single
    // argument is the starting directory itself.
    let mut f = File::open(temp_dir.path().join("1.txt")).expect("Failed to open output file");
    let mut s = String::new();
    f.read_to_string(&mut s)
        .expect("failed to read output file");
    assert_eq!(
        s,
        fix_up_slashes(&format!(
            "cwd={}/test_data\nargs=\n)\n--sort\n./simple\n",
            env::current_dir().unwrap().to_string_lossy()
        ))
    );
    // Second helper run: cwd descends into test_data/simple and receives that
    // directory's entries as ./-prefixed names.
    let mut f = File::open(temp_dir.path().join("2.txt")).expect("Failed to open output file");
    let mut s = String::new();
    f.read_to_string(&mut s)
        .expect("failed to read output file");
    assert_eq!(
        s,
        fix_up_slashes(&format!(
            "cwd={}/test_data/simple\nargs=\n)\n--sort\n./abbbc\n./subdir\n",
            env::current_dir().unwrap().to_string_lossy()
        ))
    );
}
-------------------------------------------------------------------------------- /util/build-bfs.sh: --------------------------------------------------------------------------------
#!/bin/bash
# Build this crate's `find` and run it through the bfs test suite, then emit a
# machine-readable summary (../bfs-result.json) consumed by compare_bfs_result.py.

set -eo pipefail

if ! test -d ../bfs; then
    echo "Could not find ../bfs"
    echo "git clone https://github.com/tavianator/bfs.git"
    exit 1
fi

# build the rust implementation
cargo build --release
FIND=$(readlink -f target/release/find)

cd ../bfs
./configure NOLIBS=y
# Only the test fixtures are needed from the bfs tree, not bfs itself.
make -j "$(nproc)" bin/tests/{mksock,xtouch}

# Run the GNU find compatibility tests by default
if test "$#" -eq 0; then
    set -- --verbose=tests --gnu --sudo
fi

LOG_FILE=tests.log
# `|| :` keeps `set -e` from aborting the script when tests fail; we still
# want to parse the log and publish the counts.
./tests/tests.sh --bfs="$FIND" "$@" 2>&1 | tee "$LOG_FILE" || :

# Pull the final counters from summary lines shaped like "[PASS]  123 / 456".
PASS=$(sed -En 's|^\[PASS] *([0-9]+) / .*|\1|p' "$LOG_FILE")
SKIP=$(sed -En 's|^\[SKIP] *([0-9]+) / .*|\1|p' "$LOG_FILE")
FAIL=$(sed -En 's|^\[FAIL] *([0-9]+) / .*|\1|p' "$LOG_FILE")

# Default any missing numbers to zero (e.g. no tests skipped)
: ${PASS:=0}
: ${SKIP:=0}
: ${FAIL:=0}

TOTAL=$((PASS + SKIP + FAIL))
# A total of 0 or 1 means the harness itself broke rather than tests failing.
if (( TOTAL <= 1 )); then
    echo "Error in the execution, failing early"
    exit 1
fi

output="BFS tests summary = TOTAL: $TOTAL / PASS: $PASS / SKIP: $SKIP / FAIL: $FAIL"
echo "${output}"
# GitHub Actions annotation so failures are visible on the run page.
if (( FAIL > 0 )); then echo "::warning ::${output}"; fi

# Persist a single {date: {...}} snapshot; compare_bfs_result.py unpacks
# exactly one entry from this file.
jq -n \
    --arg date "$(date --rfc-email)" \
    --arg sha "$GITHUB_SHA" \
    --arg total "$TOTAL" \
    --arg pass "$PASS" \
    --arg skip "$SKIP" \
    --arg fail "$FAIL" \
    '{($date): { sha: $sha, total: $total, pass: $pass, skip: $skip, fail: $fail, }}' > ../bfs-result.json
-------------------------------------------------------------------------------- /util/build-gnu.sh: --------------------------------------------------------------------------------
#!/bin/bash
# Build this crate's `find`/`xargs`, substitute them into a GNU findutils
# checkout, run the GNU test suites, and emit ../gnu-result.json for
# compare_gnu_result.py.

set -e

if test ! -d ../findutils.gnu; then
    echo "Could not find ../findutils.gnu"
    echo "git clone https://git.savannah.gnu.org/git/findutils.git findutils.gnu"
    exit 1
fi

# build the rust implementation
cargo build --release
cp target/release/find ../findutils.gnu/find.rust
cp target/release/xargs ../findutils.gnu/xargs.rust

# Clone and build upstream repo
cd ../findutils.gnu
# `configure` existing is used as the "already bootstrapped" marker so reruns
# skip the expensive bootstrap/configure/make cycle.
if test ! -f configure; then
    ./bootstrap
    ./configure --quiet
    make -j "$(nproc)"
fi

# overwrite the GNU version with the rust impl
cp find.rust find/find
cp xargs.rust xargs/xargs

if test -n "$1"; then
    # if set, run only the test passed
    export RUN_TEST="TESTS=$1"
fi

# Run the tests
# $RUN_TEST is intentionally unquoted: when unset it must expand to nothing.
# `|| :` lets us fall through to log parsing even when tests fail.
make check-TESTS $RUN_TEST || :
make -C find/testsuite check || :
make -C xargs/testsuite check || :

PASS=0
SKIP=0
FAIL=0
XPASS=0
ERROR=0

# DejaGnu-style logs: accumulate counters from lines like
# "# of expected passes  123".  (`\s` is a GNU sed extension.)
LOG_FILE=./find/testsuite/find.log
if test -f "$LOG_FILE"; then
    ((PASS += $(sed -En 's/# of expected passes\s*//p' "$LOG_FILE"))) || :
    ((FAIL += $(sed -En 's/# of unexpected failures\s*//p' "$LOG_FILE"))) || :
fi

LOG_FILE=./xargs/testsuite/xargs.log
if test -f "$LOG_FILE"; then
    ((PASS += $(sed -En 's/# of expected passes\s*//p' "$LOG_FILE"))) || :
    ((FAIL += $(sed -En 's/# of unexpected failures\s*//p' "$LOG_FILE"))) || :
fi

((TOTAL = PASS + FAIL)) || :

# Automake test-suite.log summary block ("# TOTAL: n", "# PASS: n", ...).
# `tr -d '\r'` guards against CRLF logs; `head -n1` takes the first match.
LOG_FILE=./tests/test-suite.log
if test -f "$LOG_FILE"; then
    ((TOTAL += $(sed -n "s/.*# TOTAL: \(.*\)/\1/p" "$LOG_FILE" | tr -d '\r' | head -n1))) || :
    ((PASS += $(sed -n "s/.*# PASS: \(.*\)/\1/p" "$LOG_FILE" | tr -d '\r' | head -n1))) || :
    ((SKIP += $(sed -n "s/.*# SKIP: \(.*\)/\1/p" "$LOG_FILE" | tr -d '\r' | head -n1))) || :
    ((FAIL += $(sed -n "s/.*# FAIL: \(.*\)/\1/p" "$LOG_FILE" | tr -d '\r' | head -n1))) || :
    ((XPASS += $(sed -n "s/.*# XPASS: \(.*\)/\1/p" "$LOG_FILE" | tr -d '\r' | head -n1))) || :
    ((ERROR += $(sed -n "s/.*# ERROR: \(.*\)/\1/p" "$LOG_FILE" | tr -d '\r' | head -n1))) || :
fi

# A total of 0 or 1 means the harness itself broke rather than tests failing.
if ((TOTAL <= 1)); then
    echo "Error in the execution, failing early"
    exit 1
fi

output="GNU tests summary = TOTAL: $TOTAL / PASS: $PASS / FAIL: $FAIL / ERROR: $ERROR"
echo "${output}"
if [[ "$FAIL" -gt 0 || "$ERROR" -gt 0 ]]; then echo "::warning ::${output}" ; fi
# Single {date: {...}} snapshot consumed by compare_gnu_result.py.
jq -n \
    --arg date "$(date --rfc-email)" \
    --arg sha "$GITHUB_SHA" \
    --arg total "$TOTAL" \
    --arg pass "$PASS" \
    --arg skip "$SKIP" \
    --arg fail "$FAIL" \
    --arg xpass "$XPASS" \
    --arg error "$ERROR" \
    '{($date): { sha: $sha, total: $total, pass: $pass, skip: $skip, fail: $fail, xpass: $xpass, error: $error, }}' > ../gnu-result.json
-------------------------------------------------------------------------------- /util/compare_bfs_result.py: --------------------------------------------------------------------------------
#!/usr/bin/python
"""
Compare the current results to the last results gathered from the main branch to highlight
if a PR is making the results better/worse
"""

import json
import sys

NEW = json.load(open("bfs-result.json"))
OLD = json.load(open("latest-bfs-result.json"))

# Extract the specific results from the dicts
# Each result file holds exactly one {date: {...}} entry, so destructure it.
[last] = OLD.values()
[current] = NEW.values()

pass_d = int(current["pass"]) - int(last["pass"])
# Older snapshots may lack "skip", hence the defaulting lookup.
skip_d = int(current["skip"]) - int(last.get("skip", 0))
fail_d = int(current["fail"]) - int(last["fail"])

# Get an annotation to highlight changes
print(f"::warning ::Changes from main: PASS {pass_d:+d} / SKIP {skip_d:+d} / FAIL {fail_d:+d}")

# Check if there are no changes.
25 | if pass_d == 0: 26 | print("::warning ::BFS tests No changes") 27 | 28 | # If results are worse fail the job to draw attention 29 | if pass_d < 0: 30 | sys.exit(1) 31 | -------------------------------------------------------------------------------- /util/compare_gnu_result.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """ 3 | Compare the current results to the last results gathered from the main branch to highlight 4 | if a PR is making the results better/worse 5 | """ 6 | 7 | import json 8 | import sys 9 | 10 | NEW = json.load(open("gnu-result.json")) 11 | OLD = json.load(open("latest-gnu-result.json")) 12 | 13 | # Extract the specific results from the dicts 14 | last = OLD[list(OLD.keys())[0]] 15 | current = NEW[list(NEW.keys())[0]] 16 | 17 | pass_d = int(current["pass"]) - int(last["pass"]) 18 | fail_d = int(current["fail"]) - int(last["fail"]) 19 | error_d = int(current["error"]) - int(last["error"]) 20 | skip_d = int(current["skip"]) - int(last["skip"]) 21 | 22 | # Get an annotation to highlight changes 23 | print( 24 | f"::warning ::Changes from main: PASS {pass_d:+d} / FAIL {fail_d:+d} / ERROR {error_d:+d} / SKIP {skip_d:+d} " 25 | ) 26 | 27 | # Check if there are no changes. 28 | if pass_d == 0: 29 | print("::warning ::Gnu tests No changes") 30 | 31 | # If results are worse fail the job to draw attention 32 | if pass_d < 0: 33 | sys.exit(1) 34 | -------------------------------------------------------------------------------- /util/diff-bfs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu 4 | 5 | export LC_COLLATE=C 6 | 7 | # Extract the failing test lines from log files 8 | failing_tests() { 9 | sed -En 's/^\[FAIL\] (.*[a-z].*)/\1/p' "$1" | sort 10 | } 11 | 12 | comm -3 <(failing_tests "$1") <(failing_tests "$2") | tr '\t' ',' | while IFS=, read old new; do 13 | if [ -n "$old" ]; then 14 | echo "::warning ::Congrats! 
The bfs test $old is now passing!" 15 | fi 16 | if [ -n "$new" ]; then 17 | echo "::error ::bfs test failed: $new. $new is passing on 'main'. Maybe you have to rebase?" 18 | fi 19 | done 20 | -------------------------------------------------------------------------------- /util/diff-gnu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu 4 | 5 | export LC_COLLATE=C 6 | 7 | # Extract the failing test lines from log files 8 | failing_tests() { 9 | sed -En 's/FAIL: ([^,:]*)[,:].*/\1/p' "$1"/{tests,{find,xargs}/testsuite}/*.log | sort 10 | } 11 | 12 | comm -3 <(failing_tests "$1") <(failing_tests "$2") | tr '\t' ',' | while IFS=, read old new foo; do 13 | if [ -n "$old" ]; then 14 | echo "::warning ::Congrats! The GNU test $old is now passing!" 15 | fi 16 | if [ -n "$new" ]; then 17 | echo "::error ::GNU test failed: $new. $new is passing on 'main'. Maybe you have to rebase?" 18 | fi 19 | done 20 | --------------------------------------------------------------------------------