├── rustfmt.toml ├── doc ├── .gitattributes ├── logo.png ├── screencast.sh ├── release-checklist.md ├── logo.svg └── fd.1 ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── config.yml │ ├── feature_request.md │ ├── question.md │ └── bug_report.yaml ├── dependabot.yml └── workflows │ └── CICD.yml ├── .gitignore ├── src ├── error.rs ├── filter │ ├── mod.rs │ ├── owner.rs │ ├── time.rs │ └── size.rs ├── exec │ ├── token.rs │ ├── job.rs │ ├── input.rs │ ├── command.rs │ └── mod.rs ├── filetypes.rs ├── exit_codes.rs ├── regex_helper.rs ├── filesystem.rs ├── dir_entry.rs ├── config.rs ├── output.rs ├── walk.rs └── main.rs ├── LICENSE-MIT ├── Makefile ├── CONTRIBUTING.md ├── Cargo.toml ├── LICENSE-APACHE ├── contrib └── completion │ └── _fd └── tests └── testenv └── mod.rs /rustfmt.toml: -------------------------------------------------------------------------------- 1 | # Defaults are used 2 | -------------------------------------------------------------------------------- /doc/.gitattributes: -------------------------------------------------------------------------------- 1 | * linguist-vendored 2 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [sharkdp, tavianator] 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | /autocomplete/ 3 | **/*.rs.bk 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | -------------------------------------------------------------------------------- /doc/logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Rockyzsu/fd/master/doc/logo.png -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | pub fn print_error(msg: impl Into) { 2 | eprintln!("[fd error]: {}", msg.into()); 3 | } 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Suggest an idea for this project. 4 | title: '' 5 | labels: feature-request 6 | assignees: '' 7 | 8 | --- 9 | -------------------------------------------------------------------------------- /src/filter/mod.rs: -------------------------------------------------------------------------------- 1 | pub use self::size::SizeFilter; 2 | pub use self::time::TimeFilter; 3 | 4 | #[cfg(unix)] 5 | pub use self::owner::OwnerFilter; 6 | 7 | mod size; 8 | mod time; 9 | 10 | #[cfg(unix)] 11 | mod owner; 12 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "cargo" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | - package-ecosystem: "github-actions" 8 | directory: "/" 9 | schedule: 10 | interval: "daily" 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Ask a question about 'fd'. 
4 | title: '' 5 | labels: question 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | **What version of `fd` are you using?** 13 | [paste the output of `fd --version` here] 14 | -------------------------------------------------------------------------------- /src/exec/token.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::{self, Display, Formatter}; 2 | 3 | /// Designates what should be written to a buffer 4 | /// 5 | /// Each `Token` contains either text, or a placeholder variant, which will be used to generate 6 | /// commands after all tokens for a given command template have been collected. 7 | #[derive(Clone, Debug, PartialEq, Eq)] 8 | pub enum Token { 9 | Placeholder, 10 | Basename, 11 | Parent, 12 | NoExt, 13 | BasenameNoExt, 14 | Text(String), 15 | } 16 | 17 | impl Display for Token { 18 | fn fmt(&self, f: &mut Formatter) -> fmt::Result { 19 | match *self { 20 | Token::Placeholder => f.write_str("{}")?, 21 | Token::Basename => f.write_str("{/}")?, 22 | Token::Parent => f.write_str("{//}")?, 23 | Token::NoExt => f.write_str("{.}")?, 24 | Token::BasenameNoExt => f.write_str("{/.}")?, 25 | Token::Text(ref string) => f.write_str(string)?, 26 | } 27 | Ok(()) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017-present The fd developers 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and 
this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /doc/screencast.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Designed to be executed via svg-term from the fd root directory: 3 | # svg-term --command="bash doc/screencast.sh" --out doc/screencast.svg --padding=10 4 | # Then run this (workaround for #1003): 5 | # sed -i '' 's/ $@ 19 | 20 | autocomplete/fd.fish: $(EXE) 21 | $(comp_dir) 22 | $(EXE) --gen-completions fish > $@ 23 | 24 | autocomplete/fd.ps1: $(EXE) 25 | $(comp_dir) 26 | $(EXE) --gen-completions powershell > $@ 27 | 28 | autocomplete/_fd: contrib/completion/_fd 29 | $(comp_dir) 30 | cp $< $@ 31 | 32 | install: $(EXE) completions 33 | install -Dm755 $(EXE) $(DESTDIR)$(bindir)/fd 34 | install -Dm644 autocomplete/fd.bash $(DESTDIR)/$(datadir)/bash-completion/completions/$(exe_name) 35 | install -Dm644 autocomplete/fd.fish $(DESTDIR)/$(datadir)/fish/vendor_completions.d/$(exe_name).fish 36 | install -Dm644 autocomplete/_fd $(DESTDIR)/$(datadir)/zsh/site-functions/_$(exe_name) 37 | install -Dm644 doc/fd.1 $(DESTDIR)/$(datadir)/man/man1/$(exe_name).1 38 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yaml: -------------------------------------------------------------------------------- 1 | name: Bug 
Report 2 | description: Report a bug. 3 | title: "[BUG] " 4 | labels: bug 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Please check out the [troubleshooting section](https://github.com/sharkdp/fd#troubleshooting) first. 10 | - type: checkboxes 11 | attributes: 12 | label: Checks 13 | options: 14 | - label: I have read the troubleshooting section and still think this is a bug. 15 | required: true 16 | - type: textarea 17 | id: bug 18 | attributes: 19 | label: "Describe the bug you encountered:" 20 | validations: 21 | required: true 22 | - type: textarea 23 | id: expected 24 | attributes: 25 | label: "Describe what you expected to happen:" 26 | - type: input 27 | id: version 28 | attributes: 29 | label: "What version of `fd` are you using?" 30 | placeholder: "paste the output of `fd --version` here" 31 | validations: 32 | required: true 33 | - type: textarea 34 | id: os 35 | attributes: 36 | label: Which operating system / distribution are you on? 37 | placeholder: | 38 | Unix: paste the output of `uname -srm` and `lsb_release -a` here. 
39 | Windows: please tell us your Windows version 40 | render: shell 41 | validations: 42 | required: true 43 | -------------------------------------------------------------------------------- /src/filetypes.rs: -------------------------------------------------------------------------------- 1 | use crate::dir_entry; 2 | use crate::filesystem; 3 | 4 | use faccess::PathExt; 5 | 6 | /// Whether or not to show 7 | #[derive(Default)] 8 | pub struct FileTypes { 9 | pub files: bool, 10 | pub directories: bool, 11 | pub symlinks: bool, 12 | pub sockets: bool, 13 | pub pipes: bool, 14 | pub executables_only: bool, 15 | pub empty_only: bool, 16 | } 17 | 18 | impl FileTypes { 19 | pub fn should_ignore(&self, entry: &dir_entry::DirEntry) -> bool { 20 | if let Some(ref entry_type) = entry.file_type() { 21 | (!self.files && entry_type.is_file()) 22 | || (!self.directories && entry_type.is_dir()) 23 | || (!self.symlinks && entry_type.is_symlink()) 24 | || (!self.sockets && filesystem::is_socket(*entry_type)) 25 | || (!self.pipes && filesystem::is_pipe(*entry_type)) 26 | || (self.executables_only && !entry.path().executable()) 27 | || (self.empty_only && !filesystem::is_empty(entry)) 28 | || !(entry_type.is_file() 29 | || entry_type.is_dir() 30 | || entry_type.is_symlink() 31 | || filesystem::is_socket(*entry_type) 32 | || filesystem::is_pipe(*entry_type)) 33 | } else { 34 | true 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## Contributing to *fd* 2 | 3 | **Thank you very much for considering to contribute to this project!** 4 | 5 | We welcome any form of contribution: 6 | 7 | * New issues (feature requests, bug reports, questions, ideas, ...) 8 | * Pull requests (documentation improvements, code improvements, new features, ...) 
9 | 10 | **Note**: Before you take the time to open a pull request, please open a ticket first. This will 11 | give us the chance to discuss any potential changes first. 12 | 13 | ## Add an entry to the changelog 14 | 15 | If your contribution changes the behavior of `fd` (as opposed to a typo-fix 16 | in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md#upcoming-release) file 17 | and describe your changes. This makes the release process much easier and 18 | therefore helps to get your changes into a new `fd` release faster. 19 | 20 | The top of the `CHANGELOG` contains an *"Upcoming release"* section with a few 21 | subsections (Features, Bugfixes, …). Please add your entry to the subsection 22 | that best describes your change. 23 | 24 | Entries follow this format: 25 | ``` 26 | - Short description of what has been changed, see #123 (@user) 27 | ``` 28 | Here, `#123` is the number of the original issue and/or your pull request. 29 | Please replace `@user` by your GitHub username. 
30 | 31 | ## Important links 32 | 33 | * [Open issues](https://github.com/sharkdp/fd/issues) 34 | * [Open pull requests](https://github.com/sharkdp/fd/pulls) 35 | * [Development section in the README](https://github.com/sharkdp/fd#development) 36 | * [fd on crates.io](https://crates.io/crates/fd-find) 37 | * [LICENSE-APACHE](https://github.com/sharkdp/fd/blob/master/LICENSE-APACHE) and [LICENSE-MIT](https://github.com/sharkdp/fd/blob/master/LICENSE-MIT) 38 | -------------------------------------------------------------------------------- /src/exec/job.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Mutex; 2 | 3 | use crossbeam_channel::Receiver; 4 | 5 | use crate::config::Config; 6 | use crate::dir_entry::DirEntry; 7 | use crate::error::print_error; 8 | use crate::exit_codes::{merge_exitcodes, ExitCode}; 9 | use crate::walk::WorkerResult; 10 | 11 | use super::CommandSet; 12 | 13 | /// An event loop that listens for inputs from the `rx` receiver. Each received input will 14 | /// generate a command with the supplied command template. The generated command will then 15 | /// be executed, and this process will continue until the receiver's sender has closed. 16 | pub fn job( 17 | rx: Receiver, 18 | cmd: &CommandSet, 19 | out_perm: &Mutex<()>, 20 | config: &Config, 21 | ) -> ExitCode { 22 | // Output should be buffered when only running a single thread 23 | let buffer_output: bool = config.threads > 1; 24 | 25 | let mut results: Vec = Vec::new(); 26 | loop { 27 | // Obtain the next result from the receiver, else if the channel 28 | // has closed, exit from the loop 29 | let dir_entry: DirEntry = match rx.recv() { 30 | Ok(WorkerResult::Entry(dir_entry)) => dir_entry, 31 | Ok(WorkerResult::Error(err)) => { 32 | if config.show_filesystem_errors { 33 | print_error(err.to_string()); 34 | } 35 | continue; 36 | } 37 | Err(_) => break, 38 | }; 39 | 40 | // Generate a command, execute it and store its exit code. 
41 | results.push(cmd.execute( 42 | dir_entry.stripped_path(config), 43 | config.path_separator.as_deref(), 44 | out_perm, 45 | buffer_output, 46 | )) 47 | } 48 | // Returns error in case of any error. 49 | merge_exitcodes(results) 50 | } 51 | 52 | pub fn batch(rx: Receiver, cmd: &CommandSet, config: &Config) -> ExitCode { 53 | let paths = rx 54 | .into_iter() 55 | .filter_map(|worker_result| match worker_result { 56 | WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)), 57 | WorkerResult::Error(err) => { 58 | if config.show_filesystem_errors { 59 | print_error(err.to_string()); 60 | } 61 | None 62 | } 63 | }); 64 | 65 | cmd.execute_batch(paths, config.batch_size, config.path_separator.as_deref()) 66 | } 67 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ["David Peter "] 3 | build = "build.rs" 4 | categories = ["command-line-utilities"] 5 | description = "fd is a simple, fast and user-friendly alternative to find." 
6 | exclude = ["/benchmarks/*"] 7 | homepage = "https://github.com/sharkdp/fd" 8 | keywords = [ 9 | "search", 10 | "find", 11 | "file", 12 | "filesystem", 13 | "tool", 14 | ] 15 | license = "MIT/Apache-2.0" 16 | name = "fd-find" 17 | readme = "README.md" 18 | repository = "https://github.com/sharkdp/fd" 19 | version = "8.7.0" 20 | edition= "2021" 21 | rust-version = "1.70.0" 22 | 23 | [badges.appveyor] 24 | repository = "sharkdp/fd" 25 | 26 | [badges.travis-ci] 27 | repository = "sharkdp/fd" 28 | 29 | [[bin]] 30 | name = "fd" 31 | path = "src/main.rs" 32 | 33 | [build-dependencies] 34 | version_check = "0.9" 35 | 36 | [dependencies] 37 | nu-ansi-term = "0.47" 38 | argmax = "0.3.1" 39 | ignore = "0.4.20" 40 | num_cpus = "1.15" 41 | regex = "1.7.3" 42 | regex-syntax = "0.6" 43 | ctrlc = "3.2" 44 | humantime = "2.1" 45 | globset = "0.4" 46 | anyhow = "1.0" 47 | dirs-next = "2.0" 48 | normpath = "1.1.1" 49 | crossbeam-channel = "0.5.8" 50 | clap_complete = {version = "4.3.0", optional = true} 51 | faccess = "0.2.4" 52 | 53 | [dependencies.clap] 54 | version = "4.1.1" 55 | features = ["suggestions", "color", "wrap_help", "cargo", "unstable-grouped", "derive"] 56 | 57 | [dependencies.chrono] 58 | version = "0.4.23" 59 | default-features = false 60 | features = ["std", "clock"] 61 | 62 | [dependencies.lscolors] 63 | version = "0.14" 64 | default-features = false 65 | features = ["nu-ansi-term"] 66 | 67 | [target.'cfg(unix)'.dependencies] 68 | users = "0.11.0" 69 | nix = { version = "0.26.2", default-features = false, features = ["signal"] } 70 | 71 | [target.'cfg(all(unix, not(target_os = "redox")))'.dependencies] 72 | libc = "0.2" 73 | 74 | # FIXME: Re-enable jemalloc on macOS 75 | # jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS 76 | # Catalina. See https://github.com/sharkdp/fd/issues/498 for details. 
77 | [target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_os = "openbsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies] 78 | jemallocator = {version = "0.5.0", optional = true} 79 | 80 | [dev-dependencies] 81 | diff = "0.1" 82 | tempfile = "3.5" 83 | filetime = "0.2" 84 | test-case = "3.1" 85 | 86 | [profile.release] 87 | lto = true 88 | strip = true 89 | codegen-units = 1 90 | 91 | [features] 92 | use-jemalloc = ["jemallocator"] 93 | completions = ["clap_complete"] 94 | base = ["use-jemalloc"] 95 | default = ["use-jemalloc", "completions"] 96 | -------------------------------------------------------------------------------- /doc/release-checklist.md: -------------------------------------------------------------------------------- 1 | # Release checklist 2 | 3 | This file can be used as-is, or copied into the GitHub PR description which includes 4 | necessary changes for the upcoming release. 5 | 6 | ## Version bump 7 | 8 | - [ ] Create a new branch for the required changes for this release. 9 | - [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`. 10 | Make sure to `git add` the `Cargo.lock` changes as well. 11 | - [ ] Find the current min. supported Rust version by running 12 | `grep '^\s*MIN_SUPPORTED_RUST_VERSION' .github/workflows/CICD.yml`. 13 | - [ ] Update the `fd` version and the min. supported Rust version in `README.md`. 14 | - [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section 15 | to the version of this release. 16 | 17 | ## Pre-release checks and updates 18 | 19 | - [ ] Install the latest version (`cargo install --locked -f --path .`) and make 20 | sure that it is available on the `PATH` (`fd --version` should show the 21 | new version). 22 | - [ ] Review `-h`, `--help`, and the `man` page. 
23 | - [ ] Run `fd -h` and copy the output to the *"Command-line options"* section in 24 | the README 25 | - [ ] Push all changes and wait for CI to succeed (before continuing with the 26 | next section). 27 | - [ ] Optional: manually test the new features and command-line options described 28 | in the `CHANGELOG.md`. 29 | - [ ] Run `cargo publish --dry-run` to make sure that it will succeed later 30 | (after creating the GitHub release). 31 | 32 | ## Release 33 | 34 | - [ ] Merge your release branch (should be a fast-forward merge). 35 | - [ ] Create a tag and push it: `git tag vX.Y.Z; git push origin tag vX.Y.Z`. 36 | This will trigger the deployment via GitHub Actions. 37 | REMINDER: If your `origin` is a fork, don't forget to push to e.g. `upstream` 38 | instead. 39 | - [ ] Go to https://github.com/sharkdp/fd/releases/new to create the new 40 | release. Select the new tag and also use it as the release title. For the 41 | release notes, copy the corresponding section from `CHANGELOG.md` and 42 | possibly add additional remarks for package maintainers. 43 | Publish the release. 44 | - [ ] Check if the binary deployment works (archives and Debian packages should 45 | appear when the CI run *for the Git tag* has finished). 46 | - [ ] Publish to crates.io by running `cargo publish` in a *clean* repository. 47 | One way to do this is to clone a fresh copy. 48 | 49 | ## Post-release 50 | 51 | - [ ] Prepare a new *"Upcoming release"* section at the top of `CHANGELOG.md`. 
52 | Put this at the top: 53 | 54 | # Upcoming release 55 | 56 | ## Features 57 | 58 | 59 | ## Bugfixes 60 | 61 | 62 | ## Changes 63 | 64 | 65 | ## Other 66 | 67 | -------------------------------------------------------------------------------- /src/exit_codes.rs: -------------------------------------------------------------------------------- 1 | use std::process; 2 | 3 | #[cfg(unix)] 4 | use nix::sys::signal::{raise, signal, SigHandler, Signal}; 5 | 6 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 7 | pub enum ExitCode { 8 | Success, 9 | HasResults(bool), 10 | GeneralError, 11 | KilledBySigint, 12 | } 13 | 14 | impl From for i32 { 15 | fn from(code: ExitCode) -> Self { 16 | match code { 17 | ExitCode::Success => 0, 18 | ExitCode::HasResults(has_results) => !has_results as i32, 19 | ExitCode::GeneralError => 1, 20 | ExitCode::KilledBySigint => 130, 21 | } 22 | } 23 | } 24 | 25 | impl ExitCode { 26 | fn is_error(self) -> bool { 27 | i32::from(self) != 0 28 | } 29 | 30 | /// Exit the process with the appropriate code. 31 | pub fn exit(self) -> ! 
{ 32 | #[cfg(unix)] 33 | if self == ExitCode::KilledBySigint { 34 | // Get rid of the SIGINT handler, if present, and raise SIGINT 35 | unsafe { 36 | if signal(Signal::SIGINT, SigHandler::SigDfl).is_ok() { 37 | let _ = raise(Signal::SIGINT); 38 | } 39 | } 40 | } 41 | 42 | process::exit(self.into()) 43 | } 44 | } 45 | 46 | pub fn merge_exitcodes(results: impl IntoIterator) -> ExitCode { 47 | if results.into_iter().any(ExitCode::is_error) { 48 | return ExitCode::GeneralError; 49 | } 50 | ExitCode::Success 51 | } 52 | 53 | #[cfg(test)] 54 | mod tests { 55 | use super::*; 56 | 57 | #[test] 58 | fn success_when_no_results() { 59 | assert_eq!(merge_exitcodes([]), ExitCode::Success); 60 | } 61 | 62 | #[test] 63 | fn general_error_if_at_least_one_error() { 64 | assert_eq!( 65 | merge_exitcodes([ExitCode::GeneralError]), 66 | ExitCode::GeneralError 67 | ); 68 | assert_eq!( 69 | merge_exitcodes([ExitCode::KilledBySigint]), 70 | ExitCode::GeneralError 71 | ); 72 | assert_eq!( 73 | merge_exitcodes([ExitCode::KilledBySigint, ExitCode::Success]), 74 | ExitCode::GeneralError 75 | ); 76 | assert_eq!( 77 | merge_exitcodes([ExitCode::Success, ExitCode::GeneralError]), 78 | ExitCode::GeneralError 79 | ); 80 | assert_eq!( 81 | merge_exitcodes([ExitCode::GeneralError, ExitCode::KilledBySigint]), 82 | ExitCode::GeneralError 83 | ); 84 | } 85 | 86 | #[test] 87 | fn success_if_no_error() { 88 | assert_eq!(merge_exitcodes([ExitCode::Success]), ExitCode::Success); 89 | assert_eq!( 90 | merge_exitcodes([ExitCode::Success, ExitCode::Success]), 91 | ExitCode::Success 92 | ); 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/exec/input.rs: -------------------------------------------------------------------------------- 1 | use std::ffi::{OsStr, OsString}; 2 | use std::path::{Path, PathBuf}; 3 | 4 | use crate::filesystem::strip_current_dir; 5 | 6 | /// Removes the parent component of the path 7 | pub fn basename(path: &Path) -> &OsStr { 8 | 
path.file_name().unwrap_or(path.as_os_str()) 9 | } 10 | 11 | /// Removes the extension from the path 12 | pub fn remove_extension(path: &Path) -> OsString { 13 | let dirname = dirname(path); 14 | let stem = path.file_stem().unwrap_or(path.as_os_str()); 15 | 16 | let path = PathBuf::from(dirname).join(stem); 17 | 18 | strip_current_dir(&path).to_owned().into_os_string() 19 | } 20 | 21 | /// Removes the basename from the path. 22 | pub fn dirname(path: &Path) -> OsString { 23 | path.parent() 24 | .map(|p| { 25 | if p == OsStr::new("") { 26 | OsString::from(".") 27 | } else { 28 | p.as_os_str().to_owned() 29 | } 30 | }) 31 | .unwrap_or_else(|| path.as_os_str().to_owned()) 32 | } 33 | 34 | #[cfg(test)] 35 | mod path_tests { 36 | use super::*; 37 | use std::path::MAIN_SEPARATOR; 38 | 39 | fn correct(input: &str) -> String { 40 | input.replace('/', &MAIN_SEPARATOR.to_string()) 41 | } 42 | 43 | macro_rules! func_tests { 44 | ($($name:ident: $func:ident for $input:expr => $output:expr)+) => { 45 | $( 46 | #[test] 47 | fn $name() { 48 | let input_path = PathBuf::from(&correct($input)); 49 | let output_string = OsString::from(correct($output)); 50 | assert_eq!($func(&input_path), output_string); 51 | } 52 | )+ 53 | } 54 | } 55 | 56 | func_tests! { 57 | remove_ext_simple: remove_extension for "foo.txt" => "foo" 58 | remove_ext_dir: remove_extension for "dir/foo.txt" => "dir/foo" 59 | hidden: remove_extension for ".foo" => ".foo" 60 | remove_ext_utf8: remove_extension for "💖.txt" => "💖" 61 | remove_ext_empty: remove_extension for "" => "" 62 | 63 | basename_simple: basename for "foo.txt" => "foo.txt" 64 | basename_dir: basename for "dir/foo.txt" => "foo.txt" 65 | basename_empty: basename for "" => "" 66 | basename_utf8_0: basename for "💖/foo.txt" => "foo.txt" 67 | basename_utf8_1: basename for "dir/💖.txt" => "💖.txt" 68 | 69 | dirname_simple: dirname for "foo.txt" => "." 
70 | dirname_dir: dirname for "dir/foo.txt" => "dir" 71 | dirname_utf8_0: dirname for "💖/foo.txt" => "💖" 72 | dirname_utf8_1: dirname for "dir/💖.txt" => "dir" 73 | } 74 | 75 | #[test] 76 | #[cfg(windows)] 77 | fn dirname_root() { 78 | assert_eq!(dirname(&PathBuf::from("C:")), OsString::from("C:")); 79 | assert_eq!(dirname(&PathBuf::from("\\")), OsString::from("\\")); 80 | } 81 | 82 | #[test] 83 | #[cfg(not(windows))] 84 | fn dirname_root() { 85 | assert_eq!(dirname(&PathBuf::from("/")), OsString::from("/")); 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /src/exec/command.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::io::Write; 3 | use std::sync::Mutex; 4 | 5 | use argmax::Command; 6 | 7 | use crate::error::print_error; 8 | use crate::exit_codes::ExitCode; 9 | 10 | struct Outputs { 11 | stdout: Vec, 12 | stderr: Vec, 13 | } 14 | struct OutputBuffer<'a> { 15 | output_permission: &'a Mutex<()>, 16 | outputs: Vec, 17 | } 18 | 19 | impl<'a> OutputBuffer<'a> { 20 | fn new(output_permission: &'a Mutex<()>) -> Self { 21 | Self { 22 | output_permission, 23 | outputs: Vec::new(), 24 | } 25 | } 26 | 27 | fn push(&mut self, stdout: Vec, stderr: Vec) { 28 | self.outputs.push(Outputs { stdout, stderr }); 29 | } 30 | 31 | fn write(self) { 32 | // avoid taking the lock if there is nothing to do 33 | if self.outputs.is_empty() { 34 | return; 35 | } 36 | // While this lock is active, this thread will be the only thread allowed 37 | // to write its outputs. 38 | let _lock = self.output_permission.lock().unwrap(); 39 | 40 | let stdout = io::stdout(); 41 | let stderr = io::stderr(); 42 | 43 | let mut stdout = stdout.lock(); 44 | let mut stderr = stderr.lock(); 45 | 46 | for output in self.outputs.iter() { 47 | let _ = stdout.write_all(&output.stdout); 48 | let _ = stderr.write_all(&output.stderr); 49 | } 50 | } 51 | } 52 | 53 | /// Executes a command. 
54 | pub fn execute_commands>>( 55 | cmds: I, 56 | out_perm: &Mutex<()>, 57 | enable_output_buffering: bool, 58 | ) -> ExitCode { 59 | let mut output_buffer = OutputBuffer::new(out_perm); 60 | for result in cmds { 61 | let mut cmd = match result { 62 | Ok(cmd) => cmd, 63 | Err(e) => return handle_cmd_error(None, e), 64 | }; 65 | 66 | // Spawn the supplied command. 67 | let output = if enable_output_buffering { 68 | cmd.output() 69 | } else { 70 | // If running on only one thread, don't buffer output 71 | // Allows for viewing and interacting with intermediate command output 72 | cmd.spawn().and_then(|c| c.wait_with_output()) 73 | }; 74 | 75 | // Then wait for the command to exit, if it was spawned. 76 | match output { 77 | Ok(output) => { 78 | if enable_output_buffering { 79 | output_buffer.push(output.stdout, output.stderr); 80 | } 81 | if output.status.code() != Some(0) { 82 | output_buffer.write(); 83 | return ExitCode::GeneralError; 84 | } 85 | } 86 | Err(why) => { 87 | output_buffer.write(); 88 | return handle_cmd_error(Some(&cmd), why); 89 | } 90 | } 91 | } 92 | output_buffer.write(); 93 | ExitCode::Success 94 | } 95 | 96 | pub fn handle_cmd_error(cmd: Option<&Command>, err: io::Error) -> ExitCode { 97 | match (cmd, err) { 98 | (Some(cmd), err) if err.kind() == io::ErrorKind::NotFound => { 99 | print_error(format!( 100 | "Command not found: {}", 101 | cmd.get_program().to_string_lossy() 102 | )); 103 | ExitCode::GeneralError 104 | } 105 | (_, err) => { 106 | print_error(format!("Problem while executing command: {}", err)); 107 | ExitCode::GeneralError 108 | } 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/regex_helper.rs: -------------------------------------------------------------------------------- 1 | use regex_syntax::hir::Hir; 2 | use regex_syntax::ParserBuilder; 3 | 4 | /// Determine if a regex pattern contains a literal uppercase character. 
5 | pub fn pattern_has_uppercase_char(pattern: &str) -> bool { 6 | let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build(); 7 | 8 | parser 9 | .parse(pattern) 10 | .map(|hir| hir_has_uppercase_char(&hir)) 11 | .unwrap_or(false) 12 | } 13 | 14 | /// Determine if a regex expression contains a literal uppercase character. 15 | fn hir_has_uppercase_char(hir: &Hir) -> bool { 16 | use regex_syntax::hir::*; 17 | 18 | match hir.kind() { 19 | HirKind::Literal(Literal::Unicode(c)) => c.is_uppercase(), 20 | HirKind::Literal(Literal::Byte(b)) => char::from(*b).is_uppercase(), 21 | HirKind::Class(Class::Unicode(ranges)) => ranges 22 | .iter() 23 | .any(|r| r.start().is_uppercase() || r.end().is_uppercase()), 24 | HirKind::Class(Class::Bytes(ranges)) => ranges 25 | .iter() 26 | .any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()), 27 | HirKind::Group(Group { hir, .. }) | HirKind::Repetition(Repetition { hir, .. }) => { 28 | hir_has_uppercase_char(hir) 29 | } 30 | HirKind::Concat(hirs) | HirKind::Alternation(hirs) => { 31 | hirs.iter().any(hir_has_uppercase_char) 32 | } 33 | _ => false, 34 | } 35 | } 36 | 37 | /// Determine if a regex pattern only matches strings starting with a literal dot (hidden files) 38 | pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool { 39 | let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build(); 40 | 41 | parser 42 | .parse(pattern) 43 | .map(|hir| hir_matches_strings_with_leading_dot(&hir)) 44 | .unwrap_or(false) 45 | } 46 | 47 | /// See above. 48 | fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool { 49 | use regex_syntax::hir::*; 50 | 51 | // Note: this only really detects the simplest case where a regex starts with 52 | // "^\\.", i.e. a start text anchor and a literal dot character. There are a lot 53 | // of other patterns that ONLY match hidden files, e.g. ^(\\.foo|\\.bar) which are 54 | // not (yet) detected by this algorithm. 
55 | match hir.kind() { 56 | HirKind::Concat(hirs) => { 57 | let mut hirs = hirs.iter(); 58 | if let Some(hir) = hirs.next() { 59 | if hir.kind() != &HirKind::Anchor(Anchor::StartText) { 60 | return false; 61 | } 62 | } else { 63 | return false; 64 | } 65 | 66 | if let Some(hir) = hirs.next() { 67 | hir.kind() == &HirKind::Literal(Literal::Unicode('.')) 68 | } else { 69 | false 70 | } 71 | } 72 | _ => false, 73 | } 74 | } 75 | 76 | #[test] 77 | fn pattern_has_uppercase_char_simple() { 78 | assert!(pattern_has_uppercase_char("A")); 79 | assert!(pattern_has_uppercase_char("foo.EXE")); 80 | 81 | assert!(!pattern_has_uppercase_char("a")); 82 | assert!(!pattern_has_uppercase_char("foo.exe123")); 83 | } 84 | 85 | #[test] 86 | fn pattern_has_uppercase_char_advanced() { 87 | assert!(pattern_has_uppercase_char("foo.[a-zA-Z]")); 88 | 89 | assert!(!pattern_has_uppercase_char(r"\Acargo")); 90 | assert!(!pattern_has_uppercase_char(r"carg\x6F")); 91 | } 92 | 93 | #[test] 94 | fn matches_strings_with_leading_dot_simple() { 95 | assert!(pattern_matches_strings_with_leading_dot("^\\.gitignore")); 96 | 97 | assert!(!pattern_matches_strings_with_leading_dot("^.gitignore")); 98 | assert!(!pattern_matches_strings_with_leading_dot("\\.gitignore")); 99 | assert!(!pattern_matches_strings_with_leading_dot("^gitignore")); 100 | } 101 | -------------------------------------------------------------------------------- /src/filesystem.rs: -------------------------------------------------------------------------------- 1 | use std::borrow::Cow; 2 | use std::env; 3 | use std::ffi::OsStr; 4 | use std::fs; 5 | use std::io; 6 | #[cfg(any(unix, target_os = "redox"))] 7 | use std::os::unix::fs::FileTypeExt; 8 | use std::path::{Path, PathBuf}; 9 | 10 | use normpath::PathExt; 11 | 12 | use crate::dir_entry; 13 | 14 | pub fn path_absolute_form(path: &Path) -> io::Result { 15 | if path.is_absolute() { 16 | return Ok(path.to_path_buf()); 17 | } 18 | 19 | let path = path.strip_prefix(".").unwrap_or(path); 
20 | env::current_dir().map(|path_buf| path_buf.join(path)) 21 | } 22 | 23 | pub fn absolute_path(path: &Path) -> io::Result { 24 | let path_buf = path_absolute_form(path)?; 25 | 26 | #[cfg(windows)] 27 | let path_buf = Path::new( 28 | path_buf 29 | .as_path() 30 | .to_string_lossy() 31 | .trim_start_matches(r"\\?\"), 32 | ) 33 | .to_path_buf(); 34 | 35 | Ok(path_buf) 36 | } 37 | 38 | pub fn is_existing_directory(path: &Path) -> bool { 39 | // Note: we do not use `.exists()` here, as `.` always exists, even if 40 | // the CWD has been deleted. 41 | path.is_dir() && (path.file_name().is_some() || path.normalize().is_ok()) 42 | } 43 | 44 | pub fn is_empty(entry: &dir_entry::DirEntry) -> bool { 45 | if let Some(file_type) = entry.file_type() { 46 | if file_type.is_dir() { 47 | if let Ok(mut entries) = fs::read_dir(entry.path()) { 48 | entries.next().is_none() 49 | } else { 50 | false 51 | } 52 | } else if file_type.is_file() { 53 | entry.metadata().map(|m| m.len() == 0).unwrap_or(false) 54 | } else { 55 | false 56 | } 57 | } else { 58 | false 59 | } 60 | } 61 | 62 | #[cfg(any(unix, target_os = "redox"))] 63 | pub fn is_socket(ft: fs::FileType) -> bool { 64 | ft.is_socket() 65 | } 66 | 67 | #[cfg(windows)] 68 | pub fn is_socket(_: fs::FileType) -> bool { 69 | false 70 | } 71 | 72 | #[cfg(any(unix, target_os = "redox"))] 73 | pub fn is_pipe(ft: fs::FileType) -> bool { 74 | ft.is_fifo() 75 | } 76 | 77 | #[cfg(windows)] 78 | pub fn is_pipe(_: fs::FileType) -> bool { 79 | false 80 | } 81 | 82 | #[cfg(any(unix, target_os = "redox"))] 83 | pub fn osstr_to_bytes(input: &OsStr) -> Cow<[u8]> { 84 | use std::os::unix::ffi::OsStrExt; 85 | Cow::Borrowed(input.as_bytes()) 86 | } 87 | 88 | #[cfg(windows)] 89 | pub fn osstr_to_bytes(input: &OsStr) -> Cow<[u8]> { 90 | let string = input.to_string_lossy(); 91 | 92 | match string { 93 | Cow::Owned(string) => Cow::Owned(string.into_bytes()), 94 | Cow::Borrowed(string) => Cow::Borrowed(string.as_bytes()), 95 | } 96 | } 97 | 98 | /// 
Remove the `./` prefix from a path. 99 | pub fn strip_current_dir(path: &Path) -> &Path { 100 | path.strip_prefix(".").unwrap_or(path) 101 | } 102 | 103 | /// Default value for the path_separator, mainly for MSYS/MSYS2, which set the MSYSTEM 104 | /// environment variable, and we set fd's path separator to '/' rather than Rust's default of '\'. 105 | /// 106 | /// Returns Some to use a nonstandard path separator, or None to use rust's default on the target 107 | /// platform. 108 | pub fn default_path_separator() -> Option { 109 | if cfg!(windows) { 110 | let msystem = env::var("MSYSTEM").ok()?; 111 | match msystem.as_str() { 112 | "MINGW64" | "MINGW32" | "MSYS" => Some("/".to_owned()), 113 | _ => None, 114 | } 115 | } else { 116 | None 117 | } 118 | } 119 | 120 | #[cfg(test)] 121 | mod tests { 122 | use super::strip_current_dir; 123 | use std::path::Path; 124 | 125 | #[test] 126 | fn strip_current_dir_basic() { 127 | assert_eq!(strip_current_dir(Path::new("./foo")), Path::new("foo")); 128 | assert_eq!(strip_current_dir(Path::new("foo")), Path::new("foo")); 129 | assert_eq!( 130 | strip_current_dir(Path::new("./foo/bar/baz")), 131 | Path::new("foo/bar/baz") 132 | ); 133 | assert_eq!( 134 | strip_current_dir(Path::new("foo/bar/baz")), 135 | Path::new("foo/bar/baz") 136 | ); 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/filter/owner.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{anyhow, Result}; 2 | use std::fs; 3 | 4 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 5 | pub struct OwnerFilter { 6 | uid: Check, 7 | gid: Check, 8 | } 9 | 10 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 11 | enum Check { 12 | Equal(T), 13 | NotEq(T), 14 | Ignore, 15 | } 16 | 17 | impl OwnerFilter { 18 | const IGNORE: Self = OwnerFilter { 19 | uid: Check::Ignore, 20 | gid: Check::Ignore, 21 | }; 22 | 23 | /// Parses an owner constraint 24 | /// Returns an error if the 
string is invalid 25 | /// Returns Ok(None) when string is acceptable but a noop (such as "" or ":") 26 | pub fn from_string(input: &str) -> Result { 27 | let mut it = input.split(':'); 28 | let (fst, snd) = (it.next(), it.next()); 29 | 30 | if it.next().is_some() { 31 | return Err(anyhow!( 32 | "more than one ':' present in owner string '{}'. See 'fd --help'.", 33 | input 34 | )); 35 | } 36 | 37 | let uid = Check::parse(fst, |s| { 38 | s.parse() 39 | .ok() 40 | .or_else(|| users::get_user_by_name(s).map(|user| user.uid())) 41 | .ok_or_else(|| anyhow!("'{}' is not a recognized user name", s)) 42 | })?; 43 | let gid = Check::parse(snd, |s| { 44 | s.parse() 45 | .ok() 46 | .or_else(|| users::get_group_by_name(s).map(|group| group.gid())) 47 | .ok_or_else(|| anyhow!("'{}' is not a recognized group name", s)) 48 | })?; 49 | 50 | Ok(OwnerFilter { uid, gid }) 51 | } 52 | 53 | /// If self is a no-op (ignore both uid and gid) then return `None`, otherwise wrap in a `Some` 54 | pub fn filter_ignore(self) -> Option { 55 | if self == Self::IGNORE { 56 | None 57 | } else { 58 | Some(self) 59 | } 60 | } 61 | 62 | pub fn matches(&self, md: &fs::Metadata) -> bool { 63 | use std::os::unix::fs::MetadataExt; 64 | 65 | self.uid.check(md.uid()) && self.gid.check(md.gid()) 66 | } 67 | } 68 | 69 | impl Check { 70 | fn check(&self, v: T) -> bool { 71 | match self { 72 | Check::Equal(x) => v == *x, 73 | Check::NotEq(x) => v != *x, 74 | Check::Ignore => true, 75 | } 76 | } 77 | 78 | fn parse(s: Option<&str>, f: F) -> Result 79 | where 80 | F: Fn(&str) -> Result, 81 | { 82 | let (s, equality) = match s { 83 | Some("") | None => return Ok(Check::Ignore), 84 | Some(s) if s.starts_with('!') => (&s[1..], false), 85 | Some(s) => (s, true), 86 | }; 87 | 88 | f(s).map(|x| { 89 | if equality { 90 | Check::Equal(x) 91 | } else { 92 | Check::NotEq(x) 93 | } 94 | }) 95 | } 96 | } 97 | 98 | #[cfg(test)] 99 | mod owner_parsing { 100 | use super::OwnerFilter; 101 | 102 | macro_rules! 
owner_tests { 103 | ($($name:ident: $value:expr => $result:pat,)*) => { 104 | $( 105 | #[test] 106 | fn $name() { 107 | let o = OwnerFilter::from_string($value); 108 | match o { 109 | $result => {}, 110 | _ => panic!("{:?} does not match {}", o, stringify!($result)), 111 | } 112 | } 113 | )* 114 | }; 115 | } 116 | 117 | use super::Check::*; 118 | owner_tests! { 119 | empty: "" => Ok(OwnerFilter::IGNORE), 120 | uid_only: "5" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }), 121 | uid_gid: "9:3" => Ok(OwnerFilter { uid: Equal(9), gid: Equal(3) }), 122 | gid_only: ":8" => Ok(OwnerFilter { uid: Ignore, gid: Equal(8) }), 123 | colon_only: ":" => Ok(OwnerFilter::IGNORE), 124 | trailing: "5:" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }), 125 | 126 | uid_negate: "!5" => Ok(OwnerFilter { uid: NotEq(5), gid: Ignore }), 127 | both_negate:"!4:!3" => Ok(OwnerFilter { uid: NotEq(4), gid: NotEq(3) }), 128 | uid_not_gid:"6:!8" => Ok(OwnerFilter { uid: Equal(6), gid: NotEq(8) }), 129 | 130 | more_colons:"3:5:" => Err(_), 131 | only_colons:"::" => Err(_), 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /src/filter/time.rs: -------------------------------------------------------------------------------- 1 | use chrono::{offset::TimeZone, DateTime, Local, NaiveDate}; 2 | 3 | use std::time::SystemTime; 4 | 5 | /// Filter based on time ranges. 
6 | #[derive(Debug, PartialEq, Eq)] 7 | pub enum TimeFilter { 8 | Before(SystemTime), 9 | After(SystemTime), 10 | } 11 | 12 | impl TimeFilter { 13 | fn from_str(ref_time: &SystemTime, s: &str) -> Option { 14 | humantime::parse_duration(s) 15 | .map(|duration| *ref_time - duration) 16 | .ok() 17 | .or_else(|| { 18 | DateTime::parse_from_rfc3339(s) 19 | .map(|dt| dt.into()) 20 | .ok() 21 | .or_else(|| { 22 | NaiveDate::parse_from_str(s, "%F") 23 | .ok() 24 | .and_then(|nd| nd.and_hms_opt(0, 0, 0)) 25 | .and_then(|ndt| Local.from_local_datetime(&ndt).single()) 26 | }) 27 | .or_else(|| Local.datetime_from_str(s, "%F %T").ok()) 28 | .map(|dt| dt.into()) 29 | }) 30 | } 31 | 32 | pub fn before(ref_time: &SystemTime, s: &str) -> Option { 33 | TimeFilter::from_str(ref_time, s).map(TimeFilter::Before) 34 | } 35 | 36 | pub fn after(ref_time: &SystemTime, s: &str) -> Option { 37 | TimeFilter::from_str(ref_time, s).map(TimeFilter::After) 38 | } 39 | 40 | pub fn applies_to(&self, t: &SystemTime) -> bool { 41 | match self { 42 | TimeFilter::Before(limit) => t < limit, 43 | TimeFilter::After(limit) => t > limit, 44 | } 45 | } 46 | } 47 | 48 | #[cfg(test)] 49 | mod tests { 50 | use super::*; 51 | use std::time::Duration; 52 | 53 | #[test] 54 | fn is_time_filter_applicable() { 55 | let ref_time = Local 56 | .datetime_from_str("2010-10-10 10:10:10", "%F %T") 57 | .unwrap() 58 | .into(); 59 | 60 | assert!(TimeFilter::after(&ref_time, "1min") 61 | .unwrap() 62 | .applies_to(&ref_time)); 63 | assert!(!TimeFilter::before(&ref_time, "1min") 64 | .unwrap() 65 | .applies_to(&ref_time)); 66 | 67 | let t1m_ago = ref_time - Duration::from_secs(60); 68 | assert!(!TimeFilter::after(&ref_time, "30sec") 69 | .unwrap() 70 | .applies_to(&t1m_ago)); 71 | assert!(TimeFilter::after(&ref_time, "2min") 72 | .unwrap() 73 | .applies_to(&t1m_ago)); 74 | 75 | assert!(TimeFilter::before(&ref_time, "30sec") 76 | .unwrap() 77 | .applies_to(&t1m_ago)); 78 | assert!(!TimeFilter::before(&ref_time, "2min") 79 | 
.unwrap() 80 | .applies_to(&t1m_ago)); 81 | 82 | let t10s_before = "2010-10-10 10:10:00"; 83 | assert!(!TimeFilter::before(&ref_time, t10s_before) 84 | .unwrap() 85 | .applies_to(&ref_time)); 86 | assert!(TimeFilter::before(&ref_time, t10s_before) 87 | .unwrap() 88 | .applies_to(&t1m_ago)); 89 | 90 | assert!(TimeFilter::after(&ref_time, t10s_before) 91 | .unwrap() 92 | .applies_to(&ref_time)); 93 | assert!(!TimeFilter::after(&ref_time, t10s_before) 94 | .unwrap() 95 | .applies_to(&t1m_ago)); 96 | 97 | let same_day = "2010-10-10"; 98 | assert!(!TimeFilter::before(&ref_time, same_day) 99 | .unwrap() 100 | .applies_to(&ref_time)); 101 | assert!(!TimeFilter::before(&ref_time, same_day) 102 | .unwrap() 103 | .applies_to(&t1m_ago)); 104 | 105 | assert!(TimeFilter::after(&ref_time, same_day) 106 | .unwrap() 107 | .applies_to(&ref_time)); 108 | assert!(TimeFilter::after(&ref_time, same_day) 109 | .unwrap() 110 | .applies_to(&t1m_ago)); 111 | 112 | let ref_time = DateTime::parse_from_rfc3339("2010-10-10T10:10:10+00:00") 113 | .unwrap() 114 | .into(); 115 | let t1m_ago = ref_time - Duration::from_secs(60); 116 | let t10s_before = "2010-10-10T10:10:00+00:00"; 117 | assert!(!TimeFilter::before(&ref_time, t10s_before) 118 | .unwrap() 119 | .applies_to(&ref_time)); 120 | assert!(TimeFilter::before(&ref_time, t10s_before) 121 | .unwrap() 122 | .applies_to(&t1m_ago)); 123 | 124 | assert!(TimeFilter::after(&ref_time, t10s_before) 125 | .unwrap() 126 | .applies_to(&ref_time)); 127 | assert!(!TimeFilter::after(&ref_time, t10s_before) 128 | .unwrap() 129 | .applies_to(&t1m_ago)); 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/dir_entry.rs: -------------------------------------------------------------------------------- 1 | use std::cell::OnceCell; 2 | use std::ffi::OsString; 3 | use std::fs::{FileType, Metadata}; 4 | use std::path::{Path, PathBuf}; 5 | 6 | use lscolors::{Colorable, LsColors, Style}; 7 | 8 | use 
crate::config::Config; 9 | use crate::filesystem::strip_current_dir; 10 | 11 | enum DirEntryInner { 12 | Normal(ignore::DirEntry), 13 | BrokenSymlink(PathBuf), 14 | } 15 | 16 | pub struct DirEntry { 17 | inner: DirEntryInner, 18 | metadata: OnceCell>, 19 | style: OnceCell>, 20 | } 21 | 22 | impl DirEntry { 23 | #[inline] 24 | pub fn normal(e: ignore::DirEntry) -> Self { 25 | Self { 26 | inner: DirEntryInner::Normal(e), 27 | metadata: OnceCell::new(), 28 | style: OnceCell::new(), 29 | } 30 | } 31 | 32 | pub fn broken_symlink(path: PathBuf) -> Self { 33 | Self { 34 | inner: DirEntryInner::BrokenSymlink(path), 35 | metadata: OnceCell::new(), 36 | style: OnceCell::new(), 37 | } 38 | } 39 | 40 | pub fn path(&self) -> &Path { 41 | match &self.inner { 42 | DirEntryInner::Normal(e) => e.path(), 43 | DirEntryInner::BrokenSymlink(pathbuf) => pathbuf.as_path(), 44 | } 45 | } 46 | 47 | pub fn into_path(self) -> PathBuf { 48 | match self.inner { 49 | DirEntryInner::Normal(e) => e.into_path(), 50 | DirEntryInner::BrokenSymlink(p) => p, 51 | } 52 | } 53 | 54 | /// Returns the path as it should be presented to the user. 55 | pub fn stripped_path(&self, config: &Config) -> &Path { 56 | if config.strip_cwd_prefix { 57 | strip_current_dir(self.path()) 58 | } else { 59 | self.path() 60 | } 61 | } 62 | 63 | /// Returns the path as it should be presented to the user. 
64 | pub fn into_stripped_path(self, config: &Config) -> PathBuf { 65 | if config.strip_cwd_prefix { 66 | self.stripped_path(config).to_path_buf() 67 | } else { 68 | self.into_path() 69 | } 70 | } 71 | 72 | pub fn file_type(&self) -> Option { 73 | match &self.inner { 74 | DirEntryInner::Normal(e) => e.file_type(), 75 | DirEntryInner::BrokenSymlink(_) => self.metadata().map(|m| m.file_type()), 76 | } 77 | } 78 | 79 | pub fn metadata(&self) -> Option<&Metadata> { 80 | self.metadata 81 | .get_or_init(|| match &self.inner { 82 | DirEntryInner::Normal(e) => e.metadata().ok(), 83 | DirEntryInner::BrokenSymlink(path) => path.symlink_metadata().ok(), 84 | }) 85 | .as_ref() 86 | } 87 | 88 | pub fn depth(&self) -> Option { 89 | match &self.inner { 90 | DirEntryInner::Normal(e) => Some(e.depth()), 91 | DirEntryInner::BrokenSymlink(_) => None, 92 | } 93 | } 94 | 95 | pub fn style(&self, ls_colors: &LsColors) -> Option<&Style> { 96 | self.style 97 | .get_or_init(|| ls_colors.style_for(self).cloned()) 98 | .as_ref() 99 | } 100 | } 101 | 102 | impl PartialEq for DirEntry { 103 | #[inline] 104 | fn eq(&self, other: &Self) -> bool { 105 | self.path() == other.path() 106 | } 107 | } 108 | 109 | impl Eq for DirEntry {} 110 | 111 | impl PartialOrd for DirEntry { 112 | #[inline] 113 | fn partial_cmp(&self, other: &Self) -> Option { 114 | self.path().partial_cmp(other.path()) 115 | } 116 | } 117 | 118 | impl Ord for DirEntry { 119 | #[inline] 120 | fn cmp(&self, other: &Self) -> std::cmp::Ordering { 121 | self.path().cmp(other.path()) 122 | } 123 | } 124 | 125 | impl Colorable for DirEntry { 126 | fn path(&self) -> PathBuf { 127 | self.path().to_owned() 128 | } 129 | 130 | fn file_name(&self) -> OsString { 131 | let name = match &self.inner { 132 | DirEntryInner::Normal(e) => e.file_name(), 133 | DirEntryInner::BrokenSymlink(path) => { 134 | // Path::file_name() only works if the last component is Normal, 135 | // but we want it for all component types, so we open code it. 
136 | // Copied from LsColors::style_for_path_with_metadata(). 137 | path.components() 138 | .last() 139 | .map(|c| c.as_os_str()) 140 | .unwrap_or_else(|| path.as_os_str()) 141 | } 142 | }; 143 | name.to_owned() 144 | } 145 | 146 | fn file_type(&self) -> Option { 147 | self.file_type() 148 | } 149 | 150 | fn metadata(&self) -> Option { 151 | self.metadata().cloned() 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use std::{path::PathBuf, sync::Arc, time::Duration}; 2 | 3 | use lscolors::LsColors; 4 | use regex::bytes::RegexSet; 5 | 6 | use crate::exec::CommandSet; 7 | use crate::filetypes::FileTypes; 8 | #[cfg(unix)] 9 | use crate::filter::OwnerFilter; 10 | use crate::filter::{SizeFilter, TimeFilter}; 11 | 12 | /// Configuration options for *fd*. 13 | pub struct Config { 14 | /// Whether the search is case-sensitive or case-insensitive. 15 | pub case_sensitive: bool, 16 | 17 | /// Whether to search within the full file path or just the base name (filename or directory 18 | /// name). 19 | pub search_full_path: bool, 20 | 21 | /// Whether to ignore hidden files and directories (or not). 22 | pub ignore_hidden: bool, 23 | 24 | /// Whether to respect `.fdignore` files or not. 25 | pub read_fdignore: bool, 26 | 27 | /// Whether to respect ignore files in parent directories or not. 28 | pub read_parent_ignore: bool, 29 | 30 | /// Whether to respect VCS ignore files (`.gitignore`, ..) or not. 31 | pub read_vcsignore: bool, 32 | 33 | /// Whether to require a `.git` directory to respect gitignore files. 34 | pub require_git_to_read_vcsignore: bool, 35 | 36 | /// Whether to respect the global ignore file or not. 37 | pub read_global_ignore: bool, 38 | 39 | /// Whether to follow symlinks or not. 40 | pub follow_links: bool, 41 | 42 | /// Whether to limit the search to starting file system or not. 
43 | pub one_file_system: bool, 44 | 45 | /// Whether elements of output should be separated by a null character 46 | pub null_separator: bool, 47 | 48 | /// The maximum search depth, or `None` if no maximum search depth should be set. 49 | /// 50 | /// A depth of `1` includes all files under the current directory, a depth of `2` also includes 51 | /// all files under subdirectories of the current directory, etc. 52 | pub max_depth: Option, 53 | 54 | /// The minimum depth for reported entries, or `None`. 55 | pub min_depth: Option, 56 | 57 | /// Whether to stop traversing into matching directories. 58 | pub prune: bool, 59 | 60 | /// The number of threads to use. 61 | pub threads: usize, 62 | 63 | /// If true, the program doesn't print anything and will instead return an exit code of 0 64 | /// if there's at least one match. Otherwise, the exit code will be 1. 65 | pub quiet: bool, 66 | 67 | /// Time to buffer results internally before streaming to the console. This is useful to 68 | /// provide a sorted output, in case the total execution time is shorter than 69 | /// `max_buffer_time`. 70 | pub max_buffer_time: Option, 71 | 72 | /// `None` if the output should not be colorized. Otherwise, a `LsColors` instance that defines 73 | /// how to style different filetypes. 74 | pub ls_colors: Option, 75 | 76 | /// Whether or not we are writing to an interactive terminal 77 | pub interactive_terminal: bool, 78 | 79 | /// The type of file to search for. If set to `None`, all file types are displayed. If 80 | /// set to `Some(..)`, only the types that are specified are shown. 81 | pub file_types: Option, 82 | 83 | /// The extension to search for. Only entries matching the extension will be included. 84 | /// 85 | /// The value (if present) will be a lowercase string without leading dots. 86 | pub extensions: Option, 87 | 88 | /// If a value is supplied, each item found will be used to generate and execute commands. 
89 | pub command: Option>, 90 | 91 | /// Maximum number of search results to pass to each `command`. If zero, the number is 92 | /// unlimited. 93 | pub batch_size: usize, 94 | 95 | /// A list of glob patterns that should be excluded from the search. 96 | pub exclude_patterns: Vec, 97 | 98 | /// A list of custom ignore files. 99 | pub ignore_files: Vec, 100 | 101 | /// The given constraints on the size of returned files 102 | pub size_constraints: Vec, 103 | 104 | /// Constraints on last modification time of files 105 | pub time_constraints: Vec, 106 | 107 | #[cfg(unix)] 108 | /// User/group ownership constraint 109 | pub owner_constraint: Option, 110 | 111 | /// Whether or not to display filesystem errors 112 | pub show_filesystem_errors: bool, 113 | 114 | /// The separator used to print file paths. 115 | pub path_separator: Option, 116 | 117 | /// The actual separator, either the system default separator or `path_separator` 118 | pub actual_path_separator: String, 119 | 120 | /// The maximum number of search results 121 | pub max_results: Option, 122 | 123 | /// Whether or not to strip the './' prefix for search results 124 | pub strip_cwd_prefix: bool, 125 | } 126 | 127 | impl Config { 128 | /// Check whether results are being printed. 
129 | pub fn is_printing(&self) -> bool { 130 | self.command.is_none() 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /src/output.rs: -------------------------------------------------------------------------------- 1 | use std::borrow::Cow; 2 | use std::io::{self, Write}; 3 | 4 | use lscolors::{Indicator, LsColors, Style}; 5 | 6 | use crate::config::Config; 7 | use crate::dir_entry::DirEntry; 8 | use crate::error::print_error; 9 | use crate::exit_codes::ExitCode; 10 | 11 | fn replace_path_separator(path: &str, new_path_separator: &str) -> String { 12 | path.replace(std::path::MAIN_SEPARATOR, new_path_separator) 13 | } 14 | 15 | // TODO: this function is performance critical and can probably be optimized 16 | pub fn print_entry(stdout: &mut W, entry: &DirEntry, config: &Config) { 17 | let r = if let Some(ref ls_colors) = config.ls_colors { 18 | print_entry_colorized(stdout, entry, config, ls_colors) 19 | } else { 20 | print_entry_uncolorized(stdout, entry, config) 21 | }; 22 | 23 | if let Err(e) = r { 24 | if e.kind() == ::std::io::ErrorKind::BrokenPipe { 25 | // Exit gracefully in case of a broken pipe (e.g. 'fd ... | head -n 3'). 26 | ExitCode::Success.exit(); 27 | } else { 28 | print_error(format!("Could not write to output: {}", e)); 29 | ExitCode::GeneralError.exit(); 30 | } 31 | } 32 | } 33 | 34 | // Display a trailing slash if the path is a directory and the config option is enabled. 35 | // If the path_separator option is set, display that instead. 36 | // The trailing slash will not be colored. 
37 | #[inline] 38 | fn print_trailing_slash( 39 | stdout: &mut W, 40 | entry: &DirEntry, 41 | config: &Config, 42 | style: Option<&Style>, 43 | ) -> io::Result<()> { 44 | if entry.file_type().map_or(false, |ft| ft.is_dir()) { 45 | write!( 46 | stdout, 47 | "{}", 48 | style 49 | .map(Style::to_nu_ansi_term_style) 50 | .unwrap_or_default() 51 | .paint(&config.actual_path_separator) 52 | )?; 53 | } 54 | Ok(()) 55 | } 56 | 57 | // TODO: this function is performance critical and can probably be optimized 58 | fn print_entry_colorized( 59 | stdout: &mut W, 60 | entry: &DirEntry, 61 | config: &Config, 62 | ls_colors: &LsColors, 63 | ) -> io::Result<()> { 64 | // Split the path between the parent and the last component 65 | let mut offset = 0; 66 | let path = entry.stripped_path(config); 67 | let path_str = path.to_string_lossy(); 68 | 69 | if let Some(parent) = path.parent() { 70 | offset = parent.to_string_lossy().len(); 71 | for c in path_str[offset..].chars() { 72 | if std::path::is_separator(c) { 73 | offset += c.len_utf8(); 74 | } else { 75 | break; 76 | } 77 | } 78 | } 79 | 80 | if offset > 0 { 81 | let mut parent_str = Cow::from(&path_str[..offset]); 82 | if let Some(ref separator) = config.path_separator { 83 | *parent_str.to_mut() = replace_path_separator(&parent_str, separator); 84 | } 85 | 86 | let style = ls_colors 87 | .style_for_indicator(Indicator::Directory) 88 | .map(Style::to_nu_ansi_term_style) 89 | .unwrap_or_default(); 90 | write!(stdout, "{}", style.paint(parent_str))?; 91 | } 92 | 93 | let style = entry 94 | .style(ls_colors) 95 | .map(Style::to_nu_ansi_term_style) 96 | .unwrap_or_default(); 97 | write!(stdout, "{}", style.paint(&path_str[offset..]))?; 98 | 99 | print_trailing_slash( 100 | stdout, 101 | entry, 102 | config, 103 | ls_colors.style_for_indicator(Indicator::Directory), 104 | )?; 105 | 106 | if config.null_separator { 107 | write!(stdout, "\0")?; 108 | } else { 109 | writeln!(stdout)?; 110 | } 111 | 112 | Ok(()) 113 | } 114 | 115 | // 
TODO: this function is performance critical and can probably be optimized 116 | fn print_entry_uncolorized_base( 117 | stdout: &mut W, 118 | entry: &DirEntry, 119 | config: &Config, 120 | ) -> io::Result<()> { 121 | let separator = if config.null_separator { "\0" } else { "\n" }; 122 | let path = entry.stripped_path(config); 123 | 124 | let mut path_string = path.to_string_lossy(); 125 | if let Some(ref separator) = config.path_separator { 126 | *path_string.to_mut() = replace_path_separator(&path_string, separator); 127 | } 128 | write!(stdout, "{}", path_string)?; 129 | print_trailing_slash(stdout, entry, config, None)?; 130 | write!(stdout, "{}", separator) 131 | } 132 | 133 | #[cfg(not(unix))] 134 | fn print_entry_uncolorized( 135 | stdout: &mut W, 136 | entry: &DirEntry, 137 | config: &Config, 138 | ) -> io::Result<()> { 139 | print_entry_uncolorized_base(stdout, entry, config) 140 | } 141 | 142 | #[cfg(unix)] 143 | fn print_entry_uncolorized( 144 | stdout: &mut W, 145 | entry: &DirEntry, 146 | config: &Config, 147 | ) -> io::Result<()> { 148 | use std::os::unix::ffi::OsStrExt; 149 | 150 | if config.interactive_terminal || config.path_separator.is_some() { 151 | // Fall back to the base implementation 152 | print_entry_uncolorized_base(stdout, entry, config) 153 | } else { 154 | // Print path as raw bytes, allowing invalid UTF-8 filenames to be passed to other processes 155 | let separator = if config.null_separator { b"\0" } else { b"\n" }; 156 | stdout.write_all(entry.stripped_path(config).as_os_str().as_bytes())?; 157 | print_trailing_slash(stdout, entry, config, None)?; 158 | stdout.write_all(separator) 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /doc/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 19 | 46 | 50 | 54 | 55 | 57 | 62 | 65 | fd 77 | fd 89 | 92 | 94 | 99 | 103 | 104 | 107 | 112 | 120 | 121 | 124 | 129 | 137 | 138 | 141 | 146 | 
154 | 155 | 156 | 159 | 160 | 161 | 162 | -------------------------------------------------------------------------------- /src/filter/size.rs: -------------------------------------------------------------------------------- 1 | use std::sync::OnceLock; 2 | 3 | use anyhow::anyhow; 4 | use regex::Regex; 5 | 6 | static SIZE_CAPTURES: OnceLock = OnceLock::new(); 7 | 8 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 9 | pub enum SizeFilter { 10 | Max(u64), 11 | Min(u64), 12 | Equals(u64), 13 | } 14 | 15 | // SI prefixes (powers of 10) 16 | const KILO: u64 = 1000; 17 | const MEGA: u64 = KILO * 1000; 18 | const GIGA: u64 = MEGA * 1000; 19 | const TERA: u64 = GIGA * 1000; 20 | 21 | // Binary prefixes (powers of 2) 22 | const KIBI: u64 = 1024; 23 | const MEBI: u64 = KIBI * 1024; 24 | const GIBI: u64 = MEBI * 1024; 25 | const TEBI: u64 = GIBI * 1024; 26 | 27 | impl SizeFilter { 28 | pub fn from_string(s: &str) -> anyhow::Result { 29 | SizeFilter::parse_opt(s) 30 | .ok_or_else(|| anyhow!("'{}' is not a valid size constraint. See 'fd --help'.", s)) 31 | } 32 | 33 | fn parse_opt(s: &str) -> Option { 34 | let pattern = 35 | SIZE_CAPTURES.get_or_init(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap()); 36 | if !pattern.is_match(s) { 37 | return None; 38 | } 39 | 40 | let captures = pattern.captures(s)?; 41 | let limit_kind = captures.get(1).map_or("+", |m| m.as_str()); 42 | let quantity = captures 43 | .get(2) 44 | .and_then(|v| v.as_str().parse::().ok())?; 45 | 46 | let multiplier = match &captures.get(3).map_or("b", |m| m.as_str()).to_lowercase()[..] 
{ 47 | v if v.starts_with("ki") => KIBI, 48 | v if v.starts_with('k') => KILO, 49 | v if v.starts_with("mi") => MEBI, 50 | v if v.starts_with('m') => MEGA, 51 | v if v.starts_with("gi") => GIBI, 52 | v if v.starts_with('g') => GIGA, 53 | v if v.starts_with("ti") => TEBI, 54 | v if v.starts_with('t') => TERA, 55 | "b" => 1, 56 | _ => return None, 57 | }; 58 | 59 | let size = quantity * multiplier; 60 | match limit_kind { 61 | "+" => Some(SizeFilter::Min(size)), 62 | "-" => Some(SizeFilter::Max(size)), 63 | "" => Some(SizeFilter::Equals(size)), 64 | _ => None, 65 | } 66 | } 67 | 68 | pub fn is_within(&self, size: u64) -> bool { 69 | match *self { 70 | SizeFilter::Max(limit) => size <= limit, 71 | SizeFilter::Min(limit) => size >= limit, 72 | SizeFilter::Equals(limit) => size == limit, 73 | } 74 | } 75 | } 76 | 77 | #[cfg(test)] 78 | mod tests { 79 | use super::*; 80 | 81 | macro_rules! gen_size_filter_parse_test { 82 | ($($name: ident: $val: expr,)*) => { 83 | $( 84 | #[test] 85 | fn $name() { 86 | let (txt, expected) = $val; 87 | let actual = SizeFilter::from_string(txt).unwrap(); 88 | assert_eq!(actual, expected); 89 | } 90 | )* 91 | }; 92 | } 93 | 94 | // Parsing and size conversion tests data. Ensure that each type gets properly interpreted. 95 | // Call with higher base values to ensure expected multiplication (only need a couple) 96 | gen_size_filter_parse_test! 
{ 97 | byte_plus: ("+1b", SizeFilter::Min(1)), 98 | byte_plus_multiplier: ("+10b", SizeFilter::Min(10)), 99 | byte_minus: ("-1b", SizeFilter::Max(1)), 100 | kilo_plus: ("+1k", SizeFilter::Min(1000)), 101 | kilo_plus_suffix: ("+1kb", SizeFilter::Min(1000)), 102 | kilo_minus: ("-1k", SizeFilter::Max(1000)), 103 | kilo_minus_multiplier: ("-100k", SizeFilter::Max(100_000)), 104 | kilo_minus_suffix: ("-1kb", SizeFilter::Max(1000)), 105 | kilo_plus_upper: ("+1K", SizeFilter::Min(1000)), 106 | kilo_plus_suffix_upper: ("+1KB", SizeFilter::Min(1000)), 107 | kilo_minus_upper: ("-1K", SizeFilter::Max(1000)), 108 | kilo_minus_suffix_upper: ("-1Kb", SizeFilter::Max(1000)), 109 | kibi_plus: ("+1ki", SizeFilter::Min(1024)), 110 | kibi_plus_multiplier: ("+10ki", SizeFilter::Min(10_240)), 111 | kibi_plus_suffix: ("+1kib", SizeFilter::Min(1024)), 112 | kibi_minus: ("-1ki", SizeFilter::Max(1024)), 113 | kibi_minus_multiplier: ("-100ki", SizeFilter::Max(102_400)), 114 | kibi_minus_suffix: ("-1kib", SizeFilter::Max(1024)), 115 | kibi_plus_upper: ("+1KI", SizeFilter::Min(1024)), 116 | kibi_plus_suffix_upper: ("+1KiB", SizeFilter::Min(1024)), 117 | kibi_minus_upper: ("-1Ki", SizeFilter::Max(1024)), 118 | kibi_minus_suffix_upper: ("-1KIB", SizeFilter::Max(1024)), 119 | mega_plus: ("+1m", SizeFilter::Min(1_000_000)), 120 | mega_plus_suffix: ("+1mb", SizeFilter::Min(1_000_000)), 121 | mega_minus: ("-1m", SizeFilter::Max(1_000_000)), 122 | mega_minus_suffix: ("-1mb", SizeFilter::Max(1_000_000)), 123 | mega_plus_upper: ("+1M", SizeFilter::Min(1_000_000)), 124 | mega_plus_suffix_upper: ("+1MB", SizeFilter::Min(1_000_000)), 125 | mega_minus_upper: ("-1M", SizeFilter::Max(1_000_000)), 126 | mega_minus_suffix_upper: ("-1Mb", SizeFilter::Max(1_000_000)), 127 | mebi_plus: ("+1mi", SizeFilter::Min(1_048_576)), 128 | mebi_plus_suffix: ("+1mib", SizeFilter::Min(1_048_576)), 129 | mebi_minus: ("-1mi", SizeFilter::Max(1_048_576)), 130 | mebi_minus_suffix: ("-1mib", SizeFilter::Max(1_048_576)), 131 | 
mebi_plus_upper: ("+1MI", SizeFilter::Min(1_048_576)), 132 | mebi_plus_suffix_upper: ("+1MiB", SizeFilter::Min(1_048_576)), 133 | mebi_minus_upper: ("-1Mi", SizeFilter::Max(1_048_576)), 134 | mebi_minus_suffix_upper: ("-1MIB", SizeFilter::Max(1_048_576)), 135 | giga_plus: ("+1g", SizeFilter::Min(1_000_000_000)), 136 | giga_plus_suffix: ("+1gb", SizeFilter::Min(1_000_000_000)), 137 | giga_minus: ("-1g", SizeFilter::Max(1_000_000_000)), 138 | giga_minus_suffix: ("-1gb", SizeFilter::Max(1_000_000_000)), 139 | giga_plus_upper: ("+1G", SizeFilter::Min(1_000_000_000)), 140 | giga_plus_suffix_upper: ("+1GB", SizeFilter::Min(1_000_000_000)), 141 | giga_minus_upper: ("-1G", SizeFilter::Max(1_000_000_000)), 142 | giga_minus_suffix_upper: ("-1Gb", SizeFilter::Max(1_000_000_000)), 143 | gibi_plus: ("+1gi", SizeFilter::Min(1_073_741_824)), 144 | gibi_plus_suffix: ("+1gib", SizeFilter::Min(1_073_741_824)), 145 | gibi_minus: ("-1gi", SizeFilter::Max(1_073_741_824)), 146 | gibi_minus_suffix: ("-1gib", SizeFilter::Max(1_073_741_824)), 147 | gibi_plus_upper: ("+1GI", SizeFilter::Min(1_073_741_824)), 148 | gibi_plus_suffix_upper: ("+1GiB", SizeFilter::Min(1_073_741_824)), 149 | gibi_minus_upper: ("-1Gi", SizeFilter::Max(1_073_741_824)), 150 | gibi_minus_suffix_upper: ("-1GIB", SizeFilter::Max(1_073_741_824)), 151 | tera_plus: ("+1t", SizeFilter::Min(1_000_000_000_000)), 152 | tera_plus_suffix: ("+1tb", SizeFilter::Min(1_000_000_000_000)), 153 | tera_minus: ("-1t", SizeFilter::Max(1_000_000_000_000)), 154 | tera_minus_suffix: ("-1tb", SizeFilter::Max(1_000_000_000_000)), 155 | tera_plus_upper: ("+1T", SizeFilter::Min(1_000_000_000_000)), 156 | tera_plus_suffix_upper: ("+1TB", SizeFilter::Min(1_000_000_000_000)), 157 | tera_minus_upper: ("-1T", SizeFilter::Max(1_000_000_000_000)), 158 | tera_minus_suffix_upper: ("-1Tb", SizeFilter::Max(1_000_000_000_000)), 159 | tebi_plus: ("+1ti", SizeFilter::Min(1_099_511_627_776)), 160 | tebi_plus_suffix: ("+1tib", 
SizeFilter::Min(1_099_511_627_776)), 161 | tebi_minus: ("-1ti", SizeFilter::Max(1_099_511_627_776)), 162 | tebi_minus_suffix: ("-1tib", SizeFilter::Max(1_099_511_627_776)), 163 | tebi_plus_upper: ("+1TI", SizeFilter::Min(1_099_511_627_776)), 164 | tebi_plus_suffix_upper: ("+1TiB", SizeFilter::Min(1_099_511_627_776)), 165 | tebi_minus_upper: ("-1Ti", SizeFilter::Max(1_099_511_627_776)), 166 | tebi_minus_suffix_upper: ("-1TIB", SizeFilter::Max(1_099_511_627_776)), 167 | } 168 | 169 | /// Invalid parse testing 170 | macro_rules! gen_size_filter_failure { 171 | ($($name:ident: $value:expr,)*) => { 172 | $( 173 | #[test] 174 | fn $name() { 175 | let i = SizeFilter::from_string($value); 176 | assert!(i.is_err()); 177 | } 178 | )* 179 | }; 180 | } 181 | 182 | // Invalid parse data 183 | gen_size_filter_failure! { 184 | ensure_missing_number_returns_none: "+g", 185 | ensure_missing_unit_returns_none: "+18", 186 | ensure_bad_format_returns_none_1: "$10M", 187 | ensure_bad_format_returns_none_2: "badval", 188 | ensure_bad_format_returns_none_3: "9999", 189 | ensure_invalid_unit_returns_none_1: "+50a", 190 | ensure_invalid_unit_returns_none_2: "-10v", 191 | ensure_invalid_unit_returns_none_3: "+1Mv", 192 | ensure_bib_format_returns_none: "+1bib", 193 | ensure_bb_format_returns_none: "+1bb", 194 | } 195 | 196 | #[test] 197 | fn is_within_less_than() { 198 | let f = SizeFilter::from_string("-1k").unwrap(); 199 | assert!(f.is_within(999)); 200 | } 201 | 202 | #[test] 203 | fn is_within_less_than_equal() { 204 | let f = SizeFilter::from_string("-1k").unwrap(); 205 | assert!(f.is_within(1000)); 206 | } 207 | 208 | #[test] 209 | fn is_within_greater_than() { 210 | let f = SizeFilter::from_string("+1k").unwrap(); 211 | assert!(f.is_within(1001)); 212 | } 213 | 214 | #[test] 215 | fn is_within_greater_than_equal() { 216 | let f = SizeFilter::from_string("+1K").unwrap(); 217 | assert!(f.is_within(1000)); 218 | } 219 | } 220 | 
-------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2017-2020 fd developers 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /contrib/completion/_fd: -------------------------------------------------------------------------------- 1 | #compdef fd 2 | 3 | ## 4 | # zsh completion function for fd 5 | # 6 | # Based on ripgrep completion function. 7 | # Originally based on code from the zsh-users project — see copyright notice 8 | # below. 9 | 10 | autoload -U is-at-least 11 | 12 | _fd() { 13 | local curcontext="$curcontext" no='!' 
ret=1 14 | local -a context line state state_descr _arguments_options fd_types fd_args 15 | local -A opt_args 16 | 17 | if is-at-least 5.2; then 18 | _arguments_options=( -s -S ) 19 | else 20 | _arguments_options=( -s ) 21 | fi 22 | 23 | fd_types=( 24 | {f,file}'\:"regular files"' 25 | {d,directory}'\:"directories"' 26 | {l,symlink}'\:"symbolic links"' 27 | {e,empty}'\:"empty files or directories"' 28 | {x,executable}'\:"executable (files)"' 29 | {s,socket}'\:"sockets"' 30 | {p,pipe}'\:"named pipes (FIFOs)"' 31 | ) 32 | 33 | # Do not complete rare options unless either the current prefix 34 | # matches one of those options or the user has the `complete-all` 35 | # style set. Note that this prefix check has to be updated manually to account 36 | # for all of the potential negation options listed below! 37 | if 38 | # (--[bpsu]* => match all options marked with '$no') 39 | [[ $PREFIX$SUFFIX == --[bopsun]* ]] || 40 | zstyle -t ":complete:$curcontext:*" complete-all 41 | then 42 | no= 43 | fi 44 | 45 | # We make heavy use of argument groups here to prevent the option specs from 46 | # growing unwieldy. These aren't supported in zsh <5.4, though, so we'll strip 47 | # them out below if necessary. 
This makes the exclusions inaccurate on those 48 | # older versions, but oh well — it's not that big a deal 49 | fd_args=( 50 | + '(hidden)' # hidden files 51 | {-H,--hidden}'[search hidden files/directories]' 52 | 53 | + '(no-ignore-full)' # all ignore files 54 | '(no-ignore-partial)'{-I,--no-ignore}"[don't respect .(git|fd)ignore and global ignore files]" 55 | $no'(no-ignore-partial)*'{-u,--unrestricted}'[alias for --no-ignore, when repeated also alias for --hidden]' 56 | 57 | + no-ignore-partial # some ignore files 58 | "(no-ignore-full --no-ignore-vcs)--no-ignore-vcs[don't respect .gitignore files]" 59 | "!(no-ignore-full --no-global-ignore-file)--no-global-ignore-file[don't respect the global ignore file]" 60 | $no'(no-ignore-full --no-ignore-parent)--no-ignore-parent[]' 61 | 62 | + '(case)' # case-sensitivity 63 | {-s,--case-sensitive}'[perform a case-sensitive search]' 64 | {-i,--ignore-case}'[perform a case-insensitive search]' 65 | 66 | + '(regex-pattern)' # regex-based search pattern 67 | '(no-regex-pattern)--regex[perform a regex-based search (default)]' 68 | 69 | + '(no-regex-pattern)' # non-regex-based search pattern 70 | {-g,--glob}'[perform a glob-based search]' 71 | {-F,--fixed-strings}'[treat pattern as literal string instead of a regex]' 72 | 73 | + '(no-require-git)' 74 | "$no(no-ignore-full --no-ignore-vcs --no-require-git)--no-require-git[don't require git repo to respect gitignores]" 75 | 76 | + '(match-full)' # match against full path 77 | {-p,--full-path}'[match the pattern against the full path instead of the basename]' 78 | 79 | + '(follow)' # follow symlinks 80 | {-L,--follow}'[follow symbolic links to directories]' 81 | 82 | + '(abs-path)' # show absolute paths 83 | '(long-listing)'{-a,--absolute-path}'[show absolute paths instead of relative paths]' 84 | 85 | + '(null-sep)' # use null separator for output 86 | '(long-listing)'{-0,--print0}'[separate search results by the null character]' 87 | 88 | + '(long-listing)' # long-listing 
output 89 | '(abs-path null-sep max-results exec-cmds)'{-l,--list-details}'[use a long listing format with file metadata]' 90 | 91 | + '(max-results)' # max number of results 92 | '(long-listing exec-cmds)--max-results=[limit number of search results to given count and quit]:count' 93 | '(long-listing exec-cmds)-1[limit to a single search result and quit]' 94 | 95 | + '(fs-errors)' # file-system errors 96 | $no'--show-errors[enable the display of filesystem errors]' 97 | 98 | + '(fs-traversal)' # file-system traversal 99 | $no"--one-file-system[don't descend into directories on other file systems]" 100 | '!--mount' 101 | '!--xdev' 102 | 103 | + dir-depth # directory depth 104 | '(--exact-depth -d --max-depth)'{-d+,--max-depth=}'[set max directory depth to descend when searching]:depth' 105 | '!(--exact-depth -d --max-depth)--maxdepth:depth' 106 | '(--exact-depth --min-depth)--min-depth=[set directory depth to descend before start searching]:depth' 107 | '(--exact-depth -d --max-depth --maxdepth --min-depth)--exact-depth=[only search at the exact given directory depth]:depth' 108 | 109 | + prune # pruning 110 | "--prune[don't traverse into matching directories]" 111 | 112 | + filter-misc # filter search 113 | '*'{-t+,--type=}"[filter search by type]:type:(($fd_types))" 114 | '*'{-e+,--extension=}'[filter search by file extension]:extension' 115 | '*'{-E+,--exclude=}'[exclude files/directories that match the given glob pattern]:glob pattern' 116 | '*'{-S+,--size=}'[limit search by file size]:size limit:->size' 117 | '(-o --owner)'{-o+,--owner=}'[filter by owning user and/or group]:owner and/or group:->owner' 118 | 119 | + ignore-file # extra ignore files 120 | '*--ignore-file=[add a custom, low-precedence ignore-file with .gitignore format]: :_files' 121 | 122 | + '(filter-mtime-newer)' # filter by files modified after than 123 | '--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration' 124 | 
'--changed-after=[alias for --changed-within]:date/duration' 125 | '!--change-newer-than=:date/duration' 126 | '!--newer=:date/duration' 127 | 128 | + '(filter-mtime-older)' # filter by files modified before than 129 | '--changed-before=[limit search to files/directories modified before the given date/duration]:date or duration' 130 | '!--change-older-than=:date/duration' 131 | '!--older=:date/duration' 132 | 133 | + '(color)' # colorize output 134 | {-c+,--color=}'[declare when to colorize search results]:when to colorize:(( 135 | auto\:"show colors if the output goes to an interactive console (default)" 136 | never\:"do not use colorized output" 137 | always\:"always use colorized output" 138 | ))' 139 | 140 | + '(threads)' 141 | {-j+,--threads=}'[set the number of threads for searching and executing]:number of threads' 142 | 143 | + '(exec-cmds)' # execute command 144 | '(long-listing max-results)'{-x+,--exec=}'[execute command for each search result]:command: _command_names -e:*\;::program arguments: _normal' 145 | '(long-listing max-results)'{-X+,--exec-batch=}'[execute command for all search results at once]:command: _command_names -e:*\;::program arguments: _normal' 146 | '(long-listing max-results)--batch-size=[max number of args for each -X call]:size' 147 | 148 | + other 149 | '!(--max-buffer-time)--max-buffer-time=[set amount of time to buffer before showing output]:time (ms)' 150 | 151 | + '(about)' # about flags 152 | '(: * -)'{-h,--help}'[display help message]' 153 | '(: * -)'{-V,--version}'[display version information]' 154 | 155 | + path-sep # set path separator for output 156 | $no'(--path-separator)--path-separator=[set the path separator to use when printing file paths]:path separator' 157 | 158 | + search-path 159 | $no'(--base-directory)--base-directory=[change the current working directory to the given path]:directory:_files -/' 160 | $no'(*)*--search-path=[set search path (instead of positional arguments)]:directory:_files -/' 161 | 162 | + 
strip-cwd-prefix 163 | $no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix[Strip ./ prefix when output is redirected]' 164 | 165 | + and 166 | '--and=[additional required search path]:pattern' 167 | 168 | 169 | + args # positional arguments 170 | '1: :_guard "^-*" pattern' 171 | '(--search-path)*:directory:_files -/' 172 | ) 173 | 174 | # Strip out argument groups where unsupported (see above) 175 | is-at-least 5.4 || 176 | fd_args=( ${(@)args:#(#i)(+|[a-z0-9][a-z0-9_-]#|\([a-z0-9][a-z0-9_-]#\))} ) 177 | 178 | _arguments $_arguments_options : $fd_args && ret=0 179 | 180 | case ${state} in 181 | owner) 182 | compset -P '(\\|)\!' 183 | if compset -P '*:'; then 184 | _groups && ret=0 185 | else 186 | if 187 | compset -S ':*' || 188 | # Do not add the colon suffix when completing "!user 189 | # (with a starting double-quote) otherwise pressing tab again 190 | # after the inserted colon "!user: will complete history modifiers 191 | [[ $IPREFIX == (\\|\!)* && ($QIPREFIX == \"* && -z $QISUFFIX) ]] 192 | then 193 | _users && ret=0 194 | else 195 | local q 196 | # Since quotes are needed when using the negation prefix !, 197 | # automatically remove the colon suffix also when closing the quote 198 | if [[ $QIPREFIX == [\'\"]* ]]; then 199 | q=${QIPREFIX:0:1} 200 | fi 201 | _users -r ": \t\n\-$q" -S : && ret=0 202 | fi 203 | fi 204 | ;; 205 | 206 | size) 207 | if compset -P '[-+][0-9]##'; then 208 | local -a suff=( 209 | 'B:bytes' 210 | 'K:kilobytes (10^3 = 1000 bytes)' 211 | 'M:megabytes (10^6 = 1000^2 bytes)' 212 | 'G:gigabytes (10^9 = 1000^3 bytes)' 213 | 'T:terabytes (10^12 = 1000^4 bytes)' 214 | 'Ki:kibibytes ( 2^10 = 1024 bytes)' 215 | 'Mi:mebibytes ( 2^20 = 1024^2 bytes)' 216 | 'Gi:gigibytes ( 2^30 = 1024^3 bytes)' 217 | 'Ti:tebibytes ( 2^40 = 1024^4 bytes)' 218 | ) 219 | _describe -t units 'size limit units' suff -V 'units' 220 | elif compset -P '[-+]'; then 221 | _message -e 'size limit number (full format: <+->)' 222 | else 223 | _values 'size limit prefix (full 
format: )' \ 224 | '\+[file size must be greater or equal to]'\ 225 | '-[file size must be less than or equal to]' && ret=0 226 | fi 227 | ;; 228 | esac 229 | 230 | return ret 231 | } 232 | 233 | _fd "$@" 234 | 235 | # ------------------------------------------------------------------------------ 236 | # Copyright (c) 2011 GitHub zsh-users - http://github.com/zsh-users 237 | # All rights reserved. 238 | # 239 | # Redistribution and use in source and binary forms, with or without 240 | # modification, are permitted provided that the following conditions are met: 241 | # * Redistributions of source code must retain the above copyright 242 | # notice, this list of conditions and the following disclaimer. 243 | # * Redistributions in binary form must reproduce the above copyright 244 | # notice, this list of conditions and the following disclaimer in the 245 | # documentation and/or other materials provided with the distribution. 246 | # * Neither the name of the zsh-users nor the 247 | # names of its contributors may be used to endorse or promote products 248 | # derived from this software without specific prior written permission. 249 | # 250 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 251 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 252 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 253 | # DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY 254 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 255 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 256 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 257 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 258 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 259 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for fd
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * smancill (https://github.com/smancill)
#
# ------------------------------------------------------------------------------

# Local Variables:
# mode: shell-script
# coding: utf-8-unix
# indent-tabs-mode: nil
# sh-indentation: 2
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et
-------------------------------------------------------------------------------- /tests/testenv/mod.rs: --------------------------------------------------------------------------------
use std::env;
use std::fs;
use std::io::{self, Write};
#[cfg(unix)]
use std::os::unix;
#[cfg(windows)]
use std::os::windows;
use std::path::{Path, PathBuf};
use std::process;

use tempfile::TempDir;

/// Environment for the integration tests.
pub struct TestEnv {
    /// Temporary working directory.
    temp_dir: TempDir,

    /// Path to the *fd* executable.
    fd_exe: PathBuf,

    /// Normalize each line by sorting the whitespace-separated words
    normalize_line: bool,

    /// Temporary directory for storing test config (global ignore file).
    /// `None` means no config dir is injected and `--no-global-ignore-file`
    /// is passed instead (see `run_command`).
    config_dir: Option<TempDir>,
}

/// Create the working directory and the test files.
29 | fn create_working_directory( 30 | directories: &[&'static str], 31 | files: &[&'static str], 32 | ) -> Result { 33 | let temp_dir = tempfile::Builder::new().prefix("fd-tests").tempdir()?; 34 | 35 | { 36 | let root = temp_dir.path(); 37 | 38 | // Pretend that this is a Git repository in order for `.gitignore` files to be respected 39 | fs::create_dir_all(root.join(".git"))?; 40 | 41 | for directory in directories { 42 | fs::create_dir_all(root.join(directory))?; 43 | } 44 | 45 | for file in files { 46 | fs::File::create(root.join(file))?; 47 | } 48 | 49 | #[cfg(unix)] 50 | unix::fs::symlink(root.join("one/two"), root.join("symlink"))?; 51 | 52 | // Note: creating symlinks on Windows requires the `SeCreateSymbolicLinkPrivilege` which 53 | // is by default only granted for administrators. 54 | #[cfg(windows)] 55 | windows::fs::symlink_dir(root.join("one/two"), root.join("symlink"))?; 56 | 57 | fs::File::create(root.join(".fdignore"))?.write_all(b"fdignored.foo")?; 58 | 59 | fs::File::create(root.join(".gitignore"))?.write_all(b"gitignored.foo")?; 60 | } 61 | 62 | Ok(temp_dir) 63 | } 64 | 65 | fn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result { 66 | let config_dir = tempfile::Builder::new().prefix("fd-config").tempdir()?; 67 | let fd_dir = config_dir.path().join("fd"); 68 | fs::create_dir(&fd_dir)?; 69 | let mut ignore_file = fs::File::create(fd_dir.join("ignore"))?; 70 | ignore_file.write_all(ignore_file_content.as_bytes())?; 71 | 72 | Ok(config_dir) 73 | } 74 | 75 | /// Find the *fd* executable. 
76 | fn find_fd_exe() -> PathBuf { 77 | // Tests exe is in target/debug/deps, the *fd* exe is in target/debug 78 | let root = env::current_exe() 79 | .expect("tests executable") 80 | .parent() 81 | .expect("tests executable directory") 82 | .parent() 83 | .expect("fd executable directory") 84 | .to_path_buf(); 85 | 86 | let exe_name = if cfg!(windows) { "fd.exe" } else { "fd" }; 87 | 88 | root.join(exe_name) 89 | } 90 | 91 | /// Format an error message for when *fd* did not exit successfully. 92 | fn format_exit_error(args: &[&str], output: &process::Output) -> String { 93 | format!( 94 | "`fd {}` did not exit successfully.\nstdout:\n---\n{}---\nstderr:\n---\n{}---", 95 | args.join(" "), 96 | String::from_utf8_lossy(&output.stdout), 97 | String::from_utf8_lossy(&output.stderr) 98 | ) 99 | } 100 | 101 | /// Format an error message for when the output of *fd* did not match the expected output. 102 | fn format_output_error(args: &[&str], expected: &str, actual: &str) -> String { 103 | // Generate diff text. 104 | let diff_text = diff::lines(expected, actual) 105 | .into_iter() 106 | .map(|diff| match diff { 107 | diff::Result::Left(l) => format!("-{}", l), 108 | diff::Result::Both(l, _) => format!(" {}", l), 109 | diff::Result::Right(r) => format!("+{}", r), 110 | }) 111 | .collect::>() 112 | .join("\n"); 113 | 114 | format!( 115 | concat!( 116 | "`fd {}` did not produce the expected output.\n", 117 | "Showing diff between expected and actual:\n{}\n" 118 | ), 119 | args.join(" "), 120 | diff_text 121 | ) 122 | } 123 | 124 | /// Normalize the output for comparison. 125 | fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String { 126 | // Split into lines and normalize separators. 
127 | let mut lines = s 128 | .replace('\0', "NULL\n") 129 | .lines() 130 | .map(|line| { 131 | let line = if trim_start { line.trim_start() } else { line }; 132 | let line = line.replace('/', &std::path::MAIN_SEPARATOR.to_string()); 133 | if normalize_line { 134 | let mut words: Vec<_> = line.split_whitespace().collect(); 135 | words.sort_unstable(); 136 | return words.join(" "); 137 | } 138 | line 139 | }) 140 | .collect::>(); 141 | 142 | lines.sort(); 143 | lines.join("\n") 144 | } 145 | 146 | /// Trim whitespace from the beginning of each line. 147 | fn trim_lines(s: &str) -> String { 148 | s.lines() 149 | .map(|line| line.trim_start()) 150 | .fold(String::new(), |mut str, line| { 151 | str.push_str(line); 152 | str.push('\n'); 153 | str 154 | }) 155 | } 156 | 157 | impl TestEnv { 158 | pub fn new(directories: &[&'static str], files: &[&'static str]) -> TestEnv { 159 | let temp_dir = create_working_directory(directories, files).expect("working directory"); 160 | let fd_exe = find_fd_exe(); 161 | 162 | TestEnv { 163 | temp_dir, 164 | fd_exe, 165 | normalize_line: false, 166 | config_dir: None, 167 | } 168 | } 169 | 170 | pub fn normalize_line(self, normalize: bool) -> TestEnv { 171 | TestEnv { 172 | temp_dir: self.temp_dir, 173 | fd_exe: self.fd_exe, 174 | normalize_line: normalize, 175 | config_dir: self.config_dir, 176 | } 177 | } 178 | 179 | pub fn global_ignore_file(self, content: &str) -> TestEnv { 180 | let config_dir = 181 | create_config_directory_with_global_ignore(content).expect("config directory"); 182 | TestEnv { 183 | config_dir: Some(config_dir), 184 | ..self 185 | } 186 | } 187 | 188 | /// Create a broken symlink at the given path in the temp_dir. 
189 | pub fn create_broken_symlink>( 190 | &mut self, 191 | link_path: P, 192 | ) -> Result { 193 | let root = self.test_root(); 194 | let broken_symlink_link = root.join(link_path); 195 | { 196 | let temp_target_dir = tempfile::Builder::new() 197 | .prefix("fd-tests-broken-symlink") 198 | .tempdir()?; 199 | let broken_symlink_target = temp_target_dir.path().join("broken_symlink_target"); 200 | fs::File::create(&broken_symlink_target)?; 201 | #[cfg(unix)] 202 | unix::fs::symlink(&broken_symlink_target, &broken_symlink_link)?; 203 | #[cfg(windows)] 204 | windows::fs::symlink_file(&broken_symlink_target, &broken_symlink_link)?; 205 | } 206 | Ok(broken_symlink_link) 207 | } 208 | 209 | /// Get the root directory for the tests. 210 | pub fn test_root(&self) -> PathBuf { 211 | self.temp_dir.path().to_path_buf() 212 | } 213 | 214 | /// Get the path of the fd executable. 215 | #[cfg_attr(windows, allow(unused))] 216 | pub fn test_exe(&self) -> &PathBuf { 217 | &self.fd_exe 218 | } 219 | 220 | /// Get the root directory of the file system. 221 | pub fn system_root(&self) -> PathBuf { 222 | let mut components = self.temp_dir.path().components(); 223 | PathBuf::from(components.next().expect("root directory").as_os_str()) 224 | } 225 | 226 | /// Assert that calling *fd* in the specified path under the root working directory, 227 | /// and with the specified arguments produces the expected output. 228 | pub fn assert_success_and_get_output>( 229 | &self, 230 | path: P, 231 | args: &[&str], 232 | ) -> process::Output { 233 | // Run *fd*. 234 | let output = self.run_command(path.as_ref(), args); 235 | 236 | // Check for exit status. 
244 | pub fn assert_success_and_get_normalized_output<P: AsRef<Path>>( 245 | &self, 246 | path: P, 247 | args: &[&str], 248 | ) -> String { 249 | let output = self.assert_success_and_get_output(path, args); 250 | normalize_output( 251 | &String::from_utf8_lossy(&output.stdout), 252 | false, 253 | self.normalize_line, 254 | ) 255 | } 256 | 257 | /// Assert that calling *fd* with the specified arguments produces the expected output. 258 | pub fn assert_output(&self, args: &[&str], expected: &str) { 259 | self.assert_output_subdirectory(".", args, expected) 260 | } 261 | 262 | /// Similar to assert_output, but able to handle non-utf8 output 263 | #[cfg(all(unix, not(target_os = "macos")))] 264 | pub fn assert_output_raw(&self, args: &[&str], expected: &[u8]) { 265 | let output = self.assert_success_and_get_output(".", args); 266 | 267 | assert_eq!(expected, &output.stdout[..]); 268 | } 269 | 270 | /// Assert that calling *fd* in the specified path under the root working directory, 271 | /// and with the specified arguments produces the expected output. 272 | pub fn assert_output_subdirectory<P: AsRef<Path>>( 273 | &self, 274 | path: P, 275 | args: &[&str], 276 | expected: &str, 277 | ) {
327 | fn assert_error_subdirectory<P: AsRef<Path>>(
341 | if !actual_err.trim_start().starts_with(&expected_error) { 342 | panic!( 343 | "{}", 344 | format_output_error(args, &expected_error, &actual_err) 345 | ); 346 | } 347 | } 348 | 349 | output.status 350 | } 351 | } 352 | -------------------------------------------------------------------------------- /.github/workflows/CICD.yml: -------------------------------------------------------------------------------- 1 | name: CICD 2 | 3 | env: 4 | CICD_INTERMEDIATES_DIR: "_cicd-intermediates" 5 | MSRV_FEATURES: "--all-features" 6 | 7 | on: 8 | workflow_dispatch: 9 | pull_request: 10 | push: 11 | branches: 12 | - master 13 | tags: 14 | - '*' 15 | 16 | jobs: 17 | crate_metadata: 18 | name: Extract crate metadata 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v3 22 | - name: Extract crate information 23 | id: crate_metadata 24 | run: | 25 | echo "name=fd" | tee -a $GITHUB_OUTPUT 26 | cargo metadata --no-deps --format-version 1 | jq -r '"version=" + .packages[0].version' | tee -a $GITHUB_OUTPUT 27 | cargo metadata --no-deps --format-version 1 | jq -r '"maintainer=" + .packages[0].authors[0]' | tee -a $GITHUB_OUTPUT 28 | cargo metadata --no-deps --format-version 1 | jq -r '"homepage=" + .packages[0].homepage' | tee -a $GITHUB_OUTPUT 29 | cargo metadata --no-deps --format-version 1 | jq -r '"msrv=" + .packages[0].rust_version' | tee -a $GITHUB_OUTPUT 30 | outputs: 31 | name: ${{ steps.crate_metadata.outputs.name }} 32 | version: ${{ steps.crate_metadata.outputs.version }} 33 | maintainer: ${{ steps.crate_metadata.outputs.maintainer }} 34 | homepage: ${{ steps.crate_metadata.outputs.homepage }} 35 | msrv: ${{ steps.crate_metadata.outputs.msrv }} 36 | 37 | ensure_cargo_fmt: 38 | name: Ensure 'cargo fmt' has been run 39 | runs-on: ubuntu-20.04 40 | steps: 41 | - uses: dtolnay/rust-toolchain@stable 42 | with: 43 | components: rustfmt 44 | - uses: actions/checkout@v3 45 | - run: cargo fmt -- --check 46 | 47 | min_version: 48 | name: Minimum supported rust 
version 49 | runs-on: ubuntu-20.04 50 | needs: crate_metadata 51 | steps: 52 | - name: Checkout source code 53 | uses: actions/checkout@v3 54 | 55 | - name: Install rust toolchain (v${{ needs.crate_metadata.outputs.msrv }}) 56 | uses: dtolnay/rust-toolchain@master 57 | with: 58 | toolchain: ${{ needs.crate_metadata.outputs.msrv }} 59 | components: clippy 60 | - name: Run clippy (on minimum supported rust version to prevent warnings we can't fix) 61 | run: cargo clippy --locked --all-targets ${{ env.MSRV_FEATURES }} 62 | - name: Run tests 63 | run: cargo test --locked ${{ env.MSRV_FEATURES }} 64 | 65 | build: 66 | name: ${{ matrix.job.target }} (${{ matrix.job.os }}) 67 | runs-on: ${{ matrix.job.os }} 68 | needs: crate_metadata 69 | strategy: 70 | fail-fast: false 71 | matrix: 72 | job: 73 | - { target: aarch64-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true } 74 | - { target: arm-unknown-linux-gnueabihf , os: ubuntu-20.04, use-cross: true } 75 | - { target: arm-unknown-linux-musleabihf, os: ubuntu-20.04, use-cross: true } 76 | - { target: i686-pc-windows-msvc , os: windows-2019 } 77 | - { target: i686-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true } 78 | - { target: i686-unknown-linux-musl , os: ubuntu-20.04, use-cross: true } 79 | - { target: x86_64-apple-darwin , os: macos-12 } 80 | - { target: x86_64-pc-windows-gnu , os: windows-2019 } 81 | - { target: x86_64-pc-windows-msvc , os: windows-2019 } 82 | - { target: x86_64-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true } 83 | - { target: x86_64-unknown-linux-musl , os: ubuntu-20.04, use-cross: true } 84 | env: 85 | BUILD_CMD: cargo 86 | steps: 87 | - name: Checkout source code 88 | uses: actions/checkout@v3 89 | 90 | - name: Install prerequisites 91 | shell: bash 92 | run: | 93 | case ${{ matrix.job.target }} in 94 | arm-unknown-linux-*) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;; 95 | aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install 
gcc-aarch64-linux-gnu ;; 96 | esac 97 | 98 | - name: Install Rust toolchain 99 | uses: dtolnay/rust-toolchain@stable 100 | with: 101 | targets: ${{ matrix.job.target }} 102 | 103 | - name: Install cross 104 | if: matrix.job.use-cross 105 | uses: taiki-e/install-action@v2 106 | with: 107 | tool: cross 108 | 109 | - name: Overwrite build command env variable 110 | if: matrix.job.use-cross 111 | shell: bash 112 | run: echo "BUILD_CMD=cross" >> $GITHUB_ENV 113 | 114 | - name: Show version information (Rust, cargo, GCC) 115 | shell: bash 116 | run: | 117 | gcc --version || true 118 | rustup -V 119 | rustup toolchain list 120 | rustup default 121 | cargo -V 122 | rustc -V 123 | 124 | - name: Build 125 | shell: bash 126 | run: $BUILD_CMD build --locked --release --target=${{ matrix.job.target }} 127 | 128 | - name: Set binary name & path 129 | id: bin 130 | shell: bash 131 | run: | 132 | # Figure out suffix of binary 133 | EXE_suffix="" 134 | case ${{ matrix.job.target }} in 135 | *-pc-windows-*) EXE_suffix=".exe" ;; 136 | esac; 137 | 138 | # Setup paths 139 | BIN_NAME="${{ needs.crate_metadata.outputs.name }}${EXE_suffix}" 140 | BIN_PATH="target/${{ matrix.job.target }}/release/${BIN_NAME}" 141 | 142 | # Let subsequent steps know where to find the binary 143 | echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT 144 | echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT 145 | 146 | - name: Set testing options 147 | id: test-options 148 | shell: bash 149 | run: | 150 | # test only library unit tests and binary for arm-type targets 151 | unset CARGO_TEST_OPTIONS 152 | unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${{ needs.crate_metadata.outputs.name }}" ;; esac; 153 | echo "CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}" >> $GITHUB_OUTPUT 154 | 155 | - name: Run tests 156 | shell: bash 157 | run: $BUILD_CMD test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}} 158 | 159 | - name: Generate 
completions 160 | id: completions 161 | shell: bash 162 | run: make completions 163 | 164 | - name: Create tarball 165 | id: package 166 | shell: bash 167 | run: | 168 | PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac; 169 | PKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-v${{ needs.crate_metadata.outputs.version }}-${{ matrix.job.target }} 170 | PKG_NAME=${PKG_BASENAME}${PKG_suffix} 171 | echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT 172 | 173 | PKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/package" 174 | ARCHIVE_DIR="${PKG_STAGING}/${PKG_BASENAME}/" 175 | mkdir -p "${ARCHIVE_DIR}" 176 | 177 | # Binary 178 | cp "${{ steps.bin.outputs.BIN_PATH }}" "$ARCHIVE_DIR" 179 | 180 | # README, LICENSE and CHANGELOG files 181 | cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR" 182 | 183 | # Man page 184 | cp 'doc/${{ needs.crate_metadata.outputs.name }}.1' "$ARCHIVE_DIR" 185 | 186 | # Autocompletion files 187 | cp -r autocomplete "${ARCHIVE_DIR}" 188 | 189 | # base compressed package 190 | pushd "${PKG_STAGING}/" >/dev/null 191 | case ${{ matrix.job.target }} in 192 | *-pc-windows-*) 7z -y a "${PKG_NAME}" "${PKG_BASENAME}"/* | tail -2 ;; 193 | *) tar czf "${PKG_NAME}" "${PKG_BASENAME}"/* ;; 194 | esac; 195 | popd >/dev/null 196 | 197 | # Let subsequent steps know where to find the compressed package 198 | echo "PKG_PATH=${PKG_STAGING}/${PKG_NAME}" >> $GITHUB_OUTPUT 199 | 200 | - name: Create Debian package 201 | id: debian-package 202 | shell: bash 203 | if: startsWith(matrix.job.os, 'ubuntu') 204 | run: | 205 | COPYRIGHT_YEARS="2018 - "$(date "+%Y") 206 | DPKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/debian-package" 207 | DPKG_DIR="${DPKG_STAGING}/dpkg" 208 | mkdir -p "${DPKG_DIR}" 209 | 210 | DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }} 211 | DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }}-musl 212 | case ${{ matrix.job.target }} in *-musl) DPKG_BASENAME=${{ 
needs.crate_metadata.outputs.name }}-musl ; DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }} ;; esac; 213 | DPKG_VERSION=${{ needs.crate_metadata.outputs.version }} 214 | 215 | unset DPKG_ARCH 216 | case ${{ matrix.job.target }} in 217 | aarch64-*-linux-*) DPKG_ARCH=arm64 ;; 218 | arm-*-linux-*hf) DPKG_ARCH=armhf ;; 219 | i686-*-linux-*) DPKG_ARCH=i686 ;; 220 | x86_64-*-linux-*) DPKG_ARCH=amd64 ;; 221 | *) DPKG_ARCH=notset ;; 222 | esac; 223 | 224 | DPKG_NAME="${DPKG_BASENAME}_${DPKG_VERSION}_${DPKG_ARCH}.deb" 225 | echo "DPKG_NAME=${DPKG_NAME}" >> $GITHUB_OUTPUT 226 | 227 | # Binary 228 | install -Dm755 "${{ steps.bin.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.bin.outputs.BIN_NAME }}" 229 | 230 | # Man page 231 | install -Dm644 'doc/${{ needs.crate_metadata.outputs.name }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1" 232 | gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1" 233 | 234 | # Autocompletion files 235 | install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ needs.crate_metadata.outputs.name }}" 236 | install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ needs.crate_metadata.outputs.name }}.fish" 237 | install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ needs.crate_metadata.outputs.name }}" 238 | 239 | # README and LICENSE 240 | install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md" 241 | install -Dm644 "LICENSE-MIT" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-MIT" 242 | install -Dm644 "LICENSE-APACHE" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-APACHE" 243 | install -Dm644 "CHANGELOG.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog" 244 | gzip -n --best "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog" 245 | 246 | cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" < "${DPKG_DIR}/DEBIAN/control" 
<> $GITHUB_OUTPUT 308 | 309 | # build dpkg 310 | fakeroot dpkg-deb --build "${DPKG_DIR}" "${DPKG_PATH}" 311 | 312 | - name: "Artifact upload: tarball" 313 | uses: actions/upload-artifact@master 314 | with: 315 | name: ${{ steps.package.outputs.PKG_NAME }} 316 | path: ${{ steps.package.outputs.PKG_PATH }} 317 | 318 | - name: "Artifact upload: Debian package" 319 | uses: actions/upload-artifact@master 320 | if: steps.debian-package.outputs.DPKG_NAME 321 | with: 322 | name: ${{ steps.debian-package.outputs.DPKG_NAME }} 323 | path: ${{ steps.debian-package.outputs.DPKG_PATH }} 324 | 325 | - name: Check for release 326 | id: is-release 327 | shell: bash 328 | run: | 329 | unset IS_RELEASE ; if [[ $GITHUB_REF =~ ^refs/tags/v[0-9].* ]]; then IS_RELEASE='true' ; fi 330 | echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT 331 | 332 | - name: Publish archives and packages 333 | uses: softprops/action-gh-release@v1 334 | if: steps.is-release.outputs.IS_RELEASE 335 | with: 336 | files: | 337 | ${{ steps.package.outputs.PKG_PATH }} 338 | ${{ steps.debian-package.outputs.DPKG_PATH }} 339 | env: 340 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 341 | 342 | winget: 343 | name: Publish to Winget 344 | runs-on: windows-latest # Action can only run on Windows 345 | needs: build 346 | if: startsWith(github.ref, 'refs/tags/v') 347 | steps: 348 | - uses: vedantmgoyal2009/winget-releaser@v2 349 | with: 350 | identifier: sharkdp.fd 351 | installers-regex: '-pc-windows-msvc\.zip$' 352 | token: ${{ secrets.WINGET_TOKEN }} 353 | -------------------------------------------------------------------------------- /doc/fd.1: -------------------------------------------------------------------------------- 1 | .TH FD 1 2 | .SH NAME 3 | fd \- find entries in the filesystem 4 | .SH SYNOPSIS 5 | .B fd 6 | .RB [ \-HIEsiaLp0hV ] 7 | .RB [ \-d 8 | .IR depth ] 9 | .RB [ \-t 10 | .IR filetype ] 11 | .RB [ \-e 12 | .IR ext ] 13 | .RB [ \-E 14 | .IR exclude ] 15 | .RB [ \-c 16 | .IR when ] 17 | .RB [ \-j 18 | 
.IR num ] 19 | .RB [ \-x 20 | .IR cmd ] 21 | .RI [ pattern ] 22 | .RI [ path... ] 23 | .SH DESCRIPTION 24 | .B fd 25 | is a simple, fast and user-friendly alternative to 26 | .BR find (1). 27 | .P 28 | By default 29 | .B fd 30 | uses regular expressions for the pattern. However, this can be changed to use simple glob patterns 31 | with the '\-\-glob' option. 32 | .SH OPTIONS 33 | .TP 34 | .B \-H, \-\-hidden 35 | Include hidden files and directories in the search results 36 | (default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'. 37 | .TP 38 | .B \-I, \-\-no\-ignore 39 | Show search results from files and directories that would otherwise be ignored by 40 | .RS 41 | .IP \[bu] 2 42 | .I .gitignore 43 | .IP \[bu] 44 | .I .git/info/exclude 45 | .IP \[bu] 46 | The global gitignore configuration (by default 47 | .IR $HOME/.config/git/ignore ) 48 | .IP \[bu] 49 | .I .ignore 50 | .IP \[bu] 51 | .I .fdignore 52 | .IP \[bu] 53 | The global fd ignore file (usually 54 | .I $HOME/.config/fd/ignore 55 | ) 56 | .RE 57 | .IP 58 | The flag can be overridden with '--ignore'. 59 | .TP 60 | .B \-u, \-\-unrestricted 61 | Perform an unrestricted search, including ignored and hidden files. This is an alias for '--hidden --no-ignore'. 62 | .TP 63 | .B \-\-no\-ignore\-vcs 64 | Show search results from files and directories that would otherwise be ignored by gitignore files 65 | including 66 | .IR .gitignore , 67 | .IR .git/info/exclude , 68 | and the global gitignore configuration 69 | .RI ( core.excludesFile 70 | git setting, which defaults to 71 | .IR $HOME/.config/git/ignore ). 72 | The flag can be overridden with '--ignore-vcs'. 73 | .TP 74 | .B \-\-no\-require\-git 75 | Do not require a git repository to respect gitignores. By default, fd will only 76 | respect global gitignore rules, .gitignore rules and local exclude rules if fd 77 | detects that you are searching inside a git repository. 
This flag allows you to 78 | relax this restriction such that fd will respect all git related ignore rules 79 | regardless of whether you’re searching in a git repository or not. The flag can 80 | be overridden with '--require-git'. 81 | .TP 82 | .B \-\-no\-ignore\-parent 83 | Show search results from files and directories that would otherwise be ignored by gitignore files in 84 | parent directories. 85 | .TP 86 | .B \-s, \-\-case\-sensitive 87 | Perform a case-sensitive search. By default, fd uses case-insensitive searches, unless the 88 | pattern contains an uppercase character (smart case). 89 | .TP 90 | .B \-i, \-\-ignore\-case 91 | Perform a case-insensitive search. By default, fd uses case-insensitive searches, unless the 92 | pattern contains an uppercase character (smart case). 93 | .TP 94 | .B \-g, \-\-glob 95 | Perform a glob-based search instead of a regular expression search. 96 | If combined with the '\-\-full-path' option, '**' can be used to match multiple path components. 97 | .TP 98 | .B \-\-regex 99 | Perform a regular-expression based search (default). This can be used to override --glob. 100 | .TP 101 | .B \-F, \-\-fixed\-strings 102 | Treat the pattern as a literal string instead of a regular expression. Note that this also 103 | performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'. 104 | .TP 105 | .BI "\-\-and " pattern 106 | Add additional required search patterns, all of which must be matched. Multiple additional 107 | patterns can be specified. The patterns are regular expressions, unless '\-\-glob' 108 | or '\-\-fixed\-strings' is used. 109 | .TP 110 | .B \-a, \-\-absolute\-path 111 | Shows the full path starting from the root as opposed to relative paths. 112 | The flag can be overridden with '--relative-path'. 113 | .TP 114 | .B \-l, \-\-list\-details 115 | Use a detailed listing format like 'ls -l'. This is basically an alias 116 | for '--exec-batch ls -l' with some additional 'ls' options. 
This can be used 117 | to see more metadata, to show symlink targets and to achieve a deterministic 118 | sort order. 119 | .TP 120 | .B \-L, \-\-follow 121 | By default, fd does not descend into symlinked directories. Using this flag, symbolic links are 122 | also traversed. The flag can be overridden with '--no-follow'. 123 | .TP 124 | .B \-p, \-\-full\-path 125 | By default, the search pattern is only matched against the filename (or directory name). Using 126 | this flag, the 127 | .I pattern 128 | is matched against the full path. 129 | .TP 130 | .B \-0, \-\-print0 131 | Separate search results by the null character (instead of newlines). Useful for piping results to 132 | .IR xargs . 133 | .TP 134 | .B \-\-max\-results count 135 | Limit the number of search results to 'count' and quit immediately. 136 | .TP 137 | .B \-1 138 | Limit the search to a single result and quit immediately. This is an alias for '--max-results=1'. 139 | .TP 140 | .B \-q, \-\-quiet 141 | When the flag is present, the program does not print anything and will instead exit with a code of 0 if there is at least one search result. 142 | Otherwise, the exit code will be 1. 143 | This is mainly for usage in scripts and can be faster than checking for output because the search can be stopped early after the first match. 144 | .B \-\-has\-results 145 | can be used as an alias. 146 | .TP 147 | .B \-\-show-errors 148 | Enable the display of filesystem errors for situations such as insufficient 149 | permissions or dead symlinks. 150 | .TP 151 | .B \-\-strip-cwd-prefix 152 | By default, relative paths are prefixed with './' when the output goes to a non interactive terminal 153 | (TTY). Use this flag to disable this behaviour. 154 | .TP 155 | .B \-\-one\-file\-system, \-\-mount, \-\-xdev 156 | By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. 
173 | Only show search results at the exact given depth. This is an alias for '--min-depth <d> --max-depth <d>'.
261 | .I <+-><NUM><UNIT>
269 | .IP 'NUM' 270 | The numeric size (e.g. 500) 271 | .IP 'UNIT' 272 | The units for NUM. They are not case-sensitive. 273 | Allowed unit values: 274 | .RS 275 | .IP 'b' 276 | bytes 277 | .IP 'k' 278 | kilobytes (base ten, 10^3 = 1000 bytes) 279 | .IP 'm' 280 | megabytes 281 | .IP 'g' 282 | gigabytes 283 | .IP 't' 284 | terabytes 285 | .IP 'ki' 286 | kibibytes (base two, 2^10 = 1024 bytes) 287 | .IP 'mi' 288 | mebibytes 289 | .IP 'gi' 290 | gibibytes 291 | .IP 'ti' 292 | tebibytes 293 | .RE 294 | .RE 295 | .TP 296 | .BI "\-\-changed-within " date|duration 297 | Filter results based on the file modification time. 298 | Files with modification times greater than the argument will be returned. 299 | The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point 300 | in time in either full RFC3339 format with time zone, or as a date or datetime in the 301 | local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR). 302 | \fB\-\-change-newer-than\fR, 303 | .B --newer 304 | or 305 | .B --changed-after 306 | can be used as aliases. 307 | 308 | Examples: 309 | \-\-changed-within 2weeks 310 | \-\-change-newer-than "2018-10-27 10:00:00" 311 | \-\-newer 2018-10-27 312 | .TP 313 | .BI "\-\-changed-before " date|duration 314 | Filter results based on the file modification time. 315 | Files with modification times less than the argument will be returned. 316 | The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point 317 | in time in either full RFC3339 format with time zone, or as a date or datetime in the 318 | local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR). 319 | .B --change-older-than 320 | or 321 | .B --older 322 | can be used as aliases. 323 | 324 | Examples: 325 | \-\-changed-before "2018-10-27 10:00:00" 326 | \-\-change-older-than 2weeks 327 | .TP 328 | .BI "-o, \-\-owner " [user][:group] 329 | Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. 
Either side 330 | is optional. Precede either side with a '!' to exclude files instead. 331 | 332 | Examples: 333 | \-\-owner john 334 | \-\-owner :students 335 | \-\-owner "!john:students" 336 | .TP 337 | .BI "\-\-base\-directory " path 338 | Change the current working directory of fd to the provided path. This means that search results will 339 | be shown with respect to the given base path. Note that relative paths which are passed to fd via the 340 | positional \fIpath\fR argument or the \fB\-\-search\-path\fR option will also be resolved relative to 341 | this directory. 342 | .TP 343 | .BI "\-\-path\-separator " separator 344 | Set the path separator to use when printing file paths. The default is the OS-specific separator 345 | ('/' on Unix, '\\' on Windows). 346 | .TP 347 | .BI "\-\-search\-path " search\-path 348 | Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to 349 | \'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\' 350 | .TP 351 | .BI "\-x, \-\-exec " command 352 | .RS 353 | Execute 354 | .I command 355 | for each search result in parallel (use --threads=1 for sequential command execution). 356 | 357 | Note that all subsequent positional arguments are considered to be arguments to the 358 | .I command 359 | - not to fd. 360 | It is therefore recommended to place the \-x/\-\-exec option last. Alternatively, you can supply 361 | a ';' argument to end the argument list and continue with more fd options. 362 | Most shells require ';' to be escaped: '\\;'. 363 | This option can be specified multiple times, in which case all commands are run for each 364 | file found, in the order they are provided. In that case, you must supply a ';' argument for 365 | all but the last commands. 
366 | 367 | The following placeholders are substituted before the command is executed: 368 | .RS 369 | .IP {} 370 | path (of the current search result) 371 | .IP {/} 372 | basename 373 | .IP {//} 374 | parent directory 375 | .IP {.} 376 | path without file extension 377 | .IP {/.} 378 | basename without file extension 379 | .RE 380 | 381 | If no placeholder is present, an implicit "{}" at the end is assumed. 382 | 383 | Examples: 384 | 385 | - find all *.zip files and unzip them: 386 | 387 | fd -e zip -x unzip 388 | 389 | - find *.h and *.cpp files and run "clang-format -i .." for each of them: 390 | 391 | fd -e h -e cpp -x clang-format -i 392 | 393 | - Convert all *.jpg files to *.png files: 394 | 395 | fd -e jpg -x convert {} {.}.png 396 | .RE 397 | .TP 398 | .BI "\-X, \-\-exec-batch " command 399 | .RS 400 | Execute 401 | .I command 402 | once, with all search results as arguments. 403 | One of the following placeholders is substituted before the command is executed: 404 | .RS 405 | .IP {} 406 | path (of all search results) 407 | .IP {/} 408 | basename 409 | .IP {//} 410 | parent directory 411 | .IP {.} 412 | path without file extension 413 | .IP {/.} 414 | basename without file extension 415 | .RE 416 | 417 | If no placeholder is present, an implicit "{}" at the end is assumed. 418 | 419 | Like \-\-exec, this can be used multiple times, in which case each command will be run in 420 | the order given. 421 | 422 | Examples: 423 | 424 | - Find all test_*.py files and open them in your favorite editor: 425 | 426 | fd -g 'test_*.py' -X vim 427 | 428 | Note that this executes a single "vim" process with all search results as arguments. 429 | 430 | - Find all *.rs files and count the lines with "wc -l ...": 431 | 432 | fd -e rs -X wc -l 433 | .RE 434 | .TP 435 | .BI "\-\-batch-size " size 436 | Maximum number of arguments to pass to the command given with -X. 
If the number of results is 437 | greater than the given size, the command given with -X is run again with remaining arguments. A 438 | batch size of zero means there is no limit (default), but note that batching might still happen 439 | due to OS restrictions on the maximum length of command lines. 440 | .SH PATTERN SYNTAX 441 | The regular expression syntax used by fd is documented here: 442 | 443 | https://docs.rs/regex/1.0.0/regex/#syntax 444 | 445 | The glob syntax is documented here: 446 | 447 | https://docs.rs/globset/#syntax 448 | .SH ENVIRONMENT 449 | .TP 450 | .B LS_COLORS 451 | Determines how to colorize search results, see 452 | .BR dircolors (1) . 453 | .TP 454 | .B NO_COLOR 455 | Disables colorized output. 456 | .TP 457 | .B XDG_CONFIG_HOME, HOME 458 | Used to locate the global ignore file. If 459 | .B XDG_CONFIG_HOME 460 | is set, use 461 | .IR $XDG_CONFIG_HOME/fd/ignore . 462 | Otherwise, use 463 | .IR $HOME/.config/fd/ignore . 464 | .SH EXAMPLES 465 | .TP 466 | .RI "Find files and directories that match the pattern '" needle "':" 467 | $ fd needle 468 | .TP 469 | .RI "Start a search in a given directory (" /var/log "):" 470 | $ fd nginx /var/log 471 | .TP 472 | .RI "Find all Python files (all files with the extension " .py ") in the current directory:" 473 | $ fd -e py 474 | .TP 475 | .RI "Open all search results with vim:" 476 | $ fd pattern -X vim 477 | .SH BUGS 478 | Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues 479 | .SH SEE ALSO 480 | .BR find (1) 481 | -------------------------------------------------------------------------------- /src/walk.rs: -------------------------------------------------------------------------------- 1 | use std::ffi::OsStr; 2 | use std::io; 3 | use std::mem; 4 | use std::path::PathBuf; 5 | use std::sync::atomic::{AtomicBool, Ordering}; 6 | use std::sync::{Arc, Mutex}; 7 | use std::thread; 8 | use std::time::{Duration, Instant}; 9 | use std::{borrow::Cow, io::Write}; 10 | 11 | use 
anyhow::{anyhow, Result};
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, Sender};
use ignore::overrides::OverrideBuilder;
use ignore::{self, WalkBuilder};
use regex::bytes::Regex;

use crate::config::Config;
use crate::dir_entry::DirEntry;
use crate::error::print_error;
use crate::exec;
use crate::exit_codes::{merge_exitcodes, ExitCode};
use crate::filesystem;
use crate::output;

/// The receiver thread can either be buffering results or directly streaming to the console.
#[derive(PartialEq)]
enum ReceiverMode {
    /// Receiver is still buffering in order to sort the results, if the search finishes fast
    /// enough.
    Buffering,

    /// Receiver is directly printing results to the output.
    Streaming,
}

/// The Worker threads can result in a valid entry having PathBuf or an error.
#[allow(clippy::large_enum_variant)]
pub enum WorkerResult {
    // Errors should be rare, so it's probably better to allow large_enum_variant than
    // to box the Entry variant
    Entry(DirEntry),
    Error(ignore::Error),
}

/// Maximum size of the output buffer before flushing results to the console
pub const MAX_BUFFER_LENGTH: usize = 1000;
/// Default duration until output buffering switches to streaming.
pub const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100);

/// Recursively scan the given search path for files / pathnames matching the patterns.
///
/// If the `--exec` argument was supplied, this will create a thread pool for executing
/// jobs in parallel from a given command line and the discovered paths. Otherwise, each
/// path will simply be written to standard output.
55 | pub fn scan(paths: &[PathBuf], patterns: Arc>, config: Arc) -> Result { 56 | let first_path = &paths[0]; 57 | 58 | // Channel capacity was chosen empircally to perform similarly to an unbounded channel 59 | let (tx, rx) = bounded(0x4000 * config.threads); 60 | 61 | let mut override_builder = OverrideBuilder::new(first_path); 62 | 63 | for pattern in &config.exclude_patterns { 64 | override_builder 65 | .add(pattern) 66 | .map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?; 67 | } 68 | let overrides = override_builder 69 | .build() 70 | .map_err(|_| anyhow!("Mismatch in exclude patterns"))?; 71 | 72 | let mut walker = WalkBuilder::new(first_path); 73 | walker 74 | .hidden(config.ignore_hidden) 75 | .ignore(config.read_fdignore) 76 | .parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore)) 77 | .git_ignore(config.read_vcsignore) 78 | .git_global(config.read_vcsignore) 79 | .git_exclude(config.read_vcsignore) 80 | .require_git(config.require_git_to_read_vcsignore) 81 | .overrides(overrides) 82 | .follow_links(config.follow_links) 83 | // No need to check for supported platforms, option is unavailable on unsupported ones 84 | .same_file_system(config.one_file_system) 85 | .max_depth(config.max_depth); 86 | 87 | if config.read_fdignore { 88 | walker.add_custom_ignore_filename(".fdignore"); 89 | } 90 | 91 | if config.read_global_ignore { 92 | #[cfg(target_os = "macos")] 93 | let config_dir_op = std::env::var_os("XDG_CONFIG_HOME") 94 | .map(PathBuf::from) 95 | .filter(|p| p.is_absolute()) 96 | .or_else(|| dirs_next::home_dir().map(|d| d.join(".config"))); 97 | 98 | #[cfg(not(target_os = "macos"))] 99 | let config_dir_op = dirs_next::config_dir(); 100 | 101 | if let Some(global_ignore_file) = config_dir_op 102 | .map(|p| p.join("fd").join("ignore")) 103 | .filter(|p| p.is_file()) 104 | { 105 | let result = walker.add_ignore(global_ignore_file); 106 | match result { 107 | Some(ignore::Error::Partial(_)) => (), 108 | Some(err) => { 
109 | print_error(format!("Malformed pattern in global ignore file. {}.", err)); 110 | } 111 | None => (), 112 | } 113 | } 114 | } 115 | 116 | for ignore_file in &config.ignore_files { 117 | let result = walker.add_ignore(ignore_file); 118 | match result { 119 | Some(ignore::Error::Partial(_)) => (), 120 | Some(err) => { 121 | print_error(format!("Malformed pattern in custom ignore file. {}.", err)); 122 | } 123 | None => (), 124 | } 125 | } 126 | 127 | for path in &paths[1..] { 128 | walker.add(path); 129 | } 130 | 131 | let parallel_walker = walker.threads(config.threads).build_parallel(); 132 | 133 | // Flag for cleanly shutting down the parallel walk 134 | let quit_flag = Arc::new(AtomicBool::new(false)); 135 | // Flag specifically for quitting due to ^C 136 | let interrupt_flag = Arc::new(AtomicBool::new(false)); 137 | 138 | if config.ls_colors.is_some() && config.is_printing() { 139 | let quit_flag = Arc::clone(&quit_flag); 140 | let interrupt_flag = Arc::clone(&interrupt_flag); 141 | 142 | ctrlc::set_handler(move || { 143 | quit_flag.store(true, Ordering::Relaxed); 144 | 145 | if interrupt_flag.fetch_or(true, Ordering::Relaxed) { 146 | // Ctrl-C has been pressed twice, exit NOW 147 | ExitCode::KilledBySigint.exit(); 148 | } 149 | }) 150 | .unwrap(); 151 | } 152 | 153 | // Spawn the thread that receives all results through the channel. 154 | let receiver_thread = spawn_receiver(&config, &quit_flag, &interrupt_flag, rx); 155 | 156 | // Spawn the sender threads. 157 | spawn_senders(&config, &quit_flag, patterns, parallel_walker, tx); 158 | 159 | // Wait for the receiver thread to print out all results. 160 | let exit_code = receiver_thread.join().unwrap(); 161 | 162 | if interrupt_flag.load(Ordering::Relaxed) { 163 | Ok(ExitCode::KilledBySigint) 164 | } else { 165 | Ok(exit_code) 166 | } 167 | } 168 | 169 | /// Wrapper for the receiver thread's buffering behavior. 170 | struct ReceiverBuffer { 171 | /// The configuration. 
172 | config: Arc, 173 | /// For shutting down the senders. 174 | quit_flag: Arc, 175 | /// The ^C notifier. 176 | interrupt_flag: Arc, 177 | /// Receiver for worker results. 178 | rx: Receiver, 179 | /// Standard output. 180 | stdout: W, 181 | /// The current buffer mode. 182 | mode: ReceiverMode, 183 | /// The deadline to switch to streaming mode. 184 | deadline: Instant, 185 | /// The buffer of quickly received paths. 186 | buffer: Vec, 187 | /// Result count. 188 | num_results: usize, 189 | } 190 | 191 | impl ReceiverBuffer { 192 | /// Create a new receiver buffer. 193 | fn new( 194 | config: Arc, 195 | quit_flag: Arc, 196 | interrupt_flag: Arc, 197 | rx: Receiver, 198 | stdout: W, 199 | ) -> Self { 200 | let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME); 201 | let deadline = Instant::now() + max_buffer_time; 202 | 203 | Self { 204 | config, 205 | quit_flag, 206 | interrupt_flag, 207 | rx, 208 | stdout, 209 | mode: ReceiverMode::Buffering, 210 | deadline, 211 | buffer: Vec::with_capacity(MAX_BUFFER_LENGTH), 212 | num_results: 0, 213 | } 214 | } 215 | 216 | /// Process results until finished. 217 | fn process(&mut self) -> ExitCode { 218 | loop { 219 | if let Err(ec) = self.poll() { 220 | self.quit_flag.store(true, Ordering::Relaxed); 221 | return ec; 222 | } 223 | } 224 | } 225 | 226 | /// Receive the next worker result. 227 | fn recv(&self) -> Result { 228 | match self.mode { 229 | ReceiverMode::Buffering => { 230 | // Wait at most until we should switch to streaming 231 | self.rx.recv_deadline(self.deadline) 232 | } 233 | ReceiverMode::Streaming => { 234 | // Wait however long it takes for a result 235 | Ok(self.rx.recv()?) 236 | } 237 | } 238 | } 239 | 240 | /// Wait for a result or state change. 
241 | fn poll(&mut self) -> Result<(), ExitCode> { 242 | match self.recv() { 243 | Ok(WorkerResult::Entry(dir_entry)) => { 244 | if self.config.quiet { 245 | return Err(ExitCode::HasResults(true)); 246 | } 247 | 248 | match self.mode { 249 | ReceiverMode::Buffering => { 250 | self.buffer.push(dir_entry); 251 | if self.buffer.len() > MAX_BUFFER_LENGTH { 252 | self.stream()?; 253 | } 254 | } 255 | ReceiverMode::Streaming => { 256 | self.print(&dir_entry)?; 257 | self.flush()?; 258 | } 259 | } 260 | 261 | self.num_results += 1; 262 | if let Some(max_results) = self.config.max_results { 263 | if self.num_results >= max_results { 264 | return self.stop(); 265 | } 266 | } 267 | } 268 | Ok(WorkerResult::Error(err)) => { 269 | if self.config.show_filesystem_errors { 270 | print_error(err.to_string()); 271 | } 272 | } 273 | Err(RecvTimeoutError::Timeout) => { 274 | self.stream()?; 275 | } 276 | Err(RecvTimeoutError::Disconnected) => { 277 | return self.stop(); 278 | } 279 | } 280 | 281 | Ok(()) 282 | } 283 | 284 | /// Output a path. 285 | fn print(&mut self, entry: &DirEntry) -> Result<(), ExitCode> { 286 | output::print_entry(&mut self.stdout, entry, &self.config); 287 | 288 | if self.interrupt_flag.load(Ordering::Relaxed) { 289 | // Ignore any errors on flush, because we're about to exit anyway 290 | let _ = self.flush(); 291 | return Err(ExitCode::KilledBySigint); 292 | } 293 | 294 | Ok(()) 295 | } 296 | 297 | /// Switch ourselves into streaming mode. 298 | fn stream(&mut self) -> Result<(), ExitCode> { 299 | self.mode = ReceiverMode::Streaming; 300 | 301 | let buffer = mem::take(&mut self.buffer); 302 | for path in buffer { 303 | self.print(&path)?; 304 | } 305 | 306 | self.flush() 307 | } 308 | 309 | /// Stop looping. 
310 | fn stop(&mut self) -> Result<(), ExitCode> { 311 | if self.mode == ReceiverMode::Buffering { 312 | self.buffer.sort(); 313 | self.stream()?; 314 | } 315 | 316 | if self.config.quiet { 317 | Err(ExitCode::HasResults(self.num_results > 0)) 318 | } else { 319 | Err(ExitCode::Success) 320 | } 321 | } 322 | 323 | /// Flush stdout if necessary. 324 | fn flush(&mut self) -> Result<(), ExitCode> { 325 | if self.config.interactive_terminal && self.stdout.flush().is_err() { 326 | // Probably a broken pipe. Exit gracefully. 327 | return Err(ExitCode::GeneralError); 328 | } 329 | Ok(()) 330 | } 331 | } 332 | 333 | fn spawn_receiver( 334 | config: &Arc, 335 | quit_flag: &Arc, 336 | interrupt_flag: &Arc, 337 | rx: Receiver, 338 | ) -> thread::JoinHandle { 339 | let config = Arc::clone(config); 340 | let quit_flag = Arc::clone(quit_flag); 341 | let interrupt_flag = Arc::clone(interrupt_flag); 342 | 343 | let threads = config.threads; 344 | thread::spawn(move || { 345 | // This will be set to `Some` if the `--exec` argument was supplied. 346 | if let Some(ref cmd) = config.command { 347 | if cmd.in_batch_mode() { 348 | exec::batch(rx, cmd, &config) 349 | } else { 350 | let out_perm = Mutex::new(()); 351 | 352 | thread::scope(|scope| { 353 | // Each spawned job will store it's thread handle in here. 354 | let mut handles = Vec::with_capacity(threads); 355 | for _ in 0..threads { 356 | let rx = rx.clone(); 357 | 358 | // Spawn a job thread that will listen for and execute inputs. 359 | let handle = scope.spawn(|| exec::job(rx, cmd, &out_perm, &config)); 360 | 361 | // Push the handle of the spawned thread into the vector for later joining. 
362 | handles.push(handle); 363 | } 364 | let exit_codes = handles.into_iter().map(|handle| handle.join().unwrap()); 365 | merge_exitcodes(exit_codes) 366 | }) 367 | } 368 | } else { 369 | let stdout = io::stdout(); 370 | let stdout = stdout.lock(); 371 | let stdout = io::BufWriter::new(stdout); 372 | 373 | let mut rxbuffer = ReceiverBuffer::new(config, quit_flag, interrupt_flag, rx, stdout); 374 | rxbuffer.process() 375 | } 376 | }) 377 | } 378 | 379 | fn spawn_senders( 380 | config: &Arc, 381 | quit_flag: &Arc, 382 | patterns: Arc>, 383 | parallel_walker: ignore::WalkParallel, 384 | tx: Sender, 385 | ) { 386 | parallel_walker.run(|| { 387 | let config = Arc::clone(config); 388 | let patterns = Arc::clone(&patterns); 389 | let tx_thread = tx.clone(); 390 | let quit_flag = Arc::clone(quit_flag); 391 | 392 | Box::new(move |entry_o| { 393 | if quit_flag.load(Ordering::Relaxed) { 394 | return ignore::WalkState::Quit; 395 | } 396 | 397 | let entry = match entry_o { 398 | Ok(ref e) if e.depth() == 0 => { 399 | // Skip the root directory entry. 
400 | return ignore::WalkState::Continue; 401 | } 402 | Ok(e) => DirEntry::normal(e), 403 | Err(ignore::Error::WithPath { 404 | path, 405 | err: inner_err, 406 | }) => match inner_err.as_ref() { 407 | ignore::Error::Io(io_error) 408 | if io_error.kind() == io::ErrorKind::NotFound 409 | && path 410 | .symlink_metadata() 411 | .ok() 412 | .map_or(false, |m| m.file_type().is_symlink()) => 413 | { 414 | DirEntry::broken_symlink(path) 415 | } 416 | _ => { 417 | return match tx_thread.send(WorkerResult::Error(ignore::Error::WithPath { 418 | path, 419 | err: inner_err, 420 | })) { 421 | Ok(_) => ignore::WalkState::Continue, 422 | Err(_) => ignore::WalkState::Quit, 423 | } 424 | } 425 | }, 426 | Err(err) => { 427 | return match tx_thread.send(WorkerResult::Error(err)) { 428 | Ok(_) => ignore::WalkState::Continue, 429 | Err(_) => ignore::WalkState::Quit, 430 | } 431 | } 432 | }; 433 | 434 | if let Some(min_depth) = config.min_depth { 435 | if entry.depth().map_or(true, |d| d < min_depth) { 436 | return ignore::WalkState::Continue; 437 | } 438 | } 439 | 440 | // Check the name first, since it doesn't require metadata 441 | let entry_path = entry.path(); 442 | 443 | let search_str: Cow = if config.search_full_path { 444 | let path_abs_buf = filesystem::path_absolute_form(entry_path) 445 | .expect("Retrieving absolute path succeeds"); 446 | Cow::Owned(path_abs_buf.as_os_str().to_os_string()) 447 | } else { 448 | match entry_path.file_name() { 449 | Some(filename) => Cow::Borrowed(filename), 450 | None => unreachable!( 451 | "Encountered file system entry without a file name. This should only \ 452 | happen for paths like 'foo/bar/..' or '/' which are not supposed to \ 453 | appear in a file system traversal." 454 | ), 455 | } 456 | }; 457 | 458 | if !patterns 459 | .iter() 460 | .all(|pat| pat.is_match(&filesystem::osstr_to_bytes(search_str.as_ref()))) 461 | { 462 | return ignore::WalkState::Continue; 463 | } 464 | 465 | // Filter out unwanted extensions. 
466 | if let Some(ref exts_regex) = config.extensions { 467 | if let Some(path_str) = entry_path.file_name() { 468 | if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) { 469 | return ignore::WalkState::Continue; 470 | } 471 | } else { 472 | return ignore::WalkState::Continue; 473 | } 474 | } 475 | 476 | // Filter out unwanted file types. 477 | if let Some(ref file_types) = config.file_types { 478 | if file_types.should_ignore(&entry) { 479 | return ignore::WalkState::Continue; 480 | } 481 | } 482 | 483 | #[cfg(unix)] 484 | { 485 | if let Some(ref owner_constraint) = config.owner_constraint { 486 | if let Some(metadata) = entry.metadata() { 487 | if !owner_constraint.matches(metadata) { 488 | return ignore::WalkState::Continue; 489 | } 490 | } else { 491 | return ignore::WalkState::Continue; 492 | } 493 | } 494 | } 495 | 496 | // Filter out unwanted sizes if it is a file and we have been given size constraints. 497 | if !config.size_constraints.is_empty() { 498 | if entry_path.is_file() { 499 | if let Some(metadata) = entry.metadata() { 500 | let file_size = metadata.len(); 501 | if config 502 | .size_constraints 503 | .iter() 504 | .any(|sc| !sc.is_within(file_size)) 505 | { 506 | return ignore::WalkState::Continue; 507 | } 508 | } else { 509 | return ignore::WalkState::Continue; 510 | } 511 | } else { 512 | return ignore::WalkState::Continue; 513 | } 514 | } 515 | 516 | // Filter out unwanted modification times 517 | if !config.time_constraints.is_empty() { 518 | let mut matched = false; 519 | if let Some(metadata) = entry.metadata() { 520 | if let Ok(modified) = metadata.modified() { 521 | matched = config 522 | .time_constraints 523 | .iter() 524 | .all(|tf| tf.applies_to(&modified)); 525 | } 526 | } 527 | if !matched { 528 | return ignore::WalkState::Continue; 529 | } 530 | } 531 | 532 | if config.is_printing() { 533 | if let Some(ls_colors) = &config.ls_colors { 534 | // Compute colors in parallel 535 | entry.style(ls_colors); 536 | } 537 | } 538 | 
539 | let send_result = tx_thread.send(WorkerResult::Entry(entry)); 540 | 541 | if send_result.is_err() { 542 | return ignore::WalkState::Quit; 543 | } 544 | 545 | // Apply pruning. 546 | if config.prune { 547 | return ignore::WalkState::Skip; 548 | } 549 | 550 | ignore::WalkState::Continue 551 | }) 552 | }); 553 | } 554 | -------------------------------------------------------------------------------- /src/exec/mod.rs: -------------------------------------------------------------------------------- 1 | mod command; 2 | mod input; 3 | mod job; 4 | mod token; 5 | 6 | use std::borrow::Cow; 7 | use std::ffi::{OsStr, OsString}; 8 | use std::io; 9 | use std::iter; 10 | use std::path::{Component, Path, PathBuf, Prefix}; 11 | use std::process::Stdio; 12 | use std::sync::{Mutex, OnceLock}; 13 | 14 | use anyhow::{bail, Result}; 15 | use argmax::Command; 16 | use regex::Regex; 17 | 18 | use crate::exit_codes::{merge_exitcodes, ExitCode}; 19 | 20 | use self::command::{execute_commands, handle_cmd_error}; 21 | use self::input::{basename, dirname, remove_extension}; 22 | pub use self::job::{batch, job}; 23 | use self::token::Token; 24 | 25 | /// Execution mode of the command 26 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 27 | pub enum ExecutionMode { 28 | /// Command is executed for each search result 29 | OneByOne, 30 | /// Command is run for a batch of results at once 31 | Batch, 32 | } 33 | 34 | #[derive(Debug, Clone, PartialEq)] 35 | pub struct CommandSet { 36 | mode: ExecutionMode, 37 | commands: Vec, 38 | } 39 | 40 | impl CommandSet { 41 | pub fn new(input: I) -> Result 42 | where 43 | I: IntoIterator, 44 | T: IntoIterator, 45 | S: AsRef, 46 | { 47 | Ok(CommandSet { 48 | mode: ExecutionMode::OneByOne, 49 | commands: input 50 | .into_iter() 51 | .map(CommandTemplate::new) 52 | .collect::>()?, 53 | }) 54 | } 55 | 56 | pub fn new_batch(input: I) -> Result 57 | where 58 | I: IntoIterator, 59 | T: IntoIterator, 60 | S: AsRef, 61 | { 62 | Ok(CommandSet { 63 | mode: 
ExecutionMode::Batch, 64 | commands: input 65 | .into_iter() 66 | .map(|args| { 67 | let cmd = CommandTemplate::new(args)?; 68 | if cmd.number_of_tokens() > 1 { 69 | bail!("Only one placeholder allowed for batch commands"); 70 | } 71 | if cmd.args[0].has_tokens() { 72 | bail!("First argument of exec-batch is expected to be a fixed executable"); 73 | } 74 | Ok(cmd) 75 | }) 76 | .collect::>>()?, 77 | }) 78 | } 79 | 80 | pub fn in_batch_mode(&self) -> bool { 81 | self.mode == ExecutionMode::Batch 82 | } 83 | 84 | pub fn execute( 85 | &self, 86 | input: &Path, 87 | path_separator: Option<&str>, 88 | out_perm: &Mutex<()>, 89 | buffer_output: bool, 90 | ) -> ExitCode { 91 | let commands = self 92 | .commands 93 | .iter() 94 | .map(|c| c.generate(input, path_separator)); 95 | execute_commands(commands, out_perm, buffer_output) 96 | } 97 | 98 | pub fn execute_batch(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode 99 | where 100 | I: Iterator, 101 | { 102 | let builders: io::Result> = self 103 | .commands 104 | .iter() 105 | .map(|c| CommandBuilder::new(c, limit)) 106 | .collect(); 107 | 108 | match builders { 109 | Ok(mut builders) => { 110 | for path in paths { 111 | for builder in &mut builders { 112 | if let Err(e) = builder.push(&path, path_separator) { 113 | return handle_cmd_error(Some(&builder.cmd), e); 114 | } 115 | } 116 | } 117 | 118 | for builder in &mut builders { 119 | if let Err(e) = builder.finish() { 120 | return handle_cmd_error(Some(&builder.cmd), e); 121 | } 122 | } 123 | 124 | merge_exitcodes(builders.iter().map(|b| b.exit_code())) 125 | } 126 | Err(e) => handle_cmd_error(None, e), 127 | } 128 | } 129 | } 130 | 131 | /// Represents a multi-exec command as it is built. 
132 | #[derive(Debug)] 133 | struct CommandBuilder { 134 | pre_args: Vec, 135 | path_arg: ArgumentTemplate, 136 | post_args: Vec, 137 | cmd: Command, 138 | count: usize, 139 | limit: usize, 140 | exit_code: ExitCode, 141 | } 142 | 143 | impl CommandBuilder { 144 | fn new(template: &CommandTemplate, limit: usize) -> io::Result { 145 | let mut pre_args = vec![]; 146 | let mut path_arg = None; 147 | let mut post_args = vec![]; 148 | 149 | for arg in &template.args { 150 | if arg.has_tokens() { 151 | path_arg = Some(arg.clone()); 152 | } else if path_arg.is_none() { 153 | pre_args.push(arg.generate("", None)); 154 | } else { 155 | post_args.push(arg.generate("", None)); 156 | } 157 | } 158 | 159 | let cmd = Self::new_command(&pre_args)?; 160 | 161 | Ok(Self { 162 | pre_args, 163 | path_arg: path_arg.unwrap(), 164 | post_args, 165 | cmd, 166 | count: 0, 167 | limit, 168 | exit_code: ExitCode::Success, 169 | }) 170 | } 171 | 172 | fn new_command(pre_args: &[OsString]) -> io::Result { 173 | let mut cmd = Command::new(&pre_args[0]); 174 | cmd.stdin(Stdio::inherit()); 175 | cmd.stdout(Stdio::inherit()); 176 | cmd.stderr(Stdio::inherit()); 177 | cmd.try_args(&pre_args[1..])?; 178 | Ok(cmd) 179 | } 180 | 181 | fn push(&mut self, path: &Path, separator: Option<&str>) -> io::Result<()> { 182 | if self.limit > 0 && self.count >= self.limit { 183 | self.finish()?; 184 | } 185 | 186 | let arg = self.path_arg.generate(path, separator); 187 | if !self 188 | .cmd 189 | .args_would_fit(iter::once(&arg).chain(&self.post_args)) 190 | { 191 | self.finish()?; 192 | } 193 | 194 | self.cmd.try_arg(arg)?; 195 | self.count += 1; 196 | Ok(()) 197 | } 198 | 199 | fn finish(&mut self) -> io::Result<()> { 200 | if self.count > 0 { 201 | self.cmd.try_args(&self.post_args)?; 202 | if !self.cmd.status()?.success() { 203 | self.exit_code = ExitCode::GeneralError; 204 | } 205 | 206 | self.cmd = Self::new_command(&self.pre_args)?; 207 | self.count = 0; 208 | } 209 | 210 | Ok(()) 211 | } 212 | 213 | fn 
exit_code(&self) -> ExitCode { 214 | self.exit_code 215 | } 216 | } 217 | 218 | /// Represents a template that is utilized to generate command strings. 219 | /// 220 | /// The template is meant to be coupled with an input in order to generate a command. The 221 | /// `generate_and_execute()` method will be used to generate a command and execute it. 222 | #[derive(Debug, Clone, PartialEq)] 223 | struct CommandTemplate { 224 | args: Vec, 225 | } 226 | 227 | impl CommandTemplate { 228 | fn new(input: I) -> Result 229 | where 230 | I: IntoIterator, 231 | S: AsRef, 232 | { 233 | static PLACEHOLDER_PATTERN: OnceLock = OnceLock::new(); 234 | 235 | let mut args = Vec::new(); 236 | let mut has_placeholder = false; 237 | 238 | for arg in input { 239 | let arg = arg.as_ref(); 240 | 241 | let mut tokens = Vec::new(); 242 | let mut start = 0; 243 | 244 | let pattern = 245 | PLACEHOLDER_PATTERN.get_or_init(|| Regex::new(r"\{(/?\.?|//)\}").unwrap()); 246 | 247 | for placeholder in pattern.find_iter(arg) { 248 | // Leading text before the placeholder. 249 | if placeholder.start() > start { 250 | tokens.push(Token::Text(arg[start..placeholder.start()].to_owned())); 251 | } 252 | 253 | start = placeholder.end(); 254 | 255 | match placeholder.as_str() { 256 | "{}" => tokens.push(Token::Placeholder), 257 | "{.}" => tokens.push(Token::NoExt), 258 | "{/}" => tokens.push(Token::Basename), 259 | "{//}" => tokens.push(Token::Parent), 260 | "{/.}" => tokens.push(Token::BasenameNoExt), 261 | _ => unreachable!("Unhandled placeholder"), 262 | } 263 | 264 | has_placeholder = true; 265 | } 266 | 267 | // Without a placeholder, the argument is just fixed text. 268 | if tokens.is_empty() { 269 | args.push(ArgumentTemplate::Text(arg.to_owned())); 270 | continue; 271 | } 272 | 273 | if start < arg.len() { 274 | // Trailing text after last placeholder. 
275 | tokens.push(Token::Text(arg[start..].to_owned())); 276 | } 277 | 278 | args.push(ArgumentTemplate::Tokens(tokens)); 279 | } 280 | 281 | // We need to check that we have at least one argument, because if not 282 | // it will try to execute each file and directory it finds. 283 | // 284 | // Sadly, clap can't currently handle this for us, see 285 | // https://github.com/clap-rs/clap/issues/3542 286 | if args.is_empty() { 287 | bail!("No executable provided for --exec or --exec-batch"); 288 | } 289 | 290 | // If a placeholder token was not supplied, append one at the end of the command. 291 | if !has_placeholder { 292 | args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder])); 293 | } 294 | 295 | Ok(CommandTemplate { args }) 296 | } 297 | 298 | fn number_of_tokens(&self) -> usize { 299 | self.args.iter().filter(|arg| arg.has_tokens()).count() 300 | } 301 | 302 | /// Generates and executes a command. 303 | /// 304 | /// Using the internal `args` field, and a supplied `input` variable, a `Command` will be 305 | /// build. 306 | fn generate(&self, input: &Path, path_separator: Option<&str>) -> io::Result { 307 | let mut cmd = Command::new(self.args[0].generate(input, path_separator)); 308 | for arg in &self.args[1..] { 309 | cmd.try_arg(arg.generate(input, path_separator))?; 310 | } 311 | Ok(cmd) 312 | } 313 | } 314 | 315 | /// Represents a template for a single command argument. 316 | /// 317 | /// The argument is either a collection of `Token`s including at least one placeholder variant, or 318 | /// a fixed text. 319 | #[derive(Clone, Debug, PartialEq)] 320 | enum ArgumentTemplate { 321 | Tokens(Vec), 322 | Text(String), 323 | } 324 | 325 | impl ArgumentTemplate { 326 | pub fn has_tokens(&self) -> bool { 327 | matches!(self, ArgumentTemplate::Tokens(_)) 328 | } 329 | 330 | /// Generate an argument from this template. If path_separator is Some, then it will replace 331 | /// the path separator in all placeholder tokens. 
Text arguments and tokens are not affected by 332 | /// path separator substitution. 333 | pub fn generate(&self, path: impl AsRef, path_separator: Option<&str>) -> OsString { 334 | use self::Token::*; 335 | let path = path.as_ref(); 336 | 337 | match *self { 338 | ArgumentTemplate::Tokens(ref tokens) => { 339 | let mut s = OsString::new(); 340 | for token in tokens { 341 | match *token { 342 | Basename => s.push(Self::replace_separator(basename(path), path_separator)), 343 | BasenameNoExt => s.push(Self::replace_separator( 344 | &remove_extension(basename(path).as_ref()), 345 | path_separator, 346 | )), 347 | NoExt => s.push(Self::replace_separator( 348 | &remove_extension(path), 349 | path_separator, 350 | )), 351 | Parent => s.push(Self::replace_separator(&dirname(path), path_separator)), 352 | Placeholder => { 353 | s.push(Self::replace_separator(path.as_ref(), path_separator)) 354 | } 355 | Text(ref string) => s.push(string), 356 | } 357 | } 358 | s 359 | } 360 | ArgumentTemplate::Text(ref text) => OsString::from(text), 361 | } 362 | } 363 | 364 | /// Replace the path separator in the input with the custom separator string. If path_separator 365 | /// is None, simply return a borrowed Cow of the input. Otherwise, the input is 366 | /// interpreted as a Path and its components are iterated through and re-joined into a new 367 | /// OsString. 368 | fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> { 369 | // fast-path - no replacement necessary 370 | if path_separator.is_none() { 371 | return Cow::Borrowed(path); 372 | } 373 | 374 | let path_separator = path_separator.unwrap(); 375 | let mut out = OsString::with_capacity(path.len()); 376 | let mut components = Path::new(path).components().peekable(); 377 | 378 | while let Some(comp) = components.next() { 379 | match comp { 380 | // Absolute paths on Windows are tricky. A Prefix component is usually a drive 381 | // letter or UNC path, and is usually followed by RootDir. 
There are also 382 | // "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to 383 | // ignore verbatim path prefixes here because they're very rare, might be 384 | // impossible to reach here, and there's no good way to deal with them. If users 385 | // are doing something advanced involving verbatim windows paths, they can do their 386 | // own output filtering with a tool like sed. 387 | Component::Prefix(prefix) => { 388 | if let Prefix::UNC(server, share) = prefix.kind() { 389 | // Prefix::UNC is a parsed version of '\\server\share' 390 | out.push(path_separator); 391 | out.push(path_separator); 392 | out.push(server); 393 | out.push(path_separator); 394 | out.push(share); 395 | } else { 396 | // All other Windows prefix types are rendered as-is. This results in e.g. "C:" for 397 | // drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted, 398 | // but they're not returned by directories fd can search anyway so we don't worry 399 | // about them. 400 | out.push(comp.as_os_str()); 401 | } 402 | } 403 | 404 | // Root directory is always replaced with the custom separator. 
405 | Component::RootDir => out.push(path_separator), 406 | 407 | // Everything else is joined normally, with a trailing separator if we're not last 408 | _ => { 409 | out.push(comp.as_os_str()); 410 | if components.peek().is_some() { 411 | out.push(path_separator); 412 | } 413 | } 414 | } 415 | } 416 | Cow::Owned(out) 417 | } 418 | } 419 | 
// Unit tests: template-token parsing ({}, {.}, {/}, {//}, {/.}), the one-placeholder
// restriction for batch mode, empty-command rejection, and custom path-separator output.
420 | #[cfg(test)] 421 | mod tests { 422 | use super::*; 423 | 424 | #[test] 425 | fn tokens_with_placeholder() { 426 | assert_eq!( 427 | CommandSet::new(vec![vec![&"echo", &"${SHELL}:"]]).unwrap(), 428 | CommandSet { 429 | commands: vec![CommandTemplate { 430 | args: vec![ 431 | ArgumentTemplate::Text("echo".into()), 432 | ArgumentTemplate::Text("${SHELL}:".into()), 433 | ArgumentTemplate::Tokens(vec![Token::Placeholder]), 434 | ] 435 | }], 436 | mode: ExecutionMode::OneByOne, 437 | } 438 | ); 439 | } 440 | 441 | #[test] 442 | fn tokens_with_no_extension() { 443 | assert_eq!( 444 | CommandSet::new(vec![vec!["echo", "{.}"]]).unwrap(), 445 | CommandSet { 446 | commands: vec![CommandTemplate { 447 | args: vec![ 448 | ArgumentTemplate::Text("echo".into()), 449 | ArgumentTemplate::Tokens(vec![Token::NoExt]), 450 | ], 451 | }], 452 | mode: ExecutionMode::OneByOne, 453 | } 454 | ); 455 | } 456 | 457 | #[test] 458 | fn tokens_with_basename() { 459 | assert_eq!( 460 | CommandSet::new(vec![vec!["echo", "{/}"]]).unwrap(), 461 | CommandSet { 462 | commands: vec![CommandTemplate { 463 | args: vec![ 464 | ArgumentTemplate::Text("echo".into()), 465 | ArgumentTemplate::Tokens(vec![Token::Basename]), 466 | ], 467 | }], 468 | mode: ExecutionMode::OneByOne, 469 | } 470 | ); 471 | } 472 | 473 | #[test] 474 | fn tokens_with_parent() { 475 | assert_eq!( 476 | CommandSet::new(vec![vec!["echo", "{//}"]]).unwrap(), 477 | CommandSet { 478 | commands: vec![CommandTemplate { 479 | args: vec![ 480 | ArgumentTemplate::Text("echo".into()), 481 | ArgumentTemplate::Tokens(vec![Token::Parent]), 482 | ], 483 | }], 484 | mode: ExecutionMode::OneByOne, 485 | } 486
| ); 487 | } 488 | 489 | #[test] 490 | fn tokens_with_basename_no_extension() { 491 | assert_eq!( 492 | CommandSet::new(vec![vec!["echo", "{/.}"]]).unwrap(), 493 | CommandSet { 494 | commands: vec![CommandTemplate { 495 | args: vec![ 496 | ArgumentTemplate::Text("echo".into()), 497 | ArgumentTemplate::Tokens(vec![Token::BasenameNoExt]), 498 | ], 499 | }], 500 | mode: ExecutionMode::OneByOne, 501 | } 502 | ); 503 | } 504 | 505 | #[test] 506 | fn tokens_multiple() { 507 | assert_eq!( 508 | CommandSet::new(vec![vec!["cp", "{}", "{/.}.ext"]]).unwrap(), 509 | CommandSet { 510 | commands: vec![CommandTemplate { 511 | args: vec![ 512 | ArgumentTemplate::Text("cp".into()), 513 | ArgumentTemplate::Tokens(vec![Token::Placeholder]), 514 | ArgumentTemplate::Tokens(vec![ 515 | Token::BasenameNoExt, 516 | Token::Text(".ext".into()) 517 | ]), 518 | ], 519 | }], 520 | mode: ExecutionMode::OneByOne, 521 | } 522 | ); 523 | } 524 | 525 | #[test] 526 | fn tokens_single_batch() { 527 | assert_eq!( 528 | CommandSet::new_batch(vec![vec!["echo", "{.}"]]).unwrap(), 529 | CommandSet { 530 | commands: vec![CommandTemplate { 531 | args: vec![ 532 | ArgumentTemplate::Text("echo".into()), 533 | ArgumentTemplate::Tokens(vec![Token::NoExt]), 534 | ], 535 | }], 536 | mode: ExecutionMode::Batch, 537 | } 538 | ); 539 | } 540 | 541 | #[test] 542 | fn tokens_multiple_batch() { 543 | assert!(CommandSet::new_batch(vec![vec!["echo", "{.}", "{}"]]).is_err()); 544 | } 545 | 546 | #[test] 547 | fn template_no_args() { 548 | assert!(CommandTemplate::new::, &'static str>(vec![]).is_err()); 549 | } 550 | 551 | #[test] 552 | fn command_set_no_args() { 553 | assert!(CommandSet::new(vec![vec!["echo"], vec![]]).is_err()); 554 | } 555 | 556 | #[test] 557 | fn generate_custom_path_separator() { 558 | let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]); 559 | macro_rules!
check { 560 | ($input:expr, $expected:expr) => { 561 | assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); 562 | }; 563 | } 564 | 565 | check!("foo", "foo"); 566 | check!("foo/bar", "foo#bar"); 567 | check!("/foo/bar/baz", "#foo#bar#baz"); 568 | } 569 | 570 | #[cfg(windows)] 571 | #[test] 572 | fn generate_custom_path_separator_windows() { 573 | let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]); 574 | macro_rules! check { 575 | ($input:expr, $expected:expr) => { 576 | assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); 577 | }; 578 | } 579 | 580 | // path starting with a drive letter 581 | check!(r"C:\foo\bar", "C:#foo#bar"); 582 | // UNC path 583 | check!(r"\\server\share\path", "##server#share#path"); 584 | // Drive Relative path - no separator after the colon omits the RootDir path component. 585 | // This is uncommon, but valid 586 | check!(r"C:foo\bar", "C:foo#bar"); 587 | 588 | // forward slashes should get normalized and interpreted as separators 589 | check!("C:/foo/bar", "C:#foo#bar"); 590 | check!("C:foo/bar", "C:foo#bar"); 591 | 592 | // Rust does not interpret "//server/share" as a UNC path, but rather as a normal 593 | // absolute path that begins with RootDir, and the two slashes get combined together as 594 | // a single path separator during normalization.
595 | //check!("//server/share/path", "##server#share#path"); 596 | } 597 | } 598 | -------------------------------------------------------------------------------- /src/main.rs: --------------------------------------------------------------------------------
// main.rs: fd's binary entry point — module declarations and imports, the opt-in jemalloc
// global allocator, and the bundled default LS_COLORS theme used when the env var is unset.
// NOTE(review): this dump has stripped some generic parameters inside angle brackets
// (e.g. `Result` without `<ExitCode>`) — verify against the upstream source before reuse.
1 | mod cli; 2 | mod config; 3 | mod dir_entry; 4 | mod error; 5 | mod exec; 6 | mod exit_codes; 7 | mod filesystem; 8 | mod filetypes; 9 | mod filter; 10 | mod output; 11 | mod regex_helper; 12 | mod walk; 13 | 14 | use std::env; 15 | use std::io::IsTerminal; 16 | use std::path::Path; 17 | use std::sync::Arc; 18 | use std::time; 19 | 20 | use anyhow::{anyhow, bail, Context, Result}; 21 | use clap::{CommandFactory, Parser}; 22 | use globset::GlobBuilder; 23 | use lscolors::LsColors; 24 | use regex::bytes::{Regex, RegexBuilder, RegexSetBuilder}; 25 | 26 | use crate::cli::{ColorWhen, Opts}; 27 | use crate::config::Config; 28 | use crate::exec::CommandSet; 29 | use crate::exit_codes::ExitCode; 30 | use crate::filetypes::FileTypes; 31 | #[cfg(unix)] 32 | use crate::filter::OwnerFilter; 33 | use crate::filter::TimeFilter; 34 | use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_with_leading_dot}; 35 | 36 | // We use jemalloc for performance reasons, see https://github.com/sharkdp/fd/pull/481 37 | // FIXME: re-enable jemalloc on macOS, see comment in Cargo.toml file for more infos 38 | #[cfg(all( 39 | not(windows), 40 | not(target_os = "android"), 41 | not(target_os = "macos"), 42 | not(target_os = "freebsd"), 43 | not(target_os = "openbsd"), 44 | not(all(target_env = "musl", target_pointer_width = "32")), 45 | not(target_arch = "riscv64"), 46 | feature = "use-jemalloc" 47 | ))] 48 | #[global_allocator] 49 | static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; 50 | 51 | // vivid --color-mode 8-bit generate molokai 52 | const DEFAULT_LS_COLORS: &str = " 53 |
ow=0:or=0;38;5;16;48;5;203:no=0:ex=1;38;5;203:cd=0;38;5;203;48;5;236:mi=0;38;5;16;48;5;203:*~=0;38;5;243:st=0:pi=0;38;5;16;48;5;81:fi=0:di=0;38;5;81:so=0;38;5;16;48;5;203:bd=0;38;5;81;48;5;236:tw=0:ln=0;38;5;203:*.m=0;38;5;48:*.o=0;38;5;243:*.z=4;38;5;203:*.a=1;38;5;203:*.r=0;38;5;48:*.c=0;38;5;48:*.d=0;38;5;48:*.t=0;38;5;48:*.h=0;38;5;48:*.p=0;38;5;48:*.cc=0;38;5;48:*.ll=0;38;5;48:*.jl=0;38;5;48:*css=0;38;5;48:*.md=0;38;5;185:*.gz=4;38;5;203:*.nb=0;38;5;48:*.mn=0;38;5;48:*.go=0;38;5;48:*.xz=4;38;5;203:*.so=1;38;5;203:*.rb=0;38;5;48:*.pm=0;38;5;48:*.bc=0;38;5;243:*.py=0;38;5;48:*.as=0;38;5;48:*.pl=0;38;5;48:*.rs=0;38;5;48:*.sh=0;38;5;48:*.7z=4;38;5;203:*.ps=0;38;5;186:*.cs=0;38;5;48:*.el=0;38;5;48:*.rm=0;38;5;208:*.hs=0;38;5;48:*.td=0;38;5;48:*.ui=0;38;5;149:*.ex=0;38;5;48:*.js=0;38;5;48:*.cp=0;38;5;48:*.cr=0;38;5;48:*.la=0;38;5;243:*.kt=0;38;5;48:*.ml=0;38;5;48:*.vb=0;38;5;48:*.gv=0;38;5;48:*.lo=0;38;5;243:*.hi=0;38;5;243:*.ts=0;38;5;48:*.ko=1;38;5;203:*.hh=0;38;5;48:*.pp=0;38;5;48:*.di=0;38;5;48:*.bz=4;38;5;203:*.fs=0;38;5;48:*.png=0;38;5;208:*.zsh=0;38;5;48:*.mpg=0;38;5;208:*.pid=0;38;5;243:*.xmp=0;38;5;149:*.iso=4;38;5;203:*.m4v=0;38;5;208:*.dot=0;38;5;48:*.ods=0;38;5;186:*.inc=0;38;5;48:*.sxw=0;38;5;186:*.aif=0;38;5;208:*.git=0;38;5;243:*.gvy=0;38;5;48:*.tbz=4;38;5;203:*.log=0;38;5;243:*.txt=0;38;5;185:*.ico=0;38;5;208:*.csx=0;38;5;48:*.vob=0;38;5;208:*.pgm=0;38;5;208:*.pps=0;38;5;186:*.ics=0;38;5;186:*.img=4;38;5;203:*.fon=0;38;5;208:*.hpp=0;38;5;48:*.bsh=0;38;5;48:*.sql=0;38;5;48:*TODO=1:*.php=0;38;5;48:*.pkg=4;38;5;203:*.ps1=0;38;5;48:*.csv=0;38;5;185:*.ilg=0;38;5;243:*.ini=0;38;5;149:*.pyc=0;38;5;243:*.psd=0;38;5;208:*.htc=0;38;5;48:*.swp=0;38;5;243:*.mli=0;38;5;48:*hgrc=0;38;5;149:*.bst=0;38;5;149:*.ipp=0;38;5;48:*.fsi=0;38;5;48:*.tcl=0;38;5;48:*.exs=0;38;5;48:*.out=0;38;5;243:*.jar=4;38;5;203:*.xls=0;38;5;186:*.ppm=0;38;5;208:*.apk=4;38;5;203:*.aux=0;38;5;243:*.rpm=4;38;5;203:*.dll=1;38;5;203:*.eps=0;38;5;208:*.exe=1;38;5;203:*.doc=0;38;5;186:*.wma=0;38;5
;208:*.deb=4;38;5;203:*.pod=0;38;5;48:*.ind=0;38;5;243:*.nix=0;38;5;149:*.lua=0;38;5;48:*.epp=0;38;5;48:*.dpr=0;38;5;48:*.htm=0;38;5;185:*.ogg=0;38;5;208:*.bin=4;38;5;203:*.otf=0;38;5;208:*.yml=0;38;5;149:*.pro=0;38;5;149:*.cxx=0;38;5;48:*.tex=0;38;5;48:*.fnt=0;38;5;208:*.erl=0;38;5;48:*.sty=0;38;5;243:*.bag=4;38;5;203:*.rst=0;38;5;185:*.pdf=0;38;5;186:*.pbm=0;38;5;208:*.xcf=0;38;5;208:*.clj=0;38;5;48:*.gif=0;38;5;208:*.rar=4;38;5;203:*.elm=0;38;5;48:*.bib=0;38;5;149:*.tsx=0;38;5;48:*.dmg=4;38;5;203:*.tmp=0;38;5;243:*.bcf=0;38;5;243:*.mkv=0;38;5;208:*.svg=0;38;5;208:*.cpp=0;38;5;48:*.vim=0;38;5;48:*.bmp=0;38;5;208:*.ltx=0;38;5;48:*.fls=0;38;5;243:*.flv=0;38;5;208:*.wav=0;38;5;208:*.m4a=0;38;5;208:*.mid=0;38;5;208:*.hxx=0;38;5;48:*.pas=0;38;5;48:*.wmv=0;38;5;208:*.tif=0;38;5;208:*.kex=0;38;5;186:*.mp4=0;38;5;208:*.bak=0;38;5;243:*.xlr=0;38;5;186:*.dox=0;38;5;149:*.swf=0;38;5;208:*.tar=4;38;5;203:*.tgz=4;38;5;203:*.cfg=0;38;5;149:*.xml=0; 54 | 38;5;185:*.jpg=0;38;5;208:*.mir=0;38;5;48:*.sxi=0;38;5;186:*.bz2=4;38;5;203:*.odt=0;38;5;186:*.mov=0;38;5;208:*.toc=0;38;5;243:*.bat=1;38;5;203:*.asa=0;38;5;48:*.awk=0;38;5;48:*.sbt=0;38;5;48:*.vcd=4;38;5;203:*.kts=0;38;5;48:*.arj=4;38;5;203:*.blg=0;38;5;243:*.c++=0;38;5;48:*.odp=0;38;5;186:*.bbl=0;38;5;243:*.idx=0;38;5;243:*.com=1;38;5;203:*.mp3=0;38;5;208:*.avi=0;38;5;208:*.def=0;38;5;48:*.cgi=0;38;5;48:*.zip=4;38;5;203:*.ttf=0;38;5;208:*.ppt=0;38;5;186:*.tml=0;38;5;149:*.fsx=0;38;5;48:*.h++=0;38;5;48:*.rtf=0;38;5;186:*.inl=0;38;5;48:*.yaml=0;38;5;149:*.html=0;38;5;185:*.mpeg=0;38;5;208:*.java=0;38;5;48:*.hgrc=0;38;5;149:*.orig=0;38;5;243:*.conf=0;38;5;149:*.dart=0;38;5;48:*.psm1=0;38;5;48:*.rlib=0;38;5;243:*.fish=0;38;5;48:*.bash=0;38;5;48:*.make=0;38;5;149:*.docx=0;38;5;186:*.json=0;38;5;149:*.psd1=0;38;5;48:*.lisp=0;38;5;48:*.tbz2=4;38;5;203:*.diff=0;38;5;48:*.epub=0;38;5;186:*.xlsx=0;38;5;186:*.pptx=0;38;5;186:*.toml=0;38;5;149:*.h264=0;38;5;208:*.purs=0;38;5;48:*.flac=0;38;5;208:*.tiff=0;38;5;208:*.jpeg=0;38;5;208:*.lock=
0;38;5;243:*.less=0;38;5;48:*.dyn_o=0;38;5;243:*.scala=0;38;5;48:*.mdown=0;38;5;185:*.shtml=0;38;5;185:*.class=0;38;5;243:*.cache=0;38;5;243:*.cmake=0;38;5;149:*passwd=0;38;5;149:*.swift=0;38;5;48:*shadow=0;38;5;149:*.xhtml=0;38;5;185:*.patch=0;38;5;48:*.cabal=0;38;5;48:*README=0;38;5;16;48;5;186:*.toast=4;38;5;203:*.ipynb=0;38;5;48:*COPYING=0;38;5;249:*.gradle=0;38;5;48:*.matlab=0;38;5;48:*.config=0;38;5;149:*LICENSE=0;38;5;249:*.dyn_hi=0;38;5;243:*.flake8=0;38;5;149:*.groovy=0;38;5;48:*INSTALL=0;38;5;16;48;5;186:*TODO.md=1:*.ignore=0;38;5;149:*Doxyfile=0;38;5;149:*TODO.txt=1:*setup.py=0;38;5;149:*Makefile=0;38;5;149:*.gemspec=0;38;5;149:*.desktop=0;38;5;149:*.rgignore=0;38;5;149:*.markdown=0;38;5;185:*COPYRIGHT=0;38;5;249:*configure=0;38;5;149:*.DS_Store=0;38;5;243:*.kdevelop=0;38;5;149:*.fdignore=0;38;5;149:*README.md=0;38;5;16;48;5;186:*.cmake.in=0;38;5;149:*SConscript=0;38;5;149:*CODEOWNERS=0;38;5;149:*.localized=0;38;5;243:*.gitignore=0;38;5;149:*Dockerfile=0;38;5;149:*.gitconfig=0;38;5;149:*INSTALL.md=0;38;5;16;48;5;186:*README.txt=0;38;5;16;48;5;186:*SConstruct=0;38;5;149:*.scons_opt=0;38;5;243:*.travis.yml=0;38;5;186:*.gitmodules=0;38;5;149:*.synctex.gz=0;38;5;243:*LICENSE-MIT=0;38;5;249:*MANIFEST.in=0;38;5;149:*Makefile.in=0;38;5;243:*Makefile.am=0;38;5;149:*INSTALL.txt=0;38;5;16;48;5;186:*configure.ac=0;38;5;149:*.applescript=0;38;5;48:*appveyor.yml=0;38;5;186:*.fdb_latexmk=0;38;5;243:*CONTRIBUTORS=0;38;5;16;48;5;186:*.clang-format=0;38;5;149:*LICENSE-APACHE=0;38;5;249:*CMakeLists.txt=0;38;5;149:*CMakeCache.txt=0;38;5;243:*.gitattributes=0;38;5;149:*CONTRIBUTORS.md=0;38;5;16;48;5;186:*.sconsign.dblite=0;38;5;243:*requirements.txt=0;38;5;149:*CONTRIBUTORS.txt=0;38;5;16;48;5;186:*package-lock.json=0;38;5;243:*.CFUserTextEncoding=0;38;5;243 55 | "; 56 | 57 | fn main() { 58 | let result = run(); 59 | match result { 60 | Ok(exit_code) => { 61 | exit_code.exit(); 62 | } 63 | Err(err) => { 64 | eprintln!("[fd error]: {:#}", err); 65 | 
ExitCode::GeneralError.exit(); 66 | } 67 | } 68 | } 69 | 
// Top-level pipeline: parse CLI options, optionally print shell completions, validate the
// working dir and search paths, build one regex source per pattern (smart-case applied in
// construct_config), then hand everything to the parallel directory walk.
70 | fn run() -> Result { 71 | let opts = Opts::parse(); 72 | 73 | #[cfg(feature = "completions")] 74 | if let Some(shell) = opts.gen_completions()? { 75 | return print_completions(shell); 76 | } 77 | 78 | set_working_dir(&opts)?; 79 | let search_paths = opts.search_paths()?; 80 | if search_paths.is_empty() { 81 | bail!("No valid search paths given."); 82 | } 83 | 84 | ensure_search_pattern_is_not_a_path(&opts)?; 85 | let pattern = &opts.pattern; 86 | let exprs = &opts.exprs; 87 | let empty = Vec::new(); 88 | 89 | let pattern_regexps = exprs 90 | .as_ref() 91 | .unwrap_or(&empty) 92 | .iter() 93 | .chain([pattern]) 94 | .map(|pat| build_pattern_regex(pat, &opts)) 95 | .collect::>>()?; 96 | 97 | let config = construct_config(opts, &pattern_regexps)?; 98 | 99 | ensure_use_hidden_option_for_leading_dot_pattern(&config, &pattern_regexps)?; 100 | 101 | let regexps = pattern_regexps 102 | .into_iter() 103 | .map(|pat| build_regex(pat, &config)) 104 | .collect::>>()?; 105 | 106 | walk::scan(&search_paths, Arc::new(regexps), Arc::new(config)) 107 | } 108 | 109 | #[cfg(feature = "completions")] 110 | #[cold] 111 | fn print_completions(shell: clap_complete::Shell) -> Result { 112 | // The program name is the first argument.
113 | let first_arg = env::args().next(); 114 | let program_name = first_arg 115 | .as_ref() 116 | .map(Path::new) 117 | .and_then(|path| path.file_stem()) 118 | .and_then(|file| file.to_str()) 119 | .unwrap_or("fd"); 120 | let mut cmd = Opts::command(); 121 | cmd.build(); 122 | clap_complete::generate(shell, &mut cmd, program_name, &mut std::io::stdout()); 123 | Ok(ExitCode::Success) 124 | } 125 | 
// Change the process working directory when --base-directory is given; fails with a
// descriptive error if the path does not exist or is not a directory.
126 | fn set_working_dir(opts: &Opts) -> Result<()> { 127 | if let Some(ref base_directory) = opts.base_directory { 128 | if !filesystem::is_existing_directory(base_directory) { 129 | return Err(anyhow!( 130 | "The '--base-directory' path '{}' is not a directory.", 131 | base_directory.to_string_lossy() 132 | )); 133 | } 134 | env::set_current_dir(base_directory).with_context(|| { 135 | format!( 136 | "Could not set '{}' as the current working directory", 137 | base_directory.to_string_lossy() 138 | ) 139 | })?; 140 | } 141 | Ok(()) 142 | } 143 | 144 | /// Detect if the user accidentally supplied a path instead of a search pattern 145 | fn ensure_search_pattern_is_not_a_path(opts: &Opts) -> Result<()> { 146 | if !opts.full_path 147 | && opts.pattern.contains(std::path::MAIN_SEPARATOR) 148 | && Path::new(&opts.pattern).is_dir() 149 | { 150 | Err(anyhow!( 151 | "The search pattern '{pattern}' contains a path-separation character ('{sep}') \ 152 | and will not lead to any search results.\n\n\ 153 | If you want to search for all files inside the '{pattern}' directory, use a match-all pattern:\n\n \ 154 | fd .
'{pattern}'\n\n\ 155 | Instead, if you want your pattern to match the full file path, use:\n\n \ 156 | fd --full-path '{pattern}'", 157 | pattern = &opts.pattern, 158 | sep = std::path::MAIN_SEPARATOR, 159 | )) 160 | } else { 161 | Ok(()) 162 | } 163 | } 164 | 
// Translate one user pattern into regex source text: --glob builds a regex from the glob
// (with '/' treated as a literal separator), --fixed-strings escapes the whole pattern,
// otherwise the pattern is passed through as a raw regex.
165 | fn build_pattern_regex(pattern: &str, opts: &Opts) -> Result { 166 | Ok(if opts.glob && !pattern.is_empty() { 167 | let glob = GlobBuilder::new(pattern).literal_separator(true).build()?; 168 | glob.regex().to_owned() 169 | } else if opts.fixed_strings { 170 | // Treat pattern as literal string if '--fixed-strings' is used 171 | regex::escape(pattern) 172 | } else { 173 | String::from(pattern) 174 | }) 175 | } 176 | 
// Reject multi-byte --path-separator values, but only on Windows (where some shells expand
// '/' into a longer path and '//' is the suggested workaround).
177 | fn check_path_separator_length(path_separator: Option<&str>) -> Result<()> { 178 | match (cfg!(windows), path_separator) { 179 | (true, Some(sep)) if sep.len() > 1 => Err(anyhow!( 180 | "A path separator must be exactly one byte, but \ 181 | the given separator is {} bytes: '{}'.\n\ 182 | In some shells on Windows, '/' is automatically \ 183 | expanded. Try to use '//' instead.", 184 | sep.len(), 185 | sep 186 | )), 187 | _ => Ok(()), 188 | } 189 | } 190 | 
// Consume the parsed CLI options and assemble the immutable search Config shared by all
// walker threads: case sensitivity (smart case), separators, filters, colors, and command.
191 | fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result { 192 | // The search will be case-sensitive if the command line flag is set or 193 | // if any of the patterns has an uppercase character (smart case).
194 | let case_sensitive = !opts.ignore_case 195 | && (opts.case_sensitive 196 | || pattern_regexps 197 | .iter() 198 | .any(|pat| pattern_has_uppercase_char(pat))); 199 | 200 | let path_separator = opts 201 | .path_separator 202 | .take() 203 | .or_else(filesystem::default_path_separator); 204 | let actual_path_separator = path_separator 205 | .clone() 206 | .unwrap_or_else(|| std::path::MAIN_SEPARATOR.to_string()); 207 | check_path_separator_length(path_separator.as_deref())?; 208 | 209 | let size_limits = std::mem::take(&mut opts.size); 210 | let time_constraints = extract_time_constraints(&opts)?; 211 | #[cfg(unix)] 212 | let owner_constraint: Option = opts.owner.and_then(OwnerFilter::filter_ignore); 213 | 
// On Windows, ANSI escapes must be explicitly enabled (or a TERM env var present);
// everywhere else ANSI support is assumed.
214 | #[cfg(windows)] 215 | let ansi_colors_support = 216 | nu_ansi_term::enable_ansi_support().is_ok() || std::env::var_os("TERM").is_some(); 217 | #[cfg(not(windows))] 218 | let ansi_colors_support = true; 219 | 220 | let interactive_terminal = std::io::stdout().is_terminal(); 221 | let colored_output = match opts.color { 222 | ColorWhen::Always => true, 223 | ColorWhen::Never => false, 224 | ColorWhen::Auto => { 225 | ansi_colors_support && env::var_os("NO_COLOR").is_none() && interactive_terminal 226 | } 227 | }; 228 | 
// LS_COLORS from the environment wins; otherwise fall back to the bundled default theme.
229 | let ls_colors = if colored_output { 230 | Some(LsColors::from_env().unwrap_or_else(|| LsColors::from_string(DEFAULT_LS_COLORS))) 231 | } else { 232 | None 233 | }; 234 | let command = extract_command(&mut opts, colored_output)?; 235 | let has_command = command.is_some(); 236 | 237 | Ok(Config { 238 | case_sensitive, 239 | search_full_path: opts.full_path, 240 | ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()), 241 | read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()), 242 | read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs), 243 | require_git_to_read_vcsignore: !opts.no_require_git, 244 | read_parent_ignore: !opts.no_ignore_parent, 245 | read_global_ignore: !(opts.no_ignore 246 | ||
opts.rg_alias_ignore() 247 | || opts.no_global_ignore_file), 248 | follow_links: opts.follow, 249 | one_file_system: opts.one_file_system, 250 | null_separator: opts.null_separator, 251 | quiet: opts.quiet, 252 | max_depth: opts.max_depth(), 253 | min_depth: opts.min_depth(), 254 | prune: opts.prune, 255 | threads: opts.threads(), 256 | max_buffer_time: opts.max_buffer_time, 257 | ls_colors, 258 | interactive_terminal, 259 | file_types: opts.filetype.as_ref().map(|values| { 260 | use crate::cli::FileType::*; 261 | let mut file_types = FileTypes::default(); 262 | for value in values { 263 | match value { 264 | File => file_types.files = true, 265 | Directory => file_types.directories = true, 266 | Symlink => file_types.symlinks = true, 267 | Executable => { 268 | file_types.executables_only = true; 269 | file_types.files = true; 270 | } 271 | Empty => file_types.empty_only = true, 272 | Socket => file_types.sockets = true, 273 | Pipe => file_types.pipes = true, 274 | } 275 | } 276 | 277 | // If only 'empty' was specified, search for both files and directories: 278 | if file_types.empty_only && !(file_types.files || file_types.directories) { 279 | file_types.files = true; 280 | file_types.directories = true; 281 | } 282 | 283 | file_types 284 | }), 285 | extensions: opts 286 | .extensions 287 | .as_ref() 288 | .map(|exts| { 289 | let patterns = exts 290 | .iter() 291 | .map(|e| e.trim_start_matches('.')) 292 | .map(|e| format!(r".\.{}$", regex::escape(e))); 293 | RegexSetBuilder::new(patterns) 294 | .case_insensitive(true) 295 | .build() 296 | }) 297 | .transpose()?, 298 | command: command.map(Arc::new), 299 | batch_size: opts.batch_size, 300 | exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(), 301 | ignore_files: std::mem::take(&mut opts.ignore_file), 302 | size_constraints: size_limits, 303 | time_constraints, 304 | #[cfg(unix)] 305 | owner_constraint, 306 | show_filesystem_errors: opts.show_errors, 307 | path_separator, 308 |
actual_path_separator, 309 | max_results: opts.max_results(), 310 | strip_cwd_prefix: (opts.no_search_paths() 311 | && (opts.strip_cwd_prefix || !(opts.null_separator || has_command))), 312 | }) 313 | } 314 | 
// Prefer an explicit --exec/--exec-batch command taken from the options; otherwise, when
// --list-details is set, synthesize a batch 'ls' command (see determine_ls_command).
315 | fn extract_command(opts: &mut Opts, colored_output: bool) -> Result> { 316 | opts.exec 317 | .command 318 | .take() 319 | .map(Ok) 320 | .or_else(|| { 321 | if !opts.list_details { 322 | return None; 323 | } 324 | let color_arg = format!("--color={}", opts.color.as_str()); 325 | 326 | let res = determine_ls_command(&color_arg, colored_output) 327 | .map(|cmd| CommandSet::new_batch([cmd]).unwrap()); 328 | Some(res) 329 | }) 330 | .transpose() 331 | } 332 | 
// Pick a platform-appropriate 'ls' invocation for --list-details: GNU ls where available
// (including 'gls' on BSD/macOS and 'ls' on Windows), BSD ls flags otherwise; errors on
// platforms with no usable 'ls'.
333 | fn determine_ls_command(color_arg: &str, colored_output: bool) -> Result> { 334 | #[allow(unused)] 335 | let gnu_ls = |command_name| { 336 | // Note: we use short options here (instead of --long-options) to support more 337 | // platforms (like BusyBox). 338 | vec![ 339 | command_name, 340 | "-l", // long listing format 341 | "-h", // human readable file sizes 342 | "-d", // list directories themselves, not their contents 343 | color_arg, 344 | ] 345 | }; 346 | let cmd: Vec<&str> = if cfg!(unix) { 347 | if !cfg!(any( 348 | target_os = "macos", 349 | target_os = "dragonfly", 350 | target_os = "freebsd", 351 | target_os = "netbsd", 352 | target_os = "openbsd" 353 | )) { 354 | // Assume ls is GNU ls 355 | gnu_ls("ls") 356 | } else { 357 | // MacOS, DragonFlyBSD, FreeBSD 358 | use std::process::{Command, Stdio}; 359 | 360 | // Use GNU ls, if available (support for --color=auto, better LS_COLORS support) 361 | let gnu_ls_exists = Command::new("gls") 362 | .arg("--version") 363 | .stdout(Stdio::null()) 364 | .stderr(Stdio::null()) 365 | .status() 366 | .is_ok(); 367 | 368 | if gnu_ls_exists { 369 | gnu_ls("gls") 370 | } else { 371 | let mut cmd = vec![ 372 | "ls", // BSD version of ls 373 | "-l", // long listing format 374 | "-h", // '--human-readable' is not available, '-h' is 375 | "-d", //
'--directory' is not available, but '-d' is 376 | ]; 377 | 378 | if !cfg!(any(target_os = "netbsd", target_os = "openbsd")) && colored_output { 379 | // -G is not available in NetBSD's and OpenBSD's ls 380 | cmd.push("-G"); 381 | } 382 | 383 | cmd 384 | } 385 | } 386 | } else if cfg!(windows) { 387 | use std::process::{Command, Stdio}; 388 | 389 | // Use GNU ls, if available 390 | let gnu_ls_exists = Command::new("ls") 391 | .arg("--version") 392 | .stdout(Stdio::null()) 393 | .stderr(Stdio::null()) 394 | .status() 395 | .is_ok(); 396 | 397 | if gnu_ls_exists { 398 | gnu_ls("ls") 399 | } else { 400 | return Err(anyhow!( 401 | "'fd --list-details' is not supported on Windows unless GNU 'ls' is installed." 402 | )); 403 | } 404 | } else { 405 | return Err(anyhow!( 406 | "'fd --list-details' is not supported on this platform." 407 | )); 408 | }; 409 | Ok(cmd) 410 | } 411 | 
// Convert the --changed-within / --changed-before option strings into TimeFilter
// constraints against a single 'now' snapshot; an unparsable value is a hard error.
412 | fn extract_time_constraints(opts: &Opts) -> Result> { 413 | let now = time::SystemTime::now(); 414 | let mut time_constraints: Vec = Vec::new(); 415 | if let Some(ref t) = opts.changed_within { 416 | if let Some(f) = TimeFilter::after(&now, t) { 417 | time_constraints.push(f); 418 | } else { 419 | return Err(anyhow!( 420 | "'{}' is not a valid date or duration. See 'fd --help'.", 421 | t 422 | )); 423 | } 424 | } 425 | if let Some(ref t) = opts.changed_before { 426 | if let Some(f) = TimeFilter::before(&now, t) { 427 | time_constraints.push(f); 428 | } else { 429 | return Err(anyhow!( 430 | "'{}' is not a valid date or duration.
See 'fd --help'.", 431 | t 432 | )); 433 | } 434 | } 435 | Ok(time_constraints) 436 | } 437 | 
// Error out (Unix only) when every pattern can only match names with a leading dot while
// hidden files are still being filtered — the user almost certainly wants -H/--hidden.
438 | fn ensure_use_hidden_option_for_leading_dot_pattern( 439 | config: &Config, 440 | pattern_regexps: &[String], 441 | ) -> Result<()> { 442 | if cfg!(unix) 443 | && config.ignore_hidden 444 | && pattern_regexps 445 | .iter() 446 | .any(|pat| pattern_matches_strings_with_leading_dot(pat)) 447 | { 448 | Err(anyhow!( 449 | "The pattern(s) seems to only match files with a leading dot, but hidden files are \ 450 | filtered by default. Consider adding -H/--hidden to search hidden files as well \ 451 | or adjust your search pattern(s)." 452 | )) 453 | } else { 454 | Ok(()) 455 | } 456 | } 457 | 
// Compile the final search regex with the smart-case decision applied and '.' matching
// newlines; on failure, the error suggests --fixed-strings or --glob as alternatives.
458 | fn build_regex(pattern_regex: String, config: &Config) -> Result { 459 | RegexBuilder::new(&pattern_regex) 460 | .case_insensitive(!config.case_sensitive) 461 | .dot_matches_new_line(true) 462 | .build() 463 | .map_err(|e| { 464 | anyhow!( 465 | "{}\n\nNote: You can use the '--fixed-strings' option to search for a \ 466 | literal string instead of a regular expression. Alternatively, you can \ 467 | also use the '--glob' option to match on a glob pattern.", 468 | e.to_string() 469 | ) 470 | }) 471 | } 472 | --------------------------------------------------------------------------------