├── .github └── workflows │ └── rust.yml ├── .gitignore ├── CNAME ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── docs ├── fselect.1 └── usage.md ├── resources └── test │ ├── audio │ ├── silent-35s.mp3 │ └── silent.wav │ ├── image │ ├── rust-logo-blk.bmp │ ├── rust-logo-blk.gif │ ├── rust-logo-blk.jpeg │ ├── rust-logo-blk.jpg │ ├── rust-logo-blk.png │ ├── rust-logo-blk.svg │ ├── rust-logo-blk.tiff │ ├── rust-logo-blk.webp │ └── rust-logo-blk_corrupted.svg │ └── video │ ├── rust-logo-blk.mkv │ └── rust-logo-blk.mp4 └── src ├── config.rs ├── expr.rs ├── field.rs ├── fileinfo.rs ├── function.rs ├── ignore ├── docker.rs ├── hg.rs └── mod.rs ├── lexer.rs ├── main.rs ├── mode.rs ├── operators.rs ├── output ├── csv.rs ├── flat.rs ├── html.rs ├── json.rs └── mod.rs ├── parser.rs ├── query.rs ├── searcher.rs └── util ├── capabilities.rs ├── datetime.rs ├── dimensions ├── image.rs ├── mkv.rs ├── mod.rs ├── mp4.rs └── svg.rs ├── duration ├── mkv.rs ├── mod.rs ├── mp3.rs ├── mp4.rs └── wav.rs ├── glob.rs ├── japanese.rs ├── mod.rs ├── top_n.rs ├── variant.rs └── wbuf.rs /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | paths: 7 | - 'src/**' 8 | - 'Cargo.toml' 9 | - 'Cargo.lock' 10 | pull_request: 11 | branches: [ master ] 12 | paths: 13 | - 'src/**' 14 | - 'Cargo.toml' 15 | - 'Cargo.lock' 16 | workflow_dispatch: 17 | 18 | env: 19 | CARGO_TERM_COLOR: always 20 | 21 | jobs: 22 | 23 | build: 24 | strategy: 25 | matrix: 26 | os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] 27 | runs-on: ${{ matrix.os }} 28 | steps: 29 | - uses: actions/checkout@v4 30 | - uses: actions/cache@v4 31 | id: cache-deps 32 | with: 33 | path: | 34 | ~/.cargo/bin/ 35 | ~/.cargo/registry/index/ 36 | ~/.cargo/registry/cache/ 37 | ~/.cargo/git/db/ 38 | target/ 39 | key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} 40 | 
- name: Build 41 | run: cargo build --verbose 42 | - name: Run tests 43 | run: cargo test --verbose 44 | - name: Build test image and run tests in Docker 45 | if: matrix.os == 'ubuntu-latest' 46 | run: | 47 | cargo build --release 48 | mkdir docker-test && cp target/release/fselect docker-test/ 49 | cat > docker-test/Dockerfile <"] 5 | description = "Find files with SQL-like queries" 6 | keywords = ["find", "files", "sql", "query", "tool"] 7 | categories = ["filesystem", "command-line-utilities", "command-line-interface"] 8 | documentation = "https://github.com/jhspetersson/fselect/blob/master/docs/usage.md" 9 | homepage = "https://github.com/jhspetersson/fselect" 10 | repository = "https://github.com/jhspetersson/fselect" 11 | readme = "README.md" 12 | license = "MIT OR Apache-2.0" 13 | edition = "2024" 14 | 15 | [features] 16 | default = ["git", "users", "update-notifications"] 17 | git = ["dep:git2"] 18 | update-notifications = ["dep:update-informer"] 19 | users = ["dep:uzers"] 20 | 21 | [dependencies] 22 | bytecount = "0.6" 23 | chrono = "0.4" 24 | chrono-english = "0.1" 25 | csv = "1.0" 26 | directories = "6.0" 27 | git2 = { version = "0.20.0", default-features = false, optional = true } 28 | human-time = "0.1.6" 29 | humansize = "2.0" 30 | imagesize = "0.14" 31 | kamadak-exif = "0.6" 32 | lscolors = { version = "0.20", features = [ "nu-ansi-term" ] } 33 | matroska = "0.30" 34 | mp3-metadata = "0.4" 35 | mp4parse = "0.17" 36 | nu-ansi-term = "0.50" 37 | rand = "0.9" 38 | rbase64 = "2.0" 39 | regex = "1.1" 40 | rustyline = "16" 41 | serde = "1.0" 42 | serde_derive = "1.0" 43 | serde_json = "1.0" 44 | sha-1 = "0.10" 45 | sha2 = "0.10" 46 | sha3 = "0.10" 47 | svg = "0.18" 48 | toml = "0.8" 49 | tree_magic_mini = { version = "3.0", features = [ "with-gpl-data" ] } 50 | update-informer = { version = "1.1.0", optional = true } 51 | wana_kana = "4.0" 52 | wavers = "1.1" 53 | zip = "4" 54 | 55 | [target.'cfg(unix)'.dependencies] 56 | uzers = { version = "0.12", 
optional = true } 57 | xattr = "1.0" 58 | 59 | [profile.release] 60 | lto = true 61 | 62 | [package.metadata.deb] 63 | section = "utility" 64 | extended-description = """\ 65 | * SQL-like (not real SQL, but highly relaxed!) grammar easily understandable by humans 66 | * complex queries 67 | * aggregate, statistics, date, and other functions 68 | * search within archives 69 | * .gitignore, .hgignore, and .dockerignore support (experimental) 70 | * search by width and height of images, EXIF metadata 71 | * search by MP3 info 72 | * search by extended file attributes and Linux capabilities 73 | * search by file hashes 74 | * search by MIME type 75 | * shortcuts to common file types 76 | * interactive mode 77 | * various output formatting (CSV, JSON, and others)""" 78 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:latest 2 | 3 | WORKDIR /usr/src/fselect 4 | COPY . . 5 | 6 | RUN cargo install --locked --path . 7 | 8 | CMD ["cargo", "test", "--locked" , "--verbose", "--all"] 9 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # fselect 2 | Find files with SQL-like queries 3 | 4 | [![Crates.io](https://img.shields.io/crates/v/fselect.svg)](https://crates.io/crates/fselect) 5 | [![build](https://github.com/jhspetersson/fselect/actions/workflows/rust.yml/badge.svg)](https://github.com/jhspetersson/fselect/actions/workflows/rust.yml) 6 | 7 | ### Why use fselect? 8 | 9 | While it doesn't tend to fully replace traditional `find` and `ls`, **fselect** has these nice features: 10 | 11 | * SQL-like (not real SQL, but highly relaxed!) grammar easily understandable by humans 12 | * complex queries 13 | * aggregate, statistics, date, and other functions 14 | * search within archives 15 | * `.gitignore`, `.hgignore`, and `.dockerignore` support (experimental) 16 | * search by width and height of images, EXIF metadata 17 | * search by MP3 info 18 | * search by extended file attributes and Linux capabilities 19 | * search by file hashes 20 | * search by MIME type 21 | * shortcuts to common file types 22 | * interactive mode 23 | * various output formatting (CSV, JSON, and others) 24 | 25 | More is under way! 
26 | 27 | ### Installation 28 | 29 | #### Latest release from source 30 | 31 | * Install [Rust with Cargo](https://www.rust-lang.org/en-US/install.html) and its dependencies to build a binary 32 | * Run `cargo install fselect` 33 | 34 | #### Arch Linux 35 | 36 | [AUR package](https://aur.archlinux.org/packages/fselect/), thanks to [@asm0dey](https://github.com/asm0dey) 37 | 38 | [AUR bin package](https://aur.archlinux.org/packages/fselect-bin/), thanks to [@4censord](https://github.com/4censord) 39 | 40 | #### NixOS 41 | 42 | [`fselect` in `nixpkgs`](https://github.com/filalex77/nixpkgs/blob/1eced92263395896c10cea69e5f60e8be5f43aeb/pkgs/tools/misc/fselect/default.nix), thanks to [@filalex77](https://github.com/filalex77) 43 | 44 | #### Other Linux 45 | 46 | [Static build with musl](https://github.com/jhspetersson/fselect/releases/download/0.8.12/fselect-x86_64-linux-musl.gz). 47 | 48 | #### Windows 64bit 49 | 50 | A statically precompiled [binary](https://github.com/jhspetersson/fselect/releases/download/0.8.12/fselect-x86_64-win.zip) is available at GitHub downloads. 51 | 52 | #### Windows via winget 53 | 54 | * Install [winget](https://github.com/microsoft/winget-cli) 55 | * Run `winget install -e --id fselect.fselect` 56 | 57 | #### Windows via Chocolatey 58 | 59 | * Install [Chocolatey](https://chocolatey.org/install) 60 | * Run `choco install fselect` 61 | 62 | #### Windows via Scoop 63 | 64 | * Install [Scoop](https://scoop.sh) 65 | * Run `scoop install fselect` 66 | 67 | #### Mac via Homebrew 68 | 69 | * Install [brew](https://brew.sh) 70 | * Run `brew install fselect` 71 | 72 | #### Mac via MacPorts 73 | 74 | * Install [MacPorts](https://www.macports.org) 75 | * Run: 76 | ``` 77 | sudo port selfupdate 78 | sudo port install fselect 79 | ``` 80 | 81 | ### Usage 82 | 83 | fselect [ARGS] COLUMN[, COLUMN...] 
[from ROOT[, ROOT...]] [where EXPR] [group by COLUMNS] [order by COLUMNS] [limit N] [into FORMAT] 84 | 85 | ### Interactive mode 86 | 87 | fselect -i 88 | 89 | ### Documentation 90 | 91 | [More detailed description. Look at examples first.](docs/usage.md) 92 | 93 | ### Examples 94 | 95 | Find temporary or config files (full path and size): 96 | 97 | fselect size, path from /home/user where name = '*.cfg' or name = '*.tmp' 98 | 99 | Windows users may omit the quotes: 100 | 101 | fselect size, path from C:\Users\user where name = *.cfg or name = *.tmp 102 | 103 | Or put all the arguments into the quotes like this: 104 | 105 | fselect "name from /home/user/tmp where size > 0" 106 | 107 | Search within a directory name with spaces (backticks are also supported): 108 | 109 | fselect "name from '/home/user/dir with spaces' where size > 0" 110 | fselect "name from `/home/user/dir with spaces` where size > 0" 111 | 112 | Or simply escape the single quote: 113 | 114 | fselect name from \'/home/user/dir with spaces\' where size gt 0 115 | 116 | Specify the file size, get an absolute path, and add it to the results: 117 | 118 | cd /home/user 119 | fselect size, abspath from ./tmp where size gt 2g 120 | fselect fsize, abspath from ./tmp where size = 5m 121 | fselect hsize, abspath from ./tmp where size lt 8k 122 | fselect name, size from ./tmp where size between 5mb and 6mb 123 | 124 | More complex query: 125 | 126 | fselect "name from /tmp where (name = *.tmp and size = 0) or (name = *.cfg and size > 1000000)" 127 | 128 | Aggregate functions (you can use curly braces if you want and even combine them with the regular parentheses): 129 | 130 | fselect "MIN(size), MAX{size}, AVG(size), SUM{size}, COUNT(*) from /home/user/Downloads" 131 | 132 | Formatting functions: 133 | 134 | fselect "LOWER(name), UPPER(name), LENGTH(name), YEAR(modified) from /home/user/Downloads" 135 | 136 | Get the year of the oldest file: 137 | 138 | fselect "MIN(YEAR(modified)) from /home/user" 139 | 140 
| Use single quotes if you need to address files with spaces: 141 | 142 | fselect "path from '/home/user/Misc stuff' where name != 'Some file'" 143 | 144 | Regular expressions of [Rust flavor](https://docs.rs/regex/1.1.0/regex/#syntax) are supported: 145 | 146 | fselect name from /home/user where path =~ '.*Rust.*' 147 | 148 | Negate regular expressions: 149 | 150 | fselect "name from . where path !=~ '^\./config'" 151 | 152 | Simple globs expand automatically and work with `=` and `!=` operators: 153 | 154 | fselect name from /home/user where path = '*Rust*' 155 | 156 | Classic LIKE: 157 | 158 | fselect "path from /home/user where name like '%report-2018-__-__???'" 159 | 160 | Exact match operators to search with regexps disabled: 161 | 162 | fselect "path from /home/user where name === 'some_*_weird_*_name'" 163 | 164 | Find files by date: 165 | 166 | fselect path from /home/user where created = 2017-05-01 167 | fselect path from /home/user where modified = today 168 | fselect path from /home/user where accessed = yesterday 169 | fselect "path from /home/user where modified = 'apr 1'" 170 | fselect "path from /home/user where modified = 'last fri'" 171 | 172 | Be more specific to match all files created at an interval between 3PM and 4PM: 173 | 174 | fselect path from /home/user where created = '2017-05-01 15' 175 | 176 | And even more specific: 177 | 178 | fselect path from /home/user where created = '2017-05-01 15:10' 179 | fselect path from /home/user where created = '2017-05-01 15:10:30' 180 | 181 | Date and time intervals are possible (find everything updated since May 1st): 182 | 183 | fselect path from /home/user where modified gte 2017-05-01 184 | 185 | Default is the current directory: 186 | 187 | fselect path, size where name = '*.jpg' 188 | 189 | Search within multiple locations: 190 | 191 | fselect path from /home/user/oldstuff, /home/user/newstuff where name = '*.jpg' 192 | 193 | With minimum and/or maximum depth specified (`depth` is a synonym for 
`maxdepth`): 194 | 195 | fselect path from /home/user/oldstuff depth 5 where name = '*.jpg' 196 | fselect path from /home/user/oldstuff mindepth 2 maxdepth 5, /home/user/newstuff depth 10 where name = '*.jpg' 197 | 198 | Optionally follow symlinks: 199 | 200 | fselect path, size from /home/user symlinks where name = '*.jpg' 201 | 202 | Search within archives (currently only zip-archives are supported): 203 | 204 | fselect path, size from /home/user archives where name = '*.jpg' 205 | 206 | Or in combination: 207 | 208 | fselect size, path from /home/user depth 5 archives symlinks where name = '*.jpg' limit 100 209 | 210 | Enable `.gitignore` or `.hgignore` support: 211 | 212 | fselect size, path from /home/user/projects gitignore where name = '*.cpp' 213 | fselect size, path from /home/user/projects git where name = '*.cpp' 214 | fselect size, path from /home/user/projects hgignore where name = '*.py' 215 | 216 | Search by image dimensions: 217 | 218 | fselect CONCAT(width, 'x', height), path from /home/user/photos where width gte 2000 or height gte 2000 219 | 220 | Find square images: 221 | 222 | fselect path from /home/user/Photos where width = height 223 | 224 | Find images with a known name part but unknown extension: 225 | 226 | fselect path from /home/user/projects where name = "*RDS*" and width gte 1 227 | 228 | Find old-school rap MP3 files: 229 | 230 | fselect duration, path from /home/user/music where genre = Rap and bitrate = 320 and mp3_year lt 2000 231 | 232 | Shortcuts to common file extensions: 233 | 234 | fselect path from /home/user where is_archive = true 235 | fselect path, mime from /home/user where is_audio = 1 236 | fselect path, mime from /home/user where is_book != false 237 | 238 | Even simpler way of using boolean columns: 239 | 240 | fselect path from /home/user where is_doc 241 | fselect path from /home/user where is_image 242 | fselect path from /home/user where is_video 243 | 244 | Find files with dangerous permissions: 245 | 246 | 
fselect mode, path from /home/user where other_write or other_exec 247 | fselect mode, path from /home/user where other_all 248 | 249 | Simple glob-like expressions or even regular expressions in file mode are possible: 250 | 251 | fselect mode, path from /home/user where mode = '*rwx' 252 | fselect mode, path from /home/user where mode =~ '.*rwx$' 253 | 254 | Find files by owner's uid or gid: 255 | 256 | fselect uid, gid, path from /home/user where uid != 1000 or gid != 1000 257 | 258 | Or by owner's or group's name: 259 | 260 | fselect user, group, path from /home/user where user = mike or group = mike 261 | 262 | Find special files: 263 | 264 | fselect name from /usr/bin where suid 265 | fselect path from /tmp where is_pipe 266 | fselect path from /tmp where is_socket 267 | 268 | Find files with xattrs, check if a particular xattr exists, or get its value: 269 | 270 | fselect "path, has_xattrs, has_xattr(user.test), xattr(user.test) from /home/user" 271 | 272 | Include arbitrary text as columns: 273 | 274 | fselect "name, ' has size of ', size, ' bytes'" 275 | 276 | Group results: 277 | 278 | fselect "ext, count(*) from /tmp group by ext" 279 | 280 | Order results: 281 | 282 | fselect path from /tmp order by size desc, name 283 | fselect modified, fsize, path from ~ order by 1 desc, 3 284 | 285 | Finally, limit the results: 286 | 287 | fselect name from /home/user/samples limit 5 288 | 289 | Format output: 290 | 291 | fselect size, path from /home/user limit 5 into json 292 | fselect size, path from /home/user limit 5 into csv 293 | fselect size, path from /home/user limit 5 into html 294 | 295 | ### License 296 | 297 | MIT/Apache-2.0 298 | 299 | --- 300 | 301 | Supported by [JetBrains IDEA](https://jb.gg/OpenSourceSupport) open source license 302 | -------------------------------------------------------------------------------- /docs/fselect.1: -------------------------------------------------------------------------------- 1 | .TH FSELECT 1 2 | .SH NAME 3 | 
fselect \- find files with SQL-like queries 4 | .SH SYNOPSIS 5 | .B fselect 6 | .B [ARGS] 7 | COLUMN 8 | [, COLUMN ...] 9 | [ from ROOT [, ROOT...] ] 10 | [where EXPR] 11 | [group by COLUMNS] 12 | [order by COLUMNS] 13 | [limit N] 14 | [into FORMAT] 15 | .SH DESCRIPTION 16 | .B fselect 17 | is a simple utility with a SQL-like query for finding files. 18 | .PP 19 | You write a SQL-like query, that's it. 20 | .PP 21 | The fselect command itself is like the first keyword (select, i.e., file select). 22 | But if you occasionally put one more select after it, that's not a problem. 23 | .PP 24 | Next you put the columns you are interested in. 25 | It could be file name or path, size, modification date, etc. 26 | See the full list of possible columns. 27 | You can add columns with arbitrary text (put in quotes if it contains spaces). 28 | A few functions (aggregating and formatting) are at your service. 29 | You can use arithmetic expressions when it makes sense. 30 | .PP 31 | Where to search? Specify with the 32 | .B from 33 | keyword. You can list one or more directories separated with commas. 34 | If you leave out the from, then the current directory will be processed. 35 | .PP 36 | What to search? Use 37 | .B where 38 | with any number of conditions. 39 | .PP 40 | Order results like in real SQL with 41 | .B order by. 42 | All columns are supported for ordering by, as well as asc/desc parameters and positional numeric shortcuts. 43 | .PP 44 | Limiting search results is possible with limit. Formatting options are supported with the 45 | .B into 46 | keyword. 47 | .PP 48 | If you want to use operators containing \> or \<, put the whole query into double quotes. 49 | This will protect the query from the shell and output redirection. 50 | The same applies to queries with parentheses or *, ? 
and other special symbols that are shell metacharacters 51 | .RE 52 | .SH COLUMNS AND FIELDS 53 | .IP \(bu 54 | name 55 | .IP \(bu 56 | extension or ext 57 | .IP \(bu 58 | path 59 | .IP \(bu 60 | abspath 61 | .IP \(bu 62 | directory or dirname or dir 63 | .IP \(bu 64 | absdir 65 | .IP \(bu 66 | size 67 | .IP \(bu 68 | hsize or fsize 69 | .IP \(bu 70 | uid 71 | .IP \(bu 72 | gid 73 | .IP \(bu 74 | user 75 | .IP \(bu 76 | group 77 | .IP \(bu 78 | created 79 | .IP \(bu 80 | accessed 81 | .IP \(bu 82 | modified 83 | .IP \(bu 84 | is_dir 85 | .IP \(bu 86 | is_file 87 | .IP \(bu 88 | is_symlink 89 | .IP \(bu 90 | is_pipe or is_fifo 91 | .IP \(bu 92 | is_character or is_char 93 | .IP \(bu 94 | is_block 95 | .IP \(bu 96 | is_socket 97 | .IP \(bu 98 | device 99 | .IP \(bu 100 | inode 101 | .IP \(bu 102 | blocks 103 | .IP \(bu 104 | hardlinks 105 | .IP \(bu 106 | mode 107 | .IP \(bu 108 | user_read 109 | .IP \(bu 110 | user_write 111 | .IP \(bu 112 | user_exec 113 | .IP \(bu 114 | user_all 115 | .IP \(bu 116 | group_read 117 | .IP \(bu 118 | group_write 119 | .IP \(bu 120 | group_exec 121 | .IP \(bu 122 | group_all 123 | .IP \(bu 124 | other_read 125 | .IP \(bu 126 | other_write 127 | .IP \(bu 128 | other_exec 129 | .IP \(bu 130 | other_all 131 | .IP \(bu 132 | suid 133 | .IP \(bu 134 | sgid 135 | .IP \(bu 136 | is_hidden 137 | .IP \(bu 138 | has_xattrs 139 | .IP \(bu 140 | capabilities or caps 141 | .IP \(bu 142 | is_shebang 143 | .IP \(bu 144 | is_empty 145 | .IP \(bu 146 | width 147 | .IP \(bu 148 | height 149 | .IP \(bu 150 | duration 151 | .IP \(bu 152 | mp3_bitrate or bitrate 153 | .IP \(bu 154 | mp3_freq or freq 155 | .IP \(bu 156 | mp3_title or title 157 | .IP \(bu 158 | mp3_artist or artist 159 | .IP \(bu 160 | mp3_album or album 161 | .IP \(bu 162 | mp3_genre or genre 163 | .IP \(bu 164 | mp3_year 165 | .IP \(bu 166 | exif_datetime 167 | .IP \(bu 168 | exif_altitude or exif_alt 169 | .IP \(bu 170 | exif_latitude or exif_lat 171 | .IP \(bu 172 | exif_longitude or 
exif_lng or exif_lon 173 | .IP \(bu 174 | exif_make 175 | .IP \(bu 176 | exif_model 177 | .IP \(bu 178 | exif_software 179 | .IP \(bu 180 | exif_version 181 | .IP \(bu 182 | exif_exposure_time or exif_exptime 183 | .IP \(bu 184 | exif_aperture 185 | .IP \(bu 186 | exif_shutter_speed 187 | .IP \(bu 188 | exif_f_number or exif_f_num 189 | .IP \(bu 190 | exif_iso_speed or exif_iso 191 | .IP \(bu 192 | exif_focal_length or exif_focal_len 193 | .IP \(bu 194 | exif_lens_make 195 | .IP \(bu 196 | exif_lens_model 197 | .IP \(bu 198 | mime 199 | .IP \(bu 200 | is_binary 201 | .IP \(bu 202 | is_text 203 | .IP \(bu 204 | line_count 205 | .IP \(bu 206 | is_archive 207 | .IP \(bu 208 | is_audio 209 | .IP \(bu 210 | is_book 211 | .IP \(bu 212 | is_doc 213 | .IP \(bu 214 | is_font 215 | .IP \(bu 216 | is_image 217 | .IP \(bu 218 | is_source 219 | .IP \(bu 220 | is_video 221 | .IP \(bu 222 | sha1 223 | .IP \(bu 224 | sha2_256 or sha256 225 | .IP \(bu 226 | sha2_512 or sha512 227 | .IP \(bu 228 | sha3_512 or sha3 229 | .RE 230 | .SH ENVIRONMENT 231 | .TP 232 | .B LS_COLORS 233 | Determines how to colorize search results, see 234 | .BR dircolors (1) . 235 | .SH EXIT STATUS 236 | The 237 | .B fselect 238 | utility exits with status 0 as long as the provided query parses correctly.
239 | .SH EXAMPLES 240 | .TP 241 | .RI "Find files and directories that match the pattern '" needle "':" 242 | $ fselect name WHERE name =~ "needle" 243 | .TP 244 | .RI "Start a search in a given directory (" /var/log "):" 245 | $ fselect name FROM /var/log 246 | .SH SEE ALSO 247 | .BR find (1) 248 | .BR fd (1) 249 | -------------------------------------------------------------------------------- /resources/test/audio/silent-35s.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/audio/silent-35s.mp3 -------------------------------------------------------------------------------- /resources/test/audio/silent.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/audio/silent.wav -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.bmp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/image/rust-logo-blk.bmp -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/image/rust-logo-blk.gif -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/image/rust-logo-blk.jpeg 
-------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/image/rust-logo-blk.jpg -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/image/rust-logo-blk.png -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/image/rust-logo-blk.tiff -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/image/rust-logo-blk.webp -------------------------------------------------------------------------------- /resources/test/image/rust-logo-blk_corrupted.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /resources/test/video/rust-logo-blk.mkv: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/video/rust-logo-blk.mkv -------------------------------------------------------------------------------- /resources/test/video/rust-logo-blk.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jhspetersson/fselect/e7e64a713d94535b1412af22db2f7706e6867548/resources/test/video/rust-logo-blk.mp4 -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | //! Handles configuration loading and saving 2 | 3 | use std::fs; 4 | use std::io::{Read, Write}; 5 | use std::path::PathBuf; 6 | 7 | use directories::ProjectDirs; 8 | 9 | const ORGANIZATION: &str = "jhspetersson"; 10 | const APPLICATION: &str = "fselect"; 11 | const CONFIG_FILE: &str = "config.toml"; 12 | 13 | macro_rules! vec_of_strings { 14 | ($($str:literal),*) => { 15 | Some(vec![ 16 | $(String::from($str)),* 17 | ]) 18 | } 19 | } 20 | 21 | #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] 22 | pub struct Config { 23 | pub no_color: Option, 24 | pub gitignore: Option, 25 | pub hgignore: Option, 26 | pub dockerignore: Option, 27 | pub is_zip_archive: Option>, 28 | pub is_archive: Option>, 29 | pub is_audio: Option>, 30 | pub is_book: Option>, 31 | pub is_doc: Option>, 32 | pub is_font: Option>, 33 | pub is_image: Option>, 34 | pub is_source: Option>, 35 | pub is_video: Option>, 36 | pub default_file_size_format: Option, 37 | pub check_for_updates: Option, 38 | #[serde(skip_serializing, default = "get_false")] 39 | pub debug: bool, 40 | #[serde(skip)] 41 | save: bool, 42 | } 43 | 44 | fn get_false() -> bool { 45 | false 46 | } 47 | 48 | impl Config { 49 | pub fn new() -> Result { 50 | let mut config_file; 51 | 52 | if let Some(cf) = Self::get_current_dir_config() { 53 | config_file = cf; 54 | } else { 55 | let 
config_dir = Self::get_project_dir(); 56 | 57 | if config_dir.is_none() { 58 | return Ok(Config::default()); 59 | } 60 | 61 | config_file = config_dir.unwrap(); 62 | config_file.push(CONFIG_FILE); 63 | 64 | if !config_file.exists() { 65 | return Ok(Config::default()); 66 | } 67 | } 68 | 69 | Config::from(config_file) 70 | } 71 | 72 | pub fn from(config_file: PathBuf) -> Result { 73 | if let Ok(mut file) = fs::File::open(config_file) { 74 | let mut contents = String::new(); 75 | if file.read_to_string(&mut contents).is_ok() { 76 | toml::from_str(&contents).map_err(|err| err.to_string()) 77 | } else { 78 | Err("Could not read config file. Using default settings.".to_string()) 79 | } 80 | } else { 81 | Err("Could not open config file. Using default settings.".to_string()) 82 | } 83 | } 84 | 85 | fn get_current_dir_config() -> Option { 86 | if let Ok(mut pb) = std::env::current_exe() { 87 | pb.pop(); 88 | pb.push(CONFIG_FILE); 89 | if pb.exists() { 90 | return Some(pb); 91 | } 92 | } 93 | 94 | None 95 | } 96 | 97 | #[cfg(not(windows))] 98 | fn get_project_dir() -> Option { 99 | ProjectDirs::from("", ORGANIZATION, APPLICATION).map(|pd| pd.config_dir().to_path_buf()) 100 | } 101 | 102 | #[cfg(windows)] 103 | fn get_project_dir() -> Option { 104 | ProjectDirs::from("", ORGANIZATION, APPLICATION) 105 | .map(|pd| pd.config_dir().parent().unwrap().to_path_buf()) 106 | } 107 | 108 | pub fn save(&self) { 109 | if !self.save { 110 | return; 111 | } 112 | 113 | let config_dir = Self::get_project_dir(); 114 | 115 | if config_dir.is_none() { 116 | return; 117 | } 118 | 119 | let mut config_file = config_dir.unwrap(); 120 | let _ = fs::create_dir_all(&config_file); 121 | config_file.push(CONFIG_FILE); 122 | 123 | if config_file.exists() { 124 | return; 125 | } 126 | 127 | let toml = toml::to_string_pretty(&self).unwrap(); 128 | 129 | if let Ok(mut file) = fs::File::create(&config_file) { 130 | let _ = file.write_all(toml.as_bytes()); 131 | } 132 | } 133 | 134 | pub fn default() -> 
Config { 135 | Config { 136 | no_color: Some(false), 137 | gitignore: Some(false), 138 | hgignore: Some(false), 139 | dockerignore: Some(false), 140 | is_zip_archive: vec_of_strings![".zip", ".jar", ".war", ".ear"], 141 | is_archive: vec_of_strings![ 142 | ".7z", ".bz2", ".bzip2", ".gz", ".gzip", ".lz", ".rar", ".tar", ".xz", ".zip" 143 | ], 144 | is_audio: vec_of_strings![ 145 | ".aac", ".aiff", ".amr", ".flac", ".gsm", ".m4a", ".m4b", ".m4p", ".mp3", ".ogg", 146 | ".wav", ".wma" 147 | ], 148 | is_book: vec_of_strings![ 149 | ".azw3", ".chm", ".djv", ".djvu", ".epub", ".fb2", ".mobi", ".pdf" 150 | ], 151 | is_doc: vec_of_strings![ 152 | ".accdb", ".doc", ".docm", ".docx", ".dot", ".dotm", ".dotx", ".mdb", ".odp", 153 | ".ods", ".odt", ".pdf", ".potm", ".potx", ".ppt", ".pptm", ".pptx", ".rtf", ".xlm", 154 | ".xls", ".xlsm", ".xlsx", ".xlt", ".xltm", ".xltx", ".xps" 155 | ], 156 | is_font: vec_of_strings![ 157 | ".eot", ".fon", ".otc", ".otf", ".ttc", ".ttf", ".woff", ".woff2" 158 | ], 159 | is_image: vec_of_strings![ 160 | ".bmp", ".exr", ".gif", ".heic", ".jpeg", ".jpg", ".jxl", ".png", ".psb", ".psd", 161 | ".svg", ".tga", ".tiff", ".webp" 162 | ], 163 | is_source: vec_of_strings![ 164 | ".asm", ".awk", ".bas", ".c", ".cc", ".ceylon", ".clj", ".coffee", ".cpp", ".cs", ".d", 165 | ".dart", ".elm", ".erl", ".go", ".gradle", ".groovy", ".h", ".hh", ".hpp", ".java", 166 | ".jl", ".js", ".jsp", ".jsx", ".kt", ".kts", ".lua", ".nim", ".pas", ".php", ".pl", 167 | ".pm", ".py", ".rb", ".rs", ".scala", ".sol", ".swift", ".tcl", ".ts", ".tsx", 168 | ".vala", ".vb", ".zig" 169 | ], 170 | is_video: vec_of_strings![ 171 | ".3gp", ".avi", ".flv", ".m4p", ".m4v", ".mkv", ".mov", ".mp4", ".mpeg", ".mpg", 172 | ".webm", ".wmv" 173 | ], 174 | default_file_size_format: Some(String::new()), 175 | check_for_updates: Some(false), 176 | debug: false, 177 | save: true, 178 | } 179 | } 180 | } 181 | 182 | #[cfg(test)] 183 | mod tests { 184 | use super::*; 185 | 186 | #[test] 187 | fn 
default_config() { 188 | let config = Config::default(); 189 | 190 | assert!(config.is_source.unwrap().contains(&String::from(".rs"))); 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /src/expr.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | use std::fmt; 3 | use std::fmt::Display; 4 | use std::fmt::Formatter; 5 | 6 | use crate::field::Field; 7 | use crate::function::Function; 8 | use crate::operators::ArithmeticOp; 9 | use crate::operators::LogicalOp; 10 | use crate::operators::Op; 11 | 12 | #[derive(Debug, Clone, PartialOrd, PartialEq, Eq, Hash, Serialize)] 13 | pub struct Expr { 14 | pub left: Option>, 15 | pub arithmetic_op: Option, 16 | pub logical_op: Option, 17 | pub op: Option, 18 | pub right: Option>, 19 | pub minus: bool, 20 | pub field: Option, 21 | pub function: Option, 22 | pub args: Option>, 23 | pub val: Option, 24 | pub weight: i32, 25 | } 26 | 27 | impl Expr { 28 | pub fn new() -> Expr { 29 | Expr { 30 | left: None, 31 | arithmetic_op: None, 32 | logical_op: None, 33 | op: None, 34 | right: None, 35 | minus: false, 36 | field: None, 37 | function: None, 38 | args: None, 39 | val: None, 40 | weight: 0, 41 | } 42 | } 43 | 44 | pub fn op(left: Expr, op: Op, right: Expr) -> Expr { 45 | let left_weight = left.weight; 46 | let right_weight = right.weight; 47 | 48 | Expr { 49 | left: Some(Box::new(left)), 50 | arithmetic_op: None, 51 | logical_op: None, 52 | op: Some(op), 53 | right: Some(Box::new(right)), 54 | minus: false, 55 | field: None, 56 | function: None, 57 | args: None, 58 | val: None, 59 | weight: left_weight + right_weight, 60 | } 61 | } 62 | 63 | pub fn logical_op(left: Expr, logical_op: LogicalOp, right: Expr) -> Expr { 64 | let left_weight = left.weight; 65 | let right_weight = right.weight; 66 | 67 | Expr { 68 | left: Some(Box::new(left)), 69 | arithmetic_op: None, 70 | logical_op: Some(logical_op), 71 | op: 
None, 72 | right: Some(Box::new(right)), 73 | minus: false, 74 | field: None, 75 | function: None, 76 | args: None, 77 | val: None, 78 | weight: left_weight + right_weight, 79 | } 80 | } 81 | 82 | pub fn arithmetic_op(left: Expr, arithmetic_op: ArithmeticOp, right: Expr) -> Expr { 83 | let left_weight = left.weight; 84 | let right_weight = right.weight; 85 | 86 | Expr { 87 | left: Some(Box::new(left)), 88 | arithmetic_op: Some(arithmetic_op), 89 | logical_op: None, 90 | op: None, 91 | right: Some(Box::new(right)), 92 | minus: false, 93 | field: None, 94 | function: None, 95 | args: None, 96 | val: None, 97 | weight: left_weight + right_weight, 98 | } 99 | } 100 | 101 | pub fn field(field: Field) -> Expr { 102 | let weight = field.get_weight(); 103 | 104 | Expr { 105 | left: None, 106 | arithmetic_op: None, 107 | logical_op: None, 108 | op: None, 109 | right: None, 110 | minus: false, 111 | field: Some(field), 112 | function: None, 113 | args: None, 114 | val: None, 115 | weight, 116 | } 117 | } 118 | 119 | pub fn function(function: Function) -> Expr { 120 | let weight = function.get_weight(); 121 | 122 | Expr { 123 | left: None, 124 | arithmetic_op: None, 125 | logical_op: None, 126 | op: None, 127 | right: None, 128 | minus: false, 129 | field: None, 130 | function: Some(function), 131 | args: Some(vec![]), 132 | val: None, 133 | weight, 134 | } 135 | } 136 | 137 | pub fn function_left(function: Function, left: Option>) -> Expr { 138 | let weight = function.get_weight(); 139 | let left_weight = match left { 140 | Some(ref expr) => expr.weight, 141 | None => 0, 142 | }; 143 | 144 | Expr { 145 | left, 146 | arithmetic_op: None, 147 | logical_op: None, 148 | op: None, 149 | right: None, 150 | minus: false, 151 | field: None, 152 | function: Some(function), 153 | args: Some(vec![]), 154 | val: None, 155 | weight: weight + left_weight, 156 | } 157 | } 158 | 159 | pub fn value(value: String) -> Expr { 160 | Expr { 161 | left: None, 162 | arithmetic_op: None, 163 | 
logical_op: None, 164 | op: None, 165 | right: None, 166 | minus: false, 167 | field: None, 168 | function: None, 169 | args: None, 170 | val: Some(value), 171 | weight: 0, 172 | } 173 | } 174 | 175 | pub fn add_left(&mut self, left: Expr) { 176 | let left_weight = left.weight; 177 | self.left = Some(Box::new(left)); 178 | self.weight += left_weight; 179 | } 180 | 181 | pub fn set_args(&mut self, args: Vec) { 182 | let mut args_weight = 0; 183 | for arg in &args { 184 | args_weight += arg.weight; 185 | } 186 | self.args = Some(args); 187 | self.weight += args_weight; 188 | } 189 | 190 | pub fn has_aggregate_function(&self) -> bool { 191 | if let Some(ref left) = self.left { 192 | if left.has_aggregate_function() { 193 | return true; 194 | } 195 | } 196 | 197 | if let Some(ref right) = self.right { 198 | if right.has_aggregate_function() { 199 | return true; 200 | } 201 | } 202 | 203 | if let Some(ref function) = self.function { 204 | if function.is_aggregate_function() { 205 | return true; 206 | } 207 | } 208 | 209 | if let Some(ref args) = self.args { 210 | for arg in args { 211 | if arg.has_aggregate_function() { 212 | return true; 213 | } 214 | } 215 | } 216 | 217 | false 218 | } 219 | 220 | pub fn get_required_fields(&self) -> HashSet { 221 | let mut result = HashSet::new(); 222 | 223 | if let Some(ref left) = self.left { 224 | result.extend(left.get_required_fields()); 225 | } 226 | 227 | if let Some(ref right) = self.right { 228 | result.extend(right.get_required_fields()); 229 | } 230 | 231 | if let Some(field) = self.field { 232 | result.insert(field); 233 | } 234 | 235 | if let Some(ref args) = self.args { 236 | for arg in args { 237 | result.extend(arg.get_required_fields()); 238 | } 239 | } 240 | 241 | result 242 | } 243 | 244 | pub fn contains_numeric(&self) -> bool { 245 | Self::contains_numeric_field(self) 246 | } 247 | 248 | fn contains_numeric_field(expr: &Expr) -> bool { 249 | let field = match expr.field { 250 | Some(ref field) => 
field.is_numeric_field(), 251 | None => false, 252 | }; 253 | 254 | if field { 255 | return true; 256 | } 257 | 258 | let function = match expr.function { 259 | Some(ref function) => function.is_numeric_function(), 260 | None => false, 261 | }; 262 | 263 | if function { 264 | return true; 265 | } 266 | 267 | match expr.left { 268 | Some(ref left) => Self::contains_numeric_field(left), 269 | None => false, 270 | } 271 | } 272 | 273 | pub fn contains_datetime(&self) -> bool { 274 | Self::contains_datetime_field(self) 275 | } 276 | 277 | fn contains_datetime_field(expr: &Expr) -> bool { 278 | let field = match expr.field { 279 | Some(ref field) => field.is_datetime_field(), 280 | None => false, 281 | }; 282 | 283 | if field { 284 | return true; 285 | } 286 | 287 | match expr.left { 288 | Some(ref left) => Self::contains_datetime_field(left), 289 | None => false, 290 | } 291 | } 292 | 293 | pub fn contains_colorized(&self) -> bool { 294 | Self::contains_colorized_field(self) 295 | } 296 | 297 | fn contains_colorized_field(expr: &Expr) -> bool { 298 | if expr.function.is_some() { 299 | return false; 300 | } 301 | 302 | let field = match expr.field { 303 | Some(ref field) => field.is_colorized_field(), 304 | None => false, 305 | }; 306 | 307 | if field { 308 | return true; 309 | } 310 | 311 | match expr.left { 312 | Some(ref left) => Self::contains_colorized_field(left), 313 | None => false, 314 | } 315 | } 316 | } 317 | 318 | impl Display for Expr { 319 | fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { 320 | use std::fmt::Write; 321 | 322 | if self.minus { 323 | fmt.write_char('-')?; 324 | } 325 | 326 | if let Some(ref function) = self.function { 327 | fmt.write_str(&function.to_string())?; 328 | fmt.write_char('(')?; 329 | if let Some(ref left) = self.left { 330 | fmt.write_str(&left.to_string())?; 331 | } 332 | fmt.write_char(')')?; 333 | } else if let Some(ref left) = self.left { 334 | fmt.write_str(&left.to_string())?; 335 | } 336 | 337 | if let Some(ref field) 
= self.field { 338 | fmt.write_str(&field.to_string())?; 339 | } 340 | 341 | if let Some(ref val) = self.val { 342 | fmt.write_str(val)?; 343 | } 344 | 345 | if let Some(ref right) = self.right { 346 | fmt.write_str(&right.to_string())?; 347 | } 348 | 349 | Ok(()) 350 | } 351 | } 352 | 353 | #[cfg(test)] 354 | mod tests { 355 | use super::*; 356 | use crate::field::Field; 357 | use crate::function::Function; 358 | 359 | #[test] 360 | fn test_weight() { 361 | let expr = Expr::field(Field::Name); 362 | assert_eq!(expr.weight, 0); 363 | 364 | let expr = Expr::field(Field::Accessed); 365 | assert_eq!(expr.weight, 1); 366 | 367 | let expr = Expr::function(Function::Concat); 368 | assert_eq!(expr.weight, 0); 369 | 370 | let expr = Expr::function(Function::Contains); 371 | assert_eq!(expr.weight, 1024); 372 | 373 | let expr = Expr::function_left(Function::Contains, Some(Box::new(Expr::value("foo".to_string())))); 374 | assert_eq!(expr.weight, 1024); 375 | 376 | let expr = Expr::logical_op( 377 | Expr::op( 378 | Expr::field(Field::Size), 379 | Op::Gt, 380 | Expr::value(String::from("456")), 381 | ), 382 | LogicalOp::Or, 383 | Expr::op( 384 | Expr::field(Field::FormattedSize), 385 | Op::Lte, 386 | Expr::value(String::from("758")), 387 | ), 388 | ); 389 | assert_eq!(expr.weight, 2); 390 | 391 | let expr = Expr::logical_op( 392 | Expr::logical_op( 393 | Expr::op( 394 | Expr::field(Field::Name), 395 | Op::Ne, 396 | Expr::value(String::from("123")), 397 | ), 398 | LogicalOp::And, 399 | Expr::logical_op( 400 | Expr::op( 401 | Expr::field(Field::Size), 402 | Op::Gt, 403 | Expr::value(String::from("456")), 404 | ), 405 | LogicalOp::Or, 406 | Expr::op( 407 | Expr::field(Field::FormattedSize), 408 | Op::Lte, 409 | Expr::value(String::from("758")), 410 | ), 411 | ), 412 | ), 413 | LogicalOp::Or, 414 | Expr::op( 415 | Expr::field(Field::Name), 416 | Op::Eq, 417 | Expr::value(String::from("xxx")), 418 | ), 419 | ); 420 | assert_eq!(expr.weight, 2); 421 | } 422 | } 
-------------------------------------------------------------------------------- /src/field.rs: -------------------------------------------------------------------------------- 1 | //! Defines the various fields available in the query language 2 | 3 | use std::fmt::Display; 4 | use std::fmt::Error; 5 | use std::fmt::Formatter; 6 | use std::str::FromStr; 7 | 8 | use serde::ser::{Serialize, Serializer}; 9 | 10 | macro_rules! fields { 11 | ( 12 | $(#[$enum_attrs:meta])* 13 | $vis:vis enum $enum_name:ident { 14 | $( 15 | #[text = [$($text:literal),*]$(,)? $(data_type = $data_type:literal)?] 16 | $(@colorized = $colorized:literal)? 17 | $(@for_archived = $for_archived:literal)? 18 | $(@weight = $weight:literal)? 19 | $(@description = $description:literal)? 20 | $(#[$variant_attrs:meta])* 21 | $variant:ident 22 | ),* 23 | $(,)? 24 | } 25 | 26 | ) => { 27 | $(#[$enum_attrs])* 28 | $vis enum $enum_name { 29 | $( 30 | $(#[$variant_attrs])* 31 | $variant, 32 | )* 33 | } 34 | 35 | impl FromStr for $enum_name { 36 | type Err = String; 37 | 38 | fn from_str(s: &str) -> Result { 39 | let field = s.to_ascii_lowercase(); 40 | 41 | match field.as_str() { 42 | $( 43 | $(#[$variant_attrs])* 44 | $($text)|* => Ok($enum_name::$variant), 45 | )* 46 | _ => { 47 | let err = String::from("Unknown field ") + &field; 48 | Err(err) 49 | } 50 | } 51 | } 52 | } 53 | 54 | impl Display for $enum_name { 55 | fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { 56 | write!(f, "{:?}", self) 57 | } 58 | } 59 | 60 | impl Serialize for $enum_name { 61 | fn serialize(&self, serializer: S) -> Result 62 | where 63 | S: Serializer, 64 | { 65 | serializer.serialize_str(&self.to_string()) 66 | } 67 | } 68 | 69 | impl $enum_name { 70 | pub fn is_numeric_field(&self) -> bool { 71 | match self { 72 | $( 73 | $(#[$variant_attrs])* 74 | $enum_name::$variant => { 75 | stringify!($($data_type)?) 
.replace("\"", "") == "numeric" 76 | } 77 | )* 78 | } 79 | } 80 | 81 | pub fn is_datetime_field(&self) -> bool { 82 | match self { 83 | $( 84 | $(#[$variant_attrs])* 85 | $enum_name::$variant => { 86 | stringify!($($data_type)?) .replace("\"", "") == "datetime" 87 | } 88 | )* 89 | } 90 | } 91 | 92 | pub fn is_boolean_field(&self) -> bool { 93 | match self { 94 | $( 95 | $(#[$variant_attrs])* 96 | $enum_name::$variant => { 97 | stringify!($($data_type)?) .replace("\"", "") == "boolean" 98 | } 99 | )* 100 | } 101 | } 102 | 103 | pub fn is_colorized_field(&self) -> bool { 104 | match self { 105 | $( 106 | $(#[$variant_attrs])* 107 | $enum_name::$variant => { 108 | stringify!($($colorized)?) == "true" 109 | } 110 | )* 111 | } 112 | } 113 | 114 | pub fn is_available_for_archived_files(&self) -> bool { 115 | match self { 116 | $( 117 | $(#[$variant_attrs])* 118 | $enum_name::$variant => { 119 | stringify!($($for_archived)?) == "true" 120 | } 121 | )* 122 | } 123 | } 124 | 125 | pub fn get_weight(&self) -> i32 { 126 | match self { 127 | $( 128 | $(#[$variant_attrs])* 129 | $enum_name::$variant => { 130 | stringify!($($weight)?) .parse().unwrap_or(0) 131 | } 132 | )* 133 | } 134 | } 135 | 136 | pub fn get_names_and_descriptions() -> Vec<(Vec<&'static str>, &'static str)> { 137 | vec![ 138 | $( 139 | $(#[$variant_attrs])* 140 | (vec![$($text,)*], $($description)?), 141 | )* 142 | ] 143 | } 144 | } 145 | }; 146 | } 147 | 148 | fields! 
{ 149 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Hash)] 150 | pub enum Field { 151 | #[text = ["name"]] 152 | @colorized = true 153 | @for_archived = true 154 | @description = "Returns the name (with extension) of the file" 155 | Name, 156 | 157 | #[text = ["ext", "extension"]] 158 | @for_archived = true 159 | @description = "Returns the extension of the file" 160 | Extension, 161 | 162 | #[text = ["path"]] 163 | @for_archived = true 164 | @description = "Returns the path of the file" 165 | Path, 166 | 167 | #[text = ["abspath"]] 168 | @for_archived = true 169 | @weight = 1 170 | @description = "Returns the absolute path of the file" 171 | AbsPath, 172 | 173 | #[text = ["dir", "directory", "dirname"]] 174 | @for_archived = true 175 | @description = "Returns the directory of the file" 176 | Directory, 177 | 178 | #[text = ["absdir"]] 179 | @for_archived = true 180 | @weight = 1 181 | @description = "Returns the absolute directory of the file" 182 | AbsDir, 183 | 184 | #[text = ["size"], data_type = "numeric"] 185 | @for_archived = true 186 | @weight = 1 187 | @description = "Returns the size of the file in bytes" 188 | Size, 189 | 190 | #[text = ["fsize", "hsize"], data_type = "numeric"] 191 | @for_archived = true 192 | @weight = 1 193 | @description = "Returns the size of the file accompanied with the unit" 194 | FormattedSize, 195 | 196 | #[text = ["uid"], data_type = "numeric"] 197 | @weight = 1 198 | @description = "Returns the UID of the owner" 199 | Uid, 200 | 201 | #[text = ["gid"], data_type = "numeric"] 202 | @weight = 1 203 | @description = "Returns the GID of the owner's group" 204 | Gid, 205 | 206 | #[text = ["user"]] 207 | @weight = 1 208 | @description = "Returns the name of the owner for this file" 209 | #[cfg(all(unix, feature = "users"))] 210 | User, 211 | 212 | #[text = ["group"]] 213 | @weight = 1 214 | @description = "Returns the name of the owner's group for this file" 215 | #[cfg(all(unix, feature = "users"))] 216 | Group, 217 | 
218 | #[text = ["created"], data_type = "datetime"] 219 | @weight = 1 220 | @description = "Returns the file creation date (YYYY-MM-DD HH:MM:SS)" 221 | Created, 222 | 223 | #[text = ["accessed"], data_type = "datetime"] 224 | @weight = 1 225 | @description = "Returns the time the file was last accessed (YYYY-MM-DD HH:MM:SS)" 226 | Accessed, 227 | 228 | #[text = ["modified"], data_type = "datetime"] 229 | @for_archived = true 230 | @weight = 1 231 | @description = "Returns the time the file was last modified (YYYY-MM-DD HH:MM:SS)" 232 | Modified, 233 | 234 | #[text = ["is_dir"], data_type = "boolean"] 235 | @for_archived = true 236 | @weight = 1 237 | @description = "Returns a boolean signifying whether the file path is a directory" 238 | IsDir, 239 | 240 | #[text = ["is_file"], data_type = "boolean"] 241 | @for_archived = true 242 | @weight = 1 243 | @description = "Returns a boolean signifying whether the file path is a file" 244 | IsFile, 245 | 246 | #[text = ["is_symlink"], data_type = "boolean"] 247 | @for_archived = true 248 | @weight = 1 249 | @description = "Returns a boolean signifying whether the file path is a symlink" 250 | IsSymlink, 251 | 252 | #[text = ["is_pipe", "is_fifo"], data_type = "boolean"] 253 | @for_archived = true 254 | @weight = 1 255 | @description = "Returns a boolean signifying whether the file path is a FIFO or pipe file" 256 | IsPipe, 257 | 258 | #[text = ["is_char", "is_character"], data_type = "boolean"] 259 | @for_archived = true 260 | @weight = 1 261 | @description = "Returns a boolean signifying whether the file path is a character device or character special file" 262 | IsCharacterDevice, 263 | 264 | #[text = ["is_block"], data_type = "boolean"] 265 | @for_archived = true 266 | @weight = 1 267 | @description = "Returns a boolean signifying whether the file path is a block or block special file" 268 | IsBlockDevice, 269 | 270 | #[text = ["is_socket"], data_type = "boolean"] 271 | @for_archived = true 272 | @weight = 1 273 | 
@description = "Returns a boolean signifying whether the file path is a socket file" 274 | IsSocket, 275 | 276 | #[text = ["device"]] 277 | @weight = 1 278 | @description = "Returns the code of device the file is stored on" 279 | Device, 280 | 281 | #[text = ["inode"]] 282 | @weight = 1 283 | @description = "Returns the number of inode" 284 | Inode, 285 | 286 | #[text = ["blocks"]] 287 | @weight = 1 288 | @description = "Returns the number of blocks (256 bytes) the file occupies" 289 | Blocks, 290 | 291 | #[text = ["hardlinks"]] 292 | @weight = 1 293 | @description = "Returns the number of hardlinks of the file" 294 | Hardlinks, 295 | 296 | #[text = ["mode"]] 297 | @for_archived = true 298 | @weight = 1 299 | @description = "Returns the permissions of the owner, group, and everybody (similar to the first field in `ls -la`)" 300 | Mode, 301 | 302 | #[text = ["user_read"], data_type = "boolean"] 303 | @for_archived = true 304 | @weight = 1 305 | @description = "Returns a boolean signifying whether the file can be read by the owner" 306 | UserRead, 307 | 308 | #[text = ["user_write"], data_type = "boolean"] 309 | @for_archived = true 310 | @weight = 1 311 | @description = "Returns a boolean signifying whether the file can be written by the owner" 312 | UserWrite, 313 | 314 | #[text = ["user_exec"], data_type = "boolean"] 315 | @for_archived = true 316 | @weight = 1 317 | @description = "Returns a boolean signifying whether the file can be executed by the owner" 318 | UserExec, 319 | 320 | #[text = ["user_all", "user_rwx"], data_type = "boolean"] 321 | @for_archived = true 322 | @weight = 1 323 | @description = "Returns a boolean signifying whether the file can be fully accessed by the owner" 324 | UserAll, 325 | 326 | #[text = ["group_read"], data_type = "boolean"] 327 | @for_archived = true 328 | @weight = 1 329 | @description = "Returns a boolean signifying whether the file can be read by the owner's group" 330 | GroupRead, 331 | 332 | #[text = ["group_write"], 
data_type = "boolean"] 333 | @for_archived = true 334 | @weight = 1 335 | @description = "Returns a boolean signifying whether the file can be written by the owner's group" 336 | GroupWrite, 337 | 338 | #[text = ["group_exec"], data_type = "boolean"] 339 | @for_archived = true 340 | @weight = 1 341 | @description = "Returns a boolean signifying whether the file can be executed by the owner's group" 342 | GroupExec, 343 | 344 | #[text = ["group_all", "group_rwx"], data_type = "boolean"] 345 | @for_archived = true 346 | @weight = 1 347 | @description = "Returns a boolean signifying whether the file can be fully accessed by the group" 348 | GroupAll, 349 | 350 | #[text = ["other_read"], data_type = "boolean"] 351 | @for_archived = true 352 | @weight = 1 353 | @description = "Returns a boolean signifying whether the file can be read by others" 354 | OtherRead, 355 | 356 | #[text = ["other_write"], data_type = "boolean"] 357 | @for_archived = true 358 | @weight = 1 359 | @description = "Returns a boolean signifying whether the file can be written by others" 360 | OtherWrite, 361 | 362 | #[text = ["other_exec"], data_type = "boolean"] 363 | @for_archived = true 364 | @weight = 1 365 | @description = "Returns a boolean signifying whether the file can be executed by others" 366 | OtherExec, 367 | 368 | #[text = ["other_all", "other_rwx"], data_type = "boolean"] 369 | @for_archived = true 370 | @weight = 1 371 | @description = "Returns a boolean signifying whether the file can be fully accessed by the others" 372 | OtherAll, 373 | 374 | #[text = ["suid"], data_type = "boolean"] 375 | @for_archived = true 376 | @weight = 1 377 | @description = "Returns a boolean signifying whether the file permissions have a SUID bit set" 378 | Suid, 379 | 380 | #[text = ["sgid"], data_type = "boolean"] 381 | @for_archived = true 382 | @weight = 1 383 | @description = "Returns a boolean signifying whether the file permissions have a SGID bit set" 384 | Sgid, 385 | 386 | #[text = 
["is_hidden"], data_type = "boolean"] 387 | @for_archived = true 388 | @weight = 1 389 | @description = "Returns a boolean signifying whether the file is a hidden file (e.g., files that start with a dot on *nix)" 390 | IsHidden, 391 | 392 | #[text = ["has_xattrs"], data_type = "boolean"] 393 | @weight = 2 394 | @description = "Returns a boolean signifying whether the file has extended attributes" 395 | HasXattrs, 396 | 397 | #[text = ["capabilities", "caps"]] 398 | @weight = 2 399 | @description = "Returns a string describing Linux capabilities assigned to a file" 400 | Capabilities, 401 | 402 | #[text = ["is_shebang"], data_type = "boolean"] 403 | @weight = 2 404 | @description = "Returns a boolean signifying whether the file starts with a shebang (#!)" 405 | IsShebang, 406 | 407 | #[text = ["is_empty"], data_type = "boolean"] 408 | @for_archived = true 409 | @weight = 2 410 | @description = "Returns a boolean signifying whether the file is empty or the directory is empty" 411 | IsEmpty, 412 | 413 | #[text = ["width"], data_type = "numeric"] 414 | @weight = 16 415 | @description = "Returns the number of pixels along the width of the photo or MP4 file" 416 | Width, 417 | 418 | #[text = ["height"], data_type = "numeric"] 419 | @weight = 16 420 | @description = "Returns the number of pixels along the height of the photo or MP4 file" 421 | Height, 422 | 423 | #[text = ["duration"], data_type = "numeric"] 424 | @weight = 16 425 | @description = "Returns the duration of audio file in seconds" 426 | Duration, 427 | 428 | #[text = ["mp3_bitrate", "bitrate"], data_type = "numeric"] 429 | @weight = 16 430 | @description = "Returns the bitrate of the audio file in kbps" 431 | Bitrate, 432 | 433 | #[text = ["mp3_freq", "freq"], data_type = "numeric"] 434 | @weight = 16 435 | @description = "Returns the sampling rate of audio or video file" 436 | Freq, 437 | 438 | #[text = ["mp3_title", "title"]] 439 | @weight = 16 440 | @description = "Returns the title of the audio file 
taken from the file's metadata" 441 | Title, 442 | 443 | #[text = ["mp3_artist", "artist"]] 444 | @weight = 16 445 | @description = "Returns the artist of the audio file taken from the file's metadata" 446 | Artist, 447 | 448 | #[text = ["mp3_album", "album"]] 449 | @weight = 16 450 | @description = "Returns the album name of the audio file taken from the file's metadata" 451 | Album, 452 | 453 | #[text = ["mp3_year"], data_type = "numeric"] 454 | @weight = 16 455 | @description = "Returns the year of the audio file taken from the file's metadata" 456 | Year, 457 | 458 | #[text = ["mp3_genre", "genre"]] 459 | @weight = 16 460 | @description = "Returns the genre of the audio file taken from the file's metadata" 461 | Genre, 462 | 463 | #[text = ["exif_datetime"], data_type = "datetime"] 464 | @weight = 16 465 | @description = "Returns date and time of taken photo" 466 | ExifDateTime, 467 | 468 | #[text = ["exif_altitude", "exif_alt"], data_type = "numeric"] 469 | @weight = 16 470 | @description = "Returns GPS altitude of taken photo" 471 | ExifGpsAltitude, 472 | 473 | #[text = ["exif_latitude", "exif_lat"], data_type = "numeric"] 474 | @weight = 16 475 | @description = "Returns GPS latitude of taken photo" 476 | ExifGpsLatitude, 477 | 478 | #[text = ["exif_longitude", "exif_lon", "exif_lng"], data_type = "numeric"] 479 | @weight = 16 480 | @description = "Returns GPS longitude of taken photo" 481 | ExifGpsLongitude, 482 | 483 | #[text = ["exif_make"]] 484 | @weight = 16 485 | @description = "Returns name of the camera manufacturer" 486 | ExifMake, 487 | 488 | #[text = ["exif_model"]] 489 | @weight = 16 490 | @description = "Returns camera model" 491 | ExifModel, 492 | 493 | #[text = ["exif_software"]] 494 | @weight = 16 495 | @description = "Returns software name with which the photo was taken" 496 | ExifSoftware, 497 | 498 | #[text = ["exif_version"]] 499 | @weight = 16 500 | @description = "Returns the version of EXIF metadata" 501 | ExifVersion, 502 | 503 | 
#[text = ["exif_exposure_time", "exif_exptime"], data_type = "numeric"] 504 | @weight = 16 505 | @description = "Returns exposure time of the photo taken" 506 | ExifExposureTime, 507 | 508 | #[text = ["exif_aperture"], data_type = "numeric"] 509 | @weight = 16 510 | @description = "Returns aperture value of the photo taken" 511 | ExifAperture, 512 | 513 | #[text = ["exif_shutter_speed"], data_type = "numeric"] 514 | @weight = 16 515 | @description = "Returns shutter speed of the photo taken" 516 | ExifShutterSpeed, 517 | 518 | #[text = ["exif_f_number", "exif_f_num"], data_type = "numeric"] 519 | @weight = 16 520 | @description = "Returns F-number of the photo taken" 521 | ExifFNumber, 522 | 523 | #[text = ["exif_iso_speed", "exif_iso"]] 524 | @weight = 16 525 | @description = "Returns ISO speed of the photo taken" 526 | ExifIsoSpeed, 527 | 528 | #[text = ["exif_focal_length", "exif_focal_len"], data_type = "numeric"] 529 | @weight = 16 530 | @description = "Returns focal length of the photo taken" 531 | ExifFocalLength, 532 | 533 | #[text = ["exif_lens_make"]] 534 | @weight = 16 535 | @description = "Returns lens manufacturer used to take the photo" 536 | ExifLensMake, 537 | 538 | #[text = ["exif_lens_model"]] 539 | @weight = 16 540 | @description = "Returns lens model used to take the photo" 541 | ExifLensModel, 542 | 543 | #[text = ["mime"]] 544 | @weight = 16 545 | @description = "Returns MIME type of the file" 546 | Mime, 547 | 548 | #[text = ["line_count"], data_type = "numeric"] 549 | @weight = 1024 550 | @description = "Returns a number of lines in a text file" 551 | LineCount, 552 | 553 | #[text = ["is_binary"], data_type = "boolean"] 554 | @weight = 16 555 | @description = "Returns a boolean signifying whether the file has binary contents" 556 | IsBinary, 557 | 558 | #[text = ["is_text"], data_type = "boolean"] 559 | @weight = 16 560 | @description = "Returns a boolean signifying whether the file has text contents" 561 | IsText, 562 | 563 | #[text = 
["is_archive"], data_type = "boolean"] 564 | @for_archived = true 565 | @description = "Returns a boolean signifying whether the file is an archival file" 566 | IsArchive, 567 | 568 | #[text = ["is_audio"], data_type = "boolean"] 569 | @for_archived = true 570 | @description = "Returns a boolean signifying whether the file is an audio file" 571 | IsAudio, 572 | 573 | #[text = ["is_book"], data_type = "boolean"] 574 | @for_archived = true 575 | @description = "Returns a boolean signifying whether the file is a book" 576 | IsBook, 577 | 578 | #[text = ["is_doc"], data_type = "boolean"] 579 | @for_archived = true 580 | @description = "Returns a boolean signifying whether the file is a document" 581 | IsDoc, 582 | 583 | #[text = ["is_font"], data_type = "boolean"] 584 | @for_archived = true 585 | @description = "Returns a boolean signifying whether the file is a font" 586 | IsFont, 587 | 588 | #[text = ["is_image"], data_type = "boolean"] 589 | @for_archived = true 590 | @description = "Returns a boolean signifying whether the file is an image" 591 | IsImage, 592 | 593 | #[text = ["is_source"], data_type = "boolean"] 594 | @for_archived = true 595 | @description = "Returns a boolean signifying whether the file is source code" 596 | IsSource, 597 | 598 | #[text = ["is_video"], data_type = "boolean"] 599 | @for_archived = true 600 | @description = "Returns a boolean signifying whether the file is a video file" 601 | IsVideo, 602 | 603 | #[text = ["sha1"]] 604 | @weight = 1024 605 | @description = "Returns SHA-1 digest of a file" 606 | Sha1, 607 | 608 | #[text = ["sha2_256", "sha256"]] 609 | @weight = 1024 610 | @description = "Returns SHA2-256 digest of a file" 611 | Sha256, 612 | 613 | #[text = ["sha2_512", "sha512"]] 614 | @weight = 1024 615 | @description = "Returns SHA2-512 digest of a file" 616 | Sha512, 617 | 618 | #[text = ["sha3_512", "sha3"]] 619 | @weight = 1024 620 | @description = "Returns SHA-3 digest of a file" 621 | Sha3, 622 | } 623 | } 624 | 625 | 
#[cfg(test)]
mod tests {
    use super::*;

    // Note: boolean results are checked with `assert!`/`assert!(!…)` rather
    // than `assert_eq!(…, true/false)` (clippy::bool_assert_comparison).

    #[test]
    fn test_colorized() {
        // Only fields that benefit from coloring (e.g. file names) are colorized.
        assert!(Field::Name.is_colorized_field());
        assert!(!Field::Size.is_colorized_field());
    }

    #[test]
    fn test_is_numeric_field() {
        assert!(Field::Size.is_numeric_field());
        assert!(!Field::Name.is_numeric_field());
    }

    #[test]
    fn test_is_datetime_field() {
        assert!(Field::Created.is_datetime_field());
        assert!(!Field::Name.is_datetime_field());
    }

    #[test]
    fn test_is_boolean_field() {
        assert!(Field::IsDir.is_boolean_field());
        assert!(!Field::Name.is_boolean_field());
    }

    #[test]
    fn test_is_available_for_archived_files() {
        assert!(Field::Name.is_available_for_archived_files());
        assert!(!Field::LineCount.is_available_for_archived_files());
    }

    #[test]
    fn test_weight() {
        // Weight reflects how expensive a field is to compute; Name is free.
        assert_eq!(Field::Name.get_weight(), 0);
        assert_eq!(Field::Size.get_weight(), 1);
    }
}
size: zipped_file.size(), 17 | mode: zipped_file.unix_mode(), 18 | modified: zipped_file.last_modified(), 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/ignore/docker.rs: -------------------------------------------------------------------------------- 1 | //! Handles .dockerignore parsing 2 | 3 | use std::fs::File; 4 | use std::io::BufRead; 5 | use std::io::BufReader; 6 | use std::ops::Add; 7 | use std::ops::Index; 8 | use std::path::Path; 9 | use std::sync::LazyLock; 10 | 11 | use regex::Captures; 12 | use regex::Error; 13 | use regex::Regex; 14 | 15 | use crate::util::error_exit; 16 | 17 | #[derive(Clone, Debug)] 18 | pub struct DockerignoreFilter { 19 | pub regex: Regex, 20 | pub negate: bool, 21 | } 22 | 23 | impl DockerignoreFilter { 24 | fn new(regex: Regex, negate: bool) -> DockerignoreFilter { 25 | DockerignoreFilter { regex, negate } 26 | } 27 | } 28 | 29 | pub fn search_upstream_dockerignore( 30 | dockerignore_filters: &mut Vec, 31 | dir: &Path, 32 | ) { 33 | if let Ok(canonical_path) = crate::util::canonical_path(&dir.to_path_buf()) { 34 | let mut path = std::path::PathBuf::from(canonical_path); 35 | 36 | loop { 37 | let dockerignore_file = path.join(".dockerignore"); 38 | 39 | if dockerignore_file.is_file() { 40 | update_dockerignore_filters(dockerignore_filters, &mut path); 41 | return; 42 | } 43 | 44 | let parent_found = path.pop(); 45 | 46 | if !parent_found { 47 | return; 48 | } 49 | } 50 | } 51 | } 52 | 53 | fn update_dockerignore_filters(dockerignore_filters: &mut Vec, path: &Path) { 54 | let dockerignore_file = path.join(".dockerignore"); 55 | if dockerignore_file.is_file() { 56 | let regexes = parse_dockerignore(&dockerignore_file, &path); 57 | match regexes { 58 | Ok(ref regexes) => { 59 | dockerignore_filters.append(&mut regexes.clone()); 60 | } 61 | Err(err) => { 62 | eprintln!("{}: {}", path.to_string_lossy(), err); 63 | } 64 | } 65 | } 66 | } 67 | 68 | pub fn 
matches_dockerignore_filter( 69 | dockerignore_filters: &Vec, 70 | file_name: &str, 71 | ) -> bool { 72 | let mut matched = false; 73 | 74 | let file_name = file_name.to_string().replace("\\", "/").replace("//", "/"); 75 | 76 | for dockerignore_filter in dockerignore_filters { 77 | let is_match = dockerignore_filter.regex.is_match(&file_name); 78 | 79 | if is_match && dockerignore_filter.negate { 80 | return false; 81 | } 82 | 83 | if is_match { 84 | matched = true; 85 | } 86 | } 87 | 88 | matched 89 | } 90 | 91 | fn parse_dockerignore( 92 | file_path: &Path, 93 | dir_path: &Path, 94 | ) -> Result, String> { 95 | let mut result = vec![]; 96 | let mut err = String::new(); 97 | 98 | if let Ok(file) = File::open(file_path) { 99 | let reader = BufReader::new(file); 100 | reader 101 | .lines() 102 | .filter(|line| match line { 103 | Ok(line) => !line.trim().is_empty() && !line.starts_with("#"), 104 | _ => false, 105 | }) 106 | .for_each(|line| { 107 | if err.is_empty() { 108 | if let Ok(line) = line { 109 | let pattern = convert_dockerignore_pattern(&line, dir_path); 110 | match pattern { 111 | Ok(pattern) => result.push(pattern), 112 | Err(parse_err) => err = parse_err, 113 | } 114 | } 115 | } 116 | }); 117 | }; 118 | 119 | match err.is_empty() { 120 | true => Ok(result), 121 | false => Err(err), 122 | } 123 | } 124 | 125 | fn convert_dockerignore_pattern( 126 | pattern: &str, 127 | file_path: &Path, 128 | ) -> Result { 129 | let mut pattern = String::from(pattern); 130 | 131 | let mut negate = false; 132 | if pattern.starts_with("!") { 133 | pattern = pattern.replace("!", ""); 134 | negate = true; 135 | } 136 | 137 | match convert_dockerignore_glob(&pattern, file_path) { 138 | Ok(regex) => Ok(DockerignoreFilter::new(regex, negate)), 139 | _ => Err("Error creating regex while parsing .dockerignore glob: " 140 | .to_string() 141 | .add(&pattern)), 142 | } 143 | } 144 | 145 | static DOCKER_CONVERT_REPLACE_REGEX: LazyLock = LazyLock::new(|| { 146 | 
Regex::new("(\\*\\*|\\?|\\.|\\*)").unwrap() 147 | }); 148 | 149 | fn convert_dockerignore_glob(glob: &str, file_path: &Path) -> Result { 150 | let mut pattern = DOCKER_CONVERT_REPLACE_REGEX 151 | .replace_all(glob, |c: &Captures| { 152 | match c.index(0) { 153 | "**" => ".*", 154 | "." => "\\.", 155 | "*" => "[^/]*", 156 | "?" => "[^/]", 157 | _ => error_exit(".dockerignore", "Error parsing pattern"), 158 | } 159 | .to_string() 160 | }) 161 | .to_string(); 162 | 163 | while pattern.starts_with("/") || pattern.starts_with("\\") { 164 | pattern.remove(0); 165 | } 166 | 167 | #[cfg(windows)] 168 | let path = file_path 169 | .to_string_lossy() 170 | .to_string() 171 | .replace("\\", "/") 172 | .replace("//", "/"); 173 | 174 | #[cfg(not(windows))] 175 | let path = file_path.to_string_lossy().to_string(); 176 | 177 | pattern = path.replace("\\", "\\\\").add("/([^/]+/)*").add(&pattern); 178 | 179 | Regex::new(&pattern) 180 | } 181 | -------------------------------------------------------------------------------- /src/ignore/hg.rs: -------------------------------------------------------------------------------- 1 | //! 
Handles .hgignore parsing (Mercurial) 2 | 3 | use std::fs::File; 4 | use std::io::BufRead; 5 | use std::io::BufReader; 6 | use std::ops::Add; 7 | use std::ops::Index; 8 | use std::path::Path; 9 | use std::sync::LazyLock; 10 | 11 | use regex::Captures; 12 | use regex::Error; 13 | use regex::Regex; 14 | 15 | use crate::util::error_exit; 16 | 17 | #[derive(Clone, Debug)] 18 | pub struct HgignoreFilter { 19 | pub regex: Regex, 20 | } 21 | 22 | impl HgignoreFilter { 23 | fn new(regex: Regex) -> HgignoreFilter { 24 | HgignoreFilter { regex } 25 | } 26 | } 27 | 28 | pub fn search_upstream_hgignore(hgignore_filters: &mut Vec, dir: &Path) { 29 | if let Ok(canonical_path) = crate::util::canonical_path(&dir.to_path_buf()) { 30 | let mut path = std::path::PathBuf::from(canonical_path); 31 | 32 | loop { 33 | let hgignore_file = path.join(".hgignore"); 34 | let hg_directory = path.join(".hg"); 35 | 36 | if hgignore_file.is_file() && hg_directory.is_dir() { 37 | update_hgignore_filters(hgignore_filters, &mut path); 38 | return; 39 | } 40 | 41 | let parent_found = path.pop(); 42 | 43 | if !parent_found { 44 | return; 45 | } 46 | } 47 | } 48 | } 49 | 50 | fn update_hgignore_filters(hgignore_filters: &mut Vec, path: &Path) { 51 | let hgignore_file = path.join(".hgignore"); 52 | if hgignore_file.is_file() { 53 | let mut regexes = parse_hgignore(&hgignore_file, &path); 54 | match regexes { 55 | Ok(ref mut regexes) => { 56 | hgignore_filters.append(regexes); 57 | } 58 | Err(err) => { 59 | eprintln!("{}: {}", path.to_string_lossy(), err); 60 | } 61 | } 62 | } 63 | } 64 | 65 | pub fn matches_hgignore_filter(hgignore_filters: &Vec, file_name: &str) -> bool { 66 | let mut matched = false; 67 | 68 | for hgignore_filter in hgignore_filters { 69 | let is_match = hgignore_filter.regex.is_match(file_name); 70 | 71 | if is_match { 72 | matched = true; 73 | } 74 | } 75 | 76 | matched 77 | } 78 | 79 | enum Syntax { 80 | Regexp, 81 | Glob, 82 | } 83 | 84 | impl Syntax { 85 | fn from(s: &str) -> 
/// Parses an `.hgignore` file into a list of compiled filters.
///
/// * `file_path` — the ignore file itself.
/// * `dir_path`  — the directory the resulting patterns are anchored to.
///
/// Blank lines and lines starting with `#` are skipped. Two inline
/// directives are handled: `syntax: regexp|glob` switches the pattern
/// syntax for all subsequent lines (default is `regexp`), and
/// `subinclude: <file>` recursively pulls in another ignore file.
///
/// Returns `Err` with the first parse error encountered; once an error is
/// recorded, the remaining lines are read but ignored.
///
/// NOTE(review): the `subinclude:` path is used verbatim — presumably a
/// relative path should be resolved against the ignore file's directory;
/// confirm against Mercurial's documented behavior.
fn parse_hgignore(file_path: &Path, dir_path: &Path) -> Result<Vec<HgignoreFilter>, String> {
    let mut result = vec![];
    // First parse error, if any; empty string means "no error yet".
    let mut err = String::new();

    // An unreadable ignore file is silently treated as empty.
    if let Ok(file) = File::open(file_path) {
        // Mercurial defaults to regexp syntax until a directive says otherwise.
        let mut syntax = Syntax::Regexp;

        let reader = BufReader::new(file);
        reader
            .lines()
            .filter(|line| match line {
                Ok(line) => !line.trim().is_empty() && !line.starts_with("#"),
                _ => false,
            })
            .for_each(|line| {
                // Stop processing further lines after the first error.
                if err.is_empty() {
                    match line {
                        Ok(line) => {
                            if line.starts_with("syntax:") {
                                // Switch pattern syntax for subsequent lines.
                                let line = line.replace("syntax:", "");
                                let syntax_directive = line.trim();
                                match Syntax::from(syntax_directive) {
                                    Ok(parsed_syntax) => syntax = parsed_syntax,
                                    Err(parse_err) => err = parse_err,
                                }
                            } else if line.starts_with("subinclude:") {
                                // Recursively merge filters from another file.
                                let include = line.replace("subinclude:", "");
                                let mut parse_result =
                                    parse_hgignore(&Path::new(&include), dir_path);
                                match parse_result {
                                    Ok(ref mut filters) => {
                                        result.append(filters);
                                    }
                                    Err(parse_err) => {
                                        err = parse_err;
                                    }
                                };
                            } else {
                                // Ordinary pattern line in the current syntax.
                                let pattern = convert_hgignore_pattern(&line, dir_path, &syntax);
                                match pattern {
                                    Ok(pattern) => result.push(pattern),
                                    Err(parse_err) => err = parse_err,
                                }
                            }
                        }
                        _ => {}
                    }
                }
            });
    };

    match err.is_empty() {
        true => Ok(result),
        false => Err(err),
    }
}
/// Translates an `.hgignore` glob pattern into a regular expression anchored
/// at `file_path` (the directory the ignore file governs).
///
/// The two `cfg` branches are identical except for the path separator used
/// in the "any depth below the root" prefix (`/` on Unix, `\\` on Windows,
/// escaped for the regex engine).
///
/// NOTE(review): `HG_CONVERT_REPLACE_REGEX` only matches `**`, `?`, `.` and
/// `*`, so the arms for `[`, `]`, `(`, `)`, `^` and `$` below can never
/// fire and those metacharacters pass through unescaped — confirm whether
/// they should be added to the capture group.
/// NOTE(review): `?` is mapped to `[^/]+` (one or more characters); glob
/// `?` conventionally matches exactly one character — confirm intended.
fn convert_hgignore_glob(glob: &str, file_path: &Path) -> Result<Regex, Error> {
    #[cfg(not(windows))]
    {
        // Rewrite glob metacharacters into their regex equivalents.
        let mut pattern = HG_CONVERT_REPLACE_REGEX
            .replace_all(&glob, |c: &Captures| {
                match c.index(0) {
                    "**" => ".*",
                    "." => "\\.",
                    "*" => "[^/]*",
                    "?" => "[^/]+",
                    "[" => "\\[",
                    "]" => "\\]",
                    "(" => "\\(",
                    ")" => "\\)",
                    "^" => "\\^",
                    "$" => "\\$",
                    _ => error_exit(".hgignore", "Error parsing pattern"),
                }
                .to_string()
            })
            .to_string();

        // Anchor at the root directory and allow any number of intermediate
        // directory components before the pattern.
        pattern = file_path
            .to_string_lossy()
            .to_string()
            .replace("\\", "\\\\")
            .add("/([^/]+/)*")
            .add(&pattern);

        Regex::new(&pattern)
    }

    #[cfg(windows)]
    {
        // Same as the Unix branch, but with backslash separators (each
        // backslash doubled for the regex engine).
        let mut pattern = HG_CONVERT_REPLACE_REGEX
            .replace_all(&glob, |c: &Captures| {
                match c.index(0) {
                    "**" => ".*",
                    "." => "\\.",
                    "*" => "[^\\\\]*",
                    "?" => "[^\\\\]+",
                    "[" => "\\[",
                    "]" => "\\]",
                    "(" => "\\(",
                    ")" => "\\)",
                    "^" => "\\^",
                    "$" => "\\$",
                    _ => error_exit(".hgignore", "Error parsing pattern"),
                }
                .to_string()
            })
            .to_string();

        pattern = file_path
            .to_string_lossy()
            .to_string()
            .replace("\\", "\\\\")
            .add("\\\\([^\\\\]+\\\\)*")
            .add(&pattern);

        Regex::new(&pattern)
    }
}
Handles the command line arguments parsing 3 | 4 | #[macro_use] 5 | extern crate serde_derive; 6 | #[cfg(all(unix, feature = "users"))] 7 | extern crate uzers; 8 | #[cfg(unix)] 9 | extern crate xattr; 10 | 11 | use std::env; 12 | use std::io::{stdout, IsTerminal}; 13 | use std::path::PathBuf; 14 | use std::process::ExitCode; 15 | #[cfg(feature = "update-notifications")] 16 | use std::time::Duration; 17 | 18 | use nu_ansi_term::Color::*; 19 | use rustyline::error::ReadlineError; 20 | use rustyline::DefaultEditor; 21 | #[cfg(feature = "update-notifications")] 22 | use update_informer::{registry, Check}; 23 | 24 | use crate::config::Config; 25 | use crate::field::Field; 26 | use crate::parser::Parser; 27 | use crate::searcher::Searcher; 28 | use crate::util::{error_exit, error_message}; 29 | use crate::util::str_to_bool; 30 | 31 | mod config; 32 | mod expr; 33 | mod field; 34 | mod fileinfo; 35 | mod function; 36 | mod ignore; 37 | mod lexer; 38 | mod mode; 39 | mod operators; 40 | mod output; 41 | mod parser; 42 | mod query; 43 | mod searcher; 44 | mod util; 45 | 46 | fn main() -> ExitCode { 47 | let default_config = Config::default(); 48 | 49 | let mut config = match Config::new() { 50 | Ok(cnf) => cnf, 51 | Err(err) => { 52 | eprintln!("{}", err); 53 | default_config.clone() 54 | } 55 | }; 56 | 57 | let env_var_value = std::env::var("NO_COLOR").ok().unwrap_or_default(); 58 | let env_no_color = str_to_bool(&env_var_value).unwrap_or(false); 59 | let mut no_color = env_no_color || config.no_color.unwrap_or(false); 60 | 61 | #[cfg(windows)] 62 | { 63 | if !no_color { 64 | let res = nu_ansi_term::enable_ansi_support(); 65 | let win_init_ok = match res { 66 | Ok(()) => true, 67 | Err(203) => true, 68 | _ => false, 69 | }; 70 | no_color = !win_init_ok; 71 | } 72 | } 73 | 74 | if env::args().len() == 1 { 75 | short_usage_info(no_color); 76 | help_hint(); 77 | return ExitCode::SUCCESS; 78 | } 79 | 80 | let mut args: Vec = env::args().collect(); 81 | args.remove(0); 82 | 83 
| let mut first_arg = args[0].to_ascii_lowercase(); 84 | 85 | if first_arg.contains("version") || first_arg.starts_with("-v") { 86 | short_usage_info(no_color); 87 | return ExitCode::SUCCESS; 88 | } 89 | 90 | if first_arg.contains("help") 91 | || first_arg.starts_with("-h") 92 | || first_arg.starts_with("/?") 93 | || first_arg.starts_with("/h") 94 | { 95 | usage_info(config, default_config, no_color); 96 | return ExitCode::SUCCESS; 97 | } 98 | 99 | let mut interactive = false; 100 | 101 | loop { 102 | if first_arg.contains("nocolor") || first_arg.contains("no-color") { 103 | no_color = true; 104 | } else if first_arg.starts_with("-i") 105 | || first_arg.starts_with("--i") 106 | || first_arg.starts_with("/i") 107 | { 108 | interactive = true; 109 | } else if first_arg.starts_with("-c") 110 | || first_arg.starts_with("--config") 111 | || first_arg.starts_with("/c") 112 | { 113 | let config_path = args[1].to_ascii_lowercase(); 114 | config = match Config::from(PathBuf::from(&config_path)) { 115 | Ok(cnf) => cnf, 116 | Err(err) => { 117 | eprintln!("{}", err); 118 | default_config.clone() 119 | } 120 | }; 121 | 122 | args.remove(0); 123 | } else { 124 | break; 125 | } 126 | 127 | args.remove(0); 128 | 129 | if args.is_empty() { 130 | if !interactive { 131 | short_usage_info(no_color); 132 | help_hint(); 133 | return ExitCode::SUCCESS; 134 | } else { 135 | break; 136 | } 137 | } 138 | 139 | first_arg = args[0].to_ascii_lowercase(); 140 | } 141 | 142 | let mut exit_value = None::; 143 | 144 | if interactive { 145 | match DefaultEditor::new() { 146 | Ok(mut rl) => loop { 147 | let readline = rl.readline("query> "); 148 | match readline { 149 | Ok(cmd) 150 | if cmd.to_ascii_lowercase().trim() == "quit" 151 | || cmd.to_ascii_lowercase().trim() == "exit" => 152 | { 153 | break 154 | } 155 | Ok(query) => { 156 | let _ = rl.add_history_entry(query.as_str()); 157 | exec_search(vec![query], &mut config, &default_config, no_color); 158 | } 159 | Err(ReadlineError::Interrupted) => 
{
                println!("CTRL-C");
                break;
            }
            Err(ReadlineError::Eof) => {
                println!("CTRL-D");
                break;
            }
            Err(err) => {
                let err = format!("{:?}", err);
                error_message("input", &err);
                break;
            }
        }
    },
    _ => {
        error_message("editor", "couldn't open line editor");
        exit_value = Some(2);
    }
    }
    } else {
        // Non-interactive mode: run the query passed on the command line once.
        exit_value = Some(exec_search(args, &mut config, &default_config, no_color));
    }

    config.save();

    #[cfg(feature = "update-notifications")]
    if config.check_for_updates.unwrap_or(false) && stdout().is_terminal() {
        let name = env!("CARGO_PKG_NAME");
        let version = env!("CARGO_PKG_VERSION");
        let informer = update_informer::new(registry::Crates, name, version)
            .interval(Duration::from_secs(60 * 60 * 24));

        if let Some(version) = informer.check_version().ok().flatten() {
            println!("\nNew version is available! : {}", version);
        }
    }

    if let Some(exit_value) = exit_value {
        return ExitCode::from(exit_value);
    }

    ExitCode::SUCCESS
}

/// Parses and executes a single query.
///
/// Returns the process exit code: 0 on success, 1 when the search finished
/// but individual files produced errors, 2 when the query failed to parse.
fn exec_search(query: Vec<String>, config: &mut Config, default_config: &Config, no_color: bool) -> u8 {
    if config.debug {
        dbg!(&query);
    }

    let mut parser = Parser::new();
    let query = parser.parse(query, config.debug);

    if config.debug {
        dbg!(&query);
    }

    // Leftover tokens mean the query was only partially understood — bail out.
    if parser.there_are_remaining_lexemes() {
        error_exit("query", "could not parse tokens at the end of the query");
    }

    match query {
        Ok(query) => {
            // Colorize only when allowed by flags and stdout is a real terminal.
            let is_terminal = stdout().is_terminal();
            let use_colors = !no_color && is_terminal;

            let mut searcher = Searcher::new(&query, config, default_config, use_colors);
            searcher.list_search_results().unwrap();

            match searcher.error_count {
                0 => 0,
                _ => 1,
            }
        }
        Err(err) => {
            error_message("query", &err);
            2
        }
    }
}

/// Prints the banner line (name + version), tagline, project URL
/// and the one-line usage synopsis.
fn short_usage_info(no_color: bool) {
    const VERSION: &str = env!("CARGO_PKG_VERSION");

    print!("fselect ");

    match no_color {
        true => println!("{}", VERSION),
        false => println!("{}", Yellow.paint(VERSION)),
    }

    println!("Find files with SQL-like queries.");

    match no_color {
        true => println!("https://github.com/jhspetersson/fselect"),
        false => println!(
            "{}",
            Cyan.underline()
                .paint("https://github.com/jhspetersson/fselect")
        ),
    }

    println!();
    println!("Usage: fselect [ARGS] COLUMN[, COLUMN...] [from PATH[, PATH...]] [where EXPR] [group by COLUMN, ...] [order by COLUMN (asc|desc), ...] [limit N] [into FORMAT]");
}

/// Points the user at the full documentation.
fn help_hint() {
    println!(
        "
For more detailed instructions please refer to the URL above or run fselect --help"
    );
}

/// Prints the full usage text: detected file categories, path options,
/// column/function reference, operators and output formats.
fn usage_info(config: Config, default_config: Config, no_color: bool) {
    short_usage_info(no_color);

    // Each category falls back to the built-in defaults when the user's
    // config does not override it; defaults are always present, hence unwrap().
    let is_archive = config
        .is_archive
        .unwrap_or(default_config.is_archive.unwrap())
        .join(", ");
    let is_audio = config
        .is_audio
        .unwrap_or(default_config.is_audio.unwrap())
        .join(", ");
    let is_book = config
        .is_book
        .unwrap_or(default_config.is_book.unwrap())
        .join(", ");
    let is_doc = config
        .is_doc
        .unwrap_or(default_config.is_doc.unwrap())
        .join(", ");
    let is_font = config
        .is_font
        .unwrap_or(default_config.is_font.unwrap())
        .join(", ");
    let is_image = config
        .is_image
        .unwrap_or(default_config.is_image.unwrap())
        .join(", ");
    let is_source = config
        .is_source
        .unwrap_or(default_config.is_source.unwrap())
        .join(", ");
    let is_video = config
        .is_video
.unwrap_or(default_config.is_video.unwrap()) 309 | .join(", "); 310 | 311 | println!(" 312 | Files Detected as Archives: {is_archive} 313 | Files Detected as Audio: {is_audio} 314 | Files Detected as Book: {is_book} 315 | Files Detected as Document: {is_doc} 316 | Files Detected as Fonts: {is_font} 317 | Files Detected as Image: {is_image} 318 | Files Detected as Source Code: {is_source} 319 | Files Detected as Video: {is_video} 320 | 321 | Path Options: 322 | mindepth N Minimum search depth. Default is unlimited. Depth 1 means skip one directory level and search further. 323 | maxdepth N | depth N Maximum search depth. Default is unlimited. Depth 1 means search the mentioned directory only. Depth 2 means search mentioned directory and its subdirectories. 324 | symlinks | sym If specified, search process will follow symlinks. Default is not to follow. 325 | hardlinks | hard If specified, search process will track hardlinks. Default is not to track. 326 | archives | arc Search within archives. Only zip archives are supported. Default is not to include archived content into the search results. 327 | gitignore | git Search respects .gitignore files found. 328 | hgignore | hg Search respects .hgignore files found. 329 | dockerignore | docker Search respects .dockerignore files found. 330 | nogitignore | nogit Disable .gitignore parsing during the search. 331 | nohgignore | nohg Disable .hgignore parsing during the search. 332 | nodockerignore | nodocker Disable .dockerignore parsing during the search. 333 | dfs Depth-first search mode. 334 | bfs Breadth-first search mode. This is the default. 335 | regexp | rx Use regular expressions to search within multiple roots. 
336 | 337 | Regex syntax: 338 | {} 339 | 340 | Column Options: 341 | {} 342 | 343 | Functions: 344 | Aggregate: 345 | AVG Returns average of all values 346 | COUNT Returns number of all values 347 | MAX Returns maximum value 348 | MIN Returns minimum value 349 | SUM Returns sum of all values 350 | STDDEV_POP | STDDEV | STD Population standard deviation, the square root of variance 351 | STDDEV_SAMP Sample standard deviation, the square root of sample variance 352 | VAR_POP | VARIANCE Population variance 353 | VAR_SAMP Sample variance 354 | Date: 355 | CURRENT_DATE | CUR_DATE | 356 | CURDATE Returns current date 357 | DAY Returns day of the month 358 | MONTH Returns month of the year 359 | YEAR Returns year of the date 360 | DOW | DAYOFWEEK Returns day of the week (1 - Sunday, 2 - Monday, etc.) 361 | User: 362 | CURRENT_USER Returns the current username (unix-only) 363 | CURRENT_UID Returns the current real UID (unix-only) 364 | CURRENT_GROUP Returns the current primary groupname (unix-only) 365 | CURRENT_GID Returns the current primary GID (unix-only) 366 | Xattr: 367 | HAS_XATTR Used to check if xattr exists (unix-only) 368 | XATTR Returns value of xattr (unix-only) 369 | HAS_CAPABILITIES | HAS_CAPS Check if any Linux capability exists for the file 370 | HAS_CAPABILITY or HAS_CAP Check if given Linux capability exists for the file 371 | String: 372 | LENGTH | LEN Returns length of string value 373 | LOWER | LOWERCASE | LCASE Returns lowercase value 374 | UPPER | UPPERCASE | UCASE Returns uppercase value 375 | INITCAP Returns first letter of each word uppercase, all other letters lowercase 376 | TO_BASE64 | BASE64 Returns Base64 digest of a value 377 | FROM_BASE64 Returns decoded value from a Base64 digest 378 | LOCATE | POSITION Returns position of the substring in the string 379 | SUBSTRING | SUBSTR Returns part of the string value 380 | REPLACE Returns string with substring replaced with another one 381 | TRIM Returns string with whitespaces at the beginning and 
the end stripped 382 | LTRIM Returns string with whitespaces at the beginning stripped 383 | RTRIM Returns string with whitespaces at the end stripped 384 | Japanese string: 385 | CONTAINS_JAPANESE Used to check if string value contains Japanese symbols 386 | CONTAINS_KANA Used to check if string value contains kana symbols 387 | CONTAINS_HIRAGANA Used to check if string value contains hiragana symbols 388 | CONTAINS_KATAKANA Used to check if string value contains katakana symbols 389 | CONTAINS_KANJI Used to check if string value contains kanji symbols 390 | Other: 391 | BIN Returns binary representation of an integer value 392 | HEX Returns hexadecimal representation of an integer value 393 | OCT Returns octal representation of an integer value 394 | ABS Returns absolute value of the number 395 | POWER | POW Raise the value to the specified power 396 | SQRT Returns square root of the value 397 | LOG Returns logarithm of the value 398 | LN Returns natural logarithm of the value 399 | EXP Returns e raised to the power of the value 400 | LEAST Returns the smallest value 401 | GREATEST Returns the largest value 402 | CONTAINS Returns true, if file contains string, false if not 403 | COALESCE Returns first nonempty expression value 404 | CONCAT Returns concatenated string of expression values 405 | CONCAT_WS Returns concatenated string of expression values with specified delimiter 406 | FORMAT_SIZE Returns formatted size of a file 407 | FORMAT_TIME | PRETTY_TIME Returns human-readable durations of time in seconds 408 | RANDOM | RAND Returns random integer (from zero to max int, from zero to arg, or from arg1 to arg2) 409 | 410 | Expressions: 411 | Operators: 412 | = | == | eq Used to check for equality between the column field and value 413 | === | eeq Used to check for strict equality between column field and value irregardless of any special regex characters 414 | != | <> | ne Used to check for inequality between column field and value 415 | !== | ene Used to check 
for inequality between column field and value irregardless of any special regex characters 416 | < | lt Used to check whether the column value is less than the value 417 | <= | lte | le Used to check whether the column value is less than or equal to the value 418 | > | gt Used to check whether the column value is greater than the value 419 | >= | gte | ge Used to check whether the column value is greater than or equal to the value 420 | ~= | =~ | regexp | rx Used to check if the column value matches the regex pattern 421 | !=~ | !~= | notrx Used to check if the column value doesn't match the regex pattern 422 | like Used to check if the column value matches the pattern which follows SQL conventions 423 | notlike Used to check if the column value doesn't match the pattern which follows SQL conventions 424 | between Used to check if the column value lies between two values inclusive 425 | in Used to check if the column value is in the list of values 426 | Logical Operators: 427 | and Used as an AND operator for two conditions made with the above operators 428 | or Used as an OR operator for two conditions made with the above operators 429 | 430 | Format: 431 | tabs (default) Outputs each file with its column value(s) on a line with each column value delimited by a tab 432 | lines Outputs each column value on a new line 433 | list Outputs entire output onto a single line for xargs 434 | csv Outputs each file with its column value(s) on a line with each column value delimited by a comma 435 | json Outputs a JSON array with JSON objects holding the column value(s) of each file 436 | html Outputs HTML document with table 437 | ", Cyan.underline().paint("https://docs.rs/regex/1.10.2/regex/#syntax"), 438 | Field::get_names_and_descriptions().iter() 439 | .map(|(names, description)| names.join(" | ").to_string() + " ".repeat(if 32 > names.join(" | ").to_string().len() { 32 - names.join(" | ").to_string().len() } else { 1 }).as_str() + description) 440 | 
//         .collect::<Vec<_>>().join("\n    ")
//     );
// }
// (tail of src/main.rs::usage_info — fragment preserved as a comment)
// --------------------------------------------------------------------------------
// /src/mode.rs:
// --------------------------------------------------------------------------------
//! This module contains functions for working with file modes / permissions

use std::fs::Metadata;
#[cfg(unix)]
use std::os::unix::fs::MetadataExt;
#[cfg(windows)]
use std::os::windows::fs::MetadataExt;

/// Formats the mode / attributes of `meta` as a human-readable string:
/// an `ls -l`-style string on Unix, a comma-separated attribute list on Windows.
pub fn get_mode(meta: &Metadata) -> String {
    #[cfg(unix)]
    {
        format_mode(meta.mode())
    }

    #[cfg(windows)]
    {
        format_mode(meta.file_attributes())
    }
}

/// Formats a raw mode value (Unix `st_mode` or Windows file attributes bitmask).
pub fn format_mode(mode: u32) -> String {
    #[cfg(unix)]
    {
        get_mode_unix(mode)
    }

    #[cfg(windows)]
    {
        get_mode_windows(mode)
    }
}

/// Renders a Unix `st_mode` as the classic 10-character `ls -l` string,
/// e.g. `drwxr-xr-x` or `-rwsr-xr-x`.
#[cfg(unix)]
fn get_mode_unix(mode: u32) -> String {
    let mut s = String::new();

    // file type character
    if mode_is_link(mode) {
        s.push('l')
    } else if mode_is_block_device(mode) {
        s.push('b')
    } else if mode_is_char_device(mode) {
        s.push('c')
    } else if mode_is_socket(mode) {
        s.push('s')
    } else if mode_is_pipe(mode) {
        s.push('p')
    } else if mode_is_directory(mode) {
        s.push('d')
    } else {
        s.push('-')
    }

    // user

    if mode_user_read(mode) {
        s.push('r')
    } else {
        s.push('-')
    }

    if mode_user_write(mode) {
        s.push('w')
    } else {
        s.push('-')
    }

    // 's'/'S' encode the setuid bit combined with the exec bit, per ls(1)
    if mode_user_exec(mode) {
        if mode_suid(mode) {
            s.push('s')
        } else {
            s.push('x')
        }
    } else if mode_suid(mode) {
        s.push('S')
    } else {
        s.push('-')
    }

    // group

    if mode_group_read(mode) {
        s.push('r')
    } else {
        s.push('-')
    }

    if mode_group_write(mode) {
        s.push('w')
    } else {
        s.push('-')
    }

    if mode_group_exec(mode) {
        if mode_sgid(mode) {
            s.push('s')
        } else {
            s.push('x')
        }
    } else if mode_sgid(mode) {
        s.push('S')
    } else {
        s.push('-')
    }

    // other

    if mode_other_read(mode) {
        s.push('r')
    } else {
        s.push('-')
    }

    if mode_other_write(mode) {
        s.push('w')
    } else {
        s.push('-')
    }

    // 't'/'T' encode the sticky bit combined with the exec bit
    if mode_other_exec(mode) {
        if mode_sticky(mode) {
            s.push('t')
        } else {
            s.push('x')
        }
    } else if mode_sticky(mode) {
        s.push('T')
    } else {
        s.push('-')
    }

    s
}

/// Returns the raw Unix mode for `meta`, or `None` on platforms without one.
#[allow(unused)]
pub fn get_mode_from_boxed_unix_int(meta: &Metadata) -> Option<u32> {
    #[cfg(unix)]
    {
        Some(meta.mode())
    }

    #[cfg(not(unix))]
    {
        None
    }
}

/// True if the owner has read permission (always false on non-Unix).
pub fn user_read(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_user_read(mode),
        None => false,
    }
}

pub fn mode_user_read(mode: u32) -> bool {
    mode & S_IRUSR == S_IRUSR
}

/// True if the owner has write permission (always false on non-Unix).
pub fn user_write(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_user_write(mode),
        None => false,
    }
}

pub fn mode_user_write(mode: u32) -> bool {
    mode & S_IWUSR == S_IWUSR
}

/// True if the owner has execute permission (always false on non-Unix).
pub fn user_exec(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_user_exec(mode),
        None => false,
    }
}

pub fn mode_user_exec(mode: u32) -> bool {
    mode & S_IXUSR == S_IXUSR
}

/// True if the owner has read, write and execute permissions.
pub fn user_all(meta: &Metadata) -> bool {
    user_read(meta) && user_write(meta) && user_exec(meta)
}

pub fn mode_user_all(mode: u32) -> bool {
    mode_user_read(mode) && mode_user_write(mode) && mode_user_exec(mode)
}

/// True if the group has read permission (always false on non-Unix).
pub fn group_read(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_group_read(mode),
        None => false,
    }
}

pub fn mode_group_read(mode: u32) -> bool {
    mode & S_IRGRP == S_IRGRP
}

/// True if the group has write permission (always false on non-Unix).
pub fn group_write(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_group_write(mode),
        None => false,
    }
}

pub fn mode_group_write(mode: u32) -> bool {
    mode & S_IWGRP == S_IWGRP
}

/// True if the group has execute permission (always false on non-Unix).
pub fn group_exec(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_group_exec(mode),
        None => false,
    }
}

pub fn mode_group_exec(mode: u32) -> bool {
    mode & S_IXGRP == S_IXGRP
}

/// True if the group has read, write and execute permissions.
pub fn group_all(meta: &Metadata) -> bool {
    group_read(meta) && group_write(meta) && group_exec(meta)
}

pub fn mode_group_all(mode: u32) -> bool {
    mode_group_read(mode) && mode_group_write(mode) && mode_group_exec(mode)
}

/// True if others have read permission (always false on non-Unix).
pub fn other_read(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_other_read(mode),
        None => false,
    }
}

pub fn mode_other_read(mode: u32) -> bool {
    mode & S_IROTH == S_IROTH
}

/// True if others have write permission (always false on non-Unix).
pub fn other_write(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_other_write(mode),
        None => false,
    }
}

pub fn mode_other_write(mode: u32) -> bool {
    mode & S_IWOTH == S_IWOTH
}

/// True if others have execute permission (always false on non-Unix).
pub fn other_exec(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_other_exec(mode),
        None => false,
    }
}

pub fn mode_other_exec(mode: u32) -> bool {
    mode & S_IXOTH == S_IXOTH
}

/// True if others have read, write and execute permissions.
pub fn other_all(meta: &Metadata) -> bool {
    other_read(meta) && other_write(meta) && other_exec(meta)
}

pub fn mode_other_all(mode: u32) -> bool {
    mode_other_read(mode) && mode_other_write(mode) && mode_other_exec(mode)
}

/// True if the setuid bit is set (always false on non-Unix).
pub fn suid_bit_set(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_suid(mode),
        None => false,
    }
}

pub fn mode_suid(mode: u32) -> bool {
    mode & S_ISUID == S_ISUID
}

/// True if the setgid bit is set (always false on non-Unix).
pub fn sgid_bit_set(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_sgid(mode),
        None => false,
    }
}

pub fn mode_sgid(mode: u32) -> bool {
    mode & S_ISGID == S_ISGID
}

#[cfg(unix)]
pub fn mode_sticky(mode: u32) -> bool {
    mode & S_ISVTX == S_ISVTX
}

// NOTE: file-type checks below compare the whole S_IFMT field instead of
// testing individual bits. The old `mode & S_IFxxx == S_IFxxx` form was
// wrong for file types because the type values overlap bitwise: e.g.
// S_IFBLK (0o60000) contains the S_IFCHR (0o20000) and S_IFDIR (0o40000)
// bits, so a block device was also reported as a char device and a
// directory. Permission and suid/sgid/sticky bits are independent flags,
// so plain bit tests remain correct for them.

/// True if `meta` describes a FIFO / named pipe (always false on non-Unix).
pub fn is_pipe(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_is_pipe(mode),
        None => false,
    }
}

pub fn mode_is_pipe(mode: u32) -> bool {
    mode & S_IFMT == S_IFIFO
}

/// True if `meta` describes a character device (always false on non-Unix).
pub fn is_char_device(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_is_char_device(mode),
        None => false,
    }
}

pub fn mode_is_char_device(mode: u32) -> bool {
    mode & S_IFMT == S_IFCHR
}

/// True if `meta` describes a block device (always false on non-Unix).
pub fn is_block_device(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_is_block_device(mode),
        None => false,
    }
}

pub fn mode_is_block_device(mode: u32) -> bool {
    mode & S_IFMT == S_IFBLK
}

#[cfg(unix)]
pub fn mode_is_directory(mode: u32) -> bool {
    mode & S_IFMT == S_IFDIR
}

#[cfg(unix)]
pub fn mode_is_link(mode: u32) -> bool {
    mode & S_IFMT == S_IFLNK
}

/// True if `meta` describes a Unix socket (always false on non-Unix).
pub fn is_socket(meta: &Metadata) -> bool {
    match get_mode_from_boxed_unix_int(meta) {
        Some(mode) => mode_is_socket(mode),
        None => false,
    }
}

pub fn mode_is_socket(mode: u32) -> bool {
    mode & S_IFMT == S_IFSOCK
}

const S_IRUSR: u32 = 0o400;
const S_IWUSR: u32 = 0o200;
const S_IXUSR: u32 = 0o100;

const S_IRGRP: u32 = 0o40;
const S_IWGRP: u32 = 0o20;
const S_IXGRP: u32 = 0o10;

const S_IROTH: u32 = 0o4;
const S_IWOTH: u32 = 0o2;
const S_IXOTH: u32 = 0o1;

const S_ISUID: u32 = 0o4000;
const S_ISGID: u32 = 0o2000;
#[cfg(unix)]
const S_ISVTX: u32 = 0o1000;

/// Mask covering the file-type bits of a Unix `st_mode` value.
const S_IFMT: u32 = 0o170000;

const S_IFBLK: u32 = 0o60000;
#[cfg(unix)]
const S_IFDIR: u32 = 0o40000;
const S_IFCHR: u32 = 0o20000;
const S_IFIFO: u32 = 0o10000;
#[cfg(unix)]
const S_IFLNK: u32 = 0o120000;
const S_IFSOCK: u32 = 0o140000;

/// Renders Windows file attributes as a comma-separated list of names.
#[cfg(windows)]
fn get_mode_windows(mode: u32) -> String {
    const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
    const FILE_ATTRIBUTE_COMPRESSED: u32 = 0x800;
    const FILE_ATTRIBUTE_DEVICE: u32 = 0x40;
    const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10;
    const FILE_ATTRIBUTE_ENCRYPTED: u32 = 0x4000;
    const FILE_ATTRIBUTE_HIDDEN: u32 = 0x2;
    const FILE_ATTRIBUTE_INTEGRITY_STREAM: u32 = 0x8000;
    const FILE_ATTRIBUTE_NORMAL: u32 = 0x80;
    const FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: u32 = 0x2000;
    const FILE_ATTRIBUTE_NO_SCRUB_DATA: u32 = 0x20000;
    const FILE_ATTRIBUTE_OFFLINE: u32 = 0x1000;
    const FILE_ATTRIBUTE_READONLY: u32 = 0x1;
    const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: u32 = 0x400000;
    const FILE_ATTRIBUTE_RECALL_ON_OPEN: u32 = 0x40000;
    const FILE_ATTRIBUTE_REPARSE_POINT: u32 = 0x400;
    const FILE_ATTRIBUTE_SPARSE_FILE: u32 = 0x200;
    const FILE_ATTRIBUTE_SYSTEM: u32 = 0x4;
    const FILE_ATTRIBUTE_TEMPORARY: u32 = 0x100;
    const FILE_ATTRIBUTE_VIRTUAL: u32 = 0x10000;

    let mut v = vec![];

    if mode & FILE_ATTRIBUTE_ARCHIVE == FILE_ATTRIBUTE_ARCHIVE {
        v.push("Archive");
    }

    if mode & FILE_ATTRIBUTE_COMPRESSED == FILE_ATTRIBUTE_COMPRESSED {
        v.push("Compressed");
    }

    if mode & FILE_ATTRIBUTE_DEVICE == FILE_ATTRIBUTE_DEVICE {
        v.push("Device");
    }

    if mode & FILE_ATTRIBUTE_DIRECTORY == FILE_ATTRIBUTE_DIRECTORY {
        v.push("Directory");
    }

    if mode & FILE_ATTRIBUTE_ENCRYPTED == FILE_ATTRIBUTE_ENCRYPTED {
        v.push("Encrypted");
    }

    if mode & FILE_ATTRIBUTE_HIDDEN == FILE_ATTRIBUTE_HIDDEN {
        v.push("Hidden");
    }

    if mode & FILE_ATTRIBUTE_INTEGRITY_STREAM == FILE_ATTRIBUTE_INTEGRITY_STREAM {
        v.push("Integrity Stream");
    }

    if mode & FILE_ATTRIBUTE_NORMAL == FILE_ATTRIBUTE_NORMAL {
        v.push("Normal");
    }

    if mode & FILE_ATTRIBUTE_NOT_CONTENT_INDEXED == FILE_ATTRIBUTE_NOT_CONTENT_INDEXED {
        v.push("Not indexed");
    }

    if mode & FILE_ATTRIBUTE_NO_SCRUB_DATA == FILE_ATTRIBUTE_NO_SCRUB_DATA {
        v.push("No scrub data");
    }

    if mode & FILE_ATTRIBUTE_OFFLINE == FILE_ATTRIBUTE_OFFLINE {
        v.push("Offline");
    }

    if mode & FILE_ATTRIBUTE_READONLY == FILE_ATTRIBUTE_READONLY {
        v.push("Readonly");
    }

    if mode & FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS == FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS {
        v.push("Recall on data access");
    }

    if mode & FILE_ATTRIBUTE_RECALL_ON_OPEN == FILE_ATTRIBUTE_RECALL_ON_OPEN {
        v.push("Recall on open");
    }

    if mode & FILE_ATTRIBUTE_REPARSE_POINT == FILE_ATTRIBUTE_REPARSE_POINT {
        v.push("Reparse point");
    }

    if mode & FILE_ATTRIBUTE_SPARSE_FILE == FILE_ATTRIBUTE_SPARSE_FILE {
        v.push("Sparse");
    }

    if mode & FILE_ATTRIBUTE_SYSTEM == FILE_ATTRIBUTE_SYSTEM {
        v.push("System");
    }

    if mode & FILE_ATTRIBUTE_TEMPORARY == FILE_ATTRIBUTE_TEMPORARY {
        v.push("Temporary");
    }

    if mode & FILE_ATTRIBUTE_VIRTUAL == FILE_ATTRIBUTE_VIRTUAL {
        v.push("Virtual");
    }

    v.join(", ")
}

/// Returns the owner UID on Unix, `None` elsewhere.
#[allow(unused)]
pub fn get_uid(meta: &Metadata) -> Option<u32> {
    #[cfg(unix)]
    {
        Some(meta.uid())
    }

    #[cfg(not(unix))]
    {
        None
    }
}

/// Returns the owner GID on Unix, `None` elsewhere.
#[allow(unused)]
pub fn get_gid(meta: &Metadata) -> Option<u32> {
    #[cfg(unix)]
    {
        Some(meta.gid())
    }

    #[cfg(not(unix))]
    {
        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_format_mode() {
        #[cfg(unix)]
        {
            // Regular file with rwxr-xr-- permissions (0754 in octal)
            let mode = 0o100754;
            assert_eq!(format_mode(mode), "-rwxr-xr--");

            // Directory with rwxr-xr-x permissions (0755 in octal)
            let mode = 0o40755;
            assert_eq!(format_mode(mode), "drwxr-xr-x");

            // Symbolic link with rwxrwxrwx permissions (0777 in octal)
            let mode = 0o120777;
            assert_eq!(format_mode(mode), "lrwxrwxrwx");

            // File with setuid bit (4755 in octal)
            let mode = 0o104755;
            assert_eq!(format_mode(mode), "-rwsr-xr-x");

            // File with setgid bit (2755 in octal)
            let mode = 0o102755;
            assert_eq!(format_mode(mode), "-rwxr-sr-x");

            // Directory with sticky bit (1755 in octal)
            let mode = 0o41755;
            assert_eq!(format_mode(mode), "drwxr-xr-t");
        }

        #[cfg(windows)]
        {
            const FILE_ATTRIBUTE_READONLY: u32 = 0x1;
            const FILE_ATTRIBUTE_HIDDEN: u32 = 0x2;
            const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10;

            let mode = FILE_ATTRIBUTE_READONLY;
            assert_eq!(format_mode(mode), "Readonly");

            let mode = FILE_ATTRIBUTE_HIDDEN;
            assert_eq!(format_mode(mode), "Hidden");

            let mode = FILE_ATTRIBUTE_DIRECTORY;
            assert_eq!(format_mode(mode), "Directory");

            let mode = FILE_ATTRIBUTE_READONLY | FILE_ATTRIBUTE_HIDDEN;
            assert_eq!(format_mode(mode), "Hidden, Readonly");
        }
    }

    #[test]
    fn test_mode_user_permissions() {
        let mode = 0o754; // rwxr-xr--

        assert!(mode_user_read(mode));
        assert!(mode_user_write(mode));
        assert!(mode_user_exec(mode));
        assert!(mode_user_all(mode));

        let mode = 0o654; // rw-r-xr--

        assert!(mode_user_read(mode));
        assert!(mode_user_write(mode));
        assert!(!mode_user_exec(mode));
        assert!(!mode_user_all(mode));
    }

    #[test]
    fn test_mode_group_permissions() {
        // Test group permission checks
        let mode = 0o754; // rwxr-xr--

        assert!(mode_group_read(mode));
        assert!(!mode_group_write(mode));
        assert!(mode_group_exec(mode));
        assert!(!mode_group_all(mode));

        let mode = 0o774; // rwxrwxr--

        assert!(mode_group_read(mode));
        assert!(mode_group_write(mode));
        assert!(mode_group_exec(mode));
        assert!(mode_group_all(mode));
    }

    #[test]
    fn test_mode_other_permissions() {
        let mode = 0o754; // rwxr-xr--

        assert!(mode_other_read(mode));
        assert!(!mode_other_write(mode));
        assert!(!mode_other_exec(mode));
        assert!(!mode_other_all(mode));

        let mode = 0o757; // rwxr-xrwx

        assert!(mode_other_read(mode));
        assert!(mode_other_write(mode));
        assert!(mode_other_exec(mode));
        assert!(mode_other_all(mode));
    }

    #[test]
    fn test_mode_special_bits() {
        // Test setuid bit
        let mode = 0o4755; // rwsr-xr-x
        assert!(mode_suid(mode));

        // Test setgid bit
        let mode = 0o2755; // rwxr-sr-x
        assert!(mode_sgid(mode));

        // Test sticky bit (Unix only)
        #[cfg(unix)]
        {
            let mode = 0o1755; // rwxr-xr-t
            assert!(mode_sticky(mode));
        }
    }

    #[test]
    fn test_mode_file_types() {
        // Test directory
        #[cfg(unix)]
        {
            let mode = 0o40755; // drwxr-xr-x
            assert!(mode_is_directory(mode));
            assert!(!mode_is_link(mode));
        }

        // Test symbolic link
        #[cfg(unix)]
        {
            let mode = 0o120755; // lrwxr-xr-x
            assert!(mode_is_link(mode));
            assert!(!mode_is_directory(mode));
        }

        // Test block device
        let mode = 0o60644; // brw-r--r--
        assert!(mode_is_block_device(mode));

        // Test character device
        let mode = 0o20644; // crw-r--r--
        assert!(mode_is_char_device(mode));

        // Test FIFO/pipe
        let mode = 0o10644; // prw-r--r--
        assert!(mode_is_pipe(mode));

        // Test socket
        let mode = 0o140644; // srw-r--r--
        assert!(mode_is_socket(mode));
    }

    #[test]
    fn test_get_uid_gid() {
        // These functions are platform-specific, so we test the behavior
        // rather than the actual values

        #[cfg(unix)]
        {
            // On Unix, we should get Some value
            use std::fs::File;
            if let Ok(meta) = File::open("Cargo.toml").and_then(|f| f.metadata()) {
                assert!(get_uid(&meta).is_some());
                assert!(get_gid(&meta).is_some());
            }
        }

        #[cfg(not(unix))]
        {
            // On non-Unix platforms, we should get None
            use std::fs::File;
            if let Ok(meta) = File::open("Cargo.toml").and_then(|f| f.metadata()) {
                assert!(get_uid(&meta).is_none());
                assert!(get_gid(&meta).is_none());
            }
        }
    }
}
// --------------------------------------------------------------------------------
// /src/operators.rs:
// --------------------------------------------------------------------------------
// (module doc of src/operators.rs continues on the next line)
Defines the arithmetic operators used in the query language 2 | 3 | use crate::util::Variant; 4 | 5 | #[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Serialize)] 6 | pub enum LogicalOp { 7 | And, 8 | Or, 9 | } 10 | 11 | #[derive(Debug, Clone, Copy, Eq, Hash, PartialEq, PartialOrd, Serialize)] 12 | pub enum Op { 13 | Eq, 14 | Ne, 15 | Eeq, 16 | Ene, 17 | Gt, 18 | Gte, 19 | Lt, 20 | Lte, 21 | Rx, 22 | NotRx, 23 | Like, 24 | NotLike, 25 | Between, 26 | NotBetween, 27 | In, 28 | NotIn, 29 | } 30 | 31 | impl Op { 32 | pub fn from(text: String) -> Option { 33 | match text.to_lowercase().as_str() { 34 | "=" | "==" | "eq" => Some(Op::Eq), 35 | "!=" | "<>" | "ne" => Some(Op::Ne), 36 | "===" | "eeq" => Some(Op::Eeq), 37 | "!==" | "ene" => Some(Op::Ene), 38 | ">" | "gt" => Some(Op::Gt), 39 | ">=" | "gte" | "ge" => Some(Op::Gte), 40 | "<" | "lt" => Some(Op::Lt), 41 | "<=" | "lte" | "le" => Some(Op::Lte), 42 | "~=" | "=~" | "regexp" | "rx" => Some(Op::Rx), 43 | "!=~" | "!~=" | "notrx" => Some(Op::NotRx), 44 | "like" => Some(Op::Like), 45 | "notlike" => Some(Op::NotLike), 46 | "between" => Some(Op::Between), 47 | "in" => Some(Op::In), 48 | _ => None, 49 | } 50 | } 51 | 52 | pub fn from_with_not(text: String, not: bool) -> Option { 53 | let op = Op::from(text); 54 | match op { 55 | Some(op) if not => Some(Self::negate(op)), 56 | _ => op, 57 | } 58 | } 59 | 60 | pub fn negate(op: Op) -> Op { 61 | match op { 62 | Op::Eq => Op::Ne, 63 | Op::Ne => Op::Eq, 64 | Op::Eeq => Op::Ene, 65 | Op::Ene => Op::Eeq, 66 | Op::Gt => Op::Lt, 67 | Op::Lt => Op::Gt, 68 | Op::Gte => Op::Lte, 69 | Op::Lte => Op::Gte, 70 | Op::Rx => Op::NotRx, 71 | Op::NotRx => Op::Rx, 72 | Op::Like => Op::NotLike, 73 | Op::NotLike => Op::Like, 74 | Op::Between => Op::NotBetween, 75 | Op::NotBetween => Op::Between, 76 | Op::In => Op::NotIn, 77 | Op::NotIn => Op::In, 78 | } 79 | } 80 | } 81 | 82 | #[derive(Debug, Clone, PartialOrd, PartialEq, Eq, Hash, Serialize)] 83 | pub enum ArithmeticOp { 84 | Add, 85 | 
Subtract, 86 | Divide, 87 | Multiply, 88 | Modulo, 89 | } 90 | 91 | impl ArithmeticOp { 92 | pub fn from(text: String) -> Option { 93 | match text.to_lowercase().as_str() { 94 | "+" | "plus" => Some(ArithmeticOp::Add), 95 | "-" | "minus" => Some(ArithmeticOp::Subtract), 96 | "*" | "mul" => Some(ArithmeticOp::Multiply), 97 | "/" | "div" => Some(ArithmeticOp::Divide), 98 | "%" | "mod" => Some(ArithmeticOp::Modulo), 99 | _ => None, 100 | } 101 | } 102 | 103 | pub fn calc(&self, left: &Variant, right: &Variant) -> Variant { 104 | let result = match &self { 105 | ArithmeticOp::Add => left.to_float() + right.to_float(), 106 | ArithmeticOp::Subtract => left.to_float() - right.to_float(), 107 | ArithmeticOp::Multiply => left.to_float() * right.to_float(), 108 | ArithmeticOp::Divide => left.to_float() / right.to_float(), 109 | ArithmeticOp::Modulo => left.to_float() % right.to_float(), 110 | }; 111 | 112 | Variant::from_float(result) 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /src/output/csv.rs: -------------------------------------------------------------------------------- 1 | //! 
Handles export of results in CSV format 2 | 3 | use crate::output::ResultsFormatter; 4 | use crate::util::WritableBuffer; 5 | 6 | #[derive(Default)] 7 | pub struct CsvFormatter { 8 | records: Vec, 9 | } 10 | 11 | impl ResultsFormatter for CsvFormatter { 12 | fn header(&mut self) -> Option { 13 | None 14 | } 15 | 16 | fn row_started(&mut self) -> Option { 17 | None 18 | } 19 | 20 | fn format_element(&mut self, _: &str, record: &str, _is_last: bool) -> Option { 21 | self.records.push(record.to_owned()); 22 | None 23 | } 24 | 25 | fn row_ended(&mut self) -> Option { 26 | let mut csv_output = WritableBuffer::new(); 27 | { 28 | let mut csv_writer = csv::Writer::from_writer(&mut csv_output); 29 | let _ = csv_writer.write_record(&self.records); 30 | self.records.clear(); 31 | } 32 | Some(csv_output.into()) 33 | } 34 | 35 | fn footer(&mut self) -> Option { 36 | None 37 | } 38 | } 39 | 40 | #[cfg(test)] 41 | mod test { 42 | use crate::output::csv::CsvFormatter; 43 | use crate::output::test::write_test_items; 44 | 45 | #[test] 46 | fn test() { 47 | let result = write_test_items(&mut CsvFormatter::default()); 48 | assert_eq!("foo_value,BAR value\n123,\n", result); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/output/flat.rs: -------------------------------------------------------------------------------- 1 | //! 
Handles export of results in line-separated, list-separated, and tab-separated formats 2 | 3 | use crate::output::ResultsFormatter; 4 | 5 | pub const LINES_FORMATTER: FlatWriter = FlatWriter { 6 | record_separator: '\n', 7 | line_separator: Some('\n'), 8 | }; 9 | 10 | pub const LIST_FORMATTER: FlatWriter = FlatWriter { 11 | record_separator: '\0', 12 | line_separator: Some('\0'), 13 | }; 14 | 15 | pub const TABS_FORMATTER: FlatWriter = FlatWriter { 16 | record_separator: '\t', 17 | line_separator: Some('\n'), 18 | }; 19 | 20 | pub struct FlatWriter { 21 | record_separator: char, 22 | line_separator: Option, 23 | } 24 | 25 | impl ResultsFormatter for FlatWriter { 26 | fn header(&mut self) -> Option { 27 | None 28 | } 29 | 30 | fn row_started(&mut self) -> Option { 31 | None 32 | } 33 | 34 | fn format_element(&mut self, _: &str, record: &str, is_last: bool) -> Option { 35 | match is_last { 36 | true => Some(record.to_string()), 37 | false => Some(format!("{}{}", record, self.record_separator)), 38 | } 39 | } 40 | 41 | fn row_ended(&mut self) -> Option { 42 | self.line_separator.map(String::from) 43 | } 44 | 45 | fn footer(&mut self) -> Option { 46 | None 47 | } 48 | } 49 | 50 | #[cfg(test)] 51 | mod test { 52 | #![allow(const_item_mutation)] 53 | use crate::output::flat::{LINES_FORMATTER, LIST_FORMATTER, TABS_FORMATTER}; 54 | use crate::output::test::write_test_items; 55 | 56 | #[test] 57 | fn test_lines() { 58 | let result = write_test_items(&mut LINES_FORMATTER); 59 | assert_eq!("foo_value\nBAR value\n123\n\n", result); 60 | } 61 | 62 | #[test] 63 | fn test_list() { 64 | let result = write_test_items(&mut LIST_FORMATTER); 65 | assert_eq!("foo_value\0BAR value\0123\0\0", result); 66 | } 67 | 68 | #[test] 69 | fn test_tab() { 70 | let result = write_test_items(&mut TABS_FORMATTER); 71 | assert_eq!("foo_value\tBAR value\n123\t\n", result); 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/output/html.rs: 
-------------------------------------------------------------------------------- 1 | //! Handles export of results in HTML format 2 | 3 | use crate::output::ResultsFormatter; 4 | 5 | pub struct HtmlFormatter; 6 | 7 | impl ResultsFormatter for HtmlFormatter { 8 | fn header(&mut self) -> Option { 9 | Some("".to_owned()) 10 | } 11 | 12 | fn row_started(&mut self) -> Option { 13 | Some("".to_owned()) 14 | } 15 | 16 | fn format_element(&mut self, _: &str, record: &str, _is_last: bool) -> Option { 17 | Some(format!("", record)) 18 | } 19 | 20 | fn row_ended(&mut self) -> Option { 21 | Some("".to_owned()) 22 | } 23 | 24 | fn footer(&mut self) -> Option { 25 | Some("
{}
".to_owned()) 26 | } 27 | } 28 | 29 | #[cfg(test)] 30 | mod test { 31 | use crate::output::html::HtmlFormatter; 32 | use crate::output::test::write_test_items; 33 | 34 | #[test] 35 | fn test() { 36 | let result = write_test_items(&mut HtmlFormatter); 37 | assert_eq!("
foo_valueBAR value
123
", result); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/output/json.rs: -------------------------------------------------------------------------------- 1 | //! Handles export of results in JSON format 2 | 3 | use std::collections::BTreeMap; 4 | 5 | use crate::output::ResultsFormatter; 6 | 7 | #[derive(Default)] 8 | pub struct JsonFormatter { 9 | file_map: BTreeMap, 10 | } 11 | 12 | impl ResultsFormatter for JsonFormatter { 13 | fn header(&mut self) -> Option { 14 | Some("[".to_owned()) 15 | } 16 | 17 | fn row_started(&mut self) -> Option { 18 | None 19 | } 20 | 21 | fn format_element(&mut self, name: &str, record: &str, _is_last: bool) -> Option { 22 | self.file_map.insert(name.to_owned(), record.to_owned()); 23 | None 24 | } 25 | 26 | fn row_ended(&mut self) -> Option { 27 | let result = serde_json::to_string(&self.file_map).unwrap(); 28 | self.file_map.clear(); 29 | Some(result) 30 | } 31 | 32 | fn footer(&mut self) -> Option { 33 | Some("]".to_owned()) 34 | } 35 | 36 | fn row_separator(&self) -> Option { 37 | Some(",".to_owned()) 38 | } 39 | } 40 | 41 | #[cfg(test)] 42 | mod test { 43 | use crate::output::json::JsonFormatter; 44 | use crate::output::test::write_test_items; 45 | 46 | #[test] 47 | fn test() { 48 | let result = write_test_items(&mut JsonFormatter::default()); 49 | assert_eq!( 50 | r#"[{"bar":"BAR value","foo":"foo_value"},{"bar":"","foo":"123"}]"#, 51 | result 52 | ); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/output/mod.rs: -------------------------------------------------------------------------------- 1 | use std::io::Write; 2 | 3 | use crate::output::csv::CsvFormatter; 4 | use crate::output::flat::{LINES_FORMATTER, LIST_FORMATTER, TABS_FORMATTER}; 5 | use crate::output::html::HtmlFormatter; 6 | use crate::output::json::JsonFormatter; 7 | use crate::query::OutputFormat; 8 | 9 | mod csv; 10 | mod flat; 11 | mod html; 12 | 
mod json; 13 | 14 | pub trait ResultsFormatter { 15 | fn header(&mut self) -> Option; 16 | fn row_started(&mut self) -> Option; 17 | fn format_element(&mut self, name: &str, record: &str, is_last: bool) -> Option; 18 | fn row_ended(&mut self) -> Option; 19 | fn footer(&mut self) -> Option; 20 | 21 | fn row_separator(&self) -> Option { 22 | None 23 | } 24 | } 25 | 26 | pub struct ResultsWriter { 27 | formatter: Box, 28 | } 29 | 30 | impl ResultsWriter { 31 | pub fn new(format: &OutputFormat) -> ResultsWriter { 32 | ResultsWriter { 33 | formatter: select_formatter(format), 34 | } 35 | } 36 | 37 | pub fn write_header(&mut self, writer: &mut dyn Write) -> std::io::Result<()> { 38 | self.formatter 39 | .header() 40 | .map_or(Ok(()), |value| write!(writer, "{}", value)) 41 | } 42 | 43 | pub fn write_row_separator(&mut self, writer: &mut dyn Write) -> std::io::Result<()> { 44 | self.formatter 45 | .row_separator() 46 | .map_or(Ok(()), |value| write!(writer, "{}", value)) 47 | } 48 | 49 | pub fn write_row( 50 | &mut self, 51 | writer: &mut dyn Write, 52 | values: Vec<(String, String)>, 53 | ) -> std::io::Result<()> { 54 | self.write_row_start(writer)?; 55 | let len = values.len(); 56 | for (pos, (name, value)) in values.iter().enumerate() { 57 | self.write_row_item(writer, name, value, pos == len - 1)?; 58 | } 59 | self.write_row_end(writer) 60 | } 61 | 62 | pub fn write_footer(&mut self, writer: &mut dyn Write) -> std::io::Result<()> { 63 | self.formatter 64 | .footer() 65 | .map_or(Ok(()), |value| write!(writer, "{}", value)) 66 | } 67 | 68 | fn write_row_start(&mut self, writer: &mut dyn Write) -> std::io::Result<()> { 69 | self.formatter 70 | .row_started() 71 | .map_or(Ok(()), |value| write!(writer, "{}", value)) 72 | } 73 | fn write_row_item( 74 | &mut self, 75 | writer: &mut dyn Write, 76 | name: &str, 77 | value: &str, 78 | is_last: bool, 79 | ) -> std::io::Result<()> { 80 | self.formatter 81 | .format_element(name, value, is_last) 82 | .map_or(Ok(()), |value| 
write!(writer, "{}", value)) 83 | } 84 | 85 | fn write_row_end(&mut self, writer: &mut dyn Write) -> std::io::Result<()> { 86 | self.formatter 87 | .row_ended() 88 | .map_or(Ok(()), |value| write!(writer, "{}", value)) 89 | } 90 | } 91 | 92 | fn select_formatter(format: &OutputFormat) -> Box { 93 | match format { 94 | OutputFormat::Tabs => Box::new(TABS_FORMATTER), 95 | OutputFormat::Lines => Box::new(LINES_FORMATTER), 96 | OutputFormat::List => Box::new(LIST_FORMATTER), 97 | OutputFormat::Csv => Box::::default(), 98 | OutputFormat::Json => Box::::default(), 99 | OutputFormat::Html => Box::new(HtmlFormatter), 100 | } 101 | } 102 | 103 | #[cfg(test)] 104 | mod test { 105 | use crate::output::ResultsFormatter; 106 | 107 | pub(crate) fn write_test_items(under_test: &mut T) -> String { 108 | let mut result = String::from(""); 109 | under_test.header().and_then(|s| Some(result.push_str(&s))); 110 | under_test 111 | .row_started() 112 | .and_then(|s| Some(result.push_str(&s))); 113 | under_test 114 | .format_element("foo", "foo_value", false) 115 | .and_then(|s| Some(result.push_str(&s))); 116 | under_test 117 | .format_element("bar", "BAR value", true) 118 | .and_then(|s| Some(result.push_str(&s))); 119 | under_test 120 | .row_ended() 121 | .and_then(|s| Some(result.push_str(&s))); 122 | under_test 123 | .row_separator() 124 | .and_then(|s| Some(result.push_str(&s))); 125 | under_test 126 | .row_started() 127 | .and_then(|s| Some(result.push_str(&s))); 128 | under_test 129 | .format_element("foo", "123", false) 130 | .and_then(|s| Some(result.push_str(&s))); 131 | under_test 132 | .format_element("bar", "", true) 133 | .and_then(|s| Some(result.push_str(&s))); 134 | under_test 135 | .row_ended() 136 | .and_then(|s| Some(result.push_str(&s))); 137 | under_test.footer().and_then(|s| Some(result.push_str(&s))); 138 | result 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /src/query.rs: 
-------------------------------------------------------------------------------- 1 | //! Defines the query struct and related types. 2 | //! Query parsing is handled in the `parser` module 3 | 4 | use std::collections::HashSet; 5 | use std::rc::Rc; 6 | 7 | use crate::expr::Expr; 8 | use crate::field::Field; 9 | use crate::query::TraversalMode::Bfs; 10 | 11 | #[derive(Debug, Clone)] 12 | /// Represents a query to be executed on . 13 | /// 14 | pub struct Query { 15 | /// File fields to be selected 16 | pub fields: Vec, 17 | /// Root directories to search 18 | pub roots: Vec, 19 | /// "where" filter expression 20 | pub expr: Option, 21 | /// Fields to group by 22 | pub grouping_fields: Rc>, 23 | /// Fields to order by 24 | pub ordering_fields: Rc>, 25 | /// Ordering direction (true for asc, false for desc) 26 | pub ordering_asc: Rc>, 27 | /// Max amount of results to return 28 | pub limit: u32, 29 | /// Output format 30 | pub output_format: OutputFormat, 31 | } 32 | 33 | impl Query { 34 | pub fn get_all_fields(&self) -> HashSet { 35 | let mut result = HashSet::new(); 36 | 37 | for column_expr in &self.fields { 38 | result.extend(column_expr.get_required_fields()); 39 | } 40 | 41 | result 42 | } 43 | 44 | pub fn is_ordered(&self) -> bool { 45 | !self.ordering_fields.is_empty() 46 | } 47 | 48 | pub fn has_aggregate_column(&self) -> bool { 49 | self.fields.iter().any(|ref f| f.has_aggregate_function()) 50 | } 51 | } 52 | 53 | #[derive(Debug, Clone, PartialEq)] 54 | /// Represents a root directory to start the search from, with traversal options. 55 | pub struct Root { 56 | pub path: String, 57 | pub options: RootOptions, 58 | } 59 | 60 | #[derive(Debug, Clone, PartialEq)] 61 | /// Represents the traversal options for a root directory. 
62 | pub struct RootOptions { 63 | /// Minimum depth to search 64 | pub min_depth: u32, 65 | /// Maximum depth to search 66 | pub max_depth: u32, 67 | /// Whether to search archives 68 | pub archives: bool, 69 | /// Whether to follow symlinks 70 | pub symlinks: bool, 71 | /// Whether to track hardlinks 72 | pub hardlinks: bool, 73 | /// Whether to respect .gitignore files 74 | pub gitignore: Option, 75 | /// Whether to respect .hgignore files 76 | pub hgignore: Option, 77 | /// Whether to respect .dockerignore files 78 | pub dockerignore: Option, 79 | /// The traversal mode to use 80 | pub traversal: TraversalMode, 81 | /// Treat the path as a regular expression 82 | pub regexp: bool, 83 | } 84 | 85 | impl RootOptions { 86 | pub fn new() -> RootOptions { 87 | RootOptions { 88 | min_depth: 0, 89 | max_depth: 0, 90 | archives: false, 91 | symlinks: false, 92 | hardlinks: false, 93 | gitignore: None, 94 | hgignore: None, 95 | dockerignore: None, 96 | traversal: Bfs, 97 | regexp: false, 98 | } 99 | } 100 | 101 | #[cfg(test)] 102 | pub fn from( 103 | min_depth: u32, 104 | max_depth: u32, 105 | archives: bool, 106 | symlinks: bool, 107 | hardlinks: bool, 108 | gitignore: Option, 109 | hgignore: Option, 110 | dockerignore: Option, 111 | traversal: TraversalMode, 112 | regexp: bool, 113 | ) -> RootOptions { 114 | RootOptions { 115 | min_depth, 116 | max_depth, 117 | archives, 118 | symlinks, 119 | hardlinks, 120 | gitignore, 121 | hgignore, 122 | dockerignore, 123 | traversal, 124 | regexp, 125 | } 126 | } 127 | } 128 | 129 | impl Root { 130 | pub fn new(path: String, options: RootOptions) -> Root { 131 | Root { path, options } 132 | } 133 | 134 | pub fn default(options: Option) -> Root { 135 | Root { 136 | path: String::from("."), 137 | options: options.unwrap_or_else(RootOptions::new), 138 | } 139 | } 140 | 141 | pub fn clone_with_path(new_path: String, source: Root) -> Root { 142 | Root { 143 | path: new_path, 144 | ..source 145 | } 146 | } 147 | } 148 | 149 | 
#[derive(Debug, Clone, Copy, PartialEq)] 150 | pub enum TraversalMode { 151 | Bfs, 152 | Dfs, 153 | } 154 | 155 | #[derive(Debug, Clone, PartialEq)] 156 | pub enum OutputFormat { 157 | Tabs, 158 | Lines, 159 | List, 160 | Csv, 161 | Json, 162 | Html, 163 | } 164 | 165 | impl OutputFormat { 166 | pub fn from(s: &str) -> Option { 167 | let s = s.to_lowercase(); 168 | 169 | match s.as_str() { 170 | "lines" => Some(OutputFormat::Lines), 171 | "list" => Some(OutputFormat::List), 172 | "csv" => Some(OutputFormat::Csv), 173 | "json" => Some(OutputFormat::Json), 174 | "tabs" => Some(OutputFormat::Tabs), 175 | "html" => Some(OutputFormat::Html), 176 | _ => None, 177 | } 178 | } 179 | } 180 | -------------------------------------------------------------------------------- /src/util/capabilities.rs: -------------------------------------------------------------------------------- 1 | #[cfg(target_os = "linux")] 2 | macro_rules! check_cap { 3 | ($cap_name: ident, $code: expr, $permitted: ident, $inherited: ident, $effective: ident, $result: ident) => { 4 | if let Some(str_result) = check_capability($permitted, $inherited, 1 << $code) { 5 | $result.push(stringify!($cap_name).to_owned() + "=" + &$effective + &str_result); 6 | } 7 | }; 8 | } 9 | 10 | #[cfg(target_os = "linux")] 11 | pub fn parse_capabilities(caps: Vec) -> String { 12 | if caps.len() < 12 { 13 | return String::new(); 14 | } 15 | 16 | let mut result: Vec = vec![]; 17 | 18 | let effective = if caps[0] == 1 { 19 | String::from("e") 20 | } else { 21 | String::new() 22 | }; 23 | 24 | let permitted = u32::from_le_bytes(caps[4..8].try_into().unwrap()); 25 | let inherited = u32::from_le_bytes(caps[8..12].try_into().unwrap()); 26 | 27 | check_cap!(cap_chown, 0, permitted, inherited, effective, result); 28 | check_cap!(cap_dac_override, 1, permitted, inherited, effective, result); 29 | check_cap!( 30 | cap_dac_read_search, 31 | 2, 32 | permitted, 33 | inherited, 34 | effective, 35 | result 36 | ); 37 | check_cap!(cap_fowner, 
3, permitted, inherited, effective, result); 38 | check_cap!(cap_fsetid, 4, permitted, inherited, effective, result); 39 | check_cap!(cap_kill, 5, permitted, inherited, effective, result); 40 | check_cap!(cap_setgid, 6, permitted, inherited, effective, result); 41 | check_cap!(cap_setuid, 7, permitted, inherited, effective, result); 42 | check_cap!(cap_setpcap, 8, permitted, inherited, effective, result); 43 | check_cap!( 44 | cap_linux_immutable, 45 | 9, 46 | permitted, 47 | inherited, 48 | effective, 49 | result 50 | ); 51 | check_cap!( 52 | cap_net_bind_service, 53 | 10, 54 | permitted, 55 | inherited, 56 | effective, 57 | result 58 | ); 59 | check_cap!( 60 | cap_net_broadcast, 61 | 11, 62 | permitted, 63 | inherited, 64 | effective, 65 | result 66 | ); 67 | check_cap!(cap_net_admin, 12, permitted, inherited, effective, result); 68 | check_cap!(cap_net_raw, 13, permitted, inherited, effective, result); 69 | check_cap!(cap_ipc_lock, 14, permitted, inherited, effective, result); 70 | check_cap!(cap_ipc_owner, 15, permitted, inherited, effective, result); 71 | check_cap!(cap_sys_module, 16, permitted, inherited, effective, result); 72 | check_cap!(cap_sys_rawio, 17, permitted, inherited, effective, result); 73 | check_cap!(cap_sys_chroot, 18, permitted, inherited, effective, result); 74 | check_cap!(cap_sys_ptrace, 19, permitted, inherited, effective, result); 75 | check_cap!(cap_sys_pacct, 20, permitted, inherited, effective, result); 76 | check_cap!(cap_sys_admin, 21, permitted, inherited, effective, result); 77 | check_cap!(cap_sys_boot, 22, permitted, inherited, effective, result); 78 | check_cap!(cap_sys_nice, 23, permitted, inherited, effective, result); 79 | check_cap!( 80 | cap_sys_resource, 81 | 24, 82 | permitted, 83 | inherited, 84 | effective, 85 | result 86 | ); 87 | check_cap!(cap_sys_time, 25, permitted, inherited, effective, result); 88 | check_cap!( 89 | cap_sys_tty_config, 90 | 26, 91 | permitted, 92 | inherited, 93 | effective, 94 | result 95 | 
); 96 | check_cap!(cap_mknod, 27, permitted, inherited, effective, result); 97 | check_cap!(cap_lease, 28, permitted, inherited, effective, result); 98 | check_cap!(cap_audit_write, 29, permitted, inherited, effective, result); 99 | check_cap!( 100 | cap_audit_control, 101 | 30, 102 | permitted, 103 | inherited, 104 | effective, 105 | result 106 | ); 107 | check_cap!(cap_setfcap, 31, permitted, inherited, effective, result); 108 | 109 | if caps.len() >= 20 { 110 | let permitted = u32::from_le_bytes(caps[12..16].try_into().unwrap()); 111 | let inherited = u32::from_le_bytes(caps[16..20].try_into().unwrap()); 112 | 113 | check_cap!( 114 | cap_mac_override, 115 | 32 - 32, 116 | permitted, 117 | inherited, 118 | effective, 119 | result 120 | ); 121 | check_cap!( 122 | cap_mac_admin, 123 | 33 - 32, 124 | permitted, 125 | inherited, 126 | effective, 127 | result 128 | ); 129 | check_cap!(cap_syslog, 34 - 32, permitted, inherited, effective, result); 130 | check_cap!( 131 | cap_wake_alarm, 132 | 35 - 32, 133 | permitted, 134 | inherited, 135 | effective, 136 | result 137 | ); 138 | check_cap!( 139 | cap_block_suspend, 140 | 36 - 32, 141 | permitted, 142 | inherited, 143 | effective, 144 | result 145 | ); 146 | check_cap!( 147 | cap_audit_read, 148 | 37 - 32, 149 | permitted, 150 | inherited, 151 | effective, 152 | result 153 | ); 154 | check_cap!( 155 | cap_perfmon, 156 | 38 - 32, 157 | permitted, 158 | inherited, 159 | effective, 160 | result 161 | ); 162 | check_cap!(cap_bpf, 39 - 32, permitted, inherited, effective, result); 163 | check_cap!( 164 | cap_checkpoint_restore, 165 | 40 - 32, 166 | permitted, 167 | inherited, 168 | effective, 169 | result 170 | ); 171 | } 172 | 173 | result.join(" ") 174 | } 175 | 176 | #[cfg(target_os = "linux")] 177 | fn check_capability(perm: u32, inh: u32, cap: u32) -> Option { 178 | if inh & cap == cap && perm & cap == cap { 179 | Some(String::from("ip")) 180 | } else if perm & cap == cap { 181 | Some(String::from("p")) 182 | } else if 
inh & cap == cap { 183 | Some(String::from("i")) 184 | } else { 185 | None 186 | } 187 | } 188 | -------------------------------------------------------------------------------- /src/util/datetime.rs: -------------------------------------------------------------------------------- 1 | use std::sync::LazyLock; 2 | 3 | use chrono::{Datelike, Duration, Local, LocalResult, NaiveDate, NaiveDateTime, TimeZone, Timelike}; 4 | use chrono_english::{parse_date_string, Dialect}; 5 | use regex::Regex; 6 | 7 | static DATE_REGEX: LazyLock = LazyLock::new(|| { 8 | Regex::new("(\\d{4})(-|:)(\\d{1,2})(-|:)(\\d{1,2}) ?(\\d{1,2})?:?(\\d{1,2})?:?(\\d{1,2})?").unwrap() 9 | }); 10 | 11 | pub fn parse_datetime(s: &str) -> Result<(NaiveDateTime, NaiveDateTime), String> { 12 | if s == "today" { 13 | let date = Local::now().date_naive(); 14 | let start = date.and_hms_opt(0, 0, 0).unwrap(); 15 | let finish = date.and_hms_opt(23, 59, 59).unwrap(); 16 | 17 | return Ok((start, finish)); 18 | } 19 | 20 | if s == "yesterday" { 21 | let date = Local::now().date_naive() - Duration::try_days(1).unwrap(); 22 | let start = date.and_hms_opt(0, 0, 0).unwrap(); 23 | let finish = date.and_hms_opt(23, 59, 59).unwrap(); 24 | 25 | return Ok((start, finish)); 26 | } 27 | 28 | match DATE_REGEX.captures(s) { 29 | Some(cap) => { 30 | let year: i32 = cap[1].parse().unwrap(); 31 | let month: u32 = cap[3].parse().unwrap(); 32 | let day: u32 = cap[5].parse().unwrap(); 33 | 34 | let hour_start: u32; 35 | let hour_finish: u32; 36 | match cap.get(6) { 37 | Some(val) => { 38 | hour_start = val.as_str().parse().unwrap(); 39 | hour_finish = hour_start; 40 | } 41 | None => { 42 | hour_start = 0; 43 | hour_finish = 23; 44 | } 45 | } 46 | 47 | let min_start: u32; 48 | let min_finish: u32; 49 | match cap.get(7) { 50 | Some(val) => { 51 | min_start = val.as_str().parse().unwrap(); 52 | min_finish = min_start; 53 | } 54 | None => { 55 | min_start = 0; 56 | min_finish = 59; 57 | } 58 | } 59 | 60 | let sec_start: u32; 61 | let 
sec_finish: u32; 62 | match cap.get(8) { 63 | Some(val) => { 64 | sec_start = val.as_str().parse().unwrap(); 65 | sec_finish = sec_start; 66 | } 67 | None => { 68 | sec_start = 0; 69 | sec_finish = 59; 70 | } 71 | } 72 | 73 | match Local.with_ymd_and_hms(year, month, day, 0, 0, 0) { 74 | LocalResult::Single(date) => { 75 | let start = date 76 | .naive_local() 77 | .with_hour(hour_start) 78 | .unwrap() 79 | .with_minute(min_start) 80 | .unwrap() 81 | .with_second(sec_start) 82 | .unwrap(); 83 | let finish = date 84 | .naive_local() 85 | .with_hour(hour_finish) 86 | .unwrap() 87 | .with_minute(min_finish) 88 | .unwrap() 89 | .with_second(sec_finish) 90 | .unwrap(); 91 | 92 | Ok((start, finish)) 93 | } 94 | _ => Err("Error converting date/time to local: ".to_string() + s), 95 | } 96 | } 97 | None => { 98 | if s.len() >= 5 { 99 | match parse_date_string(s, Local::now(), Dialect::Uk) { 100 | Ok(date_time) => { 101 | let date_time = date_time.naive_local(); 102 | let finish = if date_time.hour() == 0 103 | && date_time.minute() == 0 104 | && date_time.second() == 0 105 | { 106 | date_time 107 | .with_hour(23) 108 | .unwrap() 109 | .with_minute(59) 110 | .unwrap() 111 | .with_second(59) 112 | .unwrap() 113 | } else { 114 | date_time 115 | }; 116 | 117 | Ok((date_time, finish)) 118 | } 119 | _ => Err("Error parsing date/time value: ".to_string() + s), 120 | } 121 | } else if s.len() >= 2 && (s.starts_with("+") || s.starts_with("-")) { 122 | let days = s.parse::().unwrap(); 123 | let date = Local::now().date_naive() + Duration::days(days); 124 | let start = date.and_hms_opt(0, 0, 0).unwrap(); 125 | let finish = date.and_hms_opt(23, 59, 59).unwrap(); 126 | 127 | Ok((start, finish)) 128 | } else { 129 | Err("Error parsing date/time value: ".to_string() + s) 130 | } 131 | } 132 | } 133 | } 134 | 135 | pub fn to_local_datetime(dt: &zip::DateTime) -> NaiveDateTime { 136 | Local::now() 137 | .naive_local() 138 | .with_year(dt.year() as i32) 139 | .unwrap() 140 | 
.with_month(dt.month() as u32) 141 | .unwrap() 142 | .with_day(dt.day() as u32) 143 | .unwrap() 144 | .with_hour(dt.hour() as u32) 145 | .unwrap() 146 | .with_minute(dt.minute() as u32) 147 | .unwrap() 148 | .with_second(dt.second() as u32) 149 | .unwrap() 150 | } 151 | 152 | pub fn format_datetime(dt: &NaiveDateTime) -> String { 153 | format!("{}", dt.format("%Y-%m-%d %H:%M:%S")) 154 | } 155 | 156 | pub fn format_date(date: &NaiveDate) -> String { 157 | format!("{}", date.format("%Y-%m-%d")) 158 | } 159 | 160 | #[cfg(test)] 161 | mod tests { 162 | use super::*; 163 | use chrono::{Local, NaiveDate}; 164 | 165 | #[test] 166 | fn test_parse_today() { 167 | let result = parse_datetime("today").unwrap(); 168 | let now = Local::now().date_naive(); 169 | let start = now.and_hms_opt(0, 0, 0).unwrap(); 170 | let finish = now.and_hms_opt(23, 59, 59).unwrap(); 171 | 172 | assert_eq!(result.0, start); 173 | assert_eq!(result.1, finish); 174 | } 175 | 176 | #[test] 177 | fn test_parse_yesterday() { 178 | let result = parse_datetime("yesterday").unwrap(); 179 | let yesterday = Local::now().date_naive() - chrono::Duration::days(1); 180 | let start = yesterday.and_hms_opt(0, 0, 0).unwrap(); 181 | let finish = yesterday.and_hms_opt(23, 59, 59).unwrap(); 182 | 183 | assert_eq!(result.0, start); 184 | assert_eq!(result.1, finish); 185 | } 186 | 187 | #[test] 188 | fn test_parse_two_days_ago() { 189 | let result = parse_datetime("2 days ago 00:00").unwrap(); 190 | let two_days_ago = Local::now().date_naive() - chrono::Duration::days(2); 191 | let start = two_days_ago.and_hms_opt(0, 0, 0).unwrap(); 192 | let finish = two_days_ago.and_hms_opt(23, 59, 59).unwrap(); 193 | 194 | assert_eq!(result.0, start); 195 | assert_eq!(result.1, finish); 196 | } 197 | 198 | #[test] 199 | fn test_parse_two_days_ago_simplified() { 200 | let result = parse_datetime("-2").unwrap(); 201 | let two_days_ago = Local::now().date_naive() - chrono::Duration::days(2); 202 | let start = 
two_days_ago.and_hms_opt(0, 0, 0).unwrap(); 203 | let finish = two_days_ago.and_hms_opt(23, 59, 59).unwrap(); 204 | 205 | assert_eq!(result.0, start); 206 | assert_eq!(result.1, finish); 207 | } 208 | 209 | #[test] 210 | fn test_parse_specific_date() { 211 | let result = parse_datetime("2023-12-11").unwrap(); 212 | let date = NaiveDate::from_ymd_opt(2023, 12, 11).unwrap(); 213 | let start = date.and_hms_opt(0, 0, 0).unwrap(); 214 | let finish = date.and_hms_opt(23, 59, 59).unwrap(); 215 | 216 | assert_eq!(result.0, start); 217 | assert_eq!(result.1, finish); 218 | } 219 | 220 | #[test] 221 | fn test_parse_specific_datetime() { 222 | let result = parse_datetime("2023-12-11 14:30:45").unwrap(); 223 | let date = NaiveDate::from_ymd_opt(2023, 12, 11).unwrap(); 224 | let start = date.and_hms_opt(14, 30, 45).unwrap(); 225 | let finish = start; 226 | 227 | assert_eq!(result.0, start); 228 | assert_eq!(result.1, finish); 229 | } 230 | 231 | #[test] 232 | fn test_invalid_format() { 233 | let result = parse_datetime("invalid-date"); 234 | 235 | assert!(result.is_err()); 236 | assert_eq!(result.unwrap_err(), "Error parsing date/time value: invalid-date"); 237 | } 238 | 239 | #[test] 240 | fn test_partial_date_parsing() { 241 | let result = parse_datetime("2023-12-11 14:30").unwrap(); 242 | let date = NaiveDate::from_ymd_opt(2023, 12, 11).unwrap(); 243 | let start = date.and_hms_opt(14, 30, 0).unwrap(); 244 | let finish = date.and_hms_opt(14, 30, 59).unwrap(); 245 | 246 | assert_eq!(result.0, start); 247 | assert_eq!(result.1, finish); 248 | } 249 | } 250 | -------------------------------------------------------------------------------- /src/util/dimensions/image.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::path::Path; 3 | 4 | use imagesize::ImageError; 5 | 6 | use crate::util::dimensions::DimensionsExtractor; 7 | use crate::util::Dimensions; 8 | 9 | pub struct ImageDimensionsExtractor; 10 | 11 | impl 
ImageDimensionsExtractor { 12 | const EXTENSIONS: [&'static str; 13] = [ 13 | "bmp", "gif", "heic", "heif", "jpeg", "jpg", "jxl", "png", "psb", "psd", "tga", "tiff", 14 | "webp", 15 | ]; 16 | } 17 | 18 | impl DimensionsExtractor for ImageDimensionsExtractor { 19 | fn supports_ext(&self, ext_lowercase: &str) -> bool { 20 | ImageDimensionsExtractor::EXTENSIONS 21 | .iter() 22 | .any(|&supported| supported == ext_lowercase) 23 | } 24 | 25 | fn try_read_dimensions(&self, path: &Path) -> io::Result> { 26 | let dimensions = imagesize::size(path).map_err(|err| match err { 27 | ImageError::NotSupported => { 28 | io::Error::new(io::ErrorKind::InvalidInput, ImageError::NotSupported) 29 | } 30 | ImageError::CorruptedImage => { 31 | io::Error::new(io::ErrorKind::InvalidData, ImageError::CorruptedImage) 32 | } 33 | ImageError::IoError(e) => e, 34 | })?; 35 | Ok(Some(Dimensions { 36 | width: dimensions.width, 37 | height: dimensions.height, 38 | })) 39 | } 40 | } 41 | 42 | #[cfg(test)] 43 | mod test { 44 | use super::ImageDimensionsExtractor; 45 | use crate::util::dimensions::{test::test_successful, Dimensions}; 46 | use std::error::Error; 47 | 48 | fn do_test_success(ext: &str, w: usize, h: usize) -> Result<(), Box> { 49 | let res_path = String::from("image/rust-logo-blk.") + ext; 50 | test_successful( 51 | ImageDimensionsExtractor, 52 | &res_path, 53 | Some(Dimensions { 54 | width: w, 55 | height: h, 56 | }), 57 | ) 58 | } 59 | 60 | #[test] 61 | pub fn test_bmp() -> Result<(), Box> { 62 | do_test_success("bmp", 144, 144) 63 | } 64 | 65 | #[test] 66 | pub fn test_gif() -> Result<(), Box> { 67 | do_test_success("gif", 144, 144) 68 | } 69 | 70 | #[test] 71 | pub fn test_jpeg() -> Result<(), Box> { 72 | do_test_success("jpeg", 144, 144) 73 | } 74 | 75 | #[test] 76 | pub fn test_jpg() -> Result<(), Box> { 77 | do_test_success("jpg", 144, 144) 78 | } 79 | 80 | #[test] 81 | pub fn test_png() -> Result<(), Box> { 82 | do_test_success("png", 144, 144) 83 | } 84 | 85 | #[test] 86 | pub 
fn test_tiff() -> Result<(), Box> { 87 | do_test_success("tiff", 144, 144) 88 | } 89 | 90 | #[test] 91 | pub fn test_webp() -> Result<(), Box> { 92 | do_test_success("webp", 144, 144) 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/util/dimensions/mkv.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io; 3 | use std::path::Path; 4 | 5 | use matroska::MatroskaError; 6 | 7 | use crate::util::dimensions::DimensionsExtractor; 8 | use crate::util::Dimensions; 9 | 10 | pub struct MkvDimensionsExtractor; 11 | 12 | impl DimensionsExtractor for MkvDimensionsExtractor { 13 | fn supports_ext(&self, ext_lowercase: &str) -> bool { 14 | "mkv" == ext_lowercase || "webm" == ext_lowercase 15 | } 16 | 17 | fn try_read_dimensions(&self, path: &Path) -> io::Result> { 18 | let fd = File::open(path)?; 19 | let matroska = matroska::Matroska::open(fd).map_err(|err| match err { 20 | MatroskaError::Io(io) => io, 21 | MatroskaError::UTF8(utf8) => io::Error::new(io::ErrorKind::InvalidData, utf8), 22 | e => io::Error::new(io::ErrorKind::InvalidData, e), 23 | })?; 24 | Ok(matroska 25 | .tracks 26 | .iter() 27 | .find(|&track| track.tracktype == matroska::Tracktype::Video) 28 | .and_then(|ref track| { 29 | if let matroska::Settings::Video(settings) = &track.settings { 30 | Some(Dimensions { 31 | width: settings.pixel_width as usize, 32 | height: settings.pixel_height as usize, 33 | }) 34 | } else { 35 | None 36 | } 37 | })) 38 | } 39 | } 40 | 41 | #[cfg(test)] 42 | mod test { 43 | use super::MkvDimensionsExtractor; 44 | use crate::util::dimensions::{test::test_successful, Dimensions}; 45 | use std::error::Error; 46 | 47 | #[test] 48 | fn test_success() -> Result<(), Box> { 49 | test_successful( 50 | MkvDimensionsExtractor, 51 | "video/rust-logo-blk.mkv", 52 | Some(Dimensions { 53 | width: 144, 54 | height: 144, 55 | }), 56 | ) 57 | } 58 | } 59 | 
-------------------------------------------------------------------------------- /src/util/dimensions/mod.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | 3 | mod image; 4 | mod mkv; 5 | mod mp4; 6 | mod svg; 7 | 8 | use self::svg::SvgDimensionsExtractor; 9 | use image::ImageDimensionsExtractor; 10 | use mkv::MkvDimensionsExtractor; 11 | use mp4::Mp4DimensionsExtractor; 12 | use std::path::Path; 13 | 14 | #[derive(PartialEq, Eq, Clone, Debug)] 15 | pub struct Dimensions { 16 | pub width: usize, 17 | pub height: usize, 18 | } 19 | 20 | pub trait DimensionsExtractor { 21 | fn supports_ext(&self, ext_lowercase: &str) -> bool; 22 | fn try_read_dimensions(&self, path: &Path) -> io::Result>; 23 | } 24 | 25 | const EXTRACTORS: [&dyn DimensionsExtractor; 4] = [ 26 | &MkvDimensionsExtractor, 27 | &Mp4DimensionsExtractor, 28 | &SvgDimensionsExtractor, 29 | &ImageDimensionsExtractor, 30 | ]; 31 | 32 | pub fn get_dimensions>(path: T) -> Option { 33 | let path_ref = path.as_ref(); 34 | let extension = path_ref.extension()?.to_str()?; 35 | 36 | EXTRACTORS 37 | .iter() 38 | .find(|extractor| extractor.supports_ext(&extension.to_lowercase())) 39 | .and_then(|extractor| extractor.try_read_dimensions(path_ref).unwrap_or_default()) 40 | } 41 | 42 | #[cfg(test)] 43 | mod test { 44 | use crate::util::dimensions::DimensionsExtractor; 45 | use crate::util::Dimensions; 46 | use std::error::Error; 47 | use std::ffi::OsStr; 48 | use std::path::PathBuf; 49 | 50 | pub(crate) fn test_successful( 51 | under_test: T, 52 | test_res_path: &str, 53 | expected: Option, 54 | ) -> Result<(), Box> { 55 | let path_string = std::env::var("CARGO_MANIFEST_DIR")? 
+ "/resources/test/" + test_res_path; 56 | let path = PathBuf::from(path_string); 57 | assert!(under_test.supports_ext(path.extension().and_then(OsStr::to_str).unwrap())); 58 | assert_eq!(under_test.try_read_dimensions(&path)?, expected); 59 | 60 | Ok(()) 61 | } 62 | 63 | pub(crate) fn test_fail( 64 | under_test: T, 65 | test_res_path: &str, 66 | expected: std::io::ErrorKind, 67 | ) -> Result<(), Box> { 68 | let path_string = std::env::var("CARGO_MANIFEST_DIR")? + "/resources/test/" + test_res_path; 69 | let path = PathBuf::from(path_string); 70 | assert!(under_test.supports_ext(path.extension().and_then(OsStr::to_str).unwrap())); 71 | let result = under_test.try_read_dimensions(&path); 72 | assert_eq!(result.map_err(|err| err.kind()), Err(expected)); 73 | 74 | Ok(()) 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/util/dimensions/mp4.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io; 3 | use std::io::Read; 4 | use std::path::Path; 5 | 6 | use crate::util::dimensions::DimensionsExtractor; 7 | use crate::util::Dimensions; 8 | 9 | pub struct Mp4DimensionsExtractor; 10 | 11 | impl DimensionsExtractor for Mp4DimensionsExtractor { 12 | fn supports_ext(&self, ext_lowercase: &str) -> bool { 13 | "mp4" == ext_lowercase 14 | } 15 | 16 | fn try_read_dimensions(&self, path: &Path) -> io::Result> { 17 | let mut fd = File::open(path)?; 18 | let mut buf = Vec::new(); 19 | let _ = fd.read_to_end(&mut buf)?; 20 | let mut c = io::Cursor::new(&buf); 21 | let context = mp4parse::read_mp4(&mut c)?; 22 | Ok(context 23 | .tracks 24 | .iter() 25 | .find(|track| track.track_type == mp4parse::TrackType::Video) 26 | .and_then(|ref track| { 27 | track.tkhd.as_ref().map(|tkhd| Dimensions { 28 | width: (tkhd.width / 65536) as usize, 29 | height: (tkhd.height / 65536) as usize, 30 | }) 31 | })) 32 | } 33 | } 34 | 35 | #[cfg(test)] 36 | mod test { 37 | use 
super::Mp4DimensionsExtractor; 38 | use crate::util::dimensions::{test::test_successful, Dimensions}; 39 | use std::error::Error; 40 | 41 | #[test] 42 | fn test_success() -> Result<(), Box> { 43 | test_successful( 44 | Mp4DimensionsExtractor, 45 | "video/rust-logo-blk.mp4", 46 | Some(Dimensions { 47 | width: 144, 48 | height: 144, 49 | }), 50 | ) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/util/dimensions/svg.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::path::Path; 3 | 4 | use svg::node::element::tag::SVG; 5 | use svg::parser::Event; 6 | 7 | use crate::util::dimensions::DimensionsExtractor; 8 | use crate::util::Dimensions; 9 | 10 | pub struct SvgDimensionsExtractor; 11 | 12 | impl SvgDimensionsExtractor {} 13 | 14 | impl DimensionsExtractor for SvgDimensionsExtractor { 15 | fn supports_ext(&self, ext_lowercase: &str) -> bool { 16 | "svg" == ext_lowercase 17 | } 18 | 19 | fn try_read_dimensions(&self, path: &Path) -> io::Result> { 20 | let mut content = String::new(); 21 | for event in svg::open(path, &mut content).unwrap() { 22 | if let Event::Tag(SVG, _, attributes) = event { 23 | if let (Some(width_value), Some(height_value)) = 24 | (attributes.get("height"), attributes.get("width")) 25 | { 26 | let width = width_value 27 | .parse::() 28 | .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; 29 | let height = height_value 30 | .parse::() 31 | .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; 32 | return Ok(Some(Dimensions { width, height })); 33 | } 34 | } 35 | } 36 | 37 | Ok(None) 38 | } 39 | } 40 | 41 | #[cfg(test)] 42 | mod test { 43 | use super::SvgDimensionsExtractor; 44 | use crate::util::dimensions::{test::test_fail, test::test_successful, Dimensions}; 45 | use std::error::Error; 46 | use std::io; 47 | 48 | #[test] 49 | fn test_success() -> Result<(), Box> { 50 | test_successful( 51 | 
SvgDimensionsExtractor, 52 | "image/rust-logo-blk.svg", 53 | Some(Dimensions { 54 | width: 144, 55 | height: 144, 56 | }), 57 | ) 58 | } 59 | 60 | #[test] 61 | fn test_corrupted() -> Result<(), Box> { 62 | test_fail( 63 | SvgDimensionsExtractor, 64 | "image/rust-logo-blk_corrupted.svg", 65 | io::ErrorKind::InvalidData, 66 | ) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/util/duration/mkv.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io; 3 | use std::path::Path; 4 | 5 | use matroska::MatroskaError; 6 | use mp3_metadata::MP3Metadata; 7 | 8 | use crate::util::duration::DurationExtractor; 9 | use crate::util::Duration; 10 | 11 | pub struct MkvDurationExtractor; 12 | 13 | impl DurationExtractor for MkvDurationExtractor { 14 | fn supports_ext(&self, ext_lowercase: &str) -> bool { 15 | "mkv" == ext_lowercase || "webm" == ext_lowercase 16 | } 17 | 18 | fn try_read_duration( 19 | &self, 20 | path: &Path, 21 | _: &Option, 22 | ) -> io::Result> { 23 | let fd = File::open(path)?; 24 | let matroska = matroska::Matroska::open(fd).map_err(|err| match err { 25 | MatroskaError::Io(io) => io, 26 | MatroskaError::UTF8(utf8) => io::Error::new(io::ErrorKind::InvalidData, utf8), 27 | e => io::Error::new(io::ErrorKind::InvalidData, e), 28 | })?; 29 | 30 | match matroska.info.duration { 31 | Some(duration) => { 32 | return Ok(Some(Duration { 33 | length: duration.as_secs() as usize, 34 | })) 35 | } 36 | None => return Ok(None), 37 | } 38 | } 39 | } 40 | 41 | #[cfg(test)] 42 | mod test { 43 | use super::MkvDurationExtractor; 44 | use crate::util::duration::DurationExtractor; 45 | use crate::util::Duration; 46 | use std::error::Error; 47 | use std::path::PathBuf; 48 | 49 | #[test] 50 | fn test_success() -> Result<(), Box> { 51 | let path_string = 52 | std::env::var("CARGO_MANIFEST_DIR")? 
+ "/resources/test/" + "video/rust-logo-blk.mkv"; 53 | let path = PathBuf::from(path_string); 54 | assert_eq!( 55 | MkvDurationExtractor.try_read_duration(&path, &None)?, 56 | Some(Duration { length: 1 }), 57 | ); 58 | Ok(()) 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/util/duration/mod.rs: -------------------------------------------------------------------------------- 1 | mod mkv; 2 | mod mp3; 3 | mod mp4; 4 | mod wav; 5 | 6 | use std::io; 7 | use std::path::Path; 8 | 9 | use mp3_metadata::MP3Metadata; 10 | 11 | use mkv::MkvDurationExtractor; 12 | use mp3::Mp3DurationExtractor; 13 | use mp4::Mp4DurationExtractor; 14 | use wav::WavDurationExtractor; 15 | 16 | #[derive(PartialEq, Eq, Clone, Debug)] 17 | pub struct Duration { 18 | pub length: usize, 19 | } 20 | 21 | pub trait DurationExtractor { 22 | fn supports_ext(&self, ext_lowercase: &str) -> bool; 23 | fn try_read_duration( 24 | &self, 25 | path: &Path, 26 | mp3_metadata: &Option, 27 | ) -> io::Result>; 28 | } 29 | 30 | const EXTRACTORS: [&dyn DurationExtractor; 4] = [ 31 | &Mp3DurationExtractor, 32 | &Mp4DurationExtractor, 33 | &MkvDurationExtractor, 34 | &WavDurationExtractor, 35 | ]; 36 | 37 | pub fn get_duration>( 38 | path: T, 39 | mp3_metadata: &Option, 40 | ) -> Option { 41 | let path_ref = path.as_ref(); 42 | let extension = path_ref.extension()?.to_str()?; 43 | 44 | EXTRACTORS 45 | .iter() 46 | .find(|extractor| extractor.supports_ext(&extension.to_lowercase())) 47 | .and_then(|extractor| { 48 | extractor 49 | .try_read_duration(path_ref, mp3_metadata) 50 | .unwrap_or_default() 51 | }) 52 | } 53 | -------------------------------------------------------------------------------- /src/util/duration/mp3.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::path::Path; 3 | 4 | use mp3_metadata::MP3Metadata; 5 | 6 | use crate::util::duration::DurationExtractor; 7 | use 
crate::util::Duration; 8 | 9 | pub struct Mp3DurationExtractor; 10 | 11 | impl DurationExtractor for Mp3DurationExtractor { 12 | fn supports_ext(&self, ext_lowercase: &str) -> bool { 13 | "mp3" == ext_lowercase 14 | } 15 | 16 | fn try_read_duration( 17 | &self, 18 | _: &Path, 19 | mp3_metadata: &Option, 20 | ) -> io::Result> { 21 | match mp3_metadata { 22 | Some(mp3_metadata) => Ok(Some(Duration { 23 | length: mp3_metadata.duration.as_secs() as usize, 24 | })), 25 | None => Ok(None), 26 | } 27 | } 28 | } 29 | 30 | #[cfg(test)] 31 | mod test { 32 | use crate::util::duration::DurationExtractor; 33 | use crate::util::duration::Mp3DurationExtractor; 34 | use crate::util::Duration; 35 | use crate::util::MP3Metadata; 36 | use crate::PathBuf; 37 | use std::error::Error; 38 | 39 | #[test] 40 | fn test_success() -> Result<(), Box> { 41 | let path_string = 42 | std::env::var("CARGO_MANIFEST_DIR")? + "/resources/test/" + "audio/silent-35s.mp3"; 43 | let path = PathBuf::from(path_string); 44 | 45 | let mp3_metadata = |path: PathBuf| -> Option { 46 | match mp3_metadata::read_from_file(path) { 47 | Ok(mp3_meta) => Some(mp3_meta), 48 | _ => None, 49 | } 50 | }(path.clone()); 51 | 52 | assert_eq!( 53 | Mp3DurationExtractor.try_read_duration(&path, &mp3_metadata)?, 54 | Some(Duration { length: 35 }), 55 | ); 56 | Ok(()) 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/util/duration/mp4.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io; 3 | use std::io::Read; 4 | use std::path::Path; 5 | 6 | use mp3_metadata::MP3Metadata; 7 | 8 | use crate::util::duration::DurationExtractor; 9 | use crate::util::Duration; 10 | 11 | pub struct Mp4DurationExtractor; 12 | 13 | impl DurationExtractor for Mp4DurationExtractor { 14 | fn supports_ext(&self, ext_lowercase: &str) -> bool { 15 | "mp4" == ext_lowercase 16 | } 17 | 18 | fn try_read_duration( 19 | &self, 20 | path: 
impl DurationExtractor for WavDurationExtractor {
    // WAV is the only extension this extractor handles.
    fn supports_ext(&self, ext_lowercase: &str) -> bool {
        "wav" == ext_lowercase
    }

    /// Decode the WAV header via `wavers` and report the clip duration,
    /// truncated to a whole number (presumably seconds — the sibling test
    /// expects 15 for `silent.wav`; confirm against the wavers docs).
    /// The pre-parsed MP3 metadata argument is unused for this format.
    /// Header parse failures surface as `InvalidData` I/O errors.
    fn try_read_duration(
        &self,
        path: &Path,
        _: &Option<MP3Metadata>,
    ) -> io::Result<Option<Duration>> {
        let wav: Wav =
            Wav::from_path(path).map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        Ok(Some(Duration {
            length: wav.duration() as usize,
        }))
    }
}
/// True when `s` contains shell-glob metacharacters (`*` or `?`).
pub fn is_glob(s: &str) -> bool {
    // char patterns for both checks (the old code mixed a &str pattern
    // `"*"` with a char pattern `'?'`; chars are the idiomatic, cheaper form).
    s.contains('*') || s.contains('?')
}
#[test]
fn test_is_glob_with_no_glob_chars() {
    // Plain names, paths, and the empty string carry no glob metacharacters.
    for plain in &["file.txt", "path/to/file.txt", ""] {
        assert!(!is_glob(plain));
    }
}
test_convert_like_to_pattern_mixed() { 121 | let pattern = convert_like_to_pattern("file-%.txt"); 122 | assert_eq!(pattern, "^(?i)file-.*\\.txt$"); 123 | } 124 | 125 | #[test] 126 | fn test_convert_like_to_pattern_question_mark() { 127 | let pattern = convert_like_to_pattern("file?.txt"); 128 | assert_eq!(pattern, "^(?i)file.?\\.txt$"); 129 | } 130 | 131 | #[test] 132 | fn test_convert_like_to_pattern_special_chars() { 133 | let pattern = convert_like_to_pattern("file*.txt"); 134 | assert_eq!(pattern, "^(?i)file\\*\\.txt$"); 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /src/util/japanese.rs: -------------------------------------------------------------------------------- 1 | pub fn contains_japanese(s: &str) -> bool { 2 | s.chars().any(wana_kana::utils::is_char_japanese) 3 | } 4 | 5 | pub fn contains_hiragana(s: &str) -> bool { 6 | s.chars().any(wana_kana::utils::is_char_hiragana) 7 | } 8 | 9 | pub fn contains_katakana(s: &str) -> bool { 10 | s.chars().any(wana_kana::utils::is_char_katakana) 11 | } 12 | 13 | pub fn contains_kana(s: &str) -> bool { 14 | s.chars().any(wana_kana::utils::is_char_kana) 15 | } 16 | 17 | pub fn contains_kanji(s: &str) -> bool { 18 | s.chars().any(wana_kana::utils::is_char_kanji) 19 | } 20 | -------------------------------------------------------------------------------- /src/util/top_n.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | pub struct TopN { 4 | limit: Option, 5 | count: u32, 6 | echelons: BTreeMap>, 7 | } 8 | 9 | impl TopN { 10 | pub fn new(limit: u32) -> TopN { 11 | debug_assert_ne!(limit, 0); 12 | TopN { 13 | limit: Some(limit), 14 | count: 0, 15 | echelons: BTreeMap::new(), 16 | } 17 | } 18 | 19 | pub fn limitless() -> TopN { 20 | TopN { 21 | limit: None, 22 | count: 0, 23 | echelons: BTreeMap::new(), 24 | } 25 | } 26 | 27 | pub fn insert(&mut self, k: K, v: V) -> Option 28 | where 29 | K: 
Clone, 30 | { 31 | self.count += 1; 32 | self.echelons.entry(k).or_default().push(v); 33 | 34 | if let Some(limit) = self.limit { 35 | if limit < self.count { 36 | self.count -= 1; 37 | 38 | let last_key = self.echelons.iter().next_back().unwrap().0.clone(); 39 | 40 | let mut last_echelon = self.echelons.remove(&last_key).unwrap(); 41 | let popped = last_echelon.pop().unwrap(); 42 | if !last_echelon.is_empty() { 43 | self.echelons.insert(last_key, last_echelon); 44 | } 45 | return Some(popped); 46 | } 47 | } 48 | None 49 | } 50 | 51 | // see: https://github.com/rust-lang/rfcs/blob/master/text/1522-conservative-impl-trait.md 52 | // pub fn values(&self) -> impl Iterator { 53 | // self.echelons.values().flat_map(|v| v) 54 | // } 55 | pub fn values(&self) -> Vec 56 | where 57 | V: Clone, 58 | { 59 | self.echelons 60 | .values() 61 | .flat_map(|v| v.iter().cloned()) 62 | .collect() 63 | } 64 | } 65 | 66 | #[cfg(test)] 67 | mod tests { 68 | use super::*; 69 | 70 | #[test] 71 | fn test_insert_one() { 72 | let mut top_n = TopN::new(5); 73 | top_n.insert("asdf", 1); 74 | } 75 | 76 | #[test] 77 | fn test_insert_to_limit() { 78 | let mut top_n = TopN::new(2); 79 | top_n.insert("asdf", 1); 80 | top_n.insert("xyz", 2); 81 | } 82 | 83 | #[test] 84 | fn test_insert_past_limit_bigger_discarded() { 85 | let mut top_n = TopN::new(2); 86 | top_n.insert("a", 1); 87 | top_n.insert("b", 2); 88 | top_n.insert("z", -1); 89 | assert_eq!(top_n.values(), vec![1, 2]); 90 | } 91 | 92 | #[test] 93 | fn test_insert_past_limit_equal_discarded() { 94 | let mut top_n = TopN::new(2); 95 | top_n.insert("a", 1); 96 | top_n.insert("b", 2); 97 | top_n.insert("b", -1); 98 | assert_eq!(top_n.values(), vec![1, 2]); 99 | } 100 | 101 | #[test] 102 | fn test_insert_past_limit_smaller_last_one_discarded() { 103 | let mut top_n = TopN::new(2); 104 | top_n.insert("b", "second"); 105 | top_n.insert("c", "last"); 106 | top_n.insert("a", "first"); 107 | assert_eq!(top_n.values(), vec!["first", "second"]); 108 | } 
/// Type tag describing which interpretation of a `Variant` is canonical.
#[derive(Clone, Debug)]
pub enum VariantType {
    String,
    Int,
    Float,
    Bool,
    DateTime,
}

/// A loosely-typed value that carries its textual form plus lazily/eagerly
/// filled typed views; the `to_*` accessors fall back to parsing
/// `string_value` when the corresponding slot is `None`.
#[derive(Debug)]
pub struct Variant {
    value_type: VariantType,
    // Canonical textual representation; always populated by constructors.
    string_value: String,
    int_value: Option<i64>,
    float_value: Option<f64>,
    bool_value: Option<bool>,
    // Datetimes are kept as a [from, to] pair — presumably so a partial
    // date (e.g. a day) can denote a range; TODO confirm with parse_datetime.
    dt_from: Option<NaiveDateTime>,
    dt_to: Option<NaiveDateTime>,
}
&VariantType { 41 | &self.value_type 42 | } 43 | 44 | pub fn from_int(value: i64) -> Variant { 45 | Variant { 46 | value_type: VariantType::Int, 47 | string_value: format!("{}", value), 48 | int_value: Some(value), 49 | float_value: Some(value as f64), 50 | bool_value: None, 51 | dt_from: None, 52 | dt_to: None, 53 | } 54 | } 55 | 56 | pub fn from_float(value: f64) -> Variant { 57 | Variant { 58 | value_type: VariantType::Float, 59 | string_value: format!("{}", value), 60 | int_value: Some(value as i64), 61 | float_value: Some(value), 62 | bool_value: None, 63 | dt_from: None, 64 | dt_to: None, 65 | } 66 | } 67 | 68 | pub fn from_string(value: &String) -> Variant { 69 | Variant { 70 | value_type: VariantType::String, 71 | string_value: value.to_owned(), 72 | int_value: None, 73 | float_value: None, 74 | bool_value: None, 75 | dt_from: None, 76 | dt_to: None, 77 | } 78 | } 79 | 80 | pub fn from_signed_string(value: &String, minus: bool) -> Variant { 81 | let string_value = match minus { 82 | true => { 83 | let mut result = String::from("-"); 84 | result += &value.to_owned(); 85 | 86 | result 87 | } 88 | false => value.to_owned(), 89 | }; 90 | 91 | Variant { 92 | value_type: VariantType::String, 93 | string_value, 94 | int_value: None, 95 | float_value: None, 96 | bool_value: None, 97 | dt_from: None, 98 | dt_to: None, 99 | } 100 | } 101 | 102 | pub fn from_bool(value: bool) -> Variant { 103 | Variant { 104 | value_type: VariantType::Bool, 105 | string_value: match value { 106 | true => String::from("true"), 107 | _ => String::from("false"), 108 | }, 109 | int_value: match value { 110 | true => Some(1), 111 | _ => Some(0), 112 | }, 113 | float_value: None, 114 | bool_value: Some(value), 115 | dt_from: None, 116 | dt_to: None, 117 | } 118 | } 119 | 120 | pub fn from_datetime(value: NaiveDateTime) -> Variant { 121 | Variant { 122 | value_type: VariantType::DateTime, 123 | string_value: format_datetime(&value), 124 | int_value: Some(0), 125 | float_value: None, 126 | 
bool_value: None, 127 | dt_from: Some(value), 128 | dt_to: Some(value), 129 | } 130 | } 131 | 132 | pub fn to_string(&self) -> String { 133 | self.string_value.to_owned() 134 | } 135 | 136 | pub fn to_int(&self) -> i64 { 137 | match self.int_value { 138 | Some(i) => i, 139 | None => { 140 | if self.float_value.is_some() { 141 | return self.float_value.unwrap() as i64; 142 | } 143 | 144 | let int_value = self.string_value.parse::(); 145 | match int_value { 146 | Ok(i) => i as i64, 147 | _ => match parse_filesize(&self.string_value) { 148 | Some(size) => size as i64, 149 | _ => 0, 150 | }, 151 | } 152 | } 153 | } 154 | } 155 | 156 | pub fn to_float(&self) -> f64 { 157 | if self.float_value.is_some() { 158 | return self.float_value.unwrap(); 159 | } 160 | 161 | match self.int_value { 162 | Some(i) => i as f64, 163 | None => { 164 | let float_value = self.string_value.parse::(); 165 | match float_value { 166 | Ok(f) => f, 167 | _ => match parse_filesize(&self.string_value) { 168 | Some(size) => size as f64, 169 | _ => 0.0, 170 | }, 171 | } 172 | } 173 | } 174 | } 175 | 176 | pub fn to_bool(&self) -> bool { 177 | if let Some(value) = self.bool_value { 178 | value 179 | } else if !self.string_value.is_empty() { 180 | str_to_bool(&self.string_value).expect("Can't parse boolean value") 181 | } else if let Some(int_value) = self.int_value { 182 | int_value == 1 183 | } else if let Some(float_value) = self.float_value { 184 | float_value == 1.0 185 | } else { 186 | false 187 | } 188 | } 189 | 190 | pub fn to_datetime(&self) -> (NaiveDateTime, NaiveDateTime) { 191 | if self.dt_from.is_none() { 192 | match parse_datetime(&self.string_value) { 193 | Ok((dt_from, dt_to)) => { 194 | return (dt_from, dt_to); 195 | } 196 | _ => error_exit("Can't parse datetime", &self.string_value), 197 | } 198 | } 199 | 200 | (self.dt_from.unwrap(), self.dt_to.unwrap()) 201 | } 202 | } 203 | 204 | impl Display for Variant { 205 | fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { 206 | 
/// Growable in-memory `String` sink implementing `io::Write`, used to
/// capture output that would otherwise go to a writer.
pub struct WritableBuffer {
    buf: String,
}

impl WritableBuffer {
    /// Create an empty buffer.
    pub fn new() -> WritableBuffer {
        WritableBuffer { buf: String::new() }
    }
}

impl From<WritableBuffer> for String {
    /// Consume the buffer and yield the accumulated text.
    fn from(wb: WritableBuffer) -> Self {
        wb.buf
    }
}

impl Write for WritableBuffer {
    /// Append `buf` to the internal string; `buf` must be valid UTF-8 or
    /// the write fails with `InvalidInput`.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        // Validate in place: the old code copied `buf` into a fresh Vec via
        // `String::from_utf8(buf.into())` and then routed the append through
        // fallible `fmt::Write`; `str::from_utf8` borrows, and `push_str`
        // cannot fail, so the extra allocation and error branch disappear.
        match std::str::from_utf8(buf) {
            Ok(s) => {
                self.buf.push_str(s);
                Ok(buf.len())
            }
            Err(_) => Err(io::ErrorKind::InvalidInput.into()),
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}