├── gh_rsa.enc ├── src ├── hashing │ ├── md5.rs │ ├── crc32c.rs │ ├── blake2b.rs │ ├── blake2s.rs │ ├── blake3.rs │ ├── crc16.rs │ ├── whirlpool.rs │ ├── crc8.rs │ ├── blake.rs │ ├── xor8.rs │ ├── md6128_256_512.rs │ ├── sha3256_3512.rs │ ├── sha1_2256_2224_2384_2512.rs │ ├── crc32_64.rs │ └── mod.rs ├── util.rs ├── error.rs ├── main.rs ├── ops │ ├── compare.rs │ ├── mod.rs │ └── write.rs ├── algorithms.rs ├── lib.rs └── options.rs ├── .gitignore ├── rustfmt.toml ├── tests ├── error.rs └── algorithms.rs ├── checksums.sublime-project ├── README.md ├── LICENSE ├── Cargo.toml ├── appveyor.yml ├── .travis.yml └── checksums.md /gh_rsa.enc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nabijaczleweli/checksums/HEAD/gh_rsa.enc -------------------------------------------------------------------------------- /src/hashing/md5.rs: -------------------------------------------------------------------------------- 1 | use self::super::hash_string; 2 | use md5::{Md5, Digest}; 3 | 4 | hash_func_write!(Md5::new(), 5 | |ctx: Md5| hash_string(&*ctx.finalize())); 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | !.travis.yml 4 | !gh_rsa.enc 5 | !appveyor.yml 6 | !LICENSE 7 | !Cargo.toml 8 | !rustfmt.toml 9 | !*.sublime-project 10 | !*.md 11 | !src 12 | !src/** 13 | !tests 14 | !tests/** 15 | -------------------------------------------------------------------------------- /src/hashing/crc32c.rs: -------------------------------------------------------------------------------- 1 | use crc32c::crc32c_append; 2 | 3 | 4 | hash_func!(0u32, 5 | |state_crc: &mut u32, buffer: &[u8]| *state_crc = crc32c_append(*state_crc, buffer), 6 | |state_crc: u32| format!("{:08X}", state_crc)); 7 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | max_width = 160 2 | ideal_width = 128 3 | fn_call_width = 96 4 | fn_args_paren_newline = false 5 | fn_args_density = "Compressed" 6 | struct_trailing_comma = "Always" 7 | wrap_comments = true 8 | -------------------------------------------------------------------------------- /src/hashing/blake2b.rs: -------------------------------------------------------------------------------- 1 | use self::super::hash_string; 2 | use blake2::{Blake2b, Digest}; 3 | 4 | hash_func!(Blake2b::new(), 5 | |blake: &mut Blake2b, buffer: &[u8]| blake.update(buffer), 6 | |blake: Blake2b| hash_string(&blake.finalize())); 7 | -------------------------------------------------------------------------------- /src/hashing/blake2s.rs: -------------------------------------------------------------------------------- 1 | use self::super::hash_string; 2 | use blake2::{Blake2s, Digest}; 3 | 4 | hash_func!(Blake2s::new(), 5 | |blake: &mut Blake2s, buffer: &[u8]| blake.update(buffer), 6 | |blake: Blake2s| hash_string(&blake.finalize())); 7 | -------------------------------------------------------------------------------- /src/hashing/blake3.rs: -------------------------------------------------------------------------------- 1 | use self::super::hash_string; 2 | 3 | hash_func!(blake3::Hasher::new(), 4 | |blake: &mut blake3::Hasher, buffer: &[u8]| { blake.update(buffer); }, 5 | |blake: blake3::Hasher| hash_string(blake.finalize().as_bytes())); 6 | -------------------------------------------------------------------------------- /src/hashing/crc16.rs: -------------------------------------------------------------------------------- 1 | use crc16::{State, ARC}; 2 | use ::Algorithm; 3 | 4 | 5 | hash_func!(State::new(), 6 | |state: &mut State, buffer: &[u8]| state.update(buffer), 7 | |state: State| format!("{:01$X}", state.get(), Algorithm::CRC16.hexlen())); 8 | -------------------------------------------------------------------------------- /src/hashing/whirlpool.rs: -------------------------------------------------------------------------------- 1 | use self::super::hash_string; 2 | use whirlpool::{Whirlpool, Digest}; 3 | 4 | hash_func!(Whirlpool::new(), 5 | |whirlpool: &mut Whirlpool, buffer: &[u8]| whirlpool.update(buffer), 6 | |whirlpool: Whirlpool| hash_string(&whirlpool.finalize())); 7 | -------------------------------------------------------------------------------- /src/hashing/crc8.rs: -------------------------------------------------------------------------------- 1 | use self::super::hash_string; 2 | use crc8::Crc8; 3 | 4 | 5 | hash_func!((Crc8::create_lsb(0x9b), 0u8), 6 | |state_crc: &mut (Crc8, u8), buffer: &[u8]| state_crc.1 = state_crc.0.calc(buffer, buffer.len() as i32, state_crc.1), 7 | |state_crc: (Crc8, u8)| hash_string(&[state_crc.1])); 8 | -------------------------------------------------------------------------------- /src/hashing/blake.rs: -------------------------------------------------------------------------------- 1 | use self::super::hash_string; 2 | use blake::Blake; 3 | 4 | 5 | hash_func_write!(Blake::new(512).unwrap(), 6 | |mut blake: Blake| { 7 | let mut result = [0; 64]; 8 | blake.finalise(&mut result); 9 | hash_string(&result) 10 | }); 11 | -------------------------------------------------------------------------------- /src/hashing/xor8.rs: -------------------------------------------------------------------------------- 1 | use std::io::{BufReader, Read}; 2 | use self::super::hash_string; 3 | 4 | 5 | // Pseudocode: https://en.wikipedia.org/wiki/Longitudinal_redundancy_check 6 | pub fn hash(reader: &mut R) -> String { 7 | let mut lrc = 0u16; 8 | for b in BufReader::new(reader).bytes() { 9 | lrc = (lrc + b.unwrap() as u16) & 0xFF; 10 | } 11 | let lrc = (((lrc ^ 0xFF) + 1) & 0xFF) as u8; 12 | 13 | hash_string(&[lrc]) 14 | } 15 | -------------------------------------------------------------------------------- /src/hashing/md6128_256_512.rs: -------------------------------------------------------------------------------- 1 | macro_rules! make_md_mod { 2 | ($modname:ident, $bytesize:expr) => { 3 | pub mod $modname { 4 | use self::super::super::hash_string; 5 | use md6::Md6; 6 | 7 | 8 | hash_func_write!(Md6::new($bytesize * 8).unwrap(), 9 | |mut md6: Md6| { 10 | let mut result = [0; $bytesize]; 11 | md6.finalise(&mut result); 12 | hash_string(&result) 13 | }); 14 | } 15 | } 16 | } 17 | 18 | 19 | make_md_mod!(md6128, 16); 20 | make_md_mod!(md6256, 32); 21 | make_md_mod!(md6512, 64); 22 | -------------------------------------------------------------------------------- /src/hashing/sha3256_3512.rs: -------------------------------------------------------------------------------- 1 | macro_rules! make_sha_mod { 2 | ($modname:ident, $len:expr, $keccak_new:expr) => { 3 | pub mod $modname { 4 | use self::super::super::hash_string; 5 | use tiny_keccak::{Hasher, Sha3}; 6 | 7 | 8 | hash_func!($keccak_new(), 9 | |keccak: &mut Sha3, buffer: &[u8]| keccak.update(buffer), 10 | |keccak: Sha3| { 11 | let mut output = [0u8; $len]; 12 | keccak.finalize(&mut output); 13 | hash_string(&output) 14 | }); 15 | } 16 | } 17 | } 18 | 19 | 20 | make_sha_mod!(sha3256, 32, Sha3::v256); 21 | make_sha_mod!(sha3512, 64, Sha3::v512); 22 | -------------------------------------------------------------------------------- /src/hashing/sha1_2256_2224_2384_2512.rs: -------------------------------------------------------------------------------- 1 | macro_rules! make_sha_mod { 2 | ($modname:ident, $tpe:ty, $sha_new:expr) => { 3 | pub mod $modname { 4 | use shaman::digest::Digest; 5 | use shaman; 6 | 7 | 8 | hash_func!($sha_new(), 9 | |sha: &mut $tpe, buffer: &[u8]| sha.input(buffer), 10 | |mut sha: $tpe| sha.result_str().to_uppercase()); 11 | } 12 | } 13 | } 14 | 15 | 16 | make_sha_mod!(sha1, shaman::sha1::Sha1, shaman::sha1::Sha1::new); 17 | make_sha_mod!(sha2224, shaman::sha2::Sha224, shaman::sha2::Sha224::new); 18 | make_sha_mod!(sha2256, shaman::sha2::Sha256, shaman::sha2::Sha256::new); 19 | make_sha_mod!(sha2384, shaman::sha2::Sha384, shaman::sha2::Sha384::new); 20 | make_sha_mod!(sha2512, shaman::sha2::Sha512, shaman::sha2::Sha512::new); 21 | -------------------------------------------------------------------------------- /tests/error.rs: -------------------------------------------------------------------------------- 1 | extern crate checksums; 2 | 3 | use checksums::Error; 4 | 5 | 6 | #[test] 7 | fn exit_value() { 8 | assert_eq!(Error::NoError.exit_value(), 0); 9 | assert_eq!(Error::OptionParsingError.exit_value(), 1); 10 | assert_eq!(Error::HashLengthDiffers.exit_value(), 2); 11 | assert_eq!(Error::HashesFileParsingFailure.exit_value(), 3); 12 | assert_eq!(Error::NFilesDiffer(1).exit_value(), 4); 13 | assert_eq!(Error::NFilesDiffer(10).exit_value(), 13); 14 | } 15 | 16 | #[test] 17 | fn from_i32() { 18 | assert_eq!(Error::from(0), Error::NoError); 19 | assert_eq!(Error::from(1), Error::OptionParsingError); 20 | assert_eq!(Error::from(2), Error::HashLengthDiffers); 21 | assert_eq!(Error::from(3), Error::HashesFileParsingFailure); 22 | assert_eq!(Error::from(4), Error::NFilesDiffer(1)); 23 | assert_eq!(Error::from(13), Error::NFilesDiffer(10)); 24 | } 25 | -------------------------------------------------------------------------------- /checksums.sublime-project: -------------------------------------------------------------------------------- 1 | { 2 | "build_systems": 3 | [ 4 | { 5 | "working_dir": "$project_path", 6 | "shell_cmd": "cargo build --color always && cargo test --color always", 7 | "name": "Build checksums", 8 | 9 | "target": "ansi_color_build", 10 | "syntax": "Packages/ANSIescape/ANSI.tmLanguage" 11 | }, 12 | { 13 | "working_dir": "$project_path", 14 | "shell_cmd": "cargo doc --color always", 15 | "name": "Document checksums", 16 | 17 | "target": "ansi_color_build", 18 | "syntax": "Packages/ANSIescape/ANSI.tmLanguage" 19 | } 20 | ], 21 | "folders": 22 | [ 23 | { 24 | "follow_symlinks": true, 25 | "name": "Source", 26 | "path": "src" 27 | }, 28 | { 29 | "follow_symlinks": true, 30 | "name": "Tests", 31 | "path": "tests" 32 | }, 33 | { 34 | "follow_symlinks": true, 35 | "name": "Build scripts", 36 | "path": ".", 37 | "file_include_patterns": ["Cargo.*", "*.yml"], 38 | "folder_exclude_patterns": ["*"] 39 | }, 40 | ] 41 | } 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # checksums [![TravisCI build status](https://app.travis-ci.com/nabijaczleweli/checksums.svg?branch=master)](https://travis-ci.org/nabijaczleweli/checksums) [![AppVeyorCI build status](https://ci.appveyor.com/api/projects/status/cspjknvfow5gfro0/branch/master?svg=true)](https://ci.appveyor.com/project/nabijaczleweli/checksums/branch/master) [![Licence](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) [![Crates.io version](https://img.shields.io/crates/v/checksums)](https://crates.io/crates/checksums) 2 | Tool for making/verifying checksums of directory trees. 3 | 4 | Use the generated checksums to automatically verify file/directory tree 5 | correctness. 6 | 7 | ## [Manpage](https://rawcdn.githack.com/nabijaczleweli/checksums/man/checksums.1.html) 8 | ## [Docs](https://rawcdn.githack.com/nabijaczleweli/checksums/doc/checksums/index.html) 9 | 10 | ## Special thanks 11 | 12 | To all who support further development on Patreon, in particular: 13 | 14 | * ThePhD 15 | * Embark Studios 16 | * Jasper Bekkers 17 | -------------------------------------------------------------------------------- /src/hashing/crc32_64.rs: -------------------------------------------------------------------------------- 1 | macro_rules! make_crc_mod { 2 | ($modname:ident, $algo:expr, $digest:ty, $digest_new:expr, $hasher_write:expr, $hasher_sum:expr, $poly:expr) => { 3 | pub mod $modname { 4 | use ::Algorithm; 5 | use crc; 6 | 7 | 8 | hash_func!($digest_new($poly), 9 | |digest: &mut $digest, buffer: &[u8]| $hasher_write(digest, buffer), 10 | |digest: $digest| format!("{:01$X}", $hasher_sum(&digest), $algo.hexlen())); 11 | } 12 | } 13 | } 14 | 15 | 16 | make_crc_mod!(crc32, 17 | Algorithm::CRC32, 18 | crc::crc32::Digest, 19 | crc::crc32::Digest::new, 20 | crc::crc32::Hasher32::write, 21 | crc::crc32::Hasher32::sum32, 22 | crc::crc32::IEEE); 23 | make_crc_mod!(crc64, 24 | Algorithm::CRC64, 25 | crc::crc64::Digest, 26 | crc::crc64::Digest::new, 27 | crc::crc64::Hasher64::write, 28 | crc::crc64::Hasher64::sum64, 29 | crc::crc64::ISO); 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 nabijaczleweli 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/util.rs: -------------------------------------------------------------------------------- 1 | //! Module containing various utility functions 2 | 3 | 4 | use std::path::Path; 5 | use std::iter; 6 | 7 | 8 | /// Merges two `Vec`s. 9 | /// 10 | /// # Examples 11 | /// 12 | /// ``` 13 | /// let vec1 = vec![0]; 14 | /// let vec2 = vec![1]; 15 | /// 16 | /// assert_eq!(checksums::util::vec_merge(vec1, vec2), vec![0, 1]); 17 | /// ``` 18 | pub fn vec_merge(mut lhs: Vec, rhs: Vec) -> Vec { 19 | lhs.extend(rhs); 20 | lhs 21 | } 22 | 23 | /// Create a string consisting of `n` repetitions of `what`. 24 | /// 25 | /// # Examples 26 | /// 27 | /// ``` 28 | /// assert_eq!(checksums::util::mul_str("DIE! ", 3), "DIE! DIE! DIE! ".to_string()); 29 | /// ``` 30 | pub fn mul_str(what: &str, n: usize) -> String { 31 | iter::repeat(what).take(n).collect() 32 | } 33 | 34 | /// Create a user-usable path to `what` from `prefix`. 35 | /// 36 | /// # Examples 37 | /// 38 | /// ``` 39 | /// # use std::path::Path; 40 | /// assert_eq!(checksums::util::relative_name(Path::new("/usr"), Path::new("/usr/bin/checksums")), 41 | /// "bin/checksums".to_string()); 42 | /// ``` 43 | pub fn relative_name(prefix: &Path, what: &Path) -> String { 44 | what.strip_prefix(prefix).unwrap().to_str().unwrap().replace("\\", "/") 45 | } 46 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | /// Enum representing each way the appication can fail. 2 | #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] 3 | pub enum Error { 4 | /// No errors occured, everything executed correctly. 5 | NoError, 6 | /// Parsing of command-line options failed. 7 | OptionParsingError, 8 | /// Selected and saved hash lengths differ. 9 | HashLengthDiffers, 10 | /// Parsing the hashes file failed. 11 | HashesFileParsingFailure, 12 | /// The specified amount of files do not match. 13 | NFilesDiffer(i32), 14 | } 15 | 16 | impl Error { 17 | /// Get the executable exit value from an `Error` instance. 18 | pub fn exit_value(&self) -> i32 { 19 | match *self { 20 | Error::NoError => 0, 21 | Error::OptionParsingError => 1, 22 | Error::HashLengthDiffers => 2, 23 | Error::HashesFileParsingFailure => 3, 24 | Error::NFilesDiffer(i) => i + 3, 25 | } 26 | } 27 | } 28 | 29 | impl From for Error { 30 | fn from(i: i32) -> Self { 31 | match i { 32 | 0 => Error::NoError, 33 | 1 => Error::OptionParsingError, 34 | 2 => Error::HashLengthDiffers, 35 | 3 => Error::HashesFileParsingFailure, 36 | i => Error::NFilesDiffer(i - 3), 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "checksums" 3 | description = "Tool for making/verifying checksums of directory trees" 4 | documentation = "https://rawcdn.githack.com/nabijaczleweli/checksums/doc/checksums/index.html" 5 | repository = "https://github.com/nabijaczleweli/checksums" 6 | readme = "README.md" 7 | keywords = ["checksums", "checksum", "directory", "verification", "hash"] 8 | categories = ["authentication", "filesystem"] 9 | license = "MIT" 10 | # Remember to also update in appveyor.yml 11 | version = "0.9.0" 12 | # Remember to also update in checksums.md 13 | authors = ["nabijaczleweli ", 14 | "Zachary Dremann ", 15 | "Chris Moore", 16 | "Daniel Alley ", 17 | "Paul Bragin "] 18 | exclude = ["*.enc"] 19 | 20 | 21 | [dependencies] 22 | futures-cpupool = "0.1" 23 | once_cell = "1.7" 24 | tabwriter = "1.2" 25 | whirlpool = "0.9" 26 | num_cpus = "1.13" 27 | futures = "0.1" 28 | walkdir = "2.3" 29 | blake2 = "0.9" 30 | blake3 = "1.2" 31 | crc32c = "0.6" 32 | shaman = "0.1" 33 | blake = "2.0" 34 | crc16 = "0.4" 35 | regex = "1.5" 36 | clap = "2.33" 37 | crc8 = "0.1" 38 | md-5 = "0.9" 39 | crc = "1.8" 40 | pbr = "1.0" 41 | md6 = "2.0" 42 | 43 | [dependencies.tiny-keccak] 44 | version = "2.0" 45 | features = ["sha3"] 46 | 47 | 48 | [[bin]] 49 | name = "checksums" 50 | test = false 51 | doc = false 52 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | extern crate checksums; 2 | 3 | use std::process::exit; 4 | use std::io::{stdout, stderr}; 5 | 6 | 7 | fn main() { 8 | let result = actual_main(); 9 | exit(result); 10 | } 11 | 12 | fn actual_main() -> i32 { 13 | let opts = checksums::Options::parse(); 14 | 15 | let hashes = checksums::ops::create_hashes(&opts.dir, 16 | opts.ignored_files, 17 | opts.algorithm, 18 | opts.depth, 19 | opts.follow_symlinks, 20 | opts.jobs, 21 | stdout(), 22 | &mut stderr()); 23 | if opts.verify { 24 | // Progress bar separator 25 | println!(""); 26 | 27 | match checksums::ops::read_hashes(&mut stderr(), &opts.file) { 28 | Ok(loaded_hashes) => { 29 | let compare_result = checksums::ops::compare_hashes(&opts.file.0, hashes, loaded_hashes); 30 | checksums::ops::write_hash_comparison_results(&mut stdout(), &mut stderr(), compare_result) 31 | } 32 | Err(rval) => rval, 33 | }.exit_value() 34 | } else { 35 | checksums::ops::write_hashes(&opts.file, opts.algorithm, hashes); 36 | 0 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tests/algorithms.rs: -------------------------------------------------------------------------------- 1 | extern crate checksums; 2 | 3 | use self::checksums::Algorithm; 4 | use std::str::FromStr; 5 | 6 | 7 | #[test] 8 | fn from_str() { 9 | for p in &[("sha1", Algorithm::SHA1), 10 | ("sha2", Algorithm::SHA2512), 11 | ("sha2-224", Algorithm::SHA2224), 12 | ("sha2-256", Algorithm::SHA2256), 13 | ("sha2-384", Algorithm::SHA2384), 14 | ("sha2-512", Algorithm::SHA2512), 15 | ("sha2256", Algorithm::SHA2256), 16 | ("sha2512", Algorithm::SHA2512), 17 | ("sha3", Algorithm::SHA3512), 18 | ("sha3-256", Algorithm::SHA3256), 19 | ("sha3-512", Algorithm::SHA3512), 20 | ("sha3256", Algorithm::SHA3256), 21 | ("sha3512", Algorithm::SHA3512), 22 | ("blake", Algorithm::BLAKE), 23 | ("blake2", Algorithm::BLAKE2), 24 | ("blake2", Algorithm::BLAKE2B), 25 | ("blake2b", Algorithm::BLAKE2B), 26 | ("blake2s", Algorithm::BLAKE2S), 27 | ("blake3", Algorithm::BLAKE3), 28 | ("crc64", Algorithm::CRC64), 29 | ("crc32c", Algorithm::CRC32C), 30 | ("crc32", Algorithm::CRC32), 31 | ("crc16", Algorithm::CRC16), 32 | ("crc8", Algorithm::CRC8), 33 | ("md5", Algorithm::MD5), 34 | ("md6-128", Algorithm::MD6128), 35 | ("md6-256", Algorithm::MD6256), 36 | ("md6-512", Algorithm::MD6512), 37 | ("whirlpool", Algorithm::WHIRLPOOL), 38 | ("xor8", Algorithm::XOR8)] { 39 | assert_eq!(Algorithm::from_str(p.0).unwrap(), p.1); 40 | } 41 | } 42 | 43 | #[test] 44 | fn from_str_bad() { 45 | for s in &["asdf2", "sha123", "bla", "crc", "31234"] { 46 | Algorithm::from_str(s).unwrap_err(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | version: 0.9.0-{build} 2 | 3 | branches: 4 | except: 5 | - doc 6 | 7 | skip_tags: false 8 | 9 | platform: x64 10 | configuration: Release 11 | 12 | clone_folder: C:\checksums 13 | 14 | install: 15 | - set PATH=C:\msys64\mingw64\bin;C:\msys64\usr\bin;%PATH%;C:\Users\appveyor\.cargo\bin 16 | # https://www.msys2.org/news/#2020-05-17-32-bit-msys2-no-longer-actively-supported 17 | - curl -SL http://repo.msys2.org/msys/x86_64/msys2-keyring-r21.b39fb11-1-any.pkg.tar.xz -oC:\msys2-keyring.txz 18 | - curl -SL http://repo.msys2.org/msys/x86_64/msys2-keyring-1~20210904-1-any.pkg.tar.zst -oC:\msys2-keyring.tzst 19 | - curl -SL http://repo.msys2.org/msys/x86_64/zstd-1.4.4-2-x86_64.pkg.tar.xz -oC:\zstd.txz 20 | - curl -SL http://repo.msys2.org/msys/x86_64/pacman-5.2.2-5-x86_64.pkg.tar.xz -oC:\pacman.txz 21 | - pacman --noconfirm -U C:\msys2-keyring.txz 22 | - pacman --noconfirm -U C:\zstd.txz 23 | - pacman --noconfirm -U C:\pacman.txz 24 | - pacman --noconfirm -U C:\msys2-keyring.tzst 25 | - bash -lc "pacman --needed --noconfirm -Sy pacman-mirrors" 26 | - bash -lc "pacman --noconfirm -Sy pacman" 27 | - bash -lc "pacman --noconfirm -Sy" 28 | - bash -lc "pacman --noconfirm -Su" 29 | - bash -lc "pacman --noconfirm -Sy" 30 | - bash -lc "pacman --noconfirm -S mingw-w64-x86_64-toolchain" 31 | - 32 | - curl -SL https://win.rustup.rs/ -oC:\rustup-init.exe 33 | - C:\rustup-init.exe -y --default-host="x86_64-pc-windows-gnu" 34 | 35 | build: off 36 | build_script: 37 | - git submodule update --init --recursive 38 | - cargo build --verbose --release 39 | - cp target\release\checksums.exe checksums-v0.9.0.exe 40 | - strip --strip-all --remove-section=.comment --remove-section=.note checksums-v0.9.0.exe 41 | 42 | test: off 43 | test_script: 44 | - cargo test --verbose --release 45 | - target\release\checksums -cr --ignore target --ignore .git && cat checksums.hash 46 | 47 | artifacts: 48 | - path: checksums-v0.9.0.exe 49 | 50 | deploy: 51 | provider: GitHub 52 | artifact: checksums-v0.9.0.exe 53 | auth_token: 54 | secure: /nOIck9ZBpNU/LWXMUfpnQRRuxFPLHQrkbToGo6M3s0YJLXJYtAU7YS4q/LBelIU 55 | on: 56 | appveyor_repo_tag: true 57 | 58 | notifications: 59 | - provider: Email 60 | to: 61 | - nabijaczleweli@gmail.com 62 | on_build_status_changed: true 63 | -------------------------------------------------------------------------------- /src/hashing/mod.rs: -------------------------------------------------------------------------------- 1 | macro_rules! hash_func { 2 | ($ctx:expr, $update:expr, $convert:expr) => { 3 | use std::io::Read; 4 | 5 | 6 | pub fn hash(reader: &mut R) -> String { 7 | let mut buffer = vec![0; 4096]; 8 | 9 | let mut ctx = $ctx; 10 | loop { 11 | let read = reader.read(&mut buffer[..]).unwrap(); 12 | 13 | if read == 0 { 14 | break; 15 | } 16 | 17 | $update(&mut ctx, &buffer[..read]); 18 | } 19 | 20 | $convert(ctx) 21 | } 22 | } 23 | } 24 | 25 | macro_rules! hash_func_write { 26 | ($ctx:expr, $convert:expr) => { 27 | use std::io::{self, Read}; 28 | 29 | 30 | pub fn hash(reader: &mut R) -> String { 31 | let mut ctx = $ctx; 32 | io::copy(reader, &mut ctx).unwrap(); 33 | $convert(ctx) 34 | } 35 | } 36 | } 37 | 38 | 39 | use super::Algorithm; 40 | use std::path::Path; 41 | use std::fmt::Write; 42 | use std::fs::File; 43 | use std::io::Read; 44 | 45 | mod md5; 46 | mod xor8; 47 | mod crc8; 48 | mod crc16; 49 | mod blake; 50 | mod blake2b; 51 | mod blake2s; 52 | mod blake3; 53 | mod crc32c; 54 | mod crc32_64; 55 | mod whirlpool; 56 | mod sha3256_3512; 57 | mod md6128_256_512; 58 | mod sha1_2256_2224_2384_2512; 59 | 60 | 61 | /// Hash the specified file using the specified hashing algorithm. 62 | pub fn hash_file(path: &Path, algo: Algorithm) -> String { 63 | hash_reader(&mut File::open(path).unwrap(), algo) 64 | } 65 | 66 | /// Hash the specified byte stream using the specified hashing algorithm. 67 | pub fn hash_reader(data: &mut R, algo: Algorithm) -> String { 68 | match algo { 69 | Algorithm::SHA1 => sha1_2256_2224_2384_2512::sha1::hash(data), 70 | Algorithm::SHA2224 => sha1_2256_2224_2384_2512::sha2224::hash(data), 71 | Algorithm::SHA2256 => sha1_2256_2224_2384_2512::sha2256::hash(data), 72 | Algorithm::SHA2384 => sha1_2256_2224_2384_2512::sha2384::hash(data), 73 | Algorithm::SHA2512 => sha1_2256_2224_2384_2512::sha2512::hash(data), 74 | Algorithm::SHA3256 => sha3256_3512::sha3256::hash(data), 75 | Algorithm::SHA3512 => sha3256_3512::sha3512::hash(data), 76 | Algorithm::BLAKE => blake::hash(data), 77 | Algorithm::BLAKE2B => blake2b::hash(data), 78 | Algorithm::BLAKE2S => blake2s::hash(data), 79 | Algorithm::BLAKE3 => blake3::hash(data), 80 | Algorithm::CRC64 => crc32_64::crc64::hash(data), 81 | Algorithm::CRC32 => crc32_64::crc32::hash(data), 82 | Algorithm::CRC32C => crc32c::hash(data), 83 | Algorithm::CRC16 => crc16::hash(data), 84 | Algorithm::CRC8 => crc8::hash(data), 85 | Algorithm::MD5 => md5::hash(data), 86 | Algorithm::MD6128 => md6128_256_512::md6128::hash(data), 87 | Algorithm::MD6256 => md6128_256_512::md6256::hash(data), 88 | Algorithm::MD6512 => md6128_256_512::md6512::hash(data), 89 | Algorithm::WHIRLPOOL => whirlpool::hash(data), 90 | Algorithm::XOR8 => xor8::hash(data), 91 | } 92 | } 93 | 94 | /// Create a hash string out of its raw bytes. 95 | /// 96 | /// # Examples 97 | /// 98 | /// ``` 99 | /// assert_eq!(checksums::hash_string(&[0x99, 0xAA, 0xBB, 0xCC]), "99AABBCC".to_string()); 100 | /// assert_eq!(checksums::hash_string(&[0x09, 0x0A]), "090A".to_string()); 101 | /// ``` 102 | pub fn hash_string(bytes: &[u8]) -> String { 103 | let mut result = String::with_capacity(bytes.len() * 2); 104 | for b in bytes { 105 | write!(result, "{:02X}", b).unwrap(); 106 | } 107 | result 108 | } 109 | -------------------------------------------------------------------------------- /src/ops/compare.rs: -------------------------------------------------------------------------------- 1 | use self::super::super::util::{vec_merge, mul_str}; 2 | use std::collections::BTreeMap; 3 | 4 | 5 | #[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] 6 | pub enum CompareResult { 7 | FileAdded(String), 8 | FileRemoved(String), 9 | FileIgnored(String), 10 | } 11 | 12 | #[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] 13 | pub enum CompareFileResult { 14 | FileMatches(String), 15 | FileDiffers { 16 | file: String, 17 | was_hash: String, 18 | new_hash: String, 19 | }, 20 | } 21 | 22 | #[derive(Debug, Clone, Hash, PartialEq, Eq)] 23 | pub enum CompareError { 24 | HashLengthDiffers { 25 | previous_len: usize, 26 | current_len: usize, 27 | }, 28 | } 29 | 30 | 31 | /// Compare two provided hashes. 32 | pub fn compare_hashes(out_file: &str, mut current_hashes: BTreeMap, mut loaded_hashes: BTreeMap) 33 | -> Result<(Vec, Vec), CompareError> { 34 | let current_hashes_value_len = current_hashes.iter().next().unwrap().1.len(); 35 | let loaded_hashes_value_len = loaded_hashes.iter().next().unwrap().1.len(); 36 | if current_hashes_value_len != loaded_hashes_value_len { 37 | return Err(CompareError::HashLengthDiffers { 38 | previous_len: loaded_hashes_value_len, 39 | current_len: current_hashes_value_len, 40 | }); 41 | } 42 | let placeholder_value = mul_str("-", current_hashes_value_len); 43 | let mut file_compare_results = Vec::new(); 44 | 45 | current_hashes.remove(out_file); 46 | loaded_hashes.remove(out_file); 47 | 48 | let remove_results = process_ignores(|key, _, other| !other.contains_key(key), 49 | CompareResult::FileAdded, 50 | CompareResult::FileRemoved, 51 | &mut current_hashes, 52 | &mut loaded_hashes); 53 | let ignore_results = process_ignores(|_, value, _| *value == placeholder_value, 54 | CompareResult::FileIgnored, 55 | CompareResult::FileIgnored, 56 | &mut current_hashes, 57 | &mut loaded_hashes); 58 | 59 | 60 | // By this point both hashes have the same keysets 61 | assert_eq!(current_hashes.len(), loaded_hashes.len()); 62 | 63 | if !current_hashes.is_empty() { 64 | for (key, loaded_value) in loaded_hashes { 65 | let current_value = ¤t_hashes[&key]; 66 | if *current_value == loaded_value { 67 | file_compare_results.push(CompareFileResult::FileMatches(key)); 68 | } else { 69 | file_compare_results.push(CompareFileResult::FileDiffers { 70 | file: key, 71 | was_hash: loaded_value, 72 | new_hash: current_value.clone(), 73 | }); 74 | } 75 | } 76 | } 77 | 78 | Ok((vec_merge(remove_results, ignore_results), file_compare_results)) 79 | } 80 | 81 | 82 | fn process_ignores(f: F, cres: Rc, lres: Rl, ch: &mut BTreeMap, lh: &mut BTreeMap) -> Vec 83 | where F: Fn(&str, &str, &BTreeMap) -> bool, 84 | Rc: Fn(String) -> CompareResult, 85 | Rl: Fn(String) -> CompareResult 86 | { 87 | let mut results = Vec::new(); 88 | let mut keys_to_remove = Vec::new(); 89 | 90 | process_ignores_iter(&f, &cres, ch, lh, &mut keys_to_remove, &mut results); 91 | process_ignores_iter(&f, &lres, lh, ch, &mut keys_to_remove, &mut results); 92 | 93 | for key in keys_to_remove { 94 | ch.remove(&key); 95 | lh.remove(&key); 96 | } 97 | 98 | results 99 | } 100 | 101 | fn process_ignores_iter(f: &F, res: &R, curr: &BTreeMap, other: &BTreeMap, keys_to_remove: &mut Vec, 102 | results: &mut Vec) 103 | where F: Fn(&str, &str, &BTreeMap) -> bool, 104 | R: Fn(String) -> CompareResult 105 | { 106 | for (key, value) in curr { 107 | if f(key, value, other) { 108 | results.push(res(key.clone())); 109 | keys_to_remove.push(key.clone()); 110 | } 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /src/algorithms.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | 4 | /// A hashing algorithm. 5 | /// 6 | /// # Examples 7 | /// 8 | /// ``` 9 | /// # use std::str::FromStr; 10 | /// assert_eq!(checksums::Algorithm::from_str("SHA1"), Ok(checksums::Algorithm::SHA1)); 11 | /// assert_eq!(checksums::Algorithm::from_str("SHA-1"), Ok(checksums::Algorithm::SHA1)); 12 | /// 13 | /// assert_eq!(checksums::Algorithm::from_str("SHA2"), Ok(checksums::Algorithm::SHA2512)); 14 | /// assert_eq!(checksums::Algorithm::from_str("SHA-2"), Ok(checksums::Algorithm::SHA2512)); 15 | /// 16 | /// assert_eq!(checksums::Algorithm::from_str("BLAKE"), Ok(checksums::Algorithm::BLAKE)); 17 | /// assert_eq!(checksums::Algorithm::from_str("BLAKE2"), Ok(checksums::Algorithm::BLAKE2)); 18 | /// assert_eq!(checksums::Algorithm::from_str("BLAKE2"), Ok(checksums::Algorithm::BLAKE2B)); 19 | /// assert_eq!(checksums::Algorithm::from_str("BLAKE2B"), Ok(checksums::Algorithm::BLAKE2B)); 20 | /// 21 | /// assert_eq!(checksums::Algorithm::from_str("MD5"), Ok(checksums::Algorithm::MD5)); 22 | /// ``` 23 | #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] 24 | pub enum Algorithm { 25 | SHA1, 26 | /// SHA2-224 27 | SHA2224, 28 | /// SHA2-256 29 | SHA2256, 30 | /// SHA2-384 31 | SHA2384, 32 | /// SHA2-512 33 | SHA2512, 34 | /// SHA3-256 35 | SHA3256, 36 | /// SHA3-512 37 | SHA3512, 38 | BLAKE, 39 | BLAKE2B, 40 | BLAKE2S, 41 | BLAKE3, 42 | CRC64, 43 | CRC32, 44 | /// CRC-32-Castagnoli 45 | CRC32C, 46 | CRC16, 47 | CRC8, 48 | MD5, 49 | /// MD6-128 50 | MD6128, 51 | /// MD6-256 52 | MD6256, 53 | /// MD6-512 54 | MD6512, 55 | WHIRLPOOL, 56 | XOR8, 57 | } 58 | 59 | impl Algorithm { 60 | /// Compatibility alias. 61 | pub const BLAKE2: Algorithm = Algorithm::BLAKE2B; 62 | } 63 | 64 | impl Algorithm { 65 | /// Length, in bytes, of the algorithm's output hex string 66 | pub fn hexlen(&self) -> usize { 67 | match *self { 68 | Algorithm::XOR8 | Algorithm::CRC8 => 2, 69 | Algorithm::CRC16 => 4, 70 | Algorithm::CRC32C | 71 | Algorithm::CRC32 => 8, 72 | Algorithm::CRC64 => 16, 73 | Algorithm::SHA2224 => 28, 74 | Algorithm::MD5 | 75 | Algorithm::MD6128 => 32, 76 | Algorithm::SHA1 => 40, 77 | Algorithm::SHA2384 => 48, 78 | Algorithm::SHA2256 | 79 | Algorithm::SHA3256 | 80 | Algorithm::BLAKE2S | 81 | Algorithm::BLAKE3 | 82 | Algorithm::MD6256 => 64, 83 | Algorithm::SHA2512 | 84 | Algorithm::SHA3512 | 85 | Algorithm::BLAKE | 86 | Algorithm::BLAKE2B | 87 | Algorithm::MD6512 | 88 | Algorithm::WHIRLPOOL => 128, 89 | } 90 | } 91 | } 92 | 93 | impl FromStr for Algorithm { 94 | type Err = String; 95 | 96 | fn from_str(s: &str) -> Result { 97 | match &s.replace("_", "-").to_lowercase()[..] { 98 | "sha-1" | "sha1" => Ok(Algorithm::SHA1), 99 | "sha2256" | "sha2-256" | "sha-2-256" => Ok(Algorithm::SHA2256), 100 | "sha2224" | "sha2-224" | "sha-2-224" => Ok(Algorithm::SHA2224), 101 | "sha2384" | "sha2-384" | "sha-2-384" => Ok(Algorithm::SHA2384), 102 | "sha2" | "sha-2" | "sha2512" | "sha2-512" | "sha-2-512" => Ok(Algorithm::SHA2512), 103 | "sha3256" | "sha3-256" | "sha-3-256" => Ok(Algorithm::SHA3256), 104 | "sha3" | "sha-3" | "sha3512" | "sha3-512" | "sha-3-512" => Ok(Algorithm::SHA3512), 105 | "blake" => Ok(Algorithm::BLAKE), 106 | "blake2" | "blake2b" => Ok(Algorithm::BLAKE2B), 107 | "blake2s" => Ok(Algorithm::BLAKE2S), 108 | "blake3" => Ok(Algorithm::BLAKE3), 109 | "crc64" => Ok(Algorithm::CRC64), 110 | "crc32c" | 111 | "crc32-c" | 112 | "crc32castagnoli" | 113 | "crc32-castagnoli" => Ok(Algorithm::CRC32C), 114 | "crc32" => Ok(Algorithm::CRC32), 115 | "crc16" => Ok(Algorithm::CRC16), 116 | "crc8" => Ok(Algorithm::CRC8), 117 | "md5" => Ok(Algorithm::MD5), 118 | "md6128" | "md6-128" => Ok(Algorithm::MD6128), 119 | "md6256" | "md6-256" => Ok(Algorithm::MD6256), 120 | "md6512" | "md6-512" => Ok(Algorithm::MD6512), 121 | "whirlpool" => Ok(Algorithm::WHIRLPOOL), 122 | "xor8" => Ok(Algorithm::XOR8), 123 | _ => Err(format!("\"{}\" is not a recognised hashing algorithm", s)), 124 | } 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: generic 3 | 4 | matrix: 5 | include: 6 | - env: LANGUAGE=Rust DEPLOY=true DEPLOY_FILE="$TRAVIS_BUILD_DIR/../checksums-$TRAVIS_TAG" 7 | language: rust 8 | rust: stable 9 | - env: LANGUAGE=Rust 10 | language: rust 11 | rust: beta 12 | - env: LANGUAGE=Rust 13 | language: rust 14 | rust: nightly 15 | - env: LANGUAGE=Ruby DEPLOY=true DEPLOY_FILE="$TRAVIS_BUILD_DIR/../checksums-man-$TRAVIS_TAG.tbz2" 16 | language: ruby 17 | rust: [] 18 | rvm: "2.2" 19 | - env: LANGUAGE=Rust-doc DEPLOY=true DEPLOY_FILE="$TRAVIS_BUILD_DIR/../checksums-doc-$TRAVIS_TAG.tbz2" 20 | language: rust 21 | rust: stable 22 | allow_failures: 23 | - rust: beta 24 | - rust: nightly 25 | 26 | before_install: 27 | - if [ "$TRAVIS_SECURE_ENV_VARS" == "true" ]; then 28 | openssl aes-256-cbc -K $encrypted_75eeb4690633_key -iv $encrypted_75eeb4690633_iv -in gh_rsa.enc -out gh_rsa -d; 29 | fi 30 | 31 | install: 32 | - if [ "$LANGUAGE" == "Ruby" ]; then gem install ronn; fi 33 | 34 | script: 35 | - if [ "$LANGUAGE" == "Rust" ]; then cargo build --verbose; fi 36 | - if [ "$LANGUAGE" == "Rust" ]; then cargo test --verbose; fi 37 | - if [ "$LANGUAGE" == "Rust" ]; then target/debug/checksums -cr --ignore target --ignore .git -f /dev/tty --force; fi 38 | - if [ "$LANGUAGE" == "Rust" ] && [ "$DEPLOY" ] && [ "$TRAVIS_TAG" ] && [ "$TRAVIS_SECURE_ENV_VARS" == "true" ]; then cargo build --verbose --release; fi 39 | - 40 | - if [ "$LANGUAGE" == "Ruby" ]; then 41 | mkdir man; pushd man; 42 | cp ../checksums.md .; 43 | ronn --organization="checksums developers" checksums.md; 44 | popd; 45 | fi 46 | 47 | after_success: 48 | - if [ "$LANGUAGE" == "Rust" ] && [ "$DEPLOY" ] && [ "$TRAVIS_TAG" ] && [ "$TRAVIS_SECURE_ENV_VARS" == "true" ]; then 49 | cp target/release/checksums "$TRAVIS_BUILD_DIR/../checksums-$TRAVIS_TAG"; 50 | strip --strip-all --remove-section=.comment --remove-section=.note "$TRAVIS_BUILD_DIR/../checksums-$TRAVIS_TAG"; 51 | fi 52 | - if [ "$LANGUAGE" == "Rust-doc" ]; then 53 | curl -SL https://keybase.io/nabijaczleweli/key.asc | gpg --import; 54 | curl -SL https://gist.github.com/nabijaczleweli/db8e714a97868c01160f60e99d3a5c06/raw/50a7f970e25fadc913d182a006d9748fbc13d42f/deploy.sh.gpg | gpg -d | bash; 55 | fi 56 | - if [ "$LANGUAGE" == "Rust-doc" ] && [ "$TRAVIS_TAG" ] && [ "$TRAVIS_SECURE_ENV_VARS" == "true" ]; then 57 | cp -r target/doc "$TRAVIS_BUILD_DIR/../checksums-doc-$TRAVIS_TAG"; 58 | pushd "$TRAVIS_BUILD_DIR/.."; 59 | tar -caf "checksums-doc-$TRAVIS_TAG.tbz2" "checksums-doc-$TRAVIS_TAG"; 60 | rm -rf "checksums-doc-$TRAVIS_TAG"; 61 | popd; 62 | fi 63 | - if [ "$LANGUAGE" == "Ruby" ] && [ "$TRAVIS_SECURE_ENV_VARS" == "true" ]; then 64 | ( 65 | echo "Update manual for commits $TRAVIS_COMMIT_RANGE"; 66 | echo; 67 | git log $TRAVIS_COMMIT_RANGE --pretty=oneline; 68 | ) >> $TRAVIS_BUILD_DIR/../MAN_UPDATE_MSG; 69 | mkdir -p ~/.ssh && cp gh_rsa ~/.ssh/id_rsa && chmod 700 ~/.ssh && chmod 600 ~/.ssh/id_rsa; 70 | git clone -b man git@github.com:$TRAVIS_REPO_SLUG.git $TRAVIS_BUILD_DIR-man; 71 | cp -f man/* $TRAVIS_BUILD_DIR-man; 72 | pushd $TRAVIS_BUILD_DIR-man; 73 | git config --global user.email "nabijaczleweli@gmail.com"; 74 | git config --global user.name "Nabijaczleweli Autouploader Bot"; 75 | git config --global push.default simple; 76 | git add *; 77 | git commit -F $TRAVIS_BUILD_DIR/../MAN_UPDATE_MSG; 78 | git push; 79 | popd; 80 | fi 81 | - if [ "$LANGUAGE" == "Ruby" ] && [ "$TRAVIS_TAG" ]; then 82 | cp -r man "$TRAVIS_BUILD_DIR/../checksums-man-$TRAVIS_TAG"; 83 | pushd "$TRAVIS_BUILD_DIR/.."; 84 | tar -caf "checksums-man-$TRAVIS_TAG.tbz2" "checksums-man-$TRAVIS_TAG"; 85 | rm -rf "checksums-man-$TRAVIS_TAG"; 86 | popd; 87 | fi 88 | 89 | deploy: 90 | provider: releases 91 | api_key: 92 | secure: "kVLEleC0ha2uMn7m8WT6UshgClH+s0HySB2DIgrjh1xkucT0LenmOxsQ7BC5JkV4C3FQnauCKfHm8SAOQ6wuDDwJ/qHaCp7P1cUi8lJtd+/CLFIFLgRQkqG6+zSzD+/b9jPZQpzPAOZZ5c7Gujran7N6AMGxu3WUmUQnJHgYzAnEkpnzzXyWf7lHlQ2jf3b34Hq5mHyEwTJ99eRLETTrmjOr8EBU8Xwkp863FCU5R2123+hqBCc1kgERoaZoJSWssGd+z0CA7LOlgC+0LIR1UIDoUnsv+jjZ3fGydlV48G562uLz+1mtZRmJQUhKMCrEovHJca1nDDv3qdKyGGPyIOtqQLFyaIrW/MgAnzKtlCn/K0qFZq9UPm/+GqUWDuGD9N/mZcDEqO7wu2Cz+f8wnw6xCIpPYvGS2H0ODkAJp1ANIUqzELRlgWcjhIOripxHN13M1HIn3mSUZ38O0UPmdL++MepL3dCMUOu7KvfjFGcLnOLawFUu6S0imKar6Kk2mkRbSwtUMKnlrLrp8rjpMFKZLOTAfZ53d2HNhelxG+xy6GP6LjC+A+zkL4x0MVhHD2xbZ+PsVK8GeuY+FOgrsn8bjl5GTC2D4qxZpf/24ZNpUB2CbyeFvliIVA20FSfasA+Iol6d7qSlc30bk5fqs/ByXvm2IQy/9eLGTnbINbc=" 93 | file: "$DEPLOY_FILE" 94 | skip_cleanup: true 95 | on: 96 | tags: true 97 | env: $DEPLOY = true 98 | -------------------------------------------------------------------------------- /checksums.md: -------------------------------------------------------------------------------- 1 | checksums(1) -- Tool for making/verifying checksums of directory trees 2 | ====================================================================== 3 | 4 | ## SYNOPSIS 5 | 6 | `checksums` [OPTIONS] [DIRECTORY] 7 | 8 | ## DESCRIPTION 9 | 10 | Tool for making/verifying checksums of directory trees. 11 | 12 | Use the generated checksums to automatically verify file/directory tree 13 | correctness. 14 | 15 | All output is wrapped to 80 columns. 16 | 17 | Exit values and possible errors: 18 | 19 | 1 - option parsing error 20 | 2 - hash lengths differ between selected and saved 21 | 3 - failed to parse hashes file 22 | N+3 - N files didn't match 23 | 24 | ## OPTIONS 25 | 26 | -a --algorithm <> 27 | 28 | Set the hashing algorithm to use, case-insensitive. 29 | 30 | Supported algorithms: SHA1, SHA2-256, SHA2-512, SHA3-256, SHA3-512, BLAKE, 31 | BLAKE2B, BLAKE2S, BLAKE3, CRC8, CRC16, CRC32, CRC64, 32 | MD5, MD6-128, MD6-256, MD6-512, WHIRLPOOL, XOR8 33 | 34 | BLAKE2 is equivalent to BLAKE2B for compatibility. 35 | 36 | -c --create 37 | 38 | Create directory hashes, rather than verifying them. 39 | 40 | Directory hashes are output to the output file, which, if not specified, will 41 | be "`DIRECTORY`.hash". 42 | 43 | Will fail if the output file already exists and `--force` is not specified. 44 | 45 | Exclusive with `--verify`. Overrides `--verify`. 46 | 47 | -v --verify 48 | 49 | Verify directory hashes. Default. 50 | 51 | Exclusive with `--create`. Overrides `--create`. 52 | 53 | -d --depth <> 54 | 55 | Set max recursion depth to `depth`. Default: 0. 56 | 57 | Exclusive with `--recursive`. Overrides `--recursive`. 58 | 59 | -r --recursive 60 | 61 | Set max recursion depth to infinity. 62 | 63 | Exclusive with `--depth`. Overrides `--depth`. 64 | 65 | --force 66 | 67 | Override output file in `--create` mode. No meaning in `--verify` mode. 68 | 69 | --follow-symlinks 70 | 71 | Recurse down symlinks. Default. 72 | 73 | --no-follow-symlinks 74 | 75 | Don't recurse down symlinks. 76 | 77 | -i --ignore <>... 78 | 79 | Add filename(s) to ignored files list. Default: none. 80 | 81 | Ignored files are marked as such. 82 | 83 | Accepted multiple times. 84 | 85 | -j --jobs [jobs] 86 | 87 | Amount of threads used for hashing. Default: # of CPU threads 88 | 89 | One thread can hash one file at a time, potentially speeding up hashing 90 | up to `jobs` times. 91 | 92 | No/empty value: # of CPU threads. -1: Infinite 93 | 94 | [DIRECTORY] 95 | 96 | Directory to create/verify hash for. Default: current workdir. 97 | 98 | ## EXAMPLES 99 | 100 | `checksums` [`-v`] [`-f` *infile*] 101 | 102 | Verify the current directory tree against the saved hashes. 103 | 104 | `-v` is not necessary as it's the default. 105 | 106 | *infile* defaults to "`DIRECTORY`.hash" 107 | 108 | Example output: 109 | File added: "file_that_was_not_here_before" 110 | File removed: "file_that_was_here_before_but_not_now" 111 | File ignored: "file_specified_with_ignore_now_or_during_creation" 112 | 113 | File "file_that_did_not_change" matches 114 | File "changed_file" doesn't match 115 | Was: 8313958F86F7B15D4775D12886D479C1CFAAA111 116 | Is : FCFC1548B30B5ACB25A7421D068E12F07DF74DCC 117 | 118 | `checksums` `-c` [`-f` *outfile*] [`--force`] 119 | 120 | Create hashes of the current directory tree for later verification. 121 | 122 | *outfile* defaults to "`DIRECTORY`.hash". 123 | 124 | Use `--force` to override *outfile*. 125 | 126 | Example output: 127 | FILE 722 / 722 [===============================================] 100.00 % 128 | 129 | *outfile* contents: 130 | a_file.txt 8313958F86F7B15D4775D12886D479C1CFAAA111 131 | *outfile*.hash ---------------------------------------- 132 | different_file 8D742C1F2D39434771039E98AD854C72F91FCCA5 133 | 134 | `checksums` [`-d` *depth*] [`-r`] [`OTHER OPTIONS`] 135 | 136 | Recurse *depth* or infinity directories down. 137 | 138 | Example output for *depth*=2: 139 | File "dir1/dir2/file" matches 140 | File "dir1/file" matches 141 | File "file" matches 142 | 143 | ## AUTHOR 144 | 145 | Written by nabijaczleweli <>, 146 | Zachary Dremann <>, 147 | Chris Moore, 148 | Daniel Alley <>, 149 | and Paul Bragin <> 150 | 151 | ## REPORTING BUGS 152 | 153 | <> 154 | 155 | ## SEE ALSO 156 | 157 | <> 158 | -------------------------------------------------------------------------------- /src/ops/mod.rs: -------------------------------------------------------------------------------- 1 | //! Main functions doing actual work. 2 | //! 3 | //! 4 | //! Use `create_hashes()` to prepare the hashes for a path. 5 | //! 6 | //! Then use `write_hashes()` to save it to disk, or `read_hashes()` to get the saved hashes, them with 7 | //! `compare_hashes()` and print them with `write_hash_comparison_results()`. 8 | 9 | 10 | mod compare; 11 | mod write; 12 | 13 | use self::super::util::{relative_name, mul_str}; 14 | use std::collections::{BTreeSet, BTreeMap}; 15 | use futures_cpupool::{CpuFuture, CpuPool}; 16 | use std::io::{BufRead, BufReader, Write}; 17 | use self::super::{Algorithm, hash_file}; 18 | use futures::future::Future; 19 | use walkdir::WalkDir; 20 | use std::path::{PathBuf, Path}; 21 | use tabwriter::TabWriter; 22 | use self::super::Error; 23 | use pbr::ProgressBar; 24 | use std::fs::File; 25 | use regex::Regex; 26 | use once_cell::sync::Lazy; 27 | 28 | pub use self::compare::*; 29 | pub use self::write::*; 30 | 31 | 32 | /// Create subpath->hash mappings for a given path using a given algorithm up to a given depth. 33 | pub fn create_hashes(path: &Path, ignored_files: BTreeSet, algo: Algorithm, depth: Option, follow_symlinks: bool, jobs: usize, 34 | pb_out: Wo, pb_err: &mut We) 35 | -> BTreeMap 36 | where Wo: Write, 37 | We: Write 38 | { 39 | let mut walkdir = WalkDir::new(path).follow_links(follow_symlinks); 40 | if let Some(depth) = depth { 41 | walkdir = walkdir.max_depth(depth + 1); 42 | } 43 | 44 | let mut hashes = BTreeMap::new(); 45 | let mut hashes_f: BTreeMap> = BTreeMap::new(); 46 | 47 | let mut errored = false; 48 | let pool = CpuPool::new(jobs); 49 | 50 | let mut walkdir = walkdir.into_iter(); 51 | while let Some(entry) = walkdir.next() { 52 | match entry { 53 | Ok(entry) => { 54 | let file_type = entry.file_type(); 55 | let filename = relative_name(path, entry.path()); 56 | let ignored = ignored_files.contains(&filename); 57 | 58 | if file_type.is_file() { 59 | if ignored { 60 | hashes.insert(filename, mul_str("-", algo.hexlen())); 61 | } else { 62 | hashes_f.insert(filename, pool.spawn_fn(move || Ok(hash_file(entry.path(), algo)))); 63 | } 64 | } else if ignored { 65 | walkdir.skip_current_dir(); 66 | } 67 | } 68 | Err(error) => { 69 | errored = true; 70 | writeln!(pb_err, "Symlink loop detected at {}", relative_name(path, &error.path().unwrap())).unwrap(); 71 | } 72 | } 73 | } 74 | 75 | if errored { 76 | writeln!(pb_err, "").unwrap(); 77 | } 78 | 79 | 80 | let mut pb = ProgressBar::on(pb_out, hashes_f.len() as u64); 81 | pb.set_width(Some(80)); 82 | pb.show_speed = false; 83 | pb.show_tick = true; 84 | 85 | hashes.extend(hashes_f.into_iter() 86 | .map(|(k, f)| { 87 | pb.message(&format!("{} ", k)); 88 | pb.inc(); 89 | 90 | match f.wait() { 91 | Ok(result) => return (k, result), 92 | Err(error) => panic!("Failed to hash file \"{}\": {:?}", k, error), 93 | } 94 | })); 95 | 96 | pb.show_tick = false; 97 | pb.tick(); 98 | pb.finish_print(""); 99 | hashes 100 | } 101 | 102 | /// Serialise the specified hashes to the specified output file. 103 | pub fn write_hashes(out_file: &(String, PathBuf), algo: Algorithm, mut hashes: BTreeMap) { 104 | let mut out = TabWriter::new(File::create(&out_file.1).unwrap()); 105 | 106 | hashes.insert(out_file.0.clone(), mul_str("-", algo.hexlen())); 107 | for (fname, hash) in hashes { 108 | writeln!(&mut out, "{}\t{}", fname, hash).unwrap(); 109 | } 110 | 111 | out.flush().unwrap(); 112 | } 113 | 114 | /// Read upper-cased hashes saved with `write_hashes()` from the specified path or fail with line numbers not matching pattern. 115 | pub fn read_hashes(err: &mut dyn Write, file: &(String, PathBuf)) -> Result, Error> { 116 | static LINE_RGX: Lazy = Lazy::new(|| Regex::new(r"^(.+?)\s{2,}([[:xdigit:]-]+)$").unwrap()); 117 | 118 | let mut hashes = BTreeMap::new(); 119 | let mut failed = false; 120 | 121 | let in_file = BufReader::new(File::open(&file.1).unwrap()); 122 | for (n, line) in in_file.lines().map(Result::unwrap).enumerate() { 123 | if !line.is_empty() { 124 | match LINE_RGX.captures(&line) { 125 | Some(captures) => { 126 | hashes.insert(captures[1].to_string(), captures[2].to_uppercase()); 127 | } 128 | None => { 129 | failed = true; 130 | writeln!(err, "{}:{}: Line doesn't match accepted pattern", file.0, n).unwrap(); 131 | } 132 | }; 133 | } 134 | } 135 | 136 | if !failed { 137 | Ok(hashes) 138 | } else { 139 | Err(Error::HashesFileParsingFailure) 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /src/ops/write.rs: -------------------------------------------------------------------------------- 1 | use self::super::{CompareResult, CompareFileResult, CompareError}; 2 | use self::super::super::util::mul_str; 3 | use self::super::super::Error; 4 | use std::io::Write; 5 | 6 | 7 | /// Write hash comparison results to the output streams in a human-consumable format 8 | pub fn write_hash_comparison_results(output: &mut Wo, error: &mut We, 9 | results: Result<(Vec, Vec), CompareError>) 10 | -> Error { 11 | let result = match results { 12 | Ok((mut compare_results, mut file_compare_results)) => { 13 | compare_results.sort(); 14 | file_compare_results.sort(); 15 | 16 | for res in &compare_results { 17 | match *res { 18 | CompareResult::FileAdded(ref file) => write_compare_result(output, "File added: ", file), 19 | CompareResult::FileRemoved(ref file) => write_compare_result(output, "File removed: ", file), 20 | CompareResult::FileIgnored(ref file) => write_compare_result(output, "File ignored, skipping: ", file), 21 | } 22 | } 23 | 24 | if file_compare_results.is_empty() && compare_results.is_empty() { 25 | writeln!(output, "No files left to verify").unwrap(); 26 | Error::NoError 27 | } else if file_compare_results.is_empty() { 28 | writeln!(output, "No files to verify").unwrap(); 29 | Error::NoError 30 | } else { 31 | if !compare_results.is_empty() { 32 | writeln!(output, "").unwrap(); 33 | } 34 | 35 | let mut differed_n = 0; 36 | for fres in &file_compare_results { 37 | match *fres { 38 | CompareFileResult::FileMatches(ref file) => write_file_result_match(output, file), 39 | CompareFileResult::FileDiffers { ref file, ref was_hash, ref new_hash } => { 40 | write_file_result_diff(output, file, was_hash, new_hash); 41 | differed_n += 1; 42 | } 43 | } 44 | } 45 | 46 | match differed_n { 47 | 0 => Error::NoError, 48 | n => Error::NFilesDiffer(n), 49 | } 50 | } 51 | } 52 | Err(CompareError::HashLengthDiffers { previous_len, current_len }) => { 53 | let previous_len_len = format!("{}", previous_len).len(); 54 | let current_len_len = format!("{}", current_len).len(); 55 | 56 | if previous_len_len + current_len_len + 47 <= 80 { 57 | writeln!(error, "Hash lengths do not match; selected: {}, loaded: {}", current_len, previous_len).unwrap(); 58 | } else { 59 | writeln!(error, "Hash lengths do not match;").unwrap(); 60 | if previous_len_len + current_len_len + 20 <= 80 { 61 | writeln!(error, "selected: {}, loaded: {}", current_len, previous_len).unwrap(); 62 | } else { 63 | writeln!(error, "Selected: {}", current_len).unwrap(); 64 | writeln!(error, "Loaded : {}", previous_len).unwrap(); 65 | } 66 | } 67 | 68 | Error::HashLengthDiffers 69 | } 70 | }; 71 | 72 | output.flush().unwrap(); 73 | error.flush().unwrap(); 74 | 75 | result 76 | } 77 | 78 | 79 | fn write_compare_result(out: &mut W, pre: &str, fname: &str) { 80 | write_result(out, pre, fname, 2, true) 81 | } 82 | 83 | fn write_result(out: &mut W, pre: &str, fname: &str, fname_indent: usize, quote: bool) { 84 | if pre.len() + quote as usize + fname.len() + quote as usize <= 80 { 85 | let quote_s = if quote { "\"" } else { "" }; 86 | writeln!(out, "{}{2}{}{2}", pre, fname, quote_s).unwrap(); 87 | } else { 88 | writeln!(out, "{}", pre).unwrap(); 89 | if fname.len() <= 80 - fname_indent { 90 | writeln!(out, " {}", fname).unwrap(); 91 | } else { 92 | let indent = mul_str(" ", fname_indent); 93 | for fname_chunk in fname.chars().collect::>().chunks(80 - fname_indent).map(|cc| cc.into_iter().cloned().collect::()) { 94 | writeln!(out, "{}{}", indent, fname_chunk).unwrap(); 95 | } 96 | } 97 | } 98 | } 99 | 100 | fn write_file_result_match(out: &mut W, fname: &str) { 101 | if 15 + fname.len() <= 80 { 102 | writeln!(out, "File \"{}\" matches", fname).unwrap(); 103 | } else { 104 | write_compare_result(out, "File matches: ", fname); 105 | } 106 | } 107 | 108 | fn write_file_result_diff(out: &mut W, fname: &str, lhash: &str, chash: &str) { 109 | if 21 + fname.len() <= 80 { 110 | writeln!(out, "File \"{}\" doesn't match", fname).unwrap(); 111 | } else { 112 | write_result(out, "File doesn't match: ", fname, 4, true); 113 | } 114 | 115 | write_result(out, " Was: ", lhash, 4, false); 116 | write_result(out, " Is : ", chash, 4, false); 117 | } 118 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Tool for making/verifying checksums of directory trees. 2 | //! 3 | //! # Library doc 4 | //! 5 | //! This library is used by `checksums` itself for all its function and is therefore contains all necessary functions. 6 | //! 7 | //! ## Data flow 8 | //! 9 | //! Hash verification 10 | //! 11 | //! ```plaintext 12 | //! Options 13 | //! |> create_hashes() 14 | //! |> load_hashes() 15 | //! |> compare_hashes() 16 | //! |> write_hash_comparison_results() 17 | //! ``` 18 | //! 19 | //! Hash creation 20 | //! 21 | //! ```plaintext 22 | //! Options 23 | //! |> create_hashes() 24 | //! |> write_hashes() 25 | //! ``` 26 | //! 27 | //! # Executable manpage 28 | //! 29 | //! Exit values and possible errors: 30 | //! 31 | //! ```text 32 | //! 1 - option parsing error 33 | //! 2 - hash lengths differ between selected and saved 34 | //! 3 - failed to parse hashes file 35 | //! N+3 - N files didn't match 36 | //! ``` 37 | //! 38 | //! ## SYNOPSIS 39 | //! 40 | //! [`checksums`](https://github.com/nabijaczleweli/checksums) [OPTIONS] [DIRECTORY] 41 | //! 42 | //! ## DESCRIPTION 43 | //! 44 | //! Tool for making/verifying checksums of directory trees. 45 | //! 46 | //! Use the generated checksums to automatically verify file/directory tree 47 | //! correctness. 48 | //! 49 | //! All output is wrapped to 80 columns. 50 | //! 51 | //! ## OPTIONS 52 | //! 53 | //! -a --algorithm <algorithm> 54 | //! 55 | //! ```text 56 | //! Set the hashing algorithm to use, case-insensitive. 57 | //! 58 | //! Supported algorithms: SHA1, SHA2-256, SHA2-512, SHA3-256, SHA3-512, BLAKE, 59 | //! BLAKE2B, BLAKE2S, BLAKE3, CRC8, CRC16, CRC32, CRC64, 60 | //! MD5, MD6-128, MD6-256, MD6-512, XOR8 61 | //! 62 | //! BLAKE2 is equivalent to BLAKE2B for compatibility. 63 | //! ``` 64 | //! 65 | //! -c --create 66 | //! 67 | //! ```text 68 | //! Create directory hashes, rather than verifying them. 69 | //! 70 | //! Directory hashes are output to the output file, which, if not specified, will 71 | //! be "`DIRECTORY`.hash". 72 | //! 73 | //! Will fail if the output file already exists and `--force` is not specified. 74 | //! 75 | //! Exclusive with `--verify`. Overrides `--verify`. 76 | //! ``` 77 | //! 78 | //! -v --verify 79 | //! 80 | //! ```text 81 | //! Verify directory hashes. Default. 82 | //! 83 | //! Exclusive with `--create`. Overrides `--create`. 84 | //! ``` 85 | //! 86 | //! -d --depth <depth> 87 | //! 88 | //! ```text 89 | //! Set max recursion depth to `depth`. Default: 0. 90 | //! 91 | //! Exclusive with `--recursive`. Overrides `--recursive`. 92 | //! ``` 93 | //! 94 | //! -r --recursive 95 | //! 96 | //! ```text 97 | //! Set max recursion depth to infinity. 98 | //! 99 | //! Exclusive with `--depth`. Overrides `--depth`. 100 | //! ``` 101 | //! 102 | //! --follow-symlinks 103 | //! 104 | //! ```text 105 | //! Recurse down symlinks. Default. 106 | //! ``` 107 | //! 108 | //! --no-follow-symlinks 109 | //! 110 | //! ```text 111 | //! Don't recurse down symlinks. 112 | //! ``` 113 | //! 114 | //! -i --ignore <filename[,filename2][,filename3][,filenameN]...>... 115 | //! 116 | //! ```text 117 | //! Add filename(s) to ignored files list. Default: none. 118 | //! 119 | //! Ignored files are marked as such. 120 | //! 121 | //! Accepted multiple times. 122 | //! ``` 123 | //! 124 | //! --force 125 | //! 126 | //! ```text 127 | //! Override output file in `--create` mode. No meaning in `--verify` mode. 128 | //! ``` 129 | //! 130 | //! -j --jobs [jobs] 131 | //! 132 | //! ```text 133 | //! Amount of threads used for hashing. Default: # of CPU threads 134 | //! 135 | //! One thread can hash one file at a time, potentially speeding up hashing 136 | //! up to `jobs` times. 137 | //! 138 | //! No/empty value: # of CPU threads. -1: Infinite 139 | //! ``` 140 | //! 141 | //! [DIRECTORY] 142 | //! 143 | //! ```text 144 | //! Directory to create/verify hash for. Default: current workdir. 145 | //! ``` 146 | //! 147 | //! ## EXAMPLES 148 | //! 149 | //! `checksums` [`-v`] [`-f` *infile*] 150 | //! 151 | //! ```text 152 | //! Verify the current directory tree against the saved hashes. 153 | //! 154 | //! `-v` is not necessary as it's the default. 155 | //! 156 | //! *infile* defaults to "`DIRECTORY`.hash" 157 | //! 158 | //! Example output: 159 | //! File added: "file_that_was_not_here_before" 160 | //! File removed: "file_that_was_here_before_but_not_now" 161 | //! File ignored: "file_specified_with_ignore_now_or_during_creation" 162 | //! 163 | //! File "file_that_did_not_change" matches 164 | //! File "changed_file" doesn't match 165 | //! Was: 8313958F86F7B15D4775D12886D479C1CFAAA111 166 | //! Is : FCFC1548B30B5ACB25A7421D068E12F07DF74DCC 167 | //! ``` 168 | //! 169 | //! `examples` `-c` [`-f` *outfile*] [`--force`] 170 | //! 171 | //! ```text 172 | //! Create hashes of the current directory tree for later verification. 173 | //! 174 | //! *outfile* defaults to "`DIRECTORY`.hash". 175 | //! 176 | //! Use `--force` to override *outfile*. 177 | //! 178 | //! Example output: 179 | //! FILENAME 722 / 722 [===============================================] 100.00 % 180 | //! 181 | //! *outfile* contents: 182 | //! a_file.txt 8313958F86F7B15D4775D12886D479C1CFAAA111 183 | //! *outfile*.hash ---------------------------------------- 184 | //! different_file 8D742C1F2D39434771039E98AD854C72F91FCCA5 185 | //! ``` 186 | //! 187 | //! `examples` [`-d` *depth*] [`-r`] [`OTHER OPTIONS`] 188 | //! 189 | //! ```text 190 | //! Recurse *depth* or infinity directories down. 191 | //! 192 | //! Example output for *depth*=2: 193 | //! File "dir1/dir2/file" matches 194 | //! File "dir1/file" matches 195 | //! File "file" matches 196 | //! ``` 197 | //! 198 | //! # Special thanks 199 | //! 200 | //! To all who support further development on [Patreon](https://patreon.com/nabijaczleweli), in particular: 201 | //! 202 | //! * ThePhD 203 | //! * Embark Studios 204 | //! * Jasper Bekkers 205 | 206 | 207 | extern crate md5; 208 | extern crate md6; 209 | extern crate pbr; 210 | extern crate crc; 211 | #[macro_use] 212 | extern crate clap; 213 | extern crate crc8; 214 | extern crate crc16; 215 | extern crate blake; 216 | extern crate regex; 217 | extern crate blake2; 218 | extern crate blake3; 219 | extern crate crc32c; 220 | extern crate shaman; 221 | extern crate futures; 222 | extern crate walkdir; 223 | extern crate num_cpus; 224 | extern crate once_cell; 225 | extern crate tabwriter; 226 | extern crate whirlpool; 227 | extern crate tiny_keccak; 228 | extern crate futures_cpupool; 229 | 230 | mod error; 231 | mod hashing; 232 | mod options; 233 | mod algorithms; 234 | 235 | pub mod ops; 236 | pub mod util; 237 | 238 | pub use hashing::*; 239 | pub use error::Error; 240 | pub use options::Options; 241 | pub use algorithms::Algorithm; 242 | -------------------------------------------------------------------------------- /src/options.rs: -------------------------------------------------------------------------------- 1 | //! Option parsing and management. 2 | //! 3 | //! Use the `Options::parse()` function to get the program's configuration, 4 | //! as parsed from the commandline. 5 | //! 6 | //! # Examples 7 | //! 8 | //! ```skip 9 | //! let opts = Options::parse(); 10 | //! println!("{:#?}", opts); 11 | //! ``` 12 | 13 | 14 | use clap::{self, App, Arg, AppSettings}; 15 | use std::collections::BTreeSet; 16 | use std::path::{Path, PathBuf}; 17 | use self::super::Algorithm; 18 | use std::str::FromStr; 19 | use num_cpus; 20 | use std::fs; 21 | 22 | 23 | /// Representation of the application's all configurable values. 24 | #[derive(Debug, Clone, Hash, PartialEq, Eq)] 25 | pub struct Options { 26 | /// Directory to hash/verify. Default: `"."` 27 | pub dir: PathBuf, 28 | /// Hashing algorithm to use. Default: `"SHA1"` 29 | pub algorithm: Algorithm, 30 | /// Whether to verify or create checksums. Default: yes 31 | pub verify: bool, 32 | /// Max recursion depth. Infinite if None. Default: `0` 33 | pub depth: Option, 34 | /// In-/Output filename. Default: `"./INFERRED_FROM_DIRECTORY.hash"` 35 | pub file: (String, PathBuf), 36 | /// Whether to recurse down symlinks. Default: `true` 37 | pub follow_symlinks: bool, 38 | /// Files/directories to ignore. Default: none 39 | pub ignored_files: BTreeSet, 40 | /// # of threads used for hashing. 41 | /// 42 | /// Default: 1 43 | pub jobs: usize, 44 | } 45 | 46 | impl Options { 47 | /// Parse `env`-wide command-line arguments into an `Options` instance 48 | pub fn parse() -> Options { 49 | let matches = App::new("checksums") 50 | .setting(AppSettings::ColoredHelp) 51 | .version(crate_version!()) 52 | .author(crate_authors!("\n")) 53 | .about("Tool for making/verifying checksums of directory trees") 54 | .args(&[Arg::from_usage("[DIRECTORY] 'Directory to hash/verify'").default_value(".").validator(Options::directory_validator), 55 | Arg::from_usage("--algorithm=[algorithm] -a 'Hashing algorithm to use. {n}\ 56 | Supported algorithms: SHA{1,2-{224,256,384,512},3-{256,512}}, \ 57 | BLAKE{,2=2B,2S,3}, \ 58 | CRC{64,32{,C},16,8}, \ 59 | MD{5,6-{128,256,512}}, \ 60 | WHIRLPOOL, \ 61 | XOR8'") 62 | .next_line_help(true) 63 | .default_value("SHA1") 64 | .validator(Options::algorithm_validator), 65 | Arg::from_usage("--create -c 'Make checksums'").overrides_with("verify"), 66 | Arg::from_usage("--verify -v 'Verify checksums (default)'").overrides_with("create"), 67 | Arg::from_usage("--depth=[depth] -d 'Max recursion depth. `-1` for infinite.'. Default: don't recurse") 68 | .validator(Options::depth_validator) 69 | .overrides_with("recursive"), 70 | Arg::from_usage("--recursive -r 'Infinite recursion depth.'").overrides_with("depth"), 71 | Arg::from_usage("--file=[file] -f 'File with hashes to be read/created'").validator(Options::file_validator), 72 | Arg::from_usage("--force 'Override output file'"), 73 | Arg::from_usage("--follow-symlinks 'Recurse down symlinks. Default: yes'").overrides_with("no-follow-symlinks"), 74 | Arg::from_usage("--no-follow-symlinks 'Don\'t recurse down symlinks'").overrides_with("follow-symlinks"), 75 | Arg::from_usage("-i --ignore [file]... 'Ignore specified file(s)'"), 76 | Arg::from_usage("-j --jobs=[jobs] '# of threads used for hashing. No/empty value: # of CPU threads. -1: Infinite'") 77 | .empty_values(true) 78 | .validator(Options::jobs_validator)]) 79 | .get_matches(); 80 | 81 | let dir = fs::canonicalize(matches.value_of("DIRECTORY").unwrap()).unwrap(); 82 | let verify = !matches.is_present("create"); 83 | let file = Options::file_process(matches.value_of("file"), &dir); 84 | 85 | if file.1.exists() && !verify && !matches.is_present("force") { 86 | clap::Error { 87 | message: "The output file exists and was not overridden to prevent data loss.\n\ 88 | Pass the --force option to suppress this error." 89 | .to_string(), 90 | kind: clap::ErrorKind::MissingRequiredArgument, 91 | info: None, 92 | } 93 | .exit(); 94 | } else if !file.1.exists() && verify { 95 | clap::Error { 96 | message: format!("Can't find checksums file \"{}\".\n\ 97 | Did you mean to create it with -c?", file.0), 98 | kind: clap::ErrorKind::InvalidValue, 99 | info: None, 100 | } 101 | .exit(); 102 | } 103 | 104 | Options { 105 | dir: dir, 106 | algorithm: Algorithm::from_str(matches.value_of("algorithm").unwrap()).unwrap(), 107 | verify: verify, 108 | depth: if matches.is_present("recursive") { 109 | None 110 | } else { 111 | let i = matches.value_of("depth").map(|s| s.parse::().unwrap()).unwrap_or(0); 112 | if i < 0 { None } else { Some(i as usize) } 113 | }, 114 | file: file, 115 | follow_symlinks: !matches.is_present("no-follow-symlinks"), 116 | ignored_files: matches.values_of("ignore").map(|v| v.map(String::from).collect()).unwrap_or_default(), 117 | jobs: match matches.value_of("jobs") { 118 | None | Some("") => num_cpus::get() as usize, 119 | Some(s) => { 120 | match i32::from_str(s).unwrap() { 121 | -1 => usize::max_value(), 122 | i => i as usize, 123 | } 124 | } 125 | }, 126 | } 127 | } 128 | 129 | fn algorithm_validator(s: String) -> Result<(), String> { 130 | Algorithm::from_str(&s).map(|_| ()) 131 | } 132 | 133 | fn directory_validator(s: String) -> Result<(), String> { 134 | fs::canonicalize(s).map_err(|e| format!("directory: {}", e.to_string())).and_then(|p| { 135 | if p.is_file() { 136 | Err("DIRECTORY cannot be a file.".to_string()) 137 | } else { 138 | Ok(()) 139 | } 140 | }) 141 | } 142 | 143 | fn depth_validator(s: String) -> Result<(), String> { 144 | s.parse::().map(|_| ()).map_err(|e| e.to_string()) 145 | } 146 | 147 | fn file_validator(s: String) -> Result<(), String> { 148 | let mut buf = PathBuf::from(s); 149 | if buf.exists() && buf.is_dir() { 150 | Err("file exists and is a directory".to_string()) 151 | } else { 152 | buf.pop(); 153 | 154 | // Handle pathless filename 155 | if buf.as_os_str().is_empty() { 156 | Ok(()) 157 | } else { 158 | buf.canonicalize().map(|_| ()).map_err(|e| format!("file: {}", e.to_string())) 159 | } 160 | } 161 | } 162 | 163 | fn jobs_validator(s: String) -> Result<(), String> { 164 | if s.is_empty() { 165 | Ok(()) 166 | } else { 167 | i32::from_str(&s).map_err(|e| format!("jobs: {}", e)).and_then(|i| { 168 | if i == 0 { 169 | Err("cannot execute 0 jobs".to_string()) 170 | } else if i < -1 { 171 | Err("cannot execute a negative amount of jobs".to_string()) 172 | } else { 173 | Ok(()) 174 | } 175 | }) 176 | } 177 | } 178 | 179 | 180 | fn file_process(file: Option<&str>, dir: &PathBuf) -> (String, PathBuf) { 181 | match file { 182 | Some(file) => { 183 | let mut file = PathBuf::from(file); 184 | let file_name = file.file_name().unwrap().to_os_string(); 185 | 186 | file.pop(); 187 | // Handle pathless filename 188 | if file.as_os_str().is_empty() { 189 | file.push("."); 190 | } 191 | 192 | (file_name.to_str().unwrap().to_string(), 193 | file.canonicalize() 194 | .map(|mut p| { 195 | p.push(file_name); 196 | p 197 | }) 198 | .unwrap()) 199 | } 200 | None => { 201 | let mut file = dir.clone(); 202 | match dir.file_name() { 203 | Some(fname) => file.push(fname), 204 | None => file.push(Options::root_fname(dir)), 205 | } 206 | file.set_extension("hash"); 207 | 208 | (file.file_name().unwrap().to_str().unwrap().to_string(), file) 209 | } 210 | } 211 | } 212 | 213 | #[cfg(windows)] 214 | fn root_fname(dir: &Path) -> String { 215 | let dir = dir.as_os_str().to_str().unwrap().to_string(); 216 | dir[dir.len() - 3..dir.len() - 2].to_string() 217 | } 218 | 219 | #[cfg(not(windows))] 220 | fn root_fname(_: &Path) -> String { 221 | "root".to_string() 222 | } 223 | } 224 | --------------------------------------------------------------------------------