├── .github └── workflows │ └── build.yml ├── .gitignore ├── Cargo.toml ├── LICENSE ├── README.md ├── benches └── multihash.rs ├── derive ├── Cargo.toml └── src │ ├── lib.rs │ ├── multihash.rs │ └── utils.rs ├── examples └── custom_table.rs ├── rustfmt.toml ├── src ├── arb.rs ├── error.rs ├── hasher.rs ├── hasher_impl.rs ├── lib.rs ├── multihash.rs └── multihash_impl.rs └── tests └── lib.rs /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: build 4 | 5 | jobs: 6 | build: 7 | name: Lints and Test 8 | strategy: 9 | matrix: 10 | platform: [ubuntu-latest, macos-latest, windows-latest] 11 | toolchain: [stable] 12 | runs-on: ${{ matrix.platform }} 13 | 14 | steps: 15 | - name: Checkout sources 16 | uses: actions/checkout@v2 17 | 18 | - name: Install Rust toolchain 19 | uses: actions-rs/toolchain@v1 20 | with: 21 | profile: minimal 22 | toolchain: ${{ matrix.toolchain }} 23 | override: true 24 | components: rustfmt, clippy 25 | 26 | - name: Run cargo fmt 27 | uses: actions-rs/cargo@v1 28 | with: 29 | command: fmt 30 | args: --all -- --check 31 | 32 | - name: Run cargo clippy 33 | uses: actions-rs/cargo@v1 34 | with: 35 | command: clippy 36 | args: --all-targets --all-features --workspace -- -D warnings 37 | 38 | - name: Run cargo test 39 | uses: actions-rs/cargo@v1 40 | with: 41 | command: test 42 | args: --all-features --workspace 43 | 44 | coverage: 45 | name: Code Coverage 46 | runs-on: ubuntu-latest 47 | steps: 48 | - name: Checkout sources 49 | uses: actions/checkout@v2 50 | 51 | - name: Install stable Rust toolchain 52 | uses: actions-rs/toolchain@v1 53 | with: 54 | profile: minimal 55 | toolchain: stable 56 | override: true 57 | 58 | - name: Run cargo-tarpaulin 59 | uses: actions-rs/tarpaulin@v0.1 60 | with: 61 | version: '0.11.0' 62 | args: '-- --test-threads 1' 63 | 64 | - name: Upload to codecov.io 65 | uses: codecov/codecov-action@v1.0.6 66 | 67 | 
no-default-features: 68 | name: Clippy with no default features (for no_std envs) 69 | runs-on: ubuntu-latest 70 | steps: 71 | - name: Checkout sources 72 | uses: actions/checkout@v2 73 | 74 | - name: Install Rust toolchain 75 | uses: actions-rs/toolchain@v1 76 | with: 77 | profile: minimal 78 | toolchain: stable 79 | override: true 80 | components: clippy 81 | 82 | - name: Run cargo clippy 83 | uses: actions-rs/cargo@v1 84 | with: 85 | command: clippy 86 | args: --no-default-features --workspace -- -D warnings 87 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | *.bk -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["derive", "."] 3 | 4 | [package] 5 | name = "tiny-multihash" 6 | version = "0.5.0" 7 | authors = ["David Craven "] 8 | edition = "2018" 9 | description = "Implementation of the multihash format" 10 | repository = "https://github.com/ipfs-rust/tiny-multihash" 11 | documentation = "https://docs.rs/tiny-multihash" 12 | readme = "README.md" 13 | keywords = ["multihash", "ipfs"] 14 | license = "MIT" 15 | 16 | [features] 17 | default = ["std", "all", "derive", "multihash-impl"] 18 | std = ["unsigned-varint/std", "tiny-multihash-derive/std"] 19 | multihash-impl = ["derive", "all"] 20 | derive = ["tiny-multihash-derive"] 21 | test = ["multihash-impl", "quickcheck", "rand"] 22 | all = ["blake2b", "blake2s", "blake3", "sha1", "sha2", "sha3", "strobe"] 23 | scale-codec = ["parity-scale-codec"] 24 | serde-codec = ["serde", "generic-array/serde"] 25 | 26 | blake2b = ["blake2b_simd"] 27 | blake2s = ["blake2s_simd"] 28 | sha1 = ["digest", "sha-1"] 29 | sha2 = ["digest", "sha-2"] 30 | sha3 = ["digest", "sha-3"] 31 | strobe = ["strobe-rs"] 32 | 33 | 
[dependencies] 34 | generic-array = "0.14.4" 35 | parity-scale-codec = { version = "1.3.5", optional = true, default-features = false, features = ["derive"] } 36 | quickcheck = { version = "0.9.2", optional = true } 37 | rand = { version = "0.7.3", optional = true } 38 | serde = { version = "1.0.116", optional = true, default-features = false, features = ["derive"] } 39 | tiny-multihash-derive = { version = "0.5.0", path = "derive", default-features = false, optional = true } 40 | unsigned-varint = "0.5.1" 41 | 42 | blake2b_simd = { version = "0.5.10", default-features = false, optional = true } 43 | blake2s_simd = { version = "0.5.10", default-features = false, optional = true } 44 | blake3 = { version = "0.3.7", default-features = false, optional = true } 45 | digest = { version = "0.9.0", default-features = false, optional = true } 46 | sha-1 = { version = "0.9.1", default-features = false, optional = true } 47 | sha-2 = { version = "0.9.0", default-features = false, optional = true, package = "sha2" } 48 | sha-3 = { version = "0.9.0", default-features = false, optional = true, package = "sha3" } 49 | strobe-rs = { version = "0.5.4", optional = true } 50 | 51 | [dev-dependencies] 52 | criterion = "0.3.3" 53 | quickcheck = "0.9.2" 54 | rand = "0.7.3" 55 | serde_json = "1.0.58" 56 | 57 | [[bench]] 58 | name = "multihash" 59 | harness = false 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (C) 2015-2016 Friedel Ziegelmayer 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 9 | of the Software, and to permit persons to whom 
the Software is furnished to do 10 | so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | Status API Training Shop Blog About Pricing 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # tiny-multihash 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/multihash?style=flat-square)](https://crates.io/crates/tiny-multihash) 4 | [![Documentation](https://docs.rs/multihash/badge.svg?style=flat-square)](https://docs.rs/tiny-multihash) 5 | 6 | > [multihash](https://github.com/multiformats/multihash) implementation in Rust. 7 | 8 | ## Table of Contents 9 | 10 | - [Install](#install) 11 | - [Usage](#usage) 12 | - [Supported Hash Types](#supported-hash-types) 13 | - [Maintainers](#maintainers) 14 | - [Contribute](#contribute) 15 | - [License](#license) 16 | 17 | ## Install 18 | 19 | First add this to your `Cargo.toml` 20 | 21 | ```toml 22 | [dependencies] 23 | tiny-multihash = "*" 24 | ``` 25 | 26 | Then run `cargo build`. 
27 | 28 | ## Usage 29 | 30 | ```rust 31 | use tiny_multihash::{Multihash, MultihashDigest, SHA2_256}; 32 | 33 | fn main() { 34 | let hash = Multihash::new(SHA2_256, b"my hash"); 35 | println!("{:?}", hash); 36 | } 37 | ``` 38 | 39 | ### Using a custom code table 40 | 41 | You can derive your own application specific code table: 42 | 43 | ```rust 44 | use tiny_multihash::derive::Multihash; 45 | use tiny_multihash::{Hasher, MultihashDigest}; 46 | 47 | const FOO: u64 = 0x01; 48 | const BAR: u64 = 0x02; 49 | 50 | #[derive(Clone, Debug, Eq, Multihash, PartialEq)] 51 | pub enum Multihash { 52 | #[mh(code = FOO, hasher = tiny_multihash::Sha2_256)] 53 | Foo(tiny_multihash::Sha2Digest), 54 | #[mh(code = BAR, hasher = tiny_multihash::Sha2_512)] 55 | Bar(tiny_multihash::Sha2Digest), 56 | } 57 | 58 | fn main() { 59 | let hash = Multihash::new(FOO, b"my hash"); 60 | println!("{:?}", hash); 61 | } 62 | ``` 63 | 64 | ## Supported Hash Types 65 | 66 | * `SHA1` 67 | * `SHA2-256` 68 | * `SHA2-512` 69 | * `SHA3`/`Keccak` 70 | * `Blake2b-256`/`Blake2b-512`/`Blake2s-128`/`Blake2s-256` 71 | 72 | ## Maintainers 73 | 74 | [@dvc94ch](https://github.com/dvc94ch) 75 | -------------------------------------------------------------------------------- /benches/multihash.rs: -------------------------------------------------------------------------------- 1 | use criterion::{black_box, criterion_group, criterion_main, Criterion}; 2 | use rand::Rng; 3 | 4 | use tiny_multihash::{ 5 | Blake2b256, Blake2b512, Blake2s128, Blake2s256, Blake3_256, Hasher, Keccak224, Keccak256, 6 | Keccak384, Keccak512, Sha1, Sha2_256, Sha2_512, Sha3_224, Sha3_256, Sha3_384, Sha3_512, 7 | StatefulHasher, Strobe256, Strobe512, 8 | }; 9 | 10 | macro_rules! 
group_digest { 11 | ($criterion:ident, $( $id:expr => $hash:ident, $input:expr)* ) => {{ 12 | let mut group = $criterion.benchmark_group("digest"); 13 | $( 14 | group.bench_function($id, |b| { 15 | b.iter(|| { 16 | let _ = black_box($hash::digest($input)); 17 | }) 18 | }); 19 | )* 20 | group.finish(); 21 | }}; 22 | } 23 | 24 | macro_rules! group_stream { 25 | ($criterion:ident, $( $id:expr => $hash:ident, $input:expr)* ) => {{ 26 | let mut group = $criterion.benchmark_group("stream"); 27 | $( 28 | group.bench_function($id, |b| { 29 | b.iter(|| { 30 | let _ = black_box({ 31 | let mut hasher = <$hash>::default(); 32 | for i in 0..3 { 33 | let start = i * 256; 34 | hasher.update(&$input[start..(start + 256)]); 35 | } 36 | hasher.finalize() 37 | }); 38 | }) 39 | }); 40 | )* 41 | group.finish(); 42 | }}; 43 | } 44 | 45 | fn bench_digest(c: &mut Criterion) { 46 | let mut rng = rand::thread_rng(); 47 | let data: Vec = (0..1024).map(|_| rng.gen()).collect(); 48 | group_digest!(c, 49 | //"identity" => Identity2, &data 50 | "sha1" => Sha1, &data 51 | "sha2_256" => Sha2_256, &data 52 | "sha2_512" => Sha2_512, &data 53 | "sha3_224" => Sha3_224, &data 54 | "sha3_256" => Sha3_256, &data 55 | "sha3_384" => Sha3_384, &data 56 | "sha3_512" => Sha3_512, &data 57 | "keccak_224" => Keccak224, &data 58 | "keccak_256" => Keccak256, &data 59 | "keccak_384" => Keccak384, &data 60 | "keccak_512" => Keccak512, &data 61 | "blake2b_256" => Blake2b256, &data 62 | "blake2b_512" => Blake2b512, &data 63 | "blake2s_128" => Blake2s128, &data 64 | "blake2s_256" => Blake2s256, &data 65 | "blake3_256" => Blake3_256, &data 66 | "strobe_256" => Strobe256, &data 67 | "strobe_512" => Strobe512, &data 68 | ); 69 | } 70 | 71 | /// Chunks the data into 256-byte slices. 
72 | fn bench_stream(c: &mut Criterion) { 73 | let mut rng = rand::thread_rng(); 74 | let data: Vec = (0..1024).map(|_| rng.gen()).collect(); 75 | group_stream!(c, 76 | //"identity" => Identity, &data 77 | "sha1" => Sha1, &data 78 | "sha2_256" => Sha2_256, &data 79 | "sha2_512" => Sha2_512, &data 80 | "sha3_224" => Sha3_224, &data 81 | "sha3_256" => Sha3_256, &data 82 | "sha3_384" => Sha3_384, &data 83 | "sha3_512" => Sha3_512, &data 84 | "keccak_224" => Keccak224, &data 85 | "keccak_256" => Keccak256, &data 86 | "keccak_384" => Keccak384, &data 87 | "keccak_512" => Keccak512, &data 88 | "blake2b_256" => Blake2b256, &data 89 | "blake2b_512" => Blake2b512, &data 90 | "blake2s_128" => Blake2s128, &data 91 | "blake2s_256" => Blake2s256, &data 92 | "blake3_256" => Blake3_256, &data 93 | "strobe_256" => Strobe256, &data 94 | "strobe_512" => Strobe512, &data 95 | ); 96 | } 97 | 98 | criterion_group!(benches, bench_digest, bench_stream); 99 | criterion_main!(benches); 100 | -------------------------------------------------------------------------------- /derive/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "tiny-multihash-derive" 3 | version = "0.5.0" 4 | authors = ["David Craven "] 5 | edition = "2018" 6 | description = "Proc macro for deriving custom multihash tables." 
7 | license = "MIT" 8 | repository = "https://github.com/ipfs-rust/tiny-multihash" 9 | 10 | [lib] 11 | proc-macro = true 12 | 13 | [dependencies] 14 | proc-macro2 = { version = "1.0.24", features = ["span-locations"] } 15 | proc-macro-crate = "0.1.5" 16 | proc-macro-error = "1.0.4" 17 | quote = "1.0.7" 18 | syn = "1.0.42" 19 | synstructure = "0.12.4" 20 | 21 | [features] 22 | default = ["std"] 23 | std = [] 24 | 25 | [dev-dependencies] 26 | pretty_assertions = "0.6.1" 27 | tiny-multihash = { path = "..", default-features = false } 28 | -------------------------------------------------------------------------------- /derive/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! This proc macro derives a custom Multihash code table from a list of hashers. 2 | //! 3 | //! The digests are stack allocated with a fixed size. That size needs to be big enough to hold any 4 | //! of the specified hash digests. This cannot be determined reliably on compile-time, hence it 5 | //! needs to set manually via the `alloc_size` attribute. Also you might want to set it to bigger 6 | //! sizes then necessarily needed for backwards/forward compatibility. 7 | //! 8 | //! If you set `#mh(alloc_size = …)` to a too low value, you will get compiler errors. Please note 9 | //! the the sizes are checked only on a syntactic level and *not* on the type level. This means 10 | //! that digest need to have a size generic, which is a valid `typenum`, for example `U32` or 11 | //! `generic_array::typenum::U64`. 12 | //! 13 | //! You can disable those compiler errors with setting the `no_alloc_size_errors` attribute. This 14 | //! can be useful if you e.g. have specified type aliases for your hash digests and you are sure 15 | //! you use the correct value for `alloc_size`. 16 | //! 17 | //! # Example 18 | //! 19 | //! ``` 20 | //! use tiny_multihash::derive::Multihash; 21 | //! use tiny_multihash::{U32, U64, MultihashCode}; 22 | //! 23 | //! 
#[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] 24 | //! #[mh(alloc_size = U64)] 25 | //! pub enum Code { 26 | //! #[mh(code = 0x01, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 27 | //! Foo, 28 | //! #[mh(code = 0x02, hasher = tiny_multihash::Sha2_512, digest = tiny_multihash::Sha2Digest)] 29 | //! Bar, 30 | //! } 31 | //! 32 | //! let hash = Code::Foo.digest(b"hello world!"); 33 | //! println!("{:02x?}", hash); 34 | //! ``` 35 | extern crate proc_macro; 36 | 37 | mod multihash; 38 | mod utils; 39 | 40 | use proc_macro::TokenStream; 41 | use proc_macro_error::proc_macro_error; 42 | use synstructure::{decl_derive, Structure}; 43 | 44 | decl_derive!([Multihash, attributes(mh)] => #[proc_macro_error] multihash); 45 | fn multihash(s: Structure) -> TokenStream { 46 | multihash::multihash(s).into() 47 | } 48 | -------------------------------------------------------------------------------- /derive/src/multihash.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | 3 | use crate::utils; 4 | use proc_macro2::TokenStream; 5 | use quote::quote; 6 | #[cfg(not(test))] 7 | use quote::ToTokens; 8 | use syn::parse::{Parse, ParseStream}; 9 | use syn::spanned::Spanned; 10 | use synstructure::{Structure, VariantInfo}; 11 | 12 | mod kw { 13 | use syn::custom_keyword; 14 | 15 | custom_keyword!(code); 16 | custom_keyword!(digest); 17 | custom_keyword!(hasher); 18 | custom_keyword!(mh); 19 | custom_keyword!(alloc_size); 20 | custom_keyword!(no_alloc_size_errors); 21 | } 22 | 23 | /// Attributes for the enum items. 
24 | #[derive(Debug)] 25 | enum MhAttr { 26 | Code(utils::Attr), 27 | Hasher(utils::Attr>), 28 | Digest(utils::Attr), 29 | } 30 | 31 | impl Parse for MhAttr { 32 | fn parse(input: ParseStream) -> syn::Result { 33 | if input.peek(kw::code) { 34 | Ok(MhAttr::Code(input.parse()?)) 35 | } else if input.peek(kw::hasher) { 36 | Ok(MhAttr::Hasher(input.parse()?)) 37 | } else { 38 | Ok(MhAttr::Digest(input.parse()?)) 39 | } 40 | } 41 | } 42 | 43 | /// Attributes of the top-level derive. 44 | #[derive(Debug)] 45 | enum DeriveAttr { 46 | AllocSize(utils::Attr), 47 | NoAllocSizeErrors(kw::no_alloc_size_errors), 48 | } 49 | 50 | impl Parse for DeriveAttr { 51 | fn parse(input: ParseStream) -> syn::Result { 52 | if input.peek(kw::alloc_size) { 53 | Ok(Self::AllocSize(input.parse()?)) 54 | } else if input.peek(kw::no_alloc_size_errors) { 55 | Ok(Self::NoAllocSizeErrors(input.parse()?)) 56 | } else { 57 | Err(syn::Error::new(input.span(), "unknown attribute")) 58 | } 59 | } 60 | } 61 | 62 | struct Params { 63 | mh_crate: syn::Ident, 64 | code_enum: syn::Ident, 65 | } 66 | 67 | #[derive(Debug)] 68 | struct Hash { 69 | ident: syn::Ident, 70 | code: syn::Expr, 71 | hasher: Box, 72 | digest: syn::Path, 73 | } 74 | 75 | impl Hash { 76 | fn code_into_u64(&self, params: &Params) -> TokenStream { 77 | let ident = &self.ident; 78 | let code_enum = ¶ms.code_enum; 79 | let code = &self.code; 80 | quote!(#code_enum::#ident => #code) 81 | } 82 | 83 | fn code_from_u64(&self) -> TokenStream { 84 | let ident = &self.ident; 85 | let code = &self.code; 86 | quote!(#code => Ok(Self::#ident)) 87 | } 88 | 89 | fn code_digest(&self, params: &Params) -> TokenStream { 90 | let ident = &self.ident; 91 | let hasher = &self.hasher; 92 | let code = &self.code; 93 | let mh_crate = ¶ms.mh_crate; 94 | quote!(Self::#ident => { 95 | let digest = #hasher::digest(input); 96 | #mh_crate::Multihash::wrap(#code, &digest.as_ref()).unwrap() 97 | }) 98 | } 99 | 100 | fn from_digest(&self, params: &Params) -> TokenStream 
{ 101 | let digest = &self.digest; 102 | let code_enum = ¶ms.code_enum; 103 | let ident = &self.ident; 104 | quote! { 105 | impl From<&#digest> for #code_enum { 106 | fn from(digest: &#digest) -> Self { 107 | Self::#ident 108 | } 109 | } 110 | } 111 | } 112 | } 113 | 114 | impl<'a> From<&'a VariantInfo<'a>> for Hash { 115 | fn from(bi: &'a VariantInfo<'a>) -> Self { 116 | let mut code = None; 117 | let mut digest = None; 118 | let mut hasher = None; 119 | for attr in bi.ast().attrs { 120 | let attr: Result, _> = syn::parse2(attr.tokens.clone()); 121 | if let Ok(attr) = attr { 122 | for attr in attr.attrs { 123 | match attr { 124 | MhAttr::Code(attr) => code = Some(attr.value), 125 | MhAttr::Hasher(attr) => hasher = Some(attr.value), 126 | MhAttr::Digest(attr) => digest = Some(attr.value), 127 | } 128 | } 129 | } 130 | } 131 | 132 | let ident = bi.ast().ident.clone(); 133 | let code = code.unwrap_or_else(|| { 134 | let msg = "Missing code attribute: e.g. #[mh(code = multihash::SHA3_256)]"; 135 | #[cfg(test)] 136 | panic!(msg); 137 | #[cfg(not(test))] 138 | proc_macro_error::abort!(ident, msg); 139 | }); 140 | let hasher = hasher.unwrap_or_else(|| { 141 | let msg = "Missing hasher attribute: e.g. #[mh(hasher = multihash::Sha2_256)]"; 142 | #[cfg(test)] 143 | panic!(msg); 144 | #[cfg(not(test))] 145 | proc_macro_error::abort!(ident, msg); 146 | }); 147 | let digest = digest.unwrap_or_else(|| { 148 | let msg = "Missing digest atttibute: e.g. #[mh(digest = multihash::Sha2Digest)]"; 149 | #[cfg(test)] 150 | panic!(msg); 151 | #[cfg(not(test))] 152 | proc_macro_error::abort!(ident, msg); 153 | }); 154 | Self { 155 | ident, 156 | code, 157 | digest, 158 | hasher, 159 | } 160 | } 161 | } 162 | 163 | /// Parse top-level enum [#mh()] attributes. 164 | /// 165 | /// Returns the `alloc_size` and whether errors regarding to `alloc_size` should be reported or not. 
166 | fn parse_code_enum_attrs(ast: &syn::DeriveInput) -> (syn::Type, bool) { 167 | let mut alloc_size = None; 168 | let mut no_alloc_size_errors = false; 169 | 170 | for attr in &ast.attrs { 171 | let derive_attrs: Result, _> = syn::parse2(attr.tokens.clone()); 172 | if let Ok(derive_attrs) = derive_attrs { 173 | for derive_attr in derive_attrs.attrs { 174 | match derive_attr { 175 | DeriveAttr::AllocSize(alloc_size_attr) => { 176 | alloc_size = Some(alloc_size_attr.value) 177 | } 178 | DeriveAttr::NoAllocSizeErrors(_) => no_alloc_size_errors = true, 179 | } 180 | } 181 | } 182 | } 183 | match alloc_size { 184 | Some(alloc_size) => (alloc_size, no_alloc_size_errors), 185 | None => { 186 | let msg = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = U64)]"; 187 | #[cfg(test)] 188 | panic!(msg); 189 | #[cfg(not(test))] 190 | proc_macro_error::abort!(&ast.ident, msg); 191 | } 192 | } 193 | } 194 | 195 | /// Return an error if the same code is used several times. 196 | /// 197 | /// This only checks for string equality, though this should still catch most errors caused by 198 | /// copy and pasting. 
199 | fn error_code_duplicates(hashes: &[Hash]) { 200 | // Use a temporary store to determine whether a certain value is unique or not 201 | let mut uniq = HashSet::new(); 202 | 203 | hashes.iter().for_each(|hash| { 204 | let code = &hash.code; 205 | let msg = format!( 206 | "the #mh(code) attribute `{}` is defined multiple times", 207 | quote!(#code) 208 | ); 209 | 210 | // It's a duplicate 211 | if !uniq.insert(code) { 212 | #[cfg(test)] 213 | panic!(msg); 214 | #[cfg(not(test))] 215 | { 216 | let already_defined = uniq.get(code).unwrap(); 217 | let line = already_defined.to_token_stream().span().start().line; 218 | proc_macro_error::emit_error!( 219 | &hash.code, msg; 220 | note = "previous definition of `{}` at line {}", quote!(#code), line; 221 | ); 222 | } 223 | } 224 | }); 225 | } 226 | 227 | /// An error that contains a span in order to produce nice error messages. 228 | #[derive(Debug)] 229 | struct ParseError(proc_macro2::Span); 230 | 231 | /// Parse a path containing a `typenum` unsigned integer (e.g. `U64`) into a u64 232 | fn parse_unsigned_typenum(typenum_path: &syn::Type) -> Result { 233 | match typenum_path { 234 | syn::Type::Path(type_path) => match type_path.path.segments.last() { 235 | Some(path_segment) => { 236 | let typenum_ident = &path_segment.ident; 237 | let typenum = typenum_ident.to_string(); 238 | match typenum.as_str().split_at(1) { 239 | ("U", byte_size) => byte_size 240 | .parse::() 241 | .map_err(|_| ParseError(typenum_ident.span())), 242 | _ => Err(ParseError(typenum_ident.span())), 243 | } 244 | } 245 | None => Err(ParseError(type_path.path.span())), 246 | }, 247 | _ => Err(ParseError(typenum_path.span())), 248 | } 249 | } 250 | 251 | /// Returns the max size as u64. 252 | /// 253 | /// Emits an error if the `#mh(alloc_size)` attribute doesn't contain a valid unsigned integer 254 | /// `typenum`. 
255 | fn parse_alloc_size_attribute(alloc_size: &syn::Type) -> u64 { 256 | parse_unsigned_typenum(&alloc_size).unwrap_or_else(|_| { 257 | let msg = "`alloc_size` attribute must be a `typenum`, e.g. #[mh(alloc_size = U64)]"; 258 | #[cfg(test)] 259 | panic!(msg); 260 | #[cfg(not(test))] 261 | proc_macro_error::abort!(&alloc_size, msg); 262 | }) 263 | } 264 | 265 | /// Return a warning/error if the specified alloc_size is smaller than the biggest digest 266 | fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::Type) { 267 | let expected_alloc_size = parse_alloc_size_attribute(expected_alloc_size_type); 268 | 269 | let maybe_error: Result<(), ParseError> = hashes 270 | .iter() 271 | .map(|hash| { 272 | // The digest type must have a size parameter of the shape `U`, else we error. 273 | match hash.digest.segments.last() { 274 | Some(path_segment) => match &path_segment.arguments { 275 | syn::PathArguments::AngleBracketed(arguments) => match arguments.args.last() { 276 | Some(syn::GenericArgument::Type(path)) => { 277 | match parse_unsigned_typenum(&path) { 278 | Ok(max_digest_size) => { 279 | if max_digest_size > expected_alloc_size { 280 | let msg = format!("The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (U{})", 281 | max_digest_size); 282 | #[cfg(test)] 283 | panic!(msg); 284 | #[cfg(not(test))] 285 | { 286 | let digest = &hash.digest.to_token_stream().to_string().replace(" ", ""); 287 | let line = &hash.digest.span().start().line; 288 | proc_macro_error::emit_error!( 289 | &expected_alloc_size_type, msg; 290 | note = "the bigger digest is `{}` at line {}", digest, line; 291 | ); 292 | } 293 | } 294 | Ok(()) 295 | }, 296 | Err(err) => Err(err), 297 | } 298 | }, 299 | _ => Err(ParseError(arguments.args.span())), 300 | }, 301 | _ => Err(ParseError(path_segment.span())), 302 | }, 303 | None => Err(ParseError(hash.digest.span())), 304 | } 305 | }).collect(); 306 | 307 | if let Err(_error) = maybe_error { 308 | let msg = 
"Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`"; 309 | #[cfg(test)] 310 | panic!(msg); 311 | #[cfg(not(test))] 312 | { 313 | proc_macro_error::emit_error!(&_error.0, msg); 314 | } 315 | } 316 | } 317 | 318 | pub fn multihash(s: Structure) -> TokenStream { 319 | let mh_crate = utils::use_crate("tiny-multihash"); 320 | let code_enum = &s.ast().ident; 321 | let (alloc_size, no_alloc_size_errors) = parse_code_enum_attrs(&s.ast()); 322 | let hashes: Vec<_> = s.variants().iter().map(Hash::from).collect(); 323 | 324 | error_code_duplicates(&hashes); 325 | 326 | if !no_alloc_size_errors { 327 | error_alloc_size(&hashes, &alloc_size); 328 | } 329 | 330 | let params = Params { 331 | mh_crate: mh_crate.clone(), 332 | code_enum: code_enum.clone(), 333 | }; 334 | 335 | let code_into_u64 = hashes.iter().map(|h| h.code_into_u64(¶ms)); 336 | let code_from_u64 = hashes.iter().map(|h| h.code_from_u64()); 337 | let code_digest = hashes.iter().map(|h| h.code_digest(¶ms)); 338 | let from_digest = hashes.iter().map(|h| h.from_digest(¶ms)); 339 | 340 | quote! 
{ 341 | impl #mh_crate::MultihashCode for #code_enum { 342 | type AllocSize = #alloc_size; 343 | 344 | fn digest(&self, input: &[u8]) -> #mh_crate::Multihash { 345 | use #mh_crate::Hasher; 346 | match self { 347 | #(#code_digest,)* 348 | } 349 | } 350 | 351 | fn multihash_from_digest<'a, S, D>(digest: &'a D) -> #mh_crate::Multihash 352 | where 353 | S: #mh_crate::Size, 354 | D: #mh_crate::Digest, 355 | Self: From<&'a D>, 356 | { 357 | let code = Self::from(&digest); 358 | #mh_crate::Multihash::wrap(code.into(), &digest.as_ref()).unwrap() 359 | } 360 | } 361 | 362 | impl From<#code_enum> for u64 { 363 | fn from(code: #code_enum) -> Self { 364 | match code { 365 | #(#code_into_u64,)* 366 | } 367 | } 368 | } 369 | 370 | impl core::convert::TryFrom for #code_enum { 371 | type Error = #mh_crate::Error; 372 | 373 | fn try_from(code: u64) -> Result { 374 | match code { 375 | #(#code_from_u64,)* 376 | _ => Err(#mh_crate::Error::UnsupportedCode(code)) 377 | } 378 | } 379 | } 380 | 381 | #(#from_digest)* 382 | } 383 | } 384 | 385 | #[cfg(test)] 386 | mod tests { 387 | use super::*; 388 | 389 | #[test] 390 | fn test_multihash_derive() { 391 | let input = quote! { 392 | #[derive(Clone, Multihash)] 393 | #[mh(alloc_size = U32)] 394 | pub enum Code { 395 | #[mh(code = tiny_multihash::IDENTITY, hasher = tiny_multihash::Identity256, digest = tiny_multihash::IdentityDigest)] 396 | Identity256, 397 | /// Multihash array for hash function. 398 | #[mh(code = 0x38b64f, hasher = tiny_multihash::Strobe256, digest = tiny_multihash::StrobeDigest)] 399 | Strobe256, 400 | } 401 | }; 402 | let expected = quote! 
{ 403 | impl tiny_multihash::MultihashCode for Code { 404 | type AllocSize = U32; 405 | 406 | fn digest(&self, input: &[u8]) -> tiny_multihash::Multihash { 407 | use tiny_multihash::Hasher; 408 | match self { 409 | Self::Identity256 => { 410 | let digest = tiny_multihash::Identity256::digest(input); 411 | tiny_multihash::Multihash::wrap(tiny_multihash::IDENTITY, &digest.as_ref()).unwrap() 412 | }, 413 | Self::Strobe256 => { 414 | let digest = tiny_multihash::Strobe256::digest(input); 415 | tiny_multihash::Multihash::wrap(0x38b64f, &digest.as_ref()).unwrap() 416 | }, 417 | } 418 | } 419 | 420 | fn multihash_from_digest<'a, S, D>(digest: &'a D) -> tiny_multihash::Multihash 421 | where 422 | S: tiny_multihash::Size, 423 | D: tiny_multihash::Digest, 424 | Self: From<&'a D>, 425 | { 426 | let code = Self::from(&digest); 427 | tiny_multihash::Multihash::wrap(code.into(), &digest.as_ref()).unwrap() 428 | } 429 | } 430 | 431 | 432 | impl From for u64 { 433 | fn from(code: Code) -> Self { 434 | match code { 435 | Code::Identity256 => tiny_multihash::IDENTITY, 436 | Code::Strobe256 => 0x38b64f, 437 | } 438 | } 439 | } 440 | 441 | impl core::convert::TryFrom for Code { 442 | type Error = tiny_multihash::Error; 443 | 444 | fn try_from(code: u64) -> Result { 445 | match code { 446 | tiny_multihash::IDENTITY => Ok(Self::Identity256), 447 | 0x38b64f => Ok(Self::Strobe256), 448 | _ => Err(tiny_multihash::Error::UnsupportedCode(code)) 449 | } 450 | } 451 | } 452 | 453 | impl From<&tiny_multihash::IdentityDigest > for Code { 454 | fn from(digest: &tiny_multihash::IdentityDigest) -> Self { 455 | Self::Identity256 456 | } 457 | } 458 | impl From<&tiny_multihash::StrobeDigest > for Code { 459 | fn from(digest: &tiny_multihash::StrobeDigest) -> Self { 460 | Self::Strobe256 461 | } 462 | } 463 | }; 464 | let derive_input = syn::parse2(input).unwrap(); 465 | let s = Structure::new(&derive_input); 466 | let result = multihash(s); 467 | utils::assert_proc_macro(result, expected); 468 | } 
469 | 470 | #[test] 471 | #[should_panic( 472 | expected = "the #mh(code) attribute `tiny_multihash :: SHA2_256` is defined multiple times" 473 | )] 474 | fn test_multihash_error_code_duplicates() { 475 | let input = quote! { 476 | #[derive(Clone, Multihash)] 477 | #[mh(alloc_size = U64)] 478 | pub enum Multihash { 479 | #[mh(code = tiny_multihash::SHA2_256, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 480 | Identity256, 481 | #[mh(code = tiny_multihash::SHA2_256, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 482 | Identity256, 483 | } 484 | }; 485 | let derive_input = syn::parse2(input).unwrap(); 486 | let s = Structure::new(&derive_input); 487 | multihash(s); 488 | } 489 | 490 | #[test] 491 | #[should_panic(expected = "the #mh(code) attribute `0x14` is defined multiple times")] 492 | fn test_multihash_error_code_duplicates_numbers() { 493 | let input = quote! { 494 | #[derive(Clone, Multihash)] 495 | #[mh(alloc_size = U32)] 496 | pub enum Code { 497 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 498 | Identity256, 499 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 500 | Identity256, 501 | } 502 | }; 503 | let derive_input = syn::parse2(input).unwrap(); 504 | let s = Structure::new(&derive_input); 505 | multihash(s); 506 | } 507 | 508 | #[test] 509 | #[should_panic( 510 | expected = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = U64)]" 511 | )] 512 | fn test_multihash_error_no_alloc_size() { 513 | let input = quote! 
{ 514 | #[derive(Clone, Multihash)] 515 | pub enum Code { 516 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 517 | Sha2_256, 518 | } 519 | }; 520 | let derive_input = syn::parse2(input).unwrap(); 521 | let s = Structure::new(&derive_input); 522 | multihash(s); 523 | } 524 | 525 | #[test] 526 | #[should_panic( 527 | expected = "The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (U32)" 528 | )] 529 | fn test_multihash_error_too_small_alloc_size() { 530 | let input = quote! { 531 | #[derive(Clone, Multihash)] 532 | #[mh(alloc_size = U16)] 533 | pub enum Code { 534 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 535 | Sha2_256, 536 | } 537 | }; 538 | let derive_input = syn::parse2(input).unwrap(); 539 | let s = Structure::new(&derive_input); 540 | multihash(s); 541 | } 542 | 543 | #[test] 544 | #[should_panic( 545 | expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" 546 | )] 547 | fn test_multihash_error_digest_invalid_size_type() { 548 | let input = quote! { 549 | #[derive(Clone, Multihash)] 550 | #[mh(alloc_size = U32)] 551 | pub enum Code { 552 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest)] 553 | Sha2_256, 554 | } 555 | }; 556 | let derive_input = syn::parse2(input).unwrap(); 557 | let s = Structure::new(&derive_input); 558 | multihash(s); 559 | } 560 | 561 | #[test] 562 | #[should_panic( 563 | expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" 564 | )] 565 | fn test_multihash_error_digest_invalid_size_type2() { 566 | let input = quote! 
{ 567 | #[derive(Clone, Multihash)] 568 | #[mh(alloc_size = U32)] 569 | pub enum Code { 570 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = tiny_multihash::Sha2Digest<_>)] 571 | Sha2_256, 572 | } 573 | }; 574 | let derive_input = syn::parse2(input).unwrap(); 575 | let s = Structure::new(&derive_input); 576 | multihash(s); 577 | } 578 | 579 | #[test] 580 | #[should_panic( 581 | expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" 582 | )] 583 | fn test_multihash_error_digest_without_typenum() { 584 | let input = quote! { 585 | #[derive(Clone, Multihash)] 586 | #[mh(alloc_size = U32)] 587 | pub enum Code { 588 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = Sha2_256Digest)] 589 | Sha2_256, 590 | } 591 | }; 592 | let derive_input = syn::parse2(input).unwrap(); 593 | let s = Structure::new(&derive_input); 594 | multihash(s); 595 | } 596 | 597 | // This one does not panic, die to `no_alloc_size_errors` 598 | #[test] 599 | fn test_multihash_error_digest_without_typenum_no_alloc_size_errors() { 600 | let input = quote! 
{ 601 | #[derive(Clone, Multihash)] 602 | #[mh(alloc_size = U32, no_alloc_size_errors)] 603 | pub enum Code { 604 | #[mh(code = 0x14, hasher = tiny_multihash::Sha2_256, digest = Sha2_256Digest)] 605 | Sha2_256, 606 | } 607 | }; 608 | let derive_input = syn::parse2(input).unwrap(); 609 | let s = Structure::new(&derive_input); 610 | multihash(s); 611 | } 612 | } 613 | -------------------------------------------------------------------------------- /derive/src/utils.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::Span; 2 | use syn::parse::{Parse, ParseStream}; 3 | use syn::punctuated::Punctuated; 4 | 5 | pub fn use_crate(name: &str) -> syn::Ident { 6 | let krate = proc_macro_crate::crate_name(name).unwrap_or_else(|_| "crate".into()); 7 | syn::Ident::new(&krate, Span::call_site()) 8 | } 9 | 10 | #[derive(Debug)] 11 | pub struct Attrs { 12 | pub paren: syn::token::Paren, 13 | pub attrs: Punctuated, 14 | } 15 | 16 | impl Parse for Attrs { 17 | fn parse(input: ParseStream) -> syn::Result { 18 | let content; 19 | let paren = syn::parenthesized!(content in input); 20 | let attrs = content.parse_terminated(A::parse)?; 21 | Ok(Self { paren, attrs }) 22 | } 23 | } 24 | 25 | #[derive(Debug)] 26 | pub struct Attr { 27 | pub key: K, 28 | pub eq: syn::token::Eq, 29 | pub value: V, 30 | } 31 | 32 | impl Parse for Attr { 33 | fn parse(input: ParseStream) -> syn::Result { 34 | Ok(Self { 35 | key: input.parse()?, 36 | eq: input.parse()?, 37 | value: input.parse()?, 38 | }) 39 | } 40 | } 41 | 42 | #[cfg(test)] 43 | pub(crate) fn assert_proc_macro( 44 | result: proc_macro2::TokenStream, 45 | expected: proc_macro2::TokenStream, 46 | ) { 47 | let result = result.to_string(); 48 | let expected = expected.to_string(); 49 | pretty_assertions::assert_eq!(result, expected); 50 | } 51 | -------------------------------------------------------------------------------- /examples/custom_table.rs: 
-------------------------------------------------------------------------------- 1 | use std::convert::TryFrom; 2 | 3 | use tiny_multihash::derive::Multihash; 4 | use tiny_multihash::typenum::{U20, U25, U64}; 5 | use tiny_multihash::{ 6 | Digest, Error, Hasher, Multihash, MultihashCode, Sha2Digest, Sha2_256, Size, StatefulHasher, 7 | }; 8 | 9 | // You can implement a custom hasher. This is a SHA2 256-bit hasher that returns a hash that is 10 | // truncated to 160 bits. 11 | #[derive(Default, Debug)] 12 | pub struct Sha2_256Truncated20(Sha2_256); 13 | impl StatefulHasher for Sha2_256Truncated20 { 14 | type Size = U20; 15 | type Digest = Sha2Digest; 16 | fn update(&mut self, input: &[u8]) { 17 | self.0.update(input) 18 | } 19 | fn finalize(&self) -> Self::Digest { 20 | let digest = self.0.finalize(); 21 | let truncated = &digest.as_ref()[..20]; 22 | Self::Digest::try_from(truncated).expect("digest sizes always match") 23 | } 24 | fn reset(&mut self) { 25 | self.0.reset(); 26 | } 27 | } 28 | 29 | #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] 30 | #[mh(alloc_size = U64)] 31 | pub enum Code { 32 | /// Example for using a custom hasher which returns truncated hashes 33 | #[mh(code = 0x12, hasher = Sha2_256Truncated20, digest = tiny_multihash::Sha2Digest)] 34 | Sha2_256Truncated20, 35 | /// Example for using a hasher with a bit size that is not exported by default 36 | #[mh(code = 0xb219, hasher = tiny_multihash::Blake2bHasher::, digest = tiny_multihash::Blake2bDigest)] 37 | Blake2b200, 38 | } 39 | 40 | fn main() { 41 | // Create new hashes from some input data. This is done through the `Code` enum we derived 42 | // Multihash from. 
43 | let blake_hash = Code::Blake2b200.digest(b"hello world!"); 44 | println!("{:02x?}", blake_hash); 45 | let truncated_sha2_hash = Code::Sha2_256Truncated20.digest(b"hello world!"); 46 | println!("{:02x?}", truncated_sha2_hash); 47 | 48 | // Sometimes you might not need to hash new data, you just want to get the information about 49 | // a Multihash. 50 | let truncated_sha2_bytes = truncated_sha2_hash.to_bytes(); 51 | let unknown_hash = Multihash::::from_bytes(&truncated_sha2_bytes).unwrap(); 52 | println!("SHA2 256-bit hash truncated to 160 bits:"); 53 | println!(" code: {:x?}", unknown_hash.code()); 54 | println!(" size: {}", unknown_hash.size()); 55 | println!(" digest: {:02x?}", unknown_hash.digest()); 56 | 57 | // Though you might want to hash something new, with the same hasher that some other Multihash 58 | // used. 59 | Code::try_from(unknown_hash.code()) 60 | .unwrap() 61 | .digest(b"hashing something new"); 62 | } 63 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | format_code_in_doc_comments = true 2 | -------------------------------------------------------------------------------- /src/arb.rs: -------------------------------------------------------------------------------- 1 | use quickcheck::{Arbitrary, Gen}; 2 | use rand::seq::SliceRandom; 3 | 4 | use crate::{Code, Multihash, MultihashCode, U64}; 5 | 6 | const HASHES: [Code; 16] = [ 7 | Code::Sha1, 8 | Code::Sha2_256, 9 | Code::Sha2_512, 10 | Code::Sha3_224, 11 | Code::Sha3_256, 12 | Code::Sha3_384, 13 | Code::Sha3_512, 14 | Code::Keccak224, 15 | Code::Keccak256, 16 | Code::Keccak384, 17 | Code::Keccak512, 18 | Code::Blake2b256, 19 | Code::Blake2b512, 20 | Code::Blake2s128, 21 | Code::Blake2s256, 22 | Code::Blake3_256, 23 | ]; 24 | 25 | /// Generates a random valid multihash. 26 | /// 27 | /// This is done by encoding a random piece of data. 
28 | impl Arbitrary for Multihash { 29 | fn arbitrary(g: &mut G) -> Self { 30 | let code = *HASHES.choose(g).unwrap(); 31 | let data: Vec = Arbitrary::arbitrary(g); 32 | // encoding an actual random piece of data might be better than just choosing 33 | // random numbers of the appropriate size, since some hash algos might produce 34 | // a limited set of values 35 | code.digest(&data) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "std")] 2 | use std::io::Error as IoError; 3 | use unsigned_varint::decode::Error as DecodeError; 4 | #[cfg(feature = "std")] 5 | use unsigned_varint::io::ReadError; 6 | 7 | /// Multihash error. 8 | #[derive(Debug)] 9 | pub enum Error { 10 | /// Io error. 11 | #[cfg(feature = "std")] 12 | Io(IoError), 13 | /// Unsupported multihash code. 14 | UnsupportedCode(u64), 15 | /// Invalid multihash size. 16 | InvalidSize(u64), 17 | /// Invalid varint. 
18 | Varint(DecodeError), 19 | } 20 | 21 | impl core::fmt::Display for Error { 22 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 23 | match self { 24 | #[cfg(feature = "std")] 25 | Self::Io(err) => write!(f, "{}", err), 26 | Self::UnsupportedCode(code) => write!(f, "Unsupported multihash code {}.", code), 27 | Self::InvalidSize(size) => write!(f, "Invalid multihash size {}.", size), 28 | Self::Varint(err) => write!(f, "{}", err), 29 | } 30 | } 31 | } 32 | 33 | #[cfg(feature = "std")] 34 | impl std::error::Error for Error {} 35 | 36 | #[cfg(feature = "std")] 37 | impl From for Error { 38 | fn from(err: IoError) -> Self { 39 | Self::Io(err) 40 | } 41 | } 42 | 43 | #[cfg(feature = "std")] 44 | impl From for Error { 45 | fn from(err: ReadError) -> Self { 46 | match err { 47 | ReadError::Io(err) => Self::Io(err), 48 | ReadError::Decode(err) => Self::Varint(err), 49 | _ => unreachable!(), 50 | } 51 | } 52 | } 53 | 54 | /// Multihash result. 55 | pub type Result = core::result::Result; 56 | -------------------------------------------------------------------------------- /src/hasher.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Error; 2 | use core::fmt::Debug; 3 | use generic_array::typenum::marker_traits::Unsigned; 4 | use generic_array::{ArrayLength, GenericArray}; 5 | 6 | /// Size marker trait. 7 | pub trait Size: 8 | ArrayLength + Debug + Default + Eq + core::hash::Hash + Send + Sync + 'static 9 | { 10 | } 11 | 12 | impl + Debug + Default + Eq + core::hash::Hash + Send + Sync + 'static> Size 13 | for T 14 | { 15 | } 16 | 17 | /// Stack allocated digest trait. 18 | pub trait Digest: 19 | AsRef<[u8]> 20 | + AsMut<[u8]> 21 | + From> 22 | + Into> 23 | + Clone 24 | + core::hash::Hash 25 | + Debug 26 | + Default 27 | + Eq 28 | + Send 29 | + Sync 30 | + 'static 31 | { 32 | /// Size of the digest. 33 | fn size(&self) -> u8 { 34 | S::to_u8() 35 | } 36 | 37 | /// Wraps the digest bytes. 
38 | fn wrap(digest: &[u8]) -> Result { 39 | if digest.len() != S::to_usize() { 40 | return Err(Error::InvalidSize(digest.len() as _)); 41 | } 42 | let mut array = GenericArray::default(); 43 | let len = digest.len().min(array.len()); 44 | array[..len].copy_from_slice(&digest[..len]); 45 | Ok(array.into()) 46 | } 47 | 48 | /// Reads a multihash digest from a byte stream that contains the digest prefixed with the size. 49 | /// 50 | /// The byte stream must not contain the code as prefix. 51 | #[cfg(feature = "std")] 52 | fn from_reader(mut r: R) -> Result 53 | where 54 | R: std::io::Read, 55 | { 56 | use unsigned_varint::io::read_u64; 57 | 58 | let size = read_u64(&mut r)?; 59 | if size > S::to_u64() || size > u8::max_value() as u64 { 60 | return Err(Error::InvalidSize(size)); 61 | } 62 | let mut digest = GenericArray::default(); 63 | r.read_exact(&mut digest[..size as usize])?; 64 | Ok(Self::from(digest)) 65 | } 66 | } 67 | 68 | /// Trait implemented by a hash function implementation. 69 | pub trait StatefulHasher: Default + Send + Sync { 70 | /// The maximum Digest size for that hasher (it is stack allocated). 71 | type Size: Size; 72 | 73 | /// The Digest type to distinguish the output of different `Hasher` implementations. 74 | type Digest: Digest; 75 | 76 | /// Consume input and update internal state. 77 | fn update(&mut self, input: &[u8]); 78 | 79 | /// Returns the final digest. 80 | fn finalize(&self) -> Self::Digest; 81 | 82 | /// Reset the internal hasher state. 83 | fn reset(&mut self); 84 | } 85 | 86 | /// Trait implemented by a hash function implementation. 87 | /// 88 | /// It specifies its own Digest type, so that the output of the hash function can later be 89 | /// distinguished. This way you can create a [`MultihashDigest`] from a `Digest`. 90 | /// 91 | /// Every hashing algorithm that is used with Multihash needs to implement those. This trait is 92 | /// very similar to the external [`digest::Digest` trait]. 
There is a small significant 93 | /// difference, which needed the introduction of this `Hasher` trait instead of re-using the 94 | /// widely used `digest::Digest` trait. 95 | /// 96 | /// The external `digest::Digest` trait has a single return type called [`Output`], which is used 97 | /// for all hashers that implement it. It's basically a wrapper around the hashed result bytes. 98 | /// For Multihashes we need to distinguish those bytes, as we care about which hash function they 99 | /// were created with (which is the whole point of [Multihashes]). Therefore the [`Hasher`] trait 100 | /// defines an [associated type] [`Hasher::Digest`] for the output of the hasher. This way the 101 | /// implementers can specify their own, hasher specific type (which implements [`Digest`]) for 102 | /// their output. 103 | /// 104 | /// [`digest::Digest` trait]: https://docs.rs/digest/0.9.0/digest/trait.Digest.html 105 | /// [`Output`]: https://docs.rs/digest/0.9.0/digest/type.Output.html 106 | /// [Multihashes]: https://github.com/multiformats/multihash 107 | /// [associated type]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#specifying-placeholder-types-in-trait-definitions-with-associated-types 108 | /// [`MultihashDigest`]: crate::MultihashDigest 109 | pub trait Hasher: Default + Send + Sync { 110 | /// The maximum Digest size for that hasher (it is stack allocated). 111 | type Size: Size; 112 | 113 | /// The Digest type to distinguish the output of different `Hasher` implementations. 114 | type Digest: Digest; 115 | 116 | /// Returns the allocated size of the digest. 117 | fn size() -> u8 { 118 | Self::Size::to_u8() 119 | } 120 | 121 | /// Hashes the given `input` data and returns its hash digest. 
122 | fn digest(input: &[u8]) -> Self::Digest 123 | where 124 | Self: Sized; 125 | } 126 | 127 | impl Hasher for T { 128 | type Size = T::Size; 129 | type Digest = T::Digest; 130 | 131 | fn digest(input: &[u8]) -> Self::Digest { 132 | let mut hasher = Self::default(); 133 | hasher.update(input); 134 | hasher.finalize() 135 | } 136 | } 137 | 138 | /// New type wrapper for a hasher that implements the `std::io::Write` trait. 139 | #[cfg(feature = "std")] 140 | pub struct WriteHasher(H); 141 | 142 | #[cfg(feature = "std")] 143 | impl std::io::Write for WriteHasher { 144 | fn write(&mut self, buf: &[u8]) -> std::io::Result { 145 | self.0.update(buf); 146 | Ok(buf.len()) 147 | } 148 | 149 | fn flush(&mut self) -> std::io::Result<()> { 150 | Ok(()) 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /src/hasher_impl.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Error; 2 | use crate::hasher::{Digest, Size, StatefulHasher}; 3 | use core::convert::TryFrom; 4 | use generic_array::GenericArray; 5 | 6 | macro_rules! derive_digest { 7 | ($name:ident) => { 8 | /// Multihash digest. 9 | #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] 10 | pub struct $name(GenericArray); 11 | 12 | impl Copy for $name where S::ArrayType: Copy {} 13 | 14 | impl AsRef<[u8]> for $name { 15 | fn as_ref(&self) -> &[u8] { 16 | &self.0 17 | } 18 | } 19 | 20 | impl AsMut<[u8]> for $name { 21 | fn as_mut(&mut self) -> &mut [u8] { 22 | &mut self.0 23 | } 24 | } 25 | 26 | impl From> for $name { 27 | fn from(array: GenericArray) -> Self { 28 | Self(array) 29 | } 30 | } 31 | 32 | impl From<$name> for GenericArray { 33 | fn from(digest: $name) -> Self { 34 | digest.0 35 | } 36 | } 37 | 38 | /// Convert slice to `Digest`. 39 | /// 40 | /// It errors when the length of the slice does not match the size of the `Digest`. 
41 | impl TryFrom<&[u8]> for $name { 42 | type Error = Error; 43 | 44 | fn try_from(slice: &[u8]) -> Result { 45 | Self::wrap(slice) 46 | } 47 | } 48 | 49 | impl Digest for $name {} 50 | }; 51 | } 52 | 53 | #[cfg(any(feature = "blake2b", feature = "blake2s"))] 54 | macro_rules! derive_hasher_blake { 55 | ($module:ident, $name:ident, $digest:ident) => { 56 | derive_digest!($digest); 57 | 58 | /// Multihash hasher. 59 | #[derive(Debug)] 60 | pub struct $name { 61 | _marker: PhantomData, 62 | state: $module::State, 63 | } 64 | 65 | impl Default for $name { 66 | fn default() -> Self { 67 | let mut params = $module::Params::new(); 68 | params.hash_length(S::to_usize()); 69 | Self { 70 | _marker: PhantomData, 71 | state: params.to_state(), 72 | } 73 | } 74 | } 75 | 76 | impl StatefulHasher for $name { 77 | type Size = S; 78 | type Digest = $digest; 79 | 80 | fn update(&mut self, input: &[u8]) { 81 | self.state.update(input); 82 | } 83 | 84 | fn finalize(&self) -> Self::Digest { 85 | let digest = self.state.finalize(); 86 | GenericArray::clone_from_slice(digest.as_bytes()).into() 87 | } 88 | 89 | fn reset(&mut self) { 90 | let Self { state, .. } = Self::default(); 91 | self.state = state; 92 | } 93 | } 94 | }; 95 | } 96 | 97 | #[cfg(feature = "blake2b")] 98 | pub mod blake2b { 99 | use super::*; 100 | use core::marker::PhantomData; 101 | use generic_array::typenum::{U32, U64}; 102 | 103 | derive_hasher_blake!(blake2b_simd, Blake2bHasher, Blake2bDigest); 104 | 105 | /// 256 bit blake2b hasher. 106 | pub type Blake2b256 = Blake2bHasher; 107 | 108 | /// 512 bit blake2b hasher. 109 | pub type Blake2b512 = Blake2bHasher; 110 | } 111 | 112 | #[cfg(feature = "blake2s")] 113 | pub mod blake2s { 114 | use super::*; 115 | use core::marker::PhantomData; 116 | use generic_array::typenum::{U16, U32}; 117 | 118 | derive_hasher_blake!(blake2s_simd, Blake2sHasher, Blake2sDigest); 119 | 120 | /// 256 bit blake2b hasher. 
121 | pub type Blake2s128 = Blake2sHasher; 122 | 123 | /// 512 bit blake2b hasher. 124 | pub type Blake2s256 = Blake2sHasher; 125 | } 126 | 127 | #[cfg(feature = "blake3")] 128 | pub mod blake3 { 129 | use super::*; 130 | use core::marker::PhantomData; 131 | use generic_array::typenum::U32; 132 | 133 | // derive_hasher_blake!(blake3, Blake3Hasher, Blake3Digest); 134 | derive_digest!(Blake3Digest); 135 | 136 | /// Multihash hasher. 137 | #[derive(Debug)] 138 | pub struct Blake3Hasher { 139 | _marker: PhantomData, 140 | hasher: ::blake3::Hasher, 141 | } 142 | 143 | impl Default for Blake3Hasher { 144 | fn default() -> Self { 145 | let hasher = ::blake3::Hasher::new(); 146 | 147 | Self { 148 | _marker: PhantomData, 149 | hasher, 150 | } 151 | } 152 | } 153 | 154 | impl StatefulHasher for Blake3Hasher { 155 | type Size = S; 156 | type Digest = Blake3Digest; 157 | 158 | fn update(&mut self, input: &[u8]) { 159 | self.hasher.update(input); 160 | } 161 | 162 | fn finalize(&self) -> Self::Digest { 163 | let digest = self.hasher.finalize(); //default is 32 bytes anyway 164 | GenericArray::clone_from_slice(digest.as_bytes()).into() 165 | } 166 | 167 | fn reset(&mut self) { 168 | self.hasher.reset(); 169 | } 170 | } 171 | 172 | /// blake3-256 hasher. 173 | pub type Blake3_256 = Blake3Hasher; 174 | } 175 | 176 | #[cfg(feature = "digest")] 177 | macro_rules! derive_hasher_sha { 178 | ($module:ty, $name:ident, $size:ty, $digest:ident) => { 179 | /// Multihash hasher. 
180 | #[derive(Debug, Default)] 181 | pub struct $name { 182 | state: $module, 183 | } 184 | 185 | impl $crate::hasher::StatefulHasher for $name { 186 | type Size = $size; 187 | type Digest = $digest; 188 | 189 | fn update(&mut self, input: &[u8]) { 190 | use digest::Digest; 191 | self.state.update(input) 192 | } 193 | 194 | fn finalize(&self) -> Self::Digest { 195 | use digest::Digest; 196 | Self::Digest::from(self.state.clone().finalize()) 197 | } 198 | 199 | fn reset(&mut self) { 200 | use digest::Digest; 201 | self.state.reset(); 202 | } 203 | } 204 | }; 205 | } 206 | 207 | #[cfg(feature = "sha1")] 208 | pub mod sha1 { 209 | use super::*; 210 | use generic_array::typenum::U20; 211 | 212 | derive_digest!(Sha1Digest); 213 | derive_hasher_sha!(::sha1::Sha1, Sha1, U20, Sha1Digest); 214 | } 215 | 216 | #[cfg(feature = "sha2")] 217 | pub mod sha2 { 218 | use super::*; 219 | use generic_array::typenum::{U32, U64}; 220 | 221 | derive_digest!(Sha2Digest); 222 | derive_hasher_sha!(sha_2::Sha256, Sha2_256, U32, Sha2Digest); 223 | derive_hasher_sha!(sha_2::Sha512, Sha2_512, U64, Sha2Digest); 224 | } 225 | 226 | #[cfg(feature = "sha3")] 227 | pub mod sha3 { 228 | use super::*; 229 | use generic_array::typenum::{U28, U32, U48, U64}; 230 | 231 | derive_digest!(Sha3Digest); 232 | derive_hasher_sha!(sha_3::Sha3_224, Sha3_224, U28, Sha3Digest); 233 | derive_hasher_sha!(sha_3::Sha3_256, Sha3_256, U32, Sha3Digest); 234 | derive_hasher_sha!(sha_3::Sha3_384, Sha3_384, U48, Sha3Digest); 235 | derive_hasher_sha!(sha_3::Sha3_512, Sha3_512, U64, Sha3Digest); 236 | 237 | derive_digest!(KeccakDigest); 238 | derive_hasher_sha!(sha_3::Keccak224, Keccak224, U28, KeccakDigest); 239 | derive_hasher_sha!(sha_3::Keccak256, Keccak256, U32, KeccakDigest); 240 | derive_hasher_sha!(sha_3::Keccak384, Keccak384, U48, KeccakDigest); 241 | derive_hasher_sha!(sha_3::Keccak512, Keccak512, U64, KeccakDigest); 242 | } 243 | 244 | pub mod identity { 245 | use super::*; 246 | use crate::error::Error; 247 | 
use generic_array::typenum::U32; 248 | 249 | /// Multihash digest. 250 | #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] 251 | pub struct IdentityDigest(u8, GenericArray); 252 | 253 | impl AsRef<[u8]> for IdentityDigest { 254 | fn as_ref(&self) -> &[u8] { 255 | &self.1[..self.0 as usize] 256 | } 257 | } 258 | 259 | impl AsMut<[u8]> for IdentityDigest { 260 | fn as_mut(&mut self) -> &mut [u8] { 261 | &mut self.1[..self.0 as usize] 262 | } 263 | } 264 | 265 | impl From> for IdentityDigest { 266 | fn from(array: GenericArray) -> Self { 267 | Self(array.len() as u8, array) 268 | } 269 | } 270 | 271 | impl From> for GenericArray { 272 | fn from(digest: IdentityDigest) -> Self { 273 | digest.1 274 | } 275 | } 276 | 277 | impl Digest for IdentityDigest { 278 | fn size(&self) -> u8 { 279 | self.0 280 | } 281 | 282 | // A custom implementation is needed as an identity hash value might be shorter than the 283 | // allocated Digest. 284 | fn wrap(digest: &[u8]) -> Result { 285 | if digest.len() > S::to_usize() { 286 | return Err(Error::InvalidSize(digest.len() as _)); 287 | } 288 | let mut array = GenericArray::default(); 289 | let len = digest.len().min(array.len()); 290 | array[..len].copy_from_slice(&digest[..len]); 291 | Ok(Self(len as u8, array)) 292 | } 293 | 294 | // A custom implementation is needed as an identity hash also stores the actual size of 295 | // the given digest. 296 | #[cfg(feature = "std")] 297 | fn from_reader(mut r: R) -> Result 298 | where 299 | R: std::io::Read, 300 | { 301 | use unsigned_varint::io::read_u64; 302 | 303 | let size = read_u64(&mut r)?; 304 | if size > S::to_u64() || size > u8::max_value() as u64 { 305 | return Err(Error::InvalidSize(size)); 306 | } 307 | let mut digest = GenericArray::default(); 308 | r.read_exact(&mut digest[..size as usize])?; 309 | Ok(Self(size as u8, digest)) 310 | } 311 | } 312 | 313 | /// Identity hasher with a maximum size. 
314 | /// 315 | /// # Panics 316 | /// 317 | /// Panics if the input is bigger than the maximum size. 318 | #[derive(Debug, Default)] 319 | pub struct IdentityHasher { 320 | bytes: GenericArray, 321 | i: usize, 322 | } 323 | 324 | impl StatefulHasher for IdentityHasher { 325 | type Size = S; 326 | type Digest = IdentityDigest; 327 | 328 | fn update(&mut self, input: &[u8]) { 329 | let start = self.i.min(self.bytes.len()); 330 | let end = (self.i + input.len()).min(self.bytes.len()); 331 | self.bytes[start..end].copy_from_slice(&input); 332 | self.i = end; 333 | } 334 | 335 | fn finalize(&self) -> Self::Digest { 336 | IdentityDigest(self.i as u8, self.bytes.clone()) 337 | } 338 | 339 | fn reset(&mut self) { 340 | self.bytes = Default::default(); 341 | self.i = 0; 342 | } 343 | } 344 | 345 | /// 32 byte Identity hasher (constrained to 32 bytes). 346 | /// 347 | /// # Panics 348 | /// 349 | /// Panics if the input is bigger than 32 bytes. 350 | pub type Identity256 = IdentityHasher; 351 | } 352 | 353 | pub mod unknown { 354 | use super::*; 355 | derive_digest!(UnknownDigest); 356 | } 357 | 358 | #[cfg(feature = "strobe")] 359 | pub mod strobe { 360 | use super::*; 361 | use core::marker::PhantomData; 362 | use generic_array::typenum::{U32, U64}; 363 | use strobe_rs::{SecParam, Strobe}; 364 | 365 | derive_digest!(StrobeDigest); 366 | 367 | /// Strobe hasher. 
368 | pub struct StrobeHasher { 369 | _marker: PhantomData, 370 | strobe: Strobe, 371 | initialized: bool, 372 | } 373 | 374 | impl Default for StrobeHasher { 375 | fn default() -> Self { 376 | Self { 377 | _marker: PhantomData, 378 | strobe: Strobe::new(b"StrobeHash", SecParam::B128), 379 | initialized: false, 380 | } 381 | } 382 | } 383 | 384 | impl StatefulHasher for StrobeHasher { 385 | type Size = S; 386 | type Digest = StrobeDigest; 387 | 388 | fn update(&mut self, input: &[u8]) { 389 | self.strobe.ad(input, self.initialized); 390 | self.initialized = true; 391 | } 392 | 393 | fn finalize(&self) -> Self::Digest { 394 | let mut hash = GenericArray::default(); 395 | self.strobe.clone().prf(&mut hash, false); 396 | Self::Digest::from(hash) 397 | } 398 | 399 | fn reset(&mut self) { 400 | let Self { strobe, .. } = Self::default(); 401 | self.strobe = strobe; 402 | self.initialized = false; 403 | } 404 | } 405 | 406 | /// 256 bit strobe hasher. 407 | pub type Strobe256 = StrobeHasher; 408 | 409 | /// 512 bit strobe hasher. 410 | pub type Strobe512 = StrobeHasher; 411 | } 412 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Multihash implementation. 2 | //! 3 | //! Feature Flags 4 | //! ------------- 5 | //! 6 | //! Multihash has lots of [feature flags], by default, all features (except for `test`) are 7 | //! enabled. 8 | //! 9 | //! Some of them are about specific hash functions, these are: 10 | //! 11 | //! - `blake2b`: Enable Blake2b hashers 12 | //! - `blake2s`: Enable Blake2s hashers 13 | //! - `sha1`: Enable SHA-1 hashers 14 | //! - `sha2`: Enable SHA-2 hashers 15 | //! - `sha3`: Enable SHA-3 hashers 16 | //! - `strobe`: Enable Strobe hashers 17 | //! 18 | //! In order to enable all hashers, you can set the `all` feature flag. 19 | //! 20 | //! The library has support for `no_std`, if you disable the `std` feature flag. 
21 | //! 22 | //! The `multihash-impl` feature flag enables a default Multihash implementation that contains all 23 | //! bundled hashers (which may be disabled via the feature flags mentioned above). If only want a 24 | //! specific subset of hash algorithms or add one which isn't supporte by default, you will likely 25 | //! disable that feature and enable `derive` in order to be able to use the [`Multihash` derive]. 26 | //! 27 | //! The `test` feature flag enables property based testing features. 28 | //! 29 | //! [feature flags]: https://doc.rust-lang.org/cargo/reference/manifest.html#the-features-section 30 | //! [`Multihash` derive]: crate::derive 31 | 32 | #![deny(missing_docs)] 33 | #![cfg_attr(not(feature = "std"), no_std)] 34 | 35 | #[cfg(any(test, feature = "test"))] 36 | mod arb; 37 | mod error; 38 | mod hasher; 39 | mod hasher_impl; 40 | mod multihash; 41 | #[cfg(feature = "multihash-impl")] 42 | mod multihash_impl; 43 | 44 | pub use crate::error::{Error, Result}; 45 | #[cfg(feature = "std")] 46 | pub use crate::hasher::WriteHasher; 47 | pub use crate::hasher::{Digest, Hasher, Size, StatefulHasher}; 48 | pub use crate::multihash::{Multihash, MultihashCode}; 49 | pub use generic_array::typenum::{self, U128, U16, U20, U28, U32, U48, U64}; 50 | #[cfg(feature = "derive")] 51 | pub use tiny_multihash_derive as derive; 52 | 53 | #[cfg(feature = "multihash-impl")] 54 | pub use crate::multihash_impl::Code; 55 | 56 | #[cfg(feature = "blake2b")] 57 | pub use crate::hasher_impl::blake2b::{Blake2b256, Blake2b512, Blake2bDigest, Blake2bHasher}; 58 | #[cfg(feature = "blake2s")] 59 | pub use crate::hasher_impl::blake2s::{Blake2s128, Blake2s256, Blake2sDigest, Blake2sHasher}; 60 | #[cfg(feature = "blake3")] 61 | pub use crate::hasher_impl::blake3::{Blake3Digest, Blake3Hasher, Blake3_256}; 62 | pub use crate::hasher_impl::identity::{Identity256, IdentityDigest, IdentityHasher}; 63 | #[cfg(feature = "sha1")] 64 | pub use crate::hasher_impl::sha1::{Sha1, Sha1Digest}; 65 
| #[cfg(feature = "sha2")] 66 | pub use crate::hasher_impl::sha2::{Sha2Digest, Sha2_256, Sha2_512}; 67 | #[cfg(feature = "sha3")] 68 | pub use crate::hasher_impl::sha3::{Keccak224, Keccak256, Keccak384, Keccak512, KeccakDigest}; 69 | #[cfg(feature = "sha3")] 70 | pub use crate::hasher_impl::sha3::{Sha3Digest, Sha3_224, Sha3_256, Sha3_384, Sha3_512}; 71 | #[cfg(feature = "strobe")] 72 | pub use crate::hasher_impl::strobe::{Strobe256, Strobe512, StrobeDigest, StrobeHasher}; 73 | pub use crate::hasher_impl::unknown::UnknownDigest; 74 | -------------------------------------------------------------------------------- /src/multihash.rs: -------------------------------------------------------------------------------- 1 | use crate::hasher::{Digest, Size}; 2 | use crate::Error; 3 | use core::convert::TryFrom; 4 | #[cfg(feature = "std")] 5 | use core::convert::TryInto; 6 | use core::fmt::Debug; 7 | use generic_array::{ArrayLength, GenericArray}; 8 | 9 | /// Trait that implements hashing. 10 | /// 11 | /// It is usually implemented by a custom code table enum that derives the [`Multihash` derive]. 12 | /// 13 | /// [`Multihash` derive]: crate::derive 14 | pub trait MultihashCode: 15 | TryFrom + Into + Send + Sync + Unpin + Copy + Eq + Debug + 'static 16 | { 17 | /// The maximum size a hash will allocate. 18 | type AllocSize: Size; 19 | 20 | /// Calculate the hash of some input data. 21 | /// 22 | /// # Example 23 | /// 24 | /// ``` 25 | /// // `Code` implements `MultihashCode` 26 | /// use tiny_multihash::{Code, MultihashCode}; 27 | /// 28 | /// let hash = Code::Sha3_256.digest(b"Hello world!"); 29 | /// println!("{:02x?}", hash); 30 | /// ``` 31 | fn digest(&self, input: &[u8]) -> Multihash; 32 | 33 | /// Create a multihash from an existing [`Digest`]. 
///
/// # Example
///
/// ```
/// use tiny_multihash::{Code, MultihashCode, Sha3_256, StatefulHasher};
///
/// let mut hasher = Sha3_256::default();
/// hasher.update(b"Hello world!");
/// let hash = Code::multihash_from_digest(&hasher.finalize());
/// println!("{:02x?}", hash);
/// ```
fn multihash_from_digest<'a, S, D>(digest: &'a D) -> Multihash<Self::AllocSize>
where
    S: Size,
    D: Digest<S>,
    Self: From<&'a D>;
}

/// A Multihash instance that only supports the basic functionality and no hashing.
///
/// With this Multihash implementation you can operate on Multihashes in a generic way, but
/// no hasher implementation is associated with the code.
///
/// # Example
///
/// ```
/// use tiny_multihash::{Multihash, U64};
///
/// const SHA3_256: u64 = 0x16;
/// let digest_bytes = [
///     0x16, 0x20, 0x64, 0x4b, 0xcc, 0x7e, 0x56, 0x43, 0x73, 0x04, 0x09, 0x99, 0xaa, 0xc8, 0x9e,
///     0x76, 0x22, 0xf3, 0xca, 0x71, 0xfb, 0xa1, 0xd9, 0x72, 0xfd, 0x94, 0xa3, 0x1c, 0x3b, 0xfb,
///     0xf2, 0x4e, 0x39, 0x38,
/// ];
/// let mh = Multihash::<U64>::from_bytes(&digest_bytes).unwrap();
/// assert_eq!(mh.code(), SHA3_256);
/// assert_eq!(mh.size(), 32);
/// assert_eq!(mh.digest(), &digest_bytes[2..]);
/// ```
#[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))]
#[cfg_attr(feature = "serde-codec", derive(serde::Serialize))]
#[cfg_attr(feature = "serde-codec", serde(bound = "S: Size"))]
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Multihash<S: Size> {
    /// The code of the Multihash.
    code: u64,
    /// The actual size of the digest in bytes (not the allocated size).
    size: u8,
    /// The digest. Allocated at the full size `S`; only the first `size` bytes are meaningful.
    digest: GenericArray<u8, S>,
}

// `Multihash` is `Copy` whenever the backing array type is `Copy`.
impl<S: Size> Copy for Multihash<S> where <S as ArrayLength<u8>>::ArrayType: Copy {}

impl<S: Size> Multihash<S> {
    /// Wraps the digest in a multihash.
    ///
    /// Returns [`Error::InvalidSize`] if `input_digest` does not fit into the
    /// allocated size `S`. Shorter digests are zero-padded internally; `size`
    /// records the actual digest length.
    pub fn wrap(code: u64, input_digest: &[u8]) -> Result<Self, Error> {
        if input_digest.len() > S::to_usize() {
            return Err(Error::InvalidSize(input_digest.len() as _));
        }
        let size = input_digest.len();
        let mut digest = GenericArray::default();
        digest[..size].copy_from_slice(input_digest);
        Ok(Self {
            code,
            size: size as u8,
            digest,
        })
    }

    /// Returns the code of the multihash.
    pub fn code(&self) -> u64 {
        self.code
    }

    /// Returns the size of the digest.
    pub fn size(&self) -> u8 {
        self.size
    }

    /// Returns the digest.
    pub fn digest(&self) -> &[u8] {
        // Only the actually-used prefix of the allocated array is exposed.
        &self.digest[..self.size as usize]
    }

    /// Reads a multihash from a byte stream.
    #[cfg(feature = "std")]
    pub fn read<R: std::io::Read>(r: R) -> Result<Self, Error>
    where
        Self: Sized,
    {
        let (code, size, digest) = read_multihash(r)?;
        Ok(Self { code, size, digest })
    }

    /// Parses a multihash from a bytes.
    ///
    /// You need to make sure the passed in bytes have the correct length. The digest length
    /// needs to match the `size` value of the multihash.
    #[cfg(feature = "std")]
    pub fn from_bytes(mut bytes: &[u8]) -> Result<Self, Error>
    where
        Self: Sized,
    {
        let result = Self::read(&mut bytes)?;
        // There were more bytes supplied than read
        if !bytes.is_empty() {
            return Err(Error::InvalidSize(bytes.len().try_into().expect(
                "Currently the maximum size is 255, therefore always fits into usize",
            )));
        }

        Ok(result)
    }

    /// Writes a multihash to a byte stream.
    #[cfg(feature = "std")]
    pub fn write<W: std::io::Write>(&self, w: W) -> Result<(), Error> {
        write_multihash(w, self.code(), self.size(), self.digest())
    }

    /// Returns the bytes of a multihash.
    #[cfg(feature = "std")]
    pub fn to_bytes(&self) -> Vec<u8> {
        let mut bytes = vec![];
        self.write(&mut bytes)
            .expect("writing to a vec should never fail");
        bytes
    }
}

#[cfg(feature = "scale-codec")]
impl parity_scale_codec::Encode for Multihash<crate::U32> {
    fn encode_to<EncOut: parity_scale_codec::Output>(&self, dest: &mut EncOut) {
        // NOTE: the full allocated 32 bytes are encoded (fixed width), matching
        // the `Decode` impl below, regardless of the actual `size`.
        let mut digest = [0; 32];
        digest.copy_from_slice(&self.digest);
        dest.push(&self.code);
        dest.push(&self.size);
        dest.push(&digest);
    }
}

#[cfg(feature = "scale-codec")]
impl parity_scale_codec::EncodeLike for Multihash<crate::U32> {}

#[cfg(feature = "scale-codec")]
impl parity_scale_codec::Decode for Multihash<crate::U32> {
    fn decode<DecIn: parity_scale_codec::Input>(
        input: &mut DecIn,
    ) -> Result<Self, parity_scale_codec::Error> {
        Ok(Multihash {
            code: parity_scale_codec::Decode::decode(input)?,
            size: parity_scale_codec::Decode::decode(input)?,
            digest: {
                let digest = <[u8; 32]>::decode(input)?;
                GenericArray::clone_from_slice(&digest)
            },
        })
    }
}

#[cfg(feature = "scale-codec")]
impl parity_scale_codec::Encode for Multihash<crate::U64> {
    fn encode_to<EncOut: parity_scale_codec::Output>(&self, dest: &mut EncOut) {
        // Fixed-width 64-byte encoding, mirroring the U32 impl above.
        let mut digest = [0; 64];
        digest.copy_from_slice(&self.digest);
        dest.push(&self.code);
        dest.push(&self.size);
        dest.push(&digest);
    }
}

#[cfg(feature = "scale-codec")]
impl parity_scale_codec::EncodeLike for Multihash<crate::U64> {}

#[cfg(feature = "scale-codec")]
impl parity_scale_codec::Decode for Multihash<crate::U64> {
    fn decode<DecIn: parity_scale_codec::Input>(
        input: &mut DecIn,
    ) -> Result<Self, parity_scale_codec::Error> {
        Ok(Multihash {
            code: parity_scale_codec::Decode::decode(input)?,
            size: parity_scale_codec::Decode::decode(input)?,
            digest: {
                let digest = <[u8; 64]>::decode(input)?;
                GenericArray::clone_from_slice(&digest)
            },
        })
    }
}

/// Writes the multihash to a byte stream.
226 | #[cfg(feature = "std")] 227 | pub fn write_multihash(mut w: W, code: u64, size: u8, digest: &[u8]) -> Result<(), Error> 228 | where 229 | W: std::io::Write, 230 | { 231 | use unsigned_varint::encode as varint_encode; 232 | 233 | let mut code_buf = varint_encode::u64_buffer(); 234 | let code = varint_encode::u64(code, &mut code_buf); 235 | 236 | let mut size_buf = varint_encode::u8_buffer(); 237 | let size = varint_encode::u8(size, &mut size_buf); 238 | 239 | w.write_all(code)?; 240 | w.write_all(size)?; 241 | w.write_all(digest)?; 242 | Ok(()) 243 | } 244 | 245 | /// Reads a multihash from a byte stream that contains a full multihash (code, size and the digest) 246 | /// 247 | /// Returns the code, size and the digest. The size is the actual size and not the 248 | /// maximum/allocated size of the digest. 249 | /// 250 | /// Currently the maximum size for a digest is 255 bytes. 251 | #[cfg(feature = "std")] 252 | pub fn read_multihash(mut r: R) -> Result<(u64, u8, GenericArray), Error> 253 | where 254 | R: std::io::Read, 255 | S: Size, 256 | { 257 | use unsigned_varint::io::read_u64; 258 | 259 | let code = read_u64(&mut r)?; 260 | let size = read_u64(&mut r)?; 261 | 262 | if size > S::to_u64() || size > u8::MAX as u64 { 263 | return Err(Error::InvalidSize(size)); 264 | } 265 | 266 | let mut digest = GenericArray::default(); 267 | r.read_exact(&mut digest[..size as usize])?; 268 | Ok((code, size as u8, digest)) 269 | } 270 | 271 | #[cfg(test)] 272 | mod tests { 273 | use super::*; 274 | use crate::multihash_impl::Code; 275 | 276 | #[test] 277 | fn roundtrip() { 278 | let hash = Code::Sha2_256.digest(b"hello world"); 279 | let mut buf = [0u8; 35]; 280 | hash.write(&mut buf[..]).unwrap(); 281 | let hash2 = Multihash::read(&buf[..]).unwrap(); 282 | assert_eq!(hash, hash2); 283 | } 284 | 285 | #[test] 286 | #[cfg(feature = "scale-codec")] 287 | fn test_scale() { 288 | use parity_scale_codec::{Decode, Encode}; 289 | 290 | let mh = Multihash::::default(); 291 | let 
bytes = mh.encode(); 292 | let mh2: Multihash = Decode::decode(&mut &bytes[..]).unwrap(); 293 | assert_eq!(mh, mh2); 294 | } 295 | 296 | #[test] 297 | #[cfg(feature = "serde-codec")] 298 | fn test_serde() { 299 | let mh = Multihash::::default(); 300 | let bytes = serde_json::to_string(&mh).unwrap(); 301 | let mh2 = serde_json::from_str(&bytes).unwrap(); 302 | assert_eq!(mh, mh2); 303 | } 304 | } 305 | -------------------------------------------------------------------------------- /src/multihash_impl.rs: -------------------------------------------------------------------------------- 1 | use tiny_multihash_derive::Multihash; 2 | 3 | /// Default (cryptographically secure) Multihash implementation. 4 | /// 5 | /// This is a default set of hashing algorithms. Usually applications would use their own subset of 6 | /// algorithms. See the [`Multihash` derive] for more information. 7 | /// 8 | /// [`Multihash` derive]: crate::derive 9 | #[derive(Copy, Clone, Debug, Eq, Multihash, PartialEq)] 10 | #[mh(alloc_size = crate::U64)] 11 | pub enum Code { 12 | /// SHA-1 (20-byte hash size) 13 | #[cfg(feature = "sha1")] 14 | #[mh(code = 0x11, hasher = crate::Sha1, digest = crate::Sha1Digest)] 15 | Sha1, 16 | /// SHA-256 (32-byte hash size) 17 | #[cfg(feature = "sha2")] 18 | #[mh(code = 0x12, hasher = crate::Sha2_256, digest = crate::Sha2Digest)] 19 | Sha2_256, 20 | /// SHA-512 (64-byte hash size) 21 | #[cfg(feature = "sha2")] 22 | #[mh(code = 0x13, hasher = crate::Sha2_512, digest = crate::Sha2Digest)] 23 | Sha2_512, 24 | /// SHA3-224 (28-byte hash size) 25 | #[cfg(feature = "sha3")] 26 | #[mh(code = 0x17, hasher = crate::Sha3_224, digest = crate::Sha3Digest)] 27 | Sha3_224, 28 | /// SHA3-256 (32-byte hash size) 29 | #[cfg(feature = "sha3")] 30 | #[mh(code = 0x16, hasher = crate::Sha3_256, digest = crate::Sha3Digest)] 31 | Sha3_256, 32 | /// SHA3-384 (48-byte hash size) 33 | #[cfg(feature = "sha3")] 34 | #[mh(code = 0x15, hasher = crate::Sha3_384, digest = crate::Sha3Digest)] 35 
| Sha3_384, 36 | /// SHA3-512 (64-byte hash size) 37 | #[cfg(feature = "sha3")] 38 | #[mh(code = 0x14, hasher = crate::Sha3_512, digest = crate::Sha3Digest)] 39 | Sha3_512, 40 | /// Keccak-224 (28-byte hash size) 41 | #[cfg(feature = "sha3")] 42 | #[mh(code = 0x1a, hasher = crate::Keccak224, digest = crate::KeccakDigest)] 43 | Keccak224, 44 | /// Keccak-256 (32-byte hash size) 45 | #[cfg(feature = "sha3")] 46 | #[mh(code = 0x1b, hasher = crate::Keccak256, digest = crate::KeccakDigest)] 47 | Keccak256, 48 | /// Keccak-384 (48-byte hash size) 49 | #[cfg(feature = "sha3")] 50 | #[mh(code = 0x1c, hasher = crate::Keccak384, digest = crate::KeccakDigest)] 51 | Keccak384, 52 | /// Keccak-512 (64-byte hash size) 53 | #[cfg(feature = "sha3")] 54 | #[mh(code = 0x1d, hasher = crate::Keccak512, digest = crate::KeccakDigest)] 55 | Keccak512, 56 | /// BLAKE2b-256 (32-byte hash size) 57 | #[cfg(feature = "blake2b")] 58 | #[mh(code = 0xb220, hasher = crate::Blake2b256, digest = crate::Blake2bDigest)] 59 | Blake2b256, 60 | /// BLAKE2b-512 (64-byte hash size) 61 | #[cfg(feature = "blake2b")] 62 | #[mh(code = 0xb240, hasher = crate::Blake2b512, digest = crate::Blake2bDigest)] 63 | Blake2b512, 64 | /// BLAKE2s-128 (16-byte hash size) 65 | #[cfg(feature = "blake2s")] 66 | #[mh(code = 0xb250, hasher = crate::Blake2s128, digest = crate::Blake2sDigest)] 67 | Blake2s128, 68 | /// BLAKE2s-256 (32-byte hash size) 69 | #[cfg(feature = "blake2s")] 70 | #[mh(code = 0xb260, hasher = crate::Blake2s256, digest = crate::Blake2sDigest)] 71 | Blake2s256, 72 | /// BLAKE3-256 (32-byte hash size) 73 | #[cfg(feature = "blake3")] 74 | #[mh(code = 0x1e, hasher = crate::Blake3_256, digest = crate::Blake3Digest)] 75 | Blake3_256, 76 | } 77 | 78 | #[cfg(test)] 79 | mod tests { 80 | use super::*; 81 | use crate::hasher::Hasher; 82 | use crate::hasher_impl::sha3::{Sha3_256, Sha3_512}; 83 | use crate::multihash::MultihashCode; 84 | 85 | #[test] 86 | fn test_hasher_256() { 87 | let digest = 
Sha3_256::digest(b"hello world"); 88 | let hash = Code::multihash_from_digest(&digest); 89 | let hash2 = Code::Sha3_256.digest(b"hello world"); 90 | assert_eq!(hash.code(), u64::from(Code::Sha3_256)); 91 | assert_eq!(hash.size(), 32); 92 | assert_eq!(hash.digest(), &digest.as_ref()[..]); 93 | assert_eq!(hash, hash2); 94 | } 95 | 96 | #[test] 97 | fn test_hasher_512() { 98 | let digest = Sha3_512::digest(b"hello world"); 99 | let hash = Code::multihash_from_digest(&digest); 100 | let hash2 = Code::Sha3_512.digest(b"hello world"); 101 | assert_eq!(hash.code(), u64::from(Code::Sha3_512)); 102 | assert_eq!(hash.size(), 64); 103 | assert_eq!(hash.digest(), &digest.as_ref()[..]); 104 | assert_eq!(hash, hash2); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /tests/lib.rs: -------------------------------------------------------------------------------- 1 | use std::io::Cursor; 2 | 3 | use tiny_multihash::{ 4 | derive::Multihash, Blake2b256, Blake2b512, Blake2bDigest, Blake2s128, Blake2s256, 5 | Blake2sDigest, Blake3Digest, Blake3_256, Digest, Error, Hasher, Identity256, IdentityDigest, 6 | Keccak224, Keccak256, Keccak384, Keccak512, KeccakDigest, Multihash, MultihashCode, Sha1, 7 | Sha1Digest, Sha2Digest, Sha2_256, Sha2_512, Sha3Digest, Sha3_224, Sha3_256, Sha3_384, Sha3_512, 8 | Size, StatefulHasher, Strobe256, Strobe512, StrobeDigest, U16, U20, U28, U32, U48, U64, 9 | }; 10 | 11 | #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] 12 | #[mh(alloc_size = U64)] 13 | pub enum Code { 14 | #[mh(code = 0x00, hasher = Identity256, digest = IdentityDigest)] 15 | Identity, 16 | #[mh(code = 0x11, hasher = Sha1, digest = Sha1Digest)] 17 | Sha1, 18 | #[mh(code = 0x12, hasher = Sha2_256, digest = Sha2Digest)] 19 | Sha2_256, 20 | #[mh(code = 0x13, hasher = Sha2_512, digest = Sha2Digest)] 21 | Sha2_512, 22 | #[mh(code = 0x17, hasher = Sha3_224, digest = Sha3Digest)] 23 | Sha3_224, 24 | #[mh(code = 0x16, hasher = Sha3_256, 
digest = Sha3Digest)]
    Sha3_256,
    #[mh(code = 0x15, hasher = Sha3_384, digest = Sha3Digest)]
    Sha3_384,
    #[mh(code = 0x14, hasher = Sha3_512, digest = Sha3Digest)]
    Sha3_512,
    #[mh(code = 0x1a, hasher = Keccak224, digest = KeccakDigest)]
    Keccak224,
    #[mh(code = 0x1b, hasher = Keccak256, digest = KeccakDigest)]
    Keccak256,
    #[mh(code = 0x1c, hasher = Keccak384, digest = KeccakDigest)]
    Keccak384,
    #[mh(code = 0x1d, hasher = Keccak512, digest = KeccakDigest)]
    Keccak512,
    #[mh(code = 0xb220, hasher = Blake2b256, digest = Blake2bDigest)]
    Blake2b256,
    #[mh(code = 0xb240, hasher = Blake2b512, digest = Blake2bDigest)]
    Blake2b512,
    #[mh(code = 0xb250, hasher = Blake2s128, digest = Blake2sDigest)]
    Blake2s128,
    #[mh(code = 0xb260, hasher = Blake2s256, digest = Blake2sDigest)]
    Blake2s256,
    #[mh(code = 0x1e, hasher = Blake3_256, digest = Blake3Digest)]
    Blake3_256,
    #[mh(code = 0x3312e7, hasher = Strobe256, digest = StrobeDigest)]
    Strobe256,
    #[mh(code = 0x3312e8, hasher = Strobe512, digest = StrobeDigest)]
    Strobe512,
}

/// Helper function to convert a hex-encoded byte array back into a bytearray
fn hex_to_bytes(s: &str) -> Vec<u8> {
    let mut c = 0;
    let mut v = Vec::new();
    while c < s.len() {
        // Each pair of hex characters decodes to one byte; `from_str_radix`
        // accepts mixed-case input (some fixtures below use uppercase).
        v.push(u8::from_str_radix(&s[c..c + 2], 16).unwrap());
        c += 2;
    }
    v
}

macro_rules! assert_encode {
    // Multihash enum member, Multihash code, input, Multihash as hex
    {$( $alg:ty, $code:expr, $data:expr, $expect:expr; )*} => {
        $(
            let expected = hex_to_bytes($expect);

            // From code
            assert_eq!(
                $code.digest($data).to_bytes(),
                expected,
                "{:?} encodes correctly (from code)", stringify!($alg)
            );

            // From digest
            assert_eq!(
                Code::multihash_from_digest(&<$alg>::digest($data)).to_bytes(),
                expected,
                "{:?} encodes correctly (from digest)", stringify!($alg)
            );

            // From incremental hashing
            let mut hasher = <$alg>::default();
            hasher.update($data);
            assert_eq!(
                Code::multihash_from_digest(&hasher.finalize()).to_bytes(),
                expected,
                "{:?} encodes correctly (from hasher)", stringify!($alg)
            );
        )*
    }
}

#[allow(clippy::cognitive_complexity)]
#[test]
fn multihash_encode() {
    assert_encode! {
        Identity256, Code::Identity, b"beep boop", "00096265657020626f6f70";
        Sha1, Code::Sha1, b"beep boop", "11147c8357577f51d4f0a8d393aa1aaafb28863d9421";
        Sha2_256, Code::Sha2_256, b"helloworld", "1220936a185caaa266bb9cbe981e9e05cb78cd732b0b3280eb944412bb6f8f8f07af";
        Sha2_256, Code::Sha2_256, b"beep boop", "122090ea688e275d580567325032492b597bc77221c62493e76330b85ddda191ef7c";
        Sha2_512, Code::Sha2_512, b"hello world", "1340309ecc489c12d6eb4cc40f50c902f2b4d0ed77ee511a7c7a9bcd3ca86d4cd86f989dd35bc5ff499670da34255b45b0cfd830e81f605dcf7dc5542e93ae9cd76f";
        Sha3_224, Code::Sha3_224, b"hello world", "171Cdfb7f18c77e928bb56faeb2da27291bd790bc1045cde45f3210bb6c5";
        Sha3_256, Code::Sha3_256, b"hello world", "1620644bcc7e564373040999aac89e7622f3ca71fba1d972fd94a31c3bfbf24e3938";
        Sha3_384, Code::Sha3_384, b"hello world", "153083bff28dde1b1bf5810071c6643c08e5b05bdb836effd70b403ea8ea0a634dc4997eb1053aa3593f590f9c63630dd90b";
        Sha3_512, Code::Sha3_512, b"hello world", "1440840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a";
        Keccak224, Code::Keccak224, b"hello world", "1A1C25f3ecfebabe99686282f57f5c9e1f18244cfee2813d33f955aae568";
        Keccak256, Code::Keccak256, b"hello world", "1B2047173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad";
        Keccak384, Code::Keccak384, b"hello world", "1C3065fc99339a2a40e99d3c40d695b22f278853ca0f925cde4254bcae5e22ece47e6441f91b6568425adc9d95b0072eb49f";
        Keccak512, Code::Keccak512, b"hello world", "1D403ee2b40047b8060f68c67242175660f4174d0af5c01d47168ec20ed619b0b7c42181f40aa1046f39e2ef9efc6910782a998e0013d172458957957fac9405b67d";
        Blake2b512, Code::Blake2b512, b"hello world", "c0e40240021ced8799296ceca557832ab941a50b4a11f83478cf141f51f933f653ab9fbcc05a037cddbed06e309bf334942c4e58cdf1a46e237911ccd7fcf9787cbc7fd0";
        Blake2s256, Code::Blake2s256, b"hello world", "e0e402209aec6806794561107e594b1f6a8a6b0c92a0cba9acf5e5e93cca06f781813b0b";
        Blake2b256, Code::Blake2b256, b"hello world", "a0e40220256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610";
        Blake2s128, Code::Blake2s128, b"hello world", "d0e4021037deae0226c30da2ab424a7b8ee14e83";
        Blake3_256, Code::Blake3_256, b"hello world", "1e20d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24";
    }
}

macro_rules! assert_decode {
    // Multihash code, full multihash as hex
    {$( $code:expr, $hash:expr; )*} => {
        $(
            let hash = hex_to_bytes($hash);
            assert_eq!(
                Multihash::<U64>::from_bytes(&hash).unwrap().code(),
                u64::from($code),
                "{:?} decodes correctly", stringify!($code)
            );
        )*
    }
}

#[test]
fn assert_decode() {
    assert_decode! {
        Code::Identity, "000a68656c6c6f776f726c64";
        Code::Sha1, "11147c8357577f51d4f0a8d393aa1aaafb28863d9421";
        Code::Sha2_256, "1220936a185caaa266bb9cbe981e9e05cb78cd732b0b3280eb944412bb6f8f8f07af";
        Code::Sha2_256, "122090ea688e275d580567325032492b597bc77221c62493e76330b85ddda191ef7c";
        Code::Sha2_512, "1340309ecc489c12d6eb4cc40f50c902f2b4d0ed77ee511a7c7a9bcd3ca86d4cd86f989dd35bc5ff499670da34255b45b0cfd830e81f605dcf7dc5542e93ae9cd76f";
        Code::Sha3_224, "171Cdfb7f18c77e928bb56faeb2da27291bd790bc1045cde45f3210bb6c5";
        Code::Sha3_256, "1620644bcc7e564373040999aac89e7622f3ca71fba1d972fd94a31c3bfbf24e3938";
        Code::Sha3_384, "153083bff28dde1b1bf5810071c6643c08e5b05bdb836effd70b403ea8ea0a634dc4997eb1053aa3593f590f9c63630dd90b";
        Code::Sha3_512, "1440840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a";
        Code::Keccak224, "1A1C25f3ecfebabe99686282f57f5c9e1f18244cfee2813d33f955aae568";
        Code::Keccak256, "1B2047173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad";
        Code::Keccak384, "1C3065fc99339a2a40e99d3c40d695b22f278853ca0f925cde4254bcae5e22ece47e6441f91b6568425adc9d95b0072eb49f";
        Code::Keccak512, "1D403ee2b40047b8060f68c67242175660f4174d0af5c01d47168ec20ed619b0b7c42181f40aa1046f39e2ef9efc6910782a998e0013d172458957957fac9405b67d";
        Code::Blake2b512, "c0e40240021ced8799296ceca557832ab941a50b4a11f83478cf141f51f933f653ab9fbcc05a037cddbed06e309bf334942c4e58cdf1a46e237911ccd7fcf9787cbc7fd0";
        Code::Blake2s256, "e0e402209aec6806794561107e594b1f6a8a6b0c92a0cba9acf5e5e93cca06f781813b0b";
        Code::Blake2b256, "a0e40220256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610";
        Code::Blake2s128, "d0e4021037deae0226c30da2ab424a7b8ee14e83";
        Code::Blake3_256, "1e20d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24";
    }
}

macro_rules!
assert_roundtrip { 161 | ($( $code:expr, $alg:ident; )*) => { 162 | $( 163 | // Hashing with one call 164 | { 165 | let hash = $code.digest(b"helloworld"); 166 | assert_eq!( 167 | Multihash::::from_bytes(&hash.to_bytes()).unwrap().code(), 168 | hash.code() 169 | ); 170 | } 171 | // Hashing incrementally 172 | { 173 | let mut hasher = <$alg>::default(); 174 | hasher.update(b"helloworld"); 175 | let hash = Code::multihash_from_digest(&hasher.finalize()); 176 | assert_eq!( 177 | Multihash::::from_bytes(&hash.to_bytes()).unwrap().code(), 178 | hash.code() 179 | ); 180 | } 181 | )* 182 | } 183 | } 184 | 185 | #[allow(clippy::cognitive_complexity)] 186 | #[test] 187 | fn assert_roundtrip() { 188 | assert_roundtrip!( 189 | Code::Identity, Identity256; 190 | Code::Sha1, Sha1; 191 | Code::Sha2_256, Sha2_256; 192 | Code::Sha2_512, Sha2_512; 193 | Code::Sha3_224, Sha3_224; 194 | Code::Sha3_256, Sha3_256; 195 | Code::Sha3_384, Sha3_384; 196 | Code::Sha3_512, Sha3_512; 197 | Code::Keccak224, Keccak224; 198 | Code::Keccak256, Keccak256; 199 | Code::Keccak384, Keccak384; 200 | Code::Keccak512, Keccak512; 201 | Code::Blake2b512, Blake2b512; 202 | Code::Blake2s256, Blake2s256; 203 | Code::Blake3_256, Blake3_256; 204 | ); 205 | } 206 | 207 | /// Testing the public interface of `Multihash` and coversions to it 208 | fn multihash_methods(code: Code, prefix: &str, digest_str: &str) 209 | where 210 | H: StatefulHasher, 211 | Code: for<'a> From<&'a H::Digest>, 212 | { 213 | let digest = hex_to_bytes(digest_str); 214 | let expected_bytes = hex_to_bytes(&format!("{}{}", prefix, digest_str)); 215 | let mut expected_cursor = Cursor::new(&expected_bytes); 216 | let multihash = code.digest(b"hello world"); 217 | 218 | assert_eq!(Multihash::wrap(code.into(), &digest).unwrap(), multihash); 219 | assert_eq!(multihash.code(), u64::from(code)); 220 | assert_eq!(multihash.size() as usize, digest.len()); 221 | assert_eq!(multihash.digest(), digest); 222 | assert_eq!(Multihash::read(&mut 
expected_cursor).unwrap(), multihash); 223 | assert_eq!(Multihash::from_bytes(&expected_bytes).unwrap(), multihash); 224 | let mut written_buf = Vec::new(); 225 | multihash.write(&mut written_buf).unwrap(); 226 | assert_eq!(written_buf, expected_bytes); 227 | assert_eq!(multihash.to_bytes(), expected_bytes); 228 | 229 | // Test from hasher digest conversion 230 | let mut hasher = H::default(); 231 | hasher.update(b"hello world"); 232 | let multihash_from_digest = Code::multihash_from_digest(&hasher.finalize()); 233 | assert_eq!(multihash_from_digest.code(), u64::from(code)); 234 | assert_eq!(multihash_from_digest.size() as usize, digest.len()); 235 | assert_eq!(multihash_from_digest.digest(), digest); 236 | } 237 | 238 | #[test] 239 | fn test_multihash_methods() { 240 | multihash_methods::(Code::Identity, "000b", "68656c6c6f20776f726c64"); 241 | multihash_methods::( 242 | Code::Sha1, 243 | "1114", 244 | "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", 245 | ); 246 | multihash_methods::( 247 | Code::Sha2_256, 248 | "1220", 249 | "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9", 250 | ); 251 | multihash_methods::( 252 | Code::Sha2_512, 253 | "1340", 254 | "309ecc489c12d6eb4cc40f50c902f2b4d0ed77ee511a7c7a9bcd3ca86d4cd86f989dd35bc5ff499670da34255b45b0cfd830e81f605dcf7dc5542e93ae9cd76f"); 255 | multihash_methods::( 256 | Code::Sha3_224, 257 | "171C", 258 | "dfb7f18c77e928bb56faeb2da27291bd790bc1045cde45f3210bb6c5", 259 | ); 260 | multihash_methods::( 261 | Code::Sha3_256, 262 | "1620", 263 | "644bcc7e564373040999aac89e7622f3ca71fba1d972fd94a31c3bfbf24e3938", 264 | ); 265 | multihash_methods::( 266 | Code::Sha3_384, 267 | "1530", 268 | "83bff28dde1b1bf5810071c6643c08e5b05bdb836effd70b403ea8ea0a634dc4997eb1053aa3593f590f9c63630dd90b"); 269 | multihash_methods::( 270 | Code::Sha3_512, 271 | "1440", 272 | "840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a"); 273 | 
multihash_methods::( 274 | Code::Keccak224, 275 | "1A1C", 276 | "25f3ecfebabe99686282f57f5c9e1f18244cfee2813d33f955aae568", 277 | ); 278 | multihash_methods::( 279 | Code::Keccak256, 280 | "1B20", 281 | "47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad", 282 | ); 283 | multihash_methods::( 284 | Code::Keccak384, 285 | "1C30", 286 | "65fc99339a2a40e99d3c40d695b22f278853ca0f925cde4254bcae5e22ece47e6441f91b6568425adc9d95b0072eb49f"); 287 | multihash_methods::( 288 | Code::Keccak512, 289 | "1D40", 290 | "3ee2b40047b8060f68c67242175660f4174d0af5c01d47168ec20ed619b0b7c42181f40aa1046f39e2ef9efc6910782a998e0013d172458957957fac9405b67d"); 291 | multihash_methods::( 292 | Code::Blake2b512, 293 | "c0e40240", 294 | "021ced8799296ceca557832ab941a50b4a11f83478cf141f51f933f653ab9fbcc05a037cddbed06e309bf334942c4e58cdf1a46e237911ccd7fcf9787cbc7fd0"); 295 | multihash_methods::( 296 | Code::Blake2s256, 297 | "e0e40220", 298 | "9aec6806794561107e594b1f6a8a6b0c92a0cba9acf5e5e93cca06f781813b0b", 299 | ); 300 | multihash_methods::( 301 | Code::Blake2b256, 302 | "a0e40220", 303 | "256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610", 304 | ); 305 | multihash_methods::( 306 | Code::Blake2s128, 307 | "d0e40210", 308 | "37deae0226c30da2ab424a7b8ee14e83", 309 | ); 310 | multihash_methods::( 311 | Code::Blake3_256, 312 | "1e20", 313 | "d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24", 314 | ); 315 | } 316 | 317 | #[test] 318 | #[should_panic] 319 | fn test_long_identity_hash() { 320 | // The identity hash panics if the input size is bigger than the maximum size 321 | let input = b"abcdefghijklmnopqrstuvwxyz abcdefghijklmnopqrstuvwxyz"; 322 | Identity256::digest(input); 323 | } 324 | 325 | #[test] 326 | fn multihash_errors() { 327 | assert!( 328 | Multihash::::from_bytes(&[]).is_err(), 329 | "Should error on empty data" 330 | ); 331 | assert!( 332 | Multihash::::from_bytes(&[1, 2, 3]).is_err(), 333 | "Should error on invalid multihash" 334 | ); 335 
| assert!( 336 | Multihash::::from_bytes(&[1, 2, 3]).is_err(), 337 | "Should error on invalid prefix" 338 | ); 339 | assert!( 340 | Multihash::::from_bytes(&[0x12, 0x20, 0xff]).is_err(), 341 | "Should error on correct prefix with wrong digest" 342 | ); 343 | let identity_code: u8 = 0x00; 344 | let identity_length = 3; 345 | assert!( 346 | Multihash::::from_bytes(&[identity_code, identity_length, 1, 2, 3, 4]).is_err(), 347 | "Should error on wrong hash length" 348 | ); 349 | } 350 | --------------------------------------------------------------------------------