diff --git a/Cargo.toml b/Cargo.toml index 9d3329b7..30a3c271 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ derive = ["multihash-derive"] arb = ["quickcheck", "rand"] secure-hashes = ["blake2b", "blake2s", "blake3", "sha2", "sha3"] scale-codec = ["parity-scale-codec"] -serde-codec = ["serde", "generic-array/serde"] +serde-codec = ["serde", "serde-big-array"] blake2b = ["blake2b_simd"] blake2s = ["blake2s_simd"] @@ -32,13 +32,13 @@ sha3 = ["digest", "sha-3"] strobe = ["strobe-rs"] [dependencies] -generic-array = "0.14.4" parity-scale-codec = { version = "2.1.1", default-features = false, features = ["derive"], optional = true } quickcheck = { version = "0.9.2", optional = true } rand = { version = "0.7.3", optional = true } -serde = { version = "1.0.116", default-features = false, features = ["derive"], optional = true } -multihash-derive = { version = "0.7.2", path = "derive", default-features = false, optional = true } -unsigned-varint = { version = "0.7.0", default-features = false } +serde = { version = "1.0.116", optional = true, default-features = false, features = ["derive"] } +serde-big-array = { version = "0.3.2", optional = true, features = ["const-generics"] } +multihash-derive = { version = "^0.7.1", path = "derive", default-features = false, optional = true } +unsigned-varint = "0.7.0" blake2b_simd = { version = "0.5.10", default-features = false, optional = true } blake2s_simd = { version = "0.5.10", default-features = false, optional = true } diff --git a/README.md b/README.md index f3196616..b0835557 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,8 @@ multihash = "*" Then run `cargo build`. 
+MSRV 1.51.0 due to use of const generics + ## Usage ```rust @@ -50,14 +52,14 @@ You can derive your own application specific code table: ```rust use multihash::derive::Multihash; -use multihash::{MultihashDigest, U32, U64}; +use multihash::MultihashCode; #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = U64)] +#[mh(alloc_size = 64)] pub enum Code { - #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Foo, - #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest)] + #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest<64>)] Bar, } diff --git a/derive/src/lib.rs b/derive/src/lib.rs index f616551a..84189842 100644 --- a/derive/src/lib.rs +++ b/derive/src/lib.rs @@ -8,8 +8,8 @@ //! //! If you set `#mh(alloc_size = …)` to a too low value, you will get compiler errors. Please note //! the the sizes are checked only on a syntactic level and *not* on the type level. This means -//! that digest need to have a size generic, which is a valid `typenum`, for example `U32` or -//! `generic_array::typenum::U64`. +//! that digest need to have a size const generic, which is a valid `usize`, for example `32` or +//! `64`. //! //! You can disable those compiler errors with setting the `no_alloc_size_errors` attribute. This //! can be useful if you e.g. have specified type aliases for your hash digests and you are sure @@ -19,14 +19,14 @@ //! //! ``` //! use multihash::derive::Multihash; -//! use multihash::{MultihashDigest, U32, U64}; +//! use multihash::MultihashDigest; //! //! #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -//! #[mh(alloc_size = U64)] +//! #[mh(alloc_size = 64)] //! pub enum Code { -//! #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] +//! #[mh(code = 0x01, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] //! 
Foo, -//! #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest)] +//! #[mh(code = 0x02, hasher = multihash::Sha2_512, digest = multihash::Sha2Digest<64>)] //! Bar, //! } //! diff --git a/derive/src/multihash.rs b/derive/src/multihash.rs index 8e69f37b..c8caf4b9 100644 --- a/derive/src/multihash.rs +++ b/derive/src/multihash.rs @@ -43,7 +43,7 @@ impl Parse for MhAttr { /// Attributes of the top-level derive. #[derive(Debug)] enum DeriveAttr { - AllocSize(utils::Attr), + AllocSize(utils::Attr), NoAllocSizeErrors(kw::no_alloc_size_errors), } @@ -143,7 +143,7 @@ impl<'a> From<&'a VariantInfo<'a>> for Hash { proc_macro_error::abort!(ident, msg); }); let digest = digest.unwrap_or_else(|| { - let msg = "Missing digest atttibute: e.g. #[mh(digest = multihash::Sha2Digest)]"; + let msg = "Missing digest attribute: e.g. #[mh(digest = multihash::Sha2Digest<32>)]"; #[cfg(test)] panic!("{}", msg); #[cfg(not(test))] @@ -161,7 +161,7 @@ impl<'a> From<&'a VariantInfo<'a>> for Hash { /// Parse top-level enum [#mh()] attributes. /// /// Returns the `alloc_size` and whether errors regarding to `alloc_size` should be reported or not. -fn parse_code_enum_attrs(ast: &syn::DeriveInput) -> (syn::Type, bool) { +fn parse_code_enum_attrs(ast: &syn::DeriveInput) -> (syn::LitInt, bool) { let mut alloc_size = None; let mut no_alloc_size_errors = false; @@ -181,7 +181,7 @@ fn parse_code_enum_attrs(ast: &syn::DeriveInput) -> (syn::Type, bool) { match alloc_size { Some(alloc_size) => (alloc_size, no_alloc_size_errors), None => { - let msg = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = U64)]"; + let msg = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = 64)]"; #[cfg(test)] panic!("{}", msg); #[cfg(not(test))] @@ -226,33 +226,12 @@ fn error_code_duplicates(hashes: &[Hash]) { #[derive(Debug)] struct ParseError(Span); -/// Parse a path containing a `typenum` unsigned integer (e.g.
`U64`) into a u64 -fn parse_unsigned_typenum(typenum_path: &syn::Type) -> Result { - match typenum_path { - syn::Type::Path(type_path) => match type_path.path.segments.last() { - Some(path_segment) => { - let typenum_ident = &path_segment.ident; - let typenum = typenum_ident.to_string(); - match typenum.as_str().split_at(1) { - ("U", byte_size) => byte_size - .parse::() - .map_err(|_| ParseError(typenum_ident.span())), - _ => Err(ParseError(typenum_ident.span())), - } - } - None => Err(ParseError(type_path.path.span())), - }, - _ => Err(ParseError(typenum_path.span())), - } -} - /// Returns the max size as u64. /// -/// Emits an error if the `#mh(alloc_size)` attribute doesn't contain a valid unsigned integer -/// `typenum`. -fn parse_alloc_size_attribute(alloc_size: &syn::Type) -> u64 { - parse_unsigned_typenum(&alloc_size).unwrap_or_else(|_| { - let msg = "`alloc_size` attribute must be a `typenum`, e.g. #[mh(alloc_size = U64)]"; +/// Emits an error if the `#mh(alloc_size)` attribute doesn't contain a valid unsigned integer. +fn parse_alloc_size_attribute(alloc_size: &syn::LitInt) -> u64 { + alloc_size.base10_parse().unwrap_or_else(|_| { + let msg = "`alloc_size` attribute must be an integer, e.g. #[mh(alloc_size = 64)]"; #[cfg(test)] panic!("{}", msg); #[cfg(not(test))] @@ -261,38 +240,39 @@ fn parse_alloc_size_attribute(alloc_size: &syn::Type) -> u64 { } /// Return a warning/error if the specified alloc_size is smaller than the biggest digest -fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::Type) { +fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::LitInt) { let expected_alloc_size = parse_alloc_size_attribute(expected_alloc_size_type); let maybe_error: Result<(), ParseError> = hashes .iter() .try_for_each(|hash| { - // The digest type must have a size parameter of the shape `U`, else we error. + // The digest type must have an integer as size parameter, else we error. 
match hash.digest.segments.last() { Some(path_segment) => match &path_segment.arguments { syn::PathArguments::AngleBracketed(arguments) => match arguments.args.last() { - Some(syn::GenericArgument::Type(path)) => { - match parse_unsigned_typenum(&path) { - Ok(max_digest_size) => { - if max_digest_size > expected_alloc_size { - let msg = format!("The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (U{})", - max_digest_size); - #[cfg(test)] - panic!("{}", msg); - #[cfg(not(test))] - { - let digest = &hash.digest.to_token_stream().to_string().replace(" ", ""); - let line = &hash.digest.span().start().line; - proc_macro_error::emit_error!( - &expected_alloc_size_type, msg; - note = "the bigger digest is `{}` at line {}", digest, line; - ); - } - } - Ok(()) - }, - Err(err) => Err(err), - } + Some(syn::GenericArgument::Const(syn::Expr::Lit(expr_lit))) => match &expr_lit.lit { + syn::Lit::Int(lit_int) => match lit_int.base10_parse::() { + Ok(max_digest_size) => { + if max_digest_size > expected_alloc_size { + let msg = format!("The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size ({})", + max_digest_size); + #[cfg(test)] + panic!("{}", msg); + #[cfg(not(test))] + { + let digest = &hash.digest.to_token_stream().to_string().replace(" ", ""); + let line = &hash.digest.span().start().line; + proc_macro_error::emit_error!( + &expected_alloc_size_type, msg; + note = "the bigger digest is `{}` at line {}", digest, line; + ); + } + } + Ok(()) + }, + _ => Err(ParseError(lit_int.span())), + }, + _ => Err(ParseError(expr_lit.span())), }, _ => Err(ParseError(arguments.args.span())), }, @@ -303,7 +283,7 @@ fn error_alloc_size(hashes: &[Hash], expected_alloc_size_type: &syn::Type) { }); if let Err(_error) = maybe_error { - let msg = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`"; + let msg = "Invalid byte size. It must be a unsigned integer, e.g. 
`32`"; #[cfg(test)] panic!("{}", msg); #[cfg(not(test))] @@ -344,9 +324,7 @@ pub fn multihash(s: Structure) -> TokenStream { /// A Multihash with the same allocated size as the Multihashes produces by this derive. pub type Multihash = #mh_crate::MultihashGeneric::<#alloc_size>; - impl #mh_crate::MultihashDigest for #code_enum { - type AllocSize = #alloc_size; - + impl #mh_crate::MultihashDigest<#alloc_size> for #code_enum { fn digest(&self, input: &[u8]) -> Multihash { use #mh_crate::Hasher; match self { @@ -355,9 +333,8 @@ pub fn multihash(s: Structure) -> TokenStream { } } - fn multihash_from_digest<'a, S, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D, const S: usize>(digest: &'a D) -> Multihash where - S: #mh_crate::Size, D: #mh_crate::Digest, Self: From<&'a D>, { @@ -398,21 +375,20 @@ mod tests { fn test_multihash_derive() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { - #[mh(code = multihash::IDENTITY, hasher = multihash::Identity256, digest = multihash::IdentityDigest)] + #[mh(code = multihash::IDENTITY, hasher = multihash::Identity256, digest = multihash::IdentityDigest<32>)] Identity256, /// Multihash array for hash function. - #[mh(code = 0x38b64f, hasher = multihash::Strobe256, digest = multihash::StrobeDigest)] + #[mh(code = 0x38b64f, hasher = multihash::Strobe256, digest = multihash::StrobeDigest<32>)] Strobe256, } }; let expected = quote! { /// A Multihash with the same allocated size as the Multihashes produces by this derive. 
- pub type Multihash = multihash::MultihashGeneric::; + pub type Multihash = multihash::MultihashGeneric::<32>; - impl multihash::MultihashDigest for Code { - type AllocSize = U32; + impl multihash::MultihashDigest<32> for Code { fn digest(&self, input: &[u8]) -> Multihash { use multihash::Hasher; @@ -429,9 +405,8 @@ mod tests { } } - fn multihash_from_digest<'a, S, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D, const S: usize>(digest: &'a D) -> Multihash where - S: multihash::Size, D: multihash::Digest, Self: From<&'a D>, { @@ -463,13 +438,13 @@ mod tests { } } - impl From<&multihash::IdentityDigest > for Code { - fn from(digest: &multihash::IdentityDigest) -> Self { + impl From<&multihash::IdentityDigest<32> > for Code { + fn from(digest: &multihash::IdentityDigest<32>) -> Self { Self::Identity256 } } - impl From<&multihash::StrobeDigest > for Code { - fn from(digest: &multihash::StrobeDigest) -> Self { + impl From<&multihash::StrobeDigest<32> > for Code { + fn from(digest: &multihash::StrobeDigest<32>) -> Self { Self::Strobe256 } } @@ -487,11 +462,11 @@ mod tests { fn test_multihash_error_code_duplicates() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U64)] + #[mh(alloc_size = 64)] pub enum Multihash { - #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, - #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = multihash::SHA2_256, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, } }; @@ -505,11 +480,11 @@ mod tests { fn test_multihash_error_code_duplicates_numbers() { let input = quote! 
{ #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Identity256, } }; @@ -520,13 +495,13 @@ mod tests { #[test] #[should_panic( - expected = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = U64)]" + expected = "enum is missing `alloc_size` attribute: e.g. #[mh(alloc_size = 64)]" )] fn test_multihash_error_no_alloc_size() { let input = quote! { #[derive(Clone, Multihash)] pub enum Code { - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Sha2_256, } }; @@ -537,14 +512,14 @@ mod tests { #[test] #[should_panic( - expected = "The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (U32)" + expected = "The `#mh(alloc_size) attribute must be bigger than the maximum defined digest size (32)" )] fn test_multihash_error_too_small_alloc_size() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U16)] + #[mh(alloc_size = 16)] pub enum Code { - #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] + #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<32>)] Sha2_256, } }; @@ -554,13 +529,11 @@ mod tests { } #[test] - #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" - )] + #[should_panic(expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`")] fn test_multihash_error_digest_invalid_size_type() { let input = quote! 
{ #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest)] Sha2_256, @@ -572,13 +545,11 @@ mod tests { } #[test] - #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" - )] + #[should_panic(expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`")] fn test_multihash_error_digest_invalid_size_type2() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = multihash::Sha2Digest<_>)] Sha2_256, @@ -590,13 +561,11 @@ mod tests { } #[test] - #[should_panic( - expected = "Invalid byte size. It must be a unsigned integer typenum, e.g. `U32`" - )] - fn test_multihash_error_digest_without_typenum() { + #[should_panic(expected = "Invalid byte size. It must be a unsigned integer, e.g. `32`")] + fn test_multihash_error_digest_without_size() { let input = quote! { #[derive(Clone, Multihash)] - #[mh(alloc_size = U32)] + #[mh(alloc_size = 32)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = Sha2_256Digest)] Sha2_256, @@ -609,10 +578,10 @@ mod tests { // This one does not panic, die to `no_alloc_size_errors` #[test] - fn test_multihash_error_digest_without_typenum_no_alloc_size_errors() { + fn test_multihash_error_digest_without_size_no_alloc_size_errors() { let input = quote! 
{ #[derive(Clone, Multihash)] - #[mh(alloc_size = U32, no_alloc_size_errors)] + #[mh(alloc_size = 32, no_alloc_size_errors)] pub enum Code { #[mh(code = 0x14, hasher = multihash::Sha2_256, digest = Sha2_256Digest)] Sha2_256, diff --git a/examples/custom_table.rs b/examples/custom_table.rs index 01ae32d0..183f3977 100644 --- a/examples/custom_table.rs +++ b/examples/custom_table.rs @@ -1,19 +1,16 @@ use std::convert::TryFrom; use multihash::derive::Multihash; -use multihash::typenum::{U20, U25, U64}; use multihash::{ - Digest, Error, Hasher, MultihashDigest, MultihashGeneric, Sha2Digest, Sha2_256, Size, - StatefulHasher, + Digest, Error, Hasher, MultihashDigest, MultihashGeneric, Sha2Digest, Sha2_256, StatefulHasher, }; // You can implement a custom hasher. This is a SHA2 256-bit hasher that returns a hash that is // truncated to 160 bits. #[derive(Default, Debug)] pub struct Sha2_256Truncated20(Sha2_256); -impl StatefulHasher for Sha2_256Truncated20 { - type Size = U20; - type Digest = Sha2Digest; +impl StatefulHasher<20> for Sha2_256Truncated20 { + type Digest = Sha2Digest<{ Self::SIZE }>; fn update(&mut self, input: &[u8]) { self.0.update(input) } @@ -28,13 +25,13 @@ impl StatefulHasher for Sha2_256Truncated20 { } #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = U64)] +#[mh(alloc_size = 64)] pub enum Code { /// Example for using a custom hasher which returns truncated hashes - #[mh(code = 0x12, hasher = Sha2_256Truncated20, digest = multihash::Sha2Digest)] + #[mh(code = 0x12, hasher = Sha2_256Truncated20, digest = multihash::Sha2Digest<20>)] Sha2_256Truncated20, /// Example for using a hasher with a bit size that is not exported by default - #[mh(code = 0xb219, hasher = multihash::Blake2bHasher::, digest = multihash::Blake2bDigest)] + #[mh(code = 0xb219, hasher = multihash::Blake2bHasher::<25>, digest = multihash::Blake2bDigest<25>)] Blake2b200, } diff --git a/src/arb.rs b/src/arb.rs index 8196bb6b..84eb6af9 100644 --- a/src/arb.rs +++ 
b/src/arb.rs @@ -4,10 +4,10 @@ use rand::{ Rng, }; -use crate::{MultihashGeneric, U64}; +use crate::MultihashGeneric; /// Generates a random valid multihash. -impl Arbitrary for MultihashGeneric { +impl Arbitrary for MultihashGeneric<64> { fn arbitrary(g: &mut G) -> Self { // In real world lower multihash codes are more likely to happen, hence distribute them // with bias towards smaller values. diff --git a/src/hasher.rs b/src/hasher.rs index dd224488..63c73827 100644 --- a/src/hasher.rs +++ b/src/hasher.rs @@ -1,25 +1,12 @@ use crate::error::Error; use core::fmt::Debug; -use generic_array::typenum::marker_traits::Unsigned; -use generic_array::{ArrayLength, GenericArray}; - -/// Size marker trait. -pub trait Size: - ArrayLength + Debug + Default + Eq + core::hash::Hash + Send + Sync + 'static -{ -} - -impl + Debug + Default + Eq + core::hash::Hash + Send + Sync + 'static> Size - for T -{ -} /// Stack allocated digest trait. -pub trait Digest: +pub trait Digest: AsRef<[u8]> + AsMut<[u8]> - + From> - + Into> + + From<[u8; S]> + + Into<[u8; S]> + Clone + core::hash::Hash + Debug @@ -29,17 +16,15 @@ pub trait Digest: + Sync + 'static { - /// Size of the digest. - fn size(&self) -> u8 { - S::to_u8() - } + /// Size of the digest. Maximum for Some of the Blake family is 2^64-1 bytes + const SIZE: usize = S; /// Wraps the digest bytes. 
fn wrap(digest: &[u8]) -> Result { - if digest.len() != S::to_usize() { + if digest.len() != S { return Err(Error::InvalidSize(digest.len() as _)); } - let mut array = GenericArray::default(); + let mut array = [0; S]; let len = digest.len().min(array.len()); array[..len].copy_from_slice(&digest[..len]); Ok(array.into()) @@ -56,22 +41,19 @@ pub trait Digest: use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S::to_u64() || size > u8::max_value() as u64 { + if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = GenericArray::default(); + let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; Ok(Self::from(digest)) } } /// Trait implemented by a hash function implementation. -pub trait StatefulHasher: Default + Send + Sync { - /// The maximum Digest size for that hasher (it is stack allocated). - type Size: Size; - +pub trait StatefulHasher: Default + Send + Sync { /// The Digest type to distinguish the output of different `Hasher` implementations. - type Digest: Digest; + type Digest: Digest; /// Consume input and update internal state. fn update(&mut self, input: &[u8]); @@ -106,17 +88,12 @@ pub trait StatefulHasher: Default + Send + Sync { /// [Multihashes]: /~https://github.com/multiformats/multihash /// [associated type]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#specifying-placeholder-types-in-trait-definitions-with-associated-types /// [`MultihashDigest`]: crate::MultihashDigest -pub trait Hasher: Default + Send + Sync { - /// The maximum Digest size for that hasher (it is stack allocated). - type Size: Size; - +pub trait Hasher: Default + Send + Sync { /// The Digest type to distinguish the output of different `Hasher` implementations. - type Digest: Digest; + type Digest: Digest; - /// Returns the allocated size of the digest. - fn size() -> u8 { - Self::Size::to_u8() - } + ///the allocated size of the digest. 
+ const SIZE: usize = S; /// Hashes the given `input` data and returns its hash digest. fn digest(input: &[u8]) -> Self::Digest @@ -124,8 +101,7 @@ pub trait Hasher: Default + Send + Sync { Self: Sized; } -impl Hasher for T { - type Size = T::Size; +impl, const S: usize> Hasher for T { type Digest = T::Digest; fn digest(input: &[u8]) -> Self::Digest { diff --git a/src/hasher_impl.rs b/src/hasher_impl.rs index 422e6b54..8fb44400 100644 --- a/src/hasher_impl.rs +++ b/src/hasher_impl.rs @@ -1,35 +1,38 @@ use crate::error::Error; -use crate::hasher::{Digest, Size, StatefulHasher}; +use crate::hasher::{Digest, StatefulHasher}; use core::convert::TryFrom; -use generic_array::GenericArray; macro_rules! derive_digest { ($name:ident) => { /// Multihash digest. - #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] - pub struct $name(GenericArray); + #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] + pub struct $name([u8; S]); - impl Copy for $name where S::ArrayType: Copy {} + impl Default for $name { + fn default() -> Self { + [0u8; S].into() + } + } - impl AsRef<[u8]> for $name { + impl AsRef<[u8]> for $name { fn as_ref(&self) -> &[u8] { &self.0 } } - impl AsMut<[u8]> for $name { + impl AsMut<[u8]> for $name { fn as_mut(&mut self) -> &mut [u8] { &mut self.0 } } - impl From> for $name { - fn from(array: GenericArray) -> Self { + impl From<[u8; S]> for $name { + fn from(array: [u8; S]) -> Self { Self(array) } } - impl From<$name> for GenericArray { + impl From<$name> for [u8; S] { fn from(digest: $name) -> Self { digest.0 } @@ -38,7 +41,7 @@ macro_rules! derive_digest { /// Convert slice to `Digest`. /// /// It errors when the length of the slice does not match the size of the `Digest`. - impl TryFrom<&[u8]> for $name { + impl TryFrom<&[u8]> for $name { type Error = Error; fn try_from(slice: &[u8]) -> Result { @@ -46,14 +49,14 @@ macro_rules! derive_digest { } } - impl Digest for $name {} + impl Digest for $name {} }; } macro_rules! 
derive_write { ($name:ident) => { #[cfg(feature = "std")] - impl std::io::Write for $name { + impl std::io::Write for $name { fn write(&mut self, buf: &[u8]) -> std::io::Result { self.update(buf); Ok(buf.len()) @@ -73,25 +76,22 @@ macro_rules! derive_hasher_blake { /// Multihash hasher. #[derive(Debug)] - pub struct $name { - _marker: PhantomData, + pub struct $name { state: $module::State, } - impl Default for $name { + impl Default for $name { fn default() -> Self { let mut params = $module::Params::new(); - params.hash_length(S::to_usize()); + params.hash_length(S); Self { - _marker: PhantomData, state: params.to_state(), } } } - impl StatefulHasher for $name { - type Size = S; - type Digest = $digest; + impl StatefulHasher for $name { + type Digest = $digest; fn update(&mut self, input: &[u8]) { self.state.update(input); @@ -99,7 +99,9 @@ macro_rules! derive_hasher_blake { fn finalize(&self) -> Self::Digest { let digest = self.state.finalize(); - GenericArray::clone_from_slice(digest.as_bytes()).into() + let mut array = [0; S]; + array.clone_from_slice(digest.as_bytes()); + array.into() } fn reset(&mut self) { @@ -115,63 +117,52 @@ macro_rules! derive_hasher_blake { #[cfg(feature = "blake2b")] pub mod blake2b { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::{U32, U64}; derive_hasher_blake!(blake2b_simd, Blake2bHasher, Blake2bDigest); /// 256 bit blake2b hasher. - pub type Blake2b256 = Blake2bHasher; + pub type Blake2b256 = Blake2bHasher<32>; /// 512 bit blake2b hasher. - pub type Blake2b512 = Blake2bHasher; + pub type Blake2b512 = Blake2bHasher<64>; } #[cfg(feature = "blake2s")] pub mod blake2s { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::{U16, U32}; derive_hasher_blake!(blake2s_simd, Blake2sHasher, Blake2sDigest); /// 256 bit blake2b hasher. - pub type Blake2s128 = Blake2sHasher; + pub type Blake2s128 = Blake2sHasher<16>; /// 512 bit blake2b hasher. 
- pub type Blake2s256 = Blake2sHasher; + pub type Blake2s256 = Blake2sHasher<32>; } #[cfg(feature = "blake3")] pub mod blake3 { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::U32; // derive_hasher_blake!(blake3, Blake3Hasher, Blake3Digest); derive_digest!(Blake3Digest); /// Multihash hasher. #[derive(Debug)] - pub struct Blake3Hasher { - _marker: PhantomData, + pub struct Blake3Hasher { hasher: ::blake3::Hasher, } - impl Default for Blake3Hasher { + impl Default for Blake3Hasher { fn default() -> Self { let hasher = ::blake3::Hasher::new(); - Self { - _marker: PhantomData, - hasher, - } + Self { hasher } } } - impl StatefulHasher for Blake3Hasher { - type Size = S; - type Digest = Blake3Digest; + impl StatefulHasher for Blake3Hasher { + type Digest = Blake3Digest; fn update(&mut self, input: &[u8]) { self.hasher.update(input); @@ -179,7 +170,9 @@ pub mod blake3 { fn finalize(&self) -> Self::Digest { let digest = self.hasher.finalize(); //default is 32 bytes anyway - GenericArray::clone_from_slice(digest.as_bytes()).into() + let mut array = [0; S]; + array.clone_from_slice(digest.as_bytes()); + array.into() } fn reset(&mut self) { @@ -190,21 +183,20 @@ pub mod blake3 { derive_write!(Blake3Hasher); /// blake3-256 hasher. - pub type Blake3_256 = Blake3Hasher; + pub type Blake3_256 = Blake3Hasher<32>; } #[cfg(feature = "digest")] macro_rules! derive_hasher_sha { - ($module:ty, $name:ident, $size:ty, $digest:ident) => { + ($module:ty, $name:ident, $size:expr, $digest:ident) => { /// Multihash hasher. #[derive(Debug, Default)] pub struct $name { state: $module, } - impl $crate::hasher::StatefulHasher for $name { - type Size = $size; - type Digest = $digest; + impl $crate::hasher::StatefulHasher<$size> for $name { + type Digest = $digest<$size>; fn update(&mut self, input: &[u8]) { use digest::Digest; @@ -213,7 +205,10 @@ macro_rules! 
derive_hasher_sha { fn finalize(&self) -> Self::Digest { use digest::Digest; - Self::Digest::from(self.state.clone().finalize()) + let digest = self.state.clone().finalize(); + let mut array = [0; $size]; + array.copy_from_slice(digest.as_slice()); + array.into() } fn reset(&mut self) { @@ -239,88 +234,88 @@ macro_rules! derive_hasher_sha { #[cfg(feature = "sha1")] pub mod sha1 { use super::*; - use generic_array::typenum::U20; derive_digest!(Sha1Digest); - derive_hasher_sha!(::sha1::Sha1, Sha1, U20, Sha1Digest); + derive_hasher_sha!(::sha1::Sha1, Sha1, 20, Sha1Digest); } #[cfg(feature = "sha2")] pub mod sha2 { use super::*; - use generic_array::typenum::{U32, U64}; derive_digest!(Sha2Digest); - derive_hasher_sha!(sha_2::Sha256, Sha2_256, U32, Sha2Digest); - derive_hasher_sha!(sha_2::Sha512, Sha2_512, U64, Sha2Digest); + derive_hasher_sha!(sha_2::Sha256, Sha2_256, 32, Sha2Digest); + derive_hasher_sha!(sha_2::Sha512, Sha2_512, 64, Sha2Digest); } #[cfg(feature = "sha3")] pub mod sha3 { use super::*; - use generic_array::typenum::{U28, U32, U48, U64}; derive_digest!(Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_224, Sha3_224, U28, Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_256, Sha3_256, U32, Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_384, Sha3_384, U48, Sha3Digest); - derive_hasher_sha!(sha_3::Sha3_512, Sha3_512, U64, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_224, Sha3_224, 28, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_256, Sha3_256, 32, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_384, Sha3_384, 48, Sha3Digest); + derive_hasher_sha!(sha_3::Sha3_512, Sha3_512, 64, Sha3Digest); derive_digest!(KeccakDigest); - derive_hasher_sha!(sha_3::Keccak224, Keccak224, U28, KeccakDigest); - derive_hasher_sha!(sha_3::Keccak256, Keccak256, U32, KeccakDigest); - derive_hasher_sha!(sha_3::Keccak384, Keccak384, U48, KeccakDigest); - derive_hasher_sha!(sha_3::Keccak512, Keccak512, U64, KeccakDigest); + derive_hasher_sha!(sha_3::Keccak224, Keccak224, 28, KeccakDigest); + 
derive_hasher_sha!(sha_3::Keccak256, Keccak256, 32, KeccakDigest); + derive_hasher_sha!(sha_3::Keccak384, Keccak384, 48, KeccakDigest); + derive_hasher_sha!(sha_3::Keccak512, Keccak512, 64, KeccakDigest); } pub mod identity { use super::*; use crate::error::Error; - use generic_array::typenum::U32; /// Multihash digest. - #[derive(Clone, Debug, Default, Eq, Hash, PartialEq)] - pub struct IdentityDigest(u8, GenericArray); + #[derive(Clone, Debug, Eq, Hash, PartialEq)] + pub struct IdentityDigest(usize, [u8; S]); + + impl Default for IdentityDigest { + fn default() -> Self { + Self { 0: 0, 1: [0u8; S] } + } + } - impl AsRef<[u8]> for IdentityDigest { + impl AsRef<[u8]> for IdentityDigest { fn as_ref(&self) -> &[u8] { &self.1[..self.0 as usize] } } - impl AsMut<[u8]> for IdentityDigest { + impl AsMut<[u8]> for IdentityDigest { fn as_mut(&mut self) -> &mut [u8] { &mut self.1[..self.0 as usize] } } - impl From> for IdentityDigest { - fn from(array: GenericArray) -> Self { - Self(array.len() as u8, array) + impl From<[u8; S]> for IdentityDigest { + fn from(array: [u8; S]) -> Self { + Self(array.len(), array) } } - impl From> for GenericArray { + impl From> for [u8; S] { fn from(digest: IdentityDigest) -> Self { digest.1 } } - impl Digest for IdentityDigest { - fn size(&self) -> u8 { - self.0 - } + impl Digest for IdentityDigest { + const SIZE: usize = S; // A custom implementation is needed as an identity hash value might be shorter than the // allocated Digest. 
fn wrap(digest: &[u8]) -> Result { - if digest.len() > S::to_usize() { + if digest.len() > S { return Err(Error::InvalidSize(digest.len() as _)); } - let mut array = GenericArray::default(); + let mut array = [0; S]; let len = digest.len().min(array.len()); array[..len].copy_from_slice(&digest[..len]); - Ok(Self(len as u8, array)) + Ok(Self(len, array)) } // A custom implementation is needed as an identity hash also stores the actual size of @@ -333,12 +328,12 @@ pub mod identity { use unsigned_varint::io::read_u64; let size = read_u64(&mut r)?; - if size > S::to_u64() || size > u8::max_value() as u64 { + if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = GenericArray::default(); + let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; - Ok(Self(size as u8, digest)) + Ok(Self(size as usize, digest)) } } @@ -347,15 +342,23 @@ pub mod identity { /// # Panics /// /// Panics if the input is bigger than the maximum size. - #[derive(Debug, Default)] - pub struct IdentityHasher { - bytes: GenericArray, + #[derive(Debug)] + pub struct IdentityHasher { + bytes: [u8; S], i: usize, } - impl StatefulHasher for IdentityHasher { - type Size = S; - type Digest = IdentityDigest; + impl Default for IdentityHasher { + fn default() -> Self { + Self { + i: 0, + bytes: [0u8; S], + } + } + } + + impl StatefulHasher for IdentityHasher { + type Digest = IdentityDigest; fn update(&mut self, input: &[u8]) { let start = self.i.min(self.bytes.len()); @@ -365,11 +368,11 @@ pub mod identity { } fn finalize(&self) -> Self::Digest { - IdentityDigest(self.i as u8, self.bytes.clone()) + IdentityDigest(self.i, self.bytes) } fn reset(&mut self) { - self.bytes = Default::default(); + self.bytes = [0; S]; self.i = 0; } } @@ -381,7 +384,7 @@ pub mod identity { /// # Panics /// /// Panics if the input is bigger than 32 bytes. 
- pub type Identity256 = IdentityHasher; + pub type Identity256 = IdentityHasher<32>; } pub mod unknown { @@ -392,32 +395,27 @@ pub mod unknown { #[cfg(feature = "strobe")] pub mod strobe { use super::*; - use core::marker::PhantomData; - use generic_array::typenum::{U32, U64}; use strobe_rs::{SecParam, Strobe}; derive_digest!(StrobeDigest); /// Strobe hasher. - pub struct StrobeHasher { - _marker: PhantomData, + pub struct StrobeHasher { strobe: Strobe, initialized: bool, } - impl Default for StrobeHasher { + impl Default for StrobeHasher { fn default() -> Self { Self { - _marker: PhantomData, strobe: Strobe::new(b"StrobeHash", SecParam::B128), initialized: false, } } } - impl StatefulHasher for StrobeHasher { - type Size = S; - type Digest = StrobeDigest; + impl StatefulHasher for StrobeHasher { + type Digest = StrobeDigest; fn update(&mut self, input: &[u8]) { self.strobe.ad(input, self.initialized); @@ -425,7 +423,7 @@ pub mod strobe { } fn finalize(&self) -> Self::Digest { - let mut hash = GenericArray::default(); + let mut hash = [0; S]; self.strobe.clone().prf(&mut hash, false); Self::Digest::from(hash) } @@ -440,8 +438,8 @@ pub mod strobe { derive_write!(StrobeHasher); /// 256 bit strobe hasher. - pub type Strobe256 = StrobeHasher; + pub type Strobe256 = StrobeHasher<32>; /// 512 bit strobe hasher. - pub type Strobe512 = StrobeHasher; + pub type Strobe512 = StrobeHasher<64>; } diff --git a/src/lib.rs b/src/lib.rs index 828f9ff0..619b98b1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -49,7 +49,7 @@ //! [Serde]: https://serde.rs //! 
[SCALE Codec]: /~https://github.com/paritytech/parity-scale-codec -#![deny(missing_docs)] +#![deny(missing_docs, unsafe_code)] #![cfg_attr(not(feature = "std"), no_std)] #[cfg(any(test, feature = "arb"))] @@ -62,9 +62,8 @@ mod multihash; mod multihash_impl; pub use crate::error::{Error, Result}; -pub use crate::hasher::{Digest, Hasher, Size, StatefulHasher}; +pub use crate::hasher::{Digest, Hasher, StatefulHasher}; pub use crate::multihash::{Multihash as MultihashGeneric, MultihashDigest}; -pub use generic_array::typenum::{self, U128, U16, U20, U28, U32, U48, U64}; #[cfg(feature = "derive")] pub use multihash_derive as derive; diff --git a/src/multihash.rs b/src/multihash.rs index 7bcc3830..e7ffffab 100644 --- a/src/multihash.rs +++ b/src/multihash.rs @@ -1,22 +1,20 @@ -use crate::hasher::{Digest, Size}; +use crate::hasher::Digest; use crate::Error; use core::convert::TryFrom; #[cfg(feature = "std")] use core::convert::TryInto; use core::fmt::Debug; -use generic_array::{ArrayLength, GenericArray}; +#[cfg(feature = "serde-codec")] +use serde_big_array::BigArray; /// Trait that implements hashing. /// /// It is usually implemented by a custom code table enum that derives the [`Multihash` derive]. /// /// [`Multihash` derive]: crate::derive -pub trait MultihashDigest: +pub trait MultihashDigest: TryFrom + Into + Send + Sync + Unpin + Copy + Eq + Debug + 'static { - /// The maximum size a hash will allocate. - type AllocSize: Size; - /// Calculate the hash of some input data. /// /// # Example @@ -28,7 +26,7 @@ pub trait MultihashDigest: /// let hash = Code::Sha3_256.digest(b"Hello world!"); /// println!("{:02x?}", hash); /// ``` - fn digest(&self, input: &[u8]) -> Multihash; + fn digest(&self, input: &[u8]) -> Multihash; /// Create a multihash from an existing [`Digest`]. 
/// @@ -43,10 +41,9 @@ pub trait MultihashDigest: /// println!("{:02x?}", hash); /// ``` #[allow(clippy::needless_lifetimes)] - fn multihash_from_digest<'a, S, D>(digest: &'a D) -> Multihash + fn multihash_from_digest<'a, D, const DIGEST_SIZE: usize>(digest: &'a D) -> Multihash where - S: Size, - D: Digest, + D: Digest, Self: From<&'a D>; } @@ -73,27 +70,35 @@ pub trait MultihashDigest: /// ``` #[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))] #[cfg_attr(feature = "serde-codec", derive(serde::Serialize))] -#[cfg_attr(feature = "serde-codec", serde(bound = "S: Size"))] -#[derive(Clone, Debug, Default, Eq, Ord, PartialEq, PartialOrd)] -pub struct Multihash { +#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] +pub struct Multihash { /// The code of the Multihash. code: u64, /// The actual size of the digest in bytes (not the allocated size). size: u8, /// The digest. - digest: GenericArray, + #[cfg_attr(feature = "serde-codec", serde(with = "BigArray"))] + digest: [u8; S], } -impl Copy for Multihash where >::ArrayType: Copy {} +impl Default for Multihash { + fn default() -> Self { + Self { + code: 0, + size: 0, + digest: [0; S], + } + } +} -impl Multihash { +impl Multihash { /// Wraps the digest in a multihash. pub fn wrap(code: u64, input_digest: &[u8]) -> Result { - if input_digest.len() > S::to_usize() { + if input_digest.len() > S { return Err(Error::InvalidSize(input_digest.len() as _)); } let size = input_digest.len(); - let mut digest = GenericArray::default(); + let mut digest = [0; S]; digest[..size].copy_from_slice(input_digest); Ok(Self { code, @@ -155,7 +160,7 @@ impl Multihash { /// Returns the bytes of a multihash. 
#[cfg(feature = "std")] - pub fn to_bytes(&self) -> Vec { + pub fn to_bytes(self) -> Vec { let mut bytes = Vec::with_capacity(self.size().into()); self.write(&mut bytes) .expect("writing to a vec should never fail"); @@ -165,7 +170,7 @@ impl Multihash { // Don't hash the whole allocated space, but just the actual digest #[allow(clippy::derive_hash_xor_eq)] -impl core::hash::Hash for Multihash { +impl core::hash::Hash for Multihash { fn hash(&self, state: &mut T) { self.code.hash(state); self.digest().hash(state); @@ -173,46 +178,16 @@ impl core::hash::Hash for Multihash { } #[cfg(feature = "std")] -impl From> for Vec { +impl From> for Vec { fn from(multihash: Multihash) -> Self { multihash.to_bytes() } } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::Encode for Multihash { - fn encode_to(&self, dest: &mut EncOut) { - let mut digest = [0; 32]; - digest.copy_from_slice(&self.digest); - self.code.encode_to(dest); - self.size.encode_to(dest); - digest.encode_to(dest); - } -} - -#[cfg(feature = "scale-codec")] -impl parity_scale_codec::EncodeLike for Multihash {} - -#[cfg(feature = "scale-codec")] -impl parity_scale_codec::Decode for Multihash { - fn decode( - input: &mut DecIn, - ) -> Result { - Ok(Multihash { - code: parity_scale_codec::Decode::decode(input)?, - size: parity_scale_codec::Decode::decode(input)?, - digest: { - let digest = <[u8; 32]>::decode(input)?; - GenericArray::clone_from_slice(&digest) - }, - }) - } -} - -#[cfg(feature = "scale-codec")] -impl parity_scale_codec::Encode for Multihash { +impl parity_scale_codec::Encode for Multihash { fn encode_to(&self, dest: &mut EncOut) { - let mut digest = [0; 64]; + let mut digest = [0; S]; digest.copy_from_slice(&self.digest); self.code.encode_to(dest); self.size.encode_to(dest); @@ -221,20 +196,17 @@ impl parity_scale_codec::Encode for Multihash { } #[cfg(feature = "scale-codec")] -impl parity_scale_codec::EncodeLike for Multihash {} +impl parity_scale_codec::EncodeLike for Multihash {} 
#[cfg(feature = "scale-codec")] -impl parity_scale_codec::Decode for Multihash { +impl parity_scale_codec::Decode for Multihash { fn decode( input: &mut DecIn, ) -> Result { Ok(Multihash { code: parity_scale_codec::Decode::decode(input)?, size: parity_scale_codec::Decode::decode(input)?, - digest: { - let digest = <[u8; 64]>::decode(input)?; - GenericArray::clone_from_slice(&digest) - }, + digest: <[u8; S]>::decode(input)?, }) } } @@ -266,21 +238,20 @@ where /// /// Currently the maximum size for a digest is 255 bytes. #[cfg(feature = "std")] -pub fn read_multihash(mut r: R) -> Result<(u64, u8, GenericArray), Error> +pub fn read_multihash(mut r: R) -> Result<(u64, u8, [u8; S]), Error> where R: std::io::Read, - S: Size, { use unsigned_varint::io::read_u64; let code = read_u64(&mut r)?; let size = read_u64(&mut r)?; - if size > S::to_u64() || size > u8::MAX as u64 { + if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } - let mut digest = GenericArray::default(); + let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; Ok((code, size as u8, digest)) } @@ -304,16 +275,16 @@ mod tests { fn test_scale() { use parity_scale_codec::{Decode, Encode}; - let mh = Multihash::::default(); + let mh = Multihash::<32>::default(); let bytes = mh.encode(); - let mh2: Multihash = Decode::decode(&mut &bytes[..]).unwrap(); + let mh2: Multihash<32> = Decode::decode(&mut &bytes[..]).unwrap(); assert_eq!(mh, mh2); } #[test] #[cfg(feature = "serde-codec")] fn test_serde() { - let mh = Multihash::::default(); + let mh = Multihash::<32>::default(); let bytes = serde_json::to_string(&mh).unwrap(); let mh2 = serde_json::from_str(&bytes).unwrap(); assert_eq!(mh, mh2); diff --git a/src/multihash_impl.rs b/src/multihash_impl.rs index 3e2aa879..f52e0e03 100644 --- a/src/multihash_impl.rs +++ b/src/multihash_impl.rs @@ -7,73 +7,73 @@ use multihash_derive::Multihash; /// /// [`Multihash` derive]: crate::derive #[derive(Copy, Clone, Debug, Eq, 
Multihash, PartialEq)] -#[mh(alloc_size = crate::U64)] +#[mh(alloc_size = 64)] pub enum Code { /// SHA-256 (32-byte hash size) #[cfg(feature = "sha2")] - #[mh(code = 0x12, hasher = crate::Sha2_256, digest = crate::Sha2Digest)] + #[mh(code = 0x12, hasher = crate::Sha2_256, digest = crate::Sha2Digest<32>)] Sha2_256, /// SHA-512 (64-byte hash size) #[cfg(feature = "sha2")] - #[mh(code = 0x13, hasher = crate::Sha2_512, digest = crate::Sha2Digest)] + #[mh(code = 0x13, hasher = crate::Sha2_512, digest = crate::Sha2Digest<64>)] Sha2_512, /// SHA3-224 (28-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x17, hasher = crate::Sha3_224, digest = crate::Sha3Digest)] + #[mh(code = 0x17, hasher = crate::Sha3_224, digest = crate::Sha3Digest<28>)] Sha3_224, /// SHA3-256 (32-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x16, hasher = crate::Sha3_256, digest = crate::Sha3Digest)] + #[mh(code = 0x16, hasher = crate::Sha3_256, digest = crate::Sha3Digest<32>)] Sha3_256, /// SHA3-384 (48-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x15, hasher = crate::Sha3_384, digest = crate::Sha3Digest)] + #[mh(code = 0x15, hasher = crate::Sha3_384, digest = crate::Sha3Digest<48>)] Sha3_384, /// SHA3-512 (64-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x14, hasher = crate::Sha3_512, digest = crate::Sha3Digest)] + #[mh(code = 0x14, hasher = crate::Sha3_512, digest = crate::Sha3Digest<64>)] Sha3_512, /// Keccak-224 (28-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1a, hasher = crate::Keccak224, digest = crate::KeccakDigest)] + #[mh(code = 0x1a, hasher = crate::Keccak224, digest = crate::KeccakDigest<28>)] Keccak224, /// Keccak-256 (32-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1b, hasher = crate::Keccak256, digest = crate::KeccakDigest)] + #[mh(code = 0x1b, hasher = crate::Keccak256, digest = crate::KeccakDigest<32>)] Keccak256, /// Keccak-384 (48-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1c, hasher = crate::Keccak384, 
digest = crate::KeccakDigest)] + #[mh(code = 0x1c, hasher = crate::Keccak384, digest = crate::KeccakDigest<48>)] Keccak384, /// Keccak-512 (64-byte hash size) #[cfg(feature = "sha3")] - #[mh(code = 0x1d, hasher = crate::Keccak512, digest = crate::KeccakDigest)] + #[mh(code = 0x1d, hasher = crate::Keccak512, digest = crate::KeccakDigest<64>)] Keccak512, /// BLAKE2b-256 (32-byte hash size) #[cfg(feature = "blake2b")] - #[mh(code = 0xb220, hasher = crate::Blake2b256, digest = crate::Blake2bDigest)] + #[mh(code = 0xb220, hasher = crate::Blake2b256, digest = crate::Blake2bDigest<32>)] Blake2b256, /// BLAKE2b-512 (64-byte hash size) #[cfg(feature = "blake2b")] - #[mh(code = 0xb240, hasher = crate::Blake2b512, digest = crate::Blake2bDigest)] + #[mh(code = 0xb240, hasher = crate::Blake2b512, digest = crate::Blake2bDigest<64>)] Blake2b512, /// BLAKE2s-128 (16-byte hash size) #[cfg(feature = "blake2s")] - #[mh(code = 0xb250, hasher = crate::Blake2s128, digest = crate::Blake2sDigest)] + #[mh(code = 0xb250, hasher = crate::Blake2s128, digest = crate::Blake2sDigest<16>)] Blake2s128, /// BLAKE2s-256 (32-byte hash size) #[cfg(feature = "blake2s")] - #[mh(code = 0xb260, hasher = crate::Blake2s256, digest = crate::Blake2sDigest)] + #[mh(code = 0xb260, hasher = crate::Blake2s256, digest = crate::Blake2sDigest<32>)] Blake2s256, /// BLAKE3-256 (32-byte hash size) #[cfg(feature = "blake3")] - #[mh(code = 0x1e, hasher = crate::Blake3_256, digest = crate::Blake3Digest)] + #[mh(code = 0x1e, hasher = crate::Blake3_256, digest = crate::Blake3Digest<32>)] Blake3_256, // The following hashes are not cryptographically secure hashes and are not enabled by default /// Identity hash (max. 
64 bytes) #[cfg(feature = "identity")] - #[mh(code = 0x00, hasher = crate::IdentityHasher::, digest = crate::IdentityDigest)] + #[mh(code = 0x00, hasher = crate::IdentityHasher::<64>, digest = crate::IdentityDigest<64>)] Identity, } diff --git a/tests/lib.rs b/tests/lib.rs index 56665012..0b63209c 100644 --- a/tests/lib.rs +++ b/tests/lib.rs @@ -5,50 +5,49 @@ use multihash::{ Blake2sDigest, Blake3Digest, Blake3_256, Digest, Error, Hasher, Identity256, IdentityDigest, Keccak224, Keccak256, Keccak384, Keccak512, KeccakDigest, MultihashDigest, MultihashGeneric, Sha1, Sha1Digest, Sha2Digest, Sha2_256, Sha2_512, Sha3Digest, Sha3_224, Sha3_256, Sha3_384, - Sha3_512, Size, StatefulHasher, Strobe256, Strobe512, StrobeDigest, U16, U20, U28, U32, U48, - U64, + Sha3_512, StatefulHasher, Strobe256, Strobe512, StrobeDigest, }; #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] -#[mh(alloc_size = U64)] +#[mh(alloc_size = 64)] pub enum Code { - #[mh(code = 0x00, hasher = Identity256, digest = IdentityDigest)] + #[mh(code = 0x00, hasher = Identity256, digest = IdentityDigest<32>)] Identity, - #[mh(code = 0x11, hasher = Sha1, digest = Sha1Digest)] + #[mh(code = 0x11, hasher = Sha1, digest = Sha1Digest<20>)] Sha1, - #[mh(code = 0x12, hasher = Sha2_256, digest = Sha2Digest)] + #[mh(code = 0x12, hasher = Sha2_256, digest = Sha2Digest<32>)] Sha2_256, - #[mh(code = 0x13, hasher = Sha2_512, digest = Sha2Digest)] + #[mh(code = 0x13, hasher = Sha2_512, digest = Sha2Digest<64>)] Sha2_512, - #[mh(code = 0x17, hasher = Sha3_224, digest = Sha3Digest)] + #[mh(code = 0x17, hasher = Sha3_224, digest = Sha3Digest<28>)] Sha3_224, - #[mh(code = 0x16, hasher = Sha3_256, digest = Sha3Digest)] + #[mh(code = 0x16, hasher = Sha3_256, digest = Sha3Digest<32>)] Sha3_256, - #[mh(code = 0x15, hasher = Sha3_384, digest = Sha3Digest)] + #[mh(code = 0x15, hasher = Sha3_384, digest = Sha3Digest<48>)] Sha3_384, - #[mh(code = 0x14, hasher = Sha3_512, digest = Sha3Digest)] + #[mh(code = 0x14, hasher = 
Sha3_512, digest = Sha3Digest<64>)] Sha3_512, - #[mh(code = 0x1a, hasher = Keccak224, digest = KeccakDigest)] + #[mh(code = 0x1a, hasher = Keccak224, digest = KeccakDigest<28>)] Keccak224, - #[mh(code = 0x1b, hasher = Keccak256, digest = KeccakDigest)] + #[mh(code = 0x1b, hasher = Keccak256, digest = KeccakDigest<32>)] Keccak256, - #[mh(code = 0x1c, hasher = Keccak384, digest = KeccakDigest)] + #[mh(code = 0x1c, hasher = Keccak384, digest = KeccakDigest<48>)] Keccak384, - #[mh(code = 0x1d, hasher = Keccak512, digest = KeccakDigest)] + #[mh(code = 0x1d, hasher = Keccak512, digest = KeccakDigest<64>)] Keccak512, - #[mh(code = 0xb220, hasher = Blake2b256, digest = Blake2bDigest)] + #[mh(code = 0xb220, hasher = Blake2b256, digest = Blake2bDigest<32>)] Blake2b256, - #[mh(code = 0xb240, hasher = Blake2b512, digest = Blake2bDigest)] + #[mh(code = 0xb240, hasher = Blake2b512, digest = Blake2bDigest<64>)] Blake2b512, - #[mh(code = 0xb250, hasher = Blake2s128, digest = Blake2sDigest)] + #[mh(code = 0xb250, hasher = Blake2s128, digest = Blake2sDigest<16>)] Blake2s128, - #[mh(code = 0xb260, hasher = Blake2s256, digest = Blake2sDigest)] + #[mh(code = 0xb260, hasher = Blake2s256, digest = Blake2sDigest<32>)] Blake2s256, - #[mh(code = 0x1e, hasher = Blake3_256, digest = Blake3Digest)] + #[mh(code = 0x1e, hasher = Blake3_256, digest = Blake3Digest<32>)] Blake3_256, - #[mh(code = 0x3312e7, hasher = Strobe256, digest = StrobeDigest)] + #[mh(code = 0x3312e7, hasher = Strobe256, digest = StrobeDigest<16>)] Strobe256, - #[mh(code = 0x3312e8, hasher = Strobe512, digest = StrobeDigest)] + #[mh(code = 0x3312e8, hasher = Strobe512, digest = StrobeDigest<32>)] Strobe512, } @@ -204,9 +203,9 @@ fn assert_roundtrip() { } /// Testing the public interface of `Multihash` and coversions to it -fn multihash_methods(code: Code, prefix: &str, digest_str: &str) +fn multihash_methods(code: Code, prefix: &str, digest_str: &str) where - H: StatefulHasher, + H: StatefulHasher, Code: for<'a> From<&'a 
H::Digest>, { let digest = hex::decode(digest_str).unwrap(); @@ -236,77 +235,81 @@ where #[test] fn test_multihash_methods() { - multihash_methods::(Code::Identity, "000b", "68656c6c6f20776f726c64"); - multihash_methods::( + multihash_methods::( + Code::Identity, + "000b", + "68656c6c6f20776f726c64", + ); + multihash_methods::( Code::Sha1, "1114", "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", ); - multihash_methods::( + multihash_methods::( Code::Sha2_256, "1220", "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9", ); - multihash_methods::( + multihash_methods::( Code::Sha2_512, "1340", "309ecc489c12d6eb4cc40f50c902f2b4d0ed77ee511a7c7a9bcd3ca86d4cd86f989dd35bc5ff499670da34255b45b0cfd830e81f605dcf7dc5542e93ae9cd76f"); - multihash_methods::( + multihash_methods::( Code::Sha3_224, "171C", "dfb7f18c77e928bb56faeb2da27291bd790bc1045cde45f3210bb6c5", ); - multihash_methods::( + multihash_methods::( Code::Sha3_256, "1620", "644bcc7e564373040999aac89e7622f3ca71fba1d972fd94a31c3bfbf24e3938", ); - multihash_methods::( + multihash_methods::( Code::Sha3_384, "1530", "83bff28dde1b1bf5810071c6643c08e5b05bdb836effd70b403ea8ea0a634dc4997eb1053aa3593f590f9c63630dd90b"); - multihash_methods::( + multihash_methods::( Code::Sha3_512, "1440", "840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a"); - multihash_methods::( + multihash_methods::( Code::Keccak224, "1A1C", "25f3ecfebabe99686282f57f5c9e1f18244cfee2813d33f955aae568", ); - multihash_methods::( + multihash_methods::( Code::Keccak256, "1B20", "47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad", ); - multihash_methods::( + multihash_methods::( Code::Keccak384, "1C30", "65fc99339a2a40e99d3c40d695b22f278853ca0f925cde4254bcae5e22ece47e6441f91b6568425adc9d95b0072eb49f"); - multihash_methods::( + multihash_methods::( Code::Keccak512, "1D40", 
"3ee2b40047b8060f68c67242175660f4174d0af5c01d47168ec20ed619b0b7c42181f40aa1046f39e2ef9efc6910782a998e0013d172458957957fac9405b67d"); - multihash_methods::( + multihash_methods::( Code::Blake2b512, "c0e40240", "021ced8799296ceca557832ab941a50b4a11f83478cf141f51f933f653ab9fbcc05a037cddbed06e309bf334942c4e58cdf1a46e237911ccd7fcf9787cbc7fd0"); - multihash_methods::( + multihash_methods::( Code::Blake2s256, "e0e40220", "9aec6806794561107e594b1f6a8a6b0c92a0cba9acf5e5e93cca06f781813b0b", ); - multihash_methods::( + multihash_methods::( Code::Blake2b256, "a0e40220", "256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610", ); - multihash_methods::( + multihash_methods::( Code::Blake2s128, "d0e40210", "37deae0226c30da2ab424a7b8ee14e83", ); - multihash_methods::( + multihash_methods::( Code::Blake3_256, "1e20", "d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24",