/// Feeds a slice of this type into the state provided.
#[stable(feature = "hash_slice", since = "1.3.0")]
- fn hash_slice<H: Hasher>(data: &[Self], state: &mut H) where Self: Sized {
+ fn hash_slice<H: Hasher>(data: &[Self], state: &mut H)
+ where Self: Sized
+ {
for piece in data {
piece.hash(state);
}
/// Write a single `u8` into this hasher
#[inline]
#[stable(feature = "hasher_write", since = "1.3.0")]
- fn write_u8(&mut self, i: u8) { self.write(&[i]) }
+ fn write_u8(&mut self, i: u8) {
+ self.write(&[i])
+ }
/// Write a single `u16` into this hasher.
#[inline]
#[stable(feature = "hasher_write", since = "1.3.0")]
#[stable(feature = "hasher_write", since = "1.3.0")]
fn write_usize(&mut self, i: usize) {
let bytes = unsafe {
- ::slice::from_raw_parts(&i as *const usize as *const u8,
- mem::size_of::<usize>())
+ ::slice::from_raw_parts(&i as *const usize as *const u8, mem::size_of::<usize>())
};
self.write(bytes);
}
/// Write a single `i8` into this hasher.
#[inline]
#[stable(feature = "hasher_write", since = "1.3.0")]
- fn write_i8(&mut self, i: i8) { self.write_u8(i as u8) }
+ fn write_i8(&mut self, i: i8) {
+ self.write_u8(i as u8)
+ }
/// Write a single `i16` into this hasher.
#[inline]
#[stable(feature = "hasher_write", since = "1.3.0")]
- fn write_i16(&mut self, i: i16) { self.write_u16(i as u16) }
+ fn write_i16(&mut self, i: i16) {
+ self.write_u16(i as u16)
+ }
/// Write a single `i32` into this hasher.
#[inline]
#[stable(feature = "hasher_write", since = "1.3.0")]
- fn write_i32(&mut self, i: i32) { self.write_u32(i as u32) }
+ fn write_i32(&mut self, i: i32) {
+ self.write_u32(i as u32)
+ }
/// Write a single `i64` into this hasher.
#[inline]
#[stable(feature = "hasher_write", since = "1.3.0")]
- fn write_i64(&mut self, i: i64) { self.write_u64(i as u64) }
+ fn write_i64(&mut self, i: i64) {
+ self.write_u64(i as u64)
+ }
/// Write a single `isize` into this hasher.
#[inline]
#[stable(feature = "hasher_write", since = "1.3.0")]
- fn write_isize(&mut self, i: isize) { self.write_usize(i as usize) }
+ fn write_isize(&mut self, i: isize) {
+ self.write_usize(i as usize)
+ }
}
//////////////////////////////////////////////////////////////////////////////
// and simd implementations of SipHash will use vectors
// of v02 and v13. By placing them in this order in the struct,
// the compiler can pick up on just a few simd optimizations by itself.
- v0: u64, // hash state
+ v0: u64, // hash state
v2: u64,
v1: u64,
v3: u64,
tail: u64, // unprocessed bytes le
- ntail: usize, // how many bytes in tail are valid
+ ntail: usize, // how many bytes in tail are valid
}
// sadly, these macro definitions can't appear later,
/// Loads an unaligned, little-endian `u64` from `buf` starting at byte `i`.
///
/// # Safety
///
/// The caller must guarantee `i + 8 <= buf.len()`; this is only checked by
/// a `debug_assert!` and is unchecked in release builds.
unsafe fn load_u64_le(buf: &[u8], i: usize) -> u64 {
    debug_assert!(i + 8 <= buf.len());
    let mut data = 0u64;
    // SAFETY (caller contract): 8 bytes starting at `buf[i]` are in-bounds,
    // and `data` is a distinct local, so the ranges cannot overlap.
    ptr::copy_nonoverlapping(buf.get_unchecked(i), &mut data as *mut _ as *mut u8, 8);
    // `to_le` is a byte-swap on big-endian hosts and a no-op on little-endian
    // ones, yielding the value the LE byte sequence encodes on either host.
    data.to_le()
}
if self.ntail != 0 {
needed = 8 - self.ntail;
if length < needed {
- self.tail |= u8to64_le!(msg, 0, length) << 8*self.ntail;
+ self.tail |= u8to64_le!(msg, 0, length) << 8 * self.ntail;
self.ntail += length;
return
}
- let m = self.tail | u8to64_le!(msg, 0, needed) << 8*self.ntail;
+ let m = self.tail | u8to64_le!(msg, 0, needed) << 8 * self.ntail;
self.v3 ^= m;
compress!(self.v0, self.v1, self.v2, self.v3);