diff --git a/Cargo.toml b/Cargo.toml index 13295f766..4d23f49f8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,6 +20,7 @@ lazy_static = "0.1.*" secp256k1 = "0.5.1" rust-crypto = "0.2.34" elastic-array = "0.4" +heapsize = "0.2" [dev-dependencies] json-tests = { path = "json-tests" } diff --git a/src/db.rs b/src/db.rs deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/hash.rs b/src/hash.rs index b51bbdacf..e86f8e834 100644 --- a/src/hash.rs +++ b/src/hash.rs @@ -43,19 +43,17 @@ macro_rules! impl_hash { impl Deref for $from { type Target = [u8]; + #[inline] fn deref(&self) -> &[u8] { - unsafe { - ::std::slice::from_raw_parts(self.0.as_ptr(), $size) - } + &self.0 } } impl DerefMut for $from { + #[inline] fn deref_mut(&mut self) -> &mut [u8] { - unsafe { - ::std::slice::from_raw_parts_mut(self.0.as_mut_ptr(), $size) - } + &mut self.0 } } @@ -327,6 +325,7 @@ impl_hash!(H64, 8); impl_hash!(H128, 16); impl_hash!(Address, 20); impl_hash!(H256, 32); +impl_hash!(H264, 33); impl_hash!(H512, 64); impl_hash!(H520, 65); impl_hash!(H1024, 128); diff --git a/src/heapsizeof.rs b/src/heapsizeof.rs new file mode 100644 index 000000000..c6d4cace4 --- /dev/null +++ b/src/heapsizeof.rs @@ -0,0 +1,5 @@ +use uint::*; +use hash::*; + +known_heap_size!(0, H32, H64, H128, Address, H256, H264, H512, H520, H1024, H2048); +known_heap_size!(0, U128, U256); diff --git a/src/lib.rs b/src/lib.rs index 12998529a..9120f0977 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -32,10 +32,14 @@ extern crate mio; extern crate rand; extern crate rocksdb; extern crate tiny_keccak; + +#[macro_use] +extern crate heapsize; #[macro_use] extern crate log; #[macro_use] extern crate lazy_static; + extern crate env_logger; extern crate time; @@ -50,7 +54,6 @@ pub mod uint; pub mod bytes; pub mod rlp; pub mod vector; -pub mod db; pub mod sha3; pub mod hashdb; pub mod memorydb; @@ -61,5 +64,7 @@ pub mod crypto; pub mod triehash; pub mod trie; pub mod nibbleslice; +pub mod heapsizeof; +pub mod squeeze; 
//pub mod network; diff --git a/src/rlp/rlperrors.rs b/src/rlp/rlperrors.rs index ada3c2a47..4ee41a2ce 100644 --- a/src/rlp/rlperrors.rs +++ b/src/rlp/rlperrors.rs @@ -8,6 +8,7 @@ pub enum DecoderError { RlpIsTooShort, RlpExpectedToBeList, RlpExpectedToBeData, + RlpIncorrectListLen } impl StdError for DecoderError { diff --git a/src/rlp/rlptraits.rs b/src/rlp/rlptraits.rs index 4f9ad09e5..067c438bf 100644 --- a/src/rlp/rlptraits.rs +++ b/src/rlp/rlptraits.rs @@ -1,11 +1,10 @@ use rlp::DecoderError; -pub trait Decoder { +pub trait Decoder: Sized { fn read_value(&self, f: F) -> Result where F: FnOnce(&[u8]) -> Result; - fn read_list(&self, f: F) -> Result - where F: FnOnce(&[Self]) -> Result; + fn as_list(&self) -> Result, DecoderError>; } pub trait Decodable: Sized { diff --git a/src/rlp/tests.rs b/src/rlp/tests.rs index e44953e07..b4a60a3de 100644 --- a/src/rlp/tests.rs +++ b/src/rlp/tests.rs @@ -343,3 +343,11 @@ fn test_rlp_json() { }); } +#[test] +fn test_decoding_array() { + let v = vec![5u16, 2u16]; + let res = rlp::encode(&v); + let arr: [u16; 2] = rlp::decode(&res); + assert_eq!(arr[0], 5); + assert_eq!(arr[1], 2); +} diff --git a/src/rlp/untrusted_rlp.rs b/src/rlp/untrusted_rlp.rs index ec68fce99..a8cecf09f 100644 --- a/src/rlp/untrusted_rlp.rs +++ b/src/rlp/untrusted_rlp.rs @@ -305,13 +305,11 @@ impl<'a> Decoder for BasicDecoder<'a> { } } - fn read_list(&self, f: F) -> Result - where F: FnOnce(&[Self]) -> Result { - + fn as_list(&self) -> Result, DecoderError> { let v: Vec> = self.rlp.iter() .map(| i | BasicDecoder::new(i)) .collect(); - f(&v) + Ok(v) } } @@ -325,9 +323,8 @@ impl Decodable for T where T: FromBytes { impl Decodable for Vec where T: Decodable { fn decode(decoder: &D) -> Result where D: Decoder { - decoder.read_list(| decoders | { - decoders.iter().map(|d| T::decode(d)).collect() - }) + let decoders = try!(decoder.as_list()); + decoders.iter().map(|d| T::decode(d)).collect() } } @@ -352,3 +349,38 @@ impl Decodable for Option where T: Decodable 
{ }) } } + +macro_rules! impl_array_decodable { + ($index_type:ty, $len:expr ) => ( + impl Decodable for [T; $len] where T: Decodable { + fn decode(decoder: &D) -> Result where D: Decoder { + let decoders = try!(decoder.as_list()); + + let mut result: [T; $len] = unsafe { ::std::mem::uninitialized() }; + if decoders.len() != $len { + return Err(DecoderError::RlpIncorrectListLen); + } + + for i in 0..decoders.len() { + result[i] = try!(T::decode(&decoders[i])); + } + + Ok(result) + } + } + ) +} + +macro_rules! impl_array_decodable_recursive { + ($index_type:ty, ) => (); + ($index_type:ty, $len:expr, $($more:expr,)*) => ( + impl_array_decodable!($index_type, $len); + impl_array_decodable_recursive!($index_type, $($more,)*); + ); +} + +impl_array_decodable_recursive!( + u8, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 32, 40, 48, 56, 64, 72, 96, 128, 160, 192, 224, +); diff --git a/src/squeeze.rs b/src/squeeze.rs new file mode 100644 index 000000000..e81a13793 --- /dev/null +++ b/src/squeeze.rs @@ -0,0 +1,67 @@ +//! Helper module that should be used to randomly squeeze +//! caches to a given size in bytes +//! +//! ``` +//! extern crate heapsize; +//! extern crate ethcore_util as util; +//! use std::collections::HashMap; +//! use std::mem::size_of; +//! use heapsize::HeapSizeOf; +//! use util::squeeze::Squeeze; +//! +//! fn main() { +//! let initial_size = 60; +//! let mut map: HashMap = HashMap::with_capacity(initial_size); +//! assert!(map.capacity() >= initial_size); +//! for i in 0..initial_size { +//! map.insert(i as u8, i as u8); +//! } +//! +//! assert_eq!(map.heap_size_of_children(), map.capacity() * 2 * size_of::()); +//! assert_eq!(map.len(), initial_size); +//! let initial_heap_size = map.heap_size_of_children(); +//! +//! // squeeze it to size of key and value +//! map.squeeze(2 * size_of::()); +//! assert_eq!(map.len(), 1); +//! +//! 
// it's likely that heap size was reduced, but we can't be 100% sure +//! assert!(initial_heap_size >= map.heap_size_of_children()); +//! } +//! ``` + +use std::collections::HashMap; +use std::hash::Hash; +use heapsize::HeapSizeOf; + +/// Should be used to squeeze collections to a certain size in bytes +pub trait Squeeze { + fn squeeze(&mut self, size: usize); +} + +impl Squeeze for HashMap where K: Eq + Hash + Clone + HeapSizeOf, T: HeapSizeOf { + fn squeeze(&mut self, size: usize) { + if self.len() == 0 { + return + } + + let size_of_entry = self.heap_size_of_children() / self.capacity(); + let all_entries = size_of_entry * self.len(); + let mut shrinked_size = all_entries; + + while self.len() > 0 && shrinked_size > size { + // could be optimized + let key = self.keys().next().unwrap().clone(); + self.remove(&key); + shrinked_size -= size_of_entry; + } + + self.shrink_to_fit(); + + // if we squeezed something, but not enough, squeeze again + if all_entries != shrinked_size && self.heap_size_of_children() > size { + self.squeeze(size); + } + } +} + diff --git a/src/uint.rs b/src/uint.rs index 09b654270..88ed49712 100644 --- a/src/uint.rs +++ b/src/uint.rs @@ -396,7 +396,6 @@ macro_rules! construct_uint { ); } -construct_uint!(U512, 8); construct_uint!(U256, 4); construct_uint!(U128, 2); @@ -410,10 +409,6 @@ impl From for U256 { } } - - - - pub const ZERO_U256: U256 = U256([0x00u64; 4]); pub const ONE_U256: U256 = U256([0x01u64, 0x00u64, 0x00u64, 0x00u64]); pub const BAD_U256: U256 = U256([0xffffffffffffffffu64; 4]);