Merge remote-tracking branch 'origin/master' into triemut

This commit is contained in:
Gav Wood 2015-12-19 12:38:02 +00:00
commit cc192b2264
11 changed files with 134 additions and 22 deletions

View File

@ -20,6 +20,7 @@ lazy_static = "0.1.*"
secp256k1 = "0.5.1" secp256k1 = "0.5.1"
rust-crypto = "0.2.34" rust-crypto = "0.2.34"
elastic-array = "0.4" elastic-array = "0.4"
heapsize = "0.2"
[dev-dependencies] [dev-dependencies]
json-tests = { path = "json-tests" } json-tests = { path = "json-tests" }

View File

View File

@ -43,19 +43,17 @@ macro_rules! impl_hash {
impl Deref for $from { impl Deref for $from {
type Target = [u8]; type Target = [u8];
#[inline] #[inline]
fn deref(&self) -> &[u8] { fn deref(&self) -> &[u8] {
unsafe { &self.0
::std::slice::from_raw_parts(self.0.as_ptr(), $size)
}
} }
} }
impl DerefMut for $from { impl DerefMut for $from {
#[inline] #[inline]
fn deref_mut(&mut self) -> &mut [u8] { fn deref_mut(&mut self) -> &mut [u8] {
unsafe { &mut self.0
::std::slice::from_raw_parts_mut(self.0.as_mut_ptr(), $size)
}
} }
} }
@ -327,6 +325,7 @@ impl_hash!(H64, 8);
impl_hash!(H128, 16); impl_hash!(H128, 16);
impl_hash!(Address, 20); impl_hash!(Address, 20);
impl_hash!(H256, 32); impl_hash!(H256, 32);
impl_hash!(H264, 33);
impl_hash!(H512, 64); impl_hash!(H512, 64);
impl_hash!(H520, 65); impl_hash!(H520, 65);
impl_hash!(H1024, 128); impl_hash!(H1024, 128);

5
src/heapsizeof.rs Normal file
View File

@ -0,0 +1,5 @@
//! Blanket `HeapSizeOf` implementations for the fixed-size numeric types.
use uint::*;
use hash::*;
// All of these types are fixed-size inline arrays with no owned heap
// allocations, so their child heap size is statically zero.
known_heap_size!(0, H32, H64, H128, Address, H256, H264, H512, H520, H1024, H2048);
known_heap_size!(0, U128, U256);

View File

@ -32,10 +32,14 @@ extern crate mio;
extern crate rand; extern crate rand;
extern crate rocksdb; extern crate rocksdb;
extern crate tiny_keccak; extern crate tiny_keccak;
#[macro_use]
extern crate heapsize;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
#[macro_use] #[macro_use]
extern crate lazy_static; extern crate lazy_static;
extern crate env_logger; extern crate env_logger;
extern crate time; extern crate time;
@ -50,7 +54,6 @@ pub mod uint;
pub mod bytes; pub mod bytes;
pub mod rlp; pub mod rlp;
pub mod vector; pub mod vector;
pub mod db;
pub mod sha3; pub mod sha3;
pub mod hashdb; pub mod hashdb;
pub mod memorydb; pub mod memorydb;
@ -61,5 +64,7 @@ pub mod crypto;
pub mod triehash; pub mod triehash;
pub mod trie; pub mod trie;
pub mod nibbleslice; pub mod nibbleslice;
pub mod heapsizeof;
pub mod squeeze;
//pub mod network; //pub mod network;

View File

@ -8,6 +8,7 @@ pub enum DecoderError {
RlpIsTooShort, RlpIsTooShort,
RlpExpectedToBeList, RlpExpectedToBeList,
RlpExpectedToBeData, RlpExpectedToBeData,
RlpIncorrectListLen
} }
impl StdError for DecoderError { impl StdError for DecoderError {

View File

@ -1,11 +1,10 @@
use rlp::DecoderError; use rlp::DecoderError;
pub trait Decoder { pub trait Decoder: Sized {
fn read_value<T, F>(&self, f: F) -> Result<T, DecoderError> fn read_value<T, F>(&self, f: F) -> Result<T, DecoderError>
where F: FnOnce(&[u8]) -> Result<T, DecoderError>; where F: FnOnce(&[u8]) -> Result<T, DecoderError>;
fn read_list<T, F>(&self, f: F) -> Result<T, DecoderError> fn as_list(&self) -> Result<Vec<Self>, DecoderError>;
where F: FnOnce(&[Self]) -> Result<T, DecoderError>;
} }
pub trait Decodable: Sized { pub trait Decodable: Sized {

View File

@ -343,3 +343,11 @@ fn test_rlp_json() {
}); });
} }
#[test]
fn test_decoding_array() {
	// Round-trip: encode a two-element vector, then decode it back into
	// a fixed-size array and check each element survived.
	let input = vec![5u16, 2u16];
	let encoded = rlp::encode(&input);
	let decoded: [u16; 2] = rlp::decode(&encoded);
	assert_eq!(decoded[0], 5);
	assert_eq!(decoded[1], 2);
}

View File

@ -305,13 +305,11 @@ impl<'a> Decoder for BasicDecoder<'a> {
} }
} }
fn read_list<T, F>(&self, f: F) -> Result<T, DecoderError> fn as_list(&self) -> Result<Vec<Self>, DecoderError> {
where F: FnOnce(&[Self]) -> Result<T, DecoderError> {
let v: Vec<BasicDecoder<'a>> = self.rlp.iter() let v: Vec<BasicDecoder<'a>> = self.rlp.iter()
.map(| i | BasicDecoder::new(i)) .map(| i | BasicDecoder::new(i))
.collect(); .collect();
f(&v) Ok(v)
} }
} }
@ -325,9 +323,8 @@ impl<T> Decodable for T where T: FromBytes {
impl<T> Decodable for Vec<T> where T: Decodable { impl<T> Decodable for Vec<T> where T: Decodable {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
decoder.read_list(| decoders | { let decoders = try!(decoder.as_list());
decoders.iter().map(|d| T::decode(d)).collect() decoders.iter().map(|d| T::decode(d)).collect()
})
} }
} }
@ -352,3 +349,38 @@ impl<T> Decodable for Option<T> where T: Decodable {
}) })
} }
} }
// Implements `Decodable` for the fixed-size array `[T; $len]`.
// The incoming RLP list must contain exactly `$len` items, otherwise
// `DecoderError::RlpIncorrectListLen` is returned.
macro_rules! impl_array_decodable {
	($index_type:ty, $len:expr ) => (
		impl<T> Decodable for [T; $len] where T: Decodable {
			fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
				let decoders = try!(decoder.as_list());
				// Validate the length before allocating any storage.
				if decoders.len() != $len {
					return Err(DecoderError::RlpIncorrectListLen);
				}
				// Do NOT use `mem::uninitialized::<[T; $len]>()` here: plain
				// assignment into such an array drops the uninitialized old
				// value, and an early return drops the whole uninitialized
				// array — both undefined behaviour for any `T` with a
				// destructor. `MaybeUninit` sidesteps both hazards.
				let mut result: [::std::mem::MaybeUninit<T>; $len] =
					unsafe { ::std::mem::MaybeUninit::uninit().assume_init() };
				for i in 0..$len {
					match T::decode(&decoders[i]) {
						Ok(value) => { result[i] = ::std::mem::MaybeUninit::new(value); }
						Err(e) => {
							// Drop the elements decoded so far, then bail out.
							for slot in result[..i].iter_mut() {
								unsafe { ::std::ptr::drop_in_place(slot.as_mut_ptr()); }
							}
							return Err(e);
						}
					}
				}
				// SAFETY: all `$len` slots were initialized by the loop above,
				// and `MaybeUninit<T>` has the same layout as `T`.
				Ok(unsafe { ::std::ptr::read(result.as_ptr() as *const [T; $len]) })
			}
		}
	)
}
// Expands `impl_array_decodable!` once per length in the supplied
// comma-terminated list, recursing on the remaining tail and stopping
// when the list is empty.
macro_rules! impl_array_decodable_recursive {
	($index_type:ty, ) => ();
	($index_type:ty, $len:expr, $($more:expr,)*) => (
		impl_array_decodable!($index_type, $len);
		impl_array_decodable_recursive!($index_type, $($more,)*);
	);
}
// Generate `Decodable` impls for the array lengths used in practice:
// every length 0..=32, then selected larger power-of-two-ish sizes.
impl_array_decodable_recursive!(
	u8, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
	16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
	32, 40, 48, 56, 64, 72, 96, 128, 160, 192, 224,
);

67
src/squeeze.rs Normal file
View File

@ -0,0 +1,67 @@
//! Helper module that should be used to randomly squeeze
//! caches to a given size in bytes
//!
//! ```
//! extern crate heapsize;
//! extern crate ethcore_util as util;
//! use std::collections::HashMap;
//! use std::mem::size_of;
//! use heapsize::HeapSizeOf;
//! use util::squeeze::Squeeze;
//!
//! fn main() {
//! let initial_size = 60;
//! let mut map: HashMap<u8, u8> = HashMap::with_capacity(initial_size);
//! assert!(map.capacity() >= initial_size);
//! for i in 0..initial_size {
//! map.insert(i as u8, i as u8);
//! }
//!
//! assert_eq!(map.heap_size_of_children(), map.capacity() * 2 * size_of::<u8>());
//! assert_eq!(map.len(), initial_size);
//! let initial_heap_size = map.heap_size_of_children();
//!
//! // squeeze it to size of key and value
//! map.squeeze(2 * size_of::<u8>());
//! assert_eq!(map.len(), 1);
//!
//! // its likely that heap size was reduced, but we can't be 100% sure
//! assert!(initial_heap_size >= map.heap_size_of_children());
//! }
//! ```
use std::collections::HashMap;
use std::hash::Hash;
use heapsize::HeapSizeOf;
/// Should be used to squeeze collections to certain size in bytes
pub trait Squeeze {
	// Shrinks the collection by evicting entries until its estimated
	// heap usage is at most `size` bytes (best-effort: the estimate is
	// derived from the collection's current footprint).
	fn squeeze(&mut self, size: usize);
}
impl<K, T> Squeeze for HashMap<K, T> where K: Eq + Hash + Clone + HeapSizeOf, T: HeapSizeOf {
	/// Evicts entries until the map's estimated heap footprint fits in
	/// `size` bytes, then releases the excess capacity.
	fn squeeze(&mut self, size: usize) {
		if self.len() == 0 {
			return
		}

		// Approximate the per-entry heap cost from the current footprint.
		let entry_size = self.heap_size_of_children() / self.capacity();
		let occupied = entry_size * self.len();

		let mut remaining = occupied;
		while self.len() > 0 && remaining > size {
			// Eviction order is arbitrary: drop whichever key the iterator
			// yields first. Cloning the key could be optimized away.
			let victim = self.keys().next().unwrap().clone();
			self.remove(&victim);
			remaining -= entry_size;
		}

		self.shrink_to_fit();

		// The per-entry estimate may be low; if we evicted something but the
		// real footprint is still over budget, take another pass.
		if occupied != remaining && self.heap_size_of_children() > size {
			self.squeeze(size);
		}
	}
}

View File

@ -396,7 +396,6 @@ macro_rules! construct_uint {
); );
} }
construct_uint!(U512, 8);
construct_uint!(U256, 4); construct_uint!(U256, 4);
construct_uint!(U128, 2); construct_uint!(U128, 2);
@ -410,10 +409,6 @@ impl From<U128> for U256 {
} }
} }
pub const ZERO_U256: U256 = U256([0x00u64; 4]); pub const ZERO_U256: U256 = U256([0x00u64; 4]);
pub const ONE_U256: U256 = U256([0x01u64, 0x00u64, 0x00u64, 0x00u64]); pub const ONE_U256: U256 = U256([0x01u64, 0x00u64, 0x00u64, 0x00u64]);
pub const BAD_U256: U256 = U256([0xffffffffffffffffu64; 4]); pub const BAD_U256: U256 = U256([0xffffffffffffffffu64; 4]);