Merge branch 'master' of github.com:gavofyork/ethcore-util

This commit is contained in:
Gav Wood 2015-11-29 18:45:53 +01:00
commit 962fa4d812
3 changed files with 114 additions and 16 deletions

View File

@ -446,7 +446,7 @@ impl RlpStream {
stream
}
/// apends value to the end of stream, chainable
/// Appends value to the end of stream, chainable.
pub fn append<'a, E>(&'a mut self, object: &E) -> &'a mut RlpStream
where E: Encodable
{
@ -454,13 +454,13 @@ impl RlpStream {
object.encode(&mut self.encoder);
// if list is finished, prepend the length
self.try_to_finish();
self.try_to_finish(1);
// return chainable self
self
}
/// declare appending the list of given size
/// Declares appending a list of the given size, chainable.
pub fn append_list<'a>(&'a mut self, len: usize) -> &'a mut RlpStream {
// push new list
let position = self.encoder.bytes.len();
@ -468,7 +468,7 @@ impl RlpStream {
0 => {
// we may finish, if the appended list len is equal 0
self.encoder.bytes.push(0xc0u8);
self.try_to_finish();
self.try_to_finish(1);
}
_ => self.unfinished_lists.push_back(ListInfo::new(position, len)),
}
@ -477,6 +477,18 @@ impl RlpStream {
self
}
/// Appends raw (pre-serialised) RLP data. Use with caution. Chainable.
pub fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut RlpStream {
// push raw items
self.encoder.bytes.extend(bytes);
// try to finish and prepend the length
self.try_to_finish(item_count);
// return chainable self
self
}
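// A minimal usage sketch (using only the methods above): `append_raw` bumps the
// enclosing list's item count by `item_count`, so a pre-serialised item closes
// a declared list just like a typed `append` would.
//
//     let mut stream = RlpStream::new();
//     stream.append_list(2);
//     stream.append(&"cat");                       // typed item
//     stream.append_raw(&rlp::encode(&"dog"), 1);  // pre-serialised item
//     assert!(stream.is_finished());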
/// return true if stream is ready
pub fn is_finished(&self) -> bool {
self.unfinished_lists.back().is_none()
@ -491,11 +503,14 @@ impl RlpStream {
}
/// try to finish lists
fn try_to_finish(&mut self) -> () {
fn try_to_finish(&mut self, inserted_items: usize) -> () {
let should_finish = match self.unfinished_lists.back_mut() {
None => false,
Some(ref mut x) => {
x.current += 1;
x.current += inserted_items;
if x.current > x.max {
panic!("You cannot append more items than you expected!");
}
x.current == x.max
}
};
@ -504,7 +519,7 @@ impl RlpStream {
let x = self.unfinished_lists.pop_back().unwrap();
let len = self.encoder.bytes.len() - x.position;
self.encoder.insert_list_len_at_pos(len, x.position);
self.try_to_finish();
self.try_to_finish(1);
}
}
}
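// A minimal sketch of the intended behaviour: lists are finished from the inside
// out. Completing an inner list prepends its length and counts as one item of
// the enclosing list, which may then finish as well.
//
//     let mut stream = RlpStream::new();
//     stream.append_list(1);   // outer list expects one item
//     stream.append_list(1);   // inner list expects one item
//     stream.append(&"cat");   // completes the inner list, then the outer one
//     assert!(stream.is_finished());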

View File

@ -1,10 +1,12 @@
//! Generate trie root.
//use std::collections::HashMap;
//use hash::*;
//use rlp;
use std::collections::BTreeMap;
use std::cmp;
use hash::*;
use sha3::*;
use rlp;
use rlp::RlpStream;
///
/// Hex-prefix Notation. First nibble has flags: oddness = 2^0 & termination = 2^1.
///
/// The "termination marker" and "leaf-node" specifier are completely equivalent.
@ -57,8 +59,8 @@
/// }
/// ```
///
pub fn hex_prefix_encode(hex: &[u8], leaf: bool) -> Vec<u8> {
let inlen = hex.len();
pub fn hex_prefix_encode(nibbles: &[u8], leaf: bool) -> Vec<u8> {
let inlen = nibbles.len();
let oddness_factor = inlen % 2;
// next even number divided by two
let reslen = (inlen + 2) >> 1;
@ -68,7 +70,7 @@ pub fn hex_prefix_encode(hex: &[u8], leaf: bool) -> Vec<u8> {
let first_byte = {
let mut bits = ((inlen as u8 & 1) + (2 * leaf as u8)) << 4;
if oddness_factor == 1 {
bits += hex[0];
bits += nibbles[0];
}
bits
};
@ -77,7 +79,7 @@ pub fn hex_prefix_encode(hex: &[u8], leaf: bool) -> Vec<u8> {
let mut offset = oddness_factor;
while offset < inlen {
let byte = (hex[offset] << 4) + hex[offset + 1];
let byte = (nibbles[offset] << 4) + nibbles[offset + 1];
res.push(byte);
offset += 2;
}
@ -85,6 +87,85 @@ pub fn hex_prefix_encode(hex: &[u8], leaf: bool) -> Vec<u8> {
res
}
/// Converts slice of bytes to nibbles.
///
/// ```rust
/// extern crate ethcore_util as util;
/// use util::triehash::*;
///
/// fn main () {
/// let v = vec![0x31, 0x23, 0x45];
/// let e = vec![3, 1, 2, 3, 4, 5];
/// assert_eq!(as_nibbles(&v), e);
/// }
/// ```
pub fn as_nibbles(bytes: &[u8]) -> Vec<u8> {
let mut res = vec![];
res.reserve(bytes.len() * 2);
for i in 0..bytes.len() {
res.push(bytes[i] >> 4);
res.push(bytes[i] & 0x0f);
}
res
}
struct NibblePair {
nibble: Vec<u8>,
data: Vec<u8>
}
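/// Generates a trie root hash for a vector of values, keyed by the RLP
/// encoding of each value's index.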
pub fn ordered_trie_root(data: Vec<Vec<u8>>) -> H256 {
let vec: Vec<NibblePair> = data
// first put elements into btree to sort them by nibbles
// optimize it later
.into_iter()
.fold(BTreeMap::new(), | mut acc, vec | {
let len = acc.len();
acc.insert(as_nibbles(&rlp::encode(&len)), vec);
acc
})
// then move them to a vector
.into_iter()
.map(|(k, v)| NibblePair { nibble: k, data: v } )
.collect();
let out = match vec.len() {
0 => rlp::encode(&""),
_ => {
let mut stream = RlpStream::new();
hash256rlp(&vec, 0, &mut stream);
stream.out().unwrap()
}
};
out.sha3()
}
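/// Returns the number of leading elements shared by the two slices.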
fn shared_prefix_length<T>(v1: &[T], v2: &[T]) -> usize where T: Eq {
let len = cmp::min(v1.len(), v2.len());
(0..len).take_while(|&i| v1[i] == v2[i]).count()
}
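// Recursively encodes the node for the given sorted (nibbles, value) pairs;
// `pre_len` nibbles of every key are assumed to be consumed by parent nodes.
// Only the empty and single-pair (leaf) cases are handled so far; the
// shared-prefix branch below is unfinished.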
fn hash256rlp(vec: &[NibblePair], pre_len: usize, stream: &mut RlpStream) {
match vec.len() {
0 => stream.append(&""),
1 => stream.append_list(2).append(&hex_prefix_encode(&vec[0].nibble, true)).append(&vec[0].data),
_ => {
let shared_prefix = vec.iter()
// skip first element
.skip(1)
// get minimum number of shared nibbles between first string and each successive
.fold(usize::max_value(), | acc, pair | cmp::min(shared_prefix_length(&vec[0].nibble, &pair.nibble), acc) );
//match shared_prefix > pre_len {
//true => hex_prefix_encode(&vec[0].nibble
//}
unimplemented!();
}
};
}
#[cfg(test)]
mod tests {
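// A couple of concrete checks, added as a sketch: the expected bytes follow
// directly from the hex-prefix rules documented above (flag nibble bit 0 =
// oddness, bit 1 = leaf/termination).
use super::*;
#[test]
fn test_hex_prefix_encode() {
// even length, extension: flag nibble 0, padded with a zero nibble
assert_eq!(hex_prefix_encode(&[1, 2, 3, 4], false), vec![0x00, 0x12, 0x34]);
// odd length, leaf: flag nibble 0b0011 shares its byte with the first data nibble
assert_eq!(hex_prefix_encode(&[1, 2, 3], true), vec![0x31, 0x23]);
}
#[test]
fn test_shared_prefix_length() {
assert_eq!(shared_prefix_length(&[1u8, 2, 3], &[1u8, 2, 4, 5]), 2);
}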
}

View File

@ -1,4 +1,3 @@
use std::ptr;
pub trait InsertSlice<T> {
@ -32,3 +31,6 @@ impl<T> InsertSlice<T> for Vec<T> {
}
}
}
pub trait SharedPrefix {
}