From 65d49f280c7b10765c778fcbbc4bcf0900dbd901 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Wed, 2 Dec 2015 15:03:20 +0100 Subject: [PATCH 01/25] Trie fix. --- src/trie.rs | 142 +++++++++++++++++++++++++++++++----------------- src/triehash.rs | 4 +- 2 files changed, 94 insertions(+), 52 deletions(-) diff --git a/src/trie.rs b/src/trie.rs index 2fb05ccfb..e140b851c 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -27,10 +27,10 @@ pub trait Trie { #[derive(Eq, PartialEq, Debug)] pub enum Node<'a> { - NullRoot, + Empty, Leaf(NibbleSlice<'a>, &'a[u8]), Extension(NibbleSlice<'a>, &'a[u8]), - Branch([Option<&'a[u8]>; 16], Option<&'a [u8]>) + Branch([&'a[u8]; 16], Option<&'a [u8]>) } impl <'a>Node<'a> { @@ -48,14 +48,14 @@ impl <'a>Node<'a> { }, // branch - first 16 are nodes, 17th is a value (or empty). Prototype::List(17) => { - let mut nodes: [Option<&'a [u8]>; 16] = unsafe { ::std::mem::uninitialized() }; + let mut nodes: [&'a [u8]; 16] = unsafe { ::std::mem::uninitialized() }; for i in 0..16 { - nodes[i] = if r.at(i).is_empty() { None } else { Some(r.at(i).raw()) } + nodes[i] = r.at(i).raw(); } Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(r.at(16).data()) }) }, // an empty branch index. - Prototype::Data(0) => Node::NullRoot, + Prototype::Data(0) => Node::Empty, // something went wrong. _ => panic!("Rlp is not valid.") } @@ -78,11 +78,8 @@ impl <'a>Node<'a> { }, Node::Branch(ref nodes, ref value) => { let mut stream = RlpStream::new_list(17); - for i in 0..16 { - match nodes[i] { - Some(n) => { stream.append_raw(n, 1); }, - None => { stream.append_empty_data(); }, - } + for i in 0..16 { + stream.append_raw(nodes[i], 1); } match *value { Some(n) => { stream.append(&n); }, @@ -90,7 +87,7 @@ impl <'a>Node<'a> { } stream.out() }, - Node::NullRoot => { + Node::Empty => { let mut stream = RlpStream::new(); stream.append_empty_data(); stream.out() @@ -155,7 +152,7 @@ impl fmt::Debug for TrieDB { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(writeln!(f, "[")); let root_rlp = self.db.lookup(&self.root).expect("Trie root not found!"); - try!(self.fmt_all(root_rlp, f, 0)); + try!(self.fmt_all(Node::decoded(root_rlp), f, 0)); writeln!(f, "]") } } @@ -198,13 +195,16 @@ impl TrieDB { Ok(()) } - fn fmt_all(&self, node: &[u8], f: &mut fmt::Formatter, deepness: usize) -> fmt::Result { - let node = Node::decoded(node); + fn get_node<'a>(&'a self, node: &'a [u8]) -> Node { + Node::decoded(self.get_raw_or_lookup(node)) + } + + fn fmt_all(&self, node: Node, f: &mut fmt::Formatter, deepness: usize) -> fmt::Result { match node { Node::Leaf(slice, value) => try!(writeln!(f, "-{:?}: {:?}.", slice, value.pretty())), Node::Extension(ref slice, ref item) => { try!(write!(f, "-{:?}- ", slice)); - try!(self.fmt_all(self.get_raw_or_lookup(item), f, deepness)); + try!(self.fmt_all(self.get_node(item), f, deepness)); }, Node::Branch(ref nodes, ref value) => { try!(writeln!(f, "")); @@ -216,18 +216,18 @@ impl TrieDB { &None => {} } for i in 0..16 { - match nodes[i] { - Some(n) => { + match self.get_node(nodes[i]) { + Node::Empty => {}, + n => { try!(self.fmt_indent(f, deepness + 1)); try!(write!(f, "{:x}: ", i)); - try!(self.fmt_all(self.get_raw_or_lookup(n), f, deepness + 1)); - }, - None => {}, + try!(self.fmt_all(n, f, deepness + 1)); + } } } }, // empty - Node::NullRoot => { + Node::Empty => { try!(writeln!(f, "")); } }; @@ -247,12 +247,7 @@ impl TrieDB { }, Node::Branch(ref nodes, value) => match key.is_empty() { true => value, - false => match nodes[key.at(0) as usize] { - Some(payload) => 
{ - self.get_from_node(self.get_raw_or_lookup(payload), &key.mid(1)) - }, - None => None - } + false => self.get_from_node(self.get_raw_or_lookup(nodes[key.at(0) as usize]), &key.mid(1)) }, _ => None } @@ -277,6 +272,16 @@ impl TrieDB { trace!("/"); } + fn delete(&mut self, key: &NibbleSlice) { + trace!("DELETE: {:?}", key); + // determine what the new root is, insert new nodes and remove old as necessary. + let mut todo: Diff = Diff::new(); + let root_rlp = self.cleared(self.db.lookup(&self.root).expect("Trie root not found!"), key, &mut todo); + self.apply(todo); + self.set_root_rlp(&root_rlp); + trace!("/"); + } + fn compose_leaf(partial: &NibbleSlice, value: &[u8]) -> Bytes { trace!("compose_leaf {:?} {:?} ({:?})", partial, value.pretty(), partial.encoded(true).pretty()); let mut s = RlpStream::new_list(2); @@ -288,12 +293,12 @@ impl TrieDB { } fn compose_raw(partial: &NibbleSlice, raw_payload: &[u8], is_leaf: bool) -> Bytes { - println!("compose_raw {:?} {:?} {:?} ({:?})", partial, raw_payload.pretty(), is_leaf, partial.encoded(is_leaf)); + trace!("compose_raw {:?} {:?} {:?} ({:?})", partial, raw_payload.pretty(), is_leaf, partial.encoded(is_leaf)); let mut s = RlpStream::new_list(2); s.append(&partial.encoded(is_leaf)); s.append_raw(raw_payload, 1); let r = s.out(); - println!("compose_raw: -> {:?}", r.pretty()); + trace!("compose_raw: -> {:?}", r.pretty()); r } @@ -361,17 +366,18 @@ impl TrieDB { s.out() } - fn transmuted_leaf_to_branch(orig_partial: &NibbleSlice, orig_raw_payload: &[u8], diff: &mut Diff) -> Bytes { + fn transmuted_leaf_to_branch(orig_partial: &NibbleSlice, value: &[u8], diff: &mut Diff) -> Bytes { trace!("transmuted_leaf_to_branch"); let mut s = RlpStream::new_list(17); let index = if orig_partial.is_empty() {16} else {orig_partial.at(0)}; // orig is leaf - orig_raw_payload is data representing the actual value. for i in 0..17 { - if index == i { - // this is our node. - diff.new_node(Self::compose_raw(&orig_partial.mid(if i == 16 {0} else {1}), orig_raw_payload, true), &mut s); - } else { - s.append_empty_data(); + match (index == i, i) { + (true, 16) => // leaf entry - just replace. + { s.append(&value); }, + (true, _) => // easy - original had empty slot. + diff.new_node(Self::compose_leaf(&orig_partial.mid(1), value), &mut s), + (false, _) => { s.append_empty_data(); } } } s.out() @@ -383,7 +389,7 @@ impl TrieDB { fn transmuted_to_branch_and_augmented(&self, orig_is_leaf: bool, orig_partial: &NibbleSlice, orig_raw_payload: &[u8], partial: &NibbleSlice, value: &[u8], diff: &mut Diff) -> Bytes { trace!("transmuted_to_branch_and_augmented"); let intermediate = match orig_is_leaf { - true => Self::transmuted_leaf_to_branch(orig_partial, orig_raw_payload, diff), + true => Self::transmuted_leaf_to_branch(orig_partial, Rlp::new(orig_raw_payload).data(), diff), false => Self::transmuted_extension_to_branch(orig_partial, orig_raw_payload, diff), }; self.augmented(&intermediate, partial, value, diff) @@ -420,7 +426,7 @@ impl TrieDB { /// The database will be updated so as to make the returned RLP valid through inserting /// and deleting nodes as necessary. /// - /// **This operation will not insert the new node now destroy the original.** + /// **This operation will not insert the new node nor destroy the original.** fn augmented(&self, old: &[u8], partial: &NibbleSlice, value: &[u8], diff: &mut Diff) -> Bytes { trace!("augmented (old: {:?}, partial: {:?}, value: {:?})", old.pretty(), partial, value.pretty()); // already have an extension. 
either fast_forward, cleve or transmute_to_branch. @@ -486,6 +492,25 @@ impl TrieDB { _ => panic!("Invalid RLP for node: {:?}", old.pretty()), } } + + + /// Determine the RLP of the node, assuming we're removing `partial` from the + /// node currently of data `old`. This will *not* delete any hash of `old` from the database; + /// it will just return the new RLP that represents the new node. + /// + /// The database will be updated so as to make the returned RLP valid through inserting + /// and deleting nodes as necessary. + /// + /// **This operation will not insert the new node nor destroy the original.** + fn cleared(&self, old: &[u8], partial: &NibbleSlice, _diff: &mut Diff) -> Bytes { + trace!("cleared (old: {:?}, partial: {:?})", old.pretty(), partial); + + unimplemented!(); + +/* match (Node::decoded(old)) { + + }*/ + } } impl Trie for TrieDB { @@ -503,8 +528,8 @@ impl Trie for TrieDB { self.add(&NibbleSlice::new(key), value); } - fn remove(&mut self, _key: &[u8]) { - unimplemented!(); + fn remove(&mut self, key: &[u8]) { + self.delete(&NibbleSlice::new(key)); } } @@ -512,12 +537,12 @@ impl Trie for TrieDB { mod tests { use rustc_serialize::hex::FromHex; use triehash::*; - use hash::*; use super::*; use nibbleslice::*; use rlp; use env_logger; use rand::random; + use bytes::ToPretty; #[test] fn test_node_leaf() { @@ -546,7 +571,7 @@ mod tests { #[test] fn test_node_empty_branch() { - let branch = Node::Branch([None; 16], None); + let branch = Node::Branch([&b""[..]; 16], None); let rlp = branch.encoded(); let branch2 = Node::decoded(&rlp); assert_eq!(branch, branch2); @@ -555,8 +580,8 @@ mod tests { #[test] fn test_node_branch() { let k = rlp::encode(&"cat"); - let mut nodes: [Option<&[u8]>; 16] = unsafe { ::std::mem::uninitialized() }; - for i in 0..16 { nodes[i] = Some(&k); } + let mut nodes: [&[u8]; 16] = unsafe { ::std::mem::uninitialized() }; + for i in 0..16 { nodes[i] = &k; } let v: Vec = From::from("dog"); let branch = Node::Branch(nodes, Some(&v)); let rlp = branch.encoded(); @@ -631,20 +656,37 @@ mod tests { #[test] fn stress() { - for _ in 0..1000 { + for _ in 0..10000 { let mut x: Vec<(Vec, Vec)> = Vec::new(); - for j in 0..100u32 { + for j in 0..4u32 { let key = random_key(); x.push((key, rlp::encode(&j))); } let real = trie_root(x.clone()); - - let mem = trie_root_mem(&x); - assert_eq!(mem, real); + let memtrie = trie_root_mem(&x); + let mut y = x.clone(); + y.sort_by(|ref a, ref b| a.0.cmp(&b.0)); + let memtrie_sorted = trie_root_mem(&y); + if *memtrie.root() != real || *memtrie_sorted.root() != real { + println!("TRIE MISMATCH"); + println!(""); + println!("ORIGINAL... {:?}", memtrie.root()); + for i in x.iter() { + println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); + } + println!("{:?}", memtrie); + println!("SORTED... 
{:?}", memtrie_sorted.root()); + for i in y.iter() { + println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); + } + println!("{:?}", memtrie_sorted); + } + assert_eq!(*memtrie.root(), real); + assert_eq!(*memtrie_sorted.root(), real); } } - fn trie_root_mem(v: &Vec<(Vec, Vec)>) -> H256 { + fn trie_root_mem(v: &Vec<(Vec, Vec)>) -> TrieDB { let mut t = TrieDB::new_memory(); for i in 0..v.len() { @@ -653,7 +695,7 @@ mod tests { t.insert(&key, &val); } - t.root().clone() + t } #[test] diff --git a/src/triehash.rs b/src/triehash.rs index 56dafcb2e..c06cdb249 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -174,7 +174,7 @@ fn hash256rlp(input: &[(Vec, Vec)], pre_len: usize, stream: &mut RlpStre cmp::min(key.shared_prefix_len(&k), acc) }); - println!("shared_prefix: {}, prefix_len: {}", shared_prefix, pre_len); +// println!("shared_prefix: {}, prefix_len: {}", shared_prefix, pre_len); // if shared prefix is higher than current prefix append its // new part of the key to the stream // then recursively append suffixes of all items who had this key @@ -200,7 +200,7 @@ fn hash256rlp(input: &[(Vec, Vec)], pre_len: usize, stream: &mut RlpStre // cout how many successive elements have same next nibble let len = match begin < input.len() { true => input[begin..].iter() - .take_while(| pair | { println!("{:?}", pair.0); pair.0[pre_len] == i }).count(), + .take_while(| pair | { /*println!("{:?}", pair.0);*/ pair.0[pre_len] == i }).count(), //.take_while(|&q| q == i).count(), false => 0 }; From c184758972f4b472afb717648964109b197cdefc Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Wed, 2 Dec 2015 15:04:34 +0100 Subject: [PATCH 02/25] Reduce time needed for stress test. --- src/trie.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/trie.rs b/src/trie.rs index e140b851c..704db94d4 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -656,7 +656,7 @@ mod tests { #[test] fn stress() { - for _ in 0..10000 { + for _ in 0..5000 { let mut x: Vec<(Vec, Vec)> = Vec::new(); for j in 0..4u32 { let key = random_key(); From 4372a1cd55cc68d502789361d9ad3ae9bce57a1a Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 2 Dec 2015 17:00:49 +0100 Subject: [PATCH 03/25] removed submodule --- .gitmodules | 3 --- tests | 1 - 2 files changed, 4 deletions(-) delete mode 100644 .gitmodules delete mode 160000 tests diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 2571e36c6..000000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "tests"] - path = tests - url = https://github.com/ethereum/tests diff --git a/tests b/tests deleted file mode 160000 index 2e4987ad2..000000000 --- a/tests +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2e4987ad2a973e2cf85ef742a8b9bd094363cd18 From 9c85e4d305a569d1f7a23b5878dabb5b3ba2fad2 Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 2 Dec 2015 17:52:06 +0100 Subject: [PATCH 04/25] commented out tests --- src/triehash.rs | 86 ++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 74 insertions(+), 12 deletions(-) diff --git a/src/triehash.rs b/src/triehash.rs index 778dc7f17..ab0c75683 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -278,7 +278,7 @@ fn test_hex_prefix_encode() { #[cfg(test)] mod tests { use std::str::FromStr; - use std::collections::BTreeMap; + use std::collections::HashMap; use rustc_serialize::hex::FromHex; use rustc_serialize::json::Json; use bytes::*; @@ -379,18 +379,80 @@ mod tests { assert_eq!(trie_root(v), H256::from_str("9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100").unwrap()); } - #[test] - 
fn test_triehash_json_trietest_json() { - let data = include_bytes!("../tests/TrieTests/trietest.json"); + //fn hex_or_string(s: &str) -> Vec { + //match s.starts_with("0x") { + //true => s[2..].from_hex().unwrap(), + //false => From::from(s) + //} + //} + + //pub fn yield_json_trietest(data: &[u8], name: &str, insert: &mut I, remove: &mut R) -> H256 where I: FnMut(Vec, Vec), R: FnMut(Vec) { + ////let data = include_bytes!("../tests/TrieTests/trietest.json"); - let s = String::from_bytes(data).unwrap(); - let json = Json::from_str(&s).unwrap(); - let obj = json.as_object().unwrap(); + //let s = String::from_bytes(data).unwrap(); + //let json = Json::from_str(&s).unwrap(); + //let obj = json.as_object().unwrap(); + //println!("here"); + + //let value = &obj[name]; + + //let i = &value["in"]; + //let o = &value["root"]; + + //let root_str = o.as_string().unwrap(); + + //println!("here2"); + //for i in i.as_array().unwrap().iter() { + //let key = hex_or_string(i[0].as_string().unwrap()); + + //match i[1].is_null() { + //true => remove(key), + //false => insert(key, hex_or_string(i[1].as_string().unwrap())) + //}; + //} + + //H256::from_str(&root_str[2..]).unwrap() + //} + + ////fn load_json_trietest(data: &[u8], name: &str) -> (Vec<(Vec, Vec)>, H256) { + ////use std::cell::RefCell; + //let map = RefCell::new(HashMap::new()); + //let root = yield_json_trietest(data, name, &mut | key, value | { + //map.borrow_mut().insert(key, value); + //}, &mut | key | { + //map.borrow_mut().remove(&key); + //}); + + //let res = map.into_inner() + //.into_iter() + //.map(|p| p) + //.collect(); + //(res, root) + //} + + //#[test] + //fn triehash_json_empty_values() { + //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/trietest.json"), "emptyValues"); + //assert_eq!(trie_root(input), root); + //} + + //#[test] + //fn triehash_json_branching_tests() { + //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/trietest.json"), "branchingTests"); + //assert_eq!(trie_root(input), root); + //} + + //#[test] + //fn triehash_json_jeff_tests() { + //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/trietest.json"), "jeff"); + //assert_eq!(trie_root(input), root); + //} + + ////#[test] + ////fn triehash_json_test1() { + //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/hex_encoded_securetrie_test.json"), "test1"); + //assert_eq!(trie_root(input), root); + //} - for (key, value) in obj.iter() { - println!("running test: {}", key); - } - assert!(false); - } } From 2cbbc872ff14f7462f0f06ff01374587b1773a9c Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 2 Dec 2015 21:49:57 +0100 Subject: [PATCH 05/25] json-tests init --- Cargo.toml | 3 + json-tests/Cargo.toml | 8 +++ json-tests/README.md | 15 +++++ json-tests/json/trie/README.md | 35 ++++++++++ json-tests/json/trie/basic.json | 11 ++++ json-tests/src/lib.rs | 68 +++++++++++++++++++ json-tests/src/trie.rs | 113 ++++++++++++++++++++++++++++++++ src/triehash.rs | 85 +++--------------------- 8 files changed, 263 insertions(+), 75 deletions(-) create mode 100644 json-tests/Cargo.toml create mode 100644 json-tests/README.md create mode 100644 json-tests/json/trie/README.md create mode 100644 json-tests/json/trie/basic.json create mode 100644 json-tests/src/lib.rs create mode 100644 json-tests/src/trie.rs diff --git a/Cargo.toml b/Cargo.toml index de4dcd41b..41ca44580 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,3 +20,6 @@ num = "0.1" lazy_static = "0.1.*" secp256k1 = "0.5.1" 
rust-crypto = "0.2.34" + +[dev-dependencies] +json-tests = { path = "json-tests" } diff --git a/json-tests/Cargo.toml b/json-tests/Cargo.toml new file mode 100644 index 000000000..3185e2e59 --- /dev/null +++ b/json-tests/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "json-tests" +version = "0.1.0" +authors = ["debris "] + +[dependencies] +rustc-serialize = "0.3" +glob = "*" diff --git a/json-tests/README.md b/json-tests/README.md new file mode 100644 index 000000000..170cd588d --- /dev/null +++ b/json-tests/README.md @@ -0,0 +1,15 @@ +# How to write json test file? + +Cause it's very hard to write generic json test files, each subdirectory should follow its own +convention. BUT all json files `within` same directory should be consistent. + +### Test file should always contain a single file with input and output. + +```json +{ + input: ..., + output: ... +} +``` + +As a reference, please use trietests. diff --git a/json-tests/json/trie/README.md b/json-tests/json/trie/README.md new file mode 100644 index 000000000..6d7f479a3 --- /dev/null +++ b/json-tests/json/trie/README.md @@ -0,0 +1,35 @@ +# Trie tests guideline + +Trie test input is an array of operations. Each operation must have 2 fields: + +- `operation` - string, either `insert` or `remove` +- `key` - string, or hex value prefixed with `0x` + +And optional field: + +- `value`- which is used by `insert` operation + +### Example + +```json +{ + "input": + [ + { + "operation": "insert", + "key": "world", + "value": "hello" + }, + { + "operation": "insert", + "key": "0x1234", + "value": "ooooops" + }, + { + "operation": "remove", + "key": "0x1234" + } + ], + "output": "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84" +} +``` diff --git a/json-tests/json/trie/basic.json b/json-tests/json/trie/basic.json new file mode 100644 index 000000000..f737ef337 --- /dev/null +++ b/json-tests/json/trie/basic.json @@ -0,0 +1,11 @@ +{ + "input": + [ + { + "operation": "insert", + "key": "A", + "value": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + } + ], + "output": "0xd23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab" +} diff --git a/json-tests/src/lib.rs b/json-tests/src/lib.rs new file mode 100644 index 000000000..64466084a --- /dev/null +++ b/json-tests/src/lib.rs @@ -0,0 +1,68 @@ +extern crate rustc_serialize; +extern crate glob; + +use std::str::from_utf8; +use std::path::*; +use std::io::prelude::*; +use std::fs::File; +use glob::glob; +use rustc_serialize::*; + +pub mod trie; + +pub trait JsonTest: Sized { + type Input; + type Output; + + fn new(data: &[u8]) -> Self; + fn input(&self) -> Self::Input; + fn output(&self) -> Self::Output; +} + +pub struct JsonLoader { + json: json::Json +} + +impl JsonTest for JsonLoader { + type Input = json::Json; + type Output = json::Json; + + fn new(data: &[u8]) -> Self { + JsonLoader { + json: json::Json::from_str(from_utf8(data).unwrap()).unwrap() + } + } + fn input(&self) -> Self::Input { + self.json.as_object().unwrap()["input"].clone() + } + + fn output(&self) -> Self::Output { + self.json.as_object().unwrap()["output"].clone() + } +} + +pub fn execute_test(data: &[u8], f: &mut F) where T: JsonTest, F: FnMut(T::Input, T::Output) { + let test = T::new(data); + f(test.input(), test.output()) +} + +pub fn execute_test_from_file(path: &Path, f: &mut F) where T: JsonTest, F: FnMut(T::Input, T::Output) { + let mut file = File::open(path).unwrap(); + let mut buffer = vec![]; + let _ = file.read_to_end(&mut buffer); + let test = T::new(&buffer); + f(test.input(), 
test.output()) +} + +pub fn execute_tests_in_directory(pattern: &str, f: &mut F) where T: JsonTest, F: FnMut(String, T::Input, T::Output) { + for path in glob(pattern).unwrap().filter_map(Result::ok) { + execute_test_from_file::(&path, &mut | input, output | { + f(path.to_str().unwrap().to_string(), input, output); + }); + } +} + +#[test] +fn it_works() { +} + diff --git a/json-tests/src/trie.rs b/json-tests/src/trie.rs new file mode 100644 index 000000000..f065085fe --- /dev/null +++ b/json-tests/src/trie.rs @@ -0,0 +1,113 @@ +//! json trie tests +use std::collections::HashMap; +use rustc_serialize::*; +use rustc_serialize::hex::FromHex; +use super::{JsonTest, JsonLoader}; + +pub enum OperationType { + Insert, + Remove +} + +impl Decodable for OperationType { + fn decode(d: &mut D) -> Result where D: Decoder { + match try!(String::decode(d)).as_ref() { + "insert" => Ok(OperationType::Insert), + "remove" => Ok(OperationType::Remove), + other => panic!("invalid operation type: {}", other) + } + } +} + +#[derive(RustcDecodable)] +struct RawOperation { + operation: OperationType, + key: String, + value: Option +} + +pub struct Operation { + pub operation: OperationType, + pub key: Vec, + pub value: Option> +} + +fn hex_or_string(s: &str) -> Vec { + match s.starts_with("0x") { + true => s[2..].from_hex().unwrap(), + false => From::from(s) + } +} + +impl Into for RawOperation { + fn into(self) -> Operation { + Operation { + operation: self.operation, + key: hex_or_string(&self.key), + value: self.value.map(|v| { + hex_or_string(&v) + }) + } + } +} + +pub struct TrieTest { + loader: JsonLoader +} + +impl JsonTest for TrieTest { + type Input = Vec; + type Output = Vec; + + fn new(data: &[u8]) -> Self { + TrieTest { + loader: JsonLoader::new(data) + } + } + + fn input(&self) -> Self::Input { + let mut decoder = json::Decoder::new(self.loader.input()); + let raw: Vec = Decodable::decode(&mut decoder).unwrap(); + raw.into_iter() + .map(|i| i.into()) + .collect() + } + fn output(&self) -> Self::Output { + hex_or_string(self.loader.output().as_string().unwrap()) + } +} + +pub struct TriehashTest { + trietest: TrieTest +} + +impl JsonTest for TriehashTest { + type Input = Vec<(Vec, Vec)>; + type Output = Vec; + + fn new(data: &[u8]) -> Self { + TriehashTest { + trietest: TrieTest::new(data) + } + } + + fn input(&self) -> Self::Input { + self.trietest.input() + .into_iter() + .fold(HashMap::new(), | mut map, o | { + match o.operation { + OperationType::Insert => map.insert(o.key, o.value.unwrap()), + OperationType::Remove => map.remove(&o.key) + }; + map + }) + .into_iter() + .map(|p| { p }) + .collect() + } + + fn output(&self) -> Self::Output { + self.trietest.output() + } +} + diff --git a/src/triehash.rs b/src/triehash.rs index ab0c75683..0467ea284 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -379,80 +379,15 @@ mod tests { assert_eq!(trie_root(v), H256::from_str("9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100").unwrap()); } - //fn hex_or_string(s: &str) -> Vec { - //match s.starts_with("0x") { - //true => s[2..].from_hex().unwrap(), - //false => From::from(s) - //} - //} - - //pub fn yield_json_trietest(data: &[u8], name: &str, insert: &mut I, remove: &mut R) -> H256 where I: FnMut(Vec, Vec), R: FnMut(Vec) { - ////let data = include_bytes!("../tests/TrieTests/trietest.json"); - - //let s = String::from_bytes(data).unwrap(); - //let json = Json::from_str(&s).unwrap(); - //let obj = json.as_object().unwrap(); - //println!("here"); - - //let value = &obj[name]; - - //let i = 
&value["in"]; - //let o = &value["root"]; - - //let root_str = o.as_string().unwrap(); - - //println!("here2"); - //for i in i.as_array().unwrap().iter() { - //let key = hex_or_string(i[0].as_string().unwrap()); - - //match i[1].is_null() { - //true => remove(key), - //false => insert(key, hex_or_string(i[1].as_string().unwrap())) - //}; - //} - - //H256::from_str(&root_str[2..]).unwrap() - //} - - ////fn load_json_trietest(data: &[u8], name: &str) -> (Vec<(Vec, Vec)>, H256) { - ////use std::cell::RefCell; - //let map = RefCell::new(HashMap::new()); - //let root = yield_json_trietest(data, name, &mut | key, value | { - //map.borrow_mut().insert(key, value); - //}, &mut | key | { - //map.borrow_mut().remove(&key); - //}); - - //let res = map.into_inner() - //.into_iter() - //.map(|p| p) - //.collect(); - //(res, root) - //} - - //#[test] - //fn triehash_json_empty_values() { - //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/trietest.json"), "emptyValues"); - //assert_eq!(trie_root(input), root); - //} - - //#[test] - //fn triehash_json_branching_tests() { - //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/trietest.json"), "branchingTests"); - //assert_eq!(trie_root(input), root); - //} - - //#[test] - //fn triehash_json_jeff_tests() { - //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/trietest.json"), "jeff"); - //assert_eq!(trie_root(input), root); - //} - - ////#[test] - ////fn triehash_json_test1() { - //let (input, root) = load_json_trietest(include_bytes!("../tests/TrieTests/hex_encoded_securetrie_test.json"), "test1"); - //assert_eq!(trie_root(input), root); - //} - + extern crate json_tests; + use self::json_tests::*; + #[test] + fn run_trie_tests() { + execute_tests_in_directory::("json-tests/json/trie/*.json", &mut | file, input, output | { + println!("file: {}, output: {:?}", file, output); + assert_eq!(trie_root(input), H256::from_slice(&output)); + }); + } } + From 51bd813734038726cfa04cb888ee91b070400b25 Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 2 Dec 2015 21:51:20 +0100 Subject: [PATCH 06/25] updated gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index eabd0a44e..959045cf9 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ Cargo.lock # Generated by Cargo /target/ +/json-tests/target/ # Vim *.swp From b9181b42bc7dfb8673751e3eb8e84a2fa3ce2156 Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 2 Dec 2015 22:49:47 +0100 Subject: [PATCH 07/25] *.json test files --- json-tests/json/trie/branching.json | 229 +++++++++++++++++++++++++ json-tests/json/trie/dogs.json | 21 +++ json-tests/json/trie/empty.json | 4 + json-tests/json/trie/empty_values.json | 44 +++++ json-tests/json/trie/foo.json | 16 ++ json-tests/json/trie/jeff.json | 58 +++++++ json-tests/src/trie.rs | 1 + src/triehash.rs | 34 ---- 8 files changed, 373 insertions(+), 34 deletions(-) create mode 100644 json-tests/json/trie/branching.json create mode 100644 json-tests/json/trie/dogs.json create mode 100644 json-tests/json/trie/empty.json create mode 100644 json-tests/json/trie/empty_values.json create mode 100644 json-tests/json/trie/foo.json create mode 100644 json-tests/json/trie/jeff.json diff --git a/json-tests/json/trie/branching.json b/json-tests/json/trie/branching.json new file mode 100644 index 000000000..7bbb13f03 --- /dev/null +++ b/json-tests/json/trie/branching.json @@ -0,0 +1,229 @@ +{ + "input": [ + { + "operation": "insert", + "key": 
"0x04110d816c380812a427968ece99b1c963dfbce6", + "value": "something" + }, + { + "operation": "insert", + "key": "0x095e7baea6a6c7c4c2dfeb977efac326af552d87", + "value": "something" + }, + { + "operation": "insert", + "key": "0x0a517d755cebbf66312b30fff713666a9cb917e0", + "value": "something" + }, + { + "operation": "insert", + "key": "0x24dd378f51adc67a50e339e8031fe9bd4aafab36", + "value": "something" + }, + { + "operation": "insert", + "key": "0x293f982d000532a7861ab122bdc4bbfd26bf9030", + "value": "something" + }, + { + "operation": "insert", + "key": "0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", + "value": "something" + }, + { + "operation": "insert", + "key": "0x31c640b92c21a1f1465c91070b4b3b4d6854195f", + "value": "something" + }, + { + "operation": "insert", + "key": "0x37f998764813b136ddf5a754f34063fd03065e36", + "value": "something" + }, + { + "operation": "insert", + "key": "0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", + "value": "something" + }, + { + "operation": "insert", + "key": "0x4f36659fa632310b6ec438dea4085b522a2dd077", + "value": "something" + }, + { + "operation": "insert", + "key": "0x62c01474f089b07dae603491675dc5b5748f7049", + "value": "something" + }, + { + "operation": "insert", + "key": "0x729af7294be595a0efd7d891c9e51f89c07950c7", + "value": "something" + }, + { + "operation": "insert", + "key": "0x83e3e5a16d3b696a0314b30b2534804dd5e11197", + "value": "something" + }, + { + "operation": "insert", + "key": "0x8703df2417e0d7c59d063caa9583cb10a4d20532", + "value": "something" + }, + { + "operation": "insert", + "key": "0x8dffcd74e5b5923512916c6a64b502689cfa65e1", + "value": "something" + }, + { + "operation": "insert", + "key": "0x95a4d7cccb5204733874fa87285a176fe1e9e240", + "value": "something" + }, + { + "operation": "insert", + "key": "0x99b2fcba8120bedd048fe79f5262a6690ed38c39", + "value": "something" + }, + { + "operation": "insert", + "key": "0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", + "value": "something" + }, + { + "operation": "insert", + "key": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", + "value": "something" + }, + { + "operation": "insert", + "key": "0xa9647f4a0a14042d91dc33c0328030a7157c93ae", + "value": "something" + }, + { + "operation": "insert", + "key": "0xaa6cffe5185732689c18f37a7f86170cb7304c2a", + "value": "something" + }, + { + "operation": "insert", + "key": "0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", + "value": "something" + }, + { + "operation": "insert", + "key": "0xc37a43e940dfb5baf581a0b82b351d48305fc885", + "value": "something" + }, + { + "operation": "insert", + "key": "0xd2571607e241ecf590ed94b12d87c94babe36db6", + "value": "something" + }, + { + "operation": "insert", + "key": "0xf735071cbee190d76b704ce68384fc21e389fbe7", + "value": "something" + }, + { + "operation": "remove", + "key": "0x04110d816c380812a427968ece99b1c963dfbce6" + }, + { + "operation": "remove", + "key": "0x095e7baea6a6c7c4c2dfeb977efac326af552d87" + }, + { + "operation": "remove", + "key": "0x0a517d755cebbf66312b30fff713666a9cb917e0" + }, + { + "operation": "remove", + "key": "0x24dd378f51adc67a50e339e8031fe9bd4aafab36" + }, + { + "operation": "remove", + "key": "0x293f982d000532a7861ab122bdc4bbfd26bf9030" + }, + { + "operation": "remove", + "key": "0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5" + }, + { + "operation": "remove", + "key": "0x31c640b92c21a1f1465c91070b4b3b4d6854195f" + }, + { + "operation": "remove", + "key": "0x37f998764813b136ddf5a754f34063fd03065e36" + }, + { + "operation": "remove", + "key": "0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a" + }, 
+ { + "operation": "remove", + "key": "0x4f36659fa632310b6ec438dea4085b522a2dd077" + }, + { + "operation": "remove", + "key": "0x62c01474f089b07dae603491675dc5b5748f7049" + }, + { + "operation": "remove", + "key": "0x729af7294be595a0efd7d891c9e51f89c07950c7" + }, + { + "operation": "remove", + "key": "0x83e3e5a16d3b696a0314b30b2534804dd5e11197" + }, + { + "operation": "remove", + "key": "0x8703df2417e0d7c59d063caa9583cb10a4d20532" + }, + { + "operation": "remove", + "key": "0x8dffcd74e5b5923512916c6a64b502689cfa65e1" + }, + { + "operation": "remove", + "key": "0x95a4d7cccb5204733874fa87285a176fe1e9e240" + }, + { + "operation": "remove", + "key": "0x99b2fcba8120bedd048fe79f5262a6690ed38c39" + }, + { + "operation": "remove", + "key": "0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf" + }, + { + "operation": "remove", + "key": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" + }, + { + "operation": "remove", + "key": "0xa9647f4a0a14042d91dc33c0328030a7157c93ae" + }, + { + "operation": "remove", + "key": "0xaa6cffe5185732689c18f37a7f86170cb7304c2a" + }, + { + "operation": "remove", + "key": "0xaae4a2e3c51c04606dcb3723456e58f3ed214f45" + }, + { + "operation": "remove", + "key": "0xc37a43e940dfb5baf581a0b82b351d48305fc885" + }, + { + "operation": "remove", + "key": "0xd2571607e241ecf590ed94b12d87c94babe36db6" + }, + { + "operation": "remove", + "key": "0xf735071cbee190d76b704ce68384fc21e389fbe7" + }], + "output": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" +} \ No newline at end of file diff --git a/json-tests/json/trie/dogs.json b/json-tests/json/trie/dogs.json new file mode 100644 index 000000000..a6ff7c891 --- /dev/null +++ b/json-tests/json/trie/dogs.json @@ -0,0 +1,21 @@ +{ + "input": + [ + { + "operation": "insert", + "key": "doe", + "value": "reindeer" + }, + { + "operation": "insert", + "key": "dogglesworth", + "value": "cat" + }, + { + "operation": "insert", + "key": "dog", + "value": "puppy" + } + ], + "output": "0x8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3" +} diff --git a/json-tests/json/trie/empty.json b/json-tests/json/trie/empty.json new file mode 100644 index 000000000..ca146df54 --- /dev/null +++ b/json-tests/json/trie/empty.json @@ -0,0 +1,4 @@ +{ + "input": [], + "output": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" +} diff --git a/json-tests/json/trie/empty_values.json b/json-tests/json/trie/empty_values.json new file mode 100644 index 000000000..ac367f913 --- /dev/null +++ b/json-tests/json/trie/empty_values.json @@ -0,0 +1,44 @@ +{ + "input": + [ + { + "operation": "insert", + "key": "do", + "value": "verb" + }, + { + "operation": "insert", + "key": "ether", + "value": "wookiedoo" + }, + { + "operation": "insert", + "key": "horse", + "value": "stallion" + }, + { + "operation": "insert", + "key": "shaman", + "value": "horse" + }, + { + "operation": "insert", + "key": "doge", + "value": "coin" + }, + { + "operation": "remove", + "key": "ether" + }, + { + "operation": "insert", + "key": "dog", + "value": "puppy" + }, + { + "operation": "remove", + "key": "shaman" + } + ], + "output": "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84" +} diff --git a/json-tests/json/trie/foo.json b/json-tests/json/trie/foo.json new file mode 100644 index 000000000..4b8c0a87f --- /dev/null +++ b/json-tests/json/trie/foo.json @@ -0,0 +1,16 @@ +{ + "input": + [ + { + "operation": "insert", + "key": "foo", + "value": "bar" + }, + { + "operation": "insert", + "key": "food", + "value": "bass" + } + ], + "output": 
"0x17beaa1648bafa633cda809c90c04af50fc8aed3cb40d16efbddee6fdf63c4c3" +} diff --git a/json-tests/json/trie/jeff.json b/json-tests/json/trie/jeff.json new file mode 100644 index 000000000..1f3093fad --- /dev/null +++ b/json-tests/json/trie/jeff.json @@ -0,0 +1,58 @@ +{ + "input": [ + { + "operation": "insert", + "key": "0x0000000000000000000000000000000000000000000000000000000000000045", + "value": "0x22b224a1420a802ab51d326e29fa98e34c4f24ea" + }, + { + "operation": "insert", + "key": "0x0000000000000000000000000000000000000000000000000000000000000046", + "value": "0x67706c2076330000000000000000000000000000000000000000000000000000" + }, + { + "operation": "insert", + "key": "0x0000000000000000000000000000000000000000000000000000001234567890", + "value": "0x697c7b8c961b56f675d570498424ac8de1a918f6" + }, + { + "operation": "insert", + "key": "0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", + "value": "0x1234567890" + }, + { + "operation": "insert", + "key": "0x0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2", + "value": "0x4655474156000000000000000000000000000000000000000000000000000000" + }, + { + "operation": "insert", + "key": "0x000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1", + "value": "0x4e616d6552656700000000000000000000000000000000000000000000000000" + }, + { + "operation": "insert", + "key": "0x4655474156000000000000000000000000000000000000000000000000000000", + "value": "0x7ef9e639e2733cb34e4dfc576d4b23f72db776b2" + }, + { + "operation": "insert", + "key": "0x4e616d6552656700000000000000000000000000000000000000000000000000", + "value": "0xec4f34c97e43fbb2816cfd95e388353c7181dab1" + }, + { + "operation": "remove", + "key": "0x0000000000000000000000000000000000000000000000000000001234567890" + }, + { + "operation": "insert", + "key": "0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", + "value": "0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000" + }, + { + "operation": "insert", + "key": "0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000", + "value": "0x697c7b8c961b56f675d570498424ac8de1a918f6" + }], + "output": "0x9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100" +} \ No newline at end of file diff --git a/json-tests/src/trie.rs b/json-tests/src/trie.rs index f065085fe..f6d4eaae1 100644 --- a/json-tests/src/trie.rs +++ b/json-tests/src/trie.rs @@ -72,6 +72,7 @@ impl JsonTest for TrieTest { .map(|i| i.into()) .collect() } + fn output(&self) -> Self::Output { hex_or_string(self.loader.output().as_string().unwrap()) } diff --git a/src/triehash.rs b/src/triehash.rs index 0467ea284..a8b1670d3 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -285,40 +285,6 @@ mod tests { use hash::*; use triehash::*; - #[test] - fn empty_trie_root() { - assert_eq!(trie_root(vec![]), H256::from_str("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").unwrap()); - } - - #[test] - fn single_trie_item() { - let v = vec![(From::from("A"), From::from("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))]; - assert_eq!(trie_root(v), H256::from_str("d23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab").unwrap()); - } - - #[test] - fn foo_trie_item() { - - let v = vec![ - (From::from("foo"), From::from("bar")), - (From::from("food"), From::from("bass")) - ]; - - assert_eq!(trie_root(v), H256::from_str("17beaa1648bafa633cda809c90c04af50fc8aed3cb40d16efbddee6fdf63c4c3").unwrap()); - } - - #[test] - fn dogs_trie_item() { - - let v = vec![ - (From::from("doe"), 
From::from("reindeer")), - (From::from("dog"), From::from("puppy")), - (From::from("dogglesworth"), From::from("cat")), - ]; - - assert_eq!(trie_root(v), H256::from_str("8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3").unwrap()); - } - #[test] fn puppy_trie_items() { From 166b84d489ffe2ddfe2d282be9c096db4c8553e8 Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 2 Dec 2015 22:56:38 +0100 Subject: [PATCH 08/25] removed duplicate tests --- json-tests/src/lib.rs | 6 +---- src/triehash.rs | 62 ++++--------------------------------------- 2 files changed, 6 insertions(+), 62 deletions(-) diff --git a/json-tests/src/lib.rs b/json-tests/src/lib.rs index 64466084a..8a800e8f9 100644 --- a/json-tests/src/lib.rs +++ b/json-tests/src/lib.rs @@ -54,7 +54,7 @@ pub fn execute_test_from_file(path: &Path, f: &mut F) where T: JsonTest, F f(test.input(), test.output()) } -pub fn execute_tests_in_directory(pattern: &str, f: &mut F) where T: JsonTest, F: FnMut(String, T::Input, T::Output) { +pub fn execute_tests_from_directory(pattern: &str, f: &mut F) where T: JsonTest, F: FnMut(String, T::Input, T::Output) { for path in glob(pattern).unwrap().filter_map(Result::ok) { execute_test_from_file::(&path, &mut | input, output | { f(path.to_str().unwrap().to_string(), input, output); @@ -62,7 +62,3 @@ pub fn execute_tests_in_directory(pattern: &str, f: &mut F) where T: JsonT } } -#[test] -fn it_works() { -} - diff --git a/src/triehash.rs b/src/triehash.rs index a8b1670d3..6cfa2103d 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -277,29 +277,13 @@ fn test_hex_prefix_encode() { #[cfg(test)] mod tests { - use std::str::FromStr; - use std::collections::HashMap; - use rustc_serialize::hex::FromHex; - use rustc_serialize::json::Json; - use bytes::*; + extern crate json_tests; + use self::json_tests::*; use hash::*; use triehash::*; #[test] - fn puppy_trie_items() { - - let v = vec![ - (From::from("do"), From::from("verb")), - (From::from("dog"), From::from("puppy")), - (From::from("doge"), From::from("coin")), - (From::from("horse"), From::from("stallion")), - ]; - - assert_eq!(trie_root(v), H256::from_str("5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84").unwrap()); - } - - #[test] - fn out_of_order() { + fn test_trie_out_of_order() { assert!(trie_root(vec![ (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), (vec![0x81u8, 0x23], vec![0x81u8, 0x23]), @@ -313,44 +297,8 @@ mod tests { } #[test] - fn test_trie_root() { - let v = vec![ - - ("0000000000000000000000000000000000000000000000000000000000000045".from_hex().unwrap(), - "22b224a1420a802ab51d326e29fa98e34c4f24ea".from_hex().unwrap()), - - ("0000000000000000000000000000000000000000000000000000000000000046".from_hex().unwrap(), - "67706c2076330000000000000000000000000000000000000000000000000000".from_hex().unwrap()), - - ("000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6".from_hex().unwrap(), - "6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000".from_hex().unwrap()), - - ("0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2".from_hex().unwrap(), - "4655474156000000000000000000000000000000000000000000000000000000".from_hex().unwrap()), - - ("000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1".from_hex().unwrap(), - "4e616d6552656700000000000000000000000000000000000000000000000000".from_hex().unwrap()), - - ("4655474156000000000000000000000000000000000000000000000000000000".from_hex().unwrap(), - "7ef9e639e2733cb34e4dfc576d4b23f72db776b2".from_hex().unwrap()), - - 
("4e616d6552656700000000000000000000000000000000000000000000000000".from_hex().unwrap(), - "ec4f34c97e43fbb2816cfd95e388353c7181dab1".from_hex().unwrap()), - - ("6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000".from_hex().unwrap(), - "697c7b8c961b56f675d570498424ac8de1a918f6".from_hex().unwrap()) - - ]; - - assert_eq!(trie_root(v), H256::from_str("9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100").unwrap()); - } - - extern crate json_tests; - use self::json_tests::*; - - #[test] - fn run_trie_tests() { - execute_tests_in_directory::("json-tests/json/trie/*.json", &mut | file, input, output | { + fn test_trie_json() { + execute_tests_from_directory::("json-tests/json/trie/*.json", &mut | file, input, output | { println!("file: {}, output: {:?}", file, output); assert_eq!(trie_root(input), H256::from_slice(&output)); }); From 8b481d5e11bcdf4c6907da7f02ae7a10d310bf12 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Wed, 2 Dec 2015 22:59:00 +0100 Subject: [PATCH 09/25] Remove. Untested. --- src/nibbleslice.rs | 45 +++- src/trie.rs | 607 ++++++++++++++++++++++++++++++--------------- src/triehash.rs | 4 +- 3 files changed, 443 insertions(+), 213 deletions(-) diff --git a/src/nibbleslice.rs b/src/nibbleslice.rs index 5f2615991..6f4232945 100644 --- a/src/nibbleslice.rs +++ b/src/nibbleslice.rs @@ -30,6 +30,8 @@ use bytes::*; pub struct NibbleSlice<'a> { data: &'a [u8], offset: usize, + data_encode_suffix: &'a [u8], + offset_encode_suffix: usize, } impl<'a, 'view> NibbleSlice<'a> where 'a: 'view { @@ -37,7 +39,26 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view { pub fn new(data: &[u8]) -> NibbleSlice { NibbleSlice::new_offset(data, 0) } /// Create a new nibble slice with the given byte-slice with a nibble offset. - pub fn new_offset(data: &'a [u8], offset: usize) -> NibbleSlice { NibbleSlice{data: data, offset: offset} } + pub fn new_offset(data: &'a [u8], offset: usize) -> NibbleSlice { NibbleSlice{data: data, offset: offset, data_encode_suffix: &b""[..], offset_encode_suffix: 0} } + + /// + pub fn new_composed(a: &'a NibbleSlice, b: &'a NibbleSlice) -> NibbleSlice<'a> { NibbleSlice{data: a.data, offset: a.offset, data_encode_suffix: b.data, offset_encode_suffix: b.offset} } + + /*pub fn new_composed_bytes_offset(a: &NibbleSlice, b: &NibbleSlice) -> (Bytes, usize) { + let r: Vec::with_capacity((a.len() + b.len() + 1) / 2); + let mut i = (a.len() + b.len()) % 2; + while i < a.len() { + match i % 2 { + 0 => , + 1 => , + } + i += 1; + } + while i < a.len() + b.len() { + i += 1; + } + (r, a.len() + b.len()) + }*/ /// Create a new nibble slice from the given HPE encoded data (e.g. output of `encoded()`). pub fn from_encoded(data: &'a [u8]) -> (NibbleSlice, bool) { @@ -48,20 +69,32 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view { pub fn is_empty(&self) -> bool { self.len() == 0 } /// Get the length (in nibbles, naturally) of this slice. - pub fn len(&self) -> usize { self.data.len() * 2 - self.offset } + pub fn len(&self) -> usize { (self.data.len() + self.data_encode_suffix.len()) * 2 - self.offset - self.offset_encode_suffix } /// Get the nibble at position `i`. 
pub fn at(&self, i: usize) -> u8 { - if (self.offset + i) & 1 == 1 { - self.data[(self.offset + i) / 2] & 15u8 + let l = self.data.len() * 2 - self.offset; + if i < l { + if (self.offset + i) & 1 == 1 { + self.data[(self.offset + i) / 2] & 15u8 + } + else { + self.data[(self.offset + i) / 2] >> 4 + } } else { - self.data[(self.offset + i) / 2] >> 4 + let i = i - l; + if (self.offset_encode_suffix + i) & 1 == 1 { + self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] & 15u8 + } + else { + self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] >> 4 + } } } /// Return object which represents a view on to this slice (further) offset by `i` nibbles. - pub fn mid(&'view self, i: usize) -> NibbleSlice<'a> { NibbleSlice{ data: self.data, offset: self.offset + i} } + pub fn mid(&'view self, i: usize) -> NibbleSlice<'a> { NibbleSlice{ data: self.data, offset: self.offset + i, data_encode_suffix: &b""[..], offset_encode_suffix: 0 } } /// Do we start with the same nibbles as the whole of `them`? pub fn starts_with(&self, them: &Self) -> bool { self.common_prefix(them) == them.len() } diff --git a/src/trie.rs b/src/trie.rs index 704db94d4..b205eb842 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -33,74 +33,6 @@ pub enum Node<'a> { Branch([&'a[u8]; 16], Option<&'a [u8]>) } -impl <'a>Node<'a> { - pub fn decoded(node_rlp: &'a [u8]) -> Node<'a> { - let r = Rlp::new(node_rlp); - match r.prototype() { - // either leaf or extension - decode first item with NibbleSlice::??? - // and use is_leaf return to figure out which. - // if leaf, second item is a value (is_data()) - // if extension, second item is a node (either SHA3 to be looked up and - // fed back into this function or inline RLP which can be fed back into this function). - Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0).data()) { - (slice, true) => Node::Leaf(slice, r.at(1).data()), - (slice, false) => Node::Extension(slice, r.at(1).raw()), - }, - // branch - first 16 are nodes, 17th is a value (or empty). - Prototype::List(17) => { - let mut nodes: [&'a [u8]; 16] = unsafe { ::std::mem::uninitialized() }; - for i in 0..16 { - nodes[i] = r.at(i).raw(); - } - Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(r.at(16).data()) }) - }, - // an empty branch index. - Prototype::Data(0) => Node::Empty, - // something went wrong. 
- _ => panic!("Rlp is not valid.") - } - } - - // todo: should check length before encoding, cause it may just be sha3 of data - pub fn encoded(&self) -> Bytes { - match *self { - Node::Leaf(ref slice, ref value) => { - let mut stream = RlpStream::new_list(2); - stream.append(&slice.encoded(true)); - stream.append(value); - stream.out() - }, - Node::Extension(ref slice, ref raw_rlp) => { - let mut stream = RlpStream::new_list(2); - stream.append(&slice.encoded(false)); - stream.append_raw(raw_rlp, 1); - stream.out() - }, - Node::Branch(ref nodes, ref value) => { - let mut stream = RlpStream::new_list(17); - for i in 0..16 { - stream.append_raw(nodes[i], 1); - } - match *value { - Some(n) => { stream.append(&n); }, - None => { stream.append_empty_data(); }, - } - stream.out() - }, - Node::Empty => { - let mut stream = RlpStream::new(); - stream.append_empty_data(); - stream.out() - } - } - } -} - -//enum ValidationResult<'a> { - //Valid, - //Invalid { node: Node<'a>, depth: usize } -//} - enum Operation { New(H256, Bytes), Delete(H256), @@ -137,12 +69,125 @@ impl Diff { } } + fn delete_node_from_slice(&mut self, old: &[u8]) { + let r = Rlp::new(old); + if r.is_data() && r.size() == 32 { + self.0.push(Operation::Delete(H256::decode(&r))); + } + } + fn replace_node(&mut self, old: &Rlp, rlp: Bytes, out: &mut RlpStream) { self.delete_node(old); self.new_node(rlp, out); } } +impl <'a>Node<'a> { + fn decoded(node_rlp: &'a [u8]) -> Node<'a> { + let r = Rlp::new(node_rlp); + match r.prototype() { + // either leaf or extension - decode first item with NibbleSlice::??? + // and use is_leaf return to figure out which. + // if leaf, second item is a value (is_data()) + // if extension, second item is a node (either SHA3 to be looked up and + // fed back into this function or inline RLP which can be fed back into this function). + Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0).data()) { + (slice, true) => Node::Leaf(slice, r.at(1).data()), + (slice, false) => Node::Extension(slice, r.at(1).raw()), + }, + // branch - first 16 are nodes, 17th is a value (or empty). + Prototype::List(17) => { + let mut nodes: [&'a [u8]; 16] = unsafe { ::std::mem::uninitialized() }; + for i in 0..16 { + nodes[i] = r.at(i).raw(); + } + Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(r.at(16).data()) }) + }, + // an empty branch index. + Prototype::Data(0) => Node::Empty, + // something went wrong. 
+ _ => panic!("Rlp is not valid.") + } + } + + // todo: should check length before encoding, cause it may just be sha3 of data + fn encoded(&self) -> Bytes { + match *self { + Node::Leaf(ref slice, ref value) => { + let mut stream = RlpStream::new_list(2); + stream.append(&slice.encoded(true)); + stream.append(value); + stream.out() + }, + Node::Extension(ref slice, ref raw_rlp) => { + let mut stream = RlpStream::new_list(2); + stream.append(&slice.encoded(false)); + stream.append_raw(raw_rlp, 1); + stream.out() + }, + Node::Branch(ref nodes, ref value) => { + let mut stream = RlpStream::new_list(17); + for i in 0..16 { + stream.append_raw(nodes[i], 1); + } + match *value { + Some(n) => { stream.append(&n); }, + None => { stream.append_empty_data(); }, + } + stream.out() + }, + Node::Empty => { + let mut stream = RlpStream::new(); + stream.append_empty_data(); + stream.out() + } + } + } + + fn encoded_and_added(&self, diff: &mut Diff) -> Bytes { + let mut stream = RlpStream::new(); + match *self { + Node::Leaf(ref slice, ref value) => { + stream.append_list(2); + stream.append(&slice.encoded(true)); + stream.append(value); + }, + Node::Extension(ref slice, ref raw_rlp) => { + stream.append_list(2); + stream.append(&slice.encoded(false)); + stream.append_raw(raw_rlp, 1); + }, + Node::Branch(ref nodes, ref value) => { + stream.append_list(17); + for i in 0..16 { + stream.append_raw(nodes[i], 1); + } + match *value { + Some(n) => { stream.append(&n); }, + None => { stream.append_empty_data(); }, + } + }, + Node::Empty => { + stream.append_empty_data(); + } + } + let node = stream.out(); + match node.len() { + 0 ... 31 => node, + _ => { + let mut stream = RlpStream::new(); + diff.new_node(node, &mut stream); + stream.out() + } + } + } +} + +//enum ValidationResult<'a> { + //Valid, + //Invalid { node: Node<'a>, depth: usize } +//} + pub struct TrieDB { db: Box, root: H256, @@ -157,6 +202,11 @@ impl fmt::Debug for TrieDB { } } +enum MaybeChanged<'a> { + Same(Node<'a>), + Changed(Bytes), +} + impl TrieDB { pub fn new_boxed(db_box: Box) -> Self { let mut r = TrieDB{ db: db_box, root: H256::new() }; r.set_root_rlp(&NULL_RLP); r } @@ -201,9 +251,9 @@ impl TrieDB { fn fmt_all(&self, node: Node, f: &mut fmt::Formatter, deepness: usize) -> fmt::Result { match node { - Node::Leaf(slice, value) => try!(writeln!(f, "-{:?}: {:?}.", slice, value.pretty())), + Node::Leaf(slice, value) => try!(writeln!(f, "'{:?}: {:?}.", slice, value.pretty())), Node::Extension(ref slice, ref item) => { - try!(write!(f, "-{:?}- ", slice)); + try!(write!(f, "'{:?} ", slice)); try!(self.fmt_all(self.get_node(item), f, deepness)); }, Node::Branch(ref nodes, ref value) => { @@ -220,7 +270,7 @@ impl TrieDB { Node::Empty => {}, n => { try!(self.fmt_indent(f, deepness + 1)); - try!(write!(f, "{:x}: ", i)); + try!(write!(f, "'{:x} ", i)); try!(self.fmt_all(n, f, deepness + 1)); } } @@ -253,7 +303,7 @@ impl TrieDB { } } - fn get_raw_or_lookup<'a>(&'a self, node: &'a [u8]) -> &'a [u8] { + fn get_raw_or_lookup<'a, 'b>(&'a self, node: &'b [u8]) -> &'b [u8] where 'a: 'b { // check if its sha3 + len let r = Rlp::new(node); match r.is_data() && r.size() == 32 { @@ -276,9 +326,15 @@ impl TrieDB { trace!("DELETE: {:?}", key); // determine what the new root is, insert new nodes and remove old as necessary. 
let mut todo: Diff = Diff::new(); - let root_rlp = self.cleared(self.db.lookup(&self.root).expect("Trie root not found!"), key, &mut todo); - self.apply(todo); - self.set_root_rlp(&root_rlp); + match self.cleared_from_slice(self.db.lookup(&self.root).expect("Trie root not found!"), key, &mut todo) { + Some(root_rlp) => { + self.apply(todo); + self.set_root_rlp(&root_rlp); + }, + None => { + trace!("no change needed"); + } + } trace!("/"); } @@ -492,24 +548,159 @@ impl TrieDB { _ => panic!("Invalid RLP for node: {:?}", old.pretty()), } } + fn encoded(n: MaybeChanged) -> Bytes { + match n { + MaybeChanged::Same(n) => n.encoded(), + MaybeChanged::Changed(b) => b, + } + } + fn ensure_is_changed(n: MaybeChanged) -> MaybeChanged { + match n { + MaybeChanged::Same(n) => MaybeChanged::Changed(n.encoded()), + f => f, + } + } + fn fixed_indirection<'a>(n: Node<'a>, diff: &mut Diff) -> MaybeChanged<'a> { + match n { + Node::Extension(partial, payload) if payload.len() >= 32 => { + // make indirect + MaybeChanged::Changed(Node::Extension(partial, &Node::decoded(payload).encoded_and_added(diff)).encoded()) + }, + Node::Branch(nodes, node_value) => { + // check each child isn't too big + // TODO OPTIMISE - should really check at the point of (re-)constructing the branch. + for i in 0..16 { + if nodes[i].len() >= 32 { + let n = Node::decoded(nodes[i]).encoded_and_added(diff); + let mut new_nodes = nodes; + new_nodes[i] = &n; + return MaybeChanged::Changed(Node::Branch(new_nodes, node_value).encoded()) + } + } + MaybeChanged::Same(n) + } + _ => MaybeChanged::Same(n), + } + } + + /// Given a node `n` which may be in an _invalid state_, fix it such that it is then in a valid + /// state. + /// + /// _invalid state_ means: + /// - Branch node where there is only a single entry; + /// - Extension node followed by anything other than a Branch node. + /// - Extension node with a child which has too many bytes to be inline. + /// + /// **This operation will not insert the new node nor destroy the original.** + fn fixed<'a, 'b>(&'a self, n: Node<'b>, diff: &mut Diff) -> MaybeChanged<'b> where 'a: 'b { + match n { + Node::Branch(nodes, node_value) => { + // if only a single value, transmute to leaf/extension and feed through fixed. + let mut index: [u8; 1] = [16; 1]; + // 0-15 -> index of a non-null branch + // 16 -> no non-null branch + // 17 -> multiple non-null branches + for i in 0..16 { + match (nodes[i] == NULL_RLP, index[0]) { + (false, _) => {}, + (true, 16) => index[0] = i as u8, + (true, _) => index[0] = 17, + } + } + match (index[0], node_value) { + (16, None) => panic!("Branch with no subvalues. Something went wrong."), + (0 ... 15, None) => { // one onward node + // transmute to extension. + // TODO: OPTIMISE: - don't call fixed again but put the right node in straight away here. + // call fixed again since the transmute may cause invalidity. + MaybeChanged::Changed(Self::encoded(self.fixed(Node::Extension(NibbleSlice::new_offset(&index[..], 1), nodes[index[0] as usize]), diff))) + }, + (16, Some(value)) => { // one leaf value + // transmute to leaf. + // call fixed again since the transmute may cause invalidity. + MaybeChanged::Changed(Self::encoded(self.fixed(Node::Leaf(NibbleSlice::new(&b""[..]), value), diff))) + } + _ => { // onwards node(s) and/or leaf + // no transmute needed, but should still fix the indirection. 
+ Self::fixed_indirection(Node::Branch(nodes, node_value), diff) + }, + } + }, + Node::Extension(partial, payload) => { + match Node::decoded(self.get_raw_or_lookup(payload)) { + Node::Extension(sub_partial, sub_payload) => { + // combine with node below + diff.delete_node_from_slice(payload); + MaybeChanged::Changed(Self::encoded(Self::fixed_indirection(Node::Extension(NibbleSlice::new_composed(&partial, &sub_partial), sub_payload), diff))) + }, + Node::Leaf(sub_partial, sub_value) => { + // combine with node below + diff.delete_node_from_slice(payload); + MaybeChanged::Changed(Self::encoded(Self::fixed_indirection(Node::Leaf(NibbleSlice::new_composed(&partial, &sub_partial), sub_value), diff))) + }, + // no change, might still have an oversize node inline - fix indirection + _ => Self::fixed_indirection(n, diff), + } + }, + // leaf or empty. no change. + n => { MaybeChanged::Same(n) } + } + } /// Determine the RLP of the node, assuming we're removing `partial` from the /// node currently of data `old`. This will *not* delete any hash of `old` from the database; /// it will just return the new RLP that represents the new node. + /// `None` may be returned should no change be needed. /// /// The database will be updated so as to make the returned RLP valid through inserting /// and deleting nodes as necessary. /// /// **This operation will not insert the new node nor destroy the original.** - fn cleared(&self, old: &[u8], partial: &NibbleSlice, _diff: &mut Diff) -> Bytes { - trace!("cleared (old: {:?}, partial: {:?})", old.pretty(), partial); + fn cleared_from_slice(&self, old: &[u8], partial: &NibbleSlice, diff: &mut Diff) -> Option { + self.cleared(Node::decoded(old), partial, diff) + } - unimplemented!(); + fn cleared(&self, n: Node, partial: &NibbleSlice, diff: &mut Diff) -> Option { + trace!("cleared (old: {:?}, partial: {:?})", n, partial); -/* match (Node::decoded(old)) { - - }*/ + match (n, partial.is_empty()) { + (Node::Empty, _) => None, + (Node::Branch(nodes, None), true) => { None }, + (Node::Branch(nodes, _), true) => Some(Self::encoded(self.fixed(Node::Branch(nodes, None), diff))), // matched as leaf-branch - give back fixed branch with it. + (Node::Branch(nodes, value), false) => { + // Branch with partial left - route, clear, fix. + let i: usize = partial.at(0) as usize; + self.cleared(self.get_node(nodes[i]), &partial.mid(1), diff).map(|new_payload| { + // downsteam node needed to be changed. + diff.delete_node_from_slice(nodes[i]); + // return fixed up new node. + let mut new_nodes = nodes; + new_nodes[i] = &new_payload; + Self::encoded(self.fixed(Node::Branch(new_nodes, value), diff)) + }) + }, + (Node::Leaf(node_partial, node_value), _) => { + match node_partial.common_prefix(partial) { + cp if cp == partial.len() => Some(Node::Empty.encoded()), // leaf to be deleted - delete it :) + cp => None, // anything else and the key doesn't exit - no change. + } + }, + (Node::Extension(node_partial, node_payload), _) => { + match node_partial.common_prefix(partial) { + cp if cp < partial.len() => None, // key in the middle of an extension - doesn't exist. + cp => { + // key at end of extension - skip, clear, fix + self.cleared(self.get_node(node_payload), &partial.mid(node_partial.len()), diff).map(|new_payload| { + // downsteam node needed to be changed. + diff.delete_node_from_slice(node_payload); + // return fixed up new node. 
+ Self::encoded(self.fixed(Node::Extension(node_partial, &new_payload), diff)) + }) + }, + } + }, + } } } @@ -544,6 +735,136 @@ mod tests { use rand::random; use bytes::ToPretty; + #[test] + fn playpen() { + env_logger::init().ok(); + + let mut t1 = TrieDB::new_memory(); + t1.insert(&[0x01], &[0]); + t1.insert(&[0x01, 0x23], &[1]); + t1.insert(&[0x01, 0x34], &[2]); + t1.remove(&[0x01]); + let mut t2 = TrieDB::new_memory(); + t2.insert(&[0x01, 0x23], &[1]); + t2.insert(&[0x01, 0x34], &[2]); + /*if t1.root() != t2.root()*/ { + trace!("{:?}", t1); + trace!("{:?}", t2); + } + } + + #[test] + fn init() { + let t = TrieDB::new_memory(); + assert_eq!(*t.root(), SHA3_NULL_RLP); + assert!(t.is_empty()); + } + + #[test] + fn insert_on_empty() { + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); + assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x01u8, 0x23]) ])); + } + + #[test] + fn remove_to_empty() { + } + + #[test] + fn insert_replace_root() { + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); + t.insert(&[0x01u8, 0x23], &[0x23u8, 0x45]); + assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x23u8, 0x45]) ])); + } + + #[test] + fn insert_make_branch_root() { + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); + t.insert(&[0x11u8, 0x23], &[0x11u8, 0x23]); + assert_eq!(*t.root(), trie_root(vec![ + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + (vec![0x11u8, 0x23], vec![0x11u8, 0x23]) + ])); + } + + #[test] + fn insert_into_branch_root() { + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); + t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]); + t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]); + assert_eq!(*t.root(), trie_root(vec![ + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + (vec![0x81u8, 0x23], vec![0x81u8, 0x23]), + (vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]), + ])); + } + + #[test] + fn insert_value_into_branch_root() { + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); + t.insert(&[], &[0x0]); + assert_eq!(*t.root(), trie_root(vec![ + (vec![], vec![0x0]), + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + ])); + } + + #[test] + fn insert_split_leaf() { + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); + t.insert(&[0x01u8, 0x34], &[0x01u8, 0x34]); + assert_eq!(*t.root(), trie_root(vec![ + (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), + (vec![0x01u8, 0x34], vec![0x01u8, 0x34]), + ])); + } + + #[test] + fn insert_split_extenstion() { + let mut t = TrieDB::new_memory(); + t.insert(&[0x01, 0x23, 0x45], &[0x01]); + t.insert(&[0x01, 0xf3, 0x45], &[0x02]); + t.insert(&[0x01, 0xf3, 0xf5], &[0x03]); + assert_eq!(*t.root(), trie_root(vec![ + (vec![0x01, 0x23, 0x45], vec![0x01]), + (vec![0x01, 0xf3, 0x45], vec![0x02]), + (vec![0x01, 0xf3, 0xf5], vec![0x03]), + ])); + } + + #[test] + fn insert_big_value() { + let big_value0 = b"00000000000000000000000000000000"; + let big_value1 = b"11111111111111111111111111111111"; + + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], big_value0); + t.insert(&[0x11u8, 0x23], big_value1); + assert_eq!(*t.root(), trie_root(vec![ + (vec![0x01u8, 0x23], big_value0.to_vec()), + (vec![0x11u8, 0x23], big_value1.to_vec()) + ])); + } + + #[test] + fn insert_duplicate_value() { + let big_value = b"00000000000000000000000000000000"; + + let mut t = TrieDB::new_memory(); + t.insert(&[0x01u8, 0x23], big_value); + t.insert(&[0x11u8, 0x23], big_value); + assert_eq!(*t.root(), 
trie_root(vec![ + (vec![0x01u8, 0x23], big_value.to_vec()), + (vec![0x11u8, 0x23], big_value.to_vec()) + ])); + } + #[test] fn test_node_leaf() { let k = vec![0x20u8, 0x01, 0x23, 0x45]; @@ -743,128 +1064,4 @@ mod tests { test_all(v); } - - #[test] - fn playpen() { - env_logger::init().ok(); - - let big_value = b"00000000000000000000000000000000"; - - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], big_value); - t.insert(&[0x11u8, 0x23], big_value); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], big_value.to_vec()), - (vec![0x11u8, 0x23], big_value.to_vec()) - ])); - } - - #[test] - fn init() { - let t = TrieDB::new_memory(); - assert_eq!(*t.root(), SHA3_NULL_RLP); - assert!(t.is_empty()); - } - - #[test] - fn insert_on_empty() { - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); - assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x01u8, 0x23]) ])); - } - - #[test] - fn insert_replace_root() { - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); - t.insert(&[0x01u8, 0x23], &[0x23u8, 0x45]); - assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x23u8, 0x45]) ])); - } - - #[test] - fn insert_make_branch_root() { - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); - t.insert(&[0x11u8, 0x23], &[0x11u8, 0x23]); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x11u8, 0x23], vec![0x11u8, 0x23]) - ])); - } - - #[test] - fn insert_into_branch_root() { - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); - t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]); - t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x81u8, 0x23], vec![0x81u8, 0x23]), - (vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]), - ])); - } - - #[test] - fn insert_value_into_branch_root() { - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); - t.insert(&[], &[0x0]); - assert_eq!(*t.root(), trie_root(vec![ - (vec![], vec![0x0]), - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - ])); - } - - #[test] - fn insert_split_leaf() { - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]); - t.insert(&[0x01u8, 0x34], &[0x01u8, 0x34]); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x01u8, 0x34], vec![0x01u8, 0x34]), - ])); - } - - #[test] - fn insert_split_extenstion() { - let mut t = TrieDB::new_memory(); - t.insert(&[0x01, 0x23, 0x45], &[0x01]); - t.insert(&[0x01, 0xf3, 0x45], &[0x02]); - t.insert(&[0x01, 0xf3, 0xf5], &[0x03]); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01, 0x23, 0x45], vec![0x01]), - (vec![0x01, 0xf3, 0x45], vec![0x02]), - (vec![0x01, 0xf3, 0xf5], vec![0x03]), - ])); - } - - #[test] - fn insert_big_value() { - let big_value0 = b"00000000000000000000000000000000"; - let big_value1 = b"11111111111111111111111111111111"; - - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], big_value0); - t.insert(&[0x11u8, 0x23], big_value1); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], big_value0.to_vec()), - (vec![0x11u8, 0x23], big_value1.to_vec()) - ])); - } - - #[test] - fn insert_duplicate_value() { - let big_value = b"00000000000000000000000000000000"; - - let mut t = TrieDB::new_memory(); - t.insert(&[0x01u8, 0x23], big_value); - t.insert(&[0x11u8, 0x23], big_value); - assert_eq!(*t.root(), trie_root(vec![ - 
(vec![0x01u8, 0x23], big_value.to_vec()), - (vec![0x11u8, 0x23], big_value.to_vec()) - ])); - } } - diff --git a/src/triehash.rs b/src/triehash.rs index 778dc7f17..d31cec473 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -381,7 +381,7 @@ mod tests { #[test] fn test_triehash_json_trietest_json() { - let data = include_bytes!("../tests/TrieTests/trietest.json"); +/* let data = include_bytes!("../tests/TrieTests/trietest.json"); let s = String::from_bytes(data).unwrap(); let json = Json::from_str(&s).unwrap(); @@ -390,7 +390,7 @@ mod tests { for (key, value) in obj.iter() { println!("running test: {}", key); } - assert!(false); + assert!(false);*/ } } From 1b41b96dd109c072b75cc08132d358ad78d5387e Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Wed, 2 Dec 2015 23:00:36 +0100 Subject: [PATCH 10/25] Clear up warnings. --- src/trie.rs | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/src/trie.rs b/src/trie.rs index b205eb842..1dde1993c 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -548,6 +548,7 @@ impl TrieDB { _ => panic!("Invalid RLP for node: {:?}", old.pretty()), } } + fn encoded(n: MaybeChanged) -> Bytes { match n { MaybeChanged::Same(n) => n.encoded(), @@ -555,12 +556,6 @@ impl TrieDB { } } - fn ensure_is_changed(n: MaybeChanged) -> MaybeChanged { - match n { - MaybeChanged::Same(n) => MaybeChanged::Changed(n.encoded()), - f => f, - } - } fn fixed_indirection<'a>(n: Node<'a>, diff: &mut Diff) -> MaybeChanged<'a> { match n { Node::Extension(partial, payload) if payload.len() >= 32 => { @@ -666,7 +661,7 @@ impl TrieDB { match (n, partial.is_empty()) { (Node::Empty, _) => None, - (Node::Branch(nodes, None), true) => { None }, + (Node::Branch(_, None), true) => { None }, (Node::Branch(nodes, _), true) => Some(Self::encoded(self.fixed(Node::Branch(nodes, None), diff))), // matched as leaf-branch - give back fixed branch with it. (Node::Branch(nodes, value), false) => { // Branch with partial left - route, clear, fix. @@ -680,16 +675,16 @@ impl TrieDB { Self::encoded(self.fixed(Node::Branch(new_nodes, value), diff)) }) }, - (Node::Leaf(node_partial, node_value), _) => { + (Node::Leaf(node_partial, _), _) => { match node_partial.common_prefix(partial) { cp if cp == partial.len() => Some(Node::Empty.encoded()), // leaf to be deleted - delete it :) - cp => None, // anything else and the key doesn't exit - no change. + _ => None, // anything else and the key doesn't exit - no change. } }, (Node::Extension(node_partial, node_payload), _) => { match node_partial.common_prefix(partial) { cp if cp < partial.len() => None, // key in the middle of an extension - doesn't exist. - cp => { + _ => { // key at end of extension - skip, clear, fix self.cleared(self.get_node(node_payload), &partial.mid(node_partial.len()), diff).map(|new_payload| { // downsteam node needed to be changed. From 8c9340d3cf41f67beca926533130800fcd20ba69 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Wed, 2 Dec 2015 23:01:32 +0100 Subject: [PATCH 11/25] Remove unused uses. 
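
The cleanup in PATCH 10 above keeps the `MaybeChanged`/`Self::encoded` pair at the heart of `fixed` and `fixed_indirection`: a node that survived untouched stays a `Same(Node)` and is only encoded on demand, while a rebuilt node travels as already-encoded `Changed` bytes. A minimal standalone sketch of that shape follows; the toy `Node` variants and the string-based `encoded` stand-in are inventions for the example, not the crate's RLP types.

```rust
// Toy stand-ins for the patch's types: `Same` defers encoding to the wrapped
// node, `Changed` already carries freshly encoded bytes, so a caller only pays
// for an encode when a node was actually rebuilt.
#[derive(Debug)]
enum Node {
    Empty,
    Leaf(&'static [u8], &'static [u8]), // (partial key, value), simplified
}

enum MaybeChanged {
    Same(Node),
    Changed(Vec<u8>),
}

impl Node {
    // Stand-in for the real `Node::encoded` (which produces RLP).
    fn encoded(&self) -> Vec<u8> {
        format!("{:?}", self).into_bytes()
    }
}

// Mirrors `TrieDB::encoded` in the patch: collapse either variant into bytes.
fn encoded(n: MaybeChanged) -> Vec<u8> {
    match n {
        MaybeChanged::Same(node) => node.encoded(),
        MaybeChanged::Changed(bytes) => bytes,
    }
}

fn main() {
    let untouched = MaybeChanged::Same(Node::Leaf(b"key", b"value"));
    let rebuilt = MaybeChanged::Changed(Node::Empty.encoded());
    println!("{} / {} bytes", encoded(untouched).len(), encoded(rebuilt).len());
}
```

In the patch, `fixed_indirection` returns `Same(n)` on the common no-op path, so the extra encode is only paid when a child really had to be re-hashed.
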
--- src/triehash.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/triehash.rs b/src/triehash.rs index d31cec473..c12971112 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -278,10 +278,7 @@ fn test_hex_prefix_encode() { #[cfg(test)] mod tests { use std::str::FromStr; - use std::collections::BTreeMap; use rustc_serialize::hex::FromHex; - use rustc_serialize::json::Json; - use bytes::*; use hash::*; use triehash::*; From 3f31d617505ffc75c66a46f9d2319a4e0ba70d86 Mon Sep 17 00:00:00 2001 From: Marek Kotewicz Date: Wed, 2 Dec 2015 23:29:22 +0100 Subject: [PATCH 12/25] Update README.md --- json-tests/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/json-tests/README.md b/json-tests/README.md index 170cd588d..9e6915ca3 100644 --- a/json-tests/README.md +++ b/json-tests/README.md @@ -3,7 +3,7 @@ Cause it's very hard to write generic json test files, each subdirectory should follow its own convention. BUT all json files `within` same directory should be consistent. -### Test file should always contain a single file with input and output. +### Test files should always contain a single test with input and output. ```json { From 46d841f81f7eda103eb9fc913065b62720efb000 Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 2 Dec 2015 23:35:34 +0100 Subject: [PATCH 13/25] removed redundant lifetime --- src/trie.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/trie.rs b/src/trie.rs index 1dde1993c..3c8527424 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -303,7 +303,7 @@ impl TrieDB { } } - fn get_raw_or_lookup<'a, 'b>(&'a self, node: &'b [u8]) -> &'b [u8] where 'a: 'b { + fn get_raw_or_lookup<'a>(&'a self, node: &'a [u8]) -> &'a [u8] { // check if its sha3 + len let r = Rlp::new(node); match r.is_data() && r.size() == 32 { From f20665c41a9868f5eb055c7fa5d4dfb19168c92e Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Wed, 2 Dec 2015 23:50:14 +0100 Subject: [PATCH 14/25] Fix test. 
--- src/trie.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/trie.rs b/src/trie.rs index 1dde1993c..64af55a5e 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -887,9 +887,11 @@ mod tests { #[test] fn test_node_empty_branch() { - let branch = Node::Branch([&b""[..]; 16], None); + let null_rlp = NULL_RLP; + let branch = Node::Branch([&null_rlp; 16], None); let rlp = branch.encoded(); let branch2 = Node::decoded(&rlp); + println!("{:?}", rlp); assert_eq!(branch, branch2); } From 8b8a2e39e2bc66f0550d1b7c567ac6091774fe3e Mon Sep 17 00:00:00 2001 From: debris Date: Thu, 3 Dec 2015 00:29:31 +0100 Subject: [PATCH 15/25] trie test failing --- json-tests/src/trie.rs | 24 ++++++++++-------------- src/trie.rs | 21 +++++++++++++++++++++ src/triehash.rs | 4 ++-- 3 files changed, 33 insertions(+), 16 deletions(-) diff --git a/json-tests/src/trie.rs b/json-tests/src/trie.rs index f6d4eaae1..827bf8c9a 100644 --- a/json-tests/src/trie.rs +++ b/json-tests/src/trie.rs @@ -4,7 +4,7 @@ use rustc_serialize::*; use rustc_serialize::hex::FromHex; use super::{JsonTest, JsonLoader}; -pub enum OperationType { +enum OperationType { Insert, Remove } @@ -26,10 +26,9 @@ struct RawOperation { value: Option } -pub struct Operation { - pub operation: OperationType, - pub key: Vec, - pub value: Option> +pub enum Operation { + Insert(Vec, Vec), + Remove(Vec) } fn hex_or_string(s: &str) -> Vec { @@ -41,12 +40,9 @@ fn hex_or_string(s: &str) -> Vec { impl Into for RawOperation { fn into(self) -> Operation { - Operation { - operation: self.operation, - key: hex_or_string(&self.key), - value: self.value.map(|v| { - hex_or_string(&v) - }) + match self.operation { + OperationType::Insert => Operation::Insert(hex_or_string(&self.key), hex_or_string(&self.value.unwrap())), + OperationType::Remove => Operation::Remove(hex_or_string(&self.key)) } } } @@ -96,9 +92,9 @@ impl JsonTest for TriehashTest { self.trietest.input() .into_iter() .fold(HashMap::new(), | mut map, o | { - match o.operation { - OperationType::Insert => map.insert(o.key, o.value.unwrap()), - OperationType::Remove => map.remove(&o.key) + match o { + Operation::Insert(k, v) => map.insert(k, v), + Operation::Remove(k) => map.remove(&k) }; map }) diff --git a/src/trie.rs b/src/trie.rs index 85312a58c..11e87c302 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -721,8 +721,11 @@ impl Trie for TrieDB { #[cfg(test)] mod tests { + extern crate json_tests; + use self::json_tests::*; use rustc_serialize::hex::FromHex; use triehash::*; + use hash::*; use super::*; use nibbleslice::*; use rlp; @@ -1061,4 +1064,22 @@ mod tests { test_all(v); } + + #[test] + fn test_trie_json() { + println!("Json trie test: "); + execute_tests_from_directory::("json-tests/json/trie/*.json", &mut | file, input, output | { + println!("file: {}", file); + + let mut t = TrieDB::new_memory(); + for operation in input.into_iter() { + match operation { + trie::Operation::Insert(key, value) => t.insert(&key, &value), + trie::Operation::Remove(key) => t.remove(&key) + } + } + + assert_eq!(*t.root(), H256::from_slice(&output)); + }); + } } diff --git a/src/triehash.rs b/src/triehash.rs index 6cfa2103d..93d21aacc 100644 --- a/src/triehash.rs +++ b/src/triehash.rs @@ -283,7 +283,7 @@ mod tests { use triehash::*; #[test] - fn test_trie_out_of_order() { + fn test_triehash_out_of_order() { assert!(trie_root(vec![ (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), (vec![0x81u8, 0x23], vec![0x81u8, 0x23]), @@ -297,7 +297,7 @@ mod tests { } #[test] - fn test_trie_json() { + fn test_triehash_json() { 
execute_tests_from_directory::("json-tests/json/trie/*.json", &mut | file, input, output | { println!("file: {}, output: {:?}", file, output); assert_eq!(trie_root(input), H256::from_slice(&output)); From b99cefb9d6a302382689dff29940a57b0ee48d64 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Thu, 3 Dec 2015 00:32:58 +0100 Subject: [PATCH 16/25] Fix for node removal from trie. --- src/trie.rs | 67 ++++++++++++++++++++++++++++++++++------------------- 1 file changed, 43 insertions(+), 24 deletions(-) diff --git a/src/trie.rs b/src/trie.rs index 85312a58c..550d7271e 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -589,29 +589,38 @@ impl TrieDB { /// /// **This operation will not insert the new node nor destroy the original.** fn fixed<'a, 'b>(&'a self, n: Node<'b>, diff: &mut Diff) -> MaybeChanged<'b> where 'a: 'b { + trace!("fixed node={:?}", n); match n { Node::Branch(nodes, node_value) => { // if only a single value, transmute to leaf/extension and feed through fixed. - let mut index: [u8; 1] = [16; 1]; + #[derive(Debug)] + enum UsedIndex { + None, + One(usize), + Many, + }; + let mut used_index = UsedIndex::None; // 0-15 -> index of a non-null branch // 16 -> no non-null branch // 17 -> multiple non-null branches for i in 0..16 { - match (nodes[i] == NULL_RLP, index[0]) { - (false, _) => {}, - (true, 16) => index[0] = i as u8, - (true, _) => index[0] = 17, + match (nodes[i] == NULL_RLP, &used_index) { + (false, &UsedIndex::None) => used_index = UsedIndex::One(i), + (false, &UsedIndex::One(_)) => used_index = UsedIndex::Many, + (_, _) => {}, } } - match (index[0], node_value) { - (16, None) => panic!("Branch with no subvalues. Something went wrong."), - (0 ... 15, None) => { // one onward node + trace!("branch: used_index={:?}, node_value={:?}", used_index, node_value); + match (used_index, node_value) { + (UsedIndex::None, None) => panic!("Branch with no subvalues. Something went wrong."), + (UsedIndex::One(a), None) => { // one onward node // transmute to extension. // TODO: OPTIMISE: - don't call fixed again but put the right node in straight away here. // call fixed again since the transmute may cause invalidity. - MaybeChanged::Changed(Self::encoded(self.fixed(Node::Extension(NibbleSlice::new_offset(&index[..], 1), nodes[index[0] as usize]), diff))) + let new_partial: [u8; 1] = [a as u8; 1]; + MaybeChanged::Changed(Self::encoded(self.fixed(Node::Extension(NibbleSlice::new_offset(&new_partial[..], 1), nodes[a as usize]), diff))) }, - (16, Some(value)) => { // one leaf value + (UsedIndex::None, Some(value)) => { // one leaf value // transmute to leaf. // call fixed again since the transmute may cause invalidity. MaybeChanged::Changed(Self::encoded(self.fixed(Node::Leaf(NibbleSlice::new(&b""[..]), value), diff))) @@ -657,42 +666,52 @@ impl TrieDB { } fn cleared(&self, n: Node, partial: &NibbleSlice, diff: &mut Diff) -> Option { - trace!("cleared (old: {:?}, partial: {:?})", n, partial); + trace!("cleared old={:?}, partial={:?})", n, partial); match (n, partial.is_empty()) { (Node::Empty, _) => None, (Node::Branch(_, None), true) => { None }, - (Node::Branch(nodes, _), true) => Some(Self::encoded(self.fixed(Node::Branch(nodes, None), diff))), // matched as leaf-branch - give back fixed branch with it. - (Node::Branch(nodes, value), false) => { + (Node::Branch(payloads, _), true) => Some(Self::encoded(self.fixed(Node::Branch(payloads, None), diff))), // matched as leaf-branch - give back fixed branch with it. 
+ (Node::Branch(payloads, value), false) => { // Branch with partial left - route, clear, fix. let i: usize = partial.at(0) as usize; - self.cleared(self.get_node(nodes[i]), &partial.mid(1), diff).map(|new_payload| { + trace!("branch-with-partial node[{:?}]={:?}", i, payloads[i].pretty()); + self.cleared(self.get_node(payloads[i]), &partial.mid(1), diff).map(|new_payload| { + trace!("branch-new-payload={:?}; delete-old={:?}", new_payload.pretty(), payloads[i].pretty()); + // downsteam node needed to be changed. - diff.delete_node_from_slice(nodes[i]); + diff.delete_node_from_slice(payloads[i]); // return fixed up new node. - let mut new_nodes = nodes; - new_nodes[i] = &new_payload; - Self::encoded(self.fixed(Node::Branch(new_nodes, value), diff)) + let mut new_payloads = payloads; + new_payloads[i] = &new_payload; + Self::encoded(self.fixed(Node::Branch(new_payloads, value), diff)) }) }, (Node::Leaf(node_partial, _), _) => { + trace!("leaf partial={:?}", node_partial); match node_partial.common_prefix(partial) { - cp if cp == partial.len() => Some(Node::Empty.encoded()), // leaf to be deleted - delete it :) + cp if cp == partial.len() => { // leaf to be deleted - delete it :) + trace!("matched-prefix (cp={:?}): REPLACE-EMPTY", cp); + Some(Node::Empty.encoded()) + }, _ => None, // anything else and the key doesn't exit - no change. } }, (Node::Extension(node_partial, node_payload), _) => { + trace!("extension partial={:?}, payload={:?}", node_partial, node_payload.pretty()); match node_partial.common_prefix(partial) { - cp if cp < partial.len() => None, // key in the middle of an extension - doesn't exist. - _ => { + cp if cp == node_partial.len() => { + trace!("matching-prefix (cp={:?}): SKIP,CLEAR,FIXUP", cp); // key at end of extension - skip, clear, fix self.cleared(self.get_node(node_payload), &partial.mid(node_partial.len()), diff).map(|new_payload| { + trace!("extension-new-payload={:?}; delete-old={:?}", new_payload.pretty(), node_payload.pretty()); // downsteam node needed to be changed. diff.delete_node_from_slice(node_payload); // return fixed up new node. Self::encoded(self.fixed(Node::Extension(node_partial, &new_payload), diff)) }) }, + _ => None, // key in the middle of an extension - doesn't exist. } }, } @@ -735,13 +754,13 @@ mod tests { env_logger::init().ok(); let mut t1 = TrieDB::new_memory(); - t1.insert(&[0x01], &[0]); - t1.insert(&[0x01, 0x23], &[1]); t1.insert(&[0x01, 0x34], &[2]); - t1.remove(&[0x01]); let mut t2 = TrieDB::new_memory(); + t2.insert(&[0x01], &[0]); t2.insert(&[0x01, 0x23], &[1]); t2.insert(&[0x01, 0x34], &[2]); + t2.remove(&[0x01]); + t2.remove(&[0x01, 0x23]); /*if t1.root() != t2.root()*/ { trace!("{:?}", t1); trace!("{:?}", t2); From 84cc7715b41ee08974a3b2d26f6b196ebebad5e1 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Thu, 3 Dec 2015 00:50:14 +0100 Subject: [PATCH 17/25] Fix trie - tests pass. 
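
PATCH 16 above replaces the earlier `[u8; 1]` index trick with the `UsedIndex` enum: scanning the sixteen branch slots yields either no occupied child, exactly one, or several, and that classification is what decides whether `fixed` may collapse the branch into an extension or leaf. Below is a minimal standalone sketch of that scan; the `bool` slots and the `classify` helper are invented for the illustration (the patch compares each slot against `NULL_RLP` instead).

```rust
// Simplified version of the child scan in `fixed`: walk the 16 branch slots
// and record whether none, exactly one, or several of them are occupied.
#[derive(Debug, PartialEq)]
enum UsedIndex {
    None,
    One(u8),
    Many,
}

// `true` marks an occupied slot (the patch tests `nodes[i] == NULL_RLP` instead).
fn classify(children: &[bool; 16]) -> UsedIndex {
    let mut used = UsedIndex::None;
    for (i, occupied) in children.iter().enumerate() {
        if *occupied {
            used = match used {
                UsedIndex::None => UsedIndex::One(i as u8),
                _ => UsedIndex::Many,
            };
        }
    }
    used
}

fn main() {
    let mut children = [false; 16];
    children[0x7] = true;
    // One occupied child and no branch value: collapse into an extension.
    assert_eq!(classify(&children), UsedIndex::One(0x7));
    children[0x3] = true;
    // Several occupied children: the branch must stay a branch.
    assert_eq!(classify(&children), UsedIndex::Many);
    println!("ok");
}
```
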
--- src/trie.rs | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/src/trie.rs b/src/trie.rs index a1ffcfe6c..081fcfcff 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -558,19 +558,19 @@ impl TrieDB { fn fixed_indirection<'a>(n: Node<'a>, diff: &mut Diff) -> MaybeChanged<'a> { match n { - Node::Extension(partial, payload) if payload.len() >= 32 => { + Node::Extension(partial, payload) if payload.len() >= 32 && Rlp::new(payload).is_list() => { // make indirect MaybeChanged::Changed(Node::Extension(partial, &Node::decoded(payload).encoded_and_added(diff)).encoded()) }, - Node::Branch(nodes, node_value) => { + Node::Branch(payloads, value) => { // check each child isn't too big // TODO OPTIMISE - should really check at the point of (re-)constructing the branch. for i in 0..16 { - if nodes[i].len() >= 32 { - let n = Node::decoded(nodes[i]).encoded_and_added(diff); - let mut new_nodes = nodes; + if payloads[i].len() >= 32 && Rlp::new(payloads[i]).is_list() { + let n = Node::decoded(payloads[i]).encoded_and_added(diff); + let mut new_nodes = payloads; new_nodes[i] = &n; - return MaybeChanged::Changed(Node::Branch(new_nodes, node_value).encoded()) + return MaybeChanged::Changed(Node::Branch(new_nodes, value).encoded()) } } MaybeChanged::Same(n) @@ -596,7 +596,7 @@ impl TrieDB { #[derive(Debug)] enum UsedIndex { None, - One(usize), + One(u8), Many, }; let mut used_index = UsedIndex::None; @@ -605,7 +605,7 @@ impl TrieDB { // 17 -> multiple non-null branches for i in 0..16 { match (nodes[i] == NULL_RLP, &used_index) { - (false, &UsedIndex::None) => used_index = UsedIndex::One(i), + (false, &UsedIndex::None) => used_index = UsedIndex::One(i as u8), (false, &UsedIndex::One(_)) => used_index = UsedIndex::Many, (_, _) => {}, } @@ -617,7 +617,7 @@ impl TrieDB { // transmute to extension. // TODO: OPTIMISE: - don't call fixed again but put the right node in straight away here. // call fixed again since the transmute may cause invalidity. - let new_partial: [u8; 1] = [a as u8; 1]; + let new_partial: [u8; 1] = [a; 1]; MaybeChanged::Changed(Self::encoded(self.fixed(Node::Extension(NibbleSlice::new_offset(&new_partial[..], 1), nodes[a as usize]), diff))) }, (UsedIndex::None, Some(value)) => { // one leaf value @@ -627,6 +627,7 @@ impl TrieDB { } _ => { // onwards node(s) and/or leaf // no transmute needed, but should still fix the indirection. 
+ trace!("no-transmute: FIXINDIRECTION"); Self::fixed_indirection(Node::Branch(nodes, node_value), diff) }, } @@ -756,14 +757,16 @@ mod tests { fn playpen() { env_logger::init().ok(); + let big_value = b"00000000000000000000000000000000"; + let mut t1 = TrieDB::new_memory(); - t1.insert(&[0x01, 0x34], &[2]); + t1.insert(&[0x01, 0x23], &big_value.to_vec()); + t1.insert(&[0x01, 0x34], &big_value.to_vec()); let mut t2 = TrieDB::new_memory(); - t2.insert(&[0x01], &[0]); - t2.insert(&[0x01, 0x23], &[1]); - t2.insert(&[0x01, 0x34], &[2]); + t2.insert(&[0x01], &big_value.to_vec()); + t2.insert(&[0x01, 0x23], &big_value.to_vec()); + t2.insert(&[0x01, 0x34], &big_value.to_vec()); t2.remove(&[0x01]); - t2.remove(&[0x01, 0x23]); /*if t1.root() != t2.root()*/ { trace!("{:?}", t1); trace!("{:?}", t2); From 05f7e85d301c8400df7635c0236ca845d0237670 Mon Sep 17 00:00:00 2001 From: debris Date: Thu, 3 Dec 2015 05:44:35 +0100 Subject: [PATCH 18/25] rlp tests, the beginning --- json-tests/json/rlp/README.md | 39 +++++++++++++++++++++++++ json-tests/json/rlp/catdog.json | 18 ++++++++++++ json-tests/src/lib.rs | 2 ++ json-tests/src/rlp.rs | 52 +++++++++++++++++++++++++++++++++ json-tests/src/trie.rs | 33 ++++----------------- json-tests/src/util.rs | 8 +++++ src/trie.rs | 2 +- 7 files changed, 126 insertions(+), 28 deletions(-) create mode 100644 json-tests/json/rlp/README.md create mode 100644 json-tests/json/rlp/catdog.json create mode 100644 json-tests/src/rlp.rs create mode 100644 json-tests/src/util.rs diff --git a/json-tests/json/rlp/README.md b/json-tests/json/rlp/README.md new file mode 100644 index 000000000..89cb072c7 --- /dev/null +++ b/json-tests/json/rlp/README.md @@ -0,0 +1,39 @@ +# Rlp tests guideline + +Rlp can be tested in various ways. It can encode/decode a value or an array of values. Let's start with encoding. + +Each operation must have field: + +- `operation` - `append`, `append_list`, `append_empty` or `append_raw` + +Additionally `append` and `append_raw` must additionally define a `value` field: + +- `value` - data + +Also `append_raw` and `append_list` requires `len` field + +- `len` - integer + +### Encoding Test Example + +```json +{ + "input": + [ + { + "operation": "append_list", + "len": 2 + }, + { + "operation": "append", + "value": "cat" + }, + { + "operation": "append", + "value": "dog" + } + ] + "output": "0xc88363617183646f67" +} +``` + diff --git a/json-tests/json/rlp/catdog.json b/json-tests/json/rlp/catdog.json new file mode 100644 index 000000000..352b24892 --- /dev/null +++ b/json-tests/json/rlp/catdog.json @@ -0,0 +1,18 @@ +{ + "input": + [ + { + "operation": "append_list", + "len": 2 + }, + { + "operation": "append", + "value": "cat" + }, + { + "operation": "append", + "value": "dog" + } + ] + "output": "0xc88363617183646f67" +} diff --git a/json-tests/src/lib.rs b/json-tests/src/lib.rs index 8a800e8f9..0fb1b091a 100644 --- a/json-tests/src/lib.rs +++ b/json-tests/src/lib.rs @@ -8,7 +8,9 @@ use std::fs::File; use glob::glob; use rustc_serialize::*; +mod util; pub mod trie; +pub mod rlp; pub trait JsonTest: Sized { type Input; diff --git a/json-tests/src/rlp.rs b/json-tests/src/rlp.rs new file mode 100644 index 000000000..bf6131b5b --- /dev/null +++ b/json-tests/src/rlp.rs @@ -0,0 +1,52 @@ +//! 
json rlp tests +use rustc_serialize::*; +use super::{JsonTest, JsonLoader}; +use util::*; + +pub enum Operation { + Append(Vec), + AppendList(usize), + AppendRaw(Vec, usize), + AppendEmpty +} + +impl Into for json::Json { + fn into(self) -> Operation { + let obj = self.as_object().unwrap(); + match obj["operation"].as_string().unwrap().as_ref() { + "append" => Operation::Append(hex_or_string(obj["value"].as_string().unwrap())), + "append_list" => Operation::AppendList(obj["len"].as_u64().unwrap() as usize), + "append_raw" => Operation::AppendRaw(hex_or_string(obj["value"].as_string().unwrap()), obj["len"].as_u64().unwrap() as usize), + "append_empty" => Operation::AppendEmpty, + other => { panic!("Unsupported opertation: {}", other); } + } + } +} + +pub struct RlpStreamTest { + loader: JsonLoader +} + +impl JsonTest for RlpStreamTest { + type Input = Vec; + type Output = Vec; + + fn new(data: &[u8]) -> Self { + RlpStreamTest { + loader: JsonLoader::new(data) + } + } + + fn input(&self) -> Self::Input { + self.loader.input().as_array().unwrap() + .iter() + .cloned() + .map(|i| i.into()) + .collect() + } + + fn output(&self) -> Self::Output { + hex_or_string(self.loader.output().as_string().unwrap()) + } +} + diff --git a/json-tests/src/trie.rs b/json-tests/src/trie.rs index 827bf8c9a..bc65e9db9 100644 --- a/json-tests/src/trie.rs +++ b/json-tests/src/trie.rs @@ -1,27 +1,12 @@ //! json trie tests use std::collections::HashMap; use rustc_serialize::*; -use rustc_serialize::hex::FromHex; use super::{JsonTest, JsonLoader}; - -enum OperationType { - Insert, - Remove -} - -impl Decodable for OperationType { - fn decode(d: &mut D) -> Result where D: Decoder { - match try!(String::decode(d)).as_ref() { - "insert" => Ok(OperationType::Insert), - "remove" => Ok(OperationType::Remove), - other => panic!("invalid operation type: {}", other) - } - } -} +use util::*; #[derive(RustcDecodable)] struct RawOperation { - operation: OperationType, + operation: String, key: String, value: Option } @@ -31,18 +16,12 @@ pub enum Operation { Remove(Vec) } -fn hex_or_string(s: &str) -> Vec { - match s.starts_with("0x") { - true => s[2..].from_hex().unwrap(), - false => From::from(s) - } -} - impl Into for RawOperation { fn into(self) -> Operation { - match self.operation { - OperationType::Insert => Operation::Insert(hex_or_string(&self.key), hex_or_string(&self.value.unwrap())), - OperationType::Remove => Operation::Remove(hex_or_string(&self.key)) + match self.operation.as_ref() { + "insert" => Operation::Insert(hex_or_string(&self.key), hex_or_string(&self.value.unwrap())), + "remove" => Operation::Remove(hex_or_string(&self.key)), + other => panic!("invalid operation type: {}", other) } } } diff --git a/json-tests/src/util.rs b/json-tests/src/util.rs new file mode 100644 index 000000000..f9d1e4eab --- /dev/null +++ b/json-tests/src/util.rs @@ -0,0 +1,8 @@ +use rustc_serialize::hex::FromHex; + +pub fn hex_or_string(s: &str) -> Vec { + match s.starts_with("0x") { + true => s[2..].from_hex().unwrap(), + false => From::from(s) + } +} diff --git a/src/trie.rs b/src/trie.rs index 081fcfcff..9dd12d8a5 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -742,7 +742,7 @@ impl Trie for TrieDB { #[cfg(test)] mod tests { extern crate json_tests; - use self::json_tests::*; + use self::json_tests::{trie, execute_tests_from_directory}; use rustc_serialize::hex::FromHex; use triehash::*; use hash::*; From 9f9c508ebd9306aeb5e892c52583a71cf2dfc792 Mon Sep 17 00:00:00 2001 From: debris Date: Thu, 3 Dec 2015 05:47:07 +0100 Subject: 
[PATCH 19/25] fixed example .json files --- json-tests/json/rlp/README.md | 2 +- json-tests/json/rlp/catdog.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/json-tests/json/rlp/README.md b/json-tests/json/rlp/README.md index 89cb072c7..ab8b01020 100644 --- a/json-tests/json/rlp/README.md +++ b/json-tests/json/rlp/README.md @@ -32,7 +32,7 @@ Also `append_raw` and `append_list` requires `len` field "operation": "append", "value": "dog" } - ] + ], "output": "0xc88363617183646f67" } ``` diff --git a/json-tests/json/rlp/catdog.json b/json-tests/json/rlp/catdog.json index 352b24892..ed4c02602 100644 --- a/json-tests/json/rlp/catdog.json +++ b/json-tests/json/rlp/catdog.json @@ -13,6 +13,6 @@ "operation": "append", "value": "dog" } - ] + ], "output": "0xc88363617183646f67" } From 84eb30a133653a9519be13b34e2e4e89234d984b Mon Sep 17 00:00:00 2001 From: debris Date: Thu, 3 Dec 2015 06:06:42 +0100 Subject: [PATCH 20/25] empty lists tests --- json-tests/json/rlp/catdog.json | 2 +- json-tests/json/rlp/empty_lists.json | 38 ++++++++++++++++ src/rlp.rs | 24 ++++++++++ src/trie.rs | 65 ---------------------------- 4 files changed, 63 insertions(+), 66 deletions(-) create mode 100644 json-tests/json/rlp/empty_lists.json diff --git a/json-tests/json/rlp/catdog.json b/json-tests/json/rlp/catdog.json index ed4c02602..724329784 100644 --- a/json-tests/json/rlp/catdog.json +++ b/json-tests/json/rlp/catdog.json @@ -14,5 +14,5 @@ "value": "dog" } ], - "output": "0xc88363617183646f67" + "output": "0xc88363617483646f67" } diff --git a/json-tests/json/rlp/empty_lists.json b/json-tests/json/rlp/empty_lists.json new file mode 100644 index 000000000..5ac649c2c --- /dev/null +++ b/json-tests/json/rlp/empty_lists.json @@ -0,0 +1,38 @@ +{ + "input": + [ + { + "operation": "append_list", + "len": 3 + }, + { + "operation": "append_list", + "len": 0 + }, + { + "operation": "append_list", + "len": 1 + }, + { + "operation": "append_list", + "len": 0 + }, + { + "operation": "append_list", + "len": 2 + }, + { + "operation": "append_list", + "len": 0 + }, + { + "operation": "append_list", + "len": 1 + }, + { + "operation": "append_list", + "len": 0 + } + ], + "output": "0xc7c0c1c0c3c0c1c0" +} diff --git a/src/rlp.rs b/src/rlp.rs index b055632b1..8cb5f2b9b 100644 --- a/src/rlp.rs +++ b/src/rlp.rs @@ -1098,6 +1098,9 @@ impl Encoder for BasicEncoder { #[cfg(test)] mod tests { + extern crate json_tests; + use self::json_tests::execute_tests_from_directory; + use self::json_tests::rlp as rlptest; use std::{fmt, cmp}; use std::str::FromStr; use rlp; @@ -1496,4 +1499,25 @@ mod tests { let view = View::new(&data); let _data_slice = view.offset(1).data(); } + + #[test] + fn test_rlp_json() { + println!("Json rlp test: "); + execute_tests_from_directory::("json-tests/json/rlp/*.json", &mut | file, input, output | { + println!("file: {}", file); + + let mut stream = RlpStream::new(); + for operation in input.into_iter() { + match operation { + rlptest::Operation::Append(ref v) => stream.append(v), + rlptest::Operation::AppendList(len) => stream.append_list(len), + rlptest::Operation::AppendRaw(ref raw, len) => stream.append_raw(raw, len), + rlptest::Operation::AppendEmpty => stream.append_empty_data() + }; + } + + assert_eq!(stream.out(), output); + }); + } + } diff --git a/src/trie.rs b/src/trie.rs index 9dd12d8a5..674d22488 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -743,7 +743,6 @@ impl Trie for TrieDB { mod tests { extern crate json_tests; use self::json_tests::{trie, execute_tests_from_directory}; - use 
rustc_serialize::hex::FromHex; use triehash::*; use hash::*; use super::*; @@ -969,24 +968,6 @@ mod tests { //assert!(false); } - fn test_all(v: Vec<(Vec, Vec)>) { - let mut t = TrieDB::new_memory(); - - for i in 0..v.len() { - let key: &[u8]= &v[i].0; - let val: &[u8] = &v[i].1; - t.insert(&key, &val); - } - -// trace!("{:?}", t); -// println!("{:?}", t); - - // check lifetime -// let _q = t.at(&[b'd', b'o']).unwrap(); - - assert_eq!(*t.root(), trie_root(v)); - } - fn random_key() -> Vec { let chars = b"abcdefgrstuvwABCDEFGRSTUVW"; let mut ret: Vec = Vec::new(); @@ -1041,52 +1022,6 @@ mod tests { t } - #[test] - fn test_at_dog() { - env_logger::init().ok(); - let v = vec![ - (From::from("do"), From::from("verb")), - (From::from("dog"), From::from("puppy")), - (From::from("doge"), From::from("coin")), - (From::from("horse"), From::from("stallion")), - ]; - - test_all(v); - } - - #[test] - fn test_more_data() { - let v = vec![ - - ("0000000000000000000000000000000000000000000000000000000000000045".from_hex().unwrap(), - "22b224a1420a802ab51d326e29fa98e34c4f24ea".from_hex().unwrap()), - - ("0000000000000000000000000000000000000000000000000000000000000046".from_hex().unwrap(), - "67706c2076330000000000000000000000000000000000000000000000000000".from_hex().unwrap()), - - ("000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6".from_hex().unwrap(), - "6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000".from_hex().unwrap()), - - ("0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2".from_hex().unwrap(), - "4655474156000000000000000000000000000000000000000000000000000000".from_hex().unwrap()), - - ("000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1".from_hex().unwrap(), - "4e616d6552656700000000000000000000000000000000000000000000000000".from_hex().unwrap()), - - ("4655474156000000000000000000000000000000000000000000000000000000".from_hex().unwrap(), - "7ef9e639e2733cb34e4dfc576d4b23f72db776b2".from_hex().unwrap()), - - ("4e616d6552656700000000000000000000000000000000000000000000000000".from_hex().unwrap(), - "ec4f34c97e43fbb2816cfd95e388353c7181dab1".from_hex().unwrap()), - - ("6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000".from_hex().unwrap(), - "697c7b8c961b56f675d570498424ac8de1a918f6".from_hex().unwrap()) - - ]; - - test_all(v); - } - #[test] fn test_trie_json() { println!("Json trie test: "); From dc316dcfeb04619d659ac8768b62eaf595f1156f Mon Sep 17 00:00:00 2001 From: debris Date: Thu, 3 Dec 2015 06:13:27 +0100 Subject: [PATCH 21/25] additional simple json tests --- json-tests/json/rlp/empty.json | 9 +++++++++ json-tests/json/rlp/integer.json | 10 ++++++++++ 2 files changed, 19 insertions(+) create mode 100644 json-tests/json/rlp/empty.json create mode 100644 json-tests/json/rlp/integer.json diff --git a/json-tests/json/rlp/empty.json b/json-tests/json/rlp/empty.json new file mode 100644 index 000000000..19cbf4185 --- /dev/null +++ b/json-tests/json/rlp/empty.json @@ -0,0 +1,9 @@ +{ + "input": + [ + { + "operation": "append_empty" + } + ], + "output": "0x80" +} diff --git a/json-tests/json/rlp/integer.json b/json-tests/json/rlp/integer.json new file mode 100644 index 000000000..1effa4a1e --- /dev/null +++ b/json-tests/json/rlp/integer.json @@ -0,0 +1,10 @@ +{ + "input": + [ + { + "operation": "append", + "value": "0x0400" + } + ], + "output": "0x820400" +} From 611226c117ca773b9a3ef43cc4aa390e82e22cb9 Mon Sep 17 00:00:00 2001 From: debris Date: Thu, 3 Dec 2015 11:36:20 +0100 Subject: [PATCH 22/25] small changes in tests 
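
The corrected expectation in `catdog.json` above, `0xc88363617483646f67`, can be verified by hand: it is a short list whose payload is the two short strings "cat" and "dog". The sketch below rebuilds those bytes without going through the `rlp` module; `short_str` and `short_list` are helpers invented for the example and only cover the 0–55 byte short forms needed here.

```rust
// Hand-rolled short-form RLP, just enough to verify the expected output:
// a string of 2..=55 bytes gets a 0x80+len header, and a list whose payload
// is at most 55 bytes gets a 0xc0+len header.
fn short_str(s: &[u8]) -> Vec<u8> {
    assert!(s.len() >= 2 && s.len() <= 55, "single bytes and long strings use other forms");
    let mut out = vec![0x80 + s.len() as u8];
    out.extend_from_slice(s);
    out
}

fn short_list(payload: &[u8]) -> Vec<u8> {
    assert!(payload.len() <= 55);
    let mut out = vec![0xc0 + payload.len() as u8];
    out.extend_from_slice(payload);
    out
}

fn main() {
    let mut payload = short_str(b"cat"); // 0x83 'c' 'a' 't'
    payload.extend(short_str(b"dog"));   // 0x83 'd' 'o' 'g'
    let encoded = short_list(&payload);  // 0xc8 + 8-byte payload
    let hex: String = encoded.iter().map(|b| format!("{:02x}", b)).collect();
    // Matches the fixed expectation in json-tests/json/rlp/stream/catdog.json.
    assert_eq!(hex, "c88363617483646f67");
    println!("0x{}", hex);
}
```
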
--- json-tests/json/rlp/{ => stream}/catdog.json | 0 json-tests/json/rlp/{ => stream}/empty.json | 0 .../json/rlp/{ => stream}/empty_lists.json | 0 json-tests/json/rlp/{ => stream}/integer.json | 0 .../json/rlp/stream/list_of_empty_data.json | 22 +++++++++++++++++++ .../json/rlp/stream/list_of_empty_data2.json | 19 ++++++++++++++++ src/rlp.rs | 16 +++++++++++++- 7 files changed, 56 insertions(+), 1 deletion(-) rename json-tests/json/rlp/{ => stream}/catdog.json (100%) rename json-tests/json/rlp/{ => stream}/empty.json (100%) rename json-tests/json/rlp/{ => stream}/empty_lists.json (100%) rename json-tests/json/rlp/{ => stream}/integer.json (100%) create mode 100644 json-tests/json/rlp/stream/list_of_empty_data.json create mode 100644 json-tests/json/rlp/stream/list_of_empty_data2.json diff --git a/json-tests/json/rlp/catdog.json b/json-tests/json/rlp/stream/catdog.json similarity index 100% rename from json-tests/json/rlp/catdog.json rename to json-tests/json/rlp/stream/catdog.json diff --git a/json-tests/json/rlp/empty.json b/json-tests/json/rlp/stream/empty.json similarity index 100% rename from json-tests/json/rlp/empty.json rename to json-tests/json/rlp/stream/empty.json diff --git a/json-tests/json/rlp/empty_lists.json b/json-tests/json/rlp/stream/empty_lists.json similarity index 100% rename from json-tests/json/rlp/empty_lists.json rename to json-tests/json/rlp/stream/empty_lists.json diff --git a/json-tests/json/rlp/integer.json b/json-tests/json/rlp/stream/integer.json similarity index 100% rename from json-tests/json/rlp/integer.json rename to json-tests/json/rlp/stream/integer.json diff --git a/json-tests/json/rlp/stream/list_of_empty_data.json b/json-tests/json/rlp/stream/list_of_empty_data.json new file mode 100644 index 000000000..c5b898f25 --- /dev/null +++ b/json-tests/json/rlp/stream/list_of_empty_data.json @@ -0,0 +1,22 @@ +{ + "input": + [ + { + "operation": "append_list", + "len": 3 + }, + { + "operation": "append", + "value": "" + }, + { + "operation": "append", + "value": "" + }, + { + "operation": "append", + "value": "" + } + ], + "output": "0xc3808080" +} diff --git a/json-tests/json/rlp/stream/list_of_empty_data2.json b/json-tests/json/rlp/stream/list_of_empty_data2.json new file mode 100644 index 000000000..76043af91 --- /dev/null +++ b/json-tests/json/rlp/stream/list_of_empty_data2.json @@ -0,0 +1,19 @@ +{ + "input": + [ + { + "operation": "append_list", + "len": 3 + }, + { + "operation": "append_empty" + }, + { + "operation": "append_empty" + }, + { + "operation": "append_empty" + } + ], + "output": "0xc3808080" +} diff --git a/src/rlp.rs b/src/rlp.rs index 8cb5f2b9b..5f5c90590 100644 --- a/src/rlp.rs +++ b/src/rlp.rs @@ -1335,6 +1335,20 @@ mod tests { 0x80, 0x80, 0x80, 0x80, 0x80, 0x80]); } + #[test] + fn rlp_stream_list4() { + let mut stream = RlpStream::new(); + stream.append_list(17); + let v: Vec = vec![]; + for _ in 0..17 { + stream.append(&v); + } + let out = stream.out(); + assert_eq!(out, vec![0xd1, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80]); + } + #[test] fn rlp_stream_list3() { let mut stream = RlpStream::new(); @@ -1503,7 +1517,7 @@ mod tests { #[test] fn test_rlp_json() { println!("Json rlp test: "); - execute_tests_from_directory::("json-tests/json/rlp/*.json", &mut | file, input, output | { + execute_tests_from_directory::("json-tests/json/rlp/stream/*.json", &mut | file, input, output | { println!("file: {}", file); let mut stream = RlpStream::new(); From 
38a3650ba95741e9e60dcb442a53a1010b272aa9 Mon Sep 17 00:00:00 2001 From: debris Date: Thu, 3 Dec 2015 12:00:57 +0100 Subject: [PATCH 23/25] few additional json tests --- json-tests/json/.DS_Store | Bin 0 -> 6148 bytes json-tests/json/rlp/stream/bytestring0.json | 10 + json-tests/json/rlp/stream/bytestring1.json | 10 + json-tests/json/rlp/stream/bytestring7.json | 10 + json-tests/json/rlp/stream/catdog.json | 12 +- json-tests/json/rlp/stream/longlist.json | 521 ++++++++++++++++++++ json-tests/json/rlp/stream/longstring.json | 10 + 7 files changed, 563 insertions(+), 10 deletions(-) create mode 100644 json-tests/json/.DS_Store create mode 100644 json-tests/json/rlp/stream/bytestring0.json create mode 100644 json-tests/json/rlp/stream/bytestring1.json create mode 100644 json-tests/json/rlp/stream/bytestring7.json create mode 100644 json-tests/json/rlp/stream/longlist.json create mode 100644 json-tests/json/rlp/stream/longstring.json diff --git a/json-tests/json/.DS_Store b/json-tests/json/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..5ddc63de7e78f0f09186cf46a2ff1cf2b9cae2aa GIT binary patch literal 6148 zcmeH~Jr2S!425mVfW*>~F$)La1`&c2Z~+8}2?+#Z&(V4QSt!h?LeG-@#ZIlZZ)j=} z(e?AR66r-`1~vIYIqf#4$mI7QhFYo8^+Vg;}!TM!kPMk^St>SKu2 zy&WuhT}`%Nw2S8Op?PPuDF&v|E?SVlv^p5502LT0&_&+c`M-sKoBszbOsN1B_%j7` zy4h{kc&R*FKVHx3`>fi!!9l+q;q4~?i5 Date: Thu, 3 Dec 2015 13:11:02 +0100 Subject: [PATCH 24/25] removed duplicate tests --- src/rlp.rs | 99 ++---------------------------------------------------- 1 file changed, 2 insertions(+), 97 deletions(-) diff --git a/src/rlp.rs b/src/rlp.rs index 5f5c90590..076111b23 100644 --- a/src/rlp.rs +++ b/src/rlp.rs @@ -1259,7 +1259,7 @@ mod tests { run_encode_tests(tests); } - /// Vec is treated as a single value + /// Vec (Bytes) is treated as a single value #[test] fn encode_vector_u8() { let tests = vec![ @@ -1295,74 +1295,6 @@ mod tests { run_encode_tests(tests); } - #[test] - fn encode_bytes() { - let vec = vec![0u8]; - let slice: &[u8] = &vec; - let res = rlp::encode(&slice); - assert_eq!(res, vec![0u8]); - } - - #[test] - fn rlp_stream() { - let mut stream = RlpStream::new_list(2); - stream.append(&"cat").append(&"dog"); - let out = stream.out(); - assert_eq!(out, - vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g']); - } - - #[test] - fn rlp_stream_list() { - let mut stream = RlpStream::new_list(3); - stream.append_list(0); - stream.append_list(1).append_list(0); - stream.append_list(2).append_list(0).append_list(1).append_list(0); - let out = stream.out(); - assert_eq!(out, vec![0xc7, 0xc0, 0xc1, 0xc0, 0xc3, 0xc0, 0xc1, 0xc0]); - } - - #[test] - fn rlp_stream_list2() { - let mut stream = RlpStream::new(); - stream.append_list(17); - for _ in 0..17 { - stream.append(&""); - } - let out = stream.out(); - assert_eq!(out, vec![0xd1, 0x80, 0x80, 0x80, 0x80, 0x80, - 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, - 0x80, 0x80, 0x80, 0x80, 0x80, 0x80]); - } - - #[test] - fn rlp_stream_list4() { - let mut stream = RlpStream::new(); - stream.append_list(17); - let v: Vec = vec![]; - for _ in 0..17 { - stream.append(&v); - } - let out = stream.out(); - assert_eq!(out, vec![0xd1, 0x80, 0x80, 0x80, 0x80, 0x80, - 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, - 0x80, 0x80, 0x80, 0x80, 0x80, 0x80]); - } - - #[test] - fn rlp_stream_list3() { - let mut stream = RlpStream::new(); - stream.append_list(17); - - let mut res = vec![0xf8, 0x44]; - for _ in 0..17 { - stream.append(&"aaa"); - res.extend(vec![0x83, b'a', b'a', b'a']); - } - let out = stream.out(); - 
assert_eq!(out, res); - } - struct DTestPair(T, Vec) where T: rlp::Decodable + fmt::Debug + cmp::Eq; fn run_decode_tests(tests: Vec>) where T: rlp::Decodable + fmt::Debug + cmp::Eq { @@ -1372,7 +1304,7 @@ mod tests { } } - /// Vec is treated as a single value + /// Vec (Bytes) is treated as a single value #[test] fn decode_vector_u8() { let tests = vec![ @@ -1487,33 +1419,6 @@ mod tests { run_decode_tests(tests); } - #[test] - fn test_view() { - struct View<'a> { - bytes: &'a [u8] - } - - impl <'a, 'view> View<'a> where 'a: 'view { - fn new(bytes: &'a [u8]) -> View<'a> { - View { - bytes: bytes - } - } - - fn offset(&'view self, len: usize) -> View<'a> { - View::new(&self.bytes[len..]) - } - - fn data(&'view self) -> &'a [u8] { - self.bytes - } - } - - let data = vec![0, 1, 2, 3]; - let view = View::new(&data); - let _data_slice = view.offset(1).data(); - } - #[test] fn test_rlp_json() { println!("Json rlp test: "); From cbbe5ee0fe8562bcdeb6df9a9540fe07a89ea5e5 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Thu, 3 Dec 2015 14:56:39 +0100 Subject: [PATCH 25/25] trie node ref counter. good for testing. --- src/hashdb.rs | 4 ++++ src/memorydb.rs | 8 +++++++ src/overlaydb.rs | 15 ++++++++++++++ src/rlp.rs | 1 + src/trie.rs | 54 ++++++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 82 insertions(+) diff --git a/src/hashdb.rs b/src/hashdb.rs index dd0973bb3..f893c8df8 100644 --- a/src/hashdb.rs +++ b/src/hashdb.rs @@ -1,7 +1,11 @@ use hash::*; use bytes::*; +use std::collections::HashMap; pub trait HashDB { + /// Get the keys in the database together with number of underlying references. + fn keys(&self) -> HashMap; + /// Look up a given hash into the bytes that hash to it, returning None if the /// hash is not known. /// diff --git a/src/memorydb.rs b/src/memorydb.rs index 85fdc7c19..8c7eaff2c 100644 --- a/src/memorydb.rs +++ b/src/memorydb.rs @@ -116,6 +116,10 @@ impl MemoryDB { } self.data.get(key).unwrap() } + + pub fn raw_keys(&self) -> HashMap { + self.data.iter().filter_map(|(k, v)| if v.1 != 0 {Some((k.clone(), v.1))} else {None}).collect::>() + } } impl HashDB for MemoryDB { @@ -126,6 +130,10 @@ impl HashDB for MemoryDB { } } + fn keys(&self) -> HashMap { + self.data.iter().filter_map(|(k, v)| if v.1 > 0 {Some((k.clone(), v.1 as u32))} else {None} ).collect::>() + } + fn exists(&self, key: &H256) -> bool { match self.data.get(key) { Some(&(_, x)) if x > 0 => true, diff --git a/src/overlaydb.rs b/src/overlaydb.rs index d69afa0f1..78ca67d01 100644 --- a/src/overlaydb.rs +++ b/src/overlaydb.rs @@ -9,6 +9,7 @@ use memorydb::*; use std::ops::*; use std::sync::*; use std::env; +use std::collections::HashMap; use rocksdb::{DB, Writable}; #[derive(Clone)] @@ -135,6 +136,20 @@ impl OverlayDB { } impl HashDB for OverlayDB { + fn keys(&self) -> HashMap { + let mut ret: HashMap = HashMap::new(); + for (key, _) in self.backing.iterator().from_start() { + let h = H256::from_slice(key.deref()); + let r = self.payload(&h).unwrap().1; + ret.insert(h, r); + } + + for (key, refs) in self.overlay.raw_keys().into_iter() { + let refs = *ret.get(&key).unwrap_or(&0u32) as i32 + refs as i32; + ret.insert(key, refs as u32); + } + ret + } fn lookup(&self, key: &H256) -> Option<&[u8]> { // return ok if positive; if negative, check backing - might be enough references there to make // it positive again. 
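
`OverlayDB::keys` above reconciles two sources of truth: the backing store's committed reference counts and the overlay's signed, not-yet-flushed deltas. A standalone sketch of that merge follows, with plain `u64` keys standing in for `H256`; unlike the patch, which stores the raw sum, this version drops entries whose combined count is no longer positive.

```rust
// Merge backing-store reference counts with the overlay's pending deltas.
// A positive delta is a reference queued for insertion; a negative delta
// cancels references already held by the backing store.
use std::collections::HashMap;

fn merged_keys(
    backing: &HashMap<u64, u32>,
    overlay: &HashMap<u64, i32>,
) -> HashMap<u64, u32> {
    let mut ret = backing.clone();
    for (key, delta) in overlay {
        let refs = *ret.get(key).unwrap_or(&0) as i32 + delta;
        if refs > 0 {
            ret.insert(*key, refs as u32);
        } else {
            ret.remove(key); // fully cancelled: treat as absent
        }
    }
    ret
}

fn main() {
    let backing: HashMap<u64, u32> = [(1, 2), (2, 1)].iter().cloned().collect();
    let overlay: HashMap<u64, i32> = [(1, -1), (3, 1)].iter().cloned().collect();
    let keys = merged_keys(&backing, &overlay);
    assert_eq!(keys.get(&1), Some(&1)); // one of the two backing refs cancelled
    assert_eq!(keys.get(&2), Some(&1)); // untouched by the overlay
    assert_eq!(keys.get(&3), Some(&1)); // queued in the overlay only
    println!("{:?}", keys);
}
```
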
diff --git a/src/rlp.rs b/src/rlp.rs index b055632b1..b875e61c5 100644 --- a/src/rlp.rs +++ b/src/rlp.rs @@ -125,6 +125,7 @@ impl<'a> From> for UntrustedRlp<'a> { } } +#[derive(Debug)] pub enum Prototype { Null, Data(usize), diff --git a/src/trie.rs b/src/trie.rs index 081fcfcff..3d825c707 100644 --- a/src/trie.rs +++ b/src/trie.rs @@ -8,6 +8,7 @@ use hash::*; use nibbleslice::*; use bytes::*; use rlp::*; +use std::collections::HashMap; //use log::*; @@ -238,6 +239,52 @@ impl TrieDB { } } + pub fn keys(&self) -> Vec { + let mut ret: Vec = Vec::new(); + ret.push(self.root.clone()); + self.accumulate_keys(self.root_node(), &mut ret); + ret + } + + fn accumulate_keys(&self, node: Node, acc: &mut Vec) { + let mut handle_payload = |payload| { + let p = Rlp::new(payload); + if p.is_data() && p.size() == 32 { + acc.push(H256::decode(&p)); + } + + self.accumulate_keys(self.get_node(payload), acc); + }; + + match node { + Node::Extension(_, payload) => handle_payload(payload), + Node::Branch(payloads, _) => for payload in payloads.iter() { handle_payload(payload) }, + _ => {}, + } + } + + fn to_map(hashes: Vec) -> HashMap { + let mut r: HashMap = HashMap::new(); + for h in hashes.into_iter() { + let c = *r.get(&h).unwrap_or(&0); + r.insert(h, c + 1); + } + r + } + + pub fn db_items_remaining(&self) -> HashMap { + let mut ret = self.db().keys(); + for (k, v) in Self::to_map(self.keys()).into_iter() { + let old = *ret.get(&k).expect("Node in trie is not in database!"); + assert!(old >= v); + match old > v { + true => ret.insert(k, old - v), + _ => ret.remove(&k), + }; + } + ret + } + fn fmt_indent(&self, f: &mut fmt::Formatter, size: usize) -> fmt::Result { for _ in 0..size { try!(write!(f, " ")); @@ -245,6 +292,10 @@ impl TrieDB { Ok(()) } + fn root_node(&self) -> Node { + Node::decoded(self.db.lookup(&self.root).expect("Trie root not found!")) + } + fn get_node<'a>(&'a self, node: &'a [u8]) -> Node { Node::decoded(self.get_raw_or_lookup(node)) } @@ -762,11 +813,14 @@ mod tests { let mut t1 = TrieDB::new_memory(); t1.insert(&[0x01, 0x23], &big_value.to_vec()); t1.insert(&[0x01, 0x34], &big_value.to_vec()); + trace!("keys remaining {:?}", t1.db_items_remaining()); + assert!(t1.db_items_remaining().is_empty()); let mut t2 = TrieDB::new_memory(); t2.insert(&[0x01], &big_value.to_vec()); t2.insert(&[0x01, 0x23], &big_value.to_vec()); t2.insert(&[0x01, 0x34], &big_value.to_vec()); t2.remove(&[0x01]); + assert!(t2.db_items_remaining().is_empty()); /*if t1.root() != t2.root()*/ { trace!("{:?}", t1); trace!("{:?}", t2);
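
The `db_items_remaining` helper that the reworked `playpen` test leans on is essentially multiset subtraction: count how many times each hash is reachable from the root (`to_map`), subtract that from the database's per-key reference counts, and whatever is left is garbage that an insert or remove failed to clean up. A standalone sketch of that bookkeeping, again with `u64` standing in for `H256`:

```rust
// Count reachable node hashes (to_map) and subtract them from the database's
// reference counts; a non-empty result means stale nodes were left behind.
use std::collections::HashMap;

fn to_map(hashes: Vec<u64>) -> HashMap<u64, u32> {
    let mut counts = HashMap::new();
    for h in hashes {
        *counts.entry(h).or_insert(0) += 1;
    }
    counts
}

fn db_items_remaining(db_keys: &HashMap<u64, u32>, reachable: Vec<u64>) -> HashMap<u64, u32> {
    let mut ret = db_keys.clone();
    for (hash, used) in to_map(reachable) {
        let stored = *ret.get(&hash).expect("node in trie is not in database");
        assert!(stored >= used);
        if stored > used {
            ret.insert(hash, stored - used);
        } else {
            ret.remove(&hash);
        }
    }
    ret
}

fn main() {
    // Hash 3 is still in the database but no longer reachable from the root.
    let db: HashMap<u64, u32> = [(1, 1), (2, 2), (3, 1)].iter().cloned().collect();
    let leaked = db_items_remaining(&db, vec![1, 2, 2]);
    assert_eq!(leaked.len(), 1);
    assert_eq!(leaked.get(&3), Some(&1));
    println!("leaked: {:?}", leaked);
}
```

An empty result after a round of inserts and removes is exactly what the `assert!(t1.db_items_remaining().is_empty())` checks in the reworked `playpen` test above verify.
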