Merge pull request #6389 from paritytech/trie

Trie optimizations
Marek Kotewicz, 2017-08-28 18:45:30 +02:00, committed by GitHub
commit 8ead8061ee
7 changed files with 167 additions and 120 deletions

View File

@@ -17,6 +17,7 @@
 //! Disk-backed `HashDB` implementation.
 
 use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 use std::sync::Arc;
 use rlp::*;
 use hashdb::*;
@@ -66,23 +67,28 @@ impl ArchiveDB {
 impl HashDB for ArchiveDB {
 	fn keys(&self) -> HashMap<H256, i32> {
-		let mut ret: HashMap<H256, i32> = HashMap::new();
-		for (key, _) in self.backing.iter(self.column) {
-			let h = H256::from_slice(&*key);
-			ret.insert(h, 1);
-		}
+		let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
+			.map(|(key, _)| (H256::from_slice(&*key), 1))
+			.collect();
 
 		for (key, refs) in self.overlay.keys() {
-			let refs = *ret.get(&key).unwrap_or(&0) + refs;
-			ret.insert(key, refs);
+			match ret.entry(key) {
+				Entry::Occupied(mut entry) => {
+					*entry.get_mut() += refs;
+				},
+				Entry::Vacant(entry) => {
+					entry.insert(refs);
+				}
+			}
 		}
 		ret
 	}
 
 	fn get(&self, key: &H256) -> Option<DBValue> {
-		let k = self.overlay.raw(key);
-		if let Some((d, rc)) = k {
-			if rc > 0 { return Some(d); }
+		if let Some((d, rc)) = self.overlay.raw(key) {
+			if rc > 0 {
+				return Some(d);
+			}
 		}
 		self.payload(key)
 	}

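The keys() rewrite above (repeated for the other journaldb backends below) replaces a build-then-mutate loop with a straight collect() from the backing iterator, and replaces the get-then-insert pair for overlay reference counts with a single lookup through the Entry API. A minimal standalone sketch of that pattern, with plain u64 keys and slices standing in for the backing store and overlay (the function and parameter names are illustrative only, not the crate's API):

use std::collections::HashMap;
use std::collections::hash_map::Entry;

fn merged_key_counts(backing: &[u64], overlay: &[(u64, i32)]) -> HashMap<u64, i32> {
	// Every key present in the backing store starts with a reference count of 1.
	let mut ret: HashMap<u64, i32> = backing.iter().map(|&key| (key, 1)).collect();

	// Fold the overlay's (possibly negative) counts in with one hash lookup per key.
	for &(key, refs) in overlay {
		match ret.entry(key) {
			Entry::Occupied(mut entry) => { *entry.get_mut() += refs; },
			Entry::Vacant(entry) => { entry.insert(refs); },
		}
	}
	ret
}

fn main() {
	let counts = merged_key_counts(&[1, 2, 3], &[(2, 1), (4, -1)]);
	assert_eq!(counts[&2], 2);
	assert_eq!(counts[&4], -1);
}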
View File

@@ -18,6 +18,7 @@
 use std::fmt;
 use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 use std::sync::Arc;
 use parking_lot::RwLock;
 use heapsize::HeapSizeOf;
@@ -311,23 +312,28 @@ impl EarlyMergeDB {
 impl HashDB for EarlyMergeDB {
 	fn keys(&self) -> HashMap<H256, i32> {
-		let mut ret: HashMap<H256, i32> = HashMap::new();
-		for (key, _) in self.backing.iter(self.column) {
-			let h = H256::from_slice(&*key);
-			ret.insert(h, 1);
-		}
+		let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
+			.map(|(key, _)| (H256::from_slice(&*key), 1))
+			.collect();
 
 		for (key, refs) in self.overlay.keys() {
-			let refs = *ret.get(&key).unwrap_or(&0) + refs;
-			ret.insert(key, refs);
+			match ret.entry(key) {
+				Entry::Occupied(mut entry) => {
+					*entry.get_mut() += refs;
+				},
+				Entry::Vacant(entry) => {
+					entry.insert(refs);
+				}
+			}
 		}
 		ret
 	}
 
 	fn get(&self, key: &H256) -> Option<DBValue> {
-		let k = self.overlay.raw(key);
-		if let Some((d, rc)) = k {
-			if rc > 0 { return Some(d) }
+		if let Some((d, rc)) = self.overlay.raw(key) {
+			if rc > 0 {
+				return Some(d)
+			}
 		}
 		self.payload(key)
 	}

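The get() change in these backends is purely a tightening: the overlay lookup is destructured directly in the if let rather than bound to a temporary first. The control flow itself, consult the in-memory overlay and fall back to the backing payload only when the overlay has no live (positively referenced) entry, can be sketched on its own roughly like this (the Db type and the payload helper are stand-ins, not the crate's API):

use std::collections::HashMap;

struct Db {
	// In-memory overlay: value plus a reference count that may be zero or negative.
	overlay: HashMap<u64, (Vec<u8>, i32)>,
	// Backing store, consulted only when the overlay has no live entry.
	backing: HashMap<u64, Vec<u8>>,
}

impl Db {
	fn get(&self, key: &u64) -> Option<Vec<u8>> {
		// The overlay wins only while its reference count is positive.
		if let Some(&(ref d, rc)) = self.overlay.get(key) {
			if rc > 0 {
				return Some(d.clone());
			}
		}
		self.payload(key)
	}

	fn payload(&self, key: &u64) -> Option<Vec<u8>> {
		self.backing.get(key).cloned()
	}
}

fn main() {
	let mut db = Db { overlay: HashMap::new(), backing: HashMap::new() };
	db.backing.insert(1, b"disk".to_vec());
	db.overlay.insert(1, (b"mem".to_vec(), 0)); // dead overlay entry: rc == 0
	assert_eq!(db.get(&1), Some(b"disk".to_vec()));
}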
View File

@@ -17,6 +17,7 @@
 //! `JournalDB` over in-memory overlay
 
 use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 use std::sync::Arc;
 use parking_lot::RwLock;
 use heapsize::HeapSizeOf;
@@ -407,23 +408,28 @@ impl JournalDB for OverlayRecentDB {
 impl HashDB for OverlayRecentDB {
 	fn keys(&self) -> HashMap<H256, i32> {
-		let mut ret: HashMap<H256, i32> = HashMap::new();
-		for (key, _) in self.backing.iter(self.column) {
-			let h = H256::from_slice(&*key);
-			ret.insert(h, 1);
-		}
+		let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
+			.map(|(key, _)| (H256::from_slice(&*key), 1))
+			.collect();
 
 		for (key, refs) in self.transaction_overlay.keys() {
-			let refs = *ret.get(&key).unwrap_or(&0) + refs;
-			ret.insert(key, refs);
+			match ret.entry(key) {
+				Entry::Occupied(mut entry) => {
+					*entry.get_mut() += refs;
+				},
+				Entry::Vacant(entry) => {
+					entry.insert(refs);
+				}
+			}
		}
 		ret
 	}
 
 	fn get(&self, key: &H256) -> Option<DBValue> {
-		let k = self.transaction_overlay.raw(key);
-		if let Some((d, rc)) = k {
-			if rc > 0 { return Some(d) }
+		if let Some((d, rc)) = self.transaction_overlay.raw(key) {
+			if rc > 0 {
+				return Some(d)
+			}
 		}
 		let v = {
 			let journal_overlay = self.journal_overlay.read();

View File

@@ -198,7 +198,7 @@ impl JournalDB for RefCountedDB {
 	fn consolidate(&mut self, mut with: MemoryDB) {
 		for (key, (value, rc)) in with.drain() {
 			for _ in 0..rc {
-				self.emplace(key.clone(), value.clone());
+				self.emplace(key, value.clone());
 			}
 
 			for _ in rc..0 {

View File

@@ -16,14 +16,14 @@
 //! Reference-counted memory-based `HashDB` implementation.
 
-use hash::*;
-use rlp::*;
-use sha3::*;
-use hashdb::*;
-use heapsize::*;
 use std::mem;
 use std::collections::HashMap;
 use std::collections::hash_map::Entry;
+use heapsize::HeapSizeOf;
+use hash::{H256FastMap, H256};
+use rlp::NULL_RLP;
+use sha3::*;
+use hashdb::*;
 
 /// Reference-counted memory-based `HashDB` implementation.
 ///
@@ -181,7 +181,13 @@ impl HashDB for MemoryDB {
 	}
 
 	fn keys(&self) -> HashMap<H256, i32> {
-		self.data.iter().filter_map(|(k, v)| if v.1 != 0 {Some((k.clone(), v.1))} else {None}).collect()
+		self.data.iter()
+			.filter_map(|(k, v)| if v.1 != 0 {
+				Some((*k, v.1))
+			} else {
+				None
+			})
+			.collect()
 	}
 
 	fn contains(&self, key: &H256) -> bool {
@@ -200,16 +206,17 @@ impl HashDB for MemoryDB {
 			return SHA3_NULL_RLP.clone();
 		}
 		let key = value.sha3();
-		if match self.data.get_mut(&key) {
-			Some(&mut (ref mut old_value, ref mut rc @ -0x80000000i32 ... 0)) => {
-				*old_value = DBValue::from_slice(value);
-				*rc += 1;
-				false
-			},
-			Some(&mut (_, ref mut x)) => { *x += 1; false },
-			None => true,
-		}{	// ... None falls through into...
-			self.data.insert(key.clone(), (DBValue::from_slice(value), 1));
-		}
+		match self.data.entry(key) {
+			Entry::Occupied(mut entry) => {
+				let &mut (ref mut old_value, ref mut rc) = entry.get_mut();
+				if *rc >= -0x80000000i32 && *rc <= 0 {
+					*old_value = DBValue::from_slice(value);
+				}
+				*rc += 1;
+			},
+			Entry::Vacant(entry) => {
+				entry.insert((DBValue::from_slice(value), 1));
+			},
+		}
 		key
 	}
@@ -219,17 +226,18 @@ impl HashDB for MemoryDB {
 			return;
 		}
-		match self.data.get_mut(&key) {
-			Some(&mut (ref mut old_value, ref mut rc @ -0x80000000i32 ... 0)) => {
-				*old_value = value;
-				*rc += 1;
-				return;
-			},
-			Some(&mut (_, ref mut x)) => { *x += 1; return; },
-			None => {},
-		}
-		// ... None falls through into...
-		self.data.insert(key, (value, 1));
+		match self.data.entry(key) {
+			Entry::Occupied(mut entry) => {
+				let &mut (ref mut old_value, ref mut rc) = entry.get_mut();
+				if *rc >= -0x80000000i32 && *rc <= 0 {
+					*old_value = value;
+				}
+				*rc += 1;
+			},
+			Entry::Vacant(entry) => {
+				entry.insert((value, 1));
+			},
+		}
 	}
 
 	fn remove(&mut self, key: &H256) {
@@ -237,11 +245,14 @@ impl HashDB for MemoryDB {
 			return;
 		}
-		if match self.data.get_mut(key) {
-			Some(&mut (_, ref mut x)) => { *x -= 1; false }
-			None => true
-		}{	// ... None falls through into...
-			self.data.insert(key.clone(), (DBValue::new(), -1));
-		}
+		match self.data.entry(*key) {
+			Entry::Occupied(mut entry) => {
+				let &mut (_, ref mut rc) = entry.get_mut();
+				*rc -= 1;
+			},
+			Entry::Vacant(entry) => {
+				entry.insert((DBValue::new(), -1));
+			},
+		}
 	}
 }

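The three MemoryDB hunks above all trade an "if match ... { bool } { insert }" construction for a single pass over the map's Entry, so insert, emplace and remove each touch the hash table once. A standalone sketch of the same reference-counting scheme, with a plain HashMap<u64, (Vec<u8>, i32)> standing in for MemoryDB's data map (the Store type and its method names are illustrative, not the crate's):

use std::collections::HashMap;
use std::collections::hash_map::Entry;

/// Toy reference-counted store: each key maps to a value and a count that may go negative.
struct Store {
	data: HashMap<u64, (Vec<u8>, i32)>,
}

impl Store {
	fn emplace(&mut self, key: u64, value: Vec<u8>) {
		match self.data.entry(key) {
			Entry::Occupied(mut entry) => {
				let &mut (ref mut old_value, ref mut rc) = entry.get_mut();
				// A non-positive count means the slot only records pending removals,
				// so the stored value is stale and is replaced before bumping the count.
				if *rc <= 0 {
					*old_value = value;
				}
				*rc += 1;
			},
			Entry::Vacant(entry) => {
				entry.insert((value, 1));
			},
		}
	}

	fn remove(&mut self, key: u64) {
		match self.data.entry(key) {
			Entry::Occupied(mut entry) => entry.get_mut().1 -= 1,
			// Removing a key we have never seen leaves a negative count to reconcile later.
			Entry::Vacant(entry) => { entry.insert((Vec::new(), -1)); },
		}
	}
}

fn main() {
	let mut store = Store { data: HashMap::new() };
	store.remove(7);                   // count drops to -1
	store.emplace(7, b"abc".to_vec()); // stale slot refreshed, count back to 0
	store.emplace(7, b"def".to_vec()); // count still <= 0, so the value is replaced; count becomes 1
	assert_eq!(store.data[&7], (b"def".to_vec(), 1));
}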
View File

@@ -16,13 +16,14 @@
 //! Disk-backed `HashDB` implementation.
 
+use std::sync::Arc;
+use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 use error::*;
 use hash::*;
 use rlp::*;
 use hashdb::*;
 use memorydb::*;
-use std::sync::*;
-use std::collections::HashMap;
 use kvdb::{KeyValueDB, DBTransaction};
 
 /// Implementation of the `HashDB` trait for a disk-backed database with a memory overlay.
@@ -125,19 +126,27 @@ impl OverlayDB {
 impl HashDB for OverlayDB {
 	fn keys(&self) -> HashMap<H256, i32> {
-		let mut ret: HashMap<H256, i32> = HashMap::new();
-		for (key, _) in self.backing.iter(self.column) {
-			let h = H256::from_slice(&*key);
-			let r = self.payload(&h).unwrap().1;
-			ret.insert(h, r as i32);
-		}
+		let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
+			.map(|(key, _)| {
+				let h = H256::from_slice(&*key);
+				let r = self.payload(&h).unwrap().1;
+				(h, r as i32)
+			})
+			.collect();
 
 		for (key, refs) in self.overlay.keys() {
-			let refs = *ret.get(&key).unwrap_or(&0) + refs;
-			ret.insert(key, refs);
+			match ret.entry(key) {
+				Entry::Occupied(mut entry) => {
+					*entry.get_mut() += refs;
+				},
+				Entry::Vacant(entry) => {
+					entry.insert(refs);
+				}
+			}
 		}
 		ret
 	}
 
 	fn get(&self, key: &H256) -> Option<DBValue> {
 		// return ok if positive; if negative, check backing - might be enough references there to make
 		// it positive again.
@@ -165,6 +174,7 @@ impl HashDB for OverlayDB {
 			_ => None,
 		}
 	}
+
 	fn contains(&self, key: &H256) -> bool {
 		// return ok if positive; if negative, check backing - might be enough references there to make
 		// it positive again.
@@ -185,6 +195,7 @@ impl HashDB for OverlayDB {
 			}
 		}
 	}
+
 	fn insert(&mut self, value: &[u8]) -> H256 { self.overlay.insert(value) }
 	fn emplace(&mut self, key: H256, value: DBValue) { self.overlay.emplace(key, value); }
 	fn remove(&mut self, key: &H256) { self.overlay.remove(key); }

View File

@@ -217,10 +217,12 @@ impl<'a> TrieDBIterator<'a> {
 		Ok(r)
 	}
 
-	fn seek_descend<'key>(&mut self, node_data: DBValue, key: &NibbleSlice<'key>) -> super::Result<()> {
+	fn seek<'key>(&mut self, mut node_data: DBValue, mut key: NibbleSlice<'key>) -> super::Result<()> {
+		loop {
+		let (data, mid) = {
 		let node = Node::decoded(&node_data);
 		match node {
-			Node::Leaf(ref slice, _) => {
+			Node::Leaf(slice, _) => {
 				if slice == key {
 					self.trail.push(Crumb {
 						status: Status::At,
@@ -234,7 +236,7 @@ impl<'a> TrieDBIterator<'a> {
 				}
 
 				self.key_nibbles.extend(slice.iter());
-				Ok(())
+				return Ok(())
 			},
 			Node::Extension(ref slice, ref item) => {
 				if key.starts_with(slice) {
@@ -244,10 +246,10 @@ impl<'a> TrieDBIterator<'a> {
 					});
 					self.key_nibbles.extend(slice.iter());
 					let data = self.db.get_raw_or_lookup(&*item)?;
-					self.seek_descend(data, &key.mid(slice.len()))
+					(data, slice.len())
 				} else {
 					self.descend(&node_data)?;
-					Ok(())
+					return Ok(())
 				}
 			},
 			Node::Branch(ref nodes, _) => match key.is_empty() {
@@ -256,7 +258,7 @@ impl<'a> TrieDBIterator<'a> {
 						status: Status::At,
 						node: node.clone().into(),
 					});
-					Ok(())
+					return Ok(())
 				},
 				false => {
 					let i = key.at(0);
@@ -266,10 +268,15 @@ impl<'a> TrieDBIterator<'a> {
 					});
 					self.key_nibbles.push(i);
 					let child = self.db.get_raw_or_lookup(&*nodes[i as usize])?;
-					self.seek_descend(child, &key.mid(1))
+					(child, 1)
 				}
 			},
-			_ => Ok(())
+			_ => return Ok(()),
 		}
+		};
+
+		node_data = data;
+		key = key.mid(mid);
+		}
 	}
@@ -314,7 +321,7 @@ impl<'a> TrieIterator for TrieDBIterator<'a> {
 		self.trail.clear();
 		self.key_nibbles.clear();
 		let root_rlp = self.db.root_data()?;
-		self.seek_descend(root_rlp, &NibbleSlice::new(key))
+		self.seek(root_rlp, NibbleSlice::new(key))
 	}
 }
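
The seek change is a standard tail-recursion-to-loop rewrite: instead of seek_descend calling itself with the child node and a shortened key, every match arm either returns or evaluates to the next (node, offset) pair, and the enclosing loop rebinds the cursor; scoping the match inside `let (data, mid) = { ... };` also ends the borrow of the decoded node before the cursor is mutated. A minimal sketch of the same shape over a toy node type (this Node and seek are illustrative only, not the trie crate's types):

/// Toy node: either a leaf value or an internal node that consumes
/// `skip` key bytes before descending to its child.
enum Node {
	Leaf(u8),
	Inner { skip: usize, child: Box<Node> },
}

/// Iterative descent: each arm either returns or hands back the next
/// (node, offset) pair, so the recursive call becomes a loop iteration.
fn seek(mut node: &Node, mut key: &[u8]) -> Option<u8> {
	loop {
		let (next, mid) = match *node {
			Node::Leaf(value) => {
				return if key.is_empty() { Some(value) } else { None };
			}
			Node::Inner { skip, ref child } => {
				if key.len() < skip {
					return None;
				}
				(child.as_ref(), skip)
			}
		};
		node = next;
		key = &key[mid..];
	}
}

fn main() {
	let trie = Node::Inner { skip: 2, child: Box::new(Node::Leaf(42)) };
	assert_eq!(seek(&trie, &[0, 1]), Some(42));
	assert_eq!(seek(&trie, &[0]), None);
}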