Merge pull request #972 from ethcore/db_writer

querying extras separated into its own module
This commit is contained in:
Arkadiy Paronyan 2016-04-21 11:26:13 +02:00
commit f2a5630fdf
2 changed files with 88 additions and 50 deletions

View File

@ -30,7 +30,7 @@ use blockchain::bloom_indexer::BloomIndexer;
use blockchain::tree_route::TreeRoute; use blockchain::tree_route::TreeRoute;
use blockchain::update::ExtrasUpdate; use blockchain::update::ExtrasUpdate;
use blockchain::{CacheSize, ImportRoute}; use blockchain::{CacheSize, ImportRoute};
use db::{Writable, Readable, Key}; use db::{Writable, Readable, Key, CacheUpdatePolicy};
const BLOOM_INDEX_SIZE: usize = 16; const BLOOM_INDEX_SIZE: usize = 16;
const BLOOM_LEVELS: u8 = 3; const BLOOM_LEVELS: u8 = 3;
@ -183,7 +183,7 @@ impl BlockProvider for BlockChain {
/// Returns true if the given block is known /// Returns true if the given block is known
/// (though not necessarily a part of the canon chain). /// (though not necessarily a part of the canon chain).
fn is_known(&self, hash: &H256) -> bool { fn is_known(&self, hash: &H256) -> bool {
self.query_extras_exist(hash, &self.block_details) self.extras_db.exists_with_cache(&self.block_details, hash)
} }
// We do not store tracing information. // We do not store tracing information.
@ -466,28 +466,22 @@ impl BlockChain {
batch.put(b"best", &update.info.hash).unwrap(); batch.put(b"best", &update.info.hash).unwrap();
{ {
let mut write_details = self.block_details.write().unwrap(); for hash in update.block_details.keys().cloned() {
for (hash, details) in update.block_details.into_iter() { self.note_used(CacheID::Extras(ExtrasIndex::BlockDetails, hash));
batch.write(&hash, &details);
self.note_used(CacheID::Extras(ExtrasIndex::BlockDetails, hash.clone()));
write_details.insert(hash, details);
} }
let mut write_details = self.block_details.write().unwrap();
batch.extend_with_cache(&mut write_details, update.block_details, CacheUpdatePolicy::Overwrite);
} }
{ {
let mut write_receipts = self.block_receipts.write().unwrap(); let mut write_receipts = self.block_receipts.write().unwrap();
for (hash, receipt) in &update.block_receipts { batch.extend_with_cache(&mut write_receipts, update.block_receipts, CacheUpdatePolicy::Remove);
batch.write(hash, receipt);
write_receipts.remove(hash);
}
} }
{ {
let mut write_blocks_blooms = self.blocks_blooms.write().unwrap(); let mut write_blocks_blooms = self.blocks_blooms.write().unwrap();
for (bloom_hash, blocks_bloom) in &update.blocks_blooms { batch.extend_with_cache(&mut write_blocks_blooms, update.blocks_blooms, CacheUpdatePolicy::Remove);
batch.write(bloom_hash, blocks_bloom);
write_blocks_blooms.remove(bloom_hash);
}
} }
// These cached values must be updated last and together // These cached values must be updated last and together
@ -508,15 +502,8 @@ impl BlockChain {
} }
} }
for (number, hash) in &update.block_hashes { batch.extend_with_cache(&mut write_hashes, update.block_hashes, CacheUpdatePolicy::Remove);
batch.write(number, hash); batch.extend_with_cache(&mut write_txs, update.transactions_addresses, CacheUpdatePolicy::Remove);
write_hashes.remove(number);
}
for (hash, tx_address) in &update.transactions_addresses {
batch.write(hash, tx_address);
write_txs.remove(hash);
}
// update extras database // update extras database
self.extras_db.write(batch).unwrap(); self.extras_db.write(batch).unwrap();
@ -751,32 +738,8 @@ impl BlockChain {
T: ExtrasIndexable + Clone + Decodable, T: ExtrasIndexable + Clone + Decodable,
K: Key<T> + Eq + Hash + Clone, K: Key<T> + Eq + Hash + Clone,
H256: From<K> { H256: From<K> {
{
let read = cache.read().unwrap();
if let Some(v) = read.get(hash) {
return Some(v.clone());
}
}
self.note_used(CacheID::Extras(T::index(), H256::from(hash.clone()))); self.note_used(CacheID::Extras(T::index(), H256::from(hash.clone())));
self.extras_db.read_with_cache(cache, hash)
self.extras_db.read(hash).map(|t: T| {
let mut write = cache.write().unwrap();
write.insert(hash.clone(), t.clone());
t
})
}
fn query_extras_exist<K, T>(&self, hash: &K, cache: &RwLock<HashMap<K, T>>) -> bool where
K: Key<T> + Eq + Hash + Clone {
{
let read = cache.read().unwrap();
if let Some(_) = read.get(hash) {
return true;
}
}
self.extras_db.exists::<T>(hash)
} }
/// Get current cache size. /// Get current cache size.

View File

@ -16,9 +16,18 @@
//! Extras db utils. //! Extras db utils.
use std::hash::Hash;
use std::sync::RwLock;
use std::collections::HashMap;
use util::{H264, DBTransaction, Database}; use util::{H264, DBTransaction, Database};
use util::rlp::{encode, Encodable, decode, Decodable}; use util::rlp::{encode, Encodable, decode, Decodable};
#[derive(Clone, Copy)]
pub enum CacheUpdatePolicy {
Overwrite,
Remove,
}
/// Should be used to get database key associated with given value. /// Should be used to get database key associated with given value.
pub trait Key<T> { pub trait Key<T> {
/// Returns db key. /// Returns db key.
@ -27,16 +36,82 @@ pub trait Key<T> {
/// Should be used to write value into database. /// Should be used to write value into database.
pub trait Writable { pub trait Writable {
/// Writes key into database. /// Writes the value into the database.
fn write<T>(&self, key: &Key<T>, value: &T) where T: Encodable; fn write<T>(&self, key: &Key<T>, value: &T) where T: Encodable;
/// Writes the value into the database and updates the cache.
fn write_with_cache<K, T>(&self, cache: &mut HashMap<K, T>, key: K, value: T, policy: CacheUpdatePolicy) where
K: Key<T> + Hash + Eq,
T: Encodable {
self.write(&key, &value);
match policy {
CacheUpdatePolicy::Overwrite => {
cache.insert(key, value);
},
CacheUpdatePolicy::Remove => {
cache.remove(&key);
}
}
}
/// Writes the values into the database and updates the cache.
fn extend_with_cache<K, T>(&self, cache: &mut HashMap<K, T>, values: HashMap<K, T>, policy: CacheUpdatePolicy)
where K: Key<T> + Hash + Eq, T: Encodable {
match policy {
CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() {
self.write(&key, &value);
cache.insert(key, value);
}
},
CacheUpdatePolicy::Remove => {
for (key, value) in &values {
self.write(key, value);
cache.remove(key);
}
},
}
}
} }
/// Should be used to read values from database. /// Should be used to read values from database.
pub trait Readable { pub trait Readable {
/// Returns value for given key. /// Returns value for given key.
fn read<T>(&self, key: &Key<T>) -> Option<T> where T: Decodable; fn read<T>(&self, key: &Key<T>) -> Option<T> where T: Decodable;
/// Returns value for given key either in cache or in database.
fn read_with_cache<K, T>(&self, cache: &RwLock<HashMap<K, T>>, key: &K) -> Option<T> where
K: Key<T> + Eq + Hash + Clone,
T: Clone + Decodable {
{
let read = cache.read().unwrap();
if let Some(v) = read.get(key) {
return Some(v.clone());
}
}
self.read(key).map(|value: T|{
let mut write = cache.write().unwrap();
write.insert(key.clone(), value.clone());
value
})
}
/// Returns true if given value exists. /// Returns true if given value exists.
fn exists<T>(&self, key: &Key<T>) -> bool; fn exists<T>(&self, key: &Key<T>) -> bool;
/// Returns true if given value exists either in cache or in database.
fn exists_with_cache<K, T>(&self, cache: &RwLock<HashMap<K, T>>, key: &K) -> bool where
K: Eq + Hash + Key<T> {
{
let read = cache.read().unwrap();
if read.get(key).is_some() {
return true;
}
}
self.exists::<T>(key)
}
} }
impl Writable for DBTransaction { impl Writable for DBTransaction {