Add cache to HeaderChain struct.

Adrian Brink 2017-04-05 13:50:11 +02:00
parent e004e05037
commit 8ea25eeb3c
3 changed files with 52 additions and 14 deletions
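The change threads a shared `Arc<Mutex<Cache>>` through `HeaderChain::new`, both `Client` constructors, and `Service::start`. A minimal sketch of the new calling convention, modelled directly on the tests in this diff (the six-hour TTL and `Default::default()` sizes mirror what the commit hard-codes; the helper name `open_test_chain` is illustrative and assumes the light-client crate's imports are in scope):

    use std::sync::Arc;
    use time::Duration;
    use util::Mutex;
    use cache::Cache;
    use ethcore::spec::Spec;

    // Hypothetical helper mirroring the test set-up in this diff: an in-memory DB,
    // the test spec's genesis header, and a shared cache with a six-hour TTL.
    fn open_test_chain() -> HeaderChain {
        let spec = Spec::new_test();
        let genesis_header = spec.genesis_header();
        let db = Arc::new(::util::kvdb::in_memory(0));
        let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));

        // The trailing `cache` argument is the only change callers see.
        HeaderChain::new(db, None, &::rlp::encode(&genesis_header), cache)
            .expect("in-memory DB; construction should not fail")
    }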

View File

@@ -39,6 +39,9 @@ use rlp::{Encodable, Decodable, DecoderError, RlpStream, Rlp, UntrustedRlp};
 use util::{H256, U256, HeapSizeOf, RwLock};
 use util::kvdb::{DBTransaction, KeyValueDB};
+use cache::Cache;
+use util::Mutex;
 use smallvec::SmallVec;

 /// Store at least this many candidate headers at all times.
@@ -138,11 +141,12 @@ pub struct HeaderChain {
	best_block: RwLock<BlockDescriptor>,
	db: Arc<KeyValueDB>,
	col: Option<u32>,
+	cache: Arc<Mutex<Cache>>,
 }

 impl HeaderChain {
	/// Create a new header chain given this genesis block and database to read from.
-	pub fn new(db: Arc<KeyValueDB>, col: Option<u32>, genesis: &[u8]) -> Result<Self, String> {
+	pub fn new(db: Arc<KeyValueDB>, col: Option<u32>, genesis: &[u8], cache: Arc<Mutex<Cache>>) -> Result<Self, String> {
		use ethcore::views::HeaderView;

		let chain = if let Some(current) = db.get(col, CURRENT_KEY)? {
@@ -186,6 +190,7 @@ impl HeaderChain {
				candidates: RwLock::new(candidates),
				db: db,
				col: col,
+				cache: cache,
			}
		} else {
			let g_view = HeaderView::new(genesis);
@@ -199,6 +204,7 @@ impl HeaderChain {
				candidates: RwLock::new(BTreeMap::new()),
				db: db,
				col: col,
+				cache: cache,
			}
		};
@@ -408,7 +414,7 @@ impl HeaderChain {
		}
	}

-	/// Get the nth CHT root, if it's been computed.
+	/// Get the nth CHT root, if it has been computed.
	///
	/// CHT root 0 is from block `1..2048`.
	/// CHT root 1 is from block `2049..4096`
@@ -493,6 +499,22 @@ mod tests {
	use ethcore::ids::BlockId;
	use ethcore::header::Header;
	use ethcore::spec::Spec;
+	use cache::Cache;
+	use time::Duration;
+	use util::Mutex;
+
+	#[test]
+	fn basic_chain_with_cache() {
+		let spec = Spec::new_test();
+		let genesis_header = spec.genesis_header();
+		let db = make_db();
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
+
+		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache);
+	}

	fn make_db() -> Arc<::util::KeyValueDB> {
		Arc::new(::util::kvdb::in_memory(0))
@@ -504,7 +526,9 @@ mod tests {
		let genesis_header = spec.genesis_header();
		let db = make_db();
-		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header)).unwrap();
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
+
+		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache).unwrap();

		let mut parent_hash = genesis_header.hash();
		let mut rolling_timestamp = genesis_header.timestamp();
@@ -534,9 +558,10 @@ mod tests {
	fn reorganize() {
		let spec = Spec::new_test();
		let genesis_header = spec.genesis_header();
		let db = make_db();
-		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header)).unwrap();
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
+		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache).unwrap();

		let mut parent_hash = genesis_header.hash();
		let mut rolling_timestamp = genesis_header.timestamp();
@@ -617,8 +642,10 @@ mod tests {
		let spec = Spec::new_test();
		let genesis_header = spec.genesis_header();
		let db = make_db();
-		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header)).unwrap();
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
+		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache).unwrap();

		assert!(chain.block_header(BlockId::Earliest).is_some());
		assert!(chain.block_header(BlockId::Latest).is_some());
@@ -630,9 +657,10 @@ mod tests {
		let spec = Spec::new_test();
		let genesis_header = spec.genesis_header();
		let db = make_db();
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));

		{
-			let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header)).unwrap();
+			let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
			let mut parent_hash = genesis_header.hash();
			let mut rolling_timestamp = genesis_header.timestamp();

			for i in 1..10000 {
@@ -652,7 +680,7 @@ mod tests {
			}
		}

-		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header)).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
		assert!(chain.block_header(BlockId::Number(10)).is_none());
		assert!(chain.block_header(BlockId::Number(9000)).is_some());
		assert!(chain.cht_root(2).is_some());
@@ -665,9 +693,10 @@ mod tests {
		let spec = Spec::new_test();
		let genesis_header = spec.genesis_header();
		let db = make_db();
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));

		{
-			let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header)).unwrap();
+			let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
			let mut parent_hash = genesis_header.hash();
			let mut rolling_timestamp = genesis_header.timestamp();
@@ -709,7 +738,7 @@ mod tests {
			}
		}

		// after restoration, non-canonical eras should still be loaded.
-		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header)).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
		assert_eq!(chain.block_header(BlockId::Latest).unwrap().number(), 10);
		assert!(chain.candidates.read().get(&100).is_some())
	}
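The `cht_root` doc-comment updated above spells out the CHT numbering: root 0 covers blocks 1..2048, root 1 covers 2049..4096. A sketch of that index arithmetic (the `CHT_SIZE` constant name here is assumed, not taken from the diff):

    // Assumed section size; the real constant lives in the light client's CHT code.
    const CHT_SIZE: u64 = 2048;

    // First and last block number summarized by the nth CHT root:
    // n = 0 -> 1..=2048, n = 1 -> 2049..=4096, and so on.
    fn cht_block_range(n: u64) -> (u64, u64) {
        (n * CHT_SIZE + 1, (n + 1) * CHT_SIZE)
    }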

View File

@@ -36,6 +36,8 @@ use util::kvdb::{KeyValueDB, CompactionProfile};

 use self::header_chain::{AncestryIter, HeaderChain};
+use cache::Cache;

 pub use self::service::Service;

 mod header_chain;
@@ -120,13 +122,13 @@ pub struct Client {
 impl Client {
	/// Create a new `Client`.
-	pub fn new(config: Config, db: Arc<KeyValueDB>, chain_col: Option<u32>, spec: &Spec, io_channel: IoChannel<ClientIoMessage>) -> Result<Self, String> {
+	pub fn new(config: Config, db: Arc<KeyValueDB>, chain_col: Option<u32>, spec: &Spec, io_channel: IoChannel<ClientIoMessage>, cache: Arc<Mutex<Cache>>) -> Result<Self, String> {
		let gh = ::rlp::encode(&spec.genesis_header());

		Ok(Client {
			queue: HeaderQueue::new(config.queue, spec.engine.clone(), io_channel, true),
			engine: spec.engine.clone(),
-			chain: HeaderChain::new(db.clone(), chain_col, &gh)?,
+			chain: HeaderChain::new(db.clone(), chain_col, &gh, cache)?,
			report: RwLock::new(ClientReport::default()),
			import_lock: Mutex::new(()),
			db: db,
@@ -135,10 +137,10 @@ impl Client {
	/// Create a new `Client` backed purely in-memory.
	/// This will ignore all database options in the configuration.
-	pub fn in_memory(config: Config, spec: &Spec, io_channel: IoChannel<ClientIoMessage>) -> Self {
+	pub fn in_memory(config: Config, spec: &Spec, io_channel: IoChannel<ClientIoMessage>, cache: Arc<Mutex<Cache>>) -> Self {
		let db = ::util::kvdb::in_memory(0);

-		Client::new(config, Arc::new(db), None, spec, io_channel).expect("New DB creation infallible; qed")
+		Client::new(config, Arc::new(db), None, spec, io_channel, cache).expect("New DB creation infallible; qed")
	}

	/// Import a header to the queue for additional verification.
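Both `Client` constructors now take the cache as their final parameter and hand it on to `HeaderChain::new`. A hypothetical call site for the updated in-memory constructor (`config`, `spec`, and `io_channel` stand in for values the caller already has; they are not from the diff):

    // Placeholder setup: assumes a Config, a Spec, and an IoChannel are in hand.
    let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
    let client = Client::in_memory(config, &spec, io_channel, cache);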

View File

@@ -27,6 +27,10 @@ use ethcore::spec::Spec;
 use io::{IoContext, IoError, IoHandler, IoService};
 use util::kvdb::{Database, DatabaseConfig};
+use cache::Cache;
+use time::Duration;
+use util::Mutex;

 use super::{Client, Config as ClientConfig};

 /// Errors on service initialization.
@@ -56,6 +60,8 @@ pub struct Service {
 impl Service {
	/// Start the service: initialize I/O workers and client itself.
	pub fn start(config: ClientConfig, spec: &Spec, path: &Path) -> Result<Self, Error> {
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
+
		// initialize database.
		let mut db_config = DatabaseConfig::with_columns(db::NUM_COLUMNS);
@@ -78,6 +84,7 @@ impl Service {
			db::COL_LIGHT_CHAIN,
			spec,
			io_service.channel(),
+			cache,
		).map_err(Error::Database)?);

		io_service.register_handler(Arc::new(ImportBlocks(client.clone()))).map_err(Error::Io)?;

		Ok(Service {
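`Service::start` keeps its external signature; the cache is built inside the function and passed to `Client::new`, so existing callers of the service are unaffected. A sketch of such an unchanged call site (the path and `config` value are illustrative only):

    // Callers of Service::start do not change; the cache is internal to start().
    let service = Service::start(config, &spec, Path::new("./light_db")).expect("service start");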