commit 4e8092b008
@@ -64,6 +64,7 @@ pub struct EarlyMergeDB {
 	overlay: MemoryDB,
 	backing: Arc<Database>,
 	refs: Option<Arc<RwLock<HashMap<H256, RefInfo>>>>,
+	latest_era: Option<u64>,
 }
 
 // all keys must be at least 12 bytes
@@ -90,11 +91,13 @@ impl EarlyMergeDB {
 			backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database");
 		}
 
-		let refs = Some(Arc::new(RwLock::new(EarlyMergeDB::read_refs(&backing))));
+		let (latest_era, refs) = EarlyMergeDB::read_refs(&backing);
+		let refs = Some(Arc::new(RwLock::new(refs)));
 		EarlyMergeDB {
 			overlay: MemoryDB::new(),
 			backing: Arc::new(backing),
 			refs: refs,
+			latest_era: latest_era,
 		}
 	}
 
@@ -225,9 +228,9 @@ impl EarlyMergeDB {
 
 	#[cfg(test)]
 	fn can_reconstruct_refs(&self) -> bool {
-		let reconstructed = Self::read_refs(&self.backing);
+		let (latest_era, reconstructed) = Self::read_refs(&self.backing);
 		let refs = self.refs.as_ref().unwrap().write().unwrap();
-		if *refs != reconstructed {
+		if *refs != reconstructed || latest_era != self.latest_era {
 			let clean_refs = refs.iter().filter_map(|(k, v)| if reconstructed.get(k) == Some(v) {None} else {Some((k.clone(), v.clone()))}).collect::<HashMap<_, _>>();
 			let clean_recon = reconstructed.into_iter().filter_map(|(k, v)| if refs.get(&k) == Some(&v) {None} else {Some((k.clone(), v.clone()))}).collect::<HashMap<_, _>>();
 			warn!(target: "jdb", "mem: {:?} != log: {:?}", clean_refs, clean_recon);
@@ -241,10 +244,12 @@ impl EarlyMergeDB {
 		self.backing.get(&key.bytes()).expect("Low-level database error. Some issue with your hard disk?").map(|v| v.to_vec())
 	}
 
-	fn read_refs(db: &Database) -> HashMap<H256, RefInfo> {
+	fn read_refs(db: &Database) -> (Option<u64>, HashMap<H256, RefInfo>) {
 		let mut refs = HashMap::new();
+		let mut latest_era = None;
 		if let Some(val) = db.get(&LATEST_ERA_KEY).expect("Low-level database error.") {
 			let mut era = decode::<u64>(&val);
+			latest_era = Some(era);
 			loop {
 				let mut index = 0usize;
 				while let Some(rlp_data) = db.get({
@@ -265,7 +270,7 @@ impl EarlyMergeDB {
 				era -= 1;
 			}
 		}
-		refs
+		(latest_era, refs)
 	}
 }
 
@@ -320,6 +325,7 @@ impl JournalDB for EarlyMergeDB {
 			overlay: MemoryDB::new(),
 			backing: self.backing.clone(),
 			refs: self.refs.clone(),
+			latest_era: self.latest_era.clone(),
 		})
 	}
 
@@ -435,7 +441,10 @@ impl JournalDB for EarlyMergeDB {
 				trace!(target: "jdb.ops", " Deletes: {:?}", removes);
 			}
 			try!(batch.put(&last, r.as_raw()));
-			try!(batch.put(&LATEST_ERA_KEY, &encode(&now)));
+			if self.latest_era.map_or(true, |e| now > e) {
+				try!(batch.put(&LATEST_ERA_KEY, &encode(&now)));
+				self.latest_era = Some(now);
+			}
 		}
 
 		// apply old commits' details
@@ -552,6 +561,26 @@ mod tests {
 		assert!(jdb.exists(&x));
 	}
 
+	#[test]
+	fn insert_older_era() {
+		let mut jdb = EarlyMergeDB::new_temp();
+		let foo = jdb.insert(b"foo");
+		jdb.commit(0, &b"0a".sha3(), None).unwrap();
+		assert!(jdb.can_reconstruct_refs());
+
+		let bar = jdb.insert(b"bar");
+		jdb.commit(1, &b"1".sha3(), Some((0, b"0a".sha3()))).unwrap();
+		assert!(jdb.can_reconstruct_refs());
+
+		jdb.remove(&bar);
+		jdb.commit(0, &b"0b".sha3(), None).unwrap();
+		assert!(jdb.can_reconstruct_refs());
+		jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
+
+		assert!(jdb.exists(&foo));
+		assert!(jdb.exists(&bar));
+	}
+
 	#[test]
 	fn long_history() {
 		// history is 3
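A minimal standalone sketch of the Option-based latest-era guard that the EarlyMergeDB hunks above introduce and that the OverlayRecentDB hunks below adopt as well; the free function should_write_era_marker and the bare era values are illustrative assumptions, not code from this commit.

// Illustrative only: mirrors the latest_era.map_or(true, |e| now > e) guard
// from the patch, written as a free function rather than a JournalDB method.
fn should_write_era_marker(latest_era: Option<u64>, now: u64) -> bool {
	// Write the marker when nothing has been recorded yet, or when `now`
	// is strictly newer; committing an older or equal era leaves it alone.
	latest_era.map_or(true, |e| now > e)
}

fn main() {
	assert!(should_write_era_marker(None, 0));      // fresh database
	assert!(should_write_era_marker(Some(1), 2));   // newer era advances the marker
	assert!(!should_write_era_marker(Some(1), 0));  // older era: marker untouched
	assert!(!should_write_era_marker(Some(1), 1));  // same era: already recorded
}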
@@ -66,7 +66,7 @@ pub struct OverlayRecentDB {
 struct JournalOverlay {
 	backing_overlay: MemoryDB,
 	journal: HashMap<u64, Vec<JournalEntry>>,
-	latest_era: u64,
+	latest_era: Option<u64>,
 }
 
 #[derive(PartialEq)]
@@ -152,10 +152,10 @@ impl OverlayRecentDB {
 		let mut journal = HashMap::new();
 		let mut overlay = MemoryDB::new();
 		let mut count = 0;
-		let mut latest_era = 0;
+		let mut latest_era = None;
 		if let Some(val) = db.get(&LATEST_ERA_KEY).expect("Low-level database error.") {
-			latest_era = decode::<u64>(&val);
-			let mut era = latest_era;
+			let mut era = decode::<u64>(&val);
+			latest_era = Some(era);
 			loop {
 				let mut index = 0usize;
 				while let Some(rlp_data) = db.get({
@@ -241,9 +241,9 @@ impl JournalDB for OverlayRecentDB {
 			k.append(&index);
 			k.append(&&PADDING[..]);
 			try!(batch.put(&k.drain(), r.as_raw()));
-			if now >= journal_overlay.latest_era {
+			if journal_overlay.latest_era.map_or(true, |e| now > e) {
 				try!(batch.put(&LATEST_ERA_KEY, &encode(&now)));
-				journal_overlay.latest_era = now;
+				journal_overlay.latest_era = Some(now);
 			}
 			journal_overlay.journal.entry(now).or_insert_with(Vec::new).push(JournalEntry { id: id.clone(), insertions: inserted_keys, deletions: removed_keys });
 		}
@@ -870,4 +870,24 @@ mod tests {
 			assert!(!jdb.exists(&bar));
 		}
 	}
+
+	#[test]
+	fn insert_older_era() {
+		let mut jdb = OverlayRecentDB::new_temp();
+		let foo = jdb.insert(b"foo");
+		jdb.commit(0, &b"0a".sha3(), None).unwrap();
+		assert!(jdb.can_reconstruct_refs());
+
+		let bar = jdb.insert(b"bar");
+		jdb.commit(1, &b"1".sha3(), Some((0, b"0a".sha3()))).unwrap();
+		assert!(jdb.can_reconstruct_refs());
+
+		jdb.remove(&bar);
+		jdb.commit(0, &b"0b".sha3(), None).unwrap();
+		assert!(jdb.can_reconstruct_refs());
+		jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
+
+		assert!(jdb.exists(&foo));
+		assert!(jdb.exists(&bar));
+	}
 }
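On the read side, both files switch to the same shape: latest_era stays None until the LATEST_ERA_KEY marker is actually found in the backing database. A small sketch of that pattern, using an in-memory HashMap as a hypothetical stand-in for the backing Database (which really stores an RLP-encoded u64 under LATEST_ERA_KEY):

use std::collections::HashMap;

// Hypothetical stand-in for the backing Database; the real code decodes an
// RLP-encoded u64 stored under LATEST_ERA_KEY rather than a plain u64 value.
fn read_latest_era(backing: &HashMap<Vec<u8>, u64>, latest_era_key: &[u8]) -> Option<u64> {
	backing.get(latest_era_key).copied()
}

fn main() {
	let key = b"latest-era".to_vec();

	let empty: HashMap<Vec<u8>, u64> = HashMap::new();
	assert_eq!(read_latest_era(&empty, &key), None); // fresh database: no marker yet

	let mut populated = HashMap::new();
	populated.insert(key.clone(), 7u64);
	assert_eq!(read_latest_era(&populated, &key), Some(7)); // marker present
}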