Ancient target set. InvalidStateRoot bug (#69) (#149)

This commit is contained in:
rakita 2020-12-10 17:57:26 +01:00 committed by GitHub
parent ea3efd926e
commit 837e8b8725
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 78 additions and 33 deletions

View File

@ -749,7 +749,7 @@ impl BlockChain {
} }
if hash != bc.genesis_hash() { if hash != bc.genesis_hash() {
trace!("First block calculated: {:?}", hash); info!("First new block calculated: {:?}", hash);
let mut batch = db.key_value().transaction(); let mut batch = db.key_value().transaction();
batch.put(db::COL_EXTRA, b"first", &hash); batch.put(db::COL_EXTRA, b"first", &hash);
db.key_value().write(batch).expect("Low level database error when writing 'first' block. Some issue with disk?"); db.key_value().write(batch).expect("Low level database error when writing 'first' block. Some issue with disk?");

View File

@ -531,6 +531,10 @@ impl Importer {
.unwrap_or_else(|e| panic!("Receipt bytes should be valid: {:?}", e)); .unwrap_or_else(|e| panic!("Receipt bytes should be valid: {:?}", e));
let _import_lock = self.import_lock.lock(); let _import_lock = self.import_lock.lock();
if unverified.header.number() >= chain.best_block_header().number() {
panic!("Ancient block number is higher than best block number");
}
{ {
trace_time!("import_old_block"); trace_time!("import_old_block");
// verify the block, passing the chain for updating the epoch verifier. // verify the block, passing the chain for updating the epoch verifier.
@ -1201,15 +1205,27 @@ impl Client {
/// Get a copy of the best block's state. /// Get a copy of the best block's state.
pub fn latest_state_and_header(&self) -> (State<StateDB>, Header) { pub fn latest_state_and_header(&self) -> (State<StateDB>, Header) {
let header = self.best_block_header(); let mut nb_tries = 5;
let state = State::from_existing( // Here, we are taking the latest block and then the latest state. If, in between those two calls, the `best` block got pruned, the app will panic.
self.state_db.read().boxed_clone_canon(&header.hash()), // This is something that should not happen often and it is an edge case.
*header.state_root(), // Locking the read best_block lock would be more straightforward, but can introduce overlapping locks;
self.engine.account_start_nonce(header.number()), // because of this we are just taking 5 tries to get the best state; in most cases it will work on the first try.
self.factories.clone(), while nb_tries != 0 {
) let header = self.best_block_header();
.expect("State root of best block header always valid."); match State::from_existing(
(state, header) self.state_db.read().boxed_clone_canon(&header.hash()),
*header.state_root(),
self.engine.account_start_nonce(header.number()),
self.factories.clone(),
) {
Ok(ret) => return (ret, header),
Err(_) => {
warn!("Couldn't fetch state of best block header: {:?}", header);
nb_tries -= 1;
}
}
}
panic!("Couldn't get latest state in 5 tries");
} }
/// Attempt to get a copy of a specific block's final state. /// Attempt to get a copy of a specific block's final state.

View File

@ -241,7 +241,7 @@ impl BlockDownloader {
self.last_round_start_hash = start_hash.clone(); self.last_round_start_hash = start_hash.clone();
self.imported_this_round = None; self.imported_this_round = None;
self.round_parents = VecDeque::new(); self.round_parents = VecDeque::new();
self.target_hash = None; //self.target_hash = None; // target_hash is only used for old (ancient) block download. And once set it should not be reset in any way.
self.retract_step = 1; self.retract_step = 1;
} }
@ -516,8 +516,24 @@ impl BlockDownloader {
self.last_imported_hash self.last_imported_hash
); );
} else { } else {
let best = io.chain().chain_info().best_block_number; let (best, best_hash) = match self.block_set {
let best_hash = io.chain().chain_info().best_block_hash; BlockSet::NewBlocks => (
io.chain().chain_info().best_block_number,
io.chain().chain_info().best_block_hash,
),
BlockSet::OldBlocks => {
if let (Some(best), Some(best_hash)) = (
io.chain().chain_info().ancient_block_number,
io.chain().chain_info().ancient_block_hash,
) {
(best, best_hash) // best ancient block number and hash.
} else {
// None on ancient block/hash means that all ancient blocks are already downloaded and stored in the DB.
self.state = State::Complete;
return;
}
}
};
let oldest_reorg = io.chain().pruning_info().earliest_state; let oldest_reorg = io.chain().pruning_info().earliest_state;
if self.block_set == BlockSet::NewBlocks && best > start && start < oldest_reorg if self.block_set == BlockSet::NewBlocks && best > start && start < oldest_reorg
{ {
@ -531,7 +547,7 @@ impl BlockDownloader {
} else { } else {
let n = start - cmp::min(self.retract_step, start); let n = start - cmp::min(self.retract_step, start);
if n == 0 { if n == 0 {
debug_sync!(self, "Header not found, bottom line reached, resetting, last imported: {}", self.last_imported_hash); info!("Header not found, bottom line reached, resetting, last imported: {}", self.last_imported_hash);
self.reset_to_block(&best_hash, best); self.reset_to_block(&best_hash, best);
} else { } else {
self.retract_step *= 2; self.retract_step *= 2;
@ -547,11 +563,9 @@ impl BlockDownloader {
); );
} }
None => { None => {
debug_sync!( info!(
self,
"Could not revert to previous block, last: {} ({})", "Could not revert to previous block, last: {} ({})",
start, start, self.last_imported_hash
self.last_imported_hash
); );
self.reset_to_block(&best_hash, best); self.reset_to_block(&best_hash, best);
} }
@ -661,7 +675,10 @@ impl BlockDownloader {
if self.target_hash.as_ref().map_or(false, |t| t == &h) { if self.target_hash.as_ref().map_or(false, |t| t == &h) {
self.state = State::Complete; self.state = State::Complete;
trace_sync!(self, "Sync target reached"); info!(
"Sync target {:?} for old blocks reached. Syncing ancient blocks finished.",
self.target_hash
);
return download_action; return download_action;
} }

View File

@ -969,15 +969,17 @@ impl ChainSync {
if let (Some(ancient_block_hash), Some(ancient_block_number)) = if let (Some(ancient_block_hash), Some(ancient_block_number)) =
(chain.ancient_block_hash, chain.ancient_block_number) (chain.ancient_block_hash, chain.ancient_block_number)
{ {
trace!(target: "sync", "Downloading old blocks from {:?} (#{}) till {:?} (#{:?})", ancient_block_hash, ancient_block_number, chain.first_block_hash, chain.first_block_number); info!(target: "sync", "Downloading old blocks from {:?} (#{}) till {:?} (#{:?})", ancient_block_hash, ancient_block_number, chain.first_block_hash, chain.first_block_number);
let mut downloader = BlockDownloader::new( let mut downloader = BlockDownloader::new(
BlockSet::OldBlocks, BlockSet::OldBlocks,
&ancient_block_hash, &ancient_block_hash,
ancient_block_number, ancient_block_number,
); );
if let Some(hash) = chain.first_block_hash { if let Some(hash) = chain.first_block_hash {
trace!(target: "sync", "Downloader target set to {:?}", hash); trace!(target: "sync", "Downloader target for old blocks is set to {:?}", hash);
downloader.set_target(&hash); downloader.set_target(&hash);
} else {
trace!(target: "sync", "Downloader target could not be found");
} }
self.old_blocks = Some(downloader); self.old_blocks = Some(downloader);
} }
@ -1114,10 +1116,18 @@ impl ChainSync {
let equal_or_higher_difficulty = peer_difficulty.map_or(true, |pd| pd >= syncing_difficulty); let equal_or_higher_difficulty = peer_difficulty.map_or(true, |pd| pd >= syncing_difficulty);
if force || equal_or_higher_difficulty { if force || equal_or_higher_difficulty {
if let Some(request) = self.old_blocks.as_mut().and_then(|d| d.request_blocks(peer_id, io, num_active_peers)) { let mut is_complete = false;
SyncRequester::request_blocks(self, io, peer_id, request, BlockSet::OldBlocks); if let Some(old_blocks) = self.old_blocks.as_mut() {
return; if let Some(request) = old_blocks.request_blocks(peer_id, io, num_active_peers) {
} SyncRequester::request_blocks(self, io, peer_id, request, BlockSet::OldBlocks);
return;
}
is_complete = old_blocks.is_complete();
}
if is_complete { // if old_blocks is in complete state, set it to None.
self.old_blocks = None;
}
} else { } else {
trace!( trace!(
target: "sync", target: "sync",
@ -1344,7 +1354,7 @@ impl ChainSync {
}, },
SyncState::SnapshotWaiting => match io.snapshot_service().restoration_status() { SyncState::SnapshotWaiting => match io.snapshot_service().restoration_status() {
RestorationStatus::Inactive => { RestorationStatus::Inactive => {
trace!(target:"sync", "Snapshot restoration is complete"); info!(target:"sync", "Snapshot restoration is complete");
self.restart(io); self.restart(io);
} }
RestorationStatus::Initializing { .. } => { RestorationStatus::Initializing { .. } => {
@ -1443,7 +1453,7 @@ impl ChainSync {
SyncPropagator::propagate_proposed_blocks(self, io, proposed); SyncPropagator::propagate_proposed_blocks(self, io, proposed);
} }
if !invalid.is_empty() { if !invalid.is_empty() {
trace!(target: "sync", "Bad blocks in the queue, restarting"); info!(target: "sync", "Bad blocks in the queue, restarting sync");
self.restart(io); self.restart(io);
} }

View File

@ -92,7 +92,6 @@ impl CacheSizes {
pub struct SyncInfo { pub struct SyncInfo {
last_imported_block_number: BlockNumber, last_imported_block_number: BlockNumber,
last_imported_old_block_number: Option<BlockNumber>,
num_peers: usize, num_peers: usize,
max_peers: u32, max_peers: u32,
snapshot_sync: bool, snapshot_sync: bool,
@ -160,7 +159,6 @@ impl InformantData for FullNodeInformantData {
last_imported_block_number: status last_imported_block_number: status
.last_imported_block_number .last_imported_block_number
.unwrap_or(chain_info.best_block_number), .unwrap_or(chain_info.best_block_number),
last_imported_old_block_number: status.last_imported_old_block_number,
num_peers: status.num_peers, num_peers: status.num_peers,
max_peers: status max_peers: status
.current_max_peers(*num_peers_range.start(), *num_peers_range.end()), .current_max_peers(*num_peers_range.start(), *num_peers_range.end()),
@ -265,7 +263,7 @@ impl<T: InformantData> Informant<T> {
false => t, false => t,
}; };
info!(target: "import", "{} {} {} {}", info!(target: "import", "{}{} {} {} {}",
match importing { match importing {
true => match snapshot_sync { true => match snapshot_sync {
false => format!("Syncing {} {} {} {}+{} Qed", false => format!("Syncing {} {} {} {}+{} Qed",
@ -301,18 +299,22 @@ impl<T: InformantData> Informant<T> {
}, },
false => String::new(), false => String::new(),
}, },
match chain_info.ancient_block_number {
Some(ancient_number) => format!(" (Ancient:#{})", ancient_number),
None => String::new(),
},
match sync_info.as_ref() { match sync_info.as_ref() {
Some(ref sync_info) => format!("{}{}/{} peers", Some(ref sync_info) => format!("{}{}/{} peers",
match importing { match importing {
true => format!("{}", true => format!("{}",
if self.target.executes_transactions() { if self.target.executes_transactions() {
paint(Green.bold(), format!("{:>8} ", format!("#{}", sync_info.last_imported_block_number))) paint(Green.bold(), format!("{:>8} ", format!("LI:#{}", sync_info.last_imported_block_number)))
} else { } else {
String::new() String::new()
} }
), ),
false => match sync_info.last_imported_old_block_number { false => match chain_info.ancient_block_number {
Some(number) => format!("{} ", paint(Yellow.bold(), format!("{:>8}", format!("#{}", number)))), Some(number) => format!("{} ", paint(Yellow.bold(), format!("{:>8}", format!("AB:#{}", number)))),
None => String::new(), None => String::new(),
} }
}, },