Limit sync reorg to 20 blocks (#3519)

* Limit sync reorg

* Fixed tests
Arkadiy Paronyan 2016-11-18 19:17:35 +01:00 committed by GitHub
parent 51012d1fae
commit 9c62dd3916
3 changed files with 35 additions and 23 deletions
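The change caps how far the block downloader will retract while searching for a common ancestor after a round that imported nothing: once the gap between the node's best block and the retraction point exceeds MAX_REORG_BLOCKS (20), the round is reset instead of walking further back. A minimal, self-contained sketch of that decision, using stand-in names (Retract, plan_retract) rather than Parity's actual BlockDownloader API:

    const MAX_REORG_BLOCKS: u64 = 20;

    /// Hypothetical outcome type for illustration; the real code mutates
    /// BlockDownloader state instead of returning a value.
    #[derive(Debug, PartialEq)]
    enum Retract {
        StepBack(u64), // retract one block and keep searching for a common header
        Reset,         // too far behind the best block: abandon the round
    }

    fn plan_retract(best_block: u64, last_imported: u64) -> Retract {
        if best_block > last_imported && best_block - last_imported > MAX_REORG_BLOCKS {
            Retract::Reset
        } else {
            // The real code also handles last_imported == 0 and a missing block hash.
            Retract::StepBack(last_imported - 1)
        }
    }

    fn main() {
        assert_eq!(plan_retract(100, 90), Retract::StepBack(89)); // 10-block gap: allowed
        assert_eq!(plan_retract(1_000, 975), Retract::Reset);     // 25-block gap: capped
    }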

View File

@@ -34,6 +34,7 @@ const MAX_RECEPITS_TO_REQUEST: usize = 128;
 const SUBCHAIN_SIZE: u64 = 256;
 const MAX_ROUND_PARENTS: usize = 32;
 const MAX_PARALLEL_SUBCHAIN_DOWNLOAD: usize = 5;
+const MAX_REORG_BLOCKS: u64 = 20;
 
 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
 /// Downloader state
@@ -262,7 +263,8 @@ impl BlockDownloader {
 			State::Blocks => {
 				let count = headers.len();
 				// At least one of the heades must advance the subchain. Otherwise they are all useless.
-				if !any_known {
+				if count == 0 || !any_known {
+					trace!(target: "sync", "No useful headers");
 					return Err(BlockDownloaderImportError::Useless);
 				}
 				self.blocks.insert_headers(headers);
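Alongside the cap, the header-response check above is tightened: a batch is now discarded not only when none of its headers attach to a known block, but also when it is empty. A rough stand-alone restatement of that guard (hypothetical helper, not the real API):

    /// A headers response is only useful if it is non-empty and at least one
    /// header connects to something we already know (i.e. advances the subchain).
    fn headers_useful(count: usize, any_known: bool) -> bool {
        count != 0 && any_known
    }

    fn main() {
        assert!(!headers_useful(0, false)); // empty response
        assert!(!headers_useful(5, false)); // nothing attaches to the chain
        assert!(headers_useful(5, true));   // advances the subchain
    }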
@@ -340,14 +342,21 @@ impl BlockDownloader {
 					self.last_imported_hash = p.clone();
 					trace!(target: "sync", "Searching common header from the last round {} ({})", self.last_imported_block, self.last_imported_hash);
 				} else {
-					match io.chain().block_hash(BlockID::Number(self.last_imported_block - 1)) {
-						Some(h) => {
-							self.last_imported_block -= 1;
-							self.last_imported_hash = h;
-							trace!(target: "sync", "Searching common header in the blockchain {} ({})", self.last_imported_block, self.last_imported_hash);
-						}
-						None => {
-							debug!(target: "sync", "Could not revert to previous block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
+					let best = io.chain().chain_info().best_block_number;
+					if best > self.last_imported_block && best - self.last_imported_block > MAX_REORG_BLOCKS {
+						debug!(target: "sync", "Could not revert to previous ancient block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
+						self.reset();
+					} else {
+						match io.chain().block_hash(BlockID::Number(self.last_imported_block - 1)) {
+							Some(h) => {
+								self.last_imported_block -= 1;
+								self.last_imported_hash = h;
+								trace!(target: "sync", "Searching common header in the blockchain {} ({})", self.last_imported_block, self.last_imported_hash);
+							}
+							None => {
+								debug!(target: "sync", "Could not revert to previous block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
+								self.reset();
+							}
+						}
 					}
 				}
 			}
@@ -362,7 +371,9 @@ impl BlockDownloader {
 		match self.state {
 			State::Idle => {
 				self.start_sync_round(io);
-				return self.request_blocks(io, num_active_peers);
+				if self.state == State::ChainHead {
+					return self.request_blocks(io, num_active_peers);
+				}
 			},
 			State::ChainHead => {
 				if num_active_peers < MAX_PARALLEL_SUBCHAIN_DOWNLOAD {
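The last hunk in this file guards the Idle branch: start_sync_round can now reset the downloader (for example when the reorg cap is hit), so a follow-up header request is only issued if the round actually advanced to ChainHead. A condensed sketch of that control flow with a stand-in State type:

    #[derive(Copy, Clone, PartialEq)]
    enum State { Idle, ChainHead }

    // Stand-in for the Idle branch of BlockDownloader::request_blocks: only ask
    // peers for subchain heads if starting the round left us in ChainHead.
    fn should_request(state_after_start: State) -> bool {
        state_after_start == State::ChainHead
    }

    fn main() {
        assert!(should_request(State::ChainHead)); // round started normally
        assert!(!should_request(State::Idle));     // round was reset: request nothing
    }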

View File

@@ -1144,6 +1144,7 @@ impl ChainSync {
 		let have_latest = io.chain().block_status(BlockID::Hash(peer_latest)) != BlockStatus::Unknown;
 		if !have_latest && (higher_difficulty || force || self.state == SyncState::NewBlocks) {
 			// check if got new blocks to download
+			trace!(target: "sync", "Syncing with {}, force={}, td={:?}, our td={}, state={:?}", peer_id, force, peer_difficulty, syncing_difficulty, self.state);
 			if let Some(request) = self.new_blocks.request_blocks(io, num_active_peers) {
 				self.request_blocks(io, peer_id, request, BlockSet::NewBlocks);
 				if self.state == SyncState::Idle {

View File

@@ -79,14 +79,14 @@ fn empty_blocks() {
 fn forked() {
 	::env_logger::init().ok();
 	let mut net = TestNet::new(3);
-	net.peer_mut(0).chain.add_blocks(300, EachBlockWith::Uncle);
-	net.peer_mut(1).chain.add_blocks(300, EachBlockWith::Uncle);
-	net.peer_mut(2).chain.add_blocks(300, EachBlockWith::Uncle);
-	net.peer_mut(0).chain.add_blocks(100, EachBlockWith::Nothing); //fork
-	net.peer_mut(1).chain.add_blocks(200, EachBlockWith::Uncle);
-	net.peer_mut(2).chain.add_blocks(200, EachBlockWith::Uncle);
-	net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Uncle); //fork between 1 and 2
-	net.peer_mut(2).chain.add_blocks(10, EachBlockWith::Nothing);
+	net.peer_mut(0).chain.add_blocks(30, EachBlockWith::Uncle);
+	net.peer_mut(1).chain.add_blocks(30, EachBlockWith::Uncle);
+	net.peer_mut(2).chain.add_blocks(30, EachBlockWith::Uncle);
+	net.peer_mut(0).chain.add_blocks(10, EachBlockWith::Nothing); //fork
+	net.peer_mut(1).chain.add_blocks(20, EachBlockWith::Uncle);
+	net.peer_mut(2).chain.add_blocks(20, EachBlockWith::Uncle);
+	net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Uncle); //fork between 1 and 2
+	net.peer_mut(2).chain.add_blocks(1, EachBlockWith::Nothing);
 	// peer 1 has the best chain of 601 blocks
 	let peer1_chain = net.peer(1).chain.numbers.read().clone();
 	net.sync();
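The test fixtures are scaled down by roughly a factor of ten, presumably so the forks stay within the new 20-block cap: in forked(), peer 0 now only has to unwind its 10-block private fork to reach the common ancestor, whereas the old 300/100 figures would require a 100-block reorg that the downloader now refuses. (The in-file comment about a best chain of 601 blocks still reflects the old figures.) A back-of-the-envelope check of that arithmetic, with the fixture numbers restated as plain constants:

    const MAX_REORG_BLOCKS: u64 = 20;

    fn main() {
        // Scaled-down `forked` fixture: 30 common blocks, then a 10-block fork on peer 0.
        let (common, peer0_fork): (u64, u64) = (30, 10);
        let unwind = (common + peer0_fork) - common; // blocks peer 0 must retract
        assert!(unwind <= MAX_REORG_BLOCKS);         // 10 <= 20: reorg is permitted

        // Old fixture (300 common + 100 fork): a 100-block unwind, beyond the new cap.
        assert!((300u64 + 100) - 300 > MAX_REORG_BLOCKS);
    }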
@@ -102,12 +102,12 @@ fn forked_with_misbehaving_peer() {
 	let mut net = TestNet::new(3);
 	// peer 0 is on a totally different chain with higher total difficulty
 	net.peer_mut(0).chain = TestBlockChainClient::new_with_extra_data(b"fork".to_vec());
-	net.peer_mut(0).chain.add_blocks(500, EachBlockWith::Nothing);
-	net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Nothing);
-	net.peer_mut(2).chain.add_blocks(100, EachBlockWith::Nothing);
-	net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Nothing);
-	net.peer_mut(2).chain.add_blocks(200, EachBlockWith::Uncle);
+	net.peer_mut(0).chain.add_blocks(50, EachBlockWith::Nothing);
+	net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Nothing);
+	net.peer_mut(2).chain.add_blocks(10, EachBlockWith::Nothing);
+	net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Nothing);
+	net.peer_mut(2).chain.add_blocks(20, EachBlockWith::Uncle);
 	// peer 1 should sync to peer 2, others should not change
 	let peer0_chain = net.peer(0).chain.numbers.read().clone();
 	let peer2_chain = net.peer(2).chain.numbers.read().clone();