Parallel block body download (#1659)

Arkadiy Paronyan 2016-07-25 18:38:36 +02:00 committed by Gav Wood
parent 898f1410a5
commit d0e79be5c6
2 changed files with 17 additions and 5 deletions

View File

@@ -120,13 +120,22 @@ impl BlockCollection {
 			if let Some(head) = head {
 				match self.blocks.get(&head) {
 					Some(block) if block.body.is_none() && !self.downloading_bodies.contains(&head) => {
+						self.downloading_bodies.insert(head.clone());
 						needed_bodies.push(head.clone());
 					}
 					_ => (),
 				}
 			}
 		}
-		self.downloading_bodies.extend(needed_bodies.iter());
+		for h in self.header_ids.values() {
+			if needed_bodies.len() >= count {
+				break;
+			}
+			if !self.downloading_bodies.contains(h) {
+				needed_bodies.push(h.clone());
+				self.downloading_bodies.insert(h.clone());
+			}
+		}
 		needed_bodies
 	}
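
Note: the point of moving the `downloading_bodies` bookkeeping into the selection loop is that every hash is reserved the moment it is handed out, so two peers asking for work in quick succession receive disjoint sets of bodies and fetch them in parallel. A minimal sketch of that reservation pattern, using a hypothetical `Bodies` type with string hashes rather than the real `BlockCollection`/`H256` API:

    use std::collections::HashSet;

    struct Bodies {
        pending: Vec<String>,                // headers whose bodies are still needed
        downloading_bodies: HashSet<String>, // hashes already handed to some peer
    }

    impl Bodies {
        fn needed_bodies(&mut self, count: usize) -> Vec<String> {
            let mut needed = Vec::new();
            for h in &self.pending {
                if needed.len() >= count {
                    break;
                }
                if !self.downloading_bodies.contains(h) {
                    needed.push(h.clone());
                    self.downloading_bodies.insert(h.clone()); // reserve immediately
                }
            }
            needed
        }
    }

    fn main() {
        let mut bodies = Bodies {
            pending: (0..6).map(|i| format!("h{}", i)).collect(),
            downloading_bodies: HashSet::new(),
        };
        let peer_a = bodies.needed_bodies(3); // ["h0", "h1", "h2"]
        let peer_b = bodies.needed_bodies(3); // ["h3", "h4", "h5"] -- disjoint
        assert!(peer_a.iter().all(|h| !peer_b.contains(h)));
    }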
@@ -286,6 +295,7 @@ impl BlockCollection {
 		self.parents.insert(info.parent_hash.clone(), hash.clone());
 		self.blocks.insert(hash.clone(), block);
+		trace!(target: "sync", "New header: {}", hash.hex());
 		Ok(hash)
 	}

View File

@@ -111,11 +111,11 @@ const MAX_NODE_DATA_TO_SEND: usize = 1024;
 const MAX_RECEIPTS_TO_SEND: usize = 1024;
 const MAX_RECEIPTS_HEADERS_TO_SEND: usize = 256;
 const MAX_HEADERS_TO_REQUEST: usize = 128;
-const MAX_BODIES_TO_REQUEST: usize = 64;
+const MAX_BODIES_TO_REQUEST: usize = 128;
 const MIN_PEERS_PROPAGATION: usize = 4;
 const MAX_PEERS_PROPAGATION: usize = 128;
 const MAX_PEER_LAG_PROPAGATION: BlockNumber = 20;
-const SUBCHAIN_SIZE: usize = 64;
+const SUBCHAIN_SIZE: usize = 256;
 const MAX_ROUND_PARENTS: usize = 32;
 const MAX_NEW_HASHES: usize = 64;
 const MAX_TX_TO_IMPORT: usize = 512;
@@ -408,7 +408,7 @@ impl ChainSync {
 		let expected_hash = self.peers.get(&peer_id).and_then(|p| p.asking_hash);
 		let expected_asking = if self.state == SyncState::ChainHead { PeerAsking::Heads } else { PeerAsking::BlockHeaders };
 		if !self.reset_peer_asking(peer_id, expected_asking) || expected_hash.is_none() {
-			trace!(target: "sync", "Ignored unexpected headers");
+			trace!(target: "sync", "{}: Ignored unexpected headers", peer_id);
 			self.continue_sync(io);
 			return Ok(());
 		}
@@ -754,7 +754,9 @@ impl ChainSync {
 				// Request subchain headers
 				trace!(target: "sync", "Starting sync with better chain");
 				let last = self.last_imported_hash.clone();
-				self.request_headers_by_hash(io, peer_id, &last, SUBCHAIN_SIZE, MAX_HEADERS_TO_REQUEST - 1, false, PeerAsking::Heads);
+				// Request MAX_HEADERS_TO_REQUEST - 2 headers apart so that
+				// MAX_HEADERS_TO_REQUEST would include headers for neighbouring subchains
+				self.request_headers_by_hash(io, peer_id, &last, SUBCHAIN_SIZE, MAX_HEADERS_TO_REQUEST - 2, false, PeerAsking::Heads);
 			},
 			SyncState::Blocks | SyncState::NewBlocks => {
 				if io.chain().block_status(BlockID::Hash(peer_latest)) == BlockStatus::Unknown {
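
Note on the arithmetic: assuming the eth GetBlockHeaders semantics where a skip of n omits n blocks between returned headers, a skip of MAX_HEADERS_TO_REQUEST - 2 = 126 places consecutive subchain heads 127 blocks apart, so a later request for a full 128 headers starting at one head lands exactly on the next head and neighbouring subchains connect without a gap. A standalone sanity check of the relationship (illustrative only, not code from this commit):

    const MAX_HEADERS_TO_REQUEST: usize = 128;
    const SUBCHAIN_SIZE: usize = 256;

    fn main() {
        // A skip of n omits n blocks between returned headers, so consecutive
        // subchain heads end up skip + 1 blocks apart.
        let spacing = (MAX_HEADERS_TO_REQUEST - 2) + 1; // 127
        // A full request of 128 consecutive headers starting at one head
        // reaches head + 127, i.e. exactly the next subchain head.
        assert_eq!(spacing + 1, MAX_HEADERS_TO_REQUEST);
        // One ChainHead round can therefore map roughly 256 * 127 = 32_512 blocks.
        println!("blocks spanned per round: {}", SUBCHAIN_SIZE * spacing);
    }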