Merge branch 'master' into clippy-dev

Conflicts:
	util/src/journaldb.rs

commit 69db469d39
@@ -312,7 +312,8 @@ impl EthFilter for EthFilterClient
 			None => Ok(Value::Array(vec![] as Vec<Value>)),
 			Some(info) => match info.filter {
 				PollFilter::Block => {
-					let current_number = client.chain_info().best_block_number;
+					// + 1, cause we want to return hashes including current block hash.
+					let current_number = client.chain_info().best_block_number + 1;
 					let hashes = (info.block_number..current_number).into_iter()
 						.map(BlockId::Number)
 						.filter_map(|id| client.block_hash(id))
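Note on the change above: Rust ranges are half-open, so `info.block_number..best_block_number` stops one short of the best block, and the `+ 1` is what makes the current block's hash part of the result. A minimal stand-alone sketch of that behaviour, with plain usize values standing in for block numbers:

	// Sketch only: usize values stand in for block numbers/hashes.
	fn main() {
		let info_block_number = 5usize;
		let best_block_number = 8usize;

		// Without + 1 the half-open range excludes the current best block ...
		let without: Vec<usize> = (info_block_number..best_block_number).collect();
		assert_eq!(without, vec![5, 6, 7]);

		// ... with + 1 it is included, which is what the new code wants.
		let with: Vec<usize> = (info_block_number..best_block_number + 1).collect();
		assert_eq!(with, vec![5, 6, 7, 8]);
	}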
@@ -576,7 +576,7 @@ impl ChainSync
 	pub fn on_peer_connected(&mut self, io: &mut SyncIo, peer: PeerId) {
 		trace!(target: "sync", "== Connected {}", peer);
 		if let Err(e) = self.send_status(io) {
-			trace!(target:"sync", "Error sending status request: {:?}", e);
+			warn!(target:"sync", "Error sending status request: {:?}", e);
 			io.disable_peer(peer);
 		}
 	}
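The remaining hunks are mostly logging adjustments: in the sync code messages move between trace!/debug!/warn!, and in journaldb.rs stray println! calls become trace!. For reference, these `log` crate macros differ only in severity, and the logger's filter (e.g. RUST_LOG) decides what is actually emitted. A minimal sketch, assuming the log and env_logger crates as the backend (the exact env_logger init signature varies by version):

	#[macro_use]
	extern crate log;
	extern crate env_logger;

	fn main() {
		// Initialise some logger backend; without one, the macros are no-ops.
		let _ = env_logger::init();

		// Same event at three severities. With RUST_LOG=sync=warn only the last
		// line is emitted; with RUST_LOG=sync=trace all three are.
		trace!(target: "sync", "status request failed (example)");
		debug!(target: "sync", "status request failed (example)");
		warn!(target: "sync", "status request failed (example)");
	}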
@@ -901,9 +901,8 @@ impl ChainSync
 		}
 		match sync.send(peer_id, packet_id, packet) {
 			Err(e) => {
-				warn!(target:"sync", "Error sending request: {:?}", e);
+				debug!(target:"sync", "Error sending request: {:?}", e);
 				sync.disable_peer(peer_id);
-				self.on_peer_aborting(sync, peer_id);
 			}
 			Ok(_) => {
 				let mut peer = self.peers.get_mut(&peer_id).unwrap();
@@ -916,9 +915,8 @@ impl ChainSync
 	/// Generic packet sender
 	fn send_packet(&mut self, sync: &mut SyncIo, peer_id: PeerId, packet_id: PacketId, packet: Bytes) {
 		if let Err(e) = sync.send(peer_id, packet_id, packet) {
-			warn!(target:"sync", "Error sending packet: {:?}", e);
+			debug!(target:"sync", "Error sending packet: {:?}", e);
 			sync.disable_peer(peer_id);
-			self.on_peer_aborting(sync, peer_id);
 		}
 	}
 	/// Called when peer sends us new transactions
@@ -182,7 +182,7 @@ impl JournalDB
 	}
 
 	fn replay_keys(inserts: &[H256], backing: &Database, counters: &mut HashMap<H256, i32>) {
-		println!("replay_keys: inserts={:?}, counters={:?}", inserts, counters);
+		trace!("replay_keys: inserts={:?}, counters={:?}", inserts, counters);
 		for h in inserts {
 			if let Some(c) = counters.get_mut(h) {
 				// already counting. increment.
@@ -193,11 +193,11 @@ impl JournalDB
 			// this is the first entry for this node in the journal.
 			// it is initialised to 1 if it was already in.
 			if Self::is_already_in(backing, h) {
-				println!("replace_keys: Key {} was already in!", h);
+				trace!("replace_keys: Key {} was already in!", h);
 				counters.insert(h.clone(), 1);
 			}
 		}
-		println!("replay_keys: (end) counters={:?}", counters);
+		trace!("replay_keys: (end) counters={:?}", counters);
 	}
 
 	fn kill_keys(deletes: Vec<H256>, counters: &mut HashMap<H256, i32>, batch: &DBTransaction) {
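The context lines above outline the counter bookkeeping in replay_keys: a key that is already being counted is incremented, otherwise it starts at 1 only when the backing database already holds it. A self-contained sketch of that pattern, with a HashSet standing in for the backing Database and is_already_in, u64 standing in for H256, and the increment filled in since the hunk cuts it off:

	use std::collections::{HashMap, HashSet};

	fn replay_keys(inserts: &[u64], backing: &HashSet<u64>, counters: &mut HashMap<u64, i32>) {
		for h in inserts {
			if let Some(c) = counters.get_mut(h) {
				// already counting. increment.
				*c += 1;
				continue;
			}
			// first journal entry for this node: it starts at 1 only if the
			// backing store already holds it; otherwise no counter is created here.
			if backing.contains(h) {
				counters.insert(*h, 1);
			}
		}
	}

	fn main() {
		let backing: HashSet<u64> = [1, 2].iter().cloned().collect();
		let mut counters = HashMap::new();
		replay_keys(&[1, 2, 3], &backing, &mut counters);
		replay_keys(&[1], &backing, &mut counters);
		assert_eq!(counters.get(&1), Some(&2));
		assert_eq!(counters.get(&2), Some(&1));
		assert_eq!(counters.get(&3), None); // not in backing, so never counted
	}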
@@ -364,7 +364,7 @@ impl JournalDB
 				r.append(&&PADDING[..]);
 				&r.drain()
 			}).expect("Low-level database error.") {
-				println!("read_counters: era={}, index={}", era, index);
+				trace!("read_counters: era={}, index={}", era, index);
 				let rlp = Rlp::new(&rlp_data);
 				let inserts: Vec<H256> = rlp.val_at(1);
 				Self::replay_keys(&inserts, db, &mut counters);