diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index 2313d5114..7113c55b1 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -312,7 +312,8 @@ impl EthFilter for EthFilterClient { None => Ok(Value::Array(vec![] as Vec)), Some(info) => match info.filter { PollFilter::Block => { - let current_number = client.chain_info().best_block_number; + // + 1, because we want to return hashes including current block hash. + let current_number = client.chain_info().best_block_number + 1; let hashes = (info.block_number..current_number).into_iter() .map(BlockId::Number) .filter_map(|id| client.block_hash(id)) diff --git a/sync/src/chain.rs b/sync/src/chain.rs index 0feae01b0..6b72e7e1c 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -576,7 +576,7 @@ impl ChainSync { pub fn on_peer_connected(&mut self, io: &mut SyncIo, peer: PeerId) { trace!(target: "sync", "== Connected {}", peer); if let Err(e) = self.send_status(io) { - trace!(target:"sync", "Error sending status request: {:?}", e); + warn!(target:"sync", "Error sending status request: {:?}", e); io.disable_peer(peer); } } @@ -901,9 +901,8 @@ impl ChainSync { } match sync.send(peer_id, packet_id, packet) { Err(e) => { - warn!(target:"sync", "Error sending request: {:?}", e); + debug!(target:"sync", "Error sending request: {:?}", e); sync.disable_peer(peer_id); - self.on_peer_aborting(sync, peer_id); } Ok(_) => { let mut peer = self.peers.get_mut(&peer_id).unwrap(); @@ -916,9 +915,8 @@ impl ChainSync { /// Generic packet sender fn send_packet(&mut self, sync: &mut SyncIo, peer_id: PeerId, packet_id: PacketId, packet: Bytes) { if let Err(e) = sync.send(peer_id, packet_id, packet) { - warn!(target:"sync", "Error sending packet: {:?}", e); + debug!(target:"sync", "Error sending packet: {:?}", e); sync.disable_peer(peer_id); - self.on_peer_aborting(sync, peer_id); } } /// Called when peer sends us new transactions diff --git a/util/src/journaldb.rs b/util/src/journaldb.rs index 
3228f2201..378bc2de5 100644 --- a/util/src/journaldb.rs +++ b/util/src/journaldb.rs @@ -182,7 +182,7 @@ impl JournalDB { } fn replay_keys(inserts: &[H256], backing: &Database, counters: &mut HashMap) { - println!("replay_keys: inserts={:?}, counters={:?}", inserts, counters); + trace!("replay_keys: inserts={:?}, counters={:?}", inserts, counters); for h in inserts { if let Some(c) = counters.get_mut(h) { // already counting. increment. @@ -193,11 +193,11 @@ impl JournalDB { // this is the first entry for this node in the journal. // it is initialised to 1 if it was already in. if Self::is_already_in(backing, h) { - println!("replace_keys: Key {} was already in!", h); + trace!("replace_keys: Key {} was already in!", h); counters.insert(h.clone(), 1); } } - println!("replay_keys: (end) counters={:?}", counters); + trace!("replay_keys: (end) counters={:?}", counters); } fn kill_keys(deletes: Vec, counters: &mut HashMap, batch: &DBTransaction) { @@ -364,7 +364,7 @@ impl JournalDB { r.append(&&PADDING[..]); &r.drain() }).expect("Low-level database error.") { - println!("read_counters: era={}, index={}", era, index); + trace!("read_counters: era={}, index={}", era, index); let rlp = Rlp::new(&rlp_data); let inserts: Vec = rlp.val_at(1); Self::replay_keys(&inserts, db, &mut counters);