From 0eb69c7f1c9c592cdb6997356e41957dd64bb105 Mon Sep 17 00:00:00 2001
From: arkpar
Date: Sat, 9 Jan 2016 19:10:58 +0100
Subject: [PATCH 1/5] Removed verification from this branch

---
 src/verification.rs | 109 --------------------------------------------
 1 file changed, 109 deletions(-)
 delete mode 100644 src/verification.rs

diff --git a/src/verification.rs b/src/verification.rs
deleted file mode 100644
index 95951da53..000000000
--- a/src/verification.rs
+++ /dev/null
@@ -1,109 +0,0 @@
-use util::uint::*;
-use util::hash::*;
-use util::rlp::*;
-use util::sha3::Hashable;
-use util::triehash::ordered_trie_root;
-use header::Header;
-use client::BlockNumber;
-
-#[derive(Debug)]
-pub enum TransactionVerificationError {
-	OutOfGasBase,
-	OutOfGasIntrinsic,
-	NotEnoughCash,
-	GasPriceTooLow,
-	BlockGasLimitReached,
-	FeeTooSmall,
-	TooMuchGasUsed {
-		used: U256,
-		limit: U256
-	},
-	ExtraDataTooBig,
-	InvalidSignature,
-	InvalidTransactionFormat,
-}
-
-#[derive(Debug)]
-pub enum BlockVerificationError {
-	TooMuchGasUsed {
-		used: U256,
-		limit: U256,
-	},
-	InvalidBlockFormat,
-	InvalidUnclesHash {
-		expected: H256,
-		got: H256,
-	},
-	TooManyUncles,
-	UncleTooOld,
-	UncleIsBrother,
-	UncleInChain,
-	UncleParentNotInChain,
-	InvalidStateRoot,
-	InvalidGasUsed,
-	InvalidTransactionsRoot {
-		expected: H256,
-		got: H256,
-	},
-	InvalidDifficulty,
-	InvalidGasLimit,
-	InvalidReceiptsStateRoot,
-	InvalidTimestamp,
-	InvalidLogBloom,
-	InvalidNonce,
-	InvalidBlockHeaderItemCount,
-	InvalidBlockNonce,
-	InvalidParentHash,
-	InvalidUncleParentHash,
-	InvalidNumber,
-	BlockNotFound,
-	UnknownParent,
-}
-
-
-pub fn verify_header(header: &Header) -> Result<(), BlockVerificationError> {
-	if header.number > From::from(BlockNumber::max_value()) {
-		return Err(BlockVerificationError::InvalidNumber)
-	}
-	if header.gas_used > header.gas_limit {
-		return Err(BlockVerificationError::TooMuchGasUsed {
-			used: header.gas_used,
-			limit: header.gas_limit,
-		});
-	}
-	Ok(())
-}
-
-pub fn verify_parent(header: &Header, parent: &Header) -> Result<(), BlockVerificationError> {
-	if !header.parent_hash.is_zero() && parent.hash() != header.parent_hash {
-		return Err(BlockVerificationError::InvalidParentHash)
-	}
-	if header.timestamp <= parent.timestamp {
-		return Err(BlockVerificationError::InvalidTimestamp)
-	}
-	if header.number <= parent.number {
-		return Err(BlockVerificationError::InvalidNumber)
-	}
-	Ok(())
-}
-
-pub fn verify_block_integrity(block: &[u8], transactions_root: &H256, uncles_hash: &H256) -> Result<(), BlockVerificationError> {
-	let block = Rlp::new(block);
-	let tx = block.at(1);
-	let expected_root = ordered_trie_root(tx.iter().map(|r| r.as_raw().to_vec()).collect()); //TODO: get rid of vectors here
-	if &expected_root != transactions_root {
-		return Err(BlockVerificationError::InvalidTransactionsRoot {
-			expected: expected_root.clone(),
-			got: transactions_root.clone(),
-		});
-	}
-	let expected_uncles = block.at(2).as_raw().sha3();
-	if &expected_uncles != uncles_hash {
-		return Err(BlockVerificationError::InvalidUnclesHash {
-			expected: expected_uncles.clone(),
-			got: uncles_hash.clone(),
-		});
-	}
-	Ok(())
-}
-
From 32bfa69106a4c370fe8a2e6e464c6159c9414bb9 Mon Sep 17 00:00:00 2001
From: arkpar
Date: Sat, 9 Jan 2016 19:13:58 +0100
Subject: [PATCH 2/5] More docs

---
 src/sync/chain.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/sync/chain.rs b/src/sync/chain.rs
index b7d550c9b..276b1993c 100644
--- a/src/sync/chain.rs
+++ b/src/sync/chain.rs
@@ -164,6 +164,7 @@ pub struct ChainSync {
 }
 
 impl ChainSync {
+	/// Create a new instance of syncing strategy.
 	pub fn new() -> ChainSync {
 		ChainSync {
 			state: SyncState::NotSynced,

From 85ddbba893c41e8ee6ff714ae80ac01c011083e4 Mon Sep 17 00:00:00 2001
From: arkpar
Date: Sat, 9 Jan 2016 23:21:57 +0100
Subject: [PATCH 3/5] Enabled logger for client app

---
 src/bin/client.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/bin/client.rs b/src/bin/client.rs
index 2b003dca9..09e414476 100644
--- a/src/bin/client.rs
+++ b/src/bin/client.rs
@@ -1,6 +1,7 @@
 extern crate ethcore_util as util;
 extern crate ethcore;
 extern crate rustc_serialize;
+extern crate env_logger;
 
 use std::io::*;
 use std::env;
@@ -12,6 +13,7 @@ use ethcore::sync::EthSync;
 use ethcore::ethereum;
 
 fn main() {
+	::env_logger::init().ok();
 	let mut service = NetworkService::start().unwrap();
 	//TODO: replace with proper genesis and chain params.
 	let frontier = ethereum::new_frontier();

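A short usage note on [PATCH 3/5]: env_logger reads its configuration from the RUST_LOG environment variable at startup, so running the client with e.g. RUST_LOG=trace, or a per-target filter such as RUST_LOG=sync=trace, is what actually enables the trace!(target: "sync", ...) output used by the sync code; the trailing .ok() just ignores the error returned when a logger has already been installed. The sketch below is illustrative only and not part of the patch; it assumes the log/env_logger crates of this era, where init() returns a Result:

    #[macro_use]
    extern crate log;
    extern crate env_logger;

    fn main() {
        // Reads RUST_LOG from the environment; .ok() swallows the "already initialised" error.
        env_logger::init().ok();
        trace!(target: "sync", "trace output for the sync target is now visible");
    }
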
From 51584f520244ac50a53f0708394abc5270fa886c Mon Sep 17 00:00:00 2001
From: arkpar
Date: Sun, 10 Jan 2016 14:11:23 +0100
Subject: [PATCH 4/5] Networking fixes

---
 src/sync/chain.rs | 37 ++++++++++++++++++++++---------------
 1 file changed, 22 insertions(+), 15 deletions(-)

diff --git a/src/sync/chain.rs b/src/sync/chain.rs
index 276b1993c..95505ff81 100644
--- a/src/sync/chain.rs
+++ b/src/sync/chain.rs
@@ -13,20 +13,11 @@
 /// 4. Maintain sync by handling NewBlocks/NewHashes messages
 ///
 
-use std::collections::{HashSet, HashMap};
-use std::cmp::{min, max};
+use util::*;
 use std::mem::{replace};
-use util::network::{PeerId, PacketId};
-use util::hash::{H256, FixedHash};
-use util::bytes::{Bytes};
-use util::uint::{U256};
-use util::rlp::{Rlp, UntrustedRlp, RlpStream, self};
-use util::rlp::rlptraits::{Stream, View};
-use util::rlp::rlperrors::DecoderError;
-use util::sha3::Hashable;
-use client::{BlockNumber, BlockChainClient, BlockStatus, QueueStatus, ImportResult};
 use views::{HeaderView};
 use header::{Header as BlockHeader};
+use client::{BlockNumber, BlockChainClient, BlockStatus, QueueStatus, ImportResult};
 use sync::range_collection::{RangeCollection, ToUsize, FromUsize};
 use sync::io::SyncIo;
 
@@ -66,6 +57,8 @@ const NODE_DATA_PACKET: u8 = 0x0e;
 const GET_RECEIPTS_PACKET: u8 = 0x0f;
 const RECEIPTS_PACKET: u8 = 0x10;
 
+const NETWORK_ID: U256 = ONE_U256; //TODO: get this from parent
+
 struct Header {
 	/// Header data
 	data: Bytes,
@@ -241,7 +234,19 @@ impl ChainSync {
 			asking_blocks: Vec::new(),
 		};
 
-		trace!(target: "sync", "New peer (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{})", peer.protocol_version, peer.network_id, peer.difficulty, peer.latest, peer.genesis);
+		trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{})", peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest, peer.genesis);
+
+		let chain_info = io.chain().chain_info();
+		if peer.genesis != chain_info.genesis_hash {
+			io.disable_peer(peer_id);
+			trace!(target: "sync", "Peer {} genesis hash not matched", peer_id);
+			return Ok(());
+		}
+		if peer.network_id != NETWORK_ID {
+			io.disable_peer(peer_id);
+			trace!(target: "sync", "Peer {} network id not matched", peer_id);
+			return Ok(());
+		}
 
 		let old = self.peers.insert(peer_id.clone(), peer);
 		if old.is_some() {
@@ -449,8 +454,10 @@ impl ChainSync {
 	/// Called by peer when it is disconnecting
 	pub fn on_peer_aborting(&mut self, io: &mut SyncIo, peer: &PeerId) {
 		trace!(target: "sync", "== Disconnected {}", peer);
-		self.clear_peer_download(peer);
-		self.continue_sync(io);
+		if self.peers.contains_key(&peer) {
+			self.clear_peer_download(peer);
+			self.continue_sync(io);
+		}
 	}
 
@@ -769,7 +776,7 @@ impl ChainSync {
 		let mut packet = RlpStream::new_list(5);
 		let chain = io.chain().chain_info();
 		packet.append(&(PROTOCOL_VERSION as u32));
-		packet.append(&0u32); //TODO: network id
+		packet.append(&NETWORK_ID); //TODO: network id
 		packet.append(&chain.total_difficulty);
 		packet.append(&chain.best_block_hash);
 		packet.append(&chain.genesis_hash);

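A note on [PATCH 4/5]: the new NETWORK_ID constant (ONE_U256, i.e. network id 1, the public Frontier chain) is now both advertised in the status packet and, together with the genesis hash, checked against each peer's handshake, so incompatible peers are disconnected rather than kept as sync targets. The standalone sketch below is illustrative only and not part of the patch; PeerStatus, LocalChain and peer_is_compatible are simplified stand-ins for the real ChainSync/BlockChainClient types, but the check mirrors the one added above:

    // A peer is kept only if it reports the same genesis block and network id as the local chain.
    struct PeerStatus { network_id: u64, genesis: [u8; 32] }
    struct LocalChain { network_id: u64, genesis_hash: [u8; 32] }

    fn peer_is_compatible(chain: &LocalChain, peer: &PeerStatus) -> bool {
        peer.genesis == chain.genesis_hash && peer.network_id == chain.network_id
    }

    fn main() {
        let chain = LocalChain { network_id: 1, genesis_hash: [0u8; 32] };
        let good = PeerStatus { network_id: 1, genesis: [0u8; 32] };
        let wrong_net = PeerStatus { network_id: 2, genesis: [0u8; 32] };
        assert!(peer_is_compatible(&chain, &good));
        assert!(!peer_is_compatible(&chain, &wrong_net)); // would be disconnected
    }
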
From 5f5f26de48f8566237b0bab911f8ecdf25711bb4 Mon Sep 17 00:00:00 2001
From: arkpar
Date: Sun, 10 Jan 2016 15:08:57 +0100
Subject: [PATCH 5/5] Do not insert new blocks out of order

---
 src/sync/chain.rs | 52 ++++++++++++++++++++++++++++------------------------
 src/sync/io.rs    | 11 ++++++-----
 src/sync/tests.rs |  7 ++++---
 3 files changed, 38 insertions(+), 32 deletions(-)

diff --git a/src/sync/chain.rs b/src/sync/chain.rs
index 95505ff81..bfbf3cfb8 100644
--- a/src/sync/chain.rs
+++ b/src/sync/chain.rs
@@ -386,31 +386,35 @@ impl ChainSync {
 		let h = header_rlp.as_raw().sha3();
 
 		trace!(target: "sync", "{} -> NewBlock ({})", peer_id, h);
-		match io.chain().import_block(block_rlp.as_raw()) {
-			ImportResult::AlreadyInChain => {
-				trace!(target: "sync", "New block already in chain {:?}", h);
-			},
-			ImportResult::AlreadyQueued(_) => {
-				trace!(target: "sync", "New block already queued {:?}", h);
-			},
-			ImportResult::Queued(QueueStatus::Known) => {
-				trace!(target: "sync", "New block queued {:?}", h);
-			},
-			ImportResult::Queued(QueueStatus::Unknown) => {
-				trace!(target: "sync", "New block unknown {:?}", h);
-				//TODO: handle too many unknown blocks
-				let difficulty: U256 = try!(r.val_at(1));
-				let peer_difficulty = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer").difficulty;
-				if difficulty > peer_difficulty {
-					trace!(target: "sync", "Received block {:?} with no known parent. Peer needs syncing...", h);
-					self.sync_peer(io, peer_id, true);
+		let header_view = HeaderView::new(header_rlp.as_raw());
+		// TODO: Decompose block and add to self.headers and self.bodies instead
+		if header_view.number() == From::from(self.last_imported_block + 1) {
+			match io.chain().import_block(block_rlp.as_raw()) {
+				ImportResult::AlreadyInChain => {
+					trace!(target: "sync", "New block already in chain {:?}", h);
+				},
+				ImportResult::AlreadyQueued(_) => {
+					trace!(target: "sync", "New block already queued {:?}", h);
+				},
+				ImportResult::Queued(QueueStatus::Known) => {
+					trace!(target: "sync", "New block queued {:?}", h);
+				},
+				ImportResult::Queued(QueueStatus::Unknown) => {
+					trace!(target: "sync", "New block unknown {:?}", h);
+					//TODO: handle too many unknown blocks
+					let difficulty: U256 = try!(r.val_at(1));
+					let peer_difficulty = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer").difficulty;
+					if difficulty > peer_difficulty {
+						trace!(target: "sync", "Received block {:?} with no known parent. Peer needs syncing...", h);
+						self.sync_peer(io, peer_id, true);
+					}
+				},
+				ImportResult::Bad =>{
+					debug!(target: "sync", "Bad new block {:?}", h);
+					io.disable_peer(peer_id);
 				}
-			},
-			ImportResult::Bad =>{
-				debug!(target: "sync", "Bad new block {:?}", h);
-				io.disable_peer(peer_id);
-			}
-		};
+			};
+		}
 		Ok(())
 	}
 
diff --git a/src/sync/io.rs b/src/sync/io.rs
index 54bd22f14..ed7b0fec5 100644
--- a/src/sync/io.rs
+++ b/src/sync/io.rs
@@ -1,10 +1,11 @@
 use client::BlockChainClient;
-use util::network::{HandlerIo, PeerId, PacketId, Error as NetworkError};
+use util::network::{HandlerIo, PeerId, PacketId,};
+use util::error::UtilError;
 
 pub trait SyncIo {
 	fn disable_peer(&mut self, peer_id: &PeerId);
-	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError>;
-	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError>;
+	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError>;
+	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError>;
 	fn chain<'s>(&'s mut self) -> &'s mut BlockChainClient;
 }
 
@@ -27,11 +28,11 @@ impl<'s, 'h> SyncIo for NetSyncIo<'s, 'h> {
 		self.network.disable_peer(*peer_id);
 	}
 
-	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError>{
+	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError>{
 		self.network.respond(packet_id, data)
 	}
 
-	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError>{
+	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError>{
 		self.network.send(peer_id, packet_id, data)
 	}
 
diff --git a/src/sync/tests.rs b/src/sync/tests.rs
index 28e526aa9..dfcf75c8b 100644
--- a/src/sync/tests.rs
+++ b/src/sync/tests.rs
@@ -4,7 +4,8 @@ use util::hash::{H256, FixedHash};
 use util::uint::{U256};
 use util::sha3::Hashable;
 use util::rlp::{self, Rlp, RlpStream, View, Stream};
-use util::network::{PeerId, PacketId, Error as NetworkError};
+use util::network::{PeerId, PacketId};
+use util::error::UtilError;
 use client::{BlockChainClient, BlockStatus, BlockNumber, TreeRoute, BlockQueueStatus, BlockChainInfo, ImportResult, QueueStatus};
 use header::Header as BlockHeader;
 use sync::io::SyncIo;
@@ -195,7 +196,7 @@ impl<'p> SyncIo for TestIo<'p> {
 	fn disable_peer(&mut self, _peer_id: &PeerId) {
 	}
 
-	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError> {
+	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError> {
 		self.queue.push_back(TestPacket {
 			data: data,
 			packet_id: packet_id,
@@ -204,7 +205,7 @@ impl<'p> SyncIo for TestIo<'p> {
-	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError> {
+	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError> {
 		self.queue.push_back(TestPacket {
 			data: data,
 			packet_id: packet_id,
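
A closing note on [PATCH 5/5]: the behavioural change is in the NewBlock handler, which now imports an announced block only when its number directly extends last_imported_block; blocks that arrive further ahead are no longer pushed into the queue out of order (the TODO notes they should eventually be decomposed into self.headers and self.bodies instead). The standalone sketch below is illustrative only and not part of the patch; should_import_immediately is an invented name standing in for the inline check on header_view.number():

    // Import an announced block only if it is the direct successor of the last imported block.
    fn should_import_immediately(announced_number: u64, last_imported_block: u64) -> bool {
        announced_number == last_imported_block + 1
    }

    fn main() {
        assert!(should_import_immediately(101, 100));  // next in sequence: import now
        assert!(!should_import_immediately(105, 100)); // too far ahead: leave to ordered sync
        assert!(!should_import_immediately(100, 100)); // at or behind the head: nothing to do
    }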