From 53c2cbc631b30d8919d0b393e0da6f5ac238fabd Mon Sep 17 00:00:00 2001
From: Nikolay Volf
Date: Fri, 29 Jan 2016 13:16:53 +0400
Subject: [PATCH 1/7] moved common code to helpers

---
 src/tests/helpers.rs | 15 +++++++++++++++
 src/verification.rs  | 23 +----------------------
 2 files changed, 16 insertions(+), 22 deletions(-)

diff --git a/src/tests/helpers.rs b/src/tests/helpers.rs
index a566392cc..fd3302ffe 100644
--- a/src/tests/helpers.rs
+++ b/src/tests/helpers.rs
@@ -44,6 +44,21 @@ pub fn create_test_block(header: &Header) -> Bytes {
 	rlp.out()
 }
 
+pub fn create_test_block_with_data(header: &Header, transactions: &[&Transaction], uncles: &[Header]) -> Bytes {
+	let mut rlp = RlpStream::new_list(3);
+	rlp.append(header);
+	rlp.append_list(transactions.len());
+	for t in transactions {
+		rlp.append_raw(&t.rlp_bytes_opt(Seal::With), 1);
+	}
+	rlp.append_list(uncles.len());
+	for h in uncles {
+		rlp.append(h);
+	}
+	rlp.out()
+}
+
+
 pub fn generate_dummy_client(block_number: usize) -> Arc<Client> {
 	let dir = RandomTempPath::new();
 
diff --git a/src/verification.rs b/src/verification.rs
index 158f28f95..368ff84e9 100644
--- a/src/verification.rs
+++ b/src/verification.rs
@@ -221,28 +221,7 @@ mod tests {
 	use spec::*;
 	use transaction::*;
 	use basic_types::*;
-
-	fn create_test_block(header: &Header) -> Bytes {
-		let mut rlp = RlpStream::new_list(3);
-		rlp.append(header);
-		rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
-		rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
-		rlp.out()
-	}
-
-	fn create_test_block_with_data(header: &Header, transactions: &[&Transaction], uncles: &[Header]) -> Bytes {
-		let mut rlp = RlpStream::new_list(3);
-		rlp.append(header);
-		rlp.append_list(transactions.len());
-		for t in transactions {
-			rlp.append_raw(&t.rlp_bytes_opt(Seal::With), 1);
-		}
-		rlp.append_list(uncles.len());
-		for h in uncles {
-			rlp.append(h);
-		}
-		rlp.out()
-	}
+	use tests::helpers::*;
 
 	fn check_ok(result: Result<(), Error>) {
 		result.unwrap_or_else(|e| panic!("Block verification failed: {:?}", e));

From c1cf9bdcdfa012164e1164d5836a1e418a908973 Mon Sep 17 00:00:00 2001
From: debris
Date: Fri, 29 Jan 2016 14:19:14 +0100
Subject: [PATCH 2/7] defensive initializing log

---
 src/tests/chain.rs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/tests/chain.rs b/src/tests/chain.rs
index e260deddc..d56d99491 100644
--- a/src/tests/chain.rs
+++ b/src/tests/chain.rs
@@ -18,11 +18,13 @@ lazy_static! {
 		let mut builder = LogBuilder::new();
 		builder.filter(None, LogLevelFilter::Info);
 
-		if env::var("RUST_LOG").is_ok() {
-			builder.parse(&env::var("RUST_LOG").unwrap());
+		if let Ok(log) = env::var("RUST_LOG") {
+			builder.parse(&log);
 		}
 
-		builder.init().unwrap();
+		if let Ok(_) = builder.init() {
+			println!("logger initialized");
+		}
 		true
 	};
 }

From 15fdebc4f348f37a312bc2322d065caca6a4b0ff Mon Sep 17 00:00:00 2001
From: Nikolay Volf
Date: Fri, 29 Jan 2016 17:42:29 +0400
Subject: [PATCH 3/7] chain sync initial state refact

---
 src/sync/chain.rs | 79 ++++++++++++++++++++++++++---------------------
 src/sync/tests.rs | 71 +++++++++++++++++++++++++++++++++++++++---
 2 files changed, 109 insertions(+), 41 deletions(-)

diff --git a/src/sync/chain.rs b/src/sync/chain.rs
index 1f79477af..3eaf69cb1 100644
--- a/src/sync/chain.rs
+++ b/src/sync/chain.rs
@@ -21,6 +21,7 @@ use client::{BlockChainClient, BlockStatus};
 use sync::range_collection::{RangeCollection, ToUsize, FromUsize};
 use error::*;
 use sync::io::SyncIo;
+use std::option::Option;
 
 impl ToUsize for BlockNumber {
 	fn to_usize(&self) -> usize {
@@ -99,14 +100,14 @@ pub struct SyncStatus {
 	pub protocol_version: u8,
 	/// BlockChain height for the moment the sync started.
 	pub start_block_number: BlockNumber,
-	/// Last fully downloaded and imported block number.
-	pub last_imported_block_number: BlockNumber,
-	/// Highest block number in the download queue.
-	pub highest_block_number: BlockNumber,
+	/// Last fully downloaded and imported block number (if any).
+	pub last_imported_block_number: Option<BlockNumber>,
+	/// Highest block number in the download queue (if any).
+	pub highest_block_number: Option<BlockNumber>,
 	/// Total number of blocks for the sync process.
-	pub blocks_total: usize,
+	pub blocks_total: BlockNumber,
 	/// Number of blocks downloaded so far.
-	pub blocks_received: usize,
+	pub blocks_received: BlockNumber,
 	/// Total number of connected peers
 	pub num_peers: usize,
 	/// Total number of active peers
@@ -147,7 +148,7 @@ pub struct ChainSync {
 	/// Last block number for the start of sync
 	starting_block: BlockNumber,
 	/// Highest block number seen
-	highest_block: BlockNumber,
+	highest_block: Option<BlockNumber>,
 	/// Set of block header numbers being downloaded
 	downloading_headers: HashSet<BlockNumber>,
 	/// Set of block body numbers being downloaded
@@ -161,9 +162,9 @@ pub struct ChainSync {
 	/// Used to map body to header
 	header_ids: HashMap<H256, BlockNumber>,
 	/// Last impoted block number
-	last_imported_block: BlockNumber,
+	last_imported_block: Option<BlockNumber>,
 	/// Last impoted block hash
-	last_imported_hash: H256,
+	last_imported_hash: Option<H256>,
 	/// Syncing total difficulty
 	syncing_difficulty: U256,
 	/// True if common block for our and remote chain has been found
@@ -177,15 +178,15 @@ impl ChainSync {
 		ChainSync {
 			state: SyncState::NotSynced,
 			starting_block: 0,
-			highest_block: 0,
+			highest_block: None,
 			downloading_headers: HashSet::new(),
 			downloading_bodies: HashSet::new(),
 			headers: Vec::new(),
 			bodies: Vec::new(),
 			peers: HashMap::new(),
 			header_ids: HashMap::new(),
-			last_imported_block: 0,
-			last_imported_hash: H256::new(),
+			last_imported_block: None,
+			last_imported_hash: None,
 			syncing_difficulty: U256::from(0u64),
 			have_common_block: false,
 		}
@@ -199,8 +200,8 @@ impl ChainSync {
 			start_block_number: self.starting_block,
 			last_imported_block_number: self.last_imported_block,
 			highest_block_number: self.highest_block,
-			blocks_received: (self.last_imported_block - self.starting_block) as usize,
-			blocks_total: (self.highest_block - self.starting_block) as usize,
+			blocks_received: match self.last_imported_block { None => 0, Some(x) => x - self.starting_block },
+			blocks_total: match self.highest_block { None => 0, Some(x) => x - self.starting_block },
 			num_peers: self.peers.len(),
 			num_active_peers: self.peers.values().filter(|p| p.asking != PeerAsking::Nothing).count(),
 		}
@@ -229,10 +230,10 @@ impl ChainSync {
 	/// Restart sync
 	pub fn restart(&mut self, io: &mut SyncIo) {
 		self.reset();
-		self.last_imported_block = 0;
-		self.last_imported_hash = H256::new();
+		self.last_imported_block = None;
+		self.last_imported_hash = None;
 		self.starting_block = 0;
-		self.highest_block = 0;
+		self.highest_block = None;
 		self.have_common_block = false;
 		io.chain().clear_queue();
 		self.starting_block = io.chain().chain_info().best_block_number;
@@ -293,25 +294,27 @@ impl ChainSync {
 		for i in 0..item_count {
 			let info: BlockHeader = try!(r.val_at(i));
 			let number = BlockNumber::from(info.number);
-			if number <= self.last_imported_block || self.headers.have_item(&number) {
+			if number <= self.current_base_block() || self.headers.have_item(&number) {
 				trace!(target: "sync", "Skipping existing block header");
 				continue;
 			}
-			if number > self.highest_block {
-				self.highest_block = number;
+
+			if self.highest_block == None || number > self.highest_block.unwrap() {
+				self.highest_block = Some(number);
 			}
 			let hash = info.hash();
 			match io.chain().block_status(&hash) {
 				BlockStatus::InChain => {
 					self.have_common_block = true;
-					self.last_imported_block = number;
-					self.last_imported_hash = hash.clone();
+					self.last_imported_block = Some(number);
+					self.last_imported_hash = Some(hash.clone());
 					trace!(target: "sync", "Found common header {} ({})", number, hash);
 				},
 				_ => {
 					if self.have_common_block {
 						//validate chain
-						if self.have_common_block && number == self.last_imported_block + 1 && info.parent_hash != self.last_imported_hash {
+						let base_hash = self.last_imported_hash.clone().unwrap();
+						if self.have_common_block && number == self.current_base_block() + 1 && info.parent_hash != base_hash {
 							// TODO: lower peer rating
 							debug!(target: "sync", "Mismatched block header {} {}", number, hash);
 							continue;
@@ -407,7 +410,7 @@ impl ChainSync {
 			trace!(target: "sync", "{} -> NewBlock ({})", peer_id, h);
 			let header_view = HeaderView::new(header_rlp.as_raw());
 			// TODO: Decompose block and add to self.headers and self.bodies instead
-			if header_view.number() == From::from(self.last_imported_block + 1) {
+			if header_view.number() == From::from(self.current_base_block() + 1) {
 				match io.chain().import_block(block_rlp.as_raw().to_vec()) {
 					Err(ImportError::AlreadyInChain) => {
 						trace!(target: "sync", "New block already in chain {:?}", h);
@@ -550,6 +553,10 @@ impl ChainSync {
 		}
 	}
 
+	fn current_base_block(&self) -> BlockNumber {
+		match self.last_imported_block { None => 0, Some(x) => x }
+	}
+
 	/// Find some headers or blocks to download for a peer.
 	fn request_blocks(&mut self, io: &mut SyncIo, peer_id: PeerId) {
 		self.clear_peer_download(peer_id);
@@ -563,7 +570,7 @@ impl ChainSync {
 		let mut needed_bodies: Vec<H256> = Vec::new();
 		let mut needed_numbers: Vec<BlockNumber> = Vec::new();
 
-		if self.have_common_block && !self.headers.is_empty() && self.headers.range_iter().next().unwrap().0 == self.last_imported_block + 1 {
+		if self.have_common_block && !self.headers.is_empty() && self.headers.range_iter().next().unwrap().0 == self.current_base_block() + 1 {
 			for (start, ref items) in self.headers.range_iter() {
 				if needed_bodies.len() > MAX_BODIES_TO_REQUEST {
 					break;
@@ -596,12 +603,12 @@ impl ChainSync {
 			}
 			if start == 0 {
 				self.have_common_block = true; //reached genesis
-				self.last_imported_hash = chain_info.genesis_hash;
+				self.last_imported_hash = Some(chain_info.genesis_hash);
 			}
 		}
 		if self.have_common_block {
 			let mut headers: Vec<BlockNumber> = Vec::new();
-			let mut prev = self.last_imported_block + 1;
+			let mut prev = self.current_base_block() + 1;
 			for (next, ref items) in self.headers.range_iter() {
 				if !headers.is_empty() {
 					break;
@@ -656,7 +663,7 @@ impl ChainSync {
 		{
 			let headers = self.headers.range_iter().next().unwrap();
 			let bodies = self.bodies.range_iter().next().unwrap();
-			if headers.0 != bodies.0 || headers.0 != self.last_imported_block + 1 {
+			if headers.0 != bodies.0 || headers.0 != self.current_base_block() + 1 {
 				return;
 			}
 
@@ -672,18 +679,18 @@ impl ChainSync {
 			match io.chain().import_block(block_rlp.out()) {
 				Err(ImportError::AlreadyInChain) => {
 					trace!(target: "sync", "Block already in chain {:?}", h);
-					self.last_imported_block = headers.0 + i as BlockNumber;
-					self.last_imported_hash = h.clone();
+					self.last_imported_block = Some(headers.0 + i as BlockNumber);
+					self.last_imported_hash = Some(h.clone());
 				},
 				Err(ImportError::AlreadyQueued) => {
 					trace!(target: "sync", "Block already queued {:?}", h);
-					self.last_imported_block = headers.0 + i as BlockNumber;
-					self.last_imported_hash = h.clone();
+					self.last_imported_block = Some(headers.0 + i as BlockNumber);
+					self.last_imported_hash = Some(h.clone());
 				},
 				Ok(_) => {
 					trace!(target: "sync", "Block queued {:?}", h);
-					self.last_imported_block = headers.0 + i as BlockNumber;
-					self.last_imported_hash = h.clone();
+					self.last_imported_block = Some(headers.0 + i as BlockNumber);
+					self.last_imported_hash = Some(h.clone());
 					imported += 1;
 				},
 				Err(e) => {
@@ -700,8 +707,8 @@ impl ChainSync {
 			return;
 		}
 
-		self.headers.remove_head(&(self.last_imported_block + 1));
-		self.bodies.remove_head(&(self.last_imported_block + 1));
+		self.headers.remove_head(&(self.last_imported_block.unwrap() + 1));
+		self.bodies.remove_head(&(self.last_imported_block.unwrap() + 1));
 
 		if self.headers.is_empty() {
 			assert!(self.bodies.is_empty());
diff --git a/src/sync/tests.rs b/src/sync/tests.rs
index eca8a07a6..5afdf5b36 100644
--- a/src/sync/tests.rs
+++ b/src/sync/tests.rs
@@ -4,7 +4,7 @@ use block_queue::BlockQueueInfo;
 use header::{Header as BlockHeader, BlockNumber};
 use error::*;
 use sync::io::SyncIo;
-use sync::chain::ChainSync;
+use sync::chain::{ChainSync, SyncState};
 
 struct TestBlockChainClient {
 	blocks: RwLock<HashMap<H256, Bytes>>,
@@ -241,13 +241,15 @@ struct TestPeer {
 }
 
 struct TestNet {
-	peers: Vec<TestPeer>
+	peers: Vec<TestPeer>,
+	started: bool
 }
 
 impl TestNet {
 	pub fn new(n: usize) -> TestNet {
 		let mut net = TestNet {
			peers: Vec::new(),
+			started: false
 		};
 		for _ in 0..n {
 			net.peers.push(TestPeer {
@@ -291,10 +293,28 @@ impl TestNet {
 		}
 	}
 
-	pub fn sync(&mut self) {
+	pub fn restart_peer(&mut self, i: usize) {
+		let peer = self.peer_mut(i);
+		peer.sync.restart(&mut TestIo::new(&mut peer.chain, &mut peer.queue, None));
+	}
+
+	pub fn sync(&mut self) -> u32 {
 		self.start();
+		let mut total_steps = 0;
 		while !self.done() {
-			self.sync_step()
+			self.sync_step();
+			total_steps = total_steps + 1;
+		}
+		total_steps
+	}
+
+	pub fn sync_steps(&mut self, count: usize) {
+		if !self.started {
+			self.start();
+			self.started = true;
+		}
+		for _ in 0..count {
+			self.sync_step();
 		}
 	}
 
@@ -303,7 +323,6 @@ impl TestNet {
 	}
 }
 
-
 #[test]
 fn full_sync_two_peers() {
 	::env_logger::init().ok();
@@ -315,6 +334,26 @@ fn full_sync_two_peers() {
 	assert_eq!(net.peer(0).chain.blocks.read().unwrap().deref(), net.peer(1).chain.blocks.read().unwrap().deref());
 }
 
+#[test]
+fn status_after_sync() {
+	::env_logger::init().ok();
+	let mut net = TestNet::new(3);
+	net.peer_mut(1).chain.add_blocks(1000, false);
+	net.peer_mut(2).chain.add_blocks(1000, false);
+	net.sync();
+	let status = net.peer(0).sync.status();
+	assert_eq!(status.state, SyncState::Idle);
+}
+
+#[test]
+fn full_sync_takes_few_steps() {
+	let mut net = TestNet::new(3);
+	net.peer_mut(1).chain.add_blocks(100, false);
+	net.peer_mut(2).chain.add_blocks(100, false);
+	let total_steps = net.sync();
+	assert!(total_steps < 7);
+}
+
 #[test]
 fn full_sync_empty_blocks() {
 	::env_logger::init().ok();
@@ -347,3 +386,25 @@ fn forked_sync() {
 	assert_eq!(net.peer(1).chain.numbers.read().unwrap().deref(), &peer1_chain);
 	assert_eq!(net.peer(2).chain.numbers.read().unwrap().deref(), &peer1_chain);
 }
+
+#[test]
+fn chain_sync_restart() {
+	let mut net = TestNet::new(3);
+	net.peer_mut(1).chain.add_blocks(1000, false);
+	net.peer_mut(2).chain.add_blocks(1000, false);
+
+	net.sync_steps(8);
+
+	// make sure that sync has actually happened
+	assert!(net.peer(0).chain.chain_info().best_block_number > 100);
+	net.restart_peer(0);
+
+	let status = net.peer(0).sync.status();
+	assert_eq!(status.state, SyncState::NotSynced);
+}
+
+#[test]
+fn chain_sync_status() {
+	let net = TestNet::new(2);
+	assert_eq!(net.peer(0).sync.status().state, SyncState::NotSynced);
+}
\ No newline at end of file

From d3b4cfd9d9ea5090739c6106792d2a3b0f8757d4 Mon Sep 17 00:00:00 2001
From: debris
Date: Fri, 29 Jan 2016 14:43:18 +0100
Subject: [PATCH 4/7] changed max vm depth from 128 to 64, change homestead block to 1_000_000

---
 res/ethereum/frontier.json           | 2 +-
 res/ethereum/frontier_like_test.json | 2 +-
 src/executive.rs                     | 2 +-
 src/tests/transaction.rs             | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/res/ethereum/frontier.json b/res/ethereum/frontier.json
index 43be680d2..35cf1ebe0 100644
--- a/res/ethereum/frontier.json
+++ b/res/ethereum/frontier.json
@@ -3,7 +3,7 @@
 	"engineName": "Ethash",
 	"params": {
 		"accountStartNonce": "0x00",
-		"frontierCompatibilityModeLimit": "0xdbba0",
+		"frontierCompatibilityModeLimit": "0xf4240",
 		"maximumExtraDataSize": "0x20",
 		"tieBreakingGas": false,
 		"minGasLimit": "0x1388",
diff --git a/res/ethereum/frontier_like_test.json b/res/ethereum/frontier_like_test.json
index b5cbec465..84a6200fd 100644
--- a/res/ethereum/frontier_like_test.json
+++ b/res/ethereum/frontier_like_test.json
@@ -3,7 +3,7 @@
 	"engineName": "Ethash",
 	"params": {
 		"accountStartNonce": "0x00",
-		"frontierCompatibilityModeLimit": "0x0dbba0",
+		"frontierCompatibilityModeLimit": "0xf4240",
 		"maximumExtraDataSize": "0x20",
 		"tieBreakingGas": false,
 		"minGasLimit": "0x1388",
diff --git a/src/executive.rs b/src/executive.rs
index b21026835..b113363fd 100644
--- a/src/executive.rs
+++ b/src/executive.rs
@@ -10,7 +10,7 @@ use crossbeam;
 /// Max depth to avoid stack overflow (when it's reached we start a new thread with VM)
 /// TODO [todr] We probably need some more sophisticated calculations here (limit on my machine 132)
 /// Maybe something like here: https://github.com/ethereum/libethereum/blob/4db169b8504f2b87f7d5a481819cfb959fc65f6c/libethereum/ExtVM.cpp
-const MAX_VM_DEPTH_FOR_THREAD: usize = 128;
+const MAX_VM_DEPTH_FOR_THREAD: usize = 64;
 
 /// Returns new address created from address and given nonce.
 pub fn contract_address(address: &Address, nonce: &U256) -> Address {
diff --git a/src/tests/transaction.rs b/src/tests/transaction.rs
index 59a6db7cb..91f7d96e5 100644
--- a/src/tests/transaction.rs
+++ b/src/tests/transaction.rs
@@ -13,7 +13,7 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
 		let schedule = match test.find("blocknumber")
 			.and_then(|j| j.as_string())
 			.and_then(|s| BlockNumber::from_str(s).ok())
-			.unwrap_or(0) { x if x < 900000 => &old_schedule, _ => &new_schedule };
+			.unwrap_or(0) { x if x < 1_000_000 => &old_schedule, _ => &new_schedule };
 		let rlp = Bytes::from_json(&test["rlp"]);
 		let res = UntrustedRlp::new(&rlp).as_val().map_err(From::from).and_then(|t: Transaction| t.validate(schedule, schedule.have_delegate_call));
 		fail_unless(test.find("transaction").is_none() == res.is_err());

From 952a2986380c30a881acfa470175e17c863110ff Mon Sep 17 00:00:00 2001
From: Nikolay Volf
Date: Fri, 29 Jan 2016 17:57:17 +0400
Subject: [PATCH 5/7] test names

---
 src/sync/tests.rs | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/sync/tests.rs b/src/sync/tests.rs
index 5afdf5b36..7f19c3781 100644
--- a/src/sync/tests.rs
+++ b/src/sync/tests.rs
@@ -324,7 +324,7 @@ impl TestNet {
 }
 
 #[test]
-fn full_sync_two_peers() {
+fn chain_two_peers() {
 	::env_logger::init().ok();
 	let mut net = TestNet::new(3);
 	net.peer_mut(1).chain.add_blocks(1000, false);
@@ -335,7 +335,7 @@ fn full_sync_two_peers() {
 }
 
 #[test]
-fn status_after_sync() {
+fn chain_status_after_sync() {
 	::env_logger::init().ok();
 	let mut net = TestNet::new(3);
 	net.peer_mut(1).chain.add_blocks(1000, false);
@@ -346,7 +346,7 @@ fn status_after_sync() {
 }
 
 #[test]
-fn full_sync_takes_few_steps() {
+fn chain_takes_few_steps() {
 	let mut net = TestNet::new(3);
 	net.peer_mut(1).chain.add_blocks(100, false);
 	net.peer_mut(2).chain.add_blocks(100, false);
@@ -355,7 +355,7 @@ fn full_sync_takes_few_steps() {
 }
 
 #[test]
-fn full_sync_empty_blocks() {
+fn chain_empty_blocks() {
 	::env_logger::init().ok();
 	let mut net = TestNet::new(3);
 	for n in 0..200 {
@@ -368,7 +368,7 @@ fn full_sync_empty_blocks() {
 }
 
 #[test]
-fn forked_sync() {
+fn chain_forged() {
 	::env_logger::init().ok();
 	let mut net = TestNet::new(3);
 	net.peer_mut(0).chain.add_blocks(300, false);
@@ -388,7 +388,7 @@ fn forked_sync() {
 }
 
 #[test]
-fn chain_sync_restart() {
+fn chain_restart() {
 	let mut net = TestNet::new(3);
 	net.peer_mut(1).chain.add_blocks(1000, false);
 	net.peer_mut(2).chain.add_blocks(1000, false);
@@ -404,7 +404,7 @@ fn chain_sync_restart() {
 }
 
 #[test]
-fn chain_sync_status() {
+fn chain_status_empty() {
 	let net = TestNet::new(2);
 	assert_eq!(net.peer(0).sync.status().state, SyncState::NotSynced);
-}
\ No newline at end of file
+}

From 3aa00586e39ee64e493ae13445ad127ca30c54e8 Mon Sep 17 00:00:00 2001
From: arkpar
Date: Fri, 29 Jan 2016 15:01:39 +0100
Subject: [PATCH 6/7] Common log init function

---
 src/tests/chain.rs | 18 +-----------------
 util/src/lib.rs    |  6 ++++--
 util/src/log.rs    | 26 ++++++++++++++++++++++++++
 3 files changed, 31 insertions(+), 19 deletions(-)
 create mode 100644 util/src/log.rs

diff --git a/src/tests/chain.rs b/src/tests/chain.rs
index d56d99491..480042653 100644
--- a/src/tests/chain.rs
+++ b/src/tests/chain.rs
@@ -13,24 +13,8 @@ pub enum ChainEra {
 	Homestead,
 }
 
-lazy_static! {
-	static ref LOG_DUMMY: bool = {
-		let mut builder = LogBuilder::new();
-		builder.filter(None, LogLevelFilter::Info);
-
-		if let Ok(log) = env::var("RUST_LOG") {
-			builder.parse(&log);
-		}
-
-		if let Ok(_) = builder.init() {
-			println!("logger initialized");
-		}
-		true
-	};
-}
-
 pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
-	let _ = LOG_DUMMY.deref();
+	init_log();
 	let json = Json::from_str(::std::str::from_utf8(json_data).unwrap()).expect("Json is invalid");
 	let mut failed = Vec::new();
 
diff --git a/util/src/lib.rs b/util/src/lib.rs
index b1b93968c..622fc950b 100644
--- a/util/src/lib.rs
+++ b/util/src/lib.rs
@@ -43,8 +43,6 @@ extern crate tiny_keccak;
 #[macro_use]
 extern crate heapsize;
 #[macro_use]
-extern crate log;
-#[macro_use]
 extern crate lazy_static;
 #[macro_use]
 extern crate itertools;
@@ -56,6 +54,8 @@ extern crate arrayvec;
 extern crate elastic_array;
 extern crate crossbeam;
 extern crate serde;
+#[macro_use]
+extern crate log as rlog;
 
 /// TODO [Gav Wood] Please document me
 pub mod standard;
@@ -98,6 +98,7 @@ pub mod semantic_version;
 pub mod io;
 /// TODO [Gav Wood] Please document me
 pub mod network;
+pub mod log;
 
 pub use common::*;
 pub use misc::*;
@@ -118,3 +119,4 @@ pub use squeeze::*;
 pub use semantic_version::*;
 pub use network::*;
 pub use io::*;
+pub use log::*;
diff --git a/util/src/log.rs b/util/src/log.rs
new file mode 100644
index 000000000..619d4af3a
--- /dev/null
+++ b/util/src/log.rs
@@ -0,0 +1,26 @@
+//! Common log helper functions
+
+use std::env;
+use rlog::{LogLevelFilter};
+use env_logger::LogBuilder;
+
+lazy_static! {
+	static ref LOG_DUMMY: bool = {
+		let mut builder = LogBuilder::new();
+		builder.filter(None, LogLevelFilter::Info);
+
+		if let Ok(log) = env::var("RUST_LOG") {
+			builder.parse(&log);
+		}
+
+		if let Ok(_) = builder.init() {
+			println!("logger initialized");
+		}
+		true
+	};
+}
+
+/// Intialize log with default settings
+pub fn init_log() {
+	let _ = *LOG_DUMMY;
+}

From 233684bf13f434dc13b45c01c1e7eee4477c995a Mon Sep 17 00:00:00 2001
From: arkpar
Date: Fri, 29 Jan 2016 15:06:25 +0100
Subject: [PATCH 7/7] Fixed warnings

---
 src/tests/chain.rs | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/tests/chain.rs b/src/tests/chain.rs
index 480042653..35e580c1e 100644
--- a/src/tests/chain.rs
+++ b/src/tests/chain.rs
@@ -1,6 +1,3 @@
-use std::env;
-use log::{LogLevelFilter};
-use env_logger::LogBuilder;
 use super::test_common::*;
 use client::{BlockChainClient,Client};
 use pod_state::*;