From 99a6802b619cb3c1c821ed9645d509dfc9cbc9b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 8 Mar 2016 15:46:44 +0100 Subject: [PATCH 01/42] Moving block sealing and transaction_queue to separate create --- Cargo.lock | 18 ++- Cargo.toml | 1 + ethcore/src/client.rs | 105 ++++------------ miner/Cargo.toml | 20 +++ miner/src/lib.rs | 86 +++++++++++++ miner/src/miner.rs | 149 +++++++++++++++++++++++ {sync => miner}/src/transaction_queue.rs | 0 parity/main.rs | 73 ++++++----- rpc/Cargo.toml | 1 + rpc/src/lib.rs | 1 + rpc/src/v1/impls/eth.rs | 13 +- sync/Cargo.toml | 5 +- sync/src/chain.rs | 56 ++------- sync/src/lib.rs | 14 +-- 14 files changed, 375 insertions(+), 167 deletions(-) create mode 100644 miner/Cargo.toml create mode 100644 miner/src/lib.rs create mode 100644 miner/src/miner.rs rename {sync => miner}/src/transaction_queue.rs (100%) diff --git a/Cargo.lock b/Cargo.lock index 510e69b59..505fcac63 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11,6 +11,7 @@ dependencies = [ "ethcore-devtools 0.9.99", "ethcore-rpc 0.9.99", "ethcore-util 0.9.99", + "ethminer 0.9.99", "ethsync 0.9.99", "fdlimit 0.1.0", "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -235,6 +236,7 @@ dependencies = [ "ethash 0.9.99", "ethcore 0.9.99", "ethcore-util 0.9.99", + "ethminer 0.9.99", "ethsync 0.9.99", "jsonrpc-core 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-http-server 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -282,6 +284,19 @@ dependencies = [ "vergen 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "ethminer" +version = "0.9.99" +dependencies = [ + "clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ethcore 0.9.99", + "ethcore-util 0.9.99", + "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rayon 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "ethsync" version = "0.9.99" @@ -290,11 +305,10 @@ dependencies = [ "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 0.9.99", "ethcore-util 0.9.99", + "ethminer 0.9.99", "heapsize 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "rayon 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/Cargo.toml b/Cargo.toml index 9b8ec6405..7e5bc334b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,7 @@ clippy = { version = "0.0.44", optional = true } ethcore-util = { path = "util" } ethcore = { path = "ethcore" } ethsync = { path = "sync" } +ethminer = { path = "miner" } ethcore-rpc = { path = "rpc", optional = true } fdlimit = { path = "util/fdlimit" } daemonize = "0.2" diff --git a/ethcore/src/client.rs b/ethcore/src/client.rs index 874fc9646..af1745ca8 100644 --- a/ethcore/src/client.rs +++ b/ethcore/src/client.rs @@ -17,7 +17,6 @@ //! Blockchain database client. 
use std::marker::PhantomData; -use std::sync::atomic::AtomicBool; use util::*; use util::panics::*; use blockchain::{BlockChain, BlockProvider}; @@ -185,6 +184,9 @@ pub trait BlockChainClient : Sync + Send { /// Returns logs matching given filter. fn logs(&self, filter: Filter) -> Vec; + + fn prepare_sealing(&self, author: Address, extra_data: Bytes) -> Option; + fn try_seal(&self, block: ClosedBlock, seal: Vec) -> Result; } #[derive(Default, Clone, Debug, Eq, PartialEq)] @@ -219,12 +221,6 @@ pub struct Client where V: Verifier { report: RwLock, import_lock: Mutex<()>, panic_handler: Arc, - - // for sealing... - sealing_enabled: AtomicBool, - sealing_block: Mutex>, - author: RwLock
, - extra_data: RwLock, verifier: PhantomData, secret_store: Arc>, } @@ -273,10 +269,6 @@ impl Client where V: Verifier { report: RwLock::new(Default::default()), import_lock: Mutex::new(()), panic_handler: panic_handler, - sealing_enabled: AtomicBool::new(false), - sealing_block: Mutex::new(None), - author: RwLock::new(Address::new()), - extra_data: RwLock::new(Vec::new()), verifier: PhantomData, secret_store: secret_store, })) @@ -425,10 +417,6 @@ impl Client where V: Verifier { } } - if self.chain_info().best_block_hash != original_best && self.sealing_enabled.load(atomic::Ordering::Relaxed) { - self.prepare_sealing(); - } - imported } @@ -477,85 +465,46 @@ impl Client where V: Verifier { BlockId::Latest => Some(self.chain.read().unwrap().best_block_number()) } } +} - /// Get the author that we will seal blocks as. - pub fn author(&self) -> Address { - self.author.read().unwrap().clone() + +// TODO: need MinerService MinerIoHandler + +impl BlockChainClient for Client where V: Verifier { + + + fn try_seal(&self, block: ClosedBlock, seal: Vec) -> Result { + block.try_seal(self.engine.deref().deref(), seal) } - /// Set the author that we will seal blocks as. - pub fn set_author(&self, author: Address) { - *self.author.write().unwrap() = author; - } - - /// Get the extra_data that we will seal blocks wuth. - pub fn extra_data(&self) -> Bytes { - self.extra_data.read().unwrap().clone() - } - - /// Set the extra_data that we will seal blocks with. - pub fn set_extra_data(&self, extra_data: Bytes) { - *self.extra_data.write().unwrap() = extra_data; - } - - /// New chain head event. Restart mining operation. - pub fn prepare_sealing(&self) { + fn prepare_sealing(&self, author: Address, extra_data: Bytes) -> Option { + let engine = self.engine.deref().deref(); let h = self.chain.read().unwrap().best_block_hash(); + let mut b = OpenBlock::new( - self.engine.deref().deref(), + engine, self.state_db.lock().unwrap().clone(), - match self.chain.read().unwrap().block_header(&h) { Some(ref x) => x, None => {return;} }, + match self.chain.read().unwrap().block_header(&h) { Some(ref x) => x, None => { return None; } }, self.build_last_hashes(h.clone()), - self.author(), - self.extra_data() + author, + extra_data, ); - self.chain.read().unwrap().find_uncle_headers(&h, self.engine.deref().deref().maximum_uncle_age()).unwrap().into_iter().take(self.engine.deref().deref().maximum_uncle_count()).foreach(|h| { b.push_uncle(h).unwrap(); }); + self.chain.read().unwrap().find_uncle_headers(&h, engine.maximum_uncle_age()) + .unwrap() + .into_iter() + .take(engine.maximum_uncle_count()) + .foreach(|h| { + b.push_uncle(h).unwrap(); + }); // TODO: push transactions. let b = b.close(); trace!("Sealing: number={}, hash={}, diff={}", b.hash(), b.block().header().difficulty(), b.block().header().number()); - *self.sealing_block.lock().unwrap() = Some(b); + Some(b) } - /// Grab the `ClosedBlock` that we want to be sealed. Comes as a mutex that you have to lock. - pub fn sealing_block(&self) -> &Mutex> { - if self.sealing_block.lock().unwrap().is_none() { - self.sealing_enabled.store(true, atomic::Ordering::Relaxed); - // TODO: Above should be on a timer that resets after two blocks have arrived without being asked for. - self.prepare_sealing(); - } - &self.sealing_block - } - - /// Submit `seal` as a valid solution for the header of `pow_hash`. - /// Will check the seal, but not actually insert the block into the chain. 
- pub fn submit_seal(&self, pow_hash: H256, seal: Vec) -> Result<(), Error> { - let mut maybe_b = self.sealing_block.lock().unwrap(); - match *maybe_b { - Some(ref b) if b.hash() == pow_hash => {} - _ => { return Err(Error::PowHashInvalid); } - } - - let b = maybe_b.take(); - match b.unwrap().try_seal(self.engine.deref().deref(), seal) { - Err(old) => { - *maybe_b = Some(old); - Err(Error::PowInvalid) - } - Ok(sealed) => { - // TODO: commit DB from `sealed.drain` and make a VerifiedBlock to skip running the transactions twice. - try!(self.import_block(sealed.rlp_bytes())); - Ok(()) - } - } - } -} - -// TODO: need MinerService MinerIoHandler - -impl BlockChainClient for Client where V: Verifier { fn block_header(&self, id: BlockId) -> Option { let chain = self.chain.read().unwrap(); Self::block_hash(&chain, id).and_then(|hash| chain.block(&hash).map(|bytes| BlockView::new(&bytes).rlp().at(0).as_raw().to_vec())) diff --git a/miner/Cargo.toml b/miner/Cargo.toml new file mode 100644 index 000000000..0972aa122 --- /dev/null +++ b/miner/Cargo.toml @@ -0,0 +1,20 @@ +[package] +description = "Ethminer library" +homepage = "http://ethcore.io" +license = "GPL-3.0" +name = "ethminer" +version = "0.9.99" +authors = ["Ethcore "] + +[dependencies] +ethcore-util = { path = "../util" } +ethcore = { path = "../ethcore" } +log = "0.3" +env_logger = "0.3" +rustc-serialize = "0.3" +rayon = "0.3.1" +clippy = { version = "0.0.44", optional = true } + +[features] +dev = ["clippy"] +default = [] diff --git a/miner/src/lib.rs b/miner/src/lib.rs new file mode 100644 index 000000000..e8a50e9b5 --- /dev/null +++ b/miner/src/lib.rs @@ -0,0 +1,86 @@ +// Copyright 2015, 2016 Ethcore (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +#![warn(missing_docs)] +#![cfg_attr(feature="dev", feature(plugin))] +#![cfg_attr(feature="dev", plugin(clippy))] + +#[macro_use] +extern crate log; +#[macro_use] +extern crate ethcore_util as util; +extern crate ethcore; +extern crate env_logger; +extern crate rayon; + +mod miner; +mod transaction_queue; + +use util::{Bytes, H256, Address}; +use std::ops::*; +use std::sync::*; +use util::TimerToken; +use ethcore::block::*; +use ethcore::error::*; +use ethcore::client::{Client, BlockChainClient}; +use ethcore::transaction::*; +use miner::Miner; + +pub struct EthMiner { + miner: Miner, + /// Shared blockchain client. TODO: this should evetually become an IPC endpoint + chain: Arc, +} + +impl EthMiner { + /// Creates and register protocol with the network service + pub fn new(chain: Arc) -> Arc { + Arc::new(EthMiner { + miner: Miner::new(), + chain: chain, + }) + } + + pub fn sealing_block(&self) -> &Mutex> { + self.miner.sealing_block(self.chain.deref()) + } + + pub fn submit_seal(&self, pow_hash: H256, seal: Vec) -> Result<(), Error> { + self.miner.submit_seal(self.chain.deref(), pow_hash, seal) + } + + /// Set the author that we will seal blocks as. 
+ pub fn set_author(&self, author: Address) { + self.miner.set_author(author); + } + + /// Set the extra_data that we will seal blocks with. + pub fn set_extra_data(&self, extra_data: Bytes) { + self.miner.set_extra_data(extra_data); + } + + pub fn import_transactions(&self, transactions: Vec) { + let chain = self.chain.deref(); + let fetch_latest_nonce = |a : &Address| chain.nonce(a); + + self.miner.import_transactions(transactions, fetch_latest_nonce); + } + + pub fn chain_new_blocks(&self, good: &[H256], bad: &[H256], retracted: &[H256]) { + let mut chain = self.chain.deref(); + self.miner.chain_new_blocks(chain, good, bad, retracted); + } +} diff --git a/miner/src/miner.rs b/miner/src/miner.rs new file mode 100644 index 000000000..1a48d5288 --- /dev/null +++ b/miner/src/miner.rs @@ -0,0 +1,149 @@ +// Copyright 2015, 2016 Ethcore (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +use util::*; +use std::sync::atomic::AtomicBool; +use rayon::prelude::*; +use ethcore::views::{HeaderView, BlockView}; +use ethcore::header::{BlockNumber, Header as BlockHeader}; +use ethcore::client::{BlockChainClient, BlockStatus, BlockId, BlockChainInfo}; +use ethcore::block::*; +use ethcore::error::*; +use ethcore::transaction::SignedTransaction; +use transaction_queue::TransactionQueue; + +pub struct Miner { + /// Transactions Queue + transaction_queue: Mutex, + + // for sealing... + sealing_enabled: AtomicBool, + sealing_block: Mutex>, + author: RwLock
, + extra_data: RwLock, +} + +impl Miner { + pub fn new() -> Miner { + Miner { + transaction_queue: Mutex::new(TransactionQueue::new()), + sealing_enabled: AtomicBool::new(false), + sealing_block: Mutex::new(None), + author: RwLock::new(Address::new()), + extra_data: RwLock::new(Vec::new()), + } + } + + pub fn import_transactions(&self, transactions: Vec, fetch_nonce: T) + where T: Fn(&Address) -> U256 { + let mut transaction_queue = self.transaction_queue.lock().unwrap(); + transaction_queue.add_all(transactions, fetch_nonce); + } + + /// Get the author that we will seal blocks as. + pub fn author(&self) -> Address { + self.author.read().unwrap().clone() + } + + /// Set the author that we will seal blocks as. + pub fn set_author(&self, author: Address) { + *self.author.write().unwrap() = author; + } + + /// Get the extra_data that we will seal blocks wuth. + pub fn extra_data(&self) -> Bytes { + self.extra_data.read().unwrap().clone() + } + + /// Set the extra_data that we will seal blocks with. + pub fn set_extra_data(&self, extra_data: Bytes) { + *self.extra_data.write().unwrap() = extra_data; + } + + /// New chain head event. Restart mining operation. + fn prepare_sealing(&self, chain: &BlockChainClient) { + let b = chain.prepare_sealing(self.author.read().unwrap().clone(), self.extra_data.read().unwrap().clone()); + *self.sealing_block.lock().unwrap() = b; + } + + /// Grab the `ClosedBlock` that we want to be sealed. Comes as a mutex that you have to lock. + pub fn sealing_block(&self, chain: &BlockChainClient) -> &Mutex> { + if self.sealing_block.lock().unwrap().is_none() { + self.sealing_enabled.store(true, atomic::Ordering::Relaxed); + // TODO: Above should be on a timer that resets after two blocks have arrived without being asked for. + self.prepare_sealing(chain); + } + &self.sealing_block + } + + /// Submit `seal` as a valid solution for the header of `pow_hash`. + /// Will check the seal, but not actually insert the block into the chain. + pub fn submit_seal(&self, chain: &BlockChainClient, pow_hash: H256, seal: Vec) -> Result<(), Error> { + let mut maybe_b = self.sealing_block.lock().unwrap(); + match *maybe_b { + Some(ref b) if b.hash() == pow_hash => {} + _ => { return Err(Error::PowHashInvalid); } + } + + let b = maybe_b.take(); + match chain.try_seal(b.unwrap(), seal) { + Err(old) => { + *maybe_b = Some(old); + Err(Error::PowInvalid) + } + Ok(sealed) => { + // TODO: commit DB from `sealed.drain` and make a VerifiedBlock to skip running the transactions twice. + try!(chain.import_block(sealed.rlp_bytes())); + Ok(()) + } + } + } + + /// called when block is imported to chain, updates transactions queue and propagates the blocks + pub fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]) { + fn fetch_transactions(chain: &BlockChainClient, hash: &H256) -> Vec { + let block = chain + .block(BlockId::Hash(hash.clone())) + // Client should send message after commit to db and inserting to chain. 
+ .expect("Expected in-chain blocks."); + let block = BlockView::new(&block); + block.transactions() + } + + { + let good = good.par_iter().map(|h| fetch_transactions(chain, h)); + let bad = bad.par_iter().map(|h| fetch_transactions(chain, h)); + + good.for_each(|txs| { + let mut transaction_queue = self.transaction_queue.lock().unwrap(); + let hashes = txs.iter().map(|tx| tx.hash()).collect::>(); + transaction_queue.remove_all(&hashes, |a| chain.nonce(a)); + }); + bad.for_each(|txs| { + // populate sender + for tx in &txs { + let _sender = tx.sender(); + } + let mut transaction_queue = self.transaction_queue.lock().unwrap(); + transaction_queue.add_all(txs, |a| chain.nonce(a)); + }); + } + + if self.sealing_enabled.load(atomic::Ordering::Relaxed) { + self.prepare_sealing(chain); + } + } +} diff --git a/sync/src/transaction_queue.rs b/miner/src/transaction_queue.rs similarity index 100% rename from sync/src/transaction_queue.rs rename to miner/src/transaction_queue.rs diff --git a/parity/main.rs b/parity/main.rs index 43b0504f1..ef088ab5b 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -24,6 +24,7 @@ extern crate rustc_serialize; extern crate ethcore_util as util; extern crate ethcore; extern crate ethsync; +extern crate ethminer; #[macro_use] extern crate log as rlog; extern crate env_logger; @@ -49,6 +50,7 @@ use ethcore::client::*; use ethcore::service::{ClientService, NetSyncMessage}; use ethcore::ethereum; use ethsync::{EthSync, SyncConfig}; +use ethminer::{EthMiner}; use docopt::Docopt; use daemonize::Daemonize; use number_prefix::{binary_prefix, Standalone, Prefixed}; @@ -79,6 +81,7 @@ Protocol Options: --networkid INDEX Override the network identifier from the chain we are on. --archive Client should not prune the state/storage trie. -d --datadir PATH Specify the database & configuration directory path [default: $HOME/.parity] + --keys-path PATH Specify the path for JSON key files to be found [default: $HOME/.web3/keys] --identity NAME Specify your node's name. Networking Options: @@ -107,13 +110,13 @@ API and Console Options: Sealing/Mining Options: --author ADDRESS Specify the block author (aka "coinbase") address for sending block rewards from sealed blocks [default: 0037a6b811ffeb6e072da21179d11b1406371c63]. - --extradata STRING Specify a custom extra-data for authored blocks, no more than 32 characters. + --extradata STRING Specify a custom extra-data for authored blocks, no more than 32 characters. Memory Footprint Options: --cache-pref-size BYTES Specify the prefered size of the blockchain cache in bytes [default: 16384]. --cache-max-size BYTES Specify the maximum size of the blockchain cache in bytes [default: 262144]. --queue-max-size BYTES Specify the maximum size of memory to use for block queue [default: 52428800]. - --cache MEGABYTES Set total amount of cache to use for the entire system, mutually exclusive with + --cache MEGABYTES Set total amount of cache to use for the entire system, mutually exclusive with other cache options (geth-compatible). 
Miscellaneous Options: @@ -129,7 +132,7 @@ struct Args { arg_enode: Vec, flag_chain: String, flag_testnet: bool, - flag_db_path: String, + flag_datadir: String, flag_networkid: Option, flag_identity: String, flag_cache: Option, @@ -189,7 +192,7 @@ fn setup_log(init: &Option) { } #[cfg(feature = "rpc")] -fn setup_rpc_server(client: Arc, sync: Arc, url: &str, cors_domain: &str, apis: Vec<&str>) { +fn setup_rpc_server(client: Arc, sync: Arc, miner: Arc, url: &str, cors_domain: &str, apis: Vec<&str>) { use rpc::v1::*; let mut server = rpc::HttpServer::new(1); @@ -198,7 +201,7 @@ fn setup_rpc_server(client: Arc, sync: Arc, url: &str, cors_dom "web3" => server.add_delegate(Web3Client::new().to_delegate()), "net" => server.add_delegate(NetClient::new(&sync).to_delegate()), "eth" => { - server.add_delegate(EthClient::new(&client, &sync).to_delegate()); + server.add_delegate(EthClient::new(&client, &sync, &miner).to_delegate()); server.add_delegate(EthFilterClient::new(&client).to_delegate()); } _ => { @@ -238,7 +241,7 @@ impl Configuration { } fn path(&self) -> String { - self.args.flag_db_path.replace("$HOME", env::home_dir().unwrap().to_str().unwrap()) + self.args.flag_datadir.replace("$HOME", env::home_dir().unwrap().to_str().unwrap()) } fn author(&self) -> Address { @@ -323,6 +326,32 @@ impl Configuration { ret } + fn client_config(&self) -> ClientConfig { + let mut client_config = ClientConfig::default(); + match self.args.flag_cache { + Some(mb) => { + client_config.blockchain.max_cache_size = mb * 1024 * 1024; + client_config.blockchain.pref_cache_size = client_config.blockchain.max_cache_size / 2; + } + None => { + client_config.blockchain.pref_cache_size = self.args.flag_cache_pref_size; + client_config.blockchain.max_cache_size = self.args.flag_cache_max_size; + } + } + client_config.prefer_journal = !self.args.flag_archive; + client_config.name = self.args.flag_identity.clone(); + client_config.queue.max_mem_use = self.args.flag_queue_max_size; + client_config + } + + fn sync_config(&self, spec: &Spec) -> SyncConfig { + let mut sync_config = SyncConfig::default(); + sync_config.network_id = self.args.flag_networkid.as_ref().map(|id| { + U256::from_str(id).unwrap_or_else(|_| die!("{}: Invalid index given with --networkid", id)) + }).unwrap_or(spec.network_id()); + sync_config + } + fn execute(&self) { if self.args.flag_version { print_version(); @@ -346,31 +375,19 @@ impl Configuration { let spec = self.spec(); let net_settings = self.net_settings(&spec); - let mut sync_config = SyncConfig::default(); - sync_config.network_id = self.args.flag_networkid.as_ref().map(|id| U256::from_str(id).unwrap_or_else(|_| die!("{}: Invalid index given with --networkid", id))).unwrap_or(spec.network_id()); + let sync_config = self.sync_config(&spec); // Build client - let mut client_config = ClientConfig::default(); - match self.args.flag_cache { - Some(mb) => { - client_config.blockchain.max_cache_size = mb * 1024 * 1024; - client_config.blockchain.pref_cache_size = client_config.blockchain.max_cache_size / 2; - } - None => { - client_config.blockchain.pref_cache_size = self.args.flag_cache_pref_size; - client_config.blockchain.max_cache_size = self.args.flag_cache_max_size; - } - } - client_config.prefer_journal = !self.args.flag_archive; - client_config.name = self.args.flag_identity.clone(); - client_config.queue.max_mem_use = self.args.flag_queue_max_size; - let mut service = ClientService::start(client_config, spec, net_settings, &Path::new(&self.path())).unwrap(); - let client = 
service.client().clone(); - client.set_author(self.author()); - client.set_extra_data(self.extra_data()); + let mut service = ClientService::start(self.client_config(), spec, net_settings, &Path::new(&self.path())).unwrap(); + let client = service.client(); + + // Miner + let miner = EthMiner::new(client.clone()); + miner.set_author(self.author()); + miner.set_extra_data(self.extra_data()); // Sync - let sync = EthSync::register(service.network(), sync_config, client); + let sync = EthSync::register(service.network(), sync_config, client.clone(), miner.clone()); // Setup rpc if self.args.flag_jsonrpc || self.args.flag_rpc { @@ -382,7 +399,7 @@ impl Configuration { let cors = self.args.flag_rpccorsdomain.as_ref().unwrap_or(&self.args.flag_jsonrpc_cors); // TODO: use this as the API list. let apis = self.args.flag_rpcapi.as_ref().unwrap_or(&self.args.flag_jsonrpc_apis); - setup_rpc_server(service.client(), sync.clone(), &url, cors, apis.split(",").collect()); + setup_rpc_server(service.client(), sync.clone(), miner.clone(), &url, cors, apis.split(",").collect()); } // Register IO handler diff --git a/rpc/Cargo.toml b/rpc/Cargo.toml index bfdf8f2d3..f6d468f47 100644 --- a/rpc/Cargo.toml +++ b/rpc/Cargo.toml @@ -18,6 +18,7 @@ ethcore-util = { path = "../util" } ethcore = { path = "../ethcore" } ethash = { path = "../ethash" } ethsync = { path = "../sync" } +ethminer = { path = "../miner" } clippy = { version = "0.0.44", optional = true } rustc-serialize = "0.3" transient-hashmap = "0.1" diff --git a/rpc/src/lib.rs b/rpc/src/lib.rs index 0653a0c33..299084a6d 100644 --- a/rpc/src/lib.rs +++ b/rpc/src/lib.rs @@ -27,6 +27,7 @@ extern crate jsonrpc_http_server; extern crate ethcore_util as util; extern crate ethcore; extern crate ethsync; +extern crate ethminer; extern crate transient_hashmap; use self::jsonrpc_core::{IoHandler, IoDelegate}; diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index 7113c55b1..11c6fe8d0 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -18,6 +18,7 @@ use std::collections::HashMap; use std::sync::{Arc, Weak, Mutex, RwLock}; use ethsync::{EthSync, SyncState}; +use ethminer::{EthMiner}; use jsonrpc_core::*; use util::numbers::*; use util::sha3::*; @@ -36,15 +37,17 @@ use v1::helpers::{PollFilter, PollManager}; pub struct EthClient { client: Weak, sync: Weak, + miner: Weak, hashrates: RwLock>, } impl EthClient { /// Creates new EthClient. 
- pub fn new(client: &Arc, sync: &Arc) -> Self { + pub fn new(client: &Arc, sync: &Arc, miner: &Arc) -> Self { EthClient { client: Arc::downgrade(client), sync: Arc::downgrade(sync), + miner: Arc::downgrade(miner), hashrates: RwLock::new(HashMap::new()), } } @@ -220,8 +223,8 @@ impl Eth for EthClient { fn work(&self, params: Params) -> Result { match params { Params::None => { - let c = take_weak!(self.client); - let u = c.sealing_block().lock().unwrap(); + let miner = take_weak!(self.miner); + let u = miner.sealing_block().lock().unwrap(); match *u { Some(ref b) => { let pow_hash = b.hash(); @@ -239,9 +242,9 @@ impl Eth for EthClient { fn submit_work(&self, params: Params) -> Result { from_params::<(H64, H256, H256)>(params).and_then(|(nonce, pow_hash, mix_hash)| { // trace!("Decoded: nonce={}, pow_hash={}, mix_hash={}", nonce, pow_hash, mix_hash); - let c = take_weak!(self.client); + let miner = take_weak!(self.miner); let seal = vec![encode(&mix_hash).to_vec(), encode(&nonce).to_vec()]; - let r = c.submit_seal(pow_hash, seal); + let r = miner.submit_seal(pow_hash, seal); to_value(&r.is_ok()) }) } diff --git a/sync/Cargo.toml b/sync/Cargo.toml index 0097cd47e..748065deb 100644 --- a/sync/Cargo.toml +++ b/sync/Cargo.toml @@ -10,15 +10,14 @@ authors = ["Ethcore , + /// Miner + miner: Arc, } type RlpResponseResult = Result, PacketDecodeError>; impl ChainSync { /// Create a new instance of syncing strategy. - pub fn new(config: SyncConfig) -> ChainSync { + pub fn new(config: SyncConfig, miner: Arc) -> ChainSync { ChainSync { state: SyncState::NotSynced, starting_block: 0, @@ -239,7 +238,7 @@ impl ChainSync { last_sent_block_number: 0, max_download_ahead_blocks: max(MAX_HEADERS_TO_REQUEST, config.max_download_ahead_blocks), network_id: config.network_id, - transaction_queue: Mutex::new(TransactionQueue::new()), + miner: miner, } } @@ -298,7 +297,6 @@ impl ChainSync { self.starting_block = 0; self.highest_block = None; self.have_common_block = false; - self.transaction_queue.lock().unwrap().clear(); self.starting_block = io.chain().chain_info().best_block_number; self.state = SyncState::NotSynced; } @@ -927,16 +925,15 @@ impl ChainSync { } /// Called when peer sends us new transactions fn on_peer_transactions(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> { - let chain = io.chain(); let item_count = r.item_count(); trace!(target: "sync", "{} -> Transactions ({} entries)", peer_id, item_count); - let fetch_latest_nonce = |a : &Address| chain.nonce(a); - let mut transaction_queue = self.transaction_queue.lock().unwrap(); + let mut transactions = Vec::with_capacity(item_count); for i in 0..item_count { let tx: SignedTransaction = try!(r.val_at(i)); - transaction_queue.add(tx, &fetch_latest_nonce); + transactions.push(tx); } + self.miner.import_transactions(transactions); Ok(()) } @@ -1263,38 +1260,9 @@ impl ChainSync { self.check_resume(io); } - /// called when block is imported to chain, updates transactions queue and propagates the blocks - pub fn chain_new_blocks(&mut self, io: &mut SyncIo, good: &[H256], bad: &[H256], _retracted: &[H256]) { - fn fetch_transactions(chain: &BlockChainClient, hash: &H256) -> Vec { - let block = chain - .block(BlockId::Hash(hash.clone())) - // Client should send message after commit to db and inserting to chain. 
- .expect("Expected in-chain blocks."); - let block = BlockView::new(&block); - block.transactions() - } - - - { - let chain = io.chain(); - let good = good.par_iter().map(|h| fetch_transactions(chain, h)); - let bad = bad.par_iter().map(|h| fetch_transactions(chain, h)); - - good.for_each(|txs| { - let mut transaction_queue = self.transaction_queue.lock().unwrap(); - let hashes = txs.iter().map(|tx| tx.hash()).collect::>(); - transaction_queue.remove_all(&hashes, |a| chain.nonce(a)); - }); - bad.for_each(|txs| { - // populate sender - for tx in &txs { - let _sender = tx.sender(); - } - let mut transaction_queue = self.transaction_queue.lock().unwrap(); - transaction_queue.add_all(txs, |a| chain.nonce(a)); - }); - } - + pub fn chain_new_blocks(&mut self, io: &mut SyncIo, good: &[H256], bad: &[H256], retracted: &[H256]) { + // notify miner + self.miner.chain_new_blocks(good, bad, retracted); // Propagate latests blocks self.propagate_latest_blocks(io); // TODO [todr] propagate transactions? diff --git a/sync/src/lib.rs b/sync/src/lib.rs index b5869642c..0d6044135 100644 --- a/sync/src/lib.rs +++ b/sync/src/lib.rs @@ -51,27 +51,27 @@ extern crate log; #[macro_use] extern crate ethcore_util as util; extern crate ethcore; +extern crate ethminer; extern crate env_logger; extern crate time; extern crate rand; -extern crate rayon; #[macro_use] extern crate heapsize; use std::ops::*; use std::sync::*; -use ethcore::client::Client; use util::network::{NetworkProtocolHandler, NetworkService, NetworkContext, PeerId}; use util::TimerToken; use util::{U256, ONE_U256}; -use chain::ChainSync; +use ethcore::client::Client; use ethcore::service::SyncMessage; +use ethminer::EthMiner; use io::NetSyncIo; +use chain::ChainSync; mod chain; mod io; mod range_collection; -mod transaction_queue; #[cfg(test)] mod tests; @@ -105,10 +105,10 @@ pub use self::chain::{SyncStatus, SyncState}; impl EthSync { /// Creates and register protocol with the network service - pub fn register(service: &mut NetworkService, config: SyncConfig, chain: Arc) -> Arc { + pub fn register(service: &mut NetworkService, config: SyncConfig, chain: Arc, miner: Arc) -> Arc { let sync = Arc::new(EthSync { chain: chain, - sync: RwLock::new(ChainSync::new(config)), + sync: RwLock::new(ChainSync::new(config, miner)), }); service.register_protocol(sync.clone(), "eth", &[62u8, 63u8]).expect("Error registering eth protocol handler"); sync @@ -154,7 +154,7 @@ impl NetworkProtocolHandler for EthSync { fn message(&self, io: &NetworkContext, message: &SyncMessage) { match *message { - SyncMessage::NewChainBlocks { ref good, ref bad, ref retracted } => { + SyncMessage::NewChainBlocks { ref good, ref bad, ref retracted} => { let mut sync_io = NetSyncIo::new(io, self.chain.deref()); self.sync.write().unwrap().chain_new_blocks(&mut sync_io, good, bad, retracted); }, From 84444c697ce69ac8bc3f1126b6d6987af3a2df6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 8 Mar 2016 15:53:12 +0100 Subject: [PATCH 02/42] Adding ethminer to dev/ci files --- .travis.yml | 7 ++++--- Cargo.toml | 2 +- cov.sh | 23 +++++++++++++++++------ doc.sh | 9 ++++++++- hook.sh | 2 +- rpc/Cargo.toml | 2 +- test.sh | 3 ++- 7 files changed, 34 insertions(+), 14 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7213b8f09..48487e0d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,11 +14,11 @@ matrix: - rust: nightly include: - rust: stable - env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p 
ethcore-rpc -p parity" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" + env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" - rust: beta - env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" + env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" - rust: nightly - env: FEATURES="--features travis-nightly" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" + env: FEATURES="--features travis-nightly" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" cache: apt: true directories: @@ -51,6 +51,7 @@ after_success: | ./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /usr/,/.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethcore-* && ./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /usr/,/.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethsync-* && ./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /usr/,/.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethcore_rpc-* && + ./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /usr/,/.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethminer-* && ./kcov-master/tmp/usr/local/bin/kcov --coveralls-id=${TRAVIS_JOB_ID} --exclude-pattern /usr/,/.cargo,/root/.multirust target/kcov target/debug/parity-* && [ $TRAVIS_BRANCH = master ] && [ $TRAVIS_PULL_REQUEST = false ] && diff --git a/Cargo.toml b/Cargo.toml index 7e5bc334b..a501baaab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,7 +26,7 @@ number_prefix = "0.2" [features] default = ["rpc"] rpc = ["ethcore-rpc"] -dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethcore-rpc/dev"] +dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethcore-rpc/dev", "ethminer/dev"] travis-beta = ["ethcore/json-tests"] travis-nightly = ["ethcore/json-tests", "dev"] diff --git a/cov.sh b/cov.sh index a1fa29e46..d60ef223d 100755 --- a/cov.sh +++ b/cov.sh @@ -15,12 +15,23 @@ if ! type kcov > /dev/null; then exit 1 fi -cargo test -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity --no-run || exit $? +cargo test \ + -p ethash \ + -p ethcore-util \ + -p ethcore \ + -p ethsync \ + -p ethcore-rpc \ + -p parity \ + -p ethminer \ + --no-run || exit $? 
rm -rf target/coverage mkdir -p target/coverage -kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethcore-* -kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethash-* -kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethcore_util-* -kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethsync-* -kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethcore_rpc-* + +EXCLUDE="~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests" +kcov --exclude-pattern $EXCLUDE --include-pattern src --verify target/coverage target/debug/deps/ethcore-* +kcov --exclude-pattern $EXCLUDE --include-pattern src --verify target/coverage target/debug/deps/ethash-* +kcov --exclude-pattern $EXCLUDE --include-pattern src --verify target/coverage target/debug/deps/ethcore_util-* +kcov --exclude-pattern $EXCLUDE --include-pattern src --verify target/coverage target/debug/deps/ethsync-* +kcov --exclude-pattern $EXCLUDE --include-pattern src --verify target/coverage target/debug/deps/ethcore_rpc-* +kcov --exclude-pattern $EXCLUDE --include-pattern src --verify target/coverage target/debug/deps/ethminer-* xdg-open target/coverage/index.html diff --git a/doc.sh b/doc.sh index 2fd5ac20f..a5e5e2e13 100755 --- a/doc.sh +++ b/doc.sh @@ -1,4 +1,11 @@ #!/bin/sh # generate documentation only for partiy and ethcore libraries -cargo doc --no-deps --verbose -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity +cargo doc --no-deps --verbose \ + -p ethash \ + -p ethcore-util \ + -p ethcore \ + -p ethsync \ + -p ethcore-rpc \ + -p parity \ + -p ethminer diff --git a/hook.sh b/hook.sh index 106ffe4f0..313639640 100755 --- a/hook.sh +++ b/hook.sh @@ -1,3 +1,3 @@ #!/bin/sh -echo "#!/bin/sh\ncargo test -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity --features dev" > ./.git/hooks/pre-push +echo "#!/bin/sh\ncargo test -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer --features dev" > ./.git/hooks/pre-push chmod +x ./.git/hooks/pre-push diff --git a/rpc/Cargo.toml b/rpc/Cargo.toml index f6d468f47..d0174be59 100644 --- a/rpc/Cargo.toml +++ b/rpc/Cargo.toml @@ -31,4 +31,4 @@ syntex = "0.29.0" [features] default = ["serde_codegen"] nightly = ["serde_macros"] -dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev"] +dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethminer/dev"] diff --git a/test.sh b/test.sh index 0f5edb0d1..e1881a8ad 100755 --- a/test.sh +++ b/test.sh @@ -1,4 +1,5 @@ #!/bin/sh # Running Parity Full Test Sute -cargo test --features ethcore/json-tests $1 -p ethash -p ethcore-util -p ethcore -p 
ethsync -p ethcore-rpc -p parity +cargo test --features ethcore/json-tests $1 -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p +ethminer From 9acb36af871d8b0c84f3a48a4587e9d64ae68456 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 8 Mar 2016 16:23:32 +0100 Subject: [PATCH 03/42] Fixing tests compilation. Removing ethminer dependency on client --- ethcore/src/log_entry.rs | 2 +- ethcore/src/tests/client.rs | 17 ++++---------- miner/src/lib.rs | 46 ++++++------------------------------- miner/src/miner.rs | 19 ++++++++++++--- parity/main.rs | 2 +- rpc/src/v1/impls/eth.rs | 7 ++++-- sync/src/chain.rs | 21 +++++++++-------- sync/src/tests/helpers.rs | 12 +++++++++- 8 files changed, 58 insertions(+), 68 deletions(-) diff --git a/ethcore/src/log_entry.rs b/ethcore/src/log_entry.rs index a75e6fcc1..63d09b4f0 100644 --- a/ethcore/src/log_entry.rs +++ b/ethcore/src/log_entry.rs @@ -111,7 +111,7 @@ mod tests { let bloom = H2048::from_str("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap(); let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap(); let log = LogEntry { - address: address, + address: address, topics: vec![], data: vec![] }; diff --git a/ethcore/src/tests/client.rs b/ethcore/src/tests/client.rs index 001d1729b..d31a780e6 100644 --- a/ethcore/src/tests/client.rs +++ b/ethcore/src/tests/client.rs @@ -132,16 +132,9 @@ fn can_mine() { let dummy_blocks = get_good_dummy_block_seq(2); let client_result = get_test_client_with_blocks(vec![dummy_blocks[0].clone()]); let client = client_result.reference(); - let b = client.sealing_block(); - let pow_hash = { - let u = b.lock().unwrap(); - match *u { - Some(ref b) => { - assert_eq!(*b.block().header().parent_hash(), BlockView::new(&dummy_blocks[0]).header_view().sha3()); - b.hash() - } - None => { panic!(); } - } - }; - assert!(client.submit_seal(pow_hash, vec![]).is_ok()); + + let b = client.prepare_sealing(Address::default(), vec![]).unwrap(); + + assert_eq!(*b.block().header().parent_hash(), BlockView::new(&dummy_blocks[0]).header_view().sha3()); + assert!(client.try_seal(b, vec![]).is_ok()); } diff --git a/miner/src/lib.rs b/miner/src/lib.rs index e8a50e9b5..ae6235393 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -29,58 +29,26 @@ extern crate rayon; mod miner; mod transaction_queue; -use util::{Bytes, H256, Address}; use std::ops::*; use std::sync::*; -use util::TimerToken; -use ethcore::block::*; -use ethcore::error::*; -use ethcore::client::{Client, BlockChainClient}; -use ethcore::transaction::*; -use miner::Miner; +pub use miner::Miner; pub struct EthMiner { miner: Miner, - /// Shared blockchain client. 
TODO: this should evetually become an IPC endpoint - chain: Arc, } impl EthMiner { /// Creates and register protocol with the network service - pub fn new(chain: Arc) -> Arc { + pub fn new() -> Arc { Arc::new(EthMiner { miner: Miner::new(), - chain: chain, }) } +} +impl Deref for EthMiner { + type Target = Miner; - pub fn sealing_block(&self) -> &Mutex> { - self.miner.sealing_block(self.chain.deref()) - } - - pub fn submit_seal(&self, pow_hash: H256, seal: Vec) -> Result<(), Error> { - self.miner.submit_seal(self.chain.deref(), pow_hash, seal) - } - - /// Set the author that we will seal blocks as. - pub fn set_author(&self, author: Address) { - self.miner.set_author(author); - } - - /// Set the extra_data that we will seal blocks with. - pub fn set_extra_data(&self, extra_data: Bytes) { - self.miner.set_extra_data(extra_data); - } - - pub fn import_transactions(&self, transactions: Vec) { - let chain = self.chain.deref(); - let fetch_latest_nonce = |a : &Address| chain.nonce(a); - - self.miner.import_transactions(transactions, fetch_latest_nonce); - } - - pub fn chain_new_blocks(&self, good: &[H256], bad: &[H256], retracted: &[H256]) { - let mut chain = self.chain.deref(); - self.miner.chain_new_blocks(chain, good, bad, retracted); + fn deref(&self) -> &Self::Target { + &self.miner } } diff --git a/miner/src/miner.rs b/miner/src/miner.rs index 1a48d5288..76130b261 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -23,7 +23,7 @@ use ethcore::client::{BlockChainClient, BlockStatus, BlockId, BlockChainInfo}; use ethcore::block::*; use ethcore::error::*; use ethcore::transaction::SignedTransaction; -use transaction_queue::TransactionQueue; +use transaction_queue::{TransactionQueue, TransactionQueueStatus}; pub struct Miner { /// Transactions Queue @@ -36,6 +36,11 @@ pub struct Miner { extra_data: RwLock, } +pub struct MinerStatus { + pub transaction_queue_pending: usize, + pub transaction_queue_future: usize, +} + impl Miner { pub fn new() -> Miner { Miner { @@ -47,6 +52,14 @@ impl Miner { } } + pub fn status(&self) -> MinerStatus { + let status = self.transaction_queue.lock().unwrap().status(); + MinerStatus { + transaction_queue_pending: status.pending, + transaction_queue_future: status.future, + } + } + pub fn import_transactions(&self, transactions: Vec, fetch_nonce: T) where T: Fn(&Address) -> U256 { let mut transaction_queue = self.transaction_queue.lock().unwrap(); @@ -55,7 +68,7 @@ impl Miner { /// Get the author that we will seal blocks as. pub fn author(&self) -> Address { - self.author.read().unwrap().clone() + *self.author.read().unwrap() } /// Set the author that we will seal blocks as. @@ -75,7 +88,7 @@ impl Miner { /// New chain head event. Restart mining operation. 
fn prepare_sealing(&self, chain: &BlockChainClient) { - let b = chain.prepare_sealing(self.author.read().unwrap().clone(), self.extra_data.read().unwrap().clone()); + let b = chain.prepare_sealing(*self.author.read().unwrap(), self.extra_data.read().unwrap().clone()); *self.sealing_block.lock().unwrap() = b; } diff --git a/parity/main.rs b/parity/main.rs index ef088ab5b..a0bc87a03 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -382,7 +382,7 @@ impl Configuration { let client = service.client(); // Miner - let miner = EthMiner::new(client.clone()); + let miner = EthMiner::new(); miner.set_author(self.author()); miner.set_extra_data(self.extra_data()); diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index 11c6fe8d0..46d875c99 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -17,6 +17,7 @@ //! Eth rpc implementation. use std::collections::HashMap; use std::sync::{Arc, Weak, Mutex, RwLock}; +use std::ops::Deref; use ethsync::{EthSync, SyncState}; use ethminer::{EthMiner}; use jsonrpc_core::*; @@ -224,7 +225,8 @@ impl Eth for EthClient { match params { Params::None => { let miner = take_weak!(self.miner); - let u = miner.sealing_block().lock().unwrap(); + let client = take_weak!(self.client); + let u = miner.sealing_block(client.deref()).lock().unwrap(); match *u { Some(ref b) => { let pow_hash = b.hash(); @@ -243,8 +245,9 @@ impl Eth for EthClient { from_params::<(H64, H256, H256)>(params).and_then(|(nonce, pow_hash, mix_hash)| { // trace!("Decoded: nonce={}, pow_hash={}, mix_hash={}", nonce, pow_hash, mix_hash); let miner = take_weak!(self.miner); + let client = take_weak!(self.client); let seal = vec![encode(&mix_hash).to_vec(), encode(&nonce).to_vec()]; - let r = miner.submit_seal(pow_hash, seal); + let r = miner.submit_seal(client.deref(), pow_hash, seal); to_value(&r.is_ok()) }) } diff --git a/sync/src/chain.rs b/sync/src/chain.rs index d3277eccc..c607f53b1 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -31,7 +31,7 @@ use util::*; use std::mem::{replace}; -use ethcore::views::{HeaderView, BlockView}; +use ethcore::views::{HeaderView}; use ethcore::header::{BlockNumber, Header as BlockHeader}; use ethcore::client::{BlockChainClient, BlockStatus, BlockId, BlockChainInfo}; use range_collection::{RangeCollection, ToUsize, FromUsize}; @@ -933,7 +933,9 @@ impl ChainSync { let tx: SignedTransaction = try!(r.val_at(i)); transactions.push(tx); } - self.miner.import_transactions(transactions); + let chain = io.chain(); + let fetch_nonce = |a: &Address| chain.nonce(a); + self.miner.import_transactions(transactions, fetch_nonce); Ok(()) } @@ -1262,7 +1264,7 @@ impl ChainSync { pub fn chain_new_blocks(&mut self, io: &mut SyncIo, good: &[H256], bad: &[H256], retracted: &[H256]) { // notify miner - self.miner.chain_new_blocks(good, bad, retracted); + self.miner.chain_new_blocks(io.chain(), good, bad, retracted); // Propagate latests blocks self.propagate_latest_blocks(io); // TODO [todr] propagate transactions? 
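Aside: the hunk above passes a nonce-lookup closure (`|a: &Address| chain.nonce(a)`) into `import_transactions`, so the transaction queue can validate per-sender nonces without holding a reference to the client. A minimal standalone sketch of that pattern — all names, types, and the simplified nonce rule here are assumptions for illustration, not code from the patch:

    use std::collections::HashMap;

    // Stand-ins for the real util/ethcore types.
    type Address = u64;
    type U256 = u64;

    struct Tx { sender: Address, nonce: U256 }

    #[derive(Default)]
    struct Queue { pending: Vec<Tx> }

    impl Queue {
        // Accept a transaction only if its nonce is not already consumed on chain.
        // The caller supplies chain state through `fetch_nonce`, so the queue
        // stays independent of any concrete client type.
        fn add<F>(&mut self, tx: Tx, fetch_nonce: &F) where F: Fn(&Address) -> U256 {
            if tx.nonce >= fetch_nonce(&tx.sender) {
                self.pending.push(tx);
            }
        }
    }

    fn main() {
        let chain_nonces: HashMap<Address, U256> = [(1, 5)].into_iter().collect();
        let fetch_nonce = |a: &Address| *chain_nonces.get(a).unwrap_or(&0);
        let mut q = Queue::default();
        q.add(Tx { sender: 1, nonce: 4 }, &fetch_nonce); // stale nonce, dropped
        q.add(Tx { sender: 1, nonce: 5 }, &fetch_nonce); // next expected nonce, kept
        assert_eq!(q.pending.len(), 1);
    }

The same closure-based decoupling is what lets the later patches remove the miner's direct dependency on `Client`.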
@@ -1279,6 +1281,7 @@ mod tests { use super::{PeerInfo, PeerAsking}; use ethcore::header::*; use ethcore::client::*; + use ethminer::EthMiner; fn get_dummy_block(order: u32, parent_hash: H256) -> Bytes { let mut header = Header::new(); @@ -1388,7 +1391,7 @@ mod tests { } fn dummy_sync_with_peer(peer_latest_hash: H256) -> ChainSync { - let mut sync = ChainSync::new(SyncConfig::default()); + let mut sync = ChainSync::new(SyncConfig::default(), EthMiner::new()); sync.peers.insert(0, PeerInfo { protocol_version: 0, @@ -1610,14 +1613,14 @@ mod tests { // when sync.chain_new_blocks(&mut io, &[], &good_blocks, &[]); - assert_eq!(sync.transaction_queue.lock().unwrap().status().future, 0); - assert_eq!(sync.transaction_queue.lock().unwrap().status().pending, 1); + assert_eq!(sync.miner.status().transaction_queue_future, 0); + assert_eq!(sync.miner.status().transaction_queue_pending, 1); sync.chain_new_blocks(&mut io, &good_blocks, &retracted_blocks, &[]); // then - let status = sync.transaction_queue.lock().unwrap().status(); - assert_eq!(status.pending, 1); - assert_eq!(status.future, 0); + let status = sync.miner.status(); + assert_eq!(status.transaction_queue_pending, 1); + assert_eq!(status.transaction_queue_future, 0); } #[test] diff --git a/sync/src/tests/helpers.rs b/sync/src/tests/helpers.rs index d01dba0b2..37ee862b5 100644 --- a/sync/src/tests/helpers.rs +++ b/sync/src/tests/helpers.rs @@ -17,7 +17,9 @@ use util::*; use ethcore::client::{BlockChainClient, BlockStatus, TreeRoute, BlockChainInfo, TransactionId, BlockId, BlockQueueInfo}; use ethcore::header::{Header as BlockHeader, BlockNumber}; +use ethcore::block::*; use ethcore::error::*; +use ethminer::EthMiner; use io::SyncIo; use chain::ChainSync; use ::SyncConfig; @@ -308,6 +310,14 @@ impl BlockChainClient for TestBlockChainClient { best_block_number: self.blocks.read().unwrap().len() as BlockNumber - 1, } } + + fn prepare_sealing(&self, author: Address, extra_data: Bytes) -> Option { + unimplemented!() + } + + fn try_seal(&self, block: ClosedBlock, seal: Vec) -> Result { + unimplemented!() + } } pub struct TestIo<'p> { @@ -382,7 +392,7 @@ impl TestNet { for _ in 0..n { net.peers.push(TestPeer { chain: TestBlockChainClient::new(), - sync: ChainSync::new(SyncConfig::default()), + sync: ChainSync::new(SyncConfig::default(), EthMiner::new()), queue: VecDeque::new(), }); } From 49f1834ffb97244f0aaf404d86c0bdaaf226c9d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Tue, 8 Mar 2016 16:40:35 +0100 Subject: [PATCH 04/42] Fixing travis yml whitespace --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 48487e0d3..0c614ca5d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,11 +14,11 @@ matrix: - rust: nightly include: - rust: stable - env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" + env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" - rust: beta - env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" + env: FEATURES="--features travis-beta" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync 
-p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" - rust: nightly - env: FEATURES="--features travis-nightly" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" + env: FEATURES="--features travis-nightly" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}" cache: apt: true directories: From 5db84c32338bc6708dce3d299553531f27b68f8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Wed, 9 Mar 2016 12:54:07 +0100 Subject: [PATCH 05/42] Adding transactions to block --- ethcore/src/block.rs | 2 +- ethcore/src/client.rs | 35 +++++++++++++++++++++++++++++------ ethcore/src/service.rs | 2 ++ ethcore/src/tests/client.rs | 2 +- miner/src/miner.rs | 11 +++++++++-- sync/src/chain.rs | 4 ++++ sync/src/lib.rs | 4 ++++ sync/src/tests/helpers.rs | 4 ++-- 8 files changed, 52 insertions(+), 12 deletions(-) diff --git a/ethcore/src/block.rs b/ethcore/src/block.rs index 68f647e37..9ecd58e0a 100644 --- a/ethcore/src/block.rs +++ b/ethcore/src/block.rs @@ -274,7 +274,7 @@ impl<'x> OpenBlock<'x> { s.block.base.header.note_dirty(); ClosedBlock { - block: s.block, + block: s.block, uncle_bytes: uncle_bytes, } } diff --git a/ethcore/src/client.rs b/ethcore/src/client.rs index af1745ca8..fb69df757 100644 --- a/ethcore/src/client.rs +++ b/ethcore/src/client.rs @@ -32,7 +32,7 @@ use service::{NetSyncMessage, SyncMessage}; use env_info::LastHashes; use verification::*; use block::*; -use transaction::LocalizedTransaction; +use transaction::{LocalizedTransaction, SignedTransaction}; use extras::TransactionAddress; use filter::Filter; use log_entry::LocalizedLogEntry; @@ -185,7 +185,10 @@ pub trait BlockChainClient : Sync + Send { /// Returns logs matching given filter. fn logs(&self, filter: Filter) -> Vec; - fn prepare_sealing(&self, author: Address, extra_data: Bytes) -> Option; + /// Returns ClosedBlock prepared for sealing. + fn prepare_sealing(&self, author: Address, extra_data: Bytes, transactions: Vec) -> Option; + + /// Attempts to seal given block. Returns `SealedBlock` on success and the same block in case of error. fn try_seal(&self, block: ClosedBlock, seal: Vec) -> Result; } @@ -417,6 +420,12 @@ impl Client where V: Verifier { } } + { + if self.chain_info().best_block_hash != original_best { + io.send(NetworkIoMessage::User(SyncMessage::NewChainHead)).unwrap(); + } + } + imported } @@ -477,7 +486,7 @@ impl BlockChainClient for Client where V: Verifier { block.try_seal(self.engine.deref().deref(), seal) } - fn prepare_sealing(&self, author: Address, extra_data: Bytes) -> Option { + fn prepare_sealing(&self, author: Address, extra_data: Bytes, transactions: Vec) -> Option { let engine = self.engine.deref().deref(); let h = self.chain.read().unwrap().best_block_hash(); @@ -490,7 +499,9 @@ impl BlockChainClient for Client where V: Verifier { extra_data, ); - self.chain.read().unwrap().find_uncle_headers(&h, engine.maximum_uncle_age()) + // Add uncles + self.chain.read().unwrap() + .find_uncle_headers(&h, engine.maximum_uncle_age()) .unwrap() .into_iter() .take(engine.maximum_uncle_count()) @@ -498,10 +509,22 @@ impl BlockChainClient for Client where V: Verifier { b.push_uncle(h).unwrap(); }); - // TODO: push transactions. 
+ // Add transactions + let block_number = b.block().header().number(); + for tx in transactions { + let import = b.push_transaction(tx, None); + if let Err(e) = import { + trace!("Error adding transaction to block: number={}. Error: {:?}", block_number, e); + } + } + // And close let b = b.close(); - trace!("Sealing: number={}, hash={}, diff={}", b.hash(), b.block().header().difficulty(), b.block().header().number()); + trace!("Sealing: number={}, hash={}, diff={}", + b.block().header().number(), + b.hash(), + b.block().header().difficulty() + ); Some(b) } diff --git a/ethcore/src/service.rs b/ethcore/src/service.rs index 443d09e3b..11380d276 100644 --- a/ethcore/src/service.rs +++ b/ethcore/src/service.rs @@ -34,6 +34,8 @@ pub enum SyncMessage { /// Hashes of blocks that were removed from canonical chain retracted: Vec, }, + /// Best Block Hash in chain has been changed + NewChainHead, /// A block is ready BlockVerified, } diff --git a/ethcore/src/tests/client.rs b/ethcore/src/tests/client.rs index d31a780e6..ed0b02788 100644 --- a/ethcore/src/tests/client.rs +++ b/ethcore/src/tests/client.rs @@ -133,7 +133,7 @@ fn can_mine() { let client_result = get_test_client_with_blocks(vec![dummy_blocks[0].clone()]); let client = client_result.reference(); - let b = client.prepare_sealing(Address::default(), vec![]).unwrap(); + let b = client.prepare_sealing(Address::default(), vec![], vec![]).unwrap(); assert_eq!(*b.block().header().parent_hash(), BlockView::new(&dummy_blocks[0]).header_view().sha3()); assert!(client.try_seal(b, vec![]).is_ok()); diff --git a/miner/src/miner.rs b/miner/src/miner.rs index 76130b261..501f8c35c 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -87,8 +87,15 @@ impl Miner { } /// New chain head event. Restart mining operation. - fn prepare_sealing(&self, chain: &BlockChainClient) { - let b = chain.prepare_sealing(*self.author.read().unwrap(), self.extra_data.read().unwrap().clone()); + pub fn prepare_sealing(&self, chain: &BlockChainClient) { + let no_of_transactions = 128; + let transactions = self.transaction_queue.lock().unwrap().top_transactions(no_of_transactions); + + let b = chain.prepare_sealing( + self.author(), + self.extra_data(), + transactions, + ); *self.sealing_block.lock().unwrap() = b; } diff --git a/sync/src/chain.rs b/sync/src/chain.rs index c607f53b1..2669b71e2 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -1270,6 +1270,10 @@ impl ChainSync { // TODO [todr] propagate transactions? 
} + pub fn chain_new_head(&mut self, io: &mut SyncIo) { + self.miner.prepare_sealing(io.chain()); + } + } #[cfg(test)] diff --git a/sync/src/lib.rs b/sync/src/lib.rs index 0d6044135..be01d2b7b 100644 --- a/sync/src/lib.rs +++ b/sync/src/lib.rs @@ -158,6 +158,10 @@ impl NetworkProtocolHandler for EthSync { let mut sync_io = NetSyncIo::new(io, self.chain.deref()); self.sync.write().unwrap().chain_new_blocks(&mut sync_io, good, bad, retracted); }, + SyncMessage::NewChainHead => { + let mut sync_io = NetSyncIo::new(io, self.chain.deref()); + self.sync.write().unwrap().chain_new_head(&mut sync_io); + } _ => {/* Ignore other messages */}, } } diff --git a/sync/src/tests/helpers.rs b/sync/src/tests/helpers.rs index 37ee862b5..8c8f669a2 100644 --- a/sync/src/tests/helpers.rs +++ b/sync/src/tests/helpers.rs @@ -24,7 +24,7 @@ use io::SyncIo; use chain::ChainSync; use ::SyncConfig; use ethcore::receipt::Receipt; -use ethcore::transaction::{LocalizedTransaction, Transaction, Action}; +use ethcore::transaction::{LocalizedTransaction, SignedTransaction, Transaction, Action}; use ethcore::filter::Filter; use ethcore::log_entry::LocalizedLogEntry; @@ -311,7 +311,7 @@ impl BlockChainClient for TestBlockChainClient { } } - fn prepare_sealing(&self, author: Address, extra_data: Bytes) -> Option { + fn prepare_sealing(&self, author: Address, extra_data: Bytes, transactions: Vec) -> Option { unimplemented!() } From 9d664336b5e2bdd99855ca9961cc2da0ab4b9b31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Wed, 9 Mar 2016 13:28:37 +0100 Subject: [PATCH 06/42] Tratifying Miner --- miner/src/lib.rs | 4 +- miner/src/miner.rs | 90 ++++++++++++++++++++++++--------------- parity/main.rs | 2 +- rpc/src/v1/impls/eth.rs | 2 +- sync/src/chain.rs | 4 +- sync/src/tests/helpers.rs | 4 +- 6 files changed, 65 insertions(+), 41 deletions(-) diff --git a/miner/src/lib.rs b/miner/src/lib.rs index ae6235393..36b040b78 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -31,7 +31,8 @@ mod transaction_queue; use std::ops::*; use std::sync::*; -pub use miner::Miner; +pub use miner::{Miner, MinerService}; + pub struct EthMiner { miner: Miner, @@ -45,6 +46,7 @@ impl EthMiner { }) } } + impl Deref for EthMiner { type Target = Miner; diff --git a/miner/src/miner.rs b/miner/src/miner.rs index 501f8c35c..64d3c9083 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -17,16 +17,45 @@ use util::*; use std::sync::atomic::AtomicBool; use rayon::prelude::*; -use ethcore::views::{HeaderView, BlockView}; -use ethcore::header::{BlockNumber, Header as BlockHeader}; -use ethcore::client::{BlockChainClient, BlockStatus, BlockId, BlockChainInfo}; +use ethcore::views::{BlockView}; +use ethcore::client::{BlockChainClient, BlockId}; use ethcore::block::*; use ethcore::error::*; use ethcore::transaction::SignedTransaction; -use transaction_queue::{TransactionQueue, TransactionQueueStatus}; +use transaction_queue::{TransactionQueue}; + +pub trait MinerService { + fn status(&self) -> MinerStatus; + + fn import_transactions(&self, transactions: Vec, fetch_nonce: T) + where T: Fn(&Address) -> U256; + + /// called when blocks are imported to chain, updates transactions queue + fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]); + + /// Set the author that we will seal blocks as. + fn set_author(&self, author: Address); + + /// Set the extra_data that we will seal blocks with. + fn set_extra_data(&self, extra_data: Bytes); + + /// New chain head event. Restart mining operation. 
+ fn prepare_sealing(&self, chain: &BlockChainClient); + + /// Grab the `ClosedBlock` that we want to be sealed. Comes as a mutex that you have to lock. + fn sealing_block(&self, chain: &BlockChainClient) -> &Mutex>; + + /// Submit `seal` as a valid solution for the header of `pow_hash`. + /// Will check the seal, but not actually insert the block into the chain. + fn submit_seal(&self, chain: &BlockChainClient, pow_hash: H256, seal: Vec) -> Result<(), Error>; +} + +pub struct MinerStatus { + pub transaction_queue_pending: usize, + pub transaction_queue_future: usize, +} pub struct Miner { - /// Transactions Queue transaction_queue: Mutex, // for sealing... @@ -36,12 +65,8 @@ pub struct Miner { extra_data: RwLock, } -pub struct MinerStatus { - pub transaction_queue_pending: usize, - pub transaction_queue_future: usize, -} - impl Miner { + /// Creates new instance of miner pub fn new() -> Miner { Miner { transaction_queue: Mutex::new(TransactionQueue::new()), @@ -52,7 +77,20 @@ impl Miner { } } - pub fn status(&self) -> MinerStatus { + /// Get the author that we will seal blocks as. + fn author(&self) -> Address { + *self.author.read().unwrap() + } + + /// Get the extra_data that we will seal blocks wuth. + fn extra_data(&self) -> Bytes { + self.extra_data.read().unwrap().clone() + } +} + +impl MinerService for Miner { + + fn status(&self) -> MinerStatus { let status = self.transaction_queue.lock().unwrap().status(); MinerStatus { transaction_queue_pending: status.pending, @@ -60,34 +98,22 @@ impl Miner { } } - pub fn import_transactions(&self, transactions: Vec, fetch_nonce: T) + fn import_transactions(&self, transactions: Vec, fetch_nonce: T) where T: Fn(&Address) -> U256 { let mut transaction_queue = self.transaction_queue.lock().unwrap(); transaction_queue.add_all(transactions, fetch_nonce); } - /// Get the author that we will seal blocks as. - pub fn author(&self) -> Address { - *self.author.read().unwrap() - } - - /// Set the author that we will seal blocks as. - pub fn set_author(&self, author: Address) { + fn set_author(&self, author: Address) { *self.author.write().unwrap() = author; } - /// Get the extra_data that we will seal blocks wuth. - pub fn extra_data(&self) -> Bytes { - self.extra_data.read().unwrap().clone() - } - /// Set the extra_data that we will seal blocks with. - pub fn set_extra_data(&self, extra_data: Bytes) { + fn set_extra_data(&self, extra_data: Bytes) { *self.extra_data.write().unwrap() = extra_data; } - /// New chain head event. Restart mining operation. - pub fn prepare_sealing(&self, chain: &BlockChainClient) { + fn prepare_sealing(&self, chain: &BlockChainClient) { let no_of_transactions = 128; let transactions = self.transaction_queue.lock().unwrap().top_transactions(no_of_transactions); @@ -99,8 +125,7 @@ impl Miner { *self.sealing_block.lock().unwrap() = b; } - /// Grab the `ClosedBlock` that we want to be sealed. Comes as a mutex that you have to lock. - pub fn sealing_block(&self, chain: &BlockChainClient) -> &Mutex> { + fn sealing_block(&self, chain: &BlockChainClient) -> &Mutex> { if self.sealing_block.lock().unwrap().is_none() { self.sealing_enabled.store(true, atomic::Ordering::Relaxed); // TODO: Above should be on a timer that resets after two blocks have arrived without being asked for. @@ -109,9 +134,7 @@ impl Miner { &self.sealing_block } - /// Submit `seal` as a valid solution for the header of `pow_hash`. - /// Will check the seal, but not actually insert the block into the chain. 
- pub fn submit_seal(&self, chain: &BlockChainClient, pow_hash: H256, seal: Vec) -> Result<(), Error> { + fn submit_seal(&self, chain: &BlockChainClient, pow_hash: H256, seal: Vec) -> Result<(), Error> { let mut maybe_b = self.sealing_block.lock().unwrap(); match *maybe_b { Some(ref b) if b.hash() == pow_hash => {} @@ -132,8 +155,7 @@ impl Miner { } } - /// called when block is imported to chain, updates transactions queue and propagates the blocks - pub fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]) { + fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]) { fn fetch_transactions(chain: &BlockChainClient, hash: &H256) -> Vec { let block = chain .block(BlockId::Hash(hash.clone())) diff --git a/parity/main.rs b/parity/main.rs index a0bc87a03..89668a456 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -50,7 +50,7 @@ use ethcore::client::*; use ethcore::service::{ClientService, NetSyncMessage}; use ethcore::ethereum; use ethsync::{EthSync, SyncConfig}; -use ethminer::{EthMiner}; +use ethminer::{EthMiner, MinerService}; use docopt::Docopt; use daemonize::Daemonize; use number_prefix::{binary_prefix, Standalone, Prefixed}; diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index 46d875c99..d40761b09 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -19,7 +19,7 @@ use std::collections::HashMap; use std::sync::{Arc, Weak, Mutex, RwLock}; use std::ops::Deref; use ethsync::{EthSync, SyncState}; -use ethminer::{EthMiner}; +use ethminer::{EthMiner, MinerService}; use jsonrpc_core::*; use util::numbers::*; use util::sha3::*; diff --git a/sync/src/chain.rs b/sync/src/chain.rs index 2669b71e2..cb584f51d 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -38,7 +38,7 @@ use range_collection::{RangeCollection, ToUsize, FromUsize}; use ethcore::error::*; use ethcore::transaction::SignedTransaction; use ethcore::block::Block; -use ethminer::EthMiner; +use ethminer::{EthMiner, MinerService}; use io::SyncIo; use time; use super::SyncConfig; @@ -1285,7 +1285,7 @@ mod tests { use super::{PeerInfo, PeerAsking}; use ethcore::header::*; use ethcore::client::*; - use ethminer::EthMiner; + use ethminer::{EthMiner, MinerService}; fn get_dummy_block(order: u32, parent_hash: H256) -> Bytes { let mut header = Header::new(); diff --git a/sync/src/tests/helpers.rs b/sync/src/tests/helpers.rs index 8c8f669a2..9a4dd2814 100644 --- a/sync/src/tests/helpers.rs +++ b/sync/src/tests/helpers.rs @@ -311,11 +311,11 @@ impl BlockChainClient for TestBlockChainClient { } } - fn prepare_sealing(&self, author: Address, extra_data: Bytes, transactions: Vec) -> Option { + fn prepare_sealing(&self, _author: Address, _extra_data: Bytes, _transactions: Vec) -> Option { unimplemented!() } - fn try_seal(&self, block: ClosedBlock, seal: Vec) -> Result { + fn try_seal(&self, _block: ClosedBlock, _seal: Vec) -> Result { unimplemented!() } } From 363de973c90ed916959e8bf912f8ebde7f3aafa6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Wed, 9 Mar 2016 14:26:28 +0100 Subject: [PATCH 07/42] Adding documentation and ditching EthMiner --- miner/src/lib.rs | 54 ++++++++++++++++++++++----------------- miner/src/miner.rs | 34 +++++++++++++++++++----- parity/main.rs | 6 ++--- rpc/src/v1/impls/eth.rs | 6 ++--- sync/src/chain.rs | 10 ++++---- sync/src/lib.rs | 4 +-- sync/src/tests/helpers.rs | 4 +-- 7 files changed, 74 insertions(+), 44 deletions(-) diff --git a/miner/src/lib.rs 
b/miner/src/lib.rs index 36b040b78..591b73402 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -18,6 +18,37 @@ #![cfg_attr(feature="dev", feature(plugin))] #![cfg_attr(feature="dev", plugin(clippy))] +//! Miner module +//! Keeps track of transactions and mined block. +//! +//! Usage example: +//! +//! ```rust +//! extern crate ethcore_util as util; +//! extern crate ethcore; +//! extern crate ethminer; +//! use std::env; +//! use std::sync::Arc; +//! use util::network::{NetworkService, NetworkConfiguration}; +//! use ethcore::client::{Client, ClientConfig}; +//! use ethcore::ethereum; +//! use ethminer::{Miner, MinerService}; +//! +//! fn main() { +//! let dir = env::temp_dir(); +//! let client = Client::new(ClientConfig::default(), ethereum::new_frontier(), &dir, service.io().channel()).unwrap(); +//! +//! let miner: Miner = Miner::default(); +//! // get status +//! assert_eq!(miner.status().transaction_queue_pending, 0); +//! +//! // Check block for sealing +//! miner.prepare_sealing(&client); +//! assert_eq!(miner.sealing_block(&client).lock().unwrap().is_some()); +//! } +//! ``` + + #[macro_use] extern crate log; #[macro_use] @@ -29,28 +60,5 @@ extern crate rayon; mod miner; mod transaction_queue; -use std::ops::*; -use std::sync::*; pub use miner::{Miner, MinerService}; - -pub struct EthMiner { - miner: Miner, -} - -impl EthMiner { - /// Creates and register protocol with the network service - pub fn new() -> Arc { - Arc::new(EthMiner { - miner: Miner::new(), - }) - } -} - -impl Deref for EthMiner { - type Target = Miner; - - fn deref(&self) -> &Self::Target { - &self.miner - } -} diff --git a/miner/src/miner.rs b/miner/src/miner.rs index 64d3c9083..f5ad32d2d 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -24,21 +24,28 @@ use ethcore::error::*; use ethcore::transaction::SignedTransaction; use transaction_queue::{TransactionQueue}; +/// Miner external API pub trait MinerService { + + /// Returns miner's status. fn status(&self) -> MinerStatus; + /// Imports transactions to transaction queue. fn import_transactions(&self, transactions: Vec, fetch_nonce: T) where T: Fn(&Address) -> U256; - /// called when blocks are imported to chain, updates transactions queue - fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]); - /// Set the author that we will seal blocks as. fn set_author(&self, author: Address); /// Set the extra_data that we will seal blocks with. fn set_extra_data(&self, extra_data: Bytes); + /// Removes all transactions from the queue and restart mining operation. + fn clear_and_reset(&self, chain: &BlockChainClient); + + /// called when blocks are imported to chain, updates transactions queue. + fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]); + /// New chain head event. Restart mining operation. fn prepare_sealing(&self, chain: &BlockChainClient); @@ -50,11 +57,15 @@ pub trait MinerService { fn submit_seal(&self, chain: &BlockChainClient, pow_hash: H256, seal: Vec) -> Result<(), Error>; } +/// Mining status pub struct MinerStatus { + /// Number of transactions in queue with state `pending` (ready to be included in block) pub transaction_queue_pending: usize, + /// Number of transactions in queue with state `future` (not yet ready to be included in block) pub transaction_queue_future: usize, } +/// Keeps track of transactions using priority queue and holds currently mined block. 
pub struct Miner { transaction_queue: Mutex, @@ -65,9 +76,8 @@ pub struct Miner { extra_data: RwLock, } -impl Miner { - /// Creates new instance of miner - pub fn new() -> Miner { +impl Default for Miner { + fn default() -> Miner { Miner { transaction_queue: Mutex::new(TransactionQueue::new()), sealing_enabled: AtomicBool::new(false), @@ -76,6 +86,13 @@ impl Miner { extra_data: RwLock::new(Vec::new()), } } +} + +impl Miner { + /// Creates new instance of miner + pub fn new() -> Arc { + Arc::new(Miner::default()) + } /// Get the author that we will seal blocks as. fn author(&self) -> Address { @@ -90,6 +107,11 @@ impl Miner { impl MinerService for Miner { + fn clear_and_reset(&self, chain: &BlockChainClient) { + self.transaction_queue.lock().unwrap().clear(); + self.prepare_sealing(chain); + } + fn status(&self) -> MinerStatus { let status = self.transaction_queue.lock().unwrap().status(); MinerStatus { diff --git a/parity/main.rs b/parity/main.rs index 89668a456..d75bdcb57 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -50,7 +50,7 @@ use ethcore::client::*; use ethcore::service::{ClientService, NetSyncMessage}; use ethcore::ethereum; use ethsync::{EthSync, SyncConfig}; -use ethminer::{EthMiner, MinerService}; +use ethminer::{Miner, MinerService}; use docopt::Docopt; use daemonize::Daemonize; use number_prefix::{binary_prefix, Standalone, Prefixed}; @@ -192,7 +192,7 @@ fn setup_log(init: &Option) { } #[cfg(feature = "rpc")] -fn setup_rpc_server(client: Arc, sync: Arc, miner: Arc, url: &str, cors_domain: &str, apis: Vec<&str>) { +fn setup_rpc_server(client: Arc, sync: Arc, miner: Arc, url: &str, cors_domain: &str, apis: Vec<&str>) { use rpc::v1::*; let mut server = rpc::HttpServer::new(1); @@ -382,7 +382,7 @@ impl Configuration { let client = service.client(); // Miner - let miner = EthMiner::new(); + let miner = Miner::new(); miner.set_author(self.author()); miner.set_extra_data(self.extra_data()); diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index d40761b09..a9ee389f8 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -19,7 +19,7 @@ use std::collections::HashMap; use std::sync::{Arc, Weak, Mutex, RwLock}; use std::ops::Deref; use ethsync::{EthSync, SyncState}; -use ethminer::{EthMiner, MinerService}; +use ethminer::{Miner, MinerService}; use jsonrpc_core::*; use util::numbers::*; use util::sha3::*; @@ -38,13 +38,13 @@ use v1::helpers::{PollFilter, PollManager}; pub struct EthClient { client: Weak, sync: Weak, - miner: Weak, + miner: Weak, hashrates: RwLock>, } impl EthClient { /// Creates new EthClient. - pub fn new(client: &Arc, sync: &Arc, miner: &Arc) -> Self { + pub fn new(client: &Arc, sync: &Arc, miner: &Arc) -> Self { EthClient { client: Arc::downgrade(client), sync: Arc::downgrade(sync), diff --git a/sync/src/chain.rs b/sync/src/chain.rs index cb584f51d..4c7b0893a 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -38,7 +38,7 @@ use range_collection::{RangeCollection, ToUsize, FromUsize}; use ethcore::error::*; use ethcore::transaction::SignedTransaction; use ethcore::block::Block; -use ethminer::{EthMiner, MinerService}; +use ethminer::{Miner, MinerService}; use io::SyncIo; use time; use super::SyncConfig; @@ -212,14 +212,14 @@ pub struct ChainSync { /// Network ID network_id: U256, /// Miner - miner: Arc, + miner: Arc, } type RlpResponseResult = Result, PacketDecodeError>; impl ChainSync { /// Create a new instance of syncing strategy. 
- pub fn new(config: SyncConfig, miner: Arc) -> ChainSync { + pub fn new(config: SyncConfig, miner: Arc) -> ChainSync { ChainSync { state: SyncState::NotSynced, starting_block: 0, @@ -1285,7 +1285,7 @@ mod tests { use super::{PeerInfo, PeerAsking}; use ethcore::header::*; use ethcore::client::*; - use ethminer::{EthMiner, MinerService}; + use ethminer::{Miner, MinerService}; fn get_dummy_block(order: u32, parent_hash: H256) -> Bytes { let mut header = Header::new(); @@ -1395,7 +1395,7 @@ mod tests { } fn dummy_sync_with_peer(peer_latest_hash: H256) -> ChainSync { - let mut sync = ChainSync::new(SyncConfig::default(), EthMiner::new()); + let mut sync = ChainSync::new(SyncConfig::default(), Miner::new()); sync.peers.insert(0, PeerInfo { protocol_version: 0, diff --git a/sync/src/lib.rs b/sync/src/lib.rs index be01d2b7b..dd331b5da 100644 --- a/sync/src/lib.rs +++ b/sync/src/lib.rs @@ -65,7 +65,7 @@ use util::TimerToken; use util::{U256, ONE_U256}; use ethcore::client::Client; use ethcore::service::SyncMessage; -use ethminer::EthMiner; +use ethminer::Miner; use io::NetSyncIo; use chain::ChainSync; @@ -105,7 +105,7 @@ pub use self::chain::{SyncStatus, SyncState}; impl EthSync { /// Creates and register protocol with the network service - pub fn register(service: &mut NetworkService, config: SyncConfig, chain: Arc, miner: Arc) -> Arc { + pub fn register(service: &mut NetworkService, config: SyncConfig, chain: Arc, miner: Arc) -> Arc { let sync = Arc::new(EthSync { chain: chain, sync: RwLock::new(ChainSync::new(config, miner)), diff --git a/sync/src/tests/helpers.rs b/sync/src/tests/helpers.rs index 9a4dd2814..52a1feba4 100644 --- a/sync/src/tests/helpers.rs +++ b/sync/src/tests/helpers.rs @@ -19,7 +19,7 @@ use ethcore::client::{BlockChainClient, BlockStatus, TreeRoute, BlockChainInfo, use ethcore::header::{Header as BlockHeader, BlockNumber}; use ethcore::block::*; use ethcore::error::*; -use ethminer::EthMiner; +use ethminer::Miner; use io::SyncIo; use chain::ChainSync; use ::SyncConfig; @@ -392,7 +392,7 @@ impl TestNet { for _ in 0..n { net.peers.push(TestPeer { chain: TestBlockChainClient::new(), - sync: ChainSync::new(SyncConfig::default(), EthMiner::new()), + sync: ChainSync::new(SyncConfig::default(), Miner::new()), queue: VecDeque::new(), }); } From 493c61f09d9a2e8b12579c8a27cdc9ef22766651 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Wed, 9 Mar 2016 15:22:06 +0100 Subject: [PATCH 08/42] Minimal gas price threshold. Closes: #636 --- miner/src/miner.rs | 32 ++++++++++++++++---------------- miner/src/transaction_queue.rs | 34 ++++++++++++++++++++++++++++++++++ parity/main.rs | 8 ++++++++ 3 files changed, 58 insertions(+), 16 deletions(-) diff --git a/miner/src/miner.rs b/miner/src/miner.rs index f5ad32d2d..2c18f3a79 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -24,7 +24,7 @@ use ethcore::error::*; use ethcore::transaction::SignedTransaction; use transaction_queue::{TransactionQueue}; -/// Miner external API +/// Miner client API pub trait MinerService { /// Returns miner's status. @@ -34,12 +34,6 @@ pub trait MinerService { fn import_transactions(&self, transactions: Vec, fetch_nonce: T) where T: Fn(&Address) -> U256; - /// Set the author that we will seal blocks as. - fn set_author(&self, author: Address); - - /// Set the extra_data that we will seal blocks with. - fn set_extra_data(&self, extra_data: Bytes); - /// Removes all transactions from the queue and restart mining operation. 
fn clear_and_reset(&self, chain: &BlockChainClient); @@ -103,6 +97,21 @@ impl Miner { fn extra_data(&self) -> Bytes { self.extra_data.read().unwrap().clone() } + + /// Set the author that we will seal blocks as. + pub fn set_author(&self, author: Address) { + *self.author.write().unwrap() = author; + } + + /// Set the extra_data that we will seal blocks with. + pub fn set_extra_data(&self, extra_data: Bytes) { + *self.extra_data.write().unwrap() = extra_data; + } + + /// Set minimal gas price of transaction to be accepted for mining. + pub fn set_minimal_gas_price(&self, min_gas_price: U256) { + self.transaction_queue.lock().unwrap().set_minimal_gas_price(min_gas_price); + } } impl MinerService for Miner { @@ -126,15 +135,6 @@ impl MinerService for Miner { transaction_queue.add_all(transactions, fetch_nonce); } - fn set_author(&self, author: Address) { - *self.author.write().unwrap() = author; - } - - - fn set_extra_data(&self, extra_data: Bytes) { - *self.extra_data.write().unwrap() = extra_data; - } - fn prepare_sealing(&self, chain: &BlockChainClient) { let no_of_transactions = 128; let transactions = self.transaction_queue.lock().unwrap().top_transactions(no_of_transactions); diff --git a/miner/src/transaction_queue.rs b/miner/src/transaction_queue.rs index 3e0d931b5..ed8cf801e 100644 --- a/miner/src/transaction_queue.rs +++ b/miner/src/transaction_queue.rs @@ -159,6 +159,8 @@ pub struct TransactionQueueStatus { /// TransactionQueue implementation pub struct TransactionQueue { + /// Gas Price threshold for transactions that can be imported to this queue (defaults to 0) + minimal_gas_price: U256, /// Priority queue for transactions that can go to block current: TransactionSet, /// Priority queue for transactions that has been received but are not yet valid to go to block @@ -189,6 +191,7 @@ impl TransactionQueue { }; TransactionQueue { + minimal_gas_price: U256::zero(), current: current, future: future, by_hash: HashMap::new(), @@ -196,6 +199,12 @@ impl TransactionQueue { } } + /// Sets new gas price threshold for incoming transactions. + /// Any transactions already imported to the queue are not affected. 
+ pub fn set_minimal_gas_price(&mut self, min_gas_price: U256) { + self.minimal_gas_price = min_gas_price; + } + /// Returns current status for this queue pub fn status(&self) -> TransactionQueueStatus { TransactionQueueStatus { @@ -215,6 +224,15 @@ impl TransactionQueue { /// Add signed transaction to queue to be verified and imported pub fn add(&mut self, tx: SignedTransaction, fetch_nonce: &T) where T: Fn(&Address) -> U256 { + + if tx.gas_price < self.minimal_gas_price { + trace!(target: "sync", + "Dropping transaction below minimal gas price threshold: {:?} (gp: {} < {})", + tx.hash(), tx.gas_price, self.minimal_gas_price + ); + return; + } + // Everything ok - import transaction self.import_tx(VerifiedTransaction::new(tx), fetch_nonce); } @@ -503,6 +521,22 @@ mod test { assert_eq!(stats.pending, 1); } + #[test] + fn should_not_import_transaction_below_min_gas_price_threshold() { + // given + let mut txq = TransactionQueue::new(); + let tx = new_tx(); + txq.set_minimal_gas_price(tx.gas_price + U256::one()); + + // when + txq.add(tx, &default_nonce); + + // then + let stats = txq.status(); + assert_eq!(stats.pending, 0); + assert_eq!(stats.future, 0); + } + #[test] fn should_import_txs_from_same_sender() { // given diff --git a/parity/main.rs b/parity/main.rs index d75bdcb57..b3a0224d8 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -108,6 +108,7 @@ API and Console Options: --rpccorsdomain URL Equivalent to --jsonrpc-cors URL (geth-compatible). Sealing/Mining Options: + --gasprice GAS Minimal gas price a transaction must have to be accepted for mining [default: 50000000000]. --author ADDRESS Specify the block author (aka "coinbase") address for sending block rewards from sealed blocks [default: 0037a6b811ffeb6e072da21179d11b1406371c63]. --extradata STRING Specify a custom extra-data for authored blocks, no more than 32 characters. @@ -161,6 +162,7 @@ struct Args { flag_rpcapi: Option, flag_logging: Option, flag_version: bool, + flag_gasprice: String, flag_author: String, flag_extra_data: Option, } @@ -248,6 +250,11 @@ impl Configuration { Address::from_str(&self.args.flag_author).unwrap_or_else(|_| die!("{}: Invalid address for --author. Must be 40 hex characters, without the 0x at the beginning.", self.args.flag_author)) } + fn gasprice(&self) -> U256 { + U256::from_dec_str(self.args.flag_gasprice).unwrap_or_else(|_| die("{}: Invalid gasprice given. Must be a + decimal unsigned 256-bit number.")) + } + fn extra_data(&self) -> Bytes { match self.args.flag_extra_data { Some(ref x) if x.len() <= 32 => x.as_bytes().to_owned(), @@ -385,6 +392,7 @@ impl Configuration { let miner = Miner::new(); miner.set_author(self.author()); miner.set_extra_data(self.extra_data()); + miner.set_minimal_gas_price(self.gasprice()); // Sync let sync = EthSync::register(service.network(), sync_config, client.clone(), miner.clone()); From 3d74e5bd473d466d20b7169ee56f0316ea72d2ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Wed, 9 Mar 2016 15:27:07 +0100 Subject: [PATCH 09/42] Fixing doctest --- miner/src/lib.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/miner/src/lib.rs b/miner/src/lib.rs index 591b73402..4fccc6d51 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -27,14 +27,16 @@ //! extern crate ethcore_util as util; //! extern crate ethcore; //! extern crate ethminer; +//! use std::ops::Deref; //! use std::env; //! use std::sync::Arc; //! use util::network::{NetworkService, NetworkConfiguration}; -//! 
use ethcore::client::{Client, ClientConfig}; +//! use ethcore::client::{Client, ClientConfig, BlockChainClient}; //! use ethcore::ethereum; //! use ethminer::{Miner, MinerService}; //! //! fn main() { +//! let mut service = NetworkService::start(NetworkConfiguration::new()).unwrap(); //! let dir = env::temp_dir(); //! let client = Client::new(ClientConfig::default(), ethereum::new_frontier(), &dir, service.io().channel()).unwrap(); //! @@ -43,8 +45,8 @@ //! assert_eq!(miner.status().transaction_queue_pending, 0); //! //! // Check block for sealing -//! miner.prepare_sealing(&client); -//! assert_eq!(miner.sealing_block(&client).lock().unwrap().is_some()); +//! miner.prepare_sealing(client.deref()); +//! assert!(miner.sealing_block(client.deref()).lock().unwrap().is_some()); //! } //! ``` From ca2cf8e591404245fed40cc1f8d81f8a6e67087e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 10:05:51 +0100 Subject: [PATCH 10/42] Lowering minimal gas price --- parity/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parity/main.rs b/parity/main.rs index b3a0224d8..4cf52728e 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -108,7 +108,7 @@ API and Console Options: --rpccorsdomain URL Equivalent to --jsonrpc-cors URL (geth-compatible). Sealing/Mining Options: - --gasprice GAS Minimal gas price a transaction must have to be accepted for mining [default: 50000000000]. + --gasprice GAS Minimal gas price a transaction must have to be accepted for mining [default: 20000000000]. --author ADDRESS Specify the block author (aka "coinbase") address for sending block rewards from sealed blocks [default: 0037a6b811ffeb6e072da21179d11b1406371c63]. --extradata STRING Specify a custom extra-data for authored blocks, no more than 32 characters. From 02b7e7698ad07fccb61aa5f51b31e47ad599c851 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 10:09:55 +0100 Subject: [PATCH 11/42] Breaking couple of lines to keep number of characters below limit --- parity/main.rs | 37 +++++++++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/parity/main.rs b/parity/main.rs index 4cf52728e..729f6aeed 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -247,12 +247,15 @@ impl Configuration { } fn author(&self) -> Address { - Address::from_str(&self.args.flag_author).unwrap_or_else(|_| die!("{}: Invalid address for --author. Must be 40 hex characters, without the 0x at the beginning.", self.args.flag_author)) + Address::from_str(&self.args.flag_author).unwrap_or_else(|_| { + die!("{}: Invalid address for --author. Must be 40 hex characters, without the 0x at the beginning.", self.args.flag_author) + }) } fn gasprice(&self) -> U256 { - U256::from_dec_str(self.args.flag_gasprice).unwrap_or_else(|_| die("{}: Invalid gasprice given. Must be a - decimal unsigned 256-bit number.")) + U256::from_dec_str(self.args.flag_gasprice).unwrap_or_else(|_| { + die("{}: Invalid gasprice given. Must be a decimal unsigned 256-bit number.") + }) } fn extra_data(&self) -> Bytes { @@ -275,7 +278,9 @@ impl Configuration { "frontier" | "homestead" | "mainnet" => ethereum::new_frontier(), "morden" | "testnet" => ethereum::new_morden(), "olympic" => ethereum::new_olympic(), - f => Spec::from_json_utf8(contents(f).unwrap_or_else(|_| die!("{}: Couldn't read chain specification file. Sure it exists?", f)).as_ref()), + f => Spec::from_json_utf8(contents(f).unwrap_or_else(|_| { + die!("{}: Couldn't read chain specification file. 
Sure it exists?", f) + }).as_ref()), } } @@ -291,7 +296,9 @@ impl Configuration { if self.args.flag_no_bootstrap { Vec::new() } else { match self.args.arg_enode.len() { 0 => spec.nodes().clone(), - _ => self.args.arg_enode.iter().map(|s| Self::normalize_enode(s).unwrap_or_else(||die!("{}: Invalid node address format given for a boot node.", s))).collect(), + _ => self.args.arg_enode.iter().map(|s| Self::normalize_enode(s).unwrap_or_else(|| { + die!("{}: Invalid node address format given for a boot node.", s) + })).collect(), } } } @@ -302,17 +309,23 @@ impl Configuration { let mut public_address = None; if let Some(ref a) = self.args.flag_address { - public_address = Some(SocketAddr::from_str(a.as_ref()).unwrap_or_else(|_| die!("{}: Invalid listen/public address given with --address", a))); + public_address = Some(SocketAddr::from_str(a.as_ref()).unwrap_or_else(|_| { + die!("{}: Invalid listen/public address given with --address", a) + })); listen_address = public_address; } if listen_address.is_none() { - listen_address = Some(SocketAddr::from_str(self.args.flag_listen_address.as_ref()).unwrap_or_else(|_| die!("{}: Invalid listen/public address given with --listen-address", self.args.flag_listen_address))); + listen_address = Some(SocketAddr::from_str(self.args.flag_listen_address.as_ref()).unwrap_or_else(|_| { + die!("{}: Invalid listen/public address given with --listen-address", self.args.flag_listen_address) + })); } if let Some(ref a) = self.args.flag_public_address { if public_address.is_some() { die!("Conflicting flags provided: --address and --public-address"); } - public_address = Some(SocketAddr::from_str(a.as_ref()).unwrap_or_else(|_| die!("{}: Invalid listen/public address given with --public-address", a))); + public_address = Some(SocketAddr::from_str(a.as_ref()).unwrap_or_else(|_| { + die!("{}: Invalid listen/public address given with --public-address", a) + })); } (listen_address, public_address) } @@ -403,7 +416,7 @@ impl Configuration { self.args.flag_rpcaddr.as_ref().unwrap_or(&self.args.flag_jsonrpc_addr), self.args.flag_rpcport.unwrap_or(self.args.flag_jsonrpc_port) ); - SocketAddr::from_str(&url).unwrap_or_else(|_|die!("{}: Invalid JSONRPC listen host/port given.", url)); + SocketAddr::from_str(&url).unwrap_or_else(|_| die!("{}: Invalid JSONRPC listen host/port given.", url)); let cors = self.args.flag_rpccorsdomain.as_ref().unwrap_or(&self.args.flag_jsonrpc_cors); // TODO: use this as the API list. 
let apis = self.args.flag_rpcapi.as_ref().unwrap_or(&self.args.flag_jsonrpc_apis); @@ -475,7 +488,11 @@ impl Informant { let report = client.report(); let sync_info = sync.status(); - if let (_, _, &Some(ref last_report)) = (self.chain_info.read().unwrap().deref(), self.cache_info.read().unwrap().deref(), self.report.read().unwrap().deref()) { + if let (_, _, &Some(ref last_report)) = ( + self.chain_info.read().unwrap().deref(), + self.cache_info.read().unwrap().deref(), + self.report.read().unwrap().deref() + ) { println!("[ #{} {} ]---[ {} blk/s | {} tx/s | {} gas/s //··· {}/{} peers, #{}, {}+{} queued ···// mem: {} db, {} chain, {} queue, {} sync ]", chain_info.best_block_number, chain_info.best_block_hash, From 90ae7500da814df956e7b2fb228c1a15711c5886 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Thu, 10 Mar 2016 11:07:10 +0100 Subject: [PATCH 12/42] Update main.rs --- parity/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parity/main.rs b/parity/main.rs index 729f6aeed..745912028 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -108,7 +108,7 @@ API and Console Options: --rpccorsdomain URL Equivalent to --jsonrpc-cors URL (geth-compatible). Sealing/Mining Options: - --gasprice GAS Minimal gas price a transaction must have to be accepted for mining [default: 20000000000]. + --gasprice GAS Minimal gas price a transaction must have to be accepted for mining [default: 20000000000]. --author ADDRESS Specify the block author (aka "coinbase") address for sending block rewards from sealed blocks [default: 0037a6b811ffeb6e072da21179d11b1406371c63]. --extradata STRING Specify a custom extra-data for authored blocks, no more than 32 characters. From eb1fab92024779aa91d4c83844677347b0b071e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 12:33:29 +0100 Subject: [PATCH 13/42] Adding clippy support to ethminer. --- miner/Cargo.toml | 4 ++++ miner/build.rs | 25 +++++++++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 miner/build.rs diff --git a/miner/Cargo.toml b/miner/Cargo.toml index fb3f24210..713182563 100644 --- a/miner/Cargo.toml +++ b/miner/Cargo.toml @@ -5,6 +5,10 @@ license = "GPL-3.0" name = "ethminer" version = "0.9.99" authors = ["Ethcore "] +build = "build.rs" + +[build-dependencies] +rustc_version = "0.1" [dependencies] ethcore-util = { path = "../util" } diff --git a/miner/build.rs b/miner/build.rs new file mode 100644 index 000000000..41b9a1b3e --- /dev/null +++ b/miner/build.rs @@ -0,0 +1,25 @@ +// Copyright 2015, 2016 Ethcore (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . 
+ +extern crate rustc_version; + +use rustc_version::{version_meta, Channel}; + +fn main() { + if let Channel::Nightly = version_meta().channel { + println!("cargo:rustc-cfg=nightly"); + } +} From 9db4720162a2d46e81d1d595c0a052c0e2452f60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 14:06:47 +0100 Subject: [PATCH 14/42] Fixing clippy warnings. --- miner/src/miner.rs | 10 +++++----- miner/src/transaction_queue.rs | 28 ++++++++++++++-------------- parity/main.rs | 4 ++-- sync/src/chain.rs | 2 +- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/miner/src/miner.rs b/miner/src/miner.rs index 2c18f3a79..8e93defcf 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -31,7 +31,7 @@ pub trait MinerService { fn status(&self) -> MinerStatus; /// Imports transactions to transaction queue. - fn import_transactions(&self, transactions: Vec, fetch_nonce: T) + fn import_transactions(&self, transactions: Vec, fetch_nonce: T) -> Result<(), Error> where T: Fn(&Address) -> U256; /// Removes all transactions from the queue and restart mining operation. @@ -129,10 +129,10 @@ impl MinerService for Miner { } } - fn import_transactions(&self, transactions: Vec, fetch_nonce: T) + fn import_transactions(&self, transactions: Vec, fetch_nonce: T) -> Result<(), Error> where T: Fn(&Address) -> U256 { let mut transaction_queue = self.transaction_queue.lock().unwrap(); - transaction_queue.add_all(transactions, fetch_nonce); + transaction_queue.add_all(transactions, fetch_nonce) } fn prepare_sealing(&self, chain: &BlockChainClient) { @@ -180,7 +180,7 @@ impl MinerService for Miner { fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]) { fn fetch_transactions(chain: &BlockChainClient, hash: &H256) -> Vec { let block = chain - .block(BlockId::Hash(hash.clone())) + .block(BlockId::Hash(*hash)) // Client should send message after commit to db and inserting to chain. .expect("Expected in-chain blocks."); let block = BlockView::new(&block); @@ -202,7 +202,7 @@ impl MinerService for Miner { let _sender = tx.sender(); } let mut transaction_queue = self.transaction_queue.lock().unwrap(); - transaction_queue.add_all(txs, |a| chain.nonce(a)); + let _ = transaction_queue.add_all(txs, |a| chain.nonce(a)); }); } diff --git a/miner/src/transaction_queue.rs b/miner/src/transaction_queue.rs index 81825773f..f64bd7318 100644 --- a/miner/src/transaction_queue.rs +++ b/miner/src/transaction_queue.rs @@ -360,7 +360,7 @@ impl TransactionQueue { self.update_future(&sender, current_nonce); // And now lets check if there is some chain of transactions in future // that should be placed in current - self.move_matching_future_to_current(sender.clone(), current_nonce, current_nonce); + self.move_matching_future_to_current(sender, current_nonce, current_nonce); return; } @@ -376,7 +376,7 @@ impl TransactionQueue { self.move_all_to_future(&sender, current_nonce); // And now lets check if there is some chain of transactions in future // that should be placed in current. It should also update last_nonces. 
- self.move_matching_future_to_current(sender.clone(), current_nonce, current_nonce); + self.move_matching_future_to_current(sender, current_nonce, current_nonce); return; } } @@ -391,7 +391,7 @@ impl TransactionQueue { for k in all_nonces_from_sender { let order = self.future.drop(&sender, &k).unwrap(); if k >= current_nonce { - self.future.insert(sender.clone(), k, order.update_height(k, current_nonce)); + self.future.insert(*sender, k, order.update_height(k, current_nonce)); } else { // Remove the transaction completely self.by_hash.remove(&order.hash); @@ -411,7 +411,7 @@ impl TransactionQueue { // Goes to future or is removed let order = self.current.drop(&sender, &k).unwrap(); if k >= current_nonce { - self.future.insert(sender.clone(), k, order.update_height(k, current_nonce)); + self.future.insert(*sender, k, order.update_height(k, current_nonce)); } else { self.by_hash.remove(&order.hash); } @@ -452,8 +452,8 @@ impl TransactionQueue { // remove also from priority and hash self.future.by_priority.remove(&order); // Put to current - let order = order.update_height(current_nonce.clone(), first_nonce); - self.current.insert(address.clone(), current_nonce, order); + let order = order.update_height(current_nonce, first_nonce); + self.current.insert(address, current_nonce, order); current_nonce = current_nonce + U256::one(); } } @@ -501,10 +501,10 @@ impl TransactionQueue { } let base_nonce = fetch_nonce(&address); - Self::replace_transaction(tx, base_nonce.clone(), &mut self.current, &mut self.by_hash); - self.last_nonces.insert(address.clone(), nonce); + Self::replace_transaction(tx, base_nonce, &mut self.current, &mut self.by_hash); + self.last_nonces.insert(address, nonce); // But maybe there are some more items waiting in future? - self.move_matching_future_to_current(address.clone(), nonce + U256::one(), base_nonce); + self.move_matching_future_to_current(address, nonce + U256::one(), base_nonce); self.current.enforce_limit(&mut self.by_hash); } @@ -518,7 +518,7 @@ impl TransactionQueue { let address = tx.sender(); let nonce = tx.nonce(); - by_hash.insert(hash.clone(), tx); + by_hash.insert(hash, tx); if let Some(old) = set.insert(address, nonce, order.clone()) { // There was already transaction in queue. Let's check which one should stay let old_fee = old.gas_price; @@ -642,7 +642,7 @@ mod test { txq.set_minimal_gas_price(tx.gas_price + U256::one()); // when - txq.add(tx, &default_nonce); + txq.add(tx, &default_nonce).unwrap_err(); // then let stats = txq.status(); @@ -722,8 +722,8 @@ mod test { let mut txq = TransactionQueue::new(); let (tx, tx2) = new_txs(U256::from(1)); - txq.add(tx.clone(), &prev_nonce); - txq.add(tx2.clone(), &prev_nonce); + txq.add(tx.clone(), &prev_nonce).unwrap(); + txq.add(tx2.clone(), &prev_nonce).unwrap(); assert_eq!(txq.status().future, 2); // when @@ -861,7 +861,7 @@ mod test { fn should_drop_transactions_with_old_nonces() { let mut txq = TransactionQueue::new(); let tx = new_tx(); - let last_nonce = tx.nonce.clone() + U256::one(); + let last_nonce = tx.nonce + U256::one(); let fetch_last_nonce = |_a: &Address| last_nonce; // when diff --git a/parity/main.rs b/parity/main.rs index 9dbc3e6be..c73f971d9 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -259,8 +259,8 @@ impl Configuration { } fn gasprice(&self) -> U256 { - U256::from_dec_str(self.args.flag_gasprice).unwrap_or_else(|_| { - die("{}: Invalid gasprice given. 
Must be a decimal unsigned 256-bit number.") + U256::from_dec_str(self.args.flag_gasprice.as_str()).unwrap_or_else(|_| { + die!("{}: Invalid gas price given. Must be a decimal unsigned 256-bit number.", self.args.flag_gasprice) }) } diff --git a/sync/src/chain.rs b/sync/src/chain.rs index 46eaebe4c..85f5d6510 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -936,7 +936,7 @@ impl ChainSync { } let chain = io.chain(); let fetch_nonce = |a: &Address| chain.nonce(a); - self.miner.import_transactions(transactions, fetch_nonce); + let _ = self.miner.import_transactions(transactions, fetch_nonce); Ok(()) } From 9ea3c0eba00efc5c7e13abc26ac2b7f7a5490ca4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 15:20:54 +0100 Subject: [PATCH 15/42] Fixing compilation on beta & stable --- miner/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/miner/src/lib.rs b/miner/src/lib.rs index 4fccc6d51..0cee4ef43 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -15,8 +15,8 @@ // along with Parity. If not, see . #![warn(missing_docs)] -#![cfg_attr(feature="dev", feature(plugin))] -#![cfg_attr(feature="dev", plugin(clippy))] +#![cfg_attr(all(nightly, feature="dev"), feature(plugin))] +#![cfg_attr(all(nightly, feature="dev"), plugin(clippy))] //! Miner module //! Keeps track of transactions and mined block. From 9741d48496171b387732ca37a2b8f222fbc3983a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 15:35:36 +0100 Subject: [PATCH 16/42] Transaction data associated with polls. --- rpc/src/v1/helpers/poll_manager.rs | 104 +++++++++++++++++++++++++++-- 1 file changed, 100 insertions(+), 4 deletions(-) diff --git a/rpc/src/v1/helpers/poll_manager.rs b/rpc/src/v1/helpers/poll_manager.rs index 36a6352c2..6c0862633 100644 --- a/rpc/src/v1/helpers/poll_manager.rs +++ b/rpc/src/v1/helpers/poll_manager.rs @@ -16,6 +16,8 @@ //! Indexes all rpc poll requests. +use util::hash::H256; +use std::collections::HashMap; use transient_hashmap::{TransientHashMap, Timer, StandardTimer}; /// Lifetime of poll (in seconds). @@ -43,7 +45,8 @@ impl Clone for PollInfo where F: Clone { /// Lazily garbage collects unused polls info. pub struct PollManager where T: Timer { polls: TransientHashMap, T>, - next_available_id: PollId + transactions_data: HashMap>, + next_available_id: PollId, } impl PollManager { @@ -57,15 +60,25 @@ impl PollManager where T: Timer { pub fn new_with_timer(timer: T) -> Self { PollManager { polls: TransientHashMap::new_with_timer(POLL_LIFETIME, timer), + transactions_data: HashMap::new(), next_available_id: 0, } } + fn prune(&mut self) { + self.polls.prune(); + // self.polls.prune() + // .into_iter() + // .map(|key| { + // self.transactions_data.remove(key); + // }); + } + /// Returns id which can be used for new poll. /// /// Stores information when last poll happend. pub fn create_poll(&mut self, filter: F, block: BlockNumber) -> PollId { - self.polls.prune(); + self.prune(); let id = self.next_available_id; self.next_available_id += 1; self.polls.insert(id, PollInfo { @@ -77,7 +90,7 @@ impl PollManager where T: Timer { /// Updates information when last poll happend. pub fn update_poll(&mut self, id: &PollId, block: BlockNumber) { - self.polls.prune(); + self.prune(); if let Some(info) = self.polls.get_mut(id) { info.block_number = block; } @@ -85,13 +98,27 @@ impl PollManager where T: Timer { /// Returns number of block when last poll happend. 
pub fn get_poll_info(&mut self, id: &PollId) -> Option<&PollInfo> { - self.polls.prune(); + self.prune(); self.polls.get(id) } + pub fn set_poll_transactions(&mut self, id: &PollId, transactions: Vec) { + self.prune(); + if self.polls.get(id).is_some() { + self.transactions_data.insert(*id, transactions); + } + } + + /// Returns last transactions hashes for given poll. + pub fn poll_transactions(&mut self, id: &PollId) -> Option<&Vec> { + self.prune(); + self.transactions_data.get(id) + } + /// Removes poll info. pub fn remove_poll(&mut self, id: &PollId) { self.polls.remove(id); + self.transactions_data.remove(id); } } @@ -100,6 +127,7 @@ mod tests { use std::cell::RefCell; use transient_hashmap::Timer; use v1::helpers::PollManager; + use util::hash::H256; struct TestTimer<'a> { time: &'a RefCell, @@ -141,4 +169,72 @@ mod tests { indexer.remove_poll(&1); assert!(indexer.get_poll_info(&1).is_none()); } + + #[test] + fn should_return_poll_transactions_hashes() { + // given + let mut indexer = PollManager::new(); + let poll_id = indexer.create_poll(false, 20); + assert!(indexer.poll_transactions(&poll_id).is_none()); + let transactions = vec![H256::from(1), H256::from(2)]; + + // when + indexer.set_poll_transactions(&poll_id, transactions.clone()); + + // then + let txs = indexer.poll_transactions(&poll_id); + assert_eq!(txs.unwrap(), &transactions); + } + + + #[test] + fn should_remove_transaction_data_when_poll_timed_out() { + // given + let time = RefCell::new(0); + let timer = TestTimer { + time: &time, + }; + let mut indexer = PollManager::new_with_timer(timer); + let poll_id = indexer.create_poll(false, 20); + let transactions = vec![H256::from(1), H256::from(2)]; + indexer.set_poll_transactions(&poll_id, transactions.clone()); + assert!(indexer.poll_transactions(&poll_id).is_some()); + + // when + *time.borrow_mut() = 75; + indexer.prune(); + + // then + assert!(indexer.poll_transactions(&poll_id).is_none()); + + } + + #[test] + fn should_remove_transaction_data_when_poll_is_removed() { + // given + let mut indexer = PollManager::new(); + let poll_id = indexer.create_poll(false, 20); + let transactions = vec![H256::from(1), H256::from(2)]; + + // when + indexer.set_poll_transactions(&poll_id, transactions.clone()); + assert!(indexer.poll_transactions(&poll_id).is_some()); + indexer.remove_poll(&poll_id); + + // then + assert!(indexer.poll_transactions(&poll_id).is_none()); + } + + #[test] + fn should_ignore_transactions_for_invalid_poll_id() { + // given + let mut indexer = PollManager::<()>::new(); + let transactions = vec![H256::from(1), H256::from(2)]; + + // when + indexer.set_poll_transactions(&5, transactions.clone()); + + // then + assert!(indexer.poll_transactions(&5).is_none()); + } } From c37370a8a777f503307d341381cd00f3fc27ff08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 16:00:55 +0100 Subject: [PATCH 17/42] PendingTransaction filter. 
--- miner/src/miner.rs | 8 ++++++++ miner/src/transaction_queue.rs | 26 ++++++++++++++++++++++++ parity/main.rs | 2 +- rpc/src/v1/helpers/poll_manager.rs | 32 +++++++++++++++++------------- rpc/src/v1/impls/eth.rs | 32 ++++++++++++++++++++++++------ 5 files changed, 79 insertions(+), 21 deletions(-) diff --git a/miner/src/miner.rs b/miner/src/miner.rs index 8e93defcf..85dbc6bbc 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -34,6 +34,9 @@ pub trait MinerService { fn import_transactions(&self, transactions: Vec, fetch_nonce: T) -> Result<(), Error> where T: Fn(&Address) -> U256; + /// Returns hashes of transactions currently in pending + fn pending_transactions_hashes(&self) -> Vec; + /// Removes all transactions from the queue and restart mining operation. fn clear_and_reset(&self, chain: &BlockChainClient); @@ -135,6 +138,11 @@ impl MinerService for Miner { transaction_queue.add_all(transactions, fetch_nonce) } + fn pending_transactions_hashes(&self) -> Vec { + let transaction_queue = self.transaction_queue.lock().unwrap(); + transaction_queue.pending_hashes() + } + fn prepare_sealing(&self, chain: &BlockChainClient) { let no_of_transactions = 128; let transactions = self.transaction_queue.lock().unwrap().top_transactions(no_of_transactions); diff --git a/miner/src/transaction_queue.rs b/miner/src/transaction_queue.rs index f64bd7318..4379531b2 100644 --- a/miner/src/transaction_queue.rs +++ b/miner/src/transaction_queue.rs @@ -431,6 +431,14 @@ impl TransactionQueue { .collect() } + /// Returns hashes of all transactions from current, ordered by priority. + pub fn pending_hashes(&self) -> Vec { + self.current.by_priority + .iter() + .map(|t| t.hash) + .collect() + } + /// Removes all elements (in any state) from the queue pub fn clear(&mut self) { self.current.clear(); @@ -693,6 +701,24 @@ mod test { assert_eq!(top.len(), 2); } + #[test] + fn should_return_pending_hashes() { + // given + let mut txq = TransactionQueue::new(); + + let (tx, tx2) = new_txs(U256::from(1)); + + // when + txq.add(tx.clone(), &default_nonce).unwrap(); + txq.add(tx2.clone(), &default_nonce).unwrap(); + + // then + let top = txq.pending_hashes(); + assert_eq!(top[0], tx.hash()); + assert_eq!(top[1], tx2.hash()); + assert_eq!(top.len(), 2); + } + #[test] fn should_put_transaction_to_futures_if_gap_detected() { // given diff --git a/parity/main.rs b/parity/main.rs index c73f971d9..d83fe680d 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -209,7 +209,7 @@ fn setup_rpc_server(client: Arc, sync: Arc, miner: Arc, "net" => server.add_delegate(NetClient::new(&sync).to_delegate()), "eth" => { server.add_delegate(EthClient::new(&client, &sync, &miner).to_delegate()); - server.add_delegate(EthFilterClient::new(&client).to_delegate()); + server.add_delegate(EthFilterClient::new(&client, &miner).to_delegate()); } _ => { die!("{}: Invalid API name to be enabled.", api); diff --git a/rpc/src/v1/helpers/poll_manager.rs b/rpc/src/v1/helpers/poll_manager.rs index 6c0862633..73b273a8f 100644 --- a/rpc/src/v1/helpers/poll_manager.rs +++ b/rpc/src/v1/helpers/poll_manager.rs @@ -102,15 +102,19 @@ impl PollManager where T: Timer { self.polls.get(id) } - pub fn set_poll_transactions(&mut self, id: &PollId, transactions: Vec) { + pub fn update_transactions(&mut self, id: &PollId, transactions: Vec) -> Option> { self.prune(); if self.polls.get(id).is_some() { - self.transactions_data.insert(*id, transactions); + self.transactions_data.insert(*id, transactions) + } else { + None } } + // Normal code always replaces 
transactions + #[cfg(test)] /// Returns last transactions hashes for given poll. - pub fn poll_transactions(&mut self, id: &PollId) -> Option<&Vec> { + pub fn transactions(&mut self, id: &PollId) -> Option<&Vec> { self.prune(); self.transactions_data.get(id) } @@ -175,14 +179,14 @@ mod tests { // given let mut indexer = PollManager::new(); let poll_id = indexer.create_poll(false, 20); - assert!(indexer.poll_transactions(&poll_id).is_none()); + assert!(indexer.transactions(&poll_id).is_none()); let transactions = vec![H256::from(1), H256::from(2)]; // when - indexer.set_poll_transactions(&poll_id, transactions.clone()); + indexer.update_transactions(&poll_id, transactions.clone()); // then - let txs = indexer.poll_transactions(&poll_id); + let txs = indexer.transactions(&poll_id); assert_eq!(txs.unwrap(), &transactions); } @@ -197,15 +201,15 @@ mod tests { let mut indexer = PollManager::new_with_timer(timer); let poll_id = indexer.create_poll(false, 20); let transactions = vec![H256::from(1), H256::from(2)]; - indexer.set_poll_transactions(&poll_id, transactions.clone()); - assert!(indexer.poll_transactions(&poll_id).is_some()); + indexer.update_transactions(&poll_id, transactions.clone()); + assert!(indexer.transactions(&poll_id).is_some()); // when *time.borrow_mut() = 75; indexer.prune(); // then - assert!(indexer.poll_transactions(&poll_id).is_none()); + assert!(indexer.transactions(&poll_id).is_none()); } @@ -217,12 +221,12 @@ mod tests { let transactions = vec![H256::from(1), H256::from(2)]; // when - indexer.set_poll_transactions(&poll_id, transactions.clone()); - assert!(indexer.poll_transactions(&poll_id).is_some()); + indexer.update_transactions(&poll_id, transactions.clone()); + assert!(indexer.transactions(&poll_id).is_some()); indexer.remove_poll(&poll_id); // then - assert!(indexer.poll_transactions(&poll_id).is_none()); + assert!(indexer.transactions(&poll_id).is_none()); } #[test] @@ -232,9 +236,9 @@ mod tests { let transactions = vec![H256::from(1), H256::from(2)]; // when - indexer.set_poll_transactions(&5, transactions.clone()); + indexer.update_transactions(&5, transactions.clone()); // then - assert!(indexer.poll_transactions(&5).is_none()); + assert!(indexer.transactions(&5).is_none()); } } diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index a9ee389f8..5c7df574d 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -15,7 +15,7 @@ // along with Parity. If not, see . //! Eth rpc implementation. -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::sync::{Arc, Weak, Mutex, RwLock}; use std::ops::Deref; use ethsync::{EthSync, SyncState}; @@ -264,15 +264,17 @@ impl Eth for EthClient { /// Eth filter rpc implementation. pub struct EthFilterClient { client: Weak, + miner: Weak, polls: Mutex>, } impl EthFilterClient { /// Creates new Eth filter client. 
- pub fn new(client: &Arc) -> Self { + pub fn new(client: &Arc, miner: &Arc) -> Self { EthFilterClient { client: Arc::downgrade(client), - polls: Mutex::new(PollManager::new()) + miner: Arc::downgrade(miner), + polls: Mutex::new(PollManager::new()), } } } @@ -302,7 +304,12 @@ impl EthFilter for EthFilterClient { match params { Params::None => { let mut polls = self.polls.lock().unwrap(); - let id = polls.create_poll(PollFilter::PendingTransaction, take_weak!(self.client).chain_info().best_block_number); + let best_block_number = take_weak!(self.client).chain_info().best_block_number; + let pending_transactions = take_weak!(self.miner).pending_transactions_hashes(); + + let id = polls.create_poll(PollFilter::PendingTransaction, best_block_number); + polls.update_transactions(&id, pending_transactions); + to_value(&U256::from(id)) }, _ => Err(Error::invalid_params()) @@ -330,8 +337,21 @@ impl EthFilter for EthFilterClient { to_value(&hashes) }, PollFilter::PendingTransaction => { - // TODO: fix implementation once TransactionQueue is merged - to_value(&vec![] as &Vec) + let poll_id = index.value(); + let mut polls = self.polls.lock().unwrap(); + + let current_hashes = take_weak!(self.miner).pending_transactions_hashes(); + let previous_hashes = polls.update_transactions(&poll_id, current_hashes.clone()).unwrap(); + polls.update_poll(&poll_id, client.chain_info().best_block_number); + + // calculate diff + let previous_hashes_set = previous_hashes.into_iter().collect::>(); + let diff = current_hashes + .into_iter() + .filter(|hash| previous_hashes_set.contains(&hash)) + .collect::>(); + + to_value(&diff) }, PollFilter::Logs(mut filter) => { filter.from_block = BlockId::Number(info.block_number); From 0eaf0a8db1827a005f5ae920736136ef33f4d70f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Thu, 10 Mar 2016 16:40:15 +0100 Subject: [PATCH 18/42] Updating hook. --- hook.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hook.sh b/hook.sh index 113bf1838..25877868a 100755 --- a/hook.sh +++ b/hook.sh @@ -1,3 +1,3 @@ #!/bin/sh -echo "#!/bin/sh\ncargo test -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer --features dev-clippy" > ./.git/hooks/pre-push +echo "#!/bin/sh\ncargo build --features dev-clippy && cargo test --no-run -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity -p ethminer --features dev-clippy" > ./.git/hooks/pre-push chmod +x ./.git/hooks/pre-push From 3bbdc03d0cbebe315a059ac7762db386805868f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 10:17:51 +0100 Subject: [PATCH 19/42] Fixing doctest. --- sync/src/lib.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sync/src/lib.rs b/sync/src/lib.rs index e0158a564..d54acaf8a 100644 --- a/sync/src/lib.rs +++ b/sync/src/lib.rs @@ -31,18 +31,21 @@ //! extern crate ethcore_util as util; //! extern crate ethcore; //! extern crate ethsync; +//! extern crate ethminer; //! use std::env; //! use std::sync::Arc; //! use util::network::{NetworkService, NetworkConfiguration}; //! use ethcore::client::{Client, ClientConfig}; //! use ethsync::{EthSync, SyncConfig}; +//! use ethminer::Miner; //! use ethcore::ethereum; //! //! fn main() { //! let mut service = NetworkService::start(NetworkConfiguration::new()).unwrap(); //! let dir = env::temp_dir(); //! let client = Client::new(ClientConfig::default(), ethereum::new_frontier(), &dir, service.io().channel()).unwrap(); -//! 
EthSync::register(&mut service, SyncConfig::default(), client); +//! let miner = Miner::new(); +//! EthSync::register(&mut service, SyncConfig::default(), client, miner); //! } //! ``` From 55a14b3aaf31e8b1f7b14d4eb75032a1fa815561 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 11:40:12 +0100 Subject: [PATCH 20/42] Fixing transaction queue test --- Cargo.lock | 1 + miner/src/lib.rs | 1 + miner/src/transaction_queue.rs | 4 ++-- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3b49a974c..8b24df187 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -299,6 +299,7 @@ dependencies = [ "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] diff --git a/miner/src/lib.rs b/miner/src/lib.rs index 0cee4ef43..20b5dd7d3 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -62,5 +62,6 @@ extern crate rayon; mod miner; mod transaction_queue; +pub use transaction_queue::TransactionQueue; pub use miner::{Miner, MinerService}; diff --git a/miner/src/transaction_queue.rs b/miner/src/transaction_queue.rs index f64bd7318..3d5c38b0c 100644 --- a/miner/src/transaction_queue.rs +++ b/miner/src/transaction_queue.rs @@ -28,13 +28,13 @@ //! ```rust //! extern crate ethcore_util as util; //! extern crate ethcore; -//! extern crate ethsync; +//! extern crate ethminer; //! extern crate rustc_serialize; //! //! use util::crypto::KeyPair; //! use util::hash::Address; //! use util::numbers::{Uint, U256}; -//! use ethsync::TransactionQueue; +//! use ethminer::TransactionQueue; //! use ethcore::transaction::*; //! use rustc_serialize::hex::FromHex; //! From dd2fb4df67307c4ce8e632d7ab009937422888af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 12:31:42 +0100 Subject: [PATCH 21/42] Storing BlockNumber & transactions directly in enum --- rpc/src/v1/helpers/poll_filter.rs | 9 +- rpc/src/v1/helpers/poll_manager.rs | 183 ++++++----------------------- rpc/src/v1/impls/eth.rs | 45 ++++--- 3 files changed, 59 insertions(+), 178 deletions(-) diff --git a/rpc/src/v1/helpers/poll_filter.rs b/rpc/src/v1/helpers/poll_filter.rs index 465290270..f9ed6230c 100644 --- a/rpc/src/v1/helpers/poll_filter.rs +++ b/rpc/src/v1/helpers/poll_filter.rs @@ -1,10 +1,13 @@ //! Helper type with all filter possibilities. +use util::hash::H256; use ethcore::filter::Filter; +pub type BlockNumber = u64; + #[derive(Clone)] pub enum PollFilter { - Block, - PendingTransaction, - Logs(Filter) + Block(BlockNumber), + PendingTransaction(Vec), + Logs(BlockNumber, Filter) } diff --git a/rpc/src/v1/helpers/poll_manager.rs b/rpc/src/v1/helpers/poll_manager.rs index 765410567..9735d7d5d 100644 --- a/rpc/src/v1/helpers/poll_manager.rs +++ b/rpc/src/v1/helpers/poll_manager.rs @@ -16,36 +16,18 @@ //! Indexes all rpc poll requests. -use util::hash::H256; -use std::collections::HashMap; use transient_hashmap::{TransientHashMap, Timer, StandardTimer}; /// Lifetime of poll (in seconds). 
const POLL_LIFETIME: u64 = 60; pub type PollId = usize; -pub type BlockNumber = u64; - -pub struct PollInfo { - pub filter: F, - pub block_number: BlockNumber -} - -impl Clone for PollInfo where F: Clone { - fn clone(&self) -> Self { - PollInfo { - filter: self.filter.clone(), - block_number: self.block_number.clone() - } - } -} /// Indexes all poll requests. /// /// Lazily garbage collects unused polls info. pub struct PollManager where T: Timer { - polls: TransientHashMap, T>, - transactions_data: HashMap>, + polls: TransientHashMap, next_available_id: PollId, } @@ -57,188 +39,89 @@ impl PollManager { } impl PollManager where T: Timer { + pub fn new_with_timer(timer: T) -> Self { PollManager { polls: TransientHashMap::new_with_timer(POLL_LIFETIME, timer), - transactions_data: HashMap::new(), next_available_id: 0, } } - fn prune(&mut self) { - self.polls.prune(); - // self.polls.prune() - // .into_iter() - // .map(|key| { - // self.transactions_data.remove(key); - // }); - } - /// Returns id which can be used for new poll. /// /// Stores information when last poll happend. - pub fn create_poll(&mut self, filter: F, block: BlockNumber) -> PollId { - self.prune(); + pub fn create_poll(&mut self, filter: F) -> PollId { + self.polls.prune(); + let id = self.next_available_id; + self.polls.insert(id, filter); + self.next_available_id += 1; - self.polls.insert(id, PollInfo { - filter: filter, - block_number: block, - }); id } - /// Updates information when last poll happend. - pub fn update_poll(&mut self, id: &PollId, block: BlockNumber) { - self.prune(); - if let Some(info) = self.polls.get_mut(id) { - info.block_number = block; - } - } - - /// Returns number of block when last poll happend. - pub fn poll_info(&mut self, id: &PollId) -> Option<&PollInfo> { - self.prune(); + // Implementation is always using `poll_mut` + #[cfg(test)] + /// Get a reference to stored poll filter + pub fn poll(&mut self, id: &PollId) -> Option<&F> { + self.polls.prune(); self.polls.get(id) } - pub fn update_transactions(&mut self, id: &PollId, transactions: Vec) -> Option> { - self.prune(); - if self.polls.get(id).is_some() { - self.transactions_data.insert(*id, transactions) - } else { - None - } - } - - // Normal code always replaces transactions - #[cfg(test)] - /// Returns last transactions hashes for given poll. - pub fn transactions(&mut self, id: &PollId) -> Option<&Vec> { - self.prune(); - self.transactions_data.get(id) + /// Get a mutable reference to stored poll filter + pub fn poll_mut(&mut self, id: &PollId) -> Option<&mut F> { + self.polls.prune(); + self.polls.get_mut(id) } /// Removes poll info. 
pub fn remove_poll(&mut self, id: &PollId) { self.polls.remove(id); - self.transactions_data.remove(id); } } #[cfg(test)] mod tests { - use std::cell::RefCell; + use std::cell::Cell; use transient_hashmap::Timer; use v1::helpers::PollManager; - use util::hash::H256; struct TestTimer<'a> { - time: &'a RefCell, + time: &'a Cell, } impl<'a> Timer for TestTimer<'a> { fn get_time(&self) -> i64 { - *self.time.borrow() + self.time.get() } } #[test] fn test_poll_indexer() { - let time = RefCell::new(0); + let time = Cell::new(0); let timer = TestTimer { time: &time, }; let mut indexer = PollManager::new_with_timer(timer); - assert_eq!(indexer.create_poll(false, 20), 0); - assert_eq!(indexer.create_poll(true, 20), 1); + assert_eq!(indexer.create_poll(20), 0); + assert_eq!(indexer.create_poll(20), 1); - *time.borrow_mut() = 10; - indexer.update_poll(&0, 21); - assert_eq!(indexer.poll_info(&0).unwrap().filter, false); - assert_eq!(indexer.poll_info(&0).unwrap().block_number, 21); + time.set(10); + *indexer.poll_mut(&0).unwrap() = 21; + assert_eq!(*indexer.poll(&0).unwrap(), 21); + assert_eq!(*indexer.poll(&1).unwrap(), 20); - *time.borrow_mut() = 30; - indexer.update_poll(&1, 23); - assert_eq!(indexer.poll_info(&1).unwrap().filter, true); - assert_eq!(indexer.poll_info(&1).unwrap().block_number, 23); + time.set(30); + *indexer.poll_mut(&1).unwrap() = 23; + assert_eq!(*indexer.poll(&1).unwrap(), 23); - *time.borrow_mut() = 75; - indexer.update_poll(&0, 30); - assert!(indexer.poll_info(&0).is_none()); - assert_eq!(indexer.poll_info(&1).unwrap().filter, true); - assert_eq!(indexer.poll_info(&1).unwrap().block_number, 23); + time.set(75); + assert!(indexer.poll(&0).is_none()); + assert_eq!(*indexer.poll(&1).unwrap(), 23); indexer.remove_poll(&1); - assert!(indexer.poll_info(&1).is_none()); + assert!(indexer.poll(&1).is_none()); } - #[test] - fn should_return_poll_transactions_hashes() { - // given - let mut indexer = PollManager::new(); - let poll_id = indexer.create_poll(false, 20); - assert!(indexer.transactions(&poll_id).is_none()); - let transactions = vec![H256::from(1), H256::from(2)]; - - // when - indexer.update_transactions(&poll_id, transactions.clone()); - - // then - let txs = indexer.transactions(&poll_id); - assert_eq!(txs.unwrap(), &transactions); - } - - - #[test] - fn should_remove_transaction_data_when_poll_timed_out() { - // given - let time = RefCell::new(0); - let timer = TestTimer { - time: &time, - }; - let mut indexer = PollManager::new_with_timer(timer); - let poll_id = indexer.create_poll(false, 20); - let transactions = vec![H256::from(1), H256::from(2)]; - indexer.update_transactions(&poll_id, transactions.clone()); - assert!(indexer.transactions(&poll_id).is_some()); - - // when - *time.borrow_mut() = 75; - indexer.prune(); - - // then - assert!(indexer.transactions(&poll_id).is_none()); - - } - - #[test] - fn should_remove_transaction_data_when_poll_is_removed() { - // given - let mut indexer = PollManager::new(); - let poll_id = indexer.create_poll(false, 20); - let transactions = vec![H256::from(1), H256::from(2)]; - - // when - indexer.update_transactions(&poll_id, transactions.clone()); - assert!(indexer.transactions(&poll_id).is_some()); - indexer.remove_poll(&poll_id); - - // then - assert!(indexer.transactions(&poll_id).is_none()); - } - - #[test] - fn should_ignore_transactions_for_invalid_poll_id() { - // given - let mut indexer = PollManager::<()>::new(); - let transactions = vec![H256::from(1), H256::from(2)]; - - // when - indexer.update_transactions(&5, 
transactions.clone()); - - // then - assert!(indexer.transactions(&5).is_none()); - } } diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index 211c46304..9f81caa90 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -301,7 +301,8 @@ impl EthFilter for EthFilterClient from_params::<(Filter,)>(params) .and_then(|(filter,)| { let mut polls = self.polls.lock().unwrap(); - let id = polls.create_poll(PollFilter::Logs(filter.into()), take_weak!(self.client).chain_info().best_block_number); + let block_number = take_weak!(self.client).chain_info().best_block_number; + let id = polls.create_poll(PollFilter::Logs(block_number, filter.into())); to_value(&U256::from(id)) }) } @@ -310,7 +311,7 @@ impl EthFilter for EthFilterClient match params { Params::None => { let mut polls = self.polls.lock().unwrap(); - let id = polls.create_poll(PollFilter::Block, take_weak!(self.client).chain_info().best_block_number); + let id = polls.create_poll(PollFilter::Block(take_weak!(self.client).chain_info().best_block_number)); to_value(&U256::from(id)) }, _ => Err(Error::invalid_params()) @@ -321,11 +322,8 @@ impl EthFilter for EthFilterClient match params { Params::None => { let mut polls = self.polls.lock().unwrap(); - let best_block_number = take_weak!(self.client).chain_info().best_block_number; let pending_transactions = take_weak!(self.miner).pending_transactions_hashes(); - - let id = polls.create_poll(PollFilter::PendingTransaction, best_block_number); - polls.update_transactions(&id, pending_transactions); + let id = polls.create_poll(PollFilter::PendingTransaction(pending_transactions)); to_value(&U256::from(id)) }, @@ -337,50 +335,47 @@ impl EthFilter for EthFilterClient let client = take_weak!(self.client); from_params::<(Index,)>(params) .and_then(|(index,)| { - let info = self.polls.lock().unwrap().poll_info(&index.value()).cloned(); - match info { + let mut polls = self.polls.lock().unwrap(); + match polls.poll_mut(&index.value()) { None => Ok(Value::Array(vec![] as Vec)), - Some(info) => match info.filter { - PollFilter::Block => { + Some(filter) => match *filter { + PollFilter::Block(ref mut block_number) => { // + 1, cause we want to return hashes including current block hash. 
let current_number = client.chain_info().best_block_number + 1; - let hashes = (info.block_number..current_number).into_iter() + let hashes = (*block_number..current_number).into_iter() .map(BlockId::Number) .filter_map(|id| client.block_hash(id)) .collect::>(); - self.polls.lock().unwrap().update_poll(&index.value(), current_number); + *block_number = current_number; to_value(&hashes) }, - PollFilter::PendingTransaction => { - let poll_id = index.value(); - let mut polls = self.polls.lock().unwrap(); - + PollFilter::PendingTransaction(ref mut previous_hashes) => { let current_hashes = take_weak!(self.miner).pending_transactions_hashes(); - let previous_hashes = polls.update_transactions(&poll_id, current_hashes.clone()).unwrap(); - polls.update_poll(&poll_id, client.chain_info().best_block_number); - // calculate diff - let previous_hashes_set = previous_hashes.into_iter().collect::>(); + let previous_hashes_set = previous_hashes.into_iter().map(|h| h.clone()).collect::>(); let diff = current_hashes - .into_iter() + .iter() .filter(|hash| previous_hashes_set.contains(&hash)) + .cloned() .collect::>(); + *previous_hashes = current_hashes; + to_value(&diff) }, - PollFilter::Logs(mut filter) => { - filter.from_block = BlockId::Number(info.block_number); + PollFilter::Logs(ref mut block_number, ref mut filter) => { + filter.from_block = BlockId::Number(*block_number); filter.to_block = BlockId::Latest; - let logs = client.logs(filter) + let logs = client.logs(filter.clone()) .into_iter() .map(From::from) .collect::>(); let current_number = client.chain_info().best_block_number; - self.polls.lock().unwrap().update_poll(&index.value(), current_number); + *block_number = current_number; to_value(&logs) } } From 197ea7f7d6bc89337215ad72702ac34734f0f71f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 14:48:30 +0100 Subject: [PATCH 22/42] Using miner in rpc instead of sync --- miner/src/lib.rs | 43 +++++++++++++++++++++++++++++++++++- miner/src/miner.rs | 49 ++++++++--------------------------------- parity/main.rs | 1 + rpc/src/lib.rs | 2 ++ rpc/src/v1/impls/eth.rs | 18 ++++++++++----- 5 files changed, 67 insertions(+), 46 deletions(-) diff --git a/miner/src/lib.rs b/miner/src/lib.rs index 20b5dd7d3..135a15df5 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -63,5 +63,46 @@ mod miner; mod transaction_queue; pub use transaction_queue::TransactionQueue; -pub use miner::{Miner, MinerService}; +pub use miner::{Miner}; +use std::sync::Mutex; +use util::{H256, U256, Address, Bytes}; +use ethcore::client::{BlockChainClient}; +use ethcore::block::{ClosedBlock}; +use ethcore::error::{Error}; +use ethcore::transaction::SignedTransaction; + +/// Miner client API +pub trait MinerService : Send + Sync { + + /// Returns miner's status. + fn status(&self) -> MinerStatus; + + /// Imports transactions to transaction queue. + fn import_transactions(&self, transactions: Vec, fetch_nonce: T) -> Result<(), Error> + where T: Fn(&Address) -> U256; + + /// Removes all transactions from the queue and restart mining operation. + fn clear_and_reset(&self, chain: &BlockChainClient); + + /// called when blocks are imported to chain, updates transactions queue. + fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], retracted: &[H256]); + + /// New chain head event. Restart mining operation. + fn prepare_sealing(&self, chain: &BlockChainClient); + + /// Grab the `ClosedBlock` that we want to be sealed. Comes as a mutex that you have to lock. 
+ fn sealing_block(&self, chain: &BlockChainClient) -> &Mutex>; + + /// Submit `seal` as a valid solution for the header of `pow_hash`. + /// Will check the seal, but not actually insert the block into the chain. + fn submit_seal(&self, chain: &BlockChainClient, pow_hash: H256, seal: Vec) -> Result<(), Error>; +} + +/// Mining status +pub struct MinerStatus { + /// Number of transactions in queue with state `pending` (ready to be included in block) + pub transaction_queue_pending: usize, + /// Number of transactions in queue with state `future` (not yet ready to be included in block) + pub transaction_queue_future: usize, +} diff --git a/miner/src/miner.rs b/miner/src/miner.rs index d2e839101..623af33a0 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -14,50 +14,19 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . -use util::*; -use std::sync::atomic::AtomicBool; use rayon::prelude::*; +use std::sync::{Mutex, RwLock, Arc}; +use std::sync::atomic; +use std::sync::atomic::AtomicBool; + +use util::{H256, U256, Address, Bytes}; use ethcore::views::{BlockView}; use ethcore::client::{BlockChainClient, BlockId}; -use ethcore::block::*; -use ethcore::error::*; +use ethcore::block::{ClosedBlock}; +use ethcore::error::{Error}; use ethcore::transaction::SignedTransaction; -use transaction_queue::{TransactionQueue}; -/// Miner client API -pub trait MinerService : Send + Sync { - - /// Returns miner's status. - fn status(&self) -> MinerStatus; - - /// Imports transactions to transaction queue. - fn import_transactions(&self, transactions: Vec, fetch_nonce: T) -> Result<(), Error> - where T: Fn(&Address) -> U256; - - /// Removes all transactions from the queue and restart mining operation. - fn clear_and_reset(&self, chain: &BlockChainClient); - - /// called when blocks are imported to chain, updates transactions queue. - fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]); - - /// New chain head event. Restart mining operation. - fn prepare_sealing(&self, chain: &BlockChainClient); - - /// Grab the `ClosedBlock` that we want to be sealed. Comes as a mutex that you have to lock. - fn sealing_block(&self, chain: &BlockChainClient) -> &Mutex>; - - /// Submit `seal` as a valid solution for the header of `pow_hash`. - /// Will check the seal, but not actually insert the block into the chain. - fn submit_seal(&self, chain: &BlockChainClient, pow_hash: H256, seal: Vec) -> Result<(), Error>; -} - -/// Mining status -pub struct MinerStatus { - /// Number of transactions in queue with state `pending` (ready to be included in block) - pub transaction_queue_pending: usize, - /// Number of transactions in queue with state `future` (not yet ready to be included in block) - pub transaction_queue_future: usize, -} +use super::{MinerService, MinerStatus, TransactionQueue}; /// Keeps track of transactions using priority queue and holds currently mined block. 
pub struct Miner { @@ -76,7 +45,7 @@ impl Default for Miner { transaction_queue: Mutex::new(TransactionQueue::new()), sealing_enabled: AtomicBool::new(false), sealing_block: Mutex::new(None), - author: RwLock::new(Address::new()), + author: RwLock::new(Address::default()), extra_data: RwLock::new(Vec::new()), } } diff --git a/parity/main.rs b/parity/main.rs index d26908f8a..f8a45b01e 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -171,6 +171,7 @@ struct Args { flag_nodekey: Option, flag_nodiscover: bool, flag_maxpeers: Option, + flag_gasprice: String, flag_author: String, flag_extra_data: Option, flag_datadir: Option, diff --git a/rpc/src/lib.rs b/rpc/src/lib.rs index 103bef546..3096a45c9 100644 --- a/rpc/src/lib.rs +++ b/rpc/src/lib.rs @@ -19,6 +19,8 @@ #![cfg_attr(feature="nightly", feature(custom_derive, custom_attribute, plugin))] #![cfg_attr(feature="nightly", plugin(serde_macros, clippy))] +#[macro_use] +extern crate log; extern crate rustc_serialize; extern crate serde; extern crate serde_json; diff --git a/rpc/src/v1/impls/eth.rs b/rpc/src/v1/impls/eth.rs index 9c0f37bc1..c4d649d5a 100644 --- a/rpc/src/v1/impls/eth.rs +++ b/rpc/src/v1/impls/eth.rs @@ -53,7 +53,7 @@ impl EthClient A: AccountProvider, M: MinerService { /// Creates new EthClient. - pub fn new(client: &Arc, sync: &Arc, miner: &Arc) -> Self { + pub fn new(client: &Arc, sync: &Arc, accounts: &Arc, miner: &Arc) -> Self { EthClient { client: Arc::downgrade(client), sync: Arc::downgrade(sync), @@ -189,7 +189,7 @@ impl Eth for EthClient fn block_transaction_count_by_number(&self, params: Params) -> Result { from_params::<(BlockNumber,)>(params) .and_then(|(block_number,)| match block_number { - BlockNumber::Pending => to_value(&take_weak!(self.sync).status().transaction_queue_pending), + BlockNumber::Pending => to_value(&take_weak!(self.miner).status().transaction_queue_pending), _ => match take_weak!(self.client).block(block_number.into()) { Some(bytes) => to_value(&BlockView::new(&bytes).transactions_count()), None => Ok(Value::Null) @@ -292,12 +292,20 @@ impl Eth for EthClient let accounts = take_weak!(self.accounts); match accounts.account_secret(&transaction_request.from) { Ok(secret) => { - let sync = take_weak!(self.sync); + let miner = take_weak!(self.miner); + let client = take_weak!(self.client); let (transaction, _) = transaction_request.to_eth(); let signed_transaction = transaction.sign(&secret); let hash = signed_transaction.hash(); - sync.insert_transaction(signed_transaction); - to_value(&hash) + + let import = miner.import_transactions(vec![signed_transaction], |a: &Address| client.nonce(a)); + match import { + Ok(_) => to_value(&hash), + Err(e) => { + warn!("Error sending transaction: {:?}", e); + to_value(&U256::zero()) + } + } }, Err(_) => { to_value(&U256::zero()) } } From 36ff65d05078c81208eae39adbd470e2d2ea5029 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 14:52:47 +0100 Subject: [PATCH 23/42] Fixing warnings --- parity/main.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/parity/main.rs b/parity/main.rs index f8a45b01e..e4ce36144 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -338,7 +338,7 @@ impl Configuration { let host = IpAddr::from_str(host).unwrap_or_else(|_| die!("Invalid host given with `--nat extip:{}`", host)); Some(SocketAddr::new(host, self.args.flag_port)) } else { - listen_address.clone() + listen_address }; (listen_address, public_address) } @@ -379,9 +379,9 @@ impl Configuration { fn sync_config(&self, 
spec: &Spec) -> SyncConfig { let mut sync_config = SyncConfig::default(); - sync_config.network_id = self.args.flag_networkid.as_ref().map(|id| { + sync_config.network_id = self.args.flag_networkid.as_ref().map_or(spec.network_id(), |id| { U256::from_str(id).unwrap_or_else(|_| die!("{}: Invalid index given with --networkid", id)) - }).unwrap_or(spec.network_id()); + }); sync_config } @@ -425,7 +425,7 @@ impl Configuration { } if self.args.cmd_list { println!("Known addresses:"); - for &(addr, _) in secret_store.accounts().unwrap().iter() { + for &(addr, _) in &secret_store.accounts().unwrap() { println!("{:?}", addr); } } From b458452f0e03acf44eda70b238c804afb2b62e8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 14:55:56 +0100 Subject: [PATCH 24/42] TestSyncProvider fixes --- rpc/src/v1/tests/helpers/sync_provider.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/rpc/src/v1/tests/helpers/sync_provider.rs b/rpc/src/v1/tests/helpers/sync_provider.rs index a3711d949..631752dfc 100644 --- a/rpc/src/v1/tests/helpers/sync_provider.rs +++ b/rpc/src/v1/tests/helpers/sync_provider.rs @@ -14,7 +14,6 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . -use ethcore::transaction::SignedTransaction; use ethsync::{SyncProvider, SyncStatus, SyncState}; pub struct Config { @@ -40,7 +39,6 @@ impl TestSyncProvider { num_peers: config.num_peers, num_active_peers: 0, mem_used: 0, - transaction_queue_pending: 0, }, } } @@ -50,9 +48,5 @@ impl SyncProvider for TestSyncProvider { fn status(&self) -> SyncStatus { self.status.clone() } - - fn insert_transaction(&self, _transaction: SignedTransaction) { - unimplemented!() - } } From 0dbe6684ad42ecd1a681b6430bcb2205a995ea7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 15:58:12 +0100 Subject: [PATCH 25/42] adding std::mem --- util/bigint/src/uint.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/util/bigint/src/uint.rs b/util/bigint/src/uint.rs index 3997d2e66..3dfb9dd45 100644 --- a/util/bigint/src/uint.rs +++ b/util/bigint/src/uint.rs @@ -36,6 +36,7 @@ //! The functions here are designed to be fast. //! +use std::mem; use std::fmt; use std::cmp; From 7cfe1d258bebcdf45cad0624b3cd858396464dd3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Fri, 11 Mar 2016 23:49:32 +0100 Subject: [PATCH 26/42] Adding more detailed logging --- util/src/rlp/rlpin.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/util/src/rlp/rlpin.rs b/util/src/rlp/rlpin.rs index d58fa95e8..9d3fcb2fa 100644 --- a/util/src/rlp/rlpin.rs +++ b/util/src/rlp/rlpin.rs @@ -24,7 +24,7 @@ impl<'a> From> for Rlp<'a> { } /// Data-oriented view onto trusted rlp-slice. -/// +/// /// Unlikely to `UntrustedRlp` doesn't bother you with error /// handling. It assumes that you know what you are doing. 
#[derive(Debug)] @@ -44,7 +44,7 @@ impl<'a, 'view> View<'a, 'view> for Rlp<'a> where 'a: 'view { type Data = &'a [u8]; type Item = Rlp<'a>; type Iter = RlpIterator<'a, 'view>; - + /// Create a new instance of `Rlp` fn new(bytes: &'a [u8]) -> Rlp<'a> { Rlp { @@ -116,7 +116,7 @@ impl<'a, 'view> View<'a, 'view> for Rlp<'a> where 'a: 'view { impl <'a, 'view> Rlp<'a> where 'a: 'view { fn view_as_val(r: &R) -> T where R: View<'a, 'view>, T: RlpDecodable { let res: Result = r.as_val(); - res.unwrap_or_else(|_| panic!()) + res.unwrap_or_else(|e| panic!("DecodeError: {}", e)) } /// Decode into an object From 9424d530627ba0d185bb505a737f4b4121c526b6 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sat, 12 Mar 2016 10:37:27 +0100 Subject: [PATCH 27/42] Update lib.rs --- miner/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/miner/src/lib.rs b/miner/src/lib.rs index 10da070a4..9c2ad9ba5 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -88,7 +88,7 @@ pub trait MinerService : Send + Sync { /// Removes all transactions from the queue and restart mining operation. fn clear_and_reset(&self, chain: &BlockChainClient); - /// called when blocks are imported to chain, updates transactions queue. + /// Called when blocks are imported to chain, updates transactions queue. fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], retracted: &[H256]); /// New chain head event. Restart mining operation. From 65dadcc2a2bfe9c5d382b2d2716fee857318cf68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Sat, 12 Mar 2016 10:44:24 +0100 Subject: [PATCH 28/42] Adding todos --- ethcore/src/client/client.rs | 5 ++--- ethcore/src/client/mod.rs | 2 ++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/ethcore/src/client/client.rs b/ethcore/src/client/client.rs index 4341e1898..1a88af951 100644 --- a/ethcore/src/client/client.rs +++ b/ethcore/src/client/client.rs @@ -350,16 +350,15 @@ impl Client where V: Verifier { } } - -// TODO: need MinerService MinerIoHandler - impl BlockChainClient for Client where V: Verifier { + // TODO [todr] Should be moved to miner crate eventually. fn try_seal(&self, block: ClosedBlock, seal: Vec) -> Result { block.try_seal(self.engine.deref().deref(), seal) } + // TODO [todr] Should be moved to miner crate eventually. fn prepare_sealing(&self, author: Address, extra_data: Bytes, transactions: Vec) -> Option { let engine = self.engine.deref().deref(); let h = self.chain.best_block_hash(); diff --git a/ethcore/src/client/mod.rs b/ethcore/src/client/mod.rs index d97f0d8b9..c13cfeee1 100644 --- a/ethcore/src/client/mod.rs +++ b/ethcore/src/client/mod.rs @@ -102,9 +102,11 @@ pub trait BlockChainClient : Sync + Send { /// Returns logs matching given filter. fn logs(&self, filter: Filter) -> Vec; + // TODO [todr] Should be moved to miner crate eventually. /// Returns ClosedBlock prepared for sealing. fn prepare_sealing(&self, author: Address, extra_data: Bytes, transactions: Vec) -> Option; + // TODO [todr] Should be moved to miner crate eventually. /// Attempts to seal given block. Returns `SealedBlock` on success and the same block in case of error. 
fn try_seal(&self, block: ClosedBlock, seal: Vec) -> Result; From e1c0177932efef0b75c58e2e9ce3058eeb2a40cd Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sat, 12 Mar 2016 10:44:48 +0100 Subject: [PATCH 29/42] Update main.rs --- parity/main.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/parity/main.rs b/parity/main.rs index 9e6b50e08..1350aca45 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -248,8 +248,14 @@ fn setup_rpc_server( #[cfg(not(feature = "rpc"))] fn setup_rpc_server( - _client: Arc, _sync: Arc, _secret_store: Arc, _miner: Arc, - _url: &str, _cors_domain: &str, _apis: Vec<&str>) -> Option> { + _client: Arc, + _sync: Arc, + _secret_store: Arc, + _miner: Arc, + _url: &str, + _cors_domain: &str, + _apis: Vec<&str> +) -> Option> { None } From 89986ec0e0d38ca3385cdc6cfeecaa6c2115ff61 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sat, 12 Mar 2016 19:19:16 +0100 Subject: [PATCH 30/42] Update main.rs [noci] --- parity/main.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/parity/main.rs b/parity/main.rs index 1350aca45..a22e4f763 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -224,8 +224,14 @@ fn setup_log(init: &Option) { #[cfg(feature = "rpc")] fn setup_rpc_server( - client: Arc, sync: Arc, secret_store: Arc, miner: Arc, - url: &str, cors_domain: &str, apis: Vec<&str>) -> Option> { + client: Arc, + sync: Arc, + secret_store: Arc, + miner: Arc, + url: &str, + cors_domain: &str, + apis: Vec<&str> +) -> Option> { use rpc::v1::*; let server = rpc::RpcServer::new(); From e85a2f3804dea8387181008ba86d1b3456bde555 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sat, 12 Mar 2016 19:22:38 +0100 Subject: [PATCH 31/42] Update main.rs [noci] --- parity/main.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/parity/main.rs b/parity/main.rs index a22e4f763..93fed03aa 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -506,7 +506,9 @@ impl Configuration { sync.clone(), account_service.clone(), miner.clone(), - &url, cors, apis.split(',').collect() + &url, + cors, + apis.split(',').collect() ); if let Some(handler) = server_handler { panic_handler.forward_from(handler.deref()); From e7574d451675592f10fa23d0cf8afeb2ba758345 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sat, 12 Mar 2016 19:29:18 +0100 Subject: [PATCH 32/42] Update lib.rs --- sync/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sync/src/lib.rs b/sync/src/lib.rs index bcf8fbcd1..0c7abd1d0 100644 --- a/sync/src/lib.rs +++ b/sync/src/lib.rs @@ -167,7 +167,7 @@ impl NetworkProtocolHandler for EthSync { #[allow(single_match)] fn message(&self, io: &NetworkContext, message: &SyncMessage) { match *message { - SyncMessage::NewChainBlocks { ref good, ref bad, ref retracted} => { + SyncMessage::NewChainBlocks { ref good, ref bad, ref retracted } => { let mut sync_io = NetSyncIo::new(io, self.chain.deref()); self.sync.write().unwrap().chain_new_blocks(&mut sync_io, good, bad, retracted); }, From 4a58e142bd2062ccc7491e2f04b4e907224f3283 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sun, 13 Mar 2016 15:02:08 +0100 Subject: [PATCH 33/42] Remove duplicate ciippys. 
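Context for the setup_rpc_server reshuffling in the main.rs patches just above: the binary keeps two cfg-gated definitions with the same signature, so the call site compiles whether or not the crate is built with the rpc feature. A minimal stand-alone sketch of that pattern follows; the function name, parameters and return type are simplified placeholders for this sketch, not the real parity signature.

#[cfg(feature = "rpc")]
fn setup_rpc_server(url: &str) -> Option<String> {
    // With the feature enabled, the real code would construct the JSON-RPC server here.
    Some(format!("rpc server listening on {}", url))
}

#[cfg(not(feature = "rpc"))]
fn setup_rpc_server(_url: &str) -> Option<String> {
    // Compiled out: the call site still type-checks, but nothing is started.
    None
}

fn main() {
    match setup_rpc_server("127.0.0.1:8545") {
        Some(status) => println!("{}", status),
        None => println!("built without the rpc feature"),
    }
}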
--- Cargo.toml | 3 +-- rpc/Cargo.toml | 3 +-- sync/Cargo.toml | 1 - 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d3dcad0f8..351041119 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,15 +19,14 @@ ctrlc = { git = "https://github.com/tomusdrw/rust-ctrlc.git" } fdlimit = { path = "util/fdlimit" } daemonize = "0.2" number_prefix = "0.2" -clippy = { version = "0.0.50", optional = true } rpassword = "0.1" +clippy = { version = "0.0.50", optional = true } ethcore = { path = "ethcore" } ethcore-util = { path = "util" } ethsync = { path = "sync" } ethminer = { path = "miner" } ethcore-devtools = { path = "devtools" } ethcore-rpc = { path = "rpc", optional = true } -clippy = { version = "0.0.49", optional = true } [features] default = ["rpc"] diff --git a/rpc/Cargo.toml b/rpc/Cargo.toml index 70cc2ec42..fa89041d8 100644 --- a/rpc/Cargo.toml +++ b/rpc/Cargo.toml @@ -18,12 +18,11 @@ ethcore-util = { path = "../util" } ethcore = { path = "../ethcore" } ethash = { path = "../ethash" } ethsync = { path = "../sync" } -clippy = { version = "0.0.50", optional = true } ethminer = { path = "../miner" } rustc-serialize = "0.3" transient-hashmap = "0.1" serde_macros = { version = "0.7.0", optional = true } -clippy = { version = "0.0.49", optional = true } +clippy = { version = "0.0.50", optional = true } [build-dependencies] serde_codegen = { version = "0.7.0", optional = true } diff --git a/sync/Cargo.toml b/sync/Cargo.toml index 2765b0680..8cd59333d 100644 --- a/sync/Cargo.toml +++ b/sync/Cargo.toml @@ -17,7 +17,6 @@ env_logger = "0.3" time = "0.1.34" rand = "0.3.13" heapsize = "0.3" -clippy = { version = "0.0.49", optional = true } [features] default = [] From 08b9cc2c41d3ae7a761984acc5f2a2956b18a1c1 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sun, 13 Mar 2016 15:29:55 +0100 Subject: [PATCH 34/42] Merge changes from #674 into branch. 
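The interesting piece of this merge is the new calculate_enacted_retracted in ethcore/src/client.rs: every ImportRoute is folded into one map so that a hash's final flag records whether the last route to touch it enacted or retracted it, and the map is then partitioned back into two hash lists. A rough stand-alone sketch of that bookkeeping follows, with u64 stand-ins for H256 and a trimmed-down ImportRoute; both are assumptions of the sketch, not the real ethcore types.

use std::collections::HashMap;

// Trimmed-down stand-in for ethcore's ImportRoute; only the two fields the fold needs.
struct ImportRoute {
    enacted: Vec<u64>,
    retracted: Vec<u64>,
}

// Fold every route into one map: a hash's final value records whether the last
// route that touched it enacted (true) or retracted (false) it.
fn calculate_enacted_retracted(routes: Vec<ImportRoute>) -> (Vec<u64>, Vec<u64>) {
    let map = routes.into_iter().fold(HashMap::new(), |mut map, route| {
        for hash in route.enacted {
            map.insert(hash, true);
        }
        for hash in route.retracted {
            map.insert(hash, false);
        }
        map
    });
    // Split on the recorded flag, then drop the flags and keep the keys.
    let (enacted, retracted): (Vec<_>, Vec<_>) = map.into_iter().partition(|&(_, v)| v);
    (
        enacted.into_iter().map(|(k, _)| k).collect(),
        retracted.into_iter().map(|(k, _)| k).collect(),
    )
}

fn main() {
    // Hash 1 is enacted by the first import but retracted by a later one,
    // so only its final state (retracted) survives the fold.
    let routes = vec![
        ImportRoute { enacted: vec![1, 2], retracted: vec![] },
        ImportRoute { enacted: vec![3], retracted: vec![1] },
    ];
    let (mut enacted, mut retracted) = calculate_enacted_retracted(routes);
    enacted.sort();
    retracted.sort();
    assert_eq!(enacted, vec![2, 3]);
    assert_eq!(retracted, vec![1]);
}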
--- Cargo.lock | 10 ++++++ ethcore/src/client/client.rs | 67 ++++++++++++++++++++++++++---------- ethcore/src/service.rs | 8 +++-- miner/src/lib.rs | 2 +- miner/src/miner.rs | 15 +++++--- parity/main.rs | 21 ++++++----- sync/src/chain.rs | 8 ++--- sync/src/lib.rs | 4 +-- sync/src/tests/helpers.rs | 2 +- 9 files changed, 93 insertions(+), 44 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8552f299f..d68c5c121 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -93,6 +93,16 @@ dependencies = [ "time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "clippy" +version = "0.0.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "regex-syntax 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", + "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "clippy" version = "0.0.50" diff --git a/ethcore/src/client/client.rs b/ethcore/src/client/client.rs index 520b069e1..8c3d73ebb 100644 --- a/ethcore/src/client/client.rs +++ b/ethcore/src/client/client.rs @@ -35,7 +35,7 @@ use extras::TransactionAddress; use filter::Filter; use log_entry::LocalizedLogEntry; use block_queue::{BlockQueue, BlockQueueInfo}; -use blockchain::{BlockChain, BlockProvider, TreeRoute}; +use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute}; use client::{BlockId, TransactionId, ClientConfig, BlockChainClient}; pub use blockchain::CacheSize as BlockChainCacheSize; @@ -222,12 +222,39 @@ impl Client where V: Verifier { Ok(closed_block) } + fn calculate_enacted_retracted(&self, import_results: Vec) -> (Vec, Vec) { + fn map_to_vec(map: Vec<(H256, bool)>) -> Vec { + map.into_iter().map(|(k, _v)| k).collect() + } + + // In ImportRoute we get all the blocks that have been enacted and retracted by single insert. + // Because we are doing multiple inserts some of the blocks that were enacted in import `k` + // could be retracted in import `k+1`. 
This is why to understand if after all inserts + // the block is enacted or retracted we iterate over all routes and at the end final state + // will be in the hashmap + let map = import_results.into_iter().fold(HashMap::new(), |mut map, route| { + for hash in route.enacted { + map.insert(hash, true); + } + for hash in route.retracted { + map.insert(hash, false); + } + map + }); + + // Split to enacted retracted (using hashmap value) + let (enacted, retracted) = map.into_iter().partition(|&(_k, v)| v); + // And convert tuples to keys + (map_to_vec(enacted), map_to_vec(retracted)) + } + /// This is triggered by a message coming from a block queue when the block is ready for insertion pub fn import_verified_blocks(&self, io: &IoChannel) -> usize { let max_blocks_to_import = 128; - let mut good_blocks = Vec::with_capacity(max_blocks_to_import); - let mut bad_blocks = HashSet::new(); + let mut imported_blocks = Vec::with_capacity(max_blocks_to_import); + let mut invalid_blocks = HashSet::new(); + let mut import_results = Vec::with_capacity(max_blocks_to_import); let _import_lock = self.import_lock.lock(); let blocks = self.block_queue.drain(max_blocks_to_import); @@ -237,16 +264,16 @@ impl Client where V: Verifier { for block in blocks { let header = &block.header; - if bad_blocks.contains(&header.parent_hash) { - bad_blocks.insert(header.hash()); + if invalid_blocks.contains(&header.parent_hash) { + invalid_blocks.insert(header.hash()); continue; } let closed_block = self.check_and_close_block(&block); if let Err(_) = closed_block { - bad_blocks.insert(header.hash()); + invalid_blocks.insert(header.hash()); break; } - good_blocks.push(header.hash()); + imported_blocks.push(header.hash()); // Are we committing an era? let ancient = if header.number() >= HISTORY { @@ -265,31 +292,33 @@ impl Client where V: Verifier { // And update the chain after commit to prevent race conditions // (when something is in chain but you are not able to fetch details) - self.chain.insert_block(&block.bytes, receipts); + let route = self.chain.insert_block(&block.bytes, receipts); + import_results.push(route); self.report.write().unwrap().accrue_block(&block); trace!(target: "client", "Imported #{} ({})", header.number(), header.hash()); } - let imported = good_blocks.len(); - let bad_blocks = bad_blocks.into_iter().collect::>(); + let imported = imported_blocks.len(); + let invalid_blocks = invalid_blocks.into_iter().collect::>(); { - if !bad_blocks.is_empty() { - self.block_queue.mark_as_bad(&bad_blocks); + if !invalid_blocks.is_empty() { + self.block_queue.mark_as_bad(&invalid_blocks); } - if !good_blocks.is_empty() { - self.block_queue.mark_as_good(&good_blocks); + if !imported_blocks.is_empty() { + self.block_queue.mark_as_good(&imported_blocks); } } { - if !good_blocks.is_empty() && self.block_queue.queue_info().is_empty() { + if !imported_blocks.is_empty() && self.block_queue.queue_info().is_empty() { + let (enacted, retracted) = self.calculate_enacted_retracted(import_results); io.send(NetworkIoMessage::User(SyncMessage::NewChainBlocks { - good: good_blocks, - bad: bad_blocks, - // TODO [todr] were to take those from? 
- retracted: vec![], + good: imported_blocks, + invalid: invalid_blocks, + enacted: enacted, + retracted: retracted, })).unwrap(); } } diff --git a/ethcore/src/service.rs b/ethcore/src/service.rs index 27303bea7..bcfe7724f 100644 --- a/ethcore/src/service.rs +++ b/ethcore/src/service.rs @@ -28,11 +28,13 @@ pub enum SyncMessage { /// New block has been imported into the blockchain NewChainBlocks { /// Hashes of blocks imported to blockchain - good: Vec, - /// Hashes of blocks not imported to blockchain - bad: Vec, + imported: Vec, + /// Hashes of blocks not imported to blockchain (because were invalid) + invalid: Vec, /// Hashes of blocks that were removed from canonical chain retracted: Vec, + /// Hashes of blocks that are now included in cannonical chain + enacted: Vec, }, /// Best Block Hash in chain has been changed NewChainHead, diff --git a/miner/src/lib.rs b/miner/src/lib.rs index 9c2ad9ba5..a431bd44e 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -89,7 +89,7 @@ pub trait MinerService : Send + Sync { fn clear_and_reset(&self, chain: &BlockChainClient); /// Called when blocks are imported to chain, updates transactions queue. - fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], retracted: &[H256]); + fn chain_new_blocks(&self, chain: &BlockChainClient, imported: &[H256], invalid: &[H256], enacted: &[H256], retracted: &[H256]); /// New chain head event. Restart mining operation. fn prepare_sealing(&self, chain: &BlockChainClient); diff --git a/miner/src/miner.rs b/miner/src/miner.rs index 00efb83d3..a07da7569 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -150,7 +150,7 @@ impl MinerService for Miner { } } - fn chain_new_blocks(&self, chain: &BlockChainClient, good: &[H256], bad: &[H256], _retracted: &[H256]) { + fn chain_new_blocks(&self, chain: &BlockChainClient, imported: &[H256], invalid: &[H256], enacted: &[H256], _retracted: &[H256]) { fn fetch_transactions(chain: &BlockChainClient, hash: &H256) -> Vec { let block = chain .block(BlockId::Hash(*hash)) @@ -161,15 +161,20 @@ impl MinerService for Miner { } { - let good = good.par_iter().map(|h| fetch_transactions(chain, h)); - let bad = bad.par_iter().map(|h| fetch_transactions(chain, h)); + let in_chain = vec![imported, enacted, invalid]; + let in_chain = in_chain + .par_iter() + .flat_map(|h| h.par_iter().map(|h| fetch_transactions(chain, h))); + .map(|h| fetch_transactions(chain, h)); + let out_of_chain = retracted + .par_iter() - good.for_each(|txs| { + in_chain.for_each(|txs| { let mut transaction_queue = self.transaction_queue.lock().unwrap(); let hashes = txs.iter().map(|tx| tx.hash()).collect::>(); transaction_queue.remove_all(&hashes, |a| chain.nonce(a)); }); - bad.for_each(|txs| { + out_of_chain.for_each(|txs| { // populate sender for tx in &txs { let _sender = tx.sender(); diff --git a/parity/main.rs b/parity/main.rs index eb1937757..56a7d48de 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -114,7 +114,7 @@ API and Console Options: --rpccorsdomain URL Equivalent to --jsonrpc-cors URL (geth-compatible). Sealing/Mining Options: - --gasprice GAS Minimal gas price a transaction must have to be accepted for mining [default: 20000000000]. + --gas-price WEI Minimum amount of Wei to be paid for a transaction to be accepted for mining [default: 20000000000]. --author ADDRESS Specify the block author (aka "coinbase") address for sending block rewards from sealed blocks [default: 0037a6b811ffeb6e072da21179d11b1406371c63]. 
--extra-data STRING Specify a custom extra-data for authored blocks, no more than 32 characters. @@ -138,11 +138,12 @@ Geth-Compatibility Options --maxpeers COUNT Equivalent to --peers COUNT. --nodekey KEY Equivalent to --node-key KEY. --nodiscover Equivalent to --no-discovery. + --gasprice WEI Equivalent to --gas-price WEI. --etherbase ADDRESS Equivalent to --author ADDRESS. --extradata STRING Equivalent to --extra-data STRING. Miscellaneous Options: - -l --logging LOGGING Specify the logging level. + -l --logging LOGGING Specify the logging level. Must conform to the same format as RUST_LOG. -v --version Show information about version. -h --help Show this screen. "#; @@ -175,18 +176,19 @@ struct Args { flag_jsonrpc_port: u16, flag_jsonrpc_cors: String, flag_jsonrpc_apis: String, + flag_author: String, + flag_gas_price: String, + flag_extra_data: Option, flag_logging: Option, flag_version: bool, // geth-compatibility... flag_nodekey: Option, flag_nodiscover: bool, flag_maxpeers: Option, - flag_gasprice: String, - flag_author: String, - flag_extra_data: Option, flag_datadir: Option, flag_extradata: Option, flag_etherbase: Option, + flag_gasprice: Option, flag_rpc: bool, flag_rpcaddr: Option, flag_rpcport: Option, @@ -301,9 +303,10 @@ impl Configuration { }) } - fn gasprice(&self) -> U256 { - U256::from_dec_str(self.args.flag_gasprice.as_str()).unwrap_or_else(|_| { - die!("{}: Invalid gas price given. Must be a decimal unsigned 256-bit number.", self.args.flag_gasprice) + fn gas_price(&self) -> U256 { + let d = self.args.flag_gasprice.as_ref().unwrap_or(&self.args.flag_gas_price); + U256::from_dec_str(d).unwrap_or_else(|_| { + die!("{}: Invalid gas price given. Must be a decimal unsigned 256-bit number.", d) }) } @@ -483,7 +486,7 @@ impl Configuration { let miner = Miner::new(); miner.set_author(self.author()); miner.set_extra_data(self.extra_data()); - miner.set_minimal_gas_price(self.gasprice()); + miner.set_minimal_gas_price(self.gas_price()); // Sync let sync = EthSync::register(service.network(), sync_config, client.clone(), miner.clone()); diff --git a/sync/src/chain.rs b/sync/src/chain.rs index e622f0b86..d06a34764 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -1263,9 +1263,9 @@ impl ChainSync { } /// called when block is imported to chain, updates transactions queue and propagates the blocks - pub fn chain_new_blocks(&mut self, io: &mut SyncIo, good: &[H256], bad: &[H256], retracted: &[H256]) { + pub fn chain_new_blocks(&mut self, io: &mut SyncIo, imported: &[H256], invalid: &[H256], enacted: &[H256], retracted: &[H256]) { // Notify miner - self.miner.chain_new_blocks(io.chain(), good, bad, retracted); + self.miner.chain_new_blocks(io.chain(), imported, invalid, enacted, retracted); // Propagate latests blocks self.propagate_latest_blocks(io); // TODO [todr] propagate transactions? 
@@ -1616,10 +1616,10 @@ mod tests { let mut io = TestIo::new(&mut client, &mut queue, None); // when - sync.chain_new_blocks(&mut io, &[], &good_blocks, &[]); + sync.chain_new_blocks(&mut io, &[], &good_blocks, &[], &[]); assert_eq!(sync.miner.status().transaction_queue_future, 0); assert_eq!(sync.miner.status().transaction_queue_pending, 1); - sync.chain_new_blocks(&mut io, &good_blocks, &retracted_blocks, &[]); + sync.chain_new_blocks(&mut io, &good_blocks, &[], &[], &retracted_blocks); // then let status = sync.miner.status(); diff --git a/sync/src/lib.rs b/sync/src/lib.rs index 0c7abd1d0..c47b74b66 100644 --- a/sync/src/lib.rs +++ b/sync/src/lib.rs @@ -167,9 +167,9 @@ impl NetworkProtocolHandler for EthSync { #[allow(single_match)] fn message(&self, io: &NetworkContext, message: &SyncMessage) { match *message { - SyncMessage::NewChainBlocks { ref good, ref bad, ref retracted } => { + SyncMessage::NewChainBlocks { ref imported, ref invalid, ref enacted, ref retracted } => { let mut sync_io = NetSyncIo::new(io, self.chain.deref()); - self.sync.write().unwrap().chain_new_blocks(&mut sync_io, good, bad, retracted); + self.sync.write().unwrap().chain_new_blocks(&mut sync_io, imported, invalid, enacted, retracted); }, SyncMessage::NewChainHead => { let mut sync_io = NetSyncIo::new(io, self.chain.deref()); diff --git a/sync/src/tests/helpers.rs b/sync/src/tests/helpers.rs index 00df35e77..b3e62ccc6 100644 --- a/sync/src/tests/helpers.rs +++ b/sync/src/tests/helpers.rs @@ -168,6 +168,6 @@ impl TestNet { pub fn trigger_chain_new_blocks(&mut self, peer_id: usize) { let mut peer = self.peer_mut(peer_id); - peer.sync.chain_new_blocks(&mut TestIo::new(&mut peer.chain, &mut peer.queue, None), &[], &[], &[]); + peer.sync.chain_new_blocks(&mut TestIo::new(&mut peer.chain, &mut peer.queue, None), &[], &[], &[], &[]); } } From 76696e3b49a2a8e84caa11a1d27c2f9ad5e78cb8 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sun, 13 Mar 2016 15:36:03 +0100 Subject: [PATCH 35/42] Minor build fixes. --- ethcore/src/client/client.rs | 2 +- ethcore/src/client/test_client.rs | 2 ++ miner/src/miner.rs | 4 ++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/ethcore/src/client/client.rs b/ethcore/src/client/client.rs index 8c3d73ebb..d748cc4ee 100644 --- a/ethcore/src/client/client.rs +++ b/ethcore/src/client/client.rs @@ -315,7 +315,7 @@ impl Client where V: Verifier { if !imported_blocks.is_empty() && self.block_queue.queue_info().is_empty() { let (enacted, retracted) = self.calculate_enacted_retracted(import_results); io.send(NetworkIoMessage::User(SyncMessage::NewChainBlocks { - good: imported_blocks, + imported: imported_blocks, invalid: invalid_blocks, enacted: enacted, retracted: retracted, diff --git a/ethcore/src/client/test_client.rs b/ethcore/src/client/test_client.rs index d801c08e0..a97228b09 100644 --- a/ethcore/src/client/test_client.rs +++ b/ethcore/src/client/test_client.rs @@ -83,10 +83,12 @@ impl TestBlockChainClient { client } + /// Set the balance of account `address` to `balance`. pub fn set_balance(&mut self, address: Address, balance: U256) { self.balances.write().unwrap().insert(address, balance); } + /// Set storage `position` to `value` for account `address`. 
pub fn set_storage(&mut self, address: Address, position: H256, value: H256) { self.storage.write().unwrap().insert((address, position), value); } diff --git a/miner/src/miner.rs b/miner/src/miner.rs index a07da7569..ad403150d 100644 --- a/miner/src/miner.rs +++ b/miner/src/miner.rs @@ -150,7 +150,7 @@ impl MinerService for Miner { } } - fn chain_new_blocks(&self, chain: &BlockChainClient, imported: &[H256], invalid: &[H256], enacted: &[H256], _retracted: &[H256]) { + fn chain_new_blocks(&self, chain: &BlockChainClient, imported: &[H256], invalid: &[H256], enacted: &[H256], retracted: &[H256]) { fn fetch_transactions(chain: &BlockChainClient, hash: &H256) -> Vec { let block = chain .block(BlockId::Hash(*hash)) @@ -165,9 +165,9 @@ impl MinerService for Miner { let in_chain = in_chain .par_iter() .flat_map(|h| h.par_iter().map(|h| fetch_transactions(chain, h))); - .map(|h| fetch_transactions(chain, h)); let out_of_chain = retracted .par_iter() + .map(|h| fetch_transactions(chain, h)); in_chain.for_each(|txs| { let mut transaction_queue = self.transaction_queue.lock().unwrap(); From 6cedb263aa5857ec480ab9ea86061fe90dac8c7d Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sun, 13 Mar 2016 17:01:50 +0100 Subject: [PATCH 36/42] Add missing file. --- rpc/src/v1/tests/helpers/miner_service.rs | 53 +++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 rpc/src/v1/tests/helpers/miner_service.rs diff --git a/rpc/src/v1/tests/helpers/miner_service.rs b/rpc/src/v1/tests/helpers/miner_service.rs new file mode 100644 index 000000000..0cddf2a1e --- /dev/null +++ b/rpc/src/v1/tests/helpers/miner_service.rs @@ -0,0 +1,53 @@ +// Copyright 2015, 2016 Ethcore (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +use util::{Address, H256, U256, Bytes}; +use util::standard::*; +use ethcore::error::Error; +use ethcore::client::BlockChainClient; +use ethcore::block::ClosedBlock; +use ethcore::transaction::SignedTransaction; +use ethminer::{MinerService, MinerStatus}; + +pub struct TestMinerService; + +impl MinerService for TestMinerService { + + /// Returns miner's status. + fn status(&self) -> MinerStatus { unimplemented!(); } + + /// Imports transactions to transaction queue. + fn import_transactions(&self, _transactions: Vec, _fetch_nonce: T) -> Result<(), Error> where T: Fn(&Address) -> U256 { unimplemented!(); } + + /// Returns hashes of transactions currently in pending + fn pending_transactions_hashes(&self) -> Vec { unimplemented!(); } + + /// Removes all transactions from the queue and restart mining operation. + fn clear_and_reset(&self, _chain: &BlockChainClient) { unimplemented!(); } + + /// Called when blocks are imported to chain, updates transactions queue. + fn chain_new_blocks(&self, _chain: &BlockChainClient, _imported: &[H256], _invalid: &[H256], _enacted: &[H256], _retracted: &[H256]) { unimplemented!(); } + + /// New chain head event. Restart mining operation. 
+ fn prepare_sealing(&self, _chain: &BlockChainClient) { unimplemented!(); } + + /// Grab the `ClosedBlock` that we want to be sealed. Comes as a mutex that you have to lock. + fn sealing_block(&self, _chain: &BlockChainClient) -> &Mutex> { unimplemented!(); } + + /// Submit `seal` as a valid solution for the header of `pow_hash`. + /// Will check the seal, but not actually insert the block into the chain. + fn submit_seal(&self, _chain: &BlockChainClient, _pow_hash: H256, _seal: Vec) -> Result<(), Error> { unimplemented!(); } +} \ No newline at end of file From 81291622eb9da914cfa1e6ba123245edfa48cd56 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sun, 13 Mar 2016 19:22:42 +0100 Subject: [PATCH 37/42] Avoid batches for now. --- parity/main.rs | 4 +-- util/src/journaldb/archivedb.rs | 8 +++-- util/src/journaldb/refcounteddb.rs | 2 +- util/src/overlaydb.rs | 50 +++++++++++++++++++++++++----- 4 files changed, 52 insertions(+), 12 deletions(-) diff --git a/parity/main.rs b/parity/main.rs index 2bfa75e8a..002655951 100644 --- a/parity/main.rs +++ b/parity/main.rs @@ -427,9 +427,9 @@ impl Configuration { } client_config.pruning = match self.args.flag_pruning.as_str() { "" | "archive" => journaldb::Algorithm::Archive, - "pruned" => journaldb::Algorithm::EarlyMerge, + "light" => journaldb::Algorithm::EarlyMerge, "fast" => journaldb::Algorithm::OverlayRecent, - "slow" => journaldb::Algorithm::RefCounted, + "basic" => journaldb::Algorithm::RefCounted, _ => { die!("Invalid pruning method given."); } }; client_config.name = self.args.flag_identity.clone(); diff --git a/util/src/journaldb/archivedb.rs b/util/src/journaldb/archivedb.rs index 19570b281..83a80b7c2 100644 --- a/util/src/journaldb/archivedb.rs +++ b/util/src/journaldb/archivedb.rs @@ -132,7 +132,7 @@ impl JournalDB for ArchiveDB { Box::new(ArchiveDB { overlay: MemoryDB::new(), backing: self.backing.clone(), - latest_era: None, + latest_era: self.latest_era, }) } @@ -144,7 +144,7 @@ impl JournalDB for ArchiveDB { self.latest_era.is_none() } - fn commit(&mut self, _: u64, _: &H256, _: Option<(u64, H256)>) -> Result { + fn commit(&mut self, now: u64, _: &H256, _: Option<(u64, H256)>) -> Result { let batch = DBTransaction::new(); let mut inserts = 0usize; let mut deletes = 0usize; @@ -160,6 +160,10 @@ impl JournalDB for ArchiveDB { deletes += 1; } } + if self.latest_era.map_or(true, |e| now > e) { + try!(batch.put(&LATEST_ERA_KEY, &encode(&now))); + self.latest_era = Some(now); + } try!(self.backing.write(batch)); Ok((inserts + deletes) as u32) } diff --git a/util/src/journaldb/refcounteddb.rs b/util/src/journaldb/refcounteddb.rs index 09362676b..85f40e048 100644 --- a/util/src/journaldb/refcounteddb.rs +++ b/util/src/journaldb/refcounteddb.rs @@ -176,7 +176,7 @@ impl JournalDB for RefCountedDB { } } - let r = try!(self.forward.commit_to_batch(&batch)); + let r = try!(self.forward.commit()); try!(self.backing.write(batch)); Ok(r) } diff --git a/util/src/overlaydb.rs b/util/src/overlaydb.rs index 8166dd318..d176d38f6 100644 --- a/util/src/overlaydb.rs +++ b/util/src/overlaydb.rs @@ -72,13 +72,13 @@ impl OverlayDB { if total_rc < 0 { return Err(From::from(BaseDataError::NegativelyReferencedHash)); } - deletes += if self.put_payload(batch, &key, (back_value, total_rc as u32)) {1} else {0}; + deletes += if self.put_payload_in_batch(batch, &key, (back_value, total_rc as u32)) {1} else {0}; } None => { if rc < 0 { return Err(From::from(BaseDataError::NegativelyReferencedHash)); } - self.put_payload(batch, &key, (value, rc as u32)); + 
self.put_payload_in_batch(batch, &key, (value, rc as u32)); } }; ret += 1; @@ -116,10 +116,32 @@ impl OverlayDB { /// } /// ``` pub fn commit(&mut self) -> Result { - let batch = DBTransaction::new(); - let r = try!(self.commit_to_batch(&batch)); - try!(self.backing.write(batch)); - Ok(r) + let mut ret = 0u32; + let mut deletes = 0usize; + for i in self.overlay.drain().into_iter() { + let (key, (value, rc)) = i; + if rc != 0 { + match self.payload(&key) { + Some(x) => { + let (back_value, back_rc) = x; + let total_rc: i32 = back_rc as i32 + rc; + if total_rc < 0 { + return Err(From::from(BaseDataError::NegativelyReferencedHash)); + } + deletes += if self.put_payload(&key, (back_value, total_rc as u32)) {1} else {0}; + } + None => { + if rc < 0 { + return Err(From::from(BaseDataError::NegativelyReferencedHash)); + } + self.put_payload(&key, (value, rc as u32)); + } + }; + ret += 1; + } + } + trace!("OverlayDB::commit() deleted {} nodes", deletes); + Ok(ret) } /// Revert all operations on this object (i.e. `insert()`s and `kill()`s) since the @@ -156,7 +178,7 @@ impl OverlayDB { } /// Put the refs and value of the given key, possibly deleting it from the db. - fn put_payload(&self, batch: &DBTransaction, key: &H256, payload: (Bytes, u32)) -> bool { + fn put_payload_in_batch(&self, batch: &DBTransaction, key: &H256, payload: (Bytes, u32)) -> bool { if payload.1 > 0 { let mut s = RlpStream::new_list(2); s.append(&payload.1); @@ -168,6 +190,20 @@ impl OverlayDB { true } } + + /// Put the refs and value of the given key, possibly deleting it from the db. + fn put_payload(&self, key: &H256, payload: (Bytes, u32)) -> bool { + if payload.1 > 0 { + let mut s = RlpStream::new_list(2); + s.append(&payload.1); + s.append(&payload.0); + self.backing.put(&key.bytes(), s.as_raw()).expect("Low-level database error. Some issue with your hard disk?"); + false + } else { + self.backing.delete(&key.bytes()).expect("Low-level database error. 
Some issue with your hard disk?"); + true + } + } } impl HashDB for OverlayDB { From e2e067cdd07f5d2a5dd18d79d7d36d8b4a93e122 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Sun, 13 Mar 2016 20:44:25 +0100 Subject: [PATCH 38/42] Bumping clippy --- miner/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/miner/Cargo.toml b/miner/Cargo.toml index 5d265e9a4..b450ece73 100644 --- a/miner/Cargo.toml +++ b/miner/Cargo.toml @@ -17,7 +17,7 @@ log = "0.3" env_logger = "0.3" rustc-serialize = "0.3" rayon = "0.3.1" -clippy = { version = "0.0.49", optional = true } +clippy = { version = "0.0.50", optional = true } [features] default = [] From 4cf18c728d972524089b1e6190a47cb22445c542 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Sun, 13 Mar 2016 20:53:47 +0100 Subject: [PATCH 39/42] Fixing sync invalid sync test --- Cargo.lock | 12 +----------- sync/src/chain.rs | 2 +- sync/src/lib.rs | 1 - 3 files changed, 2 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d68c5c121..8bf57cb6a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -93,16 +93,6 @@ dependencies = [ "time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "clippy" -version = "0.0.49" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "regex-syntax 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", - "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "clippy" version = "0.0.50" @@ -300,7 +290,7 @@ dependencies = [ name = "ethminer" version = "0.9.99" dependencies = [ - "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.50 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 0.9.99", "ethcore-util 0.9.99", diff --git a/sync/src/chain.rs b/sync/src/chain.rs index d06a34764..6045a23d9 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -1616,7 +1616,7 @@ mod tests { let mut io = TestIo::new(&mut client, &mut queue, None); // when - sync.chain_new_blocks(&mut io, &[], &good_blocks, &[], &[]); + sync.chain_new_blocks(&mut io, &[], &[], &[], &good_blocks); assert_eq!(sync.miner.status().transaction_queue_future, 0); assert_eq!(sync.miner.status().transaction_queue_pending, 1); sync.chain_new_blocks(&mut io, &good_blocks, &[], &[], &retracted_blocks); diff --git a/sync/src/lib.rs b/sync/src/lib.rs index c47b74b66..1c87da2de 100644 --- a/sync/src/lib.rs +++ b/sync/src/lib.rs @@ -164,7 +164,6 @@ impl NetworkProtocolHandler for EthSync { self.sync.write().unwrap().maintain_sync(&mut NetSyncIo::new(io, self.chain.deref())); } - #[allow(single_match)] fn message(&self, io: &NetworkContext, message: &SyncMessage) { match *message { SyncMessage::NewChainBlocks { ref imported, ref invalid, ref enacted, ref retracted } => { From 1be92ea8efafd5e4751ba6f2404073a8b72ea0b0 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sun, 13 Mar 2016 21:21:30 +0100 Subject: [PATCH 40/42] Fixes and traces for refcountdb. 
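Beyond the added trace!/warn! calls, this patch reworks the journal records RefCountedDB writes: each commit is stored under an (era, index, PADDING) key as (id, inserts, removes), and when an era becomes ancient the canonical record has its removes applied while every non-canonical sibling has its inserts reverted. A rough, self-contained sketch of that journalling logic follows, using a plain HashMap and u64 stand-ins for H256; all names and types here are illustrative only, not the crate's API.

use std::collections::HashMap;

// Hypothetical, much-simplified model of the ref-counted journal: u64 values stand in
// for H256 hashes and the backing store is just a refcount map.
struct JournalRecord {
    id: u64,
    inserts: Vec<u64>,
    removes: Vec<u64>,
}

#[derive(Default)]
struct RefCountedJournal {
    // (era, index) keys mirror the (era, index, PADDING) RLP keys written by the patch.
    journal: HashMap<(u64, usize), JournalRecord>,
    refs: HashMap<u64, i32>,
}

impl RefCountedJournal {
    // Record a commit for `era` under the next free index of that era.
    fn commit(&mut self, era: u64, id: u64, inserts: Vec<u64>, removes: Vec<u64>) {
        for k in &inserts {
            *self.refs.entry(*k).or_insert(0) += 1;
        }
        let index = (0usize..).find(|i| !self.journal.contains_key(&(era, *i))).unwrap();
        self.journal.insert((era, index), JournalRecord { id, inserts, removes });
    }

    // Once `end_era` is ancient and `canon_id` won, apply the canonical record's removes
    // and revert every non-canonical sibling's inserts.
    fn mark_canonical(&mut self, end_era: u64, canon_id: u64) {
        let keys: Vec<_> = self.journal.keys().filter(|&&(e, _)| e == end_era).cloned().collect();
        for key in keys {
            let record = self.journal.remove(&key).unwrap();
            let to_remove = if record.id == canon_id { record.removes } else { record.inserts };
            for k in to_remove {
                *self.refs.entry(k).or_insert(0) -= 1;
            }
        }
    }
}

fn main() {
    let mut db = RefCountedJournal::default();
    db.commit(1, 0xa, vec![10, 11], vec![]); // canonical candidate
    db.commit(1, 0xb, vec![12], vec![]);     // non-canonical sibling
    db.mark_canonical(1, 0xa);
    assert_eq!(db.refs[&10], 1);
    assert_eq!(db.refs[&12], 0); // the sibling's insert was reverted
    println!("refs after canonicalisation: {:?}", db.refs);
}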
--- util/src/journaldb/refcounteddb.rs | 34 ++++++++++++++++++------------ util/src/overlaydb.rs | 7 ++++++ 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/util/src/journaldb/refcounteddb.rs b/util/src/journaldb/refcounteddb.rs index 85f40e048..71833533f 100644 --- a/util/src/journaldb/refcounteddb.rs +++ b/util/src/journaldb/refcounteddb.rs @@ -43,6 +43,7 @@ pub struct RefCountedDB { const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ]; const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ]; const DB_VERSION : u32 = 512; +const PADDING : [u8; 10] = [ 0u8; 10 ]; impl RefCountedDB { /// Create a new instance given a `backing` database. @@ -125,15 +126,15 @@ impl JournalDB for RefCountedDB { // of its inserts otherwise. // record new commit's details. - let batch = DBTransaction::new(); { let mut index = 0usize; let mut last; while try!(self.backing.get({ - let mut r = RlpStream::new_list(2); + let mut r = RlpStream::new_list(3); r.append(&now); r.append(&index); + r.append(&&PADDING[..]); last = r.drain(); &last })).is_some() { @@ -145,11 +146,14 @@ impl JournalDB for RefCountedDB { r.append(&self.inserts); r.append(&self.removes); try!(self.backing.put(&last, r.as_raw())); + + trace!(target: "rcdb", "new journal for time #{}.{} => {}: inserts={:?}, removes={:?}", now, index, id, self.inserts, self.removes); + self.inserts.clear(); self.removes.clear(); if self.latest_era.map_or(true, |e| now > e) { - try!(batch.put(&LATEST_ERA_KEY, &encode(&now))); + try!(self.backing.put(&LATEST_ERA_KEY, &encode(&now))); self.latest_era = Some(now); } } @@ -158,26 +162,30 @@ impl JournalDB for RefCountedDB { if let Some((end_era, canon_id)) = end { let mut index = 0usize; let mut last; - while let Some(rlp_data) = try!(self.backing.get({ - let mut r = RlpStream::new_list(2); - r.append(&end_era); - r.append(&index); - last = r.drain(); - &last - })) { + while let Some(rlp_data) = { +// trace!(target: "rcdb", "checking for journal #{}.{}", end_era, index); + try!(self.backing.get({ + let mut r = RlpStream::new_list(3); + r.append(&end_era); + r.append(&index); + r.append(&&PADDING[..]); + last = r.drain(); + &last + })) + } { let rlp = Rlp::new(&rlp_data); - let to_remove: Vec = rlp.val_at(if canon_id == rlp.val_at(0) {2} else {1}); + let our_id: H256 = rlp.val_at(0); + let to_remove: Vec = rlp.val_at(if canon_id == our_id {2} else {1}); + trace!(target: "rcdb", "delete journal for time #{}.{}=>{}, (canon was {}): deleting {:?}", end_era, index, our_id, canon_id, to_remove); for i in &to_remove { self.forward.remove(i); } try!(self.backing.delete(&last)); - trace!("RefCountedDB: delete journal for time #{}.{}, (canon was {}): {} entries", end_era, index, canon_id, to_remove.len()); index += 1; } } let r = try!(self.forward.commit()); - try!(self.backing.write(batch)); Ok(r) } } diff --git a/util/src/overlaydb.rs b/util/src/overlaydb.rs index d176d38f6..5704950ed 100644 --- a/util/src/overlaydb.rs +++ b/util/src/overlaydb.rs @@ -70,12 +70,14 @@ impl OverlayDB { let (back_value, back_rc) = x; let total_rc: i32 = back_rc as i32 + rc; if total_rc < 0 { + warn!("NEGATIVELY REFERENCED HASH {:?}", key); return Err(From::from(BaseDataError::NegativelyReferencedHash)); } deletes += if self.put_payload_in_batch(batch, &key, (back_value, total_rc as u32)) {1} else {0}; } None => { if rc < 0 { + warn!("NEGATIVELY REFERENCED HASH {:?}", key); return Err(From::from(BaseDataError::NegativelyReferencedHash)); } self.put_payload_in_batch(batch, 
&key, (value, rc as u32)); @@ -126,12 +128,14 @@ impl OverlayDB { let (back_value, back_rc) = x; let total_rc: i32 = back_rc as i32 + rc; if total_rc < 0 { + warn!("NEGATIVELY REFERENCED HASH {:?}", key); return Err(From::from(BaseDataError::NegativelyReferencedHash)); } deletes += if self.put_payload(&key, (back_value, total_rc as u32)) {1} else {0}; } None => { if rc < 0 { + warn!("NEGATIVELY REFERENCED HASH {:?}", key); return Err(From::from(BaseDataError::NegativelyReferencedHash)); } self.put_payload(&key, (value, rc as u32)); @@ -167,6 +171,9 @@ impl OverlayDB { /// ``` pub fn revert(&mut self) { self.overlay.clear(); } + /// Get the number of references that would be committed. + pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(&key).map_or(0, |&(_, refs)| refs) } + /// Get the refs and value of the given key. fn payload(&self, key: &H256) -> Option<(Bytes, u32)> { self.backing.get(&key.bytes()) From 420f473f90d3908d7a3f3cd0c245b797ae64c9f1 Mon Sep 17 00:00:00 2001 From: arkpar Date: Sun, 13 Mar 2016 21:28:57 +0100 Subject: [PATCH 41/42] Check for NULL_RLP in AccountDB --- ethcore/src/account_db.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ethcore/src/account_db.rs b/ethcore/src/account_db.rs index 026e813f5..f95ec53a1 100644 --- a/ethcore/src/account_db.rs +++ b/ethcore/src/account_db.rs @@ -97,6 +97,9 @@ impl<'db> HashDB for AccountDBMut<'db>{ } fn insert(&mut self, value: &[u8]) -> H256 { + if value == &NULL_RLP { + return SHA3_NULL_RLP.clone(); + } let k = value.sha3(); let ak = combine_key(&self.address, &k); self.db.emplace(ak, value.to_vec()); @@ -104,11 +107,17 @@ impl<'db> HashDB for AccountDBMut<'db>{ } fn emplace(&mut self, key: H256, value: Bytes) { + if key == SHA3_NULL_RLP { + return; + } let key = combine_key(&self.address, &key); self.db.emplace(key, value.to_vec()) } fn kill(&mut self, key: &H256) { + if key == &SHA3_NULL_RLP { + return; + } let key = combine_key(&self.address, key); self.db.kill(&key) } From 26f41b711c30dc4a0cd3264cf38dd47f7b6aefc7 Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Sun, 13 Mar 2016 21:54:06 +0100 Subject: [PATCH 42/42] Bring back batching. --- util/src/error.rs | 3 ++- util/src/journaldb/refcounteddb.rs | 10 ++++++---- util/src/overlaydb.rs | 12 ++++-------- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/util/src/error.rs b/util/src/error.rs index 68aa3e648..409cc0e5d 100644 --- a/util/src/error.rs +++ b/util/src/error.rs @@ -21,12 +21,13 @@ use network::NetworkError; use rlp::DecoderError; use io; use std::fmt; +use hash::H256; #[derive(Debug)] /// Error in database subsystem. pub enum BaseDataError { /// An entry was removed more times than inserted. - NegativelyReferencedHash, + NegativelyReferencedHash(H256), } #[derive(Debug)] diff --git a/util/src/journaldb/refcounteddb.rs b/util/src/journaldb/refcounteddb.rs index 71833533f..590964247 100644 --- a/util/src/journaldb/refcounteddb.rs +++ b/util/src/journaldb/refcounteddb.rs @@ -126,6 +126,7 @@ impl JournalDB for RefCountedDB { // of its inserts otherwise. // record new commit's details. 
+ let batch = DBTransaction::new(); { let mut index = 0usize; let mut last; @@ -145,7 +146,7 @@ impl JournalDB for RefCountedDB { r.append(id); r.append(&self.inserts); r.append(&self.removes); - try!(self.backing.put(&last, r.as_raw())); + try!(batch.put(&last, r.as_raw())); trace!(target: "rcdb", "new journal for time #{}.{} => {}: inserts={:?}, removes={:?}", now, index, id, self.inserts, self.removes); @@ -153,7 +154,7 @@ impl JournalDB for RefCountedDB { self.removes.clear(); if self.latest_era.map_or(true, |e| now > e) { - try!(self.backing.put(&LATEST_ERA_KEY, &encode(&now))); + try!(batch.put(&LATEST_ERA_KEY, &encode(&now))); self.latest_era = Some(now); } } @@ -180,12 +181,13 @@ impl JournalDB for RefCountedDB { for i in &to_remove { self.forward.remove(i); } - try!(self.backing.delete(&last)); + try!(batch.delete(&last)); index += 1; } } - let r = try!(self.forward.commit()); + let r = try!(self.forward.commit_to_batch(&batch)); + try!(self.backing.write(batch)); Ok(r) } } diff --git a/util/src/overlaydb.rs b/util/src/overlaydb.rs index 5704950ed..b5dec75e2 100644 --- a/util/src/overlaydb.rs +++ b/util/src/overlaydb.rs @@ -70,15 +70,13 @@ impl OverlayDB { let (back_value, back_rc) = x; let total_rc: i32 = back_rc as i32 + rc; if total_rc < 0 { - warn!("NEGATIVELY REFERENCED HASH {:?}", key); - return Err(From::from(BaseDataError::NegativelyReferencedHash)); + return Err(From::from(BaseDataError::NegativelyReferencedHash(key))); } deletes += if self.put_payload_in_batch(batch, &key, (back_value, total_rc as u32)) {1} else {0}; } None => { if rc < 0 { - warn!("NEGATIVELY REFERENCED HASH {:?}", key); - return Err(From::from(BaseDataError::NegativelyReferencedHash)); + return Err(From::from(BaseDataError::NegativelyReferencedHash(key))); } self.put_payload_in_batch(batch, &key, (value, rc as u32)); } @@ -128,15 +126,13 @@ impl OverlayDB { let (back_value, back_rc) = x; let total_rc: i32 = back_rc as i32 + rc; if total_rc < 0 { - warn!("NEGATIVELY REFERENCED HASH {:?}", key); - return Err(From::from(BaseDataError::NegativelyReferencedHash)); + return Err(From::from(BaseDataError::NegativelyReferencedHash(key))); } deletes += if self.put_payload(&key, (back_value, total_rc as u32)) {1} else {0}; } None => { if rc < 0 { - warn!("NEGATIVELY REFERENCED HASH {:?}", key); - return Err(From::from(BaseDataError::NegativelyReferencedHash)); + return Err(From::from(BaseDataError::NegativelyReferencedHash(key))); } self.put_payload(&key, (value, rc as u32)); }
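// Stand-alone sketch of the refcount check now that the error carries the
// offending key. DataError and apply_rc are hypothetical stand-ins for
// BaseDataError::NegativelyReferencedHash(H256) in util/src/error.rs above and
// for the total_rc arithmetic in OverlayDB::commit/commit_to_batch; the warn!
// lines dropped by this patch are no longer needed because the caller gets the
// key back in the Err value.
#[derive(Debug)]
enum DataError<H> {
    NegativelyReferencedHash(H),
}

fn apply_rc<H>(key: H, back_rc: u32, delta: i32) -> Result<u32, DataError<H>> {
    // Combine the persisted reference count with the overlay delta, exactly as
    // back_rc as i32 + rc is computed in the hunks above.
    let total = back_rc as i32 + delta;
    if total < 0 {
        return Err(DataError::NegativelyReferencedHash(key));
    }
    Ok(total as u32)
}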