Bumping clippy. Fixing warnings (#1139)

* Bumping clippy. Fixing warnings

* Removing unused import

* Fixing complexity and arguments warnings on two functions
Author: Tomasz Drwięga, 2016-05-25 17:03:58 +02:00 (committed by Gav Wood)
parent a0bc1f9dae
commit fa6b35ec8d
17 changed files with 60 additions and 55 deletions

Cargo.lock (generated)

@@ -3,7 +3,7 @@ name = "parity"
version = "1.2.0"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.67 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -125,7 +125,7 @@ dependencies = [
[[package]]
name = "clippy"
version = "0.0.67"
version = "0.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -236,7 +236,7 @@ name = "ethcore"
version = "1.2.0"
dependencies = [
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.67 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.2.0",
@@ -297,7 +297,7 @@ dependencies = [
name = "ethcore-rpc"
version = "1.2.0"
dependencies = [
"clippy 0.0.67 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.2.0",
"ethcore 1.2.0",
"ethcore-util 1.2.0",
@@ -322,7 +322,7 @@ dependencies = [
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 0.1.0",
"chrono 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.67 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -355,7 +355,7 @@ dependencies = [
name = "ethcore-webapp"
version = "1.2.0"
dependencies = [
"clippy 0.0.67 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-rpc 1.2.0",
"ethcore-util 1.2.0",
"hyper 0.9.3 (git+https://github.com/ethcore/hyper)",
@@ -392,7 +392,7 @@ dependencies = [
name = "ethminer"
version = "1.2.0"
dependencies = [
"clippy 0.0.67 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.2.0",
"ethcore-util 1.2.0",
@@ -406,7 +406,7 @@ dependencies = [
name = "ethsync"
version = "1.2.0"
dependencies = [
"clippy 0.0.67 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.2.0",
"ethcore-util 1.2.0",


@@ -23,7 +23,7 @@ daemonize = "0.2"
num_cpus = "0.2"
number_prefix = "0.2"
rpassword = "0.2.1"
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
ethcore = { path = "ethcore" }
ethcore-util = { path = "util" }
ethsync = { path = "sync" }
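
For context, making clippy an optional dependency like this is what lets the lint attributes later in this diff be feature-gated. A minimal sketch of how the plugin is typically enabled in the crate root, assuming the `dev` feature name this repository uses elsewhere:

    // Sketch: only compile the clippy plugin when the optional dependency is
    // enabled through a feature such as `dev` (assumed name; the [features]
    // table itself is outside this hunk).
    #![cfg_attr(feature = "dev", feature(plugin))]
    #![cfg_attr(feature = "dev", plugin(clippy))]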


@@ -22,7 +22,7 @@ ethcore-util = { path = "../util" }
evmjit = { path = "../evmjit", optional = true }
ethash = { path = "../ethash" }
num_cpus = "0.2"
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
crossbeam = "0.2.9"
lazy_static = "0.1"
ethcore-devtools = { path = "../devtools" }


@@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! TraceDB errors.
//! `TraceDB` errors.
use std::fmt::{Display, Formatter, Error as FmtError};
@@ -26,7 +26,7 @@ To do this, remove or move away your current database and restart parity. e.g.:
> mv ~/.parity/906a34e69aec8c0d /tmp
> parity";
/// TraceDB errors.
/// `TraceDB` errors.
#[derive(Debug)]
pub enum Error {
/// Returned when tracing is enabled,


@@ -320,7 +320,6 @@ impl<'a> Hashable for HeaderView<'a> {
#[cfg(test)]
mod tests {
use rustc_serialize::hex::FromHex;
use util::rlp::View;
use super::BlockView;
#[test]


@@ -277,18 +277,18 @@ fn binary_expr_struct(
match raw_ident.as_ref() {
"u8" => {
map_stmts.push(quote_stmt!(cx, total = total + 1;).unwrap());
map_stmts.push(quote_stmt!(cx, total += 1;).unwrap());
},
"[u8]" => {
map_stmts.push(quote_stmt!(cx, let size = length_stack.pop_front().unwrap();).unwrap());
map_stmts.push(quote_stmt!(cx, total = total + size;).unwrap());
map_stmts.push(quote_stmt!(cx, total += size;).unwrap());
},
_ => {
map_stmts.push(quote_stmt!(cx, let size = match $field_type_ident_qualified::len_params() {
0 => mem::size_of::<$field_type_ident>(),
_ => length_stack.pop_front().unwrap(),
}).unwrap());
map_stmts.push(quote_stmt!(cx, total = total + size;).unwrap());
map_stmts.push(quote_stmt!(cx, total += size;).unwrap());
}
}
};
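
The change here fixes clippy's `assign_op_pattern` lint inside the generated statements: `total = total + x` becomes `total += x`. A standalone illustration of the same lint, not the generated code itself:

    // assign_op_pattern: clippy prefers compound assignment over `x = x + y`.
    fn total_len(sizes: &[usize]) -> usize {
        let mut total = 0;
        for &size in sizes {
            // was: total = total + size;
            total += size;
        }
        total
    }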


@@ -10,7 +10,7 @@ rustc-serialize = "0.3"
serde = "0.7.0"
serde_json = "0.7.0"
serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
[build-dependencies]
serde_codegen = { version = "0.7.0", optional = true }


@@ -17,7 +17,7 @@ log = "0.3"
env_logger = "0.3"
rustc-serialize = "0.3"
rayon = "0.3.1"
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
[features]
default = []


@@ -103,6 +103,7 @@ impl Miner {
/// Prepares new block for sealing including top transactions from queue.
#[cfg_attr(feature="dev", allow(match_same_arms))]
#[cfg_attr(feature="dev", allow(cyclomatic_complexity))]
fn prepare_sealing(&self, chain: &BlockChainClient) {
trace!(target: "miner", "prepare_sealing: entering");
let transactions = self.transaction_queue.lock().unwrap().top_transactions();
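
This is one of the two functions mentioned in the commit message: the new attribute silences clippy's `cyclomatic_complexity` lint for `prepare_sealing` rather than splitting the function up. A minimal sketch of the pattern, assuming `dev` is the feature that pulls in the clippy plugin, so plain rustc never sees an unknown lint name:

    // Feature-gated lint suppression (illustrative function, not the Miner code).
    #[cfg_attr(feature = "dev", allow(cyclomatic_complexity))]
    fn prepare_work(kind: u32) -> u32 {
        // imagine enough branching here to exceed clippy's complexity threshold
        match kind {
            0 => 0,
            1 => 1,
            _ => kind * 2,
        }
    }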


@@ -318,10 +318,13 @@ impl Configuration {
self.args.flag_datadir.as_ref().unwrap_or(&self.args.flag_db_path));
::std::fs::create_dir_all(&db_path).unwrap_or_else(|e| die_with_io_error("main", e));
let keys_path = Configuration::replace_home(match self.args.flag_testnet {
true => "$HOME/.parity/testnet_keys",
false => &self.args.flag_keys_path,
});
let keys_path = Configuration::replace_home(
if self.args.flag_testnet {
"$HOME/.parity/testnet_keys"
} else {
&self.args.flag_keys_path
}
);
::std::fs::create_dir_all(&db_path).unwrap_or_else(|e| die_with_io_error("main", e));
Directories {
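
The boolean `match` is rewritten as `if`/`else`, which is what clippy's `match_bool` lint asks for. The same shape in isolation, with hypothetical names:

    // match_bool: matching on a bool reads better as if/else.
    fn keys_dir(testnet: bool, custom: &str) -> &str {
        if testnet {
            "$HOME/.parity/testnet_keys"
        } else {
            custom
        }
    }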


@@ -22,7 +22,7 @@ ethminer = { path = "../miner" }
rustc-serialize = "0.3"
transient-hashmap = "0.1"
serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
[build-dependencies]


@@ -10,7 +10,7 @@ authors = ["Ethcore <admin@ethcore.io>"]
[dependencies]
ethcore-util = { path = "../util" }
ethcore = { path = "../ethcore" }
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
ethminer = { path = "../miner" }
log = "0.3"
env_logger = "0.3"


@@ -35,6 +35,7 @@ struct HeaderId {
/// A collection of blocks and subchain pointers being downloaded. This keeps track of
/// which headers/bodies need to be downloaded, which are being downloaded and also holds
/// the downloaded blocks.
#[derive(Default)]
pub struct BlockCollection {
/// Heads of subchains to download
heads: Vec<H256>,
@@ -130,7 +131,7 @@ impl BlockCollection {
let mut download = None;
{
for h in &self.heads {
if ignore_downloading || !self.downloading_headers.contains(&h) {
if ignore_downloading || !self.downloading_headers.contains(h) {
self.downloading_headers.insert(h.clone());
download = Some(h.clone());
break;
@@ -178,7 +179,7 @@ impl BlockCollection {
for block in blocks.drain(..) {
let mut block_rlp = RlpStream::new_list(3);
block_rlp.append_raw(&block.header, 1);
let body = Rlp::new(&block.body.as_ref().unwrap()); // incomplete blocks are filtered out in the loop above
let body = Rlp::new(block.body.as_ref().unwrap()); // incomplete blocks are filtered out in the loop above
block_rlp.append_raw(body.at(0).as_raw(), 1);
block_rlp.append_raw(body.at(1).as_raw(), 1);
drained.push(block_rlp.out());
@@ -194,8 +195,7 @@ impl BlockCollection {
/// Check if the collection is empty. We consider the syncing round complete once
/// there is no block data left and only a single head pointer, or none at all, remains.
pub fn is_empty(&self) -> bool {
return self.heads.len() == 0 ||
(self.heads.len() == 1 && self.head.map_or(false, |h| h == self.heads[0]))
self.heads.len() == 0 || (self.heads.len() == 1 && self.head.map_or(false, |h| h == self.heads[0]))
}
/// Check if the collection contains a block header.
@@ -281,7 +281,7 @@ impl BlockCollection {
// update subchain headers
fn update_heads(&mut self) {
let mut new_heads = Vec::new();
let old_subchains: HashSet<_> = { self.heads.iter().map(Clone::clone).collect() };
let old_subchains: HashSet<_> = { self.heads.iter().cloned().collect() };
for s in self.heads.drain(..) {
let mut h = s.clone();
loop {
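
Two of the clippy fixes in this hunk are `needless_borrow` (dropping the extra `&` in `contains(&h)`) and replacing `.map(Clone::clone)` with `.cloned()`. A self-contained sketch of both, not the actual `BlockCollection` code:

    use std::collections::HashSet;

    // needless_borrow: `h` is already a reference, so `contains(h)` is enough.
    fn first_not_downloading(heads: &[String], downloading: &HashSet<String>) -> Option<String> {
        for h in heads {
            if !downloading.contains(h) {
                return Some(h.clone());
            }
        }
        None
    }

    // `.cloned()` replaces the more verbose `.map(Clone::clone)`.
    fn snapshot(heads: &[String]) -> HashSet<String> {
        heads.iter().cloned().collect()
    }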


@@ -25,66 +25,66 @@
/// Split the chain into ranges of N blocks each. Download ranges sequentially. Split each range into subchains of M blocks. Download subchains in parallel.
/// State.
/// Sync state consists of the following data:
/// - s: State enum which can be one of the following values: ChainHead, Blocks, Idle
/// - s: State enum which can be one of the following values: `ChainHead`, `Blocks`, `Idle`
/// - H: A set of downloaded block headers
/// - B: A set of downloaded block bodies
/// - S: Set of block subchain start block hashes to download.
/// - l: Last imported / common block hash
/// - P: A set of connected peers. For each peer we maintain its last known total difficulty and starting block hash being requested if any.
/// General behaviour.
/// We start with all sets empty, l is set to the best block in the block chain, s is set to ChainHead.
/// If at any moment a bad block is reported by the block queue, we set s to ChainHead, reset l to the best block in the block chain and clear H, B and S.
/// If at any moment P becomes empty, we set s to ChainHead, and clear H, B and S.
/// We start with all sets empty, l is set to the best block in the block chain, s is set to `ChainHead`.
/// If at any moment a bad block is reported by the block queue, we set s to `ChainHead`, reset l to the best block in the block chain and clear H, B and S.
/// If at any moment P becomes empty, we set s to `ChainHead`, and clear H, B and S.
///
/// Workflow for ChainHead state.
/// In this state we try to get subchain headers with a single GetBlockHeaders request.
/// On NewPeer / On Restart:
/// Workflow for `ChainHead` state.
/// In this state we try to get subchain headers with a single `GetBlockHeaders` request.
/// On `NewPeer` / On `Restart`:
/// If peer's total difficulty is higher, request N/M headers with interval M+1 starting from l
/// On BlockHeaders(R):
/// On `BlockHeaders(R)`:
/// If R is empty:
/// If l is equal to genesis block hash or l is more than 1000 blocks behind our best hash:
/// Remove current peer from P. set l to the best block in the block chain. Select peer with maximum total difficulty from P and restart.
/// Else
/// Set l to l's parent and restart.
/// Else if we already have all the headers in the block chain or the block queue:
/// Set s to Idle,
/// Set s to `Idle`,
/// Else
/// Set S to R, set s to Blocks.
/// Set S to R, set s to `Blocks`.
///
///
/// All other messages are ignored.
/// Workflow for Blocks state.
/// Workflow for `Blocks` state.
/// In this state we download block headers and bodies from multiple peers.
/// On NewPeer / On Restart:
/// On `NewPeer` / On `Restart`:
/// For all idle peers:
/// Find a set of 256 or fewer block hashes in H which are not in B and not being downloaded by other peers. If the set is not empty:
/// Request block bodies for the hashes in the set.
/// Else
/// Find an element in S which is not being downloaded by other peers. If found: Request M headers starting from the element.
///
/// On BlockHeaders(R):
/// On `BlockHeaders(R)`:
/// If R is empty remove current peer from P and restart.
/// Validate received headers. For each header find a parent in H or R or the blockchain. Restart if there is a block with unknown parent.
/// Go to CollectBlocks.
/// Go to `CollectBlocks`.
///
/// On BlockBodies(R):
/// On `BlockBodies(R)`:
/// If R is empty remove current peer from P and restart.
/// Add bodies with a matching header in H to B.
/// Go to CollectBlocks.
/// Go to `CollectBlocks`.
///
/// CollectBlocks:
/// `CollectBlocks`:
/// Find a chain of blocks C in H starting from h where h's parent equals l. The chain ends with the first block which does not have a body in B.
/// Add all blocks from the chain to the block queue. Remove them from H and B. Set l to the hash of the last block from C.
/// Update and merge subchain heads in S. For each h in S find a chain of blocks in B starting from h. Remove h from S. If the chain does not include an element from S, add the end of the chain to S.
/// If H is empty and S contains a single element set s to ChainHead.
/// If H is empty and S contains a single element set s to `ChainHead`.
/// Restart.
///
/// All other messages are ignored.
/// Workflow for Idle state.
/// On NewBlock:
/// Import the block. If the block is unknown set s to ChainHead and restart.
/// On NewHashes:
/// Set s to ChainHead and restart.
/// On `NewBlock`:
/// Import the block. If the block is unknown set s to `ChainHead` and restart.
/// On `NewHashes`:
/// Set s to `ChainHead` and restart.
///
/// All other messages are ignored.
///
@@ -749,12 +749,12 @@ impl ChainSync {
match peer.asking {
PeerAsking::BlockHeaders | PeerAsking::Heads => {
for b in &peer.asking_blocks {
self.blocks.clear_header_download(&b);
self.blocks.clear_header_download(b);
}
},
PeerAsking::BlockBodies => {
for b in &peer.asking_blocks {
self.blocks.clear_body_download(&b);
self.blocks.clear_body_download(b);
}
},
_ => (),
@@ -819,6 +819,7 @@ impl ChainSync {
}
/// Request headers from a peer by block hash
#[cfg_attr(feature="dev", allow(too_many_arguments))]
fn request_headers_by_hash(&mut self, sync: &mut SyncIo, peer_id: PeerId, h: &H256, count: usize, skip: usize, reverse: bool, asking: PeerAsking) {
trace!(target: "sync", "{} <- GetBlockHeaders: {} entries starting from {}", peer_id, count, h);
let mut rlp = RlpStream::new_list(4);
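
The doc comment above specifies the sync state machine; below is a minimal sketch of just the states it names, as an illustration only (the real `ChainSync` in this file tracks far more). The `too_many_arguments` allow at the end of the hunk covers `request_headers_by_hash`, the second function mentioned in the commit message, whose parameter count exceeds clippy's default limit.

    // States named in the doc comment (sketch only).
    enum SyncState {
        /// Fetching subchain head headers with a single GetBlockHeaders request.
        ChainHead,
        /// Downloading headers and bodies for the subchains in parallel.
        Blocks,
        /// Fully synced; only NewBlock / NewHashes trigger further work.
        Idle,
    }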


@@ -28,7 +28,7 @@ crossbeam = "0.2"
slab = "0.1"
sha3 = { path = "sha3" }
serde = "0.7.0"
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
json-tests = { path = "json-tests" }
igd = "0.4.2"
ethcore-devtools = { path = "../devtools" }


@@ -32,6 +32,7 @@
// TODO [todr] a lot of warnings to be fixed
#![cfg_attr(feature="dev", allow(needless_borrow))]
#![cfg_attr(feature="dev", allow(assign_op_pattern))]
#![cfg_attr(feature="dev", allow(unnecessary_operation))]
//! Ethcore-util library
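
The two new crate-level `allow`s cover lints that the bumped clippy now reports across this crate; roughly, they fire on code like the following standalone illustration (not code from this crate):

    fn lint_examples(mut total: usize, sizes: &[usize]) -> usize {
        total = total + sizes.len(); // assign_op_pattern: prefer `total += sizes.len()`
        [sizes.len(), 1];            // unnecessary_operation: the array is built and thrown away
        total
    }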


@@ -27,7 +27,7 @@ parity-idmanager = { git = "https://github.com/ethcore/parity-idmanager-rs.git",
parity-wallet = { git = "https://github.com/ethcore/parity-wallet.git", version = "0.4.1", optional = true }
parity-daodapp = { git = "https://github.com/ethcore/parity-daodapp-rs.git", version = "0.2.1", optional = true }
parity-makerotc = { git = "https://github.com/ethcore/parity-makerotc-rs.git", version = "0.1.3", optional = true }
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.69", optional = true}
[build-dependencies]
serde_codegen = { version = "0.7.0", optional = true }