diff --git a/Cargo.toml b/Cargo.toml
index 04c4bf956..ff551e69c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -20,6 +20,7 @@ time = "0.1"
 evmjit = { path = "rust-evmjit", optional = true }
 ethash = { path = "ethash" }
 num_cpus = "0.2"
+clippy = "*" # Always newest, since we use nightly
 
 [features]
 jit = ["evmjit"]
diff --git a/README.md b/README.md
index 216ac8091..48172bb60 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,5 @@
 # ethcore
+
+
+# Running clippy
+
diff --git a/src/account.rs b/src/account.rs
index c6c4136df..b0fbf3f85 100644
--- a/src/account.rs
+++ b/src/account.rs
@@ -103,7 +103,7 @@ impl Account {
 	/// Get (and cache) the contents of the trie's storage at `key`.
 	pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
 		self.storage_overlay.borrow_mut().entry(key.clone()).or_insert_with(||{
-			(Filth::Clean, H256::from(SecTrieDB::new(db, &self.storage_root).get(key.bytes()).map(|v| -> U256 {decode(v)}).unwrap_or(U256::zero())))
+			(Filth::Clean, H256::from(SecTrieDB::new(db, &self.storage_root).get(key.bytes()).map_or(U256::zero(), |v| -> U256 {decode(v)})))
 		}).1.clone()
 	}
 
@@ -149,7 +149,7 @@ impl Account {
 	/// Provide a database to lookup `code_hash`. Should not be called if it is a contract without code.
 	pub fn cache_code(&mut self, db: &HashDB) -> bool {
 		// TODO: fill out self.code_cache;
-		return self.is_cached() ||
+		self.is_cached() ||
 			match self.code_hash {
 				Some(ref h) => match db.lookup(h) {
 					Some(x) => { self.code_cache = x.to_vec(); true },
@@ -248,8 +248,8 @@ mod tests {
 		let a = Account::from_rlp(&rlp);
 		assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
-		assert_eq!(a.storage_at(&mut db, &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
-		assert_eq!(a.storage_at(&mut db, &H256::from(&U256::from(0x01u64))), H256::new());
+		assert_eq!(a.storage_at(&db, &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
+		assert_eq!(a.storage_at(&db, &H256::from(&U256::from(0x01u64))), H256::new());
 	}
 
 	#[test]
diff --git a/src/account_diff.rs b/src/account_diff.rs
index 06315db95..1843c4381 100644
--- a/src/account_diff.rs
+++ b/src/account_diff.rs
@@ -15,10 +15,10 @@ pub enum Existance {
 
 impl fmt::Display for Existance {
 	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-		match self {
-			&Existance::Born => try!(write!(f, "+++")),
-			&Existance::Alive => try!(write!(f, "***")),
-			&Existance::Died => try!(write!(f, "XXX")),
+		match *self {
+			Existance::Born => try!(write!(f, "+++")),
+			Existance::Alive => try!(write!(f, "***")),
+			Existance::Died => try!(write!(f, "XXX")),
 		}
 		Ok(())
 	}
@@ -72,11 +72,11 @@ impl AccountDiff {
 			code: Diff::new(pre.code.clone(), post.code.clone()),
 			storage: storage.into_iter().map(|k|
 				(k.clone(), Diff::new(
-					pre.storage.get(&k).cloned().unwrap_or(H256::new()),
-					post.storage.get(&k).cloned().unwrap_or(H256::new())
+					pre.storage.get(&k).cloned().unwrap_or_else(H256::new),
+					post.storage.get(&k).cloned().unwrap_or_else(H256::new)
 				))).collect(),
 		};
-		if r.balance.is_same() && r.nonce.is_same() && r.code.is_same() && r.storage.len() == 0 {
+		if r.balance.is_same() && r.nonce.is_same() && r.code.is_same() && r.storage.is_empty() {
 			None
 		} else {
 			Some(r)
@@ -112,16 +112,15 @@ impl fmt::Display for AccountDiff {
 			Diff::Changed(ref pre, ref post) => try!(write!(f, "${} ({} {} {})", post, pre, if pre > post {"-"} else {"+"}, *max(pre, post) - *min(pre, post))),
 			_ => {},
 		}
-		match self.code {
-			Diff::Born(ref x) => try!(write!(f, " code {}", x.pretty())),
-			_ => {},
+		if let Diff::Born(ref x) = self.code {
+			try!(write!(f, " code {}", x.pretty()));
 		}
 		try!(write!(f, "\n"));
-		for (k, dv) in self.storage.iter() {
-			match dv {
-				&Diff::Born(ref v) => try!(write!(f, " + {} => {}\n", interpreted_hash(k), interpreted_hash(v))),
-				&Diff::Changed(ref pre, ref post) => try!(write!(f, " * {} => {} (was {})\n", interpreted_hash(k), interpreted_hash(post), interpreted_hash(pre))),
-				&Diff::Died(_) => try!(write!(f, " X {}\n", interpreted_hash(k))),
+		for (k, dv) in &self.storage {
+			match *dv {
+				Diff::Born(ref v) => try!(write!(f, " + {} => {}\n", interpreted_hash(k), interpreted_hash(v))),
+				Diff::Changed(ref pre, ref post) => try!(write!(f, " * {} => {} (was {})\n", interpreted_hash(k), interpreted_hash(post), interpreted_hash(pre))),
+				Diff::Died(_) => try!(write!(f, " X {}\n", interpreted_hash(k))),
 				_ => {},
 			}
 		}
diff --git a/src/block.rs b/src/block.rs
index ac0ef7f89..1ff326430 100644
--- a/src/block.rs
+++ b/src/block.rs
@@ -1,3 +1,5 @@
+#![allow(ptr_arg)] // Because of &LastHashes -> &Vec<_>
+
 use common::*;
 use engine::*;
 use state::*;
@@ -173,7 +175,7 @@ impl<'x, 'y> OpenBlock<'x, 'y> {
 			timestamp: self.block.header.timestamp,
 			difficulty: self.block.header.difficulty.clone(),
 			last_hashes: self.last_hashes.clone(), // TODO: should be a reference.
-			gas_used: self.block.archive.last().map(|t| t.receipt.gas_used).unwrap_or(U256::from(0)),
+			gas_used: self.block.archive.last().map_or(U256::zero(), |t| t.receipt.gas_used),
 			gas_limit: self.block.header.gas_limit.clone(),
 		}
 	}
@@ -204,7 +206,7 @@ impl<'x, 'y> OpenBlock<'x, 'y> {
 		s.block.header.state_root = s.block.state.root().clone();
 		s.block.header.receipts_root = ordered_trie_root(s.block.archive.iter().map(|ref e| e.receipt.rlp_bytes()).collect());
 		s.block.header.log_bloom = s.block.archive.iter().fold(LogBloom::zero(), |mut b, e| {b |= &e.receipt.log_bloom; b});
-		s.block.header.gas_used = s.block.archive.last().map(|t| t.receipt.gas_used).unwrap_or(U256::from(0));
+		s.block.header.gas_used = s.block.archive.last().map_or(U256::zero(), |t| t.receipt.gas_used);
 		s.block.header.note_dirty();
 
 		ClosedBlock::new(s, uncle_bytes)
@@ -255,7 +257,7 @@ impl SealedBlock {
 		let mut block_rlp = RlpStream::new_list(3);
 		self.block.header.stream_rlp(&mut block_rlp, Seal::With);
 		block_rlp.append_list(self.block.archive.len());
-		for e in self.block.archive.iter() { e.transaction.rlp_append(&mut block_rlp); }
+		for e in &self.block.archive { e.transaction.rlp_append(&mut block_rlp); }
 		block_rlp.append_raw(&self.uncle_bytes, 1);
 		block_rlp.out()
 	}
diff --git a/src/blockchain.rs b/src/blockchain.rs
index 27abe9ee3..346fa61b5 100644
--- a/src/blockchain.rs
+++ b/src/blockchain.rs
@@ -153,9 +153,8 @@ impl BlockProvider for BlockChain {
 	fn block(&self, hash: &H256) -> Option {
 		{
 			let read = self.blocks.read().unwrap();
-			match read.get(hash) {
-				Some(v) => return Some(v.clone()),
-				None => ()
+			if let Some(v) = read.get(hash) {
+				return Some(v.clone());
 			}
 		}
 
@@ -188,7 +187,7 @@ impl BlockProvider for BlockChain {
 
 const COLLECTION_QUEUE_SIZE: usize = 2;
 const MIN_CACHE_SIZE: usize = 1;
-const MAX_CACHE_SIZE: usize = 1024 * 1024 * 1;
+const MAX_CACHE_SIZE: usize = 1024 * 1024;
 
 impl BlockChain {
 	/// Create new instance of blockchain from given Genesis
@@ -527,9 +526,8 @@ impl BlockChain {
 		K: ExtrasSliceConvertable + Eq + Hash + Clone {
 		{
 			let read = cache.read().unwrap();
-			match read.get(hash) {
-				Some(v) => return Some(v.clone()),
-				None => ()
+			if let Some(v) = read.get(hash) {
+				return Some(v.clone());
 			}
 		}
 
@@ -549,9 +547,8 @@ impl BlockChain {
 		T: ExtrasIndexable {
 		{
 			let read = cache.read().unwrap();
-			match read.get(hash) {
-				Some(_) => return true,
-				None => ()
+			if let Some(_) = read.get(hash) {
+				return true;
 			}
 		}
 
@@ -670,6 +667,7 @@ mod tests {
 	}
 
 	#[test]
+	#[allow(cyclomatic_complexity)]
 	fn test_small_fork() {
 		let genesis = "f901fcf901f7a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a07dba07d6b448a186e9612e5f737d1c909dce473e53199901a302c00646d523c1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302000080832fefd8808454c98c8142a059262c330941f3fe2a34d16d6e3c7b30d2ceb37c6a0e9a994c494ee1a61d2410885aa4c8bf8e56e264c0c0".from_hex().unwrap();
 		let b1 = "f90261f901f9a05716670833ec874362d65fea27a7cd35af5897d275b31a44944113111e4e96d2a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0cb52de543653d86ccd13ba3ddf8b052525b04231c6884a4db3188a184681d878a0e78628dd45a1f8dc495594d83b76c588a3ee67463260f8b7d4a42f574aeab29aa0e9244cf7503b79c03d3a099e07a80d2dbc77bb0b502d8a89d51ac0d68dd31313b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302000001832fefd882520884562791e580a051b3ecba4e3f2b49c11d42dd0851ec514b1be3138080f72a2b6e83868275d98f8877671f479c414b47f862f86080018304cb2f94095e7baea6a6c7c4c2dfeb977efac326af552d870a801ca09e2709d7ec9bbe6b1bbbf0b2088828d14cd5e8642a1fee22dc74bfa89761a7f9a04bd8813dee4be989accdb708b1c2e325a7e9c695a8024e30e89d6c644e424747c0".from_hex().unwrap();
diff --git a/src/builtin.rs b/src/builtin.rs
index 1835eda97..00d0bbeb5 100644
--- a/src/builtin.rs
+++ b/src/builtin.rs
@@ -94,16 +94,13 @@ pub fn new_builtin_exec(name: &str) -> Option> {
 			if it.v == H256::from(&U256::from(27)) || it.v == H256::from(&U256::from(28)) {
 				let s = Signature::from_rsv(&it.r, &it.s, it.v[31] - 27);
 				if ec::is_valid(&s) {
-					match ec::recover(&s, &it.hash) {
-						Ok(p) => {
-							let r = p.as_slice().sha3();
-							// NICE: optimise and separate out into populate-like function
-							for i in 0..min(32, output.len()) {
-								output[i] = if i < 12 {0} else {r[i]};
-							}
+					if let Ok(p) = ec::recover(&s, &it.hash) {
+						let r = p.as_slice().sha3();
+						// NICE: optimise and separate out into populate-like function
+						for i in 0..min(32, output.len()) {
+							output[i] = if i < 12 {0} else {r[i]};
 						}
-						_ => {}
-					};
+					}
 				}
 			}
 		})),
diff --git a/src/ethereum/ethash.rs b/src/ethereum/ethash.rs
index ecfdb0541..aebee1e92 100644
--- a/src/ethereum/ethash.rs
+++ b/src/ethereum/ethash.rs
@@ -32,13 +32,13 @@ impl Ethash {
 	}
 
 	fn u64_param(&self, name: &str) -> u64 {
-		*self.u64_params.write().unwrap().entry(name.to_string()).or_insert_with(||
-			self.spec().engine_params.get(name).map(|a| decode(&a)).unwrap_or(0u64))
+		*self.u64_params.write().unwrap().entry(name.to_owned()).or_insert_with(||
+			self.spec().engine_params.get(name).map_or(0u64, |a| decode(&a)))
 	}
 
	fn u256_param(&self, name: &str) -> U256 {
-		*self.u256_params.write().unwrap().entry(name.to_string()).or_insert_with(||
-			self.spec().engine_params.get(name).map(|a| decode(&a)).unwrap_or(x!(0)))
+		*self.u256_params.write().unwrap().entry(name.to_owned()).or_insert_with(||
+			self.spec().engine_params.get(name).map_or(x!(0), |a| decode(&a)))
 	}
 }
@@ -84,7 +84,7 @@ impl Engine for Ethash {
 	/// Apply the block reward on finalisation of the block.
 	/// This assumes that all uncles are valid uncles (i.e. of at least one generation before the current).
 	fn on_close_block(&self, block: &mut Block) {
-		let reward = self.spec().engine_params.get("blockReward").map(|a| decode(&a)).unwrap_or(U256::from(0u64));
+		let reward = self.spec().engine_params.get("blockReward").map_or(U256::from(0u64), |a| decode(&a));
 		let fields = block.fields();
 
 		// Bestow block reward
@@ -153,6 +153,7 @@ impl Engine for Ethash {
 	}
 }
 
+#[allow(wrong_self_convention)] // to_ethash should take self
 impl Ethash {
 	fn calculate_difficuty(&self, header: &Header, parent: &Header) -> U256 {
 		const EXP_DIFF_PERIOD: u64 = 100000;
diff --git a/src/evm/factory.rs b/src/evm/factory.rs
index 439bee5da..9f76d25ec 100644
--- a/src/evm/factory.rs
+++ b/src/evm/factory.rs
@@ -68,10 +68,11 @@ impl Factory {
 	fn jit() -> Box {
 		unimplemented!()
 	}
-
+}
+impl Default for Factory {
 	/// Returns jitvm factory
 	#[cfg(feature = "jit")]
-	pub fn default() -> Factory {
+	fn default() -> Factory {
 		Factory {
 			evm: VMType::Jit
 		}
@@ -79,7 +80,7 @@ impl Factory {
 	/// Returns native rust evm factory
 	#[cfg(not(feature = "jit"))]
-	pub fn default() -> Factory {
+	fn default() -> Factory {
 		Factory {
 			evm: VMType::Interpreter
 		}
diff --git a/src/evm/interpreter.rs b/src/evm/interpreter.rs
index 88823cbea..e21074531 100644
--- a/src/evm/interpreter.rs
+++ b/src/evm/interpreter.rs
@@ -72,7 +72,7 @@ impl VecStack {
 	fn peek(&self, no_from_top: usize) -> &S {
-		return &self.stack[self.stack.len() - no_from_top - 1];
+		&self.stack[self.stack.len() - no_from_top - 1]
 	}
 
 	fn swap_with_top(&mut self, no_from_top: usize) {
@@ -157,7 +157,7 @@ impl Memory for Vec {
 	}
 
 	fn size(&self) -> usize {
-		return self.len()
+		self.len()
 	}
 
 	fn read_slice(&self, init_off_u: U256, init_size_u: U256) -> &[u8] {
@@ -228,6 +228,7 @@ struct CodeReader<'a> {
 	code: &'a Bytes
 }
 
+#[allow(len_without_is_empty)]
 impl<'a> CodeReader<'a> {
 	/// Get `no_of_bytes` from code and convert to U256. Move PC
 	fn read(&mut self, no_of_bytes: usize) -> U256 {
@@ -330,6 +331,7 @@ impl evm::Evm for Interpreter {
 }
 
 impl Interpreter {
+	#[allow(cyclomatic_complexity)]
 	fn get_gas_cost_mem(&self,
 		ext: &evm::Ext,
 		instruction: Instruction,
@@ -716,7 +718,7 @@ impl Interpreter {
 				let big_id = stack.pop_back();
 				let id = big_id.low_u64() as usize;
 				let max = id.wrapping_add(32);
-				let data = params.data.clone().unwrap_or(vec![]);
+				let data = params.data.clone().unwrap_or_else(|| vec![]);
 				let bound = cmp::min(data.len(), max);
 				if id < bound && big_id < U256::from(data.len()) {
 					let mut v = data[id..bound].to_vec();
@@ -727,7 +729,7 @@ impl Interpreter {
 				}
 			},
 			instructions::CALLDATASIZE => {
-				stack.push(U256::from(params.data.clone().unwrap_or(vec![]).len()));
+				stack.push(U256::from(params.data.clone().map_or(0, |l| l.len())));
 			},
 			instructions::CODESIZE => {
 				stack.push(U256::from(code.len()));
@@ -738,10 +740,10 @@ impl Interpreter {
 				stack.push(U256::from(len));
 			},
 			instructions::CALLDATACOPY => {
-				self.copy_data_to_memory(mem, stack, &params.data.clone().unwrap_or(vec![]));
+				self.copy_data_to_memory(mem, stack, &params.data.clone().unwrap_or_else(|| vec![]));
 			},
 			instructions::CODECOPY => {
-				self.copy_data_to_memory(mem, stack, &params.code.clone().unwrap_or(vec![]));
+				self.copy_data_to_memory(mem, stack, &params.code.clone().unwrap_or_else(|| vec![]));
 			},
 			instructions::EXTCODECOPY => {
 				let address = u256_to_address(&stack.pop_back());
@@ -781,7 +783,7 @@ impl Interpreter {
 	fn copy_data_to_memory(&self,
 		mem: &mut Memory,
 		stack: &mut Stack,
-		data: &Bytes) {
+		data: &[u8]) {
 		let offset = stack.pop_back();
 		let index = stack.pop_back();
 		let size = stack.pop_back();
@@ -1051,7 +1053,7 @@ impl Interpreter {
 		Ok(())
 	}
 
-	fn find_jump_destinations(&self, code: &Bytes) -> HashSet {
+	fn find_jump_destinations(&self, code: &[u8]) -> HashSet {
 		let mut jump_dests = HashSet::new();
 		let mut position = 0;
 
@@ -1066,7 +1068,7 @@ impl Interpreter {
 			position += 1;
 		}
 
-		return jump_dests;
+		jump_dests
 	}
 }
diff --git a/src/evm/tests.rs b/src/evm/tests.rs
index 8e1b5eff4..ef5987285 100644
--- a/src/evm/tests.rs
+++ b/src/evm/tests.rs
@@ -19,7 +19,7 @@ struct FakeExt {
 	logs: Vec,
 	_suicides: HashSet,
 	info: EnvInfo,
-	_schedule: Schedule
+	schedule: Schedule
 }
 
 impl FakeExt {
@@ -89,7 +89,7 @@ impl Ext for FakeExt {
 	}
 
 	fn schedule(&self) -> &Schedule {
-		&self._schedule
+		&self.schedule
 	}
 
 	fn env_info(&self) -> &EnvInfo {
@@ -122,7 +122,7 @@ fn test_stack_underflow() {
 	};
 
 	match err {
-		evm::Error::StackUnderflow {instruction: _, wanted, on_stack} => {
+		evm::Error::StackUnderflow {wanted, on_stack, ..} => {
 			assert_eq!(wanted, 2);
 			assert_eq!(on_stack, 0);
 		}
diff --git a/src/executive.rs b/src/executive.rs
index 6c2b29e3f..7671f3cc9 100644
--- a/src/executive.rs
+++ b/src/executive.rs
@@ -75,7 +75,7 @@ impl<'a> Executive<'a> {
 	}
 
 	/// Creates `Externalities` from `Executive`.
-	pub fn to_externalities<'_>(&'_ mut self, origin_info: OriginInfo, substate: &'_ mut Substate, output: OutputPolicy<'_>) -> Externalities {
+	pub fn as_externalities<'_>(&'_ mut self, origin_info: OriginInfo, substate: &'_ mut Substate, output: OutputPolicy<'_>) -> Externalities {
 		Externalities::new(self.state, self.info, self.engine, self.depth, origin_info, substate, output)
 	}
 
@@ -123,8 +123,8 @@ impl<'a> Executive<'a> {
 
 		let mut substate = Substate::new();
 
-		let res = match t.action() {
-			&Action::Create => {
+		let res = match *t.action() {
+			Action::Create => {
 				let new_address = contract_address(&sender, &nonce);
 				let params = ActionParams {
 					code_address: new_address.clone(),
@@ -139,7 +139,7 @@ impl<'a> Executive<'a> {
 				};
 				self.create(params, &mut substate)
 			},
-			&Action::Call(ref address) => {
+			Action::Call(ref address) => {
 				let params = ActionParams {
 					code_address: address.clone(),
 					address: address.clone(),
@@ -177,7 +177,7 @@ impl<'a> Executive<'a> {
 
 		// if destination is builtin, try to execute it
 		let default = [];
-		let data = if let &Some(ref d) = &params.data { d as &[u8] } else { &default as &[u8] };
+		let data = if let Some(ref d) = params.data { d as &[u8] } else { &default as &[u8] };
 
 		let cost = self.engine.cost_of_builtin(&params.code_address, data);
 		match cost <= params.gas {
@@ -198,7 +198,7 @@ impl<'a> Executive<'a> {
 		let mut unconfirmed_substate = Substate::new();
 
 		let res = {
-			let mut ext = self.to_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::Return(output));
+			let mut ext = self.as_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::Return(output));
 			self.engine.vm_factory().create().exec(params, &mut ext)
 		};
 
@@ -230,7 +230,7 @@ impl<'a> Executive<'a> {
 		self.state.transfer_balance(&params.sender, &params.address, &params.value);
 
 		let res = {
-			let mut ext = self.to_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::InitContract);
+			let mut ext = self.as_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::InitContract);
 			self.engine.vm_factory().create().exec(params, &mut ext)
 		};
 		self.enact_result(&res, substate, unconfirmed_substate, backup);
@@ -248,7 +248,7 @@ impl<'a> Executive<'a> {
 		let refunds_bound = sstore_refunds + suicide_refunds;
 
 		// real ammount to refund
-		let gas_left_prerefund = match &result { &Ok(x) => x, _ => x!(0) };
+		let gas_left_prerefund = match result { Ok(x) => x, _ => x!(0) };
 		let refunded = cmp::min(refunds_bound, (t.gas - gas_left_prerefund) / U256::from(2));
 		let gas_left = gas_left_prerefund + refunded;
 
@@ -265,7 +265,7 @@ impl<'a> Executive<'a> {
 		self.state.add_balance(&self.info.author, &fees_value);
 
 		// perform suicides
-		for address in substate.suicides.iter() {
+		for address in &substate.suicides {
 			trace!("Killing {}", address);
 			self.state.kill_account(address);
 		}
@@ -273,11 +273,7 @@ impl<'a> Executive<'a> {
 		match result {
 			Err(evm::Error::Internal) => Err(ExecutionError::Internal),
 			// TODO [ToDr] BadJumpDestination @debris - how to handle that?
-			Err(evm::Error::OutOfGas)
-				| Err(evm::Error::BadJumpDestination { destination: _ })
-				| Err(evm::Error::BadInstruction { instruction: _ })
-				| Err(evm::Error::StackUnderflow {instruction: _, wanted: _, on_stack: _})
-				| Err(evm::Error::OutOfStack {instruction: _, wanted: _, limit: _}) => {
+			Err(_) => {
 				Ok(Executed {
 					gas: t.gas,
 					gas_used: t.gas,
@@ -302,15 +298,15 @@ impl<'a> Executive<'a> {
 
 	fn enact_result(&mut self, result: &evm::Result, substate: &mut Substate, un_substate: Substate, backup: State) {
 		// TODO: handle other evm::Errors same as OutOfGas once they are implemented
-		match result {
-			&Err(evm::Error::OutOfGas)
-				| &Err(evm::Error::BadJumpDestination { destination: _ })
-				| &Err(evm::Error::BadInstruction { instruction: _ })
-				| &Err(evm::Error::StackUnderflow {instruction: _, wanted: _, on_stack: _})
-				| &Err(evm::Error::OutOfStack {instruction: _, wanted: _, limit: _}) => {
+		match *result {
+			Err(evm::Error::OutOfGas)
+				| Err(evm::Error::BadJumpDestination {..})
+				| Err(evm::Error::BadInstruction {.. })
+				| Err(evm::Error::StackUnderflow {..})
+				| Err(evm::Error::OutOfStack {..}) => {
 				self.state.revert(backup);
 			},
-			&Ok(_) | &Err(evm::Error::Internal) => substate.accrue(un_substate)
+			Ok(_) | Err(evm::Error::Internal) => substate.accrue(un_substate)
 		}
 	}
 }
diff --git a/src/externalities.rs b/src/externalities.rs
index 8b16cc72b..b2d716d37 100644
--- a/src/externalities.rs
+++ b/src/externalities.rs
@@ -158,9 +158,10 @@ impl<'a> Ext for Externalities<'a> {
 	}
 
 	fn extcode(&self, address: &Address) -> Bytes {
-		self.state.code(address).unwrap_or(vec![])
+		self.state.code(address).unwrap_or_else(|| vec![])
 	}
 
+	#[allow(match_ref_pats)]
 	fn ret(&mut self, gas: &U256, data: &[u8]) -> Result {
 		match &mut self.output {
 			&mut OutputPolicy::Return(BytesRef::Fixed(ref mut slice)) => unsafe {
diff --git a/src/header.rs b/src/header.rs
index 28ed458fb..86710efcb 100644
--- a/src/header.rs
+++ b/src/header.rs
@@ -171,9 +171,10 @@ impl Header {
 		s.append(&self.gas_used);
 		s.append(&self.timestamp);
 		s.append(&self.extra_data);
-		match with_seal {
-			Seal::With => for b in self.seal.iter() { s.append_raw(&b, 1); },
-			_ => {}
+		if let Seal::With = with_seal {
+			for b in &self.seal {
+				s.append_raw(&b, 1);
+			}
 		}
 	}
 
@@ -236,7 +237,7 @@ impl Encodable for Header {
 			self.timestamp.encode(e);
 			self.extra_data.encode(e);
 
-			for b in self.seal.iter() {
+			for b in &self.seal {
 				e.emit_raw(&b);
 			}
 		})
diff --git a/src/lib.rs b/src/lib.rs
index a5b6c3dae..f478cf75c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,8 +1,11 @@
 #![warn(missing_docs)]
 #![feature(cell_extras)]
 #![feature(augmented_assignments)]
-//#![feature(plugin)]
+#![feature(plugin)]
 //#![plugin(interpolate_idents)]
+#![plugin(clippy)]
+#![allow(needless_range_loop, match_bool)]
+
 //! Ethcore's ethereum implementation
 //!
 //! ### Rust version
@@ -73,7 +76,6 @@
 //! sudo make install
 //! sudo ldconfig
 //! ```
-
 #[macro_use]
 extern crate log;
 extern crate rustc_serialize;
diff --git a/src/pod_state.rs b/src/pod_state.rs
index 2ab27ef1d..1ea8382a5 100644
--- a/src/pod_state.rs
+++ b/src/pod_state.rs
@@ -26,10 +26,10 @@ impl FromJson for PodState {
 			let code = acc.find("code").map(&Bytes::from_json);
 			if balance.is_some() || nonce.is_some() || storage.is_some() || code.is_some() {
 				state.insert(address_from_hex(address), PodAccount{
-					balance: balance.unwrap_or(U256::zero()),
-					nonce: nonce.unwrap_or(U256::zero()),
-					storage: storage.unwrap_or(BTreeMap::new()),
-					code: code.unwrap_or(Vec::new())
+					balance: balance.unwrap_or_else(U256::zero),
+					nonce: nonce.unwrap_or_else(U256::zero),
+					storage: storage.unwrap_or_else(BTreeMap::new),
+					code: code.unwrap_or_else(Vec::new)
 				});
 			}
 			state
diff --git a/src/receipt.rs b/src/receipt.rs
index 82620cfcd..403915fdc 100644
--- a/src/receipt.rs
+++ b/src/receipt.rs
@@ -36,7 +36,7 @@ impl RlpStandard for Receipt {
 		// TODO: make work:
 		//s.append(&self.logs);
 		s.append_list(self.logs.len());
-		for l in self.logs.iter() {
+		for l in &self.logs {
 			l.rlp_append(s);
 		}
 	}
diff --git a/src/service.rs b/src/service.rs
index 30565b37a..debabaafe 100644
--- a/src/service.rs
+++ b/src/service.rs
@@ -53,20 +53,13 @@ impl IoHandler for ClientIoHandler {
 	fn initialize<'s>(&'s mut self, _io: &mut IoContext<'s, NetSyncMessage>) {
 	}
 
+	#[allow(match_ref_pats)]
 	fn message<'s>(&'s mut self, _io: &mut IoContext<'s, NetSyncMessage>, net_message: &'s mut NetSyncMessage) {
-		match net_message {
-			&mut UserMessage(ref mut message) => {
-				match message {
-					&mut SyncMessage::BlockVerified => {
-						self.client.write().unwrap().import_verified_blocks();
-					},
-					_ => {}, // ignore other messages
-				}
-
+		if let &mut UserMessage(ref mut message) = net_message {
+			if let &mut SyncMessage::BlockVerified= message {
+				self.client.write().unwrap().import_verified_blocks();
 			}
-			_ => {}, // ignore other messages
 		}
-
 	}
 }
diff --git a/src/spec.rs b/src/spec.rs
index 24c0e4eda..086b76339 100644
--- a/src/spec.rs
+++ b/src/spec.rs
@@ -10,7 +10,7 @@ pub fn gzip64res_to_json(source: &[u8]) -> Json {
 	let data = source.from_base64().expect("Genesis block is malformed!");
 	let data_ref: &[u8] = &data;
 	let mut decoder = GzDecoder::new(data_ref).expect("Gzip is invalid");
-	let mut s: String = "".to_string();
+	let mut s: String = "".to_owned();
 	decoder.read_to_string(&mut s).expect("Gzip is invalid");
 	Json::from_str(&s).expect("Json is invalid")
 }
@@ -18,14 +18,14 @@ pub fn gzip64res_to_json(source: &[u8]) -> Json {
 /// Convert JSON value to equivlaent RLP representation.
 // TODO: handle container types.
 fn json_to_rlp(json: &Json) -> Bytes {
-	match json {
-		&Json::Boolean(o) => encode(&(if o {1u64} else {0})),
-		&Json::I64(o) => encode(&(o as u64)),
-		&Json::U64(o) => encode(&o),
-		&Json::String(ref s) if s.len() >= 2 && &s[0..2] == "0x" && U256::from_str(&s[2..]).is_ok() => {
+	match *json {
+		Json::Boolean(o) => encode(&(if o {1u64} else {0})),
+		Json::I64(o) => encode(&(o as u64)),
+		Json::U64(o) => encode(&o),
+		Json::String(ref s) if s.len() >= 2 && &s[0..2] == "0x" && U256::from_str(&s[2..]).is_ok() => {
 			encode(&U256::from_str(&s[2..]).unwrap())
 		},
-		&Json::String(ref s) => {
+		Json::String(ref s) => {
 			encode(s)
 		},
 		_ => panic!()
@@ -108,6 +108,7 @@ pub struct Spec {
 	state_root_memo: RwLock>,
 }
 
+#[allow(wrong_self_convention)] // because to_engine(self) should be to_engine(&self)
 impl Spec {
 	/// Convert this object into a boxed Engine of the right underlying type.
 	// TODO avoid this hard-coded nastiness - use dynamic-linked plugin framework instead.
@@ -185,13 +186,13 @@ impl FromJson for Spec {
 						builtins.insert(addr.clone(), builtin);
 					}
 				}
-				let balance = acc.find("balance").and_then(|x| match x { &Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
-				let nonce = acc.find("nonce").and_then(|x| match x { &Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
+				let balance = acc.find("balance").and_then(|x| match *x { Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
+				let nonce = acc.find("nonce").and_then(|x| match *x { Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
 //				let balance = if let Some(&Json::String(ref b)) = acc.find("balance") {U256::from_dec_str(b).unwrap_or(U256::from(0))} else {U256::from(0)};
 //				let nonce = if let Some(&Json::String(ref n)) = acc.find("nonce") {U256::from_dec_str(n).unwrap_or(U256::from(0))} else {U256::from(0)};
 				// TODO: handle code & data if they exist.
 				if balance.is_some() || nonce.is_some() {
-					state.insert(addr, GenesisAccount { balance: balance.unwrap_or(U256::from(0)), nonce: nonce.unwrap_or(U256::from(0)) });
+					state.insert(addr, GenesisAccount { balance: balance.unwrap_or_else(U256::zero), nonce: nonce.unwrap_or_else(U256::zero) });
 				}
 			}
 		}
@@ -215,8 +216,8 @@ impl FromJson for Spec {
 
 		Spec {
-			name: json.find("name").map(|j| j.as_string().unwrap()).unwrap_or("unknown").to_string(),
-			engine_name: json["engineName"].as_string().unwrap().to_string(),
+			name: json.find("name").map_or("unknown", |j| j.as_string().unwrap()).to_owned(),
+			engine_name: json["engineName"].as_string().unwrap().to_owned(),
 			engine_params: json_to_rlp_map(&json["params"]),
 			builtins: builtins,
 			parent_hash: H256::from_str(&genesis["parentHash"].as_string().unwrap()[2..]).unwrap(),
@@ -242,7 +243,7 @@ impl Spec {
 		let mut root = H256::new();
 		{
 			let mut t = SecTrieDBMut::new(db, &mut root);
-			for (address, account) in self.genesis_state.iter() {
+			for (address, account) in &self.genesis_state {
 				t.insert(address.as_slice(), &account.rlp());
 			}
 		}
diff --git a/src/state.rs b/src/state.rs
index e45347fd2..4b7e5af34 100644
--- a/src/state.rs
+++ b/src/state.rs
@@ -88,22 +88,22 @@ impl State {
 	/// Get the balance of account `a`.
 	pub fn balance(&self, a: &Address) -> U256 {
-		self.get(a, false).as_ref().map(|account| account.balance().clone()).unwrap_or(U256::from(0u8))
+		self.get(a, false).as_ref().map_or(U256::zero(), |account| account.balance().clone())
 	}
 
 	/// Get the nonce of account `a`.
 	pub fn nonce(&self, a: &Address) -> U256 {
-		self.get(a, false).as_ref().map(|account| account.nonce().clone()).unwrap_or(U256::from(0u8))
+		self.get(a, false).as_ref().map_or(U256::zero(), |account| account.nonce().clone())
 	}
 
 	/// Mutate storage of account `a` so that it is `value` for `key`.
 	pub fn storage_at(&self, a: &Address, key: &H256) -> H256 {
-		self.get(a, false).as_ref().map(|a|a.storage_at(&self.db, key)).unwrap_or(H256::new())
+		self.get(a, false).as_ref().map_or(H256::new(), |a|a.storage_at(&self.db, key))
 	}
 
 	/// Mutate storage of account `a` so that it is `value` for `key`.
 	pub fn code(&self, a: &Address) -> Option {
-		self.get(a, true).as_ref().map(|a|a.code().map(|x|x.to_vec())).unwrap_or(None)
+		self.get(a, true).as_ref().map_or(None, |a|a.code().map(|x|x.to_vec()))
 	}
 
 	/// Add `incr` to the balance of account `a`.
@@ -170,6 +170,7 @@ impl State {
 	/// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
 	/// `accounts` is mutable because we may need to commit the code or storage and record that.
+	#[allow(match_ref_pats)]
 	pub fn commit_into(db: &mut HashDB, root: &mut H256, accounts: &mut HashMap>) {
 		// first, commit the sub trees.
 		// TODO: is this necessary or can we dispense with the `ref mut a` for just `a`?
@@ -186,9 +187,9 @@ impl State {
 		{
 			let mut trie = SecTrieDBMut::from_existing(db, root);
 			for (address, ref a) in accounts.iter() {
-				match a {
-					&&Some(ref account) => trie.insert(address, &account.rlp()),
-					&&None => trie.remove(address),
+				match **a {
+					Some(ref account) => trie.insert(address, &account.rlp()),
+					None => trie.remove(address),
 				}
 			}
 		}
@@ -210,7 +211,7 @@ impl State {
 	pub fn to_hashmap_pod(&self) -> HashMap {
 		// TODO: handle database rather than just the cache.
 		self.cache.borrow().iter().fold(HashMap::new(), |mut m, (add, opt)| {
-			if let &Some(ref acc) = opt {
+			if let Some(ref acc) = *opt {
 				m.insert(add.clone(), PodAccount::from_account(acc));
 			}
 			m
@@ -221,7 +222,7 @@ impl State {
 	pub fn to_pod(&self) -> PodState {
 		// TODO: handle database rather than just the cache.
 		PodState::new(self.cache.borrow().iter().fold(BTreeMap::new(), |mut m, (add, opt)| {
-			if let &Some(ref acc) = opt {
+			if let Some(ref acc) = *opt {
 				m.insert(add.clone(), PodAccount::from_account(acc));
 			}
 			m
diff --git a/src/state_diff.rs b/src/state_diff.rs
index 28d92e59e..08fccf3ed 100644
--- a/src/state_diff.rs
+++ b/src/state_diff.rs
@@ -15,7 +15,7 @@ impl StateDiff {
 
 impl fmt::Display for StateDiff {
 	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-		for (add, acc) in self.0.iter() {
+		for (add, acc) in &self.0 {
 			try!(write!(f, "{} {}: {}", acc.existance(), add, acc));
 		}
 		Ok(())
diff --git a/src/sync/chain.rs b/src/sync/chain.rs
index dba9ae08d..e90b0add5 100644
--- a/src/sync/chain.rs
+++ b/src/sync/chain.rs
@@ -212,7 +212,7 @@ impl ChainSync {
 		self.downloading_bodies.clear();
 		self.headers.clear();
 		self.bodies.clear();
-		for (_, ref mut p) in self.peers.iter_mut() {
+		for (_, ref mut p) in &mut self.peers {
 			p.asking_blocks.clear();
 		}
 		self.header_ids.clear();
@@ -268,6 +268,7 @@ impl ChainSync {
 		Ok(())
 	}
 
+	#[allow(cyclomatic_complexity)]
 	/// Called by peer once it has new block headers during sync
 	fn on_peer_block_headers(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
 		self.reset_peer_asking(peer_id, PeerAsking::BlockHeaders);
@@ -375,7 +376,7 @@ impl ChainSync {
 					transactions_root: tx_root,
 					uncles: uncles
 				};
-				match self.header_ids.get(&header_id).map(|n| *n) {
+				match self.header_ids.get(&header_id).cloned() {
 					Some(n) => {
 						self.header_ids.remove(&header_id);
 						self.bodies.insert_item(n, body.as_raw().to_vec());
@@ -699,16 +700,13 @@ impl ChainSync {
 	/// Used to recover from an error and re-download parts of the chain detected as bad.
 	fn remove_downloaded_blocks(&mut self, start: BlockNumber) {
 		for n in self.headers.get_tail(&start) {
-			match self.headers.find_item(&n) {
-				Some(ref header_data) => {
-					let header_to_delete = HeaderView::new(&header_data.data);
-					let header_id = HeaderId {
-						transactions_root: header_to_delete.transactions_root(),
-						uncles: header_to_delete.uncles_hash()
-					};
-					self.header_ids.remove(&header_id);
-				},
-				None => {}
+			if let Some(ref header_data) = self.headers.find_item(&n) {
+				let header_to_delete = HeaderView::new(&header_data.data);
+				let header_id = HeaderId {
+					transactions_root: header_to_delete.transactions_root(),
+					uncles: header_to_delete.uncles_hash()
+				};
+				self.header_ids.remove(&header_id);
 			}
 			self.downloading_bodies.remove(&n);
 			self.downloading_headers.remove(&n);
@@ -796,12 +794,9 @@ impl ChainSync {
 		packet.append(&chain.best_block_hash);
 		packet.append(&chain.genesis_hash);
 		//TODO: handle timeout for status request
-		match io.send(peer_id, STATUS_PACKET, packet.out()) {
-			Err(e) => {
-				warn!(target:"sync", "Error sending status request: {:?}", e);
-				io.disable_peer(peer_id);
-			}
-			Ok(_) => ()
+		if let Err(e) = io.send(peer_id, STATUS_PACKET, packet.out()) {
+			warn!(target:"sync", "Error sending status request: {:?}", e);
+			io.disable_peer(peer_id);
 		}
 	}
 
@@ -837,12 +832,9 @@ impl ChainSync {
 		let mut data = Bytes::new();
 		let inc = (skip + 1) as BlockNumber;
 		while number <= last && number > 0 && count < max_count {
-			match io.chain().block_header_at(number) {
-				Some(mut hdr) => {
-					data.append(&mut hdr);
-					count += 1;
-				}
-				None => {}
+			if let Some(mut hdr) = io.chain().block_header_at(number) {
+				data.append(&mut hdr);
+				count += 1;
 			}
 			if reverse {
 				if number <= inc {
@@ -874,12 +866,9 @@ impl ChainSync {
 		let mut added = 0usize;
 		let mut data = Bytes::new();
 		for i in 0..count {
-			match io.chain().block_body(&try!(r.val_at::(i))) {
-				Some(mut hdr) => {
-					data.append(&mut hdr);
-					added += 1;
-				}
-				None => {}
+			if let Some(mut hdr) = io.chain().block_body(&try!(r.val_at::(i))) {
+				data.append(&mut hdr);
+				added += 1;
 			}
 		}
 		let mut rlp = RlpStream::new_list(added);
@@ -901,12 +890,9 @@ impl ChainSync {
 		let mut added = 0usize;
 		let mut data = Bytes::new();
 		for i in 0..count {
-			match io.chain().state_data(&try!(r.val_at::(i))) {
-				Some(mut hdr) => {
-					data.append(&mut hdr);
-					added += 1;
-				}
-				None => {}
+			if let Some(mut hdr) = io.chain().state_data(&try!(r.val_at::(i))) {
+				data.append(&mut hdr);
+				added += 1;
 			}
 		}
 		let mut rlp = RlpStream::new_list(added);
@@ -927,12 +913,9 @@ impl ChainSync {
 		let mut added = 0usize;
 		let mut data = Bytes::new();
 		for i in 0..count {
-			match io.chain().block_receipts(&try!(r.val_at::(i))) {
-				Some(mut hdr) => {
-					data.append(&mut hdr);
-					added += 1;
-				}
-				None => {}
+			if let Some(mut hdr) = io.chain().block_receipts(&try!(r.val_at::(i))) {
+				data.append(&mut hdr);
+				added += 1;
 			}
 		}
 		let mut rlp = RlpStream::new_list(added);
diff --git a/src/sync/io.rs b/src/sync/io.rs
index affcbc0d7..aa572c133 100644
--- a/src/sync/io.rs
+++ b/src/sync/io.rs
@@ -14,7 +14,7 @@ pub trait SyncIo {
 	/// Send a packet to a peer.
 	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec) -> Result<(), UtilError>;
 	/// Get the blockchain
-	fn chain<'s>(&'s mut self) -> &'s mut BlockChainClient;
+	fn chain(&mut self) -> &mut BlockChainClient;
 	/// Returns peer client identifier string
 	fn peer_info(&self, peer_id: PeerId) -> String {
 		peer_id.to_string()
@@ -50,7 +50,7 @@ impl<'s, 'h, 'op> SyncIo for NetSyncIo<'s, 'h, 'op> {
 		self.network.send(peer_id, packet_id, data)
 	}
 
-	fn chain<'a>(&'a mut self) -> &'a mut BlockChainClient {
+	fn chain(&mut self) -> &mut BlockChainClient {
 		self.chain
 	}
 
diff --git a/src/sync/range_collection.rs b/src/sync/range_collection.rs
index d212625be..b8186e5a5 100644
--- a/src/sync/range_collection.rs
+++ b/src/sync/range_collection.rs
@@ -29,7 +29,7 @@ pub trait RangeCollection {
 	/// Remove all elements >= `tail`
 	fn insert_item(&mut self, key: K, value: V);
 	/// Get an iterator over ranges
-	fn range_iter<'c>(&'c self) -> RangeIterator<'c, K, V>;
+	fn range_iter(& self) -> RangeIterator;
 }
 
 /// Range iterator. For each range yelds a key for the first element of the range and a vector of values.
@@ -60,7 +60,7 @@ impl<'c, K:'c, V:'c> Iterator for RangeIterator<'c, K, V> where K: Add
 RangeCollection for Vec<(K, Vec)> where K: Ord + PartialEq + Add + Sub + Copy + FromUsize + ToUsize {
-	fn range_iter<'c>(&'c self) -> RangeIterator<'c, K, V> {
+	fn range_iter(&self) -> RangeIterator {
 		RangeIterator {
 			range: self.len(),
 			collection: self
@@ -191,6 +191,7 @@ impl RangeCollection for Vec<(K, Vec)> where K: Ord + PartialEq +
 }
 
 #[test]
+#[allow(cyclomatic_complexity)]
 fn test_range() {
 	use std::cmp::{Ordering};
 
diff --git a/src/sync/tests.rs b/src/sync/tests.rs
index 05d7ac317..72c207f10 100644
--- a/src/sync/tests.rs
+++ b/src/sync/tests.rs
@@ -64,7 +64,7 @@ impl BlockChainClient for TestBlockChainClient {
 	}
 
 	fn block(&self, h: &H256) -> Option {
-		self.blocks.get(h).map(|b| b.clone())
+		self.blocks.get(h).cloned()
 	}
 
 	fn block_status(&self, h: &H256) -> BlockStatus {
@@ -208,7 +208,7 @@ impl<'p> SyncIo for TestIo<'p> {
 		Ok(())
 	}
 
-	fn chain<'a>(&'a mut self) -> &'a mut BlockChainClient {
+	fn chain(&mut self) -> &mut BlockChainClient {
 		self.chain
 	}
 }
@@ -265,14 +265,11 @@ impl TestNet {
 	pub fn sync_step(&mut self) {
 		for peer in 0..self.peers.len() {
-			match self.peers[peer].queue.pop_front() {
-				Some(packet) => {
-					let mut p = self.peers.get_mut(packet.recipient).unwrap();
-					trace!("--- {} -> {} ---", peer, packet.recipient);
-					p.sync.on_packet(&mut TestIo::new(&mut p.chain, &mut p.queue, Some(peer as PeerId)), peer as PeerId, packet.packet_id, &packet.data);
-					trace!("----------------");
-				},
-				None => {}
+			if let Some(packet) = self.peers[peer].queue.pop_front() {
+				let mut p = self.peers.get_mut(packet.recipient).unwrap();
+				trace!("--- {} -> {} ---", peer, packet.recipient);
+				p.sync.on_packet(&mut TestIo::new(&mut p.chain, &mut p.queue, Some(peer as PeerId)), peer as PeerId, packet.packet_id, &packet.data);
+				trace!("----------------");
 			}
 			let mut p = self.peers.get_mut(peer).unwrap();
 			p.sync.maintain_sync(&mut TestIo::new(&mut p.chain, &mut p.queue, None));
diff --git a/src/tests/executive.rs b/src/tests/executive.rs
index fe428e199..d201f7fc5 100644
--- a/src/tests/executive.rs
+++ b/src/tests/executive.rs
@@ -168,7 +168,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec {
 		let mut fail = false;
 		//let mut fail_unless = |cond: bool| if !cond && !fail { failed.push(name.to_string()); fail = true };
 		let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
-			failed.push(format!("[{}] {}: {}", vm, name.to_string(), s));
+			failed.push(format!("[{}] {}: {}", vm, name, s));
 			fail = true
 		};
 
@@ -245,7 +245,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec {
 		test.find("post").map(|pre| for (addr, s) in pre.as_object().unwrap() {
 			let address = Address::from(addr.as_ref());
-			fail_unless(state.code(&address).unwrap_or(vec![]) == Bytes::from_json(&s["code"]), "code is incorrect");
+			fail_unless(state.code(&address).unwrap_or_else(|| vec![]) == Bytes::from_json(&s["code"]), "code is incorrect");
 			fail_unless(state.balance(&address) == xjson!(&s["balance"]), "balance is incorrect");
 			fail_unless(state.nonce(&address) == xjson!(&s["nonce"]), "nonce is incorrect");
 			BTreeMap::from_json(&s["storage"]).iter().foreach(|(k, v)| fail_unless(&state.storage_at(&address, &k) == v, "storage is incorrect"));
@@ -266,7 +266,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec {
 		}
 
-		for f in failed.iter() {
+		for f in &failed {
 			println!("FAILED: {:?}", f);
 		}
 
diff --git a/src/tests/state.rs b/src/tests/state.rs
index 119e7037a..bebb37c88 100644
--- a/src/tests/state.rs
+++ b/src/tests/state.rs
@@ -15,7 +15,7 @@ fn do_json_test(json_data: &[u8]) -> Vec {
 		let mut fail = false;
 		{
 			let mut fail_unless = |cond: bool| if !cond && !fail {
-				failed.push(name.to_string());
+				failed.push(name.clone());
 				flush(format!("FAIL\n"));
 				fail = true;
 				true
diff --git a/src/tests/test_common.rs b/src/tests/test_common.rs
index 15c5364c4..adb1c35d3 100644
--- a/src/tests/test_common.rs
+++ b/src/tests/test_common.rs
@@ -6,7 +6,7 @@ macro_rules! declare_test {
 		#[test]
 		#[allow(non_snake_case)]
 		fn $id() {
-			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).len() == 0);
+			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).is_empty());
 		}
 	};
 }
@@ -18,7 +18,7 @@ macro_rules! declare_test_ignore {
 		#[ignore]
 		#[allow(non_snake_case)]
 		fn $id() {
-			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).len() == 0);
+			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).is_empty());
 		}
 	};
 }
diff --git a/src/tests/transaction.rs b/src/tests/transaction.rs
index d4836ca84..a798a38f3 100644
--- a/src/tests/transaction.rs
+++ b/src/tests/transaction.rs
@@ -9,13 +9,13 @@ fn do_json_test(json_data: &[u8]) -> Vec {
 	let ot = RefCell::new(Transaction::new());
 	for (name, test) in json.as_object().unwrap() {
 		let mut fail = false;
-		let mut fail_unless = |cond: bool| if !cond && !fail { failed.push(name.to_string()); println!("Transaction: {:?}", ot.borrow()); fail = true };
+		let mut fail_unless = |cond: bool| if !cond && !fail { failed.push(name.clone()); println!("Transaction: {:?}", ot.borrow()); fail = true };
 		let schedule = match test.find("blocknumber")
 			.and_then(|j| j.as_string())
 			.and_then(|s| BlockNumber::from_str(s).ok())
 			.unwrap_or(0) { x if x < 900000 => &old_schedule, _ => &new_schedule };
 		let rlp = Bytes::from_json(&test["rlp"]);
-		let res = UntrustedRlp::new(&rlp).as_val().map_err(|e| From::from(e)).and_then(|t: Transaction| t.validate(schedule, schedule.have_delegate_call));
+		let res = UntrustedRlp::new(&rlp).as_val().map_err(From::from).and_then(|t: Transaction| t.validate(schedule, schedule.have_delegate_call));
 		fail_unless(test.find("transaction").is_none() == res.is_err());
 		if let (Some(&Json::Object(ref tx)), Some(&Json::String(ref expect_sender))) = (test.find("transaction"), test.find("sender")) {
 			let t = res.unwrap();
@@ -30,11 +30,11 @@ fn do_json_test(json_data: &[u8]) -> Vec {
 				fail_unless(to == &xjson!(&tx["to"]));
 			} else {
 				*ot.borrow_mut() = t.clone();
-				fail_unless(Bytes::from_json(&tx["to"]).len() == 0);
+				fail_unless(Bytes::from_json(&tx["to"]).is_empty());
 			}
 		}
 	}
-	for f in failed.iter() {
+	for f in &failed {
 		println!("FAILED: {:?}", f);
 	}
 	failed
diff --git a/src/transaction.rs b/src/transaction.rs
index 081870e4b..47b8fa91f 100644
--- a/src/transaction.rs
+++ b/src/transaction.rs
@@ -117,9 +117,8 @@ impl Transaction {
 		};
 		s.append(&self.value);
 		s.append(&self.data);
-		match with_seal {
-			Seal::With => { s.append(&(self.v as u16)).append(&self.r).append(&self.s); },
-			_ => {}
+		if let Seal::With = with_seal {
+			s.append(&(self.v as u16)).append(&self.r).append(&self.s);
 		}
 	}
 
@@ -138,7 +137,7 @@ impl FromJson for Transaction {
 			gas_price: xjson!(&json["gasPrice"]),
 			gas: xjson!(&json["gasLimit"]),
 			action: match Bytes::from_json(&json["to"]) {
-				ref x if x.len() == 0 => Action::Create,
+				ref x if x.is_empty() => Action::Create,
 				ref x => Action::Call(Address::from_slice(x)),
 			},
 			value: xjson!(&json["value"]),
@@ -303,4 +302,4 @@ fn signing() {
 	let key = KeyPair::create().unwrap();
 	let t = Transaction::new_create(U256::from(42u64), b"Hello!".to_vec(), U256::from(3000u64), U256::from(50_000u64), U256::from(1u64)).signed(&key.secret());
 	assert_eq!(Address::from(key.public().sha3()), t.sender().unwrap());
-}
\ No newline at end of file
+}
diff --git a/src/verification.rs b/src/verification.rs
index 3d852dc3e..064c0b7d7 100644
--- a/src/verification.rs
+++ b/src/verification.rs
@@ -64,7 +64,7 @@ pub fn verify_block_unordered(header: Header, bytes: Bytes, engine: &Engine) ->
 /// Phase 3 verification. Check block information against parent and uncles.
 pub fn verify_block_family(header: &Header, bytes: &[u8], engine: &Engine, bc: &BC) -> Result<(), Error> where BC: BlockProvider {
 	// TODO: verify timestamp
-	let parent = try!(bc.block_header(&header.parent_hash).ok_or::(From::from(BlockError::UnknownParent(header.parent_hash.clone()))));
+	let parent = try!(bc.block_header(&header.parent_hash).ok_or_else(|| Error::from(BlockError::UnknownParent(header.parent_hash.clone()))));
 	try!(verify_parent(&header, &parent));
 	try!(engine.verify_block_family(&header, &parent, Some(bytes)));
 
@@ -122,7 +122,7 @@ pub fn verify_block_family(header: &Header, bytes: &[u8], engine: &Engine, b
 			// cB.p^7 -------------/
 			// cB.p^8
 			let mut expected_uncle_parent = header.parent_hash.clone();
-			let uncle_parent = try!(bc.block_header(&uncle.parent_hash).ok_or::(From::from(BlockError::UnknownUncleParent(uncle.parent_hash.clone()))));
+			let uncle_parent = try!(bc.block_header(&uncle.parent_hash).ok_or_else(|| Error::from(BlockError::UnknownUncleParent(uncle.parent_hash.clone()))));
 			for _ in 0..depth {
 				match bc.block_details(&expected_uncle_parent) {
 					Some(details) => {
@@ -284,7 +284,7 @@ mod tests {
 		/// Get raw block data
 		fn block(&self, hash: &H256) -> Option {
-			self.blocks.get(hash).map(|b| b.clone())
+			self.blocks.get(hash).cloned()
 		}
 
 		/// Get the familial details concerning a block.
@@ -302,7 +302,7 @@ mod tests {
 		/// Get the hash of given block's number.
 		fn block_hash(&self, index: BlockNumber) -> Option {
-			self.numbers.get(&index).map(|h| h.clone())
+			self.numbers.get(&index).cloned()
 		}
 	}
 
diff --git a/src/views.rs b/src/views.rs
index a9c723ef2..e1c704625 100644
--- a/src/views.rs
+++ b/src/views.rs
@@ -141,7 +141,7 @@ impl<'a> BlockView<'a> {
 
 	/// Return List of transactions in given block.
 	pub fn transaction_views(&self) -> Vec {
-		self.rlp.at(1).iter().map(|rlp| TransactionView::new_from_rlp(rlp)).collect()
+		self.rlp.at(1).iter().map(TransactionView::new_from_rlp).collect()
 	}
 
 	/// Return transaction hashes.
@@ -156,7 +156,7 @@ impl<'a> BlockView<'a> {
 
 	/// Return List of transactions in given block.
 	pub fn uncle_views(&self) -> Vec {
-		self.rlp.at(2).iter().map(|rlp| HeaderView::new_from_rlp(rlp)).collect()
+		self.rlp.at(2).iter().map(HeaderView::new_from_rlp).collect()
 	}
 
 	/// Return list of uncle hashes of given block.
diff --git a/util/Cargo.toml b/util/Cargo.toml
index 02fdad17f..38ccfe9f8 100644
--- a/util/Cargo.toml
+++ b/util/Cargo.toml
@@ -24,6 +24,7 @@ heapsize = "0.2"
 itertools = "0.4"
 slab = { git = "https://github.com/arkpar/slab.git" }
 sha3 = { path = "sha3" }
+clippy = "*" # Always newest, since we use nightly
 
 [dev-dependencies]
 json-tests = { path = "json-tests" }
diff --git a/util/src/bytes.rs b/util/src/bytes.rs
index 8ee98441b..a30581a1f 100644
--- a/util/src/bytes.rs
+++ b/util/src/bytes.rs
@@ -106,18 +106,18 @@ impl<'a> Deref for BytesRef<'a> {
 	type Target = [u8];
 
 	fn deref(&self) -> &[u8] {
-		match self {
-			&BytesRef::Flexible(ref bytes) => bytes,
-			&BytesRef::Fixed(ref bytes) => bytes
+		match *self {
+			BytesRef::Flexible(ref bytes) => bytes,
+			BytesRef::Fixed(ref bytes) => bytes
 		}
 	}
 }
 
 impl <'a> DerefMut for BytesRef<'a> {
 	fn deref_mut(&mut self) -> &mut [u8] {
-		match self {
-			&mut BytesRef::Flexible(ref mut bytes) => bytes,
-			&mut BytesRef::Fixed(ref mut bytes) => bytes
+		match *self {
+			BytesRef::Flexible(ref mut bytes) => bytes,
+			BytesRef::Fixed(ref mut bytes) => bytes
 		}
 	}
 }
@@ -299,7 +299,7 @@ pub trait FromBytes: Sized {
 
 impl FromBytes for String {
 	fn from_bytes(bytes: &[u8]) -> FromBytesResult {
-		Ok(::std::str::from_utf8(bytes).unwrap().to_string())
+		Ok(::std::str::from_utf8(bytes).unwrap().to_owned())
 	}
 }
 
diff --git a/util/src/chainfilter.rs b/util/src/chainfilter.rs
index 41fce8521..64f9e7d39 100644
--- a/util/src/chainfilter.rs
+++ b/util/src/chainfilter.rs
@@ -323,10 +323,9 @@ impl<'a, D> ChainFilter<'a, D> where D: FilterDataSource
 				let offset = level_size * index;
 
 				// go doooown!
-				match self.blocks(bloom, from_block, to_block, max_level, offset) {
-					Some(blocks) => result.extend(blocks),
-					None => ()
-				};
+				if let Some(blocks) = self.blocks(bloom, from_block, to_block, max_level, offset) {
+					result.extend(blocks);
+				}
 			}
 
 		result
diff --git a/util/src/hash.rs b/util/src/hash.rs
index 352efed7d..252877a24 100644
--- a/util/src/hash.rs
+++ b/util/src/hash.rs
@@ -207,11 +207,11 @@ macro_rules! impl_hash {
 		impl FromJson for $from {
 			fn from_json(json: &Json) -> Self {
-				match json {
-					&Json::String(ref s) => {
+				match *json {
+					Json::String(ref s) => {
 						match s.len() % 2 {
 							0 => FromStr::from_str(clean_0x(s)).unwrap(),
-							_ => FromStr::from_str(&("0".to_string() + &(clean_0x(s).to_string()))[..]).unwrap()
+							_ => FromStr::from_str(&("0".to_owned() + &(clean_0x(s).to_owned()))[..]).unwrap()
 						}
 					},
 					_ => Default::default(),
@@ -221,7 +221,7 @@ macro_rules! impl_hash {
 		impl fmt::Debug for $from {
 			fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-				for i in self.0.iter() {
+				for i in &self.0[..] {
 					try!(write!(f, "{:02x}", i));
 				}
 				Ok(())
@@ -229,11 +229,11 @@ macro_rules! impl_hash {
 		}
 		impl fmt::Display for $from {
 			fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-				for i in self.0[0..2].iter() {
+				for i in &self.0[0..2] {
 					try!(write!(f, "{:02x}", i));
 				}
 				try!(write!(f, "…"));
-				for i in self.0[$size - 4..$size].iter() {
+				for i in &self.0[$size - 4..$size] {
 					try!(write!(f, "{:02x}", i));
 				}
 				Ok(())
@@ -291,36 +291,36 @@ macro_rules! impl_hash {
 		impl Index for $from {
 			type Output = u8;
 
-			fn index<'a>(&'a self, index: usize) -> &'a u8 {
+			fn index(&self, index: usize) -> &u8 {
 				&self.0[index]
 			}
 		}
 		impl IndexMut for $from {
-			fn index_mut<'a>(&'a mut self, index: usize) -> &'a mut u8 {
+			fn index_mut(&mut self, index: usize) -> &mut u8 {
 				&mut self.0[index]
 			}
 		}
 		impl Index> for $from {
 			type Output = [u8];
 
-			fn index<'a>(&'a self, index: ops::Range) -> &'a [u8] {
+			fn index(&self, index: ops::Range) -> &[u8] {
 				&self.0[index]
 			}
 		}
 		impl IndexMut> for $from {
-			fn index_mut<'a>(&'a mut self, index: ops::Range) -> &'a mut [u8] {
+			fn index_mut(&mut self, index: ops::Range) -> &mut [u8] {
 				&mut self.0[index]
 			}
 		}
 		impl Index for $from {
 			type Output = [u8];
 
-			fn index<'a>(&'a self, _index: ops::RangeFull) -> &'a [u8] {
+			fn index(&self, _index: ops::RangeFull) -> &[u8] {
 				&self.0
 			}
 		}
 		impl IndexMut for $from {
-			fn index_mut<'a>(&'a mut self, _index: ops::RangeFull) -> &'a mut [u8] {
+			fn index_mut(&mut self, _index: ops::RangeFull) -> &mut [u8] {
 				&mut self.0
 			}
 		}
@@ -440,9 +440,9 @@ macro_rules! impl_hash {
 			fn from(s: &'_ str) -> $from {
 				use std::str::FromStr;
 				if s.len() % 2 == 1 {
-					$from::from_str(&("0".to_string() + &(clean_0x(s).to_string()))[..]).unwrap_or($from::new())
+					$from::from_str(&("0".to_owned() + &(clean_0x(s).to_owned()))[..]).unwrap_or_else(|_| $from::new())
 				} else {
-					$from::from_str(clean_0x(s)).unwrap_or($from::new())
+					$from::from_str(clean_0x(s)).unwrap_or_else(|_| $from::new())
 				}
 			}
 		}
@@ -565,6 +565,7 @@ mod tests {
 	use std::str::FromStr;
 
 	#[test]
+	#[allow(eq_op)]
 	fn hash() {
 		let h = H64([0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]);
 		assert_eq!(H64::from_str("0123456789abcdef").unwrap(), h);
diff --git a/util/src/io/service.rs b/util/src/io/service.rs
index 4a96d19a7..537f26b8f 100644
--- a/util/src/io/service.rs
+++ b/util/src/io/service.rs
@@ -93,17 +93,17 @@ impl Handler for IoManager where Message: Send + 'static {
 	fn ready(&mut self, event_loop: &mut EventLoop, token: Token, events: EventSet) {
 		if events.is_hup() {
-			for h in self.handlers.iter_mut() {
+			for h in &mut self.handlers {
 				h.stream_hup(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
 			}
 		}
 		else if events.is_readable() {
-			for h in self.handlers.iter_mut() {
+			for h in &mut self.handlers {
 				h.stream_readable(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
 			}
 		}
 		else if events.is_writable() {
-			for h in self.handlers.iter_mut() {
+			for h in &mut self.handlers {
 				h.stream_writable(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
 			}
 		}
@@ -116,13 +116,13 @@ impl Handler for IoManager where Message: Send + 'static {
 					let timer = self.timers.get_mut(token).expect("Unknown user timer token");
 					timer.delay
 				};
-				for h in self.handlers.iter_mut() {
+				for h in &mut self.handlers {
 					h.timeout(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
 				}
 				event_loop.timeout_ms(token, delay).expect("Error re-registering user timer");
 			}
 			_ => { // Just pass the event down. IoHandler is supposed to re-register it if required.
-				for h in self.handlers.iter_mut() {
+				for h in &mut self.handlers {
 					h.timeout(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
 				}
 			}
@@ -140,7 +140,7 @@ impl Handler for IoManager where Message: Send + 'static {
 				self.handlers.last_mut().unwrap().initialize(&mut IoContext::new(event_loop, &mut self.timers));
 			},
 			IoMessage::UserMessage(ref mut data) => {
-				for h in self.handlers.iter_mut() {
+				for h in &mut self.handlers {
 					h.message(&mut IoContext::new(event_loop, &mut self.timers), data);
 				}
 			}
diff --git a/util/src/journaldb.rs b/util/src/journaldb.rs
index ada9c0d2b..477aa6071 100644
--- a/util/src/journaldb.rs
+++ b/util/src/journaldb.rs
@@ -96,7 +96,7 @@ impl JournalDB {
 			})) {
 				let rlp = Rlp::new(&rlp_data);
 				let to_remove: Vec = rlp.val_at(if canon_id == rlp.val_at(0) {2} else {1});
-				for i in to_remove.iter() {
+				for i in &to_remove {
 					self.forward.remove(i);
 				}
 				try!(self.backing.delete(&last));
diff --git a/util/src/json_aid.rs b/util/src/json_aid.rs
index 595484908..5974c5989 100644
--- a/util/src/json_aid.rs
+++ b/util/src/json_aid.rs
@@ -11,18 +11,18 @@ pub fn clean(s: &str) -> &str {
 
 fn u256_from_str(s: &str) -> U256 {
 	if s.len() >= 2 && &s[0..2] == "0x" {
-		U256::from_str(&s[2..]).unwrap_or(U256::from(0))
+		U256::from_str(&s[2..]).unwrap_or_else(|_| U256::zero())
 	} else {
-		U256::from_dec_str(s).unwrap_or(U256::from(0))
+		U256::from_dec_str(s).unwrap_or_else(|_| U256::zero())
 	}
 }
 
 impl FromJson for Bytes {
 	fn from_json(json: &Json) -> Self {
-		match json {
-			&Json::String(ref s) => match s.len() % 2 {
-				0 => FromHex::from_hex(clean(s)).unwrap_or(vec![]),
-				_ => FromHex::from_hex(&("0".to_string() + &(clean(s).to_string()))[..]).unwrap_or(vec![]),
+		match *json {
+			Json::String(ref s) => match s.len() % 2 {
+				0 => FromHex::from_hex(clean(s)).unwrap_or_else(|_| vec![]),
+				_ => FromHex::from_hex(&("0".to_owned() + &(clean(s).to_owned()))[..]).unwrap_or_else(|_| vec![]),
 			},
 			_ => vec![],
 		}
@@ -31,8 +31,8 @@ impl FromJson for Bytes {
 
 impl FromJson for BTreeMap {
 	fn from_json(json: &Json) -> Self {
-		match json {
-			&Json::Object(ref o) => o.iter().map(|(key, value)| (x!(&u256_from_str(key)), x!(&U256::from_json(value)))).collect(),
+		match *json {
+			Json::Object(ref o) => o.iter().map(|(key, value)| (x!(&u256_from_str(key)), x!(&U256::from_json(value)))).collect(),
 			_ => BTreeMap::new(),
 		}
 	}
@@ -40,8 +40,8 @@ impl FromJson for Vec where T: FromJson {
 	fn from_json(json: &Json) -> Self {
-		match json {
-			&Json::Array(ref o) => o.iter().map(|x|T::from_json(x)).collect(),
+		match *json {
+			Json::Array(ref o) => o.iter().map(|x|T::from_json(x)).collect(),
 			_ => Vec::new(),
 		}
 	}
@@ -49,9 +49,9 @@ impl FromJson for Option where T: FromJson {
 	fn from_json(json: &Json) -> Self {
-		match json {
-			&Json::String(ref o) if o.is_empty() => None,
-			&Json::Null => None,
+		match *json {
+			Json::String(ref o) if o.is_empty() => None,
+			Json::Null => None,
 			_ => Some(FromJson::from_json(json)),
 		}
 	}
@@ -135,4 +135,4 @@ fn option_types() {
 	assert_eq!(None, v);
 	let v: Option = xjson!(&j["empty"]);
 	assert_eq!(None, v);
-}
\ No newline at end of file
+}
diff --git a/util/src/lib.rs b/util/src/lib.rs
index 1cfa11657..d5acbc069 100644
--- a/util/src/lib.rs
+++ b/util/src/lib.rs
@@ -2,6 +2,9 @@
 #![feature(op_assign_traits)]
 #![feature(augmented_assignments)]
 #![feature(associated_consts)]
+#![feature(plugin)]
+#![plugin(clippy)]
+#![allow(needless_range_loop, match_bool)]
 //! Ethcore-util library
 //!
 //! ### Rust version:
diff --git a/util/src/misc.rs b/util/src/misc.rs
index b28b8df42..316e78a11 100644
--- a/util/src/misc.rs
+++ b/util/src/misc.rs
@@ -18,13 +18,13 @@ impl Diff where T: Eq {
 	pub fn new(pre: T, post: T) -> Self { if pre == post { Diff::Same } else { Diff::Changed(pre, post) } }
 
 	/// Get the before value, if there is one.
-	pub fn pre(&self) -> Option<&T> { match self { &Diff::Died(ref x) | &Diff::Changed(ref x, _) => Some(x), _ => None } }
+	pub fn pre(&self) -> Option<&T> { match *self { Diff::Died(ref x) | Diff::Changed(ref x, _) => Some(x), _ => None } }
 
 	/// Get the after value, if there is one.
-	pub fn post(&self) -> Option<&T> { match self { &Diff::Born(ref x) | &Diff::Changed(_, ref x) => Some(x), _ => None } }
+	pub fn post(&self) -> Option<&T> { match *self { Diff::Born(ref x) | Diff::Changed(_, ref x) => Some(x), _ => None } }
 
 	/// Determine whether there was a change or not.
-	pub fn is_same(&self) -> bool { match self { &Diff::Same => true, _ => false }}
+	pub fn is_same(&self) -> bool { match *self { Diff::Same => true, _ => false }}
 }
 
 #[derive(PartialEq,Eq,Clone,Copy)]
diff --git a/util/src/network/connection.rs b/util/src/network/connection.rs
index f11c10384..6127cf838 100644
--- a/util/src/network/connection.rs
+++ b/util/src/network/connection.rs
@@ -86,7 +86,7 @@ impl Connection {
 
 	/// Add a packet to send queue.
 	pub fn send(&mut self, data: Bytes) {
-		if data.len() != 0 {
+		if !data.is_empty() {
 			self.send_queue.push_back(Cursor::new(data));
 		}
 		if !self.interest.is_writable() {
@@ -341,11 +341,8 @@ impl EncryptedConnection {
 		self.idle_timeout.map(|t| event_loop.clear_timeout(t));
 		match self.read_state {
 			EncryptedConnectionState::Header => {
-				match try!(self.connection.readable()) {
-					Some(data) => {
-						try!(self.read_header(&data));
-					},
-					None => {}
+				if let Some(data) = try!(self.connection.readable()) {
+					try!(self.read_header(&data));
 				};
 				Ok(None)
 			},
diff --git a/util/src/network/discovery.rs b/util/src/network/discovery.rs
index d2a4f21a2..1f2bd4e06 100644
--- a/util/src/network/discovery.rs
+++ b/util/src/network/discovery.rs
@@ -62,7 +62,7 @@ impl Discovery {
 			discovery_round: 0,
 			discovery_id: NodeId::new(),
 			discovery_nodes: HashSet::new(),
-			node_buckets: (0..NODE_BINS).map(|x| NodeBucket::new(x)).collect(),
+			node_buckets: (0..NODE_BINS).map(NodeBucket::new).collect(),
 		}
 	}
 
@@ -122,7 +122,8 @@ impl Discovery {
 		ret
 	}
 
-	fn nearest_node_entries<'b>(source: &NodeId, target: &NodeId, buckets: &'b Vec) -> Vec<&'b NodeId>
+	#[allow(cyclomatic_complexity)]
+	fn nearest_node_entries<'b>(source: &NodeId, target: &NodeId, buckets: &'b [NodeBucket]) -> Vec<&'b NodeId>
 	{
 		// send ALPHA FindNode packets to nodes we know, closest to target
 		const LAST_BIN: u32 = NODE_BINS - 1;
@@ -136,7 +137,7 @@ impl Discovery {
 		if head > 1 && tail != LAST_BIN {
 			while head != tail && head < NODE_BINS && count < BUCKET_SIZE {
-				for n in buckets[head as usize].nodes.iter()
+				for n in &buckets[head as usize].nodes
 				{
 					if count < BUCKET_SIZE {
 						count += 1;
@@ -147,7 +148,7 @@ impl Discovery {
 				}
 			}
 			if count < BUCKET_SIZE && tail != 0 {
-				for n in buckets[tail as usize].nodes.iter() {
+				for n in &buckets[tail as usize].nodes {
 					if count < BUCKET_SIZE {
 						count += 1;
 						found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
 					}
 				}
@@ -166,7 +167,7 @@ impl Discovery {
 		}
 		else if head < 2 {
 			while head < NODE_BINS && count < BUCKET_SIZE {
-				for n in buckets[head as usize].nodes.iter() {
+				for n in &buckets[head as usize].nodes {
 					if count < BUCKET_SIZE {
 						count += 1;
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n); @@ -180,7 +181,7 @@ impl Discovery { } else { while tail > 0 && count < BUCKET_SIZE { - for n in buckets[tail as usize].nodes.iter() { + for n in &buckets[tail as usize].nodes { if count < BUCKET_SIZE { count += 1; found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n); diff --git a/util/src/network/handshake.rs b/util/src/network/handshake.rs index ca95808b4..e7a669e25 100644 --- a/util/src/network/handshake.rs +++ b/util/src/network/handshake.rs @@ -93,21 +93,15 @@ impl Handshake { self.idle_timeout.map(|t| event_loop.clear_timeout(t)); match self.state { HandshakeState::ReadingAuth => { - match try!(self.connection.readable()) { - Some(data) => { - try!(self.read_auth(host, &data)); - try!(self.write_ack()); - }, - None => {} + if let Some(data) = try!(self.connection.readable()) { + try!(self.read_auth(host, &data)); + try!(self.write_ack()); }; }, HandshakeState::ReadingAck => { - match try!(self.connection.readable()) { - Some(data) => { - try!(self.read_ack(host, &data)); - self.state = HandshakeState::StartSession; - }, - None => {} + if let Some(data) = try!(self.connection.readable()) { + try!(self.read_ack(host, &data)); + self.state = HandshakeState::StartSession; }; }, _ => { panic!("Unexpected state"); } diff --git a/util/src/network/host.rs b/util/src/network/host.rs index 37b58f1f0..060fd5217 100644 --- a/util/src/network/host.rs +++ b/util/src/network/host.rs @@ -175,7 +175,7 @@ impl<'s, 'io, Message> NetworkContext<'s, 'io, Message> where Message: Send + 's s.info.client_version.clone() }, _ => { - "unknown".to_string() + "unknown".to_owned() } } } @@ -213,7 +213,7 @@ impl HostInfo { /// Increments and returns connection nonce. pub fn next_nonce(&mut self) -> H256 { self.nonce = self.nonce.sha3(); - return self.nonce.clone(); + self.nonce.clone() } } @@ -246,7 +246,7 @@ impl Host where Message: Send { config: config, nonce: H256::random(), protocol_version: 4, - client_version: "parity".to_string(), + client_version: "parity".to_owned(), listen_port: 0, capabilities: Vec::new(), }, @@ -274,11 +274,11 @@ impl Host where Message: Send { } fn have_session(&self, id: &NodeId) -> bool { - self.connections.iter().any(|e| match e { &ConnectionEntry::Session(ref s) => s.info.id.eq(&id), _ => false }) + self.connections.iter().any(|e| match *e { ConnectionEntry::Session(ref s) => s.info.id.eq(&id), _ => false }) } fn connecting_to(&self, id: &NodeId) -> bool { - self.connections.iter().any(|e| match e { &ConnectionEntry::Handshake(ref h) => h.id.eq(&id), _ => false }) + self.connections.iter().any(|e| match *e { ConnectionEntry::Handshake(ref h) => h.id.eq(&id), _ => false }) } fn connect_peers(&mut self, io: &mut IoContext>) { @@ -303,7 +303,7 @@ impl Host where Message: Send { } } - for n in to_connect.iter() { + for n in &to_connect { if n.peer_type == PeerType::Required { if req_conn < IDEAL_PEERS { self.connect_peer(&n.id, io); @@ -318,7 +318,7 @@ impl Host where Message: Send { let peer_count = 0; let mut open_slots = IDEAL_PEERS - peer_count - pending_count + req_conn; if open_slots > 0 { - for n in to_connect.iter() { + for n in &to_connect { if n.peer_type == PeerType::Optional && open_slots > 0 { open_slots -= 1; self.connect_peer(&n.id, io); @@ -328,6 +328,7 @@ impl Host where Message: Send { } } + #[allow(single_match)] fn connect_peer(&mut self, id: &NodeId, io: &mut IoContext>) { if self.have_session(id) { @@ -376,6 +377,7 @@ impl Host where Message: Send { trace!(target: 
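The `host.rs` changes above swap `to_string()` for `to_owned()` on string literals and drop a trailing `return`. A small illustration with assumed names (not the actual `HostInfo` type): `to_owned()` states the intent of cloning a `&str` without going through the `Display`/`ToString` machinery, and the last expression of a function is already its return value.

```rust
struct HostInfo {
    client_version: String,
    nonce: u64,
}

impl HostInfo {
    fn new() -> HostInfo {
        HostInfo {
            // &str -> String by cloning the borrowed data directly.
            client_version: "parity".to_owned(),
            nonce: 0,
        }
    }

    /// Increments and returns the nonce; no explicit `return` needed on the
    /// final expression.
    fn next_nonce(&mut self) -> u64 {
        self.nonce += 1;
        self.nonce
    }
}

fn main() {
    let mut info = HostInfo::new();
    assert_eq!(info.next_nonce(), 1);
    assert_eq!(info.client_version, "parity");
}
```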
"net", "accept"); } + #[allow(single_match)] fn connection_writable<'s>(&'s mut self, token: StreamToken, io: &mut IoContext<'s, NetworkIoMessage>) { let mut kill = false; let mut create_session = false; @@ -436,7 +438,7 @@ impl Host where Message: Send { }) }; match sd { SessionData::Ready => { - for (p, _) in self.handlers.iter_mut() { + for (p, _) in &mut self.handlers { if s.have_capability(p) { ready_data.push(p); } @@ -475,11 +477,8 @@ impl Host where Message: Send { h.read(&mut NetworkContext::new(io, p, Some(token), &mut self.connections, &mut self.timers), &token, packet_id, &data[1..]); } - match self.connections.get_mut(token) { - Some(&mut ConnectionEntry::Session(ref mut s)) => { - s.reregister(io.event_loop).unwrap_or_else(|e| debug!(target: "net", "Session registration error: {:?}", e)); - }, - _ => (), + if let Some(&mut ConnectionEntry::Session(ref mut s)) = self.connections.get_mut(token) { + s.reregister(io.event_loop).unwrap_or_else(|e| debug!(target: "net", "Session registration error: {:?}", e)); } } @@ -523,7 +522,7 @@ impl Host where Message: Send { match self.connections.get_mut(token) { Some(&mut ConnectionEntry::Handshake(_)) => (), // just abandon handshake Some(&mut ConnectionEntry::Session(ref mut s)) if s.is_ready() => { - for (p, _) in self.handlers.iter_mut() { + for (p, _) in &mut self.handlers { if s.have_capability(p) { to_disconnect.push(p); } @@ -600,19 +599,20 @@ impl IoHandler> for Host where Messa FIRST_CONNECTION ... LAST_CONNECTION => self.connection_timeout(token, io), NODETABLE_DISCOVERY => {}, NODETABLE_MAINTAIN => {}, - _ => match self.timers.get_mut(&token).map(|p| *p) { - Some(protocol) => match self.handlers.get_mut(protocol) { + _ => { + if let Some(protocol) = self.timers.get_mut(&token).map(|p| *p) { + match self.handlers.get_mut(protocol) { None => { warn!(target: "net", "No handler found for protocol: {:?}", protocol) }, Some(h) => { h.timeout(&mut NetworkContext::new(io, protocol, Some(token), &mut self.connections, &mut self.timers), token); } - }, - None => {} // time not registerd through us + }; + } // else time not registerd through us } } } fn message<'s>(&'s mut self, io: &mut IoContext<'s, NetworkIoMessage>, message: &'s mut NetworkIoMessage) { - match message { - &mut NetworkIoMessage::AddHandler { + match *message { + NetworkIoMessage::AddHandler { ref mut handler, ref protocol, ref versions @@ -624,7 +624,7 @@ impl IoHandler> for Host where Messa self.info.capabilities.push(CapabilityInfo { protocol: protocol, version: *v, packet_count:0 }); } }, - &mut NetworkIoMessage::Send { + NetworkIoMessage::Send { ref peer, ref packet_id, ref protocol, @@ -641,8 +641,8 @@ impl IoHandler> for Host where Messa } } }, - &mut NetworkIoMessage::User(ref message) => { - for (p, h) in self.handlers.iter_mut() { + NetworkIoMessage::User(ref message) => { + for (p, h) in &mut self.handlers { h.message(&mut NetworkContext::new(io, p, None, &mut self.connections, &mut self.timers), &message); } } diff --git a/util/src/network/node.rs b/util/src/network/node.rs index 5c08e0a66..fd27e58e2 100644 --- a/util/src/network/node.rs +++ b/util/src/network/node.rs @@ -20,14 +20,16 @@ pub struct NodeEndpoint { pub udp_port: u16 } -impl NodeEndpoint { +impl FromStr for NodeEndpoint { + type Err = UtilError; + /// Create endpoint from string. Performs name resolution if given a host name. 
fn from_str(s: &str) -> Result { let address = s.to_socket_addrs().map(|mut i| i.next()); match address { Ok(Some(a)) => Ok(NodeEndpoint { address: a, - address_str: s.to_string(), + address_str: s.to_owned(), udp_port: a.port() }), Ok(_) => Err(UtilError::AddressResolve(None)), diff --git a/util/src/network/session.rs b/util/src/network/session.rs index 828e4b062..d20448fdd 100644 --- a/util/src/network/session.rs +++ b/util/src/network/session.rs @@ -182,7 +182,7 @@ impl Session { // map to protocol let protocol = self.info.capabilities[i].protocol; let pid = packet_id - self.info.capabilities[i].id_offset; - return Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } ) + Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } ) }, _ => { debug!(target: "net", "Unkown packet: {:?}", packet_id); @@ -212,7 +212,7 @@ impl Session { // Intersect with host capabilities // Leave only highset mutually supported capability version let mut caps: Vec = Vec::new(); - for hc in host.capabilities.iter() { + for hc in &host.capabilities { if peer_caps.iter().any(|c| c.protocol == hc.protocol && c.version == hc.version) { caps.push(SessionCapabilityInfo { protocol: hc.protocol, diff --git a/util/src/overlaydb.rs b/util/src/overlaydb.rs index e8492091f..d2f56a520 100644 --- a/util/src/overlaydb.rs +++ b/util/src/overlaydb.rs @@ -169,7 +169,7 @@ impl HashDB for OverlayDB { match k { Some(&(ref d, rc)) if rc > 0 => Some(d), _ => { - let memrc = k.map(|&(_, rc)| rc).unwrap_or(0); + let memrc = k.map_or(0, |&(_, rc)| rc); match self.payload(key) { Some(x) => { let (d, rc) = x; @@ -194,16 +194,11 @@ impl HashDB for OverlayDB { match k { Some(&(_, rc)) if rc > 0 => true, _ => { - let memrc = k.map(|&(_, rc)| rc).unwrap_or(0); + let memrc = k.map_or(0, |&(_, rc)| rc); match self.payload(key) { Some(x) => { let (_, rc) = x; - if rc as i32 + memrc > 0 { - true - } - else { - false - } + rc as i32 + memrc > 0 } // Replace above match arm with this once https://github.com/rust-lang/rust/issues/15287 is done. 
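The `node.rs` hunk turns a hand-rolled inherent `from_str` into a `std::str::FromStr` impl, so callers can use `"host:port".parse()`. A rough, self-contained sketch with simplified stand-in types (string error instead of `UtilError`, no UDP port):

```rust
use std::net::{SocketAddr, ToSocketAddrs};
use std::str::FromStr;

struct Endpoint {
    address: SocketAddr,
    address_str: String,
}

impl FromStr for Endpoint {
    type Err = String;

    /// Resolve the string and keep both the parsed address and the original text.
    fn from_str(s: &str) -> Result<Endpoint, String> {
        match s.to_socket_addrs() {
            Ok(mut iter) => match iter.next() {
                Some(a) => Ok(Endpoint { address: a, address_str: s.to_owned() }),
                None => Err("no address for host".to_owned()),
            },
            Err(e) => Err(e.to_string()),
        }
    }
}

fn main() {
    // Any `FromStr` type can be built with `str::parse`.
    let ep: Endpoint = "127.0.0.1:30303".parse().unwrap();
    println!("resolved {} to {}", ep.address_str, ep.address);
}
```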
//Some((d, rc)) if rc + memrc > 0 => true, diff --git a/util/src/rlp/rlpstream.rs b/util/src/rlp/rlpstream.rs index 724974e08..ac22736fc 100644 --- a/util/src/rlp/rlpstream.rs +++ b/util/src/rlp/rlpstream.rs @@ -41,7 +41,7 @@ impl Stream for RlpStream { stream } - fn append<'a, E>(&'a mut self, object: &E) -> &'a mut RlpStream where E: Encodable { + fn append(&mut self, object: &E) -> &mut RlpStream where E: Encodable { // encode given value and add it at the end of the stream object.encode(&mut self.encoder); @@ -52,7 +52,7 @@ impl Stream for RlpStream { self } - fn append_list<'a>(&'a mut self, len: usize) -> &'a mut RlpStream { + fn append_list(&mut self, len: usize) -> &mut RlpStream { match len { 0 => { // we may finish, if the appended list len is equal 0 @@ -69,7 +69,7 @@ impl Stream for RlpStream { self } - fn append_empty_data<'a>(&'a mut self) -> &'a mut RlpStream { + fn append_empty_data(&mut self) -> &mut RlpStream { // self push raw item self.encoder.bytes.push(0x80); diff --git a/util/src/rlp/rlptraits.rs b/util/src/rlp/rlptraits.rs index 6fbc2125c..d6ef16932 100644 --- a/util/src/rlp/rlptraits.rs +++ b/util/src/rlp/rlptraits.rs @@ -9,7 +9,7 @@ pub trait Decoder: Sized { /// TODO [arkpar] Please document me fn as_list(&self) -> Result, DecoderError>; /// TODO [Gav Wood] Please document me - fn as_rlp<'a>(&'a self) -> &'a UntrustedRlp<'a>; + fn as_rlp(&self) -> &UntrustedRlp; /// TODO [debris] Please document me fn as_raw(&self) -> &[u8]; } @@ -255,7 +255,7 @@ pub trait Stream: Sized { /// assert_eq!(out, vec![0xca, 0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g', 0x80]); /// } /// ``` - fn append_list<'a>(&'a mut self, len: usize) -> &'a mut Self; + fn append_list(&mut self, len: usize) -> &mut Self; /// Apends null to the end of stream, chainable. /// @@ -270,7 +270,7 @@ pub trait Stream: Sized { /// assert_eq!(out, vec![0xc2, 0x80, 0x80]); /// } /// ``` - fn append_empty_data<'a>(&'a mut self) -> &'a mut Self; + fn append_empty_data(&mut self) -> &mut Self; /// Appends raw (pre-serialised) RLP data. Use with caution. Chainable. 
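The `RlpStream` signatures above lose their explicit `'a` because lifetime elision already ties `&mut self` to the returned `&mut Self`; that is clippy's `needless_lifetimes` lint. A toy builder (not the real `RlpStream` API) showing the before/after:

```rust
struct Stream {
    bytes: Vec<u8>,
}

impl Stream {
    fn new() -> Stream {
        Stream { bytes: Vec::new() }
    }

    // Before: fn append<'a>(&'a mut self, byte: u8) -> &'a mut Stream
    // After: elision infers exactly the same lifetimes.
    fn append(&mut self, byte: u8) -> &mut Stream {
        self.bytes.push(byte);
        self
    }
}

fn main() {
    let mut s = Stream::new();
    s.append(0x80).append(0xc0);
    assert_eq!(s.bytes, vec![0x80, 0xc0]);
}
```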
fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut Self; diff --git a/util/src/rlp/tests.rs b/util/src/rlp/tests.rs index 7c2099124..f33cec177 100644 --- a/util/src/rlp/tests.rs +++ b/util/src/rlp/tests.rs @@ -15,25 +15,25 @@ fn rlp_at() { assert!(rlp.is_list()); //let animals = as rlp::Decodable>::decode_untrusted(&rlp).unwrap(); let animals: Vec = rlp.as_val().unwrap(); - assert_eq!(animals, vec!["cat".to_string(), "dog".to_string()]); + assert_eq!(animals, vec!["cat".to_owned(), "dog".to_owned()]); let cat = rlp.at(0).unwrap(); assert!(cat.is_data()); assert_eq!(cat.as_raw(), &[0x83, b'c', b'a', b't']); - //assert_eq!(String::decode_untrusted(&cat).unwrap(), "cat".to_string()); - assert_eq!(cat.as_val::().unwrap(), "cat".to_string()); + //assert_eq!(String::decode_untrusted(&cat).unwrap(), "cat".to_owned()); + assert_eq!(cat.as_val::().unwrap(), "cat".to_owned()); let dog = rlp.at(1).unwrap(); assert!(dog.is_data()); assert_eq!(dog.as_raw(), &[0x83, b'd', b'o', b'g']); - //assert_eq!(String::decode_untrusted(&dog).unwrap(), "dog".to_string()); - assert_eq!(dog.as_val::().unwrap(), "dog".to_string()); + //assert_eq!(String::decode_untrusted(&dog).unwrap(), "dog".to_owned()); + assert_eq!(dog.as_val::().unwrap(), "dog".to_owned()); let cat_again = rlp.at(0).unwrap(); assert!(cat_again.is_data()); assert_eq!(cat_again.as_raw(), &[0x83, b'c', b'a', b't']); - //assert_eq!(String::decode_untrusted(&cat_again).unwrap(), "cat".to_string()); - assert_eq!(cat_again.as_val::().unwrap(), "cat".to_string()); + //assert_eq!(String::decode_untrusted(&cat_again).unwrap(), "cat".to_owned()); + assert_eq!(cat_again.as_val::().unwrap(), "cat".to_owned()); } } @@ -268,13 +268,13 @@ fn decode_untrusted_u256() { #[test] fn decode_untrusted_str() { - let tests = vec![DTestPair("cat".to_string(), vec![0x83, b'c', b'a', b't']), - DTestPair("dog".to_string(), vec![0x83, b'd', b'o', b'g']), - DTestPair("Marek".to_string(), + let tests = vec![DTestPair("cat".to_owned(), vec![0x83, b'c', b'a', b't']), + DTestPair("dog".to_owned(), vec![0x83, b'd', b'o', b'g']), + DTestPair("Marek".to_owned(), vec![0x85, b'M', b'a', b'r', b'e', b'k']), - DTestPair("".to_string(), vec![0x80]), + DTestPair("".to_owned(), vec![0x80]), DTestPair("Lorem ipsum dolor sit amet, consectetur adipisicing elit" - .to_string(), + .to_owned(), vec![0xb8, 0x38, b'L', b'o', b'r', b'e', b'm', b' ', b'i', b'p', b's', b'u', b'm', b' ', b'd', b'o', b'l', b'o', b'r', b' ', b's', b'i', b't', b' ', b'a', b'm', b'e', @@ -311,14 +311,14 @@ fn decode_untrusted_vector_u64() { #[test] fn decode_untrusted_vector_str() { - let tests = vec![DTestPair(vec!["cat".to_string(), "dog".to_string()], + let tests = vec![DTestPair(vec!["cat".to_owned(), "dog".to_owned()], vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'])]; run_decode_tests(tests); } #[test] fn decode_untrusted_vector_of_vectors_str() { - let tests = vec![DTestPair(vec![vec!["cat".to_string()]], + let tests = vec![DTestPair(vec![vec!["cat".to_owned()]], vec![0xc5, 0xc4, 0x83, b'c', b'a', b't'])]; run_decode_tests(tests); } diff --git a/util/src/rlp/untrusted_rlp.rs b/util/src/rlp/untrusted_rlp.rs index 3f11fa070..768d058c1 100644 --- a/util/src/rlp/untrusted_rlp.rs +++ b/util/src/rlp/untrusted_rlp.rs @@ -288,7 +288,7 @@ impl<'a> BasicDecoder<'a> { /// Return first item info fn payload_info(bytes: &[u8]) -> Result { - let item = match bytes.first().map(|&x| x) { + let item = match bytes.first().cloned() { None => return Err(DecoderError::RlpIsTooShort), Some(0...0x7f) => 
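The `untrusted_rlp.rs` change from `.map(|&x| x)` to `.cloned()` is clippy's `map_clone` lint: copying out of an `Option<&T>` has a named adapter. A minimal sketch:

```rust
fn first_byte(bytes: &[u8]) -> Option<u8> {
    // Before: bytes.first().map(|&x| x)
    // After: `cloned()` does the same for any `T: Clone`.
    bytes.first().cloned()
}

fn main() {
    assert_eq!(first_byte(&[0x83, b'c', b'a', b't']), Some(0x83));
    assert_eq!(first_byte(&[]), None);
}
```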
PayloadInfo::new(0, 1), Some(l @ 0x80...0xb7) => PayloadInfo::new(1, l as usize - 0x80), @@ -324,7 +324,7 @@ impl<'a> Decoder for BasicDecoder<'a> { let bytes = self.rlp.as_raw(); - match bytes.first().map(|&x| x) { + match bytes.first().cloned() { // rlp is too short None => Err(DecoderError::RlpIsTooShort), // single byt value @@ -355,12 +355,12 @@ impl<'a> Decoder for BasicDecoder<'a> { fn as_list(&self) -> Result, DecoderError> { let v: Vec> = self.rlp.iter() - .map(| i | BasicDecoder::new(i)) + .map(BasicDecoder::new) .collect(); Ok(v) } - fn as_rlp<'s>(&'s self) -> &'s UntrustedRlp<'s> { + fn as_rlp(&self) -> &UntrustedRlp { &self.rlp } } @@ -405,6 +405,7 @@ impl Decodable for Option where T: Decodable { macro_rules! impl_array_decodable { ($index_type:ty, $len:expr ) => ( impl Decodable for [T; $len] where T: Decodable { + #[allow(len_zero)] fn decode(decoder: &D) -> Result where D: Decoder { let decoders = try!(decoder.as_list()); diff --git a/util/src/squeeze.rs b/util/src/squeeze.rs index 10f446395..3fb4a81f9 100644 --- a/util/src/squeeze.rs +++ b/util/src/squeeze.rs @@ -42,7 +42,7 @@ pub trait Squeeze { impl Squeeze for HashMap where K: Eq + Hash + Clone + HeapSizeOf, T: HeapSizeOf { fn squeeze(&mut self, size: usize) { - if self.len() == 0 { + if self.is_empty() { return } @@ -50,7 +50,7 @@ impl Squeeze for HashMap where K: Eq + Hash + Clone + HeapSizeOf, T: let all_entries = size_of_entry * self.len(); let mut shrinked_size = all_entries; - while self.len() > 0 && shrinked_size > size { + while !self.is_empty() && shrinked_size > size { // could be optimized let key = self.keys().next().unwrap().clone(); self.remove(&key); diff --git a/util/src/trie/triedb.rs b/util/src/trie/triedb.rs index b0fcfcd16..7179c395e 100644 --- a/util/src/trie/triedb.rs +++ b/util/src/trie/triedb.rs @@ -38,6 +38,7 @@ pub struct TrieDB<'db> { pub hash_count: usize, } +#[allow(wrong_self_convention)] impl<'db> TrieDB<'db> { /// Create a new trie with the backing database `db` and `root` /// Panics, if `root` does not exist @@ -103,7 +104,7 @@ impl<'db> TrieDB<'db> { match node { Node::Extension(_, payload) => handle_payload(payload), - Node::Branch(payloads, _) => for payload in payloads.iter() { handle_payload(payload) }, + Node::Branch(payloads, _) => for payload in &payloads { handle_payload(payload) }, _ => {}, } } @@ -141,12 +142,9 @@ impl<'db> TrieDB<'db> { }, Node::Branch(ref nodes, ref value) => { try!(writeln!(f, "")); - match value { - &Some(v) => { - try!(self.fmt_indent(f, deepness + 1)); - try!(writeln!(f, "=: {:?}", v.pretty())) - }, - &None => {} + if let Some(v) = *value { + try!(self.fmt_indent(f, deepness + 1)); + try!(writeln!(f, "=: {:?}", v.pretty())) } for i in 0..16 { match self.get_node(nodes[i]) { diff --git a/util/src/trie/triedbmut.rs b/util/src/trie/triedbmut.rs index 3f62b2fdb..48749bf0d 100644 --- a/util/src/trie/triedbmut.rs +++ b/util/src/trie/triedbmut.rs @@ -50,6 +50,7 @@ enum MaybeChanged<'a> { Changed(Bytes), } +#[allow(wrong_self_convention)] impl<'db> TrieDBMut<'db> { /// Create a new trie with the backing database `db` and empty `root` /// Initialise to the state entailed by the genesis block. 
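Both `.map(NodeBucket::new)` and `.map(BasicDecoder::new)` above come from clippy's `redundant_closure` lint: a closure that only forwards its argument can be replaced by the function path itself. Illustrated with a toy type, not the real `NodeBucket`:

```rust
struct Bucket {
    index: usize,
}

impl Bucket {
    fn new(index: usize) -> Bucket {
        Bucket { index }
    }
}

fn main() {
    // Before: (0..4).map(|x| Bucket::new(x)).collect()
    let buckets: Vec<Bucket> = (0..4).map(Bucket::new).collect();
    assert_eq!(buckets.len(), 4);
    assert_eq!(buckets[3].index, 3);
}
```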
@@ -145,7 +146,7 @@ impl<'db> TrieDBMut<'db> { match node { Node::Extension(_, payload) => handle_payload(payload), - Node::Branch(payloads, _) => for payload in payloads.iter() { handle_payload(payload) }, + Node::Branch(payloads, _) => for payload in &payloads { handle_payload(payload) }, _ => {}, } } @@ -178,12 +179,9 @@ impl<'db> TrieDBMut<'db> { }, Node::Branch(ref nodes, ref value) => { try!(writeln!(f, "")); - match value { - &Some(v) => { - try!(self.fmt_indent(f, deepness + 1)); - try!(writeln!(f, "=: {:?}", v.pretty())) - }, - &None => {} + if let Some(v) = *value { + try!(self.fmt_indent(f, deepness + 1)); + try!(writeln!(f, "=: {:?}", v.pretty())) } for i in 0..16 { match self.get_node(nodes[i]) { @@ -331,6 +329,7 @@ impl<'db> TrieDBMut<'db> { } } + #[allow(cyclomatic_complexity)] /// Determine the RLP of the node, assuming we're inserting `partial` into the /// node currently of data `old`. This will *not* delete any hash of `old` from the database; /// it will just return the new RLP that includes the new node. @@ -694,7 +693,7 @@ mod tests { } } - fn populate_trie<'db>(db: &'db mut HashDB, root: &'db mut H256, v: &Vec<(Vec, Vec)>) -> TrieDBMut<'db> { + fn populate_trie<'db>(db: &'db mut HashDB, root: &'db mut H256, v: &[(Vec, Vec)]) -> TrieDBMut<'db> { let mut t = TrieDBMut::new(db, root); for i in 0..v.len() { let key: &[u8]= &v[i].0; @@ -704,8 +703,8 @@ mod tests { t } - fn unpopulate_trie<'a, 'db>(t: &mut TrieDBMut<'db>, v: &Vec<(Vec, Vec)>) { - for i in v.iter() { + fn unpopulate_trie<'db>(t: &mut TrieDBMut<'db>, v: &[(Vec, Vec)]) { + for i in v { let key: &[u8]= &i.0; t.remove(&key); } @@ -761,7 +760,7 @@ mod tests { println!("TRIE MISMATCH"); println!(""); println!("{:?} vs {:?}", memtrie.root(), real); - for i in x.iter() { + for i in &x { println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); } println!("{:?}", memtrie); @@ -774,7 +773,7 @@ mod tests { println!(""); println!("remaining: {:?}", memtrie.db_items_remaining()); println!("{:?} vs {:?}", memtrie.root(), real); - for i in x.iter() { + for i in &x { println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); } println!("{:?}", memtrie); @@ -1051,12 +1050,12 @@ mod tests { println!("TRIE MISMATCH"); println!(""); println!("ORIGINAL... {:?}", memtrie.root()); - for i in x.iter() { + for i in &x { println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); } println!("{:?}", memtrie); println!("SORTED... {:?}", memtrie_sorted.root()); - for i in y.iter() { + for i in &y { println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); } println!("{:?}", memtrie_sorted); diff --git a/util/src/uint.rs b/util/src/uint.rs index 5721c5614..ab136d7c6 100644 --- a/util/src/uint.rs +++ b/util/src/uint.rs @@ -200,7 +200,7 @@ macro_rules! construct_uint { #[inline] fn byte(&self, index: usize) -> u8 { let &$name(ref arr) = self; - (arr[index / 8] >> ((index % 8)) * 8) as u8 + (arr[index / 8] >> (((index % 8)) * 8)) as u8 } fn to_bytes(&self, bytes: &mut[u8]) { @@ -446,16 +446,16 @@ macro_rules! 
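The extra parentheses added in `uint.rs` come from clippy's `precedence` lint. Since `*` binds tighter than `>>`, `word >> (index % 8) * 8` already shifts by the product, but the explicit grouping makes that unambiguous to the reader. A cut-down version on a plain `u64` word (the real code works on the `U256` limb array):

```rust
fn byte_of(word: u64, index: usize) -> u8 {
    // Shift by (index % 8) * 8 bits, then truncate to the low byte.
    (word >> ((index % 8) * 8)) as u8
}

fn main() {
    let word = 0x1122_3344_5566_7788u64;
    assert_eq!(byte_of(word, 0), 0x88);
    assert_eq!(byte_of(word, 7), 0x11);
}
```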
construct_uint { impl FromJson for $name { fn from_json(json: &Json) -> Self { - match json { - &Json::String(ref s) => { + match *json { + Json::String(ref s) => { if s.len() >= 2 && &s[0..2] == "0x" { - FromStr::from_str(&s[2..]).unwrap_or(Default::default()) + FromStr::from_str(&s[2..]).unwrap_or_else(|_| Default::default()) } else { - Uint::from_dec_str(s).unwrap_or(Default::default()) + Uint::from_dec_str(s).unwrap_or_else(|_| Default::default()) } }, - &Json::U64(u) => From::from(u), - &Json::I64(i) => From::from(i as u64), + Json::U64(u) => From::from(u), + Json::I64(i) => From::from(i as u64), _ => Uint::zero(), } } @@ -488,7 +488,7 @@ macro_rules! construct_uint { for i in 0..bytes.len() { let rev = bytes.len() - 1 - i; let pos = rev / 8; - ret[pos] += (bytes[i] as u64) << (rev % 8) * 8; + ret[pos] += (bytes[i] as u64) << ((rev % 8) * 8); } $name(ret) } @@ -500,7 +500,7 @@ macro_rules! construct_uint { fn from_str(value: &str) -> Result<$name, Self::Err> { let bytes: Vec = match value.len() % 2 == 0 { true => try!(value.from_hex()), - false => try!(("0".to_string() + value).from_hex()) + false => try!(("0".to_owned() + value).from_hex()) }; let bytes_ref: &[u8] = &bytes; @@ -1061,6 +1061,7 @@ mod tests { } #[test] + #[allow(eq_op)] pub fn uint256_comp_test() { let small = U256([10u64, 0, 0, 0]); let big = U256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]);
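The final `#[allow(eq_op)]` shows the last recurring pattern in this diff: when a test deliberately compares a value with itself, clippy's `eq_op` lint is silenced locally rather than globally. A sketch on plain integers; note that the plugin-era clippy used bare lint names (`#[allow(eq_op)]`), whereas current clippy spells it `#[allow(clippy::eq_op)]`.

```rust
#[allow(clippy::eq_op)]
fn comparison_smoke_test() {
    let small = 10u64;
    let big = 1_000u64;
    // Comparing a value with itself is intentional here.
    assert!(small == small);
    assert!(small < big);
    assert!(big > small);
}

fn main() {
    comparison_smoke_test();
}
```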