Merge pull request #501 from ethcore/todr

Bumping clippy and fixing warnings.
Nikolay Volf 2016-02-23 14:23:45 +03:00
commit a05e8d226f
16 changed files with 58 additions and 55 deletions

Cargo.lock (generated): 12 lines changed
View File

@ -2,7 +2,7 @@
name = "parity"
version = "0.9.99"
dependencies = [
-"clippy 0.0.42 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/tomusdrw/rust-ctrlc.git)",
"daemonize 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.78 (registry+https://github.com/rust-lang/crates.io-index)",
@ -69,7 +69,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "clippy"
-version = "0.0.42"
+version = "0.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"regex-syntax 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -173,7 +173,7 @@ dependencies = [
name = "ethcore"
version = "0.9.99"
dependencies = [
-"clippy 0.0.42 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 0.9.99",
@ -200,7 +200,7 @@ dependencies = [
name = "ethcore-rpc"
version = "0.9.99"
dependencies = [
-"clippy 0.0.42 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 0.9.99",
"ethcore-util 0.9.99",
"ethsync 0.9.99",
@ -218,7 +218,7 @@ name = "ethcore-util"
version = "0.9.99"
dependencies = [
"arrayvec 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
-"clippy 0.0.42 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -250,7 +250,7 @@ dependencies = [
name = "ethsync"
version = "0.9.99"
dependencies = [
-"clippy 0.0.42 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 0.9.99",
"ethcore-util 0.9.99",

View File

@ -11,7 +11,7 @@ env_logger = "0.3"
rustc-serialize = "0.3"
docopt = "0.6"
ctrlc = { git = "https://github.com/tomusdrw/rust-ctrlc.git" }
-clippy = { version = "0.0.42", optional = true }
+clippy = { version = "0.0.44", optional = true }
ethcore-util = { path = "util" }
ethcore = { path = "ethcore" }
ethsync = { path = "sync" }
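Every crate in this workspace declares clippy the same way: an optional dependency that only the dev feature pulls in, so ordinary builds stay plugin-free while lint runs load clippy on nightly. A minimal sketch of how such a feature-gated clippy plugin was typically wired into a crate root under the era's compiler-plugin interface; the attribute lines are illustrative assumptions, not copied from this diff:

// Hypothetical crate root (src/lib.rs). With the dev feature off this is
// plain stable Rust; with it on, an old nightly loads clippy as a plugin.
#![cfg_attr(feature = "dev", feature(plugin))]
#![cfg_attr(feature = "dev", plugin(clippy))]

// Lint allowances, like the ones this PR touches, are gated the same way so
// they only apply when clippy is actually running.
#![cfg_attr(feature = "dev", allow(match_bool))]

pub fn answer() -> u32 {
    42
}

The dev feature of each crate (for example dev = ["clippy", "ethcore/dev", "ethcore-util/dev"] later in this diff) then forwards the same switch to its dependencies, so a single --features dev enables clippy across the workspace.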

View File

@ -18,7 +18,7 @@ ethcore-util = { path = "../util" }
evmjit = { path = "../evmjit", optional = true }
ethash = { path = "../ethash" }
num_cpus = "0.2"
-clippy = { version = "0.0.42", optional = true }
+clippy = { version = "0.0.44", optional = true }
crossbeam = "0.1.5"
lazy_static = "0.1"
ethcore-devtools = { path = "../devtools" }

View File

@ -48,7 +48,7 @@ impl Block {
if urlp.at(1).unwrap().iter().find(|i| i.as_val::<Transaction>().is_err()).is_some() {
return false;
}
if !urlp.at(2).unwrap().is_list() { return false; }
if urlp.at(2).unwrap().iter().find(|i| i.as_val::<Header>().is_err()).is_some() {
return false;
@ -61,7 +61,7 @@ impl Block {
impl Decodable for Block {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
if decoder.as_raw().len() != try!(decoder.as_rlp().payload_info()).total() {
-return Err(DecoderError::RlpIsTooBig);
+return Err(DecoderError::RlpIsTooBig);
}
let d = decoder.as_rlp();
if d.item_count() != 3 {
@ -87,7 +87,7 @@ pub struct ExecutedBlock {
state: State,
}
-/// A set of references to `ExecutedBlock` fields that are publicly accessible.
+/// A set of references to `ExecutedBlock` fields that are publicly accessible.
pub struct BlockRefMut<'a> {
/// Block header.
pub header: &'a Header,
@ -171,7 +171,7 @@ pub struct SealedBlock {
impl<'x, 'y> OpenBlock<'x, 'y> {
/// Create a new OpenBlock ready for transaction pushing.
-pub fn new<'a, 'b>(engine: &'a Engine, db: JournalDB, parent: &Header, last_hashes: &'b LastHashes, author: Address, extra_data: Bytes) -> OpenBlock<'a, 'b> {
+pub fn new(engine: &'x Engine, db: JournalDB, parent: &Header, last_hashes: &'y LastHashes, author: Address, extra_data: Bytes) -> Self {
let mut r = OpenBlock {
block: ExecutedBlock::new(State::from_existing(db, parent.state_root().clone(), engine.account_start_nonce())),
engine: engine,
@ -284,7 +284,7 @@ impl<'x, 'y> IsBlock for ClosedBlock<'x, 'y> {
}
impl<'x, 'y> ClosedBlock<'x, 'y> {
-fn new<'a, 'b>(open_block: OpenBlock<'a, 'b>, uncle_bytes: Bytes) -> ClosedBlock<'a, 'b> {
+fn new(open_block: OpenBlock<'x, 'y>, uncle_bytes: Bytes) -> Self {
ClosedBlock {
open_block: open_block,
uncle_bytes: uncle_bytes,
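The two constructor changes above (OpenBlock::new and ClosedBlock::new) follow the shape the newer clippy pushes for: when the impl block already declares the lifetimes, reuse them instead of introducing fresh ones, and return Self rather than repeating the concrete type. A small self-contained sketch of that before/after, using a made-up Wrapper type instead of the real block types:

struct Wrapper<'x> {
    data: &'x [u8],
}

impl<'x> Wrapper<'x> {
    // Before: a fresh lifetime 'a plus the concrete type name in the signature.
    // pub fn new<'a>(data: &'a [u8]) -> Wrapper<'a> { Wrapper { data: data } }

    // After: reuse the impl's lifetime and return Self.
    pub fn new(data: &'x [u8]) -> Self {
        Wrapper { data: data }
    }
}

fn main() {
    let bytes = [1u8, 2, 3];
    let wrapped = Wrapper::new(&bytes);
    println!("{} bytes wrapped", wrapped.data.len());
}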

View File

@ -87,13 +87,10 @@ impl Engine for Ethash {
fn schedule(&self, env_info: &EnvInfo) -> Schedule {
trace!(target: "client", "Creating schedule. param={:?}, fCML={}", self.spec().engine_params.get("frontierCompatibilityModeLimit"), self.u64_param("frontierCompatibilityModeLimit"));
-match env_info.number < self.u64_param("frontierCompatibilityModeLimit") {
-true => {
-Schedule::new_frontier()
-},
-_ => {
-Schedule::new_homestead()
-},
+if env_info.number < self.u64_param("frontierCompatibilityModeLimit") {
+Schedule::new_frontier()
+} else {
+Schedule::new_homestead()
}
}
@ -147,7 +144,7 @@ impl Engine for Ethash {
}
let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(quick_get_difficulty(
-&Ethash::to_ethash(header.bare_hash()),
+&Ethash::to_ethash(header.bare_hash()),
header.nonce().low_u64(),
&Ethash::to_ethash(header.mix_hash()))));
if difficulty < header.difficulty {
@ -189,7 +186,7 @@ impl Engine for Ethash {
let min_gas = parent.gas_limit - parent.gas_limit / gas_limit_divisor;
let max_gas = parent.gas_limit + parent.gas_limit / gas_limit_divisor;
if header.gas_limit <= min_gas || header.gas_limit >= max_gas {
-return Err(From::from(BlockError::InvalidGasLimit(OutOfBounds { min: Some(min_gas), max: Some(max_gas), found: header.gas_limit })));
+return Err(From::from(BlockError::InvalidGasLimit(OutOfBounds { min: Some(min_gas), max: Some(max_gas), found: header.gas_limit })));
}
Ok(())
}
@ -220,8 +217,8 @@ impl Ethash {
let frontier_limit = self.u64_param("frontierCompatibilityModeLimit");
let mut target = if header.number < frontier_limit {
if header.timestamp >= parent.timestamp + duration_limit {
-parent.difficulty - (parent.difficulty / difficulty_bound_divisor)
-}
+parent.difficulty - (parent.difficulty / difficulty_bound_divisor)
+}
else {
parent.difficulty + (parent.difficulty / difficulty_bound_divisor)
}
@ -243,7 +240,7 @@ impl Ethash {
}
target
}
fn boundary_to_difficulty(boundary: &H256) -> U256 {
U256::from((U512::one() << 256) / x!(U256::from(boundary.as_slice())))
}
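The schedule rewrite above is the fix this PR applies in several files: clippy 0.0.44's match_bool lint flags a match over a plain boolean, and the code becomes an ordinary if/else. The pattern on a toy standalone function (the names here are invented, not the Ethash code):

fn schedule_name(is_frontier: bool) -> &'static str {
    // Before (what match_bool warns about):
    // match is_frontier {
    //     true => "frontier",
    //     _ => "homestead",
    // }

    // After: the if/else form clippy suggests.
    if is_frontier {
        "frontier"
    } else {
        "homestead"
    }
}

fn main() {
    println!("{}", schedule_name(false));
}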

View File

@ -581,9 +581,10 @@ impl Interpreter {
let code_address = stack.pop_back();
let code_address = u256_to_address(&code_address);
-let value = match instruction == instructions::DELEGATECALL {
-true => None,
-false => Some(stack.pop_back())
+let value = if instruction == instructions::DELEGATECALL {
+None
+} else {
+Some(stack.pop_back())
};
let in_off = stack.pop_back();

View File

@ -22,7 +22,7 @@
// TODO [todr] not really sure
#![cfg_attr(feature="dev", allow(needless_range_loop))]
// Shorter than if-else
-#![cfg_attr(feautre="dev", allow(match_bool))]
+#![cfg_attr(feature="dev", allow(match_bool))]
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![cfg_attr(feature="dev", allow(clone_on_copy))]

View File

@ -37,10 +37,11 @@ impl Default for Action {
impl Decodable for Action {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
let rlp = decoder.as_rlp();
-match rlp.is_empty() {
-true => Ok(Action::Create),
-false => Ok(Action::Call(try!(rlp.as_val())))
-}
+if rlp.is_empty() {
+Ok(Action::Create)
+} else {
+Ok(Action::Call(try!(rlp.as_val())))
+}
}
}
@ -79,6 +80,7 @@ impl Transaction {
}
impl FromJson for SignedTransaction {
+#[cfg_attr(feature="dev", allow(single_char_pattern))]
fn from_json(json: &Json) -> SignedTransaction {
let t = Transaction {
nonce: xjson!(&json["nonce"]),
@ -110,7 +112,7 @@ impl FromJson for SignedTransaction {
impl Transaction {
/// The message hash of the transaction.
-pub fn hash(&self) -> H256 {
+pub fn hash(&self) -> H256 {
let mut stream = RlpStream::new();
self.rlp_append_unsigned_transaction(&mut stream);
stream.out().sha3()

View File

@ -1,3 +1,3 @@
#!/bin/sh
-echo "#!/bin/sh\ncargo test -p ethcore" >> ./.git/hooks/pre-push
+echo "#!/bin/sh\ncargo test -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity --features dev" > ./.git/hooks/pre-push
chmod +x ./.git/hooks/pre-push

View File

@ -181,7 +181,7 @@ impl Configuration {
}
fn _keys_path(&self) -> String {
-self.args.flag_keys_path.replace("$HOME", env::home_dir().unwrap().to_str().unwrap())
+self.args.flag_keys_path.replace("$HOME", env::home_dir().unwrap().to_str().unwrap())
}
fn spec(&self) -> Spec {
@ -194,9 +194,10 @@ impl Configuration {
}
fn normalize_enode(e: &str) -> Option<String> {
-match is_valid_node_url(e) {
-true => Some(e.to_owned()),
-false => None,
+if is_valid_node_url(e) {
+Some(e.to_owned())
+} else {
+None
}
}
@ -209,6 +210,7 @@ impl Configuration {
}
}
+#[cfg_attr(feature="dev", allow(useless_format))]
fn net_addresses(&self) -> (Option<SocketAddr>, Option<SocketAddr>) {
let mut listen_address = None;
let mut public_address = None;
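The allow(useless_format) attribute added above silences clippy's useless_format lint for net_addresses. That lint fires when format! merely forwards a single value that to_owned() or to_string() would cover. A hypothetical illustration of both sides, not the actual net_addresses body:

fn listen_url(host: &str, port: u16) -> String {
    // format! genuinely combines two values here, so clippy stays quiet.
    format!("{}:{}", host, port)
}

fn redundant(host: &str) -> String {
    // clippy's useless_format would flag this and suggest host.to_owned().
    format!("{}", host)
}

fn main() {
    println!("{}", listen_url("127.0.0.1", 30304));
    println!("{}", redundant("127.0.0.1"));
}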

View File

@ -16,7 +16,7 @@ jsonrpc-http-server = "1.1"
ethcore-util = { path = "../util" }
ethcore = { path = "../ethcore" }
ethsync = { path = "../sync" }
-clippy = { version = "0.0.42", optional = true }
+clippy = { version = "0.0.44", optional = true }
rustc-serialize = "0.3"
serde_macros = { version = "0.6.13", optional = true }

View File

@ -10,7 +10,7 @@ authors = ["Ethcore <admin@ethcore.io"]
[dependencies]
ethcore-util = { path = "../util" }
ethcore = { path = ".." }
-clippy = { version = "0.0.42", optional = true }
+clippy = { version = "0.0.44", optional = true }
log = "0.3"
env_logger = "0.3"
time = "0.1.34"
@ -18,4 +18,4 @@ rand = "0.3.13"
[features]
default = []
-dev = ["clippy", "ethcore/dev", "ethcore-util/dev"]
+dev = ["clippy", "ethcore/dev", "ethcore-util/dev"]

View File

@ -27,7 +27,7 @@ crossbeam = "0.2"
slab = "0.1"
sha3 = { path = "sha3" }
serde = "0.6.7"
-clippy = { version = "0.0.42", optional = true }
+clippy = { version = "0.0.44", optional = true }
json-tests = { path = "json-tests" }
rustc_version = "0.1.0"
igd = "0.4.2"

View File

@ -305,7 +305,7 @@ fn uuid_to_string(uuid: &Uuid) -> String {
}
fn uuid_from_string(s: &str) -> Result<Uuid, UtilError> {
-let parts: Vec<&str> = s.split("-").collect();
+let parts: Vec<&str> = s.split('-').collect();
if parts.len() != 5 { return Err(UtilError::BadSize); }
let mut uuid = H128::zero();
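The split("-") to split('-') change above is clippy's single_char_pattern lint (the same lint is allowed via cfg_attr in the transaction hunk earlier in this diff): splitting on a char avoids the string-pattern machinery and is marginally cheaper. A standalone example of the preferred form, using a made-up UUID string:

fn main() {
    let s = "3198bc9c-6672-5ab3-d995-4942343ae5b6";
    let parts: Vec<&str> = s.split('-').collect();
    assert_eq!(parts.len(), 5);
    println!("{:?}", parts);
}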

View File

@ -69,13 +69,13 @@ impl<'a> Iterator for NibbleSliceIterator<'a> {
impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
/// Create a new nibble slice with the given byte-slice.
-pub fn new(data: &[u8]) -> NibbleSlice { NibbleSlice::new_offset(data, 0) }
+pub fn new(data: &'a [u8]) -> Self { NibbleSlice::new_offset(data, 0) }
/// Create a new nibble slice with the given byte-slice with a nibble offset.
-pub fn new_offset(data: &'a [u8], offset: usize) -> NibbleSlice { NibbleSlice{data: data, offset: offset, data_encode_suffix: &b""[..], offset_encode_suffix: 0} }
+pub fn new_offset(data: &'a [u8], offset: usize) -> Self { NibbleSlice{data: data, offset: offset, data_encode_suffix: &b""[..], offset_encode_suffix: 0} }
/// Create a composed nibble slice; one followed by the other.
-pub fn new_composed(a: &'a NibbleSlice, b: &'a NibbleSlice) -> NibbleSlice<'a> { NibbleSlice{data: a.data, offset: a.offset, data_encode_suffix: b.data, offset_encode_suffix: b.offset} }
+pub fn new_composed(a: &'a NibbleSlice, b: &'a NibbleSlice) -> Self { NibbleSlice{data: a.data, offset: a.offset, data_encode_suffix: b.data, offset_encode_suffix: b.offset} }
/*pub fn new_composed_bytes_offset(a: &NibbleSlice, b: &NibbleSlice) -> (Bytes, usize) {
let r: Vec<u8>::with_capacity((a.len() + b.len() + 1) / 2);

View File

@ -31,7 +31,7 @@
//
//! Big unsigned integer types
-//!
+//!
//! Implementation of a various large-but-fixed sized unsigned integer types.
//! The functions here are designed to be fast.
//!
@ -99,7 +99,7 @@ pub trait Uint: Sized + Default + FromStr + From<u64> + FromJson + fmt::Debug +
/// Conversion to u64 with overflow checking
fn as_u64(&self) -> u64;
/// Return the least number of bits needed to represent the number
fn bits(&self) -> usize;
/// Return if specific bit is set
@ -124,7 +124,7 @@ pub trait Uint: Sized + Default + FromStr + From<u64> + FromJson + fmt::Debug +
/// Multiple this `Uint` with other returning result and possible overflow
fn overflowing_mul(self, other: Self) -> (Self, bool);
/// Divide this `Uint` by other returning result and possible overflow
fn overflowing_div(self, other: Self) -> (Self, bool);
@ -133,7 +133,7 @@ pub trait Uint: Sized + Default + FromStr + From<u64> + FromJson + fmt::Debug +
/// Returns negation of this `Uint` and overflow (always true)
fn overflowing_neg(self) -> (Self, bool);
/// Shifts this `Uint` and returns overflow
fn overflowing_shl(self, shift: u32) -> (Self, bool);
}
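The Uint trait methods above mirror the standard library's overflowing_* convention: each returns the wrapping result together with a flag saying whether overflow occurred. Illustrated with plain u64 rather than the U256/U512 types from this crate:

fn main() {
    // Same (value, overflowed) shape as Uint::overflowing_mul and friends.
    let (value, overflowed) = u64::MAX.overflowing_mul(2);
    assert!(overflowed);
    println!("wrapped result = {}, overflowed = {}", value, overflowed);
}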
@ -175,7 +175,7 @@ macro_rules! construct_uint {
fn as_u32(&self) -> u32 {
let &$name(ref arr) = self;
if (arr[0] & (0xffffffffu64 << 32)) != 0 {
-panic!("Integer overflow when casting U256")
+panic!("Integer overflow when casting U256")
}
self.as_u64() as u32
}
@ -186,7 +186,7 @@ macro_rules! construct_uint {
let &$name(ref arr) = self;
for i in 1..$n_words {
if arr[i] != 0 {
-panic!("Integer overflow when casting U256")
+panic!("Integer overflow when casting U256")
}
}
arr[0]
@ -320,7 +320,7 @@ macro_rules! construct_uint {
if b_carry {
let ret = overflowing!($name(ret).overflowing_add($name(carry)), overflow);
(ret, overflow)
-} else {
+} else {
($name(ret), overflow)
}
}
@ -448,7 +448,7 @@ macro_rules! construct_uint {
}
impl serde::Serialize for $name {
-fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
+fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
where S: serde::Serializer {
let mut hex = "0x".to_owned();
let mut bytes = [0u8; 8 * $n_words];
@ -791,6 +791,7 @@ macro_rules! construct_uint {
}
}
+#[cfg_attr(feature="dev", allow(derive_hash_xor_eq))] // We are pretty sure it's ok.
impl Hash for $name {
fn hash<H>(&self, state: &mut H) where H: Hasher {
unsafe { state.write(::std::slice::from_raw_parts(self.0.as_ptr() as *mut u8, self.0.len() * 8)); }
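The final hunk allows clippy's derive_hash_xor_eq lint on the hand-written Hash impl. The lint exists because HashMap and HashSet rely on a == b implying hash(a) == hash(b), which a manual Hash next to a derived PartialEq can silently break; here both operate on the same underlying words, so the allow is reasonable, as the inline comment says. A minimal, hypothetical illustration of the same situation:

use std::collections::HashSet;
use std::hash::{Hash, Hasher};

// Derived equality compares the limbs directly...
#[derive(PartialEq, Eq)]
struct Key([u64; 4]);

// ...and the manual Hash feeds exactly those limbs to the hasher, so the
// invariant that equal keys hash equally still holds despite the lint.
impl Hash for Key {
    fn hash<H: Hasher>(&self, state: &mut H) {
        for limb in &self.0 {
            state.write_u64(*limb);
        }
    }
}

fn main() {
    let mut set = HashSet::new();
    set.insert(Key([1, 2, 3, 4]));
    println!("key found: {}", set.contains(&Key([1, 2, 3, 4])));
}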