Fixing clippy errors in util

Tomusdrw 2016-01-19 12:14:29 +01:00
parent fc4b67a12d
commit 062193ceb5
22 changed files with 128 additions and 137 deletions

View File

@ -1 +1,5 @@
# ethcore
+# Running clippy

View File

@ -99,18 +99,18 @@ impl<'a> Deref for BytesRef<'a> {
type Target = [u8];
fn deref(&self) -> &[u8] {
-match self {
-&BytesRef::Flexible(ref bytes) => bytes,
-&BytesRef::Fixed(ref bytes) => bytes
+match *self {
+BytesRef::Flexible(ref bytes) => bytes,
+BytesRef::Fixed(ref bytes) => bytes
}
}
}
impl <'a> DerefMut for BytesRef<'a> {
fn deref_mut(&mut self) -> &mut [u8] {
-match self {
-&mut BytesRef::Flexible(ref mut bytes) => bytes,
-&mut BytesRef::Fixed(ref mut bytes) => bytes
+match *self {
+BytesRef::Flexible(ref mut bytes) => bytes,
+BytesRef::Fixed(ref mut bytes) => bytes
}
}
}
@ -283,7 +283,7 @@ pub trait FromBytes: Sized {
impl FromBytes for String {
fn from_bytes(bytes: &[u8]) -> FromBytesResult<String> {
-Ok(::std::str::from_utf8(bytes).unwrap().to_string())
+Ok(::std::str::from_utf8(bytes).unwrap().to_owned())
}
}
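For reference, a minimal standalone sketch of the two lints fixed in this file, match_ref_pats (match on `*self` so arms drop their `&` prefixes) and str_to_string (`to_owned()` instead of `to_string()` for `&str`). The `BytesRef` below is a simplified stand-in, not the type from the commit:

```rust
// Illustrative only; not the ethcore BytesRef.
enum BytesRef<'a> {
    Flexible(&'a Vec<u8>),
    Fixed(&'a [u8]),
}

impl<'a> BytesRef<'a> {
    fn as_slice(&self) -> &[u8] {
        // match_ref_pats: dereference the scrutinee once instead of
        // prefixing every arm with `&`.
        match *self {
            BytesRef::Flexible(bytes) => bytes,
            BytesRef::Fixed(bytes) => bytes,
        }
    }
}

fn client_name(s: &str) -> String {
    // str_to_string: `to_owned()` clones straight into an owned String,
    // while `to_string()` went through the Display machinery at the time.
    s.to_owned()
}

fn main() {
    let v = vec![1u8, 2, 3];
    assert_eq!(BytesRef::Flexible(&v).as_slice(), &[1u8, 2, 3][..]);
    assert_eq!(client_name("parity"), String::from("parity"));
}
```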

View File

@ -321,10 +321,9 @@ impl<'a, D> ChainFilter<'a, D> where D: FilterDataSource
let offset = level_size * index;
// go doooown!
-match self.blocks(bloom, from_block, to_block, max_level, offset) {
-Some(blocks) => result.extend(blocks),
-None => ()
-};
+if let Some(blocks) = self.blocks(bloom, from_block, to_block, max_level, offset) {
+result.extend(blocks);
+}
}
result
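For reference, a sketch of the single_match rewrite applied in this hunk: a `match` with one meaningful arm and a `None => ()` arm collapses into `if let`. The lookup function below is illustrative, not the ChainFilter API:

```rust
// Illustrative stand-in for a lookup that may return nothing.
fn lookup(index: usize) -> Option<Vec<u64>> {
    if index < 3 { Some(vec![index as u64, 42]) } else { None }
}

fn collect_blocks() -> Vec<u64> {
    let mut result = Vec::new();
    for index in 0..5 {
        // Before: match lookup(index) { Some(blocks) => result.extend(blocks), None => () };
        if let Some(blocks) = lookup(index) {
            result.extend(blocks);
        }
    }
    result
}

fn main() {
    assert_eq!(collect_blocks(), vec![0, 42, 1, 42, 2, 42]);
}
```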

View File

@ -193,11 +193,11 @@ macro_rules! impl_hash {
impl FromJson for $from {
fn from_json(json: &Json) -> Self {
-match json {
-&Json::String(ref s) => {
+match *json {
+Json::String(ref s) => {
match s.len() % 2 {
0 => FromStr::from_str(clean_0x(s)).unwrap(),
-_ => FromStr::from_str(&("0".to_string() + &(clean_0x(s).to_string()))[..]).unwrap()
+_ => FromStr::from_str(&("0".to_owned() + &(clean_0x(s).to_owned()))[..]).unwrap()
}
},
_ => Default::default(),
@ -207,7 +207,7 @@ macro_rules! impl_hash {
impl fmt::Debug for $from {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-for i in self.0.iter() {
+for i in &self.0[..] {
try!(write!(f, "{:02x}", i));
}
Ok(())
@ -215,11 +215,11 @@ macro_rules! impl_hash {
}
impl fmt::Display for $from {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-for i in self.0[0..2].iter() {
+for i in &self.0[0..2] {
try!(write!(f, "{:02x}", i));
}
try!(write!(f, ""));
-for i in self.0[$size - 4..$size].iter() {
+for i in &self.0[$size - 4..$size] {
try!(write!(f, "{:02x}", i));
}
Ok(())
@ -277,36 +277,36 @@ macro_rules! impl_hash {
impl Index<usize> for $from {
type Output = u8;
-fn index<'a>(&'a self, index: usize) -> &'a u8 {
+fn index(&self, index: usize) -> &u8 {
&self.0[index]
}
}
impl IndexMut<usize> for $from {
-fn index_mut<'a>(&'a mut self, index: usize) -> &'a mut u8 {
+fn index_mut(&mut self, index: usize) -> &mut u8 {
&mut self.0[index]
}
}
impl Index<ops::Range<usize>> for $from {
type Output = [u8];
-fn index<'a>(&'a self, index: ops::Range<usize>) -> &'a [u8] {
+fn index(&self, index: ops::Range<usize>) -> &[u8] {
&self.0[index]
}
}
impl IndexMut<ops::Range<usize>> for $from {
-fn index_mut<'a>(&'a mut self, index: ops::Range<usize>) -> &'a mut [u8] {
+fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [u8] {
&mut self.0[index]
}
}
impl Index<ops::RangeFull> for $from {
type Output = [u8];
-fn index<'a>(&'a self, _index: ops::RangeFull) -> &'a [u8] {
+fn index(&self, _index: ops::RangeFull) -> &[u8] {
&self.0
}
}
impl IndexMut<ops::RangeFull> for $from {
-fn index_mut<'a>(&'a mut self, _index: ops::RangeFull) -> &'a mut [u8] {
+fn index_mut(&mut self, _index: ops::RangeFull) -> &mut [u8] {
&mut self.0
}
}
@ -424,7 +424,7 @@ macro_rules! impl_hash {
fn from(s: &'_ str) -> $from {
use std::str::FromStr;
if s.len() % 2 == 1 {
-$from::from_str(&("0".to_string() + &(clean_0x(s).to_string()))[..]).unwrap_or($from::new())
+$from::from_str(&("0".to_owned() + &(clean_0x(s).to_owned()))[..]).unwrap_or($from::new())
} else {
$from::from_str(clean_0x(s)).unwrap_or($from::new())
}
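For reference, a sketch of the needless_lifetimes fix applied to the `Index` impls above: when the only lifetime ties the output to `&self`, elision already expresses it, so the explicit `<'a>` parameters can be dropped. The wrapper type is illustrative:

```rust
use std::ops::Index;

// Illustrative newtype, not the impl_hash! macro output.
struct Wrapper([u8; 4]);

impl Index<usize> for Wrapper {
    type Output = u8;

    // Before: fn index<'a>(&'a self, index: usize) -> &'a u8
    fn index(&self, index: usize) -> &u8 {
        &self.0[index]
    }
}

fn main() {
    let w = Wrapper([7, 8, 9, 10]);
    assert_eq!(w[2], 9);
}
```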

View File

@ -93,17 +93,17 @@ impl<Message> Handler for IoManager<Message> where Message: Send + 'static {
fn ready(&mut self, event_loop: &mut EventLoop<Self>, token: Token, events: EventSet) {
if events.is_hup() {
-for h in self.handlers.iter_mut() {
+for h in &mut self.handlers {
h.stream_hup(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
}
}
else if events.is_readable() {
-for h in self.handlers.iter_mut() {
+for h in &mut self.handlers {
h.stream_readable(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
}
}
else if events.is_writable() {
-for h in self.handlers.iter_mut() {
+for h in &mut self.handlers {
h.stream_writable(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
}
}
@ -116,13 +116,13 @@ impl<Message> Handler for IoManager<Message> where Message: Send + 'static {
let timer = self.timers.get_mut(token).expect("Unknown user timer token");
timer.delay
};
-for h in self.handlers.iter_mut() {
+for h in &mut self.handlers {
h.timeout(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
}
event_loop.timeout_ms(token, delay).expect("Error re-registering user timer");
}
_ => { // Just pass the event down. IoHandler is supposed to re-register it if required.
-for h in self.handlers.iter_mut() {
+for h in &mut self.handlers {
h.timeout(&mut IoContext::new(event_loop, &mut self.timers), token.as_usize());
}
}
@ -140,7 +140,7 @@ impl<Message> Handler for IoManager<Message> where Message: Send + 'static {
self.handlers.last_mut().unwrap().initialize(&mut IoContext::new(event_loop, &mut self.timers));
},
IoMessage::UserMessage(ref mut data) => {
-for h in self.handlers.iter_mut() {
+for h in &mut self.handlers {
h.message(&mut IoContext::new(event_loop, &mut self.timers), data);
}
}
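For reference, a sketch of the explicit_iter_loop rewrite used across these hunks: iterate `&collection` or `&mut collection` instead of calling `.iter()` / `.iter_mut()`. The handler type is illustrative, not the real IoHandler:

```rust
// Illustrative handler, not the ethcore IoHandler trait.
struct Handler {
    calls: u32,
}

fn notify_all(handlers: &mut Vec<Handler>) {
    // Before: for h in handlers.iter_mut() { ... }
    for h in &mut *handlers {
        h.calls += 1;
    }
}

fn main() {
    let mut handlers = vec![Handler { calls: 0 }, Handler { calls: 0 }];
    notify_all(&mut handlers);
    // Reading back goes through `&handlers` rather than `handlers.iter()`.
    for h in &handlers {
        assert_eq!(h.calls, 1);
    }
}
```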

View File

@ -18,10 +18,10 @@ fn u256_from_str(s: &str) -> U256 {
impl FromJson for Bytes {
fn from_json(json: &Json) -> Self {
-match json {
-&Json::String(ref s) => match s.len() % 2 {
+match *json {
+Json::String(ref s) => match s.len() % 2 {
0 => FromHex::from_hex(clean(s)).unwrap_or(vec![]),
-_ => FromHex::from_hex(&("0".to_string() + &(clean(s).to_string()))[..]).unwrap_or(vec![]),
+_ => FromHex::from_hex(&("0".to_owned() + &(clean(s).to_owned()))[..]).unwrap_or(vec![]),
},
_ => vec![],
}
@ -30,8 +30,8 @@ impl FromJson for Bytes {
impl FromJson for BTreeMap<H256, H256> {
fn from_json(json: &Json) -> Self {
-match json {
-&Json::Object(ref o) => o.iter().map(|(key, value)| (x!(&u256_from_str(key)), x!(&U256::from_json(value)))).collect(),
+match *json {
+Json::Object(ref o) => o.iter().map(|(key, value)| (x!(&u256_from_str(key)), x!(&U256::from_json(value)))).collect(),
_ => BTreeMap::new(),
}
}
@ -39,8 +39,8 @@ impl FromJson for BTreeMap<H256, H256> {
impl<T> FromJson for Vec<T> where T: FromJson {
fn from_json(json: &Json) -> Self {
-match json {
-&Json::Array(ref o) => o.iter().map(|x|T::from_json(x)).collect(),
+match *json {
+Json::Array(ref o) => o.iter().map(|x|T::from_json(x)).collect(),
_ => Vec::new(),
}
}
@ -48,9 +48,9 @@ impl<T> FromJson for Vec<T> where T: FromJson {
impl<T> FromJson for Option<T> where T: FromJson {
fn from_json(json: &Json) -> Self {
-match json {
-&Json::String(ref o) if o.is_empty() => None,
-&Json::Null => None,
+match *json {
+Json::String(ref o) if o.is_empty() => None,
+Json::Null => None,
_ => Some(FromJson::from_json(json)),
}
}

View File

@ -2,6 +2,7 @@
#![feature(augmented_assignments)]
#![feature(associated_consts)]
#![feature(wrapping)]
+#![allow(needless_range_loop, match_bool)]
//! Ethcore-util library
//!
//! ### Rust version:
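For reference, a sketch of how lint-level attributes like the one added in this hunk work. In this 2016 commit clippy ran as a compiler plugin with bare lint names; today's clippy namespaces them (`clippy::needless_range_loop`), which is what the sketch uses:

```rust
// Crate-level allow: silences the lint for the whole crate.
#![allow(clippy::needless_range_loop)]

// A per-item allow works the same way for a single function.
#[allow(clippy::match_bool)]
fn describe(flag: bool) -> &'static str {
    match flag {
        true => "on",
        false => "off",
    }
}

fn main() {
    let data = [1, 2, 3];
    let mut sum = 0;
    // Indexed loop that `needless_range_loop` would otherwise flag.
    for i in 0..data.len() {
        sum += data[i];
    }
    assert_eq!(sum, 6);
    assert_eq!(describe(true), "on");
}
```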

View File

@ -14,13 +14,13 @@ impl<T> Diff<T> where T: Eq {
pub fn new(pre: T, post: T) -> Self { if pre == post { Diff::Same } else { Diff::Changed(pre, post) } }
/// Get the before value, if there is one.
-pub fn pre(&self) -> Option<&T> { match self { &Diff::Died(ref x) | &Diff::Changed(ref x, _) => Some(x), _ => None } }
+pub fn pre(&self) -> Option<&T> { match *self { Diff::Died(ref x) | Diff::Changed(ref x, _) => Some(x), _ => None } }
/// Get the after value, if there is one.
-pub fn post(&self) -> Option<&T> { match self { &Diff::Born(ref x) | &Diff::Changed(_, ref x) => Some(x), _ => None } }
+pub fn post(&self) -> Option<&T> { match *self { Diff::Born(ref x) | Diff::Changed(_, ref x) => Some(x), _ => None } }
/// Determine whether there was a change or not.
-pub fn is_same(&self) -> bool { match self { &Diff::Same => true, _ => false }}
+pub fn is_same(&self) -> bool { match *self { Diff::Same => true, _ => false }}
}
#[derive(PartialEq,Eq,Clone,Copy)]

View File

@ -86,7 +86,7 @@ impl Connection {
/// Add a packet to send queue.
pub fn send(&mut self, data: Bytes) {
-if data.len() != 0 {
+if !data.is_empty() {
self.send_queue.push_back(Cursor::new(data));
}
if !self.interest.is_writable() {
@ -341,11 +341,8 @@ impl EncryptedConnection {
self.idle_timeout.map(|t| event_loop.clear_timeout(t));
match self.read_state {
EncryptedConnectionState::Header => {
-match try!(self.connection.readable()) {
-Some(data) => {
+if let Some(data) = try!(self.connection.readable()) {
try!(self.read_header(&data));
-},
-None => {}
};
Ok(None)
},
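For reference, a sketch of the len_zero fix from the first hunk in this file: `is_empty()` states the intent directly instead of comparing `len()` with zero. The queue type is illustrative:

```rust
use std::collections::VecDeque;

// Illustrative send queue, not the ethcore Connection.
fn enqueue(queue: &mut VecDeque<Vec<u8>>, data: Vec<u8>) {
    // Before: if data.len() != 0 { ... }
    if !data.is_empty() {
        queue.push_back(data);
    }
}

fn main() {
    let mut queue = VecDeque::new();
    enqueue(&mut queue, vec![]);
    enqueue(&mut queue, vec![0x80]);
    // The empty packet was skipped.
    assert_eq!(queue.len(), 1);
}
```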

View File

@ -62,7 +62,7 @@ impl Discovery {
discovery_round: 0,
discovery_id: NodeId::new(),
discovery_nodes: HashSet::new(),
-node_buckets: (0..NODE_BINS).map(|x| NodeBucket::new(x)).collect(),
+node_buckets: (0..NODE_BINS).map(NodeBucket::new).collect(),
}
}
@ -122,7 +122,8 @@ impl Discovery {
ret
}
-fn nearest_node_entries<'b>(source: &NodeId, target: &NodeId, buckets: &'b Vec<NodeBucket>) -> Vec<&'b NodeId>
+#[allow(cyclomatic_complexity)]
+fn nearest_node_entries<'b>(source: &NodeId, target: &NodeId, buckets: &'b [NodeBucket]) -> Vec<&'b NodeId>
{
// send ALPHA FindNode packets to nodes we know, closest to target
const LAST_BIN: u32 = NODE_BINS - 1;
@ -136,7 +137,7 @@ impl Discovery {
if head > 1 && tail != LAST_BIN {
while head != tail && head < NODE_BINS && count < BUCKET_SIZE
{
-for n in buckets[head as usize].nodes.iter()
+for n in &buckets[head as usize].nodes
{
if count < BUCKET_SIZE {
count += 1;
@ -147,7 +148,7 @@ impl Discovery {
}
}
if count < BUCKET_SIZE && tail != 0 {
-for n in buckets[tail as usize].nodes.iter() {
+for n in &buckets[tail as usize].nodes {
if count < BUCKET_SIZE {
count += 1;
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
@ -166,7 +167,7 @@ impl Discovery {
}
else if head < 2 {
while head < NODE_BINS && count < BUCKET_SIZE {
-for n in buckets[head as usize].nodes.iter() {
+for n in &buckets[head as usize].nodes {
if count < BUCKET_SIZE {
count += 1;
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
@ -180,7 +181,7 @@ impl Discovery {
}
else {
while tail > 0 && count < BUCKET_SIZE {
-for n in buckets[tail as usize].nodes.iter() {
+for n in &buckets[tail as usize].nodes {
if count < BUCKET_SIZE {
count += 1;
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
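For reference, a sketch of two lints addressed in this file: ptr_arg (accept `&[T]` rather than `&Vec<T>`, so callers can pass any slice) and redundant_closure (pass the constructor itself to `map`). The `Bucket` type is illustrative, not the real NodeBucket:

```rust
// Illustrative bucket, not ethcore's NodeBucket.
struct Bucket {
    distance: u32,
}

impl Bucket {
    fn new(distance: u32) -> Bucket {
        Bucket { distance }
    }
}

// Before: fn total_distance(buckets: &Vec<Bucket>) -> u32
fn total_distance(buckets: &[Bucket]) -> u32 {
    buckets.iter().map(|b| b.distance).sum()
}

fn main() {
    // Before: (0..4).map(|x| Bucket::new(x)).collect()
    let buckets: Vec<Bucket> = (0..4).map(Bucket::new).collect();
    // A &Vec<Bucket> coerces to &[Bucket] at the call site.
    assert_eq!(total_distance(&buckets), 6);
}
```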

View File

@ -93,21 +93,15 @@ impl Handshake {
self.idle_timeout.map(|t| event_loop.clear_timeout(t));
match self.state {
HandshakeState::ReadingAuth => {
-match try!(self.connection.readable()) {
-Some(data) => {
+if let Some(data) = try!(self.connection.readable()) {
try!(self.read_auth(host, &data));
try!(self.write_ack());
-},
-None => {}
};
},
HandshakeState::ReadingAck => {
-match try!(self.connection.readable()) {
-Some(data) => {
+if let Some(data) = try!(self.connection.readable()) {
try!(self.read_ack(host, &data));
self.state = HandshakeState::StartSession;
-},
-None => {}
};
},
_ => { panic!("Unexpected state"); }

View File

@ -175,7 +175,7 @@ impl<'s, 'io, Message> NetworkContext<'s, 'io, Message> where Message: Send + 's
s.info.client_version.clone()
},
_ => {
-"unknown".to_string()
+"unknown".to_owned()
}
}
}
@ -213,7 +213,7 @@ impl HostInfo {
/// Increments and returns connection nonce.
pub fn next_nonce(&mut self) -> H256 {
self.nonce = self.nonce.sha3();
-return self.nonce.clone();
+self.nonce.clone()
}
}
@ -246,7 +246,7 @@ impl<Message> Host<Message> where Message: Send {
config: config,
nonce: H256::random(),
protocol_version: 4,
-client_version: "parity".to_string(),
+client_version: "parity".to_owned(),
listen_port: 0,
capabilities: Vec::new(),
},
@ -274,11 +274,11 @@ impl<Message> Host<Message> where Message: Send {
}
fn have_session(&self, id: &NodeId) -> bool {
-self.connections.iter().any(|e| match e { &ConnectionEntry::Session(ref s) => s.info.id.eq(&id), _ => false })
+self.connections.iter().any(|e| match *e { ConnectionEntry::Session(ref s) => s.info.id.eq(&id), _ => false })
}
fn connecting_to(&self, id: &NodeId) -> bool {
-self.connections.iter().any(|e| match e { &ConnectionEntry::Handshake(ref h) => h.id.eq(&id), _ => false })
+self.connections.iter().any(|e| match *e { ConnectionEntry::Handshake(ref h) => h.id.eq(&id), _ => false })
}
fn connect_peers(&mut self, io: &mut IoContext<NetworkIoMessage<Message>>) {
@ -303,7 +303,7 @@ impl<Message> Host<Message> where Message: Send {
}
}
-for n in to_connect.iter() {
+for n in &to_connect {
if n.peer_type == PeerType::Required {
if req_conn < IDEAL_PEERS {
self.connect_peer(&n.id, io);
@ -318,7 +318,7 @@ impl<Message> Host<Message> where Message: Send {
let peer_count = 0;
let mut open_slots = IDEAL_PEERS - peer_count - pending_count + req_conn;
if open_slots > 0 {
-for n in to_connect.iter() {
+for n in &to_connect {
if n.peer_type == PeerType::Optional && open_slots > 0 {
open_slots -= 1;
self.connect_peer(&n.id, io);
@ -328,6 +328,7 @@ impl<Message> Host<Message> where Message: Send {
}
}
+#[allow(single_match)]
fn connect_peer(&mut self, id: &NodeId, io: &mut IoContext<NetworkIoMessage<Message>>) {
if self.have_session(id)
{
@ -376,6 +377,7 @@ impl<Message> Host<Message> where Message: Send {
trace!(target: "net", "accept"); trace!(target: "net", "accept");
} }
#[allow(single_match)]
fn connection_writable<'s>(&'s mut self, token: StreamToken, io: &mut IoContext<'s, NetworkIoMessage<Message>>) { fn connection_writable<'s>(&'s mut self, token: StreamToken, io: &mut IoContext<'s, NetworkIoMessage<Message>>) {
let mut kill = false; let mut kill = false;
let mut create_session = false; let mut create_session = false;
@ -436,7 +438,7 @@ impl<Message> Host<Message> where Message: Send {
}) };
match sd {
SessionData::Ready => {
-for (p, _) in self.handlers.iter_mut() {
+for (p, _) in &mut self.handlers {
if s.have_capability(p) {
ready_data.push(p);
}
@ -475,11 +477,8 @@ impl<Message> Host<Message> where Message: Send {
h.read(&mut NetworkContext::new(io, p, Some(token), &mut self.connections, &mut self.timers), &token, packet_id, &data[1..]);
}
-match self.connections.get_mut(token) {
-Some(&mut ConnectionEntry::Session(ref mut s)) => {
+if let Some(&mut ConnectionEntry::Session(ref mut s)) = self.connections.get_mut(token) {
s.reregister(io.event_loop).unwrap_or_else(|e| debug!(target: "net", "Session registration error: {:?}", e));
-},
-_ => (),
}
}
@ -523,7 +522,7 @@ impl<Message> Host<Message> where Message: Send {
match self.connections.get_mut(token) {
Some(&mut ConnectionEntry::Handshake(_)) => (), // just abandon handshake
Some(&mut ConnectionEntry::Session(ref mut s)) if s.is_ready() => {
-for (p, _) in self.handlers.iter_mut() {
+for (p, _) in &mut self.handlers {
if s.have_capability(p) {
to_disconnect.push(p);
}
@ -600,19 +599,20 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
FIRST_CONNECTION ... LAST_CONNECTION => self.connection_timeout(token, io),
NODETABLE_DISCOVERY => {},
NODETABLE_MAINTAIN => {},
-_ => match self.timers.get_mut(&token).map(|p| *p) {
-Some(protocol) => match self.handlers.get_mut(protocol) {
+_ => {
+if let Some(protocol) = self.timers.get_mut(&token).map(|p| *p) {
+match self.handlers.get_mut(protocol) {
None => { warn!(target: "net", "No handler found for protocol: {:?}", protocol) },
Some(h) => { h.timeout(&mut NetworkContext::new(io, protocol, Some(token), &mut self.connections, &mut self.timers), token); }
-},
-None => {} // time not registerd through us
+};
+} // else time not registerd through us
}
}
}
fn message<'s>(&'s mut self, io: &mut IoContext<'s, NetworkIoMessage<Message>>, message: &'s mut NetworkIoMessage<Message>) {
-match message {
-&mut NetworkIoMessage::AddHandler {
+match *message {
+NetworkIoMessage::AddHandler {
ref mut handler,
ref protocol,
ref versions
@ -624,7 +624,7 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
self.info.capabilities.push(CapabilityInfo { protocol: protocol, version: *v, packet_count:0 });
}
},
-&mut NetworkIoMessage::Send {
+NetworkIoMessage::Send {
ref peer,
ref packet_id,
ref protocol,
@ -641,8 +641,8 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
}
}
},
-&mut NetworkIoMessage::User(ref message) => {
-for (p, h) in self.handlers.iter_mut() {
+NetworkIoMessage::User(ref message) => {
+for (p, h) in &mut self.handlers {
h.message(&mut NetworkContext::new(io, p, None, &mut self.connections, &mut self.timers), &message);
}
}
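For reference, a sketch of the needless_return fix from the `next_nonce` hunk above: the last expression of a function is its value, so a trailing `return x;` is noise. The counter is illustrative, not the real HostInfo:

```rust
// Illustrative counter, not ethcore's HostInfo.
struct Counter {
    nonce: u64,
}

impl Counter {
    fn next_nonce(&mut self) -> u64 {
        self.nonce += 1;
        // Before: return self.nonce;
        self.nonce
    }
}

fn main() {
    let mut c = Counter { nonce: 0 };
    assert_eq!(c.next_nonce(), 1);
    assert_eq!(c.next_nonce(), 2);
}
```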

View File

@ -20,14 +20,16 @@ pub struct NodeEndpoint {
pub udp_port: u16
}
-impl NodeEndpoint {
+impl FromStr for NodeEndpoint {
+type Err = UtilError;
/// Create endpoint from string. Performs name resolution if given a host name.
fn from_str(s: &str) -> Result<NodeEndpoint, UtilError> {
let address = s.to_socket_addrs().map(|mut i| i.next());
match address {
Ok(Some(a)) => Ok(NodeEndpoint {
address: a,
-address_str: s.to_string(),
+address_str: s.to_owned(),
udp_port: a.port()
}),
Ok(_) => Err(UtilError::AddressResolve(None)),
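For reference, a sketch of the change from an inherent `from_str` method to a `FromStr` impl, which is what makes `str::parse` work for the type. The endpoint type and error below are simplified stand-ins for NodeEndpoint and UtilError:

```rust
use std::str::FromStr;

// Illustrative endpoint, not ethcore's NodeEndpoint.
#[derive(Debug, PartialEq)]
struct Endpoint {
    host: String,
    port: u16,
}

impl FromStr for Endpoint {
    type Err = String;

    fn from_str(s: &str) -> Result<Endpoint, String> {
        let mut parts = s.splitn(2, ':');
        let host = parts.next().ok_or("missing host")?.to_owned();
        let port = parts
            .next()
            .ok_or("missing port")?
            .parse::<u16>()
            .map_err(|e| e.to_string())?;
        Ok(Endpoint { host, port })
    }
}

fn main() {
    // `parse` is available because the type implements FromStr.
    let ep: Endpoint = "127.0.0.1:30303".parse().unwrap();
    assert_eq!(ep, Endpoint { host: "127.0.0.1".to_owned(), port: 30303 });
}
```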

View File

@ -182,7 +182,7 @@ impl Session {
// map to protocol
let protocol = self.info.capabilities[i].protocol;
let pid = packet_id - self.info.capabilities[i].id_offset;
-return Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } )
+Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } )
},
_ => {
debug!(target: "net", "Unkown packet: {:?}", packet_id);
@ -212,7 +212,7 @@ impl Session {
// Intersect with host capabilities
// Leave only highset mutually supported capability version
let mut caps: Vec<SessionCapabilityInfo> = Vec::new();
-for hc in host.capabilities.iter() {
+for hc in &host.capabilities {
if peer_caps.iter().any(|c| c.protocol == hc.protocol && c.version == hc.version) {
caps.push(SessionCapabilityInfo {
protocol: hc.protocol,

View File

@ -159,7 +159,7 @@ impl HashDB for OverlayDB {
match k {
Some(&(ref d, rc)) if rc > 0 => Some(d),
_ => {
-let memrc = k.map(|&(_, rc)| rc).unwrap_or(0);
+let memrc = k.map_or(0, |&(_, rc)| rc);
match self.payload(key) {
Some(x) => {
let (d, rc) = x;
@ -184,16 +184,11 @@ impl HashDB for OverlayDB {
match k {
Some(&(_, rc)) if rc > 0 => true,
_ => {
-let memrc = k.map(|&(_, rc)| rc).unwrap_or(0);
+let memrc = k.map_or(0, |&(_, rc)| rc);
match self.payload(key) {
Some(x) => {
let (_, rc) = x;
-if rc as i32 + memrc > 0 {
-true
-}
-else {
-false
-}
+rc as i32 + memrc > 0
}
// Replace above match arm with this once https://github.com/rust-lang/rust/issues/15287 is done.
//Some((d, rc)) if rc + memrc > 0 => true,
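For reference, a sketch of the two rewrites in this hunk: option_map_unwrap_or (collapse `map(..).unwrap_or(..)` into `map_or`) and needless_bool (a boolean expression does not need `if .. { true } else { false }`). The values are illustrative, not the OverlayDB reference-counting logic:

```rust
fn main() {
    let entry: Option<(&str, i32)> = Some(("key", -1));

    // Before: entry.map(|&(_, rc)| rc).unwrap_or(0)
    let memrc = entry.map_or(0, |(_, rc)| rc);

    let rc: u32 = 2;
    // Before: if rc as i32 + memrc > 0 { true } else { false }
    let exists = rc as i32 + memrc > 0;

    assert_eq!(memrc, -1);
    assert!(exists);
}
```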

View File

@ -41,7 +41,7 @@ impl Stream for RlpStream {
stream
}
-fn append<'a, E>(&'a mut self, object: &E) -> &'a mut RlpStream where E: Encodable {
+fn append<E>(&mut self, object: &E) -> &mut RlpStream where E: Encodable {
// encode given value and add it at the end of the stream
object.encode(&mut self.encoder);
@ -52,7 +52,7 @@ impl Stream for RlpStream {
self
}
-fn append_list<'a>(&'a mut self, len: usize) -> &'a mut RlpStream {
+fn append_list(&mut self, len: usize) -> &mut RlpStream {
match len {
0 => {
// we may finish, if the appended list len is equal 0
@ -69,7 +69,7 @@ impl Stream for RlpStream {
self
}
-fn append_empty_data<'a>(&'a mut self) -> &'a mut RlpStream {
+fn append_empty_data(&mut self) -> &mut RlpStream {
// self push raw item
self.encoder.bytes.push(0x80);

View File

@ -5,7 +5,7 @@ pub trait Decoder: Sized {
where F: FnOnce(&[u8]) -> Result<T, DecoderError>;
fn as_list(&self) -> Result<Vec<Self>, DecoderError>;
-fn as_rlp<'a>(&'a self) -> &'a UntrustedRlp<'a>;
+fn as_rlp(&self) -> &UntrustedRlp;
fn as_raw(&self) -> &[u8];
}
@ -231,7 +231,7 @@ pub trait Stream: Sized {
/// assert_eq!(out, vec![0xca, 0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g', 0x80]);
/// }
/// ```
-fn append_list<'a>(&'a mut self, len: usize) -> &'a mut Self;
+fn append_list(&mut self, len: usize) -> &mut Self;
/// Apends null to the end of stream, chainable.
///
@ -246,7 +246,7 @@ pub trait Stream: Sized {
/// assert_eq!(out, vec![0xc2, 0x80, 0x80]);
/// }
/// ```
-fn append_empty_data<'a>(&'a mut self) -> &'a mut Self;
+fn append_empty_data(&mut self) -> &mut Self;
/// Appends raw (pre-serialised) RLP data. Use with caution. Chainable.
fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut Self;

View File

@ -282,7 +282,7 @@ impl<'a> BasicDecoder<'a> {
/// Return first item info
fn payload_info(bytes: &[u8]) -> Result<PayloadInfo, DecoderError> {
-let item = match bytes.first().map(|&x| x) {
+let item = match bytes.first().cloned() {
None => return Err(DecoderError::RlpIsTooShort),
Some(0...0x7f) => PayloadInfo::new(0, 1),
Some(l @ 0x80...0xb7) => PayloadInfo::new(1, l as usize - 0x80),
@ -318,7 +318,7 @@ impl<'a> Decoder for BasicDecoder<'a> {
let bytes = self.rlp.as_raw();
-match bytes.first().map(|&x| x) {
+match bytes.first().cloned() {
// rlp is too short
None => Err(DecoderError::RlpIsTooShort),
// single byt value
@ -349,12 +349,12 @@ impl<'a> Decoder for BasicDecoder<'a> {
fn as_list(&self) -> Result<Vec<Self>, DecoderError> {
let v: Vec<BasicDecoder<'a>> = self.rlp.iter()
-.map(| i | BasicDecoder::new(i))
+.map(BasicDecoder::new)
.collect();
Ok(v)
}
-fn as_rlp<'s>(&'s self) -> &'s UntrustedRlp<'s> {
+fn as_rlp(&self) -> &UntrustedRlp {
&self.rlp
}
}
@ -399,6 +399,7 @@ impl<T> Decodable for Option<T> where T: Decodable {
macro_rules! impl_array_decodable {
($index_type:ty, $len:expr ) => (
impl<T> Decodable for [T; $len] where T: Decodable {
+#[allow(len_zero)]
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
let decoders = try!(decoder.as_list());
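For reference, a sketch of the `.cloned()` rewrite used in this file: for an `Option` or iterator over `&T` where `T: Clone`, `cloned()` replaces the manual `map(|&x| x)` closure. The bytes below are arbitrary sample data:

```rust
fn main() {
    let bytes: &[u8] = &[0x83, b'c', b'a', b't'];

    // Before: bytes.first().map(|&x| x)
    let first: Option<u8> = bytes.first().cloned();
    assert_eq!(first, Some(0x83));

    // The same method exists on iterators of references.
    let copy: Vec<u8> = bytes.iter().cloned().collect();
    assert_eq!(copy, bytes);
}
```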

View File

@ -41,7 +41,7 @@ pub trait Squeeze {
impl<K, T> Squeeze for HashMap<K, T> where K: Eq + Hash + Clone + HeapSizeOf, T: HeapSizeOf {
fn squeeze(&mut self, size: usize) {
-if self.len() == 0 {
+if self.is_empty() {
return
}
@ -49,7 +49,7 @@ impl<K, T> Squeeze for HashMap<K, T> where K: Eq + Hash + Clone + HeapSizeOf, T:
let all_entries = size_of_entry * self.len();
let mut shrinked_size = all_entries;
-while self.len() > 0 && shrinked_size > size {
+while !self.is_empty() && shrinked_size > size {
// could be optimized
let key = self.keys().next().unwrap().clone();
self.remove(&key);

View File

@ -37,6 +37,7 @@ pub struct TrieDB<'db> {
pub hash_count: usize,
}
+#[allow(wrong_self_convention)]
impl<'db> TrieDB<'db> {
/// Create a new trie with the backing database `db` and `root`
/// Panics, if `root` does not exist
@ -102,7 +103,7 @@ impl<'db> TrieDB<'db> {
match node {
Node::Extension(_, payload) => handle_payload(payload),
-Node::Branch(payloads, _) => for payload in payloads.iter() { handle_payload(payload) },
+Node::Branch(payloads, _) => for payload in &payloads { handle_payload(payload) },
_ => {},
}
}
@ -140,12 +141,9 @@ impl<'db> TrieDB<'db> {
},
Node::Branch(ref nodes, ref value) => {
try!(writeln!(f, ""));
-match value {
-&Some(v) => {
+if let Some(v) = *value {
try!(self.fmt_indent(f, deepness + 1));
try!(writeln!(f, "=: {:?}", v.pretty()))
-},
-&None => {}
}
for i in 0..16 {
match self.get_node(nodes[i]) {

View File

@ -49,6 +49,7 @@ enum MaybeChanged<'a> {
Changed(Bytes),
}
+#[allow(wrong_self_convention)]
impl<'db> TrieDBMut<'db> {
/// Create a new trie with the backing database `db` and empty `root`
/// Initialise to the state entailed by the genesis block.
@ -144,7 +145,7 @@ impl<'db> TrieDBMut<'db> {
match node {
Node::Extension(_, payload) => handle_payload(payload),
-Node::Branch(payloads, _) => for payload in payloads.iter() { handle_payload(payload) },
+Node::Branch(payloads, _) => for payload in &payloads { handle_payload(payload) },
_ => {},
}
}
@ -177,12 +178,9 @@ impl<'db> TrieDBMut<'db> {
},
Node::Branch(ref nodes, ref value) => {
try!(writeln!(f, ""));
-match value {
-&Some(v) => {
+if let Some(v) = *value {
try!(self.fmt_indent(f, deepness + 1));
try!(writeln!(f, "=: {:?}", v.pretty()))
-},
-&None => {}
}
for i in 0..16 {
match self.get_node(nodes[i]) {
@ -330,6 +328,7 @@ impl<'db> TrieDBMut<'db> {
}
}
+#[allow(cyclomatic_complexity)]
/// Determine the RLP of the node, assuming we're inserting `partial` into the
/// node currently of data `old`. This will *not* delete any hash of `old` from the database;
/// it will just return the new RLP that includes the new node.
@ -704,7 +703,7 @@ mod tests {
}
fn unpopulate_trie<'a, 'db>(t: &mut TrieDBMut<'db>, v: &Vec<(Vec<u8>, Vec<u8>)>) {
-for i in v.iter() {
+for i in &v {
let key: &[u8]= &i.0;
t.remove(&key);
}

View File

@ -174,7 +174,7 @@ macro_rules! construct_uint {
#[inline]
fn byte(&self, index: usize) -> u8 {
let &$name(ref arr) = self;
-(arr[index / 8] >> ((index % 8)) * 8) as u8
+(arr[index / 8] >> (((index % 8)) * 8)) as u8
}
fn to_bytes(&self, bytes: &mut[u8]) {
@ -328,16 +328,16 @@ macro_rules! construct_uint {
impl FromJson for $name {
fn from_json(json: &Json) -> Self {
-match json {
-&Json::String(ref s) => {
+match *json {
+Json::String(ref s) => {
if s.len() >= 2 && &s[0..2] == "0x" {
FromStr::from_str(&s[2..]).unwrap_or(Default::default())
} else {
Uint::from_dec_str(s).unwrap_or(Default::default())
}
},
-&Json::U64(u) => From::from(u),
-&Json::I64(i) => From::from(i as u64),
+Json::U64(u) => From::from(u),
+Json::I64(i) => From::from(i as u64),
_ => Uint::zero(),
}
}
@ -370,7 +370,7 @@ macro_rules! construct_uint {
for i in 0..bytes.len() {
let rev = bytes.len() - 1 - i;
let pos = rev / 8;
-ret[pos] += (bytes[i] as u64) << (rev % 8) * 8;
+ret[pos] += (bytes[i] as u64) << ((rev % 8) * 8);
}
$name(ret)
}
@ -382,7 +382,7 @@ macro_rules! construct_uint {
fn from_str(value: &str) -> Result<$name, Self::Err> {
let bytes: Vec<u8> = match value.len() % 2 == 0 {
true => try!(value.from_hex()),
-false => try!(("0".to_string() + value).from_hex())
+false => try!(("0".to_owned() + value).from_hex())
};
let bytes_ref: &[u8] = &bytes;
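For reference, a sketch of the `precedence` fix in the two shift hunks above: `*` binds tighter than `<<` and `>>` in Rust, so the added parentheses only make the existing grouping explicit and behaviour is unchanged. The values below are arbitrary:

```rust
fn main() {
    let arr: [u64; 4] = [0xDDCC_BBAA, 0, 0, 0];
    let index = 2;

    // Same grouping with and without the explicit parentheses,
    // because `*` has higher precedence than `>>`.
    let implicit = (arr[index / 8] >> (index % 8) * 8) as u8;
    let explicit = (arr[index / 8] >> ((index % 8) * 8)) as u8;

    // Both read byte 2 of the first word.
    assert_eq!(implicit, 0xCC);
    assert_eq!(explicit, 0xCC);
}
```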