Fixing clippy stuff - work in progress

Tomusdrw 2016-01-17 15:56:09 +01:00
parent 14a12b3f16
commit 1b6dead109
17 changed files with 98 additions and 121 deletions


@@ -103,7 +103,7 @@ impl Account {
 	/// Get (and cache) the contents of the trie's storage at `key`.
 	pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
 		self.storage_overlay.borrow_mut().entry(key.clone()).or_insert_with(||{
-			(Filth::Clean, H256::from(SecTrieDB::new(db, &self.storage_root).get(key.bytes()).map(|v| -> U256 {decode(v)}).unwrap_or(U256::zero())))
+			(Filth::Clean, H256::from(SecTrieDB::new(db, &self.storage_root).get(key.bytes()).map_or(U256::zero(), |v| -> U256 {decode(v)})))
 		}).1.clone()
 	}
@@ -149,7 +149,7 @@ impl Account {
 	/// Provide a database to lookup `code_hash`. Should not be called if it is a contract without code.
 	pub fn cache_code(&mut self, db: &HashDB) -> bool {
 		// TODO: fill out self.code_cache;
-		return self.is_cached() ||
+		self.is_cached() ||
 		match self.code_hash {
 			Some(ref h) => match db.lookup(h) {
 				Some(x) => { self.code_cache = x.to_vec(); true },
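
Note: two clippy patterns show up in this file: `option_map_unwrap_or` (prefer `Option::map_or` over `map(..).unwrap_or(..)`) and `needless_return` (the trailing expression of a function is already its return value). A minimal standalone sketch of both, using a plain `Option<u32>` and a hypothetical `doubled_or_zero` helper rather than the ethcore types:

    fn doubled_or_zero(v: Option<u32>) -> u32 {
        // needless_return: the final expression is returned implicitly.
        // option_map_unwrap_or: one combinator instead of map + unwrap_or.
        v.map_or(0, |x| x * 2)
    }

    fn main() {
        assert_eq!(doubled_or_zero(Some(3)), 6);
        assert_eq!(doubled_or_zero(None), 0);
    }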


@@ -12,10 +12,10 @@ pub enum Existance {
 impl fmt::Display for Existance {
 	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-		match self {
-			&Existance::Born => try!(write!(f, "+++")),
-			&Existance::Alive => try!(write!(f, "***")),
-			&Existance::Died => try!(write!(f, "XXX")),
+		match *self {
+			Existance::Born => try!(write!(f, "+++")),
+			Existance::Alive => try!(write!(f, "***")),
+			Existance::Died => try!(write!(f, "XXX")),
 		}
 		Ok(())
 	}
@@ -102,16 +102,15 @@ impl fmt::Display for AccountDiff {
 			Diff::Changed(ref pre, ref post) => try!(write!(f, "${} ({} {} {})", post, pre, if pre > post {"-"} else {"+"}, *max(pre, post) - *min(pre, post))),
 			_ => {},
 		}
-		match self.code {
-			Diff::Born(ref x) => try!(write!(f, " code {}", x.pretty())),
-			_ => {},
+		if let Diff::Born(ref x) = self.code {
+			try!(write!(f, " code {}", x.pretty()));
 		}
 		try!(write!(f, "\n"));
-		for (k, dv) in self.storage.iter() {
-			match dv {
-				&Diff::Born(ref v) => try!(write!(f, " + {} => {}\n", interpreted_hash(k), interpreted_hash(v))),
-				&Diff::Changed(ref pre, ref post) => try!(write!(f, " * {} => {} (was {})\n", interpreted_hash(k), interpreted_hash(post), interpreted_hash(pre))),
-				&Diff::Died(_) => try!(write!(f, " X {}\n", interpreted_hash(k))),
+		for (k, dv) in &self.storage {
+			match *dv {
+				Diff::Born(ref v) => try!(write!(f, " + {} => {}\n", interpreted_hash(k), interpreted_hash(v))),
+				Diff::Changed(ref pre, ref post) => try!(write!(f, " * {} => {} (was {})\n", interpreted_hash(k), interpreted_hash(post), interpreted_hash(pre))),
+				Diff::Died(_) => try!(write!(f, " X {}\n", interpreted_hash(k))),
 				_ => {},
 			}
 		}
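
Note: the rewrites above follow clippy's `match_ref_pats` and `single_match` suggestions (match on `*self` with plain variant patterns instead of `&`-patterns in every arm; collapse a one-armed `match` into `if let`), plus `explicit_iter_loop` (`for x in &coll` instead of `coll.iter()`). A small self-contained sketch of the same shapes on a made-up enum, not the ethcore types:

    enum Existance { Born, Alive, Died }

    fn tag(e: &Existance) -> &'static str {
        // Dereference once, then match plain variants (match_ref_pats).
        match *e {
            Existance::Born => "+++",
            Existance::Alive => "***",
            Existance::Died => "XXX",
        }
    }

    fn main() {
        let states = vec![Existance::Born, Existance::Alive, Existance::Died];
        // explicit_iter_loop: iterate over `&states` rather than `states.iter()`.
        for s in &states {
            // single_match: a one-armed match reads better as `if let`.
            if let Existance::Born = *s {
                println!("{} born", tag(s));
            }
        }
    }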


@@ -127,9 +127,8 @@ impl BlockProvider for BlockChain {
 	fn block(&self, hash: &H256) -> Option<Bytes> {
 		{
 			let read = self.blocks.read().unwrap();
-			match read.get(hash) {
-				Some(v) => return Some(v.clone()),
-				None => ()
+			if let Some(v) = read.get(hash) {
+				return Some(v.clone());
 			}
 		}
@@ -509,9 +508,8 @@ impl BlockChain {
 		T: ExtrasIndexable {
 		{
 			let read = cache.read().unwrap();
-			match read.get(hash) {
-				Some(_) => return true,
-				None => ()
+			if let Some(_) = read.get(hash) {
+				return true;
 			}
 		}
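
Note: both hunks replace a `match` whose `None` arm does nothing with an `if let` and an early return (clippy's `single_match`). A minimal sketch of the cache-lookup shape, with a hypothetical `cached` helper and a plain `HashMap` standing in for the `RwLock`-guarded caches:

    use std::collections::HashMap;

    fn cached(cache: &HashMap<u32, String>, key: u32) -> Option<String> {
        // single_match: `match cache.get(..) { Some(v) => return .., None => () }`
        // collapses to an `if let` with an early return.
        if let Some(v) = cache.get(&key) {
            return Some(v.clone());
        }
        None // the real code would fall through to a database lookup here
    }

    fn main() {
        let mut cache = HashMap::new();
        cache.insert(1, "block".to_owned());
        assert_eq!(cached(&cache, 1), Some("block".to_owned()));
        assert_eq!(cached(&cache, 2), None);
    }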


@@ -93,16 +93,13 @@ pub fn new_builtin_exec(name: &str) -> Option<Box<Fn(&[u8], &mut [u8])>> {
 			if it.v == H256::from(&U256::from(27)) || it.v == H256::from(&U256::from(28)) {
 				let s = Signature::from_rsv(&it.r, &it.s, it.v[31] - 27);
 				if ec::is_valid(&s) {
-					match ec::recover(&s, &it.hash) {
-						Ok(p) => {
-							let r = p.as_slice().sha3();
-							// NICE: optimise and separate out into populate-like function
-							for i in 0..min(32, output.len()) {
-								output[i] = if i < 12 {0} else {r[i]};
-							}
-						}
-						_ => {}
-					};
+					if let Ok(p) = ec::recover(&s, &it.hash) {
+						let r = p.as_slice().sha3();
+						// NICE: optimise and separate out into populate-like function
+						for i in 0..min(32, output.len()) {
+							output[i] = if i < 12 {0} else {r[i]};
+						}
+					}
 				}
 			}
 		})),
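
Note: same idea applied to a `Result`: a `match` that ignores the `Err` arm becomes `if let Ok(..)` and drops one level of nesting. A standalone sketch, with an ordinary `str::parse` standing in for `ec::recover`:

    fn main() {
        let input = "42";
        // Before: match input.parse() { Ok(n) => { .. }, _ => {} }
        // After: only the success path matters, so `if let Ok(..)` is enough.
        if let Ok(n) = input.parse::<u32>() {
            println!("recovered value: {}", n);
        }
    }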


@@ -65,7 +65,7 @@ impl<S : Copy> VecStack<S> {
 impl<S : fmt::Display> Stack<S> for VecStack<S> {
 	fn peek(&self, no_from_top: usize) -> &S {
-		return &self.stack[self.stack.len() - no_from_top - 1];
+		&self.stack[self.stack.len() - no_from_top - 1]
 	}
 	fn swap_with_top(&mut self, no_from_top: usize) {
@@ -150,7 +150,7 @@ impl Memory for Vec<u8> {
 	}
 	fn size(&self) -> usize {
-		return self.len()
+		self.len()
 	}
 	fn read_slice(&self, init_off_u: U256, init_size_u: U256) -> &[u8] {
@@ -820,7 +820,7 @@ impl Interpreter {
 	fn copy_data_to_memory(&self,
 		mem: &mut Memory,
 		stack: &mut Stack<U256>,
-		data: &Bytes) {
+		data: &[u8]) {
 		let offset = stack.pop_back();
 		let index = stack.pop_back();
 		let size = stack.pop_back();
@@ -1097,7 +1097,7 @@ impl Interpreter {
 		Ok(())
 	}
-	fn find_jump_destinations(&self, code: &Bytes) -> HashSet<CodePosition> {
+	fn find_jump_destinations(&self, code: &[u8]) -> HashSet<CodePosition> {
 		let mut jump_dests = HashSet::new();
 		let mut position = 0;
@@ -1112,7 +1112,7 @@ impl Interpreter {
 			position += 1;
 		}
-		return jump_dests;
+		jump_dests
 	}
 }
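
Note: besides more `needless_return` fixes, the signature changes here (`&Bytes` to `&[u8]`) follow clippy's `ptr_arg` lint: taking `&Vec<u8>` (which is what `Bytes` is assumed to alias in this codebase) forces callers to have a `Vec`, while `&[u8]` accepts any byte slice and a `&Vec<u8>` coerces to it for free. A small, deliberately simplified sketch of the shape (the real interpreter also skips PUSH data; this scan does not):

    type Bytes = Vec<u8>; // assumption: mirrors the ethcore alias

    // ptr_arg: prefer a slice parameter over &Vec<u8>.
    fn find_jump_destinations(code: &[u8]) -> Vec<usize> {
        let mut dests = Vec::new();
        for (position, byte) in code.iter().enumerate() {
            if *byte == 0x5b { // JUMPDEST opcode
                dests.push(position);
            }
        }
        dests // needless_return: no `return` keyword on the tail expression
    }

    fn main() {
        let code: Bytes = vec![0x60, 0x00, 0x5b, 0x00];
        // A &Vec<u8> coerces to &[u8], and so does a stack array or a slice.
        assert_eq!(find_jump_destinations(&code), vec![2]);
    }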


@@ -123,8 +123,8 @@ impl<'a> Executive<'a> {
 		let mut substate = Substate::new();
-		let res = match t.action() {
-			&Action::Create => {
+		let res = match *t.action() {
+			Action::Create => {
 				let new_address = contract_address(&sender, &nonce);
 				let params = ActionParams {
 					code_address: new_address.clone(),
@@ -139,7 +139,7 @@ impl<'a> Executive<'a> {
 				};
 				self.create(params, &mut substate)
 			},
-			&Action::Call(ref address) => {
+			Action::Call(ref address) => {
 				let params = ActionParams {
 					code_address: address.clone(),
 					address: address.clone(),
@@ -177,7 +177,7 @@ impl<'a> Executive<'a> {
 		// if destination is builtin, try to execute it
 		let default = [];
-		let data = if let &Some(ref d) = &params.data { d as &[u8] } else { &default as &[u8] };
+		let data = if let Some(ref d) = params.data { d as &[u8] } else { &default as &[u8] };
 		let cost = self.engine.cost_of_builtin(&params.code_address, data);
 		match cost <= params.gas {
@@ -248,7 +248,7 @@ impl<'a> Executive<'a> {
 		let refunds_bound = sstore_refunds + suicide_refunds;
 		// real ammount to refund
-		let gas_left_prerefund = match &result { &Ok(x) => x, _ => x!(0) };
+		let gas_left_prerefund = match result { Ok(x) => x, _ => x!(0) };
 		let refunded = cmp::min(refunds_bound, (t.gas - gas_left_prerefund) / U256::from(2));
 		let gas_left = gas_left_prerefund + refunded;
@@ -265,7 +265,7 @@ impl<'a> Executive<'a> {
 		self.state.add_balance(&self.info.author, &fees_value);
 		// perform suicides
-		for address in substate.suicides.iter() {
+		for address in &substate.suicides {
 			trace!("Killing {}", address);
 			self.state.kill_account(address);
 		}
@@ -273,11 +273,7 @@ impl<'a> Executive<'a> {
 		match result {
 			Err(evm::Error::Internal) => Err(ExecutionError::Internal),
 			// TODO [ToDr] BadJumpDestination @debris - how to handle that?
-			Err(evm::Error::OutOfGas)
-				| Err(evm::Error::BadJumpDestination { destination: _ })
-				| Err(evm::Error::BadInstruction { instruction: _ })
-				| Err(evm::Error::StackUnderflow {instruction: _, wanted: _, on_stack: _})
-				| Err(evm::Error::OutOfStack {instruction: _, wanted: _, limit: _}) => {
+			Err(_) => {
 				Ok(Executed {
 					gas: t.gas,
 					gas_used: t.gas,
@@ -302,15 +298,15 @@ impl<'a> Executive<'a> {
 	fn enact_result(&mut self, result: &evm::Result, substate: &mut Substate, un_substate: Substate, backup: State) {
 		// TODO: handle other evm::Errors same as OutOfGas once they are implemented
-		match result {
-			&Err(evm::Error::OutOfGas)
-				| &Err(evm::Error::BadJumpDestination { destination: _ })
-				| &Err(evm::Error::BadInstruction { instruction: _ })
-				| &Err(evm::Error::StackUnderflow {instruction: _, wanted: _, on_stack: _})
-				| &Err(evm::Error::OutOfStack {instruction: _, wanted: _, limit: _}) => {
+		match *result {
+			Err(evm::Error::OutOfGas)
+				| Err(evm::Error::BadJumpDestination {..})
+				| Err(evm::Error::BadInstruction {.. })
+				| Err(evm::Error::StackUnderflow {..})
+				| Err(evm::Error::OutOfStack {..}) => {
 				self.state.revert(backup);
 			},
-			&Ok(_) | &Err(evm::Error::Internal) => substate.accrue(un_substate)
+			Ok(_) | Err(evm::Error::Internal) => substate.accrue(un_substate)
 		}
 	}
 }
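
Note: two related cleanups here. Matching on `*result` removes the `&` from every arm (`match_ref_pats` again), and `{..}` in a struct-variant pattern replaces spelling out every field as `_`. The `Err(_) => {..}` catch-all in the earlier hunk works because `Internal` is matched first and the remaining error variants are handled identically at that point. A sketch of the pattern shorthand on a made-up error enum and a hypothetical `should_revert` helper:

    #[allow(dead_code)]
    enum EvmError {
        OutOfGas,
        BadJumpDestination { destination: usize },
        StackUnderflow { wanted: usize, on_stack: usize },
        Internal,
    }

    fn should_revert(result: &Result<u64, EvmError>) -> bool {
        // Match through the reference once; `{ .. }` ignores all fields.
        match *result {
            Err(EvmError::OutOfGas)
            | Err(EvmError::BadJumpDestination { .. })
            | Err(EvmError::StackUnderflow { .. }) => true,
            Ok(_) | Err(EvmError::Internal) => false,
        }
    }

    fn main() {
        assert!(should_revert(&Err(EvmError::OutOfGas)));
        assert!(!should_revert(&Ok(21_000)));
    }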


@@ -161,6 +161,7 @@ impl<'a> Ext for Externalities<'a> {
 		self.state.code(address).unwrap_or(vec![])
 	}
+	#[allow(match_ref_pats)]
 	fn ret(&mut self, gas: &U256, data: &[u8]) -> Result<U256, evm::Error> {
 		match &mut self.output {
 			&mut OutputPolicy::Return(BytesRef::Fixed(ref mut slice)) => unsafe {


@@ -135,9 +135,10 @@ impl Header {
 		s.append(&self.gas_used);
 		s.append(&self.timestamp);
 		s.append(&self.extra_data);
-		match with_seal {
-			Seal::With => for b in self.seal.iter() { s.append_raw(&b, 1); },
-			_ => {}
+		if let Seal::With = with_seal {
+			for b in &self.seal {
+				s.append_raw(&b, 1);
+			}
 		}
 	}
@@ -198,7 +199,7 @@ impl Encodable for Header {
 			self.timestamp.encode(e);
 			self.extra_data.encode(e);
-			for b in self.seal.iter() {
+			for b in &self.seal {
 				e.emit_raw(&b);
 			}
 		})


@@ -73,7 +73,6 @@
 //! sudo make install
 //! sudo ldconfig
 //! ```
 #[macro_use]
 extern crate log;
 extern crate rustc_serialize;
@@ -86,6 +85,7 @@ extern crate env_logger;
 #[cfg(feature = "jit" )]
 extern crate evmjit;
 #[macro_use]
+#[allow(match_bool)]
 extern crate ethcore_util as util;
 pub mod common;


@@ -31,7 +31,7 @@ impl RlpStandard for Receipt {
 		// TODO: make work:
 		//s.append(&self.logs);
 		s.append_list(self.logs.len());
-		for l in self.logs.iter() {
+		for l in &self.logs {
 			l.rlp_append(s);
 		}
 	}


@@ -51,20 +51,13 @@ impl IoHandler<NetSyncMessage> for ClientIoHandler {
 	fn initialize<'s>(&'s mut self, _io: &mut IoContext<'s, NetSyncMessage>) {
 	}
+	#[allow(match_ref_pats)]
 	fn message<'s>(&'s mut self, _io: &mut IoContext<'s, NetSyncMessage>, net_message: &'s mut NetSyncMessage) {
-		match net_message {
-			&mut UserMessage(ref mut message) => {
-				match message {
-					&mut SyncMessage::BlockVerified(ref mut bytes) => {
-						self.client.write().unwrap().import_verified_block(mem::replace(bytes, Bytes::new()));
-					},
-					_ => {}, // ignore other messages
-				}
+		if let &mut UserMessage(ref mut message) = net_message {
+			if let &mut SyncMessage::BlockVerified(ref mut bytes) = message {
+				self.client.write().unwrap().import_verified_block(mem::replace(bytes, Bytes::new()));
 			}
-			_ => {}, // ignore other messages
 		}
 	}
 }


@@ -10,7 +10,7 @@ pub fn gzip64res_to_json(source: &[u8]) -> Json {
 	let data = source.from_base64().expect("Genesis block is malformed!");
 	let data_ref: &[u8] = &data;
 	let mut decoder = GzDecoder::new(data_ref).expect("Gzip is invalid");
-	let mut s: String = "".to_string();
+	let mut s: String = "".to_owned();
 	decoder.read_to_string(&mut s).expect("Gzip is invalid");
 	Json::from_str(&s).expect("Json is invalid")
 }
@@ -18,14 +18,14 @@ pub fn gzip64res_to_json(source: &[u8]) -> Json {
 /// Convert JSON value to equivlaent RLP representation.
 // TODO: handle container types.
 fn json_to_rlp(json: &Json) -> Bytes {
-	match json {
-		&Json::Boolean(o) => encode(&(if o {1u64} else {0})),
-		&Json::I64(o) => encode(&(o as u64)),
-		&Json::U64(o) => encode(&o),
-		&Json::String(ref s) if s.len() >= 2 && &s[0..2] == "0x" && U256::from_str(&s[2..]).is_ok() => {
+	match *json {
+		Json::Boolean(o) => encode(&(if o {1u64} else {0})),
+		Json::I64(o) => encode(&(o as u64)),
+		Json::U64(o) => encode(&o),
+		Json::String(ref s) if s.len() >= 2 && &s[0..2] == "0x" && U256::from_str(&s[2..]).is_ok() => {
 			encode(&U256::from_str(&s[2..]).unwrap())
 		},
-		&Json::String(ref s) => {
+		Json::String(ref s) => {
 			encode(s)
 		},
 		_ => panic!()
@@ -96,6 +96,7 @@ pub struct Spec {
 impl Spec {
 	/// Convert this object into a boxed Engine of the right underlying type.
 	// TODO avoid this hard-coded nastiness - use dynamic-linked plugin framework instead.
+	#[allow(wrong_self_convention)]
 	pub fn to_engine(self) -> Result<Box<Engine>, Error> {
 		match self.engine_name.as_ref() {
 			"NullEngine" => Ok(NullEngine::new_boxed(self)),
@@ -169,8 +170,8 @@ impl FromJson for Spec {
 				builtins.insert(addr.clone(), builtin);
 			}
 		}
-		let balance = acc.find("balance").and_then(|x| match x { &Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
-		let nonce = acc.find("nonce").and_then(|x| match x { &Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
+		let balance = acc.find("balance").and_then(|x| match *x { Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
+		let nonce = acc.find("nonce").and_then(|x| match *x { Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
 		// let balance = if let Some(&Json::String(ref b)) = acc.find("balance") {U256::from_dec_str(b).unwrap_or(U256::from(0))} else {U256::from(0)};
 		// let nonce = if let Some(&Json::String(ref n)) = acc.find("nonce") {U256::from_dec_str(n).unwrap_or(U256::from(0))} else {U256::from(0)};
 		// TODO: handle code & data if they exist.
@@ -199,8 +200,8 @@ impl FromJson for Spec {
 		Spec {
-			name: json.find("name").map(|j| j.as_string().unwrap()).unwrap_or("unknown").to_string(),
-			engine_name: json["engineName"].as_string().unwrap().to_string(),
+			name: json.find("name").map(|j| j.as_string().unwrap()).unwrap_or("unknown").to_owned(),
+			engine_name: json["engineName"].as_string().unwrap().to_owned(),
 			engine_params: json_to_rlp_map(&json["params"]),
 			builtins: builtins,
 			parent_hash: H256::from_str(&genesis["parentHash"].as_string().unwrap()[2..]).unwrap(),
@@ -226,7 +227,7 @@ impl Spec {
 		let mut root = H256::new();
 		{
 			let mut t = SecTrieDBMut::new(db, &mut root);
-			for (address, account) in self.genesis_state.iter() {
+			for (address, account) in &self.genesis_state {
 				t.insert(address.as_slice(), &account.rlp());
 			}
 		}
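
Note: the `to_string()` to `to_owned()` swaps follow clippy's `str_to_string` advice of the time: on a `&str`, `to_owned()` is a plain copy into a `String`, while `to_string()` went through the `Display` formatting machinery (later Rust versions specialise this, but the explicit form still states the intent). The `#[allow(wrong_self_convention)]` silences the lint that expects a `to_*` method to take `&self` rather than consume `self`. A tiny sketch of the string conversion:

    fn main() {
        let name: &str = "unknown";
        // str_to_string: prefer the straight copy...
        let owned: String = name.to_owned();
        // ...over routing a &str through the Display machinery.
        let via_display: String = name.to_string();
        assert_eq!(owned, via_display);
    }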


@@ -87,22 +87,22 @@ impl State {
 	/// Get the balance of account `a`.
 	pub fn balance(&self, a: &Address) -> U256 {
-		self.get(a, false).as_ref().map(|account| account.balance().clone()).unwrap_or(U256::from(0u8))
+		self.get(a, false).as_ref().map_or(U256::zero(), |account| account.balance().clone())
 	}
 	/// Get the nonce of account `a`.
 	pub fn nonce(&self, a: &Address) -> U256 {
-		self.get(a, false).as_ref().map(|account| account.nonce().clone()).unwrap_or(U256::from(0u8))
+		self.get(a, false).as_ref().map_or(U256::zero(), |account| account.nonce().clone())
 	}
 	/// Mutate storage of account `a` so that it is `value` for `key`.
 	pub fn storage_at(&self, a: &Address, key: &H256) -> H256 {
-		self.get(a, false).as_ref().map(|a|a.storage_at(&self.db, key)).unwrap_or(H256::new())
+		self.get(a, false).as_ref().map_or(H256::new(), |a|a.storage_at(&self.db, key))
 	}
 	/// Mutate storage of account `a` so that it is `value` for `key`.
 	pub fn code(&self, a: &Address) -> Option<Bytes> {
-		self.get(a, true).as_ref().map(|a|a.code().map(|x|x.to_vec())).unwrap_or(None)
+		self.get(a, true).as_ref().map_or(None, |a|a.code().map(|x|x.to_vec()))
 	}
 	/// Add `incr` to the balance of account `a`.
@@ -168,6 +168,7 @@ impl State {
 	/// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
 	/// `accounts` is mutable because we may need to commit the code or storage and record that.
+	#[allow(match_ref_pats)]
 	pub fn commit_into(db: &mut HashDB, root: &mut H256, accounts: &mut HashMap<Address, Option<Account>>) {
 		// first, commit the sub trees.
 		// TODO: is this necessary or can we dispense with the `ref mut a` for just `a`?
@@ -184,9 +185,9 @@ impl State {
 		{
 			let mut trie = SecTrieDBMut::from_existing(db, root);
 			for (address, ref a) in accounts.iter() {
-				match a {
-					&&Some(ref account) => trie.insert(address, &account.rlp()),
-					&&None => trie.remove(address),
+				match **a {
+					Some(ref account) => trie.insert(address, &account.rlp()),
+					None => trie.remove(address),
 				}
 			}
 		}
@@ -208,7 +209,7 @@ impl State {
 	pub fn to_hashmap_pod(&self) -> HashMap<Address, PodAccount> {
 		// TODO: handle database rather than just the cache.
 		self.cache.borrow().iter().fold(HashMap::new(), |mut m, (add, opt)| {
-			if let &Some(ref acc) = opt {
+			if let Some(ref acc) = *opt {
 				m.insert(add.clone(), PodAccount::from_account(acc));
 			}
 			m
@@ -219,7 +220,7 @@ impl State {
 	pub fn to_pod(&self) -> PodState {
 		// TODO: handle database rather than just the cache.
 		PodState::new(self.cache.borrow().iter().fold(BTreeMap::new(), |mut m, (add, opt)| {
-			if let &Some(ref acc) = opt {
+			if let Some(ref acc) = *opt {
 				m.insert(add.clone(), PodAccount::from_account(acc));
 			}
 			m
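
Note: the accessor rewrites are more `map_or` conversions (the `code()` case keeps the old `map(..).unwrap_or(None)` behaviour as `map_or(None, ..)`, which could arguably be `and_then`), and the commit loop matches on `**a` because iterating `accounts.iter()` with a `ref a` binding yields an `&&Option<Account>`, so two dereferences get back to the `Option` and the `&&Some(..)` arm patterns disappear. A minimal sketch of matching through a double reference, with a hypothetical `describe` helper:

    fn describe(a: &&Option<u32>) -> &'static str {
        // Two derefs instead of `&&Some(..)` / `&&None` patterns in the arms.
        match **a {
            Some(_) => "insert",
            None => "remove",
        }
    }

    fn main() {
        let present = Some(7u32);
        let absent: Option<u32> = None;
        assert_eq!(describe(&&present), "insert");
        assert_eq!(describe(&&absent), "remove");
    }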


@@ -14,7 +14,7 @@ impl StateDiff {
 impl fmt::Display for StateDiff {
 	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-		for (add, acc) in self.0.iter() {
+		for (add, acc) in &self.0 {
 			try!(write!(f, "{} {}: {}", acc.existance(), add, acc));
 		}
 		Ok(())


@@ -212,7 +212,7 @@ impl ChainSync {
 		self.downloading_bodies.clear();
 		self.headers.clear();
 		self.bodies.clear();
-		for (_, ref mut p) in self.peers.iter_mut() {
+		for (_, ref mut p) in &mut self.peers {
 			p.asking_blocks.clear();
 		}
 		self.header_ids.clear();
@@ -375,7 +375,7 @@ impl ChainSync {
 						transactions_root: tx_root,
 						uncles: uncles
 					};
-					match self.header_ids.get(&header_id).map(|n| *n) {
+					match self.header_ids.get(&header_id).cloned() {
 						Some(n) => {
 							self.header_ids.remove(&header_id);
 							self.bodies.insert_item(n, body.as_raw().to_vec());
@@ -699,16 +699,13 @@ impl ChainSync {
 	/// Used to recover from an error and re-download parts of the chain detected as bad.
 	fn remove_downloaded_blocks(&mut self, start: BlockNumber) {
 		for n in self.headers.get_tail(&start) {
-			match self.headers.find_item(&n) {
-				Some(ref header_data) => {
-					let header_to_delete = HeaderView::new(&header_data.data);
-					let header_id = HeaderId {
-						transactions_root: header_to_delete.transactions_root(),
-						uncles: header_to_delete.uncles_hash()
-					};
-					self.header_ids.remove(&header_id);
-				},
-				None => {}
+			if let Some(ref header_data) = self.headers.find_item(&n) {
+				let header_to_delete = HeaderView::new(&header_data.data);
+				let header_id = HeaderId {
+					transactions_root: header_to_delete.transactions_root(),
+					uncles: header_to_delete.uncles_hash()
+				};
+				self.header_ids.remove(&header_id);
 			}
 			self.downloading_bodies.remove(&n);
 			self.downloading_headers.remove(&n);
@@ -796,12 +793,9 @@ impl ChainSync {
 		packet.append(&chain.best_block_hash);
 		packet.append(&chain.genesis_hash);
 		//TODO: handle timeout for status request
-		match io.send(peer_id, STATUS_PACKET, packet.out()) {
-			Err(e) => {
-				warn!(target:"sync", "Error sending status request: {:?}", e);
-				io.disable_peer(peer_id);
-			}
-			Ok(_) => ()
+		if let Err(e) = io.send(peer_id, STATUS_PACKET, packet.out()) {
+			warn!(target:"sync", "Error sending status request: {:?}", e);
+			io.disable_peer(peer_id);
 		}
 	}
@@ -837,12 +831,9 @@ impl ChainSync {
 		let mut data = Bytes::new();
 		let inc = (skip + 1) as BlockNumber;
 		while number <= last && number > 0 && count < max_count {
-			match io.chain().block_header_at(number) {
-				Some(mut hdr) => {
-					data.append(&mut hdr);
-					count += 1;
-				}
-				None => {}
+			if let Some(mut hdr) = io.chain().block_header_at(number) {
+				data.append(&mut hdr);
+				count += 1;
 			}
 			if reverse {
 				if number <= inc {
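
Note: alongside more `if let` conversions, `self.header_ids.get(&header_id).map(|n| *n)` becomes `.cloned()`, clippy's `map_clone` suggestion: it copies the value out of the `Option<&T>` without spelling out a dereferencing closure. A standalone sketch on a plain `HashMap`:

    use std::collections::HashMap;

    fn main() {
        let mut header_ids: HashMap<&str, u64> = HashMap::new();
        header_ids.insert("header", 5);
        // map_clone: Option<&u64> -> Option<u64> without a `|n| *n` closure.
        let n: Option<u64> = header_ids.get("header").cloned();
        assert_eq!(n, Some(5));
    }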


@@ -104,9 +104,8 @@ impl Transaction {
 		};
 		s.append(&self.value);
 		s.append(&self.data);
-		match with_seal {
-			Seal::With => { s.append(&(self.v as u16)).append(&self.r).append(&self.s); },
-			_ => {}
+		if let Seal::With = with_seal {
+			s.append(&(self.v as u16)).append(&self.r).append(&self.s);
 		}
 	}
@@ -125,7 +124,7 @@ impl FromJson for Transaction {
 			gas_price: xjson!(&json["gasPrice"]),
 			gas: xjson!(&json["gasLimit"]),
 			action: match Bytes::from_json(&json["to"]) {
-				ref x if x.len() == 0 => Action::Create,
+				ref x if x.is_empty() => Action::Create,
 				ref x => Action::Call(Address::from_slice(x)),
 			},
 			value: xjson!(&json["value"]),
@@ -290,4 +289,4 @@ fn signing() {
 	let key = KeyPair::create().unwrap();
 	let t = Transaction::new_create(U256::from(42u64), b"Hello!".to_vec(), U256::from(3000u64), U256::from(50_000u64), U256::from(1u64)).signed(&key.secret());
 	assert_eq!(Address::from(key.public().sha3()), t.sender().unwrap());
 }
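
Note: `x.len() == 0` to `x.is_empty()` is clippy's `len_zero` lint; `is_empty` states the intent directly. A tiny sketch mirroring the "empty `to` field means contract creation" check, on a made-up `Action` enum and a hypothetical `action_for` helper:

    #[derive(Debug, PartialEq)]
    enum Action { Create, Call(Vec<u8>) }

    fn action_for(to: &[u8]) -> Action {
        // len_zero: is_empty() over len() == 0.
        if to.is_empty() { Action::Create } else { Action::Call(to.to_vec()) }
    }

    fn main() {
        assert_eq!(action_for(&[]), Action::Create);
        assert_eq!(action_for(&[0xde, 0xad]), Action::Call(vec![0xde, 0xad]));
    }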


@@ -98,7 +98,7 @@ impl<'a> BlockView<'a> {
 	/// Return List of transactions in given block.
 	pub fn transaction_views(&self) -> Vec<TransactionView> {
-		self.rlp.at(1).iter().map(|rlp| TransactionView::new_from_rlp(rlp)).collect()
+		self.rlp.at(1).iter().map(TransactionView::new_from_rlp).collect()
 	}
 	/// Return List of transactions in given block.
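
Note: `map(|rlp| TransactionView::new_from_rlp(rlp))` to `map(TransactionView::new_from_rlp)` is clippy's `redundant_closure` lint: when a closure only forwards its argument to a function, the function path can be passed to `map` directly. A standalone sketch with an ordinary function in place of the RLP view constructor:

    fn square(x: u32) -> u32 { x * x }

    fn main() {
        let xs = vec![1u32, 2, 3];
        // redundant_closure: `|x| square(x)` adds nothing over `square`.
        let squares: Vec<u32> = xs.iter().cloned().map(square).collect();
        assert_eq!(squares, vec![1, 4, 9]);
    }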