Merge branch 'master' of github.com:ethcore/parity into io
commit e5e0d3d136
@@ -21,6 +21,7 @@ evmjit = { path = "rust-evmjit", optional = true }
 ethash = { path = "ethash" }
 num_cpus = "0.2"
 ctrlc = "1.0"
+clippy = "*" # Always newest, since we use nightly

 [features]
 jit = ["evmjit"]
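Note: the new `clippy` dependency is consumed as a nightly compiler plugin; the crate-level attributes that switch it on appear later in this diff (in the lib.rs hunk). A minimal sketch of that wiring, assuming a 2016-era nightly toolchain:

    #![feature(plugin)]
    #![plugin(clippy)]
    #![allow(needless_range_loop, match_bool)] // project-wide lint exceptions, as in the lib.rs hunk

    fn main() {
        // clippy lints run at compile time; there is nothing to do at runtime
        println!("built with clippy enabled");
    }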
@@ -103,7 +103,7 @@ impl Account {
 /// Get (and cache) the contents of the trie's storage at `key`.
 pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
 self.storage_overlay.borrow_mut().entry(key.clone()).or_insert_with(||{
-(Filth::Clean, H256::from(SecTrieDB::new(db, &self.storage_root).get(key.bytes()).map(|v| -> U256 {decode(v)}).unwrap_or(U256::zero())))
+(Filth::Clean, H256::from(SecTrieDB::new(db, &self.storage_root).get(key.bytes()).map_or(U256::zero(), |v| -> U256 {decode(v)})))
 }).1.clone()
 }

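Note: this hunk and several below apply the same clippy suggestion (`option_map_unwrap_or`): fold `opt.map(f).unwrap_or(default)` into `opt.map_or(default, f)`. A minimal, self-contained illustration with made-up values:

    fn main() {
        let stored: Option<&str> = None;

        // before: two combinators
        let before = stored.map(|v| v.len()).unwrap_or(0);
        // after: one combinator, same result
        let after = stored.map_or(0, |v| v.len());

        assert_eq!(before, after);
        println!("decoded length: {}", after);
    }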
@@ -149,7 +149,7 @@ impl Account {
 /// Provide a database to lookup `code_hash`. Should not be called if it is a contract without code.
 pub fn cache_code(&mut self, db: &HashDB) -> bool {
 // TODO: fill out self.code_cache;
-return self.is_cached() ||
+self.is_cached() ||
 match self.code_hash {
 Some(ref h) => match db.lookup(h) {
 Some(x) => { self.code_cache = x.to_vec(); true },
@@ -248,8 +248,8 @@ mod tests {

 let a = Account::from_rlp(&rlp);
 assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
-assert_eq!(a.storage_at(&mut db, &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
+assert_eq!(a.storage_at(&db, &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
-assert_eq!(a.storage_at(&mut db, &H256::from(&U256::from(0x01u64))), H256::new());
+assert_eq!(a.storage_at(&db, &H256::from(&U256::from(0x01u64))), H256::new());
 }

 #[test]
@@ -15,10 +15,10 @@ pub enum Existance {

 impl fmt::Display for Existance {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-match self {
+match *self {
-&Existance::Born => try!(write!(f, "+++")),
+Existance::Born => try!(write!(f, "+++")),
-&Existance::Alive => try!(write!(f, "***")),
+Existance::Alive => try!(write!(f, "***")),
-&Existance::Died => try!(write!(f, "XXX")),
+Existance::Died => try!(write!(f, "XXX")),
 }
 Ok(())
 }
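Note: the rewrite here is clippy's `match_ref_pats` pattern: instead of matching `self` (a reference) against `&Variant` patterns, dereference the scrutinee once and match plain variants. A standalone sketch with a stand-in enum:

    #[derive(Clone, Copy)]
    enum Existance { Born, Alive, Died }

    fn tag(e: &Existance) -> &'static str {
        // before: `match e { &Existance::Born => ... }`
        match *e {
            Existance::Born => "+++",
            Existance::Alive => "***",
            Existance::Died => "XXX",
        }
    }

    fn main() {
        assert_eq!(tag(&Existance::Born), "+++");
        println!("{} {}", tag(&Existance::Alive), tag(&Existance::Died));
    }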
@@ -72,11 +72,11 @@ impl AccountDiff {
 code: Diff::new(pre.code.clone(), post.code.clone()),
 storage: storage.into_iter().map(|k|
 (k.clone(), Diff::new(
-pre.storage.get(&k).cloned().unwrap_or(H256::new()),
+pre.storage.get(&k).cloned().unwrap_or_else(H256::new),
-post.storage.get(&k).cloned().unwrap_or(H256::new())
+post.storage.get(&k).cloned().unwrap_or_else(H256::new)
 ))).collect(),
 };
-if r.balance.is_same() && r.nonce.is_same() && r.code.is_same() && r.storage.len() == 0 {
+if r.balance.is_same() && r.nonce.is_same() && r.code.is_same() && r.storage.is_empty() {
 None
 } else {
 Some(r)
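Note: `unwrap_or(H256::new())` builds the default even when the Option is populated; `unwrap_or_else(H256::new)` defers that work to the `None` path (clippy's `or_fun_call`). A small illustration with an intentionally noisy constructor:

    fn expensive_default() -> Vec<u8> {
        println!("building default");
        vec![0u8; 32]
    }

    fn main() {
        let cached: Option<Vec<u8>> = Some(vec![1, 2, 3]);

        // eager: prints "building default" although the value is present
        let a = cached.clone().unwrap_or(expensive_default());
        // lazy: the function is only called when the Option is None
        let b = cached.clone().unwrap_or_else(expensive_default);

        assert_eq!(a, b);
    }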
@@ -112,16 +112,15 @@ impl fmt::Display for AccountDiff {
 Diff::Changed(ref pre, ref post) => try!(write!(f, "${} ({} {} {})", post, pre, if pre > post {"-"} else {"+"}, *max(pre, post) - *min(pre, post))),
 _ => {},
 }
-match self.code {
+if let Diff::Born(ref x) = self.code {
-Diff::Born(ref x) => try!(write!(f, " code {}", x.pretty())),
+try!(write!(f, " code {}", x.pretty()));
-_ => {},
 }
 try!(write!(f, "\n"));
-for (k, dv) in self.storage.iter() {
+for (k, dv) in &self.storage {
-match dv {
+match *dv {
-&Diff::Born(ref v) => try!(write!(f, " + {} => {}\n", interpreted_hash(k), interpreted_hash(v))),
+Diff::Born(ref v) => try!(write!(f, " + {} => {}\n", interpreted_hash(k), interpreted_hash(v))),
-&Diff::Changed(ref pre, ref post) => try!(write!(f, " * {} => {} (was {})\n", interpreted_hash(k), interpreted_hash(post), interpreted_hash(pre))),
+Diff::Changed(ref pre, ref post) => try!(write!(f, " * {} => {} (was {})\n", interpreted_hash(k), interpreted_hash(post), interpreted_hash(pre))),
-&Diff::Died(_) => try!(write!(f, " X {}\n", interpreted_hash(k))),
+Diff::Died(_) => try!(write!(f, " X {}\n", interpreted_hash(k))),
 _ => {},
 }
 }
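Note: a `match` with one interesting arm and a `_ => {}` fallback is what clippy's `single_match` lint rewrites to `if let`, as done for `self.code` above. A self-contained sketch:

    #[allow(dead_code)]
    enum Diff { Born(u32), Died(u32) }

    fn describe(d: &Diff) {
        // before:
        // match *d {
        //     Diff::Born(x) => println!("born with {}", x),
        //     _ => {},
        // }
        if let Diff::Born(x) = *d {
            println!("born with {}", x);
        }
    }

    fn main() {
        describe(&Diff::Born(7));
        describe(&Diff::Died(3));
    }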
@@ -1,3 +1,5 @@
+#![allow(ptr_arg)] // Because of &LastHashes -> &Vec<_>
+
 use common::*;
 use engine::*;
 use state::*;
@@ -173,7 +175,7 @@ impl<'x, 'y> OpenBlock<'x, 'y> {
 timestamp: self.block.header.timestamp,
 difficulty: self.block.header.difficulty.clone(),
 last_hashes: self.last_hashes.clone(), // TODO: should be a reference.
-gas_used: self.block.archive.last().map(|t| t.receipt.gas_used).unwrap_or(U256::from(0)),
+gas_used: self.block.archive.last().map_or(U256::zero(), |t| t.receipt.gas_used),
 gas_limit: self.block.header.gas_limit.clone(),
 }
 }
@@ -204,7 +206,7 @@ impl<'x, 'y> OpenBlock<'x, 'y> {
 s.block.header.state_root = s.block.state.root().clone();
 s.block.header.receipts_root = ordered_trie_root(s.block.archive.iter().map(|ref e| e.receipt.rlp_bytes()).collect());
 s.block.header.log_bloom = s.block.archive.iter().fold(LogBloom::zero(), |mut b, e| {b |= &e.receipt.log_bloom; b});
-s.block.header.gas_used = s.block.archive.last().map(|t| t.receipt.gas_used).unwrap_or(U256::from(0));
+s.block.header.gas_used = s.block.archive.last().map_or(U256::zero(), |t| t.receipt.gas_used);
 s.block.header.note_dirty();

 ClosedBlock::new(s, uncle_bytes)
@@ -255,7 +257,7 @@ impl SealedBlock {
 let mut block_rlp = RlpStream::new_list(3);
 self.block.header.stream_rlp(&mut block_rlp, Seal::With);
 block_rlp.append_list(self.block.archive.len());
-for e in self.block.archive.iter() { e.transaction.rlp_append(&mut block_rlp); }
+for e in &self.block.archive { e.transaction.rlp_append(&mut block_rlp); }
 block_rlp.append_raw(&self.uncle_bytes, 1);
 block_rlp.out()
 }
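Note: iterating `&collection` (or `&mut collection`) is the idiomatic spelling of `.iter()` / `.iter_mut()` in a `for` loop, which is the change made to the archive loop above. A runnable illustration:

    fn main() {
        let archive = vec![10u32, 20, 30];

        let mut total = 0;
        for e in &archive {          // same as archive.iter()
            total += *e;
        }

        let mut counters = vec![0u32; 3];
        for c in &mut counters {     // same as counters.iter_mut()
            *c += 1;
        }

        assert_eq!(total, 60);
        assert_eq!(counters, vec![1, 1, 1]);
    }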
@@ -153,9 +153,8 @@ impl BlockProvider for BlockChain {
 fn block(&self, hash: &H256) -> Option<Bytes> {
 {
 let read = self.blocks.read().unwrap();
-match read.get(hash) {
+if let Some(v) = read.get(hash) {
-Some(v) => return Some(v.clone()),
+return Some(v.clone());
-None => ()
 }
 }

@@ -188,7 +187,7 @@ impl BlockProvider for BlockChain {

 const COLLECTION_QUEUE_SIZE: usize = 2;
 const MIN_CACHE_SIZE: usize = 1;
-const MAX_CACHE_SIZE: usize = 1024 * 1024 * 1;
+const MAX_CACHE_SIZE: usize = 1024 * 1024;

 impl BlockChain {
 /// Create new instance of blockchain from given Genesis
@@ -529,9 +528,8 @@ impl BlockChain {
 K: ExtrasSliceConvertable + Eq + Hash + Clone {
 {
 let read = cache.read().unwrap();
-match read.get(hash) {
+if let Some(v) = read.get(hash) {
-Some(v) => return Some(v.clone()),
+return Some(v.clone());
-None => ()
 }
 }

@@ -551,9 +549,8 @@ impl BlockChain {
 T: ExtrasIndexable {
 {
 let read = cache.read().unwrap();
-match read.get(hash) {
+if let Some(_) = read.get(hash) {
-Some(_) => return true,
+return true;
-None => ()
 }
 }

@@ -672,6 +669,7 @@ mod tests {
 }

 #[test]
+#[allow(cyclomatic_complexity)]
 fn test_small_fork() {
 let genesis = "f901fcf901f7a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a07dba07d6b448a186e9612e5f737d1c909dce473e53199901a302c00646d523c1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302000080832fefd8808454c98c8142a059262c330941f3fe2a34d16d6e3c7b30d2ceb37c6a0e9a994c494ee1a61d2410885aa4c8bf8e56e264c0c0".from_hex().unwrap();
 let b1 = "f90261f901f9a05716670833ec874362d65fea27a7cd35af5897d275b31a44944113111e4e96d2a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0cb52de543653d86ccd13ba3ddf8b052525b04231c6884a4db3188a184681d878a0e78628dd45a1f8dc495594d83b76c588a3ee67463260f8b7d4a42f574aeab29aa0e9244cf7503b79c03d3a099e07a80d2dbc77bb0b502d8a89d51ac0d68dd31313b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302000001832fefd882520884562791e580a051b3ecba4e3f2b49c11d42dd0851ec514b1be3138080f72a2b6e83868275d98f8877671f479c414b47f862f86080018304cb2f94095e7baea6a6c7c4c2dfeb977efac326af552d870a801ca09e2709d7ec9bbe6b1bbbf0b2088828d14cd5e8642a1fee22dc74bfa89761a7f9a04bd8813dee4be989accdb708b1c2e325a7e9c695a8024e30e89d6c644e424747c0".from_hex().unwrap();
@@ -94,16 +94,13 @@ pub fn new_builtin_exec(name: &str) -> Option<Box<Fn(&[u8], &mut [u8])>> {
 if it.v == H256::from(&U256::from(27)) || it.v == H256::from(&U256::from(28)) {
 let s = Signature::from_rsv(&it.r, &it.s, it.v[31] - 27);
 if ec::is_valid(&s) {
-match ec::recover(&s, &it.hash) {
-Ok(p) => {
-let r = p.as_slice().sha3();
-// NICE: optimise and separate out into populate-like function
-for i in 0..min(32, output.len()) {
-output[i] = if i < 12 {0} else {r[i]};
-}
-}
-_ => {}
-};
+if let Ok(p) = ec::recover(&s, &it.hash) {
+let r = p.as_slice().sha3();
+// NICE: optimise and separate out into populate-like function
+for i in 0..min(32, output.len()) {
+output[i] = if i < 12 {0} else {r[i]};
+}
+}
 }
 }
 })),
@@ -25,8 +25,14 @@ pub struct EnvInfo {
 }

 impl EnvInfo {
-/// TODO [debris] Please document me
+/// Create empty env_info initialized with zeros
 pub fn new() -> EnvInfo {
+EnvInfo::default()
+}
+}
+
+impl Default for EnvInfo {
+fn default() -> Self {
 EnvInfo {
 number: 0,
 author: Address::new(),
@@ -53,11 +59,3 @@ impl FromJson for EnvInfo {
 }
 }
 }
-
-/// TODO: it should be the other way around.
-/// `new` should call `default`.
-impl Default for EnvInfo {
-fn default() -> Self {
-EnvInfo::new()
-}
-}
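Note: the two EnvInfo hunks swap the direction of delegation: `Default` now holds the zero-initialised construction and `new()` simply forwards to it. A trimmed sketch of the resulting shape (field list shortened, not the full struct):

    #[derive(Debug, PartialEq)]
    struct EnvInfo {
        number: u64,
        gas_limit: u64, // illustrative subset of the real fields
    }

    impl EnvInfo {
        /// Create empty env_info initialized with zeros
        pub fn new() -> EnvInfo {
            EnvInfo::default()
        }
    }

    impl Default for EnvInfo {
        fn default() -> Self {
            EnvInfo { number: 0, gas_limit: 0 }
        }
    }

    fn main() {
        assert_eq!(EnvInfo::new(), EnvInfo::default());
    }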
@@ -32,13 +32,13 @@ impl Ethash {
 }

 fn u64_param(&self, name: &str) -> u64 {
-*self.u64_params.write().unwrap().entry(name.to_string()).or_insert_with(||
+*self.u64_params.write().unwrap().entry(name.to_owned()).or_insert_with(||
-self.spec().engine_params.get(name).map(|a| decode(&a)).unwrap_or(0u64))
+self.spec().engine_params.get(name).map_or(0u64, |a| decode(&a)))
 }

 fn u256_param(&self, name: &str) -> U256 {
-*self.u256_params.write().unwrap().entry(name.to_string()).or_insert_with(||
+*self.u256_params.write().unwrap().entry(name.to_owned()).or_insert_with(||
-self.spec().engine_params.get(name).map(|a| decode(&a)).unwrap_or(x!(0)))
+self.spec().engine_params.get(name).map_or(x!(0), |a| decode(&a)))
 }
 }

@@ -84,7 +84,7 @@ impl Engine for Ethash {
 /// Apply the block reward on finalisation of the block.
 /// This assumes that all uncles are valid uncles (i.e. of at least one generation before the current).
 fn on_close_block(&self, block: &mut Block) {
-let reward = self.spec().engine_params.get("blockReward").map(|a| decode(&a)).unwrap_or(U256::from(0u64));
+let reward = self.spec().engine_params.get("blockReward").map_or(U256::from(0u64), |a| decode(&a));
 let fields = block.fields();

 // Bestow block reward
@@ -153,6 +153,7 @@ impl Engine for Ethash {
 }
 }

+#[allow(wrong_self_convention)] // to_ethash should take self
 impl Ethash {
 fn calculate_difficuty(&self, header: &Header, parent: &Header) -> U256 {
 const EXP_DIFF_PERIOD: u64 = 100000;
@@ -68,10 +68,11 @@ impl Factory {
 fn jit() -> Box<Evm> {
 unimplemented!()
 }
+}
+
+impl Default for Factory {
 /// Returns jitvm factory
 #[cfg(feature = "jit")]
-pub fn default() -> Factory {
+fn default() -> Factory {
 Factory {
 evm: VMType::Jit
 }
@@ -79,7 +80,7 @@ impl Factory {

 /// Returns native rust evm factory
 #[cfg(not(feature = "jit"))]
-pub fn default() -> Factory {
+fn default() -> Factory {
 Factory {
 evm: VMType::Interpreter
 }
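Note: `Factory::default()` moves from an inherent method into an `impl Default`, with the two bodies still selected by the `jit` cargo feature. A condensed, standalone sketch of that shape (types reduced to what the diff shows):

    #[allow(dead_code)]
    enum VMType { Jit, Interpreter }

    struct Factory { evm: VMType }

    impl Default for Factory {
        /// Returns jitvm factory
        #[cfg(feature = "jit")]
        fn default() -> Factory {
            Factory { evm: VMType::Jit }
        }

        /// Returns native rust evm factory
        #[cfg(not(feature = "jit"))]
        fn default() -> Factory {
            Factory { evm: VMType::Interpreter }
        }
    }

    fn main() {
        match Factory::default().evm {
            VMType::Jit => println!("jit backend"),
            VMType::Interpreter => println!("interpreter backend"),
        }
    }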
@@ -72,7 +72,7 @@ impl<S : Copy> VecStack<S> {

 impl<S : fmt::Display> Stack<S> for VecStack<S> {
 fn peek(&self, no_from_top: usize) -> &S {
-return &self.stack[self.stack.len() - no_from_top - 1];
+&self.stack[self.stack.len() - no_from_top - 1]
 }

 fn swap_with_top(&mut self, no_from_top: usize) {
@@ -157,7 +157,7 @@ impl Memory for Vec<u8> {
 }

 fn size(&self) -> usize {
-return self.len()
+self.len()
 }

 fn read_slice(&self, init_off_u: U256, init_size_u: U256) -> &[u8] {
@@ -228,6 +228,7 @@ struct CodeReader<'a> {
 code: &'a Bytes
 }

+#[allow(len_without_is_empty)]
 impl<'a> CodeReader<'a> {
 /// Get `no_of_bytes` from code and convert to U256. Move PC
 fn read(&mut self, no_of_bytes: usize) -> U256 {
@@ -330,6 +331,7 @@ impl evm::Evm for Interpreter {
 }

 impl Interpreter {
+#[allow(cyclomatic_complexity)]
 fn get_gas_cost_mem(&self,
 ext: &evm::Ext,
 instruction: Instruction,
@@ -716,7 +718,7 @@ impl Interpreter {
 let big_id = stack.pop_back();
 let id = big_id.low_u64() as usize;
 let max = id.wrapping_add(32);
-let data = params.data.clone().unwrap_or(vec![]);
+let data = params.data.clone().unwrap_or_else(|| vec![]);
 let bound = cmp::min(data.len(), max);
 if id < bound && big_id < U256::from(data.len()) {
 let mut v = data[id..bound].to_vec();
@@ -727,7 +729,7 @@ impl Interpreter {
 }
 },
 instructions::CALLDATASIZE => {
-stack.push(U256::from(params.data.clone().unwrap_or(vec![]).len()));
+stack.push(U256::from(params.data.clone().map_or(0, |l| l.len())));
 },
 instructions::CODESIZE => {
 stack.push(U256::from(code.len()));
@@ -738,10 +740,10 @@ impl Interpreter {
 stack.push(U256::from(len));
 },
 instructions::CALLDATACOPY => {
-self.copy_data_to_memory(mem, stack, &params.data.clone().unwrap_or(vec![]));
+self.copy_data_to_memory(mem, stack, &params.data.clone().unwrap_or_else(|| vec![]));
 },
 instructions::CODECOPY => {
-self.copy_data_to_memory(mem, stack, &params.code.clone().unwrap_or(vec![]));
+self.copy_data_to_memory(mem, stack, &params.code.clone().unwrap_or_else(|| vec![]));
 },
 instructions::EXTCODECOPY => {
 let address = u256_to_address(&stack.pop_back());
@@ -781,7 +783,7 @@ impl Interpreter {
 fn copy_data_to_memory(&self,
 mem: &mut Memory,
 stack: &mut Stack<U256>,
-data: &Bytes) {
+data: &[u8]) {
 let offset = stack.pop_back();
 let index = stack.pop_back();
 let size = stack.pop_back();
@@ -1051,7 +1053,7 @@ impl Interpreter {
 Ok(())
 }

-fn find_jump_destinations(&self, code: &Bytes) -> HashSet<CodePosition> {
+fn find_jump_destinations(&self, code: &[u8]) -> HashSet<CodePosition> {
 let mut jump_dests = HashSet::new();
 let mut position = 0;

@@ -1066,7 +1068,7 @@ impl Interpreter {
 position += 1;
 }

-return jump_dests;
+jump_dests
 }
 }

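Note: `copy_data_to_memory` and `find_jump_destinations` now accept `&[u8]` instead of `&Bytes` (an alias for `Vec<u8>`), the usual fix for clippy's `ptr_arg`: a slice parameter accepts vectors, arrays and sub-slices alike. A small illustration with a made-up helper:

    // hypothetical helper, not part of the commit
    fn count_push_opcodes(code: &[u8]) -> usize {
        // PUSH1..PUSH32 occupy opcodes 0x60..0x7f in the EVM instruction set
        code.iter().filter(|&&b| b >= 0x60 && b <= 0x7f).count()
    }

    fn main() {
        let as_vec: Vec<u8> = vec![0x60, 0x01, 0x00];
        let as_slice: &[u8] = &[0x7f, 0x00];

        assert_eq!(count_push_opcodes(&as_vec), 1); // &Vec<u8> coerces to &[u8]
        assert_eq!(count_push_opcodes(as_slice), 1);
    }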
@@ -19,7 +19,7 @@ struct FakeExt {
 logs: Vec<FakeLogEntry>,
 _suicides: HashSet<Address>,
 info: EnvInfo,
-_schedule: Schedule
+schedule: Schedule
 }

 impl FakeExt {
@@ -89,7 +89,7 @@ impl Ext for FakeExt {
 }

 fn schedule(&self) -> &Schedule {
-&self._schedule
+&self.schedule
 }

 fn env_info(&self) -> &EnvInfo {
@@ -122,7 +122,7 @@ fn test_stack_underflow() {
 };

 match err {
-evm::Error::StackUnderflow {instruction: _, wanted, on_stack} => {
+evm::Error::StackUnderflow {wanted, on_stack, ..} => {
 assert_eq!(wanted, 2);
 assert_eq!(on_stack, 0);
 }
@@ -75,7 +75,7 @@ impl<'a> Executive<'a> {
 }

 /// Creates `Externalities` from `Executive`.
-pub fn to_externalities<'_>(&'_ mut self, origin_info: OriginInfo, substate: &'_ mut Substate, output: OutputPolicy<'_>) -> Externalities {
+pub fn as_externalities<'_>(&'_ mut self, origin_info: OriginInfo, substate: &'_ mut Substate, output: OutputPolicy<'_>) -> Externalities {
 Externalities::new(self.state, self.info, self.engine, self.depth, origin_info, substate, output)
 }

@@ -123,8 +123,8 @@ impl<'a> Executive<'a> {

 let mut substate = Substate::new();

-let res = match t.action() {
+let res = match *t.action() {
-&Action::Create => {
+Action::Create => {
 let new_address = contract_address(&sender, &nonce);
 let params = ActionParams {
 code_address: new_address.clone(),
@@ -139,7 +139,7 @@ impl<'a> Executive<'a> {
 };
 self.create(params, &mut substate)
 },
-&Action::Call(ref address) => {
+Action::Call(ref address) => {
 let params = ActionParams {
 code_address: address.clone(),
 address: address.clone(),
@@ -177,7 +177,7 @@ impl<'a> Executive<'a> {
 // if destination is builtin, try to execute it

 let default = [];
-let data = if let &Some(ref d) = &params.data { d as &[u8] } else { &default as &[u8] };
+let data = if let Some(ref d) = params.data { d as &[u8] } else { &default as &[u8] };

 let cost = self.engine.cost_of_builtin(&params.code_address, data);
 match cost <= params.gas {
@@ -198,7 +198,7 @@ impl<'a> Executive<'a> {
 let mut unconfirmed_substate = Substate::new();

 let res = {
-let mut ext = self.to_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::Return(output));
+let mut ext = self.as_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::Return(output));
 self.engine.vm_factory().create().exec(params, &mut ext)
 };

@@ -230,7 +230,7 @@ impl<'a> Executive<'a> {
 self.state.transfer_balance(&params.sender, &params.address, &params.value);

 let res = {
-let mut ext = self.to_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::InitContract);
+let mut ext = self.as_externalities(OriginInfo::from(&params), &mut unconfirmed_substate, OutputPolicy::InitContract);
 self.engine.vm_factory().create().exec(params, &mut ext)
 };
 self.enact_result(&res, substate, unconfirmed_substate, backup);
@@ -248,7 +248,7 @@ impl<'a> Executive<'a> {
 let refunds_bound = sstore_refunds + suicide_refunds;

 // real ammount to refund
-let gas_left_prerefund = match &result { &Ok(x) => x, _ => x!(0) };
+let gas_left_prerefund = match result { Ok(x) => x, _ => x!(0) };
 let refunded = cmp::min(refunds_bound, (t.gas - gas_left_prerefund) / U256::from(2));
 let gas_left = gas_left_prerefund + refunded;

@@ -265,7 +265,7 @@ impl<'a> Executive<'a> {
 self.state.add_balance(&self.info.author, &fees_value);

 // perform suicides
-for address in substate.suicides.iter() {
+for address in &substate.suicides {
 trace!("Killing {}", address);
 self.state.kill_account(address);
 }
@@ -273,11 +273,7 @@ impl<'a> Executive<'a> {
 match result {
 Err(evm::Error::Internal) => Err(ExecutionError::Internal),
 // TODO [ToDr] BadJumpDestination @debris - how to handle that?
-Err(evm::Error::OutOfGas)
-| Err(evm::Error::BadJumpDestination { destination: _ })
-| Err(evm::Error::BadInstruction { instruction: _ })
-| Err(evm::Error::StackUnderflow {instruction: _, wanted: _, on_stack: _})
-| Err(evm::Error::OutOfStack {instruction: _, wanted: _, limit: _}) => {
+Err(_) => {
 Ok(Executed {
 gas: t.gas,
 gas_used: t.gas,
@@ -302,15 +298,15 @@ impl<'a> Executive<'a> {

 fn enact_result(&mut self, result: &evm::Result, substate: &mut Substate, un_substate: Substate, backup: State) {
 // TODO: handle other evm::Errors same as OutOfGas once they are implemented
-match result {
+match *result {
-&Err(evm::Error::OutOfGas)
+Err(evm::Error::OutOfGas)
-| &Err(evm::Error::BadJumpDestination { destination: _ })
+| Err(evm::Error::BadJumpDestination {..})
-| &Err(evm::Error::BadInstruction { instruction: _ })
+| Err(evm::Error::BadInstruction {.. })
-| &Err(evm::Error::StackUnderflow {instruction: _, wanted: _, on_stack: _})
+| Err(evm::Error::StackUnderflow {..})
-| &Err(evm::Error::OutOfStack {instruction: _, wanted: _, limit: _}) => {
+| Err(evm::Error::OutOfStack {..}) => {
 self.state.revert(backup);
 },
-&Ok(_) | &Err(evm::Error::Internal) => substate.accrue(un_substate)
+Ok(_) | Err(evm::Error::Internal) => substate.accrue(un_substate)
 }
 }
 }
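Note: `enact_result` now matches on `*result`, dropping the `&Err(..)` forms, and uses `{ .. }` to ignore all fields of struct-like error variants instead of naming each one as `_`. A reduced sketch with a stand-in error type:

    #[allow(dead_code)]
    enum EvmError {
        OutOfGas,
        BadJumpDestination { destination: usize },
        StackUnderflow { wanted: usize, on_stack: usize },
    }

    fn should_revert(result: &Result<u64, EvmError>) -> bool {
        match *result {
            Err(EvmError::OutOfGas)
            | Err(EvmError::BadJumpDestination { .. })
            | Err(EvmError::StackUnderflow { .. }) => true,
            Ok(_) => false,
        }
    }

    fn main() {
        assert!(should_revert(&Err(EvmError::OutOfGas)));
        assert!(should_revert(&Err(EvmError::StackUnderflow { wanted: 2, on_stack: 0 })));
        assert!(!should_revert(&Ok(21_000)));
    }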
@@ -158,9 +158,10 @@ impl<'a> Ext for Externalities<'a> {
 }

 fn extcode(&self, address: &Address) -> Bytes {
-self.state.code(address).unwrap_or(vec![])
+self.state.code(address).unwrap_or_else(|| vec![])
 }

+#[allow(match_ref_pats)]
 fn ret(&mut self, gas: &U256, data: &[u8]) -> Result<U256, evm::Error> {
 match &mut self.output {
 &mut OutputPolicy::Return(BytesRef::Fixed(ref mut slice)) => unsafe {
@@ -2,7 +2,7 @@ use util::*;
 use basic_types::*;
 use time::now_utc;

-/// TODO [Gav Wood] Please document me
+/// Type for Block number
 pub type BlockNumber = u64;

 /// A block header.
@@ -171,9 +171,10 @@ impl Header {
 s.append(&self.gas_used);
 s.append(&self.timestamp);
 s.append(&self.extra_data);
-match with_seal {
+if let Seal::With = with_seal {
-Seal::With => for b in self.seal.iter() { s.append_raw(&b, 1); },
+for b in &self.seal {
-_ => {}
+s.append_raw(&b, 1);
+}
 }
 }

@@ -236,7 +237,7 @@ impl Encodable for Header {
 self.timestamp.encode(e);
 self.extra_data.encode(e);

-for b in self.seal.iter() {
+for b in &self.seal {
 e.emit_raw(&b);
 }
 })
@@ -1,8 +1,11 @@
 #![warn(missing_docs)]
 #![feature(cell_extras)]
 #![feature(augmented_assignments)]
-//#![feature(plugin)]
+#![feature(plugin)]
 //#![plugin(interpolate_idents)]
+#![plugin(clippy)]
+#![allow(needless_range_loop, match_bool)]

 //! Ethcore's ethereum implementation
 //!
 //! ### Rust version
@@ -73,7 +76,6 @@
 //! sudo make install
 //! sudo ldconfig
 //! ```

 #[macro_use]
 extern crate log;
 extern crate rustc_serialize;
@@ -26,10 +26,10 @@ impl FromJson for PodState {
 let code = acc.find("code").map(&Bytes::from_json);
 if balance.is_some() || nonce.is_some() || storage.is_some() || code.is_some() {
 state.insert(address_from_hex(address), PodAccount{
-balance: balance.unwrap_or(U256::zero()),
+balance: balance.unwrap_or_else(U256::zero),
-nonce: nonce.unwrap_or(U256::zero()),
+nonce: nonce.unwrap_or_else(U256::zero),
-storage: storage.unwrap_or(BTreeMap::new()),
+storage: storage.unwrap_or_else(BTreeMap::new),
-code: code.unwrap_or(Vec::new())
+code: code.unwrap_or_else(Vec::new)
 });
 }
 state
@@ -36,7 +36,7 @@ impl RlpStandard for Receipt {
 // TODO: make work:
 //s.append(&self.logs);
 s.append_list(self.logs.len());
-for l in self.logs.iter() {
+for l in &self.logs {
 l.rlp_append(s);
 }
 }
@@ -83,20 +83,17 @@ impl IoHandler<NetSyncMessage> for ClientIoHandler {
 }
 }

+#[allow(match_ref_pats)]
+#[allow(single_match)]
 fn message(&self, io: &IoContext<NetSyncMessage>, net_message: &NetSyncMessage) {
-match net_message {
-&UserMessage(ref message) => {
-match message {
-&SyncMessage::BlockVerified => {
-self.client.import_verified_blocks(&io.channel());
-},
-_ => {}, // ignore other messages
-}
-}
-_ => {}, // ignore other messages
-}
+if let &UserMessage(ref message) = net_message {
+match message {
+&SyncMessage::BlockVerified => {
+self.client.import_verified_blocks(&io.channel());
+},
+_ => {}, // ignore other messages
+}
+}
 }
 }

src/spec.rs
@@ -10,7 +10,7 @@ pub fn gzip64res_to_json(source: &[u8]) -> Json {
 let data = source.from_base64().expect("Genesis block is malformed!");
 let data_ref: &[u8] = &data;
 let mut decoder = GzDecoder::new(data_ref).expect("Gzip is invalid");
-let mut s: String = "".to_string();
+let mut s: String = "".to_owned();
 decoder.read_to_string(&mut s).expect("Gzip is invalid");
 Json::from_str(&s).expect("Json is invalid")
 }
@@ -18,14 +18,14 @@ pub fn gzip64res_to_json(source: &[u8]) -> Json {
 /// Convert JSON value to equivlaent RLP representation.
 // TODO: handle container types.
 fn json_to_rlp(json: &Json) -> Bytes {
-match json {
+match *json {
-&Json::Boolean(o) => encode(&(if o {1u64} else {0})),
+Json::Boolean(o) => encode(&(if o {1u64} else {0})),
-&Json::I64(o) => encode(&(o as u64)),
+Json::I64(o) => encode(&(o as u64)),
-&Json::U64(o) => encode(&o),
+Json::U64(o) => encode(&o),
-&Json::String(ref s) if s.len() >= 2 && &s[0..2] == "0x" && U256::from_str(&s[2..]).is_ok() => {
+Json::String(ref s) if s.len() >= 2 && &s[0..2] == "0x" && U256::from_str(&s[2..]).is_ok() => {
 encode(&U256::from_str(&s[2..]).unwrap())
 },
-&Json::String(ref s) => {
+Json::String(ref s) => {
 encode(s)
 },
 _ => panic!()
@@ -108,6 +108,7 @@ pub struct Spec {
 state_root_memo: RwLock<Option<H256>>,
 }

+#[allow(wrong_self_convention)] // because to_engine(self) should be to_engine(&self)
 impl Spec {
 /// Convert this object into a boxed Engine of the right underlying type.
 // TODO avoid this hard-coded nastiness - use dynamic-linked plugin framework instead.
@@ -185,13 +186,13 @@ impl FromJson for Spec {
 builtins.insert(addr.clone(), builtin);
 }
 }
-let balance = acc.find("balance").and_then(|x| match x { &Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
+let balance = acc.find("balance").and_then(|x| match *x { Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
-let nonce = acc.find("nonce").and_then(|x| match x { &Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
+let nonce = acc.find("nonce").and_then(|x| match *x { Json::String(ref b) => U256::from_dec_str(b).ok(), _ => None });
 // let balance = if let Some(&Json::String(ref b)) = acc.find("balance") {U256::from_dec_str(b).unwrap_or(U256::from(0))} else {U256::from(0)};
 // let nonce = if let Some(&Json::String(ref n)) = acc.find("nonce") {U256::from_dec_str(n).unwrap_or(U256::from(0))} else {U256::from(0)};
 // TODO: handle code & data if they exist.
 if balance.is_some() || nonce.is_some() {
-state.insert(addr, GenesisAccount { balance: balance.unwrap_or(U256::from(0)), nonce: nonce.unwrap_or(U256::from(0)) });
+state.insert(addr, GenesisAccount { balance: balance.unwrap_or_else(U256::zero), nonce: nonce.unwrap_or_else(U256::zero) });
 }
 }
 }
@@ -215,8 +216,8 @@ impl FromJson for Spec {


 Spec {
-name: json.find("name").map(|j| j.as_string().unwrap()).unwrap_or("unknown").to_string(),
+name: json.find("name").map_or("unknown", |j| j.as_string().unwrap()).to_owned(),
-engine_name: json["engineName"].as_string().unwrap().to_string(),
+engine_name: json["engineName"].as_string().unwrap().to_owned(),
 engine_params: json_to_rlp_map(&json["params"]),
 builtins: builtins,
 parent_hash: H256::from_str(&genesis["parentHash"].as_string().unwrap()[2..]).unwrap(),
@@ -242,7 +243,7 @@ impl Spec {
 let mut root = H256::new();
 {
 let mut t = SecTrieDBMut::new(db, &mut root);
-for (address, account) in self.genesis_state.iter() {
+for (address, account) in &self.genesis_state {
 t.insert(address.as_slice(), &account.rlp());
 }
 }

src/state.rs
@@ -88,22 +88,22 @@ impl State {

 /// Get the balance of account `a`.
 pub fn balance(&self, a: &Address) -> U256 {
-self.get(a, false).as_ref().map(|account| account.balance().clone()).unwrap_or(U256::from(0u8))
+self.get(a, false).as_ref().map_or(U256::zero(), |account| account.balance().clone())
 }

 /// Get the nonce of account `a`.
 pub fn nonce(&self, a: &Address) -> U256 {
-self.get(a, false).as_ref().map(|account| account.nonce().clone()).unwrap_or(U256::from(0u8))
+self.get(a, false).as_ref().map_or(U256::zero(), |account| account.nonce().clone())
 }

 /// Mutate storage of account `a` so that it is `value` for `key`.
 pub fn storage_at(&self, a: &Address, key: &H256) -> H256 {
-self.get(a, false).as_ref().map(|a|a.storage_at(&self.db, key)).unwrap_or(H256::new())
+self.get(a, false).as_ref().map_or(H256::new(), |a|a.storage_at(&self.db, key))
 }

 /// Mutate storage of account `a` so that it is `value` for `key`.
 pub fn code(&self, a: &Address) -> Option<Bytes> {
-self.get(a, true).as_ref().map(|a|a.code().map(|x|x.to_vec())).unwrap_or(None)
+self.get(a, true).as_ref().map_or(None, |a|a.code().map(|x|x.to_vec()))
 }

 /// Add `incr` to the balance of account `a`.
@@ -170,6 +170,7 @@ impl State {

 /// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
 /// `accounts` is mutable because we may need to commit the code or storage and record that.
+#[allow(match_ref_pats)]
 pub fn commit_into(db: &mut HashDB, root: &mut H256, accounts: &mut HashMap<Address, Option<Account>>) {
 // first, commit the sub trees.
 // TODO: is this necessary or can we dispense with the `ref mut a` for just `a`?
@@ -186,9 +187,9 @@ impl State {
 {
 let mut trie = SecTrieDBMut::from_existing(db, root);
 for (address, ref a) in accounts.iter() {
-match a {
+match **a {
-&&Some(ref account) => trie.insert(address, &account.rlp()),
+Some(ref account) => trie.insert(address, &account.rlp()),
-&&None => trie.remove(address),
+None => trie.remove(address),
 }
 }
 }
@@ -210,7 +211,7 @@ impl State {
 pub fn to_hashmap_pod(&self) -> HashMap<Address, PodAccount> {
 // TODO: handle database rather than just the cache.
 self.cache.borrow().iter().fold(HashMap::new(), |mut m, (add, opt)| {
-if let &Some(ref acc) = opt {
+if let Some(ref acc) = *opt {
 m.insert(add.clone(), PodAccount::from_account(acc));
 }
 m
@@ -221,7 +222,7 @@ impl State {
 pub fn to_pod(&self) -> PodState {
 // TODO: handle database rather than just the cache.
 PodState::new(self.cache.borrow().iter().fold(BTreeMap::new(), |mut m, (add, opt)| {
-if let &Some(ref acc) = opt {
+if let Some(ref acc) = *opt {
 m.insert(add.clone(), PodAccount::from_account(acc));
 }
 m
@@ -15,7 +15,7 @@ impl StateDiff {

 impl fmt::Display for StateDiff {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-for (add, acc) in self.0.iter() {
+for (add, acc) in &self.0 {
 try!(write!(f, "{} {}: {}", acc.existance(), add, acc));
 }
 Ok(())
@@ -218,7 +218,7 @@ impl ChainSync {
 self.downloading_bodies.clear();
 self.headers.clear();
 self.bodies.clear();
-for (_, ref mut p) in self.peers.iter_mut() {
+for (_, ref mut p) in &mut self.peers {
 p.asking_blocks.clear();
 }
 self.header_ids.clear();
@@ -274,6 +274,7 @@ impl ChainSync {
 Ok(())
 }

+#[allow(cyclomatic_complexity)]
 /// Called by peer once it has new block headers during sync
 fn on_peer_block_headers(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
 self.reset_peer_asking(peer_id, PeerAsking::BlockHeaders);
@@ -381,7 +382,7 @@ impl ChainSync {
 transactions_root: tx_root,
 uncles: uncles
 };
-match self.header_ids.get(&header_id).map(|n| *n) {
+match self.header_ids.get(&header_id).cloned() {
 Some(n) => {
 self.header_ids.remove(&header_id);
 self.bodies.insert_item(n, body.as_raw().to_vec());
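Note: `get(..).map(|n| *n)` on an `Option<&T>` is spelled `get(..).cloned()`, which is the change to the `header_ids` lookup above. A self-contained illustration:

    use std::collections::HashMap;

    fn main() {
        let mut header_ids: HashMap<&str, usize> = HashMap::new();
        header_ids.insert("header-1", 42);

        // before: header_ids.get("header-1").map(|n| *n)
        let n: Option<usize> = header_ids.get("header-1").cloned();

        assert_eq!(n, Some(42));
    }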
@@ -709,16 +710,13 @@ impl ChainSync {
 /// Used to recover from an error and re-download parts of the chain detected as bad.
 fn remove_downloaded_blocks(&mut self, start: BlockNumber) {
 for n in self.headers.get_tail(&start) {
-match self.headers.find_item(&n) {
-Some(ref header_data) => {
-let header_to_delete = HeaderView::new(&header_data.data);
-let header_id = HeaderId {
-transactions_root: header_to_delete.transactions_root(),
-uncles: header_to_delete.uncles_hash()
-};
-self.header_ids.remove(&header_id);
-},
-None => {}
+if let Some(ref header_data) = self.headers.find_item(&n) {
+let header_to_delete = HeaderView::new(&header_data.data);
+let header_id = HeaderId {
+transactions_root: header_to_delete.transactions_root(),
+uncles: header_to_delete.uncles_hash()
+};
+self.header_ids.remove(&header_id);
 }
 self.downloading_bodies.remove(&n);
 self.downloading_headers.remove(&n);
@@ -806,12 +804,9 @@ impl ChainSync {
 packet.append(&chain.best_block_hash);
 packet.append(&chain.genesis_hash);
 //TODO: handle timeout for status request
-match io.send(peer_id, STATUS_PACKET, packet.out()) {
-Err(e) => {
-warn!(target:"sync", "Error sending status request: {:?}", e);
-io.disable_peer(peer_id);
-}
-Ok(_) => ()
-}
+if let Err(e) = io.send(peer_id, STATUS_PACKET, packet.out()) {
+warn!(target:"sync", "Error sending status request: {:?}", e);
+io.disable_peer(peer_id);
+}
 }

@@ -847,12 +842,9 @@ impl ChainSync {
 let mut data = Bytes::new();
 let inc = (skip + 1) as BlockNumber;
 while number <= last && number > 0 && count < max_count {
-match io.chain().block_header_at(number) {
-Some(mut hdr) => {
-data.append(&mut hdr);
-count += 1;
-}
-None => {}
-}
+if let Some(mut hdr) = io.chain().block_header_at(number) {
+data.append(&mut hdr);
+count += 1;
+}
 if reverse {
 if number <= inc {
@@ -884,12 +876,9 @@ impl ChainSync {
 let mut added = 0usize;
 let mut data = Bytes::new();
 for i in 0..count {
-match io.chain().block_body(&try!(r.val_at::<H256>(i))) {
-Some(mut hdr) => {
-data.append(&mut hdr);
-added += 1;
-}
-None => {}
-}
+if let Some(mut hdr) = io.chain().block_body(&try!(r.val_at::<H256>(i))) {
+data.append(&mut hdr);
+added += 1;
+}
 }
 let mut rlp = RlpStream::new_list(added);
@@ -911,12 +900,9 @@ impl ChainSync {
 let mut added = 0usize;
 let mut data = Bytes::new();
 for i in 0..count {
-match io.chain().state_data(&try!(r.val_at::<H256>(i))) {
-Some(mut hdr) => {
-data.append(&mut hdr);
-added += 1;
-}
-None => {}
-}
+if let Some(mut hdr) = io.chain().state_data(&try!(r.val_at::<H256>(i))) {
+data.append(&mut hdr);
+added += 1;
+}
 }
 let mut rlp = RlpStream::new_list(added);
@@ -937,12 +923,9 @@ impl ChainSync {
 let mut added = 0usize;
 let mut data = Bytes::new();
 for i in 0..count {
-match io.chain().block_receipts(&try!(r.val_at::<H256>(i))) {
-Some(mut hdr) => {
-data.append(&mut hdr);
-added += 1;
-}
-None => {}
-}
+if let Some(mut hdr) = io.chain().block_receipts(&try!(r.val_at::<H256>(i))) {
+data.append(&mut hdr);
+added += 1;
+}
 }
 let mut rlp = RlpStream::new_list(added);
@@ -14,7 +14,7 @@ pub trait SyncIo {
 /// Send a packet to a peer.
 fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError>;
 /// Get the blockchain
-fn chain<'s>(&'s self) -> &'s BlockChainClient;
+fn chain(&self) -> &BlockChainClient;
 /// Returns peer client identifier string
 fn peer_info(&self, peer_id: PeerId) -> String {
 peer_id.to_string()
@@ -50,7 +50,7 @@ impl<'s, 'h> SyncIo for NetSyncIo<'s, 'h> {
 self.network.send(peer_id, packet_id, data)
 }

-fn chain<'a>(&'a self) -> &'a BlockChainClient {
+fn chain(&self) -> &BlockChainClient {
 self.chain
 }

@@ -29,7 +29,7 @@ pub trait RangeCollection<K, V> {
 /// Remove all elements >= `tail`
 fn insert_item(&mut self, key: K, value: V);
 /// Get an iterator over ranges
-fn range_iter<'c>(&'c self) -> RangeIterator<'c, K, V>;
+fn range_iter(& self) -> RangeIterator<K, V>;
 }

 /// Range iterator. For each range yelds a key for the first element of the range and a vector of values.
@@ -60,7 +60,7 @@ impl<'c, K:'c, V:'c> Iterator for RangeIterator<'c, K, V> where K: Add<Output =
 }
 
 impl<K, V> RangeCollection<K, V> for Vec<(K, Vec<V>)> where K: Ord + PartialEq + Add<Output = K> + Sub<Output = K> + Copy + FromUsize + ToUsize {
-	fn range_iter<'c>(&'c self) -> RangeIterator<'c, K, V> {
+	fn range_iter(&self) -> RangeIterator<K, V> {
 		RangeIterator {
 			range: self.len(),
 			collection: self
@@ -191,6 +191,7 @@ impl<K, V> RangeCollection<K, V> for Vec<(K, Vec<V>)> where K: Ord + PartialEq +
 }
 
 #[test]
+#[allow(cyclomatic_complexity)]
 fn test_range() {
 	use std::cmp::{Ordering};
 
@@ -66,7 +66,7 @@ impl BlockChainClient for TestBlockChainClient {
 	}
 
 	fn block(&self, h: &H256) -> Option<Bytes> {
-		self.blocks.read().unwrap().get(h).map(|b| b.clone())
+		self.blocks.read().unwrap().get(h).cloned()
 	}
 
 	fn block_status(&self, h: &H256) -> BlockStatus {
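`map(|b| b.clone())` on an `Option<&T>` becomes `cloned()`, which performs the same copy without the closure. A small sketch using a plain `HashMap` in place of the test client's block store:

use std::collections::HashMap;

fn main() {
    let mut blocks: HashMap<u64, Vec<u8>> = HashMap::new();
    blocks.insert(1, vec![0xde, 0xad]);

    // Before: blocks.get(&1).map(|b| b.clone())
    // After: cloned() turns Option<&Vec<u8>> into Option<Vec<u8>>.
    let copy = blocks.get(&1).cloned();
    assert_eq!(copy, Some(vec![0xde, 0xad]));
    assert_eq!(blocks.get(&2).cloned(), None);
}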
@@ -211,7 +211,7 @@ impl<'p> SyncIo for TestIo<'p> {
 		Ok(())
 	}
 
-	fn chain<'a>(&'a self) -> &'a BlockChainClient {
+	fn chain(&self) -> &BlockChainClient {
 		self.chain
 	}
 }
@@ -268,14 +268,11 @@ impl TestNet {
 
 	pub fn sync_step(&mut self) {
 		for peer in 0..self.peers.len() {
-			match self.peers[peer].queue.pop_front() {
-				Some(packet) => {
-					let mut p = self.peers.get_mut(packet.recipient).unwrap();
-					trace!("--- {} -> {} ---", peer, packet.recipient);
-					p.sync.on_packet(&mut TestIo::new(&mut p.chain, &mut p.queue, Some(peer as PeerId)), peer as PeerId, packet.packet_id, &packet.data);
-					trace!("----------------");
-				},
-				None => {}
+			if let Some(packet) = self.peers[peer].queue.pop_front() {
+				let mut p = self.peers.get_mut(packet.recipient).unwrap();
+				trace!("--- {} -> {} ---", peer, packet.recipient);
+				p.sync.on_packet(&mut TestIo::new(&mut p.chain, &mut p.queue, Some(peer as PeerId)), peer as PeerId, packet.packet_id, &packet.data);
+				trace!("----------------");
 			}
 			let mut p = self.peers.get_mut(peer).unwrap();
 			p.sync._maintain_sync(&mut TestIo::new(&mut p.chain, &mut p.queue, None));
@@ -168,7 +168,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec<String> {
 		let mut fail = false;
 		//let mut fail_unless = |cond: bool| if !cond && !fail { failed.push(name.to_string()); fail = true };
 		let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
-			failed.push(format!("[{}] {}: {}", vm, name.to_string(), s));
+			failed.push(format!("[{}] {}: {}", vm, name, s));
 			fail = true
 		};
 
@@ -187,15 +187,9 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec<String> {
 			BTreeMap::from_json(&s["storage"]).into_iter().foreach(|(k, v)| state.set_storage(&address, k, v));
 		});
 
-		let mut info = EnvInfo::new();
-		test.find("env").map(|env| {
-			info.author = xjson!(&env["currentCoinbase"]);
-			info.difficulty = xjson!(&env["currentDifficulty"]);
-			info.gas_limit = xjson!(&env["currentGasLimit"]);
-			info.number = xjson!(&env["currentNumber"]);
-			info.timestamp = xjson!(&env["currentTimestamp"]);
-		});
+		let info = test.find("env").map(|env| {
+			EnvInfo::from_json(env)
+		}).unwrap_or_default();
 
 		let engine = TestEngine::new(1, vm.clone());
 
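Instead of creating a mutable `EnvInfo` and patching its fields inside `map`, the new code builds the value from the JSON when the `env` key exists and falls back to `Default::default()` otherwise. A sketch with a hypothetical `Env` type standing in for `EnvInfo`:

#[derive(Debug, Default, PartialEq)]
struct Env {
    number: u64,
    timestamp: u64,
}

fn parse_env(raw: &str) -> Env {
    // Hypothetical parser standing in for EnvInfo::from_json.
    let mut parts = raw.split(',');
    Env {
        number: parts.next().and_then(|s| s.parse().ok()).unwrap_or(0),
        timestamp: parts.next().and_then(|s| s.parse().ok()).unwrap_or(0),
    }
}

fn main() {
    let present: Option<&str> = Some("42,1000");
    let missing: Option<&str> = None;

    let a = present.map(parse_env).unwrap_or_default();
    let b = missing.map(parse_env).unwrap_or_default();

    assert_eq!(a, Env { number: 42, timestamp: 1000 });
    assert_eq!(b, Env::default());
}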
@@ -245,7 +239,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec<String> {
 		test.find("post").map(|pre| for (addr, s) in pre.as_object().unwrap() {
 			let address = Address::from(addr.as_ref());
 
-			fail_unless(state.code(&address).unwrap_or(vec![]) == Bytes::from_json(&s["code"]), "code is incorrect");
+			fail_unless(state.code(&address).unwrap_or_else(|| vec![]) == Bytes::from_json(&s["code"]), "code is incorrect");
 			fail_unless(state.balance(&address) == xjson!(&s["balance"]), "balance is incorrect");
 			fail_unless(state.nonce(&address) == xjson!(&s["nonce"]), "nonce is incorrect");
 			BTreeMap::from_json(&s["storage"]).iter().foreach(|(k, v)| fail_unless(&state.storage_at(&address, &k) == v, "storage is incorrect"));
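`unwrap_or(vec![])` allocates the empty vector even when the `Option` is `Some`; `unwrap_or_else` takes a closure, so the default is only built on the `None` path. The `or_insert_with` and `ok_or_else` changes elsewhere in this commit follow the same reasoning. A minimal sketch:

fn main() {
    let present: Option<Vec<u8>> = Some(vec![1, 2, 3]);
    let missing: Option<Vec<u8>> = None;

    // Eager: the empty Vec on the right is constructed unconditionally.
    let a = present.unwrap_or(vec![]);
    // Lazy: the closure only runs when the value is None.
    let b = missing.unwrap_or_else(|| vec![]);

    assert_eq!(a, vec![1, 2, 3]);
    assert_eq!(b, Vec::<u8>::new());
}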
@@ -266,7 +260,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec<String> {
 	}
 
 
-	for f in failed.iter() {
+	for f in &failed {
 		println!("FAILED: {:?}", f);
 	}
 
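`for f in failed.iter()` and `for f in &failed` do the same thing; clippy prefers the borrowed form because `&collection` already yields references. A standalone sketch:

fn main() {
    let failed = vec!["example_test_a", "example_test_b"];

    // Equivalent to `for f in failed.iter()`; the Vec is only borrowed.
    for f in &failed {
        println!("FAILED: {:?}", f);
    }

    assert_eq!(failed.len(), 2);
}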
@@ -277,7 +271,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec<String> {
 declare_test!{ExecutiveTests_vmArithmeticTest, "VMTests/vmArithmeticTest"}
 declare_test!{ExecutiveTests_vmBitwiseLogicOperationTest, "VMTests/vmBitwiseLogicOperationTest"}
 // this one crashes with some vm internal error. Separately they pass.
-declare_test_ignore!{ExecutiveTests_vmBlockInfoTest, "VMTests/vmBlockInfoTest"}
+declare_test!{ExecutiveTests_vmBlockInfoTest, "VMTests/vmBlockInfoTest"}
 declare_test!{ExecutiveTests_vmEnvironmentalInfoTest, "VMTests/vmEnvironmentalInfoTest"}
 declare_test!{ExecutiveTests_vmIOandFlowOperationsTest, "VMTests/vmIOandFlowOperationsTest"}
 // this one take way too long.
@@ -15,7 +15,7 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
 		let mut fail = false;
 		{
 			let mut fail_unless = |cond: bool| if !cond && !fail {
-				failed.push(name.to_string());
+				failed.push(name.clone());
 				flush(format!("FAIL\n"));
 				fail = true;
 				true
@@ -6,7 +6,7 @@ macro_rules! declare_test {
 		#[test]
 		#[allow(non_snake_case)]
 		fn $id() {
-			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).len() == 0);
+			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).is_empty());
 		}
 	};
 }
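`len() == 0` becomes `is_empty()`, which states the intent directly and exists on collections, slices and strings alike. A sketch:

fn main() {
    let failures: Vec<String> = Vec::new();

    // Before: assert!(failures.len() == 0);
    assert!(failures.is_empty());

    assert!("".is_empty());
    assert!(![1, 2, 3].is_empty());
}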
@@ -18,7 +18,7 @@ macro_rules! declare_test_ignore {
 		#[ignore]
 		#[allow(non_snake_case)]
 		fn $id() {
-			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).len() == 0);
+			assert!(do_json_test(include_bytes!(concat!("../../res/ethereum/tests/", $name, ".json"))).is_empty());
 		}
 	};
 }
@@ -9,13 +9,13 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
 	let ot = RefCell::new(Transaction::new());
 	for (name, test) in json.as_object().unwrap() {
 		let mut fail = false;
-		let mut fail_unless = |cond: bool| if !cond && !fail { failed.push(name.to_string()); println!("Transaction: {:?}", ot.borrow()); fail = true };
+		let mut fail_unless = |cond: bool| if !cond && !fail { failed.push(name.clone()); println!("Transaction: {:?}", ot.borrow()); fail = true };
 		let schedule = match test.find("blocknumber")
 			.and_then(|j| j.as_string())
 			.and_then(|s| BlockNumber::from_str(s).ok())
 			.unwrap_or(0) { x if x < 900000 => &old_schedule, _ => &new_schedule };
 		let rlp = Bytes::from_json(&test["rlp"]);
-		let res = UntrustedRlp::new(&rlp).as_val().map_err(|e| From::from(e)).and_then(|t: Transaction| t.validate(schedule, schedule.have_delegate_call));
+		let res = UntrustedRlp::new(&rlp).as_val().map_err(From::from).and_then(|t: Transaction| t.validate(schedule, schedule.have_delegate_call));
 		fail_unless(test.find("transaction").is_none() == res.is_err());
 		if let (Some(&Json::Object(ref tx)), Some(&Json::String(ref expect_sender))) = (test.find("transaction"), test.find("sender")) {
 			let t = res.unwrap();
@@ -30,11 +30,11 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
 				fail_unless(to == &xjson!(&tx["to"]));
 			} else {
 				*ot.borrow_mut() = t.clone();
-				fail_unless(Bytes::from_json(&tx["to"]).len() == 0);
+				fail_unless(Bytes::from_json(&tx["to"]).is_empty());
 			}
 		}
 	}
-	for f in failed.iter() {
+	for f in &failed {
 		println!("FAILED: {:?}", f);
 	}
 	failed
@@ -117,9 +117,8 @@ impl Transaction {
 		};
 		s.append(&self.value);
 		s.append(&self.data);
-		match with_seal {
-			Seal::With => { s.append(&(self.v as u16)).append(&self.r).append(&self.s); },
-			_ => {}
+		if let Seal::With = with_seal {
+			s.append(&(self.v as u16)).append(&self.r).append(&self.s);
 		}
 	}
 
@@ -138,7 +137,7 @@ impl FromJson for Transaction {
 			gas_price: xjson!(&json["gasPrice"]),
 			gas: xjson!(&json["gasLimit"]),
 			action: match Bytes::from_json(&json["to"]) {
-				ref x if x.len() == 0 => Action::Create,
+				ref x if x.is_empty() => Action::Create,
 				ref x => Action::Call(Address::from_slice(x)),
 			},
 			value: xjson!(&json["value"]),
@@ -303,4 +302,4 @@ fn signing() {
 	let key = KeyPair::create().unwrap();
 	let t = Transaction::new_create(U256::from(42u64), b"Hello!".to_vec(), U256::from(3000u64), U256::from(50_000u64), U256::from(1u64)).signed(&key.secret());
 	assert_eq!(Address::from(key.public().sha3()), t.sender().unwrap());
 }
@@ -64,7 +64,7 @@ pub fn verify_block_unordered(header: Header, bytes: Bytes, engine: &Engine) ->
 /// Phase 3 verification. Check block information against parent and uncles.
 pub fn verify_block_family<BC>(header: &Header, bytes: &[u8], engine: &Engine, bc: &BC) -> Result<(), Error> where BC: BlockProvider {
 	// TODO: verify timestamp
-	let parent = try!(bc.block_header(&header.parent_hash).ok_or::<Error>(From::from(BlockError::UnknownParent(header.parent_hash.clone()))));
+	let parent = try!(bc.block_header(&header.parent_hash).ok_or_else(|| Error::from(BlockError::UnknownParent(header.parent_hash.clone()))));
 	try!(verify_parent(&header, &parent));
 	try!(engine.verify_block_family(&header, &parent, Some(bytes)));
 
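`ok_or::<Error>(From::from(...))` builds the error value before it is known to be needed; `ok_or_else` moves that work into a closure that only runs when the `Option` is `None`. A sketch with simplified types rather than the crate's `Error`/`BlockError`:

#[derive(Debug)]
enum LookupError {
    UnknownParent(String),
}

fn find_parent(known: &[&str], hash: &str) -> Result<String, LookupError> {
    known.iter()
        .find(|&&h| h == hash)
        .map(|&h| h.to_owned())
        // The closure runs only when find() returned None.
        .ok_or_else(|| LookupError::UnknownParent(hash.to_owned()))
}

fn main() {
    let known = ["0xaa", "0xbb"];
    assert!(find_parent(&known, "0xaa").is_ok());
    assert!(find_parent(&known, "0xcc").is_err());
}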
@@ -122,7 +122,7 @@ pub fn verify_block_family<BC>(header: &Header, bytes: &[u8], engine: &Engine, b
 			// cB.p^7 -------------/
 			// cB.p^8
 			let mut expected_uncle_parent = header.parent_hash.clone();
-			let uncle_parent = try!(bc.block_header(&uncle.parent_hash).ok_or::<Error>(From::from(BlockError::UnknownUncleParent(uncle.parent_hash.clone()))));
+			let uncle_parent = try!(bc.block_header(&uncle.parent_hash).ok_or_else(|| Error::from(BlockError::UnknownUncleParent(uncle.parent_hash.clone()))));
 			for _ in 0..depth {
 				match bc.block_details(&expected_uncle_parent) {
 					Some(details) => {
@@ -284,7 +284,7 @@ mod tests {
 
 		/// Get raw block data
 		fn block(&self, hash: &H256) -> Option<Bytes> {
-			self.blocks.get(hash).map(|b| b.clone())
+			self.blocks.get(hash).cloned()
 		}
 
 		/// Get the familial details concerning a block.
@@ -302,7 +302,7 @@ mod tests {
 
 		/// Get the hash of given block's number.
 		fn block_hash(&self, index: BlockNumber) -> Option<H256> {
-			self.numbers.get(&index).map(|h| h.clone())
+			self.numbers.get(&index).cloned()
 		}
 	}
 
@@ -141,7 +141,7 @@ impl<'a> BlockView<'a> {
 
 	/// Return List of transactions in given block.
 	pub fn transaction_views(&self) -> Vec<TransactionView> {
-		self.rlp.at(1).iter().map(|rlp| TransactionView::new_from_rlp(rlp)).collect()
+		self.rlp.at(1).iter().map(TransactionView::new_from_rlp).collect()
 	}
 
 	/// Return transaction hashes.
@@ -156,7 +156,7 @@ impl<'a> BlockView<'a> {
 
 	/// Return List of transactions in given block.
 	pub fn uncle_views(&self) -> Vec<HeaderView> {
-		self.rlp.at(2).iter().map(|rlp| HeaderView::new_from_rlp(rlp)).collect()
+		self.rlp.at(2).iter().map(HeaderView::new_from_rlp).collect()
 	}
 
 	/// Return list of uncle hashes of given block.
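When a closure only forwards its argument, clippy's `redundant_closure` lint suggests naming the function instead, as in `map(TransactionView::new_from_rlp)` and `map_err(From::from)` above. A sketch:

fn double(x: u32) -> u32 {
    x * 2
}

fn main() {
    let values = vec![1u32, 2, 3];

    // Before: values.into_iter().map(|v| double(v)).collect()
    let doubled: Vec<u32> = values.into_iter().map(double).collect();
    assert_eq!(doubled, vec![2, 4, 6]);
}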
@@ -25,6 +25,7 @@ itertools = "0.4"
 crossbeam = "0.2"
 slab = { git = "https://github.com/arkpar/slab.git" }
 sha3 = { path = "sha3" }
+clippy = "*" # Always newest, since we use nightly
 
 [dev-dependencies]
 json-tests = { path = "json-tests" }
@ -106,18 +106,18 @@ impl<'a> Deref for BytesRef<'a> {
|
|||||||
type Target = [u8];
|
type Target = [u8];
|
||||||
|
|
||||||
fn deref(&self) -> &[u8] {
|
fn deref(&self) -> &[u8] {
|
||||||
match self {
|
match *self {
|
||||||
&BytesRef::Flexible(ref bytes) => bytes,
|
BytesRef::Flexible(ref bytes) => bytes,
|
||||||
&BytesRef::Fixed(ref bytes) => bytes
|
BytesRef::Fixed(ref bytes) => bytes
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl <'a> DerefMut for BytesRef<'a> {
|
impl <'a> DerefMut for BytesRef<'a> {
|
||||||
fn deref_mut(&mut self) -> &mut [u8] {
|
fn deref_mut(&mut self) -> &mut [u8] {
|
||||||
match self {
|
match *self {
|
||||||
&mut BytesRef::Flexible(ref mut bytes) => bytes,
|
BytesRef::Flexible(ref mut bytes) => bytes,
|
||||||
&mut BytesRef::Fixed(ref mut bytes) => bytes
|
BytesRef::Fixed(ref mut bytes) => bytes
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -299,7 +299,7 @@ pub trait FromBytes: Sized {
|
|||||||
|
|
||||||
impl FromBytes for String {
|
impl FromBytes for String {
|
||||||
fn from_bytes(bytes: &[u8]) -> FromBytesResult<String> {
|
fn from_bytes(bytes: &[u8]) -> FromBytesResult<String> {
|
||||||
Ok(::std::str::from_utf8(bytes).unwrap().to_string())
|
Ok(::std::str::from_utf8(bytes).unwrap().to_owned())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -323,10 +323,9 @@ impl<'a, D> ChainFilter<'a, D> where D: FilterDataSource
|
|||||||
let offset = level_size * index;
|
let offset = level_size * index;
|
||||||
|
|
||||||
// go doooown!
|
// go doooown!
|
||||||
match self.blocks(bloom, from_block, to_block, max_level, offset) {
|
if let Some(blocks) = self.blocks(bloom, from_block, to_block, max_level, offset) {
|
||||||
Some(blocks) => result.extend(blocks),
|
result.extend(blocks);
|
||||||
None => ()
|
}
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
result
|
result
|
||||||
|
@ -207,11 +207,11 @@ macro_rules! impl_hash {
|
|||||||
|
|
||||||
impl FromJson for $from {
|
impl FromJson for $from {
|
||||||
fn from_json(json: &Json) -> Self {
|
fn from_json(json: &Json) -> Self {
|
||||||
match json {
|
match *json {
|
||||||
&Json::String(ref s) => {
|
Json::String(ref s) => {
|
||||||
match s.len() % 2 {
|
match s.len() % 2 {
|
||||||
0 => FromStr::from_str(clean_0x(s)).unwrap(),
|
0 => FromStr::from_str(clean_0x(s)).unwrap(),
|
||||||
_ => FromStr::from_str(&("0".to_string() + &(clean_0x(s).to_string()))[..]).unwrap()
|
_ => FromStr::from_str(&("0".to_owned() + &(clean_0x(s).to_owned()))[..]).unwrap()
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
_ => Default::default(),
|
_ => Default::default(),
|
||||||
@ -221,7 +221,7 @@ macro_rules! impl_hash {
|
|||||||
|
|
||||||
impl fmt::Debug for $from {
|
impl fmt::Debug for $from {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
for i in self.0.iter() {
|
for i in &self.0[..] {
|
||||||
try!(write!(f, "{:02x}", i));
|
try!(write!(f, "{:02x}", i));
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
@ -229,11 +229,11 @@ macro_rules! impl_hash {
|
|||||||
}
|
}
|
||||||
impl fmt::Display for $from {
|
impl fmt::Display for $from {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
for i in self.0[0..2].iter() {
|
for i in &self.0[0..2] {
|
||||||
try!(write!(f, "{:02x}", i));
|
try!(write!(f, "{:02x}", i));
|
||||||
}
|
}
|
||||||
try!(write!(f, "…"));
|
try!(write!(f, "…"));
|
||||||
for i in self.0[$size - 4..$size].iter() {
|
for i in &self.0[$size - 4..$size] {
|
||||||
try!(write!(f, "{:02x}", i));
|
try!(write!(f, "{:02x}", i));
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
@ -291,36 +291,36 @@ macro_rules! impl_hash {
|
|||||||
impl Index<usize> for $from {
|
impl Index<usize> for $from {
|
||||||
type Output = u8;
|
type Output = u8;
|
||||||
|
|
||||||
fn index<'a>(&'a self, index: usize) -> &'a u8 {
|
fn index(&self, index: usize) -> &u8 {
|
||||||
&self.0[index]
|
&self.0[index]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl IndexMut<usize> for $from {
|
impl IndexMut<usize> for $from {
|
||||||
fn index_mut<'a>(&'a mut self, index: usize) -> &'a mut u8 {
|
fn index_mut(&mut self, index: usize) -> &mut u8 {
|
||||||
&mut self.0[index]
|
&mut self.0[index]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Index<ops::Range<usize>> for $from {
|
impl Index<ops::Range<usize>> for $from {
|
||||||
type Output = [u8];
|
type Output = [u8];
|
||||||
|
|
||||||
fn index<'a>(&'a self, index: ops::Range<usize>) -> &'a [u8] {
|
fn index(&self, index: ops::Range<usize>) -> &[u8] {
|
||||||
&self.0[index]
|
&self.0[index]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl IndexMut<ops::Range<usize>> for $from {
|
impl IndexMut<ops::Range<usize>> for $from {
|
||||||
fn index_mut<'a>(&'a mut self, index: ops::Range<usize>) -> &'a mut [u8] {
|
fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [u8] {
|
||||||
&mut self.0[index]
|
&mut self.0[index]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Index<ops::RangeFull> for $from {
|
impl Index<ops::RangeFull> for $from {
|
||||||
type Output = [u8];
|
type Output = [u8];
|
||||||
|
|
||||||
fn index<'a>(&'a self, _index: ops::RangeFull) -> &'a [u8] {
|
fn index(&self, _index: ops::RangeFull) -> &[u8] {
|
||||||
&self.0
|
&self.0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl IndexMut<ops::RangeFull> for $from {
|
impl IndexMut<ops::RangeFull> for $from {
|
||||||
fn index_mut<'a>(&'a mut self, _index: ops::RangeFull) -> &'a mut [u8] {
|
fn index_mut(&mut self, _index: ops::RangeFull) -> &mut [u8] {
|
||||||
&mut self.0
|
&mut self.0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -440,9 +440,9 @@ macro_rules! impl_hash {
|
|||||||
fn from(s: &'_ str) -> $from {
|
fn from(s: &'_ str) -> $from {
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
if s.len() % 2 == 1 {
|
if s.len() % 2 == 1 {
|
||||||
$from::from_str(&("0".to_string() + &(clean_0x(s).to_string()))[..]).unwrap_or($from::new())
|
$from::from_str(&("0".to_owned() + &(clean_0x(s).to_owned()))[..]).unwrap_or_else(|_| $from::new())
|
||||||
} else {
|
} else {
|
||||||
$from::from_str(clean_0x(s)).unwrap_or($from::new())
|
$from::from_str(clean_0x(s)).unwrap_or_else(|_| $from::new())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -565,6 +565,7 @@ mod tests {
|
|||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[allow(eq_op)]
|
||||||
fn hash() {
|
fn hash() {
|
||||||
let h = H64([0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]);
|
let h = H64([0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]);
|
||||||
assert_eq!(H64::from_str("0123456789abcdef").unwrap(), h);
|
assert_eq!(H64::from_str("0123456789abcdef").unwrap(), h);
|
||||||
|
@ -132,7 +132,7 @@ impl<Message> IoManager<Message> where Message: Send + Sync + Clone + 'static {
|
|||||||
pub fn start(event_loop: &mut EventLoop<IoManager<Message>>) -> Result<(), UtilError> {
|
pub fn start(event_loop: &mut EventLoop<IoManager<Message>>) -> Result<(), UtilError> {
|
||||||
let (worker, stealer) = chase_lev::deque();
|
let (worker, stealer) = chase_lev::deque();
|
||||||
let num_workers = 4;
|
let num_workers = 4;
|
||||||
let work_ready_mutex = Arc::new(Mutex::new(false));
|
let work_ready_mutex = Arc::new(Mutex::new(()));
|
||||||
let work_ready = Arc::new(Condvar::new());
|
let work_ready = Arc::new(Condvar::new());
|
||||||
let workers = (0..num_workers).map(|i|
|
let workers = (0..num_workers).map(|i|
|
||||||
Worker::new(i, stealer.clone(), IoChannel::new(event_loop.channel()), work_ready.clone(), work_ready_mutex.clone())).collect();
|
Worker::new(i, stealer.clone(), IoChannel::new(event_loop.channel()), work_ready.clone(), work_ready_mutex.clone())).collect();
|
||||||
|
@ -35,7 +35,7 @@ impl Worker {
|
|||||||
stealer: chase_lev::Stealer<Work<Message>>,
|
stealer: chase_lev::Stealer<Work<Message>>,
|
||||||
channel: IoChannel<Message>,
|
channel: IoChannel<Message>,
|
||||||
wait: Arc<Condvar>,
|
wait: Arc<Condvar>,
|
||||||
wait_mutex: Arc<Mutex<bool>>) -> Worker
|
wait_mutex: Arc<Mutex<()>>) -> Worker
|
||||||
where Message: Send + Sync + Clone + 'static {
|
where Message: Send + Sync + Clone + 'static {
|
||||||
let deleting = Arc::new(AtomicBool::new(false));
|
let deleting = Arc::new(AtomicBool::new(false));
|
||||||
let mut worker = Worker {
|
let mut worker = Worker {
|
||||||
@ -51,7 +51,7 @@ impl Worker {
|
|||||||
|
|
||||||
fn work_loop<Message>(stealer: chase_lev::Stealer<Work<Message>>,
|
fn work_loop<Message>(stealer: chase_lev::Stealer<Work<Message>>,
|
||||||
channel: IoChannel<Message>, wait: Arc<Condvar>,
|
channel: IoChannel<Message>, wait: Arc<Condvar>,
|
||||||
wait_mutex: Arc<Mutex<bool>>,
|
wait_mutex: Arc<Mutex<()>>,
|
||||||
deleting: Arc<AtomicBool>)
|
deleting: Arc<AtomicBool>)
|
||||||
where Message: Send + Sync + Clone + 'static {
|
where Message: Send + Sync + Clone + 'static {
|
||||||
while !deleting.load(AtomicOrdering::Relaxed) {
|
while !deleting.load(AtomicOrdering::Relaxed) {
|
||||||
@@ -62,13 +62,8 @@ impl Worker {
 					return;
 				}
 			}
-			loop {
-				match stealer.steal() {
-					chase_lev::Steal::Data(work) => {
-						Worker::do_work(work, channel.clone());
-					}
-					_ => break
-				}
+			while let chase_lev::Steal::Data(work) = stealer.steal() {
+				Worker::do_work(work, channel.clone());
 			}
 		}
 	}
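The worker's inner `loop { match stealer.steal() { ... _ => break } }` becomes `while let`, which keeps draining the queue while the pattern matches and stops otherwise. A self-contained sketch using a `VecDeque` in place of the chase-lev stealer:

use std::collections::VecDeque;

fn main() {
    let mut queue: VecDeque<u32> = (1..6).collect();
    let mut handled = Vec::new();

    // Before: loop { match queue.pop_front() { Some(work) => ..., None => break } }
    while let Some(work) = queue.pop_front() {
        handled.push(work * 10);
    }

    assert_eq!(handled, vec![10, 20, 30, 40, 50]);
    assert!(queue.is_empty());
}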
@ -76,19 +71,19 @@ impl Worker {
|
|||||||
fn do_work<Message>(work: Work<Message>, channel: IoChannel<Message>) where Message: Send + Sync + Clone + 'static {
|
fn do_work<Message>(work: Work<Message>, channel: IoChannel<Message>) where Message: Send + Sync + Clone + 'static {
|
||||||
match work.work_type {
|
match work.work_type {
|
||||||
WorkType::Readable => {
|
WorkType::Readable => {
|
||||||
work.handler.stream_readable(&mut IoContext::new(channel, work.handler_id), work.token);
|
work.handler.stream_readable(&IoContext::new(channel, work.handler_id), work.token);
|
||||||
},
|
},
|
||||||
WorkType::Writable => {
|
WorkType::Writable => {
|
||||||
work.handler.stream_writable(&mut IoContext::new(channel, work.handler_id), work.token);
|
work.handler.stream_writable(&IoContext::new(channel, work.handler_id), work.token);
|
||||||
}
|
}
|
||||||
WorkType::Hup => {
|
WorkType::Hup => {
|
||||||
work.handler.stream_hup(&mut IoContext::new(channel, work.handler_id), work.token);
|
work.handler.stream_hup(&IoContext::new(channel, work.handler_id), work.token);
|
||||||
}
|
}
|
||||||
WorkType::Timeout => {
|
WorkType::Timeout => {
|
||||||
work.handler.timeout(&mut IoContext::new(channel, work.handler_id), work.token);
|
work.handler.timeout(&IoContext::new(channel, work.handler_id), work.token);
|
||||||
}
|
}
|
||||||
WorkType::Message(message) => {
|
WorkType::Message(message) => {
|
||||||
work.handler.message(&mut IoContext::new(channel, work.handler_id), &message);
|
work.handler.message(&IoContext::new(channel, work.handler_id), &message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -106,7 +106,7 @@ impl JournalDB {
|
|||||||
})) {
|
})) {
|
||||||
let rlp = Rlp::new(&rlp_data);
|
let rlp = Rlp::new(&rlp_data);
|
||||||
let to_remove: Vec<H256> = rlp.val_at(if canon_id == rlp.val_at(0) {2} else {1});
|
let to_remove: Vec<H256> = rlp.val_at(if canon_id == rlp.val_at(0) {2} else {1});
|
||||||
for i in to_remove.iter() {
|
for i in &to_remove {
|
||||||
self.forward.remove(i);
|
self.forward.remove(i);
|
||||||
}
|
}
|
||||||
try!(self.backing.delete(&last));
|
try!(self.backing.delete(&last));
|
||||||
|
@ -11,18 +11,18 @@ pub fn clean(s: &str) -> &str {
|
|||||||
|
|
||||||
fn u256_from_str(s: &str) -> U256 {
|
fn u256_from_str(s: &str) -> U256 {
|
||||||
if s.len() >= 2 && &s[0..2] == "0x" {
|
if s.len() >= 2 && &s[0..2] == "0x" {
|
||||||
U256::from_str(&s[2..]).unwrap_or(U256::from(0))
|
U256::from_str(&s[2..]).unwrap_or_else(|_| U256::zero())
|
||||||
} else {
|
} else {
|
||||||
U256::from_dec_str(s).unwrap_or(U256::from(0))
|
U256::from_dec_str(s).unwrap_or_else(|_| U256::zero())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromJson for Bytes {
|
impl FromJson for Bytes {
|
||||||
fn from_json(json: &Json) -> Self {
|
fn from_json(json: &Json) -> Self {
|
||||||
match json {
|
match *json {
|
||||||
&Json::String(ref s) => match s.len() % 2 {
|
Json::String(ref s) => match s.len() % 2 {
|
||||||
0 => FromHex::from_hex(clean(s)).unwrap_or(vec![]),
|
0 => FromHex::from_hex(clean(s)).unwrap_or_else(|_| vec![]),
|
||||||
_ => FromHex::from_hex(&("0".to_string() + &(clean(s).to_string()))[..]).unwrap_or(vec![]),
|
_ => FromHex::from_hex(&("0".to_owned() + &(clean(s).to_owned()))[..]).unwrap_or_else(|_| vec![]),
|
||||||
},
|
},
|
||||||
_ => vec![],
|
_ => vec![],
|
||||||
}
|
}
|
||||||
@ -31,8 +31,8 @@ impl FromJson for Bytes {
|
|||||||
|
|
||||||
impl FromJson for BTreeMap<H256, H256> {
|
impl FromJson for BTreeMap<H256, H256> {
|
||||||
fn from_json(json: &Json) -> Self {
|
fn from_json(json: &Json) -> Self {
|
||||||
match json {
|
match *json {
|
||||||
&Json::Object(ref o) => o.iter().map(|(key, value)| (x!(&u256_from_str(key)), x!(&U256::from_json(value)))).collect(),
|
Json::Object(ref o) => o.iter().map(|(key, value)| (x!(&u256_from_str(key)), x!(&U256::from_json(value)))).collect(),
|
||||||
_ => BTreeMap::new(),
|
_ => BTreeMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -40,8 +40,8 @@ impl FromJson for BTreeMap<H256, H256> {
|
|||||||
|
|
||||||
impl<T> FromJson for Vec<T> where T: FromJson {
|
impl<T> FromJson for Vec<T> where T: FromJson {
|
||||||
fn from_json(json: &Json) -> Self {
|
fn from_json(json: &Json) -> Self {
|
||||||
match json {
|
match *json {
|
||||||
&Json::Array(ref o) => o.iter().map(|x|T::from_json(x)).collect(),
|
Json::Array(ref o) => o.iter().map(|x|T::from_json(x)).collect(),
|
||||||
_ => Vec::new(),
|
_ => Vec::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -49,9 +49,9 @@ impl<T> FromJson for Vec<T> where T: FromJson {
|
|||||||
|
|
||||||
impl<T> FromJson for Option<T> where T: FromJson {
|
impl<T> FromJson for Option<T> where T: FromJson {
|
||||||
fn from_json(json: &Json) -> Self {
|
fn from_json(json: &Json) -> Self {
|
||||||
match json {
|
match *json {
|
||||||
&Json::String(ref o) if o.is_empty() => None,
|
Json::String(ref o) if o.is_empty() => None,
|
||||||
&Json::Null => None,
|
Json::Null => None,
|
||||||
_ => Some(FromJson::from_json(json)),
|
_ => Some(FromJson::from_json(json)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -135,4 +135,4 @@ fn option_types() {
|
|||||||
assert_eq!(None, v);
|
assert_eq!(None, v);
|
||||||
let v: Option<u16> = xjson!(&j["empty"]);
|
let v: Option<u16> = xjson!(&j["empty"]);
|
||||||
assert_eq!(None, v);
|
assert_eq!(None, v);
|
||||||
}
|
}
|
||||||
|
@ -2,6 +2,9 @@
|
|||||||
#![feature(op_assign_traits)]
|
#![feature(op_assign_traits)]
|
||||||
#![feature(augmented_assignments)]
|
#![feature(augmented_assignments)]
|
||||||
#![feature(associated_consts)]
|
#![feature(associated_consts)]
|
||||||
|
#![feature(plugin)]
|
||||||
|
#![plugin(clippy)]
|
||||||
|
#![allow(needless_range_loop, match_bool)]
|
||||||
//! Ethcore-util library
|
//! Ethcore-util library
|
||||||
//!
|
//!
|
||||||
//! ### Rust version:
|
//! ### Rust version:
|
||||||
|
@ -18,13 +18,13 @@ impl<T> Diff<T> where T: Eq {
|
|||||||
pub fn new(pre: T, post: T) -> Self { if pre == post { Diff::Same } else { Diff::Changed(pre, post) } }
|
pub fn new(pre: T, post: T) -> Self { if pre == post { Diff::Same } else { Diff::Changed(pre, post) } }
|
||||||
|
|
||||||
/// Get the before value, if there is one.
|
/// Get the before value, if there is one.
|
||||||
pub fn pre(&self) -> Option<&T> { match self { &Diff::Died(ref x) | &Diff::Changed(ref x, _) => Some(x), _ => None } }
|
pub fn pre(&self) -> Option<&T> { match *self { Diff::Died(ref x) | Diff::Changed(ref x, _) => Some(x), _ => None } }
|
||||||
|
|
||||||
/// Get the after value, if there is one.
|
/// Get the after value, if there is one.
|
||||||
pub fn post(&self) -> Option<&T> { match self { &Diff::Born(ref x) | &Diff::Changed(_, ref x) => Some(x), _ => None } }
|
pub fn post(&self) -> Option<&T> { match *self { Diff::Born(ref x) | Diff::Changed(_, ref x) => Some(x), _ => None } }
|
||||||
|
|
||||||
/// Determine whether there was a change or not.
|
/// Determine whether there was a change or not.
|
||||||
pub fn is_same(&self) -> bool { match self { &Diff::Same => true, _ => false }}
|
pub fn is_same(&self) -> bool { match *self { Diff::Same => true, _ => false }}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(PartialEq,Eq,Clone,Copy)]
|
#[derive(PartialEq,Eq,Clone,Copy)]
|
||||||
|
@ -88,7 +88,7 @@ impl Connection {
|
|||||||
|
|
||||||
/// Add a packet to send queue.
|
/// Add a packet to send queue.
|
||||||
pub fn send(&mut self, data: Bytes) {
|
pub fn send(&mut self, data: Bytes) {
|
||||||
if data.len() != 0 {
|
if !data.is_empty() {
|
||||||
self.send_queue.push_back(Cursor::new(data));
|
self.send_queue.push_back(Cursor::new(data));
|
||||||
}
|
}
|
||||||
if !self.interest.is_writable() {
|
if !self.interest.is_writable() {
|
||||||
@ -340,13 +340,10 @@ impl EncryptedConnection {
|
|||||||
io.clear_timer(self.connection.token).unwrap();
|
io.clear_timer(self.connection.token).unwrap();
|
||||||
match self.read_state {
|
match self.read_state {
|
||||||
EncryptedConnectionState::Header => {
|
EncryptedConnectionState::Header => {
|
||||||
match try!(self.connection.readable()) {
|
if let Some(data) = try!(self.connection.readable()) {
|
||||||
Some(data) => {
|
try!(self.read_header(&data));
|
||||||
try!(self.read_header(&data));
|
try!(io.register_timer(self.connection.token, RECIEVE_PAYLOAD_TIMEOUT));
|
||||||
try!(io.register_timer(self.connection.token, RECIEVE_PAYLOAD_TIMEOUT));
|
}
|
||||||
},
|
|
||||||
None => {}
|
|
||||||
};
|
|
||||||
Ok(None)
|
Ok(None)
|
||||||
},
|
},
|
||||||
EncryptedConnectionState::Payload => {
|
EncryptedConnectionState::Payload => {
|
||||||
|
@ -62,7 +62,7 @@ impl Discovery {
|
|||||||
discovery_round: 0,
|
discovery_round: 0,
|
||||||
discovery_id: NodeId::new(),
|
discovery_id: NodeId::new(),
|
||||||
discovery_nodes: HashSet::new(),
|
discovery_nodes: HashSet::new(),
|
||||||
node_buckets: (0..NODE_BINS).map(|x| NodeBucket::new(x)).collect(),
|
node_buckets: (0..NODE_BINS).map(NodeBucket::new).collect(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -122,7 +122,8 @@ impl Discovery {
|
|||||||
ret
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
fn nearest_node_entries<'b>(source: &NodeId, target: &NodeId, buckets: &'b Vec<NodeBucket>) -> Vec<&'b NodeId>
|
#[allow(cyclomatic_complexity)]
|
||||||
|
fn nearest_node_entries<'b>(source: &NodeId, target: &NodeId, buckets: &'b [NodeBucket]) -> Vec<&'b NodeId>
|
||||||
{
|
{
|
||||||
// send ALPHA FindNode packets to nodes we know, closest to target
|
// send ALPHA FindNode packets to nodes we know, closest to target
|
||||||
const LAST_BIN: u32 = NODE_BINS - 1;
|
const LAST_BIN: u32 = NODE_BINS - 1;
|
||||||
@ -136,21 +137,21 @@ impl Discovery {
|
|||||||
if head > 1 && tail != LAST_BIN {
|
if head > 1 && tail != LAST_BIN {
|
||||||
while head != tail && head < NODE_BINS && count < BUCKET_SIZE
|
while head != tail && head < NODE_BINS && count < BUCKET_SIZE
|
||||||
{
|
{
|
||||||
for n in buckets[head as usize].nodes.iter()
|
for n in &buckets[head as usize].nodes
|
||||||
{
|
{
|
||||||
if count < BUCKET_SIZE {
|
if count < BUCKET_SIZE {
|
||||||
count += 1;
|
count += 1;
|
||||||
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
|
found.entry(Discovery::distance(target, &n)).or_insert_with(Vec::new).push(n);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if count < BUCKET_SIZE && tail != 0 {
|
if count < BUCKET_SIZE && tail != 0 {
|
||||||
for n in buckets[tail as usize].nodes.iter() {
|
for n in &buckets[tail as usize].nodes {
|
||||||
if count < BUCKET_SIZE {
|
if count < BUCKET_SIZE {
|
||||||
count += 1;
|
count += 1;
|
||||||
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
|
found.entry(Discovery::distance(target, &n)).or_insert_with(Vec::new).push(n);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
break;
|
break;
|
||||||
@ -166,10 +167,10 @@ impl Discovery {
|
|||||||
}
|
}
|
||||||
else if head < 2 {
|
else if head < 2 {
|
||||||
while head < NODE_BINS && count < BUCKET_SIZE {
|
while head < NODE_BINS && count < BUCKET_SIZE {
|
||||||
for n in buckets[head as usize].nodes.iter() {
|
for n in &buckets[head as usize].nodes {
|
||||||
if count < BUCKET_SIZE {
|
if count < BUCKET_SIZE {
|
||||||
count += 1;
|
count += 1;
|
||||||
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
|
found.entry(Discovery::distance(target, &n)).or_insert_with(Vec::new).push(n);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
break;
|
break;
|
||||||
@ -180,10 +181,10 @@ impl Discovery {
|
|||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
while tail > 0 && count < BUCKET_SIZE {
|
while tail > 0 && count < BUCKET_SIZE {
|
||||||
for n in buckets[tail as usize].nodes.iter() {
|
for n in &buckets[tail as usize].nodes {
|
||||||
if count < BUCKET_SIZE {
|
if count < BUCKET_SIZE {
|
||||||
count += 1;
|
count += 1;
|
||||||
found.entry(Discovery::distance(target, &n)).or_insert(Vec::new()).push(n);
|
found.entry(Discovery::distance(target, &n)).or_insert_with(Vec::new).push(n);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
break;
|
break;
|
||||||
|
@ -93,21 +93,15 @@ impl Handshake {
|
|||||||
io.clear_timer(self.connection.token).unwrap();
|
io.clear_timer(self.connection.token).unwrap();
|
||||||
match self.state {
|
match self.state {
|
||||||
HandshakeState::ReadingAuth => {
|
HandshakeState::ReadingAuth => {
|
||||||
match try!(self.connection.readable()) {
|
if let Some(data) = try!(self.connection.readable()) {
|
||||||
Some(data) => {
|
try!(self.read_auth(host, &data));
|
||||||
try!(self.read_auth(host, &data));
|
try!(self.write_ack());
|
||||||
try!(self.write_ack());
|
|
||||||
},
|
|
||||||
None => {}
|
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
HandshakeState::ReadingAck => {
|
HandshakeState::ReadingAck => {
|
||||||
match try!(self.connection.readable()) {
|
if let Some(data) = try!(self.connection.readable()) {
|
||||||
Some(data) => {
|
try!(self.read_ack(host, &data));
|
||||||
try!(self.read_ack(host, &data));
|
self.state = HandshakeState::StartSession;
|
||||||
self.state = HandshakeState::StartSession;
|
|
||||||
},
|
|
||||||
None => {}
|
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
HandshakeState::StartSession => {},
|
HandshakeState::StartSession => {},
|
||||||
|
@ -133,9 +133,9 @@ impl<'s, Message> NetworkContext<'s, Message> where Message: Send + Sync + Clone
|
|||||||
|
|
||||||
/// Send a packet over the network to another peer.
|
/// Send a packet over the network to another peer.
|
||||||
pub fn send(&self, peer: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError> {
|
pub fn send(&self, peer: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError> {
|
||||||
if let Some(connection) = self.connections.read().unwrap().get(peer).map(|c| c.clone()) {
|
if let Some(connection) = self.connections.read().unwrap().get(peer).cloned() {
|
||||||
match connection.lock().unwrap().deref_mut() {
|
match *connection.lock().unwrap().deref_mut() {
|
||||||
&mut ConnectionEntry::Session(ref mut s) => {
|
ConnectionEntry::Session(ref mut s) => {
|
||||||
s.send_packet(self.protocol, packet_id as u8, &data).unwrap_or_else(|e| {
|
s.send_packet(self.protocol, packet_id as u8, &data).unwrap_or_else(|e| {
|
||||||
warn!(target: "net", "Send error: {:?}", e);
|
warn!(target: "net", "Send error: {:?}", e);
|
||||||
}); //TODO: don't copy vector data
|
}); //TODO: don't copy vector data
|
||||||
@ -175,15 +175,12 @@ impl<'s, Message> NetworkContext<'s, Message> where Message: Send + Sync + Clone
|
|||||||
|
|
||||||
/// Returns peer identification string
|
/// Returns peer identification string
|
||||||
pub fn peer_info(&self, peer: PeerId) -> String {
|
pub fn peer_info(&self, peer: PeerId) -> String {
|
||||||
if let Some(connection) = self.connections.read().unwrap().get(peer).map(|c| c.clone()) {
|
if let Some(connection) = self.connections.read().unwrap().get(peer).cloned() {
|
||||||
match connection.lock().unwrap().deref() {
|
if let ConnectionEntry::Session(ref s) = *connection.lock().unwrap().deref() {
|
||||||
&ConnectionEntry::Session(ref s) => {
|
return s.info.client_version.clone()
|
||||||
return s.info.client_version.clone()
|
|
||||||
},
|
|
||||||
_ => {}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"unknown".to_string()
|
"unknown".to_owned()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -219,7 +216,7 @@ impl HostInfo {
|
|||||||
/// Increments and returns connection nonce.
|
/// Increments and returns connection nonce.
|
||||||
pub fn next_nonce(&mut self) -> H256 {
|
pub fn next_nonce(&mut self) -> H256 {
|
||||||
self.nonce = self.nonce.sha3();
|
self.nonce = self.nonce.sha3();
|
||||||
return self.nonce.clone();
|
self.nonce.clone()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -261,7 +258,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
config: config,
|
config: config,
|
||||||
nonce: H256::random(),
|
nonce: H256::random(),
|
||||||
protocol_version: 4,
|
protocol_version: 4,
|
||||||
client_version: "parity".to_string(),
|
client_version: "parity".to_owned(),
|
||||||
listen_port: 0,
|
listen_port: 0,
|
||||||
capabilities: Vec::new(),
|
capabilities: Vec::new(),
|
||||||
}),
|
}),
|
||||||
@ -314,11 +311,11 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn have_session(&self, id: &NodeId) -> bool {
|
fn have_session(&self, id: &NodeId) -> bool {
|
||||||
self.connections.read().unwrap().iter().any(|e| match e.lock().unwrap().deref() { &ConnectionEntry::Session(ref s) => s.info.id.eq(&id), _ => false })
|
self.connections.read().unwrap().iter().any(|e| match *e.lock().unwrap().deref() { ConnectionEntry::Session(ref s) => s.info.id.eq(&id), _ => false })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn connecting_to(&self, id: &NodeId) -> bool {
|
fn connecting_to(&self, id: &NodeId) -> bool {
|
||||||
self.connections.read().unwrap().iter().any(|e| match e.lock().unwrap().deref() { &ConnectionEntry::Handshake(ref h) => h.id.eq(&id), _ => false })
|
self.connections.read().unwrap().iter().any(|e| match *e.lock().unwrap().deref() { ConnectionEntry::Handshake(ref h) => h.id.eq(&id), _ => false })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn connect_peers(&self, io: &IoContext<NetworkIoMessage<Message>>) {
|
fn connect_peers(&self, io: &IoContext<NetworkIoMessage<Message>>) {
|
||||||
@ -344,7 +341,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for n in to_connect.iter() {
|
for n in &to_connect {
|
||||||
if n.peer_type == PeerType::Required {
|
if n.peer_type == PeerType::Required {
|
||||||
if req_conn < IDEAL_PEERS {
|
if req_conn < IDEAL_PEERS {
|
||||||
self.connect_peer(&n.id, io);
|
self.connect_peer(&n.id, io);
|
||||||
@ -358,7 +355,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
let peer_count = 0;
|
let peer_count = 0;
|
||||||
let mut open_slots = IDEAL_PEERS - peer_count - pending_count + req_conn;
|
let mut open_slots = IDEAL_PEERS - peer_count - pending_count + req_conn;
|
||||||
if open_slots > 0 {
|
if open_slots > 0 {
|
||||||
for n in to_connect.iter() {
|
for n in &to_connect {
|
||||||
if n.peer_type == PeerType::Optional && open_slots > 0 {
|
if n.peer_type == PeerType::Optional && open_slots > 0 {
|
||||||
open_slots -= 1;
|
open_slots -= 1;
|
||||||
self.connect_peer(&n.id, io);
|
self.connect_peer(&n.id, io);
|
||||||
@ -368,8 +365,11 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(single_match)]
|
||||||
|
#[allow(block_in_if_condition_stmt)]
|
||||||
fn connect_peer(&self, id: &NodeId, io: &IoContext<NetworkIoMessage<Message>>) {
|
fn connect_peer(&self, id: &NodeId, io: &IoContext<NetworkIoMessage<Message>>) {
|
||||||
if self.have_session(id) {
|
if self.have_session(id)
|
||||||
|
{
|
||||||
warn!("Aborted connect. Node already connected.");
|
warn!("Aborted connect. Node already connected.");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -410,12 +410,13 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
trace!(target: "net", "accept");
|
trace!(target: "net", "accept");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(single_match)]
|
||||||
fn connection_writable(&self, token: StreamToken, io: &IoContext<NetworkIoMessage<Message>>) {
|
fn connection_writable(&self, token: StreamToken, io: &IoContext<NetworkIoMessage<Message>>) {
|
||||||
let mut create_session = false;
|
let mut create_session = false;
|
||||||
let mut kill = false;
|
let mut kill = false;
|
||||||
if let Some(connection) = self.connections.read().unwrap().get(token).map(|c| c.clone()) {
|
if let Some(connection) = self.connections.read().unwrap().get(token).cloned() {
|
||||||
match connection.lock().unwrap().deref_mut() {
|
match *connection.lock().unwrap().deref_mut() {
|
||||||
&mut ConnectionEntry::Handshake(ref mut h) => {
|
ConnectionEntry::Handshake(ref mut h) => {
|
||||||
match h.writable(io, &self.info.read().unwrap()) {
|
match h.writable(io, &self.info.read().unwrap()) {
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
debug!(target: "net", "Handshake write error: {:?}", e);
|
debug!(target: "net", "Handshake write error: {:?}", e);
|
||||||
@ -427,7 +428,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
create_session = true;
|
create_session = true;
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
&mut ConnectionEntry::Session(ref mut s) => {
|
ConnectionEntry::Session(ref mut s) => {
|
||||||
match s.writable(io, &self.info.read().unwrap()) {
|
match s.writable(io, &self.info.read().unwrap()) {
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
debug!(target: "net", "Session write error: {:?}", e);
|
debug!(target: "net", "Session write error: {:?}", e);
|
||||||
@ -457,21 +458,18 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
let mut packet_data: Option<(ProtocolId, PacketId, Vec<u8>)> = None;
|
let mut packet_data: Option<(ProtocolId, PacketId, Vec<u8>)> = None;
|
||||||
let mut create_session = false;
|
let mut create_session = false;
|
||||||
let mut kill = false;
|
let mut kill = false;
|
||||||
if let Some(connection) = self.connections.read().unwrap().get(token).map(|c| c.clone()) {
|
if let Some(connection) = self.connections.read().unwrap().get(token).cloned() {
|
||||||
match connection.lock().unwrap().deref_mut() {
|
match *connection.lock().unwrap().deref_mut() {
|
||||||
&mut ConnectionEntry::Handshake(ref mut h) => {
|
ConnectionEntry::Handshake(ref mut h) => {
|
||||||
match h.readable(io, &self.info.read().unwrap()) {
|
if let Err(e) = h.readable(io, &self.info.read().unwrap()) {
|
||||||
Err(e) => {
|
debug!(target: "net", "Handshake read error: {:?}", e);
|
||||||
debug!(target: "net", "Handshake read error: {:?}", e);
|
kill = true;
|
||||||
kill = true;
|
|
||||||
},
|
|
||||||
Ok(_) => ()
|
|
||||||
}
|
}
|
||||||
if h.done() {
|
if h.done() {
|
||||||
create_session = true;
|
create_session = true;
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
&mut ConnectionEntry::Session(ref mut s) => {
|
ConnectionEntry::Session(ref mut s) => {
|
||||||
match s.readable(io, &self.info.read().unwrap()) {
|
match s.readable(io, &self.info.read().unwrap()) {
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
debug!(target: "net", "Handshake read error: {:?}", e);
|
debug!(target: "net", "Handshake read error: {:?}", e);
|
||||||
@ -508,11 +506,11 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
}
|
}
|
||||||
for p in ready_data {
|
for p in ready_data {
|
||||||
let h = self.handlers.read().unwrap().get(p).unwrap().clone();
|
let h = self.handlers.read().unwrap().get(p).unwrap().clone();
|
||||||
h.connected(&mut NetworkContext::new(io, p, Some(token), self.connections.clone()), &token);
|
h.connected(&NetworkContext::new(io, p, Some(token), self.connections.clone()), &token);
|
||||||
}
|
}
|
||||||
if let Some((p, packet_id, data)) = packet_data {
|
if let Some((p, packet_id, data)) = packet_data {
|
||||||
let h = self.handlers.read().unwrap().get(p).unwrap().clone();
|
let h = self.handlers.read().unwrap().get(p).unwrap().clone();
|
||||||
h.read(&mut NetworkContext::new(io, p, Some(token), self.connections.clone()), &token, packet_id, &data[1..]);
|
h.read(&NetworkContext::new(io, p, Some(token), self.connections.clone()), &token, packet_id, &data[1..]);
|
||||||
}
|
}
|
||||||
io.update_registration(token).unwrap_or_else(|e| debug!(target: "net", "Token registration error: {:?}", e));
|
io.update_registration(token).unwrap_or_else(|e| debug!(target: "net", "Token registration error: {:?}", e));
|
||||||
}
|
}
|
||||||
@ -538,12 +536,12 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
let mut to_disconnect: Vec<ProtocolId> = Vec::new();
|
let mut to_disconnect: Vec<ProtocolId> = Vec::new();
|
||||||
{
|
{
|
||||||
let mut connections = self.connections.write().unwrap();
|
let mut connections = self.connections.write().unwrap();
|
||||||
if let Some(connection) = connections.get(token).map(|c| c.clone()) {
|
if let Some(connection) = connections.get(token).cloned() {
|
||||||
match connection.lock().unwrap().deref_mut() {
|
match *connection.lock().unwrap().deref_mut() {
|
||||||
&mut ConnectionEntry::Handshake(_) => {
|
ConnectionEntry::Handshake(_) => {
|
||||||
connections.remove(token);
|
connections.remove(token);
|
||||||
},
|
},
|
||||||
&mut ConnectionEntry::Session(ref mut s) if s.is_ready() => {
|
ConnectionEntry::Session(ref mut s) if s.is_ready() => {
|
||||||
for (p, _) in self.handlers.read().unwrap().iter() {
|
for (p, _) in self.handlers.read().unwrap().iter() {
|
||||||
if s.have_capability(p) {
|
if s.have_capability(p) {
|
||||||
to_disconnect.push(p);
|
to_disconnect.push(p);
|
||||||
@ -557,7 +555,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
|
|||||||
}
|
}
|
||||||
for p in to_disconnect {
|
for p in to_disconnect {
|
||||||
let h = self.handlers.read().unwrap().get(p).unwrap().clone();
|
let h = self.handlers.read().unwrap().get(p).unwrap().clone();
|
||||||
h.disconnected(&mut NetworkContext::new(io, p, Some(token), self.connections.clone()), &token);
|
h.disconnected(&NetworkContext::new(io, p, Some(token), self.connections.clone()), &token);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -602,8 +600,8 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
|
|||||||
FIRST_CONNECTION ... LAST_CONNECTION => self.connection_timeout(token, io),
|
FIRST_CONNECTION ... LAST_CONNECTION => self.connection_timeout(token, io),
|
||||||
NODETABLE_DISCOVERY => {},
|
NODETABLE_DISCOVERY => {},
|
||||||
NODETABLE_MAINTAIN => {},
|
NODETABLE_MAINTAIN => {},
|
||||||
_ => match self.timers.read().unwrap().get(&token).map(|p| *p) {
|
_ => match self.timers.read().unwrap().get(&token).cloned() {
|
||||||
Some(timer) => match self.handlers.read().unwrap().get(timer.protocol).map(|h| h.clone()) {
|
Some(timer) => match self.handlers.read().unwrap().get(timer.protocol).cloned() {
|
||||||
None => { warn!(target: "net", "No handler found for protocol: {:?}", timer.protocol) },
|
None => { warn!(target: "net", "No handler found for protocol: {:?}", timer.protocol) },
|
||||||
Some(h) => { h.timeout(&NetworkContext::new(io, timer.protocol, None, self.connections.clone()), timer.token); }
|
Some(h) => { h.timeout(&NetworkContext::new(io, timer.protocol, None, self.connections.clone()), timer.token); }
|
||||||
},
|
},
|
||||||
@ -613,8 +611,8 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn message(&self, io: &IoContext<NetworkIoMessage<Message>>, message: &NetworkIoMessage<Message>) {
|
fn message(&self, io: &IoContext<NetworkIoMessage<Message>>, message: &NetworkIoMessage<Message>) {
|
||||||
match message {
|
match *message {
|
||||||
&NetworkIoMessage::AddHandler {
|
NetworkIoMessage::AddHandler {
|
||||||
ref handler,
|
ref handler,
|
||||||
ref protocol,
|
ref protocol,
|
||||||
ref versions
|
ref versions
|
||||||
@ -627,7 +625,7 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
|
|||||||
info.capabilities.push(CapabilityInfo { protocol: protocol, version: *v, packet_count:0 });
|
info.capabilities.push(CapabilityInfo { protocol: protocol, version: *v, packet_count:0 });
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
&NetworkIoMessage::AddTimer {
|
NetworkIoMessage::AddTimer {
|
||||||
ref protocol,
|
ref protocol,
|
||||||
ref delay,
|
ref delay,
|
||||||
ref token,
|
ref token,
|
||||||
@ -642,9 +640,9 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
|
|||||||
self.timers.write().unwrap().insert(handler_token, ProtocolTimer { protocol: protocol, token: *token });
|
self.timers.write().unwrap().insert(handler_token, ProtocolTimer { protocol: protocol, token: *token });
|
||||||
io.register_timer(handler_token, *delay).expect("Error registering timer");
|
io.register_timer(handler_token, *delay).expect("Error registering timer");
|
||||||
},
|
},
|
||||||
&NetworkIoMessage::User(ref message) => {
|
NetworkIoMessage::User(ref message) => {
|
||||||
for (p, h) in self.handlers.read().unwrap().iter() {
|
for (p, h) in self.handlers.read().unwrap().iter() {
|
||||||
h.message(&mut NetworkContext::new(io, p, None, self.connections.clone()), &message);
|
h.message(&NetworkContext::new(io, p, None, self.connections.clone()), &message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -653,10 +651,10 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
fn register_stream(&self, stream: StreamToken, reg: Token, event_loop: &mut EventLoop<IoManager<NetworkIoMessage<Message>>>) {
match stream {
FIRST_CONNECTION ... LAST_CONNECTION => {
if let Some(connection) = self.connections.read().unwrap().get(stream).map(|c| c.clone()) {
if let Some(connection) = self.connections.read().unwrap().get(stream).cloned() {
match connection.lock().unwrap().deref() {
match *connection.lock().unwrap().deref() {
&ConnectionEntry::Handshake(ref h) => h.register_socket(reg, event_loop).expect("Error registering socket"),
ConnectionEntry::Handshake(ref h) => h.register_socket(reg, event_loop).expect("Error registering socket"),
&ConnectionEntry::Session(_) => warn!("Unexpected session stream registration")
ConnectionEntry::Session(_) => warn!("Unexpected session stream registration")
}
} else {} // expired
}
@ -669,10 +667,10 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
fn update_stream(&self, stream: StreamToken, reg: Token, event_loop: &mut EventLoop<IoManager<NetworkIoMessage<Message>>>) {
match stream {
FIRST_CONNECTION ... LAST_CONNECTION => {
if let Some(connection) = self.connections.read().unwrap().get(stream).map(|c| c.clone()) {
if let Some(connection) = self.connections.read().unwrap().get(stream).cloned() {
match connection.lock().unwrap().deref() {
match *connection.lock().unwrap().deref() {
&ConnectionEntry::Handshake(ref h) => h.update_socket(reg, event_loop).expect("Error updating socket"),
ConnectionEntry::Handshake(ref h) => h.update_socket(reg, event_loop).expect("Error updating socket"),
&ConnectionEntry::Session(ref s) => s.update_socket(reg, event_loop).expect("Error updating socket"),
ConnectionEntry::Session(ref s) => s.update_socket(reg, event_loop).expect("Error updating socket"),
}
} else {} // expired
}
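In `register_stream` and `update_stream`, `.cloned()` replaces the `.map(|c| c.clone())` closures: on an `Option<&T>` it clones the referent and yields an owned `Option<T>`. A small illustrative sketch (the `connections` vector here is a stand-in, not the real connection slab):

// Illustrative only: on an Option<&T>, `.cloned()` produces an owned
// Option<T> and is equivalent to `.map(|x| x.clone())`.
use std::sync::{Arc, Mutex};

fn main() {
    // Hypothetical stand-in for the slab of connection entries.
    let connections: Vec<Arc<Mutex<String>>> =
        vec![Arc::new(Mutex::new("conn0".to_owned()))];

    // Old spelling: connections.get(0).map(|c| c.clone())
    let handle: Option<Arc<Mutex<String>>> = connections.get(0).cloned();

    if let Some(connection) = handle {
        println!("{}", *connection.lock().unwrap());
    }
}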
@ -20,14 +20,16 @@ pub struct NodeEndpoint {
pub udp_port: u16
}

impl NodeEndpoint {
impl FromStr for NodeEndpoint {
type Err = UtilError;

/// Create endpoint from string. Performs name resolution if given a host name.
fn from_str(s: &str) -> Result<NodeEndpoint, UtilError> {
let address = s.to_socket_addrs().map(|mut i| i.next());
match address {
Ok(Some(a)) => Ok(NodeEndpoint {
address: a,
address_str: s.to_string(),
address_str: s.to_owned(),
udp_port: a.port()
}),
Ok(_) => Err(UtilError::AddressResolve(None)),
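Turning the inherent `from_str` into an `impl FromStr for NodeEndpoint` (with `type Err = UtilError`) lets callers write `s.parse::<NodeEndpoint>()`. A rough sketch of the pattern, with a hypothetical `Endpoint` type and `std::net::AddrParseError` standing in for `UtilError`:

// Sketch only: Endpoint and its error type are stand-ins, not the real
// NodeEndpoint / UtilError from the diff above.
use std::net::SocketAddr;
use std::str::FromStr;

struct Endpoint {
    address: SocketAddr,
    udp_port: u16,
}

impl FromStr for Endpoint {
    type Err = std::net::AddrParseError;

    fn from_str(s: &str) -> Result<Endpoint, Self::Err> {
        let address: SocketAddr = s.parse()?;
        Ok(Endpoint { address: address, udp_port: address.port() })
    }
}

fn main() {
    // Implementing FromStr is what makes `str::parse` work for the type.
    let ep: Endpoint = "127.0.0.1:30303".parse().unwrap();
    println!("address {}, udp port {}", ep.address, ep.udp_port);
}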
@ -182,7 +182,7 @@ impl Session {
// map to protocol
let protocol = self.info.capabilities[i].protocol;
let pid = packet_id - self.info.capabilities[i].id_offset;
return Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } )
Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } )
},
_ => {
debug!(target: "net", "Unkown packet: {:?}", packet_id);
@ -212,7 +212,7 @@ impl Session {
// Intersect with host capabilities
// Leave only highset mutually supported capability version
let mut caps: Vec<SessionCapabilityInfo> = Vec::new();
for hc in host.capabilities.iter() {
for hc in &host.capabilities {
if peer_caps.iter().any(|c| c.protocol == hc.protocol && c.version == hc.version) {
caps.push(SessionCapabilityInfo {
protocol: hc.protocol,
@ -169,7 +169,7 @@ impl HashDB for OverlayDB {
match k {
Some(&(ref d, rc)) if rc > 0 => Some(d),
_ => {
let memrc = k.map(|&(_, rc)| rc).unwrap_or(0);
let memrc = k.map_or(0, |&(_, rc)| rc);
match self.payload(key) {
Some(x) => {
let (d, rc) = x;
@ -194,16 +194,11 @@ impl HashDB for OverlayDB {
match k {
Some(&(_, rc)) if rc > 0 => true,
_ => {
let memrc = k.map(|&(_, rc)| rc).unwrap_or(0);
let memrc = k.map_or(0, |&(_, rc)| rc);
match self.payload(key) {
Some(x) => {
let (_, rc) = x;
if rc as i32 + memrc > 0 {
true
}
else {
false
}
rc as i32 + memrc > 0
}
// Replace above match arm with this once https://github.com/rust-lang/rust/issues/15287 is done.
//Some((d, rc)) if rc + memrc > 0 => true,
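The two OverlayDB hunks above apply the same pair of rewrites: `k.map(f).unwrap_or(0)` becomes `k.map_or(0, f)`, and an `if cond { true } else { false }` block collapses to the condition itself. A small self-contained sketch of both:

// A small, self-contained sketch of both rewrites.
fn main() {
    let k: Option<(Vec<u8>, i32)> = Some((vec![1, 2, 3], 2));

    // `map_or(default, f)` replaces `map(f).unwrap_or(default)`.
    let memrc = k.as_ref().map_or(0, |&(_, rc)| rc);

    // A boolean condition can be returned directly instead of wrapping it
    // in `if cond { true } else { false }`.
    let rc = -1i32;
    let exists = rc + memrc > 0;

    println!("memrc = {}, exists = {}", memrc, exists);
}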
@ -41,7 +41,7 @@ impl Stream for RlpStream {
stream
}

fn append<'a, E>(&'a mut self, object: &E) -> &'a mut RlpStream where E: Encodable {
fn append<E>(&mut self, object: &E) -> &mut RlpStream where E: Encodable {
// encode given value and add it at the end of the stream
object.encode(&mut self.encoder);

@ -52,7 +52,7 @@ impl Stream for RlpStream {
self
}

fn append_list<'a>(&'a mut self, len: usize) -> &'a mut RlpStream {
fn append_list(&mut self, len: usize) -> &mut RlpStream {
match len {
0 => {
// we may finish, if the appended list len is equal 0
@ -69,7 +69,7 @@ impl Stream for RlpStream {
self
}

fn append_empty_data<'a>(&'a mut self) -> &'a mut RlpStream {
fn append_empty_data(&mut self) -> &mut RlpStream {
// self push raw item
self.encoder.bytes.push(0x80);

@ -9,7 +9,7 @@ pub trait Decoder: Sized {
/// TODO [arkpar] Please document me
fn as_list(&self) -> Result<Vec<Self>, DecoderError>;
/// TODO [Gav Wood] Please document me
fn as_rlp<'a>(&'a self) -> &'a UntrustedRlp<'a>;
fn as_rlp(&self) -> &UntrustedRlp;
/// TODO [debris] Please document me
fn as_raw(&self) -> &[u8];
}
@ -255,7 +255,7 @@ pub trait Stream: Sized {
/// assert_eq!(out, vec![0xca, 0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g', 0x80]);
/// }
/// ```
fn append_list<'a>(&'a mut self, len: usize) -> &'a mut Self;
fn append_list(&mut self, len: usize) -> &mut Self;

/// Apends null to the end of stream, chainable.
///
@ -270,7 +270,7 @@ pub trait Stream: Sized {
/// assert_eq!(out, vec![0xc2, 0x80, 0x80]);
/// }
/// ```
fn append_empty_data<'a>(&'a mut self) -> &'a mut Self;
fn append_empty_data(&mut self) -> &mut Self;

/// Appends raw (pre-serialised) RLP data. Use with caution. Chainable.
fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut Self;
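The `RlpStream`/`Stream` signature changes rely on lifetime elision: for a method taking `&mut self` and returning `&mut Self`, the compiler infers the same lifetime that the explicit `<'a>` annotations spelled out, so chaining still works. A minimal sketch with a hypothetical `Stream` struct standing in for `RlpStream`:

// Minimal sketch of dropping explicit lifetimes on a chainable method.
struct Stream {
    bytes: Vec<u8>,
}

impl Stream {
    // Before: fn append_empty_data<'a>(&'a mut self) -> &'a mut Stream
    // Elision infers the same lifetime, so the annotations can be dropped.
    fn append_empty_data(&mut self) -> &mut Stream {
        self.bytes.push(0x80);
        self
    }
}

fn main() {
    let mut s = Stream { bytes: Vec::new() };
    s.append_empty_data().append_empty_data();
    println!("{:?}", s.bytes);
}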
@ -15,25 +15,25 @@ fn rlp_at() {
assert!(rlp.is_list());
//let animals = <Vec<String> as rlp::Decodable>::decode_untrusted(&rlp).unwrap();
let animals: Vec<String> = rlp.as_val().unwrap();
assert_eq!(animals, vec!["cat".to_string(), "dog".to_string()]);
assert_eq!(animals, vec!["cat".to_owned(), "dog".to_owned()]);

let cat = rlp.at(0).unwrap();
assert!(cat.is_data());
assert_eq!(cat.as_raw(), &[0x83, b'c', b'a', b't']);
//assert_eq!(String::decode_untrusted(&cat).unwrap(), "cat".to_string());
//assert_eq!(String::decode_untrusted(&cat).unwrap(), "cat".to_owned());
assert_eq!(cat.as_val::<String>().unwrap(), "cat".to_string());
assert_eq!(cat.as_val::<String>().unwrap(), "cat".to_owned());

let dog = rlp.at(1).unwrap();
assert!(dog.is_data());
assert_eq!(dog.as_raw(), &[0x83, b'd', b'o', b'g']);
//assert_eq!(String::decode_untrusted(&dog).unwrap(), "dog".to_string());
//assert_eq!(String::decode_untrusted(&dog).unwrap(), "dog".to_owned());
assert_eq!(dog.as_val::<String>().unwrap(), "dog".to_string());
assert_eq!(dog.as_val::<String>().unwrap(), "dog".to_owned());

let cat_again = rlp.at(0).unwrap();
assert!(cat_again.is_data());
assert_eq!(cat_again.as_raw(), &[0x83, b'c', b'a', b't']);
//assert_eq!(String::decode_untrusted(&cat_again).unwrap(), "cat".to_string());
//assert_eq!(String::decode_untrusted(&cat_again).unwrap(), "cat".to_owned());
assert_eq!(cat_again.as_val::<String>().unwrap(), "cat".to_string());
assert_eq!(cat_again.as_val::<String>().unwrap(), "cat".to_owned());
}
}

@ -268,13 +268,13 @@ fn decode_untrusted_u256() {

#[test]
fn decode_untrusted_str() {
let tests = vec![DTestPair("cat".to_string(), vec![0x83, b'c', b'a', b't']),
let tests = vec![DTestPair("cat".to_owned(), vec![0x83, b'c', b'a', b't']),
DTestPair("dog".to_string(), vec![0x83, b'd', b'o', b'g']),
DTestPair("dog".to_owned(), vec![0x83, b'd', b'o', b'g']),
DTestPair("Marek".to_string(),
DTestPair("Marek".to_owned(),
vec![0x85, b'M', b'a', b'r', b'e', b'k']),
DTestPair("".to_string(), vec![0x80]),
DTestPair("".to_owned(), vec![0x80]),
DTestPair("Lorem ipsum dolor sit amet, consectetur adipisicing elit"
.to_string(),
.to_owned(),
vec![0xb8, 0x38, b'L', b'o', b'r', b'e', b'm', b' ', b'i',
b'p', b's', b'u', b'm', b' ', b'd', b'o', b'l', b'o',
b'r', b' ', b's', b'i', b't', b' ', b'a', b'm', b'e',
@ -311,14 +311,14 @@ fn decode_untrusted_vector_u64() {

#[test]
fn decode_untrusted_vector_str() {
let tests = vec![DTestPair(vec!["cat".to_string(), "dog".to_string()],
let tests = vec![DTestPair(vec!["cat".to_owned(), "dog".to_owned()],
vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'])];
run_decode_tests(tests);
}

#[test]
fn decode_untrusted_vector_of_vectors_str() {
let tests = vec![DTestPair(vec![vec!["cat".to_string()]],
let tests = vec![DTestPair(vec![vec!["cat".to_owned()]],
vec![0xc5, 0xc4, 0x83, b'c', b'a', b't'])];
run_decode_tests(tests);
}
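The test changes above swap `"cat".to_string()` for `"cat".to_owned()`. Both build a `String` from a `&str`; `to_owned` is the spelling clippy prefers for string literals because it copies the bytes directly rather than going through the `ToString` formatting path (the usual rationale for the lint). For example:

// Both lines build the same String; only the conversion path differs.
fn main() {
    let a: String = "cat".to_string();
    let b: String = "cat".to_owned();
    assert_eq!(a, b);
    println!("{} {}", a, b);
}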
@ -288,7 +288,7 @@ impl<'a> BasicDecoder<'a> {

/// Return first item info
fn payload_info(bytes: &[u8]) -> Result<PayloadInfo, DecoderError> {
let item = match bytes.first().map(|&x| x) {
let item = match bytes.first().cloned() {
None => return Err(DecoderError::RlpIsTooShort),
Some(0...0x7f) => PayloadInfo::new(0, 1),
Some(l @ 0x80...0xb7) => PayloadInfo::new(1, l as usize - 0x80),
@ -324,7 +324,7 @@ impl<'a> Decoder for BasicDecoder<'a> {

let bytes = self.rlp.as_raw();

match bytes.first().map(|&x| x) {
match bytes.first().cloned() {
// rlp is too short
None => Err(DecoderError::RlpIsTooShort),
// single byt value
@ -355,12 +355,12 @@ impl<'a> Decoder for BasicDecoder<'a> {

fn as_list(&self) -> Result<Vec<Self>, DecoderError> {
let v: Vec<BasicDecoder<'a>> = self.rlp.iter()
.map(| i | BasicDecoder::new(i))
.map(BasicDecoder::new)
.collect();
Ok(v)
}

fn as_rlp<'s>(&'s self) -> &'s UntrustedRlp<'s> {
fn as_rlp(&self) -> &UntrustedRlp {
&self.rlp
}
}
@ -405,6 +405,7 @@ impl<T> Decodable for Option<T> where T: Decodable {
macro_rules! impl_array_decodable {
($index_type:ty, $len:expr ) => (
impl<T> Decodable for [T; $len] where T: Decodable {
#[allow(len_zero)]
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
let decoders = try!(decoder.as_list());

@ -42,7 +42,7 @@ pub trait Squeeze {

impl<K, T> Squeeze for HashMap<K, T> where K: Eq + Hash + Clone + HeapSizeOf, T: HeapSizeOf {
fn squeeze(&mut self, size: usize) {
if self.len() == 0 {
if self.is_empty() {
return
}

@ -50,7 +50,7 @@ impl<K, T> Squeeze for HashMap<K, T> where K: Eq + Hash + Clone + HeapSizeOf, T:
let all_entries = size_of_entry * self.len();
let mut shrinked_size = all_entries;

while self.len() > 0 && shrinked_size > size {
while !self.is_empty() && shrinked_size > size {
// could be optimized
let key = self.keys().next().unwrap().clone();
self.remove(&key);
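`is_empty()` replaces the `len() == 0` / `len() > 0` comparisons in `Squeeze::squeeze`; the two forms are equivalent, and the method states the intent directly (this is the `len_zero` lint that is explicitly allowed elsewhere in this diff). A tiny illustration:

// Equivalent checks; `is_empty` names the intent.
use std::collections::HashMap;

fn main() {
    let mut map: HashMap<u32, u32> = HashMap::new();
    assert!(map.is_empty());      // instead of map.len() == 0
    map.insert(1, 2);
    assert!(!map.is_empty());     // instead of map.len() > 0
    println!("{} entries", map.len());
}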
@ -38,6 +38,7 @@ pub struct TrieDB<'db> {
pub hash_count: usize,
}

#[allow(wrong_self_convention)]
impl<'db> TrieDB<'db> {
/// Create a new trie with the backing database `db` and `root`
/// Panics, if `root` does not exist
@ -103,7 +104,7 @@ impl<'db> TrieDB<'db> {

match node {
Node::Extension(_, payload) => handle_payload(payload),
Node::Branch(payloads, _) => for payload in payloads.iter() { handle_payload(payload) },
Node::Branch(payloads, _) => for payload in &payloads { handle_payload(payload) },
_ => {},
}
}
@ -141,12 +142,9 @@ impl<'db> TrieDB<'db> {
},
Node::Branch(ref nodes, ref value) => {
try!(writeln!(f, ""));
match value {
&Some(v) => {
try!(self.fmt_indent(f, deepness + 1));
try!(writeln!(f, "=: {:?}", v.pretty()))
},
&None => {}
if let Some(v) = *value {
try!(self.fmt_indent(f, deepness + 1));
try!(writeln!(f, "=: {:?}", v.pretty()))
}
for i in 0..16 {
match self.get_node(nodes[i]) {
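Replacing the two-arm `match value { &Some(v) => ..., &None => {} }` with `if let Some(v) = *value { ... }` drops the empty `None` arm and the per-pattern `&`. A minimal sketch of the same transformation on a `Copy` payload:

// Sketch only; the Option<u32> here stands in for the Branch node value.
fn describe(value: &Option<u32>) -> String {
    // Before:
    // match value {
    //     &Some(v) => format!("=: {}", v),
    //     &None => String::new(),
    // }
    if let Some(v) = *value {
        format!("=: {}", v)
    } else {
        String::new()
    }
}

fn main() {
    println!("{}", describe(&Some(42)));
    println!("{}", describe(&None));
}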
@ -50,6 +50,7 @@ enum MaybeChanged<'a> {
Changed(Bytes),
}

#[allow(wrong_self_convention)]
impl<'db> TrieDBMut<'db> {
/// Create a new trie with the backing database `db` and empty `root`
/// Initialise to the state entailed by the genesis block.
@ -145,7 +146,7 @@ impl<'db> TrieDBMut<'db> {

match node {
Node::Extension(_, payload) => handle_payload(payload),
Node::Branch(payloads, _) => for payload in payloads.iter() { handle_payload(payload) },
Node::Branch(payloads, _) => for payload in &payloads { handle_payload(payload) },
_ => {},
}
}
@ -178,12 +179,9 @@ impl<'db> TrieDBMut<'db> {
},
Node::Branch(ref nodes, ref value) => {
try!(writeln!(f, ""));
match value {
&Some(v) => {
try!(self.fmt_indent(f, deepness + 1));
try!(writeln!(f, "=: {:?}", v.pretty()))
},
&None => {}
if let Some(v) = *value {
try!(self.fmt_indent(f, deepness + 1));
try!(writeln!(f, "=: {:?}", v.pretty()))
}
for i in 0..16 {
match self.get_node(nodes[i]) {
@ -331,6 +329,7 @@ impl<'db> TrieDBMut<'db> {
}
}

#[allow(cyclomatic_complexity)]
/// Determine the RLP of the node, assuming we're inserting `partial` into the
/// node currently of data `old`. This will *not* delete any hash of `old` from the database;
/// it will just return the new RLP that includes the new node.
@ -694,7 +693,7 @@ mod tests {
}
}

fn populate_trie<'db>(db: &'db mut HashDB, root: &'db mut H256, v: &Vec<(Vec<u8>, Vec<u8>)>) -> TrieDBMut<'db> {
fn populate_trie<'db>(db: &'db mut HashDB, root: &'db mut H256, v: &[(Vec<u8>, Vec<u8>)]) -> TrieDBMut<'db> {
let mut t = TrieDBMut::new(db, root);
for i in 0..v.len() {
let key: &[u8]= &v[i].0;
@ -704,8 +703,8 @@ mod tests {
t
}

fn unpopulate_trie<'a, 'db>(t: &mut TrieDBMut<'db>, v: &Vec<(Vec<u8>, Vec<u8>)>) {
fn unpopulate_trie<'db>(t: &mut TrieDBMut<'db>, v: &[(Vec<u8>, Vec<u8>)]) {
for i in v.iter() {
for i in v {
let key: &[u8]= &i.0;
t.remove(&key);
}
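The test helpers now take `&[(Vec<u8>, Vec<u8>)]` instead of `&Vec<(Vec<u8>, Vec<u8>)>`. A `&Vec<T>` argument coerces to `&[T]`, so existing callers are unaffected, while the function also accepts arrays and sub-slices. For instance (hypothetical helper, not the real `populate_trie`):

// Hypothetical helper illustrating the slice-parameter signature.
fn total_key_len(v: &[(Vec<u8>, Vec<u8>)]) -> usize {
    v.iter().map(|pair| pair.0.len()).sum()
}

fn main() {
    let pairs = vec![
        (vec![0x01u8, 0x23], vec![0x01u8]),
        (vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]),
    ];
    // A &Vec<_> argument coerces to &[_]; a sub-slice works too.
    println!("{}", total_key_len(&pairs));
    println!("{}", total_key_len(&pairs[..1]));
}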
@ -761,7 +760,7 @@ mod tests {
println!("TRIE MISMATCH");
println!("");
println!("{:?} vs {:?}", memtrie.root(), real);
for i in x.iter() {
for i in &x {
println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty());
}
println!("{:?}", memtrie);
@ -774,7 +773,7 @@ mod tests {
println!("");
println!("remaining: {:?}", memtrie.db_items_remaining());
println!("{:?} vs {:?}", memtrie.root(), real);
for i in x.iter() {
for i in &x {
println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty());
}
println!("{:?}", memtrie);
@ -1051,12 +1050,12 @@ mod tests {
println!("TRIE MISMATCH");
println!("");
println!("ORIGINAL... {:?}", memtrie.root());
for i in x.iter() {
for i in &x {
println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty());
}
println!("{:?}", memtrie);
println!("SORTED... {:?}", memtrie_sorted.root());
for i in y.iter() {
for i in &y {
println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty());
}
println!("{:?}", memtrie_sorted);
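`for i in &x` is equivalent to `for i in x.iter()`: both borrow the collection and yield references, so the loop body and the code after the loop are unchanged. A tiny sketch:

// Borrowing loop; x remains usable afterwards.
fn main() {
    let x: Vec<(Vec<u8>, Vec<u8>)> = vec![(vec![1, 2], vec![3])];
    // Equivalent to `for i in x.iter()`.
    for i in &x {
        println!("{:?} -> {:?}", i.0, i.1);
    }
    // x is still usable here because the loop only borrowed it.
    println!("{} pairs", x.len());
}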
@ -200,7 +200,7 @@ macro_rules! construct_uint {
#[inline]
fn byte(&self, index: usize) -> u8 {
let &$name(ref arr) = self;
(arr[index / 8] >> ((index % 8)) * 8) as u8
(arr[index / 8] >> (((index % 8)) * 8)) as u8
}

fn to_bytes(&self, bytes: &mut[u8]) {
@ -446,16 +446,16 @@ macro_rules! construct_uint {

impl FromJson for $name {
fn from_json(json: &Json) -> Self {
match json {
match *json {
&Json::String(ref s) => {
Json::String(ref s) => {
if s.len() >= 2 && &s[0..2] == "0x" {
FromStr::from_str(&s[2..]).unwrap_or(Default::default())
FromStr::from_str(&s[2..]).unwrap_or_else(|_| Default::default())
} else {
Uint::from_dec_str(s).unwrap_or(Default::default())
Uint::from_dec_str(s).unwrap_or_else(|_| Default::default())
}
},
&Json::U64(u) => From::from(u),
Json::U64(u) => From::from(u),
&Json::I64(i) => From::from(i as u64),
Json::I64(i) => From::from(i as u64),
_ => Uint::zero(),
}
}
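`unwrap_or(Default::default())` constructs the default even when the value parses successfully; `unwrap_or_else(|_| Default::default())` defers that work to the error path, which is the point of the `FromJson` rewrite above. An illustrative sketch:

// Lazy default construction on the Err path only.
use std::str::FromStr;

fn parse_or_zero(s: &str) -> u64 {
    // Eager form: u64::from_str(s).unwrap_or(Default::default())
    // The closure below only runs when parsing fails.
    u64::from_str(s).unwrap_or_else(|_| Default::default())
}

fn main() {
    println!("{}", parse_or_zero("1234")); // 1234
    println!("{}", parse_or_zero("nope")); // 0
}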
@ -488,7 +488,7 @@ macro_rules! construct_uint {
for i in 0..bytes.len() {
let rev = bytes.len() - 1 - i;
let pos = rev / 8;
ret[pos] += (bytes[i] as u64) << (rev % 8) * 8;
ret[pos] += (bytes[i] as u64) << ((rev % 8) * 8);
}
$name(ret)
}
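The added parentheses in `byte` and in the byte-assembly loop above do not change the arithmetic: `<<` and `>>` bind more loosely than `*` in Rust, so both spellings already group as a shift by `(x % 8) * 8`; the explicit form just makes that grouping obvious to the reader. A quick check:

// Both spellings parse the same way because `*` binds tighter than `<<`.
fn main() {
    let rev = 11usize;
    let byte = 0xABu64;

    let implicit = byte << (rev % 8) * 8;
    let explicit = byte << ((rev % 8) * 8);

    assert_eq!(implicit, explicit);
    println!("{:#x}", explicit);
}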
@ -500,7 +500,7 @@ macro_rules! construct_uint {
fn from_str(value: &str) -> Result<$name, Self::Err> {
let bytes: Vec<u8> = match value.len() % 2 == 0 {
true => try!(value.from_hex()),
false => try!(("0".to_string() + value).from_hex())
false => try!(("0".to_owned() + value).from_hex())
};

let bytes_ref: &[u8] = &bytes;
@ -1061,6 +1061,7 @@ mod tests {
}

#[test]
#[allow(eq_op)]
pub fn uint256_comp_test() {
let small = U256([10u64, 0, 0, 0]);
let big = U256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]);