Merge branch 'master' into plain_hasher

commit e0c2995f78

Cargo.lock (generated)
@@ -125,10 +125,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "bigint"
version = "3.0.0"
version = "4.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"crunchy 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -301,6 +302,7 @@ dependencies = [
"ethcore-util 1.8.0",
"ethjson 0.1.0",
"rlp 0.2.0",
"rlp_derive 0.1.0",
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

@@ -345,7 +347,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "crunchy"
version = "0.1.3"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
@@ -477,7 +479,7 @@ dependencies = [
name = "ethash"
version = "1.8.0"
dependencies = [
"crunchy 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"crunchy 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -526,6 +528,7 @@ dependencies = [
"price-info 1.7.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.2.0",
"rlp_derive 0.1.0",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -542,7 +545,7 @@ dependencies = [
name = "ethcore-bigint"
version = "0.1.3"
dependencies = [
"bigint 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 4.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"plain_hasher 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -653,6 +656,7 @@ dependencies = [
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.2.0",
"rlp_derive 0.1.0",
"serde 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -774,7 +778,6 @@ dependencies = [
"ethcore-devtools 1.8.0",
"ethcore-logger 1.8.0",
"heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1861,6 +1864,7 @@ dependencies = [
"fetch 0.1.0",
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-cpupool 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1972,6 +1976,7 @@ dependencies = [
"fetch 0.1.0",
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-cpupool 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-ipc-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -2054,7 +2059,7 @@ dependencies = [
[[package]]
name = "parity-ui-precompiled"
version = "1.4.0"
source = "git+https://github.com/paritytech/js-precompiled.git#04b109bd8485b26d0f8ef8df6afab69d8fe4878b"
source = "git+https://github.com/paritytech/js-precompiled.git#d809723e58bcb36c0f8d2eca5ca94abbb3690544"
dependencies = [
"parity-dapps-glue 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2314,7 +2319,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "quote"
version = "0.3.10"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
@@ -2405,6 +2410,15 @@ dependencies = [
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "rlp_derive"
version = "0.1.0"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.2.0",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "rocksdb"
version = "0.4.5"
@@ -2462,7 +2476,7 @@ dependencies = [
name = "rpc-cli"
version = "1.4.0"
dependencies = [
"bigint 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 4.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-util 1.8.0",
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-rpc 1.8.0",
@@ -2597,7 +2611,7 @@ name = "serde_derive"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive_internals 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2739,7 +2753,7 @@ name = "syn"
version = "0.11.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3240,7 +3254,7 @@ dependencies = [
"checksum backtrace-sys 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3a0d842ea781ce92be2bf78a9b38883948542749640b8378b3b2f03d1fd9f1ff"
"checksum base-x 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2f59103b47307f76e03bef1633aec7fa9e29bfb5aa6daf5a334f94233c71f6c1"
"checksum base32 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1b9605ba46d61df0410d8ac686b0007add8172eba90e8e909c347856fe794d8c"
"checksum bigint 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d0673c930652d3d4d6dcd5c45b5db4fa5f8f33994d7323618c43c083b223e8c"
"checksum bigint 4.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f56c9f1cd09cdcafcccdab1fd58797d39b7d4d203238b2e3768807590723bdf0"
"checksum bincode 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e103c8b299b28a9c6990458b7013dc4a8356a9b854c51b9883241f5866fac36e"
"checksum bit-set 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e6e1e6fb1c9e3d6fcdec57216a74eaa03e41f52a22f13a16438251d8e88b89da"
"checksum bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"
@@ -3265,7 +3279,7 @@ dependencies = [
"checksum core-foundation 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "20a6d0448d3a99d977ae4a2aa5a98d886a923e863e81ad9ff814645b6feb3bbd"
"checksum core-foundation-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "05eed248dc504a5391c63794fe4fb64f46f071280afaa1b73308f3c0ce4574c5"
"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
"checksum crunchy 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e6aa9cb5f2d7bffc4eecfaf924fe450549dc4f0c3a6502298dc24f968b1eabbe"
"checksum crunchy 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a2f4a431c5c9f662e1200b7c7f02c34e91361150e382089a8f2dec3ba680cbda"
"checksum crypt32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e34988f7e069e0b2f3bfc064295161e489b2d4e04a2e4248fb94360cdf00b4ec"
"checksum ctrlc 1.1.1 (git+https://github.com/paritytech/rust-ctrlc.git)" = "<none>"
"checksum custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ef8ae57c4978a2acd8b869ce6b9ca1dfe817bff704c220209fdef2c0b75a01b9"
@@ -3394,7 +3408,7 @@ dependencies = [
"checksum quasi_macros 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "29cec87bc2816766d7e4168302d505dd06b0a825aed41b00633d296e922e02dd"
"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
"checksum quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6683b0e23d80813b1a535841f0048c1537d3f86d63c999e8373b39a9b0eb74a"
"checksum quote 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)" = "6732e32663c9c271bfc7c1823486b471f18c47a2dbf87c066897b7b51afc83be"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
"checksum rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2791d88c6defac799c3f20d74f094ca33b9332612d9aef9078519c82e4fe04a5"
"checksum rayon 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8c83adcb08e5b922e804fe1918142b422602ef11f2fd670b0b52218cb5984a20"
"checksum rayon-core 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "767d91bacddf07d442fe39257bf04fd95897d1c47c545d009f6beb03efd038f8"
@@ -27,6 +27,7 @@ time = "0.1.35"
unicase = "1.3"
url = "1.0"
zip = { version = "0.1", default-features = false }
itertools = "0.5"

jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
@@ -31,8 +31,7 @@ pub use self::redirect::Redirection;
pub use self::streaming::StreamingHandler;

use std::iter;
use util::Itertools;

use itertools::Itertools;
use url::Url;
use hyper::{server, header, net, uri};
use {apps, address, Embeddable};
@@ -22,6 +22,7 @@
extern crate base32;
extern crate futures;
extern crate futures_cpupool;
extern crate itertools;
extern crate linked_hash_map;
extern crate mime_guess;
extern crate ntp;
@@ -47,6 +47,7 @@ num_cpus = "1.2"
price-info = { path = "../price-info" }
rand = "0.3"
rlp = { path = "../util/rlp" }
rlp_derive = { path = "../util/rlp_derive" }
rust-crypto = "0.2.34"
rustc-hex = "1.0"
semver = "0.6"
@@ -21,6 +21,7 @@ ethcore-devtools = { path = "../../devtools" }
evm = { path = "../evm" }
vm = { path = "../vm" }
rlp = { path = "../../util/rlp" }
rlp_derive = { path = "../../util/rlp_derive" }
time = "0.1"
smallvec = "0.4"
futures = "0.1"
@@ -100,8 +100,8 @@ pub trait LightChainClient: Send + Sync {
/// Get an iterator over a block and its ancestry.
fn ancestry_iter<'a>(&'a self, start: BlockId) -> Box<Iterator<Item=encoded::Header> + 'a>;

/// Get the signing network ID.
fn signing_network_id(&self) -> Option<u64>;
/// Get the signing chain ID.
fn signing_chain_id(&self) -> Option<u64>;

/// Get environment info for execution at a given block.
/// Fails if that block's header is not stored.
@@ -260,9 +260,9 @@ impl Client {
self.chain.ancestry_iter(start)
}

/// Get the signing network id.
pub fn signing_network_id(&self) -> Option<u64> {
self.engine.signing_network_id(&self.latest_env_info())
/// Get the signing chain id.
pub fn signing_chain_id(&self) -> Option<u64> {
self.engine.signing_chain_id(&self.latest_env_info())
}

/// Flush the header queue.
@@ -448,8 +448,8 @@ impl LightChainClient for Client {
Box::new(Client::ancestry_iter(self, start))
}

fn signing_network_id(&self) -> Option<u64> {
Client::signing_network_id(self)
fn signing_chain_id(&self) -> Option<u64> {
Client::signing_chain_id(self)
}

fn env_info(&self, id: BlockId) -> Option<EnvInfo> {
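The rename from signing_network_id to signing_chain_id matches EIP-155 terminology: the value that gets mixed into a transaction signature is the chain id, which need not equal the devp2p network id. A minimal sketch of that relationship, with an illustrative helper that is not part of this diff:

// Sketch only: EIP-155 folds the chain id into the signature's `v` value.
// `parity` is the ECDSA recovery bit (0 or 1); the function name is illustrative.
fn eip155_v(chain_id: Option<u64>, parity: u8) -> u64 {
    match chain_id {
        // Replay-protected (post-EIP-155): v = chain_id * 2 + 35 + parity.
        Some(id) => id * 2 + 35 + parity as u64,
        // Legacy, unprotected signing: v = 27 + parity.
        None => 27 + parity as u64,
    }
}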
@@ -76,6 +76,8 @@ extern crate futures;
extern crate itertools;
extern crate rand;
extern crate rlp;
#[macro_use]
extern crate rlp_derive;
extern crate serde;
extern crate smallvec;
extern crate stats;
@@ -650,7 +650,7 @@ pub mod header {
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};

/// Potentially incomplete headers request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Start block.
pub start: Field<HashOrNumber>,
@@ -662,27 +662,6 @@ pub mod header {
pub reverse: bool,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
start: rlp.val_at(0)?,
skip: rlp.val_at(1)?,
max: rlp.val_at(2)?,
reverse: rlp.val_at(3)?
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4)
.append(&self.start)
.append(&self.skip)
.append(&self.max)
.append(&self.reverse);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
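The hand-written Encodable/Decodable implementations removed throughout this file are replaced by the new in-tree rlp_derive custom derives. A rough sketch of the intended equivalence, assuming the derive encodes a struct as an RLP list of its fields in declaration order (the exact expansion lives in util/rlp_derive):

// Sketch with simplified field types; the real struct uses Field<...> wrappers.
#[derive(RlpEncodable, RlpDecodable, PartialEq, Debug)]
struct HeadersRequest {
    start: u64,
    skip: u64,
    max: u64,
    reverse: bool,
}
// Expected expansion, mirroring the impls deleted above:
//   rlp_append: s.begin_list(4).append(&self.start)...append(&self.reverse)
//   decode:     Ok(Self { start: rlp.val_at(0)?, ..., reverse: rlp.val_at(3)? })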
@@ -784,26 +763,12 @@ pub mod header_proof {
use util::{Bytes, U256, H256};

/// Potentially incomplete header proof request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block number.
pub num: Field<u64>,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
num: rlp.val_at(0)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.num);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -889,30 +854,15 @@ pub mod header_proof {
/// Request and response for transaction index.
pub mod transaction_index {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::H256;

/// Potentially incomplete transaction index request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Transaction hash to get index for.
pub hash: Field<H256>,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
hash: rlp.val_at(0)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.hash);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -959,7 +909,7 @@ pub mod transaction_index {
}

/// The output of a request for transaction index.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Response {
/// Block number.
pub num: u64,
@@ -976,55 +926,21 @@ pub mod transaction_index {
f(1, Output::Hash(self.hash));
}
}

impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
num: rlp.val_at(0)?,
hash: rlp.val_at(1)?,
index: rlp.val_at(2)?,
})
}
}

impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3)
.append(&self.num)
.append(&self.hash)
.append(&self.index);
}
}
}

/// Request and response for block receipts
pub mod block_receipts {
use super::{Field, NoSuchOutput, OutputKind, Output};
use ethcore::receipt::Receipt;
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::H256;

/// Potentially incomplete block receipts request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to get receipts for.
pub hash: Field<H256>,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
hash: rlp.val_at(0)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.hash);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -1068,7 +984,7 @@ pub mod block_receipts {
}

/// The output of a request for block receipts.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct Response {
/// The block receipts.
pub receipts: Vec<Receipt>
@@ -1078,20 +994,6 @@ pub mod block_receipts {
/// Fill reusable outputs by providing them to the function.
fn fill_outputs<F>(&self, _: F) where F: FnMut(usize, Output) {}
}

impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
receipts: rlp.as_list()?,
})
}
}

impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.append_list(&self.receipts);
}
}
}

/// Request and response for a block body
@@ -1102,26 +1004,12 @@ pub mod block_body {
use util::H256;

/// Potentially incomplete block body request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to get receipts for.
pub hash: Field<H256>,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
hash: rlp.val_at(0)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.hash);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -1201,11 +1089,10 @@ pub mod block_body {
/// A request for an account proof.
pub mod account {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::{Bytes, U256, H256};

/// Potentially incomplete request for an account proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to request state proof for.
pub block_hash: Field<H256>,
@@ -1213,23 +1100,6 @@ pub mod account {
pub address_hash: Field<H256>,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
address_hash: rlp.val_at(1)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2)
.append(&self.block_hash)
.append(&self.address_hash);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -1292,7 +1162,7 @@ pub mod account {
}

/// The output of a request for an account state proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Response {
/// Inclusion/exclusion proof
pub proof: Vec<Bytes>,
@@ -1313,39 +1183,15 @@ pub mod account {
f(1, Output::Hash(self.storage_root));
}
}

impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
proof: rlp.list_at(0)?,
nonce: rlp.val_at(1)?,
balance: rlp.val_at(2)?,
code_hash: rlp.val_at(3)?,
storage_root: rlp.val_at(4)?
})
}
}

impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(5)
.append_list::<Vec<u8>,_>(&self.proof[..])
.append(&self.nonce)
.append(&self.balance)
.append(&self.code_hash)
.append(&self.storage_root);
}
}
}

/// A request for a storage proof.
pub mod storage {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::{Bytes, H256};

/// Potentially incomplete request for an storage proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to request state proof for.
pub block_hash: Field<H256>,
@@ -1355,25 +1201,6 @@ pub mod storage {
pub key_hash: Field<H256>,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
address_hash: rlp.val_at(1)?,
key_hash: rlp.val_at(2)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3)
.append(&self.block_hash)
.append(&self.address_hash)
.append(&self.key_hash);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -1450,7 +1277,7 @@ pub mod storage {
}

/// The output of a request for an account state proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Response {
/// Inclusion/exclusion proof
pub proof: Vec<Bytes>,
@@ -1464,33 +1291,15 @@ pub mod storage {
f(0, Output::Hash(self.value));
}
}

impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
proof: rlp.list_at(0)?,
value: rlp.val_at(1)?,
})
}
}

impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2)
.append_list::<Vec<u8>,_>(&self.proof[..])
.append(&self.value);
}
}
}

/// A request for contract code.
pub mod contract_code {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::{Bytes, H256};

/// Potentially incomplete contract code request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// The block hash to request the state for.
pub block_hash: Field<H256>,
@@ -1498,23 +1307,6 @@ pub mod contract_code {
pub code_hash: Field<H256>,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
code_hash: rlp.val_at(1)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2)
.append(&self.block_hash)
.append(&self.code_hash);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -1573,7 +1365,7 @@ pub mod contract_code {
}

/// The output of a request for
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct Response {
/// The requested code.
pub code: Bytes,
@@ -1583,21 +1375,6 @@ pub mod contract_code {
/// Fill reusable outputs by providing them to the function.
fn fill_outputs<F>(&self, _: F) where F: FnMut(usize, Output) {}
}

impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {

Ok(Response {
code: rlp.as_val()?,
})
}
}

impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.code);
}
}
}

/// A request for proof of execution.
@@ -1608,7 +1385,7 @@ pub mod execution {
use util::{Bytes, Address, U256, H256, DBValue};

/// Potentially incomplete execution proof request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// The block hash to request the state for.
pub block_hash: Field<H256>,
@@ -1626,38 +1403,6 @@ pub mod execution {
pub data: Bytes,
}

impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
from: rlp.val_at(1)?,
action: rlp.val_at(2)?,
gas: rlp.val_at(3)?,
gas_price: rlp.val_at(4)?,
value: rlp.val_at(5)?,
data: rlp.val_at(6)?,
})
}
}

impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(7)
.append(&self.block_hash)
.append(&self.from);

match self.action {
Action::Create => s.append_empty_data(),
Action::Call(ref addr) => s.append(addr),
};

s.append(&self.gas)
.append(&self.gas_price)
.append(&self.value)
.append(&self.data);
}
}

impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@@ -1 +1 @@
Subproject commit 330f748b1eece451f460224b48d515489dd86f5c
Subproject commit 85e76c5ea2a54c6c54e35014643b5080a50460c5
@@ -19,6 +19,7 @@
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use std::mem;
use itertools::Itertools;
use bloomchain as bc;
use util::*;
use rlp::*;
@@ -25,11 +25,9 @@ use engines::epoch::{Transition as EpochTransition};
use header::BlockNumber;
use receipt::Receipt;

use rlp::*;
use util::*;
use util::{HeapSizeOf, H256, H264, U256};
use util::kvdb::PREFIX_LEN as DB_PREFIX_LEN;


/// Represents index of extra data in database
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
pub enum ExtrasIndex {
@@ -184,7 +182,7 @@ impl Key<EpochTransitions> for u64 {
}

/// Familial details concerning a block
#[derive(Debug, Clone)]
#[derive(Debug, Clone, RlpEncodable, RlpDecodable)]
pub struct BlockDetails {
/// Block number
pub number: BlockNumber,
@@ -202,30 +200,8 @@ impl HeapSizeOf for BlockDetails {
}
}

impl Decodable for BlockDetails {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let details = BlockDetails {
number: rlp.val_at(0)?,
total_difficulty: rlp.val_at(1)?,
parent: rlp.val_at(2)?,
children: rlp.list_at(3)?,
};
Ok(details)
}
}

impl Encodable for BlockDetails {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.number);
s.append(&self.total_difficulty);
s.append(&self.parent);
s.append_list(&self.children);
}
}

/// Represents address of certain transaction within block
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable)]
pub struct TransactionAddress {
/// Block hash
pub block_hash: H256,
@@ -237,27 +213,8 @@ impl HeapSizeOf for TransactionAddress {
fn heap_size_of_children(&self) -> usize { 0 }
}

impl Decodable for TransactionAddress {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let tx_address = TransactionAddress {
block_hash: rlp.val_at(0)?,
index: rlp.val_at(1)?,
};

Ok(tx_address)
}
}

impl Encodable for TransactionAddress {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2);
s.append(&self.block_hash);
s.append(&self.index);
}
}

/// Contains all block receipts.
#[derive(Clone)]
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct BlockReceipts {
pub receipts: Vec<Receipt>,
}
@@ -270,20 +227,6 @@ impl BlockReceipts {
}
}

impl Decodable for BlockReceipts {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(BlockReceipts {
receipts: rlp.as_list()?,
})
}
}

impl Encodable for BlockReceipts {
fn rlp_append(&self, s: &mut RlpStream) {
s.append_list(&self.receipts);
}
}

impl HeapSizeOf for BlockReceipts {
fn heap_size_of_children(&self) -> usize {
self.receipts.heap_size_of_children()
@@ -291,27 +234,12 @@ impl HeapSizeOf for BlockReceipts {
}

/// Candidate transitions to an epoch with specific number.
#[derive(Clone)]
#[derive(Clone, RlpEncodable, RlpDecodable)]
pub struct EpochTransitions {
pub number: u64,
pub candidates: Vec<EpochTransition>,
}

impl Encodable for EpochTransitions {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2).append(&self.number).append_list(&self.candidates);
}
}

impl Decodable for EpochTransitions {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(EpochTransitions {
number: rlp.val_at(0)?,
candidates: rlp.list_at(1)?,
})
}
}

#[cfg(test)]
mod tests {
use rlp::*;
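Two derive flavours appear in this file: RlpEncodable/RlpDecodable encode a struct as an RLP list of its fields, while the *Wrapper variants are meant for newtypes whose RLP form is simply the inner value, matching the removed append_list/as_list impls for BlockReceipts. A sketch of the distinction, with the expansion described in comments as an assumption:

// List form: a 2-item RLP list (field order = declaration order).
#[derive(RlpEncodable, RlpDecodable)]
struct Pair {
    number: u64,
    hash: H256, // util H256 type, assumed in scope
}

// Wrapper form: encoded exactly as the inner Vec<Receipt> would be,
// i.e. s.append_list(&self.0) / rlp.as_list(), with no extra nesting.
#[derive(RlpEncodableWrapper, RlpDecodableWrapper)]
struct BlockReceipts(Vec<Receipt>);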
@@ -15,12 +15,11 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use bloomchain as bc;
use rlp::*;
use util::HeapSizeOf;
use basic_types::LogBloom;

/// Helper structure representing bloom of the trace.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct Bloom(LogBloom);

impl From<LogBloom> for Bloom {
@@ -43,18 +42,6 @@ impl Into<bc::Bloom> for Bloom {
}
}

impl Decodable for Bloom {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
LogBloom::decode(rlp).map(Bloom)
}
}

impl Encodable for Bloom {
fn rlp_append(&self, s: &mut RlpStream) {
Encodable::rlp_append(&self.0, s)
}
}

impl HeapSizeOf for Bloom {
fn heap_size_of_children(&self) -> usize {
0
@@ -20,9 +20,10 @@ use std::sync::{Arc, Weak};
use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering as AtomicOrdering};
use std::time::{Instant};
use time::precise_time_ns;
use itertools::Itertools;

// util
use util::{Bytes, PerfTimer, Itertools, Mutex, RwLock, MutexGuard, Hashable};
use util::{Bytes, PerfTimer, Mutex, RwLock, MutexGuard, Hashable};
use util::{journaldb, DBValue, TrieFactory, Trie};
use util::{U256, H256, Address, H2048};
use util::trie::TrieSpec;
@@ -1719,8 +1720,8 @@ impl BlockChainClient for Client {
}
}

fn signing_network_id(&self) -> Option<u64> {
self.engine.signing_network_id(&self.latest_env_info())
fn signing_chain_id(&self) -> Option<u64> {
self.engine.signing_chain_id(&self.latest_env_info())
}

fn block_extra_info(&self, id: BlockId) -> Option<BTreeMap<String, String>> {
@@ -1759,9 +1760,9 @@ impl BlockChainClient for Client {
value: U256::zero(),
data: data,
};
let network_id = self.engine.signing_network_id(&self.latest_env_info());
let signature = self.engine.sign(transaction.hash(network_id))?;
let signed = SignedTransaction::new(transaction.with_signature(signature, network_id))?;
let chain_id = self.engine.signing_chain_id(&self.latest_env_info());
let signature = self.engine.sign(transaction.hash(chain_id))?;
let signed = SignedTransaction::new(transaction.with_signature(signature, chain_id))?;
self.miner.import_own_transaction(self, signed.into())
}
@@ -20,6 +20,7 @@ use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrder};
use std::sync::Arc;
use std::collections::{HashMap, BTreeMap};
use std::mem;
use itertools::Itertools;
use rustc_hex::FromHex;
use util::*;
use rlp::*;
@@ -733,7 +734,7 @@ impl BlockChainClient for TestBlockChainClient {
self.miner.ready_transactions(info.best_block_number, info.best_block_timestamp)
}

fn signing_network_id(&self) -> Option<u64> { None }
fn signing_chain_id(&self) -> Option<u64> { None }

fn mode(&self) -> Mode { Mode::Active }

@@ -764,9 +765,9 @@ impl BlockChainClient for TestBlockChainClient {
value: U256::default(),
data: data,
};
let network_id = Some(self.spec.params().network_id);
let sig = self.spec.engine.sign(transaction.hash(network_id)).unwrap();
let signed = SignedTransaction::new(transaction.with_signature(sig, network_id)).unwrap();
let chain_id = Some(self.spec.chain_id());
let sig = self.spec.engine.sign(transaction.hash(chain_id)).unwrap();
let signed = SignedTransaction::new(transaction.with_signature(sig, chain_id)).unwrap();
self.miner.import_own_transaction(self, signed.into())
}
@@ -15,6 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use std::collections::BTreeMap;
use itertools::Itertools;

use block::{OpenBlock, SealedBlock, ClosedBlock};
use blockchain::TreeRoute;
@@ -33,7 +34,7 @@ use trace::LocalizedTrace;
use transaction::{LocalizedTransaction, PendingTransaction, SignedTransaction};
use verification::queue::QueueInfo as BlockQueueInfo;

use util::{U256, Address, H256, H2048, Bytes, Itertools};
use util::{U256, Address, H256, H2048, Bytes};
use util::hashdb::DBValue;

use types::ids::*;
@@ -239,8 +240,8 @@ pub trait BlockChainClient : Sync + Send {
corpus.into()
}

/// Get the preferred network ID to sign on
fn signing_network_id(&self) -> Option<u64>;
/// Get the preferred chain ID to sign on
fn signing_chain_id(&self) -> Option<u64>;

/// Get the mode.
fn mode(&self) -> Mode;
@@ -804,9 +804,9 @@ impl Engine for AuthorityRound {
fn verify_transaction_basic(&self, t: &UnverifiedTransaction, header: &Header) -> Result<(), Error> {
t.check_low_s()?;

if let Some(n) = t.network_id() {
if let Some(n) = t.chain_id() {
if header.number() >= self.params().eip155_transition && n != self.params().chain_id {
return Err(TransactionError::InvalidNetworkId.into());
return Err(TransactionError::InvalidChainId.into());
}
}
@@ -16,14 +16,12 @@

//! Epoch verifiers and transitions.

use util::H256;
use error::Error;
use header::Header;

use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::H256;

/// A full epoch transition.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, RlpEncodable, RlpDecodable)]
pub struct Transition {
/// Block hash at which the transition occurred.
pub block_hash: H256,
@@ -33,46 +31,14 @@ pub struct Transition {
pub proof: Vec<u8>,
}

impl Encodable for Transition {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3)
.append(&self.block_hash)
.append(&self.block_number)
.append(&self.proof);
}
}

impl Decodable for Transition {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Transition {
block_hash: rlp.val_at(0)?,
block_number: rlp.val_at(1)?,
proof: rlp.val_at(2)?,
})
}
}

/// An epoch transition pending a finality proof.
/// Not all transitions need one.
#[derive(RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct PendingTransition {
/// "transition/epoch" proof from the engine.
pub proof: Vec<u8>,
}

impl Encodable for PendingTransition {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.proof);
}
}

impl Decodable for PendingTransition {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(PendingTransition {
proof: rlp.as_val()?,
})
}
}

/// Verifier for all blocks within an epoch with self-contained state.
///
/// See docs on `Engine` relating to proving functions for more details.
@@ -263,7 +263,7 @@ pub trait Engine : Sync + Send {
// TODO: Add flags for which bits of the transaction to check.
// TODO: consider including State in the params.
fn verify_transaction_basic(&self, t: &UnverifiedTransaction, _header: &Header) -> Result<(), Error> {
t.verify_basic(true, Some(self.params().network_id), true)?;
t.verify_basic(true, Some(self.params().chain_id), true)?;
Ok(())
}

@@ -273,7 +273,7 @@ pub trait Engine : Sync + Send {
}

/// The network ID that transactions should be signed with.
fn signing_network_id(&self, _env_info: &EnvInfo) -> Option<u64> {
fn signing_chain_id(&self, _env_info: &EnvInfo) -> Option<u64> {
Some(self.params().chain_id)
}
@@ -62,6 +62,6 @@ impl Engine for NullEngine {
}

fn snapshot_components(&self) -> Option<Box<::snapshot::SnapshotComponents>> {
Some(Box::new(::snapshot::PowSnapshot(10000)))
Some(Box::new(::snapshot::PowSnapshot::new(10000, 10000)))
}
}
@@ -452,7 +452,7 @@ mod tests {
let s0: Secret = "1".sha3().into();
let v0 = tap.insert_account(s0.clone(), "").unwrap();
let v1 = tap.insert_account("0".sha3().into(), "").unwrap();
let network_id = Spec::new_validator_safe_contract().network_id();
let chain_id = Spec::new_validator_safe_contract().chain_id();
let client = generate_dummy_client_with_spec_and_accounts(Spec::new_validator_safe_contract, Some(tap));
client.engine().register_client(Arc::downgrade(&client));
let validator_contract = "0000000000000000000000000000000000000005".parse::<Address>().unwrap();
@@ -466,7 +466,7 @@ mod tests {
action: Action::Call(validator_contract),
value: 0.into(),
data: "bfc708a000000000000000000000000082a978b3f5962a5b0957d9ee9eef472ee55b42f1".from_hex().unwrap(),
}.sign(&s0, Some(network_id));
}.sign(&s0, Some(chain_id));
client.miner().import_own_transaction(client.as_ref(), tx.into()).unwrap();
client.update_sealing();
assert_eq!(client.chain_info().best_block_number, 1);
@@ -478,7 +478,7 @@ mod tests {
action: Action::Call(validator_contract),
value: 0.into(),
data: "4d238c8e00000000000000000000000082a978b3f5962a5b0957d9ee9eef472ee55b42f1".from_hex().unwrap(),
}.sign(&s0, Some(network_id));
}.sign(&s0, Some(chain_id));
client.miner().import_own_transaction(client.as_ref(), tx.into()).unwrap();
client.update_sealing();
// The transaction is not yet included so still unable to seal.
@@ -497,7 +497,7 @@ mod tests {
action: Action::Call(Address::default()),
value: 0.into(),
data: Vec::new(),
}.sign(&s0, Some(network_id));
}.sign(&s0, Some(chain_id));
client.miner().import_own_transaction(client.as_ref(), tx.into()).unwrap();
client.update_sealing();
// Able to seal again.
@@ -78,8 +78,8 @@ pub enum TransactionError {
RecipientBanned,
/// Contract creation code is banned.
CodeBanned,
/// Invalid network ID given.
InvalidNetworkId,
/// Invalid chain ID given.
InvalidChainId,
}

impl fmt::Display for TransactionError {
@@ -103,7 +103,7 @@ impl fmt::Display for TransactionError {
SenderBanned => "Sender is temporarily banned.".into(),
RecipientBanned => "Recipient is temporarily banned.".into(),
CodeBanned => "Contract code is temporarily banned.".into(),
InvalidNetworkId => "Transaction of this network ID is not allowed on this chain.".into(),
InvalidChainId => "Transaction of this chain ID is not allowed on this chain.".into(),
};

f.write_fmt(format_args!("Transaction error ({})", msg))
@@ -39,7 +39,10 @@ pub const PARITY_GAS_LIMIT_DETERMINANT: U256 = U256([37, 0, 0, 0]);

/// Number of blocks in an ethash snapshot.
// make dependent on difficulty incrment divisor?
const SNAPSHOT_BLOCKS: u64 = 30000;
const SNAPSHOT_BLOCKS: u64 = 5000;
/// Maximum number of blocks allowed in an ethash snapshot.
const MAX_SNAPSHOT_BLOCKS: u64 = 30000;


/// Ethash params.
#[derive(Debug, PartialEq)]
@@ -184,7 +187,14 @@ impl Engine for Arc<Ethash> {

/// Additional engine-specific information for the user/developer concerning `header`.
fn extra_info(&self, header: &Header) -> BTreeMap<String, String> {
map!["nonce".to_owned() => format!("0x{}", header.nonce().hex()), "mixHash".to_owned() => format!("0x{}", header.mix_hash().hex())]
if header.seal().len() == self.seal_fields() {
map![
"nonce".to_owned() => format!("0x{}", header.nonce().hex()),
"mixHash".to_owned() => format!("0x{}", header.mix_hash().hex())
]
} else {
BTreeMap::default()
}
}

fn schedule(&self, block_number: BlockNumber) -> Schedule {
@@ -206,7 +216,7 @@ impl Engine for Arc<Ethash> {
}
}

fn signing_network_id(&self, env_info: &EnvInfo) -> Option<u64> {
fn signing_chain_id(&self, env_info: &EnvInfo) -> Option<u64> {
if env_info.number >= self.params().eip155_transition {
Some(self.params().chain_id)
} else {
@@ -397,8 +407,8 @@ impl Engine for Arc<Ethash> {
}

let check_low_s = header.number() >= self.ethash_params.homestead_transition;
let network_id = if header.number() >= self.params().eip155_transition { Some(self.params().chain_id) } else { None };
t.verify_basic(check_low_s, network_id, false)?;
let chain_id = if header.number() >= self.params().eip155_transition { Some(self.params().chain_id) } else { None };
t.verify_basic(check_low_s, chain_id, false)?;
Ok(())
}

@@ -407,7 +417,7 @@ impl Engine for Arc<Ethash> {
}

fn snapshot_components(&self) -> Option<Box<::snapshot::SnapshotComponents>> {
Some(Box::new(::snapshot::PowSnapshot(SNAPSHOT_BLOCKS)))
Some(Box::new(::snapshot::PowSnapshot::new(SNAPSHOT_BLOCKS, MAX_SNAPSHOT_BLOCKS)))
}
}
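The extra_info change guards against headers whose seal is not yet fully populated; previously header.nonce() and header.mix_hash() were read unconditionally. A condensed sketch of the new behaviour, using the codebase's map! macro and Header type as given in the diff:

// Only report nonce/mixHash when the seal has the expected number of fields,
// otherwise return an empty map instead of indexing a short seal.
fn extra_info(header: &Header, expected_seal_fields: usize) -> BTreeMap<String, String> {
    if header.seal().len() == expected_seal_fields {
        map![
            "nonce".to_owned() => format!("0x{}", header.nonce().hex()),
            "mixHash".to_owned() => format!("0x{}", header.mix_hash().hex())
        ]
    } else {
        BTreeMap::default()
    }
}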
@@ -36,25 +36,25 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
Some(x) if x < 3_000_000 => &homestead_schedule,
Some(_) => &metropolis_schedule
};
let allow_network_id_of_one = number.map_or(false, |n| n >= 2_675_000);
let allow_chain_id_of_one = number.map_or(false, |n| n >= 2_675_000);
let allow_unsigned = number.map_or(false, |n| n >= 3_000_000);

let rlp: Vec<u8> = test.rlp.into();
let res = UntrustedRlp::new(&rlp)
.as_val()
.map_err(From::from)
.and_then(|t: UnverifiedTransaction| t.validate(schedule, schedule.have_delegate_call, allow_network_id_of_one, allow_unsigned));
.and_then(|t: UnverifiedTransaction| t.validate(schedule, schedule.have_delegate_call, allow_chain_id_of_one, allow_unsigned));

fail_unless(test.transaction.is_none() == res.is_err(), "Validity different");
if let (Some(tx), Some(sender)) = (test.transaction, test.sender) {
let t = res.unwrap();
fail_unless(SignedTransaction::new(t.clone()).unwrap().sender() == sender.into(), "sender mismatch");
let is_acceptable_network_id = match t.network_id() {
let is_acceptable_chain_id = match t.chain_id() {
None => true,
Some(1) if allow_network_id_of_one => true,
Some(1) if allow_chain_id_of_one => true,
_ => false,
};
fail_unless(is_acceptable_network_id, "Network ID unacceptable");
fail_unless(is_acceptable_chain_id, "Network ID unacceptable");
let data: Vec<u8> = tx.data.into();
fail_unless(t.data == data, "data mismatch");
fail_unless(t.gas_price == tx.gas_price.into(), "gas_price mismatch");
@@ -101,6 +101,9 @@ extern crate num;
extern crate price_info;
extern crate rand;
extern crate rlp;

#[macro_use]
extern crate rlp_derive;
extern crate rustc_hex;
extern crate semver;
extern crate stats;
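Because rlp_derive is a separate procedural-macro crate, each consumer crate imports its derives the pre-2018-edition way, as this hunk and the earlier light-client one do:

#[macro_use]
extern crate rlp_derive;
// After this, #[derive(RlpEncodable, RlpDecodable, ...)] is usable crate-wide.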
@@ -1306,10 +1306,10 @@ mod tests {
}

fn transaction() -> SignedTransaction {
transaction_with_network_id(2)
transaction_with_chain_id(2)
}

fn transaction_with_network_id(id: u64) -> SignedTransaction {
fn transaction_with_chain_id(chain_id: u64) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: Action::Create,
@@ -1318,7 +1318,7 @@ mod tests {
gas: U256::from(100_000),
gas_price: U256::zero(),
nonce: U256::zero(),
}.sign(keypair.secret(), Some(id))
}.sign(keypair.secret(), Some(chain_id))
}

#[test]
@@ -1399,14 +1399,14 @@ mod tests {

let client = generate_dummy_client(2);

assert_eq!(miner.import_external_transactions(&*client, vec![transaction_with_network_id(spec.network_id()).into()]).pop().unwrap().unwrap(), TransactionImportResult::Current);
assert_eq!(miner.import_external_transactions(&*client, vec![transaction_with_chain_id(spec.chain_id()).into()]).pop().unwrap().unwrap(), TransactionImportResult::Current);

miner.update_sealing(&*client);
client.flush_queue();
assert!(miner.pending_block(0).is_none());
assert_eq!(client.chain_info().best_block_number, 3 as BlockNumber);

assert_eq!(miner.import_own_transaction(&*client, PendingTransaction::new(transaction_with_network_id(spec.network_id()).into(), None)).unwrap(), TransactionImportResult::Current);
assert_eq!(miner.import_own_transaction(&*client, PendingTransaction::new(transaction_with_chain_id(spec.chain_id()).into(), None)).unwrap(), TransactionImportResult::Current);

miner.update_sealing(&*client);
client.flush_queue();
@@ -16,6 +16,7 @@

use std::fmt;
use std::collections::BTreeMap;
use itertools::Itertools;
use util::*;
use state::Account;
use ethjson;
@@ -18,6 +18,7 @@

use std::fmt;
use std::collections::BTreeMap;
use itertools::Itertools;
use util::*;
use pod_account::{self, PodAccount};
use types::state_diff::StateDiff;
@@ -37,11 +37,24 @@ use rand::OsRng;
/// Snapshot creation and restoration for PoW chains.
/// This includes blocks from the head of the chain as a
/// loose assurance that the chain is valid.
///
/// The field is the number of blocks from the head of the chain
/// to include in the snapshot.
#[derive(Clone, Copy, PartialEq)]
pub struct PowSnapshot(pub u64);
pub struct PowSnapshot {
/// Number of blocks from the head of the chain
/// to include in the snapshot.
pub blocks: u64,
/// Number of to allow in the snapshot when restoring.
pub max_restore_blocks: u64,
}

impl PowSnapshot {
/// Create a new instance.
pub fn new(blocks: u64, max_restore_blocks: u64) -> PowSnapshot {
PowSnapshot {
blocks: blocks,
max_restore_blocks: max_restore_blocks,
}
}
}

impl SnapshotComponents for PowSnapshot {
fn chunk_all(
@@ -57,7 +70,7 @@ impl SnapshotComponents for PowSnapshot {
current_hash: block_at,
writer: chunk_sink,
preferred_size: preferred_size,
}.chunk_all(self.0)
}.chunk_all(self.blocks)
}

fn rebuilder(
@@ -66,7 +79,7 @@ impl SnapshotComponents for PowSnapshot {
db: Arc<KeyValueDB>,
manifest: &ManifestData,
) -> Result<Box<Rebuilder>, ::error::Error> {
PowRebuilder::new(chain, db, manifest, self.0).map(|r| Box::new(r) as Box<_>)
PowRebuilder::new(chain, db, manifest, self.max_restore_blocks).map(|r| Box::new(r) as Box<_>)
}

fn min_supported_version(&self) -> u64 { ::snapshot::MIN_SUPPORTED_STATE_CHUNK_VERSION }
@@ -218,7 +231,7 @@ impl Rebuilder for PowRebuilder {
trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3);

if self.fed_blocks + num_blocks > self.snapshot_blocks {
return Err(Error::TooManyBlocks(self.snapshot_blocks, self.fed_blocks).into())
return Err(Error::TooManyBlocks(self.snapshot_blocks, self.fed_blocks + num_blocks).into())
}

// todo: assert here that these values are consistent with chunks being in order.
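The TooManyBlocks change reports the total that would actually be fed (fed_blocks + num_blocks) instead of the running count alone, so the error message reflects the attempted overshoot. A small self-contained sketch of that bound check, under the assumption that a chunk is rejected wholesale when it would push the total past the limit:

// Reject a chunk if accepting it would exceed the restore limit.
fn check_chunk(fed_blocks: u64, num_blocks: u64, max_blocks: u64) -> Result<u64, String> {
    if fed_blocks + num_blocks > max_blocks {
        // Report the total that was attempted, not just what was already fed.
        return Err(format!("too many blocks: {} allowed, {} attempted",
            max_blocks, fed_blocks + num_blocks));
    }
    Ok(fed_blocks + num_blocks)
}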
@@ -27,7 +27,7 @@ use std::path::{Path, PathBuf};

use util::Bytes;
use util::hash::H256;
use rlp::{self, Encodable, RlpStream, UntrustedRlp};
use rlp::{RlpStream, UntrustedRlp};

use super::ManifestData;

@@ -49,24 +49,9 @@ pub trait SnapshotWriter {
}

// (hash, len, offset)
#[derive(RlpEncodable, RlpDecodable)]
struct ChunkInfo(H256, u64, u64);

impl Encodable for ChunkInfo {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.0).append(&self.1).append(&self.2);
}
}

impl rlp::Decodable for ChunkInfo {
fn decode(rlp: &UntrustedRlp) -> Result<Self, rlp::DecoderError> {
let hash = rlp.val_at(0)?;
let len = rlp.val_at(1)?;
let off = rlp.val_at(2)?;
Ok(ChunkInfo(hash, len, off))
}
}

/// A packed snapshot writer. This writes snapshots to a single concatenated file.
///
/// The file format is very simple and consists of three parts:
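The derives also cover tuple structs such as ChunkInfo, so the positional val_at(0)/val_at(1)/val_at(2) decoder removed above can be generated. A one-line sketch, assuming fields .0, .1, .2 become a three-item RLP list in order:

// (hash, len, offset) — encoded as a 3-item RLP list, same shape as the removed impls.
#[derive(RlpEncodable, RlpDecodable)]
struct ChunkInfo(H256, u64, u64);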
@@ -130,7 +130,7 @@ fn make_chain(accounts: Arc<AccountProvider>, blocks_beyond: usize, transitions:
action: Action::Call(Address::new()),
value: 1.into(),
data: Vec::new(),
}.sign(&*RICH_SECRET, client.signing_network_id());
}.sign(&*RICH_SECRET, client.signing_chain_id());

*nonce = *nonce + 1.into();
vec![transaction]
@@ -176,7 +176,7 @@ fn make_chain(accounts: Arc<AccountProvider>, blocks_beyond: usize, transitions:
action: Action::Call(addr),
value: 0.into(),
data: data,
}.sign(&*RICH_SECRET, client.signing_network_id());
}.sign(&*RICH_SECRET, client.signing_chain_id());

pending.push(transaction);
@@ -30,7 +30,7 @@ use util::kvdb::{self, KeyValueDB, DBTransaction};
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

const SNAPSHOT_MODE: ::snapshot::PowSnapshot = ::snapshot::PowSnapshot(30000);
const SNAPSHOT_MODE: ::snapshot::PowSnapshot = ::snapshot::PowSnapshot { blocks: 30000, max_restore_blocks: 30000 };

fn chunk_and_restore(amount: u64) {
let mut canon_chain = ChainGenerator::default();
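Note that this test constant is rebuilt with a struct literal rather than PowSnapshot::new; in a const item the literal form works, whereas an ordinary (non-const) constructor call would not, so both construction styles remain in use. A sketch of the two forms side by side, assuming PowSnapshot is in scope:

// Struct literal form, usable in a `const` item:
const SNAPSHOT_MODE: PowSnapshot = PowSnapshot { blocks: 30000, max_restore_blocks: 30000 };

fn runtime_mode() -> PowSnapshot {
    // Outside const contexts the constructor is the more convenient form.
    PowSnapshot::new(30000, 30000)
}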
@@ -380,6 +380,9 @@ impl Spec {
/// Get the configured Network ID.
pub fn network_id(&self) -> u64 { self.params().network_id }

/// Get the chain ID used for signing.
pub fn chain_id(&self) -> u64 { self.params().chain_id }

/// Get the configured subprotocol name.
pub fn subprotocol_name(&self) -> String { self.params().subprotocol_name.clone() }
@ -211,7 +211,7 @@ pub fn generate_dummy_client_with_spec_accounts_and_data<F>(get_test_spec: F, ac
|
||||
action: Action::Create,
|
||||
data: vec![],
|
||||
value: U256::zero(),
|
||||
}.sign(kp.secret(), Some(test_spec.network_id())), None).unwrap();
|
||||
}.sign(kp.secret(), Some(test_spec.chain_id())), None).unwrap();
|
||||
n += 1;
|
||||
}
|
||||
|
||||
|
@ -1,10 +1,9 @@
|
||||
use bloomchain::Bloom;
|
||||
use bloomchain::group::{BloomGroup, GroupPosition};
|
||||
use rlp::*;
|
||||
use basic_types::LogBloom;
|
||||
|
||||
/// Helper structure representing bloom of the trace.
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
|
||||
pub struct BlockTracesBloom(LogBloom);
|
||||
|
||||
impl From<LogBloom> for BlockTracesBloom {
|
||||
@ -28,7 +27,7 @@ impl Into<Bloom> for BlockTracesBloom {
|
||||
}
|
||||
|
||||
/// Represents group of X consecutive blooms.
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
|
||||
pub struct BlockTracesBloomGroup {
|
||||
blooms: Vec<BlockTracesBloom>,
|
||||
}
|
||||
@ -59,34 +58,6 @@ impl Into<BloomGroup> for BlockTracesBloomGroup {
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for BlockTracesBloom {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
LogBloom::decode(rlp).map(BlockTracesBloom)
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for BlockTracesBloom {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
Encodable::rlp_append(&self.0, s)
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for BlockTracesBloomGroup {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let blooms = rlp.as_list()?;
|
||||
let group = BlockTracesBloomGroup {
|
||||
blooms: blooms
|
||||
};
|
||||
Ok(group)
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for BlockTracesBloomGroup {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.append_list(&self.blooms);
|
||||
}
|
||||
}
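A minimal sketch of the wrapper-derive convention relied on above (an assumption based on the names and the removed impls: the *Wrapper variants encode a newtype as its single inner value, adding no extra list nesting).

    #[macro_use]
    extern crate rlp_derive;
    extern crate rlp;

    use rlp::RlpStream;

    #[derive(RlpEncodableWrapper, RlpDecodableWrapper, PartialEq, Debug)]
    struct Wrapped(u64);

    fn main() {
        let mut stream = RlpStream::new();
        stream.append(&Wrapped(42));
        let bytes = stream.out();

        // The same bytes decode either as the wrapper or as the bare inner value,
        // because the wrapper derives forward straight to the inner field.
        let as_wrapper: Wrapped = rlp::decode(&bytes);
        let as_inner: u64 = rlp::decode(&bytes);
        assert_eq!(as_wrapper, Wrapped(42));
        assert_eq!(as_inner, 42);
    }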
|
||||
|
||||
/// Represents `BloomGroup` position in database.
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||
pub struct TraceGroupPosition {
|
||||
|
@ -77,7 +77,7 @@ impl Decodable for FlatTrace {
|
||||
}
|
||||
|
||||
/// Represents all traces produced by a single transaction.
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
#[derive(Debug, PartialEq, Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
|
||||
pub struct FlatTransactionTraces(Vec<FlatTrace>);
|
||||
|
||||
impl From<Vec<FlatTrace>> for FlatTransactionTraces {
|
||||
@ -99,18 +99,6 @@ impl FlatTransactionTraces {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for FlatTransactionTraces {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.append_list(&self.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for FlatTransactionTraces {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
Ok(FlatTransactionTraces(rlp.as_list()?))
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
|
||||
fn into(self) -> Vec<FlatTrace> {
|
||||
self.0
|
||||
@ -118,7 +106,7 @@ impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
|
||||
}
|
||||
|
||||
/// Represents all traces produced by transactions in a single block.
|
||||
#[derive(Debug, PartialEq, Clone, Default)]
|
||||
#[derive(Debug, PartialEq, Clone, Default, RlpEncodableWrapper, RlpDecodableWrapper)]
|
||||
pub struct FlatBlockTraces(Vec<FlatTransactionTraces>);
|
||||
|
||||
impl HeapSizeOf for FlatBlockTraces {
|
||||
@ -140,18 +128,6 @@ impl FlatBlockTraces {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for FlatBlockTraces {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.append_list(&self.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for FlatBlockTraces {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
Ok(FlatBlockTraces(rlp.as_list()?))
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Vec<FlatTransactionTraces>> for FlatBlockTraces {
|
||||
fn into(self) -> Vec<FlatTransactionTraces> {
|
||||
self.0
|
||||
|
@ -27,7 +27,7 @@ use evm::CallType;
|
||||
use super::error::Error;
|
||||
|
||||
/// `Call` result.
|
||||
#[derive(Debug, Clone, PartialEq, Default)]
|
||||
#[derive(Debug, Clone, PartialEq, Default, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
pub struct CallResult {
|
||||
/// Gas used by call.
|
||||
@ -36,27 +36,8 @@ pub struct CallResult {
|
||||
pub output: Bytes,
|
||||
}
|
||||
|
||||
impl Encodable for CallResult {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(2);
|
||||
s.append(&self.gas_used);
|
||||
s.append(&self.output);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for CallResult {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let res = CallResult {
|
||||
gas_used: rlp.val_at(0)?,
|
||||
output: rlp.val_at(1)?,
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
/// `Create` result.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
pub struct CreateResult {
|
||||
/// Gas used by create.
|
||||
@ -67,27 +48,6 @@ pub struct CreateResult {
|
||||
pub address: Address,
|
||||
}
|
||||
|
||||
impl Encodable for CreateResult {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(3);
|
||||
s.append(&self.gas_used);
|
||||
s.append(&self.code);
|
||||
s.append(&self.address);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for CreateResult {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let res = CreateResult {
|
||||
gas_used: rlp.val_at(0)?,
|
||||
code: rlp.val_at(1)?,
|
||||
address: rlp.val_at(2)?,
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
impl CreateResult {
|
||||
/// Returns bloom.
|
||||
pub fn bloom(&self) -> LogBloom {
|
||||
@ -96,7 +56,7 @@ impl CreateResult {
|
||||
}
|
||||
|
||||
/// Description of a _call_ action, either a `CALL` operation or a message transaction.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
pub struct Call {
|
||||
/// The sending account.
|
||||
@ -126,33 +86,6 @@ impl From<ActionParams> for Call {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for Call {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(6);
|
||||
s.append(&self.from);
|
||||
s.append(&self.to);
|
||||
s.append(&self.value);
|
||||
s.append(&self.gas);
|
||||
s.append(&self.input);
|
||||
s.append(&self.call_type);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for Call {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let res = Call {
|
||||
from: rlp.val_at(0)?,
|
||||
to: rlp.val_at(1)?,
|
||||
value: rlp.val_at(2)?,
|
||||
gas: rlp.val_at(3)?,
|
||||
input: rlp.val_at(4)?,
|
||||
call_type: rlp.val_at(5)?,
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
impl Call {
|
||||
/// Returns call action bloom.
|
||||
/// The bloom contains from and to addresses.
|
||||
@ -163,7 +96,7 @@ impl Call {
|
||||
}
|
||||
|
||||
/// Description of a _create_ action, either a `CREATE` operation or a create transaction.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
pub struct Create {
|
||||
/// The address of the creator.
|
||||
@ -187,29 +120,6 @@ impl From<ActionParams> for Create {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for Create {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(4);
|
||||
s.append(&self.from);
|
||||
s.append(&self.value);
|
||||
s.append(&self.gas);
|
||||
s.append(&self.init);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for Create {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let res = Create {
|
||||
from: rlp.val_at(0)?,
|
||||
value: rlp.val_at(1)?,
|
||||
gas: rlp.val_at(2)?,
|
||||
init: rlp.val_at(3)?,
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
impl Create {
|
||||
/// Returns the create action bloom.
|
||||
/// The bloom contains only the from address.
|
||||
@ -219,7 +129,7 @@ impl Create {
|
||||
}
|
||||
|
||||
/// Suicide action.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
pub struct Suicide {
|
||||
/// Suicided address.
|
||||
@ -238,28 +148,6 @@ impl Suicide {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for Suicide {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(3);
|
||||
s.append(&self.address);
|
||||
s.append(&self.refund_address);
|
||||
s.append(&self.balance);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for Suicide {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let res = Suicide {
|
||||
address: rlp.val_at(0)?,
|
||||
refund_address: rlp.val_at(1)?,
|
||||
balance: rlp.val_at(2)?,
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Description of an action that we trace; will be either a call or a create.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
@ -394,7 +282,7 @@ impl Res {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
/// A diff of some chunk of memory.
|
||||
pub struct MemoryDiff {
|
||||
@ -404,24 +292,7 @@ pub struct MemoryDiff {
|
||||
pub data: Bytes,
|
||||
}
|
||||
|
||||
impl Encodable for MemoryDiff {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(2);
|
||||
s.append(&self.offset);
|
||||
s.append(&self.data);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for MemoryDiff {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
Ok(MemoryDiff {
|
||||
offset: rlp.val_at(0)?,
|
||||
data: rlp.val_at(1)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
/// A diff of some storage value.
|
||||
pub struct StorageDiff {
|
||||
@ -431,24 +302,7 @@ pub struct StorageDiff {
|
||||
pub value: U256,
|
||||
}
|
||||
|
||||
impl Encodable for StorageDiff {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(2);
|
||||
s.append(&self.location);
|
||||
s.append(&self.value);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for StorageDiff {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
Ok(StorageDiff {
|
||||
location: rlp.val_at(0)?,
|
||||
value: rlp.val_at(1)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
/// A record of an executed VM operation.
|
||||
pub struct VMExecutedOperation {
|
||||
@ -462,28 +316,7 @@ pub struct VMExecutedOperation {
|
||||
pub store_diff: Option<StorageDiff>,
|
||||
}
|
||||
|
||||
impl Encodable for VMExecutedOperation {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(4);
|
||||
s.append(&self.gas_used);
|
||||
s.append_list(&self.stack_push);
|
||||
s.append(&self.mem_diff);
|
||||
s.append(&self.store_diff);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for VMExecutedOperation {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
Ok(VMExecutedOperation {
|
||||
gas_used: rlp.val_at(0)?,
|
||||
stack_push: rlp.list_at(1)?,
|
||||
mem_diff: rlp.val_at(2)?,
|
||||
store_diff: rlp.val_at(3)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Default)]
|
||||
#[derive(Debug, Clone, PartialEq, Default, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
/// A record of the execution of a single VM operation.
|
||||
pub struct VMOperation {
|
||||
@ -497,30 +330,7 @@ pub struct VMOperation {
|
||||
pub executed: Option<VMExecutedOperation>,
|
||||
}
|
||||
|
||||
impl Encodable for VMOperation {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(4);
|
||||
s.append(&self.pc);
|
||||
s.append(&self.instruction);
|
||||
s.append(&self.gas_cost);
|
||||
s.append(&self.executed);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for VMOperation {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let res = VMOperation {
|
||||
pc: rlp.val_at(0)?,
|
||||
instruction: rlp.val_at(1)?,
|
||||
gas_cost: rlp.val_at(2)?,
|
||||
executed: rlp.val_at(3)?,
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Default)]
|
||||
#[derive(Debug, Clone, PartialEq, Default, RlpEncodable, RlpDecodable)]
|
||||
#[cfg_attr(feature = "ipc", binary)]
|
||||
/// A record of a full VM trace for a CALL/CREATE.
|
||||
pub struct VMTrace {
|
||||
@ -534,26 +344,3 @@ pub struct VMTrace {
|
||||
/// There is a 1:1 correspondence between these and a CALL/CREATE/CALLCODE/DELEGATECALL instruction.
|
||||
pub subs: Vec<VMTrace>,
|
||||
}
|
||||
|
||||
impl Encodable for VMTrace {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(4);
|
||||
s.append(&self.parent_step);
|
||||
s.append(&self.code);
|
||||
s.append_list(&self.operations);
|
||||
s.append_list(&self.subs);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for VMTrace {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let res = VMTrace {
|
||||
parent_step: rlp.val_at(0)?,
|
||||
code: rlp.val_at(1)?,
|
||||
operations: rlp.list_at(2)?,
|
||||
subs: rlp.list_at(3)?,
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
@ -56,6 +56,15 @@ impl Decodable for Action {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for Action {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
match *self {
|
||||
Action::Create => s.append_internal(&""),
|
||||
Action::Call(ref addr) => s.append_internal(addr),
|
||||
};
|
||||
}
|
||||
}
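A toy sketch of the encoding rule the impl above relies on (raw RLP bytes written by hand; the real code goes through RlpStream): a contract creation is represented by an empty string item, while a call carries the 20-byte destination address.

    fn encode_action(to: Option<[u8; 20]>) -> Vec<u8> {
        match to {
            // Action::Create: the empty string, RLP byte 0x80.
            None => vec![0x80],
            // Action::Call(addr): a 20-byte string, prefix 0x80 + 20 = 0x94.
            Some(addr) => {
                let mut out = vec![0x94];
                out.extend_from_slice(&addr);
                out
            }
        }
    }

    fn main() {
        assert_eq!(encode_action(None), vec![0x80]);
        assert_eq!(encode_action(Some([0u8; 20])).len(), 21);
    }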
|
||||
|
||||
/// Transaction activation condition.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum Condition {
|
||||
@ -85,18 +94,15 @@ pub struct Transaction {
|
||||
|
||||
impl Transaction {
|
||||
/// Append an object without a signature into the RLP stream
|
||||
pub fn rlp_append_unsigned_transaction(&self, s: &mut RlpStream, network_id: Option<u64>) {
|
||||
s.begin_list(if network_id.is_none() { 6 } else { 9 });
|
||||
pub fn rlp_append_unsigned_transaction(&self, s: &mut RlpStream, chain_id: Option<u64>) {
|
||||
s.begin_list(if chain_id.is_none() { 6 } else { 9 });
|
||||
s.append(&self.nonce);
|
||||
s.append(&self.gas_price);
|
||||
s.append(&self.gas);
|
||||
match self.action {
|
||||
Action::Create => s.append_empty_data(),
|
||||
Action::Call(ref to) => s.append(to)
|
||||
};
|
||||
s.append(&self.action);
|
||||
s.append(&self.value);
|
||||
s.append(&self.data);
|
||||
if let Some(n) = network_id {
|
||||
if let Some(n) = chain_id {
|
||||
s.append(&n);
|
||||
s.append(&0u8);
|
||||
s.append(&0u8);
|
||||
@ -157,27 +163,27 @@ impl From<ethjson::transaction::Transaction> for UnverifiedTransaction {
|
||||
|
||||
impl Transaction {
|
||||
/// The message hash of the transaction.
|
||||
pub fn hash(&self, network_id: Option<u64>) -> H256 {
|
||||
pub fn hash(&self, chain_id: Option<u64>) -> H256 {
|
||||
let mut stream = RlpStream::new();
|
||||
self.rlp_append_unsigned_transaction(&mut stream, network_id);
|
||||
self.rlp_append_unsigned_transaction(&mut stream, chain_id);
|
||||
stream.as_raw().sha3()
|
||||
}
|
||||
|
||||
/// Signs the transaction as coming from `sender`.
|
||||
pub fn sign(self, secret: &Secret, network_id: Option<u64>) -> SignedTransaction {
|
||||
let sig = ::ethkey::sign(secret, &self.hash(network_id))
|
||||
pub fn sign(self, secret: &Secret, chain_id: Option<u64>) -> SignedTransaction {
|
||||
let sig = ::ethkey::sign(secret, &self.hash(chain_id))
|
||||
.expect("data is valid and context has signing capabilities; qed");
|
||||
SignedTransaction::new(self.with_signature(sig, network_id))
|
||||
SignedTransaction::new(self.with_signature(sig, chain_id))
|
||||
.expect("secret is valid so it's recoverable")
|
||||
}
|
||||
|
||||
/// Signs the transaction with signature.
|
||||
pub fn with_signature(self, sig: Signature, network_id: Option<u64>) -> UnverifiedTransaction {
|
||||
pub fn with_signature(self, sig: Signature, chain_id: Option<u64>) -> UnverifiedTransaction {
|
||||
UnverifiedTransaction {
|
||||
unsigned: self,
|
||||
r: sig.r().into(),
|
||||
s: sig.s().into(),
|
||||
v: sig.v() as u64 + if let Some(n) = network_id { 35 + n * 2 } else { 27 },
|
||||
v: sig.v() as u64 + if let Some(n) = chain_id { 35 + n * 2 } else { 27 },
|
||||
hash: 0.into(),
|
||||
}.compute_hash()
|
||||
}
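A self-contained sketch of the replay-protection arithmetic used above (hypothetical helper names; the real code carries these expressions inline): with a chain ID the unsigned payload grows from 6 to 9 items (chain_id, 0, 0 are appended) and the signature's v becomes 35 + 2 * chain_id plus the recovery bit, while legacy signatures keep 27/28.

    fn encode_v(recovery_id: u8, chain_id: Option<u64>) -> u64 {
        recovery_id as u64 + if let Some(n) = chain_id { 35 + n * 2 } else { 27 }
    }

    fn decode_chain_id(v: u64) -> Option<u64> {
        // 27/28 (and anything below the EIP-155 range) means "valid on any chain".
        if v > 36 { Some((v - 35) / 2) } else { None }
    }

    fn main() {
        assert_eq!(encode_v(0, Some(1)), 37);     // mainnet, recovery id 0
        assert_eq!(encode_v(1, Some(1)), 38);     // mainnet, recovery id 1
        assert_eq!(encode_v(1, None), 28);        // pre-EIP-155 transaction
        assert_eq!(decode_chain_id(37), Some(1));
        assert_eq!(decode_chain_id(38), Some(1));
        assert_eq!(decode_chain_id(27), None);
    }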
|
||||
@ -210,13 +216,13 @@ impl Transaction {
|
||||
}
|
||||
|
||||
/// Add EIP-86 compatible empty signature.
|
||||
pub fn null_sign(self, network_id: u64) -> SignedTransaction {
|
||||
pub fn null_sign(self, chain_id: u64) -> SignedTransaction {
|
||||
SignedTransaction {
|
||||
transaction: UnverifiedTransaction {
|
||||
unsigned: self,
|
||||
r: U256::zero(),
|
||||
s: U256::zero(),
|
||||
v: network_id,
|
||||
v: chain_id,
|
||||
hash: 0.into(),
|
||||
}.compute_hash(),
|
||||
sender: UNSIGNED_SENDER,
|
||||
@ -244,7 +250,7 @@ pub struct UnverifiedTransaction {
|
||||
/// Plain Transaction.
|
||||
unsigned: Transaction,
|
||||
/// The V field of the signature; the LS bit describes which half of the curve our point falls
|
||||
/// in. The MS bits describe which network this transaction is for. If 27/28, its for all networks.
|
||||
/// in. The MS bits describe which chain this transaction is for. If 27/28, its for all chains.
|
||||
v: u64,
|
||||
/// The R field of the signature; helps describe the point on the curve.
|
||||
r: U256,
|
||||
@ -308,10 +314,7 @@ impl UnverifiedTransaction {
|
||||
s.append(&self.nonce);
|
||||
s.append(&self.gas_price);
|
||||
s.append(&self.gas);
|
||||
match self.action {
|
||||
Action::Create => s.append_empty_data(),
|
||||
Action::Call(ref to) => s.append(to)
|
||||
};
|
||||
s.append(&self.action);
|
||||
s.append(&self.value);
|
||||
s.append(&self.data);
|
||||
s.append(&self.v);
|
||||
@ -330,8 +333,8 @@ impl UnverifiedTransaction {
|
||||
/// The `v` value that appears in the RLP.
|
||||
pub fn original_v(&self) -> u64 { self.v }
|
||||
|
||||
/// The network ID, or `None` if this is a global transaction.
|
||||
pub fn network_id(&self) -> Option<u64> {
|
||||
/// The chain ID, or `None` if this is a global transaction.
|
||||
pub fn chain_id(&self) -> Option<u64> {
|
||||
match self.v {
|
||||
v if self.is_unsigned() => Some(v),
|
||||
v if v > 36 => Some((v - 35) / 2),
|
||||
@ -360,15 +363,15 @@ impl UnverifiedTransaction {
|
||||
|
||||
/// Recovers the public key of the sender.
|
||||
pub fn recover_public(&self) -> Result<Public, Error> {
|
||||
Ok(recover(&self.signature(), &self.unsigned.hash(self.network_id()))?)
|
||||
Ok(recover(&self.signature(), &self.unsigned.hash(self.chain_id()))?)
|
||||
}
|
||||
|
||||
/// Do basic validation, checking for valid signature and minimum gas,
|
||||
// TODO: consider use in block validation.
|
||||
#[cfg(test)]
|
||||
#[cfg(feature = "json-tests")]
|
||||
pub fn validate(self, schedule: &Schedule, require_low: bool, allow_network_id_of_one: bool, allow_empty_signature: bool) -> Result<UnverifiedTransaction, Error> {
|
||||
let chain_id = if allow_network_id_of_one { Some(1) } else { None };
|
||||
pub fn validate(self, schedule: &Schedule, require_low: bool, allow_chain_id_of_one: bool, allow_empty_signature: bool) -> Result<UnverifiedTransaction, Error> {
|
||||
let chain_id = if allow_chain_id_of_one { Some(1) } else { None };
|
||||
self.verify_basic(require_low, chain_id, allow_empty_signature)?;
|
||||
if !allow_empty_signature || !self.is_unsigned() {
|
||||
self.recover_public()?;
|
||||
@ -388,10 +391,10 @@ impl UnverifiedTransaction {
|
||||
if allow_empty_signature && self.is_unsigned() && !(self.gas_price.is_zero() && self.value.is_zero() && self.nonce.is_zero()) {
|
||||
return Err(EthkeyError::InvalidSignature.into())
|
||||
}
|
||||
match (self.network_id(), chain_id) {
|
||||
match (self.chain_id(), chain_id) {
|
||||
(None, _) => {},
|
||||
(Some(n), Some(m)) if n == m => {},
|
||||
_ => return Err(TransactionError::InvalidNetworkId.into()),
|
||||
_ => return Err(TransactionError::InvalidChainId.into()),
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
@ -555,7 +558,7 @@ mod tests {
|
||||
} else { panic!(); }
|
||||
assert_eq!(t.value, U256::from(0x0au64));
|
||||
assert_eq!(public_to_address(&t.recover_public().unwrap()), "0f65fe9276bc9a24ae7083ae28e2660ef72df99e".into());
|
||||
assert_eq!(t.network_id(), None);
|
||||
assert_eq!(t.chain_id(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -572,7 +575,7 @@ mod tests {
|
||||
data: b"Hello!".to_vec()
|
||||
}.sign(&key.secret(), None);
|
||||
assert_eq!(Address::from(key.public().sha3()), t.sender());
|
||||
assert_eq!(t.network_id(), None);
|
||||
assert_eq!(t.chain_id(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -586,15 +589,15 @@ mod tests {
|
||||
data: b"Hello!".to_vec()
|
||||
}.fake_sign(Address::from(0x69));
|
||||
assert_eq!(Address::from(0x69), t.sender());
|
||||
assert_eq!(t.network_id(), None);
|
||||
assert_eq!(t.chain_id(), None);
|
||||
|
||||
let t = t.clone();
|
||||
assert_eq!(Address::from(0x69), t.sender());
|
||||
assert_eq!(t.network_id(), None);
|
||||
assert_eq!(t.chain_id(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_recover_from_network_specific_signing() {
|
||||
fn should_recover_from_chain_specific_signing() {
|
||||
use ethkey::{Random, Generator};
|
||||
let key = Random.generate().unwrap();
|
||||
let t = Transaction {
|
||||
@ -606,7 +609,7 @@ mod tests {
|
||||
data: b"Hello!".to_vec()
|
||||
}.sign(&key.secret(), Some(69));
|
||||
assert_eq!(Address::from(key.public().sha3()), t.sender());
|
||||
assert_eq!(t.network_id(), Some(69));
|
||||
assert_eq!(t.chain_id(), Some(69));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -617,7 +620,7 @@ mod tests {
|
||||
let signed = decode(&FromHex::from_hex(tx_data).unwrap());
|
||||
let signed = SignedTransaction::new(signed).unwrap();
|
||||
assert_eq!(signed.sender(), address.into());
|
||||
flushln!("networkid: {:?}", signed.network_id());
|
||||
flushln!("chainid: {:?}", signed.chain_id());
|
||||
};
|
||||
|
||||
test_vector("f864808504a817c800825208943535353535353535353535353535353535353535808025a0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116da0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d", "0xf0f6f18bca1b28cd68e4357452947e021241e9ce");
|
||||
|
@ -6,6 +6,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
||||
|
||||
[dependencies]
|
||||
rlp = { path = "../../util/rlp" }
|
||||
rlp_derive = { path = "../../util/rlp_derive" }
|
||||
ethcore-util = { path = "../../util" }
|
||||
ethjson = { path = "../../json" }
|
||||
bloomable = { path = "../../util/bloomable" }
|
||||
|
@ -16,11 +16,10 @@
|
||||
|
||||
//! Basic account type -- the decoded RLP from the state trie.
|
||||
|
||||
use rlp::*;
|
||||
use util::{U256, H256};
|
||||
|
||||
/// Basic account type.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
|
||||
pub struct BasicAccount {
|
||||
/// Nonce of the account.
|
||||
pub nonce: U256,
|
||||
@ -31,24 +30,3 @@ pub struct BasicAccount {
|
||||
/// Code hash of the account.
|
||||
pub code_hash: H256,
|
||||
}
|
||||
|
||||
impl Encodable for BasicAccount {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(4)
|
||||
.append(&self.nonce)
|
||||
.append(&self.balance)
|
||||
.append(&self.storage_root)
|
||||
.append(&self.code_hash);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for BasicAccount {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
Ok(BasicAccount {
|
||||
nonce: rlp.val_at(0)?,
|
||||
balance: rlp.val_at(1)?,
|
||||
storage_root: rlp.val_at(2)?,
|
||||
code_hash: rlp.val_at(3)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -19,6 +19,8 @@
|
||||
extern crate ethcore_util as util;
|
||||
extern crate ethjson;
|
||||
extern crate rlp;
|
||||
#[macro_use]
|
||||
extern crate rlp_derive;
|
||||
extern crate bloomable;
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -19,7 +19,6 @@
|
||||
use std::ops::Deref;
|
||||
use util::{H256, Address, Bytes, HeapSizeOf, Hashable};
|
||||
use bloomable::Bloomable;
|
||||
use rlp::*;
|
||||
|
||||
use {BlockNumber};
|
||||
use ethjson;
|
||||
@ -27,7 +26,7 @@ use ethjson;
|
||||
pub type LogBloom = ::util::H2048;
|
||||
|
||||
/// A record of execution for a `LOG` operation.
|
||||
#[derive(Default, Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Default, Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
|
||||
pub struct LogEntry {
|
||||
/// The address of the contract executing at the point of the `LOG` operation.
|
||||
pub address: Address,
|
||||
@ -37,26 +36,6 @@ pub struct LogEntry {
|
||||
pub data: Bytes,
|
||||
}
|
||||
|
||||
impl Encodable for LogEntry {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
s.begin_list(3);
|
||||
s.append(&self.address);
|
||||
s.append_list(&self.topics);
|
||||
s.append(&self.data);
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for LogEntry {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
let entry = LogEntry {
|
||||
address: rlp.val_at(0)?,
|
||||
topics: rlp.list_at(1)?,
|
||||
data: rlp.val_at(2)?,
|
||||
};
|
||||
Ok(entry)
|
||||
}
|
||||
}
|
||||
|
||||
impl HeapSizeOf for LogEntry {
|
||||
fn heap_size_of_children(&self) -> usize {
|
||||
self.topics.heap_size_of_children() + self.data.heap_size_of_children()
|
||||
|
@ -17,8 +17,9 @@
|
||||
//! Wasm env module bindings
|
||||
|
||||
use parity_wasm::elements::ValueType::*;
|
||||
use parity_wasm::interpreter::UserFunctionDescriptor;
|
||||
use parity_wasm::interpreter::{self, UserFunctionDescriptor};
|
||||
use parity_wasm::interpreter::UserFunctionDescriptor::*;
|
||||
use super::runtime::Runtime;
|
||||
|
||||
pub const SIGNATURES: &'static [UserFunctionDescriptor] = &[
|
||||
Static(
|
||||
@ -93,4 +94,17 @@ pub const SIGNATURES: &'static [UserFunctionDescriptor] = &[
|
||||
&[I32; 0],
|
||||
None
|
||||
),
|
||||
|
||||
Static(
|
||||
"_llvm_bswap_i64",
|
||||
&[I32; 2],
|
||||
Some(I32)
|
||||
),
|
||||
];
|
||||
|
||||
pub fn native_bindings<'a>(runtime: &'a mut Runtime) -> interpreter::UserFunctions<'a> {
|
||||
interpreter::UserFunctions {
|
||||
executor: runtime,
|
||||
functions: ::std::borrow::Cow::from(SIGNATURES),
|
||||
}
|
||||
}
|
@ -32,8 +32,6 @@ mod result;
|
||||
mod tests;
|
||||
mod env;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
const DEFAULT_STACK_SPACE: u32 = 5 * 1024 * 1024;
|
||||
|
||||
use parity_wasm::{interpreter, elements};
|
||||
@ -89,6 +87,7 @@ impl vm::Vm for WasmInterpreter {
|
||||
DEFAULT_STACK_SPACE,
|
||||
params.gas.low_u64(),
|
||||
RuntimeContext::new(params.address, params.sender),
|
||||
&self.program,
|
||||
);
|
||||
|
||||
let mut cursor = ::std::io::Cursor::new(&*code);
|
||||
@ -112,16 +111,8 @@ impl vm::Vm for WasmInterpreter {
|
||||
)?;
|
||||
|
||||
{
|
||||
let execution_params = interpreter::ExecutionParams::with_external(
|
||||
"env".into(),
|
||||
Arc::new(
|
||||
interpreter::env_native_module(env_instance, native_bindings(&mut runtime))
|
||||
.map_err(|err| {
|
||||
// todo: prefer explicit panic here also?
|
||||
vm::Error::Wasm(format!("Error instantiating native bindings: {:?}", err))
|
||||
})?
|
||||
)
|
||||
).add_argument(interpreter::RuntimeValue::I32(d_ptr.as_raw() as i32));
|
||||
let execution_params = runtime.execution_params()
|
||||
.add_argument(interpreter::RuntimeValue::I32(d_ptr.as_raw() as i32));
|
||||
|
||||
let module_instance = self.program.add_module("contract", contract_module, Some(&execution_params.externals))
|
||||
.map_err(|err| {
|
||||
@ -158,13 +149,6 @@ impl vm::Vm for WasmInterpreter {
|
||||
}
|
||||
}
|
||||
|
||||
fn native_bindings<'a>(runtime: &'a mut Runtime) -> interpreter::UserFunctions<'a> {
|
||||
interpreter::UserFunctions {
|
||||
executor: runtime,
|
||||
functions: ::std::borrow::Cow::from(env::SIGNATURES),
|
||||
}
|
||||
}
|
||||
|
||||
impl From<runtime::Error> for vm::Error {
|
||||
fn from(err: runtime::Error) -> vm::Error {
|
||||
vm::Error::Wasm(format!("WASM runtime-error: {:?}", err))
|
||||
|
@ -72,24 +72,26 @@ impl RuntimeContext {
|
||||
}
|
||||
|
||||
/// Runtime environment data for wasm contract execution
|
||||
pub struct Runtime<'a> {
|
||||
pub struct Runtime<'a, 'b> {
|
||||
gas_counter: u64,
|
||||
gas_limit: u64,
|
||||
dynamic_top: u32,
|
||||
ext: &'a mut vm::Ext,
|
||||
memory: Arc<interpreter::MemoryInstance>,
|
||||
context: RuntimeContext,
|
||||
instance: &'b interpreter::ProgramInstance,
|
||||
}
|
||||
|
||||
impl<'a> Runtime<'a> {
|
||||
impl<'a, 'b> Runtime<'a, 'b> {
|
||||
/// New runtime for wasm contract with specified params
|
||||
pub fn with_params<'b>(
|
||||
ext: &'b mut vm::Ext,
|
||||
pub fn with_params<'c, 'd>(
|
||||
ext: &'c mut vm::Ext,
|
||||
memory: Arc<interpreter::MemoryInstance>,
|
||||
stack_space: u32,
|
||||
gas_limit: u64,
|
||||
context: RuntimeContext,
|
||||
) -> Runtime<'b> {
|
||||
program_instance: &'d interpreter::ProgramInstance,
|
||||
) -> Runtime<'c, 'd> {
|
||||
Runtime {
|
||||
gas_counter: 0,
|
||||
gas_limit: gas_limit,
|
||||
@ -97,6 +99,7 @@ impl<'a> Runtime<'a> {
|
||||
memory: memory,
|
||||
ext: ext,
|
||||
context: context,
|
||||
instance: program_instance,
|
||||
}
|
||||
}
|
||||
|
||||
@ -449,9 +452,58 @@ impl<'a> Runtime<'a> {
|
||||
|
||||
Ok(Some(0i32.into()))
|
||||
}
|
||||
|
||||
fn bswap_32(x: u32) -> u32 {
|
||||
x >> 24 | x >> 8 & 0xff00 | x << 8 & 0xff0000 | x << 24
|
||||
}
|
||||
|
||||
fn bitswap_i64(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let x1 = context.value_stack.pop_as::<i32>()?;
|
||||
let x2 = context.value_stack.pop_as::<i32>()?;
|
||||
|
||||
let result = ((Runtime::bswap_32(x2 as u32) as u64) << 32
|
||||
| Runtime::bswap_32(x1 as u32) as u64) as i64;
|
||||
|
||||
self.return_i64(result)
|
||||
}
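A quick stand-alone check of the byte-swap logic above: bswap_32 reverses the byte order of a 32-bit word, and the 64-bit swap is emulated by swapping each half and exchanging their positions.

    fn bswap_32(x: u32) -> u32 {
        x >> 24 | x >> 8 & 0xff00 | x << 8 & 0xff0000 | x << 24
    }

    // Byte-swap a 64-bit value given as (low half, high half).
    fn bswap_64(lo: u32, hi: u32) -> u64 {
        (bswap_32(lo) as u64) << 32 | bswap_32(hi) as u64
    }

    fn main() {
        assert_eq!(bswap_32(0x11223344), 0x44332211);
        // 0x1122334455667788 byte-swapped is 0x8877665544332211.
        assert_eq!(bswap_64(0x55667788, 0x11223344), 0x8877665544332211);
    }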
|
||||
|
||||
fn return_i64(&mut self, val: i64) -> Result<Option<interpreter::RuntimeValue>, interpreter::Error> {
|
||||
let uval = val as u64;
|
||||
let hi = (uval >> 32) as i32;
|
||||
let lo = (uval << 32 >> 32) as i32;
|
||||
|
||||
let target = self.instance.module("contract")
|
||||
.ok_or(interpreter::Error::Trap("Error locating main execution entry".to_owned()))?;
|
||||
target.execute_export(
|
||||
"setTempRet0",
|
||||
self.execution_params().add_argument(
|
||||
interpreter::RuntimeValue::I32(hi).into()
|
||||
),
|
||||
)?;
|
||||
Ok(Some(
|
||||
(lo).into()
|
||||
))
|
||||
}
|
||||
|
||||
pub fn execution_params(&mut self) -> interpreter::ExecutionParams {
|
||||
use super::env;
|
||||
|
||||
let env_instance = self.instance.module("env")
|
||||
.expect("Env module always exists; qed");
|
||||
|
||||
interpreter::ExecutionParams::with_external(
|
||||
"env".into(),
|
||||
Arc::new(
|
||||
interpreter::env_native_module(env_instance, env::native_bindings(self))
|
||||
.expect("Env module always exists; qed")
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> interpreter::UserFunctionExecutor for Runtime<'a> {
|
||||
impl<'a, 'b> interpreter::UserFunctionExecutor for Runtime<'a, 'b> {
|
||||
fn execute(&mut self, name: &str, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
@ -494,6 +546,9 @@ impl<'a> interpreter::UserFunctionExecutor for Runtime<'a> {
|
||||
"_emscripten_memcpy_big" => {
|
||||
self.mem_copy(context)
|
||||
},
|
||||
"_llvm_bswap_i64" => {
|
||||
self.bitswap_i64(context)
|
||||
},
|
||||
_ => {
|
||||
trace!(target: "wasm", "Trapped due to unhandled function: '{}'", name);
|
||||
self.user_trap(context)
|
||||
|
@ -414,3 +414,37 @@ fn storage_read() {
|
||||
assert_eq!(gas_left, U256::from(99682));
|
||||
assert_eq!(Address::from(&result[12..32]), address);
|
||||
}
|
||||
|
||||
|
||||
// Tests the contract's ability to perform 256-bit addition.
|
||||
// Test passes two big-endian operands as call data, then executes a contract which adds them and returns the sum as the result
|
||||
#[test]
|
||||
fn math_add() {
|
||||
::ethcore_logger::init_log();
|
||||
let code = load_sample!("math.wasm");
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
|
||||
let mut args = [0u8; 64];
|
||||
let arg_a = U256::from_dec_str("999999999999999999999999999999").unwrap();
|
||||
let arg_b = U256::from_dec_str("888888888888888888888888888888").unwrap();
|
||||
arg_a.to_big_endian(&mut args[0..32]);
|
||||
arg_b.to_big_endian(&mut args[32..64]);
|
||||
params.data = Some(args.to_vec());
|
||||
|
||||
let (gas_left, result) = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut FakeExt::new()).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(_) => { panic!("storage_read should return payload"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
}
|
||||
};
|
||||
|
||||
let sum: U256 = (&result[..]).into();
|
||||
|
||||
assert_eq!(gas_left, U256::from(96284));
|
||||
assert_eq!(sum, U256::from_dec_str("1888888888888888888888888888887").unwrap());
|
||||
}
|
||||
|
@ -116,6 +116,9 @@ impl trace::VMTracer for Informant {
|
||||
self.stack.extend_from_slice(stack_push);
|
||||
|
||||
if let Some((pos, data)) = mem_diff {
|
||||
if self.memory.len() < (pos + data.len()) {
|
||||
self.memory.resize(pos + data.len(), 0);
|
||||
}
|
||||
self.memory[pos..pos + data.len()].copy_from_slice(data);
|
||||
}
|
||||
|
||||
|
@ -47,9 +47,9 @@ EVM implementation for Parity.
|
||||
Copyright 2016, 2017 Parity Technologies (UK) Ltd
|
||||
|
||||
Usage:
|
||||
evmbin stats [options]
|
||||
evmbin [options]
|
||||
evmbin [-h | --help]
|
||||
parity-evm stats [options]
|
||||
parity-evm [options]
|
||||
parity-evm [-h | --help]
|
||||
|
||||
Transaction options:
|
||||
--code CODE Contract code as hex (without 0x).
|
||||
@ -116,7 +116,7 @@ struct Args {
|
||||
flag_gas: Option<String>,
|
||||
flag_gas_price: Option<String>,
|
||||
flag_input: Option<String>,
|
||||
flag_spec: Option<String>,
|
||||
flag_chain: Option<String>,
|
||||
flag_json: bool,
|
||||
}
|
||||
|
||||
@ -164,7 +164,7 @@ impl Args {
|
||||
}
|
||||
|
||||
pub fn spec(&self) -> Result<spec::Spec, String> {
|
||||
Ok(match self.flag_spec {
|
||||
Ok(match self.flag_chain {
|
||||
Some(ref filename) => {
|
||||
let file = fs::File::open(filename).map_err(|e| format!("{}", e))?;
|
||||
spec::Spec::load(::std::env::temp_dir(), file)?
|
||||
@ -188,3 +188,37 @@ fn die<T: fmt::Display>(msg: T) -> ! {
|
||||
println!("{}", msg);
|
||||
::std::process::exit(-1)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use docopt::Docopt;
|
||||
use super::{Args, USAGE};
|
||||
|
||||
fn run<T: AsRef<str>>(args: &[T]) -> Args {
|
||||
Docopt::new(USAGE).and_then(|d| d.argv(args.into_iter()).deserialize()).unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_parse_all_the_options() {
|
||||
let args = run(&[
|
||||
"parity-evm",
|
||||
"--json",
|
||||
"--gas", "1",
|
||||
"--gas-price", "2",
|
||||
"--from", "0000000000000000000000000000000000000003",
|
||||
"--to", "0000000000000000000000000000000000000004",
|
||||
"--code", "05",
|
||||
"--input", "06",
|
||||
"--chain", "./testfile",
|
||||
]);
|
||||
|
||||
assert_eq!(args.flag_json, true);
|
||||
assert_eq!(args.gas(), Ok(1.into()));
|
||||
assert_eq!(args.gas_price(), Ok(2.into()));
|
||||
assert_eq!(args.from(), Ok(3.into()));
|
||||
assert_eq!(args.to(), Ok(4.into()));
|
||||
assert_eq!(args.code(), Ok(Some(vec![05])));
|
||||
assert_eq!(args.data(), Ok(Some(vec![06])));
|
||||
assert_eq!(args.flag_chain, Some("./testfile".to_owned()));
|
||||
}
|
||||
}
|
||||
|
2
js/package-lock.json
generated
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "parity.js",
|
||||
"version": "1.8.14",
|
||||
"version": "1.8.17",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "parity.js",
|
||||
"version": "1.8.14",
|
||||
"version": "1.8.17",
|
||||
"main": "release/index.js",
|
||||
"jsnext:main": "src/index.js",
|
||||
"author": "Parity Team <admin@parity.io>",
|
||||
|
@ -566,9 +566,9 @@ The following options are possible for the \`defaultBlock\` parameter:
|
||||
type: Hash,
|
||||
desc: 'public key of the signer.'
|
||||
},
|
||||
networkId: {
|
||||
chainId: {
|
||||
type: Quantity,
|
||||
desc: 'the network id of the transaction, if any.'
|
||||
desc: 'the chain id of the transaction, if any.'
|
||||
},
|
||||
creates: {
|
||||
type: Hash,
|
||||
@ -1111,9 +1111,9 @@ The following options are possible for the \`defaultBlock\` parameter:
|
||||
type: Hash,
|
||||
desc: 'public key of the signer.'
|
||||
},
|
||||
networkId: {
|
||||
chainId: {
|
||||
type: Quantity,
|
||||
desc: 'the network id of the transaction, if any.'
|
||||
desc: 'the chain id of the transaction, if any.'
|
||||
},
|
||||
creates: {
|
||||
type: Hash,
|
||||
|
@ -403,7 +403,7 @@ export default {
|
||||
condition: {
|
||||
block: 1
|
||||
},
|
||||
networkId: null,
|
||||
chainId: null,
|
||||
nonce: '0x0',
|
||||
publicKey: '0x3fa8c08c65a83f6b4ea3e04e1cc70cbe3cd391499e3e05ab7dedf28aff9afc538200ff93e3f2b2cb5029f03c7ebee820d63a4c5a9541c83acebe293f54cacf0e',
|
||||
raw: '0xf868808502d20cff33830e57e09400a289b43e1e4825dbedf2a78ba60a640634dc40830fffff801ca034c333b0b91cd832a3414d628e3fea29a00055cebf5ba59f7038c188404c0cf3a0524fd9b35be170439b5ffe89694ae0cfc553cb49d1d8b643239e353351531532',
|
||||
@ -626,7 +626,7 @@ export default {
|
||||
condition: {
|
||||
block: 1
|
||||
},
|
||||
networkId: 1,
|
||||
chainId: 1,
|
||||
nonce: '0x5',
|
||||
publicKey: '0x96157302dade55a1178581333e57d60ffe6fdf5a99607890456a578b4e6b60e335037d61ed58aa4180f9fd747dc50d44a7924aa026acbfb988b5062b629d6c36',
|
||||
r: '0x92e8beb19af2bad0511d516a86e77fa73004c0811b2173657a55797bdf8558e1',
|
||||
@ -688,7 +688,7 @@ export default {
|
||||
condition: {
|
||||
block: 1
|
||||
},
|
||||
networkId: 1,
|
||||
chainId: 1,
|
||||
nonce: '0x5',
|
||||
publicKey: '0x96157302dade55a1178581333e57d60ffe6fdf5a99607890456a578b4e6b60e335037d61ed58aa4180f9fd747dc50d44a7924aa026acbfb988b5062b629d6c36',
|
||||
r: '0x92e8beb19af2bad0511d516a86e77fa73004c0811b2173657a55797bdf8558e1',
|
||||
@ -980,7 +980,7 @@ export default {
|
||||
creates: null,
|
||||
raw: '0xf86c018504a817c80082520894f5d405530dabfbd0c1cab7a5812f008aa5559adf882efc004ac03a49968025a0b40c6967a7e8bbdfd99a25fd306b9ef23b80e719514aeb7ddd19e2303d6fc139a06bf770ab08119e67dc29817e1412a0e3086f43da308c314db1b3bca9fb6d32bd',
|
||||
publicKey: '0xeba33fd74f06236e17475bc5b6d1bac718eac048350d77d3fc8fbcbd85782a57c821255623c4fd1ebc9d555d07df453b2579ee557b7203fc256ca3b3401e4027',
|
||||
networkId: 1,
|
||||
chainId: 1,
|
||||
standardV: '0x0',
|
||||
v: '0x25',
|
||||
r: '0xb40c6967a7e8bbdfd99a25fd306b9ef23b80e719514aeb7ddd19e2303d6fc139',
|
||||
|
@ -173,9 +173,9 @@ export class TransactionResponse {
|
||||
type: Data,
|
||||
desc: 'Public key of the signer.'
|
||||
},
|
||||
networkId: {
|
||||
chainId: {
|
||||
type: Quantity,
|
||||
desc: 'The network id of the transaction, if any.'
|
||||
desc: 'The chain id of the transaction, if any.'
|
||||
},
|
||||
standardV: {
|
||||
type: Quantity,
|
||||
|
@ -325,7 +325,8 @@ export default class SecureApi extends Api {
|
||||
_fetchSettings () {
|
||||
return Promise
|
||||
.all([
|
||||
this._uiApi.parity.dappsUrl(),
|
||||
// ignore dapps disabled errors
|
||||
this._uiApi.parity.dappsUrl().catch(() => null),
|
||||
this._uiApi.parity.wsUrl()
|
||||
])
|
||||
.then(([dappsUrl, wsUrl]) => {
|
||||
|
@ -53,7 +53,7 @@
|
||||
|
||||
.infoline,
|
||||
.uuidline {
|
||||
opacity: 0.25;
|
||||
opacity: 0.5;
|
||||
}
|
||||
|
||||
.uuidline {
|
||||
|
@ -28,11 +28,18 @@ export default class Store {
|
||||
this._migrateStore();
|
||||
|
||||
this._api = api;
|
||||
// Show the first run if it hasn't been shown before
|
||||
// (thus an undefined value)
|
||||
this.firstrunVisible = store.get(LS_FIRST_RUN_KEY) === undefined;
|
||||
|
||||
this._checkAccounts();
|
||||
// Show the first run if the storage doesn't hold a `false` value
|
||||
const firstrunVisible = store.get(LS_FIRST_RUN_KEY) !== false;
|
||||
|
||||
// Only check accounts if we might show the first run
|
||||
if (firstrunVisible) {
|
||||
api.transport.once('open', () => {
|
||||
this._checkAccounts();
|
||||
});
|
||||
} else {
|
||||
this.firstrunVisible = false;
|
||||
}
|
||||
}
|
||||
|
||||
@action closeFirstrun = () => {
|
||||
@ -50,7 +57,7 @@ export default class Store {
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate the old LocalStorage ket format
|
||||
* Migrate the old LocalStorage key format
|
||||
* to the new one
|
||||
*/
|
||||
_migrateStore () {
|
||||
@ -70,12 +77,16 @@ export default class Store {
|
||||
this._api.parity.allAccountsInfo()
|
||||
])
|
||||
.then(([ vaults, info ]) => {
|
||||
const accounts = Object.keys(info).filter((address) => info[address].uuid);
|
||||
const accounts = Object.keys(info)
|
||||
.filter((address) => info[address].uuid)
|
||||
// In DEV mode, the empty phrase account is already added
|
||||
.filter((address) => address.toLowerCase() !== '0x00a329c0648769a73afac7f9381e08fb43dbea72');
|
||||
|
||||
// Has accounts if any vaults or accounts
|
||||
const hasAccounts = (accounts && accounts.length > 0) || (vaults && vaults.length > 0);
|
||||
|
||||
// Show First Run if no accounts and no vaults
|
||||
this.toggleFirstrun(this.firstrunVisible || !hasAccounts);
|
||||
this.toggleFirstrun(!hasAccounts);
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('checkAccounts', error);
|
||||
|
@ -94,7 +94,7 @@ impl From<UiConfiguration> for HttpConfiguration {
|
||||
enabled: conf.enabled,
|
||||
interface: conf.interface,
|
||||
port: conf.port,
|
||||
apis: rpc_apis::ApiSet::SafeContext,
|
||||
apis: rpc_apis::ApiSet::UnsafeContext,
|
||||
cors: None,
|
||||
hosts: conf.hosts,
|
||||
server_threads: None,
|
||||
|
@ -56,7 +56,7 @@ use signer;
|
||||
use url;
|
||||
|
||||
// how often to take periodic snapshots.
|
||||
const SNAPSHOT_PERIOD: u64 = 10000;
|
||||
const SNAPSHOT_PERIOD: u64 = 5000;
|
||||
|
||||
// how many blocks to wait before starting a periodic snapshot.
|
||||
const SNAPSHOT_HISTORY: u64 = 100;
|
||||
|
@ -24,6 +24,7 @@ serde_json = "1.0"
|
||||
time = "0.1"
|
||||
tokio-timer = "0.1"
|
||||
transient-hashmap = "0.4"
|
||||
itertools = "0.5"
|
||||
|
||||
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
|
||||
jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
|
||||
|
@ -18,9 +18,10 @@ use std::io::{self, Read, Write};
|
||||
use std::path::Path;
|
||||
use std::{fs, time, mem};
|
||||
|
||||
use itertools::Itertools;
|
||||
use rand::Rng;
|
||||
use rand::os::OsRng;
|
||||
use util::{H256, Hashable, Itertools};
|
||||
use util::{H256, Hashable};
|
||||
|
||||
/// Providing current time in seconds
|
||||
pub trait TimeProvider {
|
||||
|
@ -24,6 +24,7 @@ extern crate cid;
|
||||
extern crate crypto as rust_crypto;
|
||||
extern crate futures;
|
||||
extern crate futures_cpupool;
|
||||
extern crate itertools;
|
||||
extern crate multihash;
|
||||
extern crate order_stat;
|
||||
extern crate rand;
|
||||
|
@ -133,7 +133,7 @@ impl<C: MiningBlockChainClient, M: MinerService> Dispatcher for FullDispatcher<C
|
||||
-> BoxFuture<WithToken<SignedTransaction>, Error>
|
||||
{
|
||||
let (client, miner) = (self.client.clone(), self.miner.clone());
|
||||
let network_id = client.signing_network_id();
|
||||
let chain_id = client.signing_chain_id();
|
||||
let address = filled.from;
|
||||
future::done({
|
||||
let t = Transaction {
|
||||
@ -146,12 +146,12 @@ impl<C: MiningBlockChainClient, M: MinerService> Dispatcher for FullDispatcher<C
|
||||
};
|
||||
|
||||
if accounts.is_hardware_address(address) {
|
||||
hardware_signature(&*accounts, address, t, network_id).map(WithToken::No)
|
||||
hardware_signature(&*accounts, address, t, chain_id).map(WithToken::No)
|
||||
} else {
|
||||
let hash = t.hash(network_id);
|
||||
let hash = t.hash(chain_id);
|
||||
let signature = try_bf!(signature(&*accounts, address, hash, password));
|
||||
Ok(signature.map(|sig| {
|
||||
SignedTransaction::new(t.with_signature(sig, network_id))
|
||||
SignedTransaction::new(t.with_signature(sig, chain_id))
|
||||
.expect("Transaction was signed by AccountsProvider; it never produces invalid signatures; qed")
|
||||
}))
|
||||
}
|
||||
@ -358,7 +358,7 @@ impl Dispatcher for LightDispatcher {
|
||||
fn sign(&self, accounts: Arc<AccountProvider>, filled: FilledTransactionRequest, password: SignWith)
|
||||
-> BoxFuture<WithToken<SignedTransaction>, Error>
|
||||
{
|
||||
let network_id = self.client.signing_network_id();
|
||||
let chain_id = self.client.signing_chain_id();
|
||||
let address = filled.from;
|
||||
|
||||
let with_nonce = move |filled: FilledTransactionRequest, nonce| {
|
||||
@ -372,14 +372,14 @@ impl Dispatcher for LightDispatcher {
|
||||
};
|
||||
|
||||
if accounts.is_hardware_address(address) {
|
||||
return hardware_signature(&*accounts, address, t, network_id).map(WithToken::No)
|
||||
return hardware_signature(&*accounts, address, t, chain_id).map(WithToken::No)
|
||||
}
|
||||
|
||||
let hash = t.hash(network_id);
|
||||
let hash = t.hash(chain_id);
|
||||
let signature = signature(&*accounts, address, hash, password)?;
|
||||
|
||||
Ok(signature.map(|sig| {
|
||||
SignedTransaction::new(t.with_signature(sig, network_id))
|
||||
SignedTransaction::new(t.with_signature(sig, chain_id))
|
||||
.expect("Transaction was signed by AccountsProvider; it never produces invalid signatures; qed")
|
||||
}))
|
||||
};
|
||||
@ -552,20 +552,20 @@ fn signature(accounts: &AccountProvider, address: Address, hash: H256, password:
|
||||
}
|
||||
|
||||
// obtain a hardware signature from the given account.
|
||||
fn hardware_signature(accounts: &AccountProvider, address: Address, t: Transaction, network_id: Option<u64>)
|
||||
fn hardware_signature(accounts: &AccountProvider, address: Address, t: Transaction, chain_id: Option<u64>)
|
||||
-> Result<SignedTransaction, Error>
|
||||
{
|
||||
debug_assert!(accounts.is_hardware_address(address));
|
||||
|
||||
let mut stream = rlp::RlpStream::new();
|
||||
t.rlp_append_unsigned_transaction(&mut stream, network_id);
|
||||
t.rlp_append_unsigned_transaction(&mut stream, chain_id);
|
||||
let signature = accounts.sign_with_hardware(address, &stream.as_raw())
|
||||
.map_err(|e| {
|
||||
debug!(target: "miner", "Error signing transaction with hardware wallet: {}", e);
|
||||
errors::account("Error signing transaction with hardware wallet", e)
|
||||
})?;
|
||||
|
||||
SignedTransaction::new(t.with_signature(signature, network_id))
|
||||
SignedTransaction::new(t.with_signature(signature, chain_id))
|
||||
.map_err(|e| {
|
||||
debug!(target: "miner", "Hardware wallet has produced invalid signature: {}", e);
|
||||
errors::account("Invalid signature generated", e)
|
||||
|
@ -310,7 +310,7 @@ pub fn transaction_message(error: TransactionError) -> String {
|
||||
GasLimitExceeded { limit, got } => {
|
||||
format!("Transaction cost exceeds current gas limit. Limit: {}, got: {}. Try decreasing supplied gas.", limit, got)
|
||||
},
|
||||
InvalidNetworkId => "Invalid network id.".into(),
|
||||
InvalidChainId => "Invalid chain id.".into(),
|
||||
InvalidGasLimit(_) => "Supplied gas is beyond limit.".into(),
|
||||
SenderBanned => "Sender is banned in local queue.".into(),
|
||||
RecipientBanned => "Recipient is banned in local queue.".into(),
|
||||
|
@ -30,15 +30,17 @@ pub fn sign_call<B: MiningBlockChainClient, M: MinerService>(
|
||||
request: CallRequest,
|
||||
gas_cap: bool,
|
||||
) -> Result<SignedTransaction, Error> {
|
||||
let from = request.from.unwrap_or(0.into());
|
||||
let mut gas = request.gas.unwrap_or(U256::max_value());
|
||||
if gas_cap {
|
||||
let max_gas = 50_000_000.into();
|
||||
if gas > max_gas {
|
||||
let max_gas = 50_000_000.into();
|
||||
let gas = match request.gas {
|
||||
Some(gas) if gas_cap && gas > max_gas => {
|
||||
warn!("Gas limit capped to {} (from {})", max_gas, gas);
|
||||
gas = max_gas
|
||||
max_gas
|
||||
}
|
||||
}
|
||||
Some(gas) => gas,
|
||||
None if gas_cap => max_gas,
|
||||
None => U256::from(2) << 50,
|
||||
};
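A self-contained sketch of the reworked gas selection (plain u64 instead of U256, hypothetical function name): an explicit gas value is capped only when gas_cap is on, a missing value falls back to the cap, and without capping it defaults to 2^51.

    fn effective_gas(requested: Option<u64>, gas_cap: bool) -> u64 {
        let max_gas = 50_000_000;
        match requested {
            Some(gas) if gas_cap && gas > max_gas => max_gas, // warn and cap in the real code
            Some(gas) => gas,
            None if gas_cap => max_gas,
            None => 2u64 << 50,
        }
    }

    fn main() {
        assert_eq!(effective_gas(Some(60_000_000), true), 50_000_000);
        assert_eq!(effective_gas(Some(60_000_000), false), 60_000_000);
        assert_eq!(effective_gas(None, true), 50_000_000);
        assert_eq!(effective_gas(None, false), 2u64 << 50);
    }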
|
||||
let from = request.from.unwrap_or(0.into());
|
||||
|
||||
Ok(Transaction {
|
||||
nonce: request.nonce.unwrap_or_else(|| client.latest_nonce(&from)),
|
||||
|
@ -245,17 +245,27 @@ impl<C: Send + Sync + 'static> EthPubSub for EthPubSubClient<C> {
|
||||
kind: pubsub::Kind,
|
||||
params: Trailing<pubsub::Params>,
|
||||
) {
|
||||
match (kind, params.into()) {
|
||||
let error = match (kind, params.into()) {
|
||||
(pubsub::Kind::NewHeads, None) => {
|
||||
self.heads_subscribers.write().push(subscriber)
|
||||
self.heads_subscribers.write().push(subscriber);
|
||||
return;
|
||||
},
|
||||
(pubsub::Kind::Logs, Some(pubsub::Params::Logs(filter))) => {
|
||||
self.logs_subscribers.write().push(subscriber, filter.into());
|
||||
return;
|
||||
},
|
||||
(pubsub::Kind::NewHeads, _) => {
|
||||
errors::invalid_params("newHeads", "Expected no parameters.")
|
||||
},
|
||||
(pubsub::Kind::Logs, _) => {
|
||||
errors::invalid_params("logs", "Expected a filter object.")
|
||||
},
|
||||
_ => {
|
||||
let _ = subscriber.reject(errors::unimplemented(None));
|
||||
errors::unimplemented(None)
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
let _ = subscriber.reject(error);
|
||||
}
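A sketch of the control-flow change above, reduced to plain enums: successful matches return early, every failure arm yields an error value, and a single reject at the end replaces the per-arm rejects.

    enum Kind { NewHeads, Logs }

    fn subscribe(kind: Kind, has_params: bool) -> Result<(), &'static str> {
        let error = match (kind, has_params) {
            (Kind::NewHeads, false) => return Ok(()),   // subscription accepted
            (Kind::Logs, true) => return Ok(()),        // subscription accepted
            (Kind::NewHeads, _) => "newHeads: expected no parameters",
            (Kind::Logs, _) => "logs: expected a filter object",
        };
        Err(error)
    }

    fn main() {
        assert!(subscribe(Kind::NewHeads, false).is_ok());
        assert!(subscribe(Kind::Logs, false).is_err());
    }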
|
||||
|
||||
fn unsubscribe(&self, id: SubscriptionId) -> BoxFuture<bool, Error> {
|
||||
|
@ -124,9 +124,9 @@ impl<D: Dispatcher + 'static> Personal for PersonalClient<D> {
|
||||
.map(move |tx| (tx, dispatcher))
|
||||
})
|
||||
.and_then(|(pending_tx, dispatcher)| {
|
||||
let network_id = pending_tx.network_id();
|
||||
trace!(target: "miner", "send_transaction: dispatching tx: {} for network ID {:?}",
|
||||
::rlp::encode(&*pending_tx).into_vec().pretty(), network_id);
|
||||
let chain_id = pending_tx.chain_id();
|
||||
trace!(target: "miner", "send_transaction: dispatching tx: {} for chain ID {:?}",
|
||||
::rlp::encode(&*pending_tx).into_vec().pretty(), chain_id);
|
||||
|
||||
dispatcher.dispatch_transaction(pending_tx).map(Into::into)
|
||||
})
|
||||
|
@ -544,7 +544,7 @@ fn rpc_eth_pending_transaction_by_hash() {
|
||||
tester.miner.pending_transactions.lock().insert(H256::zero(), tx);
|
||||
}
|
||||
|
||||
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"condition":null,"creates":null,"from":"0x0f65fe9276bc9a24ae7083ae28e2660ef72df99e","gas":"0x5208","gasPrice":"0x1","hash":"0x41df922fd0d4766fcc02e161f8295ec28522f329ae487f14d811e4b64c8d6e31","input":"0x","networkId":null,"nonce":"0x0","publicKey":"0x7ae46da747962c2ee46825839c1ef9298e3bd2e70ca2938495c3693a485ec3eaa8f196327881090ff64cf4fbb0a48485d4f83098e189ed3b7a87d5941b59f789","r":"0x48b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353","raw":"0xf85f800182520894095e7baea6a6c7c4c2dfeb977efac326af552d870a801ba048b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353a0efffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","s":"0xefffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","standardV":"0x0","to":"0x095e7baea6a6c7c4c2dfeb977efac326af552d87","transactionIndex":null,"v":"0x1b","value":"0xa"},"id":1}"#;
|
||||
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"chainId":null,"condition":null,"creates":null,"from":"0x0f65fe9276bc9a24ae7083ae28e2660ef72df99e","gas":"0x5208","gasPrice":"0x1","hash":"0x41df922fd0d4766fcc02e161f8295ec28522f329ae487f14d811e4b64c8d6e31","input":"0x","nonce":"0x0","publicKey":"0x7ae46da747962c2ee46825839c1ef9298e3bd2e70ca2938495c3693a485ec3eaa8f196327881090ff64cf4fbb0a48485d4f83098e189ed3b7a87d5941b59f789","r":"0x48b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353","raw":"0xf85f800182520894095e7baea6a6c7c4c2dfeb977efac326af552d870a801ba048b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353a0efffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","s":"0xefffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","standardV":"0x0","to":"0x095e7baea6a6c7c4c2dfeb977efac326af552d87","transactionIndex":null,"v":"0x1b","value":"0xa"},"id":1}"#;
|
||||
let request = r#"{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "eth_getTransactionByHash",
|
||||
@ -860,12 +860,13 @@ fn rpc_eth_sign_transaction() {
|
||||
let response = r#"{"jsonrpc":"2.0","result":{"#.to_owned() +
|
||||
r#""raw":"0x"# + &rlp.to_hex() + r#"","# +
|
||||
r#""tx":{"# +
|
||||
r#""blockHash":null,"blockNumber":null,"condition":null,"creates":null,"# +
|
||||
r#""blockHash":null,"blockNumber":null,"# +
|
||||
&format!("\"chainId\":{},", t.chain_id().map_or("null".to_owned(), |n| format!("{}", n))) +
|
||||
r#""condition":null,"creates":null,"# +
|
||||
&format!("\"from\":\"0x{:?}\",", &address) +
|
||||
r#""gas":"0x76c0","gasPrice":"0x9184e72a000","# +
|
||||
&format!("\"hash\":\"0x{:?}\",", t.hash()) +
|
||||
r#""input":"0x","# +
|
||||
&format!("\"networkId\":{},", t.network_id().map_or("null".to_owned(), |n| format!("{}", n))) +
|
||||
r#""nonce":"0x1","# +
|
||||
&format!("\"publicKey\":\"0x{:?}\",", t.recover_public().unwrap()) +
|
||||
&format!("\"r\":\"0x{}\",", U256::from(signature.r()).to_hex()) +
|
||||
|
@ -233,7 +233,7 @@ fn rpc_parity_remove_transaction() {
|
||||
let hash = signed.hash();
|
||||
|
||||
let request = r#"{"jsonrpc": "2.0", "method": "parity_removeTransaction", "params":[""#.to_owned() + &format!("0x{:?}", hash) + r#""], "id": 1}"#;
|
||||
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"condition":null,"creates":null,"from":"0x0000000000000000000000000000000000000002","gas":"0x76c0","gasPrice":"0x9184e72a000","hash":"0xa2e0da8a8064e0b9f93e95a53c2db6d01280efb8ac72a708d25487e67dd0f8fc","input":"0x","networkId":null,"nonce":"0x1","publicKey":null,"r":"0x1","raw":"0xe9018609184e72a0008276c0940000000000000000000000000000000000000005849184e72a80800101","s":"0x1","standardV":"0x4","to":"0x0000000000000000000000000000000000000005","transactionIndex":null,"v":"0x0","value":"0x9184e72a"},"id":1}"#;
|
||||
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"chainId":null,"condition":null,"creates":null,"from":"0x0000000000000000000000000000000000000002","gas":"0x76c0","gasPrice":"0x9184e72a000","hash":"0xa2e0da8a8064e0b9f93e95a53c2db6d01280efb8ac72a708d25487e67dd0f8fc","input":"0x","nonce":"0x1","publicKey":null,"r":"0x1","raw":"0xe9018609184e72a0008276c0940000000000000000000000000000000000000005849184e72a80800101","s":"0x1","standardV":"0x4","to":"0x0000000000000000000000000000000000000005","transactionIndex":null,"v":"0x0","value":"0x9184e72a"},"id":1}"#;
|
||||
|
||||
miner.pending_transactions.lock().insert(hash, signed);
|
||||
assert_eq!(io.handle_request_sync(&request), Some(response.to_owned()));
|
||||
|
@ -454,12 +454,13 @@ fn should_confirm_sign_transaction_with_rlp() {
|
||||
let response = r#"{"jsonrpc":"2.0","result":{"#.to_owned() +
|
||||
r#""raw":"0x"# + &rlp.to_hex() + r#"","# +
|
||||
r#""tx":{"# +
|
||||
r#""blockHash":null,"blockNumber":null,"condition":null,"creates":null,"# +
|
||||
r#""blockHash":null,"blockNumber":null,"# +
|
||||
&format!("\"chainId\":{},", t.chain_id().map_or("null".to_owned(), |n| format!("{}", n))) +
|
||||
r#""condition":null,"creates":null,"# +
|
||||
&format!("\"from\":\"0x{:?}\",", &address) +
|
||||
r#""gas":"0x989680","gasPrice":"0x1000","# +
|
||||
&format!("\"hash\":\"0x{:?}\",", t.hash()) +
|
||||
r#""input":"0x","# +
|
||||
&format!("\"networkId\":{},", t.network_id().map_or("null".to_owned(), |n| format!("{}", n))) +
|
||||
r#""nonce":"0x0","# +
|
||||
&format!("\"publicKey\":\"0x{:?}\",", t.public_key().unwrap()) +
|
||||
&format!("\"r\":\"0x{}\",", U256::from(signature.r()).to_hex()) +
|
||||
|
@ -297,12 +297,13 @@ fn should_add_sign_transaction_to_the_queue() {
|
||||
let response = r#"{"jsonrpc":"2.0","result":{"#.to_owned() +
|
||||
r#""raw":"0x"# + &rlp.to_hex() + r#"","# +
|
||||
r#""tx":{"# +
|
||||
r#""blockHash":null,"blockNumber":null,"condition":null,"creates":null,"# +
|
||||
r#""blockHash":null,"blockNumber":null,"# +
|
||||
&format!("\"chainId\":{},", t.chain_id().map_or("null".to_owned(), |n| format!("{}", n))) +
|
||||
r#""condition":null,"creates":null,"# +
|
||||
&format!("\"from\":\"0x{:?}\",", &address) +
|
||||
r#""gas":"0x76c0","gasPrice":"0x9184e72a000","# +
|
||||
&format!("\"hash\":\"0x{:?}\",", t.hash()) +
|
||||
r#""input":"0x","# +
|
||||
&format!("\"networkId\":{},", t.network_id().map_or("null".to_owned(), |n| format!("{}", n))) +
|
||||
r#""nonce":"0x1","# +
|
||||
&format!("\"publicKey\":\"0x{:?}\",", t.public_key().unwrap()) +
|
||||
&format!("\"r\":\"0x{}\",", U256::from(signature.r()).to_hex()) +
|
||||
|
@ -214,7 +214,7 @@ impl<T: Serialize> Serialize for Rich<T> {
|
||||
// and serialize
|
||||
value.serialize(serializer)
|
||||
} else {
|
||||
Err(S::Error::custom("Unserializable structures."))
|
||||
Err(S::Error::custom("Unserializable structures: expected objects"))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -230,7 +230,7 @@ mod tests {
|
||||
fn test_serialize_block_transactions() {
|
||||
let t = BlockTransactions::Full(vec![Transaction::default()]);
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"[{"hash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0","blockHash":null,"blockNumber":null,"transactionIndex":null,"from":"0x0000000000000000000000000000000000000000","to":null,"value":"0x0","gasPrice":"0x0","gas":"0x0","input":"0x","creates":null,"raw":"0x","publicKey":null,"networkId":null,"standardV":"0x0","v":"0x0","r":"0x0","s":"0x0","condition":null}]"#);
|
||||
assert_eq!(serialized, r#"[{"hash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0","blockHash":null,"blockNumber":null,"transactionIndex":null,"from":"0x0000000000000000000000000000000000000000","to":null,"value":"0x0","gasPrice":"0x0","gas":"0x0","input":"0x","creates":null,"raw":"0x","publicKey":null,"chainId":null,"standardV":"0x0","v":"0x0","r":"0x0","s":"0x0","condition":null}]"#);
|
||||
|
||||
let t = BlockTransactions::Hashes(vec![H256::default().into()]);
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
|
@ -79,8 +79,12 @@ impl<'a> Visitor<'a> for BlockNumberVisitor {
|
||||
"latest" => Ok(BlockNumber::Latest),
|
||||
"earliest" => Ok(BlockNumber::Earliest),
|
||||
"pending" => Ok(BlockNumber::Pending),
|
||||
_ if value.starts_with("0x") => u64::from_str_radix(&value[2..], 16).map(BlockNumber::Num).map_err(|_| Error::custom("invalid block number")),
|
||||
_ => value.parse::<u64>().map(BlockNumber::Num).map_err(|_| Error::custom("invalid block number"))
|
||||
_ if value.starts_with("0x") => u64::from_str_radix(&value[2..], 16).map(BlockNumber::Num).map_err(|e| {
|
||||
Error::custom(format!("Invalid block number: {}", e))
|
||||
}),
|
||||
_ => value.parse::<u64>().map(BlockNumber::Num).map_err(|e| {
|
||||
Error::custom(format!("Invalid block number: {}", e))
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
|
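The two numeric arms above now surface the underlying parse error instead of a generic "invalid block number". A minimal standalone sketch of that parsing logic (the function name is ours, not part of the patch):

fn parse_block_number(value: &str) -> Result<u64, String> {
    if value.starts_with("0x") {
        // hex form, e.g. "0x10"
        u64::from_str_radix(&value[2..], 16).map_err(|e| format!("Invalid block number: {}", e))
    } else {
        // decimal form, e.g. "16"
        value.parse::<u64>().map_err(|e| format!("Invalid block number: {}", e))
    }
}

fn main() {
    assert_eq!(parse_block_number("0x10"), Ok(16));
    assert_eq!(parse_block_number("16"), Ok(16));
    // "latest", "earliest" and "pending" are handled by the earlier match arms in the visitor
    assert!(parse_block_number("latest").is_err());
}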
@ -81,9 +81,9 @@ impl<'a> Visitor<'a> for BytesVisitor {
|
||||
);
|
||||
Ok(Bytes::new(Vec::new()))
|
||||
} else if value.len() >= 2 && &value[0..2] == "0x" && value.len() & 1 == 0 {
|
||||
Ok(Bytes::new(FromHex::from_hex(&value[2..]).map_err(|_| Error::custom("invalid hex"))?))
|
||||
Ok(Bytes::new(FromHex::from_hex(&value[2..]).map_err(|e| Error::custom(format!("Invalid hex: {}", e)))?))
|
||||
} else {
|
||||
Err(Error::custom("invalid format"))
|
||||
Err(Error::custom("Invalid bytes format. Expected a 0x-prefixed hex string with even length"))
|
||||
}
|
||||
}
|
||||
|
||||
|
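The stricter error text reflects the shape the visitor actually accepts: a "0x" prefix followed by an even number of hex digits, since every byte takes two characters. A minimal sketch of just that shape check (the helper name is ours, for illustration):

fn looks_like_bytes(value: &str) -> bool {
    // mirrors the `else if` guard above: prefix present and even total length
    value.len() >= 2 && &value[0..2] == "0x" && value.len() & 1 == 0
}

fn main() {
    assert!(looks_like_bytes("0x1234"));
    assert!(looks_like_bytes("0x"));        // empty byte string is fine
    assert!(!looks_like_bytes("0x123"));    // odd length -> "Invalid bytes format..." error
    assert!(!looks_like_bytes("1234"));     // missing prefix
}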
@ -121,7 +121,7 @@ impl<'a> Visitor<'a> for DerivationTypeVisitor {
|
||||
match value {
|
||||
"soft" => Ok(DerivationType::Soft),
|
||||
"hard" => Ok(DerivationType::Hard),
|
||||
_ => Err(Error::custom("invalid derivation type")),
|
||||
v => Err(Error::custom(format!("invalid derivation type: {:?}", v))),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -43,7 +43,7 @@ impl<'a, T> Deserialize<'a> for VariadicValue<T> where T: DeserializeOwned {
|
||||
|
||||
from_value(v.clone()).map(VariadicValue::Single)
|
||||
.or_else(|_| from_value(v).map(VariadicValue::Multiple))
|
||||
.map_err(|_| D::Error::custom("Invalid type."))
|
||||
.map_err(|err| D::Error::custom(format!("Invalid variadic value type: {}", err)))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -47,8 +47,12 @@ impl<'a> Visitor<'a> for IndexVisitor {
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error {
|
||||
match value {
|
||||
_ if value.starts_with("0x") => usize::from_str_radix(&value[2..], 16).map(Index).map_err(|_| Error::custom("invalid index")),
|
||||
_ => value.parse::<usize>().map(Index).map_err(|_| Error::custom("invalid index")),
|
||||
_ if value.starts_with("0x") => usize::from_str_radix(&value[2..], 16).map(Index).map_err(|e| {
|
||||
Error::custom(format!("Invalid index: {}", e))
|
||||
}),
|
||||
_ => value.parse::<usize>().map(Index).map_err(|e| {
|
||||
Error::custom(format!("Invalid index: {}", e))
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -84,7 +84,7 @@ impl<'a> Deserialize<'a> for Params {
|
||||
}
|
||||
|
||||
from_value(v.clone()).map(Params::Logs)
|
||||
.map_err(|_| D::Error::custom("Invalid type."))
|
||||
.map_err(|e| D::Error::custom(format!("Invalid Pub-Sub parameters: {}", e)))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -59,8 +59,8 @@ pub struct Transaction {
|
||||
#[serde(rename="publicKey")]
|
||||
pub public_key: Option<H512>,
|
||||
/// The network id of the transaction, if any.
|
||||
#[serde(rename="networkId")]
|
||||
pub network_id: Option<u64>,
|
||||
#[serde(rename="chainId")]
|
||||
pub chain_id: Option<u64>,
|
||||
/// The standardised V field of the signature (0 or 1).
|
||||
#[serde(rename="standardV")]
|
||||
pub standard_v: U256,
|
||||
@ -196,7 +196,7 @@ impl Transaction {
|
||||
},
|
||||
raw: ::rlp::encode(&t.signed).into_vec().into(),
|
||||
public_key: t.recover_public().ok().map(Into::into),
|
||||
network_id: t.network_id(),
|
||||
chain_id: t.chain_id(),
|
||||
standard_v: t.standard_v().into(),
|
||||
v: t.original_v().into(),
|
||||
r: signature.r().into(),
|
||||
@ -230,7 +230,7 @@ impl Transaction {
|
||||
},
|
||||
raw: ::rlp::encode(&t).into_vec().into(),
|
||||
public_key: t.public_key().map(Into::into),
|
||||
network_id: t.network_id(),
|
||||
chain_id: t.chain_id(),
|
||||
standard_v: t.standard_v().into(),
|
||||
v: t.original_v().into(),
|
||||
r: signature.r().into(),
|
||||
@ -273,7 +273,7 @@ mod tests {
|
||||
fn test_transaction_serialize() {
|
||||
let t = Transaction::default();
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"{"hash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0","blockHash":null,"blockNumber":null,"transactionIndex":null,"from":"0x0000000000000000000000000000000000000000","to":null,"value":"0x0","gasPrice":"0x0","gas":"0x0","input":"0x","creates":null,"raw":"0x","publicKey":null,"networkId":null,"standardV":"0x0","v":"0x0","r":"0x0","s":"0x0","condition":null}"#);
|
||||
assert_eq!(serialized, r#"{"hash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0","blockHash":null,"blockNumber":null,"transactionIndex":null,"from":"0x0000000000000000000000000000000000000000","to":null,"value":"0x0","gasPrice":"0x0","gas":"0x0","input":"0x","creates":null,"raw":"0x","publicKey":null,"chainId":null,"standardV":"0x0","v":"0x0","r":"0x0","s":"0x0","condition":null}"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -9,7 +9,7 @@ version = "1.4.0"
[dependencies]
futures = "0.1"
rpassword = "0.3.0"
bigint = "3.0"
bigint = "4.0"
parity-rpc = { path = "../rpc" }
parity-rpc-client = { path = "../rpc_client" }
ethcore-util = { path = "../util" }

@ -133,7 +133,7 @@ const MAX_TRANSACTION_PACKET_SIZE: usize = 8 * 1024 * 1024;
// Maximal number of transactions sent in a single packet.
const MAX_TRANSACTIONS_TO_PROPAGATE: usize = 64;
// Minimum number of blocks to be behind for a snapshot sync.
const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 100000;
const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 10000;
const SNAPSHOT_MIN_PEERS: usize = 3;

const STATUS_PACKET: u8 = 0x00;

@ -42,7 +42,7 @@ impl IoHandler<ClientIoMessage> for TestIoHandler {
|
||||
}
|
||||
}
|
||||
|
||||
fn new_tx(secret: &Secret, nonce: U256, network_id: u64) -> PendingTransaction {
|
||||
fn new_tx(secret: &Secret, nonce: U256, chain_id: u64) -> PendingTransaction {
|
||||
let signed = Transaction {
|
||||
nonce: nonce.into(),
|
||||
gas_price: 0.into(),
|
||||
@ -50,7 +50,7 @@ fn new_tx(secret: &Secret, nonce: U256, network_id: u64) -> PendingTransaction {
|
||||
action: Action::Call(Address::default()),
|
||||
value: 0.into(),
|
||||
data: Vec::new(),
|
||||
}.sign(secret, Some(network_id));
|
||||
}.sign(secret, Some(chain_id));
|
||||
PendingTransaction::new(signed, None)
|
||||
}
|
||||
|
||||
@ -62,7 +62,7 @@ fn authority_round() {
|
||||
ap.insert_account(s0.secret().clone(), "").unwrap();
|
||||
ap.insert_account(s1.secret().clone(), "").unwrap();
|
||||
|
||||
let network_id = Spec::new_test_round().network_id();
|
||||
let chain_id = Spec::new_test_round().chain_id();
|
||||
let mut net = TestNet::with_spec_and_accounts(2, SyncConfig::default(), Spec::new_test_round, Some(ap));
|
||||
let io_handler0: Arc<IoHandler<ClientIoMessage>> = Arc::new(TestIoHandler { client: net.peer(0).chain.clone() });
|
||||
let io_handler1: Arc<IoHandler<ClientIoMessage>> = Arc::new(TestIoHandler { client: net.peer(1).chain.clone() });
|
||||
@ -76,15 +76,15 @@ fn authority_round() {
|
||||
// exchange statuses
|
||||
net.sync();
|
||||
// Trigger block proposal
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 0.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 0.into(), network_id)).unwrap();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 0.into(), chain_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 0.into(), chain_id)).unwrap();
|
||||
// Sync a block
|
||||
net.sync();
|
||||
assert_eq!(net.peer(0).chain.chain_info().best_block_number, 1);
|
||||
assert_eq!(net.peer(1).chain.chain_info().best_block_number, 1);
|
||||
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 1.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 1.into(), network_id)).unwrap();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 1.into(), chain_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 1.into(), chain_id)).unwrap();
|
||||
// Move to next proposer step.
|
||||
net.peer(0).chain.engine().step();
|
||||
net.peer(1).chain.engine().step();
|
||||
@ -93,8 +93,8 @@ fn authority_round() {
|
||||
assert_eq!(net.peer(1).chain.chain_info().best_block_number, 2);
|
||||
|
||||
// Fork the network with equal height.
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 2.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 2.into(), network_id)).unwrap();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 2.into(), chain_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 2.into(), chain_id)).unwrap();
|
||||
// Let both nodes build one block.
|
||||
net.peer(0).chain.engine().step();
|
||||
let early_hash = net.peer(0).chain.chain_info().best_block_hash;
|
||||
@ -116,8 +116,8 @@ fn authority_round() {
|
||||
assert_eq!(ci1.best_block_hash, early_hash);
|
||||
|
||||
// Selfish miner
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 3.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 3.into(), network_id)).unwrap();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 3.into(), chain_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 3.into(), chain_id)).unwrap();
|
||||
// Node 0 is an earlier primary.
|
||||
net.peer(0).chain.engine().step();
|
||||
assert_eq!(net.peer(0).chain.chain_info().best_block_number, 4);
|
||||
@ -128,7 +128,7 @@ fn authority_round() {
|
||||
// Node 1 makes 2 blocks, but is a later primary on the first one.
|
||||
net.peer(1).chain.engine().step();
|
||||
net.peer(1).chain.engine().step();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 4.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 4.into(), chain_id)).unwrap();
|
||||
net.peer(1).chain.engine().step();
|
||||
net.peer(1).chain.engine().step();
|
||||
assert_eq!(net.peer(1).chain.chain_info().best_block_number, 5);
|
||||
@ -149,7 +149,7 @@ fn tendermint() {
|
||||
ap.insert_account(s0.secret().clone(), "").unwrap();
|
||||
ap.insert_account(s1.secret().clone(), "").unwrap();
|
||||
|
||||
let network_id = Spec::new_test_tendermint().network_id();
|
||||
let chain_id = Spec::new_test_tendermint().chain_id();
|
||||
let mut net = TestNet::with_spec_and_accounts(2, SyncConfig::default(), Spec::new_test_tendermint, Some(ap));
|
||||
let io_handler0: Arc<IoHandler<ClientIoMessage>> = Arc::new(TestIoHandler { client: net.peer(0).chain.clone() });
|
||||
let io_handler1: Arc<IoHandler<ClientIoMessage>> = Arc::new(TestIoHandler { client: net.peer(1).chain.clone() });
|
||||
@ -165,7 +165,7 @@ fn tendermint() {
|
||||
// Exchange statuses
|
||||
net.sync();
|
||||
// Propose
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 0.into(), network_id)).unwrap();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 0.into(), chain_id)).unwrap();
|
||||
net.sync();
|
||||
// Propose timeout, synchronous for now
|
||||
net.peer(0).chain.engine().step();
|
||||
@ -176,7 +176,7 @@ fn tendermint() {
|
||||
assert_eq!(net.peer(0).chain.chain_info().best_block_number, 1);
|
||||
assert_eq!(net.peer(1).chain.chain_info().best_block_number, 1);
|
||||
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 0.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 0.into(), chain_id)).unwrap();
|
||||
// Commit timeout
|
||||
net.peer(0).chain.engine().step();
|
||||
net.peer(1).chain.engine().step();
|
||||
@ -190,8 +190,8 @@ fn tendermint() {
|
||||
assert_eq!(net.peer(0).chain.chain_info().best_block_number, 2);
|
||||
assert_eq!(net.peer(1).chain.chain_info().best_block_number, 2);
|
||||
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 1.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 1.into(), network_id)).unwrap();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 1.into(), chain_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 1.into(), chain_id)).unwrap();
|
||||
// Peers get disconnected.
|
||||
// Commit
|
||||
net.peer(0).chain.engine().step();
|
||||
@ -199,8 +199,8 @@ fn tendermint() {
|
||||
// Propose
|
||||
net.peer(0).chain.engine().step();
|
||||
net.peer(1).chain.engine().step();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 2.into(), network_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 2.into(), network_id)).unwrap();
|
||||
net.peer(0).chain.miner().import_own_transaction(&*net.peer(0).chain, new_tx(s0.secret(), 2.into(), chain_id)).unwrap();
|
||||
net.peer(1).chain.miner().import_own_transaction(&*net.peer(1).chain, new_tx(s1.secret(), 2.into(), chain_id)).unwrap();
|
||||
// Send different prevotes
|
||||
net.sync();
|
||||
// Prevote timeout
|
||||
|
@ -19,7 +19,6 @@ rust-crypto = "0.2.34"
elastic-array = "0.9"
rlp = { path = "rlp" }
heapsize = "0.4"
itertools = "0.5"
sha3 = { path = "sha3" }
clippy = { version = "0.0.103", optional = true}
ethcore-devtools = { path = "../devtools" }

@ -8,7 +8,7 @@ version = "0.1.3"
authors = ["Parity Technologies <admin@parity.io>"]

[dependencies]
bigint = "3.0"
bigint = "4.0"
rustc-hex = "1.0"
rand = "0.3.12"
libc = "0.2"

@ -37,9 +37,9 @@ struct BitVecJournal {

impl BitVecJournal {
pub fn new(size: usize) -> BitVecJournal {
let extra = if size % 8 > 0 { 1 } else { 0 };
let extra = if size % 64 > 0 { 1 } else { 0 };
BitVecJournal {
elems: vec![0u64; size / 8 + extra],
elems: vec![0u64; size / 64 + extra],
journal: HashSet::new(),
}
}

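`elems` is a `Vec<u64>`, so each element stores 64 bits; the fix sizes the vector in 64-bit words (rounding up) instead of treating elements as single bytes, which previously over-allocated by roughly a factor of eight. A small sketch of the corrected rounding (the helper is ours, for illustration):

fn words_for_bits(size: usize) -> usize {
    // what `size / 64 + extra` computes in the patched constructor
    size / 64 + if size % 64 > 0 { 1 } else { 0 }
}

fn main() {
    assert_eq!(words_for_bits(64), 1);  // exactly one u64
    assert_eq!(words_for_bits(65), 2);  // one extra bit needs a second word
    assert_eq!(words_for_bits(0), 0);
}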
@ -15,7 +15,7 @@
#![feature(test)]

extern crate test;
extern crate bigint;
extern crate ethcore_bigint as bigint;
extern crate rlp;

use test::Bencher;

util/rlp_derive/Cargo.toml (new file, 15 lines)
@ -0,0 +1,15 @@
|
||||
[package]
|
||||
name = "rlp_derive"
|
||||
version = "0.1.0"
|
||||
authors = ["debris <marek.kotewicz@gmail.com>"]
|
||||
|
||||
[lib]
|
||||
name = "rlp_derive"
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
syn = "0.11.11"
|
||||
quote = "0.3.15"
|
||||
|
||||
[dev-dependencies]
|
||||
rlp = { path = "../rlp" }
|
util/rlp_derive/src/de.rs (new file, 139 lines)
@ -0,0 +1,139 @@
|
||||
use {syn, quote};
|
||||
|
||||
struct ParseQuotes {
|
||||
single: quote::Tokens,
|
||||
list: quote::Tokens,
|
||||
takes_index: bool,
|
||||
}
|
||||
|
||||
fn decodable_parse_quotes() -> ParseQuotes {
|
||||
ParseQuotes {
|
||||
single: quote! { rlp.val_at },
|
||||
list: quote! { rlp.list_at },
|
||||
takes_index: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn decodable_wrapper_parse_quotes() -> ParseQuotes {
|
||||
ParseQuotes {
|
||||
single: quote! { rlp.as_val },
|
||||
list: quote! { rlp.as_list },
|
||||
takes_index: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn impl_decodable(ast: &syn::DeriveInput) -> quote::Tokens {
|
||||
let body = match ast.body {
|
||||
syn::Body::Struct(ref s) => s,
|
||||
_ => panic!("#[derive(RlpDecodable)] is only defined for structs."),
|
||||
};
|
||||
|
||||
let stmts: Vec<_> = match *body {
|
||||
syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) =>
|
||||
fields.iter().enumerate().map(decodable_field_map).collect(),
|
||||
syn::VariantData::Unit => panic!("#[derive(RlpDecodable)] is not defined for Unit structs."),
|
||||
};
|
||||
|
||||
let name = &ast.ident;
|
||||
|
||||
let dummy_const = syn::Ident::new(format!("_IMPL_RLP_DECODABLE_FOR_{}", name));
|
||||
let impl_block = quote! {
|
||||
impl rlp::Decodable for #name {
|
||||
fn decode(rlp: &rlp::UntrustedRlp) -> Result<Self, rlp::DecoderError> {
|
||||
let result = #name {
|
||||
#(#stmts)*
|
||||
};
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
quote! {
|
||||
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
|
||||
const #dummy_const: () = {
|
||||
extern crate rlp;
|
||||
#impl_block
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub fn impl_decodable_wrapper(ast: &syn::DeriveInput) -> quote::Tokens {
|
||||
let body = match ast.body {
|
||||
syn::Body::Struct(ref s) => s,
|
||||
_ => panic!("#[derive(RlpDecodableWrapper)] is only defined for structs."),
|
||||
};
|
||||
|
||||
let stmt = match *body {
|
||||
syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) => {
|
||||
if fields.len() == 1 {
|
||||
let field = fields.first().expect("fields.len() == 1; qed");
|
||||
decodable_field(0, field, decodable_wrapper_parse_quotes())
|
||||
} else {
|
||||
panic!("#[derive(RlpDecodableWrapper)] is only defined for structs with one field.")
|
||||
}
|
||||
},
|
||||
syn::VariantData::Unit => panic!("#[derive(RlpDecodableWrapper)] is not defined for Unit structs."),
|
||||
};
|
||||
|
||||
let name = &ast.ident;
|
||||
|
||||
let dummy_const = syn::Ident::new(format!("_IMPL_RLP_DECODABLE_FOR_{}", name));
|
||||
let impl_block = quote! {
|
||||
impl rlp::Decodable for #name {
|
||||
fn decode(rlp: &rlp::UntrustedRlp) -> Result<Self, rlp::DecoderError> {
|
||||
let result = #name {
|
||||
#stmt
|
||||
};
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
quote! {
|
||||
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
|
||||
const #dummy_const: () = {
|
||||
extern crate rlp;
|
||||
#impl_block
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn decodable_field_map(tuple: (usize, &syn::Field)) -> quote::Tokens {
|
||||
decodable_field(tuple.0, tuple.1, decodable_parse_quotes())
|
||||
}
|
||||
|
||||
fn decodable_field(index: usize, field: &syn::Field, quotes: ParseQuotes) -> quote::Tokens {
|
||||
let ident = match field.ident {
|
||||
Some(ref ident) => ident.to_string(),
|
||||
None => index.to_string(),
|
||||
};
|
||||
|
||||
let id = syn::Ident::new(ident);
|
||||
let index = syn::Ident::new(index.to_string());
|
||||
|
||||
let single = quotes.single;
|
||||
let list = quotes.list;
|
||||
|
||||
match field.ty {
|
||||
syn::Ty::Path(_, ref path) => {
|
||||
let ident = &path.segments.first().expect("there must be at least 1 segment").ident;
|
||||
if &ident.to_string() == "Vec" {
|
||||
if quotes.takes_index {
|
||||
quote! { #id: #list(#index)?, }
|
||||
} else {
|
||||
quote! { #id: #list()?, }
|
||||
}
|
||||
} else {
|
||||
if quotes.takes_index {
|
||||
quote! { #id: #single(#index)?, }
|
||||
} else {
|
||||
quote! { #id: #single()?, }
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => panic!("rlp_derive not supported"),
|
||||
}
|
||||
}
|
||||
|
util/rlp_derive/src/en.rs (new file, 110 lines)
@ -0,0 +1,110 @@
|
||||
use {syn, quote};
|
||||
|
||||
pub fn impl_encodable(ast: &syn::DeriveInput) -> quote::Tokens {
|
||||
let body = match ast.body {
|
||||
syn::Body::Struct(ref s) => s,
|
||||
_ => panic!("#[derive(RlpEncodable)] is only defined for structs."),
|
||||
};
|
||||
|
||||
let stmts: Vec<_> = match *body {
|
||||
syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) =>
|
||||
fields.iter().enumerate().map(encodable_field_map).collect(),
|
||||
syn::VariantData::Unit => panic!("#[derive(RlpEncodable)] is not defined for Unit structs."),
|
||||
};
|
||||
|
||||
let name = &ast.ident;
|
||||
|
||||
let stmts_len = syn::Ident::new(stmts.len().to_string());
|
||||
let dummy_const = syn::Ident::new(format!("_IMPL_RLP_ENCODABLE_FOR_{}", name));
|
||||
let impl_block = quote! {
|
||||
impl rlp::Encodable for #name {
|
||||
fn rlp_append(&self, stream: &mut rlp::RlpStream) {
|
||||
stream.begin_list(#stmts_len);
|
||||
#(#stmts)*
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
quote! {
|
||||
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
|
||||
const #dummy_const: () = {
|
||||
extern crate rlp;
|
||||
#impl_block
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub fn impl_encodable_wrapper(ast: &syn::DeriveInput) -> quote::Tokens {
|
||||
let body = match ast.body {
|
||||
syn::Body::Struct(ref s) => s,
|
||||
_ => panic!("#[derive(RlpEncodableWrapper)] is only defined for structs."),
|
||||
};
|
||||
|
||||
let stmt = match *body {
|
||||
syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) => {
|
||||
if fields.len() == 1 {
|
||||
let field = fields.first().expect("fields.len() == 1; qed");
|
||||
encodable_field(0, field)
|
||||
} else {
|
||||
panic!("#[derive(RlpEncodableWrapper)] is only defined for structs with one field.")
|
||||
}
|
||||
},
|
||||
syn::VariantData::Unit => panic!("#[derive(RlpEncodableWrapper)] is not defined for Unit structs."),
|
||||
};
|
||||
|
||||
let name = &ast.ident;
|
||||
|
||||
let dummy_const = syn::Ident::new(format!("_IMPL_RLP_ENCODABLE_FOR_{}", name));
|
||||
let impl_block = quote! {
|
||||
impl rlp::Encodable for #name {
|
||||
fn rlp_append(&self, stream: &mut rlp::RlpStream) {
|
||||
#stmt
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
quote! {
|
||||
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
|
||||
const #dummy_const: () = {
|
||||
extern crate rlp;
|
||||
#impl_block
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn encodable_field_map(tuple: (usize, &syn::Field)) -> quote::Tokens {
|
||||
encodable_field(tuple.0, tuple.1)
|
||||
}
|
||||
|
||||
fn encodable_field(index: usize, field: &syn::Field) -> quote::Tokens {
|
||||
let ident = match field.ident {
|
||||
Some(ref ident) => ident.to_string(),
|
||||
None => index.to_string(),
|
||||
};
|
||||
|
||||
let id = syn::Ident::new(format!("self.{}", ident));
|
||||
|
||||
match field.ty {
|
||||
syn::Ty::Path(_, ref path) => {
|
||||
let top_segment = path.segments.first().expect("there must be at least 1 segment");
|
||||
let ident = &top_segment.ident;
|
||||
if &ident.to_string() == "Vec" {
|
||||
let inner_ident = match top_segment.parameters {
|
||||
syn::PathParameters::AngleBracketed(ref angle) => {
|
||||
let ty = angle.types.first().expect("Vec has only one angle bracketed type; qed");
|
||||
match *ty {
|
||||
syn::Ty::Path(_, ref path) => &path.segments.first().expect("there must be at least 1 segment").ident,
|
||||
_ => panic!("rlp_derive not supported"),
|
||||
}
|
||||
},
|
||||
_ => unreachable!("Vec has only one angle bracketed type; qed"),
|
||||
};
|
||||
quote! { stream.append_list::<#inner_ident, _>(&#id); }
|
||||
} else {
|
||||
quote! { stream.append(&#id); }
|
||||
}
|
||||
},
|
||||
_ => panic!("rlp_derive not supported"),
|
||||
}
|
||||
}
|
||||
|
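To make the generated code concrete, here is roughly what `impl_encodable` above expands to for a hypothetical two-field struct (the struct itself is ours, only for illustration): plain fields go through `stream.append`, while `Vec<T>` fields go through `stream.append_list` with the element type.

// #[derive(RlpEncodable)]
// struct Header { number: u64, extra: Vec<u8> }
//
// generates approximately:
impl rlp::Encodable for Header {
    fn rlp_append(&self, stream: &mut rlp::RlpStream) {
        stream.begin_list(2);                     // one slot per field
        stream.append(&self.number);              // plain field
        stream.append_list::<u8, _>(&self.extra); // Vec<T> field
    }
}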
util/rlp_derive/src/lib.rs (new file, 43 lines)
@ -0,0 +1,43 @@
|
||||
extern crate proc_macro;
|
||||
extern crate syn;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
|
||||
mod en;
|
||||
mod de;
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use en::{impl_encodable, impl_encodable_wrapper};
|
||||
use de::{impl_decodable, impl_decodable_wrapper};
|
||||
|
||||
#[proc_macro_derive(RlpEncodable)]
|
||||
pub fn encodable(input: TokenStream) -> TokenStream {
|
||||
let s = input.to_string();
|
||||
let ast = syn::parse_derive_input(&s).unwrap();
|
||||
let gen = impl_encodable(&ast);
|
||||
gen.parse().unwrap()
|
||||
}
|
||||
|
||||
#[proc_macro_derive(RlpEncodableWrapper)]
|
||||
pub fn encodable_wrapper(input: TokenStream) -> TokenStream {
|
||||
let s = input.to_string();
|
||||
let ast = syn::parse_derive_input(&s).unwrap();
|
||||
let gen = impl_encodable_wrapper(&ast);
|
||||
gen.parse().unwrap()
|
||||
}
|
||||
|
||||
#[proc_macro_derive(RlpDecodable)]
|
||||
pub fn decodable(input: TokenStream) -> TokenStream {
|
||||
let s = input.to_string();
|
||||
let ast = syn::parse_derive_input(&s).unwrap();
|
||||
let gen = impl_decodable(&ast);
|
||||
gen.parse().unwrap()
|
||||
}
|
||||
|
||||
#[proc_macro_derive(RlpDecodableWrapper)]
|
||||
pub fn decodable_wrapper(input: TokenStream) -> TokenStream {
|
||||
let s = input.to_string();
|
||||
let ast = syn::parse_derive_input(&s).unwrap();
|
||||
let gen = impl_decodable_wrapper(&ast);
|
||||
gen.parse().unwrap()
|
||||
}
|
util/rlp_derive/tests/rlp.rs (new file, 44 lines)
@ -0,0 +1,44 @@
|
||||
extern crate rlp;
|
||||
#[macro_use]
|
||||
extern crate rlp_derive;
|
||||
|
||||
use rlp::{encode, decode};
|
||||
|
||||
#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)]
|
||||
struct Foo {
|
||||
a: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, RlpEncodableWrapper, RlpDecodableWrapper)]
|
||||
struct FooWrapper {
|
||||
a: String,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_encode_foo() {
|
||||
let foo = Foo {
|
||||
a: "cat".into(),
|
||||
};
|
||||
|
||||
let expected = vec![0xc4, 0x83, b'c', b'a', b't'];
|
||||
let out = encode(&foo).into_vec();
|
||||
assert_eq!(out, expected);
|
||||
|
||||
let decoded = decode(&expected);
|
||||
assert_eq!(foo, decoded);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_encode_foo_wrapper() {
|
||||
let foo = FooWrapper {
|
||||
a: "cat".into(),
|
||||
};
|
||||
|
||||
let expected = vec![0x83, b'c', b'a', b't'];
|
||||
let out = encode(&foo).into_vec();
|
||||
assert_eq!(out, expected);
|
||||
|
||||
let decoded = decode(&expected);
|
||||
assert_eq!(foo, decoded);
|
||||
}
|
||||
|
@ -21,7 +21,6 @@ use std::collections::HashMap;
use std::sync::Arc;
use parking_lot::RwLock;
use heapsize::HeapSizeOf;
use itertools::Itertools;
use rlp::*;
use hashdb::*;
use memorydb::*;

@ -432,7 +431,9 @@ impl JournalDB for EarlyMergeDB {
// - we write the key into our journal for this block;

r.begin_list(inserts.len());
inserts.iter().foreach(|&(k, _)| {r.append(&k);});
for &(k, _) in &inserts {
r.append(&k);
}
r.append_list(&removes);
Self::insert_keys(&inserts, &*self.backing, self.column, &mut refs, batch, trace);

@ -493,6 +493,7 @@ impl Database {
}
opts.set_parsed_options(&format!("max_total_wal_size={}", 64 * 1024 * 1024))?;
opts.set_parsed_options("verify_checksums_in_compaction=0")?;
opts.set_parsed_options("keep_log_file_num=1")?;
opts.set_max_open_files(config.max_open_files);
opts.create_if_missing(true);
opts.set_use_fsync(false);

@ -106,7 +106,6 @@ extern crate rlp;
extern crate regex;
extern crate lru_cache;
extern crate heapsize;
extern crate itertools;
extern crate ethcore_logger;

#[macro_use]

@ -153,7 +152,6 @@ pub use bigint::hash;

pub use ansi_term::{Colour, Style};
pub use heapsize::HeapSizeOf;
pub use itertools::Itertools;
pub use parking_lot::{Condvar, Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard};

/// 160-bit integer representing account address

@ -15,7 +15,6 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::fmt;
|
||||
use itertools::Itertools;
|
||||
use hashdb::*;
|
||||
use nibbleslice::*;
|
||||
use rlp::*;
|
||||
@ -276,9 +275,15 @@ impl<'a> TrieDBIterator<'a> {
|
||||
|
||||
/// Descend into a payload.
|
||||
fn descend(&mut self, d: &[u8]) -> super::Result<()> {
|
||||
let node = Node::decoded(&self.db.get_raw_or_lookup(d)?).into();
|
||||
Ok(self.descend_into_node(node))
|
||||
}
|
||||
|
||||
/// Descend into a payload.
|
||||
fn descend_into_node(&mut self, node: OwnedNode) {
|
||||
self.trail.push(Crumb {
|
||||
status: Status::Entering,
|
||||
node: Node::decoded(&self.db.get_raw_or_lookup(d)?).into(),
|
||||
node: node,
|
||||
});
|
||||
match &self.trail.last().expect("just pushed item; qed").node {
|
||||
&OwnedNode::Leaf(ref n, _) | &OwnedNode::Extension(ref n, _) => {
|
||||
@ -286,14 +291,20 @@ impl<'a> TrieDBIterator<'a> {
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// The present key.
|
||||
fn key(&self) -> Bytes {
|
||||
// collapse the key_nibbles down to bytes.
|
||||
self.key_nibbles.iter().step(2).zip(self.key_nibbles.iter().skip(1).step(2)).map(|(h, l)| h * 16 + l).collect()
|
||||
let nibbles = &self.key_nibbles;
|
||||
let mut i = 1;
|
||||
let mut result = Bytes::with_capacity(nibbles.len() / 2);
|
||||
let len = nibbles.len();
|
||||
while i < len {
|
||||
result.push(nibbles[i - 1] * 16 + nibbles[i]);
|
||||
i += 2;
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
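The rewritten `key` pairs consecutive 4-bit nibbles into bytes with a plain index loop, part of dropping the `itertools` dependency from this crate (its Cargo entry, import, and re-export are removed elsewhere in this change). A standalone sketch of the pairing, with the helper name ours:

fn collapse_nibbles(nibbles: &[u8]) -> Vec<u8> {
    let mut out = Vec::with_capacity(nibbles.len() / 2);
    let mut i = 1;
    while i < nibbles.len() {
        out.push(nibbles[i - 1] * 16 + nibbles[i]); // high nibble, then low nibble
        i += 2;
    }
    out
}

fn main() {
    // nibbles of the bytes 0x64 0x6f ("do")
    assert_eq!(collapse_nibbles(&[6, 4, 6, 15]), vec![0x64, 0x6f]);
    // a trailing unpaired nibble is ignored, matching the old step/zip iterator version
    assert_eq!(collapse_nibbles(&[6, 4, 6]), vec![0x64]);
}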
@ -311,52 +322,67 @@ impl<'a> Iterator for TrieDBIterator<'a> {
|
||||
type Item = TrieItem<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
enum IterStep {
|
||||
Continue,
|
||||
PopTrail,
|
||||
Descend(super::Result<DBValue>),
|
||||
}
|
||||
|
||||
loop {
|
||||
let b = match self.trail.last_mut() {
|
||||
Some(mut b) => { b.increment(); b.clone() },
|
||||
None => return None,
|
||||
let iter_step = {
|
||||
match self.trail.last_mut() {
|
||||
Some(b) => { b.increment(); },
|
||||
None => return None,
|
||||
}
|
||||
|
||||
let b = self.trail.last().expect("trail.last_mut().is_some(); qed");
|
||||
|
||||
match (b.status.clone(), &b.node) {
|
||||
(Status::Exiting, n) => {
|
||||
match *n {
|
||||
OwnedNode::Leaf(ref n, _) | OwnedNode::Extension(ref n, _) => {
|
||||
let l = self.key_nibbles.len();
|
||||
self.key_nibbles.truncate(l - n.len());
|
||||
},
|
||||
OwnedNode::Branch(_, _) => { self.key_nibbles.pop(); },
|
||||
_ => {}
|
||||
}
|
||||
IterStep::PopTrail
|
||||
},
|
||||
(Status::At, &OwnedNode::Leaf(_, ref v)) | (Status::At, &OwnedNode::Branch(_, Some(ref v))) => {
|
||||
return Some(Ok((self.key(), v.clone())));
|
||||
},
|
||||
(Status::At, &OwnedNode::Extension(_, ref d)) => IterStep::Descend(self.db.get_raw_or_lookup(&*d)),
|
||||
(Status::At, &OwnedNode::Branch(_, _)) => IterStep::Continue,
|
||||
(Status::AtChild(i), &OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => {
|
||||
match i {
|
||||
0 => self.key_nibbles.push(0),
|
||||
i => *self.key_nibbles.last_mut()
|
||||
.expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8,
|
||||
}
|
||||
IterStep::Descend(self.db.get_raw_or_lookup(&*children[i]))
|
||||
},
|
||||
(Status::AtChild(i), &OwnedNode::Branch(_, _)) => {
|
||||
if i == 0 {
|
||||
self.key_nibbles.push(0);
|
||||
}
|
||||
IterStep::Continue
|
||||
},
|
||||
_ => panic!() // Should never see Entering or AtChild without a Branch here.
|
||||
}
|
||||
};
|
||||
match (b.status, b.node) {
|
||||
(Status::Exiting, n) => {
|
||||
match n {
|
||||
OwnedNode::Leaf(n, _) | OwnedNode::Extension(n, _) => {
|
||||
let l = self.key_nibbles.len();
|
||||
self.key_nibbles.truncate(l - n.len());
|
||||
},
|
||||
OwnedNode::Branch(_, _) => { self.key_nibbles.pop(); },
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match iter_step {
|
||||
IterStep::PopTrail => {
|
||||
self.trail.pop();
|
||||
// continue
|
||||
},
|
||||
(Status::At, OwnedNode::Leaf(_, v)) | (Status::At, OwnedNode::Branch(_, Some(v))) => {
|
||||
return Some(Ok((self.key(), v)));
|
||||
IterStep::Descend(Ok(d)) => {
|
||||
self.descend_into_node(Node::decoded(&d).into())
|
||||
},
|
||||
(Status::At, OwnedNode::Extension(_, d)) => {
|
||||
if let Err(e) = self.descend(&*d) {
|
||||
return Some(Err(e));
|
||||
}
|
||||
// continue
|
||||
},
|
||||
(Status::At, OwnedNode::Branch(_, _)) => {},
|
||||
(Status::AtChild(i), OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => {
|
||||
match i {
|
||||
0 => self.key_nibbles.push(0),
|
||||
i => *self.key_nibbles.last_mut()
|
||||
.expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8,
|
||||
}
|
||||
if let Err(e) = self.descend(&*children[i]) {
|
||||
return Some(Err(e));
|
||||
}
|
||||
// continue
|
||||
},
|
||||
(Status::AtChild(i), OwnedNode::Branch(_, _)) => {
|
||||
if i == 0 {
|
||||
self.key_nibbles.push(0);
|
||||
}
|
||||
// continue
|
||||
},
|
||||
_ => panic!() // Should never see Entering or AtChild without a Branch here.
|
||||
IterStep::Descend(Err(e)) => {
|
||||
return Some(Err(e))
|
||||
}
|
||||
IterStep::Continue => {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|